From 3d38421628ca65367964b36974e0ac0decba58b7 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 8 Dec 2020 17:01:28 +0000 Subject: [PATCH 001/503] Calls: expand vnext metadata function --- Cargo.lock | 1 + frame/support/Cargo.toml | 2 ++ .../procedural/src/pallet/expand/call.rs | 23 +++++++++++++++++++ frame/support/src/lib.rs | 2 ++ frame/support/src/metadata.rs | 1 + .../test/tests/pallet_compatibility.rs | 3 ++- .../tests/pallet_compatibility_instance.rs | 3 ++- 7 files changed, 33 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ff6f679ed78bb..a8e00db681b40 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1631,6 +1631,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", "pretty_assertions", + "scale-info", "serde", "smallvec 1.5.0", "sp-api", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 0189dc172fb65..802f4ad041923 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -17,6 +17,7 @@ log = "0.4" serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } frame-metadata = { version = "12.0.0", default-features = false, path = "../metadata" } +scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } @@ -44,6 +45,7 @@ default = ["std"] std = [ "once_cell", "serde", + "scale-info/std", "sp-io/std", "codec/std", "sp-std/std", diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 2709995bf88b3..76dae1b5b08d8 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ 
b/frame/support/procedural/src/pallet/expand/call.rs @@ -70,6 +70,12 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { .collect::>() }); + let args_is_compact = def.call.methods.iter().map(|method| { + method.args.iter() + .map(|(is_compact, _, _)| is_compact) + .collect::>() + }); + quote::quote_spanned!(def.call.attr_span => #[derive( #frame_support::RuntimeDebugNoBound, @@ -196,6 +202,23 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { }, )* ] } + + #[doc(hidden)] + pub fn call_functions_vnext() -> Vec<#frame_support::metadata::vnext::FunctionMetadata> { + vec![ #( + #frame_support::metadata::vnext::FunctionMetadata { + name: stringify!(#fn_name), + arguments: vec![ #( + #frame_support::metadata::vnext::FunctionArgumentMetadata { + name: stringify!(#args_name), + ty: #frame_support::scale_info::meta_type::<#args_type>(), + is_compact: #args_is_compact, + }, + )* ], + documentation: vec![ #( #fn_doc ),* ], + }, + )* ] + } } ) } diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index da4bfbb5d86bf..7f4e42aeb4219 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -27,6 +27,8 @@ pub use sp_tracing; #[cfg(feature = "std")] pub use serde; +#[doc(hidden)] +pub use scale_info; pub use sp_core::Void; #[doc(hidden)] pub use sp_std; diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index a60481933701b..6f49275c6421a 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -20,6 +20,7 @@ pub use frame_metadata::{ DefaultByteGetter, RuntimeMetadataPrefixed, StorageEntryMetadata, StorageMetadata, StorageEntryType, StorageEntryModifier, DefaultByte, StorageHasher, ModuleErrorMetadata, ExtrinsicMetadata, + vnext, }; /// Implements the metadata support for the given runtime and all its modules. 
diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 7cc3392ef0427..14e00e71b66f9 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -94,6 +94,7 @@ mod pallet_old { #[frame_support::pallet] pub mod pallet { use super::SomeAssociation; + use frame_support::scale_info; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; use frame_system::ensure_root; @@ -101,7 +102,7 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config { type Balance: Parameter + codec::HasCompact + From + Into + Default - + MaybeSerializeDeserialize + SomeAssociation; + + MaybeSerializeDeserialize + SomeAssociation + scale_info::TypeInfo; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index 05ad44e7a7ff1..b7b509c4c4ea5 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -84,6 +84,7 @@ mod pallet_old { #[frame_support::pallet] pub mod pallet { + use frame_support::scale_info; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; use frame_system::ensure_root; @@ -91,7 +92,7 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config { type Balance: Parameter + codec::HasCompact + From + Into + Default - + MaybeSerializeDeserialize; + + MaybeSerializeDeserialize + scale_info::TypeInfo; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; From 123295125561c83e2ad5100e890423b717bee1d8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 8 Dec 2020 17:28:09 +0000 Subject: [PATCH 002/503] Event: expand vnext metadata function # Conflicts: # frame/support/procedural/src/pallet/parse/event.rs --- Cargo.lock | 19 
+++++++++++ .../procedural/src/pallet/expand/event.rs | 33 +++++++++++++++++-- .../procedural/src/pallet/parse/event.rs | 15 ++++++--- 3 files changed, 61 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a8e00db681b40..2949a0636d28c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7572,6 +7572,25 @@ dependencies = [ "wasm-timer", ] +[[package]] +name = "scale-info" +version = "0.4.1" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive", +] + +[[package]] +name = "scale-info-derive" +version = "0.2.1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 76eda4448ba1a..778bd42f89f51 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -43,8 +43,10 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { let event_use_gen = &event.gen_kind.type_use_gen(); let event_impl_gen= &event.gen_kind.type_impl_gen(); let metadata = event.metadata.iter() - .map(|(ident, args, docs)| { - let name = format!("{}", ident); + .map(|event| { + let name = format!("{}", event.name); + let args = event.args.iter().map(|arg| arg.1.clone()); + let docs = &event.docs; quote::quote!( #frame_support::event::EventMetadata { name: #frame_support::event::DecodeDifferent::Encode(#name), @@ -58,6 +60,27 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { ) }); + let metadata_vnext = event.metadata.iter() + .map(|event| { + let name = format!("{}", event.name); + let args = event.args + .iter() + .map(|(ty, name)| { + quote::quote!( + #frame_support::metadata::vnext::TypeSpec::new::<#ty>(#name) + ) + }); + let docs = &event.docs; + quote::quote!( + #frame_support::metadata::vnext::EventMetadata { + name: #name, + arguments: vec![ #( #args, )* ], + 
documentation: vec![ #( #docs, )* ], + }, + ) + }); + + let event_item_span = def.item.content.as_mut().expect("Checked by def parser").1[event.index].span(); @@ -138,6 +161,12 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { pub fn metadata() -> &'static [#frame_support::event::EventMetadata] { &[ #( #metadata )* ] } + + #[allow(dead_code)] + #[doc(hidden)] + pub fn metadata_vnext() -> Vec<#frame_support::metadata::vnext::EventMetadata> { + vec![ #( #metadata_vnext )* ] + } } ) } diff --git a/frame/support/procedural/src/pallet/parse/event.rs b/frame/support/procedural/src/pallet/parse/event.rs index ef0c3e2e92855..ecf57f15c1f52 100644 --- a/frame/support/procedural/src/pallet/parse/event.rs +++ b/frame/support/procedural/src/pallet/parse/event.rs @@ -36,7 +36,7 @@ pub struct EventDef { /// The keyword Event used (contains span). pub event: keyword::Event, /// Event metadatas: `(name, args, docs)`. - pub metadata: Vec<(syn::Ident, Vec, Vec)>, + pub metadata: Vec, /// A set of usage of instance, must be check for consistency with trait. pub instances: Vec, /// The kind of generic the type `Event` has. @@ -47,6 +47,13 @@ pub struct EventDef { pub where_clause: Option, } +/// Metadata for a pallet event variant. +pub struct EventDefMetadata { + pub name: syn::Ident, + pub args: Vec<(syn::Type, String)>, + pub docs: Vec, +} + /// Attribute for Event: defines metadata name to use. 
/// /// Syntax is: @@ -196,14 +203,14 @@ impl EventDef { let args = variant.fields.iter() .map(|field| { metadata.iter().find(|m| m.0 == field.ty) - .map(|m| m.1.clone()) + .cloned() .unwrap_or_else(|| { - clean_type_string(&field.ty.to_token_stream().to_string()) + (field.ty.clone(), clean_type_string(&field.ty.to_token_stream().to_string())) }) }) .collect(); - (name, args, docs) + EventDefMetadata { name, args, docs } }) .collect(); From 04205778d3abf84e05e4d5659d4192d33b3228f5 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 8 Dec 2020 17:30:00 +0000 Subject: [PATCH 003/503] Start to add some TypeInfo bounds --- frame/support/test/tests/pallet_compatibility.rs | 2 +- frame/support/test/tests/pallet_compatibility_instance.rs | 2 +- frame/support/test/tests/pallet_instance.rs | 3 ++- frame/system/src/lib.rs | 3 ++- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 14e00e71b66f9..63c119974e296 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -102,7 +102,7 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config { type Balance: Parameter + codec::HasCompact + From + Into + Default - + MaybeSerializeDeserialize + SomeAssociation + scale_info::TypeInfo; + + MaybeSerializeDeserialize + SomeAssociation + scale_info::TypeInfo + 'static; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index b7b509c4c4ea5..5f3bb89b76cb8 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -92,7 +92,7 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config { type Balance: Parameter + codec::HasCompact + From + Into + 
Default - + MaybeSerializeDeserialize + scale_info::TypeInfo; + + MaybeSerializeDeserialize + scale_info::TypeInfo + 'static; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 2317fb05a2be3..923986e04b60d 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -29,6 +29,7 @@ use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; #[frame_support::pallet] pub mod pallet { use sp_std::any::TypeId; + use frame_support::scale_info; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; @@ -38,7 +39,7 @@ pub mod pallet { pub trait Config: frame_system::Config { #[pallet::constant] type MyGetParam: Get; - type Balance: Parameter + Default; + type Balance: Parameter + Default + scale_info::TypeInfo + 'static; type Event: From> + IsType<::Event>; } diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 463712ba68df5..eafee334885ab 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -125,6 +125,7 @@ use frame_support::{ extract_actual_weight, PerDispatchClass, }, dispatch::DispatchResultWithPostInfo, + scale_info, }; use codec::{Encode, Decode, FullCodec, EncodeLike}; @@ -208,7 +209,7 @@ pub trait Config: 'static + Eq + Clone { /// The user account identifier type for the runtime. type AccountId: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord - + Default; + + Default + scale_info::TypeInfo + 'static; /// Converting trait to take a source type and convert to `AccountId`. 
/// From c8261af3e6cefe92032b56f36dd36a5174f053a7 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 8 Dec 2020 17:34:46 +0000 Subject: [PATCH 004/503] Implement TypeInfo for sr25519::Public --- Cargo.lock | 1 + primitives/core/Cargo.toml | 1 + primitives/core/src/sr25519.rs | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 2949a0636d28c..391a7c18f4a88 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8287,6 +8287,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", + "scale-info", "schnorrkel", "secrecy", "serde", diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 1d0ff4f208282..80b093ec2eba4 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -33,6 +33,7 @@ zeroize = { version = "1.2.0", default-features = false } secrecy = { version = "0.7.0", default-features = false } lazy_static = { version = "1.4.0", default-features = false, optional = true } parking_lot = { version = "0.10.0", optional = true } +scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-debug-derive = { version = "2.0.0", path = "../debug-derive" } sp-externalities = { version = "0.8.0", optional = true, path = "../externalities" } sp-storage = { version = "2.0.0", default-features = false, path = "../storage" } diff --git a/primitives/core/src/sr25519.rs b/primitives/core/src/sr25519.rs index 37926d8f801c5..392cad5a0b364 100644 --- a/primitives/core/src/sr25519.rs +++ b/primitives/core/src/sr25519.rs @@ -60,7 +60,7 @@ pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"sr25"); /// An Schnorrkel/Ristretto x25519 ("sr25519") public key. 
#[cfg_attr(feature = "full_crypto", derive(Hash))] -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, ::scale_info::TypeInfo)] pub struct Public(pub [u8; 32]); /// An Schnorrkel/Ristretto x25519 ("sr25519") key pair. From be39af70d0ee5b3ae2b2457166140d03348f9381 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Dec 2020 11:36:53 +0000 Subject: [PATCH 005/503] Add 'static bound to event impl generics From 7a4a30870e6dbabbae3fc18ce6b8683d8a095a2d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Dec 2020 11:37:28 +0000 Subject: [PATCH 006/503] Add some TypeInfo bounds to pallet --- frame/support/test/tests/pallet.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 1e4bfa7474e6e..ae69c84470ef2 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -22,6 +22,7 @@ use frame_support::{ }, dispatch::{UnfilteredDispatchable, Parameter}, storage::unhashed, + scale_info, }; use sp_runtime::{traits::Block as _, DispatchError}; use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; @@ -47,10 +48,10 @@ impl From for u64 { fn from(_t: SomeType6) -> Self { 0u64 } } pub struct SomeType7; impl From for u64 { fn from(_t: SomeType7) -> Self { 0u64 } } -pub trait SomeAssociation1 { type _1: Parameter; } +pub trait SomeAssociation1 { type _1: Parameter + scale_info::TypeInfo; } impl SomeAssociation1 for u64 { type _1 = u64; } -pub trait SomeAssociation2 { type _2: Parameter; } +pub trait SomeAssociation2 { type _2: Parameter + scale_info::TypeInfo; } impl SomeAssociation2 for u64 { type _2 = u64; } #[frame_support::pallet] @@ -59,6 +60,7 @@ pub mod pallet { SomeType1, SomeType2, SomeType3, SomeType4, SomeType5, SomeType6, SomeType7, SomeAssociation1, SomeAssociation2, }; + use 
frame_support::scale_info; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; @@ -66,7 +68,9 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config - where ::AccountId: From + SomeAssociation1, + where + ::AccountId: From + SomeAssociation1, + ::_1: scale_info::TypeInfo, { /// Some comment /// Some comment @@ -81,7 +85,7 @@ pub mod pallet { #[pallet::constant] type MyGetParam3: Get<::_1>; - type Balance: Parameter + Default; + type Balance: Parameter + Default + scale_info::TypeInfo; type Event: From> + IsType<::Event>; } From f9ef2e2abdeafe57c214ec7e2ee23d36729b44d8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Dec 2020 11:58:33 +0000 Subject: [PATCH 007/503] vnext V12 metadata # Conflicts: # frame/metadata/src/vnext.rs --- frame/metadata/src/vnext.rs | 288 ++++++++++++++++++++++++++++++++++++ 1 file changed, 288 insertions(+) create mode 100644 frame/metadata/src/vnext.rs diff --git a/frame/metadata/src/vnext.rs b/frame/metadata/src/vnext.rs new file mode 100644 index 0000000000000..5bf56c244ccca --- /dev/null +++ b/frame/metadata/src/vnext.rs @@ -0,0 +1,288 @@ +// This file is part of Substrate. + +// Copyright (C) 2018-2020 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Decodable variant of the RuntimeMetadata. +//! +//! This really doesn't belong here, but is necessary for the moment. In the future +//! 
it should be removed entirely to an external module for shimming on to the +//! codec-encoded metadata. + +#![cfg_attr(not(feature = "std"), no_std)] + +#[cfg(feature = "std")] +use codec::Decode; +use codec::Encode; +use sp_core::RuntimeDebug; +use sp_std::vec::Vec; + +use scale_info::{ + form::{ + CompactForm, + Form, + MetaForm, + }, + meta_type, + IntoCompact, + Registry, + TypeInfo, +}; + +pub type RuntimeMetadataLastVersion = RuntimeMetadataV12; + +/// Metadata prefixed by a u32 for reserved usage +#[derive(Eq, Encode, PartialEq, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct RuntimeMetadataPrefixed(pub u32, pub RuntimeMetadata); + +impl From> for RuntimeMetadataPrefixed { + fn from(metadata: RuntimeMetadataLastVersion) -> RuntimeMetadataPrefixed { + RuntimeMetadataPrefixed(super::META_RESERVED, RuntimeMetadata::V12(metadata)) + } +} + +impl From> for sp_core::OpaqueMetadata { + fn from(metadata: RuntimeMetadataPrefixed) -> Self { + sp_core::OpaqueMetadata::new(metadata.encode()) + } +} + +/// The metadata of a runtime. +/// The version ID encoded/decoded through +/// the enum nature of `RuntimeMetadata`. +#[derive(Eq, Encode, PartialEq, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub enum RuntimeMetadata { + /// Version 11 for runtime metadata. + V12(RuntimeMetadataV12), +} + +/// The metadata of a runtime. +#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct RuntimeMetadataV12 { + /// Metadata of all the modules. + pub modules: Vec>, + // /// Metadata of the extrinsic. + // pub extrinsic: ExtrinsicMetadata, +} + +impl IntoCompact for RuntimeMetadataV12 { + type Output = RuntimeMetadataV12; + + fn into_compact(self, registry: &mut Registry) -> Self::Output { + RuntimeMetadataV12 { + modules: registry.map_into_compact(self.modules), + // extrinsic: self.extrinsic.into_compact(registry), + } + } +} + +/// Metadata of the extrinsic used by the runtime. 
+#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct ExtrinsicMetadata { + /// Extrinsic version. + pub version: u8, + /// The signed extensions in the order they appear in the extrinsic. + pub signed_extensions: Vec, +} + +impl IntoCompact for ExtrinsicMetadata { + type Output = ExtrinsicMetadata; + + fn into_compact(self, registry: &mut Registry) -> Self::Output { + ExtrinsicMetadata { + version: self.version, + signed_extensions: registry.register_types(self.signed_extensions), + } + } +} + +/// All metadata about an runtime module. +#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct ModuleMetadata { + pub name: T::String, + // pub storage: Option, StorageMetadata>>, + pub calls: Option>>, + pub event: Option>>, + // pub constants: DFnA, + // pub errors: DFnA, +} + +impl IntoCompact for ModuleMetadata { + type Output = ModuleMetadata; + + fn into_compact(self, registry: &mut Registry) -> Self::Output { + ModuleMetadata { + name: self.name.into_compact(registry), + calls: self.calls.map(|calls| registry.map_into_compact(calls)), + event: self.event.map(|event| registry.map_into_compact(event)), + } + } +} + +/// All the metadata about a function. +#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct FunctionMetadata { + pub name: T::String, + pub arguments: Vec>, + pub documentation: Vec, +} + +impl IntoCompact for FunctionMetadata { + type Output = FunctionMetadata; + + fn into_compact(self, registry: &mut Registry) -> Self::Output { + FunctionMetadata { + name: self.name.into_compact(registry), + arguments: registry.map_into_compact(self.arguments), + documentation: registry.map_into_compact(self.documentation), + } + } +} + +/// All the metadata about a function argument. 
+#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct FunctionArgumentMetadata { + pub name: T::String, + pub ty: T::Type, + pub is_compact: bool, +} + +impl IntoCompact for FunctionArgumentMetadata { + type Output = FunctionArgumentMetadata; + + fn into_compact(self, registry: &mut Registry) -> Self::Output { + FunctionArgumentMetadata { + name: self.name.into_compact(registry), + ty: registry.register_type(&self.ty), + is_compact: self.is_compact, + } + } +} + +/// All the metadata about an outer event. +#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct OuterEventMetadata { + pub name: T::String, + pub events: Vec>, +} + +impl IntoCompact for OuterEventMetadata { + type Output = OuterEventMetadata; + + fn into_compact(self, registry: &mut Registry) -> Self::Output { + OuterEventMetadata { + name: self.name.into_compact(registry), + events: registry.map_into_compact(self.events), + } + } +} + +/// Metadata about a module event. +#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct ModuleEventMetadata { + pub name: T::String, + pub events: Vec>, +} + +impl IntoCompact for ModuleEventMetadata { + type Output = ModuleEventMetadata; + + fn into_compact(self, registry: &mut Registry) -> Self::Output { + ModuleEventMetadata { + name: self.name.into_compact(registry), + events: registry.map_into_compact(self.events), + } + } +} + +/// All the metadata about an event. 
+#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct EventMetadata { + pub name: T::String, + pub arguments: Vec>, + pub documentation: Vec, +} + +impl IntoCompact for EventMetadata { + type Output = EventMetadata; + + fn into_compact(self, registry: &mut Registry) -> Self::Output { + EventMetadata { + name: self.name.into_compact(registry), + arguments: registry.map_into_compact(self.arguments), + documentation: registry.map_into_compact(self.documentation), + } + } +} + +/// A type specification. +/// +/// This contains the actual type as well as an optional compile-time +/// known displayed representation of the type. This is useful for cases +/// where the type is used through a type alias in order to provide +/// information about the alias name. +/// +/// # Examples +/// +/// Consider the following Rust function: +/// ```no_compile +/// fn is_sorted(input: &[i32], pred: Predicate) -> bool; +/// ``` +/// In this above example `input` would have no displayable name, +/// `pred`'s display name is `Predicate` and the display name of +/// the return type is simply `bool`. Note that `Predicate` could +/// simply be a type alias to `fn(i32, i32) -> Ordering`. +#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Decode))] +pub struct TypeSpec { + /// The actual type. + ty: T::Type, + /// The compile-time known displayed representation of the type. + name: T::String, +} + +impl IntoCompact for TypeSpec { + type Output = TypeSpec; + + fn into_compact(self, registry: &mut Registry) -> Self::Output { + TypeSpec { + ty: registry.register_type(&self.ty), + name: self.name.into_compact(registry), + } + } +} + +impl TypeSpec { + /// Creates a new type specification without a display name. 
+ pub fn new(name: &'static str) -> Self + where + T: TypeInfo + 'static, + { + Self { + ty: meta_type::(), + name, + } + } +} From a1829fc5b97f9aca042000c72c49515bd937ba06 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Dec 2020 15:33:16 +0000 Subject: [PATCH 008/503] Add initial vnext metadata test --- frame/support/test/tests/construct_runtime.rs | 231 ++++++++++++++++++ 1 file changed, 231 insertions(+) diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 2b9f026487b19..ffd7c1cdcec89 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -508,6 +508,237 @@ fn test_metadata() { pretty_assertions::assert_eq!(Runtime::metadata().1, RuntimeMetadata::V12(expected_metadata)); } +#[test] +fn test_metadata_vnext() { + use frame_metadata::*; + use frame_support::scale_info::form::{MetaForm}; + let expected_metadata: vnext::RuntimeMetadataLastVersion = vnext::RuntimeMetadataLastVersion { + modules: vec![ + vnext::ModuleMetadata { + name: "System", + // storage: None, + calls: Some(vec![ + vnext::FunctionMetadata { + name: "noop", + arguments: vec![], + documentation: vec![], + }]), + event: Some(vec![ + vnext::EventMetadata { + name: "ExtrinsicSuccess", + arguments: vec![], + documentation: vec![], + }, + vnext::EventMetadata { + name: "ExtrinsicFailed", + arguments: vec![], + documentation: vec![], + }, + vnext::EventMetadata { + name: "Ignore", + arguments: vec![ + vnext::TypeSpec::new::<::BlockNumber>("BlockNumber") + ], + documentation: vec![], + } + ]), + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 30, + }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module1_1"), + // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { + // prefix: DecodeDifferent::Encode("Instance1Module"), + // entries: DecodeDifferent::Encode(&[]), + // }))), + 
// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ + // FunctionMetadata { + // name: DecodeDifferent::Encode("fail"), + // arguments: DecodeDifferent::Encode(&[]), + // documentation: DecodeDifferent::Encode(&[]), + // }, + // ]))), + // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { + // name: DecodeDifferent::Encode("A"), + // arguments: DecodeDifferent::Encode(&["AccountId"]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 31, + // }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module2"), + // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { + // prefix: DecodeDifferent::Encode("Module"), + // entries: DecodeDifferent::Encode(&[]), + // }))), + // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ + // FunctionMetadata { + // name: DecodeDifferent::Encode("fail"), + // arguments: DecodeDifferent::Encode(&[]), + // documentation: DecodeDifferent::Encode(&[]), + // }, + // ]))), + // event: Some(DecodeDifferent::Encode(FnEncode(|| &[ + // EventMetadata { + // name: DecodeDifferent::Encode("A"), + // arguments: DecodeDifferent::Encode(&[]), + // documentation: DecodeDifferent::Encode(&[]), + // }, + // ]))), + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 32, + // }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module1_2"), + // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { + // prefix: DecodeDifferent::Encode("Instance2Module"), + // entries: DecodeDifferent::Encode(&[]), + // }))), + // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { + // name: DecodeDifferent::Encode("fail"), + // arguments: DecodeDifferent::Encode(&[]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // event: Some(DecodeDifferent::Encode(FnEncode(|| 
&[EventMetadata { + // name: DecodeDifferent::Encode("A"), + // arguments: DecodeDifferent::Encode(&["AccountId"]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 33, + // }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module1_3"), + // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { + // prefix: DecodeDifferent::Encode("Instance3Module"), + // entries: DecodeDifferent::Encode(&[]), + // }))), + // calls: None, + // event: None, + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 6, + // }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module1_4"), + // storage: None, + // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { + // name: DecodeDifferent::Encode("fail"), + // arguments: DecodeDifferent::Encode(&[]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // event: None, + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 3, + // }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module1_5"), + // storage: None, + // calls: None, + // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { + // name: DecodeDifferent::Encode("A"), + // arguments: DecodeDifferent::Encode(&["AccountId"]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 4, + // }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module1_6"), + // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { + // prefix: DecodeDifferent::Encode("Instance6Module"), + // entries: DecodeDifferent::Encode(&[]), + // }))), + // calls: 
Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { + // name: DecodeDifferent::Encode("fail"), + // arguments: DecodeDifferent::Encode(&[]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { + // name: DecodeDifferent::Encode("A"), + // arguments: DecodeDifferent::Encode(&["AccountId"]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 1, + // }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module1_7"), + // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { + // prefix: DecodeDifferent::Encode("Instance7Module"), + // entries: DecodeDifferent::Encode(&[]), + // }))), + // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { + // name: DecodeDifferent::Encode("fail"), + // arguments: DecodeDifferent::Encode(&[]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { + // name: DecodeDifferent::Encode("A"), + // arguments: DecodeDifferent::Encode(&["AccountId"]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 2, + // }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module1_8"), + // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { + // prefix: DecodeDifferent::Encode("Instance8Module"), + // entries: DecodeDifferent::Encode(&[]), + // }))), + // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { + // name: DecodeDifferent::Encode("fail"), + // arguments: DecodeDifferent::Encode(&[]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { + // name: 
DecodeDifferent::Encode("A"), + // arguments: DecodeDifferent::Encode(&["AccountId"]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 12, + // }, + // ModuleMetadata { + // name: DecodeDifferent::Encode("Module1_9"), + // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { + // prefix: DecodeDifferent::Encode("Instance9Module"), + // entries: DecodeDifferent::Encode(&[]), + // }))), + // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { + // name: DecodeDifferent::Encode("fail"), + // arguments: DecodeDifferent::Encode(&[]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { + // name: DecodeDifferent::Encode("A"), + // arguments: DecodeDifferent::Encode(&["AccountId"]), + // documentation: DecodeDifferent::Encode(&[]), + // }]))), + // constants: DecodeDifferent::Encode(FnEncode(|| &[])), + // errors: DecodeDifferent::Encode(FnEncode(|| &[])), + // index: 13, + // }, + ], + // extrinsic: ExtrinsicMetadata { + // version: 4, + // signed_extensions: vec![DecodeDifferent::Encode("UnitSignedExtension")], + // }, + }; + pretty_assertions::assert_eq!(Runtime::metadata_vnext().1, vnext::RuntimeMetadata::V12(expected_metadata)); +} + #[test] fn pallet_in_runtime_is_correct() { assert_eq!(PalletInfo::index::().unwrap(), 30); From 9143bf14c11eeee2a42e83d2893c22dace0a1ad0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Dec 2020 15:33:50 +0000 Subject: [PATCH 009/503] Generate event metadata vnext functions --- frame/support/src/event.rs | 52 +++++++++++++++++++++++++++++++++++++- 1 file changed, 51 insertions(+), 1 deletion(-) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index b55f5d7e0b2ae..94fe4754f5376 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -22,6 
+22,7 @@ // along with Substrate. If not, see . pub use frame_metadata::{EventMetadata, DecodeDifferent, OuterEventMetadata, FnEncode}; +pub use frame_metadata::vnext; /// Implement the `Event` for a module. /// @@ -547,6 +548,55 @@ macro_rules! __impl_outer_event_json_metadata { } } +#[macro_export] +#[doc(hidden)] +macro_rules! __impl_outer_event_json_metadata_vnext { + ( + $runtime:ident; + $event_name:ident; + $( $module_name:ident::Event < $( $generic_params:path ),* > $( $instance:ident )?, )*; + ) => { + impl $runtime { + #[allow(dead_code)] + pub fn outer_event_metadata_vnext() -> $crate::event::vnext::OuterEventMetadata { + $crate::event::vnext::OuterEventMetadata { + name: stringify!($event_name), + events: vec![ + $( + $crate::metadata::vnext::ModuleEventMetadata { + name: stringify!($module_name), + events: $module_name::Event ::< $( $generic_params ),* > ::metadata_vnext() + } + ),* + ] + } + } + + $crate::__impl_outer_event_json_metadata! { + @DECL_MODULE_EVENT_FNS + $( $module_name < $( $generic_params ),* > $( $instance )? ; )* + } + } + }; + + (@DECL_MODULE_EVENT_FNS + $( + $module_name:ident < $( $generic_params:path ),* > $( $instance:ident )? ; + )* + ) => { + $crate::paste::item! { + $( + #[allow(dead_code)] + pub fn [< __module_events_ $module_name $( _ $instance )? 
>] () -> + Vec<$crate::event::vnext::EventMetadata> + { + $module_name::Event ::< $( $generic_params ),* > ::metadata_vnext() + } + )* + } + } +} + #[cfg(test)] #[allow(dead_code)] mod tests { @@ -827,7 +877,7 @@ mod tests { event_module2::Event::::TestEvent(3) ); assert_eq!(runtime_2_event_module_2.encode()[0], 5); - + let runtime_2_event_module_3 = TestEventSystemRenamed::event_module3( event_module3::Event::HiEvent ); From 5b43fe28e5df8f3bdfaa083c1d1415f74c1fe7d0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Dec 2020 16:40:55 +0000 Subject: [PATCH 010/503] Add metadata_vnext impl_runtime_metadata macros --- .../procedural/src/construct_runtime/mod.rs | 9 +- frame/support/src/dispatch.rs | 8 + frame/support/src/event.rs | 13 +- frame/support/src/lib.rs | 2 + frame/support/src/metadata_vnext.rs | 195 ++++++++++++++++++ 5 files changed, 224 insertions(+), 3 deletions(-) create mode 100644 frame/support/src/metadata_vnext.rs diff --git a/frame/support/procedural/src/construct_runtime/mod.rs b/frame/support/procedural/src/construct_runtime/mod.rs index 31fc71faf44fc..061fa3b437504 100644 --- a/frame/support/procedural/src/construct_runtime/mod.rs +++ b/frame/support/procedural/src/construct_runtime/mod.rs @@ -320,12 +320,19 @@ fn decl_runtime_metadata<'a>( quote!( #module::Module #(#instance)* as #name { index #index } with #(#filtered_names)*, ) - }); + }) + .collect::>(); + let modules_tokens_vnext = modules_tokens.clone(); quote!( #scrate::impl_runtime_metadata!{ for #runtime with modules where Extrinsic = #extrinsic #(#modules_tokens)* } + + #scrate::impl_runtime_metadata_vnext!{ + for #runtime with modules where Extrinsic = #extrinsic + #(#modules_tokens_vnext)* + } ) } diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 03cda0e4d40e0..89d7819fab32f 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2124,6 +2124,14 @@ macro_rules! 
__dispatch_impl_metadata { pub fn call_functions() -> &'static [$crate::dispatch::FunctionMetadata] { $crate::__call_to_functions!($($rest)*) } + + /// vnext metadata implemented in new frame support proc macros. + /// Returns empty vec for now to allow mixing of old style and new style pallets. + #[doc(hidden)] + #[allow(dead_code)] + pub fn call_functions_vnext() -> Vec<$crate::metadata::vnext::FunctionMetadata> { + vec![] + } } } } diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 94fe4754f5376..6baca16402f9f 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -494,6 +494,15 @@ macro_rules! impl_outer_event { $( $generic_instance )?, )*; ); + $crate::__impl_outer_event_json_metadata_vnext!( + $runtime; + $name; + $( + $module_name::Event + < $( $generic_param )? $(, $module_name::$generic_instance )? > + $( $generic_instance )?, + )*; + ); } } @@ -572,7 +581,7 @@ macro_rules! __impl_outer_event_json_metadata_vnext { } } - $crate::__impl_outer_event_json_metadata! { + $crate::__impl_outer_event_json_metadata_vnext! { @DECL_MODULE_EVENT_FNS $( $module_name < $( $generic_params ),* > $( $instance )? ; )* } @@ -587,7 +596,7 @@ macro_rules! __impl_outer_event_json_metadata_vnext { $crate::paste::item! { $( #[allow(dead_code)] - pub fn [< __module_events_ $module_name $( _ $instance )? >] () -> + pub fn [< __module_events_vnext_ $module_name $( _ $instance )? 
>] () -> Vec<$crate::event::vnext::EventMetadata> { $module_name::Event ::< $( $generic_params ),* > ::metadata_vnext() diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 7f4e42aeb4219..22189a10ffde3 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -60,6 +60,8 @@ pub mod event; #[macro_use] pub mod metadata; #[macro_use] +pub mod metadata_vnext; +#[macro_use] pub mod genesis_config; #[macro_use] pub mod inherent; diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs new file mode 100644 index 0000000000000..d23f0914a3c80 --- /dev/null +++ b/frame/support/src/metadata_vnext.rs @@ -0,0 +1,195 @@ +// This file is part of Substrate. + +// Copyright (C) 2018-2020 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +pub use frame_metadata::vnext; + +#[macro_export] +macro_rules! 
impl_runtime_metadata_vnext { + ( + for $runtime:ident with modules where Extrinsic = $ext:ident + $( $rest:tt )* + ) => { + impl $runtime { + pub fn metadata_vnext() -> $crate::metadata::vnext::RuntimeMetadataPrefixed { + $crate::metadata::vnext::RuntimeMetadataLastVersion { + modules: $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), + // extrinsic: $crate::metadata::ExtrinsicMetadata { + // version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + // signed_extensions: < + // < + // $ext as $crate::sp_runtime::traits::ExtrinsicMetadata + // >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension + // >::identifier() + // .into_iter() + // .map($crate::metadata::DecodeDifferent::Encode) + // .collect(), + // }, + }.into() + } + } + } +} + +#[macro_export] +#[doc(hidden)] +macro_rules! __runtime_modules_to_metadata_vnext { + ( + $runtime: ident; + $( $metadata:expr ),*; + $mod:ident::$module:ident $( < $instance:ident > )? as $name:ident + { index $index:tt } + $(with)+ $($kw:ident)* + , + $( $rest:tt )* + ) => { + $crate::__runtime_modules_to_metadata_vnext!( + $runtime; + $( $metadata, )* $crate::metadata::vnext::ModuleMetadata { + name: stringify!($name), + // index: $index, + // storage: $crate::__runtime_modules_to_metadata_calls_storage!( + // $mod, $module $( <$instance> )?, $runtime, $(with $kw)* + // ), + calls: $crate::__runtime_modules_to_metadata_calls_call_vnext!( + $mod, $module $( <$instance> )?, $runtime, $(with $kw)* + ), + event: $crate::__runtime_modules_to_metadata_calls_event_vnext!( + $mod, $module $( <$instance> )?, $runtime, $(with $kw)* + ), + // constants: $crate::metadata::DecodeDifferent::Encode( + // $crate::metadata::FnEncode( + // $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata + // ) + // ), + // errors: $crate::metadata::DecodeDifferent::Encode( + // $crate::metadata::FnEncode( + // <$mod::$module::<$runtime $(, $mod::$instance )?> as 
$crate::metadata::ModuleErrorMetadata>::metadata + // ) + // ) + }; + $( $rest )* + ) + }; + ( + $runtime:ident; + $( $metadata:expr ),*; + ) => { + vec![$( $metadata ),* ] + }; +} + +#[macro_export] +#[doc(hidden)] +macro_rules! __runtime_modules_to_metadata_calls_call_vnext { + ( + $mod: ident, + $module: ident $( <$instance:ident> )?, + $runtime: ident, + with Call + $(with $kws:ident)* + ) => { + Some($mod::$module::<$runtime $(, $mod::$instance )?>::call_functions_vnext()) + }; + ( + $mod: ident, + $module: ident $( <$instance:ident> )?, + $runtime: ident, + with $_:ident + $(with $kws:ident)* + ) => { + $crate::__runtime_modules_to_metadata_calls_call_vnext! { + $mod, $module $( <$instance> )?, $runtime, $(with $kws)* + }; + }; + ( + $mod: ident, + $module: ident $( <$instance:ident> )?, + $runtime: ident, + ) => { + None + }; +} + + +#[macro_export] +#[doc(hidden)] +macro_rules! __runtime_modules_to_metadata_calls_event_vnext { + ( + $mod: ident, + $module: ident $( <$instance:ident> )?, + $runtime: ident, + with Event + $(with $kws:ident)* + ) => { + Some($crate::paste::expr!{ + $runtime:: [< __module_events_vnext_ $mod $(_ $instance)?>]() + } + ) + }; + ( + $mod: ident, + $module: ident $( <$instance:ident> )?, + $runtime: ident, + with $_:ident + $(with $kws:ident)* + ) => { + $crate::__runtime_modules_to_metadata_calls_event_vnext!( $mod, $module $( <$instance> )?, $runtime, $(with $kws)* ); + }; + ( + $mod: ident, + $module: ident $( <$instance:ident> )?, + $runtime: ident, + ) => { + None + }; +} + +// #[macro_export] +// #[doc(hidden)] +// macro_rules! 
__runtime_modules_to_metadata_calls_storage_vnext { +// ( +// $mod: ident, +// $module: ident $( <$instance:ident> )?, +// $runtime: ident, +// with Storage +// $(with $kws:ident)* +// ) => { +// Some($crate::metadata::DecodeDifferent::Encode( +// $crate::metadata::FnEncode( +// $mod::$module::<$runtime $(, $mod::$instance )?>::storage_metadata +// ) +// )) +// }; +// ( +// $mod: ident, +// $module: ident $( <$instance:ident> )?, +// $runtime: ident, +// with $_:ident +// $(with $kws:ident)* +// ) => { +// $crate::__runtime_modules_to_metadata_calls_storage! { +// $mod, $module $( <$instance> )?, $runtime, $(with $kws)* +// }; +// }; +// ( +// $mod: ident, +// $module: ident $( <$instance:ident> )?, +// $runtime: ident, +// ) => { +// None +// }; +// } From 6f6d8c4074cb7b08f11923008904fe0580070812 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Dec 2020 16:53:50 +0000 Subject: [PATCH 011/503] Compactify metadata --- frame/support/src/metadata_vnext.rs | 34 ++++++++++++++++------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index d23f0914a3c80..da6dfa8ae1d5f 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -24,21 +24,25 @@ macro_rules! 
impl_runtime_metadata_vnext { $( $rest:tt )* ) => { impl $runtime { - pub fn metadata_vnext() -> $crate::metadata::vnext::RuntimeMetadataPrefixed { - $crate::metadata::vnext::RuntimeMetadataLastVersion { - modules: $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), - // extrinsic: $crate::metadata::ExtrinsicMetadata { - // version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, - // signed_extensions: < - // < - // $ext as $crate::sp_runtime::traits::ExtrinsicMetadata - // >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension - // >::identifier() - // .into_iter() - // .map($crate::metadata::DecodeDifferent::Encode) - // .collect(), - // }, - }.into() + pub fn metadata_vnext() -> $crate::metadata::vnext::RuntimeMetadataPrefixed<$crate::scale_info::form::CompactForm> { + let mut registry = $crate::scale_info::Registry::new(); + let metadata = + $crate::metadata::vnext::RuntimeMetadataLastVersion { + modules: $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), + // extrinsic: $crate::metadata::ExtrinsicMetadata { + // version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + // signed_extensions: < + // < + // $ext as $crate::sp_runtime::traits::ExtrinsicMetadata + // >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension + // >::identifier() + // .into_iter() + // .map($crate::metadata::DecodeDifferent::Encode) + // .collect(), + // }, + }; + use $crate::scale_info::IntoCompact as _; + metadata.into_compact(&mut registry).into() } } } From dace095b8e150f1d45bd441093bfbe7ddfa96839 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Dec 2020 17:18:42 +0000 Subject: [PATCH 012/503] Empty event vnext impls for old style pallet decl_event --- frame/support/src/event.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 6baca16402f9f..716e52bd4046c 100644 --- a/frame/support/src/event.rs +++ 
b/frame/support/src/event.rs @@ -149,6 +149,13 @@ macro_rules! decl_event { pub fn metadata() -> &'static [ $crate::event::EventMetadata ] { $crate::__events_to_metadata!(; $( $events )* ) } + + /// Metadata vnext only supported by new frame support macros + #[allow(dead_code)] + #[doc(hidden)] + pub fn metadata_vnext() -> Vec<$crate::metadata::vnext::EventMetadata> { + vec![] + } } } } @@ -297,6 +304,13 @@ macro_rules! __decl_generic_event { pub fn metadata() -> &'static [$crate::event::EventMetadata] { $crate::__events_to_metadata!(; $( $events )* ) } + + /// Metadata vnext only supported by new frame support macros + #[allow(dead_code)] + #[doc(hidden)] + pub fn metadata_vnext() -> Vec<$crate::metadata::vnext::EventMetadata> { + vec![] + } } }; (@cannot_parse $ty:ty) => { From f3a75154e8cdca7bb2f6e35ad63686e6cb7e8fde Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Dec 2020 17:27:39 +0000 Subject: [PATCH 013/503] Fix test compilation. COMPILES, test fails. --- frame/support/test/tests/construct_runtime.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index ffd7c1cdcec89..3de53c3b208e6 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -511,7 +511,7 @@ fn test_metadata() { #[test] fn test_metadata_vnext() { use frame_metadata::*; - use frame_support::scale_info::form::{MetaForm}; + use frame_support::scale_info::{form::MetaForm, IntoCompact, Registry}; let expected_metadata: vnext::RuntimeMetadataLastVersion = vnext::RuntimeMetadataLastVersion { modules: vec![ vnext::ModuleMetadata { @@ -736,6 +736,8 @@ fn test_metadata_vnext() { // signed_extensions: vec![DecodeDifferent::Encode("UnitSignedExtension")], // }, }; + let mut registry = Registry::new(); + let expected_metadata = expected_metadata.into_compact(&mut registry); 
pretty_assertions::assert_eq!(Runtime::metadata_vnext().1, vnext::RuntimeMetadata::V12(expected_metadata)); } From ab24563bdc3ad2946a2c9c33c0edcd98bdf14ac5 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Dec 2020 10:41:45 +0000 Subject: [PATCH 014/503] Pallet metadata test with calls and events compiling but failing --- frame/support/test/tests/pallet.rs | 95 ++++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index ae69c84470ef2..6d779b170230f 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -767,3 +767,98 @@ fn metadata() { pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); } + +#[test] +fn metadata_vnext() { + use frame_metadata::*; + use crate::scale_info::{meta_type, Registry, IntoCompact}; + use codec::{Decode, Encode}; + + let expected_pallet_metadata = vnext::ModuleMetadata { + // index: 1, + name: "Example", + // storage: + calls: Some(vec![ + vnext::FunctionMetadata { + name: "foo", + arguments: vec![ + vnext::FunctionArgumentMetadata { + name: "_foo", + ty: meta_type::(), + is_compact: true, + } + ], + documentation: vec![" Doc comment put in metadata"], + }, + vnext::FunctionMetadata { + name: "foo", + arguments: vec![ + vnext::FunctionArgumentMetadata { + name: "_foo", + ty: meta_type::(), + is_compact: true, + }, + vnext::FunctionArgumentMetadata { + name: "_bar", + ty: meta_type::(), + is_compact: false, + }, + ], + documentation: vec![" Doc comment put in metadata"], + }, + vnext::FunctionMetadata { + name: "foo_transactional", + arguments: vec![ + vnext::FunctionArgumentMetadata { + name: "foo", + ty: meta_type::(), + is_compact: true, + }, + ], + documentation: vec![" Doc comment put in metadata"], + }, + ]), + event: Some(vec![ + vnext::EventMetadata { + name: "Proposed", + arguments: vec![ + vnext::TypeSpec::new::<::AccountId>("::AccountId"), + ], + documentation: vec![" doc 
comment put in metadata"], + }, + vnext::EventMetadata { + name: "Spending", + arguments: vec![ + vnext::TypeSpec::new::<::Balance>("Balance"), + ], + documentation: vec![" doc"], + }, + vnext::EventMetadata { + name: "Something", + arguments: vec![ + vnext::TypeSpec::new::("Other"), + ], + documentation: vec![], + }, + vnext::EventMetadata { + name: "SomethingElse", + arguments: vec![ + vnext::TypeSpec::new::<<::AccountId as SomeAssociation1>::_1>("::_1"), + ], + documentation: vec![], + }, + ]), + // constants: , + // errors: + }; + + let metadata = match Runtime::metadata_vnext().1 { + vnext::RuntimeMetadata::V12(metadata) => metadata, + _ => panic!("metadata has been bumped, test needs to be updated"), + }; + + let mut registry = Registry::new(); + let expected_pallet_metadata = expected_pallet_metadata.into_compact(&mut registry); + + pretty_assertions::assert_eq!(metadata.modules[1], expected_pallet_metadata); +} From e0cd065aa674661067f9d84aa99ceab97c910923 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Dec 2020 11:03:23 +0000 Subject: [PATCH 015/503] construct_runtime vnext metadata test with empty definitions for legacy pallet macros --- frame/support/test/tests/construct_runtime.rs | 267 ++++-------------- 1 file changed, 54 insertions(+), 213 deletions(-) diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 3de53c3b208e6..daa6f49923d3c 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -512,225 +512,66 @@ fn test_metadata() { fn test_metadata_vnext() { use frame_metadata::*; use frame_support::scale_info::{form::MetaForm, IntoCompact, Registry}; + // vnext modules defined with legacy macros have empty metadata let expected_metadata: vnext::RuntimeMetadataLastVersion = vnext::RuntimeMetadataLastVersion { modules: vec![ vnext::ModuleMetadata { name: "System", - // storage: None, - calls: Some(vec![ - vnext::FunctionMetadata 
{ - name: "noop", - arguments: vec![], - documentation: vec![], - }]), - event: Some(vec![ - vnext::EventMetadata { - name: "ExtrinsicSuccess", - arguments: vec![], - documentation: vec![], - }, - vnext::EventMetadata { - name: "ExtrinsicFailed", - arguments: vec![], - documentation: vec![], - }, - vnext::EventMetadata { - name: "Ignore", - arguments: vec![ - vnext::TypeSpec::new::<::BlockNumber>("BlockNumber") - ], - documentation: vec![], - } - ]), - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 30, + calls: Some(vec![]), + event: Some(vec![]), + }, + vnext::ModuleMetadata { + name: "Module1_1", + calls: Some(vec![]), + event: Some(vec![]), + }, + vnext::ModuleMetadata { + name: "Module2", + calls: Some(vec![]), + event: Some(vec![]), + }, + vnext::ModuleMetadata { + name: "Module1_2", + calls: Some(vec![]), + event: Some(vec![]), + }, + vnext::ModuleMetadata { + name: "Module1_3", + calls: None, + event: None, + }, + vnext::ModuleMetadata { + name: "Module1_4", + calls: Some(vec![]), + event: None, + }, + vnext::ModuleMetadata { + name: "Module1_5", + calls: None, + event: Some(vec![]), + }, + vnext::ModuleMetadata { + name: "Module1_6", + calls: Some(vec![]), + event: Some(vec![]), }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module1_1"), - // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - // prefix: DecodeDifferent::Encode("Instance1Module"), - // entries: DecodeDifferent::Encode(&[]), - // }))), - // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ - // FunctionMetadata { - // name: DecodeDifferent::Encode("fail"), - // arguments: DecodeDifferent::Encode(&[]), - // documentation: DecodeDifferent::Encode(&[]), - // }, - // ]))), - // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - // name: DecodeDifferent::Encode("A"), - // arguments: DecodeDifferent::Encode(&["AccountId"]), - // documentation: 
DecodeDifferent::Encode(&[]), - // }]))), - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 31, - // }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module2"), - // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - // prefix: DecodeDifferent::Encode("Module"), - // entries: DecodeDifferent::Encode(&[]), - // }))), - // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ - // FunctionMetadata { - // name: DecodeDifferent::Encode("fail"), - // arguments: DecodeDifferent::Encode(&[]), - // documentation: DecodeDifferent::Encode(&[]), - // }, - // ]))), - // event: Some(DecodeDifferent::Encode(FnEncode(|| &[ - // EventMetadata { - // name: DecodeDifferent::Encode("A"), - // arguments: DecodeDifferent::Encode(&[]), - // documentation: DecodeDifferent::Encode(&[]), - // }, - // ]))), - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 32, - // }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module1_2"), - // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - // prefix: DecodeDifferent::Encode("Instance2Module"), - // entries: DecodeDifferent::Encode(&[]), - // }))), - // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - // name: DecodeDifferent::Encode("fail"), - // arguments: DecodeDifferent::Encode(&[]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - // name: DecodeDifferent::Encode("A"), - // arguments: DecodeDifferent::Encode(&["AccountId"]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 33, - // }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module1_3"), - // storage: 
Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - // prefix: DecodeDifferent::Encode("Instance3Module"), - // entries: DecodeDifferent::Encode(&[]), - // }))), - // calls: None, - // event: None, - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 6, - // }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module1_4"), - // storage: None, - // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - // name: DecodeDifferent::Encode("fail"), - // arguments: DecodeDifferent::Encode(&[]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // event: None, - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 3, - // }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module1_5"), - // storage: None, - // calls: None, - // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - // name: DecodeDifferent::Encode("A"), - // arguments: DecodeDifferent::Encode(&["AccountId"]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 4, - // }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module1_6"), - // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - // prefix: DecodeDifferent::Encode("Instance6Module"), - // entries: DecodeDifferent::Encode(&[]), - // }))), - // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - // name: DecodeDifferent::Encode("fail"), - // arguments: DecodeDifferent::Encode(&[]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - // name: DecodeDifferent::Encode("A"), - // arguments: DecodeDifferent::Encode(&["AccountId"]), - // documentation: DecodeDifferent::Encode(&[]), - 
// }]))), - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 1, - // }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module1_7"), - // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - // prefix: DecodeDifferent::Encode("Instance7Module"), - // entries: DecodeDifferent::Encode(&[]), - // }))), - // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - // name: DecodeDifferent::Encode("fail"), - // arguments: DecodeDifferent::Encode(&[]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - // name: DecodeDifferent::Encode("A"), - // arguments: DecodeDifferent::Encode(&["AccountId"]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 2, - // }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module1_8"), - // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - // prefix: DecodeDifferent::Encode("Instance8Module"), - // entries: DecodeDifferent::Encode(&[]), - // }))), - // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - // name: DecodeDifferent::Encode("fail"), - // arguments: DecodeDifferent::Encode(&[]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - // name: DecodeDifferent::Encode("A"), - // arguments: DecodeDifferent::Encode(&["AccountId"]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 12, - // }, - // ModuleMetadata { - // name: DecodeDifferent::Encode("Module1_9"), - // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - // 
prefix: DecodeDifferent::Encode("Instance9Module"), - // entries: DecodeDifferent::Encode(&[]), - // }))), - // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - // name: DecodeDifferent::Encode("fail"), - // arguments: DecodeDifferent::Encode(&[]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - // name: DecodeDifferent::Encode("A"), - // arguments: DecodeDifferent::Encode(&["AccountId"]), - // documentation: DecodeDifferent::Encode(&[]), - // }]))), - // constants: DecodeDifferent::Encode(FnEncode(|| &[])), - // errors: DecodeDifferent::Encode(FnEncode(|| &[])), - // index: 13, - // }, + vnext::ModuleMetadata { + name: "Module1_7", + calls: Some(vec![]), + event: Some(vec![]), + }, + vnext::ModuleMetadata { + name: "Module1_8", + calls: Some(vec![]), + event: Some(vec![]), + }, + vnext::ModuleMetadata { + name: "Module1_9", + calls: Some(vec![]), + event: Some(vec![]), + } ], + // todo [AJ]: add back extrinsic // extrinsic: ExtrinsicMetadata { // version: 4, // signed_extensions: vec![DecodeDifferent::Encode("UnitSignedExtension")], From eaf9362bdaa7d40abcaea0f1d4dec5a399fca312 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Dec 2020 11:09:40 +0000 Subject: [PATCH 016/503] Passing basic pallet test with calls and events --- frame/metadata/src/vnext.rs | 2 +- frame/support/test/tests/pallet.rs | 13 ------------- 2 files changed, 1 insertion(+), 14 deletions(-) diff --git a/frame/metadata/src/vnext.rs b/frame/metadata/src/vnext.rs index 5bf56c244ccca..d8465008cc9b4 100644 --- a/frame/metadata/src/vnext.rs +++ b/frame/metadata/src/vnext.rs @@ -66,7 +66,7 @@ impl From> for sp_core::OpaqueMetadata { #[derive(Eq, Encode, PartialEq, RuntimeDebug)] #[cfg_attr(feature = "std", derive(Decode))] pub enum RuntimeMetadata { - /// Version 11 for runtime metadata. + /// Version 12 for runtime metadata. 
V12(RuntimeMetadataV12), } diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 6d779b170230f..82eca50602a2a 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -772,24 +772,12 @@ fn metadata() { fn metadata_vnext() { use frame_metadata::*; use crate::scale_info::{meta_type, Registry, IntoCompact}; - use codec::{Decode, Encode}; let expected_pallet_metadata = vnext::ModuleMetadata { // index: 1, name: "Example", // storage: calls: Some(vec![ - vnext::FunctionMetadata { - name: "foo", - arguments: vec![ - vnext::FunctionArgumentMetadata { - name: "_foo", - ty: meta_type::(), - is_compact: true, - } - ], - documentation: vec![" Doc comment put in metadata"], - }, vnext::FunctionMetadata { name: "foo", arguments: vec![ @@ -854,7 +842,6 @@ fn metadata_vnext() { let metadata = match Runtime::metadata_vnext().1 { vnext::RuntimeMetadata::V12(metadata) => metadata, - _ => panic!("metadata has been bumped, test needs to be updated"), }; let mut registry = Registry::new(); From e2f028cacd6e0883298c855e36426e71ba1b02d3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 14 Dec 2020 13:06:17 +0000 Subject: [PATCH 017/503] Convert Balances pallet to new style attribute macros # Conflicts: # frame/balances/src/lib.rs # frame/balances/src/tests_composite.rs # frame/balances/src/tests_local.rs --- Cargo.lock | 1 + frame/balances/Cargo.toml | 1 + frame/balances/src/lib.rs | 559 +++++++++++++------------- frame/balances/src/tests.rs | 31 +- frame/balances/src/tests_composite.rs | 47 ++- frame/balances/src/tests_local.rs | 58 +-- frame/support/src/traits.rs | 2 +- 7 files changed, 366 insertions(+), 333 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 391a7c18f4a88..9849e3322f7af 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4393,6 +4393,7 @@ dependencies = [ "frame-system", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git 
a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 21c8abbc24a6c..e477ba1607557 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.4", default-features = false, features = ["derive"] } +scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "2.0.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 4fcda02c4fd2d..0bacdf8ec76e6 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -160,7 +160,7 @@ use sp_std::prelude::*; use sp_std::{cmp, result, mem, fmt::Debug, ops::BitOr, convert::Infallible}; use codec::{Codec, Encode, Decode}; use frame_support::{ - StorageValue, Parameter, decl_event, decl_storage, decl_module, decl_error, ensure, + Parameter, ensure, traits::{ Currency, OnKilledAccount, OnUnbalanced, TryDrop, StoredMap, WithdrawReasons, LockIdentifier, LockableCurrency, ExistenceRequirement, @@ -179,87 +179,218 @@ use frame_system::{self as system, ensure_signed, ensure_root}; pub use self::imbalances::{PositiveImbalance, NegativeImbalance}; pub use weights::WeightInfo; -pub trait Subtrait: frame_system::Config { - /// The balance of an account. - type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + - MaybeSerializeDeserialize + Debug; +pub use pallet::*; - /// The minimum amount required to keep an account open. 
- type ExistentialDeposit: Get; +#[frame_support::pallet] +pub mod pallet { + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + use super::*; - /// The means of storing the balances of an account. - type AccountStore: StoredMap>; + #[pallet::config] + pub trait Config: frame_system::Config { + /// The balance of an account. + type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + + MaybeSerializeDeserialize + Debug + ::scale_info::TypeInfo; - /// Weight information for the extrinsics in this pallet. - type WeightInfo: WeightInfo; + /// Handler for the unbalanced reduction when removing a dust account. + type DustRemoval: OnUnbalanced>; - /// The maximum number of locks that should exist on an account. - /// Not strictly enforced, but used for weight estimation. - type MaxLocks: Get; -} + /// The overarching event type. + type Event: From> + IsType<::Event>; -pub trait Config: frame_system::Config { - /// The balance of an account. - type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + - MaybeSerializeDeserialize + Debug; + /// The minimum amount required to keep an account open. + #[pallet::constant] + type ExistentialDeposit: Get; - /// Handler for the unbalanced reduction when removing a dust account. - type DustRemoval: OnUnbalanced>; + /// The means of storing the balances of an account. + type AccountStore: StoredMap>; - /// The overarching event type. - type Event: From> + Into<::Event>; + /// Weight information for extrinsics in this pallet. + type WeightInfo: WeightInfo; - /// The minimum amount required to keep an account open. - type ExistentialDeposit: Get; + /// The maximum number of locks that should exist on an account. + /// Not strictly enforced, but used for weight estimation. + type MaxLocks: Get; + } - /// The means of storing the balances of an account. 
- type AccountStore: StoredMap>; + #[pallet::pallet] + pub struct Pallet(PhantomData<(T, I)>); - /// Weight information for extrinsics in this pallet. - type WeightInfo: WeightInfo; + #[pallet::hooks] + impl, I: 'static> Hooks> for Pallet {} - /// The maximum number of locks that should exist on an account. - /// Not strictly enforced, but used for weight estimation. - type MaxLocks: Get; -} + #[pallet::call] + impl, I: 'static> Pallet + where ::Source: ::scale_info::TypeInfo + { + /// Transfer some liquid free balance to another account. + /// + /// `transfer` will set the `FreeBalance` of the sender and receiver. + /// It will decrease the total issuance of the system by the `TransferFee`. + /// If the sender's account is below the existential deposit as a result + /// of the transfer, the account will be reaped. + /// + /// The dispatch origin for this call must be `Signed` by the transactor. + /// + /// # + /// - Dependent on arguments but not critical, given proper implementations for + /// input config types. See related functions below. + /// - It contains a limited number of reads and writes internally and no complex computation. + /// + /// Related functions: + /// + /// - `ensure_can_withdraw` is always called internally but has a bounded complexity. + /// - Transferring balances to accounts that did not exist before will cause + /// `T::OnNewAccount::on_new_account` to be called. + /// - Removing enough funds from an account will trigger `T::DustRemoval::on_unbalanced`. + /// - `transfer_keep_alive` works the same way as `transfer`, but has an additional + /// check that the transfer will not kill the origin account. + /// --------------------------------- + /// - Base Weight: 73.64 µs, worst case scenario (account created, account removed) + /// - DB Weight: 1 Read and 1 Write to destination account + /// - Origin account is already in memory, so no DB operations for them. 
+ /// # + #[pallet::weight(T::WeightInfo::transfer())] + pub fn transfer( + origin: OriginFor, + dest: ::Source, + #[pallet::compact] value: T::Balance + ) -> DispatchResultWithPostInfo { + let transactor = ensure_signed(origin)?; + let dest = T::Lookup::lookup(dest)?; + >::transfer(&transactor, &dest, value, ExistenceRequirement::AllowDeath)?; + Ok(().into()) + } -impl, I: Instance> Subtrait for T { - type Balance = T::Balance; - type ExistentialDeposit = T::ExistentialDeposit; - type AccountStore = T::AccountStore; - type WeightInfo = >::WeightInfo; - type MaxLocks = T::MaxLocks; -} + /// Set the balances of a given account. + /// + /// This will alter `FreeBalance` and `ReservedBalance` in storage. it will + /// also decrease the total issuance of the system (`TotalIssuance`). + /// If the new free or reserved balance is below the existential deposit, + /// it will reset the account nonce (`frame_system::AccountNonce`). + /// + /// The dispatch origin for this call is `root`. + /// + /// # + /// - Independent of the arguments. + /// - Contains a limited number of reads and writes. + /// --------------------- + /// - Base Weight: + /// - Creating: 27.56 µs + /// - Killing: 35.11 µs + /// - DB Weight: 1 Read, 1 Write to `who` + /// # + #[pallet::weight( + T::WeightInfo::set_balance_creating() // Creates a new account. + .max(T::WeightInfo::set_balance_killing()) // Kills an existing account. 
+ )] + pub(super) fn set_balance( + origin: OriginFor, + who: ::Source, + #[pallet::compact] new_free: T::Balance, + #[pallet::compact] new_reserved: T::Balance + ) -> DispatchResultWithPostInfo { + ensure_root(origin)?; + let who = T::Lookup::lookup(who)?; + let existential_deposit = T::ExistentialDeposit::get(); -decl_event!( - pub enum Event where - ::AccountId, - >::Balance - { + let wipeout = new_free + new_reserved < existential_deposit; + let new_free = if wipeout { Zero::zero() } else { new_free }; + let new_reserved = if wipeout { Zero::zero() } else { new_reserved }; + + let (free, reserved) = Self::mutate_account(&who, |account| { + if new_free > account.free { + mem::drop(PositiveImbalance::::new(new_free - account.free)); + } else if new_free < account.free { + mem::drop(NegativeImbalance::::new(account.free - new_free)); + } + + if new_reserved > account.reserved { + mem::drop(PositiveImbalance::::new(new_reserved - account.reserved)); + } else if new_reserved < account.reserved { + mem::drop(NegativeImbalance::::new(account.reserved - new_reserved)); + } + + account.free = new_free; + account.reserved = new_reserved; + + (account.free, account.reserved) + }); + Self::deposit_event(Event::BalanceSet(who, free, reserved)); + Ok(().into()) + } + + /// Exactly as `transfer`, except the origin must be root and the source account may be + /// specified. + /// # + /// - Same as transfer, but additional read and write because the source account is + /// not assumed to be in the overlay. 
+ /// # + #[pallet::weight(T::WeightInfo::force_transfer())] + pub fn force_transfer( + origin: OriginFor, + source: ::Source, + dest: ::Source, + #[pallet::compact] value: T::Balance + ) -> DispatchResultWithPostInfo { + ensure_root(origin)?; + let source = T::Lookup::lookup(source)?; + let dest = T::Lookup::lookup(dest)?; + >::transfer(&source, &dest, value, ExistenceRequirement::AllowDeath)?; + Ok(().into()) + } + + /// Same as the [`transfer`] call, but with a check that the transfer will not kill the + /// origin account. + /// + /// 99% of the time you want [`transfer`] instead. + /// + /// [`transfer`]: struct.Module.html#method.transfer + /// # + /// - Cheaper than transfer because account cannot be killed. + /// - Base Weight: 51.4 µs + /// - DB Weight: 1 Read and 1 Write to dest (sender is in overlay already) + /// # + #[pallet::weight(T::WeightInfo::transfer_keep_alive())] + pub fn transfer_keep_alive( + origin: OriginFor, + dest: ::Source, + #[pallet::compact] value: T::Balance + ) -> DispatchResultWithPostInfo { + let transactor = ensure_signed(origin)?; + let dest = T::Lookup::lookup(dest)?; + >::transfer(&transactor, &dest, value, KeepAlive)?; + Ok(().into()) + } + } + + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + pub enum Event, I: 'static = ()> { /// An account was created with some free balance. \[account, free_balance\] - Endowed(AccountId, Balance), + Endowed(T::AccountId, T::Balance), /// An account was removed whose balance was non-zero but below ExistentialDeposit, /// resulting in an outright loss. \[account, balance\] - DustLost(AccountId, Balance), + DustLost(T::AccountId, T::Balance), /// Transfer succeeded. \[from, to, value\] - Transfer(AccountId, AccountId, Balance), + Transfer(T::AccountId, T::AccountId, T::Balance), /// A balance was set by root. \[who, free, reserved\] - BalanceSet(AccountId, Balance, Balance), + BalanceSet(T::AccountId, T::Balance, T::Balance), /// Some amount was deposited (e.g. 
for transaction fees). \[who, deposit\] - Deposit(AccountId, Balance), + Deposit(T::AccountId, T::Balance), /// Some balance was reserved (moved from free to reserved). \[who, value\] - Reserved(AccountId, Balance), + Reserved(T::AccountId, T::Balance), /// Some balance was unreserved (moved from reserved to free). \[who, value\] - Unreserved(AccountId, Balance), + Unreserved(T::AccountId, T::Balance), /// Some balance was moved from the reserve of the first account to the second account. /// Final argument indicates the destination balance type. /// \[from, to, balance, destination_status\] - ReserveRepatriated(AccountId, AccountId, Balance, Status), + ReserveRepatriated(T::AccountId, T::AccountId, T::Balance, Status), } -); -decl_error! { - pub enum Error for Module, I: Instance> { + #[pallet::error] + pub enum Error { /// Vesting balance too high to send value VestingBalance, /// Account liquidity restrictions prevent withdrawal @@ -277,6 +408,83 @@ decl_error! { /// Beneficiary account must pre-exist DeadAccount, } + + /// The total units issued in the system. + #[pallet::storage] + #[pallet::getter(fn total_issuance)] + pub type TotalIssuance, I: 'static = ()> = StorageValue<_, T::Balance, ValueQuery>; + + /// The balance of an account. + /// + /// NOTE: This is only used in the case that this module is used to store balances. + #[pallet::storage] + pub type Account, I: 'static = ()> = StorageMap<_, Blake2_128Concat, T::AccountId, AccountData, ValueQuery>; + + /// Any liquidity locks on some account balances. + /// NOTE: Should only be accessed when setting, changing and freeing a lock. + #[pallet::storage] + #[pallet::getter(fn locks)] + pub type Locks, I: 'static = ()> = StorageMap<_, Blake2_128Concat, T::AccountId, Vec>, ValueQuery>; + + /// Storage version of the pallet. + /// + /// This is set to v2.0.0 for new networks. 
+ #[pallet::storage] + pub(super) type StorageVersion, I: 'static = ()> = StorageValue<_, Releases, ValueQuery>; + + + #[pallet::genesis_config] + pub struct GenesisConfig, I: 'static = ()> { + pub balances: Vec<(T::AccountId, T::Balance)>, + } + + #[cfg(feature = "std")] + impl, I: 'static> Default for GenesisConfig { + fn default() -> Self { + Self { + balances: Default::default(), + } + } + } + + #[pallet::genesis_build] + impl, I: 'static> GenesisBuild for GenesisConfig { + fn build(&self) { + let total = self.balances.iter() + .fold(Zero::zero(), |acc: T::Balance, &(_, n)| acc + n); + >::put(total); + >::put(Releases::V2_0_0); + for (_, balance) in &self.balances { + assert!( + *balance >= >::ExistentialDeposit::get(), + "the balance of any account should always be more than existential deposit.", + ) + } + for &(ref who, free) in &self.balances { + T::AccountStore::insert(who, AccountData { free, .. Default::default() }); + } + } + } + + #[cfg(feature = "std")] + impl, I: 'static> GenesisConfig { + /// Direct implementation of `GenesisBuild::build_storage`. + /// + /// Kept in order not to break dependency. + pub fn build_storage(&self) -> Result { + >::build_storage(self) + } + + /// Direct implementation of `GenesisBuild::assimilate_storage`. + /// + /// Kept in order not to break dependency. + pub fn assimilate_storage( + &self, + storage: &mut sp_runtime::Storage + ) -> Result<(), String> { + >::assimilate_storage(self, storage) + } + } } /// Simplified reasons for withdrawing balance. @@ -381,197 +589,7 @@ impl Default for Releases { } } -decl_storage! { - trait Store for Module, I: Instance=DefaultInstance> as Balances { - /// The total units issued in the system. - pub TotalIssuance get(fn total_issuance) build(|config: &GenesisConfig| { - config.balances.iter().fold(Zero::zero(), |acc: T::Balance, &(_, n)| acc + n) - }): T::Balance; - - /// The balance of an account. 
- /// - /// NOTE: This is only used in the case that this module is used to store balances. - pub Account: map hasher(blake2_128_concat) T::AccountId => AccountData; - - /// Any liquidity locks on some account balances. - /// NOTE: Should only be accessed when setting, changing and freeing a lock. - pub Locks get(fn locks): map hasher(blake2_128_concat) T::AccountId => Vec>; - - /// Storage version of the pallet. - /// - /// This is set to v2.0.0 for new networks. - StorageVersion build(|_: &GenesisConfig| Releases::V2_0_0): Releases; - } - add_extra_genesis { - config(balances): Vec<(T::AccountId, T::Balance)>; - // ^^ begin, length, amount liquid at genesis - build(|config: &GenesisConfig| { - for (_, balance) in &config.balances { - assert!( - *balance >= >::ExistentialDeposit::get(), - "the balance of any account should always be at least the existential deposit.", - ) - } - - // ensure no duplicates exist. - let endowed_accounts = config.balances.iter().map(|(x, _)| x).cloned().collect::>(); - - assert!(endowed_accounts.len() == config.balances.len(), "duplicate balances in genesis."); - - for &(ref who, free) in config.balances.iter() { - T::AccountStore::insert(who, AccountData { free, .. Default::default() }); - } - }); - } -} - -decl_module! { - pub struct Module, I: Instance = DefaultInstance> for enum Call where origin: T::Origin { - type Error = Error; - - /// The minimum amount required to keep an account open. - const ExistentialDeposit: T::Balance = T::ExistentialDeposit::get(); - - fn deposit_event() = default; - - /// Transfer some liquid free balance to another account. - /// - /// `transfer` will set the `FreeBalance` of the sender and receiver. - /// It will decrease the total issuance of the system by the `TransferFee`. - /// If the sender's account is below the existential deposit as a result - /// of the transfer, the account will be reaped. - /// - /// The dispatch origin for this call must be `Signed` by the transactor. 
- /// - /// # - /// - Dependent on arguments but not critical, given proper implementations for - /// input config types. See related functions below. - /// - It contains a limited number of reads and writes internally and no complex computation. - /// - /// Related functions: - /// - /// - `ensure_can_withdraw` is always called internally but has a bounded complexity. - /// - Transferring balances to accounts that did not exist before will cause - /// `T::OnNewAccount::on_new_account` to be called. - /// - Removing enough funds from an account will trigger `T::DustRemoval::on_unbalanced`. - /// - `transfer_keep_alive` works the same way as `transfer`, but has an additional - /// check that the transfer will not kill the origin account. - /// --------------------------------- - /// - Base Weight: 73.64 µs, worst case scenario (account created, account removed) - /// - DB Weight: 1 Read and 1 Write to destination account - /// - Origin account is already in memory, so no DB operations for them. - /// # - #[weight = T::WeightInfo::transfer()] - pub fn transfer( - origin, - dest: ::Source, - #[compact] value: T::Balance - ) { - let transactor = ensure_signed(origin)?; - let dest = T::Lookup::lookup(dest)?; - >::transfer(&transactor, &dest, value, ExistenceRequirement::AllowDeath)?; - } - - /// Set the balances of a given account. - /// - /// This will alter `FreeBalance` and `ReservedBalance` in storage. it will - /// also decrease the total issuance of the system (`TotalIssuance`). - /// If the new free or reserved balance is below the existential deposit, - /// it will reset the account nonce (`frame_system::AccountNonce`). - /// - /// The dispatch origin for this call is `root`. - /// - /// # - /// - Independent of the arguments. - /// - Contains a limited number of reads and writes. 
- /// --------------------- - /// - Base Weight: - /// - Creating: 27.56 µs - /// - Killing: 35.11 µs - /// - DB Weight: 1 Read, 1 Write to `who` - /// # - #[weight = T::WeightInfo::set_balance_creating() // Creates a new account. - .max(T::WeightInfo::set_balance_killing()) // Kills an existing account. - ] - fn set_balance( - origin, - who: ::Source, - #[compact] new_free: T::Balance, - #[compact] new_reserved: T::Balance - ) { - ensure_root(origin)?; - let who = T::Lookup::lookup(who)?; - let existential_deposit = T::ExistentialDeposit::get(); - - let wipeout = new_free + new_reserved < existential_deposit; - let new_free = if wipeout { Zero::zero() } else { new_free }; - let new_reserved = if wipeout { Zero::zero() } else { new_reserved }; - - let (free, reserved) = Self::mutate_account(&who, |account| { - if new_free > account.free { - mem::drop(PositiveImbalance::::new(new_free - account.free)); - } else if new_free < account.free { - mem::drop(NegativeImbalance::::new(account.free - new_free)); - } - - if new_reserved > account.reserved { - mem::drop(PositiveImbalance::::new(new_reserved - account.reserved)); - } else if new_reserved < account.reserved { - mem::drop(NegativeImbalance::::new(account.reserved - new_reserved)); - } - - account.free = new_free; - account.reserved = new_reserved; - - (account.free, account.reserved) - }); - Self::deposit_event(RawEvent::BalanceSet(who, free, reserved)); - } - - /// Exactly as `transfer`, except the origin must be root and the source account may be - /// specified. - /// # - /// - Same as transfer, but additional read and write because the source account is - /// not assumed to be in the overlay. 
- /// # - #[weight = T::WeightInfo::force_transfer()] - pub fn force_transfer( - origin, - source: ::Source, - dest: ::Source, - #[compact] value: T::Balance - ) { - ensure_root(origin)?; - let source = T::Lookup::lookup(source)?; - let dest = T::Lookup::lookup(dest)?; - >::transfer(&source, &dest, value, ExistenceRequirement::AllowDeath)?; - } - - /// Same as the [`transfer`] call, but with a check that the transfer will not kill the - /// origin account. - /// - /// 99% of the time you want [`transfer`] instead. - /// - /// [`transfer`]: struct.Module.html#method.transfer - /// # - /// - Cheaper than transfer because account cannot be killed. - /// - Base Weight: 51.4 µs - /// - DB Weight: 1 Read and 1 Write to dest (sender is in overlay already) - /// # - #[weight = T::WeightInfo::transfer_keep_alive()] - pub fn transfer_keep_alive( - origin, - dest: ::Source, - #[compact] value: T::Balance - ) { - let transactor = ensure_signed(origin)?; - let dest = T::Lookup::lookup(dest)?; - >::transfer(&transactor, &dest, value, KeepAlive)?; - } - } -} - -impl, I: Instance> Module { +impl, I: 'static> Pallet { // PRIVATE MUTABLES /// Get the free balance of an account. @@ -615,7 +633,7 @@ impl, I: Instance> Module { if total < T::ExistentialDeposit::get() { if !total.is_zero() { T::DustRemoval::on_unbalanced(NegativeImbalance::new(total)); - Self::deposit_event(RawEvent::DustLost(who.clone(), total)); + Self::deposit_event(Event::DustLost(who.clone(), total)); } None } else { @@ -662,7 +680,7 @@ impl, I: Instance> Module { }) }).map(|(maybe_endowed, result)| { if let Some(endowed) = maybe_endowed { - Self::deposit_event(RawEvent::Endowed(who.clone(), endowed)); + Self::deposit_event(Event::Endowed(who.clone(), endowed)); } result }) @@ -710,17 +728,16 @@ impl, I: Instance> Module { // of the inner member. 
mod imbalances { use super::{ - result, DefaultInstance, Imbalance, Config, Zero, Instance, Saturating, - StorageValue, TryDrop, + result, Imbalance, Config, Zero, Saturating, TryDrop, }; use sp_std::mem; /// Opaque, move-only struct with private fields that serves as a token denoting that /// funds have been created without any equal and opposite accounting. #[must_use] - pub struct PositiveImbalance, I: Instance=DefaultInstance>(T::Balance); + pub struct PositiveImbalance, I: 'static>(T::Balance); - impl, I: Instance> PositiveImbalance { + impl, I: 'static> PositiveImbalance { /// Create a new positive imbalance from a balance. pub fn new(amount: T::Balance) -> Self { PositiveImbalance(amount) @@ -730,22 +747,22 @@ mod imbalances { /// Opaque, move-only struct with private fields that serves as a token denoting that /// funds have been destroyed without any equal and opposite accounting. #[must_use] - pub struct NegativeImbalance, I: Instance=DefaultInstance>(T::Balance); + pub struct NegativeImbalance, I: 'static>(T::Balance); - impl, I: Instance> NegativeImbalance { + impl, I: 'static> NegativeImbalance { /// Create a new negative imbalance from a balance. 
pub fn new(amount: T::Balance) -> Self { NegativeImbalance(amount) } } - impl, I: Instance> TryDrop for PositiveImbalance { + impl, I: 'static> TryDrop for PositiveImbalance { fn try_drop(self) -> result::Result<(), Self> { self.drop_zero() } } - impl, I: Instance> Imbalance for PositiveImbalance { + impl, I: 'static> Imbalance for PositiveImbalance { type Opposite = NegativeImbalance; fn zero() -> Self { @@ -790,13 +807,13 @@ mod imbalances { } } - impl, I: Instance> TryDrop for NegativeImbalance { + impl, I: 'static> TryDrop for NegativeImbalance { fn try_drop(self) -> result::Result<(), Self> { self.drop_zero() } } - impl, I: Instance> Imbalance for NegativeImbalance { + impl, I: 'static> Imbalance for NegativeImbalance { type Opposite = PositiveImbalance; fn zero() -> Self { @@ -841,7 +858,7 @@ mod imbalances { } } - impl, I: Instance> Drop for PositiveImbalance { + impl, I: 'static> Drop for PositiveImbalance { /// Basic drop handler will just square up the total issuance. fn drop(&mut self) { >::mutate( @@ -850,7 +867,7 @@ mod imbalances { } } - impl, I: Instance> Drop for NegativeImbalance { + impl, I: 'static> Drop for NegativeImbalance { /// Basic drop handler will just square up the total issuance. fn drop(&mut self) { >::mutate( @@ -860,7 +877,7 @@ mod imbalances { } } -impl, I: Instance> Currency for Module where +impl, I: 'static> Currency for Pallet where T::Balance: MaybeSerializeDeserialize + Debug { type Balance = T::Balance; @@ -974,7 +991,7 @@ impl, I: Instance> Currency for Module where })?; // Emit transfer event. 
- Self::deposit_event(RawEvent::Transfer(transactor.clone(), dest.clone(), value)); + Self::deposit_event(Event::Transfer(transactor.clone(), dest.clone(), value)); Ok(()) } @@ -1109,7 +1126,7 @@ impl, I: Instance> Currency for Module where } } -impl, I: Instance> ReservableCurrency for Module where +impl, I: 'static> ReservableCurrency for Pallet where T::Balance: MaybeSerializeDeserialize + Debug { /// Check if `who` can reserve `value` from their free balance. @@ -1140,7 +1157,7 @@ impl, I: Instance> ReservableCurrency for Module, I: Instance> ReservableCurrency for Module, I: Instance> ReservableCurrency for Module, I: Instance> ReservableCurrency for Module, I: Instance> OnKilledAccount for Module { +impl, I: 'static> OnKilledAccount for Pallet { fn on_killed_account(who: &T::AccountId) { Account::::mutate_exists(who, |account| { let total = account.as_ref().map(|acc| acc.total()).unwrap_or_default(); if !total.is_zero() { T::DustRemoval::on_unbalanced(NegativeImbalance::new(total)); - Self::deposit_event(RawEvent::DustLost(who.clone(), total)); + Self::deposit_event(Event::DustLost(who.clone(), total)); } *account = None; }); } } -impl, I: Instance> LockableCurrency for Module +impl, I: 'static> LockableCurrency for Pallet where T::Balance: MaybeSerializeDeserialize + Debug { @@ -1302,7 +1319,7 @@ where } } -impl, I: Instance> IsDeadAccount for Module where +impl, I: 'static> IsDeadAccount for Pallet where T::Balance: MaybeSerializeDeserialize + Debug { fn is_dead_account(who: &T::AccountId) -> bool { diff --git a/frame/balances/src/tests.rs b/frame/balances/src/tests.rs index 728bf036bb3b9..e0712853bd703 100644 --- a/frame/balances/src/tests.rs +++ b/frame/balances/src/tests.rs @@ -52,10 +52,7 @@ macro_rules! 
decl_tests { const ID_1: LockIdentifier = *b"1 "; const ID_2: LockIdentifier = *b"2 "; - pub type System = frame_system::Module<$test>; - pub type Balances = Module<$test>; - - pub const CALL: &<$test as frame_system::Config>::Call = &$crate::tests::CallWithDispatchInfo; + pub const CALL: &<$test as frame_system::Config>::Call = &Call::System(system::Call::remark(vec![])); /// create a transaction info struct from weight. Handy to avoid building the whole struct. pub fn info_from_weight(w: Weight) -> DispatchInfo { @@ -492,7 +489,7 @@ macro_rules! decl_tests { assert_ok!(Balances::repatriate_reserved(&1, &2, 41, Status::Free), 0); assert_eq!( last_event(), - Event::balances(RawEvent::ReserveRepatriated(1, 2, 41, Status::Free)), + Event::balances(balances::Event::ReserveRepatriated(1, 2, 41, Status::Free)), ); assert_eq!(Balances::reserved_balance(1), 69); assert_eq!(Balances::free_balance(1), 0); @@ -634,7 +631,7 @@ macro_rules! decl_tests { fn cannot_set_genesis_value_below_ed() { ($existential_deposit).with(|v| *v.borrow_mut() = 11); let mut t = frame_system::GenesisConfig::default().build_storage::<$test>().unwrap(); - let _ = GenesisConfig::<$test> { + let _ = balances::GenesisConfig::<$test> { balances: vec![(1, 10)], }.assimilate_storage(&mut t).unwrap(); } @@ -713,7 +710,7 @@ macro_rules! decl_tests { assert_eq!( last_event(), - Event::balances(RawEvent::Reserved(1, 10)), + Event::balances(balances::Event::Reserved(1, 10)), ); System::set_block_number(3); @@ -721,7 +718,7 @@ macro_rules! decl_tests { assert_eq!( last_event(), - Event::balances(RawEvent::Unreserved(1, 5)), + Event::balances(balances::Event::Unreserved(1, 5)), ); System::set_block_number(4); @@ -730,7 +727,7 @@ macro_rules! decl_tests { // should only unreserve 5 assert_eq!( last_event(), - Event::balances(RawEvent::Unreserved(1, 5)), + Event::balances(balances::Event::Unreserved(1, 5)), ); }); } @@ -746,9 +743,9 @@ macro_rules! 
decl_tests { assert_eq!( events(), [ - Event::system(system::RawEvent::NewAccount(1)), - Event::balances(RawEvent::Endowed(1, 100)), - Event::balances(RawEvent::BalanceSet(1, 100, 0)), + Event::system(system::Event::NewAccount(1)), + Event::balances(balances::Event::Endowed(1, 100)), + Event::balances(balances::Event::BalanceSet(1, 100, 0)), ] ); @@ -757,8 +754,8 @@ macro_rules! decl_tests { assert_eq!( events(), [ - Event::balances(RawEvent::DustLost(1, 99)), - Event::system(system::RawEvent::KilledAccount(1)) + Event::balances(balances::Event::DustLost(1, 99)), + Event::system(system::Event::KilledAccount(1)) ] ); }); @@ -775,9 +772,9 @@ macro_rules! decl_tests { assert_eq!( events(), [ - Event::system(system::RawEvent::NewAccount(1)), - Event::balances(RawEvent::Endowed(1, 100)), - Event::balances(RawEvent::BalanceSet(1, 100, 0)), + Event::system(system::Event::NewAccount(1)), + Event::balances(balances::Event::Endowed(1, 100)), + Event::balances(balances::Event::BalanceSet(1, 100, 0)), ] ); diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index 7cb9b9d502ba5..80829bfec13a2 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -20,35 +20,44 @@ #![cfg(test)] use sp_runtime::{ - traits::IdentityLookup, + traits::{Block as _, IdentityLookup}, testing::Header, }; use sp_core::H256; use sp_io; -use frame_support::{impl_outer_origin, impl_outer_event, parameter_types}; +use frame_support::{impl_outer_origin, impl_outer_event, parameter_types, StorageValue}; +use frame_support::traits::Get; use frame_support::weights::{Weight, DispatchInfo, IdentityFee}; use pallet_transaction_payment::CurrencyAdapter; -use crate::{GenesisConfig, Module, Config, decl_tests, tests::CallWithDispatchInfo}; +use std::cell::RefCell; +use crate::{Pallet, Config, decl_tests}; use frame_system as system; -impl_outer_origin!{ - pub enum Origin for Test {} -} +use crate as balances; -mod balances { - pub use 
crate::Event; -} +pub type Block = sp_runtime::generic::Block; +pub type UncheckedExtrinsic = sp_runtime::generic::UncheckedExtrinsic; -impl_outer_event! { - pub enum Event for Test { - system, - balances, +frame_support::construct_runtime!( + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + System: system::{Module, Call, Event, Config}, + Balances: balances::{Pallet, Call, Event, Config}, } +); + +thread_local! { + static EXISTENTIAL_DEPOSIT: RefCell = RefCell::new(0); +} + +pub struct ExistentialDeposit; +impl Get for ExistentialDeposit { + fn get() -> u64 { EXISTENTIAL_DEPOSIT.with(|v| *v.borrow()) } } -// Workaround for https://github.com/rust-lang/rust/issues/26925 . Remove when sorted. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct Test; parameter_types! { pub const BlockHashCount: u64 = 250; pub BlockWeights: frame_system::limits::BlockWeights = @@ -63,7 +72,7 @@ impl frame_system::Config for Test { type Origin = Origin; type Index = u64; type BlockNumber = u64; - type Call = CallWithDispatchInfo; + type Call = Call; type Hash = H256; type Hashing = ::sp_runtime::traits::BlakeTwo256; type AccountId = u64; @@ -72,7 +81,7 @@ impl frame_system::Config for Test { type Event = Event; type BlockHashCount = BlockHashCount; type Version = (); - type PalletInfo = (); + type PalletInfo = PalletInfo; type AccountData = super::AccountData; type OnNewAccount = (); type OnKilledAccount = (); @@ -83,7 +92,7 @@ parameter_types! 
{ pub const TransactionByteFee: u64 = 1; } impl pallet_transaction_payment::Config for Test { - type OnChargeTransaction = CurrencyAdapter, ()>; + type OnChargeTransaction = CurrencyAdapter, ()>; type TransactionByteFee = TransactionByteFee; type WeightToFee = IdentityFee; type FeeMultiplierUpdate = (); diff --git a/frame/balances/src/tests_local.rs b/frame/balances/src/tests_local.rs index 887b280945f1a..3991782cd85df 100644 --- a/frame/balances/src/tests_local.rs +++ b/frame/balances/src/tests_local.rs @@ -20,36 +20,44 @@ #![cfg(test)] use sp_runtime::{ - traits::IdentityLookup, + traits::{Block as _, IdentityLookup}, testing::Header, }; use sp_core::H256; use sp_io; -use frame_support::{impl_outer_origin, impl_outer_event, parameter_types}; -use frame_support::traits::StorageMapShim; +use frame_support::{parameter_types, StorageValue}; +use frame_support::traits::{Get, StorageMapShim}; use frame_support::weights::{Weight, DispatchInfo, IdentityFee}; -use crate::{GenesisConfig, Module, Config, decl_tests, tests::CallWithDispatchInfo}; +use std::cell::RefCell; +use crate::{Pallet, Config, decl_tests}; use pallet_transaction_payment::CurrencyAdapter; use frame_system as system; -impl_outer_origin!{ - pub enum Origin for Test {} -} +use crate as balances; + +pub type Block = sp_runtime::generic::Block; +pub type UncheckedExtrinsic = sp_runtime::generic::UncheckedExtrinsic; + +frame_support::construct_runtime!( + pub enum Test where + Block = Block, + NodeBlock = Block, + UncheckedExtrinsic = UncheckedExtrinsic + { + System: system::{Pallet, Call, Event, Config}, + Balances: balances::{Pallet, Call, Event, Config}, + } +); -mod balances { - pub use crate::Event; +thread_local! { + static EXISTENTIAL_DEPOSIT: RefCell = RefCell::new(0); } -impl_outer_event! 
{ - pub enum Event for Test { - system, - balances, - } +pub struct ExistentialDeposit; +impl Get for ExistentialDeposit { + fn get() -> u64 { EXISTENTIAL_DEPOSIT.with(|v| *v.borrow()) } } -// Workaround for https://github.com/rust-lang/rust/issues/26925 . Remove when sorted. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct Test; parameter_types! { pub const BlockHashCount: u64 = 250; pub BlockWeights: frame_system::limits::BlockWeights = @@ -64,7 +72,7 @@ impl frame_system::Config for Test { type Origin = Origin; type Index = u64; type BlockNumber = u64; - type Call = CallWithDispatchInfo; + type Call = Call; type Hash = H256; type Hashing = ::sp_runtime::traits::BlakeTwo256; type AccountId = u64; @@ -73,10 +81,10 @@ impl frame_system::Config for Test { type Event = Event; type BlockHashCount = BlockHashCount; type Version = (); - type PalletInfo = (); + type PalletInfo = PalletInfo; type AccountData = super::AccountData; type OnNewAccount = (); - type OnKilledAccount = Module; + type OnKilledAccount = Pallet; type SystemWeightInfo = (); type SS58Prefix = (); } @@ -84,7 +92,7 @@ parameter_types! 
{ pub const TransactionByteFee: u64 = 1; } impl pallet_transaction_payment::Config for Test { - type OnChargeTransaction = CurrencyAdapter, ()>; + type OnChargeTransaction = CurrencyAdapter, ()>; type TransactionByteFee = TransactionByteFee; type WeightToFee = IdentityFee; type FeeMultiplierUpdate = (); @@ -137,7 +145,7 @@ impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { self.set_associated_consts(); let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - GenesisConfig:: { + balances::GenesisConfig:: { balances: if self.monied { vec![ (1, 10 * self.existential_deposit), @@ -171,8 +179,8 @@ fn emit_events_with_no_existential_deposit_suicide_with_dust() { events(), [ Event::system(system::RawEvent::NewAccount(1)), - Event::balances(RawEvent::Endowed(1, 100)), - Event::balances(RawEvent::BalanceSet(1, 100, 0)), + Event::balances(balances::Event::Endowed(1, 100)), + Event::balances(balances::Event::BalanceSet(1, 100, 0)), ] ); @@ -186,7 +194,7 @@ fn emit_events_with_no_existential_deposit_suicide_with_dust() { assert_eq!( events(), [ - Event::balances(RawEvent::DustLost(1, 1)), + Event::balances(balances::Event::DustLost(1, 1)), Event::system(system::RawEvent::KilledAccount(1)) ] ); diff --git a/frame/support/src/traits.rs b/frame/support/src/traits.rs index 0b2d3bceea5ec..b3a982561d99f 100644 --- a/frame/support/src/traits.rs +++ b/frame/support/src/traits.rs @@ -1035,7 +1035,7 @@ pub trait Currency { } /// Status of funds. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, ::scale_info::TypeInfo)] pub enum BalanceStatus { /// Funds are free, as corresponding to `free` item in Balances. 
Free, From a0c315d485e3a52d8de61e5b9fce5d99510d127e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 14 Dec 2020 16:07:22 +0000 Subject: [PATCH 018/503] Copied Balances pallet impl from gui-macro-attribute-everywhere CAUTION where this may overwrite any more recent changes that weren't on that branch. # Conflicts: # frame/balances/src/tests_composite.rs --- frame/balances/src/lib.rs | 136 ++++++++++++++------------ frame/balances/src/tests.rs | 30 +++--- frame/balances/src/tests_composite.rs | 10 +- frame/balances/src/tests_local.rs | 12 +-- 4 files changed, 99 insertions(+), 89 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 0bacdf8ec76e6..6b500178373f4 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -19,7 +19,7 @@ //! //! The Balances module provides functionality for handling accounts and balances. //! -//! - [`balances::Config`](./trait.Config.html) +//! - [`balances::Config`](./trait.Trait.html) //! - [`Call`](./enum.Call.html) //! - [`Module`](./struct.Module.html) //! @@ -79,7 +79,7 @@ //! - [`Imbalance`](../frame_support/traits/trait.Imbalance.html): Functions for handling //! imbalances between total issuance in the system and account balances. Must be used when a function //! creates new funds (e.g. a reward) or destroys some funds (e.g. a system fee). -//! - [`IsDeadAccount`](../frame_support/traits/trait.IsDeadAccount.html): Determiner to say whether a +//! - [`IsDeadAccount`](../frame_system/trait.IsDeadAccount.html): Determiner to say whether a //! given account is unused. //! //! ## Interface @@ -99,12 +99,12 @@ //! //! ``` //! use frame_support::traits::Currency; -//! # pub trait Config: frame_system::Config { +//! # pub trait Trait: frame_system::Config { //! # type Currency: Currency; //! # } //! -//! pub type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; -//! pub type NegativeImbalanceOf = <::Currency as Currency<::AccountId>>::NegativeImbalance; +//! 
pub type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; +//! pub type NegativeImbalanceOf = <::Currency as Currency<::AccountId>>::NegativeImbalance; //! //! # fn main() {} //! ``` @@ -145,7 +145,7 @@ //! //! ## Assumptions //! -//! * Total issued balanced of all accounts should be less than `Config::Balance::max_value()`. +//! * Total issued balanced of all accounts should be less than `Trait::Balance::max_value()`. #![cfg_attr(not(feature = "std"), no_std)] @@ -160,7 +160,7 @@ use sp_std::prelude::*; use sp_std::{cmp, result, mem, fmt::Debug, ops::BitOr, convert::Infallible}; use codec::{Codec, Encode, Decode}; use frame_support::{ - Parameter, ensure, + ensure, traits::{ Currency, OnKilledAccount, OnUnbalanced, TryDrop, StoredMap, WithdrawReasons, LockIdentifier, LockableCurrency, ExistenceRequirement, @@ -171,11 +171,11 @@ use frame_support::{ use sp_runtime::{ RuntimeDebug, DispatchResult, DispatchError, traits::{ - Zero, AtLeast32BitUnsigned, StaticLookup, Member, CheckedAdd, CheckedSub, + Zero, AtLeast32BitUnsigned, StaticLookup, CheckedAdd, CheckedSub, MaybeSerializeDeserialize, Saturating, Bounded, }, }; -use frame_system::{self as system, ensure_signed, ensure_root}; +use frame_system::{self as system}; pub use self::imbalances::{PositiveImbalance, NegativeImbalance}; pub use weights::WeightInfo; @@ -214,15 +214,22 @@ pub mod pallet { type MaxLocks: Get; } + pub type DefaultInstance = (); + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet(PhantomData<(T, I)>); + /// Deperacated name for Pallet + pub type Module = Pallet; + #[pallet::hooks] impl, I: 'static> Hooks> for Pallet {} #[pallet::call] impl, I: 'static> Pallet - where ::Source: ::scale_info::TypeInfo + where + ::Source: ::scale_info::TypeInfo { /// Transfer some liquid free balance to another account. 
/// @@ -317,7 +324,7 @@ pub mod pallet { (account.free, account.reserved) }); - Self::deposit_event(Event::BalanceSet(who, free, reserved)); + Self::deposit_event(RawEvent::BalanceSet(who, free, reserved)); Ok(().into()) } @@ -389,6 +396,9 @@ pub mod pallet { ReserveRepatriated(T::AccountId, T::AccountId, T::Balance, Status), } + /// Deprecated name for event. + pub type RawEvent = Event; + #[pallet::error] pub enum Error { /// Vesting balance too high to send value @@ -589,7 +599,7 @@ impl Default for Releases { } } -impl, I: 'static> Pallet { +impl, I: 'static> Module { // PRIVATE MUTABLES /// Get the free balance of an account. @@ -633,7 +643,7 @@ impl, I: 'static> Pallet { if total < T::ExistentialDeposit::get() { if !total.is_zero() { T::DustRemoval::on_unbalanced(NegativeImbalance::new(total)); - Self::deposit_event(Event::DustLost(who.clone(), total)); + Self::deposit_event(RawEvent::DustLost(who.clone(), total)); } None } else { @@ -680,7 +690,7 @@ impl, I: 'static> Pallet { }) }).map(|(maybe_endowed, result)| { if let Some(endowed) = maybe_endowed { - Self::deposit_event(Event::Endowed(who.clone(), endowed)); + Self::deposit_event(RawEvent::Endowed(who.clone(), endowed)); } result }) @@ -728,14 +738,14 @@ impl, I: 'static> Pallet { // of the inner member. mod imbalances { use super::{ - result, Imbalance, Config, Zero, Saturating, TryDrop, + result, DefaultInstance, Imbalance, Config, Zero, Saturating, TryDrop, }; use sp_std::mem; /// Opaque, move-only struct with private fields that serves as a token denoting that /// funds have been created without any equal and opposite accounting. #[must_use] - pub struct PositiveImbalance, I: 'static>(T::Balance); + pub struct PositiveImbalance, I: 'static=DefaultInstance>(T::Balance); impl, I: 'static> PositiveImbalance { /// Create a new positive imbalance from a balance. 
@@ -747,7 +757,7 @@ mod imbalances { /// Opaque, move-only struct with private fields that serves as a token denoting that /// funds have been destroyed without any equal and opposite accounting. #[must_use] - pub struct NegativeImbalance, I: 'static>(T::Balance); + pub struct NegativeImbalance, I: 'static=DefaultInstance>(T::Balance); impl, I: 'static> NegativeImbalance { /// Create a new negative imbalance from a balance. @@ -877,7 +887,7 @@ mod imbalances { } } -impl, I: 'static> Currency for Pallet where +impl, I: 'static> Currency for Module where T::Balance: MaybeSerializeDeserialize + Debug { type Balance = T::Balance; @@ -991,7 +1001,7 @@ impl, I: 'static> Currency for Pallet where })?; // Emit transfer event. - Self::deposit_event(Event::Transfer(transactor.clone(), dest.clone(), value)); + Self::deposit_event(RawEvent::Transfer(transactor.clone(), dest.clone(), value)); Ok(()) } @@ -1079,54 +1089,54 @@ impl, I: 'static> Currency for Pallet where if value.is_zero() { return Ok(NegativeImbalance::zero()); } Self::try_mutate_account(who, |account, _| - -> Result - { - let new_free_account = account.free.checked_sub(&value) - .ok_or(Error::::InsufficientBalance)?; + -> Result + { + let new_free_account = account.free.checked_sub(&value) + .ok_or(Error::::InsufficientBalance)?; - // bail if we need to keep the account alive and this would kill it. - let ed = T::ExistentialDeposit::get(); - let would_be_dead = new_free_account + account.reserved < ed; - let would_kill = would_be_dead && account.free + account.reserved >= ed; - ensure!(liveness == AllowDeath || !would_kill, Error::::KeepAlive); + // bail if we need to keep the account alive and this would kill it. 
+ let ed = T::ExistentialDeposit::get(); + let would_be_dead = new_free_account + account.reserved < ed; + let would_kill = would_be_dead && account.free + account.reserved >= ed; + ensure!(liveness == AllowDeath || !would_kill, Error::::KeepAlive); - Self::ensure_can_withdraw(who, value, reasons, new_free_account)?; + Self::ensure_can_withdraw(who, value, reasons, new_free_account)?; - account.free = new_free_account; + account.free = new_free_account; - Ok(NegativeImbalance::new(value)) - }) + Ok(NegativeImbalance::new(value)) + }) } /// Force the new free balance of a target account `who` to some new value `balance`. fn make_free_balance_be(who: &T::AccountId, value: Self::Balance) - -> SignedImbalance + -> SignedImbalance { Self::try_mutate_account(who, |account, is_new| - -> Result, ()> - { - let ed = T::ExistentialDeposit::get(); - // If we're attempting to set an existing account to less than ED, then - // bypass the entire operation. It's a no-op if you follow it through, but - // since this is an instance where we might account for a negative imbalance - // (in the dust cleaner of set_account) before we account for its actual - // equal and opposite cause (returned as an Imbalance), then in the - // instance that there's no other accounts on the system at all, we might - // underflow the issuance and our arithmetic will be off. - ensure!(value.saturating_add(account.reserved) >= ed || !is_new, ()); - - let imbalance = if account.free <= value { - SignedImbalance::Positive(PositiveImbalance::new(value - account.free)) - } else { - SignedImbalance::Negative(NegativeImbalance::new(account.free - value)) - }; - account.free = value; - Ok(imbalance) - }).unwrap_or_else(|_| SignedImbalance::Positive(Self::PositiveImbalance::zero())) + -> Result, ()> + { + let ed = T::ExistentialDeposit::get(); + // If we're attempting to set an existing account to less than ED, then + // bypass the entire operation. 
It's a no-op if you follow it through, but + // since this is an instance where we might account for a negative imbalance + // (in the dust cleaner of set_account) before we account for its actual + // equal and opposite cause (returned as an Imbalance), then in the + // instance that there's no other accounts on the system at all, we might + // underflow the issuance and our arithmetic will be off. + ensure!(value.saturating_add(account.reserved) >= ed || !is_new, ()); + + let imbalance = if account.free <= value { + SignedImbalance::Positive(PositiveImbalance::new(value - account.free)) + } else { + SignedImbalance::Negative(NegativeImbalance::new(account.free - value)) + }; + account.free = value; + Ok(imbalance) + }).unwrap_or_else(|_| SignedImbalance::Positive(Self::PositiveImbalance::zero())) } } -impl, I: 'static> ReservableCurrency for Pallet where +impl, I: 'static> ReservableCurrency for Module where T::Balance: MaybeSerializeDeserialize + Debug { /// Check if `who` can reserve `value` from their free balance. 
@@ -1157,7 +1167,7 @@ impl, I: 'static> ReservableCurrency for Pallet Self::ensure_can_withdraw(&who, value.clone(), WithdrawReasons::RESERVE, account.free) })?; - Self::deposit_event(Event::Reserved(who.clone(), value)); + Self::deposit_event(RawEvent::Reserved(who.clone(), value)); Ok(()) } @@ -1176,7 +1186,7 @@ impl, I: 'static> ReservableCurrency for Pallet actual }); - Self::deposit_event(Event::Unreserved(who.clone(), actual.clone())); + Self::deposit_event(RawEvent::Unreserved(who.clone(), actual.clone())); value - actual } @@ -1231,7 +1241,7 @@ impl, I: 'static> ReservableCurrency for Pallet }) })?; - Self::deposit_event(Event::ReserveRepatriated(slashed.clone(), beneficiary.clone(), actual, status)); + Self::deposit_event(RawEvent::ReserveRepatriated(slashed.clone(), beneficiary.clone(), actual, status)); Ok(value - actual) } } @@ -1241,22 +1251,22 @@ impl, I: 'static> ReservableCurrency for Pallet /// NOTE: You probably won't need to use this! This only needs to be "wired in" to System module /// if you're using the local balance storage. 
**If you're using the composite system account /// storage (which is the default in most examples and tests) then there's no need.** -impl, I: 'static> OnKilledAccount for Pallet { +impl, I: 'static> OnKilledAccount for Module { fn on_killed_account(who: &T::AccountId) { Account::::mutate_exists(who, |account| { let total = account.as_ref().map(|acc| acc.total()).unwrap_or_default(); if !total.is_zero() { T::DustRemoval::on_unbalanced(NegativeImbalance::new(total)); - Self::deposit_event(Event::DustLost(who.clone(), total)); + Self::deposit_event(RawEvent::DustLost(who.clone(), total)); } *account = None; }); } } -impl, I: 'static> LockableCurrency for Pallet -where - T::Balance: MaybeSerializeDeserialize + Debug +impl, I: 'static> LockableCurrency for Module + where + T::Balance: MaybeSerializeDeserialize + Debug { type Moment = T::BlockNumber; @@ -1319,7 +1329,7 @@ where } } -impl, I: 'static> IsDeadAccount for Pallet where +impl, I: 'static> IsDeadAccount for Module where T::Balance: MaybeSerializeDeserialize + Debug { fn is_dead_account(who: &T::AccountId) -> bool { diff --git a/frame/balances/src/tests.rs b/frame/balances/src/tests.rs index e0712853bd703..a38ad891eb857 100644 --- a/frame/balances/src/tests.rs +++ b/frame/balances/src/tests.rs @@ -19,7 +19,7 @@ #![cfg(test)] -#[derive(Debug)] +#[derive(Debug, codec::Encode, codec::Decode, Clone, Eq, PartialEq)] pub struct CallWithDispatchInfo; impl sp_runtime::traits::Dispatchable for CallWithDispatchInfo { type Origin = (); @@ -28,8 +28,8 @@ impl sp_runtime::traits::Dispatchable for CallWithDispatchInfo { type PostInfo = frame_support::weights::PostDispatchInfo; fn dispatch(self, _origin: Self::Origin) - -> sp_runtime::DispatchResultWithInfo { - panic!("Do not use dummy implementation for dispatch."); + -> sp_runtime::DispatchResultWithInfo { + panic!("Do not use dummy implementation for dispatch."); } } @@ -489,7 +489,7 @@ macro_rules! 
decl_tests { assert_ok!(Balances::repatriate_reserved(&1, &2, 41, Status::Free), 0); assert_eq!( last_event(), - Event::balances(balances::Event::ReserveRepatriated(1, 2, 41, Status::Free)), + Event::balances(RawEvent::ReserveRepatriated(1, 2, 41, Status::Free)), ); assert_eq!(Balances::reserved_balance(1), 69); assert_eq!(Balances::free_balance(1), 0); @@ -710,7 +710,7 @@ macro_rules! decl_tests { assert_eq!( last_event(), - Event::balances(balances::Event::Reserved(1, 10)), + Event::balances(RawEvent::Reserved(1, 10)), ); System::set_block_number(3); @@ -718,7 +718,7 @@ macro_rules! decl_tests { assert_eq!( last_event(), - Event::balances(balances::Event::Unreserved(1, 5)), + Event::balances(RawEvent::Unreserved(1, 5)), ); System::set_block_number(4); @@ -727,7 +727,7 @@ macro_rules! decl_tests { // should only unreserve 5 assert_eq!( last_event(), - Event::balances(balances::Event::Unreserved(1, 5)), + Event::balances(RawEvent::Unreserved(1, 5)), ); }); } @@ -743,9 +743,9 @@ macro_rules! decl_tests { assert_eq!( events(), [ - Event::system(system::Event::NewAccount(1)), - Event::balances(balances::Event::Endowed(1, 100)), - Event::balances(balances::Event::BalanceSet(1, 100, 0)), + Event::system(system::RawEvent::NewAccount(1)), + Event::balances(RawEvent::Endowed(1, 100)), + Event::balances(RawEvent::BalanceSet(1, 100, 0)), ] ); @@ -754,8 +754,8 @@ macro_rules! decl_tests { assert_eq!( events(), [ - Event::balances(balances::Event::DustLost(1, 99)), - Event::system(system::Event::KilledAccount(1)) + Event::balances(RawEvent::DustLost(1, 99)), + Event::system(system::RawEvent::KilledAccount(1)) ] ); }); @@ -772,9 +772,9 @@ macro_rules! 
decl_tests { assert_eq!( events(), [ - Event::system(system::Event::NewAccount(1)), - Event::balances(balances::Event::Endowed(1, 100)), - Event::balances(balances::Event::BalanceSet(1, 100, 0)), + Event::system(system::RawEvent::NewAccount(1)), + Event::balances(RawEvent::Endowed(1, 100)), + Event::balances(RawEvent::BalanceSet(1, 100, 0)), ] ); diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index 80829bfec13a2..bc0f93f259f97 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -25,12 +25,12 @@ use sp_runtime::{ }; use sp_core::H256; use sp_io; -use frame_support::{impl_outer_origin, impl_outer_event, parameter_types, StorageValue}; +use frame_support::{parameter_types, StorageValue}; use frame_support::traits::Get; use frame_support::weights::{Weight, DispatchInfo, IdentityFee}; use pallet_transaction_payment::CurrencyAdapter; use std::cell::RefCell; -use crate::{Pallet, Config, decl_tests}; +use crate::{Module, Config, decl_tests}; use frame_system as system; use crate as balances; @@ -45,7 +45,7 @@ frame_support::construct_runtime!( UncheckedExtrinsic = UncheckedExtrinsic { System: system::{Module, Call, Event, Config}, - Balances: balances::{Pallet, Call, Event, Config}, + Balances: balances::{Module, Call, Event, Config}, } ); @@ -92,7 +92,7 @@ parameter_types! 
{ pub const TransactionByteFee: u64 = 1; } impl pallet_transaction_payment::Config for Test { - type OnChargeTransaction = CurrencyAdapter, ()>; + type OnChargeTransaction = CurrencyAdapter, ()>; type TransactionByteFee = TransactionByteFee; type WeightToFee = IdentityFee; type FeeMultiplierUpdate = (); @@ -135,7 +135,7 @@ impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { self.set_associated_consts(); let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - GenesisConfig:: { + balances::GenesisConfig:: { balances: if self.monied { vec![ (1, 10 * self.existential_deposit), diff --git a/frame/balances/src/tests_local.rs b/frame/balances/src/tests_local.rs index 3991782cd85df..e5381fba8b31c 100644 --- a/frame/balances/src/tests_local.rs +++ b/frame/balances/src/tests_local.rs @@ -29,7 +29,7 @@ use frame_support::{parameter_types, StorageValue}; use frame_support::traits::{Get, StorageMapShim}; use frame_support::weights::{Weight, DispatchInfo, IdentityFee}; use std::cell::RefCell; -use crate::{Pallet, Config, decl_tests}; +use crate::{Module, Config, decl_tests}; use pallet_transaction_payment::CurrencyAdapter; use frame_system as system; @@ -84,7 +84,7 @@ impl frame_system::Config for Test { type PalletInfo = PalletInfo; type AccountData = super::AccountData; type OnNewAccount = (); - type OnKilledAccount = Pallet; + type OnKilledAccount = Module; type SystemWeightInfo = (); type SS58Prefix = (); } @@ -92,7 +92,7 @@ parameter_types! 
{ pub const TransactionByteFee: u64 = 1; } impl pallet_transaction_payment::Config for Test { - type OnChargeTransaction = CurrencyAdapter, ()>; + type OnChargeTransaction = CurrencyAdapter, ()>; type TransactionByteFee = TransactionByteFee; type WeightToFee = IdentityFee; type FeeMultiplierUpdate = (); @@ -179,8 +179,8 @@ fn emit_events_with_no_existential_deposit_suicide_with_dust() { events(), [ Event::system(system::RawEvent::NewAccount(1)), - Event::balances(balances::Event::Endowed(1, 100)), - Event::balances(balances::Event::BalanceSet(1, 100, 0)), + Event::balances(RawEvent::Endowed(1, 100)), + Event::balances(RawEvent::BalanceSet(1, 100, 0)), ] ); @@ -194,7 +194,7 @@ fn emit_events_with_no_existential_deposit_suicide_with_dust() { assert_eq!( events(), [ - Event::balances(balances::Event::DustLost(1, 1)), + Event::balances(RawEvent::DustLost(1, 1)), Event::system(system::RawEvent::KilledAccount(1)) ] ); From f06700eed23a42e1a561002c6af3180b1b05bfa2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 14 Dec 2020 16:55:58 +0000 Subject: [PATCH 019/503] Copied System pallet impl from gui-macro-attribute-everywhere !!! CAUTION where this may overwrite any more recent changes that weren't on that branch. 
# Conflicts: # frame/system/src/lib.rs --- frame/system/src/lib.rs | 938 +++++++++++++++++++++++----------------- 1 file changed, 533 insertions(+), 405 deletions(-) diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index eafee334885ab..6e35caa6314dc 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -102,7 +102,7 @@ use sp_std::marker::PhantomData; use sp_std::fmt::Debug; use sp_version::RuntimeVersion; use sp_runtime::{ - RuntimeDebug, Perbill, DispatchError, Either, generic, + RuntimeDebug, Perbill, Either, generic, traits::{ self, CheckEqual, AtLeast32Bit, Zero, Lookup, LookupError, SimpleBitOps, Hash, Member, MaybeDisplay, BadOrigin, @@ -114,8 +114,7 @@ use sp_runtime::{ use sp_core::{ChangesTrieConfiguration, storage::well_known_keys}; use frame_support::{ - decl_module, decl_event, decl_storage, decl_error, Parameter, ensure, debug, - storage, + Parameter, debug, storage, traits::{ Contains, Get, PalletInfo, OnNewAccount, OnKilledAccount, IsDeadAccount, Happened, StoredMap, EnsureOrigin, OriginTrait, Filter, @@ -125,12 +124,13 @@ use frame_support::{ extract_actual_weight, PerDispatchClass, }, dispatch::DispatchResultWithPostInfo, - scale_info, }; use codec::{Encode, Decode, FullCodec, EncodeLike}; #[cfg(any(feature = "std", test))] use sp_io::TestExternalities; +#[cfg(feature = "std")] +use frame_support::traits::GenesisBuild; pub mod offchain; pub mod limits; @@ -162,399 +162,177 @@ pub fn extrinsics_data_root(xts: Vec>) -> H::Output { H::ordered_trie_root(xts) } -/// An object to track the currently used extrinsic weight in a block. -pub type ConsumedWeight = PerDispatchClass; - -/// System configuration trait. Implemented by runtime. -pub trait Config: 'static + Eq + Clone { - /// The basic call filter to use in Origin. All origins are built with this filter as base, - /// except Root. - type BaseCallFilter: Filter; - - /// Block & extrinsics weights: base values and limits. 
- type BlockWeights: Get; - - /// The maximum length of a block (in bytes). - type BlockLength: Get; - - /// The `Origin` type used by dispatchable calls. - type Origin: - Into, Self::Origin>> - + From> - + Clone - + OriginTrait; - - /// The aggregated `Call` type. - type Call: Dispatchable + Debug; - - /// Account index (aka nonce) type. This stores the number of previous transactions associated - /// with a sender account. - type Index: +pub use pallet::*; + +#[frame_support::pallet] +pub mod pallet { + use frame_support::pallet_prelude::*; + use super::pallet_prelude::*; + use super::*; + use crate as frame_system; + + /// System configuration trait. Implemented by runtime. + #[pallet::config] + #[pallet::disable_frame_system_supertrait_check] + pub trait Config: 'static + Eq + Clone { + /// The basic call filter to use in Origin. All origins are built with this filter as base, + /// except Root. + type BaseCallFilter: Filter; + + /// The `Origin` type used by dispatchable calls. + type Origin: + Into, Self::Origin>> + + From> + + Clone + + OriginTrait; + + /// The aggregated `Call` type. + type Call: Dispatchable + Debug; + + /// Account index (aka nonce) type. This stores the number of previous transactions + /// associated with a sender account. + type Index: Parameter + Member + MaybeSerialize + Debug + Default + MaybeDisplay + AtLeast32Bit + Copy; - /// The block number type used by the runtime. - type BlockNumber: + /// The block number type used by the runtime. + type BlockNumber: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + AtLeast32BitUnsigned + Default + Bounded + Copy + sp_std::hash::Hash + sp_std::str::FromStr + MaybeMallocSizeOf; - /// The output of the `Hashing` function. 
- type Hash: - Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + SimpleBitOps + Ord - + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + MaybeMallocSizeOf; - - /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). - type Hashing: Hash; - - /// The user account identifier type for the runtime. - type AccountId: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord - + Default + scale_info::TypeInfo + 'static; - - /// Converting trait to take a source type and convert to `AccountId`. - /// - /// Used to define the type and conversion mechanism for referencing accounts in transactions. - /// It's perfectly reasonable for this to be an identity conversion (with the source type being - /// `AccountId`), but other modules (e.g. Indices module) may provide more functional/efficient - /// alternatives. - type Lookup: StaticLookup; - - /// The block header. - type Header: Parameter + traits::Header< - Number = Self::BlockNumber, - Hash = Self::Hash, - >; - - /// The aggregated event type of the runtime. - type Event: Parameter + Member + From> + Debug; - - /// Maximum number of block number to block hash mappings to keep (oldest pruned first). - type BlockHashCount: Get; - - /// The weight of runtime database operations the runtime can invoke. - type DbWeight: Get; - - /// Get the chain's current version. - type Version: Get; - - /// Provides information about the pallet setup in the runtime. - /// - /// Expects the `PalletInfo` type that is being generated by `construct_runtime!` in the - /// runtime. - /// - /// For tests it is okay to use `()` as type, however it will provide "useless" data. - type PalletInfo: PalletInfo; - - /// Data to be associated with an account (other than nonce/transaction counter, which this - /// module does regardless). - type AccountData: Member + FullCodec + Clone + Default; - - /// Handler for when a new account has just been created. 
- type OnNewAccount: OnNewAccount; - - /// A function that is invoked when an account has been determined to be dead. - /// - /// All resources should be cleaned up associated with the given account. - type OnKilledAccount: OnKilledAccount; - - type SystemWeightInfo: WeightInfo; - - /// The designated SS85 prefix of this chain. - /// - /// This replaces the "ss58Format" property declared in the chain spec. Reason is - /// that the runtime should know about the prefix in order to make use of it as - /// an identifier of the chain. - type SS58Prefix: Get; -} - -pub type DigestOf = generic::Digest<::Hash>; -pub type DigestItemOf = generic::DigestItem<::Hash>; - -pub type Key = Vec; -pub type KeyValue = (Vec, Vec); - -/// A phase of a block's execution. -#[derive(Encode, Decode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] -pub enum Phase { - /// Applying an extrinsic. - ApplyExtrinsic(u32), - /// Finalizing the block. - Finalization, - /// Initializing the block. - Initialization, -} - -impl Default for Phase { - fn default() -> Self { - Self::Initialization - } -} - -/// Record of an event happening. -#[derive(Encode, Decode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] -pub struct EventRecord { - /// The phase of the block it happened in. - pub phase: Phase, - /// The event itself. - pub event: E, - /// The list of the topics this event has. - pub topics: Vec, -} - -/// Origin for the System module. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode)] -pub enum RawOrigin { - /// The system itself ordained this dispatch to happen: this is the highest privilege level. - Root, - /// It is signed by some public key and we provide the `AccountId`. - Signed(AccountId), - /// It is signed by nobody, can be either: - /// * included and agreed upon by the validators anyway, - /// * or unsigned transaction validated by a module. 
- None, -} - -impl From> for RawOrigin { - fn from(s: Option) -> RawOrigin { - match s { - Some(who) => RawOrigin::Signed(who), - None => RawOrigin::None, - } - } -} + /// The output of the `Hashing` function. + type Hash: + Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + SimpleBitOps + + Ord + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + + MaybeMallocSizeOf; -/// Exposed trait-generic origin type. -pub type Origin = RawOrigin<::AccountId>; + /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). + type Hashing: Hash; -// Create a Hash with 69 for each byte, -// only used to build genesis config. -#[cfg(feature = "std")] -fn hash69 + Default>() -> T { - let mut h = T::default(); - h.as_mut().iter_mut().for_each(|byte| *byte = 69); - h -} - -/// This type alias represents an index of an event. -/// -/// We use `u32` here because this index is used as index for `Events` -/// which can't contain more than `u32::max_value()` items. -type EventIndex = u32; + /// The user account identifier type for the runtime. + type AccountId: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord + + Default; -/// Type used to encode the number of references an account has. -pub type RefCount = u32; - -/// Information of an account. -#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] -pub struct AccountInfo { - /// The number of transactions this account has sent. - pub nonce: Index, - /// The number of other modules that currently depend on this account's existence. The account - /// cannot be reaped until this is zero. - pub refcount: RefCount, - /// The additional data that belongs to this account. Used to store the balance(s) in a lot of - /// chains. - pub data: AccountData, -} - -/// Stores the `spec_version` and `spec_name` of when the last runtime upgrade -/// happened. 
-#[derive(sp_runtime::RuntimeDebug, Encode, Decode)] -#[cfg_attr(feature = "std", derive(PartialEq))] -pub struct LastRuntimeUpgradeInfo { - pub spec_version: codec::Compact, - pub spec_name: sp_runtime::RuntimeString, -} - -impl LastRuntimeUpgradeInfo { - /// Returns if the runtime was upgraded in comparison of `self` and `current`. - /// - /// Checks if either the `spec_version` increased or the `spec_name` changed. - pub fn was_upgraded(&self, current: &sp_version::RuntimeVersion) -> bool { - current.spec_version > self.spec_version.0 || current.spec_name != self.spec_name - } -} - -impl From for LastRuntimeUpgradeInfo { - fn from(version: sp_version::RuntimeVersion) -> Self { - Self { - spec_version: version.spec_version.into(), - spec_name: version.spec_name, - } - } -} - -decl_storage! { - trait Store for Module as System { - /// The full account information for a particular account ID. - pub Account get(fn account): - map hasher(blake2_128_concat) T::AccountId => AccountInfo; - - /// Total extrinsics count for the current block. - ExtrinsicCount: Option; - - /// The current weight for the block. - BlockWeight get(fn block_weight): ConsumedWeight; - - /// Total length (in bytes) for all extrinsics put together, for the current block. - AllExtrinsicsLen: Option; - - /// Map of block numbers to block hashes. - pub BlockHash get(fn block_hash) build(|_| vec![(T::BlockNumber::zero(), hash69())]): - map hasher(twox_64_concat) T::BlockNumber => T::Hash; + /// Converting trait to take a source type and convert to `AccountId`. + /// + /// Used to define the type and conversion mechanism for referencing accounts in + /// transactions. + /// It's perfectly reasonable for this to be an identity conversion (with the source type + /// being `AccountId`), but other modules (e.g. Indices module) may provide more + /// functional/efficient alternatives. + type Lookup: StaticLookup; + + /// The block header. 
+ type Header: Parameter + traits::Header< + Number = Self::BlockNumber, + Hash = Self::Hash, + >; + + /// The aggregated event type of the runtime. + type Event: Parameter + Member + From> + Debug + + IsType<::Event>; + + /// Maximum number of block number to block hash mappings to keep (oldest pruned first). + #[pallet::constant] + type BlockHashCount: Get; + + /// The maximum weight of a block. + #[pallet::constant] + type MaximumBlockWeight: Get; - /// Extrinsics data for the current block (maps an extrinsic's index to its data). - ExtrinsicData get(fn extrinsic_data): map hasher(twox_64_concat) u32 => Vec; + /// The weight of runtime database operations the runtime can invoke. + #[pallet::constant] + type DbWeight: Get; - /// The current block number being processed. Set by `execute_block`. - Number get(fn block_number): T::BlockNumber; + /// The base weight of executing a block, independent of the transactions in the block. + #[pallet::constant] + type BlockExecutionWeight: Get; - /// Hash of the previous block. - ParentHash get(fn parent_hash) build(|_| hash69()): T::Hash; + /// The base weight of an Extrinsic in the block, independent of the of extrinsic being + /// executed. + #[pallet::constant] + type ExtrinsicBaseWeight: Get; - /// Digest of the current block, also part of the block header. - Digest get(fn digest): DigestOf; + /// The maximal weight of a single Extrinsic. This should be set to at most + /// `MaximumBlockWeight - AverageOnInitializeWeight`. The limit only applies to extrinsics + /// containing `Normal` dispatch class calls. + type MaximumExtrinsicWeight: Get; - /// Events deposited for the current block. - Events get(fn events): Vec>; + /// The maximum length of a block (in bytes). + #[pallet::constant] + type MaximumBlockLength: Get; - /// The number of events in the `Events` list. - EventCount get(fn event_count): EventIndex; + /// The portion of the block that is available to normal transaction. 
The rest can only be + /// used by operational transactions. This can be applied to any resource limit managed by + /// the system module, including weight and length. + type AvailableBlockRatio: Get; - // TODO: https://github.com/paritytech/substrate/issues/2553 - // Possibly, we can improve it by using something like: - // `Option<(BlockNumber, Vec)>`, however in this case we won't be able to use - // `EventTopics::append`. + /// Get the chain's current version. + type Version: Get; - /// Mapping between a topic (represented by T::Hash) and a vector of indexes - /// of events in the `>` list. + /// Provides information about the pallet setup in the runtime. /// - /// All topic vectors have deterministic storage locations depending on the topic. This - /// allows light-clients to leverage the changes trie storage tracking mechanism and - /// in case of changes fetch the list of events of interest. + /// Expects the `PalletInfo` type that is being generated by `construct_runtime!` in the + /// runtime. /// - /// The value has the type `(T::BlockNumber, EventIndex)` because if we used only just - /// the `EventIndex` then in case if the topic has the same contents on the next block - /// no notification will be triggered thus the event might be lost. - EventTopics get(fn event_topics): map hasher(blake2_128_concat) T::Hash => Vec<(T::BlockNumber, EventIndex)>; + /// For tests it is okay to use `()` as type, however it will provide "useless" data. + type PalletInfo: PalletInfo; - /// Stores the `spec_version` and `spec_name` of when the last runtime upgrade happened. - pub LastRuntimeUpgrade build(|_| Some(LastRuntimeUpgradeInfo::from(T::Version::get()))): Option; + /// Data to be associated with an account (other than nonce/transaction counter, which this + /// module does regardless). + type AccountData: Member + FullCodec + Clone + Default; - /// True if we have upgraded so that `type RefCount` is `u32`. False (default) if not. 
- UpgradedToU32RefCount build(|_| true): bool; + /// Handler for when a new account has just been created. + type OnNewAccount: OnNewAccount; - /// The execution phase of the block. - ExecutionPhase: Option; - } - add_extra_genesis { - config(changes_trie_config): Option; - #[serde(with = "sp_core::bytes")] - config(code): Vec; - - build(|config: &GenesisConfig| { - use codec::Encode; - - sp_io::storage::set(well_known_keys::CODE, &config.code); - sp_io::storage::set(well_known_keys::EXTRINSIC_INDEX, &0u32.encode()); - - if let Some(ref changes_trie_config) = config.changes_trie_config { - sp_io::storage::set( - well_known_keys::CHANGES_TRIE_CONFIG, - &changes_trie_config.encode(), - ); - } - }); - } -} - -decl_event!( - /// Event for the System module. - pub enum Event where AccountId = ::AccountId { - /// An extrinsic completed successfully. \[info\] - ExtrinsicSuccess(DispatchInfo), - /// An extrinsic failed. \[error, info\] - ExtrinsicFailed(DispatchError, DispatchInfo), - /// `:code` was updated. - CodeUpdated, - /// A new \[account\] was created. - NewAccount(AccountId), - /// An \[account\] was reaped. - KilledAccount(AccountId), - } -); - -decl_error! { - /// Error for the System module - pub enum Error for Module { - /// The name of specification does not match between the current runtime - /// and the new runtime. - InvalidSpecName, - /// The specification version is not allowed to decrease between the current runtime - /// and the new runtime. - SpecVersionNeedsToIncrease, - /// Failed to extract the runtime version from the new runtime. + /// A function that is invoked when an account has been determined to be dead. /// - /// Either calling `Core_version` or decoding `RuntimeVersion` failed. - FailedToExtractRuntimeVersion, - /// Suicide called when the account has non-default composite data. - NonDefaultComposite, - /// There is a non-zero reference count preventing the account from being purged. 
- NonZeroRefCount, - } -} - -/// Pallet struct placeholder on which is implemented the pallet logic. -/// -/// It is currently an alias for `Module` as old macros still generate/use old name. -pub type Pallet = Module; + /// All resources should be cleaned up associated with the given account. + type OnKilledAccount: OnKilledAccount; -decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=self { - type Error = Error; - - /// The maximum number of blocks to allow in mortal eras. - const BlockHashCount: T::BlockNumber = T::BlockHashCount::get(); - - /// The weight of runtime database operations the runtime can invoke. - const DbWeight: RuntimeDbWeight = T::DbWeight::get(); - - /// The weight configuration (limits & base values) for each class of extrinsics and block. - const BlockWeights: limits::BlockWeights = T::BlockWeights::get(); + type SystemWeightInfo: WeightInfo; /// The designated SS85 prefix of this chain. /// /// This replaces the "ss58Format" property declared in the chain spec. Reason is /// that the runtime should know about the prefix in order to make use of it as /// an identifier of the chain. 
- const SS58Prefix: u8 = T::SS58Prefix::get(); + type SS58Prefix: Get; +} + - fn on_runtime_upgrade() -> frame_support::weights::Weight { - if !UpgradedToU32RefCount::get() { + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(PhantomData); + + #[pallet::hooks] + impl Hooks> for Pallet { + fn on_runtime_upgrade() -> Weight { + if !>::get() { Account::::translate::<(T::Index, u8, T::AccountData), _>(|_key, (nonce, rc, data)| Some(AccountInfo { nonce, refcount: rc as RefCount, data }) ); - UpgradedToU32RefCount::put(true); - T::BlockWeights::get().max_block + >::put(true); + T::MaximumBlockWeight::get() } else { 0 } } + } - fn integrity_test() { - T::BlockWeights::get() - .validate() - .expect("The weights are invalid."); - } - + #[pallet::call] + impl Pallet { /// A dispatch that will fill the block weight up to the given ratio. // TODO: This should only be available for testing, rather than in general usage, but - // that's not possible at present (since it's within the decl_module macro). - #[weight = *_ratio * T::BlockWeights::get().max_block] - fn fill_block(origin, _ratio: Perbill) { + // that's not possible at present (since it's within the pallet macro). + #[pallet::weight(*_ratio * T::MaximumBlockWeight::get())] + pub(super) fn fill_block( + origin: OriginFor, + _ratio: Perbill + ) -> DispatchResultWithPostInfo { ensure_root(origin)?; + Ok(().into()) } /// Make some on-chain remark. @@ -564,9 +342,13 @@ decl_module! { /// - Base Weight: 0.665 µs, independent of remark length. /// - No DB operations. /// # - #[weight = T::SystemWeightInfo::remark(_remark.len() as u32)] - fn remark(origin, _remark: Vec) { + #[pallet::weight(T::SystemWeightInfo::remark(_remark.len() as u32))] + pub(super) fn remark( + origin: OriginFor, + _remark: Vec + ) -> DispatchResultWithPostInfo { ensure_signed(origin)?; + Ok(().into()) } /// Set the number of pages in the WebAssembly environment's heap. @@ -577,10 +359,14 @@ decl_module! 
{ /// - Base Weight: 1.405 µs /// - 1 write to HEAP_PAGES /// # - #[weight = (T::SystemWeightInfo::set_heap_pages(), DispatchClass::Operational)] - fn set_heap_pages(origin, pages: u64) { + #[pallet::weight((T::SystemWeightInfo::set_heap_pages(), DispatchClass::Operational))] + pub(super) fn set_heap_pages( + origin: OriginFor, + pages: u64 + ) -> DispatchResultWithPostInfo { ensure_root(origin)?; storage::unhashed::put_raw(well_known_keys::HEAP_PAGES, &pages.encode()); + Ok(().into()) } /// Set the new runtime code. @@ -593,13 +379,14 @@ decl_module! { /// The weight of this function is dependent on the runtime, but generally this is very expensive. /// We will treat this as a full block. /// # - #[weight = (T::BlockWeights::get().max_block, DispatchClass::Operational)] - pub fn set_code(origin, code: Vec) { + #[pallet::weight((T::MaximumBlockWeight::get(), DispatchClass::Operational))] + pub fn set_code(origin: OriginFor, code: Vec) -> DispatchResultWithPostInfo { ensure_root(origin)?; Self::can_set_code(&code)?; storage::unhashed::put_raw(well_known_keys::CODE, &code); - Self::deposit_event(RawEvent::CodeUpdated); + Self::deposit_event(Event::CodeUpdated); + Ok(().into()) } /// Set the new runtime code without doing any checks of the given `code`. @@ -610,11 +397,12 @@ decl_module! { /// - 1 event. /// The weight of this function is dependent on the runtime. We will treat this as a full block. /// # - #[weight = (T::BlockWeights::get().max_block, DispatchClass::Operational)] - pub fn set_code_without_checks(origin, code: Vec) { + #[pallet::weight((T::MaximumBlockWeight::get(), DispatchClass::Operational))] + pub fn set_code_without_checks(origin: OriginFor, code: Vec) -> DispatchResultWithPostInfo { ensure_root(origin)?; storage::unhashed::put_raw(well_known_keys::CODE, &code); - Self::deposit_event(RawEvent::CodeUpdated); + Self::deposit_event(Event::CodeUpdated); + Ok(().into()) } /// Set the new changes trie configuration. @@ -627,8 +415,11 @@ decl_module! 
{ /// - DB Weight: /// - Writes: Changes Trie, System Digest /// # - #[weight = (T::SystemWeightInfo::set_changes_trie_config(), DispatchClass::Operational)] - pub fn set_changes_trie_config(origin, changes_trie_config: Option) { + #[pallet::weight((T::SystemWeightInfo::set_changes_trie_config(), DispatchClass::Operational))] + pub fn set_changes_trie_config( + origin: OriginFor, + changes_trie_config: Option + ) -> DispatchResultWithPostInfo { ensure_root(origin)?; match changes_trie_config.clone() { Some(changes_trie_config) => storage::unhashed::put_raw( @@ -642,6 +433,7 @@ decl_module! { generic::ChangesTrieSignal::NewConfiguration(changes_trie_config), ); Self::deposit_log(log.into()); + Ok(().into()) } /// Set some items of storage. @@ -652,15 +444,19 @@ decl_module! { /// - Base Weight: 0.568 * i µs /// - Writes: Number of items /// # - #[weight = ( - T::SystemWeightInfo::set_storage(items.len() as u32), - DispatchClass::Operational, - )] - fn set_storage(origin, items: Vec) { + #[pallet::weight(( + T::SystemWeightInfo::set_storage(items.len() as u32), + DispatchClass::Operational, + ))] + pub(super) fn set_storage( + origin: OriginFor, + items: Vec + ) -> DispatchResultWithPostInfo { ensure_root(origin)?; for i in &items { storage::unhashed::put_raw(&i.0, &i.1); } + Ok(().into()) } /// Kill some items from storage. @@ -671,15 +467,19 @@ decl_module! { /// - Base Weight: .378 * i µs /// - Writes: Number of items /// # - #[weight = ( + #[pallet::weight(( T::SystemWeightInfo::kill_storage(keys.len() as u32), DispatchClass::Operational, - )] - fn kill_storage(origin, keys: Vec) { + ))] + pub(super) fn kill_storage( + origin: OriginFor, + keys: Vec + ) -> DispatchResultWithPostInfo { ensure_root(origin)?; for key in &keys { storage::unhashed::kill(&key); } + Ok(().into()) } /// Kill all storage items with a key that starts with the given prefix. @@ -693,13 +493,18 @@ decl_module! 
{ /// - Base Weight: 0.834 * P µs /// - Writes: Number of subkeys + 1 /// # - #[weight = ( + #[pallet::weight(( T::SystemWeightInfo::kill_prefix(_subkeys.saturating_add(1)), DispatchClass::Operational, - )] - fn kill_prefix(origin, prefix: Key, _subkeys: u32) { + ))] + pub(super) fn kill_prefix( + origin: OriginFor, + prefix: Key, + _subkeys: u32 + ) -> DispatchResultWithPostInfo { ensure_root(origin)?; storage::unhashed::kill_prefix(&prefix); + Ok(().into()) } /// Kill the sending account, assuming there are no references outstanding and the composite @@ -712,13 +517,328 @@ decl_module! { /// Base Weight: 8.626 µs /// No DB Read or Write operations because caller is already in overlay /// # - #[weight = (T::SystemWeightInfo::suicide(), DispatchClass::Operational)] - pub fn suicide(origin) { + #[pallet::weight((T::SystemWeightInfo::suicide(), DispatchClass::Operational))] + pub fn suicide(origin: OriginFor) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; let account = Account::::get(&who); ensure!(account.refcount == 0, Error::::NonZeroRefCount); ensure!(account.data == T::AccountData::default(), Error::::NonDefaultComposite); Self::kill_account(&who); + Ok(().into()) + } + } + + /// Event for the System module. + #[pallet::event] + pub enum Event { + /// An extrinsic completed successfully. \[info\] + ExtrinsicSuccess(DispatchInfo), + /// An extrinsic failed. \[error, info\] + ExtrinsicFailed(DispatchError, DispatchInfo), + /// `:code` was updated. + CodeUpdated, + /// A new \[account\] was created. + NewAccount(T::AccountId), + /// An \[account\] was reaped. + KilledAccount(T::AccountId), + } + + /// Error for the System module + #[pallet::error] + pub enum Error { + /// The name of specification does not match between the current runtime + /// and the new runtime. + InvalidSpecName, + /// The specification version is not allowed to decrease between the current runtime + /// and the new runtime. 
+ SpecVersionNeedsToIncrease, + /// Failed to extract the runtime version from the new runtime. + /// + /// Either calling `Core_version` or decoding `RuntimeVersion` failed. + FailedToExtractRuntimeVersion, + /// Suicide called when the account has non-default composite data. + NonDefaultComposite, + /// There is a non-zero reference count preventing the account from being purged. + NonZeroRefCount, + } + + /// Exposed trait-generic origin type. + #[pallet::origin] + pub type Origin = RawOrigin<::AccountId>; + + /// The full account information for a particular account ID. + #[pallet::storage] + #[pallet::getter(fn account)] + pub type Account = StorageMap< + _, Blake2_128Concat, T::AccountId, AccountInfo, ValueQuery + >; + + /// Total extrinsics count for the current block. + #[pallet::storage] + pub(super) type ExtrinsicCount = StorageValue<_, u32>; + + /// The current weight for the block. + #[pallet::storage] + #[pallet::getter(fn block_weight)] + pub(super) type BlockWeight = StorageValue<_, weight::ExtrinsicsWeight, ValueQuery>; + + /// Total length (in bytes) for all extrinsics put together, for the current block. + #[pallet::storage] + pub(super) type AllExtrinsicsLen = StorageValue<_, u32>; + + /// Map of block numbers to block hashes. + #[pallet::storage] + #[pallet::getter(fn block_hash)] + pub type BlockHash = StorageMap< + _, Twox64Concat, T::BlockNumber, T::Hash, ValueQuery + >; + + /// Extrinsics data for the current block (maps an extrinsic's index to its data). + #[pallet::storage] + #[pallet::getter(fn extrinsic_data)] + pub(super) type ExtrinsicData = StorageMap< + _, Twox64Concat, u32, Vec, ValueQuery + >; + + /// The current block number being processed. Set by `execute_block`. + #[pallet::storage] + #[pallet::getter(fn block_number)] + pub(super) type Number = StorageValue<_, T::BlockNumber, ValueQuery>; + + /// Hash of the previous block. 
+ #[pallet::storage] + #[pallet::getter(fn parent_hash)] + pub(super) type ParentHash = StorageValue<_, T::Hash, ValueQuery>; + + /// Extrinsics root of the current block, also part of the block header. + #[pallet::storage] + #[pallet::getter(fn extrinsics_root)] + pub(super) type ExtrinsicsRoot = StorageValue<_, T::Hash, ValueQuery>; + + /// Digest of the current block, also part of the block header. + #[pallet::storage] + #[pallet::getter(fn digest)] + pub(super) type Digest = StorageValue<_, DigestOf, ValueQuery>; + + /// Events deposited for the current block. + #[pallet::storage] + #[pallet::getter(fn events)] + pub(super) type Events = StorageValue< + _, Vec>, ValueQuery + >; + + /// The number of events in the `Events` list. + #[pallet::storage] + #[pallet::getter(fn event_count)] + pub(super) type EventCount = StorageValue<_, EventIndex, ValueQuery>; + + // TODO: https://github.com/paritytech/substrate/issues/2553 + // Possibly, we can improve it by using something like: + // `Option<(BlockNumber, Vec)>`, however in this case we won't be able to use + // `EventTopics::append`. + + /// Mapping between a topic (represented by T::Hash) and a vector of indexes + /// of events in the `>` list. + /// + /// All topic vectors have deterministic storage locations depending on the topic. This + /// allows light-clients to leverage the changes trie storage tracking mechanism and + /// in case of changes fetch the list of events of interest. + /// + /// The value has the type `(T::BlockNumber, EventIndex)` because if we used only just + /// the `EventIndex` then in case if the topic has the same contents on the next block + /// no notification will be triggered thus the event might be lost. + #[pallet::storage] + #[pallet::getter(fn event_topics)] + pub(super) type EventTopics = StorageMap< + _, Blake2_128Concat, T::Hash, Vec<(T::BlockNumber, EventIndex)>, ValueQuery + >; + + /// Stores the `spec_version` and `spec_name` of when the last runtime upgrade happened. 
+ #[pallet::storage] + pub type LastRuntimeUpgrade = StorageValue<_, LastRuntimeUpgradeInfo>; + + /// True if we have upgraded so that `type RefCount` is `u32`. False (default) if not. + #[pallet::storage] + pub(super) type UpgradedToU32RefCount = StorageValue<_, bool, ValueQuery>; + + /// The execution phase of the block. + #[pallet::storage] + pub(super) type ExecutionPhase = StorageValue<_, Phase>; + + #[pallet::genesis_config] + pub struct GenesisConfig { + pub changes_trie_config: Option, + #[serde(with = "sp_core::bytes")] + pub code: Vec, + } + + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { + Self { + changes_trie_config: Default::default(), + code: Default::default(), + } + } + } + + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { + >::insert::<_, T::Hash>(T::BlockNumber::zero(), hash69()); + >::put::(hash69()); + >::put(LastRuntimeUpgradeInfo::from(T::Version::get())); + >::put(true); + + sp_io::storage::set(well_known_keys::CODE, &self.code); + sp_io::storage::set(well_known_keys::EXTRINSIC_INDEX, &0u32.encode()); + if let Some(ref changes_trie_config) = self.changes_trie_config { + sp_io::storage::set(well_known_keys::CHANGES_TRIE_CONFIG, &changes_trie_config.encode()); + } + } + } +} + +/// Pallet struct placeholder on which is implemented the pallet logic. +/// +/// It is currently an alias for `Module` as old macros still generate/use old name. +pub type Module = Pallet; + +/// Alias to Event to prevent breaking code. Soon to be deprecated. +pub type RawEvent = Event; + +#[cfg(feature = "std")] +impl GenesisConfig { + /// Direct implementation of `GenesisBuild::build_storage`. + /// + /// Kept in order not to break dependency. + pub fn build_storage(&self) -> Result { + >::build_storage(self) + } + + /// Direct implementation of `GenesisBuild::assimilate_storage`. + /// + /// Kept in order not to break dependency. 
+ pub fn assimilate_storage( + &self, + storage: &mut sp_runtime::Storage + ) -> Result<(), String> { + >::assimilate_storage(self, storage) + } +} + +pub type DigestOf = generic::Digest<::Hash>; +pub type DigestItemOf = generic::DigestItem<::Hash>; + +pub type Key = Vec; +pub type KeyValue = (Vec, Vec); + +/// A phase of a block's execution. +#[derive(Encode, Decode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] +pub enum Phase { + /// Applying an extrinsic. + ApplyExtrinsic(u32), + /// Finalizing the block. + Finalization, + /// Initializing the block. + Initialization, +} + +impl Default for Phase { + fn default() -> Self { + Self::Initialization + } +} + +/// Record of an event happening. +#[derive(Encode, Decode, RuntimeDebug)] +#[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] +pub struct EventRecord { + /// The phase of the block it happened in. + pub phase: Phase, + /// The event itself. + pub event: E, + /// The list of the topics this event has. + pub topics: Vec, +} + +/// Origin for the System module. +#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode)] +pub enum RawOrigin { + /// The system itself ordained this dispatch to happen: this is the highest privilege level. + Root, + /// It is signed by some public key and we provide the `AccountId`. + Signed(AccountId), + /// It is signed by nobody, can be either: + /// * included and agreed upon by the validators anyway, + /// * or unsigned transaction validated by a module. + None, +} + +impl From> for RawOrigin { + fn from(s: Option) -> RawOrigin { + match s { + Some(who) => RawOrigin::Signed(who), + None => RawOrigin::None, + } + } +} + +// Create a Hash with 69 for each byte, +// only used to build genesis config. +#[cfg(feature = "std")] +fn hash69 + Default>() -> T { + let mut h = T::default(); + h.as_mut().iter_mut().for_each(|byte| *byte = 69); + h +} + +/// This type alias represents an index of an event. 
+/// +/// We use `u32` here because this index is used as index for `Events` +/// which can't contain more than `u32::max_value()` items. +type EventIndex = u32; + +/// Type used to encode the number of references an account has. +pub type RefCount = u32; + +/// Information of an account. +#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +pub struct AccountInfo { + /// The number of transactions this account has sent. + pub nonce: Index, + /// The number of other modules that currently depend on this account's existence. The account + /// cannot be reaped until this is zero. + pub refcount: RefCount, + /// The additional data that belongs to this account. Used to store the balance(s) in a lot of + /// chains. + pub data: AccountData, +} + +/// Stores the `spec_version` and `spec_name` of when the last runtime upgrade +/// happened. +#[derive(sp_runtime::RuntimeDebug, Encode, Decode)] +#[cfg_attr(feature = "std", derive(PartialEq))] +pub struct LastRuntimeUpgradeInfo { + pub spec_version: codec::Compact, + pub spec_name: sp_runtime::RuntimeString, +} + +impl LastRuntimeUpgradeInfo { + /// Returns if the runtime was upgraded in comparison of `self` and `current`. + /// + /// Checks if either the `spec_version` increased or the `spec_name` changed. + pub fn was_upgraded(&self, current: &sp_version::RuntimeVersion) -> bool { + current.spec_version > self.spec_version.0 || current.spec_name != self.spec_name + } +} + +impl From for LastRuntimeUpgradeInfo { + fn from(version: sp_version::RuntimeVersion) -> Self { + Self { + spec_version: version.spec_version.into(), + spec_name: version.spec_name, } } } @@ -941,7 +1061,7 @@ impl Module { // Don't populate events on genesis. if block_number.is_zero() { return } - let phase = ExecutionPhase::get().unwrap_or_default(); + let phase = ExecutionPhase::::get().unwrap_or_default(); let event = EventRecord { phase, event, @@ -950,14 +1070,14 @@ impl Module { // Index of the to be added event. 
let event_idx = { - let old_event_count = EventCount::get(); + let old_event_count = EventCount::::get(); let new_event_count = match old_event_count.checked_add(1) { // We've reached the maximum number of events at this block, just // don't do anything and leave the event_count unaltered. None => return, Some(nc) => nc, }; - EventCount::put(new_event_count); + EventCount::::put(new_event_count); old_event_count }; @@ -975,11 +1095,11 @@ impl Module { /// Gets extrinsics count. pub fn extrinsic_count() -> u32 { - ExtrinsicCount::get().unwrap_or_default() + ExtrinsicCount::::get().unwrap_or_default() } pub fn all_extrinsics_len() -> u32 { - AllExtrinsicsLen::get().unwrap_or_default() + AllExtrinsicsLen::::get().unwrap_or_default() } /// Inform the system module of some additional weight that should be accounted for, in the @@ -998,7 +1118,7 @@ impl Module { /// /// Another potential use-case could be for the `on_initialize` and `on_finalize` hooks. pub fn register_extra_weight_unchecked(weight: Weight, class: DispatchClass) { - BlockWeight::mutate(|current_weight| { + BlockWeight::::mutate(|current_weight| { current_weight.add(weight, class); }); } @@ -1011,7 +1131,7 @@ impl Module { kind: InitKind, ) { // populate environment - ExecutionPhase::put(Phase::Initialization); + ExecutionPhase::::put(Phase::Initialization); storage::unhashed::put(well_known_keys::EXTRINSIC_INDEX, &0u32); >::put(number); >::put(digest); @@ -1019,12 +1139,12 @@ impl Module { >::insert(*number - One::one(), parent_hash); // Remove previous block data from storage - BlockWeight::kill(); + BlockWeight::::kill(); // Kill inspectable storage entries in state when `InitKind::Full`. if let InitKind::Full = kind { >::kill(); - EventCount::kill(); + EventCount::::kill(); >::remove_all(); } } @@ -1032,8 +1152,8 @@ impl Module { /// Remove temporary "environment" entries in storage, compute the storage root and return the /// resulting header for this block. 
pub fn finalize() -> T::Header { - ExecutionPhase::kill(); - AllExtrinsicsLen::kill(); + ExecutionPhase::::kill(); + AllExtrinsicsLen::::kill(); // The following fields // @@ -1125,11 +1245,11 @@ impl Module { /// Set the current block weight. This should only be used in some integration tests. #[cfg(any(feature = "std", test))] - pub fn set_block_consumed_resources(weight: Weight, len: usize) { - BlockWeight::mutate(|current_weight| { - current_weight.set(weight, DispatchClass::Normal) + pub fn set_block_limits(weight: Weight, len: usize) { + BlockWeight::::mutate(|current_weight| { + current_weight.put(weight, DispatchClass::Normal) }); - AllExtrinsicsLen::put(len as u32); + AllExtrinsicsLen::::put(len as u32); } /// Reset events. Can be used as an alternative to @@ -1137,7 +1257,7 @@ impl Module { #[cfg(any(feature = "std", feature = "runtime-benchmarks", test))] pub fn reset_events() { >::kill(); - EventCount::kill(); + EventCount::::kill(); >::remove_all(); } @@ -1159,7 +1279,7 @@ impl Module { /// This is required to be called before applying an extrinsic. The data will used /// in [`Self::finalize`] to calculate the correct extrinsics root. pub fn note_extrinsic(encoded_xt: Vec) { - ExtrinsicData::insert(Self::extrinsic_index().unwrap_or_default(), encoded_xt); + ExtrinsicData::::insert(Self::extrinsic_index().unwrap_or_default(), encoded_xt); } /// To be called immediately after an extrinsic has been applied. 
@@ -1167,10 +1287,10 @@ impl Module { info.weight = extract_actual_weight(r, &info); Self::deposit_event( match r { - Ok(_) => RawEvent::ExtrinsicSuccess(info), + Ok(_) => Event::ExtrinsicSuccess(info), Err(err) => { sp_runtime::print(err); - RawEvent::ExtrinsicFailed(err.error, info) + Event::ExtrinsicFailed(err.error, info) }, } ); @@ -1178,7 +1298,7 @@ impl Module { let next_extrinsic_index = Self::extrinsic_index().unwrap_or_default() + 1u32; storage::unhashed::put(well_known_keys::EXTRINSIC_INDEX, &next_extrinsic_index); - ExecutionPhase::put(Phase::ApplyExtrinsic(next_extrinsic_index)); + ExecutionPhase::::put(Phase::ApplyExtrinsic(next_extrinsic_index)); } /// To be called immediately after `note_applied_extrinsic` of the last extrinsic of the block @@ -1186,26 +1306,34 @@ impl Module { pub fn note_finished_extrinsics() { let extrinsic_index: u32 = storage::unhashed::take(well_known_keys::EXTRINSIC_INDEX) .unwrap_or_default(); - ExtrinsicCount::put(extrinsic_index); - ExecutionPhase::put(Phase::Finalization); + ExtrinsicCount::::put(extrinsic_index); + ExecutionPhase::::put(Phase::Finalization); } /// To be called immediately after finishing the initialization of the block /// (e.g., called `on_initialize` for all modules). pub fn note_finished_initialize() { - ExecutionPhase::put(Phase::ApplyExtrinsic(0)) + ExecutionPhase::::put(Phase::ApplyExtrinsic(0)) + } + + /// Remove all extrinsic data and save the extrinsics trie root. + pub fn derive_extrinsics() { + let extrinsics = (0..ExtrinsicCount::::get().unwrap_or_default()) + .map(ExtrinsicData::::take).collect(); + let xts_root = extrinsics_data_root::(extrinsics); + >::put(xts_root); } /// An account is being created. pub fn on_created_account(who: T::AccountId) { T::OnNewAccount::on_new_account(&who); - Self::deposit_event(RawEvent::NewAccount(who)); + Self::deposit_event(Event::NewAccount(who)); } /// Do anything that needs to be done after an account has been killed. 
fn on_killed_account(who: T::AccountId) { T::OnKilledAccount::on_killed_account(&who); - Self::deposit_event(RawEvent::KilledAccount(who)); + Self::deposit_event(Event::KilledAccount(who)); } /// Remove an account from storage. This should only be done when its refs are zero or you'll @@ -1332,7 +1460,7 @@ impl StoredMap for Module { /// Split an `option` into two constituent options, as defined by a `splitter` function. pub fn split_inner(option: Option, splitter: impl FnOnce(T) -> (R, S)) - -> (Option, Option) + -> (Option, Option) { match option { Some(inner) => { From 464f4c5540e9645e517968c0493375fcbb73bdfc Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 14 Dec 2020 17:22:04 +0000 Subject: [PATCH 020/503] Add some TypeInfo derives and type hints for system module --- Cargo.lock | 3 +++ frame/executive/src/lib.rs | 4 ++-- frame/support/src/weights.rs | 6 +++--- frame/system/Cargo.toml | 1 + frame/system/src/extensions/check_weight.rs | 6 +++--- frame/system/src/lib.rs | 20 ++++++++++---------- primitives/arithmetic/Cargo.toml | 1 + primitives/arithmetic/src/per_things.rs | 2 +- primitives/core/src/changes_trie.rs | 2 +- primitives/runtime/Cargo.toml | 1 + primitives/runtime/src/lib.rs | 2 +- 11 files changed, 27 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9849e3322f7af..6a482f3771942 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1704,6 +1704,7 @@ dependencies = [ "frame-support", "impl-trait-for-tuples", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -8097,6 +8098,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8488,6 +8490,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-application-crypto", diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index fdde914b07e04..7047fa0bd2894 100644 --- a/frame/executive/src/lib.rs +++ 
b/frame/executive/src/lib.rs @@ -261,11 +261,11 @@ where /// Returns if the runtime was upgraded since the last time this function was called. fn runtime_upgraded() -> bool { - let last = frame_system::LastRuntimeUpgrade::get(); + let last = frame_system::LastRuntimeUpgrade::::get(); let current = >::get(); if last.map(|v| v.was_upgraded(¤t)).unwrap_or(true) { - frame_system::LastRuntimeUpgrade::put( + frame_system::LastRuntimeUpgrade::::put( frame_system::LastRuntimeUpgradeInfo::from(current), ); true diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 7fde8b342c4bb..42ce053f4f00a 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -198,7 +198,7 @@ pub trait PaysFee { } /// Explicit enum to denote if a transaction pays fee or not. -#[derive(Clone, Copy, Eq, PartialEq, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Copy, Eq, PartialEq, RuntimeDebug, Encode, Decode, ::scale_info::TypeInfo)] pub enum Pays { /// Transactor will pay related fees. Yes, @@ -218,7 +218,7 @@ impl Default for Pays { /// [DispatchClass::all] and [DispatchClass::non_mandatory] helper functions. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, ::scale_info::TypeInfo)] pub enum DispatchClass { /// A normal dispatch. Normal, @@ -303,7 +303,7 @@ pub mod priority { } /// A bundle of static information collected from the `#[weight = $x]` attributes. -#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, ::scale_info::TypeInfo)] pub struct DispatchInfo { /// Weight of this transaction. 
pub weight: Weight, diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index cebf761a907c7..7f4c687b73a31 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } +scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/system/src/extensions/check_weight.rs b/frame/system/src/extensions/check_weight.rs index c84c29518593f..39c57b49667ff 100644 --- a/frame/system/src/extensions/check_weight.rs +++ b/frame/system/src/extensions/check_weight.rs @@ -115,8 +115,8 @@ impl CheckWeight where let next_weight = Self::check_block_weight(info)?; Self::check_extrinsic_weight(info)?; - crate::AllExtrinsicsLen::put(next_len); - crate::BlockWeight::put(next_weight); + crate::AllExtrinsicsLen::::put(next_len); + crate::BlockWeight::::put(next_weight); Ok(()) } @@ -257,7 +257,7 @@ impl SignedExtension for CheckWeight where let unspent = post_info.calc_unspent(info); if unspent > 0 { - crate::BlockWeight::mutate(|current_weight| { + crate::BlockWeight::::mutate(|current_weight| { current_weight.sub(unspent, info.class); }) } diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 6e35caa6314dc..2b77f029fba6e 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -192,27 +192,27 @@ pub mod pallet { /// Account index (aka nonce) type. This stores the number of previous transactions /// associated with a sender account. 
type Index: - Parameter + Member + MaybeSerialize + Debug + Default + MaybeDisplay + AtLeast32Bit - + Copy; + Parameter + Member + MaybeSerialize + Debug + Default + MaybeDisplay + AtLeast32Bit + + Copy; /// The block number type used by the runtime. type BlockNumber: - Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + - AtLeast32BitUnsigned + Default + Bounded + Copy + sp_std::hash::Hash + - sp_std::str::FromStr + MaybeMallocSizeOf; + Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + + AtLeast32BitUnsigned + Default + Bounded + Copy + sp_std::hash::Hash + + sp_std::str::FromStr + MaybeMallocSizeOf; /// The output of the `Hashing` function. type Hash: - Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + SimpleBitOps + - Ord + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + - MaybeMallocSizeOf; + Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + SimpleBitOps + + Ord + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + + MaybeMallocSizeOf; /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). type Hashing: Hash; /// The user account identifier type for the runtime. type AccountId: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord - + Default; + + Default + ::scale_info::TypeInfo; /// Converting trait to take a source type and convert to `AccountId`. /// @@ -231,7 +231,7 @@ pub mod pallet { /// The aggregated event type of the runtime. type Event: Parameter + Member + From> + Debug + - IsType<::Event>; + IsType<::Event>; /// Maximum number of block number to block hash mappings to keep (oldest pruned first). 
#[pallet::constant] diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index c8f812215f4ac..0d11093ee379b 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,6 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" num-traits = { version = "0.2.8", default-features = false } +scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-debug-derive = { version = "2.0.0", default-features = false, path = "../debug-derive" } diff --git a/primitives/arithmetic/src/per_things.rs b/primitives/arithmetic/src/per_things.rs index c6a31a0ffe869..4d4dd7c6155c9 100644 --- a/primitives/arithmetic/src/per_things.rs +++ b/primitives/arithmetic/src/per_things.rs @@ -321,7 +321,7 @@ macro_rules! implement_per_thing { /// #[doc = $title] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] - #[derive(Encode, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RuntimeDebug)] + #[derive(Encode, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RuntimeDebug, ::scale_info::TypeInfo)] pub struct $name($type); /// Implementation makes any compact encoding of `PerThing::Inner` valid, diff --git a/primitives/core/src/changes_trie.rs b/primitives/core/src/changes_trie.rs index 32991ce44a506..14a7f4a9d2331 100644 --- a/primitives/core/src/changes_trie.rs +++ b/primitives/core/src/changes_trie.rs @@ -24,7 +24,7 @@ use num_traits::Zero; /// Substrate changes trie configuration. 
#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize, parity_util_mem::MallocSizeOf))] -#[derive(Debug, Clone, PartialEq, Eq, Default, Encode, Decode)] +#[derive(Debug, Clone, PartialEq, Eq, Default, Encode, Decode, ::scale_info::TypeInfo)] pub struct ChangesTrieConfiguration { /// Interval (in blocks) at which level1-digests are created. Digests are not /// created when this is less or equal to 1. diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 0ad05561581a6..1213a4a4e00ea 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,6 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } +scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "2.0.0", default-features = false, path = "../arithmetic" } diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index 563e0965d83aa..f5fe1e3bb2e84 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -391,7 +391,7 @@ pub type DispatchResult = sp_std::result::Result<(), DispatchError>; pub type DispatchResultWithInfo = sp_std::result::Result>; /// Reason why a dispatch call failed. -#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, RuntimeDebug, ::scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum DispatchError { /// Some error occurred. 
From 3f60abfd6cd8495e2c5092f1042e7215a000b05e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 15 Dec 2020 07:34:45 +0000 Subject: [PATCH 021/503] Convert pallet-timestamp to new macro style to fix error --- frame/timestamp/Cargo.toml | 1 + frame/timestamp/src/lib.rs | 177 ++++++++++++++++++++----------------- 2 files changed, 99 insertions(+), 79 deletions(-) diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 5a99c5d02c5af..b9a90dafc0958 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,6 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } +scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/timestamp/src/lib.rs b/frame/timestamp/src/lib.rs index 44f88347c08d3..c43ecf2803164 100644 --- a/frame/timestamp/src/lib.rs +++ b/frame/timestamp/src/lib.rs @@ -19,7 +19,7 @@ //! //! The Timestamp module provides functionality to get and set the on-chain time. //! -//! - [`timestamp::Config`](./trait.Config.html) +//! - [`timestamp::Trait`](./trait.Trait.html) //! - [`Call`](./enum.Call.html) //! - [`Module`](./struct.Module.html) //! @@ -46,7 +46,7 @@ //! * `get` - Gets the current time for the current block. If this function is called prior to //! setting the timestamp, it will return the timestamp of the previous block. //! -//! ### Config Getters +//! ### Trait Getters //! //! * `MinimumPeriod` - Gets the minimum (and advised) period between blocks for the chain. //! 
@@ -66,7 +66,7 @@ //! # use pallet_timestamp as timestamp; //! use frame_system::ensure_signed; //! -//! pub trait Config: timestamp::Config {} +//! pub trait Trait: timestamp::Trait {} //! //! decl_module! { //! pub struct Module for enum Call where origin: T::Origin { @@ -117,33 +117,61 @@ use sp_timestamp::{ }; pub use weights::WeightInfo; -/// The module configuration trait -pub trait Config: frame_system::Config { - /// Type used for expressing timestamp. - type Moment: Parameter + Default + AtLeast32Bit - + Scale + Copy; +pub use pallet::*; - /// Something which can be notified when the timestamp is set. Set this to `()` if not needed. - type OnTimestampSet: OnTimestampSet; +#[frame_support::pallet] +pub mod pallet { + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + use super::*; - /// The minimum period between blocks. Beware that this is different to the *expected* period - /// that the block production apparatus provides. Your chosen consensus system will generally - /// work with this to determine a sensible block time. e.g. For Aura, it will be double this - /// period on default settings. - type MinimumPeriod: Get; + /// The module configuration trait + #[pallet::config] + pub trait Config: frame_system::Config { + /// Type used for expressing timestamp. + type Moment: Parameter + Default + AtLeast32Bit + + Scale + Copy + ::scale_info::TypeInfo; - /// Weight information for extrinsics in this pallet. - type WeightInfo: WeightInfo; -} + /// Something which can be notified when the timestamp is set. Set this to `()` if not needed. + type OnTimestampSet: OnTimestampSet; -decl_module! { - pub struct Module for enum Call where origin: T::Origin { /// The minimum period between blocks. Beware that this is different to the *expected* period /// that the block production apparatus provides. Your chosen consensus system will generally /// work with this to determine a sensible block time. e.g. 
For Aura, it will be double this /// period on default settings. - const MinimumPeriod: T::Moment = T::MinimumPeriod::get(); + #[pallet::constant] + type MinimumPeriod: Get; + + /// Weight information for extrinsics in this pallet. + type WeightInfo: WeightInfo; + } + + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(PhantomData); + + /// Deprecated old pallet name + pub type Module = Pallet; + + #[pallet::hooks] + impl Hooks> for Pallet { + /// dummy `on_initialize` to return the weight used in `on_finalize`. + fn on_initialize(_: T::BlockNumber) -> Weight { + // weight of `on_finalize` + T::WeightInfo::on_finalize() + } + + /// # + /// - `O(1)` + /// - 1 storage deletion (codec `O(1)`). + /// # + fn on_finalize(_: T::BlockNumber) { + assert!(::DidUpdate::take(), "Timestamp must be updated once in the block"); + } + } + #[pallet::call] + impl Pallet { /// Set the current time. /// /// This call should be invoked exactly once per block. It will panic at the finalization @@ -159,11 +187,14 @@ decl_module! { /// - 1 storage read and 1 storage mutation (codec `O(1)`). (because of `DidUpdate::take` in `on_finalize`) /// - 1 event handler `on_timestamp_set`. Must be `O(1)`. /// # - #[weight = ( + #[pallet::weight(( T::WeightInfo::set(), DispatchClass::Mandatory - )] - fn set(origin, #[compact] now: T::Moment) { + ))] + pub(super) fn set( + origin: OriginFor, + #[pallet::compact] now: T::Moment + ) -> DispatchResultWithPostInfo { ensure_none(origin)?; assert!(!::DidUpdate::exists(), "Timestamp must be updated only once in the block"); let prev = Self::now(); @@ -171,38 +202,61 @@ decl_module! { prev.is_zero() || now >= prev + T::MinimumPeriod::get(), "Timestamp must increment by at least between sequential blocks" ); - ::Now::put(now); - ::DidUpdate::put(true); + <::Now>::put(now); + <::DidUpdate>::put(true); >::on_timestamp_set(now); + Ok(().into()) } + } - /// dummy `on_initialize` to return the weight used in `on_finalize`. 
- fn on_initialize() -> Weight { - // weight of `on_finalize` - T::WeightInfo::on_finalize() + #[pallet::inherent] + impl ProvideInherent for Pallet { + type Call = Call; + type Error = InherentError; + const INHERENT_IDENTIFIER: InherentIdentifier = INHERENT_IDENTIFIER; + + fn create_inherent(data: &InherentData) -> Option { + let data: T::Moment = extract_inherent_data(data) + .expect("Gets and decodes timestamp inherent data") + .saturated_into(); + + let next_time = cmp::max(data, Self::now() + T::MinimumPeriod::get()); + Some(Call::set(next_time.into())) } - /// # - /// - `O(1)` - /// - 1 storage deletion (codec `O(1)`). - /// # - fn on_finalize() { - assert!(::DidUpdate::take(), "Timestamp must be updated once in the block"); + fn check_inherent(call: &Self::Call, data: &InherentData) -> result::Result<(), Self::Error> { + const MAX_TIMESTAMP_DRIFT_MILLIS: u64 = 30 * 1000; + + let t: u64 = match call { + Call::set(ref t) => t.clone().saturated_into::(), + _ => return Ok(()), + }; + + let data = extract_inherent_data(data).map_err(|e| InherentError::Other(e))?; + + let minimum = (Self::now() + T::MinimumPeriod::get()).saturated_into::(); + if t > data + MAX_TIMESTAMP_DRIFT_MILLIS { + Err(InherentError::Other("Timestamp too far in future to accept".into())) + } else if t < minimum { + Err(InherentError::ValidAtTimestamp(minimum)) + } else { + Ok(()) + } } } -} -decl_storage! { - trait Store for Module as Timestamp { - /// Current time for the current block. - pub Now get(fn now): T::Moment; + /// Current time for the current block. + #[pallet::storage] + #[pallet::getter(fn now)] + pub type Now = StorageValue<_, T::Moment, ValueQuery>; - /// Did the timestamp get updated in this block? - DidUpdate: bool; - } + /// Did the timestamp get updated in this block? + #[pallet::storage] + pub(super) type DidUpdate = StorageValue<_, bool, ValueQuery>; } + impl Module { /// Get the current time for the current block. 
/// @@ -225,41 +279,6 @@ fn extract_inherent_data(data: &InherentData) -> Result ProvideInherent for Module { - type Call = Call; - type Error = InherentError; - const INHERENT_IDENTIFIER: InherentIdentifier = INHERENT_IDENTIFIER; - - fn create_inherent(data: &InherentData) -> Option { - let data: T::Moment = extract_inherent_data(data) - .expect("Gets and decodes timestamp inherent data") - .saturated_into(); - - let next_time = cmp::max(data, Self::now() + T::MinimumPeriod::get()); - Some(Call::set(next_time.into())) - } - - fn check_inherent(call: &Self::Call, data: &InherentData) -> result::Result<(), Self::Error> { - const MAX_TIMESTAMP_DRIFT_MILLIS: u64 = 30 * 1000; - - let t: u64 = match call { - Call::set(ref t) => t.clone().saturated_into::(), - _ => return Ok(()), - }; - - let data = extract_inherent_data(data).map_err(|e| InherentError::Other(e))?; - - let minimum = (Self::now() + T::MinimumPeriod::get()).saturated_into::(); - if t > data + MAX_TIMESTAMP_DRIFT_MILLIS { - Err(InherentError::Other("Timestamp too far in future to accept".into())) - } else if t < minimum { - Err(InherentError::ValidAtTimestamp(minimum)) - } else { - Ok(()) - } - } -} - impl Time for Module { type Moment = T::Moment; From 55071db674b93742a14b5f372ae39121a8a97bbb Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 15 Dec 2020 07:36:03 +0000 Subject: [PATCH 022/503] Explicity use scale_info prelude Vec type and macro --- Cargo.lock | 1 + frame/support/procedural/src/pallet/expand/call.rs | 8 ++++---- frame/support/src/dispatch.rs | 4 ++-- frame/support/src/event.rs | 10 +++++----- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6a482f3771942..2d5c9e3f11aab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5051,6 +5051,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-inherents", diff --git a/frame/support/procedural/src/pallet/expand/call.rs 
b/frame/support/procedural/src/pallet/expand/call.rs index 76dae1b5b08d8..834db88d3f5d5 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -204,18 +204,18 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { } #[doc(hidden)] - pub fn call_functions_vnext() -> Vec<#frame_support::metadata::vnext::FunctionMetadata> { - vec![ #( + pub fn call_functions_vnext() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::vnext::FunctionMetadata> { + #frame_support::scale_info::prelude::vec![ #( #frame_support::metadata::vnext::FunctionMetadata { name: stringify!(#fn_name), - arguments: vec![ #( + arguments: #frame_support::scale_info::prelude::vec![ #( #frame_support::metadata::vnext::FunctionArgumentMetadata { name: stringify!(#args_name), ty: #frame_support::scale_info::meta_type::<#args_type>(), is_compact: #args_is_compact, }, )* ], - documentation: vec![ #( #fn_doc ),* ], + documentation: #frame_support::scale_info::prelude::vec![ #( #fn_doc ),* ], }, )* ] } diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 89d7819fab32f..d0cdc2e430938 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2129,8 +2129,8 @@ macro_rules! __dispatch_impl_metadata { /// Returns empty vec for now to allow mixing of old style and new style pallets. #[doc(hidden)] #[allow(dead_code)] - pub fn call_functions_vnext() -> Vec<$crate::metadata::vnext::FunctionMetadata> { - vec![] + pub fn call_functions_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::vnext::FunctionMetadata> { + $crate::scale_info::prelude::vec![] } } } diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 716e52bd4046c..b3beeae634e22 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -153,8 +153,8 @@ macro_rules! 
decl_event { /// Metadata vnext only supported by new frame support macros #[allow(dead_code)] #[doc(hidden)] - pub fn metadata_vnext() -> Vec<$crate::metadata::vnext::EventMetadata> { - vec![] + pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::vnext::EventMetadata> { + $crate::scale_info::prelude::vec![] } } } @@ -308,8 +308,8 @@ macro_rules! __decl_generic_event { /// Metadata vnext only supported by new frame support macros #[allow(dead_code)] #[doc(hidden)] - pub fn metadata_vnext() -> Vec<$crate::metadata::vnext::EventMetadata> { - vec![] + pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::vnext::EventMetadata> { + $crate::scale_info::prelude::vec![] } } }; @@ -584,7 +584,7 @@ macro_rules! __impl_outer_event_json_metadata_vnext { pub fn outer_event_metadata_vnext() -> $crate::event::vnext::OuterEventMetadata { $crate::event::vnext::OuterEventMetadata { name: stringify!($event_name), - events: vec![ + events: $crate::scale_info::prelude::vec![ $( $crate::metadata::vnext::ModuleEventMetadata { name: stringify!($module_name), From dba995a9d053174f14be1202031fb04a105b8c88 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 15 Dec 2020 07:46:03 +0000 Subject: [PATCH 023/503] Add some TypeInfo impls --- primitives/core/src/crypto.rs | 2 +- primitives/runtime/src/multiaddress.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/primitives/core/src/crypto.rs b/primitives/core/src/crypto.rs index 7943ac1beed21..38eebaf873cb8 100644 --- a/primitives/core/src/crypto.rs +++ b/primitives/core/src/crypto.rs @@ -621,7 +621,7 @@ pub trait Public: } /// An opaque 32-byte cryptographic identifier. 
-#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode)] +#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode, ::scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Hash))] pub struct AccountId32([u8; 32]); diff --git a/primitives/runtime/src/multiaddress.rs b/primitives/runtime/src/multiaddress.rs index d09cd7acaf4db..34ec6d4b6b924 100644 --- a/primitives/runtime/src/multiaddress.rs +++ b/primitives/runtime/src/multiaddress.rs @@ -21,7 +21,7 @@ use codec::{Encode, Decode}; use sp_std::vec::Vec; /// A multi-format address wrapper for on-chain accounts. -#[derive(Encode, Decode, PartialEq, Eq, Clone, crate::RuntimeDebug)] +#[derive(Encode, Decode, PartialEq, Eq, Clone, crate::RuntimeDebug, ::scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Hash))] pub enum MultiAddress { /// It's an account ID (pubkey). From 257e32ca01a61ef9dc1a975e72fedb7cc6909684 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 15 Dec 2020 11:19:50 +0000 Subject: [PATCH 024/503] TEMP: rewire rpc to return vnext metadata --- bin/node/runtime/src/lib.rs | 4 ++++ client/rpc/src/state/state_full.rs | 2 +- primitives/api/src/lib.rs | 3 +++ 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 2060086243453..4829ec2ff98f7 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -1051,6 +1051,10 @@ impl_runtime_apis! { fn metadata() -> OpaqueMetadata { Runtime::metadata().into() } + + fn metadata_vnext() -> OpaqueMetadata { + Runtime::metadata_vnext().into() + } } impl sp_block_builder::BlockBuilder for Runtime { diff --git a/client/rpc/src/state/state_full.rs b/client/rpc/src/state/state_full.rs index 8d93d445b08cb..2d827ddc442c5 100644 --- a/client/rpc/src/state/state_full.rs +++ b/client/rpc/src/state/state_full.rs @@ -345,7 +345,7 @@ impl StateBackend for FullState OpaqueMetadata; + + /// Returns the vnext metadata of a runtime. 
+ fn metadata_vnext() -> OpaqueMetadata; } } From 027e92831204d63db093424b2a5ebf7923b32bc4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 15 Dec 2020 15:42:44 +0000 Subject: [PATCH 025/503] Add type registry to encoded metadata --- frame/metadata/src/vnext.rs | 33 +++++++++++++++++++++------- frame/support/src/metadata_vnext.rs | 34 +++++++++++++---------------- 2 files changed, 40 insertions(+), 27 deletions(-) diff --git a/frame/metadata/src/vnext.rs b/frame/metadata/src/vnext.rs index d8465008cc9b4..6d48a20c934ff 100644 --- a/frame/metadata/src/vnext.rs +++ b/frame/metadata/src/vnext.rs @@ -24,10 +24,13 @@ #![cfg_attr(not(feature = "std"), no_std)] #[cfg(feature = "std")] -use codec::Decode; +use codec::{Decode, Input}; use codec::Encode; use sp_core::RuntimeDebug; -use sp_std::vec::Vec; +use sp_std::{ + vec::Vec, + fmt::Debug, +}; use scale_info::{ form::{ @@ -38,6 +41,7 @@ use scale_info::{ meta_type, IntoCompact, Registry, + RegistryReadOnly, TypeInfo, }; @@ -46,16 +50,29 @@ pub type RuntimeMetadataLastVersion = RuntimeMetadataV12; /// Metadata prefixed by a u32 for reserved usage #[derive(Eq, Encode, PartialEq, RuntimeDebug)] #[cfg_attr(feature = "std", derive(Decode))] -pub struct RuntimeMetadataPrefixed(pub u32, pub RuntimeMetadata); +pub struct RuntimeMetadataPrefixed +where + S: PartialEq + Eq + PartialOrd + Ord + Clone + Debug, +{ + pub prefix: u32, + pub types: RegistryReadOnly, + pub metadata: RuntimeMetadata>, +} -impl From> for RuntimeMetadataPrefixed { - fn from(metadata: RuntimeMetadataLastVersion) -> RuntimeMetadataPrefixed { - RuntimeMetadataPrefixed(super::META_RESERVED, RuntimeMetadata::V12(metadata)) +impl From> for RuntimeMetadataPrefixed { + fn from(metadata: RuntimeMetadataLastVersion) -> RuntimeMetadataPrefixed { + let mut registry = Registry::new(); + let metadata = metadata.into_compact(&mut registry); + RuntimeMetadataPrefixed { + prefix: super::META_RESERVED, + types: registry.into(), + metadata: 
RuntimeMetadata::V12(metadata), + } } } -impl From> for sp_core::OpaqueMetadata { - fn from(metadata: RuntimeMetadataPrefixed) -> Self { +impl From for sp_core::OpaqueMetadata { + fn from(metadata: RuntimeMetadataPrefixed) -> Self { sp_core::OpaqueMetadata::new(metadata.encode()) } } diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index da6dfa8ae1d5f..d23f0914a3c80 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -24,25 +24,21 @@ macro_rules! impl_runtime_metadata_vnext { $( $rest:tt )* ) => { impl $runtime { - pub fn metadata_vnext() -> $crate::metadata::vnext::RuntimeMetadataPrefixed<$crate::scale_info::form::CompactForm> { - let mut registry = $crate::scale_info::Registry::new(); - let metadata = - $crate::metadata::vnext::RuntimeMetadataLastVersion { - modules: $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), - // extrinsic: $crate::metadata::ExtrinsicMetadata { - // version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, - // signed_extensions: < - // < - // $ext as $crate::sp_runtime::traits::ExtrinsicMetadata - // >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension - // >::identifier() - // .into_iter() - // .map($crate::metadata::DecodeDifferent::Encode) - // .collect(), - // }, - }; - use $crate::scale_info::IntoCompact as _; - metadata.into_compact(&mut registry).into() + pub fn metadata_vnext() -> $crate::metadata::vnext::RuntimeMetadataPrefixed { + $crate::metadata::vnext::RuntimeMetadataLastVersion { + modules: $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), + // extrinsic: $crate::metadata::ExtrinsicMetadata { + // version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + // signed_extensions: < + // < + // $ext as $crate::sp_runtime::traits::ExtrinsicMetadata + // >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension + // >::identifier() + // .into_iter() + // 
.map($crate::metadata::DecodeDifferent::Encode) + // .collect(), + // }, + }.into() } } } From d4243da7fc12aa5c5876bb0bff068d773964a0b0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 4 Jan 2021 12:56:54 +0000 Subject: [PATCH 026/503] Fix up frame metadata --- Cargo.lock | 1 + frame/metadata/Cargo.toml | 1 + frame/metadata/src/lib.rs | 2 ++ 3 files changed, 4 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 2d5c9e3f11aab..a90fed8d03c4b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1611,6 +1611,7 @@ name = "frame-metadata" version = "12.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-std", diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml index 2934b15562c43..4fe4cfea13413 100644 --- a/frame/metadata/Cargo.toml +++ b/frame/metadata/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.4", default-features = false, features = ["derive"] } +scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/metadata/src/lib.rs b/frame/metadata/src/lib.rs index 8e6b8b6bd796d..c05521c48fa59 100644 --- a/frame/metadata/src/lib.rs +++ b/frame/metadata/src/lib.rs @@ -23,6 +23,8 @@ #![cfg_attr(not(feature = "std"), no_std)] +pub mod vnext; + #[cfg(feature = "std")] use serde::Serialize; #[cfg(feature = "std")] From 8a9aff733851c4e3abfe4c15bbf69d6c1e3f636e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 4 Jan 2021 17:07:15 +0000 Subject: [PATCH 027/503] FormString constraint --- frame/metadata/src/vnext.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/metadata/src/vnext.rs 
b/frame/metadata/src/vnext.rs index 6d48a20c934ff..00bd9f77df27c 100644 --- a/frame/metadata/src/vnext.rs +++ b/frame/metadata/src/vnext.rs @@ -36,6 +36,7 @@ use scale_info::{ form::{ CompactForm, Form, + FormString, MetaForm, }, meta_type, @@ -52,7 +53,7 @@ pub type RuntimeMetadataLastVersion = RuntimeMetadataV12; #[cfg_attr(feature = "std", derive(Decode))] pub struct RuntimeMetadataPrefixed where - S: PartialEq + Eq + PartialOrd + Ord + Clone + Debug, + S: FormString, { pub prefix: u32, pub types: RegistryReadOnly, From 34d9fd39d529c17573d04528d4bbb3a1212b75c4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 5 Jan 2021 10:18:26 +0000 Subject: [PATCH 028/503] Compact -> Portable rename --- frame/metadata/src/vnext.rs | 100 +++++++++--------- frame/support/test/tests/construct_runtime.rs | 4 +- frame/support/test/tests/pallet.rs | 4 +- primitives/npos-elections/src/tests.rs | 2 +- 4 files changed, 55 insertions(+), 55 deletions(-) diff --git a/frame/metadata/src/vnext.rs b/frame/metadata/src/vnext.rs index 00bd9f77df27c..aa59903cadd42 100644 --- a/frame/metadata/src/vnext.rs +++ b/frame/metadata/src/vnext.rs @@ -34,15 +34,15 @@ use sp_std::{ use scale_info::{ form::{ - CompactForm, + PortableForm, Form, FormString, MetaForm, }, meta_type, - IntoCompact, + IntoPortable, Registry, - RegistryReadOnly, + PortableRegistry, TypeInfo, }; @@ -56,14 +56,14 @@ where S: FormString, { pub prefix: u32, - pub types: RegistryReadOnly, - pub metadata: RuntimeMetadata>, + pub types: PortableRegistry, + pub metadata: RuntimeMetadata>, } impl From> for RuntimeMetadataPrefixed { fn from(metadata: RuntimeMetadataLastVersion) -> RuntimeMetadataPrefixed { let mut registry = Registry::new(); - let metadata = metadata.into_compact(&mut registry); + let metadata = metadata.into_portable(&mut registry); RuntimeMetadataPrefixed { prefix: super::META_RESERVED, types: registry.into(), @@ -98,13 +98,13 @@ pub struct RuntimeMetadataV12 { // pub extrinsic: ExtrinsicMetadata, } -impl 
IntoCompact for RuntimeMetadataV12 { - type Output = RuntimeMetadataV12; +impl IntoPortable for RuntimeMetadataV12 { + type Output = RuntimeMetadataV12; - fn into_compact(self, registry: &mut Registry) -> Self::Output { + fn into_portable(self, registry: &mut Registry) -> Self::Output { RuntimeMetadataV12 { - modules: registry.map_into_compact(self.modules), - // extrinsic: self.extrinsic.into_compact(registry), + modules: registry.map_into_portable(self.modules), + // extrinsic: self.extrinsic.into_portable(registry), } } } @@ -119,10 +119,10 @@ pub struct ExtrinsicMetadata { pub signed_extensions: Vec, } -impl IntoCompact for ExtrinsicMetadata { - type Output = ExtrinsicMetadata; +impl IntoPortable for ExtrinsicMetadata { + type Output = ExtrinsicMetadata; - fn into_compact(self, registry: &mut Registry) -> Self::Output { + fn into_portable(self, registry: &mut Registry) -> Self::Output { ExtrinsicMetadata { version: self.version, signed_extensions: registry.register_types(self.signed_extensions), @@ -142,14 +142,14 @@ pub struct ModuleMetadata { // pub errors: DFnA, } -impl IntoCompact for ModuleMetadata { - type Output = ModuleMetadata; +impl IntoPortable for ModuleMetadata { + type Output = ModuleMetadata; - fn into_compact(self, registry: &mut Registry) -> Self::Output { + fn into_portable(self, registry: &mut Registry) -> Self::Output { ModuleMetadata { - name: self.name.into_compact(registry), - calls: self.calls.map(|calls| registry.map_into_compact(calls)), - event: self.event.map(|event| registry.map_into_compact(event)), + name: self.name.into_portable(registry), + calls: self.calls.map(|calls| registry.map_into_portable(calls)), + event: self.event.map(|event| registry.map_into_portable(event)), } } } @@ -163,14 +163,14 @@ pub struct FunctionMetadata { pub documentation: Vec, } -impl IntoCompact for FunctionMetadata { - type Output = FunctionMetadata; +impl IntoPortable for FunctionMetadata { + type Output = FunctionMetadata; - fn into_compact(self, 
registry: &mut Registry) -> Self::Output { + fn into_portable(self, registry: &mut Registry) -> Self::Output { FunctionMetadata { - name: self.name.into_compact(registry), - arguments: registry.map_into_compact(self.arguments), - documentation: registry.map_into_compact(self.documentation), + name: self.name.into_portable(registry), + arguments: registry.map_into_portable(self.arguments), + documentation: registry.map_into_portable(self.documentation), } } } @@ -184,12 +184,12 @@ pub struct FunctionArgumentMetadata { pub is_compact: bool, } -impl IntoCompact for FunctionArgumentMetadata { - type Output = FunctionArgumentMetadata; +impl IntoPortable for FunctionArgumentMetadata { + type Output = FunctionArgumentMetadata; - fn into_compact(self, registry: &mut Registry) -> Self::Output { + fn into_portable(self, registry: &mut Registry) -> Self::Output { FunctionArgumentMetadata { - name: self.name.into_compact(registry), + name: self.name.into_portable(registry), ty: registry.register_type(&self.ty), is_compact: self.is_compact, } @@ -204,13 +204,13 @@ pub struct OuterEventMetadata { pub events: Vec>, } -impl IntoCompact for OuterEventMetadata { - type Output = OuterEventMetadata; +impl IntoPortable for OuterEventMetadata { + type Output = OuterEventMetadata; - fn into_compact(self, registry: &mut Registry) -> Self::Output { + fn into_portable(self, registry: &mut Registry) -> Self::Output { OuterEventMetadata { - name: self.name.into_compact(registry), - events: registry.map_into_compact(self.events), + name: self.name.into_portable(registry), + events: registry.map_into_portable(self.events), } } } @@ -223,13 +223,13 @@ pub struct ModuleEventMetadata { pub events: Vec>, } -impl IntoCompact for ModuleEventMetadata { - type Output = ModuleEventMetadata; +impl IntoPortable for ModuleEventMetadata { + type Output = ModuleEventMetadata; - fn into_compact(self, registry: &mut Registry) -> Self::Output { + fn into_portable(self, registry: &mut Registry) -> Self::Output { 
ModuleEventMetadata { - name: self.name.into_compact(registry), - events: registry.map_into_compact(self.events), + name: self.name.into_portable(registry), + events: registry.map_into_portable(self.events), } } } @@ -243,14 +243,14 @@ pub struct EventMetadata { pub documentation: Vec, } -impl IntoCompact for EventMetadata { - type Output = EventMetadata; +impl IntoPortable for EventMetadata { + type Output = EventMetadata; - fn into_compact(self, registry: &mut Registry) -> Self::Output { + fn into_portable(self, registry: &mut Registry) -> Self::Output { EventMetadata { - name: self.name.into_compact(registry), - arguments: registry.map_into_compact(self.arguments), - documentation: registry.map_into_compact(self.documentation), + name: self.name.into_portable(registry), + arguments: registry.map_into_portable(self.arguments), + documentation: registry.map_into_portable(self.documentation), } } } @@ -281,13 +281,13 @@ pub struct TypeSpec { name: T::String, } -impl IntoCompact for TypeSpec { - type Output = TypeSpec; +impl IntoPortable for TypeSpec { + type Output = TypeSpec; - fn into_compact(self, registry: &mut Registry) -> Self::Output { + fn into_portable(self, registry: &mut Registry) -> Self::Output { TypeSpec { ty: registry.register_type(&self.ty), - name: self.name.into_compact(registry), + name: self.name.into_portable(registry), } } } diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index daa6f49923d3c..b8e97a2dd9090 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -511,7 +511,7 @@ fn test_metadata() { #[test] fn test_metadata_vnext() { use frame_metadata::*; - use frame_support::scale_info::{form::MetaForm, IntoCompact, Registry}; + use frame_support::scale_info::{form::MetaForm, IntoPortable, Registry}; // vnext modules defined with legacy macros have empty metadata let expected_metadata: vnext::RuntimeMetadataLastVersion = 
vnext::RuntimeMetadataLastVersion { modules: vec![ @@ -578,7 +578,7 @@ fn test_metadata_vnext() { // }, }; let mut registry = Registry::new(); - let expected_metadata = expected_metadata.into_compact(&mut registry); + let expected_metadata = expected_metadata.into_portable(&mut registry); pretty_assertions::assert_eq!(Runtime::metadata_vnext().1, vnext::RuntimeMetadata::V12(expected_metadata)); } diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 82eca50602a2a..5dff6cb3e111a 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -771,7 +771,7 @@ fn metadata() { #[test] fn metadata_vnext() { use frame_metadata::*; - use crate::scale_info::{meta_type, Registry, IntoCompact}; + use crate::scale_info::{meta_type, Registry, IntoPortable}; let expected_pallet_metadata = vnext::ModuleMetadata { // index: 1, @@ -845,7 +845,7 @@ fn metadata_vnext() { }; let mut registry = Registry::new(); - let expected_pallet_metadata = expected_pallet_metadata.into_compact(&mut registry); + let expected_pallet_metadata = expected_pallet_metadata.into_portable(&mut registry); pretty_assertions::assert_eq!(metadata.modules[1], expected_pallet_metadata); } diff --git a/primitives/npos-elections/src/tests.rs b/primitives/npos-elections/src/tests.rs index 1d26909911f33..cba03cf161c98 100644 --- a/primitives/npos-elections/src/tests.rs +++ b/primitives/npos-elections/src/tests.rs @@ -1259,7 +1259,7 @@ mod solution_type { } #[test] - fn basic_from_and_into_compact_works_assignments() { + fn basic_from_and_into_portable_works_assignments() { let voters = vec![ 2 as AccountId, 4, From fba938a199be8c9746b368edf2437b18156782b9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 6 Jan 2021 16:46:48 +0000 Subject: [PATCH 029/503] Add ConsumedWeight alias --- frame/system/src/lib.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 2b77f029fba6e..0f5902df17558 
100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -171,6 +171,9 @@ pub mod pallet { use super::*; use crate as frame_system; + /// An object to track the currently used extrinsic weight in a block. + pub type ConsumedWeight = PerDispatchClass; + /// System configuration trait. Implemented by runtime. #[pallet::config] #[pallet::disable_frame_system_supertrait_check] From 555a9da9b99bb6ea462d79b54a9860de95a35ce6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 7 Jan 2021 14:26:36 +0000 Subject: [PATCH 030/503] Sync system pallet with master --- frame/system/src/lib.rs | 159 ++++++++++++++----------------------- frame/timestamp/src/lib.rs | 3 - 2 files changed, 58 insertions(+), 104 deletions(-) diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 0f5902df17558..f790f22c343e8 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -129,8 +129,6 @@ use codec::{Encode, Decode, FullCodec, EncodeLike}; #[cfg(any(feature = "std", test))] use sp_io::TestExternalities; -#[cfg(feature = "std")] -use frame_support::traits::GenesisBuild; pub mod offchain; pub mod limits; @@ -162,6 +160,9 @@ pub fn extrinsics_data_root(xts: Vec>) -> H::Output { H::ordered_trie_root(xts) } +/// An object to track the currently used extrinsic weight in a block. +pub type ConsumedWeight = PerDispatchClass; + pub use pallet::*; #[frame_support::pallet] @@ -171,9 +172,6 @@ pub mod pallet { use super::*; use crate as frame_system; - /// An object to track the currently used extrinsic weight in a block. - pub type ConsumedWeight = PerDispatchClass; - /// System configuration trait. Implemented by runtime. #[pallet::config] #[pallet::disable_frame_system_supertrait_check] @@ -182,6 +180,13 @@ pub mod pallet { /// except Root. type BaseCallFilter: Filter; + /// Block & extrinsics weights: base values and limits. + #[pallet::constant] + type BlockWeights: Get; + + /// The maximum length of a block (in bytes). 
+ type BlockLength: Get; + /// The `Origin` type used by dispatchable calls. type Origin: Into, Self::Origin>> @@ -192,11 +197,11 @@ pub mod pallet { /// The aggregated `Call` type. type Call: Dispatchable + Debug; - /// Account index (aka nonce) type. This stores the number of previous transactions - /// associated with a sender account. + /// Account index (aka nonce) type. This stores the number of previous transactions associated + /// with a sender account. type Index: Parameter + Member + MaybeSerialize + Debug + Default + MaybeDisplay + AtLeast32Bit - + Copy; + + Copy; /// The block number type used by the runtime. type BlockNumber: @@ -206,9 +211,8 @@ pub mod pallet { /// The output of the `Hashing` function. type Hash: - Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + SimpleBitOps + - Ord + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + - MaybeMallocSizeOf; + Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + SimpleBitOps + Ord + + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + MaybeMallocSizeOf; /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). type Hashing: Hash; @@ -219,11 +223,10 @@ pub mod pallet { /// Converting trait to take a source type and convert to `AccountId`. /// - /// Used to define the type and conversion mechanism for referencing accounts in - /// transactions. - /// It's perfectly reasonable for this to be an identity conversion (with the source type - /// being `AccountId`), but other modules (e.g. Indices module) may provide more - /// functional/efficient alternatives. + /// Used to define the type and conversion mechanism for referencing accounts in transactions. + /// It's perfectly reasonable for this to be an identity conversion (with the source type being + /// `AccountId`), but other modules (e.g. Indices module) may provide more functional/efficient + /// alternatives. 
type Lookup: StaticLookup; /// The block header. @@ -240,36 +243,10 @@ pub mod pallet { #[pallet::constant] type BlockHashCount: Get; - /// The maximum weight of a block. - #[pallet::constant] - type MaximumBlockWeight: Get; - /// The weight of runtime database operations the runtime can invoke. #[pallet::constant] type DbWeight: Get; - /// The base weight of executing a block, independent of the transactions in the block. - #[pallet::constant] - type BlockExecutionWeight: Get; - - /// The base weight of an Extrinsic in the block, independent of the of extrinsic being - /// executed. - #[pallet::constant] - type ExtrinsicBaseWeight: Get; - - /// The maximal weight of a single Extrinsic. This should be set to at most - /// `MaximumBlockWeight - AverageOnInitializeWeight`. The limit only applies to extrinsics - /// containing `Normal` dispatch class calls. - type MaximumExtrinsicWeight: Get; - - /// The maximum length of a block (in bytes). - #[pallet::constant] - type MaximumBlockLength: Get; - - /// The portion of the block that is available to normal transaction. The rest can only be - /// used by operational transactions. This can be applied to any resource limit managed by - /// the system module, including weight and length. - type AvailableBlockRatio: Get; /// Get the chain's current version. type Version: Get; @@ -301,9 +278,9 @@ pub mod pallet { /// This replaces the "ss58Format" property declared in the chain spec. Reason is /// that the runtime should know about the prefix in order to make use of it as /// an identifier of the chain. 
+ #[pallet::constant] type SS58Prefix: Get; -} - + } #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -317,19 +294,25 @@ pub mod pallet { Some(AccountInfo { nonce, refcount: rc as RefCount, data }) ); >::put(true); - T::MaximumBlockWeight::get() + T::BlockWeights::get().max_block } else { 0 } } + + fn integrity_test() { + T::BlockWeights::get() + .validate() + .expect("The weights are invalid."); + } } #[pallet::call] impl Pallet { /// A dispatch that will fill the block weight up to the given ratio. // TODO: This should only be available for testing, rather than in general usage, but - // that's not possible at present (since it's within the pallet macro). - #[pallet::weight(*_ratio * T::MaximumBlockWeight::get())] + // that's not possible at present (since it's within the decl_module macro). + #[pallet::weight(*_ratio * T::BlockWeights::get().max_block)] pub(super) fn fill_block( origin: OriginFor, _ratio: Perbill @@ -382,8 +365,11 @@ pub mod pallet { /// The weight of this function is dependent on the runtime, but generally this is very expensive. /// We will treat this as a full block. /// # - #[pallet::weight((T::MaximumBlockWeight::get(), DispatchClass::Operational))] - pub fn set_code(origin: OriginFor, code: Vec) -> DispatchResultWithPostInfo { + #[pallet::weight((T::BlockWeights::get().max_block, DispatchClass::Operational))] + pub fn set_code( + origin: OriginFor, + code: Vec + ) -> DispatchResultWithPostInfo { ensure_root(origin)?; Self::can_set_code(&code)?; @@ -400,8 +386,11 @@ pub mod pallet { /// - 1 event. /// The weight of this function is dependent on the runtime. We will treat this as a full block. 
/// # - #[pallet::weight((T::MaximumBlockWeight::get(), DispatchClass::Operational))] - pub fn set_code_without_checks(origin: OriginFor, code: Vec) -> DispatchResultWithPostInfo { + #[pallet::weight((T::BlockWeights::get().max_block, DispatchClass::Operational))] + pub fn set_code_without_checks( + origin: OriginFor, + code: Vec + ) -> DispatchResultWithPostInfo { ensure_root(origin)?; storage::unhashed::put_raw(well_known_keys::CODE, &code); Self::deposit_event(Event::CodeUpdated); @@ -448,8 +437,8 @@ pub mod pallet { /// - Writes: Number of items /// # #[pallet::weight(( - T::SystemWeightInfo::set_storage(items.len() as u32), - DispatchClass::Operational, + T::SystemWeightInfo::set_storage(items.len() as u32), + DispatchClass::Operational, ))] pub(super) fn set_storage( origin: OriginFor, @@ -583,7 +572,8 @@ pub mod pallet { /// The current weight for the block. #[pallet::storage] #[pallet::getter(fn block_weight)] - pub(super) type BlockWeight = StorageValue<_, weight::ExtrinsicsWeight, ValueQuery>; + pub(super) type BlockWeight = StorageValue<_, ConsumedWeight, ValueQuery>; + /// Total length (in bytes) for all extrinsics put together, for the current block. #[pallet::storage] @@ -613,11 +603,6 @@ pub mod pallet { #[pallet::getter(fn parent_hash)] pub(super) type ParentHash = StorageValue<_, T::Hash, ValueQuery>; - /// Extrinsics root of the current block, also part of the block header. - #[pallet::storage] - #[pallet::getter(fn extrinsics_root)] - pub(super) type ExtrinsicsRoot = StorageValue<_, T::Hash, ValueQuery>; - /// Digest of the current block, also part of the block header. #[pallet::storage] #[pallet::getter(fn digest)] @@ -702,34 +687,6 @@ pub mod pallet { } } -/// Pallet struct placeholder on which is implemented the pallet logic. -/// -/// It is currently an alias for `Module` as old macros still generate/use old name. -pub type Module = Pallet; - -/// Alias to Event to prevent breaking code. Soon to be deprecated. 
-pub type RawEvent = Event; - -#[cfg(feature = "std")] -impl GenesisConfig { - /// Direct implementation of `GenesisBuild::build_storage`. - /// - /// Kept in order not to break dependency. - pub fn build_storage(&self) -> Result { - >::build_storage(self) - } - - /// Direct implementation of `GenesisBuild::assimilate_storage`. - /// - /// Kept in order not to break dependency. - pub fn assimilate_storage( - &self, - storage: &mut sp_runtime::Storage - ) -> Result<(), String> { - >::assimilate_storage(self, storage) - } -} - pub type DigestOf = generic::Digest<::Hash>; pub type DigestItemOf = generic::DigestItem<::Hash>; @@ -846,6 +803,14 @@ impl From for LastRuntimeUpgradeInfo { } } +/// Pallet struct placeholder on which is implemented the pallet logic. +/// +/// It is currently an alias for `Module` as old macros still generate/use old name. +pub type Module = Pallet; + +/// Alias to Event to prevent breaking code. Soon to be deprecated. +pub type RawEvent = Event; + pub struct EnsureRoot(sp_std::marker::PhantomData); impl< O: Into, O>> + From>, @@ -1161,7 +1126,7 @@ impl Module { // The following fields // // - > - // - > + // - ::> // - > // - > // - > @@ -1172,8 +1137,8 @@ impl Module { let parent_hash = >::get(); let mut digest = >::get(); - let extrinsics = (0..ExtrinsicCount::take().unwrap_or_default()) - .map(ExtrinsicData::take) + let extrinsics = (0..ExtrinsicCount::::take().unwrap_or_default()) + .map(ExtrinsicData::::take) .collect(); let extrinsics_root = extrinsics_data_root::(extrinsics); @@ -1248,9 +1213,9 @@ impl Module { /// Set the current block weight. This should only be used in some integration tests. 
#[cfg(any(feature = "std", test))] - pub fn set_block_limits(weight: Weight, len: usize) { + pub fn set_block_consumed_resources(weight: Weight, len: usize) { BlockWeight::::mutate(|current_weight| { - current_weight.put(weight, DispatchClass::Normal) + current_weight.set(weight, DispatchClass::Normal) }); AllExtrinsicsLen::::put(len as u32); } @@ -1319,14 +1284,6 @@ impl Module { ExecutionPhase::::put(Phase::ApplyExtrinsic(0)) } - /// Remove all extrinsic data and save the extrinsics trie root. - pub fn derive_extrinsics() { - let extrinsics = (0..ExtrinsicCount::::get().unwrap_or_default()) - .map(ExtrinsicData::::take).collect(); - let xts_root = extrinsics_data_root::(extrinsics); - >::put(xts_root); - } - /// An account is being created. pub fn on_created_account(who: T::AccountId) { T::OnNewAccount::on_new_account(&who); @@ -1463,7 +1420,7 @@ impl StoredMap for Module { /// Split an `option` into two constituent options, as defined by a `splitter` function. pub fn split_inner(option: Option, splitter: impl FnOnce(T) -> (R, S)) - -> (Option, Option) + -> (Option, Option) { match option { Some(inner) => { diff --git a/frame/timestamp/src/lib.rs b/frame/timestamp/src/lib.rs index c43ecf2803164..b5061a24cda7f 100644 --- a/frame/timestamp/src/lib.rs +++ b/frame/timestamp/src/lib.rs @@ -150,9 +150,6 @@ pub mod pallet { #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet(PhantomData); - /// Deprecated old pallet name - pub type Module = Pallet; - #[pallet::hooks] impl Hooks> for Pallet { /// dummy `on_initialize` to return the weight used in `on_finalize`. 
From 68dab153458c70cc01b15ef09914511d23f7e1b3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 7 Jan 2021 14:29:11 +0000 Subject: [PATCH 031/503] Add missing metadata_vnext impls --- test-utils/runtime/src/lib.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test-utils/runtime/src/lib.rs b/test-utils/runtime/src/lib.rs index f7bff69302172..353a2e0dc0c7f 100644 --- a/test-utils/runtime/src/lib.rs +++ b/test-utils/runtime/src/lib.rs @@ -575,6 +575,10 @@ cfg_if! { fn metadata() -> OpaqueMetadata { unimplemented!() } + + fn metadata_vnext() -> OpaqueMetadata { + unimplemented!() + } } impl sp_transaction_pool::runtime_api::TaggedTransactionQueue for Runtime { @@ -826,6 +830,10 @@ cfg_if! { fn metadata() -> OpaqueMetadata { unimplemented!() } + + fn metadata_vnext() -> OpaqueMetadata { + unimplemented!() + } } impl sp_transaction_pool::runtime_api::TaggedTransactionQueue for Runtime { From 134882e2e65b9e631394ad1c2767894a5f1a4ae4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 7 Jan 2021 14:40:01 +0000 Subject: [PATCH 032/503] Add back GenesisBuild helpers --- frame/system/src/lib.rs | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index f790f22c343e8..0ad1094509569 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -127,6 +127,8 @@ use frame_support::{ }; use codec::{Encode, Decode, FullCodec, EncodeLike}; +#[cfg(feature = "std")] +use frame_support::traits::GenesisBuild; #[cfg(any(feature = "std", test))] use sp_io::TestExternalities; @@ -687,6 +689,26 @@ pub mod pallet { } } +#[cfg(feature = "std")] +impl GenesisConfig { + /// Direct implementation of `GenesisBuild::build_storage`. + /// + /// Kept in order not to break dependency. + pub fn build_storage(&self) -> Result { + >::build_storage(self) + } + + /// Direct implementation of `GenesisBuild::assimilate_storage`. + /// + /// Kept in order not to break dependency. 
+ pub fn assimilate_storage( + &self, + storage: &mut sp_runtime::Storage + ) -> Result<(), String> { + >::assimilate_storage(self, storage) + } +} + pub type DigestOf = generic::Digest<::Hash>; pub type DigestItemOf = generic::DigestItem<::Hash>; From 56da3c2b0e713d08bd45b4ff7a2f9fd804a14ae2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 7 Jan 2021 14:49:06 +0000 Subject: [PATCH 033/503] Fix up system pallet tests --- frame/system/src/extensions/check_weight.rs | 20 ++++++++++---------- frame/system/src/tests.rs | 11 +++++++---- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/frame/system/src/extensions/check_weight.rs b/frame/system/src/extensions/check_weight.rs index 39c57b49667ff..3cda7971ca44a 100644 --- a/frame/system/src/extensions/check_weight.rs +++ b/frame/system/src/extensions/check_weight.rs @@ -465,7 +465,7 @@ mod tests { let normal_limit = normal_weight_limit(); // given almost full block - BlockWeight::mutate(|current_weight| { + BlockWeight::::mutate(|current_weight| { current_weight.set(normal_limit, DispatchClass::Normal) }); // will not fit. @@ -475,7 +475,7 @@ mod tests { // likewise for length limit. 
let len = 100_usize; - AllExtrinsicsLen::put(normal_length_limit()); + AllExtrinsicsLen::::put(normal_length_limit()); assert!(CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &normal, len).is_err()); assert!(CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &op, len).is_ok()); }) @@ -508,7 +508,7 @@ mod tests { let normal = DispatchInfo::default(); let normal_limit = normal_weight_limit() as usize; let reset_check_weight = |tx, s, f| { - AllExtrinsicsLen::put(0); + AllExtrinsicsLen::::put(0); let r = CheckWeight::(PhantomData).pre_dispatch(&1, CALL, tx, s); if f { assert!(r.is_err()) } else { assert!(r.is_ok()) } }; @@ -544,7 +544,7 @@ mod tests { let len = 0_usize; let reset_check_weight = |i, f, s| { - BlockWeight::mutate(|current_weight| { + BlockWeight::::mutate(|current_weight| { current_weight.set(s, DispatchClass::Normal) }); let r = CheckWeight::(PhantomData).pre_dispatch(&1, CALL, i, len); @@ -570,20 +570,20 @@ mod tests { let base_extrinsic = block_weights().get(DispatchClass::Normal).base_extrinsic; // We allow 75% for normal transaction, so we put 25% - extrinsic base weight - BlockWeight::mutate(|current_weight| { + BlockWeight::::mutate(|current_weight| { current_weight.set(0, DispatchClass::Mandatory); current_weight.set(256 - base_extrinsic, DispatchClass::Normal); }); let pre = CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &info, len).unwrap(); - assert_eq!(BlockWeight::get().total(), info.weight + 256); + assert_eq!(BlockWeight::::get().total(), info.weight + 256); assert!( CheckWeight::::post_dispatch(pre, &info, &post_info, len, &Ok(())) .is_ok() ); assert_eq!( - BlockWeight::get().total(), + BlockWeight::::get().total(), post_info.actual_weight.unwrap() + 256, ); }) @@ -599,14 +599,14 @@ mod tests { }; let len = 0_usize; - BlockWeight::mutate(|current_weight| { + BlockWeight::::mutate(|current_weight| { current_weight.set(0, DispatchClass::Mandatory); current_weight.set(128, DispatchClass::Normal); }); let pre = 
CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &info, len).unwrap(); assert_eq!( - BlockWeight::get().total(), + BlockWeight::::get().total(), info.weight + 128 + block_weights().get(DispatchClass::Normal).base_extrinsic, ); @@ -615,7 +615,7 @@ mod tests { .is_ok() ); assert_eq!( - BlockWeight::get().total(), + BlockWeight::::get().total(), info.weight + 128 + block_weights().get(DispatchClass::Normal).base_extrinsic, ); }) diff --git a/frame/system/src/tests.rs b/frame/system/src/tests.rs index ca91630110366..c9e2821375e66 100644 --- a/frame/system/src/tests.rs +++ b/frame/system/src/tests.rs @@ -18,8 +18,11 @@ use crate::*; use mock::{*, Origin}; use sp_core::H256; -use sp_runtime::{DispatchError, traits::{Header, BlakeTwo256}}; -use frame_support::weights::WithPostDispatchInfo; +use sp_runtime::{DispatchError, DispatchErrorWithPostInfo, traits::{Header, BlakeTwo256}}; +use frame_support::{ + dispatch::PostDispatchInfo, + weights::WithPostDispatchInfo +}; #[test] fn origin_works() { @@ -327,7 +330,7 @@ fn set_code_checks_works() { ("test", 1, 2, Err(Error::::SpecVersionNeedsToIncrease)), ("test", 1, 1, Err(Error::::SpecVersionNeedsToIncrease)), ("test2", 1, 1, Err(Error::::InvalidSpecName)), - ("test", 2, 1, Ok(())), + ("test", 2, 1, Ok(PostDispatchInfo::default())), ("test", 0, 1, Err(Error::::SpecVersionNeedsToIncrease)), ("test", 1, 0, Err(Error::::SpecVersionNeedsToIncrease)), ]; @@ -349,7 +352,7 @@ fn set_code_checks_works() { vec![1, 2, 3, 4], ); - assert_eq!(expected.map_err(DispatchError::from), res); + assert_eq!(expected.map_err(DispatchErrorWithPostInfo::from), res); }); } } From 6968dfae7ee39ee291b8e857fcbbf8721e6fe3da Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 7 Jan 2021 16:27:27 +0000 Subject: [PATCH 034/503] Use new frame_metadata crate for v13 --- Cargo.lock | 17 +- frame/balances/Cargo.toml | 2 +- frame/metadata/Cargo.toml | 2 +- frame/metadata/src/lib.rs | 2 - frame/metadata/src/vnext.rs | 306 ------------------ 
frame/support/Cargo.toml | 3 +- .../procedural/src/pallet/expand/call.rs | 6 +- .../procedural/src/pallet/expand/event.rs | 6 +- frame/support/src/dispatch.rs | 2 +- frame/support/src/event.rs | 7 +- frame/support/src/metadata.rs | 2 +- frame/support/src/metadata_vnext.rs | 8 +- frame/system/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- 17 files changed, 39 insertions(+), 334 deletions(-) delete mode 100644 frame/metadata/src/vnext.rs diff --git a/Cargo.lock b/Cargo.lock index a90fed8d03c4b..f5c325d5787db 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1617,12 +1617,23 @@ dependencies = [ "sp-std", ] +[[package]] +name = "frame-metadata" +version = "12.0.0" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#1ac42f815304186769cc1bce52f84d46508ca225" +dependencies = [ + "cfg-if 1.0.0", + "parity-scale-codec", + "scale-info", +] + [[package]] name = "frame-support" version = "2.0.0" dependencies = [ "bitflags", - "frame-metadata", + "frame-metadata 12.0.0", + "frame-metadata 12.0.0 (git+https://github.com/paritytech/frame-metadata?branch=main)", "frame-support-procedural", "frame-system", "impl-trait-for-tuples", @@ -1681,7 +1692,7 @@ dependencies = [ name = "frame-support-test" version = "2.0.0" dependencies = [ - "frame-metadata", + "frame-metadata 12.0.0", "frame-support", "frame-system", "parity-scale-codec", @@ -7579,6 +7590,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.4.1" +source = "git+https://github.com/paritytech/scale-info#86bbb8256788d7d579725da8fede406169b14a8c" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -7589,6 +7601,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.2.1" +source = "git+https://github.com/paritytech/scale-info#86bbb8256788d7d579725da8fede406169b14a8c" dependencies = [ "proc-macro2", "quote", diff --git a/frame/balances/Cargo.toml 
b/frame/balances/Cargo.toml index e477ba1607557..f64b6f57b7886 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.4", default-features = false, features = ["derive"] } -scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "2.0.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml index 4fe4cfea13413..d72e18cabf5f6 100644 --- a/frame/metadata/Cargo.toml +++ b/frame/metadata/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.4", default-features = false, features = ["derive"] } -scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/metadata/src/lib.rs b/frame/metadata/src/lib.rs index c05521c48fa59..8e6b8b6bd796d 100644 --- a/frame/metadata/src/lib.rs +++ b/frame/metadata/src/lib.rs @@ -23,8 +23,6 @@ 
#![cfg_attr(not(feature = "std"), no_std)] -pub mod vnext; - #[cfg(feature = "std")] use serde::Serialize; #[cfg(feature = "std")] diff --git a/frame/metadata/src/vnext.rs b/frame/metadata/src/vnext.rs deleted file mode 100644 index aa59903cadd42..0000000000000 --- a/frame/metadata/src/vnext.rs +++ /dev/null @@ -1,306 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) 2018-2020 Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: Apache-2.0 - -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! Decodable variant of the RuntimeMetadata. -//! -//! This really doesn't belong here, but is necessary for the moment. In the future -//! it should be removed entirely to an external module for shimming on to the -//! codec-encoded metadata. 
- -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(feature = "std")] -use codec::{Decode, Input}; -use codec::Encode; -use sp_core::RuntimeDebug; -use sp_std::{ - vec::Vec, - fmt::Debug, -}; - -use scale_info::{ - form::{ - PortableForm, - Form, - FormString, - MetaForm, - }, - meta_type, - IntoPortable, - Registry, - PortableRegistry, - TypeInfo, -}; - -pub type RuntimeMetadataLastVersion = RuntimeMetadataV12; - -/// Metadata prefixed by a u32 for reserved usage -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct RuntimeMetadataPrefixed -where - S: FormString, -{ - pub prefix: u32, - pub types: PortableRegistry, - pub metadata: RuntimeMetadata>, -} - -impl From> for RuntimeMetadataPrefixed { - fn from(metadata: RuntimeMetadataLastVersion) -> RuntimeMetadataPrefixed { - let mut registry = Registry::new(); - let metadata = metadata.into_portable(&mut registry); - RuntimeMetadataPrefixed { - prefix: super::META_RESERVED, - types: registry.into(), - metadata: RuntimeMetadata::V12(metadata), - } - } -} - -impl From for sp_core::OpaqueMetadata { - fn from(metadata: RuntimeMetadataPrefixed) -> Self { - sp_core::OpaqueMetadata::new(metadata.encode()) - } -} - -/// The metadata of a runtime. -/// The version ID encoded/decoded through -/// the enum nature of `RuntimeMetadata`. -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub enum RuntimeMetadata { - /// Version 12 for runtime metadata. - V12(RuntimeMetadataV12), -} - -/// The metadata of a runtime. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct RuntimeMetadataV12 { - /// Metadata of all the modules. - pub modules: Vec>, - // /// Metadata of the extrinsic. 
- // pub extrinsic: ExtrinsicMetadata, -} - -impl IntoPortable for RuntimeMetadataV12 { - type Output = RuntimeMetadataV12; - - fn into_portable(self, registry: &mut Registry) -> Self::Output { - RuntimeMetadataV12 { - modules: registry.map_into_portable(self.modules), - // extrinsic: self.extrinsic.into_portable(registry), - } - } -} - -/// Metadata of the extrinsic used by the runtime. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct ExtrinsicMetadata { - /// Extrinsic version. - pub version: u8, - /// The signed extensions in the order they appear in the extrinsic. - pub signed_extensions: Vec, -} - -impl IntoPortable for ExtrinsicMetadata { - type Output = ExtrinsicMetadata; - - fn into_portable(self, registry: &mut Registry) -> Self::Output { - ExtrinsicMetadata { - version: self.version, - signed_extensions: registry.register_types(self.signed_extensions), - } - } -} - -/// All metadata about an runtime module. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct ModuleMetadata { - pub name: T::String, - // pub storage: Option, StorageMetadata>>, - pub calls: Option>>, - pub event: Option>>, - // pub constants: DFnA, - // pub errors: DFnA, -} - -impl IntoPortable for ModuleMetadata { - type Output = ModuleMetadata; - - fn into_portable(self, registry: &mut Registry) -> Self::Output { - ModuleMetadata { - name: self.name.into_portable(registry), - calls: self.calls.map(|calls| registry.map_into_portable(calls)), - event: self.event.map(|event| registry.map_into_portable(event)), - } - } -} - -/// All the metadata about a function. 
-#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct FunctionMetadata { - pub name: T::String, - pub arguments: Vec>, - pub documentation: Vec, -} - -impl IntoPortable for FunctionMetadata { - type Output = FunctionMetadata; - - fn into_portable(self, registry: &mut Registry) -> Self::Output { - FunctionMetadata { - name: self.name.into_portable(registry), - arguments: registry.map_into_portable(self.arguments), - documentation: registry.map_into_portable(self.documentation), - } - } -} - -/// All the metadata about a function argument. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct FunctionArgumentMetadata { - pub name: T::String, - pub ty: T::Type, - pub is_compact: bool, -} - -impl IntoPortable for FunctionArgumentMetadata { - type Output = FunctionArgumentMetadata; - - fn into_portable(self, registry: &mut Registry) -> Self::Output { - FunctionArgumentMetadata { - name: self.name.into_portable(registry), - ty: registry.register_type(&self.ty), - is_compact: self.is_compact, - } - } -} - -/// All the metadata about an outer event. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct OuterEventMetadata { - pub name: T::String, - pub events: Vec>, -} - -impl IntoPortable for OuterEventMetadata { - type Output = OuterEventMetadata; - - fn into_portable(self, registry: &mut Registry) -> Self::Output { - OuterEventMetadata { - name: self.name.into_portable(registry), - events: registry.map_into_portable(self.events), - } - } -} - -/// Metadata about a module event. 
-#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct ModuleEventMetadata { - pub name: T::String, - pub events: Vec>, -} - -impl IntoPortable for ModuleEventMetadata { - type Output = ModuleEventMetadata; - - fn into_portable(self, registry: &mut Registry) -> Self::Output { - ModuleEventMetadata { - name: self.name.into_portable(registry), - events: registry.map_into_portable(self.events), - } - } -} - -/// All the metadata about an event. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct EventMetadata { - pub name: T::String, - pub arguments: Vec>, - pub documentation: Vec, -} - -impl IntoPortable for EventMetadata { - type Output = EventMetadata; - - fn into_portable(self, registry: &mut Registry) -> Self::Output { - EventMetadata { - name: self.name.into_portable(registry), - arguments: registry.map_into_portable(self.arguments), - documentation: registry.map_into_portable(self.documentation), - } - } -} - -/// A type specification. -/// -/// This contains the actual type as well as an optional compile-time -/// known displayed representation of the type. This is useful for cases -/// where the type is used through a type alias in order to provide -/// information about the alias name. -/// -/// # Examples -/// -/// Consider the following Rust function: -/// ```no_compile -/// fn is_sorted(input: &[i32], pred: Predicate) -> bool; -/// ``` -/// In this above example `input` would have no displayable name, -/// `pred`'s display name is `Predicate` and the display name of -/// the return type is simply `bool`. Note that `Predicate` could -/// simply be a type alias to `fn(i32, i32) -> Ordering`. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode))] -pub struct TypeSpec { - /// The actual type. - ty: T::Type, - /// The compile-time known displayed representation of the type. 
- name: T::String, -} - -impl IntoPortable for TypeSpec { - type Output = TypeSpec; - - fn into_portable(self, registry: &mut Registry) -> Self::Output { - TypeSpec { - ty: registry.register_type(&self.ty), - name: self.name.into_portable(registry), - } - } -} - -impl TypeSpec { - /// Creates a new type specification without a display name. - pub fn new(name: &'static str) -> Self - where - T: TypeInfo + 'static, - { - Self { - ty: meta_type::(), - name, - } - } -} diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 802f4ad041923..507b512777ace 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -17,7 +17,8 @@ log = "0.4" serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } frame-metadata = { version = "12.0.0", default-features = false, path = "../metadata" } -scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } +frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v13"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 834db88d3f5d5..bbbc6e94d7f8b 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -204,12 +204,12 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { } 
#[doc(hidden)] - pub fn call_functions_vnext() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::vnext::FunctionMetadata> { + pub fn call_functions_vnext() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::v13::FunctionMetadata> { #frame_support::scale_info::prelude::vec![ #( - #frame_support::metadata::vnext::FunctionMetadata { + #frame_support::metadata::v13::FunctionMetadata { name: stringify!(#fn_name), arguments: #frame_support::scale_info::prelude::vec![ #( - #frame_support::metadata::vnext::FunctionArgumentMetadata { + #frame_support::metadata::v13::FunctionArgumentMetadata { name: stringify!(#args_name), ty: #frame_support::scale_info::meta_type::<#args_type>(), is_compact: #args_is_compact, diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 778bd42f89f51..b8e1916333ea7 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -67,12 +67,12 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { .iter() .map(|(ty, name)| { quote::quote!( - #frame_support::metadata::vnext::TypeSpec::new::<#ty>(#name) + #frame_support::metadata::v13::TypeSpec::new::<#ty>(#name) ) }); let docs = &event.docs; quote::quote!( - #frame_support::metadata::vnext::EventMetadata { + #frame_support::metadata::v13::EventMetadata { name: #name, arguments: vec![ #( #args, )* ], documentation: vec![ #( #docs, )* ], @@ -164,7 +164,7 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { #[allow(dead_code)] #[doc(hidden)] - pub fn metadata_vnext() -> Vec<#frame_support::metadata::vnext::EventMetadata> { + pub fn metadata_vnext() -> Vec<#frame_support::metadata::v13::EventMetadata> { vec![ #( #metadata_vnext )* ] } } diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index d0cdc2e430938..da83ef10a51ef 100644 --- a/frame/support/src/dispatch.rs +++ 
b/frame/support/src/dispatch.rs @@ -2129,7 +2129,7 @@ macro_rules! __dispatch_impl_metadata { /// Returns empty vec for now to allow mixing of old style and new style pallets. #[doc(hidden)] #[allow(dead_code)] - pub fn call_functions_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::vnext::FunctionMetadata> { + pub fn call_functions_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::FunctionMetadata> { $crate::scale_info::prelude::vec![] } } diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index b3beeae634e22..e4de1e0c0fb0d 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -22,7 +22,6 @@ // along with Substrate. If not, see . pub use frame_metadata::{EventMetadata, DecodeDifferent, OuterEventMetadata, FnEncode}; -pub use frame_metadata::vnext; /// Implement the `Event` for a module. /// @@ -153,7 +152,7 @@ macro_rules! decl_event { /// Metadata vnext only supported by new frame support macros #[allow(dead_code)] #[doc(hidden)] - pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::vnext::EventMetadata> { + pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { $crate::scale_info::prelude::vec![] } } @@ -308,7 +307,7 @@ macro_rules! __decl_generic_event { /// Metadata vnext only supported by new frame support macros #[allow(dead_code)] #[doc(hidden)] - pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::vnext::EventMetadata> { + pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { $crate::scale_info::prelude::vec![] } } @@ -586,7 +585,7 @@ macro_rules! 
__impl_outer_event_json_metadata_vnext { name: stringify!($event_name), events: $crate::scale_info::prelude::vec![ $( - $crate::metadata::vnext::ModuleEventMetadata { + $crate::metadata::v13::ModuleEventMetadata { name: stringify!($module_name), events: $module_name::Event ::< $( $generic_params ),* > ::metadata_vnext() } diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 6f49275c6421a..2bdd02026502a 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -20,8 +20,8 @@ pub use frame_metadata::{ DefaultByteGetter, RuntimeMetadataPrefixed, StorageEntryMetadata, StorageMetadata, StorageEntryType, StorageEntryModifier, DefaultByte, StorageHasher, ModuleErrorMetadata, ExtrinsicMetadata, - vnext, }; +pub use frame_metadata2::v13; /// Implements the metadata support for the given runtime and all its modules. /// diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index d23f0914a3c80..0fe698c72c419 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -15,7 +15,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -pub use frame_metadata::vnext; +pub use frame_metadata2; #[macro_export] macro_rules! impl_runtime_metadata_vnext { @@ -24,8 +24,8 @@ macro_rules! impl_runtime_metadata_vnext { $( $rest:tt )* ) => { impl $runtime { - pub fn metadata_vnext() -> $crate::metadata::vnext::RuntimeMetadataPrefixed { - $crate::metadata::vnext::RuntimeMetadataLastVersion { + pub fn metadata_vnext() -> $crate::metadata::v13::RuntimeMetadataPrefixed { + $crate::metadata::v13::RuntimeMetadataLastVersion { modules: $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), // extrinsic: $crate::metadata::ExtrinsicMetadata { // version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, @@ -58,7 +58,7 @@ macro_rules! 
__runtime_modules_to_metadata_vnext { ) => { $crate::__runtime_modules_to_metadata_vnext!( $runtime; - $( $metadata, )* $crate::metadata::vnext::ModuleMetadata { + $( $metadata, )* $crate::metadata::v13::ModuleMetadata { name: stringify!($name), // index: $index, // storage: $crate::__runtime_modules_to_metadata_calls_storage!( diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 7f4c687b73a31..62d55ea1515c3 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } -scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index b9a90dafc0958..e46a2353507fd 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } -scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, 
path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 0d11093ee379b..421a8ce20f3cc 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" num-traits = { version = "0.2.8", default-features = false } -scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-debug-derive = { version = "2.0.0", default-features = false, path = "../debug-derive" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 80b093ec2eba4..cab2a8fee5484 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -33,7 +33,7 @@ zeroize = { version = "1.2.0", default-features = false } secrecy = { version = "0.7.0", default-features = false } lazy_static = { version = "1.4.0", default-features = false, optional = true } parking_lot = { version = "0.10.0", optional = true } -scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-debug-derive = { version = "2.0.0", path = "../debug-derive" } sp-externalities = { version = "0.8.0", 
optional = true, path = "../externalities" } sp-storage = { version = "2.0.0", default-features = false, path = "../storage" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 1213a4a4e00ea..1268978f2de95 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } -scale-info = { path = "../../../scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "2.0.0", default-features = false, path = "../arithmetic" } From b76497c352157791c922daafe82902531446a4d3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 7 Jan 2021 16:31:14 +0000 Subject: [PATCH 035/503] Remove balances Module type def --- frame/balances/src/lib.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 6b500178373f4..3bcbf388dab04 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -220,9 +220,6 @@ pub mod pallet { #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet(PhantomData<(T, I)>); - /// Deperacated name for Pallet - pub type Module = Pallet; - #[pallet::hooks] impl, I: 'static> Hooks> for Pallet {} From b9227a41092a666d3b009645e47261a495bfd326 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 7 Jan 2021 16:55:30 +0000 Subject: [PATCH 036/503] Fix up some errors for v13 metadata construction --- 
bin/node/runtime/src/lib.rs | 2 +- frame/support/src/event.rs | 6 +++--- frame/support/src/metadata_vnext.rs | 10 +++++++--- 3 files changed, 11 insertions(+), 7 deletions(-) diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 4829ec2ff98f7..7ed684b39eaef 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -1053,7 +1053,7 @@ impl_runtime_apis! { } fn metadata_vnext() -> OpaqueMetadata { - Runtime::metadata_vnext().into() + OpaqueMetadata::new(Runtime::metadata_vnext().encode()) } } diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index e4de1e0c0fb0d..a0f06952a36d1 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -580,8 +580,8 @@ macro_rules! __impl_outer_event_json_metadata_vnext { ) => { impl $runtime { #[allow(dead_code)] - pub fn outer_event_metadata_vnext() -> $crate::event::vnext::OuterEventMetadata { - $crate::event::vnext::OuterEventMetadata { + pub fn outer_event_metadata_vnext() -> $crate::metadata::v13::OuterEventMetadata { + $crate::metadata::v13::OuterEventMetadata { name: stringify!($event_name), events: $crate::scale_info::prelude::vec![ $( @@ -610,7 +610,7 @@ macro_rules! __impl_outer_event_json_metadata_vnext { $( #[allow(dead_code)] pub fn [< __module_events_vnext_ $module_name $( _ $instance )? >] () -> - Vec<$crate::event::vnext::EventMetadata> + Vec<$crate::metadata::v13::EventMetadata> { $module_name::Event ::< $( $generic_params ),* > ::metadata_vnext() } diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index 0fe698c72c419..a04ac7b1d2d02 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -25,8 +25,12 @@ macro_rules! 
impl_runtime_metadata_vnext { ) => { impl $runtime { pub fn metadata_vnext() -> $crate::metadata::v13::RuntimeMetadataPrefixed { - $crate::metadata::v13::RuntimeMetadataLastVersion { - modules: $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), + $crate::metadata::v13::RuntimeMetadataLastVersion::new( + $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), + $crate::metadata::v13::ExtrinsicMetadata { + version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + signed_extensions: vec![] // todo: init extensions (see below) + } // extrinsic: $crate::metadata::ExtrinsicMetadata { // version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, // signed_extensions: < @@ -38,7 +42,7 @@ macro_rules! impl_runtime_metadata_vnext { // .map($crate::metadata::DecodeDifferent::Encode) // .collect(), // }, - }.into() + ).into() } } } From 10e52ab33b109857b6dfd28300ec15ee9e40b7f4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 7 Jan 2021 17:10:37 +0000 Subject: [PATCH 037/503] Add missing v13::ModuleMetadata fields --- frame/support/src/metadata_vnext.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index a04ac7b1d2d02..c59fc9e660260 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -65,6 +65,8 @@ macro_rules! __runtime_modules_to_metadata_vnext { $( $metadata, )* $crate::metadata::v13::ModuleMetadata { name: stringify!($name), // index: $index, + // todo: [AJ] storage + storage: None, // storage: $crate::__runtime_modules_to_metadata_calls_storage!( // $mod, $module $( <$instance> )?, $runtime, $(with $kw)* // ), @@ -79,6 +81,8 @@ macro_rules! 
__runtime_modules_to_metadata_vnext { // $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata // ) // ), + // todo: [AJ] errors + errors: vec![], // errors: $crate::metadata::DecodeDifferent::Encode( // $crate::metadata::FnEncode( // <$mod::$module::<$runtime $(, $mod::$instance )?> as $crate::metadata::ModuleErrorMetadata>::metadata From f76cbf04849528c167090de4eeba8e805c5f260e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 7 Jan 2021 17:31:52 +0000 Subject: [PATCH 038/503] Remove scale-info dependency from metadata --- Cargo.lock | 1 - frame/metadata/Cargo.toml | 1 - 2 files changed, 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f5c325d5787db..7982453ca5a56 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1611,7 +1611,6 @@ name = "frame-metadata" version = "12.0.0" dependencies = [ "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-std", diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml index d72e18cabf5f6..2934b15562c43 100644 --- a/frame/metadata/Cargo.toml +++ b/frame/metadata/Cargo.toml @@ -14,7 +14,6 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.4", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } From 0155494b11dd1f56cf3558aad3e2518a356e205c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 8 Jan 2021 12:00:35 +0000 Subject: [PATCH 039/503] Use chameleon branch of frame_metadata --- Cargo.lock | 4 ++-- frame/support/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
7982453ca5a56..68d0b3f7c4473 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1619,7 +1619,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#1ac42f815304186769cc1bce52f84d46508ca225" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-chameleon#f3cec248a9a85c265e733b128ba368224aca7175" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", @@ -1632,7 +1632,7 @@ version = "2.0.0" dependencies = [ "bitflags", "frame-metadata 12.0.0", - "frame-metadata 12.0.0 (git+https://github.com/paritytech/frame-metadata?branch=main)", + "frame-metadata 12.0.0 (git+https://github.com/paritytech/frame-metadata?branch=aj-chameleon)", "frame-support-procedural", "frame-system", "impl-trait-for-tuples", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 507b512777ace..ef0e56ac1a516 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -17,7 +17,7 @@ log = "0.4" serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } frame-metadata = { version = "12.0.0", default-features = false, path = "../metadata" } -frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v13"] } +frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-chameleon", default-features = false, features = ["v13"] } scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } From 35dfcdf3fdf5787820a678ff6a0d3749357e6da9 Mon Sep 17 00:00:00 
2001 From: Andrew Jones Date: Fri, 8 Jan 2021 12:00:53 +0000 Subject: [PATCH 040/503] Add missing metadata api method --- bin/node-template/runtime/src/lib.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/bin/node-template/runtime/src/lib.rs b/bin/node-template/runtime/src/lib.rs index 0812346779646..4ebbb5a2360f1 100644 --- a/bin/node-template/runtime/src/lib.rs +++ b/bin/node-template/runtime/src/lib.rs @@ -335,6 +335,10 @@ impl_runtime_apis! { fn metadata() -> OpaqueMetadata { Runtime::metadata().into() } + + fn metadata_vnext() -> OpaqueMetadata { + OpaqueMetadata::new(Runtime::metadata_vnext().encode()) + } } impl sp_block_builder::BlockBuilder for Runtime { From c5643a93870e50c8ddcc243611c70c91df137e45 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 8 Jan 2021 12:31:05 +0000 Subject: [PATCH 041/503] Add Encode import for node-template --- bin/node-template/runtime/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/node-template/runtime/src/lib.rs b/bin/node-template/runtime/src/lib.rs index 4ebbb5a2360f1..cdf0ae59a65d7 100644 --- a/bin/node-template/runtime/src/lib.rs +++ b/bin/node-template/runtime/src/lib.rs @@ -337,6 +337,7 @@ impl_runtime_apis! 
{ } fn metadata_vnext() -> OpaqueMetadata { + use codec::Encode as _; OpaqueMetadata::new(Runtime::metadata_vnext().encode()) } } From 4954430786380da3cc05a9681b3f166fc2284033 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 8 Jan 2021 15:20:07 +0000 Subject: [PATCH 042/503] Update scale-info --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68d0b3f7c4473..03066919bce8b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7589,7 +7589,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.4.1" -source = "git+https://github.com/paritytech/scale-info#86bbb8256788d7d579725da8fede406169b14a8c" +source = "git+https://github.com/paritytech/scale-info#1c80736fefb825af1c9773b3d7f28bec49390722" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -7600,7 +7600,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.2.1" -source = "git+https://github.com/paritytech/scale-info#86bbb8256788d7d579725da8fede406169b14a8c" +source = "git+https://github.com/paritytech/scale-info#1c80736fefb825af1c9773b3d7f28bec49390722" dependencies = [ "proc-macro2", "quote", From 016d41c216ae1625750bdc87cf93960129e8d69d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 12 Jan 2021 12:16:38 +0000 Subject: [PATCH 043/503] Provide TypeInfo for SignedExtensions --- Cargo.lock | 4 ++-- frame/support/src/metadata.rs | 2 +- frame/support/src/metadata_vnext.rs | 28 ++++++++++++---------------- primitives/runtime/src/traits.rs | 8 ++++---- 4 files changed, 19 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 03066919bce8b..ad86bc325afcd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7589,7 +7589,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.4.1" -source = "git+https://github.com/paritytech/scale-info#1c80736fefb825af1c9773b3d7f28bec49390722" +source = "git+https://github.com/paritytech/scale-info#bfed7ff7815ee73bbc598c9733b4b36eede73590" dependencies = [ "cfg-if 
1.0.0", "derive_more", @@ -7600,7 +7600,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.2.1" -source = "git+https://github.com/paritytech/scale-info#1c80736fefb825af1c9773b3d7f28bec49390722" +source = "git+https://github.com/paritytech/scale-info#bfed7ff7815ee73bbc598c9733b4b36eede73590" dependencies = [ "proc-macro2", "quote", diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 2bdd02026502a..6136c0140aa27 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -81,7 +81,7 @@ macro_rules! impl_runtime_metadata { >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension >::identifier() .into_iter() - .map($crate::metadata::DecodeDifferent::Encode) + .map(|(id, _)| $crate::metadata::DecodeDifferent::Encode(id)) .collect(), }, }.into() diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index c59fc9e660260..ba03121b295d9 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -26,22 +26,18 @@ macro_rules! 
impl_runtime_metadata_vnext { impl $runtime { pub fn metadata_vnext() -> $crate::metadata::v13::RuntimeMetadataPrefixed { $crate::metadata::v13::RuntimeMetadataLastVersion::new( - $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), - $crate::metadata::v13::ExtrinsicMetadata { - version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, - signed_extensions: vec![] // todo: init extensions (see below) - } - // extrinsic: $crate::metadata::ExtrinsicMetadata { - // version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, - // signed_extensions: < - // < - // $ext as $crate::sp_runtime::traits::ExtrinsicMetadata - // >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension - // >::identifier() - // .into_iter() - // .map($crate::metadata::DecodeDifferent::Encode) - // .collect(), - // }, + $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), + $crate::metadata::v13::ExtrinsicMetadata { + version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + signed_extensions: < + < + $ext as $crate::sp_runtime::traits::ExtrinsicMetadata + >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension + >::identifier() + .into_iter() + .map(|(_, ty)| ty) + .collect(), + }, ).into() } } diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index b0567b7ae0d05..090c5c5b163d2 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -722,7 +722,7 @@ impl Dispatchable for () { /// Means by which a transaction may be extended. This type embodies both the data and the logic /// that should be additionally associated with the transaction. It should be plain old data. -pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq { +pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq + ::scale_info::TypeInfo + 'static { /// Unique identifier of this signed extension. 
/// /// This will be exposed in the metadata to identify the signed extension used @@ -849,8 +849,8 @@ pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq /// *exactly* one identifier. /// /// This method provides a default implementation that returns `vec![SELF::IDENTIFIER]`. - fn identifier() -> Vec<&'static str> { - sp_std::vec![Self::IDENTIFIER] + fn identifier() -> Vec<(&'static str, ::scale_info::MetaType)> { + sp_std::vec![(Self::IDENTIFIER, ::scale_info::meta_type::())] } } @@ -914,7 +914,7 @@ impl SignedExtension for Tuple { Ok(()) } - fn identifier() -> Vec<&'static str> { + fn identifier() -> Vec<(&'static str, ::scale_info::MetaType)> { let mut ids = Vec::new(); for_tuples!( #( ids.extend(Tuple::identifier()); )* ); ids From 15d788658f21c76bdbf4b33ec758d75cfb11b0e5 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 12 Jan 2021 13:39:39 +0000 Subject: [PATCH 044/503] Add TypeInfo to extensions, not compiling yet --- Cargo.lock | 1 + frame/system/src/extensions/check_genesis.rs | 11 ++++++----- frame/system/src/extensions/check_mortality.rs | 11 ++++++----- frame/system/src/extensions/check_nonce.rs | 9 +++++---- frame/system/src/extensions/check_spec_version.rs | 11 ++++++----- frame/system/src/extensions/check_tx_version.rs | 11 ++++++----- frame/system/src/extensions/check_weight.rs | 11 ++++++----- frame/system/src/lib.rs | 2 +- frame/transaction-payment/Cargo.toml | 1 + frame/transaction-payment/src/lib.rs | 11 ++++++----- primitives/runtime/src/generic/era.rs | 2 +- 11 files changed, 45 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ad86bc325afcd..916351ce6a0fa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5099,6 +5099,7 @@ dependencies = [ "pallet-balances", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", + "scale-info", "serde", "smallvec 1.5.0", "sp-core", diff --git a/frame/system/src/extensions/check_genesis.rs b/frame/system/src/extensions/check_genesis.rs index 
de635b4fb91a6..a23ba8e3de7e7 100644 --- a/frame/system/src/extensions/check_genesis.rs +++ b/frame/system/src/extensions/check_genesis.rs @@ -17,16 +17,17 @@ use codec::{Encode, Decode}; use crate::{Config, Module}; +use scale_info::TypeInfo; use sp_runtime::{ traits::{SignedExtension, Zero}, transaction_validity::TransactionValidityError, }; /// Genesis hash check to provide replay protection between different networks. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] -pub struct CheckGenesis(sp_std::marker::PhantomData); +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +pub struct CheckGenesis(sp_std::marker::PhantomData); -impl sp_std::fmt::Debug for CheckGenesis { +impl sp_std::fmt::Debug for CheckGenesis { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckGenesis") @@ -38,14 +39,14 @@ impl sp_std::fmt::Debug for CheckGenesis { } } -impl CheckGenesis { +impl CheckGenesis { /// Creates new `SignedExtension` to check genesis hash. pub fn new() -> Self { Self(sp_std::marker::PhantomData) } } -impl SignedExtension for CheckGenesis { +impl SignedExtension for CheckGenesis { type AccountId = T::AccountId; type Call = ::Call; type AdditionalSigned = T::Hash; diff --git a/frame/system/src/extensions/check_mortality.rs b/frame/system/src/extensions/check_mortality.rs index 8e5fd36e6217a..2b38bd2af220c 100644 --- a/frame/system/src/extensions/check_mortality.rs +++ b/frame/system/src/extensions/check_mortality.rs @@ -18,6 +18,7 @@ use codec::{Encode, Decode}; use crate::{Config, Module, BlockHash}; use frame_support::StorageMap; +use scale_info::TypeInfo; use sp_runtime::{ generic::Era, traits::{SignedExtension, DispatchInfoOf, SaturatedConversion}, @@ -27,17 +28,17 @@ use sp_runtime::{ }; /// Check for transaction mortality. 
-#[derive(Encode, Decode, Clone, Eq, PartialEq)] -pub struct CheckMortality(Era, sp_std::marker::PhantomData); +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +pub struct CheckMortality(Era, sp_std::marker::PhantomData); -impl CheckMortality { +impl CheckMortality { /// utility constructor. Used only in client/factory code. pub fn from(era: Era) -> Self { Self(era, sp_std::marker::PhantomData) } } -impl sp_std::fmt::Debug for CheckMortality { +impl sp_std::fmt::Debug for CheckMortality { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckMortality({:?})", self.0) @@ -49,7 +50,7 @@ impl sp_std::fmt::Debug for CheckMortality { } } -impl SignedExtension for CheckMortality { +impl SignedExtension for CheckMortality { type AccountId = T::AccountId; type Call = T::Call; type AdditionalSigned = T::Hash; diff --git a/frame/system/src/extensions/check_nonce.rs b/frame/system/src/extensions/check_nonce.rs index 0c610506d6616..f8ce5d2f19b4b 100644 --- a/frame/system/src/extensions/check_nonce.rs +++ b/frame/system/src/extensions/check_nonce.rs @@ -21,6 +21,7 @@ use frame_support::{ weights::DispatchInfo, StorageMap, }; +use scale_info::TypeInfo; use sp_runtime::{ traits::{SignedExtension, DispatchInfoOf, Dispatchable, One}, transaction_validity::{ @@ -34,17 +35,17 @@ use sp_std::vec; /// /// Note that this does not set any priority by default. Make sure that AT LEAST one of the signed /// extension sets some kind of priority upon validating transactions. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] pub struct CheckNonce(#[codec(compact)] T::Index); -impl CheckNonce { +impl CheckNonce { /// utility constructor. Used only in client/factory code. 
pub fn from(nonce: T::Index) -> Self { Self(nonce) } } -impl sp_std::fmt::Debug for CheckNonce { +impl sp_std::fmt::Debug for CheckNonce { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckNonce({})", self.0) @@ -56,7 +57,7 @@ impl sp_std::fmt::Debug for CheckNonce { } } -impl SignedExtension for CheckNonce where +impl SignedExtension for CheckNonce where T::Call: Dispatchable { type AccountId = T::AccountId; diff --git a/frame/system/src/extensions/check_spec_version.rs b/frame/system/src/extensions/check_spec_version.rs index 1fd8376d342b2..9983859eb8973 100644 --- a/frame/system/src/extensions/check_spec_version.rs +++ b/frame/system/src/extensions/check_spec_version.rs @@ -17,16 +17,17 @@ use crate::{Config, Module}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::{ traits::SignedExtension, transaction_validity::TransactionValidityError, }; /// Ensure the runtime version registered in the transaction is the same as at present. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] -pub struct CheckSpecVersion(sp_std::marker::PhantomData); +#[derive(Encode, Decode, Clone, Eq, PartialEq, ::scale_info::TypeInfo)] +pub struct CheckSpecVersion(sp_std::marker::PhantomData); -impl sp_std::fmt::Debug for CheckSpecVersion { +impl sp_std::fmt::Debug for CheckSpecVersion { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckSpecVersion") @@ -38,14 +39,14 @@ impl sp_std::fmt::Debug for CheckSpecVersion { } } -impl CheckSpecVersion { +impl CheckSpecVersion { /// Create new `SignedExtension` to check runtime version. 
pub fn new() -> Self { Self(sp_std::marker::PhantomData) } } -impl SignedExtension for CheckSpecVersion { +impl SignedExtension for CheckSpecVersion { type AccountId = T::AccountId; type Call = ::Call; type AdditionalSigned = u32; diff --git a/frame/system/src/extensions/check_tx_version.rs b/frame/system/src/extensions/check_tx_version.rs index fa11a0a5727f1..089d1722fd1d1 100644 --- a/frame/system/src/extensions/check_tx_version.rs +++ b/frame/system/src/extensions/check_tx_version.rs @@ -17,16 +17,17 @@ use crate::{Config, Module}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::{ traits::SignedExtension, transaction_validity::TransactionValidityError, }; /// Ensure the transaction version registered in the transaction is the same as at present. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] -pub struct CheckTxVersion(sp_std::marker::PhantomData); +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +pub struct CheckTxVersion(sp_std::marker::PhantomData); -impl sp_std::fmt::Debug for CheckTxVersion { +impl sp_std::fmt::Debug for CheckTxVersion { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckTxVersion") @@ -38,14 +39,14 @@ impl sp_std::fmt::Debug for CheckTxVersion { } } -impl CheckTxVersion { +impl CheckTxVersion { /// Create new `SignedExtension` to check transaction version. 
pub fn new() -> Self { Self(sp_std::marker::PhantomData) } } -impl SignedExtension for CheckTxVersion { +impl SignedExtension for CheckTxVersion { type AccountId = T::AccountId; type Call = ::Call; type AdditionalSigned = u32; diff --git a/frame/system/src/extensions/check_weight.rs b/frame/system/src/extensions/check_weight.rs index 3cda7971ca44a..f05e35fa8171e 100644 --- a/frame/system/src/extensions/check_weight.rs +++ b/frame/system/src/extensions/check_weight.rs @@ -17,6 +17,7 @@ use crate::{limits::BlockWeights, Config, Module}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::{ traits::{SignedExtension, DispatchInfoOf, Dispatchable, PostDispatchInfoOf, Printable}, transaction_validity::{ @@ -32,10 +33,10 @@ use frame_support::{ }; /// Block resource (weight) limit check. -#[derive(Encode, Decode, Clone, Eq, PartialEq, Default)] -pub struct CheckWeight(sp_std::marker::PhantomData); +#[derive(Encode, Decode, Clone, Eq, PartialEq, Default, TypeInfo)] +pub struct CheckWeight(sp_std::marker::PhantomData); -impl CheckWeight where +impl CheckWeight where T::Call: Dispatchable, { /// Checks if the current extrinsic does not exceed the maximum weight a single extrinsic @@ -185,7 +186,7 @@ pub fn calculate_consumed_weight( Ok(all_weight) } -impl SignedExtension for CheckWeight where +impl SignedExtension for CheckWeight where T::Call: Dispatchable { type AccountId = T::AccountId; @@ -266,7 +267,7 @@ impl SignedExtension for CheckWeight where } } -impl sp_std::fmt::Debug for CheckWeight { +impl sp_std::fmt::Debug for CheckWeight { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckWeight") diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 0ad1094509569..c3ac51b419b4a 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -203,7 +203,7 @@ pub mod pallet { /// with a sender account. 
type Index: Parameter + Member + MaybeSerialize + Debug + Default + MaybeDisplay + AtLeast32Bit - + Copy; + + Copy + scale_info::TypeInfo; /// The block number type used by the runtime. type BlockNumber: diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 1fa4521900421..d4b4753de15cb 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.1", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index 932aaf43dc9d6..f1d8c7b82c0e6 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -44,6 +44,7 @@ use frame_support::{ }, dispatch::DispatchResult, }; +use scale_info::TypeInfo; use sp_runtime::{ FixedU128, FixedPointNumber, FixedPointOperand, Perquintill, RuntimeDebug, transaction_validity::{ @@ -441,10 +442,10 @@ impl Convert> for Module where /// Require the transactor pay for themselves and maybe include a tip to gain additional priority /// in the queue. 
-#[derive(Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] pub struct ChargeTransactionPayment(#[codec(compact)] BalanceOf); -impl ChargeTransactionPayment where +impl ChargeTransactionPayment where T::Call: Dispatchable, BalanceOf: Send + Sync + FixedPointOperand, { @@ -492,7 +493,7 @@ impl ChargeTransactionPayment where } } -impl sp_std::fmt::Debug for ChargeTransactionPayment { +impl sp_std::fmt::Debug for ChargeTransactionPayment { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "ChargeTransactionPayment<{:?}>", self.0) @@ -503,8 +504,8 @@ impl sp_std::fmt::Debug for ChargeTransactionPayment { } } -impl SignedExtension for ChargeTransactionPayment where - BalanceOf: Send + Sync + From + FixedPointOperand, +impl SignedExtension for ChargeTransactionPayment where + BalanceOf: Send + Sync + From + FixedPointOperand + TypeInfo, T::Call: Dispatchable, { const IDENTIFIER: &'static str = "ChargeTransactionPayment"; diff --git a/primitives/runtime/src/generic/era.rs b/primitives/runtime/src/generic/era.rs index 381c34ef419dc..5602ee839e98b 100644 --- a/primitives/runtime/src/generic/era.rs +++ b/primitives/runtime/src/generic/era.rs @@ -29,7 +29,7 @@ pub type Period = u64; pub type Phase = u64; /// An era to describe the longevity of a transaction. -#[derive(PartialEq, Eq, Clone, Copy, sp_core::RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, scale_info::TypeInfo, sp_core::RuntimeDebug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum Era { /// The transaction is valid forever. The genesis hash must be present in the signed content. 
From 85a358647d937ca7b2b5b610f417aad74665d209 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 13 Jan 2021 16:10:00 +0000 Subject: [PATCH 045/503] Add TypeInfo impls required for extensions, compiles now --- Cargo.lock | 3 +++ bin/node-template/runtime/Cargo.toml | 2 ++ bin/node/runtime/Cargo.toml | 2 ++ frame/example/Cargo.toml | 2 ++ frame/example/src/lib.rs | 5 +++-- frame/support/procedural/src/construct_runtime/mod.rs | 2 +- frame/transaction-payment/Cargo.toml | 1 + primitives/runtime/src/generic/unchecked_extrinsic.rs | 2 +- primitives/runtime/src/traits.rs | 2 +- 9 files changed, 16 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 916351ce6a0fa..011073754410a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3985,6 +3985,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4062,6 +4063,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4592,6 +4594,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index f1b15070ddde9..de9c7b015e3bf 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -12,6 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.4", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } pallet-aura = { version = "2.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "2.0.0", default-features = false, path = "../../../frame/balances" } @@ -54,6 +55,7 @@ 
substrate-wasm-builder = { version = "3.0.0", path = "../../../utils/wasm-builde default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-executive/std", "frame-support/std", "pallet-aura/std", diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 4dabc5c015921..09703819504a7 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "1.3.4", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = true } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } @@ -92,6 +93,7 @@ sp-io = { version = "2.0.0", path = "../../../primitives/io" } default = ["std"] with-tracing = [ "frame-executive/with-tracing" ] std = [ + "scale-info/std", "sp-authority-discovery/std", "pallet-assets/std", "pallet-authority-discovery/std", diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 41889ea4828d0..72cc2906d0a35 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.4", default-features = false } +scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } pallet-balances = { version = "2.0.0", default-features = false, path = "../balances" } @@ -32,6 +33,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-runtime/std", 
"frame-benchmarking/std", "frame-support/std", diff --git a/frame/example/src/lib.rs b/frame/example/src/lib.rs index 382d67263d1b0..6c14c530d4781 100644 --- a/frame/example/src/lib.rs +++ b/frame/example/src/lib.rs @@ -263,6 +263,7 @@ use frame_support::{ use sp_std::prelude::*; use frame_system::{ensure_signed, ensure_root}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::{ traits::{ SignedExtension, Bounded, SaturatedConversion, DispatchInfoOf, @@ -601,7 +602,7 @@ impl Module { /// /// Additionally, it drops any transaction with an encoded length higher than 200 bytes. No /// particular reason why, just to demonstrate the power of signed extensions. -#[derive(Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] pub struct WatchDummy(PhantomData); impl sp_std::fmt::Debug for WatchDummy { @@ -610,7 +611,7 @@ impl sp_std::fmt::Debug for WatchDummy { } } -impl SignedExtension for WatchDummy +impl SignedExtension for WatchDummy where ::Call: IsSubType>, { diff --git a/frame/support/procedural/src/construct_runtime/mod.rs b/frame/support/procedural/src/construct_runtime/mod.rs index 061fa3b437504..ca0a9a616ffca 100644 --- a/frame/support/procedural/src/construct_runtime/mod.rs +++ b/frame/support/procedural/src/construct_runtime/mod.rs @@ -175,7 +175,7 @@ fn construct_runtime_parsed(definition: RuntimeDefinition) -> Result where Extra: SignedExtension diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 090c5c5b163d2..2f46d0c2a1063 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -722,7 +722,7 @@ impl Dispatchable for () { /// Means by which a transaction may be extended. This type embodies both the data and the logic /// that should be additionally associated with the transaction. It should be plain old data. 
-pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq + ::scale_info::TypeInfo + 'static { +pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq + scale_info::TypeInfo + 'static { /// Unique identifier of this signed extension. /// /// This will be exposed in the metadata to identify the signed extension used From ea370824aa2f216d045fb18a43da434ea6688f6a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 13 Jan 2021 17:09:11 +0000 Subject: [PATCH 046/503] Initialise updated v13 extrinsic metadata, not compiling Requires some more TypeInfo annotations, specifically on Call which could get interesting so leaving for now... --- Cargo.lock | 6 +++--- frame/support/src/metadata_vnext.rs | 6 +++++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 011073754410a..c4a747a17ef41 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1619,7 +1619,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-chameleon#f3cec248a9a85c265e733b128ba368224aca7175" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-chameleon#b1008c8f6032eb2ed27a590b28ac39356b7b480f" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", @@ -7593,7 +7593,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.4.1" -source = "git+https://github.com/paritytech/scale-info#bfed7ff7815ee73bbc598c9733b4b36eede73590" +source = "git+https://github.com/paritytech/scale-info#8b7bd5e372abd6dd83efdecb727522e7c5b0e712" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -7604,7 +7604,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.2.1" -source = "git+https://github.com/paritytech/scale-info#bfed7ff7815ee73bbc598c9733b4b36eede73590" +source = "git+https://github.com/paritytech/scale-info#8b7bd5e372abd6dd83efdecb727522e7c5b0e712" dependencies = [ "proc-macro2", "quote", diff --git 
a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index ba03121b295d9..6da77a5977cdd 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -28,6 +28,7 @@ macro_rules! impl_runtime_metadata_vnext { $crate::metadata::v13::RuntimeMetadataLastVersion::new( $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), $crate::metadata::v13::ExtrinsicMetadata { + ty: $crate::scale_info::meta_type::<$ext>(), version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, signed_extensions: < < @@ -35,7 +36,10 @@ macro_rules! impl_runtime_metadata_vnext { >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension >::identifier() .into_iter() - .map(|(_, ty)| ty) + .map(|(id, ty)| $crate::metadata::v13::SignedExtensionMetadata { + identifier: id, + ty, + }) .collect(), }, ).into() From 79ff20692b478856a960005ffc5b8958fdbb9da9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 11:33:41 +0000 Subject: [PATCH 047/503] Fix error after merge, don't shadow outer event --- frame/support/procedural/src/pallet/expand/event.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index f50fae14bdf69..5d20d9930cf90 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -42,10 +42,10 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); let event_impl_gen= &event.gen_kind.type_impl_gen(event.attr_span); let metadata = event.metadata.iter() - .map(|event| { - let name = format!("{}", event.name); - let args = event.args.iter().map(|arg| arg.1.clone()); - let docs = &event.docs; + .map(|event_def| { + let name = format!("{}", event_def.name); + let args = event_def.args.iter().map(|arg| arg.1.clone()); + let 
docs = &event_def.docs; quote::quote_spanned!(event.attr_span => #frame_support::event::EventMetadata { name: #frame_support::event::DecodeDifferent::Encode(#name), From c43206ad7d02edb71ee799e14a351f5f53d25ab8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 13:14:10 +0000 Subject: [PATCH 048/503] Initial migration of balances pallet --- frame/balances/src/lib.rs | 572 ++++++++++++++++++++------------------ 1 file changed, 308 insertions(+), 264 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index ef069455bbabe..210c0772259dd 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -15,13 +15,13 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! # Balances Module +//! # Balances Pallet //! //! The Balances module provides functionality for handling accounts and balances. //! //! - [`balances::Config`](./trait.Config.html) //! - [`Call`](./enum.Call.html) -//! - [`Module`](./struct.Module.html) +//! - [`Pallet`](./struct.Pallet.html) //! //! ## Overview //! 
@@ -160,7 +160,7 @@ use sp_std::prelude::*; use sp_std::{cmp, result, mem, fmt::Debug, ops::BitOr}; use codec::{Codec, Encode, Decode}; use frame_support::{ - StorageValue, Parameter, decl_event, decl_storage, decl_module, decl_error, ensure, + StorageValue, Parameter, traits::{ Currency, OnUnbalanced, TryDrop, StoredMap, WithdrawReasons, LockIdentifier, LockableCurrency, ExistenceRequirement, @@ -168,6 +168,8 @@ use frame_support::{ ExistenceRequirement::AllowDeath, BalanceStatus as Status, } }; +#[cfg(feature = "std")] +use frame_support::traits::GenesisBuild; use sp_runtime::{ RuntimeDebug, DispatchResult, DispatchError, traits::{ @@ -179,87 +181,219 @@ use frame_system::{self as system, ensure_signed, ensure_root}; pub use self::imbalances::{PositiveImbalance, NegativeImbalance}; pub use weights::WeightInfo; -pub trait Subtrait: frame_system::Config { - /// The balance of an account. - type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + +pub use pallet::*; + +#[frame_support::pallet] +pub mod pallet { + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + use super::*; + + #[pallet::config] + pub trait Config: frame_system::Config { + /// The balance of an account. + type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + MaybeSerializeDeserialize + Debug; - /// The minimum amount required to keep an account open. - type ExistentialDeposit: Get; + /// Handler for the unbalanced reduction when removing a dust account. + type DustRemoval: OnUnbalanced>; - /// The means of storing the balances of an account. - type AccountStore: StoredMap>; + /// The overarching event type. + type Event: From> + Into<::Event>; - /// Weight information for the extrinsics in this pallet. - type WeightInfo: WeightInfo; + /// The minimum amount required to keep an account open. + #[pallet::constant] + type ExistentialDeposit: Get; - /// The maximum number of locks that should exist on an account. 
- /// Not strictly enforced, but used for weight estimation. - type MaxLocks: Get; -} + /// The means of storing the balances of an account. + type AccountStore: StoredMap>; -pub trait Config: frame_system::Config { - /// The balance of an account. - type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + - MaybeSerializeDeserialize + Debug; + /// Weight information for extrinsics in this pallet. + type WeightInfo: WeightInfo; - /// Handler for the unbalanced reduction when removing a dust account. - type DustRemoval: OnUnbalanced>; + /// The maximum number of locks that should exist on an account. + /// Not strictly enforced, but used for weight estimation. + type MaxLocks: Get; + } - /// The overarching event type. - type Event: From> + Into<::Event>; + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(PhantomData<(T, I)>); - /// The minimum amount required to keep an account open. - type ExistentialDeposit: Get; + #[pallet::hooks] + impl, I: 'static> Hooks> for Pallet { + } - /// The means of storing the balances of an account. - type AccountStore: StoredMap>; + #[pallet::call] + impl, I: 'static> Pallet { + /// Transfer some liquid free balance to another account. + /// + /// `transfer` will set the `FreeBalance` of the sender and receiver. + /// It will decrease the total issuance of the system by the `TransferFee`. + /// If the sender's account is below the existential deposit as a result + /// of the transfer, the account will be reaped. + /// + /// The dispatch origin for this call must be `Signed` by the transactor. + /// + /// # + /// - Dependent on arguments but not critical, given proper implementations for + /// input config types. See related functions below. + /// - It contains a limited number of reads and writes internally and no complex computation. + /// + /// Related functions: + /// + /// - `ensure_can_withdraw` is always called internally but has a bounded complexity. 
+ /// - Transferring balances to accounts that did not exist before will cause + /// `T::OnNewAccount::on_new_account` to be called. + /// - Removing enough funds from an account will trigger `T::DustRemoval::on_unbalanced`. + /// - `transfer_keep_alive` works the same way as `transfer`, but has an additional + /// check that the transfer will not kill the origin account. + /// --------------------------------- + /// - Base Weight: 73.64 µs, worst case scenario (account created, account removed) + /// - DB Weight: 1 Read and 1 Write to destination account + /// - Origin account is already in memory, so no DB operations for them. + /// # + #[pallet::weight(T::WeightInfo::transfer())] + pub fn transfer( + origin: OriginFor, + dest: ::Source, + #[pallet::compact] value: T::Balance + ) -> DispatchResultWithPostInfo { + let transactor = ensure_signed(origin)?; + let dest = T::Lookup::lookup(dest)?; + >::transfer(&transactor, &dest, value, ExistenceRequirement::AllowDeath)?; + Ok(().into()) + } - /// Weight information for extrinsics in this pallet. - type WeightInfo: WeightInfo; + /// Set the balances of a given account. + /// + /// This will alter `FreeBalance` and `ReservedBalance` in storage. it will + /// also decrease the total issuance of the system (`TotalIssuance`). + /// If the new free or reserved balance is below the existential deposit, + /// it will reset the account nonce (`frame_system::AccountNonce`). + /// + /// The dispatch origin for this call is `root`. + /// + /// # + /// - Independent of the arguments. + /// - Contains a limited number of reads and writes. + /// --------------------- + /// - Base Weight: + /// - Creating: 27.56 µs + /// - Killing: 35.11 µs + /// - DB Weight: 1 Read, 1 Write to `who` + /// # + #[weight(T::WeightInfo::set_balance_creating() // Creates a new account. + .max(T::WeightInfo::set_balance_killing()) // Kills an existing account. 
+ )] + fn set_balance( + origin: OriginFor, + who: ::Source, + #[pallet::compact] new_free: T::Balance, + #[pallet::compact] new_reserved: T::Balance + ) -> DispatchResultWithPostInfo { + ensure_root(origin)?; + let who = T::Lookup::lookup(who)?; + let existential_deposit = T::ExistentialDeposit::get(); - /// The maximum number of locks that should exist on an account. - /// Not strictly enforced, but used for weight estimation. - type MaxLocks: Get; -} + let wipeout = new_free + new_reserved < existential_deposit; + let new_free = if wipeout { Zero::zero() } else { new_free }; + let new_reserved = if wipeout { Zero::zero() } else { new_reserved }; -impl, I: Instance> Subtrait for T { - type Balance = T::Balance; - type ExistentialDeposit = T::ExistentialDeposit; - type AccountStore = T::AccountStore; - type WeightInfo = >::WeightInfo; - type MaxLocks = T::MaxLocks; -} + let (free, reserved) = Self::mutate_account(&who, |account| { + if new_free > account.free { + mem::drop(PositiveImbalance::::new(new_free - account.free)); + } else if new_free < account.free { + mem::drop(NegativeImbalance::::new(account.free - new_free)); + } -decl_event!( - pub enum Event where - ::AccountId, - >::Balance - { + if new_reserved > account.reserved { + mem::drop(PositiveImbalance::::new(new_reserved - account.reserved)); + } else if new_reserved < account.reserved { + mem::drop(NegativeImbalance::::new(account.reserved - new_reserved)); + } + + account.free = new_free; + account.reserved = new_reserved; + + (account.free, account.reserved) + })?; + Self::deposit_event(RawEvent::BalanceSet(who, free, reserved)); + Ok(().into()) + } + + /// Exactly as `transfer`, except the origin must be root and the source account may be + /// specified. + /// # + /// - Same as transfer, but additional read and write because the source account is + /// not assumed to be in the overlay. 
+ /// # + #[pallet::weight(T::WeightInfo::force_transfer())] + pub fn force_transfer( + origin: OriginFor, + source: ::Source, + dest: ::Source, + #[pallet::compact] value: T::Balance + ) -> DispatchResultWithPostInfo { + ensure_root(origin)?; + let source = T::Lookup::lookup(source)?; + let dest = T::Lookup::lookup(dest)?; + >::transfer(&source, &dest, value, ExistenceRequirement::AllowDeath)?; + Ok(().into()) + } + + /// Same as the [`transfer`] call, but with a check that the transfer will not kill the + /// origin account. + /// + /// 99% of the time you want [`transfer`] instead. + /// + /// [`transfer`]: struct.Pallet.html#method.transfer + /// # + /// - Cheaper than transfer because account cannot be killed. + /// - Base Weight: 51.4 µs + /// - DB Weight: 1 Read and 1 Write to dest (sender is in overlay already) + /// # + #[pallet::weight(T::WeightInfo::transfer_keep_alive())] + pub fn transfer_keep_alive( + origin: OriginFor, + dest: ::Source, + #[pallet::compact] value: T::Balance + ) -> DispatchResultWithPostInfo { + let transactor = ensure_signed(origin)?; + let dest = T::Lookup::lookup(dest)?; + >::transfer(&transactor, &dest, value, KeepAlive)?; + Ok(().into()) + } + } + + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + #[pallet::metadata(T::AccountId = "AccountId")] + #[pallet::metadata(T::Balance = "Balance")] + pub enum Event, I: 'static> { /// An account was created with some free balance. \[account, free_balance\] - Endowed(AccountId, Balance), + Endowed(T::AccountId, T::Balance), /// An account was removed whose balance was non-zero but below ExistentialDeposit, /// resulting in an outright loss. \[account, balance\] - DustLost(AccountId, Balance), + DustLost(T::AccountId, T::Balance), /// Transfer succeeded. \[from, to, value\] - Transfer(AccountId, AccountId, Balance), + Transfer(T::AccountId, T::AccountId, T::Balance), /// A balance was set by root. 
\[who, free, reserved\] - BalanceSet(AccountId, Balance, Balance), + BalanceSet(T::AccountId, T::Balance, T::Balance), /// Some amount was deposited (e.g. for transaction fees). \[who, deposit\] - Deposit(AccountId, Balance), + Deposit(T::AccountId, T::Balance), /// Some balance was reserved (moved from free to reserved). \[who, value\] - Reserved(AccountId, Balance), + Reserved(T::AccountId, T::Balance), /// Some balance was unreserved (moved from reserved to free). \[who, value\] - Unreserved(AccountId, Balance), + Unreserved(T::AccountId, T::Balance), /// Some balance was moved from the reserve of the first account to the second account. /// Final argument indicates the destination balance type. /// \[from, to, balance, destination_status\] - ReserveRepatriated(AccountId, AccountId, Balance, Status), + ReserveRepatriated(T::AccountId, T::AccountId, T::Balance, Status), } -); -decl_error! { - pub enum Error for Module, I: Instance> { + #[pallet::error] + pub enum Error { /// Vesting balance too high to send value VestingBalance, /// Account liquidity restrictions prevent withdrawal @@ -277,8 +411,108 @@ decl_error! { /// Beneficiary account must pre-exist DeadAccount, } + + // #[pallet::origin] + // TODO_ORIGIN + + // #[pallet::validate_unsigned] + // TODO_VALIDATE_UNSIGNED + + /// The total units issued in the system. + #[pallet::storage] + #[pallet::getter(fn total_issuance)] + pub type TotalIssuance, I: 'static = ()> = StorageValue<_, T::Balance, ValueQuery>; + + /// The balance of an account. + /// + /// NOTE: This is only used in the case that this module is used to store balances. + #[pallet::storage] + pub type Account, I: 'static = ()> = StorageMap<_, Blake2_128Concat, T::AccountId, AccountData, ValueQuery>; + + /// Any liquidity locks on some account balances. + /// NOTE: Should only be accessed when setting, changing and freeing a lock. 
+ #[pallet::storage] + #[pallet::getter(fn locks)] + pub type Locks, I: 'static = ()> = StorageMap<_, Blake2_128Concat, T::AccountId, Vec>, ValueQuery>; + + /// Storage version of the pallet. + /// + /// This is set to v2.0.0 for new networks. + #[pallet::storage] + pub(super) type StorageVersion, I: 'static = ()> = StorageValue<_, Releases, ValueQuery>; + + + #[pallet::genesis_config] + pub struct GenesisConfig, I: 'static = ()> { + pub balances: Vec<(T::AccountId, T::Balance)>, + } + + #[cfg(feature = "std")] + impl, I: 'static> Default for GenesisConfig { + fn default() -> Self { + Self { + balances: Default::default(), + } + } + } + + #[pallet::genesis_build] + impl, I: 'static> GenesisBuild for GenesisConfig { + fn build(&self) { + let total = config.balances + .iter() + .fold(Zero::zero(), |acc: T::Balance, &(_, n)| acc + n); + >::put(total); + + ::put(Releases::V2_0_0); + + for (_, balance) in &config.balances { + assert!( + *balance >= >::ExistentialDeposit::get(), + "the balance of any account should always be at least the existential deposit.", + ) + } + + // ensure no duplicates exist. + let endowed_accounts = config.balances.iter().map(|(x, _)| x).cloned().collect::>(); + + assert!(endowed_accounts.len() == config.balances.len(), "duplicate balances in genesis."); + + for &(ref who, free) in config.balances.iter() { + assert!(T::AccountStore::insert(who, AccountData { free, ..Default::default() }).is_ok()); + } + } + } } +// todo: do we need the subtrait? +// pub trait Subtrait: frame_system::Config { +// /// The balance of an account. +// type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + +// MaybeSerializeDeserialize + Debug; +// +// /// The minimum amount required to keep an account open. +// type ExistentialDeposit: Get; +// +// /// The means of storing the balances of an account. +// type AccountStore: StoredMap>; +// +// /// Weight information for the extrinsics in this pallet. 
+// type WeightInfo: WeightInfo; +// +// /// The maximum number of locks that should exist on an account. +// /// Not strictly enforced, but used for weight estimation. +// type MaxLocks: Get; +// } +// +// impl, I: 'static> Subtrait for T { +// type Balance = T::Balance; +// type ExistentialDeposit = T::ExistentialDeposit; +// type AccountStore = T::AccountStore; +// type WeightInfo = >::WeightInfo; +// type MaxLocks = T::MaxLocks; +// } + /// Simplified reasons for withdrawing balance. #[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] pub enum Reasons { @@ -381,197 +615,7 @@ impl Default for Releases { } } -decl_storage! { - trait Store for Module, I: Instance=DefaultInstance> as Balances { - /// The total units issued in the system. - pub TotalIssuance get(fn total_issuance) build(|config: &GenesisConfig| { - config.balances.iter().fold(Zero::zero(), |acc: T::Balance, &(_, n)| acc + n) - }): T::Balance; - - /// The balance of an account. - /// - /// NOTE: This is only used in the case that this module is used to store balances. - pub Account: map hasher(blake2_128_concat) T::AccountId => AccountData; - - /// Any liquidity locks on some account balances. - /// NOTE: Should only be accessed when setting, changing and freeing a lock. - pub Locks get(fn locks): map hasher(blake2_128_concat) T::AccountId => Vec>; - - /// Storage version of the pallet. - /// - /// This is set to v2.0.0 for new networks. - StorageVersion build(|_: &GenesisConfig| Releases::V2_0_0): Releases; - } - add_extra_genesis { - config(balances): Vec<(T::AccountId, T::Balance)>; - // ^^ begin, length, amount liquid at genesis - build(|config: &GenesisConfig| { - for (_, balance) in &config.balances { - assert!( - *balance >= >::ExistentialDeposit::get(), - "the balance of any account should always be at least the existential deposit.", - ) - } - - // ensure no duplicates exist. 
- let endowed_accounts = config.balances.iter().map(|(x, _)| x).cloned().collect::>(); - - assert!(endowed_accounts.len() == config.balances.len(), "duplicate balances in genesis."); - - for &(ref who, free) in config.balances.iter() { - assert!(T::AccountStore::insert(who, AccountData { free, .. Default::default() }).is_ok()); - } - }); - } -} - -decl_module! { - pub struct Module, I: Instance = DefaultInstance> for enum Call where origin: T::Origin { - type Error = Error; - - /// The minimum amount required to keep an account open. - const ExistentialDeposit: T::Balance = T::ExistentialDeposit::get(); - - fn deposit_event() = default; - - /// Transfer some liquid free balance to another account. - /// - /// `transfer` will set the `FreeBalance` of the sender and receiver. - /// It will decrease the total issuance of the system by the `TransferFee`. - /// If the sender's account is below the existential deposit as a result - /// of the transfer, the account will be reaped. - /// - /// The dispatch origin for this call must be `Signed` by the transactor. - /// - /// # - /// - Dependent on arguments but not critical, given proper implementations for - /// input config types. See related functions below. - /// - It contains a limited number of reads and writes internally and no complex computation. - /// - /// Related functions: - /// - /// - `ensure_can_withdraw` is always called internally but has a bounded complexity. - /// - Transferring balances to accounts that did not exist before will cause - /// `T::OnNewAccount::on_new_account` to be called. - /// - Removing enough funds from an account will trigger `T::DustRemoval::on_unbalanced`. - /// - `transfer_keep_alive` works the same way as `transfer`, but has an additional - /// check that the transfer will not kill the origin account. 
- /// --------------------------------- - /// - Base Weight: 73.64 µs, worst case scenario (account created, account removed) - /// - DB Weight: 1 Read and 1 Write to destination account - /// - Origin account is already in memory, so no DB operations for them. - /// # - #[weight = T::WeightInfo::transfer()] - pub fn transfer( - origin, - dest: ::Source, - #[compact] value: T::Balance - ) { - let transactor = ensure_signed(origin)?; - let dest = T::Lookup::lookup(dest)?; - >::transfer(&transactor, &dest, value, ExistenceRequirement::AllowDeath)?; - } - - /// Set the balances of a given account. - /// - /// This will alter `FreeBalance` and `ReservedBalance` in storage. it will - /// also decrease the total issuance of the system (`TotalIssuance`). - /// If the new free or reserved balance is below the existential deposit, - /// it will reset the account nonce (`frame_system::AccountNonce`). - /// - /// The dispatch origin for this call is `root`. - /// - /// # - /// - Independent of the arguments. - /// - Contains a limited number of reads and writes. - /// --------------------- - /// - Base Weight: - /// - Creating: 27.56 µs - /// - Killing: 35.11 µs - /// - DB Weight: 1 Read, 1 Write to `who` - /// # - #[weight = T::WeightInfo::set_balance_creating() // Creates a new account. - .max(T::WeightInfo::set_balance_killing()) // Kills an existing account. 
- ] - fn set_balance( - origin, - who: ::Source, - #[compact] new_free: T::Balance, - #[compact] new_reserved: T::Balance - ) { - ensure_root(origin)?; - let who = T::Lookup::lookup(who)?; - let existential_deposit = T::ExistentialDeposit::get(); - - let wipeout = new_free + new_reserved < existential_deposit; - let new_free = if wipeout { Zero::zero() } else { new_free }; - let new_reserved = if wipeout { Zero::zero() } else { new_reserved }; - - let (free, reserved) = Self::mutate_account(&who, |account| { - if new_free > account.free { - mem::drop(PositiveImbalance::::new(new_free - account.free)); - } else if new_free < account.free { - mem::drop(NegativeImbalance::::new(account.free - new_free)); - } - - if new_reserved > account.reserved { - mem::drop(PositiveImbalance::::new(new_reserved - account.reserved)); - } else if new_reserved < account.reserved { - mem::drop(NegativeImbalance::::new(account.reserved - new_reserved)); - } - - account.free = new_free; - account.reserved = new_reserved; - - (account.free, account.reserved) - })?; - Self::deposit_event(RawEvent::BalanceSet(who, free, reserved)); - } - - /// Exactly as `transfer`, except the origin must be root and the source account may be - /// specified. - /// # - /// - Same as transfer, but additional read and write because the source account is - /// not assumed to be in the overlay. - /// # - #[weight = T::WeightInfo::force_transfer()] - pub fn force_transfer( - origin, - source: ::Source, - dest: ::Source, - #[compact] value: T::Balance - ) { - ensure_root(origin)?; - let source = T::Lookup::lookup(source)?; - let dest = T::Lookup::lookup(dest)?; - >::transfer(&source, &dest, value, ExistenceRequirement::AllowDeath)?; - } - - /// Same as the [`transfer`] call, but with a check that the transfer will not kill the - /// origin account. - /// - /// 99% of the time you want [`transfer`] instead. 
- /// - /// [`transfer`]: struct.Module.html#method.transfer - /// # - /// - Cheaper than transfer because account cannot be killed. - /// - Base Weight: 51.4 µs - /// - DB Weight: 1 Read and 1 Write to dest (sender is in overlay already) - /// # - #[weight = T::WeightInfo::transfer_keep_alive()] - pub fn transfer_keep_alive( - origin, - dest: ::Source, - #[compact] value: T::Balance - ) { - let transactor = ensure_signed(origin)?; - let dest = T::Lookup::lookup(dest)?; - >::transfer(&transactor, &dest, value, KeepAlive)?; - } - } -} - -impl, I: Instance> Module { +impl, I: 'static> Pallet { // PRIVATE MUTABLES /// Get the free balance of an account. @@ -695,12 +739,12 @@ impl, I: Instance> Module { if existed { // TODO: use Locks::::hashed_key // https://github.com/paritytech/substrate/issues/4969 - system::Module::::dec_consumers(who); + system::Pallet::::dec_consumers(who); } } else { Locks::::insert(who, locks); if !existed { - if system::Module::::inc_consumers(who).is_err() { + if system::Pallet::::inc_consumers(who).is_err() { // No providers for the locks. This is impossible under normal circumstances // since the funds that are under the lock will themselves be stored in the // account and therefore will need a reference. @@ -727,9 +771,9 @@ mod imbalances { /// funds have been created without any equal and opposite accounting. #[must_use] #[derive(RuntimeDebug, PartialEq, Eq)] - pub struct PositiveImbalance, I: Instance=DefaultInstance>(T::Balance); + pub struct PositiveImbalance, I: 'static>(T::Balance); - impl, I: Instance> PositiveImbalance { + impl, I: 'static> PositiveImbalance { /// Create a new positive imbalance from a balance. pub fn new(amount: T::Balance) -> Self { PositiveImbalance(amount) @@ -740,22 +784,22 @@ mod imbalances { /// funds have been destroyed without any equal and opposite accounting. 
#[must_use] #[derive(RuntimeDebug, PartialEq, Eq)] - pub struct NegativeImbalance, I: Instance=DefaultInstance>(T::Balance); + pub struct NegativeImbalance, I: 'static>(T::Balance); - impl, I: Instance> NegativeImbalance { + impl, I: 'static> NegativeImbalance { /// Create a new negative imbalance from a balance. pub fn new(amount: T::Balance) -> Self { NegativeImbalance(amount) } } - impl, I: Instance> TryDrop for PositiveImbalance { + impl, I: 'static> TryDrop for PositiveImbalance { fn try_drop(self) -> result::Result<(), Self> { self.drop_zero() } } - impl, I: Instance> Imbalance for PositiveImbalance { + impl, I: 'static> Imbalance for PositiveImbalance { type Opposite = NegativeImbalance; fn zero() -> Self { @@ -800,13 +844,13 @@ mod imbalances { } } - impl, I: Instance> TryDrop for NegativeImbalance { + impl, I: 'static> TryDrop for NegativeImbalance { fn try_drop(self) -> result::Result<(), Self> { self.drop_zero() } } - impl, I: Instance> Imbalance for NegativeImbalance { + impl, I: 'static> Imbalance for NegativeImbalance { type Opposite = PositiveImbalance; fn zero() -> Self { @@ -851,7 +895,7 @@ mod imbalances { } } - impl, I: Instance> Drop for PositiveImbalance { + impl, I: 'static> Drop for PositiveImbalance { /// Basic drop handler will just square up the total issuance. fn drop(&mut self) { >::mutate( @@ -860,7 +904,7 @@ mod imbalances { } } - impl, I: Instance> Drop for NegativeImbalance { + impl, I: 'static> Drop for NegativeImbalance { /// Basic drop handler will just square up the total issuance. fn drop(&mut self) { >::mutate( @@ -870,7 +914,7 @@ mod imbalances { } } -impl, I: Instance> Currency for Module where +impl, I: 'static> Currency for Pallet where T::Balance: MaybeSerializeDeserialize + Debug { type Balance = T::Balance; @@ -978,7 +1022,7 @@ impl, I: Instance> Currency for Module where // TODO: This is over-conservative. There may now be other providers, and this module // may not even be a provider. 
let allow_death = existence_requirement == ExistenceRequirement::AllowDeath; - let allow_death = allow_death && !system::Module::::is_provider_required(transactor); + let allow_death = allow_death && !system::Pallet::::is_provider_required(transactor); ensure!(allow_death || from_account.free >= ed, Error::::KeepAlive); Ok(()) @@ -1156,7 +1200,7 @@ impl, I: Instance> Currency for Module where } } -impl, I: Instance> ReservableCurrency for Module where +impl, I: 'static> ReservableCurrency for Pallet where T::Balance: MaybeSerializeDeserialize + Debug { /// Check if `who` can reserve `value` from their free balance. @@ -1295,7 +1339,7 @@ impl, I: Instance> ReservableCurrency for Module, I: Instance> LockableCurrency for Module +impl, I: 'static> LockableCurrency for Pallet where T::Balance: MaybeSerializeDeserialize + Debug { From bb0618ba29a7b747af5a5eaedc47c6f5e4c85d2e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 13:48:11 +0000 Subject: [PATCH 049/503] Fix some errors --- frame/balances/src/lib.rs | 48 +++++++++++++++++++-------------------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 210c0772259dd..9d2a5f6dc2fa3 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -160,7 +160,7 @@ use sp_std::prelude::*; use sp_std::{cmp, result, mem, fmt::Debug, ops::BitOr}; use codec::{Codec, Encode, Decode}; use frame_support::{ - StorageValue, Parameter, + ensure, Parameter, traits::{ Currency, OnUnbalanced, TryDrop, StoredMap, WithdrawReasons, LockIdentifier, LockableCurrency, ExistenceRequirement, @@ -193,13 +193,13 @@ pub mod pallet { pub trait Config: frame_system::Config { /// The balance of an account. type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + - MaybeSerializeDeserialize + Debug; + MaybeSerializeDeserialize + Debug; /// Handler for the unbalanced reduction when removing a dust account. 
type DustRemoval: OnUnbalanced>; /// The overarching event type. - type Event: From> + Into<::Event>; + type Event: From> + IsType<::Event>; /// The minimum amount required to keep an account open. #[pallet::constant] @@ -283,8 +283,9 @@ pub mod pallet { /// - Killing: 35.11 µs /// - DB Weight: 1 Read, 1 Write to `who` /// # - #[weight(T::WeightInfo::set_balance_creating() // Creates a new account. - .max(T::WeightInfo::set_balance_killing()) // Kills an existing account. + #[pallet::weight( + T::WeightInfo::set_balance_creating() // Creates a new account. + .max(T::WeightInfo::set_balance_killing()) // Kills an existing account. )] fn set_balance( origin: OriginFor, @@ -318,7 +319,7 @@ pub mod pallet { (account.free, account.reserved) })?; - Self::deposit_event(RawEvent::BalanceSet(who, free, reserved)); + Self::deposit_event(Event::BalanceSet(who, free, reserved)); Ok(().into()) } @@ -368,9 +369,8 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId")] - #[pallet::metadata(T::Balance = "Balance")] - pub enum Event, I: 'static> { + #[pallet::metadata(T::AccountId = "AccountId", T::Balance = "Balance")] + pub enum Event, I: 'static = ()> { /// An account was created with some free balance. 
\[account, free_balance\] Endowed(T::AccountId, T::Balance), /// An account was removed whose balance was non-zero but below ExistentialDeposit, @@ -393,7 +393,7 @@ pub mod pallet { } #[pallet::error] - pub enum Error { + pub enum Error { /// Vesting balance too high to send value VestingBalance, /// Account liquidity restrictions prevent withdrawal @@ -459,14 +459,14 @@ pub mod pallet { #[pallet::genesis_build] impl, I: 'static> GenesisBuild for GenesisConfig { fn build(&self) { - let total = config.balances + let total = self.balances .iter() .fold(Zero::zero(), |acc: T::Balance, &(_, n)| acc + n); >::put(total); - ::put(Releases::V2_0_0); + >::put(Releases::V2_0_0); - for (_, balance) in &config.balances { + for (_, balance) in &self.balances { assert!( *balance >= >::ExistentialDeposit::get(), "the balance of any account should always be at least the existential deposit.", @@ -474,11 +474,11 @@ pub mod pallet { } // ensure no duplicates exist. - let endowed_accounts = config.balances.iter().map(|(x, _)| x).cloned().collect::>(); + let endowed_accounts = self.balances.iter().map(|(x, _)| x).cloned().collect::>(); - assert!(endowed_accounts.len() == config.balances.len(), "duplicate balances in genesis."); + assert!(endowed_accounts.len() == self.balances.len(), "duplicate balances in genesis."); - for &(ref who, free) in config.balances.iter() { + for &(ref who, free) in self.balances.iter() { assert!(T::AccountStore::insert(who, AccountData { free, ..Default::default() }).is_ok()); } } @@ -659,7 +659,7 @@ impl, I: 'static> Pallet { if total < T::ExistentialDeposit::get() { if !total.is_zero() { T::DustRemoval::on_unbalanced(NegativeImbalance::new(total)); - Self::deposit_event(RawEvent::DustLost(who.clone(), total)); + Self::deposit_event(Event::DustLost(who.clone(), total)); } None } else { @@ -705,7 +705,7 @@ impl, I: 'static> Pallet { }) }).map(|(maybe_endowed, result)| { if let Some(endowed) = maybe_endowed { - 
Self::deposit_event(RawEvent::Endowed(who.clone(), endowed)); + Self::deposit_event(Event::Endowed(who.clone(), endowed)); } result }) @@ -762,8 +762,8 @@ impl, I: 'static> Pallet { // of the inner member. mod imbalances { use super::{ - result, DefaultInstance, Imbalance, Config, Zero, Instance, Saturating, - StorageValue, TryDrop, RuntimeDebug, + result, Imbalance, Config, Zero, Saturating, + TryDrop, RuntimeDebug, }; use sp_std::mem; @@ -1030,7 +1030,7 @@ impl, I: 'static> Currency for Pallet where })?; // Emit transfer event. - Self::deposit_event(RawEvent::Transfer(transactor.clone(), dest.clone(), value)); + Self::deposit_event(Event::Transfer(transactor.clone(), dest.clone(), value)); Ok(()) } @@ -1231,7 +1231,7 @@ impl, I: 'static> ReservableCurrency for Pallet Self::ensure_can_withdraw(&who, value.clone(), WithdrawReasons::RESERVE, account.free) })?; - Self::deposit_event(RawEvent::Reserved(who.clone(), value)); + Self::deposit_event(Event::Reserved(who.clone(), value)); Ok(()) } @@ -1259,7 +1259,7 @@ impl, I: 'static> ReservableCurrency for Pallet } }; - Self::deposit_event(RawEvent::Unreserved(who.clone(), actual.clone())); + Self::deposit_event(Event::Unreserved(who.clone(), actual.clone())); value - actual } @@ -1334,7 +1334,7 @@ impl, I: 'static> ReservableCurrency for Pallet }) })?; - Self::deposit_event(RawEvent::ReserveRepatriated(slashed.clone(), beneficiary.clone(), actual, status)); + Self::deposit_event(Event::ReserveRepatriated(slashed.clone(), beneficiary.clone(), actual, status)); Ok(value - actual) } } From b59a92c7ee53690b85c759fb5570cceb9fc835d5 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 15:18:22 +0000 Subject: [PATCH 050/503] Remove unused imports --- frame/balances/src/lib.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 9d2a5f6dc2fa3..ec1f79ebe4bef 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ 
-160,7 +160,7 @@ use sp_std::prelude::*; use sp_std::{cmp, result, mem, fmt::Debug, ops::BitOr}; use codec::{Codec, Encode, Decode}; use frame_support::{ - ensure, Parameter, + ensure, traits::{ Currency, OnUnbalanced, TryDrop, StoredMap, WithdrawReasons, LockIdentifier, LockableCurrency, ExistenceRequirement, @@ -168,16 +168,14 @@ use frame_support::{ ExistenceRequirement::AllowDeath, BalanceStatus as Status, } }; -#[cfg(feature = "std")] -use frame_support::traits::GenesisBuild; use sp_runtime::{ RuntimeDebug, DispatchResult, DispatchError, traits::{ - Zero, AtLeast32BitUnsigned, StaticLookup, Member, CheckedAdd, CheckedSub, + Zero, AtLeast32BitUnsigned, StaticLookup, CheckedAdd, CheckedSub, MaybeSerializeDeserialize, Saturating, Bounded, StoredMapError, }, }; -use frame_system::{self as system, ensure_signed, ensure_root}; +use frame_system as system; pub use self::imbalances::{PositiveImbalance, NegativeImbalance}; pub use weights::WeightInfo; From 88aaa9246f86f3aae829bb33fe12fbe43d388d7a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 15:22:14 +0000 Subject: [PATCH 051/503] Formatting and removing some todos --- frame/balances/src/lib.rs | 37 +++++++++++++++++++++++-------------- 1 file changed, 23 insertions(+), 14 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index ec1f79ebe4bef..b23514e626238 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -255,7 +255,7 @@ pub mod pallet { pub fn transfer( origin: OriginFor, dest: ::Source, - #[pallet::compact] value: T::Balance + #[pallet::compact] value: T::Balance, ) -> DispatchResultWithPostInfo { let transactor = ensure_signed(origin)?; let dest = T::Lookup::lookup(dest)?; @@ -289,7 +289,7 @@ pub mod pallet { origin: OriginFor, who: ::Source, #[pallet::compact] new_free: T::Balance, - #[pallet::compact] new_reserved: T::Balance + #[pallet::compact] new_reserved: T::Balance, ) -> DispatchResultWithPostInfo { ensure_root(origin)?; let who = 
T::Lookup::lookup(who)?; @@ -332,7 +332,7 @@ pub mod pallet { origin: OriginFor, source: ::Source, dest: ::Source, - #[pallet::compact] value: T::Balance + #[pallet::compact] value: T::Balance, ) -> DispatchResultWithPostInfo { ensure_root(origin)?; let source = T::Lookup::lookup(source)?; @@ -356,7 +356,7 @@ pub mod pallet { pub fn transfer_keep_alive( origin: OriginFor, dest: ::Source, - #[pallet::compact] value: T::Balance + #[pallet::compact] value: T::Balance, ) -> DispatchResultWithPostInfo { let transactor = ensure_signed(origin)?; let dest = T::Lookup::lookup(dest)?; @@ -410,12 +410,6 @@ pub mod pallet { DeadAccount, } - // #[pallet::origin] - // TODO_ORIGIN - - // #[pallet::validate_unsigned] - // TODO_VALIDATE_UNSIGNED - /// The total units issued in the system. #[pallet::storage] #[pallet::getter(fn total_issuance)] @@ -425,20 +419,35 @@ pub mod pallet { /// /// NOTE: This is only used in the case that this module is used to store balances. #[pallet::storage] - pub type Account, I: 'static = ()> = StorageMap<_, Blake2_128Concat, T::AccountId, AccountData, ValueQuery>; + pub type Account, I: 'static = ()> = StorageMap< + _, + Blake2_128Concat, + T::AccountId, + AccountData, + ValueQuery + >; /// Any liquidity locks on some account balances. /// NOTE: Should only be accessed when setting, changing and freeing a lock. #[pallet::storage] #[pallet::getter(fn locks)] - pub type Locks, I: 'static = ()> = StorageMap<_, Blake2_128Concat, T::AccountId, Vec>, ValueQuery>; + pub type Locks, I: 'static = ()> = StorageMap< + _, + Blake2_128Concat, + T::AccountId, + Vec>, + ValueQuery + >; /// Storage version of the pallet. /// /// This is set to v2.0.0 for new networks. 
#[pallet::storage] - pub(super) type StorageVersion, I: 'static = ()> = StorageValue<_, Releases, ValueQuery>; - + pub(super) type StorageVersion, I: 'static = ()> = StorageValue< + _, + Releases, + ValueQuery + >; #[pallet::genesis_config] pub struct GenesisConfig, I: 'static = ()> { From 81150476820f89006fa0f7ecc2c90fb10e20dbbf Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 15:23:44 +0000 Subject: [PATCH 052/503] Delete Subtrait --- frame/balances/src/lib.rs | 28 ---------------------------- 1 file changed, 28 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index b23514e626238..d2fc1c97ed80f 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -492,34 +492,6 @@ pub mod pallet { } } -// todo: do we need the subtrait? -// pub trait Subtrait: frame_system::Config { -// /// The balance of an account. -// type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + -// MaybeSerializeDeserialize + Debug; -// -// /// The minimum amount required to keep an account open. -// type ExistentialDeposit: Get; -// -// /// The means of storing the balances of an account. -// type AccountStore: StoredMap>; -// -// /// Weight information for the extrinsics in this pallet. -// type WeightInfo: WeightInfo; -// -// /// The maximum number of locks that should exist on an account. -// /// Not strictly enforced, but used for weight estimation. -// type MaxLocks: Get; -// } -// -// impl, I: 'static> Subtrait for T { -// type Balance = T::Balance; -// type ExistentialDeposit = T::ExistentialDeposit; -// type AccountStore = T::AccountStore; -// type WeightInfo = >::WeightInfo; -// type MaxLocks = T::MaxLocks; -// } - /// Simplified reasons for withdrawing balance. 
#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] pub enum Reasons { From 9886c96188d11799a925170717872bc7ad6afd91 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 15:40:53 +0000 Subject: [PATCH 053/503] Add genesis builder impls for tests --- frame/balances/src/lib.rs | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index d2fc1c97ed80f..c7eaf3a14cfb7 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -168,6 +168,8 @@ use frame_support::{ ExistenceRequirement::AllowDeath, BalanceStatus as Status, } }; +#[cfg(feature = "std")] +use frame_support::traits::GenesisBuild; use sp_runtime::{ RuntimeDebug, DispatchResult, DispatchError, traits::{ @@ -492,6 +494,26 @@ pub mod pallet { } } +#[cfg(feature = "std")] +impl GenesisConfig { + /// Direct implementation of `GenesisBuild::build_storage`. + /// + /// Kept in order not to break dependency. + pub fn build_storage(&self) -> Result { + >::build_storage(self) + } + + /// Direct implementation of `GenesisBuild::assimilate_storage`. + /// + /// Kept in order not to break dependency. + pub fn assimilate_storage( + &self, + storage: &mut sp_runtime::Storage + ) -> Result<(), String> { + >::assimilate_storage(self, storage) + } +} + /// Simplified reasons for withdrawing balance. 
#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] pub enum Reasons { From 58c981f02a44b1d9818e8b56ffdb1c1a4da3df9d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 15:52:12 +0000 Subject: [PATCH 054/503] Fix GenesisConfig impl --- frame/balances/src/lib.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index c7eaf3a14cfb7..e551139f87f3c 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -495,22 +495,22 @@ pub mod pallet { } #[cfg(feature = "std")] -impl GenesisConfig { +impl, I: 'static> GenesisConfig { /// Direct implementation of `GenesisBuild::build_storage`. /// /// Kept in order not to break dependency. - pub fn build_storage(&self) -> Result { - >::build_storage(self) + pub fn build_storage(&self) -> Result { + >::build_storage(self) } /// Direct implementation of `GenesisBuild::assimilate_storage`. /// /// Kept in order not to break dependency. - pub fn assimilate_storage( + pub fn assimilate_storage( &self, storage: &mut sp_runtime::Storage ) -> Result<(), String> { - >::assimilate_storage(self, storage) + >::assimilate_storage(self, storage) } } From 5a7f0770eaffa3b0d711e2663035fdc8e5d33d8d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 16:04:17 +0000 Subject: [PATCH 055/503] Make set_balance visible to tests, rename RawEvent to Event --- frame/balances/src/lib.rs | 2 +- frame/balances/src/tests_local.rs | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index e551139f87f3c..29629dbc84be1 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -287,7 +287,7 @@ pub mod pallet { T::WeightInfo::set_balance_creating() // Creates a new account. .max(T::WeightInfo::set_balance_killing()) // Kills an existing account. 
)] - fn set_balance( + pub(crate) fn set_balance( origin: OriginFor, who: ::Source, #[pallet::compact] new_free: T::Balance, diff --git a/frame/balances/src/tests_local.rs b/frame/balances/src/tests_local.rs index 762ebe871b3e8..ab294ece9777b 100644 --- a/frame/balances/src/tests_local.rs +++ b/frame/balances/src/tests_local.rs @@ -171,8 +171,8 @@ fn emit_events_with_no_existential_deposit_suicide_with_dust() { events(), [ Event::system(system::Event::NewAccount(1)), - Event::balances(RawEvent::Endowed(1, 100)), - Event::balances(RawEvent::BalanceSet(1, 100, 0)), + Event::balances(Event::Endowed(1, 100)), + Event::balances(Event::BalanceSet(1, 100, 0)), ] ); @@ -186,7 +186,7 @@ fn emit_events_with_no_existential_deposit_suicide_with_dust() { assert_eq!( events(), [ - Event::balances(RawEvent::DustLost(1, 1)), + Event::balances(Event::DustLost(1, 1)), Event::system(system::Event::KilledAccount(1)) ] ); From 33f5d6fdffc7c2cc481efa1dd41fa6e6d4e87f9d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 16:27:14 +0000 Subject: [PATCH 056/503] Fix tests with Event rename etc. --- frame/balances/src/tests.rs | 20 ++++++++++---------- frame/balances/src/tests_local.rs | 6 +++--- frame/system/src/lib.rs | 2 +- frame/transaction-payment/src/lib.rs | 2 +- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/frame/balances/src/tests.rs b/frame/balances/src/tests.rs index 7a1b57a7b4db1..007f7f9a00033 100644 --- a/frame/balances/src/tests.rs +++ b/frame/balances/src/tests.rs @@ -40,7 +40,7 @@ macro_rules! decl_tests { use crate::*; use sp_runtime::{FixedPointNumber, traits::{SignedExtension, BadOrigin}}; use frame_support::{ - assert_noop, assert_storage_noop, assert_ok, assert_err, + assert_noop, assert_storage_noop, assert_ok, assert_err, StorageValue, traits::{ LockableCurrency, LockIdentifier, WithdrawReasons, Currency, ReservableCurrency, ExistenceRequirement::AllowDeath @@ -485,7 +485,7 @@ macro_rules! 
decl_tests { assert_ok!(Balances::repatriate_reserved(&1, &2, 41, Status::Free), 0); assert_eq!( last_event(), - Event::balances(RawEvent::ReserveRepatriated(1, 2, 41, Status::Free)), + Event::balances(balances::Event::ReserveRepatriated(1, 2, 41, Status::Free)), ); assert_eq!(Balances::reserved_balance(1), 69); assert_eq!(Balances::free_balance(1), 0); @@ -704,7 +704,7 @@ macro_rules! decl_tests { assert_eq!( last_event(), - Event::balances(RawEvent::Reserved(1, 10)), + Event::balances(balances::Event::Reserved(1, 10)), ); System::set_block_number(3); @@ -712,7 +712,7 @@ macro_rules! decl_tests { assert_eq!( last_event(), - Event::balances(RawEvent::Unreserved(1, 5)), + Event::balances(balances::Event::Unreserved(1, 5)), ); System::set_block_number(4); @@ -721,7 +721,7 @@ macro_rules! decl_tests { // should only unreserve 5 assert_eq!( last_event(), - Event::balances(RawEvent::Unreserved(1, 5)), + Event::balances(balances::Event::Unreserved(1, 5)), ); }); } @@ -738,8 +738,8 @@ macro_rules! decl_tests { events(), [ Event::system(system::Event::NewAccount(1)), - Event::balances(RawEvent::Endowed(1, 100)), - Event::balances(RawEvent::BalanceSet(1, 100, 0)), + Event::balances(balances::Event::Endowed(1, 100)), + Event::balances(balances::Event::BalanceSet(1, 100, 0)), ] ); @@ -748,7 +748,7 @@ macro_rules! decl_tests { assert_eq!( events(), [ - Event::balances(RawEvent::DustLost(1, 99)), + Event::balances(balances::Event::DustLost(1, 99)), Event::system(system::Event::KilledAccount(1)) ] ); @@ -767,8 +767,8 @@ macro_rules! 
decl_tests { events(), [ Event::system(system::Event::NewAccount(1)), - Event::balances(RawEvent::Endowed(1, 100)), - Event::balances(RawEvent::BalanceSet(1, 100, 0)), + Event::balances(balances::Event::Endowed(1, 100)), + Event::balances(balances::Event::BalanceSet(1, 100, 0)), ] ); diff --git a/frame/balances/src/tests_local.rs b/frame/balances/src/tests_local.rs index ab294ece9777b..5d6bbd8384e4a 100644 --- a/frame/balances/src/tests_local.rs +++ b/frame/balances/src/tests_local.rs @@ -171,8 +171,8 @@ fn emit_events_with_no_existential_deposit_suicide_with_dust() { events(), [ Event::system(system::Event::NewAccount(1)), - Event::balances(Event::Endowed(1, 100)), - Event::balances(Event::BalanceSet(1, 100, 0)), + Event::balances(balances::Event::Endowed(1, 100)), + Event::balances(balances::Event::BalanceSet(1, 100, 0)), ] ); @@ -186,7 +186,7 @@ fn emit_events_with_no_existential_deposit_suicide_with_dust() { assert_eq!( events(), [ - Event::balances(Event::DustLost(1, 1)), + Event::balances(balances::Event::DustLost(1, 1)), Event::system(system::Event::KilledAccount(1)) ] ); diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index cdb26623734fd..b9595df5a0c01 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -277,7 +277,7 @@ pub mod pallet { #[pallet::call] impl Pallet { - /// A dispatch that will fill the block weight up to the given ratio. + /// A dispatch that will fill the block weight up to the given ratio. // TODO: This should only be available for testing, rather than in general usage, but // that's not possible at present (since it's within the pallet macro). 
#[pallet::weight(*_ratio * T::BlockWeights::get().max_block)] diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index 7521fcd80bf0a..95c885602bba2 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -1165,7 +1165,7 @@ mod tests { assert_eq!(Balances::free_balance(2), 0); // Transfer Event assert!(System::events().iter().any(|event| { - event.event == Event::pallet_balances(pallet_balances::RawEvent::Transfer(2, 3, 80)) + event.event == Event::pallet_balances(pallet_balances::Event::Transfer(2, 3, 80)) })); // Killed Event assert!(System::events().iter().any(|event| { From 284d8bed527ffbf0c1cbf3d091df52ef31061fb3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 16:29:06 +0000 Subject: [PATCH 057/503] More test RawEvent renames --- frame/contracts/src/tests.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/frame/contracts/src/tests.rs b/frame/contracts/src/tests.rs index 96bcf99bf8e81..6a5e5b529a485 100644 --- a/frame/contracts/src/tests.rs +++ b/frame/contracts/src/tests.rs @@ -481,7 +481,7 @@ fn instantiate_and_call_and_deposit_event() { EventRecord { phase: Phase::Initialization, event: MetaEvent::balances( - pallet_balances::RawEvent::Endowed(ALICE, 1_000_000) + pallet_balances::Event::Endowed(ALICE, 1_000_000) ), topics: vec![], }, @@ -498,14 +498,14 @@ fn instantiate_and_call_and_deposit_event() { EventRecord { phase: Phase::Initialization, event: MetaEvent::balances( - pallet_balances::RawEvent::Endowed(addr.clone(), subsistence * 3) + pallet_balances::Event::Endowed(addr.clone(), subsistence * 3) ), topics: vec![], }, EventRecord { phase: Phase::Initialization, event: MetaEvent::balances( - pallet_balances::RawEvent::Transfer(ALICE, addr.clone(), subsistence * 3) + pallet_balances::Event::Transfer(ALICE, addr.clone(), subsistence * 3) ), topics: vec![], }, @@ -658,7 +658,7 @@ fn test_set_rent_code_and_hash() { }, EventRecord 
{ phase: Phase::Initialization, - event: MetaEvent::balances(pallet_balances::RawEvent::Endowed( + event: MetaEvent::balances(pallet_balances::Event::Endowed( ALICE, 1_000_000 )), topics: vec![], @@ -1240,7 +1240,7 @@ fn restoration(test_different_storage: bool, test_restore_to_with_dirty_storage: }, EventRecord { phase: Phase::Initialization, - event: MetaEvent::balances(pallet_balances::RawEvent::Endowed(ALICE, 1_000_000)), + event: MetaEvent::balances(pallet_balances::Event::Endowed(ALICE, 1_000_000)), topics: vec![], }, EventRecord { @@ -1395,7 +1395,7 @@ fn restoration(test_different_storage: bool, test_restore_to_with_dirty_storage: }, EventRecord { phase: Phase::Initialization, - event: MetaEvent::balances(pallet_balances::RawEvent::Endowed(CHARLIE, 1_000_000)), + event: MetaEvent::balances(pallet_balances::Event::Endowed(CHARLIE, 1_000_000)), topics: vec![], }, EventRecord { @@ -1405,13 +1405,13 @@ fn restoration(test_different_storage: bool, test_restore_to_with_dirty_storage: }, EventRecord { phase: Phase::Initialization, - event: MetaEvent::balances(pallet_balances::RawEvent::Endowed(addr_django.clone(), 30_000)), + event: MetaEvent::balances(pallet_balances::Event::Endowed(addr_django.clone(), 30_000)), topics: vec![], }, EventRecord { phase: Phase::Initialization, event: MetaEvent::balances( - pallet_balances::RawEvent::Transfer(CHARLIE, addr_django.clone(), 30_000) + pallet_balances::Event::Transfer(CHARLIE, addr_django.clone(), 30_000) ), topics: vec![], }, From 8f25812a8c7ce13f0b5cc6e10a1fb93dc33822e9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 20 Jan 2021 16:35:16 +0000 Subject: [PATCH 058/503] Even more RawEvent renames --- bin/node/executor/tests/basic.rs | 8 ++++---- bin/node/executor/tests/fees.rs | 1 - 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/bin/node/executor/tests/basic.rs b/bin/node/executor/tests/basic.rs index f007ba41ccc61..5c9943612bc21 100644 --- a/bin/node/executor/tests/basic.rs +++ 
b/bin/node/executor/tests/basic.rs @@ -17,7 +17,7 @@ use codec::{Encode, Decode, Joiner}; use frame_support::{ - StorageValue, StorageMap, + StorageMap, traits::Currency, weights::{GetDispatchInfo, DispatchInfo, DispatchClass}, }; @@ -336,7 +336,7 @@ fn full_native_block_import_works() { }, EventRecord { phase: Phase::ApplyExtrinsic(1), - event: Event::pallet_balances(pallet_balances::RawEvent::Transfer( + event: Event::pallet_balances(pallet_balances::Event::Transfer( alice().into(), bob().into(), 69 * DOLLARS, @@ -389,7 +389,7 @@ fn full_native_block_import_works() { EventRecord { phase: Phase::ApplyExtrinsic(1), event: Event::pallet_balances( - pallet_balances::RawEvent::Transfer( + pallet_balances::Event::Transfer( bob().into(), alice().into(), 5 * DOLLARS, @@ -412,7 +412,7 @@ fn full_native_block_import_works() { EventRecord { phase: Phase::ApplyExtrinsic(2), event: Event::pallet_balances( - pallet_balances::RawEvent::Transfer( + pallet_balances::Event::Transfer( alice().into(), bob().into(), 15 * DOLLARS, diff --git a/bin/node/executor/tests/fees.rs b/bin/node/executor/tests/fees.rs index 9d83610b689de..2e92077c4ada3 100644 --- a/bin/node/executor/tests/fees.rs +++ b/bin/node/executor/tests/fees.rs @@ -17,7 +17,6 @@ use codec::{Encode, Joiner}; use frame_support::{ - StorageValue, traits::Currency, weights::{GetDispatchInfo, constants::ExtrinsicBaseWeight, IdentityFee, WeightToFeePolynomial}, }; From 6fe23138fbbc40b00bbc840cf3214da229005522 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 21 Jan 2021 15:51:08 +0000 Subject: [PATCH 059/503] Rename module to pallet in comments --- frame/balances/src/lib.rs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 29629dbc84be1..81ed784c4090a 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -17,7 +17,7 @@ //! # Balances Pallet //! -//! 
The Balances module provides functionality for handling accounts and balances. +//! The Balances pallet provides functionality for handling accounts and balances. //! //! - [`balances::Config`](./trait.Config.html) //! - [`Call`](./enum.Call.html) @@ -25,7 +25,7 @@ //! //! ## Overview //! -//! The Balances module provides functions for: +//! The Balances pallet provides functions for: //! //! - Getting and setting free balances. //! - Retrieving total, reserved and unreserved balances. @@ -43,7 +43,7 @@ //! fall below this, then the account is said to be dead; and it loses its functionality as well as any //! prior history and all information on it is removed from the chain's state. //! No account should ever have a total balance that is strictly between 0 and the existential -//! deposit (exclusive). If this ever happens, it indicates either a bug in this module or an +//! deposit (exclusive). If this ever happens, it indicates either a bug in this pallet or an //! erroneous raw mutation of storage. //! //! - **Total Issuance:** The total number of units in existence in a system. @@ -67,8 +67,8 @@ //! //! ### Implementations //! -//! The Balances module provides implementations for the following traits. If these traits provide the functionality -//! that you need, then you can avoid coupling with the Balances module. +//! The Balances pallet provides implementations for the following traits. If these traits provide the functionality +//! that you need, then you can avoid coupling with the Balances pallet. //! //! - [`Currency`](../frame_support/traits/trait.Currency.html): Functions for dealing with a //! fungible assets system. @@ -91,11 +91,11 @@ //! //! ## Usage //! -//! The following examples show how to use the Balances module in your custom module. +//! The following examples show how to use the Balances pallet in your custom pallet. //! //! ### Examples from the FRAME //! -//! 
The Contract module uses the `Currency` trait to handle gas payment, and its types inherit from `Currency`: +//! The Contract pallet uses the `Currency` trait to handle gas payment, and its types inherit from `Currency`: //! //! ``` //! use frame_support::traits::Currency; @@ -109,7 +109,7 @@ //! # fn main() {} //! ``` //! -//! The Staking module uses the `LockableCurrency` trait to lock a stash account's funds: +//! The Staking pallet uses the `LockableCurrency` trait to lock a stash account's funds: //! //! ``` //! use frame_support::traits::{WithdrawReasons, LockableCurrency}; @@ -141,7 +141,7 @@ //! //! ## Genesis config //! -//! The Balances module depends on the [`GenesisConfig`](./struct.GenesisConfig.html). +//! The Balances pallet depends on the [`GenesisConfig`](./struct.GenesisConfig.html). //! //! ## Assumptions //! @@ -419,7 +419,7 @@ pub mod pallet { /// The balance of an account. /// - /// NOTE: This is only used in the case that this module is used to store balances. + /// NOTE: This is only used in the case that this pallet is used to store balances. #[pallet::storage] pub type Account, I: 'static = ()> = StorageMap< _, @@ -977,7 +977,7 @@ impl, I: 'static> Currency for Pallet where // // # // Despite iterating over a list of locks, they are limited by the number of - // lock IDs, which means the number of runtime modules that intend to use and create locks. + // lock IDs, which means the number of runtime pallets that intend to use and create locks. // # fn ensure_can_withdraw( who: &T::AccountId, @@ -1020,7 +1020,7 @@ impl, I: 'static> Currency for Pallet where from_account.free, ).map_err(|_| Error::::LiquidityRestrictions)?; - // TODO: This is over-conservative. There may now be other providers, and this module + // TODO: This is over-conservative. There may now be other providers, and this pallet // may not even be a provider. 
let allow_death = existence_requirement == ExistenceRequirement::AllowDeath; let allow_death = allow_death && !system::Pallet::::is_provider_required(transactor); From c147734e3d8469aa1623a07b51d82cf48d95143b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 21 Jan 2021 16:38:40 +0000 Subject: [PATCH 060/503] Add PalletInfo impl to avoid storage collision, fixes tests --- frame/balances/src/tests_local.rs | 35 ++++++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/frame/balances/src/tests_local.rs b/frame/balances/src/tests_local.rs index 5d6bbd8384e4a..2fe45b0208ba7 100644 --- a/frame/balances/src/tests_local.rs +++ b/frame/balances/src/tests_local.rs @@ -30,6 +30,7 @@ use frame_support::traits::StorageMapShim; use frame_support::weights::{Weight, DispatchInfo, IdentityFee}; use crate::{GenesisConfig, Module, Config, decl_tests, tests::CallWithDispatchInfo}; use pallet_transaction_payment::CurrencyAdapter; +use std::any::TypeId; use frame_system as system; impl_outer_origin!{ pub enum Origin for Test {} } impl_outer_event! { pub enum Event for Test { system, balances, } } +/// Provides an implementation of `PalletInfo`. +/// +/// Usually this is generated by the `construct_runtime!` macro, but tests typically use `()`. +/// However the impl for `()` returns the same prefix "test" for all modules, which causes +/// collisions between pallets with storage items with the same name. For instance in this case +/// System and Balances both have a storage item called `Account`. +pub struct PalletInfo; + +impl frame_support::traits::PalletInfo for PalletInfo { + fn index() -> Option { + let type_id = TypeId::of::

(); + if type_id == TypeId::of::>() { + return Some(0) + } + if type_id == TypeId::of::>() { + return Some(1) + } + None + } + + fn name() -> Option<&'static str> { + let type_id = TypeId::of::

(); + if type_id == TypeId::of::>() { + return Some("System") + } + if type_id == TypeId::of::>() { + return Some("Balances") + } + None + } +} + // Workaround for https://github.com/rust-lang/rust/issues/26925 . Remove when sorted. #[derive(Clone, PartialEq, Eq, Debug)] pub struct Test; @@ -73,7 +106,7 @@ impl frame_system::Config for Test { type Event = Event; type BlockHashCount = BlockHashCount; type Version = (); - type PalletInfo = (); + type PalletInfo = PalletInfo; type AccountData = (); type OnNewAccount = (); type OnKilledAccount = (); From e3b697dceb89735d45a9e6e295d381513ac2b690 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 22 Jan 2021 11:24:33 +0000 Subject: [PATCH 061/503] Fix errors after merge --- frame/balances/src/lib.rs | 2 +- frame/balances/src/tests.rs | 13 +++++--- frame/balances/src/tests_composite.rs | 47 +++++++++++---------------- frame/balances/src/tests_local.rs | 2 +- frame/system/src/lib.rs | 4 +-- primitives/runtime/src/traits.rs | 8 ++--- 6 files changed, 35 insertions(+), 41 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 81ed784c4090a..010a39b2cdda9 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -193,7 +193,7 @@ pub mod pallet { pub trait Config: frame_system::Config { /// The balance of an account. type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + - MaybeSerializeDeserialize + Debug; + MaybeSerializeDeserialize + Debug + scale_info::TypeInfo; /// Handler for the unbalanced reduction when removing a dust account. 
type DustRemoval: OnUnbalanced>; diff --git a/frame/balances/src/tests.rs b/frame/balances/src/tests.rs index 80a95fef2cb43..007f7f9a00033 100644 --- a/frame/balances/src/tests.rs +++ b/frame/balances/src/tests.rs @@ -19,7 +19,7 @@ #![cfg(test)] -#[derive(Debug, codec::Encode, codec::Decode, Clone, Eq, PartialEq)] +#[derive(Debug)] pub struct CallWithDispatchInfo; impl sp_runtime::traits::Dispatchable for CallWithDispatchInfo { type Origin = (); @@ -28,8 +28,8 @@ impl sp_runtime::traits::Dispatchable for CallWithDispatchInfo { type PostInfo = frame_support::weights::PostDispatchInfo; fn dispatch(self, _origin: Self::Origin) - -> sp_runtime::DispatchResultWithInfo { - panic!("Do not use dummy implementation for dispatch."); + -> sp_runtime::DispatchResultWithInfo { + panic!("Do not use dummy implementation for dispatch."); } } @@ -52,7 +52,10 @@ macro_rules! decl_tests { const ID_1: LockIdentifier = *b"1 "; const ID_2: LockIdentifier = *b"2 "; - pub const CALL: &<$test as frame_system::Config>::Call = &Call::System(system::Call::remark(vec![])); + pub type System = frame_system::Module<$test>; + pub type Balances = Module<$test>; + + pub const CALL: &<$test as frame_system::Config>::Call = &$crate::tests::CallWithDispatchInfo; /// create a transaction info struct from weight. Handy to avoid building the whole struct. pub fn info_from_weight(w: Weight) -> DispatchInfo { @@ -623,7 +626,7 @@ macro_rules! 
decl_tests { fn cannot_set_genesis_value_below_ed() { ($existential_deposit).with(|v| *v.borrow_mut() = 11); let mut t = frame_system::GenesisConfig::default().build_storage::<$test>().unwrap(); - let _ = balances::GenesisConfig::<$test> { + let _ = GenesisConfig::<$test> { balances: vec![(1, 10)], }.assimilate_storage(&mut t).unwrap(); } diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index bc0f93f259f97..a7fae34d44fae 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -20,44 +20,35 @@ #![cfg(test)] use sp_runtime::{ - traits::{Block as _, IdentityLookup}, + traits::IdentityLookup, testing::Header, }; use sp_core::H256; use sp_io; -use frame_support::{parameter_types, StorageValue}; -use frame_support::traits::Get; +use frame_support::{impl_outer_origin, impl_outer_event, parameter_types}; use frame_support::weights::{Weight, DispatchInfo, IdentityFee}; use pallet_transaction_payment::CurrencyAdapter; -use std::cell::RefCell; -use crate::{Module, Config, decl_tests}; +use crate::{GenesisConfig, Module, Config, decl_tests, tests::CallWithDispatchInfo}; use frame_system as system; -use crate as balances; - -pub type Block = sp_runtime::generic::Block; -pub type UncheckedExtrinsic = sp_runtime::generic::UncheckedExtrinsic; - -frame_support::construct_runtime!( - pub enum Test where - Block = Block, - NodeBlock = Block, - UncheckedExtrinsic = UncheckedExtrinsic - { - System: system::{Module, Call, Event, Config}, - Balances: balances::{Module, Call, Event, Config}, - } -); +impl_outer_origin!{ + pub enum Origin for Test {} +} -thread_local! { - static EXISTENTIAL_DEPOSIT: RefCell = RefCell::new(0); +mod balances { + pub use crate::Event; } -pub struct ExistentialDeposit; -impl Get for ExistentialDeposit { - fn get() -> u64 { EXISTENTIAL_DEPOSIT.with(|v| *v.borrow()) } +impl_outer_event! 
{ + pub enum Event for Test { + system, + balances, + } } +// Workaround for https://github.com/rust-lang/rust/issues/26925 . Remove when sorted. +#[derive(Clone, PartialEq, Eq, Debug, scale_info::TypeInfo)] +pub struct Test; parameter_types! { pub const BlockHashCount: u64 = 250; pub BlockWeights: frame_system::limits::BlockWeights = @@ -72,7 +63,7 @@ impl frame_system::Config for Test { type Origin = Origin; type Index = u64; type BlockNumber = u64; - type Call = Call; + type Call = CallWithDispatchInfo; type Hash = H256; type Hashing = ::sp_runtime::traits::BlakeTwo256; type AccountId = u64; @@ -81,7 +72,7 @@ impl frame_system::Config for Test { type Event = Event; type BlockHashCount = BlockHashCount; type Version = (); - type PalletInfo = PalletInfo; + type PalletInfo = (); type AccountData = super::AccountData; type OnNewAccount = (); type OnKilledAccount = (); @@ -135,7 +126,7 @@ impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { self.set_associated_consts(); let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - balances::GenesisConfig:: { + GenesisConfig:: { balances: if self.monied { vec![ (1, 10 * self.existential_deposit), diff --git a/frame/balances/src/tests_local.rs b/frame/balances/src/tests_local.rs index 2fe45b0208ba7..4853e9ba9dd15 100644 --- a/frame/balances/src/tests_local.rs +++ b/frame/balances/src/tests_local.rs @@ -81,7 +81,7 @@ impl frame_support::traits::PalletInfo for PalletInfo { } // Workaround for https://github.com/rust-lang/rust/issues/26925 . Remove when sorted. -#[derive(Clone, PartialEq, Eq, Debug)] +#[derive(Clone, PartialEq, Eq, Debug, scale_info::TypeInfo)] pub struct Test; parameter_types! { pub const BlockHashCount: u64 = 250; diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index b9595df5a0c01..563f3d2e2ea17 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -173,7 +173,7 @@ pub mod pallet { /// with a sender account. 
type Index: Parameter + Member + MaybeSerialize + Debug + Default + MaybeDisplay + AtLeast32Bit - + Copy; + + Copy + scale_info::TypeInfo; /// The block number type used by the runtime. type BlockNumber: @@ -191,7 +191,7 @@ pub mod pallet { /// The user account identifier type for the runtime. type AccountId: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord - + Default; + + Default + scale_info::TypeInfo; /// Converting trait to take a source type and convert to `AccountId`. /// diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 11e93d8428e18..00fae6fe6abd6 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -203,7 +203,7 @@ pub trait Lookup { /// context. pub trait StaticLookup { /// Type to lookup from. - type Source: Codec + Clone + PartialEq + Debug; + type Source: Codec + Clone + PartialEq + Debug + scale_info::TypeInfo; /// Type to lookup into. type Target; /// Attempt a lookup. @@ -215,7 +215,7 @@ pub trait StaticLookup { /// A lookup implementation returning the input value. 
#[derive(Default)] pub struct IdentityLookup(PhantomData); -impl StaticLookup for IdentityLookup { +impl StaticLookup for IdentityLookup { type Source = T; type Target = T; fn lookup(x: T) -> Result { Ok(x) } @@ -232,8 +232,8 @@ impl Lookup for IdentityLookup { pub struct AccountIdLookup(PhantomData<(AccountId, AccountIndex)>); impl StaticLookup for AccountIdLookup where - AccountId: Codec + Clone + PartialEq + Debug, - AccountIndex: Codec + Clone + PartialEq + Debug, + AccountId: Codec + Clone + PartialEq + Debug + scale_info::TypeInfo + 'static, + AccountIndex: Codec + Clone + PartialEq + Debug + scale_info::TypeInfo + 'static, crate::MultiAddress: Codec, { type Source = crate::MultiAddress; From 3ae377204a9ff02a82e98297c4cbc309385cb98e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 22 Jan 2021 15:49:14 +0000 Subject: [PATCH 062/503] Add some TypeInfo annotations --- primitives/core/src/ecdsa.rs | 9 +++++++++ primitives/core/src/ed25519.rs | 2 +- primitives/core/src/sr25519.rs | 2 +- primitives/runtime/src/lib.rs | 2 +- primitives/runtime/src/multiaddress.rs | 2 +- 5 files changed, 13 insertions(+), 4 deletions(-) diff --git a/primitives/core/src/ecdsa.rs b/primitives/core/src/ecdsa.rs index 0f654f816c472..57ce89d91106e 100644 --- a/primitives/core/src/ecdsa.rs +++ b/primitives/core/src/ecdsa.rs @@ -230,6 +230,15 @@ impl sp_std::hash::Hash for Public { #[derive(Encode, Decode, PassByInner)] pub struct Signature(pub [u8; 65]); +// todo: remove this once https://github.com/paritytech/scale-info/pull/54 is merged, which +// introduces const generics for arrays and should support a 65 element array. 
+impl scale_info::TypeInfo for Signature { + type Identity = Self; + + fn type_info() -> scale_info::Type { + scale_info::TypeDefArray::new(65, scale_info::MetaType::new::()).into() + } +} impl sp_std::convert::TryFrom<&[u8]> for Signature { type Error = (); diff --git a/primitives/core/src/ed25519.rs b/primitives/core/src/ed25519.rs index 6589310931200..df8005e1d5e94 100644 --- a/primitives/core/src/ed25519.rs +++ b/primitives/core/src/ed25519.rs @@ -190,7 +190,7 @@ impl<'de> Deserialize<'de> for Public { } /// A signature (a 512-bit value). -#[derive(Encode, Decode, PassByInner)] +#[derive(Encode, Decode, PassByInner, scale_info::TypeInfo)] pub struct Signature(pub [u8; 64]); impl sp_std::convert::TryFrom<&[u8]> for Signature { diff --git a/primitives/core/src/sr25519.rs b/primitives/core/src/sr25519.rs index 392cad5a0b364..4a76cbe692401 100644 --- a/primitives/core/src/sr25519.rs +++ b/primitives/core/src/sr25519.rs @@ -189,7 +189,7 @@ impl<'de> Deserialize<'de> for Public { /// An Schnorrkel/Ristretto x25519 ("sr25519") signature. /// /// Instead of importing it for the local module, alias it to be available as a public type -#[derive(Encode, Decode, PassByInner)] +#[derive(Encode, Decode, PassByInner, scale_info::TypeInfo)] pub struct Signature(pub [u8; 64]); impl sp_std::convert::TryFrom<&[u8]> for Signature { diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index 29674fc19aa60..214414e2ccf22 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -174,7 +174,7 @@ pub type ConsensusEngineId = [u8; 4]; /// Signature verify that can work with any known signature types.. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Eq, PartialEq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(Eq, PartialEq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum MultiSignature { /// An Ed25519 signature. 
Ed25519(ed25519::Signature), diff --git a/primitives/runtime/src/multiaddress.rs b/primitives/runtime/src/multiaddress.rs index 34ec6d4b6b924..a472010082ec4 100644 --- a/primitives/runtime/src/multiaddress.rs +++ b/primitives/runtime/src/multiaddress.rs @@ -21,7 +21,7 @@ use codec::{Encode, Decode}; use sp_std::vec::Vec; /// A multi-format address wrapper for on-chain accounts. -#[derive(Encode, Decode, PartialEq, Eq, Clone, crate::RuntimeDebug, ::scale_info::TypeInfo)] +#[derive(Encode, Decode, PartialEq, Eq, Clone, crate::RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Hash))] pub enum MultiAddress { /// It's an account ID (pubkey). From daf6dcfcacf334bf9eb8fdd09b5812374b6fd1a4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 2 Feb 2021 09:44:37 +0000 Subject: [PATCH 063/503] Use scale-info decode feature branch --- Cargo.lock | 416 ++++++++++++++++----------- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- 12 files changed, 263 insertions(+), 175 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6dce67fc7ffdf..3ae4f219c28fa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -433,7 +433,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41262f11d771fd4a61aa3ce019fca363b4b6c282fca9da2a31186d3965a47a5c" dependencies = [ "either", - "radium", + "radium 0.3.0", +] + +[[package]] +name = "bitvec" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5011ffc90248764d7005b0e10c7294f5aa1bd87d9dd7248f4ad475b347c294d" +dependencies = [ + "funty", + "radium 0.6.2", + "tap", + "wyz", ] [[package]] @@ -567,6 +579,12 @@ version = 
"0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b0a5e3906bcbf133e33c1d4d95afc664ad37fbdb9f6568d8043e7ea8c27d93d3" +[[package]] +name = "byte-slice-cast" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65c1bf4a04a88c54f589125563643d773f3254b5c38571395e2b591c693bbc81" + [[package]] name = "byte-tools" version = "0.3.1" @@ -1480,7 +1498,7 @@ dependencies = [ "futures-timer 2.0.2", "log", "num-traits", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.9.0", "rand 0.6.5", ] @@ -1526,7 +1544,7 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" name = "fork-tree" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", ] [[package]] @@ -1547,7 +1565,7 @@ dependencies = [ "frame-system", "hex-literal", "linregress", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "paste 0.1.18", "sp-api", "sp-io", @@ -1565,7 +1583,7 @@ dependencies = [ "chrono", "frame-benchmarking", "handlebars", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-cli", "sc-client-db", "sc-executor", @@ -1589,7 +1607,7 @@ dependencies = [ "pallet-balances", "pallet-indices", "pallet-transaction-payment", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -1605,15 +1623,15 @@ version = "12.0.0" source = "git+https://github.com/paritytech/frame-metadata?branch=aj-chameleon#b1008c8f6032eb2ed27a590b28ac39356b7b480f" dependencies = [ "cfg-if 1.0.0", - "parity-scale-codec", - "scale-info", + "parity-scale-codec 1.3.6", + "scale-info 0.4.1", ] [[package]] name = "frame-metadata" version = "12.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-std", @@ -1631,11 +1649,11 @@ dependencies = [ "impl-trait-for-tuples 0.2.0", "log", "once_cell", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "paste 0.1.18", "pretty_assertions", - 
"scale-info", + "scale-info 0.5.0", "serde", "smallvec 1.5.0", "sp-api", @@ -1688,7 +1706,7 @@ dependencies = [ "frame-metadata 12.0.1", "frame-support", "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "pretty_assertions", "rustversion", "serde", @@ -1708,8 +1726,8 @@ dependencies = [ "criterion", "frame-support", "impl-trait-for-tuples 0.2.0", - "parity-scale-codec", - "scale-info", + "parity-scale-codec 1.3.6", + "scale-info 0.5.0", "serde", "sp-core", "sp-externalities", @@ -1727,7 +1745,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -1739,7 +1757,7 @@ dependencies = [ name = "frame-system-rpc-runtime-api" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", ] @@ -1783,6 +1801,12 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + [[package]] name = "futures" version = "0.1.30" @@ -2446,7 +2470,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1be51a921b067b0eaca2fad532d9400041561aa922221cc65f95a85641c6bf53" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", ] [[package]] @@ -3779,7 +3803,7 @@ dependencies = [ "pallet-staking", "pallet-timestamp", "pallet-transaction-payment", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "platforms", "rand 0.7.3", @@ -3849,7 +3873,7 @@ dependencies = [ "pallet-timestamp", "pallet-transaction-payment", "pallet-treasury", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-executor", "sp-application-crypto", "sp-core", @@ -3870,7 +3894,7 @@ 
version = "0.8.0" dependencies = [ "derive_more", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-cli", "sc-client-api", "sc-service", @@ -3885,7 +3909,7 @@ name = "node-primitives" version = "2.0.0" dependencies = [ "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "pretty_assertions", "sp-application-crypto", "sp-core", @@ -3988,8 +4012,8 @@ dependencies = [ "pallet-treasury", "pallet-utility", "pallet-vesting", - "parity-scale-codec", - "scale-info", + "parity-scale-codec 1.3.6", + "scale-info 0.5.0", "serde", "sp-api", "sp-authority-discovery", @@ -4067,8 +4091,8 @@ dependencies = [ "pallet-timestamp", "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", - "parity-scale-codec", - "scale-info", + "parity-scale-codec 1.3.6", + "scale-info 0.5.0", "serde", "sp-api", "sp-block-builder", @@ -4107,7 +4131,7 @@ dependencies = [ "pallet-timestamp", "pallet-transaction-payment", "pallet-treasury", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-block-builder", "sc-cli", "sc-client-api", @@ -4295,7 +4319,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4310,7 +4334,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4327,7 +4351,7 @@ dependencies = [ "lazy_static", "pallet-session", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "serde", "sp-application-crypto", @@ -4347,7 +4371,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-session", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4365,7 +4389,7 @@ dependencies = [ "frame-support", "frame-system", "impl-trait-for-tuples 0.2.0", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-authorship", 
"sp-core", "sp-inherents", @@ -4388,7 +4412,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4411,8 +4435,8 @@ dependencies = [ "frame-support", "frame-system", "pallet-transaction-payment", - "parity-scale-codec", - "scale-info", + "parity-scale-codec 1.3.6", + "scale-info 0.5.0", "serde", "sp-core", "sp-io", @@ -4429,7 +4453,7 @@ dependencies = [ "frame-system", "pallet-balances", "pallet-treasury", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4447,7 +4471,7 @@ dependencies = [ "frame-system", "hex-literal", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4469,7 +4493,7 @@ dependencies = [ "pallet-contracts-proc-macro", "pallet-randomness-collective-flip", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-wasm 0.41.0", "paste 1.0.3", "pretty_assertions", @@ -4491,7 +4515,7 @@ name = "pallet-contracts-primitives" version = "2.0.1" dependencies = [ "bitflags", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-runtime", "sp-std", ] @@ -4514,7 +4538,7 @@ dependencies = [ "jsonrpc-derive", "pallet-contracts-primitives", "pallet-contracts-rpc-runtime-api", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "serde_json", "sp-api", @@ -4529,7 +4553,7 @@ name = "pallet-contracts-rpc-runtime-api" version = "0.8.1" dependencies = [ "pallet-contracts-primitives", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", "sp-runtime", "sp-std", @@ -4545,7 +4569,7 @@ dependencies = [ "hex-literal", "pallet-balances", "pallet-scheduler", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4563,7 +4587,7 @@ dependencies = [ "frame-system", "hex-literal", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", 
"sp-core", "sp-io", @@ -4580,7 +4604,7 @@ dependencies = [ "frame-system", "hex-literal", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4598,8 +4622,8 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", - "scale-info", + "parity-scale-codec 1.3.6", + "scale-info 0.5.0", "serde", "sp-core", "sp-io", @@ -4614,7 +4638,7 @@ dependencies = [ "frame-support", "frame-system", "lite-json", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4629,7 +4653,7 @@ version = "2.0.1" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-core", "sp-io", "sp-runtime", @@ -4652,7 +4676,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-application-crypto", "sp-core", @@ -4674,7 +4698,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4691,7 +4715,7 @@ dependencies = [ "frame-system", "pallet-authorship", "pallet-session", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-application-crypto", "sp-core", @@ -4709,7 +4733,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4726,7 +4750,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-core", "sp-io", "sp-runtime", @@ -4739,7 +4763,7 @@ version = "2.0.1" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4757,7 +4781,7 @@ dependencies = [ "frame-support", "frame-system", "hex-literal", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", 
"sp-core", "sp-io", @@ -4773,7 +4797,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4788,7 +4812,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4802,7 +4826,7 @@ version = "2.0.0" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4817,7 +4841,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4842,7 +4866,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4860,7 +4884,7 @@ dependencies = [ "frame-system", "pallet-balances", "pallet-utility", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4874,7 +4898,7 @@ version = "2.0.1" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "safe-mix", "sp-core", "sp-io", @@ -4890,7 +4914,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4905,7 +4929,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4921,7 +4945,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -4938,7 +4962,7 @@ dependencies = [ "impl-trait-for-tuples 0.1.3", "lazy_static", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-application-crypto", "sp-core", @@ -4962,7 
+4986,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "rand 0.7.3", "serde", "sp-core", @@ -4979,7 +5003,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "rand_chacha 0.2.2", "serde", "sp-core", @@ -5001,7 +5025,7 @@ dependencies = [ "pallet-session", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "rand_chacha 0.2.2", "serde", @@ -5031,7 +5055,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-core", "sp-io", "sp-npos-elections", @@ -5056,7 +5080,7 @@ version = "2.0.1" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -5070,7 +5094,7 @@ version = "2.0.0" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-core", "sp-io", "sp-runtime", @@ -5084,8 +5108,8 @@ dependencies = [ "frame-support", "frame-system", "impl-trait-for-tuples 0.2.0", - "parity-scale-codec", - "scale-info", + "parity-scale-codec 1.3.6", + "scale-info 0.5.0", "serde", "sp-core", "sp-inherents", @@ -5104,7 +5128,7 @@ dependencies = [ "frame-system", "pallet-balances", "pallet-treasury", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -5120,8 +5144,8 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", - "scale-info", + "parity-scale-codec 1.3.6", + "scale-info 0.5.0", "serde", "serde_json", "smallvec 1.5.0", @@ -5140,7 +5164,7 @@ dependencies = [ "jsonrpc-core-client", "jsonrpc-derive", "pallet-transaction-payment-rpc-runtime-api", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", "sp-blockchain", "sp-core", @@ -5153,7 
+5177,7 @@ name = "pallet-transaction-payment-rpc-runtime-api" version = "2.0.1" dependencies = [ "pallet-transaction-payment", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", "sp-runtime", ] @@ -5167,7 +5191,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples 0.2.0", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -5184,7 +5208,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -5202,7 +5226,7 @@ dependencies = [ "frame-system", "hex-literal", "pallet-balances", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -5250,9 +5274,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79602888a81ace83e3d1d4b2873286c1f5f906c84db667594e8db8da3506c383" dependencies = [ "arrayvec 0.5.2", - "bitvec", - "byte-slice-cast", - "parity-scale-codec-derive", + "bitvec 0.17.4", + "byte-slice-cast 0.3.5", + "parity-scale-codec-derive 1.2.2", + "serde", +] + +[[package]] +name = "parity-scale-codec" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75c823fdae1bb5ff5708ee61a62697e6296175dc671710876871c853f48592b3" +dependencies = [ + "arrayvec 0.5.2", + "bitvec 0.20.1", + "byte-slice-cast 1.0.0", + "parity-scale-codec-derive 2.0.0", "serde", ] @@ -5268,6 +5305,18 @@ dependencies = [ "syn", ] +[[package]] +name = "parity-scale-codec-derive" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9029e65297c7fd6d7013f0579e193ec2b34ae78eabca854c9417504ad8a2d214" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "parity-send-wrapper" version = "0.1.0" @@ -5940,6 +5989,12 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"def50a86306165861203e7f84ecffbbdfdea79f0e51039b33de1e952358c47ac" +[[package]] +name = "radium" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" + [[package]] name = "rand" version = "0.3.23" @@ -6486,7 +6541,7 @@ dependencies = [ "futures-timer 3.0.2", "libp2p", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "prost 0.7.0", "prost-build", "quickcheck", @@ -6513,7 +6568,7 @@ dependencies = [ "futures 0.3.9", "futures-timer 3.0.2", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-block-builder", "sc-client-api", @@ -6535,7 +6590,7 @@ dependencies = [ name = "sc-block-builder" version = "0.8.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-client-api", "sp-api", "sp-block-builder", @@ -6554,7 +6609,7 @@ name = "sc-chain-spec" version = "2.0.1" dependencies = [ "impl-trait-for-tuples 0.2.0", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-chain-spec-derive", "sc-consensus-babe", "sc-consensus-epochs", @@ -6590,7 +6645,7 @@ dependencies = [ "libp2p", "log", "names", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "rand 0.7.3", "regex", "rpassword", @@ -6629,7 +6684,7 @@ dependencies = [ "kvdb-memorydb", "lazy_static", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-executor", "sp-api", @@ -6666,7 +6721,7 @@ dependencies = [ "linked-hash-map", "log", "parity-db", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "parking_lot 0.11.1", "quickcheck", @@ -6707,7 +6762,7 @@ dependencies = [ "futures-timer 3.0.2", "getrandom 0.2.1", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-block-builder", "sc-client-api", @@ -6751,7 +6806,7 @@ dependencies = [ "num-bigint", "num-rational", "num-traits", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", 
"pdqselect", "rand 0.7.3", @@ -6826,7 +6881,7 @@ name = "sc-consensus-epochs" version = "0.8.1" dependencies = [ "fork-tree", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-client-api", "sp-blockchain", @@ -6844,7 +6899,7 @@ dependencies = [ "jsonrpc-core-client", "jsonrpc-derive", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-basic-authorship", "sc-client-api", @@ -6878,7 +6933,7 @@ dependencies = [ "futures 0.3.9", "futures-timer 3.0.2", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-client-api", "sp-api", @@ -6900,7 +6955,7 @@ dependencies = [ "futures 0.3.9", "futures-timer 3.0.2", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-client-api", "sc-telemetry", @@ -6942,7 +6997,7 @@ dependencies = [ "lazy_static", "libsecp256k1", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-wasm 0.41.0", "parking_lot 0.11.1", "paste 0.1.18", @@ -6977,7 +7032,7 @@ name = "sc-executor-common" version = "0.8.1" dependencies = [ "derive_more", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-wasm 0.41.0", "sp-allocator", "sp-core", @@ -6992,7 +7047,7 @@ name = "sc-executor-wasmi" version = "0.8.1" dependencies = [ "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-executor-common", "sp-allocator", "sp-core", @@ -7007,7 +7062,7 @@ version = "0.8.1" dependencies = [ "assert_matches", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-wasm 0.41.0", "pwasm-utils 0.14.0", "sc-executor-common", @@ -7031,7 +7086,7 @@ dependencies = [ "futures-timer 3.0.2", "linked-hash-map", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "pin-project 0.4.27", "rand 0.7.3", @@ -7078,7 +7133,7 @@ dependencies = [ "jsonrpc-pubsub", "lazy_static", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-block-builder", "sc-client-api", 
"sc-finality-grandpa", @@ -7103,7 +7158,7 @@ dependencies = [ "futures 0.3.9", "log", "num-traits", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "prost 0.6.1", "sc-client-api", @@ -7157,7 +7212,7 @@ version = "2.0.1" dependencies = [ "hash-db", "lazy_static", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-client-api", "sc-executor", @@ -7195,7 +7250,7 @@ dependencies = [ "log", "lru", "nohash-hasher", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "pin-project 0.4.27", "prost 0.7.0", @@ -7287,7 +7342,7 @@ dependencies = [ "lazy_static", "log", "num_cpus", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "rand 0.7.3", "sc-block-builder", @@ -7342,7 +7397,7 @@ dependencies = [ "jsonrpc-pubsub", "lazy_static", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-block-builder", "sc-cli", @@ -7383,7 +7438,7 @@ dependencies = [ "jsonrpc-derive", "jsonrpc-pubsub", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "serde", "serde_json", @@ -7441,7 +7496,7 @@ dependencies = [ "jsonrpc-pubsub", "lazy_static", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "parking_lot 0.11.1", "pin-project 0.4.27", @@ -7504,7 +7559,7 @@ dependencies = [ "futures 0.3.9", "hex-literal", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-block-builder", "sc-client-api", @@ -7536,7 +7591,7 @@ name = "sc-state-db" version = "0.8.1" dependencies = [ "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "parity-util-mem-derive", "parking_lot 0.11.1", @@ -7632,7 +7687,7 @@ dependencies = [ "futures 0.3.9", "linked-hash-map", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "parking_lot 0.11.1", "retain_mut", @@ -7657,7 +7712,7 @@ dependencies = [ "hex", "intervalier", "log", - 
"parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "parking_lot 0.11.1", "sc-block-builder", @@ -7686,8 +7741,19 @@ source = "git+https://github.com/paritytech/scale-info#8b7bd5e372abd6dd83efdecb7 dependencies = [ "cfg-if 1.0.0", "derive_more", - "parity-scale-codec", - "scale-info-derive", + "parity-scale-codec 1.3.6", + "scale-info-derive 0.2.1", +] + +[[package]] +name = "scale-info" +version = "0.5.0" +source = "git+https://github.com/paritytech/scale-info?branch=aj-decode-feature#dd2befe7e801f34b913975e33463f4341dbcf56a" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec 2.0.0", + "scale-info-derive 0.3.0", ] [[package]] @@ -7700,6 +7766,16 @@ dependencies = [ "syn", ] +[[package]] +name = "scale-info-derive" +version = "0.3.0" +source = "git+https://github.com/paritytech/scale-info?branch=aj-decode-feature#dd2befe7e801f34b913975e33463f4341dbcf56a" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" @@ -8087,7 +8163,7 @@ name = "sp-api" version = "2.0.1" dependencies = [ "hash-db", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api-proc-macro", "sp-core", "sp-runtime", @@ -8114,7 +8190,7 @@ name = "sp-api-test" version = "2.0.1" dependencies = [ "criterion", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "rustversion", "sc-block-builder", "sp-api", @@ -8132,7 +8208,7 @@ dependencies = [ name = "sp-application-crypto" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-core", "sp-io", @@ -8158,10 +8234,10 @@ dependencies = [ "criterion", "integer-sqrt", "num-traits", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "primitive-types", "rand 0.7.3", - "scale-info", + "scale-info 0.5.0", "serde", "serde_json", "sp-debug-derive", @@ -8183,7 +8259,7 @@ dependencies = [ name = "sp-authority-discovery" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", 
"sp-api", "sp-application-crypto", "sp-runtime", @@ -8194,7 +8270,7 @@ dependencies = [ name = "sp-authorship" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-inherents", "sp-runtime", "sp-std", @@ -8204,7 +8280,7 @@ dependencies = [ name = "sp-block-builder" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", "sp-inherents", "sp-runtime", @@ -8218,7 +8294,7 @@ dependencies = [ "futures 0.3.9", "log", "lru", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sp-api", "sp-consensus", @@ -8244,7 +8320,7 @@ dependencies = [ "futures-timer 3.0.2", "libp2p", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "serde", "sp-api", @@ -8266,7 +8342,7 @@ dependencies = [ name = "sp-consensus-aura" version = "0.8.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", "sp-application-crypto", "sp-inherents", @@ -8280,7 +8356,7 @@ name = "sp-consensus-babe" version = "0.8.1" dependencies = [ "merlin", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8298,7 +8374,7 @@ dependencies = [ name = "sp-consensus-pow" version = "0.8.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", "sp-core", "sp-runtime", @@ -8309,7 +8385,7 @@ dependencies = [ name = "sp-consensus-slots" version = "0.8.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-runtime", ] @@ -8317,7 +8393,7 @@ dependencies = [ name = "sp-consensus-vrf" version = "0.8.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "schnorrkel", "sp-core", "sp-runtime", @@ -8345,7 +8421,7 @@ dependencies = [ "log", "merlin", "num-traits", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "parking_lot 0.11.1", "pretty_assertions", @@ -8353,7 +8429,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", 
"regex", - "scale-info", + "scale-info 0.5.0", "schnorrkel", "secrecy", "serde", @@ -8395,7 +8471,7 @@ dependencies = [ name = "sp-election-providers" version = "2.0.0" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -8407,7 +8483,7 @@ name = "sp-externalities" version = "0.8.1" dependencies = [ "environmental", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-std", "sp-storage", ] @@ -8418,7 +8494,7 @@ version = "2.0.1" dependencies = [ "finality-grandpa", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-api", "sp-application-crypto", @@ -8432,7 +8508,7 @@ dependencies = [ name = "sp-inherents" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sp-core", "sp-std", @@ -8447,7 +8523,7 @@ dependencies = [ "hash-db", "libsecp256k1", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sp-core", "sp-externalities", @@ -8480,7 +8556,7 @@ dependencies = [ "derive_more", "futures 0.3.9", "merlin", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "rand 0.7.3", "rand_chacha 0.2.2", @@ -8494,7 +8570,7 @@ dependencies = [ name = "sp-npos-elections" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "rand 0.7.3", "serde", "sp-arithmetic", @@ -8520,7 +8596,7 @@ name = "sp-npos-elections-fuzzer" version = "2.0.0-alpha.5" dependencies = [ "honggfuzz", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "rand 0.7.3", "sp-npos-elections", "sp-runtime", @@ -8561,11 +8637,11 @@ dependencies = [ "hash256-std-hasher", "impl-trait-for-tuples 0.2.0", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "paste 0.1.18", "rand 0.7.3", - "scale-info", + "scale-info 0.5.0", "serde", "serde_json", "sp-application-crypto", @@ -8581,7 +8657,7 @@ name = "sp-runtime-interface" version = "2.0.1" 
dependencies = [ "impl-trait-for-tuples 0.2.0", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "primitive-types", "rustversion", "sp-core", @@ -8652,7 +8728,7 @@ name = "sp-sandbox" version = "0.8.1" dependencies = [ "assert_matches", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-core", "sp-io", "sp-std", @@ -8673,7 +8749,7 @@ dependencies = [ name = "sp-session" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", "sp-core", "sp-runtime", @@ -8685,7 +8761,7 @@ dependencies = [ name = "sp-staking" version = "2.0.1" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-runtime", "sp-std", ] @@ -8698,7 +8774,7 @@ dependencies = [ "hex-literal", "log", "num-traits", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "pretty_assertions", "rand 0.7.3", @@ -8723,7 +8799,7 @@ name = "sp-storage" version = "2.0.1" dependencies = [ "impl-serde", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "ref-cast", "serde", "sp-debug-derive", @@ -8735,7 +8811,7 @@ name = "sp-tasks" version = "2.0.0" dependencies = [ "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-core", "sp-externalities", "sp-io", @@ -8747,7 +8823,7 @@ dependencies = [ name = "sp-test-primitives" version = "2.0.0" dependencies = [ - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "serde", "sp-application-crypto", @@ -8760,7 +8836,7 @@ name = "sp-timestamp" version = "2.0.1" dependencies = [ "impl-trait-for-tuples 0.2.0", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-api", "sp-inherents", "sp-runtime", @@ -8773,7 +8849,7 @@ name = "sp-tracing" version = "2.0.1" dependencies = [ "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-std", "tracing", "tracing-core", @@ -8787,7 +8863,7 @@ dependencies = [ "derive_more", "futures 0.3.9", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-api", "sp-blockchain", @@ -8803,7 
+8879,7 @@ dependencies = [ "hash-db", "hex-literal", "memory-db", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-core", "sp-runtime", "sp-std", @@ -8829,7 +8905,7 @@ name = "sp-version" version = "2.0.1" dependencies = [ "impl-serde", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "serde", "sp-runtime", "sp-std", @@ -8840,7 +8916,7 @@ name = "sp-wasm-interface" version = "2.0.1" dependencies = [ "impl-trait-for-tuples 0.2.0", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sp-std", "wasmi", ] @@ -9016,7 +9092,7 @@ dependencies = [ "futures 0.3.9", "jsonrpc-client-transports", "jsonrpc-core", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-rpc-api", "serde", "sp-storage", @@ -9033,7 +9109,7 @@ dependencies = [ "jsonrpc-core-client", "jsonrpc-derive", "log", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-client-api", "sc-rpc-api", "sc-transaction-pool", @@ -9069,7 +9145,7 @@ dependencies = [ "futures 0.3.9", "hash-db", "hex", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-client-api", "sc-client-db", "sc-consensus", @@ -9100,7 +9176,7 @@ dependencies = [ "memory-db", "pallet-babe", "pallet-timestamp", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parity-util-mem", "sc-block-builder", "sc-executor", @@ -9136,7 +9212,7 @@ name = "substrate-test-runtime-client" version = "2.0.0" dependencies = [ "futures 0.3.9", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "sc-block-builder", "sc-client-api", "sc-consensus", @@ -9157,7 +9233,7 @@ version = "2.0.0" dependencies = [ "derive_more", "futures 0.3.9", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "parking_lot 0.11.1", "sc-transaction-graph", "sp-blockchain", @@ -9250,6 +9326,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" +[[package]] +name = "tap" +version = "1.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "36474e732d1affd3a6ed582781b3683df3d0563714c59c39591e8ff707cf078e" + [[package]] name = "target-lexicon" version = "0.10.0" @@ -9781,7 +9863,7 @@ dependencies = [ "hash-db", "keccak-hasher", "memory-db", - "parity-scale-codec", + "parity-scale-codec 1.3.6", "trie-db", "trie-root", "trie-standardmap", @@ -10503,6 +10585,12 @@ dependencies = [ "winapi-build", ] +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + [[package]] name = "x25519-dalek" version = "1.1.0" diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index a269ebba9ba20..7607553d1f274 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } pallet-aura = { version = "2.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "2.0.0", default-features = false, path = "../../../frame/balances" } frame-support = { version = "2.0.0", default-features = false, path = "../../../frame/support" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 790bf5d4fcbde..faa0beab110f4 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "1.3.6", 
default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = true } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index b7ba95cb513a8..470e4d545dea7 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "2.0.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index df669bdc7cf62..ffeb289b3faf3 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", 
default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } pallet-balances = { version = "2.0.0", default-features = false, path = "../balances" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index a4551f72fa73d..59627385d761c 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -18,7 +18,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } frame-metadata = { version = "12.0.0", default-features = false, path = "../metadata" } frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-chameleon", default-features = false, features = ["v13"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 16b5793410aa1..7730ce8295474 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } 
codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 11975b33e459f..9ebda6ecf51e7 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index e08f4722cde93..31a5b5cf996ce 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = 
"parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 82815fd448fac..f9aa74353817e 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" num-traits = { version = "0.2.8", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-debug-derive = { version = "2.0.0", default-features = false, path = "../debug-derive" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index f7531b4d0fed3..6c11d373cd942 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "2.0.0", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", 
version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 14cdcb73165b7..ec6343d2d3853 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "2.0.0", default-features = false, path = "../arithmetic" } From c09aff60f75089527c9960d58e24f6947c864dbd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 3 Feb 2021 09:53:13 +0000 Subject: [PATCH 064/503] Use scale-info substrate branch --- Cargo.lock | 56 +++++++++------------------- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/support/Cargo.toml | 4 +- 
frame/system/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- 12 files changed, 30 insertions(+), 50 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3ae4f219c28fa..b040f5600a69c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1620,11 +1620,11 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-chameleon#b1008c8f6032eb2ed27a590b28ac39356b7b480f" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-scale-info-decode-feature#0ec6a4c32b96ecc6f9137d92b51c48dac78a9ec1" dependencies = [ "cfg-if 1.0.0", - "parity-scale-codec 1.3.6", - "scale-info 0.4.1", + "parity-scale-codec 2.0.0", + "scale-info", ] [[package]] @@ -1653,7 +1653,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", "pretty_assertions", - "scale-info 0.5.0", + "scale-info", "serde", "smallvec 1.5.0", "sp-api", @@ -1727,7 +1727,7 @@ dependencies = [ "frame-support", "impl-trait-for-tuples 0.2.0", "parity-scale-codec 1.3.6", - "scale-info 0.5.0", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -4013,7 +4013,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec 1.3.6", - "scale-info 0.5.0", + "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4092,7 +4092,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec 1.3.6", - "scale-info 0.5.0", + "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4436,7 +4436,7 @@ dependencies = [ "frame-system", "pallet-transaction-payment", "parity-scale-codec 1.3.6", - "scale-info 0.5.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -4623,7 +4623,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 1.3.6", - "scale-info 0.5.0", + "scale-info", "serde", "sp-core", 
"sp-io", @@ -5109,7 +5109,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples 0.2.0", "parity-scale-codec 1.3.6", - "scale-info 0.5.0", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5145,7 +5145,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 1.3.6", - "scale-info 0.5.0", + "scale-info", "serde", "serde_json", "smallvec 1.5.0", @@ -7734,43 +7734,23 @@ dependencies = [ "wasm-timer", ] -[[package]] -name = "scale-info" -version = "0.4.1" -source = "git+https://github.com/paritytech/scale-info#8b7bd5e372abd6dd83efdecb727522e7c5b0e712" -dependencies = [ - "cfg-if 1.0.0", - "derive_more", - "parity-scale-codec 1.3.6", - "scale-info-derive 0.2.1", -] - [[package]] name = "scale-info" version = "0.5.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-decode-feature#dd2befe7e801f34b913975e33463f4341dbcf56a" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#71cf6e5259ea02898add4710a3917eb7a4ad79a4" dependencies = [ "cfg-if 1.0.0", "derive_more", "parity-scale-codec 2.0.0", - "scale-info-derive 0.3.0", -] - -[[package]] -name = "scale-info-derive" -version = "0.2.1" -source = "git+https://github.com/paritytech/scale-info#8b7bd5e372abd6dd83efdecb727522e7c5b0e712" -dependencies = [ - "proc-macro2", - "quote", - "syn", + "scale-info-derive", ] [[package]] name = "scale-info-derive" version = "0.3.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-decode-feature#dd2befe7e801f34b913975e33463f4341dbcf56a" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#71cf6e5259ea02898add4710a3917eb7a4ad79a4" dependencies = [ + "proc-macro-crate", "proc-macro2", "quote", "syn", @@ -8237,7 +8217,7 @@ dependencies = [ "parity-scale-codec 1.3.6", "primitive-types", "rand 0.7.3", - "scale-info 0.5.0", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8429,7 +8409,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info 0.5.0", + 
"scale-info", "schnorrkel", "secrecy", "serde", @@ -8641,7 +8621,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", "rand 0.7.3", - "scale-info 0.5.0", + "scale-info", "serde", "serde_json", "sp-application-crypto", diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index 7607553d1f274..fa626e2380ad1 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } pallet-aura = { version = "2.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "2.0.0", default-features = false, path = "../../../frame/balances" } frame-support = { version = "2.0.0", default-features = false, path = "../../../frame/support" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index faa0beab110f4..d3ad9933a08c2 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = 
true } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 470e4d545dea7..4f4f8af8ef7a6 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "2.0.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index ffeb289b3faf3..e9de6d6f020b1 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = 
false, path = "../system" } pallet-balances = { version = "2.0.0", default-features = false, path = "../balances" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 59627385d761c..1c22e0fe6fd55 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -17,8 +17,8 @@ log = "0.4" serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } frame-metadata = { version = "12.0.0", default-features = false, path = "../metadata" } -frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-chameleon", default-features = false, features = ["v13"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-scale-info-decode-feature", default-features = false, features = ["v13"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 7730ce8295474..631c1825216f8 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = 
"https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 9ebda6ecf51e7..4100e3636bf1d 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 31a5b5cf996ce..2d76664f9dc5d 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = 
["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index f9aa74353817e..59c78e7d3f4ae 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" num-traits = { version = "0.2.8", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-debug-derive = { version = "2.0.0", default-features = false, path = "../debug-derive" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 6c11d373cd942..9c7842380f792 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "2.0.0", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", version = "1.3.6", 
default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index ec6343d2d3853..270d96ddc20c5 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-decode-feature", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "2.0.0", default-features = false, path = "../arithmetic" } From f99e2f8743f0a2ef8a6be175f22e9089ca4dae58 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 3 Feb 2021 10:58:09 +0000 Subject: [PATCH 065/503] Use old rev to see which commit causing the overflow --- Cargo.lock | 51 ++++++++++++++++++++-------- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- 
frame/example/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- 12 files changed, 47 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b040f5600a69c..2a07a23541c3e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1624,7 +1624,7 @@ source = "git+https://github.com/paritytech/frame-metadata?branch=aj-scale-info- dependencies = [ "cfg-if 1.0.0", "parity-scale-codec 2.0.0", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", ] [[package]] @@ -1653,7 +1653,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", "pretty_assertions", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "smallvec 1.5.0", "sp-api", @@ -1727,7 +1727,7 @@ dependencies = [ "frame-support", "impl-trait-for-tuples 0.2.0", "parity-scale-codec 1.3.6", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "sp-core", "sp-externalities", @@ -4013,7 +4013,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec 1.3.6", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "sp-api", "sp-authority-discovery", @@ -4092,7 +4092,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec 1.3.6", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "sp-api", "sp-block-builder", @@ -4436,7 +4436,7 @@ dependencies = [ "frame-system", "pallet-transaction-payment", "parity-scale-codec 1.3.6", - "scale-info", + 
"scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "sp-core", "sp-io", @@ -4623,7 +4623,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 1.3.6", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "sp-core", "sp-io", @@ -5109,7 +5109,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples 0.2.0", "parity-scale-codec 1.3.6", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "sp-core", "sp-inherents", @@ -5145,7 +5145,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 1.3.6", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "serde_json", "smallvec 1.5.0", @@ -7737,18 +7737,39 @@ dependencies = [ [[package]] name = "scale-info" version = "0.5.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#71cf6e5259ea02898add4710a3917eb7a4ad79a4" +source = "git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03#787de293fd502e196ac62d02fb73372094e9ce03" dependencies = [ "cfg-if 1.0.0", "derive_more", "parity-scale-codec 2.0.0", - "scale-info-derive", + "scale-info-derive 0.3.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", +] + +[[package]] +name = "scale-info" +version = "0.5.0" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#4b12675d2e49f05e42dc26d62742f58285c8c15c" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec 2.0.0", + "scale-info-derive 0.3.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", +] + +[[package]] +name = "scale-info-derive" +version = "0.3.0" +source = 
"git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03#787de293fd502e196ac62d02fb73372094e9ce03" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] name = "scale-info-derive" version = "0.3.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#71cf6e5259ea02898add4710a3917eb7a4ad79a4" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#4b12675d2e49f05e42dc26d62742f58285c8c15c" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -8217,7 +8238,7 @@ dependencies = [ "parity-scale-codec 1.3.6", "primitive-types", "rand 0.7.3", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "serde_json", "sp-debug-derive", @@ -8409,7 +8430,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "schnorrkel", "secrecy", "serde", @@ -8621,7 +8642,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", "rand 0.7.3", - "scale-info", + "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", "serde", "serde_json", "sp-application-crypto", diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index fa626e2380ad1..c4a9f7a88b4d2 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = 
"787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } pallet-aura = { version = "2.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "2.0.0", default-features = false, path = "../../../frame/balances" } frame-support = { version = "2.0.0", default-features = false, path = "../../../frame/support" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index d3ad9933a08c2..acb619997d9c2 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = true } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 4f4f8af8ef7a6..89234def88d77 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = 
false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "2.0.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index e9de6d6f020b1..6a619012911d0 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } pallet-balances = { version = "2.0.0", default-features = false, path = "../balances" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 1c22e0fe6fd55..5427eff56ff36 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -18,7 +18,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } frame-metadata = { version = "12.0.0", default-features = false, path = "../metadata" } frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-scale-info-decode-feature", default-features = false, features = ["v13"] } -scale-info 
= { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 631c1825216f8..d4e94a8d8800b 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 4100e3636bf1d..ede8a2a099370 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version 
= "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 2d76664f9dc5d..5da1a3a7603ea 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 59c78e7d3f4ae..c70d4fd2f5ea1 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version 
= "1.3.6", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" num-traits = { version = "0.2.8", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-debug-derive = { version = "2.0.0", default-features = false, path = "../debug-derive" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 9c7842380f792..d9d0e661c8dcd 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "2.0.0", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 270d96ddc20c5..2119b4dcf704d 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = 
"1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "2.0.0", default-features = false, path = "../arithmetic" } From 76ffb4924d8b5f30fb6e4b16303889ebc2067f2d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 3 Feb 2021 11:00:06 +0000 Subject: [PATCH 066/503] Revert "Use old rev to see which commit causing the overflow" This reverts commit f99e2f87 --- Cargo.lock | 47 ++++++++-------------------- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- 12 files changed, 24 insertions(+), 45 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2a07a23541c3e..b584e2ffc932e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1624,7 +1624,7 @@ source = "git+https://github.com/paritytech/frame-metadata?branch=aj-scale-info- dependencies = [ "cfg-if 1.0.0", "parity-scale-codec 2.0.0", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", ] [[package]] @@ -1653,7 +1653,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", 
"pretty_assertions", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "smallvec 1.5.0", "sp-api", @@ -1727,7 +1727,7 @@ dependencies = [ "frame-support", "impl-trait-for-tuples 0.2.0", "parity-scale-codec 1.3.6", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -4013,7 +4013,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec 1.3.6", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4092,7 +4092,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec 1.3.6", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4436,7 +4436,7 @@ dependencies = [ "frame-system", "pallet-transaction-payment", "parity-scale-codec 1.3.6", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4623,7 +4623,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 1.3.6", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5109,7 +5109,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples 0.2.0", "parity-scale-codec 1.3.6", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5145,7 +5145,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 1.3.6", - "scale-info 0.5.0 
(git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "serde_json", "smallvec 1.5.0", @@ -7734,17 +7734,6 @@ dependencies = [ "wasm-timer", ] -[[package]] -name = "scale-info" -version = "0.5.0" -source = "git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03#787de293fd502e196ac62d02fb73372094e9ce03" -dependencies = [ - "cfg-if 1.0.0", - "derive_more", - "parity-scale-codec 2.0.0", - "scale-info-derive 0.3.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", -] - [[package]] name = "scale-info" version = "0.5.0" @@ -7753,17 +7742,7 @@ dependencies = [ "cfg-if 1.0.0", "derive_more", "parity-scale-codec 2.0.0", - "scale-info-derive 0.3.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", -] - -[[package]] -name = "scale-info-derive" -version = "0.3.0" -source = "git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03#787de293fd502e196ac62d02fb73372094e9ce03" -dependencies = [ - "proc-macro2", - "quote", - "syn", + "scale-info-derive", ] [[package]] @@ -8238,7 +8217,7 @@ dependencies = [ "parity-scale-codec 1.3.6", "primitive-types", "rand 0.7.3", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8430,7 +8409,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -8642,7 +8621,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", "rand 0.7.3", - "scale-info 0.5.0 (git+https://github.com/paritytech/scale-info?rev=787de293fd502e196ac62d02fb73372094e9ce03)", + "scale-info", "serde", "serde_json", "sp-application-crypto", diff --git a/bin/node-template/runtime/Cargo.toml 
b/bin/node-template/runtime/Cargo.toml index c4a9f7a88b4d2..fa626e2380ad1 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } pallet-aura = { version = "2.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "2.0.0", default-features = false, path = "../../../frame/balances" } frame-support = { version = "2.0.0", default-features = false, path = "../../../frame/support" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index acb619997d9c2..d3ad9933a08c2 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = true } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 89234def88d77..4f4f8af8ef7a6 100644 --- a/frame/balances/Cargo.toml +++ 
b/frame/balances/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "2.0.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 6a619012911d0..e9de6d6f020b1 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } pallet-balances = { version = "2.0.0", default-features = false, path = "../balances" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 
5427eff56ff36..1c22e0fe6fd55 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -18,7 +18,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } frame-metadata = { version = "12.0.0", default-features = false, path = "../metadata" } frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-scale-info-decode-feature", default-features = false, features = ["v13"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index d4e94a8d8800b..631c1825216f8 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = 
"2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index ede8a2a099370..4100e3636bf1d 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 5da1a3a7603ea..2d76664f9dc5d 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", 
default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index c70d4fd2f5ea1..59c78e7d3f4ae 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" num-traits = { version = "0.2.8", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-debug-derive = { version = "2.0.0", default-features = false, path = "../debug-derive" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index d9d0e661c8dcd..9c7842380f792 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "2.0.0", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = 
"https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 2119b4dcf704d..270d96ddc20c5 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", rev = "787de293fd502e196ac62d02fb73372094e9ce03", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "2.0.0", default-features = false, path = "../arithmetic" } From c919efd6ce1d0de84968c48562c7e2e11fd260e0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 3 Feb 2021 14:11:51 +0000 Subject: [PATCH 067/503] Update to codec 2.0.0 ahead of merge --- Cargo.lock | 260 +++++++++--------- bin/node-template/pallets/template/Cargo.toml | 2 +- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/cli/Cargo.toml | 2 +- bin/node/executor/Cargo.toml | 2 +- bin/node/inspect/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- bin/node/testing/Cargo.toml | 2 +- client/api/Cargo.toml | 2 +- client/basic-authorship/Cargo.toml | 
2 +- client/block-builder/Cargo.toml | 2 +- client/chain-spec/Cargo.toml | 2 +- client/consensus/aura/Cargo.toml | 2 +- client/consensus/babe/Cargo.toml | 2 +- client/consensus/epochs/Cargo.toml | 2 +- client/consensus/manual-seal/Cargo.toml | 2 +- client/consensus/pow/Cargo.toml | 2 +- client/consensus/slots/Cargo.toml | 2 +- client/db/Cargo.toml | 2 +- client/executor/Cargo.toml | 2 +- client/executor/common/Cargo.toml | 2 +- client/executor/wasmi/Cargo.toml | 2 +- client/executor/wasmtime/Cargo.toml | 2 +- client/light/Cargo.toml | 2 +- client/network/Cargo.toml | 2 +- client/offchain/Cargo.toml | 2 +- client/rpc-api/Cargo.toml | 2 +- client/rpc/Cargo.toml | 2 +- client/service/Cargo.toml | 2 +- client/state-db/Cargo.toml | 2 +- client/transaction-pool/Cargo.toml | 2 +- client/transaction-pool/graph/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/atomic-swap/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- frame/democracy/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/merkle-mountain-range/Cargo.toml | 2 +- frame/metadata/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- frame/node-authorization/Cargo.toml | 2 +- 
frame/offences/Cargo.toml | 2 +- frame/offences/benchmarking/Cargo.toml | 2 +- frame/proxy/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 2 +- frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/session/benchmarking/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- frame/staking/Cargo.toml | 2 +- frame/staking/fuzzer/Cargo.toml | 2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/support/test/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/system/benchmarking/Cargo.toml | 2 +- frame/system/rpc/runtime-api/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/transaction-payment/rpc/Cargo.toml | 2 +- .../rpc/runtime-api/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/api/Cargo.toml | 2 +- primitives/api/test/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authorship/Cargo.toml | 2 +- primitives/block-builder/Cargo.toml | 2 +- primitives/blockchain/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/common/Cargo.toml | 2 +- primitives/consensus/pow/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/externalities/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/inherents/Cargo.toml | 2 +- primitives/io/Cargo.toml | 2 +- primitives/keystore/Cargo.toml | 2 +- primitives/npos-elections/Cargo.toml | 2 +- primitives/npos-elections/fuzzer/Cargo.toml | 2 +- primitives/runtime-interface/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/sandbox/Cargo.toml | 2 +- primitives/session/Cargo.toml | 2 +- primitives/staking/Cargo.toml | 2 +- 
primitives/state-machine/Cargo.toml | 2 +- primitives/storage/Cargo.toml | 2 +- primitives/test-primitives/Cargo.toml | 2 +- primitives/timestamp/Cargo.toml | 2 +- primitives/transaction-pool/Cargo.toml | 2 +- primitives/trie/Cargo.toml | 2 +- primitives/version/Cargo.toml | 2 +- primitives/wasm-interface/Cargo.toml | 2 +- test-utils/client/Cargo.toml | 2 +- test-utils/runtime/Cargo.toml | 2 +- test-utils/runtime/client/Cargo.toml | 2 +- .../runtime/transaction-pool/Cargo.toml | 2 +- utils/fork-tree/Cargo.toml | 2 +- utils/frame/rpc/support/Cargo.toml | 2 +- utils/frame/rpc/system/Cargo.toml | 2 +- 131 files changed, 260 insertions(+), 260 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b584e2ffc932e..9d5de765de130 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1544,7 +1544,7 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" name = "fork-tree" version = "2.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", ] [[package]] @@ -1565,7 +1565,7 @@ dependencies = [ "frame-system", "hex-literal", "linregress", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "paste 0.1.18", "sp-api", "sp-io", @@ -1607,7 +1607,7 @@ dependencies = [ "pallet-balances", "pallet-indices", "pallet-transaction-payment", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -1631,7 +1631,7 @@ dependencies = [ name = "frame-metadata" version = "12.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-std", @@ -1649,7 +1649,7 @@ dependencies = [ "impl-trait-for-tuples 0.2.0", "log", "once_cell", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-util-mem", "paste 0.1.18", "pretty_assertions", @@ -1706,7 +1706,7 @@ dependencies = [ "frame-metadata 12.0.1", "frame-support", "frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "pretty_assertions", "rustversion", "serde", @@ -1726,7 +1726,7 @@ 
dependencies = [ "criterion", "frame-support", "impl-trait-for-tuples 0.2.0", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "scale-info", "serde", "sp-core", @@ -1745,7 +1745,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -1757,7 +1757,7 @@ dependencies = [ name = "frame-system-rpc-runtime-api" version = "2.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", ] @@ -3803,7 +3803,7 @@ dependencies = [ "pallet-staking", "pallet-timestamp", "pallet-transaction-payment", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "platforms", "rand 0.7.3", @@ -3873,7 +3873,7 @@ dependencies = [ "pallet-timestamp", "pallet-transaction-payment", "pallet-treasury", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-executor", "sp-application-crypto", "sp-core", @@ -3894,7 +3894,7 @@ version = "0.8.0" dependencies = [ "derive_more", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-cli", "sc-client-api", "sc-service", @@ -3909,7 +3909,7 @@ name = "node-primitives" version = "2.0.0" dependencies = [ "frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "pretty_assertions", "sp-application-crypto", "sp-core", @@ -4012,7 +4012,7 @@ dependencies = [ "pallet-treasury", "pallet-utility", "pallet-vesting", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "scale-info", "serde", "sp-api", @@ -4091,7 +4091,7 @@ dependencies = [ "pallet-timestamp", "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "scale-info", "serde", "sp-api", @@ -4131,7 +4131,7 @@ dependencies = [ "pallet-timestamp", "pallet-transaction-payment", "pallet-treasury", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-block-builder", "sc-cli", "sc-client-api", 
@@ -4319,7 +4319,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4334,7 +4334,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4351,7 +4351,7 @@ dependencies = [ "lazy_static", "pallet-session", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "serde", "sp-application-crypto", @@ -4371,7 +4371,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-session", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4389,7 +4389,7 @@ dependencies = [ "frame-support", "frame-system", "impl-trait-for-tuples 0.2.0", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-authorship", "sp-core", "sp-inherents", @@ -4412,7 +4412,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4435,7 +4435,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-transaction-payment", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "scale-info", "serde", "sp-core", @@ -4453,7 +4453,7 @@ dependencies = [ "frame-system", "pallet-balances", "pallet-treasury", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4471,7 +4471,7 @@ dependencies = [ "frame-system", "hex-literal", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4493,7 +4493,7 @@ dependencies = [ "pallet-contracts-proc-macro", "pallet-randomness-collective-flip", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-wasm 0.41.0", "paste 1.0.3", 
"pretty_assertions", @@ -4515,7 +4515,7 @@ name = "pallet-contracts-primitives" version = "2.0.1" dependencies = [ "bitflags", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-runtime", "sp-std", ] @@ -4538,7 +4538,7 @@ dependencies = [ "jsonrpc-derive", "pallet-contracts-primitives", "pallet-contracts-rpc-runtime-api", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "serde_json", "sp-api", @@ -4553,7 +4553,7 @@ name = "pallet-contracts-rpc-runtime-api" version = "0.8.1" dependencies = [ "pallet-contracts-primitives", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", "sp-runtime", "sp-std", @@ -4569,7 +4569,7 @@ dependencies = [ "hex-literal", "pallet-balances", "pallet-scheduler", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4587,7 +4587,7 @@ dependencies = [ "frame-system", "hex-literal", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4604,7 +4604,7 @@ dependencies = [ "frame-system", "hex-literal", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4622,7 +4622,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "scale-info", "serde", "sp-core", @@ -4638,7 +4638,7 @@ dependencies = [ "frame-support", "frame-system", "lite-json", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4653,7 +4653,7 @@ version = "2.0.1" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-core", "sp-io", "sp-runtime", @@ -4676,7 +4676,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-application-crypto", "sp-core", @@ -4698,7 +4698,7 @@ dependencies = [ 
"frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4715,7 +4715,7 @@ dependencies = [ "frame-system", "pallet-authorship", "pallet-session", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-application-crypto", "sp-core", @@ -4733,7 +4733,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4750,7 +4750,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-core", "sp-io", "sp-runtime", @@ -4763,7 +4763,7 @@ version = "2.0.1" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4781,7 +4781,7 @@ dependencies = [ "frame-support", "frame-system", "hex-literal", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4797,7 +4797,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4812,7 +4812,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4826,7 +4826,7 @@ version = "2.0.0" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4841,7 +4841,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4866,7 +4866,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4884,7 +4884,7 @@ 
dependencies = [ "frame-system", "pallet-balances", "pallet-utility", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4898,7 +4898,7 @@ version = "2.0.1" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "safe-mix", "sp-core", "sp-io", @@ -4914,7 +4914,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4929,7 +4929,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4945,7 +4945,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -4962,7 +4962,7 @@ dependencies = [ "impl-trait-for-tuples 0.1.3", "lazy_static", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-application-crypto", "sp-core", @@ -4986,7 +4986,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "rand 0.7.3", "serde", "sp-core", @@ -5003,7 +5003,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "rand_chacha 0.2.2", "serde", "sp-core", @@ -5025,7 +5025,7 @@ dependencies = [ "pallet-session", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "rand_chacha 0.2.2", "serde", @@ -5055,7 +5055,7 @@ dependencies = [ "pallet-staking", "pallet-staking-reward-curve", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-core", "sp-io", "sp-npos-elections", @@ -5080,7 +5080,7 @@ version = "2.0.1" dependencies = [ "frame-support", 
"frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -5094,7 +5094,7 @@ version = "2.0.0" dependencies = [ "frame-support", "frame-system", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-core", "sp-io", "sp-runtime", @@ -5108,7 +5108,7 @@ dependencies = [ "frame-support", "frame-system", "impl-trait-for-tuples 0.2.0", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "scale-info", "serde", "sp-core", @@ -5128,7 +5128,7 @@ dependencies = [ "frame-system", "pallet-balances", "pallet-treasury", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -5144,7 +5144,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "scale-info", "serde", "serde_json", @@ -5164,7 +5164,7 @@ dependencies = [ "jsonrpc-core-client", "jsonrpc-derive", "pallet-transaction-payment-rpc-runtime-api", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", "sp-blockchain", "sp-core", @@ -5177,7 +5177,7 @@ name = "pallet-transaction-payment-rpc-runtime-api" version = "2.0.1" dependencies = [ "pallet-transaction-payment", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", "sp-runtime", ] @@ -5191,7 +5191,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples 0.2.0", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -5208,7 +5208,7 @@ dependencies = [ "frame-support", "frame-system", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -5226,7 +5226,7 @@ dependencies = [ "frame-system", "hex-literal", "pallet-balances", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -6568,7 +6568,7 @@ dependencies = [ "futures 0.3.9", "futures-timer 3.0.2", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 
2.0.0", "parking_lot 0.11.1", "sc-block-builder", "sc-client-api", @@ -6590,7 +6590,7 @@ dependencies = [ name = "sc-block-builder" version = "0.8.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-client-api", "sp-api", "sp-block-builder", @@ -6609,7 +6609,7 @@ name = "sc-chain-spec" version = "2.0.1" dependencies = [ "impl-trait-for-tuples 0.2.0", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-chain-spec-derive", "sc-consensus-babe", "sc-consensus-epochs", @@ -6684,7 +6684,7 @@ dependencies = [ "kvdb-memorydb", "lazy_static", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sc-executor", "sp-api", @@ -6721,7 +6721,7 @@ dependencies = [ "linked-hash-map", "log", "parity-db", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-util-mem", "parking_lot 0.11.1", "quickcheck", @@ -6762,7 +6762,7 @@ dependencies = [ "futures-timer 3.0.2", "getrandom 0.2.1", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sc-block-builder", "sc-client-api", @@ -6806,7 +6806,7 @@ dependencies = [ "num-bigint", "num-rational", "num-traits", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "pdqselect", "rand 0.7.3", @@ -6881,7 +6881,7 @@ name = "sc-consensus-epochs" version = "0.8.1" dependencies = [ "fork-tree", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sc-client-api", "sp-blockchain", @@ -6899,7 +6899,7 @@ dependencies = [ "jsonrpc-core-client", "jsonrpc-derive", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sc-basic-authorship", "sc-client-api", @@ -6933,7 +6933,7 @@ dependencies = [ "futures 0.3.9", "futures-timer 3.0.2", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sc-client-api", "sp-api", @@ -6955,7 +6955,7 @@ dependencies = [ "futures 0.3.9", "futures-timer 3.0.2", "log", - 
"parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sc-client-api", "sc-telemetry", @@ -6997,7 +6997,7 @@ dependencies = [ "lazy_static", "libsecp256k1", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-wasm 0.41.0", "parking_lot 0.11.1", "paste 0.1.18", @@ -7032,7 +7032,7 @@ name = "sc-executor-common" version = "0.8.1" dependencies = [ "derive_more", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-wasm 0.41.0", "sp-allocator", "sp-core", @@ -7047,7 +7047,7 @@ name = "sc-executor-wasmi" version = "0.8.1" dependencies = [ "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-executor-common", "sp-allocator", "sp-core", @@ -7062,7 +7062,7 @@ version = "0.8.1" dependencies = [ "assert_matches", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-wasm 0.41.0", "pwasm-utils 0.14.0", "sc-executor-common", @@ -7212,7 +7212,7 @@ version = "2.0.1" dependencies = [ "hash-db", "lazy_static", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sc-client-api", "sc-executor", @@ -7250,7 +7250,7 @@ dependencies = [ "log", "lru", "nohash-hasher", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "pin-project 0.4.27", "prost 0.7.0", @@ -7342,7 +7342,7 @@ dependencies = [ "lazy_static", "log", "num_cpus", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "rand 0.7.3", "sc-block-builder", @@ -7397,7 +7397,7 @@ dependencies = [ "jsonrpc-pubsub", "lazy_static", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sc-block-builder", "sc-cli", @@ -7438,7 +7438,7 @@ dependencies = [ "jsonrpc-derive", "jsonrpc-pubsub", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "serde", "serde_json", @@ -7496,7 +7496,7 @@ dependencies = [ "jsonrpc-pubsub", "lazy_static", "log", - "parity-scale-codec 1.3.6", + 
"parity-scale-codec 2.0.0", "parity-util-mem", "parking_lot 0.11.1", "pin-project 0.4.27", @@ -7591,7 +7591,7 @@ name = "sc-state-db" version = "0.8.1" dependencies = [ "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-util-mem", "parity-util-mem-derive", "parking_lot 0.11.1", @@ -7687,7 +7687,7 @@ dependencies = [ "futures 0.3.9", "linked-hash-map", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-util-mem", "parking_lot 0.11.1", "retain_mut", @@ -7712,7 +7712,7 @@ dependencies = [ "hex", "intervalier", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-util-mem", "parking_lot 0.11.1", "sc-block-builder", @@ -8143,7 +8143,7 @@ name = "sp-api" version = "2.0.1" dependencies = [ "hash-db", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api-proc-macro", "sp-core", "sp-runtime", @@ -8170,7 +8170,7 @@ name = "sp-api-test" version = "2.0.1" dependencies = [ "criterion", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "rustversion", "sc-block-builder", "sp-api", @@ -8188,7 +8188,7 @@ dependencies = [ name = "sp-application-crypto" version = "2.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-core", "sp-io", @@ -8214,7 +8214,7 @@ dependencies = [ "criterion", "integer-sqrt", "num-traits", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "primitive-types", "rand 0.7.3", "scale-info", @@ -8250,7 +8250,7 @@ dependencies = [ name = "sp-authorship" version = "2.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-inherents", "sp-runtime", "sp-std", @@ -8260,7 +8260,7 @@ dependencies = [ name = "sp-block-builder" version = "2.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", "sp-inherents", "sp-runtime", @@ -8274,7 +8274,7 @@ dependencies = [ "futures 0.3.9", "log", "lru", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 
0.11.1", "sp-api", "sp-consensus", @@ -8300,7 +8300,7 @@ dependencies = [ "futures-timer 3.0.2", "libp2p", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "serde", "sp-api", @@ -8322,7 +8322,7 @@ dependencies = [ name = "sp-consensus-aura" version = "0.8.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", "sp-application-crypto", "sp-inherents", @@ -8336,7 +8336,7 @@ name = "sp-consensus-babe" version = "0.8.1" dependencies = [ "merlin", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8354,7 +8354,7 @@ dependencies = [ name = "sp-consensus-pow" version = "0.8.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", "sp-core", "sp-runtime", @@ -8365,7 +8365,7 @@ dependencies = [ name = "sp-consensus-slots" version = "0.8.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-runtime", ] @@ -8401,7 +8401,7 @@ dependencies = [ "log", "merlin", "num-traits", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-util-mem", "parking_lot 0.11.1", "pretty_assertions", @@ -8463,7 +8463,7 @@ name = "sp-externalities" version = "0.8.1" dependencies = [ "environmental", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-std", "sp-storage", ] @@ -8474,7 +8474,7 @@ version = "2.0.1" dependencies = [ "finality-grandpa", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-api", "sp-application-crypto", @@ -8488,7 +8488,7 @@ dependencies = [ name = "sp-inherents" version = "2.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sp-core", "sp-std", @@ -8503,7 +8503,7 @@ dependencies = [ "hash-db", "libsecp256k1", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sp-core", "sp-externalities", @@ -8536,7 +8536,7 @@ dependencies = [ 
"derive_more", "futures 0.3.9", "merlin", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "rand 0.7.3", "rand_chacha 0.2.2", @@ -8550,7 +8550,7 @@ dependencies = [ name = "sp-npos-elections" version = "2.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "rand 0.7.3", "serde", "sp-arithmetic", @@ -8576,7 +8576,7 @@ name = "sp-npos-elections-fuzzer" version = "2.0.0-alpha.5" dependencies = [ "honggfuzz", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "rand 0.7.3", "sp-npos-elections", "sp-runtime", @@ -8617,7 +8617,7 @@ dependencies = [ "hash256-std-hasher", "impl-trait-for-tuples 0.2.0", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-util-mem", "paste 0.1.18", "rand 0.7.3", @@ -8637,7 +8637,7 @@ name = "sp-runtime-interface" version = "2.0.1" dependencies = [ "impl-trait-for-tuples 0.2.0", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "primitive-types", "rustversion", "sp-core", @@ -8708,7 +8708,7 @@ name = "sp-sandbox" version = "0.8.1" dependencies = [ "assert_matches", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-core", "sp-io", "sp-std", @@ -8729,7 +8729,7 @@ dependencies = [ name = "sp-session" version = "2.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", "sp-core", "sp-runtime", @@ -8741,7 +8741,7 @@ dependencies = [ name = "sp-staking" version = "2.0.1" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-runtime", "sp-std", ] @@ -8754,7 +8754,7 @@ dependencies = [ "hex-literal", "log", "num-traits", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "pretty_assertions", "rand 0.7.3", @@ -8779,7 +8779,7 @@ name = "sp-storage" version = "2.0.1" dependencies = [ "impl-serde", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "ref-cast", "serde", "sp-debug-derive", @@ -8803,7 +8803,7 @@ dependencies = [ name = 
"sp-test-primitives" version = "2.0.0" dependencies = [ - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-util-mem", "serde", "sp-application-crypto", @@ -8816,7 +8816,7 @@ name = "sp-timestamp" version = "2.0.1" dependencies = [ "impl-trait-for-tuples 0.2.0", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-api", "sp-inherents", "sp-runtime", @@ -8843,7 +8843,7 @@ dependencies = [ "derive_more", "futures 0.3.9", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-api", "sp-blockchain", @@ -8859,7 +8859,7 @@ dependencies = [ "hash-db", "hex-literal", "memory-db", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-core", "sp-runtime", "sp-std", @@ -8885,7 +8885,7 @@ name = "sp-version" version = "2.0.1" dependencies = [ "impl-serde", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "serde", "sp-runtime", "sp-std", @@ -8896,7 +8896,7 @@ name = "sp-wasm-interface" version = "2.0.1" dependencies = [ "impl-trait-for-tuples 0.2.0", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sp-std", "wasmi", ] @@ -9072,7 +9072,7 @@ dependencies = [ "futures 0.3.9", "jsonrpc-client-transports", "jsonrpc-core", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-rpc-api", "serde", "sp-storage", @@ -9089,7 +9089,7 @@ dependencies = [ "jsonrpc-core-client", "jsonrpc-derive", "log", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-client-api", "sc-rpc-api", "sc-transaction-pool", @@ -9125,7 +9125,7 @@ dependencies = [ "futures 0.3.9", "hash-db", "hex", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-client-api", "sc-client-db", "sc-consensus", @@ -9156,7 +9156,7 @@ dependencies = [ "memory-db", "pallet-babe", "pallet-timestamp", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parity-util-mem", "sc-block-builder", "sc-executor", @@ -9192,7 +9192,7 @@ name = "substrate-test-runtime-client" version = "2.0.0" dependencies = [ "futures 
0.3.9", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "sc-block-builder", "sc-client-api", "sc-consensus", @@ -9213,7 +9213,7 @@ version = "2.0.0" dependencies = [ "derive_more", "futures 0.3.9", - "parity-scale-codec 1.3.6", + "parity-scale-codec 2.0.0", "parking_lot 0.11.1", "sc-transaction-graph", "sp-blockchain", diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index f6d69206209e8..7fb283cfb4114 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } [dependencies.frame-support] default-features = false diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index fa626e2380ad1..404427b7c6d3d 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -11,7 +11,7 @@ repository = "https://github.com/paritytech/substrate/" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } pallet-aura = { version = "2.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "2.0.0", default-features = false, path = "../../../frame/balances" } diff --git a/bin/node/cli/Cargo.toml b/bin/node/cli/Cargo.toml index 4c245dcf629fb..af27b52377a43 100644 --- 
a/bin/node/cli/Cargo.toml +++ b/bin/node/cli/Cargo.toml @@ -34,7 +34,7 @@ crate-type = ["cdylib", "rlib"] [dependencies] # third-party dependencies -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } serde = { version = "1.0.102", features = ["derive"] } futures = { version = "0.3.9", features = ["compat"] } hex-literal = "0.3.1" diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index a2177ac9cd79d..36af51bd80fdc 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -12,7 +12,7 @@ repository = "https://github.com/paritytech/substrate/" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } node-primitives = { version = "2.0.0", path = "../primitives" } node-runtime = { version = "2.0.0", path = "../runtime" } sc-executor = { version = "0.8.0", path = "../../../client/executor" } diff --git a/bin/node/inspect/Cargo.toml b/bin/node/inspect/Cargo.toml index 7f94e15bb8fc8..14acb18956016 100644 --- a/bin/node/inspect/Cargo.toml +++ b/bin/node/inspect/Cargo.toml @@ -11,7 +11,7 @@ repository = "https://github.com/paritytech/substrate/" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } derive_more = "0.99" log = "0.4.8" sc-cli = { version = "0.8.0", path = "../../../client/cli" } diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index db28472087fe2..7a4b29cacea30 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -11,7 +11,7 @@ repository = "https://github.com/paritytech/substrate/" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] 
} +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } frame-system = { version = "2.0.0", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "2.0.0", default-features = false, path = "../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index d3ad9933a08c2..74569a6ee4ca3 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] # third-party dependencies -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = true } static_assertions = "1.1.0" diff --git a/bin/node/testing/Cargo.toml b/bin/node/testing/Cargo.toml index f6cf92d77e8e8..e92e475952df3 100644 --- a/bin/node/testing/Cargo.toml +++ b/bin/node/testing/Cargo.toml @@ -17,7 +17,7 @@ pallet-balances = { version = "2.0.0", path = "../../../frame/balances" } sc-service = { version = "0.8.0", features = ["test-helpers", "db"], path = "../../../client/service" } sc-client-db = { version = "0.8.0", path = "../../../client/db/", features = ["kvdb-rocksdb", "parity-db"] } sc-client-api = { version = "2.0.0", path = "../../../client/api/" } -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } pallet-contracts = { version = "2.0.0", path = "../../../frame/contracts" } pallet-grandpa = { version = "2.0.0", path = "../../../frame/grandpa" } 
pallet-indices = { version = "2.0.0", path = "../../../frame/indices" } diff --git a/client/api/Cargo.toml b/client/api/Cargo.toml index 205d5a51cde34..4e4890b91ec21 100644 --- a/client/api/Cargo.toml +++ b/client/api/Cargo.toml @@ -14,7 +14,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-consensus = { version = "0.8.0", path = "../../primitives/consensus/common" } derive_more = "0.99.2" sc-executor = { version = "0.8.0", path = "../executor" } diff --git a/client/basic-authorship/Cargo.toml b/client/basic-authorship/Cargo.toml index c1df76253a46a..2c0e8a2d1c5e2 100644 --- a/client/basic-authorship/Cargo.toml +++ b/client/basic-authorship/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } futures = "0.3.9" futures-timer = "3.0.1" log = "0.4.8" diff --git a/client/block-builder/Cargo.toml b/client/block-builder/Cargo.toml index 64a82505a9fa4..b0a20857b86d3 100644 --- a/client/block-builder/Cargo.toml +++ b/client/block-builder/Cargo.toml @@ -23,7 +23,7 @@ sp-core = { version = "2.0.0", path = "../../primitives/core" } sp-block-builder = { version = "2.0.0", path = "../../primitives/block-builder" } sp-inherents = { version = "2.0.0", path = "../../primitives/inherents" } sc-client-api = { version = "2.0.0", path = "../api" } -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } [dev-dependencies] substrate-test-runtime-client = { path = "../../test-utils/runtime/client" } diff --git a/client/chain-spec/Cargo.toml 
b/client/chain-spec/Cargo.toml index e7144d330c693..3903ebf21d5d2 100644 --- a/client/chain-spec/Cargo.toml +++ b/client/chain-spec/Cargo.toml @@ -22,7 +22,7 @@ serde_json = "1.0.41" sp-runtime = { version = "2.0.0", path = "../../primitives/runtime" } sp-chain-spec = { version = "2.0.0", path = "../../primitives/chain-spec" } sc-telemetry = { version = "2.0.0", path = "../telemetry" } -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sc-consensus-babe = { version = "0.8.0-rc6", path = "../consensus/babe" } sp-consensus-babe = { version = "0.8.0-rc6", path = "../../primitives/consensus/babe" } sc-consensus-epochs = { version = "0.8.0-rc6", path = "../consensus/epochs" } diff --git a/client/consensus/aura/Cargo.toml b/client/consensus/aura/Cargo.toml index b6e1ba6bc10dd..0c3c54e692339 100644 --- a/client/consensus/aura/Cargo.toml +++ b/client/consensus/aura/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.8.0", path = "../../../primitives/consensus/a sp-block-builder = { version = "2.0.0", path = "../../../primitives/block-builder" } sc-block-builder = { version = "0.8.0", path = "../../block-builder" } sc-client-api = { version = "2.0.0", path = "../../api" } -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sp-consensus = { version = "0.8.0", path = "../../../primitives/consensus/common" } derive_more = "0.99.2" futures = "0.3.9" diff --git a/client/consensus/babe/Cargo.toml b/client/consensus/babe/Cargo.toml index 8104ca2047ca7..ce7662533018b 100644 --- a/client/consensus/babe/Cargo.toml +++ b/client/consensus/babe/Cargo.toml @@ -14,7 +14,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } 
sp-consensus-babe = { version = "0.8.0", path = "../../../primitives/consensus/babe" } sp-core = { version = "2.0.0", path = "../../../primitives/core" } sp-application-crypto = { version = "2.0.0", path = "../../../primitives/application-crypto" } diff --git a/client/consensus/epochs/Cargo.toml b/client/consensus/epochs/Cargo.toml index 752280e3547dd..7d1f74ab76d61 100644 --- a/client/consensus/epochs/Cargo.toml +++ b/client/consensus/epochs/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } parking_lot = "0.11.1" fork-tree = { version = "2.0.0", path = "../../../utils/fork-tree" } sp-runtime = { path = "../../../primitives/runtime" , version = "2.0.0"} diff --git a/client/consensus/manual-seal/Cargo.toml b/client/consensus/manual-seal/Cargo.toml index b13cbc7b5590c..3778b79960bf8 100644 --- a/client/consensus/manual-seal/Cargo.toml +++ b/client/consensus/manual-seal/Cargo.toml @@ -20,7 +20,7 @@ jsonrpc-core-client = "15.1.0" jsonrpc-derive = "15.1.0" log = "0.4.8" parking_lot = "0.11.1" -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } serde = { version = "1.0", features=["derive"] } assert_matches = "1.3.0" diff --git a/client/consensus/pow/Cargo.toml b/client/consensus/pow/Cargo.toml index b5112f9fa628f..2aae25ef931fe 100644 --- a/client/consensus/pow/Cargo.toml +++ b/client/consensus/pow/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } sp-core = { version = "2.0.0", path = "../../../primitives/core" } sp-blockchain = { version = "2.0.0", path 
= "../../../primitives/blockchain" } sp-runtime = { version = "2.0.0", path = "../../../primitives/runtime" } diff --git a/client/consensus/slots/Cargo.toml b/client/consensus/slots/Cargo.toml index bdf28f35236b2..03bf48bd6246f 100644 --- a/client/consensus/slots/Cargo.toml +++ b/client/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sc-client-api = { version = "2.0.0", path = "../../api" } sp-core = { version = "2.0.0", path = "../../../primitives/core" } sp-trie = { version = "2.0.0", path = "../../../primitives/trie" } diff --git a/client/db/Cargo.toml b/client/db/Cargo.toml index 23f6fa9b1f628..79e76aacb9d62 100644 --- a/client/db/Cargo.toml +++ b/client/db/Cargo.toml @@ -21,7 +21,7 @@ kvdb-memorydb = "0.8.0" linked-hash-map = "0.5.2" hash-db = "0.15.2" parity-util-mem = { version = "0.8.0", default-features = false, features = ["std"] } -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } blake2-rfc = "0.2.18" sc-client-api = { version = "2.0.0", path = "../api" } diff --git a/client/executor/Cargo.toml b/client/executor/Cargo.toml index 8cfbe8d600d49..bfa50518aeebf 100644 --- a/client/executor/Cargo.toml +++ b/client/executor/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] derive_more = "0.99.2" -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sp-io = { version = "2.0.0", path = "../../primitives/io" } sp-core = { version = "2.0.0", path = "../../primitives/core" } sp-tasks = { version = "2.0.0", path = "../../primitives/tasks" } diff --git a/client/executor/common/Cargo.toml b/client/executor/common/Cargo.toml index 
a479f4e1f4ddb..b8f735f0c1798 100644 --- a/client/executor/common/Cargo.toml +++ b/client/executor/common/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] derive_more = "0.99.2" parity-wasm = "0.41.0" -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } wasmi = "0.6.2" sp-core = { version = "2.0.0", path = "../../../primitives/core" } sp-allocator = { version = "2.0.0", path = "../../../primitives/allocator" } diff --git a/client/executor/wasmi/Cargo.toml b/client/executor/wasmi/Cargo.toml index 38d1cf3072a1e..ea571b91f12bc 100644 --- a/client/executor/wasmi/Cargo.toml +++ b/client/executor/wasmi/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] log = "0.4.8" wasmi = "0.6.2" -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sc-executor-common = { version = "0.8.0", path = "../common" } sp-wasm-interface = { version = "2.0.0", path = "../../../primitives/wasm-interface" } sp-runtime-interface = { version = "2.0.0", path = "../../../primitives/runtime-interface" } diff --git a/client/executor/wasmtime/Cargo.toml b/client/executor/wasmtime/Cargo.toml index 071cbc66001d6..dcd162c900fb6 100644 --- a/client/executor/wasmtime/Cargo.toml +++ b/client/executor/wasmtime/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] log = "0.4.8" scoped-tls = "1.0" parity-wasm = "0.41.0" -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sc-executor-common = { version = "0.8.0", path = "../common" } sp-wasm-interface = { version = "2.0.0", path = "../../../primitives/wasm-interface" } sp-runtime-interface = { version = "2.0.0", path = "../../../primitives/runtime-interface" } diff --git a/client/light/Cargo.toml b/client/light/Cargo.toml index 60d16ff0359ce..5ec87419332fa 100644 --- 
a/client/light/Cargo.toml +++ b/client/light/Cargo.toml @@ -21,7 +21,7 @@ sp-core = { version = "2.0.0", path = "../../primitives/core" } sp-state-machine = { version = "0.8.0", path = "../../primitives/state-machine" } sc-client-api = { version = "2.0.0", path = "../api" } sp-api = { version = "2.0.0", path = "../../primitives/api" } -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sc-executor = { version = "0.8.0", path = "../executor" } [features] diff --git a/client/network/Cargo.toml b/client/network/Cargo.toml index bf948ff4dd37d..8c6fc4e668d08 100644 --- a/client/network/Cargo.toml +++ b/client/network/Cargo.toml @@ -22,7 +22,7 @@ async-std = "1.6.5" bitflags = "1.2.0" bs58 = "0.4.0" bytes = "1" -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } derive_more = "0.99.2" either = "1.5.3" erased-serde = "0.3.9" diff --git a/client/offchain/Cargo.toml b/client/offchain/Cargo.toml index 7d0f01a0c7ed7..b53ff5616db45 100644 --- a/client/offchain/Cargo.toml +++ b/client/offchain/Cargo.toml @@ -23,7 +23,7 @@ log = "0.4.8" threadpool = "1.7" num_cpus = "1.10" sp-offchain = { version = "2.0.0", path = "../../primitives/offchain" } -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } parking_lot = "0.11.1" sp-core = { version = "2.0.0", path = "../../primitives/core" } rand = "0.7.2" diff --git a/client/rpc-api/Cargo.toml b/client/rpc-api/Cargo.toml index 546deb1283c57..9e51b8ce6b5ea 100644 --- a/client/rpc-api/Cargo.toml +++ b/client/rpc-api/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = 
"2.0.0" } derive_more = "0.99.2" futures = { version = "0.3.1", features = ["compat"] } jsonrpc-core = "15.1.0" diff --git a/client/rpc/Cargo.toml b/client/rpc/Cargo.toml index bab436c93a7f2..54f0aa78e5c8d 100644 --- a/client/rpc/Cargo.toml +++ b/client/rpc/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sc-rpc-api = { version = "0.8.0", path = "../rpc-api" } sc-client-api = { version = "2.0.0", path = "../api" } sp-api = { version = "2.0.0", path = "../../primitives/api" } -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } futures = { version = "0.3.1", features = ["compat"] } jsonrpc-pubsub = "15.1.0" log = "0.4.8" diff --git a/client/service/Cargo.toml b/client/service/Cargo.toml index 95ce02e195f1e..04eaeb9e7649c 100644 --- a/client/service/Cargo.toml +++ b/client/service/Cargo.toml @@ -61,7 +61,7 @@ sc-light = { version = "2.0.0", path = "../light" } sc-client-api = { version = "2.0.0", path = "../api" } sp-api = { version = "2.0.0", path = "../../primitives/api" } sc-client-db = { version = "0.8.0", default-features = false, path = "../db" } -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sc-executor = { version = "0.8.0", path = "../executor" } sc-transaction-pool = { version = "2.0.0", path = "../transaction-pool" } sp-transaction-pool = { version = "2.0.0", path = "../../primitives/transaction-pool" } diff --git a/client/state-db/Cargo.toml b/client/state-db/Cargo.toml index 26939b769b8a7..89ac72f3507a0 100644 --- a/client/state-db/Cargo.toml +++ b/client/state-db/Cargo.toml @@ -18,6 +18,6 @@ parking_lot = "0.11.1" log = "0.4.11" sc-client-api = { version = "2.0.0", path = "../api" } sp-core = { version = "2.0.0", path = "../../primitives/core" } -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = 
"2.0.0", features = ["derive"] } parity-util-mem = { version = "0.8.0", default-features = false, features = ["primitive-types"] } parity-util-mem-derive = "0.1.0" diff --git a/client/transaction-pool/Cargo.toml b/client/transaction-pool/Cargo.toml index e68e39f5542dd..d79ea3867d090 100644 --- a/client/transaction-pool/Cargo.toml +++ b/client/transaction-pool/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } thiserror = "1.0.21" futures = { version = "0.3.1", features = ["compat"] } futures-diagnose = "1.0" diff --git a/client/transaction-pool/graph/Cargo.toml b/client/transaction-pool/graph/Cargo.toml index f6143f8837bfd..ab261e68d0f87 100644 --- a/client/transaction-pool/graph/Cargo.toml +++ b/client/transaction-pool/graph/Cargo.toml @@ -31,7 +31,7 @@ retain_mut = "0.1.2" [dev-dependencies] assert_matches = "1.3.0" -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } substrate-test-runtime = { version = "2.0.0", path = "../../../test-utils/runtime" } criterion = "0.3" diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index fe7b30eaace87..fe84145ba57ca 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. 
sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index 55d8de86582ac..0f166c9be791a 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index cc8e678fb559b..467f684af594a 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "2.0.0", default-features = false, path = "../../primitives/application-crypto" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-inherents = { version = "2.0.0", default-features = false, path = "../../primitives/inherents" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index 3538d8a5f81ca..5b83de19a515d 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-authority-discovery = { version = "2.0.0", 
default-features = false, path = "../../primitives/authority-discovery" } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../../primitives/application-crypto" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } pallet-session = { version = "2.0.0", features = ["historical" ], path = "../session", default-features = false } diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index d957fb9094049..0daf06590608f 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-inherents = { version = "2.0.0", default-features = false, path = "../../primitives/inherents" } sp-authorship = { version = "2.0.0", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index 13ac2e4034c9f..787835e33fe04 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "2.0.0", default-features = false, 
path = "../benchmarking", optional = true } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 4f4f8af8ef7a6..4873dbaf86e9a 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index e045f259be77e..960c7d731f0b9 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] linregress = { version = "0.4.0", optional = true } paste = "0.1" -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } sp-api = { version = "2.0.0", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "2.0.0", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "2.0.0", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index 0e37e3b9d4a57..83a47087db49d 100644 --- 
a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "2.0.0", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index 47f8414ef4bb6..400321d7c70f4 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index 710258037e7aa..d9e4f0d0e7edd 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -16,7 +16,7 @@ publish = false targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } frame-benchmarking = { 
version = "2.0.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index ff5f38637765d..8ef6022db9f07 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] # This crate should not rely on any of the frame primitives. bitflags = "1.0" -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../../primitives/runtime" } diff --git a/frame/contracts/rpc/Cargo.toml b/frame/contracts/rpc/Cargo.toml index 39c3b373c8cf8..c714f0002a829 100644 --- a/frame/contracts/rpc/Cargo.toml +++ b/frame/contracts/rpc/Cargo.toml @@ -14,7 +14,7 @@ publish = false targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } jsonrpc-core = "15.1.0" jsonrpc-core-client = "15.1.0" jsonrpc-derive = "15.1.0" diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index fe1cb91b84535..7d7c7bd4f5ed2 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-api = { version = "2.0.0", default-features = false, path = "../../../../primitives/api" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features 
= false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../../../primitives/runtime" } pallet-contracts-primitives = { version = "2.0.0", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index a633829225459..6a67b9545185c 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index 2571dff7c8904..2103196ce5587 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "2.0.0", default-features = false, 
path = "../../primitives/npos-elections" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 90e69ea212754..becb519be0a9d 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index be3c03e4c4541..db52d4760670b 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } serde = { version = "1.0.101", optional = true } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index ee816d963be98..359a295b1687b 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -12,7 +12,7 @@ description = "FRAME example pallet using runtime worker threads" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { 
package = "parity-scale-codec", version = "2.0.0", default-features = false } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index e9de6d6f020b1..bbb4cc3673954 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index 3f9820b5f3f57..6ee378b222ca3 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } serde = { version = "1.0.101", optional = true } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index c6a76de23e454..8e5a68af1e35d 100644 
--- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "2.0.0", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 124ac4f006448..982df0a0e5ed3 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index 95948c86de490..fc84a8d8cb1ba 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "2.0.0", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "2.0.0", default-features = false, path = "../authorship" } -codec = { package = "parity-scale-codec", version = "1.3.6", 
default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index afe315cfaa6b3..cc5bc67c35dcf 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-keyring = { version = "2.0.0", optional = true, path = "../../primitives/keyring" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index b223625c87a02..49ae53ff1dfd3 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "2.0.0", default-features = false, path = "../support" } diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 
f1ce20df17ed3..ba46b555afac1 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } frame-support = { version = "2.0.0", default-features = false, path = "../support" } diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index 096333680c6ad..832ce5aee92df 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -12,7 +12,7 @@ description = "FRAME Merkle Mountain Range pallet." targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } frame-benchmarking = { version = "2.0.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml index c809b3d1fcbd4..3965e581b3784 100644 --- a/frame/metadata/Cargo.toml +++ b/frame/metadata/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional 
= true, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index 70412fa6de0ad..33289f98ec141 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 3e1ddf897d34c..aaba763e4d128 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index d78ffd13fd579..b0a7eefc6c648 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] 
[dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index 2860e3ef8ee2f..17df3d0a2b4af 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "2.0.0", default-features = false, path = "../balances" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index 80492288d74bf..0199d23d38583 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } frame-benchmarking = { version = "2.0.0", default-features = false, path = "../../benchmarking" } frame-support = { version = "2.0.0", default-features = false, path = 
"../../support" } frame-system = { version = "2.0.0", default-features = false, path = "../../system" } diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index da3d50ab22345..fff6aab6abfdd 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index 0f6b48ff0757a..8eccd1c7d6a69 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index a0d4c0f14df5d..c333e6ea99570 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = 
"parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index 7d21b125e6e65..90c3799d1cd3b 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -11,7 +11,7 @@ readme = "README.md" [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 6c9bceb32e007..33588230adda1 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index 3a9f4609a2e25..8a13f905f0dc0 100644 --- 
a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index fc3099e1b95cb..5404cea88baa5 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -25,7 +25,7 @@ rand = { version = "0.7.2", default-features = false } [dev-dependencies] serde = { version = "1.0.101" } -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } sp-core = { version = "2.0.0", path = "../../../primitives/core" } pallet-staking-reward-curve = { version = "2.0.0", path = "../../staking/reward-curve" } sp-io ={ version = "2.0.0", path = "../../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index fce6ebe51bb30..9f49b29bf3d55 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-runtime = { version = "2.0.0", default-features = false, path = 
"../../primitives/runtime" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } frame-support = { version = "2.0.0", default-features = false, path = "../support" } diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index 7c2fc21fde54e..2cd25daa8094e 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-npos-elections = { version = "2.0.0", default-features = false, path = "../../primitives/npos-elections" } sp-io ={ version = "2.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/staking/fuzzer/Cargo.toml b/frame/staking/fuzzer/Cargo.toml index db65e347d8e2a..920f53c86939a 100644 --- a/frame/staking/fuzzer/Cargo.toml +++ b/frame/staking/fuzzer/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] honggfuzz = "0.5" -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } pallet-staking = { version = "2.0.0", path = "..", features = ["runtime-benchmarks"] } pallet-staking-reward-curve = { version = "2.0.0", path = "../reward-curve" } pallet-session = { version = "2.0.0", path = "../../session" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index a566cd2a9f061..cae9615cdabb4 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] 
serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 1c22e0fe6fd55..6f474babff9b6 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] log = "0.4" serde = { version = "1.0.101", optional = true, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } frame-metadata = { version = "12.0.0", default-features = false, path = "../metadata" } frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-scale-info-decode-feature", default-features = false, features = ["v13"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index ef66bd1902155..a34ba3e45ef9f 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", default-features = false, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { 
package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-io = { version = "2.0.0", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.8.0", optional = true, path = "../../../primitives/state-machine" } frame-support = { version = "2.0.0", default-features = false, path = "../" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 631c1825216f8..198eecfdb4fe8 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/system/benchmarking/Cargo.toml b/frame/system/benchmarking/Cargo.toml index e164a0d62e0fb..0569ba1f84e39 100644 --- a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } sp-std = { version = "2.0.0", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../../primitives/runtime" } frame-benchmarking = { version = "2.0.0", default-features = false, path = 
"../../benchmarking" } diff --git a/frame/system/rpc/runtime-api/Cargo.toml b/frame/system/rpc/runtime-api/Cargo.toml index 4820df10fe16d..77421fd1fa3ca 100644 --- a/frame/system/rpc/runtime-api/Cargo.toml +++ b/frame/system/rpc/runtime-api/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-api = { version = "2.0.0", default-features = false, path = "../../../../primitives/api" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } [features] default = ["std"] diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 4100e3636bf1d..e25e2c9b2d598 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io", optional = true } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index 0ce81a6d5d1b8..dde071d585f53 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = 
false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "2.0.0", default-features = false, path = "../support" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 2d76664f9dc5d..f8d6547798fbc 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/transaction-payment/rpc/Cargo.toml b/frame/transaction-payment/rpc/Cargo.toml index 410827d0efb5b..167fe56ff049d 100644 --- a/frame/transaction-payment/rpc/Cargo.toml +++ b/frame/transaction-payment/rpc/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } jsonrpc-core = "15.1.0" jsonrpc-core-client = "15.1.0" jsonrpc-derive = "15.1.0" diff --git a/frame/transaction-payment/rpc/runtime-api/Cargo.toml b/frame/transaction-payment/rpc/runtime-api/Cargo.toml index 64c082b420c93..1a1980a91b31c 100644 --- a/frame/transaction-payment/rpc/runtime-api/Cargo.toml +++ b/frame/transaction-payment/rpc/runtime-api/Cargo.toml @@ -14,7 +14,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] sp-api = { version = "2.0.0", default-features = false, path = "../../../../primitives/api" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-runtime = { version = "2.0.0", default-features = false, path = "../../../../primitives/runtime" } pallet-transaction-payment = { version = "2.0.0", default-features = false, path = "../../../transaction-payment" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index 07b22002ee384..dd3bd9bb10907 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "2.0.0", default-features = false, path = "../support" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index ea8dc1ac015c7..5b800ab6495f6 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = 
false, path = "../system" } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index a151219501559..af48fdace81a0 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/api/Cargo.toml b/primitives/api/Cargo.toml index c1effc523fcb6..1a66d460023d3 100644 --- a/primitives/api/Cargo.toml +++ b/primitives/api/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } sp-api-proc-macro = { version = "2.0.0", path = "proc-macro" } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-std = { version = "2.0.0", default-features = false, path = "../std" } diff --git a/primitives/api/test/Cargo.toml b/primitives/api/test/Cargo.toml index 310840d1ca9c7..458a805c7552f 100644 --- a/primitives/api/test/Cargo.toml +++ b/primitives/api/test/Cargo.toml @@ -19,7 +19,7 @@ sp-runtime = { version = "2.0.0", path = "../../runtime" } sp-blockchain = { version = "2.0.0", path = "../../blockchain" } sp-consensus = { version = "0.8.0", path = "../../consensus/common" } sc-block-builder = { version = "0.8.0", path = "../../../client/block-builder" } 
-codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sp-state-machine = { version = "0.8.0", path = "../../state-machine" } trybuild = "1.0.38" rustversion = "1.0.0" diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index 8791ce4174bbf..9709ed9fc18ae 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "2.0.0", default-features = false, path = "../core" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } sp-io = { version = "2.0.0", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 59c78e7d3f4ae..ad7aa957ff055 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" num-traits = { version = "0.2.8", default-features = false } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } diff --git a/primitives/authorship/Cargo.toml b/primitives/authorship/Cargo.toml index a63f75467ebf3..e37994b73a9f0 100644 --- a/primitives/authorship/Cargo.toml +++ 
b/primitives/authorship/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-inherents = { version = "2.0.0", default-features = false, path = "../inherents" } sp-runtime = { version = "2.0.0", default-features = false, path = "../runtime" } sp-std = { version = "2.0.0", default-features = false, path = "../std" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } [features] default = [ "std" ] diff --git a/primitives/block-builder/Cargo.toml b/primitives/block-builder/Cargo.toml index 5c6dad5ab7676..87246f4d9e67e 100644 --- a/primitives/block-builder/Cargo.toml +++ b/primitives/block-builder/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-runtime = { version = "2.0.0", default-features = false, path = "../runtime" } sp-api = { version = "2.0.0", default-features = false, path = "../api" } sp-std = { version = "2.0.0", default-features = false, path = "../std" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } sp-inherents = { version = "2.0.0", default-features = false, path = "../inherents" } [features] diff --git a/primitives/blockchain/Cargo.toml b/primitives/blockchain/Cargo.toml index 7d2d64de85e7a..fea84adc819c4 100644 --- a/primitives/blockchain/Cargo.toml +++ b/primitives/blockchain/Cargo.toml @@ -19,7 +19,7 @@ lru = "0.6.1" parking_lot = "0.11.1" thiserror = "1.0.21" futures = "0.3.9" -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-consensus = { version = "0.8.0", path = "../consensus/common" } sp-runtime = { version = "2.0.0", path = 
"../runtime" } sp-state-machine = { version = "0.8.0", path = "../state-machine" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index eed368e5c1d1c..759605881a3e6 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "2.0.0", default-features = false, path = "../../application-crypto" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } sp-std = { version = "2.0.0", default-features = false, path = "../../std" } sp-api = { version = "2.0.0", default-features = false, path = "../../api" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index 65321d324a695..83f62d8643ca7 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "2.0.0", default-features = false, path = "../../application-crypto" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } merlin = { version = "2.0", default-features = false } sp-std = { version = "2.0.0", default-features = false, path = "../../std" } sp-api = { version = "2.0.0", default-features = false, path = "../../api" } diff --git a/primitives/consensus/common/Cargo.toml b/primitives/consensus/common/Cargo.toml index a9d2d92998a69..aa9c14ae94c90 100644 --- a/primitives/consensus/common/Cargo.toml +++ b/primitives/consensus/common/Cargo.toml @@ -29,7 +29,7 @@ sp-runtime = { version = "2.0.0", path = "../../runtime" } sp-utils = { 
version = "2.0.0", path = "../../utils" } sp-trie = { version = "2.0.0", path = "../../trie" } sp-api = { version = "2.0.0", path = "../../api" } -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } parking_lot = "0.11.1" serde = { version = "1.0", features = ["derive"] } prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../../utils/prometheus", version = "0.8.0"} diff --git a/primitives/consensus/pow/Cargo.toml b/primitives/consensus/pow/Cargo.toml index 15b37d6690ba1..e4a7963131b27 100644 --- a/primitives/consensus/pow/Cargo.toml +++ b/primitives/consensus/pow/Cargo.toml @@ -17,7 +17,7 @@ sp-api = { version = "2.0.0", default-features = false, path = "../../api" } sp-std = { version = "2.0.0", default-features = false, path = "../../std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../runtime" } sp-core = { version = "2.0.0", default-features = false, path = "../../core" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } [features] default = ["std"] diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 11f81628b38a8..38ab8d55c61d3 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-runtime = { version = "2.0.0", default-features = false, path = "../../runtime" } [features] diff --git a/primitives/core/Cargo.toml 
b/primitives/core/Cargo.toml index 9c7842380f792..99010ea07f0e7 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "2.0.0", default-features = false, path = "../std" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } diff --git a/primitives/externalities/Cargo.toml b/primitives/externalities/Cargo.toml index f1990e89d757b..6586d91808f7f 100644 --- a/primitives/externalities/Cargo.toml +++ b/primitives/externalities/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-storage = { version = "2.0.0", path = "../storage", default-features = false } sp-std = { version = "2.0.0", path = "../std", default-features = false } environmental = { version = "1.1.2", default-features = false } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } [features] default = ["std"] diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index f96196bdb190c..78202b407360e 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, 
features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.12.3", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } diff --git a/primitives/inherents/Cargo.toml b/primitives/inherents/Cargo.toml index f73bd97bf4b0e..e1577a6a1b672 100644 --- a/primitives/inherents/Cargo.toml +++ b/primitives/inherents/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] parking_lot = { version = "0.11.1", optional = true } sp-std = { version = "2.0.0", default-features = false, path = "../std" } sp-core = { version = "2.0.0", default-features = false, path = "../core" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } thiserror = { version = "1.0.21", optional = true } [features] diff --git a/primitives/io/Cargo.toml b/primitives/io/Cargo.toml index 01ea58e87e3e2..1f509f7f9f214 100644 --- a/primitives/io/Cargo.toml +++ b/primitives/io/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } hash-db = { version = "0.15.2", default-features = false } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-keystore = { version = "0.8.0", default-features = false, optional = true, path = "../keystore" } diff --git a/primitives/keystore/Cargo.toml b/primitives/keystore/Cargo.toml index 7fb6b4b93fc21..186b569a96b8a 100644 --- a/primitives/keystore/Cargo.toml +++ b/primitives/keystore/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] async-trait = "0.1.30" derive_more = "0.99.2" -codec = { package = 
"parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } futures = { version = "0.3.1" } schnorrkel = { version = "0.9.1", features = ["preaudit_deprecated", "u64_backend"], default-features = false } merlin = { version = "2.0", default-features = false } diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 82ce6b005a954..f9b0c260676bc 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } sp-npos-elections-compact = { version = "2.0.0", path = "./compact" } diff --git a/primitives/npos-elections/fuzzer/Cargo.toml b/primitives/npos-elections/fuzzer/Cargo.toml index a1fa4a2f4ca40..690896f0152eb 100644 --- a/primitives/npos-elections/fuzzer/Cargo.toml +++ b/primitives/npos-elections/fuzzer/Cargo.toml @@ -19,7 +19,7 @@ sp-std = { version = "2.0.0", path = "../../std" } sp-runtime = { version = "2.0.0", path = "../../runtime" } honggfuzz = "0.5" rand = { version = "0.7.3", features = ["std", "small_rng"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } [[bin]] name = "reduce" diff --git a/primitives/runtime-interface/Cargo.toml b/primitives/runtime-interface/Cargo.toml index bbf02578848e4..4cdc0503e5a66 100644 --- 
a/primitives/runtime-interface/Cargo.toml +++ b/primitives/runtime-interface/Cargo.toml @@ -19,7 +19,7 @@ sp-std = { version = "2.0.0", default-features = false, path = "../std" } sp-tracing = { version = "2.0.0", default-features = false, path = "../tracing" } sp-runtime-interface-proc-macro = { version = "2.0.0", path = "proc-macro" } sp-externalities = { version = "0.8.0", optional = true, path = "../externalities" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } static_assertions = "1.0.0" primitive-types = { version = "0.8.0", default-features = false } sp-storage = { version = "2.0.0", default-features = false, path = "../storage" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 270d96ddc20c5..66d24436db490 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "2.0.0", default-features = false, path = "../application-crypto" } diff --git a/primitives/sandbox/Cargo.toml b/primitives/sandbox/Cargo.toml index 5ec8c203b54d7..44b52c3881432 100755 --- a/primitives/sandbox/Cargo.toml +++ b/primitives/sandbox/Cargo.toml @@ -18,7 +18,7 @@ sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-std = { version = "2.0.0", 
default-features = false, path = "../std" } sp-io = { version = "2.0.0", default-features = false, path = "../io" } sp-wasm-interface = { version = "2.0.0", default-features = false, path = "../wasm-interface" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } [dev-dependencies] wat = "1.0" diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index d47a8062ef1a9..5b83e88c44fae 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-api = { version = "2.0.0", default-features = false, path = "../api" } sp-core = { version = "2.0.0", default-features = false, path = "../core" } sp-std = { version = "2.0.0", default-features = false, path = "../std" } diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index fbe4b30f00b8a..f8203c130d470 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-runtime = { version = "2.0.0", default-features = false, path = "../runtime" } sp-std = { version = "2.0.0", default-features = false, path = "../std" } diff --git a/primitives/state-machine/Cargo.toml b/primitives/state-machine/Cargo.toml index c594c27fc7a24..0c1a2a558f25b 100644 --- a/primitives/state-machine/Cargo.toml +++ 
b/primitives/state-machine/Cargo.toml @@ -23,7 +23,7 @@ trie-root = { version = "0.16.0", default-features = false } sp-trie = { version = "2.0.0", path = "../trie", default-features = false } sp-core = { version = "2.0.0", path = "../core", default-features = false } sp-panic-handler = { version = "2.0.0", path = "../panic-handler", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } num-traits = { version = "0.2.8", default-features = false } rand = { version = "0.7.2", optional = true } sp-externalities = { version = "0.8.0", path = "../externalities", default-features = false } diff --git a/primitives/storage/Cargo.toml b/primitives/storage/Cargo.toml index b025b5a106715..88580efb164ef 100644 --- a/primitives/storage/Cargo.toml +++ b/primitives/storage/Cargo.toml @@ -19,7 +19,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } impl-serde = { version = "0.3.1", optional = true } ref-cast = "1.0.0" sp-debug-derive = { version = "2.0.0", path = "../debug-derive" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } [features] default = [ "std" ] diff --git a/primitives/test-primitives/Cargo.toml b/primitives/test-primitives/Cargo.toml index 1bfb793610b64..546e927ad948e 100644 --- a/primitives/test-primitives/Cargo.toml +++ b/primitives/test-primitives/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "2.0.0", default-features = false, path = "../application-crypto" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = 
false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../core" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-runtime = { version = "2.0.0", default-features = false, path = "../runtime" } diff --git a/primitives/timestamp/Cargo.toml b/primitives/timestamp/Cargo.toml index de1271b0dd027..dc9f1fae92563 100644 --- a/primitives/timestamp/Cargo.toml +++ b/primitives/timestamp/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-api = { version = "2.0.0", default-features = false, path = "../api" } sp-std = { version = "2.0.0", default-features = false, path = "../std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../runtime" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-inherents = { version = "2.0.0", default-features = false, path = "../inherents" } impl-trait-for-tuples = "0.2.0" wasm-timer = { version = "0.2", optional = true } diff --git a/primitives/transaction-pool/Cargo.toml b/primitives/transaction-pool/Cargo.toml index 675987e3a127c..6454ff509fdad 100644 --- a/primitives/transaction-pool/Cargo.toml +++ b/primitives/transaction-pool/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] thiserror = { version = "1.0.21", optional = true } -codec = { package = "parity-scale-codec", version = "1.3.6", optional = true } +codec = { package = "parity-scale-codec", version = "2.0.0", optional = true } derive_more = { version = "0.99.11", optional = true } futures = { version = "0.3.1", optional = true } log = { version = "0.4.8", optional = true } diff --git a/primitives/trie/Cargo.toml b/primitives/trie/Cargo.toml index 4392f01d222aa..c293347bd8d27 100644 --- a/primitives/trie/Cargo.toml +++ b/primitives/trie/Cargo.toml @@ -18,7 +18,7 @@ name = 
"bench" harness = false [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } sp-std = { version = "2.0.0", default-features = false, path = "../std" } hash-db = { version = "0.15.2", default-features = false } trie-db = { version = "0.22.2", default-features = false } diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index 113639434d5bc..0c38e8a741842 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] impl-serde = { version = "0.3.1", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../runtime" } diff --git a/primitives/wasm-interface/Cargo.toml b/primitives/wasm-interface/Cargo.toml index 32c283a8527fa..21d2fc4f214a6 100644 --- a/primitives/wasm-interface/Cargo.toml +++ b/primitives/wasm-interface/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] wasmi = { version = "0.6.2", optional = true } impl-trait-for-tuples = "0.2.0" sp-std = { version = "2.0.0", path = "../std", default-features = false } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } [features] default = [ "std" ] diff --git a/test-utils/client/Cargo.toml b/test-utils/client/Cargo.toml index fad66c5a6708d..0b5fba78c1143 100644 --- 
a/test-utils/client/Cargo.toml +++ b/test-utils/client/Cargo.toml @@ -12,7 +12,7 @@ publish = false targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } futures = "0.3.9" futures01 = { package = "futures", version = "0.1.29" } hash-db = "0.15.2" diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index c99ec9a05e7c4..0b5b6d6ab5d06 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -17,7 +17,7 @@ sp-application-crypto = { version = "2.0.0", default-features = false, path = ". sp-consensus-aura = { version = "0.8.0", default-features = false, path = "../../primitives/consensus/aura" } sp-consensus-babe = { version = "0.8.0", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "2.0.0", default-features = false, path = "../../primitives/block-builder" } -codec = { package = "parity-scale-codec", version = "1.3.6", default-features = false, features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } frame-executive = { version = "2.0.0", default-features = false, path = "../../frame/executive" } sp-inherents = { version = "2.0.0", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "2.0.0", optional = true, path = "../../primitives/keyring" } diff --git a/test-utils/runtime/client/Cargo.toml b/test-utils/runtime/client/Cargo.toml index 2540e29c8b0e2..9a6a4fb60b073 100644 --- a/test-utils/runtime/client/Cargo.toml +++ b/test-utils/runtime/client/Cargo.toml @@ -21,7 +21,7 @@ substrate-test-runtime = { version = "2.0.0", path = "../../runtime" } sp-runtime = { version = "2.0.0", path = "../../../primitives/runtime" } sp-api = { version = "2.0.0", path = "../../../primitives/api" } sp-blockchain = { version = "2.0.0", path = 
"../../../primitives/blockchain" } -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sc-client-api = { version = "2.0.0", path = "../../../client/api" } sc-consensus = { version = "0.8.0", path = "../../../client/consensus/common" } sc-service = { version = "0.8.0", default-features = false, path = "../../../client/service" } diff --git a/test-utils/runtime/transaction-pool/Cargo.toml b/test-utils/runtime/transaction-pool/Cargo.toml index 7fbea1e3c0edb..1e254a4c24502 100644 --- a/test-utils/runtime/transaction-pool/Cargo.toml +++ b/test-utils/runtime/transaction-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] substrate-test-runtime-client = { version = "2.0.0", path = "../client" } parking_lot = "0.11.1" -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } sp-blockchain = { version = "2.0.0", path = "../../../primitives/blockchain" } sp-runtime = { version = "2.0.0", path = "../../../primitives/runtime" } sp-transaction-pool = { version = "2.0.0", path = "../../../primitives/transaction-pool" } diff --git a/utils/fork-tree/Cargo.toml b/utils/fork-tree/Cargo.toml index 292d1a83b7e58..73dc3aa1e6bd2 100644 --- a/utils/fork-tree/Cargo.toml +++ b/utils/fork-tree/Cargo.toml @@ -14,4 +14,4 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -codec = { package = "parity-scale-codec", version = "1.3.6", features = ["derive"] } +codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } diff --git a/utils/frame/rpc/support/Cargo.toml b/utils/frame/rpc/support/Cargo.toml index b9ee76b846e0d..a9c55132e2408 100644 --- a/utils/frame/rpc/support/Cargo.toml +++ b/utils/frame/rpc/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] futures = { version = "0.3.0", features = ["compat"] } jsonrpc-client-transports = { version 
= "15.1.0", default-features = false, features = ["http"] } jsonrpc-core = "15.1.0" -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } serde = "1" frame-support = { version = "2.0.0", path = "../../../../frame/support" } sp-storage = { version = "2.0.0", path = "../../../../primitives/storage" } diff --git a/utils/frame/rpc/system/Cargo.toml b/utils/frame/rpc/system/Cargo.toml index 03016462cbeac..5a75d01c4d479 100644 --- a/utils/frame/rpc/system/Cargo.toml +++ b/utils/frame/rpc/system/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sc-client-api = { version = "2.0.0", path = "../../../../client/api" } -codec = { package = "parity-scale-codec", version = "1.3.6" } +codec = { package = "parity-scale-codec", version = "2.0.0" } futures = { version = "0.3.4", features = ["compat"] } jsonrpc-core = "15.1.0" jsonrpc-core-client = "15.1.0" From f2ebf65972ba3be80e4664453e05012f883c948a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 5 Feb 2021 12:01:43 +0000 Subject: [PATCH 068/503] Fix type checking overflow with additional TypeInfo bounds --- primitives/runtime/src/traits.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 00fae6fe6abd6..370bbdc892535 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -232,9 +232,9 @@ impl Lookup for IdentityLookup { pub struct AccountIdLookup(PhantomData<(AccountId, AccountIndex)>); impl StaticLookup for AccountIdLookup where - AccountId: Codec + Clone + PartialEq + Debug + scale_info::TypeInfo + 'static, - AccountIndex: Codec + Clone + PartialEq + Debug + scale_info::TypeInfo + 'static, - crate::MultiAddress: Codec, + AccountId: Codec + Clone + PartialEq + Debug, + AccountIndex: Codec + Clone + PartialEq + Debug, + crate::MultiAddress: Codec + scale_info::TypeInfo + 'static, { type Source 
= crate::MultiAddress; type Target = AccountId; From 3c7f24ac36a744adc1faae83df36ef6bf077f1ef Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 8 Feb 2021 09:45:42 +0000 Subject: [PATCH 069/503] Use latest scale-info 0.6 release --- Cargo.lock | 50 ++++++++++++++++++++-------- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- 12 files changed, 48 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7b236c74b9b6d..1ba52225075ba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1665,7 +1665,7 @@ source = "git+https://github.com/paritytech/frame-metadata?branch=aj-scale-info- dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", - "scale-info", + "scale-info 0.5.0", ] [[package]] @@ -1694,7 +1694,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", "pretty_assertions", - "scale-info", + "scale-info 0.6.0", "serde", "smallvec 1.5.0", "sp-api", @@ -1769,7 +1769,7 @@ dependencies = [ "frame-support", "impl-trait-for-tuples 0.2.0", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0", "serde", "sp-core", "sp-externalities", @@ -4070,7 +4070,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0", "serde", "sp-api", "sp-authority-discovery", @@ -4149,7 +4149,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0", "serde", "sp-api", "sp-block-builder", @@ -4493,7 +4493,7 @@ dependencies = [ "frame-system", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0", "serde", "sp-core", "sp-io", @@ -4680,7 +4680,7 
@@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0", "serde", "sp-core", "sp-io", @@ -5184,7 +5184,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples 0.2.0", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0", "serde", "sp-core", "sp-inherents", @@ -5220,7 +5220,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0", "serde", "serde_json", "smallvec 1.5.0", @@ -7710,7 +7710,19 @@ dependencies = [ "cfg-if 1.0.0", "derive_more", "parity-scale-codec", - "scale-info-derive", + "scale-info-derive 0.3.0", +] + +[[package]] +name = "scale-info" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdd819984fe6ce661ebed1f451c0848d301a05ff56b8a4b0ae420de7dca046ea" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive 0.4.0", ] [[package]] @@ -7724,6 +7736,18 @@ dependencies = [ "syn", ] +[[package]] +name = "scale-info-derive" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e321c3d4ef7d3a90b0b4eda276d4215c6cbf3d59f66a9934e7866a48dcaa29b3" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" @@ -8185,7 +8209,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0", "serde", "serde_json", "sp-debug-derive", @@ -8379,7 +8403,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info", + "scale-info 0.6.0", "schnorrkel", "secrecy", "serde", @@ -8591,7 +8615,7 @@ dependencies = [ "parity-util-mem", "paste 0.1.18", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0", "serde", "serde_json", "sp-application-crypto", diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index 404427b7c6d3d..4da1e23dd87ca 100644 --- 
a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } pallet-aura = { version = "2.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "2.0.0", default-features = false, path = "../../../frame/balances" } frame-support = { version = "2.0.0", default-features = false, path = "../../../frame/support" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 74569a6ee4ca3..fa59ed29e8234 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = true } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 4873dbaf86e9a..af727c20f5b14 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = 
"https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "2.0.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 9aa28612beaa5..9d3d4066c4cef 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-support = { version = "2.0.0", default-features = false, path = "../support" } frame-system = { version = "2.0.0", default-features = false, path = "../system" } pallet-balances = { version = "2.0.0", default-features = false, path = "../balances" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 5ad14e323a679..663c96944ff03 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -18,7 +18,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } frame-metadata = { version = "12.0.0", default-features = false, path = "../metadata" } frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = 
"aj-scale-info-decode-feature", default-features = false, features = ["v13"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 198eecfdb4fe8..7a0aa87e9e7e3 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index e25e2c9b2d598..906c7d6b75a3c 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = 
"https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "2.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index f8d6547798fbc..8ae85b387dfdf 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "2.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "2.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index e2276e96ec25c..8c812e814aebf 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" num-traits = { version = "0.2.8", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, 
features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "2.0.0", default-features = false, path = "../std" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-debug-derive = { version = "2.0.0", default-features = false, path = "../debug-derive" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 86e7a6e9e060d..8aef9bedbc6e4 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "2.0.0", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 57c6a04d9b914..a668f3fe9fea7 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", package = "scale-info", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-core = { version = "2.0.0", default-features = false, path = "../core" } 
sp-application-crypto = { version = "2.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "2.0.0", default-features = false, path = "../arithmetic" } From 03a54e2004f5afce02950c22fa6f743044158279 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 17 Feb 2021 16:26:55 +0000 Subject: [PATCH 070/503] Add HasCompact type constraint for CheckNonce --- frame/system/src/extensions/check_nonce.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/system/src/extensions/check_nonce.rs b/frame/system/src/extensions/check_nonce.rs index be3822a23dc93..2ea342d29529e 100644 --- a/frame/system/src/extensions/check_nonce.rs +++ b/frame/system/src/extensions/check_nonce.rs @@ -55,7 +55,8 @@ impl sp_std::fmt::Debug for CheckNonce { } impl SignedExtension for CheckNonce where - T::Call: Dispatchable + T::Call: Dispatchable, + <::Index as codec::HasCompact>::Type: TypeInfo, // todo: [AJ] this is a result of the derived compact TypeInfo impl, can we get rid of it? 
{ type AccountId = T::AccountId; type Call = T::Call; From 21379a00e2db5eee2015ea2dc7f4b1ace0b4c9ca Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 17 Feb 2021 16:31:10 +0000 Subject: [PATCH 071/503] Use frame-metadata main branch --- Cargo.lock | 50 +++++++++++----------------------------- frame/support/Cargo.toml | 2 +- 2 files changed, 15 insertions(+), 37 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1449e55594a4c..1628982054519 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1734,11 +1734,11 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-scale-info-decode-feature#0ec6a4c32b96ecc6f9137d92b51c48dac78a9ec1" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#be8092d7bfb867026836c373d9fc3fdd255cbeb8" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", - "scale-info 0.5.0", + "scale-info", ] [[package]] @@ -1767,7 +1767,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "pretty_assertions", - "scale-info 0.6.0", + "scale-info", "serde", "smallvec 1.6.1", "sp-api", @@ -1842,7 +1842,7 @@ dependencies = [ "frame-support", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info 0.6.0", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -4129,7 +4129,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info 0.6.0", + "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4208,7 +4208,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info 0.6.0", + "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4544,7 +4544,7 @@ dependencies = [ "frame-system", "pallet-transaction-payment", "parity-scale-codec", - "scale-info 0.6.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -4731,7 +4731,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0", + 
"scale-info", "serde", "sp-core", "sp-io", @@ -5238,7 +5238,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info 0.6.0", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5274,7 +5274,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -7781,17 +7781,6 @@ dependencies = [ "wasm-timer", ] -[[package]] -name = "scale-info" -version = "0.5.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#625733707093d68ff9b799ca87f3d30ca5fc2afa" -dependencies = [ - "cfg-if 1.0.0", - "derive_more", - "parity-scale-codec", - "scale-info-derive 0.3.0", -] - [[package]] name = "scale-info" version = "0.6.0" @@ -7801,18 +7790,7 @@ dependencies = [ "cfg-if 1.0.0", "derive_more", "parity-scale-codec", - "scale-info-derive 0.4.0", -] - -[[package]] -name = "scale-info-derive" -version = "0.3.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#625733707093d68ff9b799ca87f3d30ca5fc2afa" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn", + "scale-info-derive", ] [[package]] @@ -8296,7 +8274,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info 0.6.0", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8490,7 +8468,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info 0.6.0", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -8702,7 +8680,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "rand 0.7.3", - "scale-info 0.6.0", + "scale-info", "serde", "serde_json", "sp-application-crypto", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 2cade43a7ce7f..9c3dbce1d413b 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -18,7 +18,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = 
"parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-metadata = { version = "13.0.0", default-features = false, path = "../metadata" } -frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-scale-info-decode-feature", default-features = false, features = ["v13"] } +frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v13"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } From ab0bbee12eadef24c458dcc11b1690b9bcac5447 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 17 Feb 2021 17:28:41 +0000 Subject: [PATCH 072/503] Add some more TypeInfo bounds --- frame/balances/src/lib.rs | 2 +- frame/timestamp/src/lib.rs | 2 +- frame/transaction-payment/src/lib.rs | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index ddaab519fa311..b15affabc0a88 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -192,7 +192,7 @@ pub mod pallet { pub trait Config: frame_system::Config { /// The balance of an account. type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + - MaybeSerializeDeserialize + Debug; + MaybeSerializeDeserialize + Debug + scale_info::TypeInfo; /// Handler for the unbalanced reduction when removing a dust account. 
type DustRemoval: OnUnbalanced>; diff --git a/frame/timestamp/src/lib.rs b/frame/timestamp/src/lib.rs index 86ca0c11a70c8..0c3f07c0a5457 100644 --- a/frame/timestamp/src/lib.rs +++ b/frame/timestamp/src/lib.rs @@ -125,7 +125,7 @@ pub mod pallet { pub trait Config: frame_system::Config { /// Type used for expressing timestamp. type Moment: Parameter + Default + AtLeast32Bit - + Scale + Copy; + + Scale + Copy + scale_info::TypeInfo + 'static; /// Something which can be notified when the timestamp is set. Set this to `()` if not needed. type OnTimestampSet: OnTimestampSet; diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index 9e93bf90470f5..60ce5488aa91b 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -539,6 +539,7 @@ impl sp_std::fmt::Debug for ChargeTransactionPayment { impl SignedExtension for ChargeTransactionPayment where BalanceOf: Send + Sync + From + FixedPointOperand + TypeInfo, T::Call: Dispatchable, + <<::OnChargeTransaction as payment::OnChargeTransaction>::Balance as codec::HasCompact>::Type: TypeInfo, // todo: [AJ] this is a result of the derived compact TypeInfo impl, can we get rid of it? 
{ const IDENTIFIER: &'static str = "ChargeTransactionPayment"; type AccountId = T::AccountId; From b3e325c9859156f5980ec2d5ca39455c1b6dc61d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 18 Feb 2021 11:14:32 +0000 Subject: [PATCH 073/503] Fully qualify vec usage for now in macro expansion --- frame/support/procedural/src/pallet/expand/event.rs | 8 ++++---- frame/support/src/metadata_vnext.rs | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 5d20d9930cf90..ac331306961e8 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -73,8 +73,8 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { quote::quote!( #frame_support::metadata::v13::EventMetadata { name: #name, - arguments: vec![ #( #args, )* ], - documentation: vec![ #( #docs, )* ], + arguments: #frame_support::scale_info::prelude::vec![ #( #args, )* ], + documentation: #frame_support::scale_info::prelude::vec![ #( #docs, )* ], }, ) }); @@ -159,8 +159,8 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { #[allow(dead_code)] #[doc(hidden)] - pub fn metadata_vnext() -> Vec<#frame_support::metadata::v13::EventMetadata> { - vec![ #( #metadata_vnext )* ] + pub fn metadata_vnext() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::v13::EventMetadata> { + #frame_support::scale_info::prelude::vec![ #( #metadata_vnext )* ] } } ) diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index 6da77a5977cdd..3c6758e5eafa2 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -24,7 +24,7 @@ macro_rules! 
impl_runtime_metadata_vnext { $( $rest:tt )* ) => { impl $runtime { - pub fn metadata_vnext() -> $crate::metadata::v13::RuntimeMetadataPrefixed { + pub fn metadata_vnext() -> $crate::metadata::RuntimeMetadataPrefixed { $crate::metadata::v13::RuntimeMetadataLastVersion::new( $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), $crate::metadata::v13::ExtrinsicMetadata { From 06ff16b9ea0a958b48e48a7f2e9d20ddccbb0289 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 18 Feb 2021 11:14:40 +0000 Subject: [PATCH 074/503] Add some more TypeInfo bounds --- Cargo.lock | 1 + frame/indices/Cargo.toml | 1 + frame/indices/src/lib.rs | 7 +++++-- 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1628982054519..1b3841ae34568 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4843,6 +4843,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index cde3cdeeecba5..d651957922998 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index c925d3a0533e0..e5502e29d2cc4 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -43,7 +43,7 @@ type BalanceOf = <::Currency as Currency<; @@ -295,7 +295,10 @@ impl Module { } } -impl StaticLookup 
for Module { +impl StaticLookup for Module +where + <::AccountIndex as codec::HasCompact>::Type: scale_info::TypeInfo +{ type Source = MultiAddress; type Target = T::AccountId; From 44f9397c347246033d959ba04f6c2a1239206ba5 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 18 Feb 2021 11:50:40 +0000 Subject: [PATCH 075/503] Add TypeInfo bounds to assets --- Cargo.lock | 1 + frame/assets/Cargo.toml | 1 + frame/assets/src/lib.rs | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 1b3841ae34568..e1a990164b59e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4429,6 +4429,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index 67fa0af3d63be..806c5f5c5c915 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/assets/src/lib.rs b/frame/assets/src/lib.rs index 7b04ea11bafed..94de4b3d72883 100644 --- a/frame/assets/src/lib.rs +++ b/frame/assets/src/lib.rs @@ -155,7 +155,7 @@ pub mod pallet { type Balance: Member + Parameter + AtLeast32BitUnsigned + Default + Copy; /// The arithmetic type of asset identifier. - type AssetId: Member + Parameter + Default + Copy + HasCompact; + type AssetId: Member + Parameter + Default + Copy + HasCompact + scale_info::TypeInfo; /// The currency mechanism. 
type Currency: ReservableCurrency; From b6edb33c819c0a416d824403d1c9674dbd1029e6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 18 Feb 2021 15:45:08 +0000 Subject: [PATCH 076/503] Add Assets::Balance: TypeInfo trait bound --- frame/assets/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/assets/src/lib.rs b/frame/assets/src/lib.rs index 94de4b3d72883..31a6e670a4d91 100644 --- a/frame/assets/src/lib.rs +++ b/frame/assets/src/lib.rs @@ -152,7 +152,7 @@ pub mod pallet { type Event: From> + IsType<::Event>; /// The units in which we record balances. - type Balance: Member + Parameter + AtLeast32BitUnsigned + Default + Copy; + type Balance: Member + Parameter + AtLeast32BitUnsigned + Default + Copy + scale_info::TypeInfo; /// The arithmetic type of asset identifier. type AssetId: Member + Parameter + Default + Copy + HasCompact + scale_info::TypeInfo; From fb5f4c68200e7054caf761e0f2c5fec5c6fc7a0c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 18 Feb 2021 15:45:45 +0000 Subject: [PATCH 077/503] Set index and empty constants for ModuleMetadata --- frame/support/src/metadata_vnext.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index 3c6758e5eafa2..32deb107bf732 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -64,7 +64,7 @@ macro_rules! __runtime_modules_to_metadata_vnext { $runtime; $( $metadata, )* $crate::metadata::v13::ModuleMetadata { name: stringify!($name), - // index: $index, + index: $index, // todo: [AJ] storage storage: None, // storage: $crate::__runtime_modules_to_metadata_calls_storage!( @@ -76,6 +76,8 @@ macro_rules! 
__runtime_modules_to_metadata_vnext { event: $crate::__runtime_modules_to_metadata_calls_event_vnext!( $mod, $module $( <$instance> )?, $runtime, $(with $kw)* ), + // todo: [AJ] constants + constants: None, // constants: $crate::metadata::DecodeDifferent::Encode( // $crate::metadata::FnEncode( // $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata From 78860be869a44b48c43300410df1256d86e0677a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 18 Feb 2021 16:46:48 +0000 Subject: [PATCH 078/503] Fix metadata_vnext return type --- frame/support/src/metadata_vnext.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index 32deb107bf732..6a9849d10f251 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -24,7 +24,7 @@ macro_rules! impl_runtime_metadata_vnext { $( $rest:tt )* ) => { impl $runtime { - pub fn metadata_vnext() -> $crate::metadata::RuntimeMetadataPrefixed { + pub fn metadata_vnext() -> $crate::metadata_vnext::frame_metadata2::RuntimeMetadataPrefixed { $crate::metadata::v13::RuntimeMetadataLastVersion::new( $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), $crate::metadata::v13::ExtrinsicMetadata { From b6ecbc35916e713f8577f9d97c19e302759a2eb8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 19 Feb 2021 11:43:15 +0000 Subject: [PATCH 079/503] Add temporary manual TypeInfo impl for UncheckedExtrinsic. 
Required until all pallets converted to frame v2 so `Call` can derive TypeInfo --- .../src/generic/unchecked_extrinsic.rs | 20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/primitives/runtime/src/generic/unchecked_extrinsic.rs b/primitives/runtime/src/generic/unchecked_extrinsic.rs index be385c7d25869..ced866918106b 100644 --- a/primitives/runtime/src/generic/unchecked_extrinsic.rs +++ b/primitives/runtime/src/generic/unchecked_extrinsic.rs @@ -35,7 +35,7 @@ const EXTRINSIC_VERSION: u8 = 4; /// A extrinsic right from the external world. This is unchecked and so /// can contain a signature. -#[derive(PartialEq, Eq, Clone, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone)] pub struct UncheckedExtrinsic where Extra: SignedExtension @@ -48,6 +48,24 @@ where pub function: Call, } +// todo: [AJ] remove this manual impl once the top level runtime Call implements TypeInfo... +// ...which it should be able to once all pallets are converted to frame v2 macros and all types have +// scale_info support. It does mean for now that we won't have enough metadata to decode the raw +// UncheckedExtrinsic bytes until all Pallet Calls are converted. 
+impl scale_info::TypeInfo for UncheckedExtrinsic +where + Extra: SignedExtension + scale_info::TypeInfo, +{ + type Identity = (); + + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("UncheckedExtrinsic", module_path!())) + // dummy impl because we can't bound `Call` type param to `TypeInfo` yet (see above todo + .composite(scale_info::build::Fields::unit()) + } +} + #[cfg(feature = "std")] impl parity_util_mem::MallocSizeOf for UncheckedExtrinsic From 9f4fe4b9c2a1dea20c4190a889b4a7a137d409e4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 19 Feb 2021 12:01:59 +0000 Subject: [PATCH 080/503] Add missing scale-info dependency --- Cargo.lock | 1 + frame/staking/fuzzer/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index e1a990164b59e..4d75b2d852a94 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5185,6 +5185,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/frame/staking/fuzzer/Cargo.toml b/frame/staking/fuzzer/Cargo.toml index a88e9619174c6..748f75061cdcc 100644 --- a/frame/staking/fuzzer/Cargo.toml +++ b/frame/staking/fuzzer/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] honggfuzz = "0.5" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } pallet-staking = { version = "3.0.0", path = "..", features = ["runtime-benchmarks"] } pallet-staking-reward-curve = { version = "3.0.0", path = "../reward-curve" } pallet-session = { version = "3.0.0", path = "../../session" } From 1f64969f8b09c8145f872d5de875b4b1241d1954 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 23 Feb 2021 16:19:15 +0000 Subject: [PATCH 081/503] Add some more TypeInfo derives --- Cargo.lock | 59 +++++++++++++++++++++++++++----- 
frame/contracts/Cargo.toml | 1 + frame/contracts/src/schedule.rs | 6 ++-- frame/support/src/traits.rs | 2 +- primitives/runtime/src/traits.rs | 6 ++-- 5 files changed, 58 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 119419f29fb5b..45dc948521090 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1629,6 +1629,17 @@ dependencies = [ "rand 0.8.3", ] +[[package]] +name = "fixed-hash" +version = "0.7.0" +source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +dependencies = [ + "byteorder", + "rand 0.8.3", + "rustc-hex", + "static_assertions", +] + [[package]] name = "fixed-hash" version = "0.7.0" @@ -2592,6 +2603,14 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "impl-codec" +version = "0.5.0" +source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +dependencies = [ + "parity-scale-codec", +] + [[package]] name = "impl-codec" version = "0.5.0" @@ -3156,7 +3175,7 @@ dependencies = [ "rand 0.7.3", "sha2 0.9.3", "smallvec 1.6.1", - "uint", + "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -4635,6 +4654,7 @@ dependencies = [ "pwasm-utils 0.16.0", "rand 0.7.3", "rand_pcg", + "scale-info", "serde", "sp-core", "sp-io", @@ -5488,7 +5508,7 @@ dependencies = [ "impl-trait-for-tuples", "parity-util-mem-derive", "parking_lot 0.11.1", - "primitive-types", + "primitive-types 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 1.6.1", "winapi 0.3.9", ] @@ -5915,16 +5935,26 @@ dependencies = [ "output_vt100", ] +[[package]] +name = "primitive-types" +version = "0.9.0" +source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +dependencies = [ + "fixed-hash 0.7.0 (git+https://github.com/paritytech/parity-common)", + "impl-codec 0.5.0 (git+https://github.com/paritytech/parity-common)", + "uint 0.9.0 
(git+https://github.com/paritytech/parity-common)", +] + [[package]] name = "primitive-types" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" dependencies = [ - "fixed-hash", - "impl-codec", + "fixed-hash 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-codec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "impl-serde", - "uint", + "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -8319,7 +8349,7 @@ dependencies = [ "integer-sqrt", "num-traits", "parity-scale-codec", - "primitive-types", + "primitive-types 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.3", "scale-info", "serde", @@ -8335,7 +8365,7 @@ dependencies = [ "honggfuzz", "num-bigint", "num-traits", - "primitive-types", + "primitive-types 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "sp-arithmetic", ] @@ -8511,7 +8541,7 @@ dependencies = [ "parity-util-mem", "parking_lot 0.11.1", "pretty_assertions", - "primitive-types", + "primitive-types 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.3", "rand_chacha 0.2.2", "regex", @@ -8744,7 +8774,7 @@ version = "3.0.0" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", - "primitive-types", + "primitive-types 0.9.0 (git+https://github.com/paritytech/parity-common)", "rustversion", "sp-core", "sp-externalities", @@ -10052,6 +10082,17 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" +[[package]] +name = "uint" +version = "0.9.0" +source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + [[package]] name = "uint" version = "0.9.0" diff --git 
a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index c5ba615504c6b..e27951643432a 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -17,6 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.0.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/contracts/src/schedule.rs b/frame/contracts/src/schedule.rs index c86134bc415d1..c11d37e35353d 100644 --- a/frame/contracts/src/schedule.rs +++ b/frame/contracts/src/schedule.rs @@ -42,7 +42,7 @@ pub const INSTR_BENCHMARK_BATCH_SIZE: u32 = 1_000; /// Definition of the cost schedule and other parameterizations for wasm vm. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(bound(serialize = "", deserialize = "")))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, ScheduleDebug)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, ScheduleDebug, scale_info::TypeInfo)] pub struct Schedule { /// Version of the schedule. pub version: u32, @@ -134,7 +134,7 @@ impl Limits { /// that use them as supporting instructions. Supporting means mainly pushing arguments /// and dropping return values in order to maintain a valid module. 
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, scale_info::TypeInfo)] pub struct InstructionWeights { pub i64const: u32, pub i64load: u32, @@ -193,7 +193,7 @@ pub struct InstructionWeights { /// Describes the weight for each imported function that a contract is allowed to call. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, scale_info::TypeInfo)] pub struct HostFnWeights { /// Weight of calling `seal_caller`. pub caller: Weight, diff --git a/frame/support/src/traits.rs b/frame/support/src/traits.rs index dd1983df28fd9..7cd0e043459e0 100644 --- a/frame/support/src/traits.rs +++ b/frame/support/src/traits.rs @@ -924,7 +924,7 @@ impl< pub trait Currency { /// The balance of an account. type Balance: AtLeast32BitUnsigned + FullCodec + Copy + MaybeSerializeDeserialize + Debug + - Default; + Default + scale_info::TypeInfo; /// The opaque token type for an imbalance. This is returned by unbalanced operations /// and must be dealt with. It may be dropped but cannot be cloned. diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 370bbdc892535..3f1d5e40c9380 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -387,7 +387,7 @@ impl::Output> { /// The hash type produced. type Output: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash - + AsRef<[u8]> + AsMut<[u8]> + Copy + Default + Encode + Decode; + + AsRef<[u8]> + AsMut<[u8]> + Copy + Default + Encode + Decode + scale_info::TypeInfo; /// Produce the hash of some byte-slice. 
fn hash(s: &[u8]) -> Self::Output { @@ -552,7 +552,7 @@ pub trait Header: /// Header hash type type Hash: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash + Ord + Copy + MaybeDisplay + Default + SimpleBitOps + Codec + AsRef<[u8]> - + AsMut<[u8]> + MaybeMallocSizeOf; + + AsMut<[u8]> + MaybeMallocSizeOf + scale_info::TypeInfo; /// Hashing algorithm type Hashing: Hash; @@ -608,7 +608,7 @@ pub trait Block: Clone + Send + Sync + Codec + Eq + MaybeSerialize + Debug + May /// Block hash type. type Hash: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash + Ord + Copy + MaybeDisplay + Default + SimpleBitOps + Codec + AsRef<[u8]> + AsMut<[u8]> - + MaybeMallocSizeOf; + + MaybeMallocSizeOf + scale_info::TypeInfo; /// Returns a reference to the header. fn header(&self) -> &Self::Header; From 7bc68bb7e98c6c973f98409c75c43077a53eede0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 23 Feb 2021 17:35:45 +0000 Subject: [PATCH 082/503] Add scale-info features and patch primitive-types --- Cargo.lock | 73 ++++++++++++++++---------------------- Cargo.toml | 4 +++ primitives/core/Cargo.toml | 2 +- 3 files changed, 35 insertions(+), 44 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 45dc948521090..6eec798623aba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1640,18 +1640,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "fixed-hash" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" -dependencies = [ - "byteorder", - "rand 0.8.3", - "rustc-hex", - "static_assertions", -] - [[package]] name = "fixedbitset" version = "0.2.0" @@ -2612,12 +2600,11 @@ dependencies = [ ] [[package]] -name = "impl-codec" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" +name = "impl-serde" +version = "0.3.1" +source = 
"git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" dependencies = [ - "parity-scale-codec", + "serde", ] [[package]] @@ -5500,19 +5487,28 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" +source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" dependencies = [ "cfg-if 1.0.0", "hashbrown", "impl-trait-for-tuples", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.11.1", - "primitive-types 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "primitive-types", "smallvec 1.6.1", "winapi 0.3.9", ] +[[package]] +name = "parity-util-mem-derive" +version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +dependencies = [ + "proc-macro2", + "syn", + "synstructure", +] + [[package]] name = "parity-util-mem-derive" version = "0.1.0" @@ -5940,23 +5936,14 @@ name = "primitive-types" version = "0.9.0" source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" dependencies = [ - "fixed-hash 0.7.0 (git+https://github.com/paritytech/parity-common)", - "impl-codec 0.5.0 (git+https://github.com/paritytech/parity-common)", + "fixed-hash", + "impl-codec", + "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", + "parity-scale-codec", + "scale-info", "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", ] -[[package]] -name = "primitive-types" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" -dependencies = [ - "fixed-hash 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "impl-codec 0.5.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "impl-serde", - "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "proc-macro-crate" version = "0.1.5" @@ -7717,7 +7704,7 @@ dependencies = [ "log", "parity-scale-codec", "parity-util-mem", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -8349,7 +8336,7 @@ dependencies = [ "integer-sqrt", "num-traits", "parity-scale-codec", - "primitive-types 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "primitive-types", "rand 0.7.3", "scale-info", "serde", @@ -8365,7 +8352,7 @@ dependencies = [ "honggfuzz", "num-bigint", "num-traits", - "primitive-types 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "primitive-types", "sp-arithmetic", ] @@ -8531,7 +8518,7 @@ dependencies = [ "hash256-std-hasher", "hex", "hex-literal", - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static", "libsecp256k1", "log", @@ -8541,7 +8528,7 @@ dependencies = [ "parity-util-mem", "parking_lot 0.11.1", "pretty_assertions", - "primitive-types 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "primitive-types", "rand 0.7.3", "rand_chacha 0.2.2", "regex", @@ -8774,7 +8761,7 @@ version = "3.0.0" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", - "primitive-types 0.9.0 (git+https://github.com/paritytech/parity-common)", + "primitive-types", "rustversion", "sp-core", "sp-externalities", @@ -8914,7 +8901,7 @@ version = "3.0.0" name = "sp-storage" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec", "ref-cast", "serde", @@ -9020,7 +9007,7 @@ dependencies = [ name = "sp-version" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 
(registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec", "serde", "sp-runtime", diff --git a/Cargo.toml b/Cargo.toml index adc8960ffd765..d3016dd8c69d2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -261,3 +261,7 @@ zeroize = { opt-level = 3 } [profile.release] # Substrate runtime requires unwinding. panic = "unwind" + +[patch.crates-io] +parity-util-mem = { git = "https://github.com/paritytech/parity-common" } +primitive-types = { git = "https://github.com/paritytech/parity-common" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index b79266762dba9..cfcc931788e82 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -19,7 +19,7 @@ scale-info = { version = "0.6.0", default-features = false, features = ["derive" log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } -primitive-types = { version = "0.9.0", default-features = false, features = ["codec"] } +primitive-types = { version = "0.9.0", default-features = false, features = ["codec", "scale-info"] } impl-serde = { version = "0.3.0", optional = true } wasmi = { version = "0.6.2", optional = true } hash-db = { version = "0.15.2", default-features = false } From 50b1373db922c5d22d839ba584ba6c0829718dce Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 24 Feb 2021 12:35:08 +0000 Subject: [PATCH 083/503] More TypeInfo bounds and derives --- Cargo.lock | 2 ++ frame/contracts/src/lib.rs | 6 +++--- frame/contracts/src/schedule.rs | 2 +- frame/example-offchain-worker/Cargo.toml | 1 + frame/example-offchain-worker/src/lib.rs | 2 +- frame/example-parallel/Cargo.toml | 1 + frame/example-parallel/src/lib.rs | 2 +- frame/system/src/lib.rs | 6 +++--- frame/system/src/offchain.rs | 6 ++++-- primitives/core/src/ecdsa.rs | 10 ++++++++++ primitives/core/src/ed25519.rs | 2 +- primitives/core/src/sr25519.rs | 2 +- 
primitives/runtime/src/lib.rs | 2 +- 13 files changed, 30 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6eec798623aba..dc52addc9904e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4781,6 +4781,7 @@ dependencies = [ "frame-system", "lite-json", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4796,6 +4797,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/frame/contracts/src/lib.rs b/frame/contracts/src/lib.rs index 1f21a59e61584..1841018b39721 100644 --- a/frame/contracts/src/lib.rs +++ b/frame/contracts/src/lib.rs @@ -146,7 +146,7 @@ pub mod pallet { use super::*; #[pallet::config] - pub trait Config: frame_system::Config { + pub trait Config: frame_system::Config + scale_info::TypeInfo { // todo: we shouldn't need this TypeInfo bound /// The time implementation used to supply timestamps to conntracts through `seal_now`. type Time: Time; @@ -271,7 +271,7 @@ pub mod pallet { impl Pallet where T::AccountId: UncheckedFrom, - T::AccountId: AsRef<[u8]>, + T::AccountId: AsRef<[u8]> + scale_info::TypeInfo, { /// Updates the schedule for metering contracts. /// @@ -659,7 +659,7 @@ pub mod pallet { impl Module where - T::AccountId: UncheckedFrom + AsRef<[u8]>, + T::AccountId: UncheckedFrom + AsRef<[u8]> + scale_info::TypeInfo, { /// Perform a call to a specified contract. /// diff --git a/frame/contracts/src/schedule.rs b/frame/contracts/src/schedule.rs index c11d37e35353d..98ef7740ef17a 100644 --- a/frame/contracts/src/schedule.rs +++ b/frame/contracts/src/schedule.rs @@ -63,7 +63,7 @@ pub struct Schedule { /// Describes the upper limits on various metrics. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, RuntimeDebug)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct Limits { /// The maximum number of topics supported by an event. 
pub event_topics: u32, diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index 5a2db258f8a19..73f16b1c8b35a 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } serde = { version = "1.0.101", optional = true } diff --git a/frame/example-offchain-worker/src/lib.rs b/frame/example-offchain-worker/src/lib.rs index 0c5e92a96e6ac..8d533e9a65d2e 100644 --- a/frame/example-offchain-worker/src/lib.rs +++ b/frame/example-offchain-worker/src/lib.rs @@ -326,7 +326,7 @@ pub mod pallet { /// Payload used by this example crate to hold price /// data required to submit a transaction. 
-#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct PricePayload { block_number: BlockNumber, price: u32, diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index b2f28887cec0b..b6216c902960d 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/example-parallel/src/lib.rs b/frame/example-parallel/src/lib.rs index e777100c6f54b..c28cf5ff4b491 100644 --- a/frame/example-parallel/src/lib.rs +++ b/frame/example-parallel/src/lib.rs @@ -97,7 +97,7 @@ pub mod pallet { } /// Request to enlist participant. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct EnlistedParticipant { pub account: Vec, pub signature: Vec, diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 2b14a480fc940..8d8773d0d81e5 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -175,18 +175,18 @@ pub mod pallet { /// with a sender account. type Index: Parameter + Member + MaybeSerializeDeserialize + Debug + Default + MaybeDisplay + AtLeast32Bit - + Copy + scale_info::TypeInfo; + + Copy + scale_info::TypeInfo; // todo [AJ] add TypeInfo bound to `Parameter` /// The block number type used by the runtime. 
type BlockNumber: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + AtLeast32BitUnsigned + Default + Bounded + Copy + sp_std::hash::Hash + - sp_std::str::FromStr + MaybeMallocSizeOf; + sp_std::str::FromStr + MaybeMallocSizeOf + scale_info::TypeInfo; /// The output of the `Hashing` function. type Hash: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + SimpleBitOps + Ord - + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + MaybeMallocSizeOf; + + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + MaybeMallocSizeOf + scale_info::TypeInfo; /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). type Hashing: Hash; diff --git a/frame/system/src/offchain.rs b/frame/system/src/offchain.rs index f2f446913c477..21c251b2e0df2 100644 --- a/frame/system/src/offchain.rs +++ b/frame/system/src/offchain.rs @@ -457,13 +457,15 @@ pub trait SigningTypes: crate::Config { + IdentifyAccount + core::fmt::Debug + codec::Codec - + Ord; + + Ord + + scale_info::TypeInfo; /// A matching `Signature` type. type Signature: Clone + PartialEq + core::fmt::Debug - + codec::Codec; + + codec::Codec + + scale_info::TypeInfo; } /// A definition of types required to submit transactions from within the runtime. diff --git a/primitives/core/src/ecdsa.rs b/primitives/core/src/ecdsa.rs index 593d171c4cab0..41432a4747a61 100644 --- a/primitives/core/src/ecdsa.rs +++ b/primitives/core/src/ecdsa.rs @@ -55,6 +55,16 @@ type Seed = [u8; 32]; #[derive(Clone, Encode, Decode, PassByInner)] pub struct Public(pub [u8; 33]); +// todo: remove this once https://github.com/paritytech/scale-info/pull/54 is merged, which +// introduces const generics for arrays and should support a 33 element array. 
+impl scale_info::TypeInfo for Public { + type Identity = Self; + + fn type_info() -> scale_info::Type { + scale_info::TypeDefArray::new(33, scale_info::MetaType::new::()).into() + } +} + impl PartialOrd for Public { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) diff --git a/primitives/core/src/ed25519.rs b/primitives/core/src/ed25519.rs index df8005e1d5e94..a89f97d885336 100644 --- a/primitives/core/src/ed25519.rs +++ b/primitives/core/src/ed25519.rs @@ -56,7 +56,7 @@ type Seed = [u8; 32]; /// A public key. #[cfg_attr(feature = "full_crypto", derive(Hash))] -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, scale_info::TypeInfo)] pub struct Public(pub [u8; 32]); /// A key pair. diff --git a/primitives/core/src/sr25519.rs b/primitives/core/src/sr25519.rs index 4a76cbe692401..a535bc5a2bc42 100644 --- a/primitives/core/src/sr25519.rs +++ b/primitives/core/src/sr25519.rs @@ -60,7 +60,7 @@ pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"sr25"); /// An Schnorrkel/Ristretto x25519 ("sr25519") public key. #[cfg_attr(feature = "full_crypto", derive(Hash))] -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, ::scale_info::TypeInfo)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, scale_info::TypeInfo)] pub struct Public(pub [u8; 32]); /// An Schnorrkel/Ristretto x25519 ("sr25519") key pair. diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index 214414e2ccf22..b4a4e7e7f1d79 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -230,7 +230,7 @@ impl Default for MultiSignature { } /// Public key for any known crypto algorithm. 
-#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Encode, Decode, RuntimeDebug)] +#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum MultiSigner { /// An Ed25519 identity. From 2c32ebbbe837cdf34c93ec7ba54483d2c87942bf Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 15 Mar 2021 12:55:23 +0000 Subject: [PATCH 084/503] Remove unnecessary crate root paths --- frame/support/src/traits.rs | 2 +- frame/support/src/weights.rs | 6 +++--- frame/system/src/extensions/check_genesis.rs | 2 +- frame/system/src/extensions/check_spec_version.rs | 2 +- primitives/arithmetic/src/per_things.rs | 2 +- primitives/core/src/changes_trie.rs | 2 +- primitives/core/src/crypto.rs | 2 +- primitives/runtime/src/lib.rs | 2 +- primitives/runtime/src/traits.rs | 6 +++--- 9 files changed, 13 insertions(+), 13 deletions(-) diff --git a/frame/support/src/traits.rs b/frame/support/src/traits.rs index 6a2227d1bda64..cf68ea533deea 100644 --- a/frame/support/src/traits.rs +++ b/frame/support/src/traits.rs @@ -1129,7 +1129,7 @@ pub trait Currency { } /// Status of funds. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, ::scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum BalanceStatus { /// Funds are free, as corresponding to `free` item in Balances. Free, diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index f02f5995aaef9..1e738958b8ce8 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -199,7 +199,7 @@ pub trait PaysFee { } /// Explicit enum to denote if a transaction pays fee or not. -#[derive(Clone, Copy, Eq, PartialEq, RuntimeDebug, Encode, Decode, ::scale_info::TypeInfo)] +#[derive(Clone, Copy, Eq, PartialEq, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub enum Pays { /// Transactor will pay related fees. 
Yes, @@ -219,7 +219,7 @@ impl Default for Pays { /// [DispatchClass::all] and [DispatchClass::non_mandatory] helper functions. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, ::scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum DispatchClass { /// A normal dispatch. Normal, @@ -304,7 +304,7 @@ pub mod priority { } /// A bundle of static information collected from the `#[weight = $x]` attributes. -#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, ::scale_info::TypeInfo)] +#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub struct DispatchInfo { /// Weight of this transaction. pub weight: Weight, diff --git a/frame/system/src/extensions/check_genesis.rs b/frame/system/src/extensions/check_genesis.rs index a23ba8e3de7e7..a7c6b4baff461 100644 --- a/frame/system/src/extensions/check_genesis.rs +++ b/frame/system/src/extensions/check_genesis.rs @@ -27,7 +27,7 @@ use sp_runtime::{ #[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] pub struct CheckGenesis(sp_std::marker::PhantomData); -impl sp_std::fmt::Debug for CheckGenesis { +impl sp_std::fmt::Debug for CheckGenesis { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckGenesis") diff --git a/frame/system/src/extensions/check_spec_version.rs b/frame/system/src/extensions/check_spec_version.rs index 9983859eb8973..ae9c56bedd321 100644 --- a/frame/system/src/extensions/check_spec_version.rs +++ b/frame/system/src/extensions/check_spec_version.rs @@ -24,7 +24,7 @@ use sp_runtime::{ }; /// Ensure the runtime version registered in the transaction is the same as at present. 
-#[derive(Encode, Decode, Clone, Eq, PartialEq, ::scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, scale_info::TypeInfo)] pub struct CheckSpecVersion(sp_std::marker::PhantomData); impl sp_std::fmt::Debug for CheckSpecVersion { diff --git a/primitives/arithmetic/src/per_things.rs b/primitives/arithmetic/src/per_things.rs index 0f749d02e6183..0d9cc3b612a01 100644 --- a/primitives/arithmetic/src/per_things.rs +++ b/primitives/arithmetic/src/per_things.rs @@ -333,7 +333,7 @@ macro_rules! implement_per_thing { /// #[doc = $title] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] - #[derive(Encode, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RuntimeDebug, ::scale_info::TypeInfo)] + #[derive(Encode, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RuntimeDebug, scale_info::TypeInfo)] pub struct $name($type); /// Implementation makes any compact encoding of `PerThing::Inner` valid, diff --git a/primitives/core/src/changes_trie.rs b/primitives/core/src/changes_trie.rs index 5cf93f89debfc..567720130d0af 100644 --- a/primitives/core/src/changes_trie.rs +++ b/primitives/core/src/changes_trie.rs @@ -24,7 +24,7 @@ use num_traits::Zero; /// Substrate changes trie configuration. #[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize, parity_util_mem::MallocSizeOf))] -#[derive(Debug, Clone, PartialEq, Eq, Default, Encode, Decode, ::scale_info::TypeInfo)] +#[derive(Debug, Clone, PartialEq, Eq, Default, Encode, Decode, scale_info::TypeInfo)] pub struct ChangesTrieConfiguration { /// Interval (in blocks) at which level1-digests are created. Digests are not /// created when this is less or equal to 1. diff --git a/primitives/core/src/crypto.rs b/primitives/core/src/crypto.rs index 73be8e7bffb52..5007951c01404 100644 --- a/primitives/core/src/crypto.rs +++ b/primitives/core/src/crypto.rs @@ -688,7 +688,7 @@ pub trait Public: } /// An opaque 32-byte cryptographic identifier. 
-#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode, ::scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Hash))] pub struct AccountId32([u8; 32]); diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index e8488923ed926..0e7b1c8729036 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -392,7 +392,7 @@ pub type DispatchResult = sp_std::result::Result<(), DispatchError>; pub type DispatchResultWithInfo = sp_std::result::Result>; /// Reason why a dispatch call failed. -#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, RuntimeDebug, ::scale_info::TypeInfo)] +#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum DispatchError { /// Some error occurred. diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 3f1d5e40c9380..c20973523a4fc 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -868,8 +868,8 @@ pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq /// *exactly* one identifier. /// /// This method provides a default implementation that returns `vec![SELF::IDENTIFIER]`. 
- fn identifier() -> Vec<(&'static str, ::scale_info::MetaType)> { - sp_std::vec![(Self::IDENTIFIER, ::scale_info::meta_type::())] + fn identifier() -> Vec<(&'static str, scale_info::MetaType)> { + sp_std::vec![(Self::IDENTIFIER, scale_info::meta_type::())] } } @@ -933,7 +933,7 @@ impl SignedExtension for Tuple { Ok(()) } - fn identifier() -> Vec<(&'static str, ::scale_info::MetaType)> { + fn identifier() -> Vec<(&'static str, scale_info::MetaType)> { let mut ids = Vec::new(); for_tuples!( #( ids.extend(Tuple::identifier()); )* ); ids From 7e311df4b428ec4b58ef6a704c2d1f94117b5c8e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 15 Mar 2021 16:55:09 +0000 Subject: [PATCH 085/503] Add some TypeInfo derives --- Cargo.lock | 1 + frame/assets/src/lib.rs | 2 +- frame/election-provider-multi-phase/Cargo.toml | 1 + frame/election-provider-multi-phase/src/lib.rs | 9 +++++---- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9b17524812bb5..cb3dd541b2cc1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4804,6 +4804,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.4", "rand 0.7.3", + "scale-info", "serde", "sp-arithmetic", "sp-core", diff --git a/frame/assets/src/lib.rs b/frame/assets/src/lib.rs index 5bf302e9010ce..72108bc5c585c 100644 --- a/frame/assets/src/lib.rs +++ b/frame/assets/src/lib.rs @@ -238,7 +238,7 @@ pub struct AssetMetadata { } /// Witness data for the destroy transactions. -#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub struct DestroyWitness { /// The number of accounts holding the asset. 
#[codec(compact)] diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index 1d63f9df40a25..1cc3c9eb734aa 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 4ee6caae0a641..841d8e7658e4d 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -343,7 +343,7 @@ pub enum FallbackStrategy { } /// The type of `Computation` that provided this election data. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum ElectionCompute { /// Election was computed on-chain. OnChain, @@ -365,7 +365,7 @@ impl Default for ElectionCompute { /// /// Such a solution should never become effective in anyway before being checked by the /// `Pallet::feasibility_check` -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct RawSolution { /// Compact election edges. compact: C, @@ -415,7 +415,7 @@ pub struct RoundSnapshot { /// This is stored automatically on-chain, and it contains the **size of the entire snapshot**. 
/// This is also used in dispatchables as weight witness data and should **only contain the size of /// the presented solution**, not the entire snapshot. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, Default)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct SolutionOrSnapshotSize { /// The length of voters. #[codec(compact)] @@ -535,7 +535,8 @@ pub mod pallet { + Eq + Clone + sp_std::fmt::Debug - + CompactSolution; + + CompactSolution + + scale_info::TypeInfo; /// Accuracy used for fallback on-chain election. type OnChainAccuracy: PerThing128; From 6634022d63335463a6e5c7f24b6c36b6beabe682 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 15 Mar 2021 17:31:36 +0000 Subject: [PATCH 086/503] Add dummy TypeInfo impl for CompactAssignments --- Cargo.lock | 1 + frame/staking/Cargo.toml | 1 + frame/staking/src/lib.rs | 12 ++++++++++++ 3 files changed, 14 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index cb3dd541b2cc1..c5e7fdef40af1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5338,6 +5338,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-application-crypto", "sp-core", diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index 1f9f29570a223..63af6f4c6bb3f 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } # TWO_PHASE_NOTE:: ideally we should be able to get rid of this. 
sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/staking/src/lib.rs b/frame/staking/src/lib.rs index 05511be63bb07..cf7659b7e8763 100644 --- a/frame/staking/src/lib.rs +++ b/frame/staking/src/lib.rs @@ -380,6 +380,18 @@ generate_solution_type!( pub struct CompactAssignments::(16) ); +impl scale_info::TypeInfo for CompactAssignments { + type Identity = Self; + + fn type_info() -> scale_info::Type { + // todo [AJ] possibly provide custom TypeInfo impl in generate_solution_type + // see https://github.com/polkadot-js/api/blob/d0a08dadd9be5a094d8cd02cc163b9de497154bd/packages/types/src/interfaces/staking/definitions.ts#L18 + scale_info::Type::builder() + .path(scale_info::Path::new("CompactAssignments", module_path!())) + .composite(scale_info::build::Fields::unit()) + } +} + /// Accuracy used for on-chain election. pub type ChainAccuracy = Perbill; From 04fb889d051a1c77e2048a51ae5df45a94cead91 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 18 Mar 2021 17:22:10 +0000 Subject: [PATCH 087/503] cargo generate-lockfile --- Cargo.lock | 130 ++++++++++++++++++++++++++--------------------------- 1 file changed, 64 insertions(+), 66 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 257e312c35f91..13d101f044de0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,7 +1,5 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 - [[package]] name = "Inflector" version = "0.11.4" @@ -337,9 +335,9 @@ checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" [[package]] name = "async-trait" -version = "0.1.47" +version = "0.1.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e098e9c493fdf92832223594d9a164f96bdf17ba81a42aff86f85c76768726a" +checksum = "36ea56748e10732c49404c153638a15ec3d6211ec5ff35d9bb20e13b93576adf" dependencies = [ "proc-macro2", "quote", @@ -444,9 +442,9 @@ checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" [[package]] name = "bincode" -version = "1.3.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d175dfa69e619905c4c3cdb7c3c203fa3bdd5d51184e3afdb2742c0280493772" +checksum = "f30d3a39baa26f9651f17b375061f3233dde33424a8b72b0dbe93a68a0bc896d" dependencies = [ "byteorder", "serde", @@ -659,9 +657,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "byteorder" -version = "1.3.4" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" @@ -1626,7 +1624,7 @@ dependencies = [ [[package]] name = "fixed-hash" version = "0.7.0" -source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "byteorder", "rand 0.8.3", @@ -1686,7 +1684,7 @@ dependencies = [ "linregress", "log", "parity-scale-codec", - "paste 1.0.4", + "paste 1.0.5", "serde", "sp-api", "sp-io", @@ -1785,7 +1783,7 @@ dependencies = [ "once_cell", "parity-scale-codec", "parity-util-mem", - "paste 1.0.4", + "paste 1.0.5", "pretty_assertions", "scale-info", "serde", @@ 
-2608,7 +2606,7 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "parity-scale-codec", ] @@ -2625,7 +2623,7 @@ dependencies = [ [[package]] name = "impl-serde" version = "0.3.1" -source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "serde", ] @@ -2739,9 +2737,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.47" +version = "0.3.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cfb73131c35423a367daf8cbd24100af0d077668c8c2943f0e7dd775fef0f65" +checksum = "dc9f84f9b115ce7843d60706df1422a916680bfdfcbdb0447c5614ff9d7e4d78" dependencies = [ "wasm-bindgen", ] @@ -2871,13 +2869,14 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.2.0-alpha" +version = "0.2.0-alpha.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "124797a4ea7430d0675db78e065e53316e3f1a3cbf0ee4d6dbdd42db7b08e193" +checksum = "9b15fc3a0ef2e02d770aa1a221d3412443dcaedc43e27d80c957dd5bbd65321b" dependencies = [ "async-trait", "futures 0.3.13", "hyper 0.13.10", + "hyper-rustls", "jsonrpsee-types", "jsonrpsee-utils", "log", @@ -2890,9 +2889,9 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.2.0-alpha.2" +version = "0.2.0-alpha.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb3f732ccbeafd15cefb59c7c7b5ac6c553c2653613b63e5e7feb7f06a219e9" +checksum = "6bb4afbda476e2ee11cc6245055c498c116fc8002d2d60fe8338b6ee15d84c3a" dependencies = [ "Inflector", "proc-macro2", @@ -2902,9 +2901,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = 
"0.2.0-alpha.2" +version = "0.2.0-alpha.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a8cd20c190e75dc56f7543b9d5713c3186351b301b5507ea6b85d8c403aac78" +checksum = "c42a82588b5f7830e94341bb7e79d15f46070ab6f64dde1e3b3719721b61c5bf" dependencies = [ "async-trait", "futures 0.3.13", @@ -2917,9 +2916,9 @@ dependencies = [ [[package]] name = "jsonrpsee-utils" -version = "0.2.0-alpha" +version = "0.2.0-alpha.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0e45394ec3175a767c3c5bac584560e6ad9b56ebd73216c85ec8bab49619244" +checksum = "e65c77838fce96bc554b4a3a159d0b9a2497319ae9305c66ee853998c7ed2fd3" dependencies = [ "futures 0.3.13", "globset", @@ -3043,9 +3042,9 @@ checksum = "3576a87f2ba00f6f106fdfcd16db1d698d648a26ad8e0573cad8537c3c362d2a" [[package]] name = "libc" -version = "0.2.88" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b07a082330a35e43f63177cc01689da34fbffa0105e1246cf0311472cac73a" +checksum = "538c092e5586f4cdd7dd8078c4a79220e3e168880218124dcbce860f0ea938c6" [[package]] name = "libloading" @@ -3862,9 +3861,9 @@ dependencies = [ [[package]] name = "multimap" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1255076139a83bb467426e7f8d0134968a8118844faa755985e077cf31850333" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] name = "multistream-select" @@ -4719,7 +4718,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parity-wasm 0.41.0", - "paste 1.0.4", + "paste 1.0.5", "pretty_assertions", "pwasm-utils 0.16.0", "rand 0.7.3", @@ -4817,7 +4816,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "rand 0.7.3", "scale-info", "serde", @@ -5586,9 +5585,9 @@ dependencies = [ [[package]] name = "parity-multiaddr" -version = "0.11.1" +version = "0.11.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c6805f98667a3828afb2ec2c396a8d610497e8d546f5447188aae47c5a79ec" +checksum = "58341485071825827b7f03cf7efd1cb21e6a709bea778fb50227fd45d2f361b4" dependencies = [ "arrayref", "bs58", @@ -5655,7 +5654,7 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "cfg-if 1.0.0", "hashbrown", @@ -5681,7 +5680,7 @@ dependencies = [ [[package]] name = "parity-util-mem-derive" version = "0.1.0" -source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "proc-macro2", "syn", @@ -5814,9 +5813,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5d65c4d95931acda4498f675e332fcbdc9a06705cd07086c510e9b6009cd1c1" +checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" [[package]] name = "paste-impl" @@ -6102,7 +6101,7 @@ dependencies = [ [[package]] name = "primitive-types" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "fixed-hash", "impl-codec", @@ -6579,14 +6578,13 @@ dependencies = [ [[package]] name = "regex" -version = "1.4.3" +version = "1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9251239e129e16308e70d853559389de218ac275b515068abc96829d05b948a" +checksum = "957056ecddbeba1b26965114e191d2e8589ce74db242b6ea25fc4062427a5c19" dependencies = [ "aho-corasick", 
"memchr", "regex-syntax", - "thread_local", ] [[package]] @@ -6601,9 +6599,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.22" +version = "0.6.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581" +checksum = "24d5f089152e60f62d28b835fbff2cd2e8dc0baf1ac13343bef92ab7eed84548" [[package]] name = "region" @@ -7283,7 +7281,7 @@ dependencies = [ "parity-scale-codec", "parity-wasm 0.41.0", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "sc-executor-common", "sc-executor-wasmi", "sc-executor-wasmtime", @@ -8327,9 +8325,9 @@ checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" [[package]] name = "signal-hook" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a7f3f92a1da3d6b1d32245d0cbcbbab0cfc45996d8df619c42bccfa6d2bbb5f" +checksum = "6aa894ef3fade0ee7243422f4fbbd6c2b48e6de767e621d37ef65f2310f53cea" dependencies = [ "libc", "signal-hook-registry", @@ -8918,7 +8916,7 @@ dependencies = [ "log", "parity-scale-codec", "parity-util-mem", - "paste 1.0.4", + "paste 1.0.5", "rand 0.7.3", "scale-info", "serde", @@ -9579,9 +9577,9 @@ checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2" [[package]] name = "syn" -version = "1.0.62" +version = "1.0.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "123a78a3596b24fee53a6464ce52d8ecbf62241e6294c7e7fe12086cd161f512" +checksum = "3fd9d1e9976102a03c542daa2eff1b43f9d72306342f3f8b3ed5fb8908195d6f" dependencies = [ "proc-macro2", "quote", @@ -9614,9 +9612,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "422045212ea98508ae3d28025bc5aaa2bd4a9cdaecd442a08da2ee620ee9ea95" +checksum = 
"62b29e388d11a2c0605bdc806ce6ed1d623a5bdbbdd5b423053444999331184e" [[package]] name = "tempfile" @@ -10056,9 +10054,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.13" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8a9bd1db7706f2373a190b0d067146caa39350c486f3d455b0e33b431f94c07" +checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2" dependencies = [ "proc-macro2", "quote", @@ -10107,9 +10105,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ab8966ac3ca27126141f7999361cc97dd6fb4b71da04c02044fa9045d98bb96" +checksum = "705096c6f83bf68ea5d357a6aa01829ddbdac531b357b45abeca842938085baa" dependencies = [ "ansi_term 0.12.1", "chrono", @@ -10236,9 +10234,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" +checksum = "879f6906492a7cd215bfa4cf595b600146ccfac0c79bcbd1f3000162af5e8b06" [[package]] name = "ucd-trie" @@ -10261,7 +10259,7 @@ dependencies = [ [[package]] name = "uint" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#db7c985b6dfe11cb7ef44072cbcaeb7022ae2ca4" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "byteorder", "crunchy", @@ -10412,9 +10410,9 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" +checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" [[package]] name = "void" @@ -10510,9 
+10508,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3de431a2910c86679c34283a33f66f4e4abd7e0aec27b6669060148872aadf94" +checksum = "8e67a5806118af01f0d9045915676b22aaebecf4178ae7021bc171dab0b897ab" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -10551,9 +10549,9 @@ checksum = "7d6f8ec44822dd71f5f221a5847fb34acd9060535c1211b70a05844c0f6383b1" [[package]] name = "wasm-bindgen-test" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0d4da138503a4cf86801b94d95781ee3619faa8feca830569cc6b54997b8b5c" +checksum = "2ea9e4f0050d5498a160e6b9d278a9699598e445b51dacd05598da55114c801a" dependencies = [ "console_error_panic_hook", "js-sys", @@ -10565,9 +10563,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3199c33f06500c731d5544664c24d0c2b742b98debc6b1c6f0c6d6e8fb7c19b" +checksum = "43f40402f495d92df6cdd0d329e7cc2580c8f99bcd74faff0e468923a764b7d4" dependencies = [ "proc-macro2", "quote", @@ -10836,9 +10834,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.47" +version = "0.3.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c40dc691fc48003eba817c38da7113c15698142da971298003cac3ef175680b3" +checksum = "ec600b26223b2948cedfde2a0aa6756dcf1fef616f43d7b3097aaf53a6c4d92b" dependencies = [ "js-sys", "wasm-bindgen", From c43376891d957f1e8e9d22508e37510805c1c822 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 18 Mar 2021 17:30:55 +0000 Subject: [PATCH 088/503] module pallet rename fix --- frame/support/procedural/src/construct_runtime/mod.rs | 6 +++--- frame/support/src/metadata_vnext.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git 
a/frame/support/procedural/src/construct_runtime/mod.rs b/frame/support/procedural/src/construct_runtime/mod.rs index 70a73e434b5a3..93f519eaa636a 100644 --- a/frame/support/procedural/src/construct_runtime/mod.rs +++ b/frame/support/procedural/src/construct_runtime/mod.rs @@ -331,7 +331,7 @@ fn decl_runtime_metadata<'a>( ) }) .collect::>(); - let modules_tokens_vnext = modules_tokens.clone(); + let pallets_tokens_vnext = pallets_tokens.clone(); quote!( #scrate::impl_runtime_metadata!{ for #runtime with pallets where Extrinsic = #extrinsic @@ -339,8 +339,8 @@ fn decl_runtime_metadata<'a>( } #scrate::impl_runtime_metadata_vnext!{ - for #runtime with modules where Extrinsic = #extrinsic - #(#modules_tokens_vnext)* + for #runtime with pallets where Extrinsic = #extrinsic + #(#pallets_tokens_vnext)* } ) } diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs index 6a9849d10f251..6e72a25a7db7a 100644 --- a/frame/support/src/metadata_vnext.rs +++ b/frame/support/src/metadata_vnext.rs @@ -20,7 +20,7 @@ pub use frame_metadata2; #[macro_export] macro_rules! 
impl_runtime_metadata_vnext { ( - for $runtime:ident with modules where Extrinsic = $ext:ident + for $runtime:ident with pallets where Extrinsic = $ext:ident $( $rest:tt )* ) => { impl $runtime { From 35126757d1785099ad4139faa840e16218af5dd0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 22 Mar 2021 11:08:02 +0000 Subject: [PATCH 089/503] WIP add TypeInfo bound to Parameter, start annotating types --- Cargo.lock | 5 +++++ frame/support/src/dispatch.rs | 4 ++-- primitives/application-crypto/Cargo.toml | 1 + primitives/application-crypto/src/lib.rs | 3 +++ primitives/authority-discovery/Cargo.toml | 1 + primitives/consensus/babe/Cargo.toml | 1 + primitives/consensus/babe/src/digests.rs | 2 +- primitives/consensus/babe/src/lib.rs | 2 +- primitives/consensus/slots/Cargo.toml | 1 + primitives/consensus/slots/src/lib.rs | 4 ++-- primitives/core/src/lib.rs | 2 +- primitives/finality-grandpa/Cargo.toml | 1 + 12 files changed, 20 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 13d101f044de0..ef62b830225fe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8491,6 +8491,7 @@ name = "sp-application-crypto" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -8542,6 +8543,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8641,6 +8643,7 @@ version = "0.9.0" dependencies = [ "merlin", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8671,6 +8674,7 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8770,6 +8774,7 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 872be0c8cbf33..3549fb900d12b 100644 --- 
a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -61,8 +61,8 @@ pub type CallableCallFor = >::Call; /// A type that can be used as a parameter in a dispatchable function. /// /// When using `decl_module` all arguments for call functions must implement this trait. -pub trait Parameter: Codec + EncodeLike + Clone + Eq + fmt::Debug {} -impl Parameter for T where T: Codec + EncodeLike + Clone + Eq + fmt::Debug {} +pub trait Parameter: Codec + EncodeLike + Clone + Eq + fmt::Debug + scale_info::TypeInfo {} +impl Parameter for T where T: Codec + EncodeLike + Clone + Eq + fmt::Debug + scale_info::TypeInfo {} /// Declares a `Module` struct and a `Call` enum, which implements the dispatch logic. /// diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index fff289e9a1d86..16d03492a128f 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,6 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "3.0.0", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-io = { version = "3.0.0", default-features = false, path = "../io" } diff --git a/primitives/application-crypto/src/lib.rs b/primitives/application-crypto/src/lib.rs index d085d961a1026..2ad3db5e88359 100644 --- a/primitives/application-crypto/src/lib.rs +++ b/primitives/application-crypto/src/lib.rs @@ -31,6 +31,8 @@ pub use sp_core::crypto::{KeyTypeId, CryptoTypeId, key_types}; #[doc(hidden)] pub use codec; #[doc(hidden)] +pub use scale_info; +#[doc(hidden)] #[cfg(feature = "std")] pub use serde; #[doc(hidden)] @@ -198,6 +200,7 @@ macro_rules! 
app_crypto_public_full_crypto { $crate::codec::Encode, $crate::codec::Decode, $crate::RuntimeDebug, + $crate::scale_info::TypeInfo, )] #[derive(Hash)] pub struct Public($public); diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index a32b13ca728d8..90f9cfcd823d4 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-api = { version = "3.0.0", default-features = false, path = "../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index a8ab03dcdaa49..9eaeeebb71763 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = { version = "3.0.0", default-features = false, path = "../../api" } diff --git a/primitives/consensus/babe/src/digests.rs b/primitives/consensus/babe/src/digests.rs index f34a38bc8b016..21d8cf392b602 100644 --- a/primitives/consensus/babe/src/digests.rs +++ 
b/primitives/consensus/babe/src/digests.rs @@ -134,7 +134,7 @@ pub struct NextEpochDescriptor { /// Information about the next epoch config, if changed. This is broadcast in the first /// block of the epoch, and applies using the same rules as `NextEpochDescriptor`. -#[derive(Decode, Encode, PartialEq, Eq, Clone, RuntimeDebug)] +#[derive(Decode, Encode, PartialEq, Eq, Clone, RuntimeDebug, scale_info::TypeInfo)] pub enum NextConfigDescriptor { /// Version 1. #[codec(index = 1)] diff --git a/primitives/consensus/babe/src/lib.rs b/primitives/consensus/babe/src/lib.rs index da9f089e4561c..62e52c2f41715 100644 --- a/primitives/consensus/babe/src/lib.rs +++ b/primitives/consensus/babe/src/lib.rs @@ -217,7 +217,7 @@ pub struct BabeGenesisConfiguration { } /// Types of allowed slots. -#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, RuntimeDebug)] +#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum AllowedSlots { /// Only allow primary slots. 
diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 46dbaca1a6ad4..20e070c2f169e 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../arithmetic" } diff --git a/primitives/consensus/slots/src/lib.rs b/primitives/consensus/slots/src/lib.rs index 545d18af1f9be..9c839430c9c06 100644 --- a/primitives/consensus/slots/src/lib.rs +++ b/primitives/consensus/slots/src/lib.rs @@ -22,7 +22,7 @@ use codec::{Decode, Encode}; /// Unit type wrapper that represents a slot. -#[derive(Debug, Encode, Decode, Eq, Clone, Copy, Default, Ord)] +#[derive(Debug, Encode, Decode, scale_info::TypeInfo, Eq, Clone, Copy, Default, Ord)] pub struct Slot(u64); impl core::ops::Deref for Slot { @@ -96,7 +96,7 @@ impl From for u64 { /// produces more than one block on the same slot. The proof of equivocation /// are the given distinct headers that were signed by the validator and which /// include the slot number. -#[derive(Clone, Debug, Decode, Encode, PartialEq)] +#[derive(Clone, Debug, Decode, Encode, PartialEq, scale_info::TypeInfo)] pub struct EquivocationProof { /// Returns the authority id of the equivocator. pub offender: Id, diff --git a/primitives/core/src/lib.rs b/primitives/core/src/lib.rs index c72f38ea0827e..9459e06d8cd2d 100644 --- a/primitives/core/src/lib.rs +++ b/primitives/core/src/lib.rs @@ -177,7 +177,7 @@ impl sp_std::ops::Deref for OpaqueMetadata { } /// Simple blob to hold a `PeerId` without committing to its format. 
-#[derive(Default, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, PassByInner)] +#[derive(Default, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, PassByInner, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct OpaquePeerId(pub Vec); diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 95aa65c930f78..6af7337f4b94e 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.0", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } From e0dfb9f776e97b4274d02448a881799e3adf48a8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 22 Mar 2021 11:16:38 +0000 Subject: [PATCH 090/503] Add some scale-info dependencies --- Cargo.lock | 2 ++ bin/node/primitives/Cargo.toml | 1 + primitives/consensus/aura/Cargo.toml | 1 + 3 files changed, 4 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index ef62b830225fe..9b12c16de7c01 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4133,6 +4133,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "pretty_assertions", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -8627,6 +8628,7 @@ name = "sp-consensus-aura" version = "0.9.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index 043ec5ab21cec..ed3e449dcb3d0 100644 --- a/bin/node/primitives/Cargo.toml +++ 
b/bin/node/primitives/Cargo.toml @@ -12,6 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-system = { version = "3.0.0", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = "../../../primitives/core" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index 105c74bb317d7..82676fdaf9b7d 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = { version = "3.0.0", default-features = false, path = "../../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } From c04a8069e7a94194c4dfbb326954b1d5bc4df80a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 22 Mar 2021 11:24:18 +0000 Subject: [PATCH 091/503] Moar TypeInfo --- Cargo.lock | 1 + frame/gilt/Cargo.toml | 1 + frame/gilt/src/lib.rs | 3 ++- primitives/application-crypto/src/lib.rs | 1 + 4 files changed, 5 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 9b12c16de7c01..b3aee436eccc7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4928,6 +4928,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", 
"sp-arithmetic", "sp-core", diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index 4df0dc49aaf93..08cd9fb6cb9dc 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } diff --git a/frame/gilt/src/lib.rs b/frame/gilt/src/lib.rs index 23596a8b6e147..50629d119bd02 100644 --- a/frame/gilt/src/lib.rs +++ b/frame/gilt/src/lib.rs @@ -102,7 +102,8 @@ pub mod pallet { /// `From`. type CurrencyBalance: sp_runtime::traits::AtLeast32BitUnsigned + codec::FullCodec + Copy - + MaybeSerializeDeserialize + sp_std::fmt::Debug + Default + From; + + MaybeSerializeDeserialize + sp_std::fmt::Debug + Default + From + + scale_info::TypeInfo; /// Origin required for setting the target proportion to be under gilt. type AdminOrigin: EnsureOrigin; diff --git a/primitives/application-crypto/src/lib.rs b/primitives/application-crypto/src/lib.rs index 2ad3db5e88359..abedcb7aca722 100644 --- a/primitives/application-crypto/src/lib.rs +++ b/primitives/application-crypto/src/lib.rs @@ -236,6 +236,7 @@ macro_rules! 
app_crypto_public_not_full_crypto { $crate::codec::Encode, $crate::codec::Decode, $crate::RuntimeDebug, + $crate::scale_info::TypeInfo, )] pub struct Public($public); } From f3ed3852223fce85a3b15540718ba216a9a96cf0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 22 Mar 2021 12:08:50 +0000 Subject: [PATCH 092/503] Dummy manual TypeInfo for header --- Cargo.lock | 1 + primitives/runtime/src/generic/header.rs | 15 +++++++++++++++ test-utils/runtime/Cargo.toml | 1 + test-utils/runtime/src/lib.rs | 2 +- 4 files changed, 18 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index b3aee436eccc7..755ca5d83799e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9467,6 +9467,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", + "scale-info", "serde", "sp-api", "sp-application-crypto", diff --git a/primitives/runtime/src/generic/header.rs b/primitives/runtime/src/generic/header.rs index 62f9908fbe58d..c830fa53838f9 100644 --- a/primitives/runtime/src/generic/header.rs +++ b/primitives/runtime/src/generic/header.rs @@ -115,6 +115,21 @@ impl Encode for Header where } } +impl scale_info::TypeInfo for Header where + Number: HasCompact + Copy + Into + TryFrom + 'static, + Hash: HashT, + Hash::Output: scale_info::TypeInfo, +{ + type Identity = Self; + + fn type_info() -> scale_info::Type { + // todo [AJ] provide accurate custom TypeInfo impl + scale_info::Type::builder() + .path(scale_info::Path::new("Header", module_path!())) + .composite(scale_info::build::Fields::unit()) + } +} + impl codec::EncodeLike for Header where Number: HasCompact + Copy + Into + TryFrom, Hash: HashT, diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 89da7929e64b8..94f6f6bb1f1f7 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,6 +18,7 @@ sp-consensus-aura = { version = "0.9.0", default-features = false, path = "../.. 
sp-consensus-babe = { version = "0.9.0", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "3.0.0", default-features = false, path = "../../primitives/block-builder" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.26.0", default-features = false } diff --git a/test-utils/runtime/src/lib.rs b/test-utils/runtime/src/lib.rs index 0854d617fd1d3..f1ed8845e20c6 100644 --- a/test-utils/runtime/src/lib.rs +++ b/test-utils/runtime/src/lib.rs @@ -439,7 +439,7 @@ impl_outer_origin!{ pub enum Origin for Runtime where system = frame_system {} } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub struct Event; impl From> for Event { From 35fd511d0f9578a39844008dbd488fd9012e0db9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 22 Mar 2021 12:36:09 +0000 Subject: [PATCH 093/503] Blanket add of scale-info dependency to pallets --- Cargo.lock | 33 +++++++++++++++++++++ frame/aura/Cargo.toml | 1 + frame/authority-discovery/Cargo.toml | 1 + frame/authorship/Cargo.toml | 1 + frame/babe/Cargo.toml | 1 + frame/benchmarking/Cargo.toml | 1 + frame/bounties/Cargo.toml | 1 + frame/collective/Cargo.toml | 1 + frame/contracts/common/Cargo.toml | 1 + frame/contracts/rpc/runtime-api/Cargo.toml | 1 + frame/democracy/Cargo.toml | 1 + frame/election-provider-support/Cargo.toml | 1 + frame/elections-phragmen/Cargo.toml | 1 + frame/elections/Cargo.toml | 1 + frame/executive/Cargo.toml | 1 + frame/grandpa/Cargo.toml | 1 + frame/identity/Cargo.toml | 1 + frame/im-online/Cargo.toml | 1 + 
frame/lottery/Cargo.toml | 1 + frame/metadata/Cargo.toml | 1 + frame/nicks/Cargo.toml | 1 + frame/node-authorization/Cargo.toml | 1 + frame/offences/Cargo.toml | 1 + frame/randomness-collective-flip/Cargo.toml | 1 + frame/recovery/Cargo.toml | 1 + frame/scored-pool/Cargo.toml | 1 + frame/session/Cargo.toml | 1 + frame/society/Cargo.toml | 1 + frame/sudo/Cargo.toml | 1 + frame/support/src/event.rs | 2 ++ frame/support/test/Cargo.toml | 1 + frame/tips/Cargo.toml | 1 + frame/treasury/Cargo.toml | 1 + frame/utility/Cargo.toml | 1 + frame/vesting/Cargo.toml | 1 + primitives/core/src/lib.rs | 2 +- 36 files changed, 69 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 755ca5d83799e..86ae9f296e4fb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1685,6 +1685,7 @@ dependencies = [ "log", "parity-scale-codec", "paste 1.0.5", + "scale-info", "serde", "sp-api", "sp-io", @@ -1723,6 +1724,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1740,6 +1742,7 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -1764,6 +1767,7 @@ name = "frame-metadata" version = "13.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-std", @@ -1840,6 +1844,7 @@ dependencies = [ "parity-scale-codec", "pretty_assertions", "rustversion", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -4575,6 +4580,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-aura", @@ -4592,6 +4598,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", + "scale-info", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4610,6 +4617,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", + "scale-info", "serde", "sp-authorship", 
"sp-core", @@ -4636,6 +4644,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4676,6 +4685,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4695,6 +4705,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4741,6 +4752,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec", + "scale-info", "sp-runtime", "sp-std", ] @@ -4779,6 +4791,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4795,6 +4808,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4841,6 +4855,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4859,6 +4874,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4955,6 +4971,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -4977,6 +4994,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4995,6 +5013,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5032,6 +5051,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5130,6 +5150,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5145,6 +5166,7 @@ dependencies = [ "frame-system", "log", 
"parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5161,6 +5183,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5220,6 +5243,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", + "scale-info", "serde", "sp-core", "sp-io", @@ -5236,6 +5260,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5268,6 +5293,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5285,6 +5311,7 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5329,6 +5356,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-core", "sp-io", @@ -5411,6 +5439,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5462,6 +5491,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5525,6 +5555,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5542,6 +5573,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5560,6 +5592,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index 6cae6c94c9a89..a1b1abaca10dc 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = 
"parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } pallet-session = { version = "3.0.0", default-features = false, path = "../session" } diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index 85844cf716f03..ec56767ea28f5 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-authority-discovery = { version = "3.0.0", default-features = false, path = "../../primitives/authority-discovery" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } pallet-session = { version = "3.0.0", features = ["historical" ], path = "../session", default-features = false } diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index 3bbbe9749c63b..f86a32882c969 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-authorship = { version = "3.0.0", default-features = false, path = 
"../../primitives/authorship" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index f7bebce98acf3..4dba374bbbbda 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index 3b20cf7dd048c..3e78b31901ea8 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.0", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-api = { version = "3.0.0", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "3.0.0", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "3.0.0", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index ff1a3a6807098..79c084224df1f 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", 
default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index b8f825cc52931..00b1970d0e711 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index 050e18fc44d14..6d7bba0c7d144 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] # This crate should not rely on any of the frame primitives. 
bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index 32de637f10822..29bf252ce5dd6 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-api = { version = "3.0.0", default-features = false, path = "../../../../primitives/api" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../../primitives/runtime" } pallet-contracts-primitives = { version = "3.0.0", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index f9b0d035b089d..d0e7e8868fb0f 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } 
sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-support/Cargo.toml b/frame/election-provider-support/Cargo.toml index b360cd89eb57b..a53034211e9a1 100644 --- a/frame/election-provider-support/Cargo.toml +++ b/frame/election-provider-support/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index 89723cb85fbe1..8142a36b70b2f 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index ac3c709300f51..48d92f42a0e17 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", 
default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index 6a0042308736e..dfa2efd89aef4 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } serde = { version = "1.0.101", optional = true } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 547e3966d52a4..2805154a53bf6 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "3.0.0", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 08109fda2584c..6e8dd97f3ef02 
100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index 4c5b4a8863bcd..f78884844500b 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "3.0.0", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index 73de239a4d801..bfccc7c7fe11f 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime 
= { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml index cede8a836123d..546807e311ec1 100644 --- a/frame/metadata/Cargo.toml +++ b/frame/metadata/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 6c8b609b401ca..91a872babc8bf 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index 786eb84d1e523..81894b6f43574 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = 
"2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index a34c5f6bc3a3d..9341939eb3b6f 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index ad9bcb97837db..27bd9e4c5d5e4 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } 
diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index 1f8003bd4d056..4c0e878b7b1ae 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 97e3a954d7e25..c8198ecf296a7 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index 52b8ebbdf4780..a6368de0ef8d0 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", 
default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index a3c6dcadab86d..95f080346859d 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index c1b841c30c6a3..4d71e37b00b38 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index f6d2feccbd088..db30fdb530835 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -129,6 +129,7 @@ macro_rules! 
decl_event { Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] /// Events for this module. @@ -464,6 +465,7 @@ macro_rules! impl_outer_event { Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] $(#[$attr])* diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 7d2f0ec463a38..8232d3d5da7de 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-io = { version = "3.0.0", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.9.0", optional = true, path = "../../../primitives/state-machine" } frame-support = { version = "3.0.0", default-features = false, path = "../" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index a16c9b91327ec..e7213abd698cb 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml 
index da0ffcb725c9b..2634b5d0cf87a 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index f55cff4d653c5..0c761b830eb37 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index e1335237eb508..ff9d119712abe 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", 
default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/core/src/lib.rs b/primitives/core/src/lib.rs index 9459e06d8cd2d..b8f802ced21bc 100644 --- a/primitives/core/src/lib.rs +++ b/primitives/core/src/lib.rs @@ -352,7 +352,7 @@ pub fn to_substrate_wasm_fn_return_value(value: &impl Encode) -> u64 { /// The void type - it cannot exist. // Oh rust, you crack me up... -#[derive(Clone, Decode, Encode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Decode, Encode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub enum Void {} /// Macro for creating `Maybe*` marker traits. From 7a037e2955cbce44f7d5bce8508db52e519fbe55 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 22 Mar 2021 17:36:41 +0000 Subject: [PATCH 094/503] Add Call and DefaultInstance TypeInfo derives --- Cargo.lock | 2 ++ frame/membership/Cargo.toml | 1 + frame/multisig/Cargo.toml | 1 + frame/support/procedural/src/storage/instance_trait.rs | 1 + frame/support/src/dispatch.rs | 3 ++- frame/support/src/lib.rs | 2 +- 6 files changed, 8 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 86ae9f296e4fb..f0cc995804df8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5066,6 +5066,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5135,6 +5136,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 37e7aa2cb8248..83829b9eb1a90 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.6.0", 
default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index e48f80567f67a..606aab6cb9f4e 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/support/procedural/src/storage/instance_trait.rs b/frame/support/procedural/src/storage/instance_trait.rs index 5468c3d344193..a4db35dba480e 100644 --- a/frame/support/procedural/src/storage/instance_trait.rs +++ b/frame/support/procedural/src/storage/instance_trait.rs @@ -123,6 +123,7 @@ fn create_and_impl_instance_struct( Clone, Eq, PartialEq, #scrate::codec::Encode, #scrate::codec::Decode, + #scrate::scale_info::TypeInfo, #scrate::RuntimeDebug, )] #doc diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 3549fb900d12b..beeaba36cff01 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -1807,7 +1807,7 @@ macro_rules! decl_module { /// Dispatchable calls. /// /// Each variant of this enum maps to a dispatchable function from the associated module. 
- #[derive($crate::codec::Encode, $crate::codec::Decode)] + #[derive($crate::codec::Encode, $crate::codec::Decode, $crate::scale_info::TypeInfo)] pub enum $call_type<$trait_instance: $trait_name$(, $instance: $instantiable $( = $module_default_instance)?)?> where $( $other_where_bounds )* { @@ -2178,6 +2178,7 @@ macro_rules! impl_outer_dispatch { Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] pub enum $call_type { diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 21619af3f7d1c..530844a2f885c 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -88,7 +88,7 @@ pub use sp_runtime::{self, ConsensusEngineId, print, traits::Printable}; pub const LOG_TARGET: &'static str = "runtime::frame-support"; /// A type that cannot be instantiated. -#[derive(Debug, PartialEq, Eq, Clone)] +#[derive(Debug, PartialEq, Eq, Clone, scale_info::TypeInfo)] // todo: [AJ] remove this TypeInfo derive once we support skip pub enum Never {} /// Create new implementations of the [`Get`](crate::traits::Get) trait. 
From 59759d4ee15bf83f62e36b0513961a9e299ecc0b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 23 Mar 2021 10:33:42 +0000 Subject: [PATCH 095/503] Use master branch scale-info --- Cargo.lock | 9 ++++----- Cargo.toml | 1 + bin/node-template/runtime/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- frame/democracy/Cargo.toml | 2 +- frame/election-provider-multi-phase/Cargo.toml | 2 +- frame/election-provider-support/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/gilt/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/metadata/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- frame/node-authorization/Cargo.toml | 2 +- frame/offences/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 1 + frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- frame/staking/Cargo.toml | 2 +- frame/staking/fuzzer/Cargo.toml | 2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/support/test/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- 
frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authority-discovery/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- test-utils/runtime/Cargo.toml | 2 +- 66 files changed, 69 insertions(+), 68 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f0cc995804df8..ff6d734278550 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5279,6 +5279,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -8063,8 +8064,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdd819984fe6ce661ebed1f451c0848d301a05ff56b8a4b0ae420de7dca046ea" +source = "git+https://github.com/paritytech/scale-info?branch=master#22c430a6872631e56d2641f9f04d3ef8f472d43e" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -8075,10 +8075,9 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e321c3d4ef7d3a90b0b4eda276d4215c6cbf3d59f66a9934e7866a48dcaa29b3" +source = "git+https://github.com/paritytech/scale-info?branch=master#22c430a6872631e56d2641f9f04d3ef8f472d43e" dependencies = [ - "proc-macro-crate 0.1.5", + "proc-macro-crate 1.0.0", "proc-macro2", "quote", "syn", diff --git a/Cargo.toml b/Cargo.toml index 5c3224a2598ee..c8cea159aef9c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -268,3 +268,4 @@ panic = "unwind" [patch.crates-io] parity-util-mem = { git = "https://github.com/paritytech/parity-common" } primitive-types = 
{ git = "https://github.com/paritytech/parity-common" } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index 0892535d80f24..548f9237e7a78 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } pallet-aura = { version = "3.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "3.0.0", default-features = false, path = "../../../frame/balances" } frame-support = { version = "3.0.0", default-features = false, path = "../../../frame/support" } diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index ed3e449dcb3d0..5981c44f8042e 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-system = { version = "3.0.0", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = 
"../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index ccc7871334222..e2606fb81ba85 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = true } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index 794feb3fe5e9b..2c0d49ff0dd8c 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. 
sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index a1b1abaca10dc..5320090cb8f8a 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } pallet-session = { version = "3.0.0", default-features = false, path = "../session" } diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index ec56767ea28f5..dc1d0a8694fc9 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-authority-discovery = { version = "3.0.0", default-features = false, path = "../../primitives/authority-discovery" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } pallet-session = { 
version = "3.0.0", features = ["historical" ], path = "../session", default-features = false } diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index f86a32882c969..67dac4d845f03 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-authorship = { version = "3.0.0", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index 4dba374bbbbda..3adb29fd9bfce 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 411583eea4c1a..bd38b8576b736 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -15,7 +15,7 @@ 
targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index 3e78b31901ea8..80087eefa91b5 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.0", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-api = { version = "3.0.0", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "3.0.0", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "3.0.0", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index 79c084224df1f..356c285657964 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = 
"parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index 00b1970d0e711..cf1c0a165b2d7 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index d3ef293d0438f..43d90f1484955 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = 
"master", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index 6d7bba0c7d144..1d745f1f90e03 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] # This crate should not rely on any of the frame primitives. bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index 29bf252ce5dd6..df5ef91e5a6a0 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-api = { version = "3.0.0", default-features = false, path = "../../../../primitives/api" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path 
= "../../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../../primitives/runtime" } pallet-contracts-primitives = { version = "3.0.0", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index d0e7e8868fb0f..98e8ffb67eb06 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index b9f6762e450ad..efd504997c40a 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "3.0.0", default-features = false, 
path = "../support" } diff --git a/frame/election-provider-support/Cargo.toml b/frame/election-provider-support/Cargo.toml index a53034211e9a1..bd343d073b36f 100644 --- a/frame/election-provider-support/Cargo.toml +++ b/frame/election-provider-support/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index 8142a36b70b2f..6be8f7fbe7b75 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 48d92f42a0e17..0675f919e4d7a 100644 --- a/frame/elections/Cargo.toml +++ 
b/frame/elections/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index 34ec0137b9ba4..6791c4b2f81f3 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } serde = { version = "1.0.101", optional = true } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index 44b0b5827e770..fe7da1805efae 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = 
false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index f57f399881d63..4d005f4805933 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index dfa2efd89aef4..deb9ce637d877 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = 
false, path = "../system" } serde = { version = "1.0.101", optional = true } diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index 08cd9fb6cb9dc..d8b308d76ff61 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 2805154a53bf6..b15a42da20de1 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "3.0.0", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml 
b/frame/identity/Cargo.toml index 6e8dd97f3ef02..91aa0a59b0365 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index f78884844500b..2528e9f8e0850 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "3.0.0", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index ba8e542ed252c..b7fa9eee82eb1 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -15,7 +15,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index bfccc7c7fe11f..ef1746a8d958d 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 83829b9eb1a90..3bcff87279ee9 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] 
} +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml index 546807e311ec1..fdf9c01103185 100644 --- a/frame/metadata/Cargo.toml +++ b/frame/metadata/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index 606aab6cb9f4e..5d142bd31ecf1 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core 
= { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 91a872babc8bf..0aae8fac3d6d4 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index 81894b6f43574..817c8c03d1ac1 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index 9341939eb3b6f..dc8b9d62cca9f 100644 
--- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index 27bd9e4c5d5e4..acb2af94ded4b 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index 4c0e878b7b1ae..46a5367c8a175 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] 
[dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index 4d82133b6af9d..3112be3126e77 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -12,6 +12,7 @@ readme = "README.md" [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index c8198ecf296a7..86b80d1909402 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional 
= true } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index a6368de0ef8d0..e378138ed794d 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index 95f080346859d..33f3542694e37 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git 
a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index d74e57ef93884..09b59a8ce7392 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } # TWO_PHASE_NOTE:: ideally we should be able to get rid of this. sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/staking/fuzzer/Cargo.toml b/frame/staking/fuzzer/Cargo.toml index f8866501dc17f..4d232be9dac1b 100644 --- a/frame/staking/fuzzer/Cargo.toml +++ b/frame/staking/fuzzer/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] honggfuzz = "0.5" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } pallet-staking = { version = "3.0.0", path = "..", features = ["runtime-benchmarks"] } pallet-staking-reward-curve = { version = "3.0.0", path = "../reward-curve" } pallet-session = { version = "3.0.0", path = "../../session" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index 4d71e37b00b38..00d2f48d0fb71 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", 
optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 895c9bdc224a9..b7726d283a7ae 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-metadata = { version = "13.0.0", default-features = false, path = "../metadata" } frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v13"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 8232d3d5da7de..e827ac4b3ec2d 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = 
"2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-io = { version = "3.0.0", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.9.0", optional = true, path = "../../../primitives/state-machine" } frame-support = { version = "3.0.0", default-features = false, path = "../" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 40fb48d5b8e89..22c40bd7dc330 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index b94cc271127d8..688ca856f54a6 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = 
"https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index e7213abd698cb..b0b5dced9a0b2 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index e767cff34e57a..97f82cafb4f0e 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "3.0.0", 
default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index 2634b5d0cf87a..3f39da86b290c 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index 0c761b830eb37..f91804bdfc7cd 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index 
ff9d119712abe..6cdece3e8036c 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index 16d03492a128f..cdb0f6e4d79d1 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "3.0.0", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-io = { version = "3.0.0", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 53b0f9e75890c..e9ae0b80b8389 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = 
"parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" num-traits = { version = "0.2.8", default-features = false } sp-std = { version = "3.0.0", default-features = false, path = "../std" } diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index 90f9cfcd823d4..9596a00b6ff62 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-api = { version = "3.0.0", default-features = false, path = "../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index 82676fdaf9b7d..d326b6e6dd5e1 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } 
+scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = { version = "3.0.0", default-features = false, path = "../../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index 9eaeeebb71763..4a6d8bf1179b5 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = { version = "3.0.0", default-features = false, path = "../../api" } diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 20e070c2f169e..263572721839c 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } 
sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../arithmetic" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index cfcc931788e82..1375bfd1abb48 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "3.0.0", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 6af7337f4b94e..9f5e73ecf0144 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.0", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index e3815ae1fb968..2c8ab4e7a4758 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 
@@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../arithmetic" } diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 94f6f6bb1f1f7..dbd294dbf3442 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.9.0", default-features = false, path = "../.. 
sp-consensus-babe = { version = "0.9.0", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "3.0.0", default-features = false, path = "../../primitives/block-builder" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.6.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.26.0", default-features = false } From 4628e6fd552fbbb57c88a0c84ae155be7ec673bd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 23 Mar 2021 12:33:00 +0000 Subject: [PATCH 096/503] Add more TypeInfo annotations and scale-info deps --- Cargo.lock | 3 +++ bin/node-template/pallets/template/Cargo.toml | 2 ++ frame/atomic-swap/Cargo.toml | 2 ++ frame/democracy/src/conviction.rs | 2 +- frame/democracy/src/vote_threshold.rs | 2 +- frame/elections-phragmen/src/lib.rs | 6 +++--- frame/identity/src/lib.rs | 16 ++++++++++++++-- frame/merkle-mountain-range/Cargo.toml | 2 ++ frame/society/src/lib.rs | 10 +++++----- frame/support/src/event.rs | 1 + 10 files changed, 34 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ff6d734278550..f68652814456c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4562,6 +4562,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5086,6 +5087,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5458,6 +5460,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", 
"sp-io", diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index 9f0c6ee182670..4cd1a764d1261 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } [dependencies.frame-support] default-features = false @@ -53,6 +54,7 @@ path = "../../../../primitives/runtime" default = ['std'] std = [ 'codec/std', + 'scale-info/std', 'frame-support/std', 'frame-system/std', 'frame-benchmarking/std', diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index a3b62d65e56a3..25314c40805db 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } @@ -30,6 +31,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", "sp-runtime/std", diff --git a/frame/democracy/src/conviction.rs b/frame/democracy/src/conviction.rs index c2dff741a9c23..a31cdc99cd172 100644 --- a/frame/democracy/src/conviction.rs +++ b/frame/democracy/src/conviction.rs @@ -23,7 +23,7 @@ use codec::{Encode, 
Decode}; use crate::types::Delegations; /// A value denoting the strength of conviction of a vote. -#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, scale_info::TypeInfo)] pub enum Conviction { /// 0.1x votes, unlocked. None, diff --git a/frame/democracy/src/vote_threshold.rs b/frame/democracy/src/vote_threshold.rs index 3114b22499d0e..fd1736afa680d 100644 --- a/frame/democracy/src/vote_threshold.rs +++ b/frame/democracy/src/vote_threshold.rs @@ -25,7 +25,7 @@ use sp_std::ops::{Add, Mul, Div, Rem}; use crate::Tally; /// A means of determining if a vote is past pass threshold. -#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug)] +#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum VoteThreshold { /// A supermajority of approvals is needed to pass this vote. diff --git a/frame/elections-phragmen/src/lib.rs b/frame/elections-phragmen/src/lib.rs index 26b9c9190a965..42fe807c3dad8 100644 --- a/frame/elections-phragmen/src/lib.rs +++ b/frame/elections-phragmen/src/lib.rs @@ -134,7 +134,7 @@ type NegativeImbalanceOf = <::Currency as Currency<::AccountId>>::NegativeImbalance; /// An indication that the renouncing account currently has which of the below roles. -#[derive(Encode, Decode, Clone, PartialEq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub enum Renouncing { /// A member is renouncing. Member, @@ -145,7 +145,7 @@ pub enum Renouncing { } /// An active voter. -#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq)] +#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq, scale_info::TypeInfo)] pub struct Voter { /// The members being backed. 
pub votes: Vec, @@ -158,7 +158,7 @@ pub struct Voter { } /// A holder of a seat as either a member or a runner-up. -#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq)] +#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq, scale_info::TypeInfo)] pub struct SeatHolder { /// The holder. pub who: AccountId, diff --git a/frame/identity/src/lib.rs b/frame/identity/src/lib.rs index fed32afa2e62f..ec61a27eea6f9 100644 --- a/frame/identity/src/lib.rs +++ b/frame/identity/src/lib.rs @@ -140,7 +140,7 @@ pub trait Config: frame_system::Config { /// than 32-bytes then it will be truncated when encoding. /// /// Can also be `None`. -#[derive(Clone, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub enum Data { /// No data here. None, @@ -290,12 +290,24 @@ impl Decode for IdentityFields { Ok(Self(>::from_bits(field as u64).map_err(|_| "invalid value")?)) } } +impl scale_info::TypeInfo for IdentityFields { + type Identity = Self; + + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("IdentityFields", module_path!())) + .composite( + scale_info::build::Fields::unnamed() + .field_of::("BitFlags") + ) + } +} /// Information concerning the identity of the controller of an account. /// /// NOTE: This should be stored at the end of the storage item to facilitate the addition of extra /// fields in a backwards compatible way through a specialized `Decode` impl. 
-#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(test, derive(Default))] pub struct IdentityInfo { /// Additional fields of the identity that are not catered for with the struct's explicit diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index 8861ba5c0c8b9..5d0615c46e0eb 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } @@ -32,6 +33,7 @@ hex-literal = "0.3" default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-benchmarking/std", "frame-support/std", "frame-system/std", diff --git a/frame/society/src/lib.rs b/frame/society/src/lib.rs index 3546ea68d4dca..722a6f5f950a3 100644 --- a/frame/society/src/lib.rs +++ b/frame/society/src/lib.rs @@ -330,7 +330,7 @@ pub enum Vote { } /// A judgement by the suspension judgement origin on a suspended candidate. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub enum Judgement { /// The suspension judgement origin takes no direct judgment /// and places the candidate back into the bid pool. @@ -342,7 +342,7 @@ pub enum Judgement { } /// Details of a payout given as a per-block linear "trickle". 
-#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, Default)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct Payout { /// Total value of the payout. value: Balance, @@ -355,7 +355,7 @@ pub struct Payout { } /// Status of a vouching member. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub enum VouchingStatus { /// Member is currently vouching for a user. Vouching, @@ -367,7 +367,7 @@ pub enum VouchingStatus { pub type StrikeCount = u32; /// A bid for entry into society. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug,)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct Bid { /// The bidder/candidate trying to enter society who: AccountId, @@ -378,7 +378,7 @@ pub struct Bid { } /// A vote by a member on a candidate application. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub enum BidKind { /// The CandidateDeposit was paid for this bid. Deposit(Balance), diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index db30fdb530835..ce9d6e252f8eb 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -280,6 +280,7 @@ macro_rules! __decl_generic_event { Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] /// Events for this module. 
From 9d9704e37f6cb56d722fd81197da25492b0ba75f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 23 Mar 2021 12:36:34 +0000 Subject: [PATCH 097/503] TypeInfo annotations for Staking --- frame/staking/src/lib.rs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/frame/staking/src/lib.rs b/frame/staking/src/lib.rs index 25196e8181488..4ceaa218f17f5 100644 --- a/frame/staking/src/lib.rs +++ b/frame/staking/src/lib.rs @@ -410,7 +410,7 @@ type NegativeImbalanceOf = <::Currency as Currency< >>::NegativeImbalance; /// Information regarding the active era (era in used in session). -#[derive(Encode, Decode, RuntimeDebug)] +#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct ActiveEraInfo { /// Index of era. pub index: EraIndex, @@ -445,7 +445,7 @@ pub enum StakerStatus { } /// A destination account for payment. -#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum RewardDestination { /// Pay into the stash account, increasing the amount at stake accordingly. Staked, @@ -466,7 +466,7 @@ impl Default for RewardDestination { } /// Preference of what happens regarding validation. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct ValidatorPrefs { /// Reward that validator takes up-front; only the rest is split between themselves and /// nominators. @@ -488,7 +488,7 @@ impl Default for ValidatorPrefs { } /// Just a Balance/BlockNumber tuple to encode when a chunk of funds will be unlocked. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct UnlockChunk { /// Amount of funds to be unlocked. #[codec(compact)] @@ -499,7 +499,7 @@ pub struct UnlockChunk { } /// The ledger of a (bonded) stash. 
-#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct StakingLedger { /// The stash account whose balance is actually locked and at stake. pub stash: AccountId, @@ -628,7 +628,7 @@ impl StakingLedger where } /// A record of the nominations made by a specific account. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct Nominations { /// The targets of nomination. pub targets: Vec, @@ -644,7 +644,7 @@ pub struct Nominations { } /// The amount of exposure (to slashing) than an individual nominator has. -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct IndividualExposure { /// The stash account of the nominator in question. pub who: AccountId, @@ -683,7 +683,7 @@ pub struct UnappliedSlash { } /// Indicate how an election round was computed. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum ElectionCompute { /// Result was forcefully computed on chain at the end of the session. OnChain, @@ -695,7 +695,7 @@ pub enum ElectionCompute { } /// The result of an election round. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct ElectionResult { /// Flat list of validators who have been elected. elected_stashes: Vec, @@ -707,7 +707,7 @@ pub struct ElectionResult { } /// The status of the upcoming (offchain) election. 
-#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum ElectionStatus { /// Nothing has and will happen for now. submission window is not open. Closed, @@ -720,7 +720,7 @@ pub enum ElectionStatus { /// Note that these values must reflect the __total__ number, not only those that are present in the /// solution. In short, these should be the same size as the size of the values dumped in /// `SnapshotValidators` and `SnapshotNominators`. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, Default)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct ElectionSize { /// Number of validators in the snapshot of the current election round. #[codec(compact)] @@ -948,7 +948,7 @@ pub trait Config: frame_system::Config + SendTransactionTypes> { } /// Mode of era-forcing. -#[derive(Copy, Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug)] +#[derive(Copy, Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum Forcing { /// Not forcing anything - just let whatever happen. From 5b9a285fc139dab201dd00bd50edb311d03e333b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 23 Mar 2021 16:16:13 +0000 Subject: [PATCH 098/503] Derive TypeInfo for Call and Event in frame v2, etc. 
--- Cargo.lock | 5 +++-- Cargo.toml | 3 ++- frame/balances/Cargo.toml | 1 + frame/proxy/Cargo.toml | 2 ++ frame/staking/src/lib.rs | 10 +++++----- frame/support/procedural/src/pallet/expand/call.rs | 1 + frame/support/procedural/src/pallet/expand/event.rs | 1 + frame/system/Cargo.toml | 1 + primitives/application-crypto/src/lib.rs | 2 ++ primitives/finality-grandpa/Cargo.toml | 1 + primitives/finality-grandpa/src/lib.rs | 4 ++-- primitives/runtime/src/lib.rs | 2 ++ primitives/runtime/src/traits.rs | 1 + 13 files changed, 24 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f68652814456c..78280a2db8abd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1608,8 +1608,7 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6447e2f8178843749e8c8003206def83ec124a7859475395777a28b5338647c" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#3d7babf1e43ab630189c2488f3e99c075f0944f4" dependencies = [ "either", "futures 0.3.13", @@ -1619,6 +1618,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", + "scale-info", ] [[package]] @@ -5232,6 +5232,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/Cargo.toml b/Cargo.toml index c8cea159aef9c..b4a8c1ef09fd5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -268,4 +268,5 @@ panic = "unwind" [patch.crates-io] parity-util-mem = { git = "https://github.com/paritytech/parity-common" } primitive-types = { git = "https://github.com/paritytech/parity-common" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master" } +finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } 
diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index bd38b8576b736..fb22d04232e13 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -33,6 +33,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-benchmarking/std", diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index 2934b9953b316..5d1278651ee89 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } @@ -34,6 +35,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-runtime/std", "frame-support/std", "frame-system/std", diff --git a/frame/staking/src/lib.rs b/frame/staking/src/lib.rs index 4ceaa218f17f5..2c696d305170a 100644 --- a/frame/staking/src/lib.rs +++ b/frame/staking/src/lib.rs @@ -424,7 +424,7 @@ pub struct ActiveEraInfo { /// Reward points of an era. Used to split era total payout between validators. /// /// This points will be used to reward validators and their respective nominators. -#[derive(PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct EraRewardPoints { /// Total number of points. Equals the sum of reward points for each validator. total: RewardPoint, @@ -433,7 +433,7 @@ pub struct EraRewardPoints { } /// Indicates the initial status of the staker. 
-#[derive(RuntimeDebug)] +#[derive(RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum StakerStatus { /// Chilling. @@ -654,7 +654,7 @@ pub struct IndividualExposure { } /// A snapshot of the stake backing a single validator in the system. -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, Default, RuntimeDebug)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct Exposure { /// The total balance backing this validator. #[codec(compact)] @@ -668,7 +668,7 @@ pub struct Exposure { /// A pending slash record. The value of the slash has been computed but not applied yet, /// rather deferred for several eras. -#[derive(Encode, Decode, Default, RuntimeDebug)] +#[derive(Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct UnappliedSlash { /// The stash ID of the offending validator. validator: AccountId, @@ -970,7 +970,7 @@ impl Default for Forcing { // A value placed in storage that represents the current version of the Staking storage. This value // is used by the `on_runtime_upgrade` logic to determine whether we run storage migration logic. // This should match directly with the semantic versions of the Rust crate. 
-#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] enum Releases { V1_0_0Ancient, V2_0_0, diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 26194d6a41357..afa58c1a884c2 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -94,6 +94,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::PartialEqNoBound, #frame_support::codec::Encode, #frame_support::codec::Decode, + #frame_support::scale_info::TypeInfo, )] #[allow(non_camel_case_types)] pub enum #call_ident<#type_decl_bounded_gen> #where_clause { diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 5c4ce843a5ff0..1850eabc457c8 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -121,6 +121,7 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::RuntimeDebugNoBound, #frame_support::codec::Encode, #frame_support::codec::Decode, + #frame_support::scale_info::TypeInfo, )] )); diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 22c40bd7dc330..5a31816d13281 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -35,6 +35,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-core/std", "sp-std/std", "sp-io/std", diff --git a/primitives/application-crypto/src/lib.rs b/primitives/application-crypto/src/lib.rs index abedcb7aca722..efc35cd681a05 100644 --- a/primitives/application-crypto/src/lib.rs +++ b/primitives/application-crypto/src/lib.rs @@ -404,6 +404,7 @@ macro_rules! 
app_crypto_signature_full_crypto { $crate::codec::Encode, $crate::codec::Decode, $crate::RuntimeDebug, + $crate::scale_info::TypeInfo, )] #[derive(Hash)] pub struct Signature($sig); @@ -437,6 +438,7 @@ macro_rules! app_crypto_signature_not_full_crypto { #[derive(Clone, Default, Eq, PartialEq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] pub struct Signature($sig); diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 9f5e73ecf0144..28c52bbe75f1e 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -33,6 +33,7 @@ std = [ "log", "serde", "codec/std", + "scale-info/std", "grandpa/std", "sp-api/std", "sp-application-crypto/std", diff --git a/primitives/finality-grandpa/src/lib.rs b/primitives/finality-grandpa/src/lib.rs index 5b393bd1d80e6..1ea63bc28a851 100644 --- a/primitives/finality-grandpa/src/lib.rs +++ b/primitives/finality-grandpa/src/lib.rs @@ -171,7 +171,7 @@ impl ConsensusLog { /// GRANDPA happens when a voter votes on the same round (either at prevote or /// precommit stage) for different blocks. Proving is achieved by collecting the /// signed messages of conflicting votes. -#[derive(Clone, Debug, Decode, Encode, PartialEq)] +#[derive(Clone, Debug, Decode, Encode, PartialEq, scale_info::TypeInfo)] pub struct EquivocationProof { set_id: SetId, equivocation: Equivocation, @@ -208,7 +208,7 @@ impl EquivocationProof { /// Wrapper object for GRANDPA equivocation proofs, useful for unifying prevote /// and precommit equivocations under a common type. -#[derive(Clone, Debug, Decode, Encode, PartialEq)] +#[derive(Clone, Debug, Decode, Encode, PartialEq, scale_info::TypeInfo)] pub enum Equivocation { /// Proof of equivocation at prevote stage. 
Prevote(grandpa::Equivocation, AuthoritySignature>), diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index c25937571ca46..295bc8ded2986 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -26,6 +26,8 @@ #[doc(hidden)] pub use codec; +#[doc(hidden)] +pub use scale_info; #[cfg(feature = "std")] #[doc(hidden)] pub use serde; diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index c20973523a4fc..0f02a91b06102 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -1223,6 +1223,7 @@ macro_rules! impl_opaque_keys { Default, Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] #[cfg_attr(feature = "std", derive($crate::serde::Serialize, $crate::serde::Deserialize))] From 4b806aebaaaced5b13cc660f944b26b68c314f85 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 23 Mar 2021 16:43:57 +0000 Subject: [PATCH 099/503] More TypeInfo derives --- Cargo.lock | 1 + bin/node/runtime/src/lib.rs | 2 +- frame/democracy/src/vote.rs | 8 ++++---- frame/identity/src/lib.rs | 8 ++++---- frame/im-online/src/lib.rs | 6 +++--- frame/multisig/src/lib.rs | 4 ++-- frame/vesting/src/lib.rs | 2 +- primitives/core/src/offchain/mod.rs | 4 ++-- primitives/runtime/src/testing.rs | 2 +- primitives/session/Cargo.toml | 2 ++ primitives/session/src/lib.rs | 2 +- 11 files changed, 22 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 78280a2db8abd..c0814e749cd1a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9076,6 +9076,7 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-core", "sp-runtime", diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 98929e692a8d1..d85664085b14a 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -251,7 +251,7 @@ parameter_types! 
{ } /// The type used to represent the kinds of proxying allowed. -#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug)] +#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum ProxyType { Any, NonTransfer, diff --git a/frame/democracy/src/vote.rs b/frame/democracy/src/vote.rs index 5adc76f4ae00b..dbf55249578e3 100644 --- a/frame/democracy/src/vote.rs +++ b/frame/democracy/src/vote.rs @@ -23,7 +23,7 @@ use sp_runtime::{RuntimeDebug, traits::{Saturating, Zero}}; use crate::{Conviction, ReferendumIndex, Delegations}; /// A number of lock periods, plus a vote, one way or the other. -#[derive(Copy, Clone, Eq, PartialEq, Default, RuntimeDebug)] +#[derive(Copy, Clone, Eq, PartialEq, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct Vote { pub aye: bool, pub conviction: Conviction, @@ -49,7 +49,7 @@ impl Decode for Vote { } /// A vote for a referendum of a particular account. -#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub enum AccountVote { /// A standard vote, one-way (approve or reject) with a given amount of conviction. Standard { vote: Vote, balance: Balance }, @@ -89,7 +89,7 @@ impl AccountVote { } /// A "prior" lock, i.e. a lock for some now-forgotten reason. -#[derive(Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug)] +#[derive(Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, scale_info::TypeInfo)] pub struct PriorLock(BlockNumber, Balance); impl PriorLock { @@ -112,7 +112,7 @@ impl PriorLock { /// The account is voting directly. `delegations` is the total amount of post-conviction voting /// weight that it controls from those that have delegated to it. 
diff --git a/frame/identity/src/lib.rs b/frame/identity/src/lib.rs index ec61a27eea6f9..e10323132b061 100644 --- a/frame/identity/src/lib.rs +++ b/frame/identity/src/lib.rs @@ -211,7 +211,7 @@ pub type RegistrarIndex = u32; /// /// NOTE: Registrars may pay little attention to some fields. Registrars may want to make clear /// which fields their attestation is relevant for by off-chain means. -#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub enum Judgement< Balance: Encode + Decode + Copy + Clone + Debug + Eq + PartialEq > { @@ -262,7 +262,7 @@ impl< /// The fields that we use to identify the owner of an account with. Each corresponds to a field /// in the `IdentityInfo` struct. #[repr(u64)] -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, BitFlags, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, BitFlags, RuntimeDebug, scale_info::TypeInfo)] pub enum IdentityField { Display = 0b0000000000000000000000000000000000000000000000000000000000000001, Legal = 0b0000000000000000000000000000000000000000000000000000000000000010, @@ -359,7 +359,7 @@ pub struct IdentityInfo { /// /// NOTE: This is stored separately primarily to facilitate the addition of extra fields in a /// backwards compatible way through a specialized `Decode` impl. -#[derive(Clone, Encode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Encode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub struct Registration< Balance: Encode + Decode + Copy + Clone + Debug + Eq + PartialEq > { @@ -394,7 +394,7 @@ impl< } /// Information concerning a registrar. 
-#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub struct RegistrarInfo< Balance: Encode + Decode + Clone + Debug + Eq + PartialEq, AccountId: Encode + Decode + Clone + Debug + Eq + PartialEq diff --git a/frame/im-online/src/lib.rs b/frame/im-online/src/lib.rs index ec8c6218b3f12..899c97751764c 100644 --- a/frame/im-online/src/lib.rs +++ b/frame/im-online/src/lib.rs @@ -149,7 +149,7 @@ const INCLUDE_THRESHOLD: u32 = 3; /// This stores the block number at which heartbeat was requested and when the worker /// has actually managed to produce it. /// Note we store such status for every `authority_index` separately. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] struct HeartbeatStatus { /// An index of the session that we are supposed to send heartbeat for. pub session_index: SessionIndex, @@ -210,7 +210,7 @@ impl sp_std::fmt::Debug for OffchainErr where BlockNumber: PartialEq + Eq + Decode + Encode, { @@ -770,7 +770,7 @@ impl frame_support::unsigned::ValidateUnsigned for Module { } /// An offence that is filed if a validator didn't send a heartbeat message. -#[derive(RuntimeDebug)] +#[derive(RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Clone, PartialEq, Eq))] pub struct UnresponsivenessOffence { /// The current session index in which we report the unresponsive validators. diff --git a/frame/multisig/src/lib.rs b/frame/multisig/src/lib.rs index 3b434ec484041..f05316d0ce190 100644 --- a/frame/multisig/src/lib.rs +++ b/frame/multisig/src/lib.rs @@ -101,7 +101,7 @@ pub trait Config: frame_system::Config { /// A global extrinsic index, formed as the extrinsic index within a block, together with that /// block's height. This allows a transaction in which a multisig operation of a particular /// composite was created to be uniquely identified. 
-#[derive(Copy, Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(Copy, Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct Timepoint { /// The height of the chain at the point in time. height: BlockNumber, @@ -110,7 +110,7 @@ pub struct Timepoint { } /// An open multisig operation. -#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct Multisig { /// The extrinsic when the multisig operation was opened. when: Timepoint, diff --git a/frame/vesting/src/lib.rs b/frame/vesting/src/lib.rs index 7b725f7486dfd..ecd67290117e7 100644 --- a/frame/vesting/src/lib.rs +++ b/frame/vesting/src/lib.rs @@ -87,7 +87,7 @@ pub trait Config: frame_system::Config { const VESTING_ID: LockIdentifier = *b"vesting "; /// Struct to encode the vesting schedule of an individual account. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct VestingInfo { /// Locked amount at genesis. pub locked: Balance, diff --git a/primitives/core/src/offchain/mod.rs b/primitives/core/src/offchain/mod.rs index 8b587b887efdd..ee1a6d0458d41 100644 --- a/primitives/core/src/offchain/mod.rs +++ b/primitives/core/src/offchain/mod.rs @@ -181,7 +181,7 @@ impl TryFrom for HttpRequestStatus { /// A blob to hold information about the local node's network state /// without committing to its format. -#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByCodec)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByCodec, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Default))] pub struct OpaqueNetworkState { /// PeerId of the local node in SCALE encoded. @@ -191,7 +191,7 @@ pub struct OpaqueNetworkState { } /// Simple blob to hold a `Multiaddr` without committing to its format. 
-#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByInner)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByInner, scale_info::TypeInfo)] pub struct OpaqueMultiaddr(pub Vec); impl OpaqueMultiaddr { diff --git a/primitives/runtime/src/testing.rs b/primitives/runtime/src/testing.rs index b6d2641f01083..3296f02a468a8 100644 --- a/primitives/runtime/src/testing.rs +++ b/primitives/runtime/src/testing.rs @@ -36,7 +36,7 @@ use crate::transaction_validity::{TransactionValidity, TransactionValidityError, /// 2. Can be converted to any `Public` key. /// 3. Implements `RuntimeAppPublic` so it can be used instead of regular application-specific /// crypto. -#[derive(Default, PartialEq, Eq, Clone, Encode, Decode, Debug, Hash, Serialize, Deserialize, PartialOrd, Ord)] +#[derive(Default, PartialEq, Eq, Clone, Encode, Decode, Debug, Hash, Serialize, Deserialize, PartialOrd, Ord, scale_info::TypeInfo)] pub struct UintAuthorityId(pub u64); impl From for UintAuthorityId { diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index c04b271bc0370..ff782c925e4a0 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-api = { version = "3.0.0", default-features = false, path = "../api" } sp-core = { version = "3.0.0", default-features = false, path = "../core" } sp-std = { version = "3.0.0", default-features = false, path = "../std" } @@ -24,6 +25,7 @@ sp-runtime = { version = "3.0.0", optional = true, path = "../runtime" } default = [ "std" ] std = [ "codec/std", + "scale-info/std", "sp-api/std", "sp-core/std", "sp-std/std", diff --git a/primitives/session/src/lib.rs 
b/primitives/session/src/lib.rs index 9f63d64d414b0..4c7f9524043f7 100644 --- a/primitives/session/src/lib.rs +++ b/primitives/session/src/lib.rs @@ -54,7 +54,7 @@ sp_api::decl_runtime_apis! { pub type ValidatorCount = u32; /// Proof of membership of a specific key in a given session. -#[derive(Encode, Decode, Clone, Eq, PartialEq, Default, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Eq, PartialEq, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct MembershipProof { /// The session index on which the specific key is a member. pub session: SessionIndex, From f781fc13eaf398c550965b35be8bda23b3bc85bd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 23 Mar 2021 17:36:57 +0000 Subject: [PATCH 100/503] Add vnext event metadata for legacy frame macros --- frame/support/src/event.rs | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index ce9d6e252f8eb..f0458bb171184 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -349,6 +349,36 @@ macro_rules! __events_to_metadata { } } +#[macro_export] +#[doc(hidden)] +macro_rules! __events_to_metadata_vnext { + ( + $( $metadata:expr ),*; + $( #[doc = $doc_attr:tt] )* + $event:ident $( ( $( $param:path ),* $(,)? ) )*, + $( $rest:tt )* + ) => { + $crate::__events_to_metadata_vnext!( + $( $metadata, )* + $crate::metadata::v13::EventMetadata { + name: stringify!($event), + arguments: $crate::scale_info::prelude::vec![ + $( $( $crate::metadata::v13::TypeSpec::new::<$param>(stringify!($param)) ),* )* + ], + documentation: $crate::scale_info::prelude::vec![ + $( $doc_attr ),* + ], + }; + $( $rest )* + ) + }; + ( + $( $metadata:expr ),*; + ) => { + &[ $( $metadata ),* ] + } +} + /// Constructs an Event type for a runtime. This is usually called automatically by the /// construct_runtime macro. 
#[macro_export] From f539986a0df633d8f50c9be21baed80344f76068 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 24 Mar 2021 12:10:00 +0000 Subject: [PATCH 101/503] Remove explicit TypeInfo bounds, after adding to Parameter trait --- frame/system/src/lib.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 08ef7a237e40f..dcfa385c345c3 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -175,25 +175,25 @@ pub mod pallet { /// with a sender account. type Index: Parameter + Member + MaybeSerializeDeserialize + Debug + Default + MaybeDisplay + AtLeast32Bit - + Copy + scale_info::TypeInfo; // todo [AJ] add TypeInfo bound to `Parameter` + + Copy; /// The block number type used by the runtime. type BlockNumber: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + AtLeast32BitUnsigned + Default + Bounded + Copy + sp_std::hash::Hash + - sp_std::str::FromStr + MaybeMallocSizeOf + scale_info::TypeInfo; + sp_std::str::FromStr + MaybeMallocSizeOf; /// The output of the `Hashing` function. type Hash: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + SimpleBitOps + Ord - + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + MaybeMallocSizeOf + scale_info::TypeInfo; + + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + MaybeMallocSizeOf; /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). type Hashing: Hash; /// The user account identifier type for the runtime. type AccountId: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord - + Default + scale_info::TypeInfo; + + Default; /// Converting trait to take a source type and convert to `AccountId`. 
/// From 7721913ff6609690c9a860bf400b270999c0a5e2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 24 Mar 2021 12:10:36 +0000 Subject: [PATCH 102/503] Generate Event metadata for legacy frame macros --- Cargo.lock | 2 ++ frame/support/src/event.rs | 7 +++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c0814e749cd1a..49f0a5a46c7da 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,5 +1,7 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. +version = 3 + [[package]] name = "Inflector" version = "0.11.4" diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index f0458bb171184..27ce4ac872214 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -154,7 +154,7 @@ macro_rules! decl_event { #[allow(dead_code)] #[doc(hidden)] pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { - $crate::scale_info::prelude::vec![] + $crate::__events_to_metadata_vnext!(; $( $events )* ) } } } @@ -306,11 +306,10 @@ macro_rules! __decl_generic_event { $crate::__events_to_metadata!(; $( $events )* ) } - /// Metadata vnext only supported by new frame support macros #[allow(dead_code)] #[doc(hidden)] pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { - $crate::scale_info::prelude::vec![] + $crate::__events_to_metadata_vnext!(; $( $events )* ) } } }; @@ -375,7 +374,7 @@ macro_rules! 
__events_to_metadata_vnext { ( $( $metadata:expr ),*; ) => { - &[ $( $metadata ),* ] + $crate::scale_info::prelude::vec![ $( $metadata ),* ] } } From 3ae3b2428827ba670917bddd35fa9a4b28a1c536 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 26 Mar 2021 13:04:08 +0000 Subject: [PATCH 103/503] tmp add upgrade file --- frame/indices/src/upgrade.rs | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 frame/indices/src/upgrade.rs diff --git a/frame/indices/src/upgrade.rs b/frame/indices/src/upgrade.rs new file mode 100644 index 0000000000000..e69de29bb2d1d From 7fe823f309482af0f26183665bd73ce021f676f4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 26 Mar 2021 14:41:54 +0000 Subject: [PATCH 104/503] Migrate pallet-indices to `pallet!` --- frame/indices/src/lib.rs | 187 +++++++++++++++++++++-------------- frame/indices/src/upgrade.rs | 112 +++++++++++++++++++++ 2 files changed, 227 insertions(+), 72 deletions(-) diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index c925d3a0533e0..27b064d83e5d1 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -39,76 +39,44 @@ pub use weights::WeightInfo; type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; -/// The module's config trait. -pub trait Config: frame_system::Config { - /// Type used for storing an account's index; implies the maximum number of accounts the system - /// can hold. - type AccountIndex: Parameter + Member + Codec + Default + AtLeast32Bit + Copy; +pub use pallet::*; - /// The currency trait. - type Currency: ReservableCurrency; +#[frame_support::pallet] +pub mod pallet { + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + use super::*; - /// The deposit needed for reserving an index. - type Deposit: Get>; + /// The module's config trait. 
+ #[pallet::config] + pub trait Config: frame_system::Config { + /// Type used for storing an account's index; implies the maximum number of accounts the system + /// can hold. + type AccountIndex: Parameter + Member + Codec + Default + AtLeast32Bit + Copy; - /// The overarching event type. - type Event: From> + Into<::Event>; + /// The currency trait. + type Currency: ReservableCurrency; - /// Weight information for extrinsics in this pallet. - type WeightInfo: WeightInfo; -} - -decl_storage! { - trait Store for Module as Indices { - /// The lookup from index to account. - pub Accounts build(|config: &GenesisConfig| - config.indices.iter() - .cloned() - .map(|(a, b)| (a, (b, Zero::zero(), false))) - .collect::>() - ): map hasher(blake2_128_concat) T::AccountIndex => Option<(T::AccountId, BalanceOf, bool)>; - } - add_extra_genesis { - config(indices): Vec<(T::AccountIndex, T::AccountId)>; - } -} + /// The deposit needed for reserving an index. + #[pallet::constant] + type Deposit: Get>; -decl_event!( - pub enum Event where - ::AccountId, - ::AccountIndex - { - /// A account index was assigned. \[index, who\] - IndexAssigned(AccountId, AccountIndex), - /// A account index has been freed up (unassigned). \[index\] - IndexFreed(AccountIndex), - /// A account index has been frozen to its current account ID. \[index, who\] - IndexFrozen(AccountIndex, AccountId), - } -); + /// The overarching event type. + type Event: From> + Into<::Event>; -decl_error! { - pub enum Error for Module { - /// The index was not already assigned. - NotAssigned, - /// The index is assigned to another account. - NotOwner, - /// The index was not available. - InUse, - /// The source and destination accounts are identical. - NotTransfer, - /// The index is permanent and may not be freed/changed. - Permanent, + /// Weight information for extrinsics in this pallet. + type WeightInfo: WeightInfo; } -} -decl_module! 
{ - pub struct Module for enum Call where origin: T::Origin, system = frame_system { - /// The deposit needed for reserving an index. - const Deposit: BalanceOf = T::Deposit::get(); + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(PhantomData); - fn deposit_event() = default; + #[pallet::hooks] + impl Hooks> for Pallet {} + #[pallet::call] + impl Pallet { /// Assign an previously unassigned index. /// /// Payment: `Deposit` is reserved from the sender account. @@ -127,8 +95,8 @@ decl_module! { /// ------------------- /// - DB Weight: 1 Read/Write (Accounts) /// # - #[weight = T::WeightInfo::claim()] - fn claim(origin, index: T::AccountIndex) { + #[weight(T::WeightInfo::claim())] + fn claim(origin: OriginFor, index: T::AccountIndex) -> DispatchResult { let who = ensure_signed(origin)?; Accounts::::try_mutate(index, |maybe_value| { @@ -137,6 +105,7 @@ decl_module! { T::Currency::reserve(&who, T::Deposit::get()) })?; Self::deposit_event(RawEvent::IndexAssigned(who, index)); + Ok(()) } /// Assign an index already owned by the sender to another account. The balance reservation @@ -159,8 +128,12 @@ decl_module! { /// - Reads: Indices Accounts, System Account (recipient) /// - Writes: Indices Accounts, System Account (recipient) /// # - #[weight = T::WeightInfo::transfer()] - fn transfer(origin, new: T::AccountId, index: T::AccountIndex) { + #[weight(T::WeightInfo::transfer())] + fn transfer( + origin: OriginFor, + new: T::AccountId, + index: T::AccountIndex, + ) -> DispatchResult { let who = ensure_signed(origin)?; ensure!(who != new, Error::::NotTransfer); @@ -173,6 +146,7 @@ decl_module! { Ok(()) })?; Self::deposit_event(RawEvent::IndexAssigned(new, index)); + Ok(()) } /// Free up an index owned by the sender. @@ -193,8 +167,8 @@ decl_module! 
{ /// ------------------- /// - DB Weight: 1 Read/Write (Accounts) /// # - #[weight = T::WeightInfo::free()] - fn free(origin, index: T::AccountIndex) { + #[weight(T::WeightInfo::free())] + fn free(origin: OriginFor, index: T::AccountIndex) -> DispatchResult { let who = ensure_signed(origin)?; Accounts::::try_mutate(index, |maybe_value| -> DispatchResult { @@ -205,6 +179,7 @@ decl_module! { Ok(()) })?; Self::deposit_event(RawEvent::IndexFreed(index)); + Ok(()) } /// Force an index to an account. This doesn't require a deposit. If the index is already @@ -228,8 +203,13 @@ decl_module! { /// - Reads: Indices Accounts, System Account (original owner) /// - Writes: Indices Accounts, System Account (original owner) /// # - #[weight = T::WeightInfo::force_transfer()] - fn force_transfer(origin, new: T::AccountId, index: T::AccountIndex, freeze: bool) { + #[weight(T::WeightInfo::force_transfer())] + fn force_transfer( + origin: OriginFor, + new: T::AccountId, + index: T::AccountIndex, + freeze: bool, + ) -> DispatchResult { ensure_root(origin)?; Accounts::::mutate(index, |maybe_value| { @@ -239,6 +219,7 @@ decl_module! { *maybe_value = Some((new.clone(), Zero::zero(), freeze)); }); Self::deposit_event(RawEvent::IndexAssigned(new, index)); + Ok(()) } /// Freeze an index so it will always point to the sender account. This consumes the deposit. @@ -258,8 +239,8 @@ decl_module! { /// ------------------- /// - DB Weight: 1 Read/Write (Accounts) /// # - #[weight = T::WeightInfo::freeze()] - fn freeze(origin, index: T::AccountIndex) { + #[weight(T::WeightInfo::freeze())] + fn freeze(origin: OriginFor, index: T::AccountIndex) -> DispatchResult { let who = ensure_signed(origin)?; Accounts::::try_mutate(index, |maybe_value| -> DispatchResult { @@ -271,11 +252,73 @@ decl_module! 
{ Ok(()) })?; Self::deposit_event(RawEvent::IndexFrozen(index, who)); + Ok(()) + } + } + + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + #[pallet::metadata(T::AccountId = "AccountId", T::AccountIndex = "AccountIndex")] + pub enum Event { + /// A account index was assigned. \[index, who\] + IndexAssigned(T::AccountId, T::AccountIndex), + /// A account index has been freed up (unassigned). \[index\] + IndexFreed(T::AccountIndex), + /// A account index has been frozen to its current account ID. \[index, who\] + IndexFrozen(T::AccountIndex, T::AccountId), + } + + /// Old name generated by `decl_event`. + #[deprecated(note="use `Event` instead")] + pub type RawEvent = Event; + + #[pallet::error] + pub enum Error { + /// The index was not already assigned. + NotAssigned, + /// The index is assigned to another account. + NotOwner, + /// The index was not available. + InUse, + /// The source and destination accounts are identical. + NotTransfer, + /// The index is permanent and may not be freed/changed. + Permanent, + } + + /// The lookup from index to account. + #[pallet::storage] + pub type Accounts = StorageMap< + _, Blake2_128Concat, + T::AccountIndex, + (T::AccountId, BalanceOf, bool) + >; + + #[pallet::genesis_config] + pub struct GenesisConfig { + pub indices: Vec<(T::AccountIndex, T::AccountId)>, + } + + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { + Self { + indices: Default::default(), + } + } + } + + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { + for (a, b) in &self.indices { + >::insert(a, (b, Zero::zero(), false)) + } } } } -impl Module { +impl Pallet { // PUBLIC IMMUTABLES /// Lookup an T::AccountIndex to get an Id, if there's one there. 
@@ -295,7 +338,7 @@ impl Module { } } -impl StaticLookup for Module { +impl StaticLookup for Pallet { type Source = MultiAddress; type Target = T::AccountId; diff --git a/frame/indices/src/upgrade.rs b/frame/indices/src/upgrade.rs index e69de29bb2d1d..f13d4e4063010 100644 --- a/frame/indices/src/upgrade.rs +++ b/frame/indices/src/upgrade.rs @@ -0,0 +1,112 @@ + +// Template for pallet upgrade for Indices + +pub use pallet::*; + +#[frame_support::pallet] +pub mod pallet { + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + use super::*; + + #[pallet::config] + pub trait Config: frame_system::Config + // TODO_MAYBE_ADDITIONAL_BOUNDS_AND_WHERE_CLAUSE + { + // TODO_ASSOCIATED_TYPE_AND_CONSTANTS + } + + + + #[pallet::pallet] + #[pallet::generate_store(pub(super) trait Store)] + pub struct Pallet(PhantomData); + + #[pallet::hooks] + impl Hooks> for Pallet + // TODO_MAYBE_WHERE_CLAUSE + { + // TODO_ON_FINALIZE + // TODO_ON_INITIALIZE + // TODO_ON_RUNTIME_UPGRADE + // TODO_INTEGRITY_TEST + // TODO_OFFCHAIN_WORKER + } + + #[pallet::call] + impl Pallet + // TODO_MAYBE_WHERE_CLAUSE + { + // TODO_UPGRADE_DISPATCHABLES + } + + #[pallet::inherent] + // TODO_INHERENT + + #[pallet::event] + #[pallet::generate_deposit(pub(super) fn deposit_event)] + // TODO_EVENT + + // TODO_REMOVE_IF_NO_EVENT + /// Old name generated by `decl_event`. + #[deprecated(note="use `Event` instead")] + pub type RawEvent /* TODO_PUT_EVENT_GENERICS */ = Event /* TODO_PUT_EVENT_GENERICS */; + + #[pallet::error] + // TODO_ERROR + + #[pallet::origin] + // TODO_ORIGIN + + #[pallet::validate_unsigned] + // TODO_VALIDATE_UNSIGNED + + + + /// The lookup from index to account. 
+ #[pallet::storage] + pub type Accounts = StorageMap<_, Blake2_128Concat, T::AccountIndex, (T::AccountId, BalanceOf, bool)>; + + + #[pallet::genesis_config] + pub struct GenesisConfig + // TODO_MAYBE_WHERE_CLAUSE + { + pub indices: Vec<(T::AccountIndex, T::AccountId)>, + } + + #[cfg(feature = "std")] + impl Default for GenesisConfig + // TODO_MAYBE_WHERE_CLAUSE + { + fn default() -> Self { + Self { + indices: Default::default(), + } + } + } + + + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig + // TODO_MAYBE_WHERE_CLAUSE + { + fn build(&self) { + { + let builder : fn(& Self) -> _ = | config : & GenesisConfig | config + . indices . iter() . cloned() . + map(|(a, b) |(a,(b, Zero::zero(), false))) . collect::> + (); let data = & builder(self); let data : & frame_support::sp_std:: + vec::Vec<(T::AccountIndex,(T::AccountId, BalanceOf, bool)) + > = data; data . iter() . + for_each(|(k, v) | + { + as frame_support::storage::StorageMap< + T::AccountIndex,(T::AccountId, BalanceOf, bool)> + >::insert::<& T::AccountIndex, & + (T::AccountId, BalanceOf, bool)>(k, v); + }); +} + } + } +} From 0a22ee4613191562362a8916fa9f24401472ce27 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 26 Mar 2021 14:49:01 +0000 Subject: [PATCH 105/503] Delete temp upgrade file --- frame/indices/src/upgrade.rs | 112 ----------------------------------- 1 file changed, 112 deletions(-) delete mode 100644 frame/indices/src/upgrade.rs diff --git a/frame/indices/src/upgrade.rs b/frame/indices/src/upgrade.rs deleted file mode 100644 index f13d4e4063010..0000000000000 --- a/frame/indices/src/upgrade.rs +++ /dev/null @@ -1,112 +0,0 @@ - -// Template for pallet upgrade for Indices - -pub use pallet::*; - -#[frame_support::pallet] -pub mod pallet { - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - use super::*; - - #[pallet::config] - pub trait Config: frame_system::Config - // TODO_MAYBE_ADDITIONAL_BOUNDS_AND_WHERE_CLAUSE - { - // 
TODO_ASSOCIATED_TYPE_AND_CONSTANTS - } - - - - #[pallet::pallet] - #[pallet::generate_store(pub(super) trait Store)] - pub struct Pallet(PhantomData); - - #[pallet::hooks] - impl Hooks> for Pallet - // TODO_MAYBE_WHERE_CLAUSE - { - // TODO_ON_FINALIZE - // TODO_ON_INITIALIZE - // TODO_ON_RUNTIME_UPGRADE - // TODO_INTEGRITY_TEST - // TODO_OFFCHAIN_WORKER - } - - #[pallet::call] - impl Pallet - // TODO_MAYBE_WHERE_CLAUSE - { - // TODO_UPGRADE_DISPATCHABLES - } - - #[pallet::inherent] - // TODO_INHERENT - - #[pallet::event] - #[pallet::generate_deposit(pub(super) fn deposit_event)] - // TODO_EVENT - - // TODO_REMOVE_IF_NO_EVENT - /// Old name generated by `decl_event`. - #[deprecated(note="use `Event` instead")] - pub type RawEvent /* TODO_PUT_EVENT_GENERICS */ = Event /* TODO_PUT_EVENT_GENERICS */; - - #[pallet::error] - // TODO_ERROR - - #[pallet::origin] - // TODO_ORIGIN - - #[pallet::validate_unsigned] - // TODO_VALIDATE_UNSIGNED - - - - /// The lookup from index to account. - #[pallet::storage] - pub type Accounts = StorageMap<_, Blake2_128Concat, T::AccountIndex, (T::AccountId, BalanceOf, bool)>; - - - #[pallet::genesis_config] - pub struct GenesisConfig - // TODO_MAYBE_WHERE_CLAUSE - { - pub indices: Vec<(T::AccountIndex, T::AccountId)>, - } - - #[cfg(feature = "std")] - impl Default for GenesisConfig - // TODO_MAYBE_WHERE_CLAUSE - { - fn default() -> Self { - Self { - indices: Default::default(), - } - } - } - - - #[pallet::genesis_build] - impl GenesisBuild for GenesisConfig - // TODO_MAYBE_WHERE_CLAUSE - { - fn build(&self) { - { - let builder : fn(& Self) -> _ = | config : & GenesisConfig | config - . indices . iter() . cloned() . - map(|(a, b) |(a,(b, Zero::zero(), false))) . collect::> - (); let data = & builder(self); let data : & frame_support::sp_std:: - vec::Vec<(T::AccountIndex,(T::AccountId, BalanceOf, bool)) - > = data; data . iter() . 
- for_each(|(k, v) | - { - as frame_support::storage::StorageMap< - T::AccountIndex,(T::AccountId, BalanceOf, bool)> - >::insert::<& T::AccountIndex, & - (T::AccountId, BalanceOf, bool)>(k, v); - }); -} - } - } -} From 5ca9a98b9b2bd53e28154b18ca187342df9b8825 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 26 Mar 2021 14:52:49 +0000 Subject: [PATCH 106/503] Fix some migration errors --- frame/indices/src/lib.rs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index 27b064d83e5d1..47ba70ec34ae2 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -62,7 +62,7 @@ pub mod pallet { type Deposit: Get>; /// The overarching event type. - type Event: From> + Into<::Event>; + type Event: From> + IsType<::Event>; /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; @@ -95,8 +95,8 @@ pub mod pallet { /// ------------------- /// - DB Weight: 1 Read/Write (Accounts) /// # - #[weight(T::WeightInfo::claim())] - fn claim(origin: OriginFor, index: T::AccountIndex) -> DispatchResult { + #[pallet::weight(T::WeightInfo::claim())] + pub(crate) fn claim(origin: OriginFor, index: T::AccountIndex) -> DispatchResult { let who = ensure_signed(origin)?; Accounts::::try_mutate(index, |maybe_value| { @@ -128,8 +128,8 @@ pub mod pallet { /// - Reads: Indices Accounts, System Account (recipient) /// - Writes: Indices Accounts, System Account (recipient) /// # - #[weight(T::WeightInfo::transfer())] - fn transfer( + #[pallet::weight(T::WeightInfo::transfer())] + pub(crate) fn transfer( origin: OriginFor, new: T::AccountId, index: T::AccountIndex, @@ -167,8 +167,8 @@ pub mod pallet { /// ------------------- /// - DB Weight: 1 Read/Write (Accounts) /// # - #[weight(T::WeightInfo::free())] - fn free(origin: OriginFor, index: T::AccountIndex) -> DispatchResult { + #[pallet::weight(T::WeightInfo::free())] + pub(crate) fn free(origin: OriginFor, index: 
T::AccountIndex) -> DispatchResult { let who = ensure_signed(origin)?; Accounts::::try_mutate(index, |maybe_value| -> DispatchResult { @@ -203,8 +203,8 @@ pub mod pallet { /// - Reads: Indices Accounts, System Account (original owner) /// - Writes: Indices Accounts, System Account (original owner) /// # - #[weight(T::WeightInfo::force_transfer())] - fn force_transfer( + #[pallet::weight(T::WeightInfo::force_transfer())] + pub(crate) fn force_transfer( origin: OriginFor, new: T::AccountId, index: T::AccountIndex, @@ -239,8 +239,8 @@ pub mod pallet { /// ------------------- /// - DB Weight: 1 Read/Write (Accounts) /// # - #[weight(T::WeightInfo::freeze())] - fn freeze(origin: OriginFor, index: T::AccountIndex) -> DispatchResult { + #[pallet::weight(T::WeightInfo::freeze())] + pub(crate) fn freeze(origin: OriginFor, index: T::AccountIndex) -> DispatchResult { let who = ensure_signed(origin)?; Accounts::::try_mutate(index, |maybe_value| -> DispatchResult { @@ -259,7 +259,7 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] #[pallet::metadata(T::AccountId = "AccountId", T::AccountIndex = "AccountIndex")] - pub enum Event { + pub enum Event { /// A account index was assigned. \[index, who\] IndexAssigned(T::AccountId, T::AccountIndex), /// A account index has been freed up (unassigned). 
\[index\] From 79287fe160fdcaee793700ed51e3cbc2a6be195e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 26 Mar 2021 14:59:28 +0000 Subject: [PATCH 107/503] Fix some warnings --- frame/indices/src/lib.rs | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index 47ba70ec34ae2..a1b11f17f4680 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -29,12 +29,9 @@ use sp_std::prelude::*; use codec::Codec; use sp_runtime::MultiAddress; use sp_runtime::traits::{ - StaticLookup, Member, LookupError, Zero, Saturating, AtLeast32Bit + StaticLookup, LookupError, Zero, Saturating, AtLeast32Bit }; -use frame_support::{Parameter, decl_module, decl_error, decl_event, decl_storage, ensure}; -use frame_support::dispatch::DispatchResult; -use frame_support::traits::{Currency, ReservableCurrency, Get, BalanceStatus::Reserved}; -use frame_system::{ensure_signed, ensure_root}; +use frame_support::traits::{Currency, ReservableCurrency, BalanceStatus::Reserved}; pub use weights::WeightInfo; type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; @@ -104,7 +101,7 @@ pub mod pallet { *maybe_value = Some((who.clone(), T::Deposit::get(), false)); T::Currency::reserve(&who, T::Deposit::get()) })?; - Self::deposit_event(RawEvent::IndexAssigned(who, index)); + Self::deposit_event(Event::IndexAssigned(who, index)); Ok(()) } @@ -145,7 +142,7 @@ pub mod pallet { *maybe_value = Some((new.clone(), amount.saturating_sub(lost), false)); Ok(()) })?; - Self::deposit_event(RawEvent::IndexAssigned(new, index)); + Self::deposit_event(Event::IndexAssigned(new, index)); Ok(()) } @@ -178,7 +175,7 @@ pub mod pallet { T::Currency::unreserve(&who, amount); Ok(()) })?; - Self::deposit_event(RawEvent::IndexFreed(index)); + Self::deposit_event(Event::IndexFreed(index)); Ok(()) } @@ -218,7 +215,7 @@ pub mod pallet { } *maybe_value = Some((new.clone(), Zero::zero(), freeze)); }); - 
Self::deposit_event(RawEvent::IndexAssigned(new, index)); + Self::deposit_event(Event::IndexAssigned(new, index)); Ok(()) } @@ -251,7 +248,7 @@ pub mod pallet { *maybe_value = Some((account, Zero::zero(), true)); Ok(()) })?; - Self::deposit_event(RawEvent::IndexFrozen(index, who)); + Self::deposit_event(Event::IndexFrozen(index, who)); Ok(()) } } From e7175712dbd1343925b9148a4f1b734f9f7f6aac Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 26 Mar 2021 15:25:29 +0000 Subject: [PATCH 108/503] Add serde bound, explicit balance type --- frame/indices/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index a1b11f17f4680..19697f2d941bb 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -49,7 +49,7 @@ pub mod pallet { pub trait Config: frame_system::Config { /// Type used for storing an account's index; implies the maximum number of accounts the system /// can hold. - type AccountIndex: Parameter + Member + Codec + Default + AtLeast32Bit + Copy; + type AccountIndex: Parameter + Member + MaybeSerializeDeserialize + Codec + Default + AtLeast32Bit + Copy; /// The currency trait. 
type Currency: ReservableCurrency; @@ -309,7 +309,7 @@ pub mod pallet { impl GenesisBuild for GenesisConfig { fn build(&self) { for (a, b) in &self.indices { - >::insert(a, (b, Zero::zero(), false)) + >::insert(a, (b, >::zero(), false)) } } } From d701aa9ee72b8dd63f3aeca921111bf4e7570e16 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 26 Mar 2021 15:55:41 +0000 Subject: [PATCH 109/503] Module -> Pallet --- frame/indices/src/benchmarking.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/indices/src/benchmarking.rs b/frame/indices/src/benchmarking.rs index 6ea39e9ccc23e..625a994af38f6 100644 --- a/frame/indices/src/benchmarking.rs +++ b/frame/indices/src/benchmarking.rs @@ -24,7 +24,7 @@ use frame_system::RawOrigin; use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; use sp_runtime::traits::Bounded; -use crate::Module as Indices; +use crate::Pallet as Indices; const SEED: u32 = 0; From 15c466ed6511347fa99a718edd139cf981011980 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 29 Mar 2021 15:06:42 +0100 Subject: [PATCH 110/503] Regenerate lock file --- Cargo.lock | 404 +++++++++++++++++++++++++++++++++++------------------ 1 file changed, 268 insertions(+), 136 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e1a18f3a8120b..ec8e280992546 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,7 +1,5 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 - [[package]] name = "Inflector" version = "0.11.4" @@ -116,9 +114,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afddf7f520a80dbf76e6f50a35bca42a2331ef227a28b3b6dc5c2e2338d114b1" +checksum = "28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b" [[package]] name = "approx" @@ -351,9 +349,9 @@ checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" [[package]] name = "async-trait" -version = "0.1.47" +version = "0.1.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e098e9c493fdf92832223594d9a164f96bdf17ba81a42aff86f85c76768726a" +checksum = "36ea56748e10732c49404c153638a15ec3d6211ec5ff35d9bb20e13b93576adf" dependencies = [ "proc-macro2", "quote", @@ -458,9 +456,9 @@ checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" [[package]] name = "bincode" -version = "1.3.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d175dfa69e619905c4c3cdb7c3c203fa3bdd5d51184e3afdb2742c0280493772" +checksum = "f30d3a39baa26f9651f17b375061f3233dde33424a8b72b0dbe93a68a0bc896d" dependencies = [ "byteorder", "serde", @@ -689,9 +687,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "byteorder" -version = "1.3.4" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" @@ -924,6 +922,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "convert_case" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation" version = "0.7.0" @@ -1157,7 +1161,7 @@ dependencies = [ "cfg-if 1.0.0", "crossbeam-utils 0.8.3", "lazy_static", - "memoffset 0.6.1", + "memoffset 0.6.3", "scopeguard", ] @@ -1253,9 +1257,9 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19" +checksum = "5e98e2ad1a782e33928b96fc3948e7c355e5af34ba4de7670fe8bac2a3b2006d" dependencies = [ "quote", "syn", @@ -1326,10 +1330,11 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.11" +version = "0.99.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" +checksum = "f82b1b72f1263f214c0f823371768776c4f5841b942c9883aa8e5ec584fd0ba6" dependencies = [ + "convert_case", "proc-macro2", "quote", "syn", @@ -1652,8 +1657,7 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6447e2f8178843749e8c8003206def83ec124a7859475395777a28b5338647c" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#3d7babf1e43ab630189c2488f3e99c075f0944f4" dependencies = [ "either", "futures 0.3.13", @@ -1663,13 +1667,13 @@ dependencies = [ "parity-scale-codec 2.0.1", "parking_lot 0.11.1", "rand 0.8.3", + "scale-info", ] [[package]] name = "fixed-hash" version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "byteorder", "rand 0.8.3", @@ -1729,7 
+1733,8 @@ dependencies = [ "linregress", "log", "parity-scale-codec 2.0.1", - "paste 1.0.4", + "paste 1.0.5", + "scale-info", "serde", "sp-api", "sp-io", @@ -1768,6 +1773,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1785,6 +1791,7 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -1794,11 +1801,22 @@ dependencies = [ "sp-version", ] +[[package]] +name = "frame-metadata" +version = "12.0.0" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#be8092d7bfb867026836c373d9fc3fdd255cbeb8" +dependencies = [ + "cfg-if 1.0.0", + "parity-scale-codec 2.0.1", + "scale-info", +] + [[package]] name = "frame-metadata" version = "13.0.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-std", @@ -1809,7 +1827,8 @@ name = "frame-support" version = "3.0.0" dependencies = [ "bitflags", - "frame-metadata", + "frame-metadata 12.0.0", + "frame-metadata 13.0.0", "frame-support-procedural", "frame-system", "impl-trait-for-tuples", @@ -1817,8 +1836,9 @@ dependencies = [ "once_cell", "parity-scale-codec 2.0.1", "parity-util-mem", - "paste 1.0.4", + "paste 1.0.5", "pretty_assertions", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1867,12 +1887,13 @@ dependencies = [ name = "frame-support-test" version = "3.0.0" dependencies = [ - "frame-metadata", + "frame-metadata 13.0.0", "frame-support", "frame-system", "parity-scale-codec 2.0.1", "pretty_assertions", "rustversion", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -1892,6 +1913,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -2050,7 +2072,7 @@ dependencies = [ "lazy_static", "log", "parking_lot 0.9.0", - "pin-project 0.4.27", + "pin-project 0.4.28", "serde", 
"serde_json", ] @@ -2319,9 +2341,9 @@ checksum = "62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3" [[package]] name = "handlebars" -version = "3.5.3" +version = "3.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb0867bbc5a3da37a753e78021d5fcf8a4db00e18dd2dd90fd36e24190e162d" +checksum = "580b6f551b29a3a02436318aed09ba1c58eea177dc49e39beac627ad356730a5" dependencies = [ "log", "pest", @@ -2561,7 +2583,7 @@ dependencies = [ "httparse", "httpdate", "itoa", - "pin-project 1.0.5", + "pin-project 1.0.6", "socket2 0.3.19", "tokio 0.2.25", "tower-service", @@ -2649,8 +2671,7 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "parity-scale-codec 2.0.1", ] @@ -2664,6 +2685,14 @@ dependencies = [ "serde", ] +[[package]] +name = "impl-serde" +version = "0.3.1" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +dependencies = [ + "serde", +] + [[package]] name = "impl-trait-for-tuples" version = "0.2.1" @@ -2785,9 +2814,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.47" +version = "0.3.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cfb73131c35423a367daf8cbd24100af0d077668c8c2943f0e7dd775fef0f65" +checksum = "dc9f84f9b115ce7843d60706df1422a916680bfdfcbdb0447c5614ff9d7e4d78" dependencies = [ "wasm-bindgen", ] @@ -2917,13 +2946,14 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.2.0-alpha" +version = "0.2.0-alpha.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "124797a4ea7430d0675db78e065e53316e3f1a3cbf0ee4d6dbdd42db7b08e193" +checksum = 
"9b15fc3a0ef2e02d770aa1a221d3412443dcaedc43e27d80c957dd5bbd65321b" dependencies = [ "async-trait", "futures 0.3.13", "hyper 0.13.10", + "hyper-rustls", "jsonrpsee-types", "jsonrpsee-utils", "log", @@ -2936,9 +2966,9 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.2.0-alpha.2" +version = "0.2.0-alpha.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb3f732ccbeafd15cefb59c7c7b5ac6c553c2653613b63e5e7feb7f06a219e9" +checksum = "6bb4afbda476e2ee11cc6245055c498c116fc8002d2d60fe8338b6ee15d84c3a" dependencies = [ "Inflector", "proc-macro2", @@ -2948,9 +2978,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.2.0-alpha.2" +version = "0.2.0-alpha.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a8cd20c190e75dc56f7543b9d5713c3186351b301b5507ea6b85d8c403aac78" +checksum = "c42a82588b5f7830e94341bb7e79d15f46070ab6f64dde1e3b3719721b61c5bf" dependencies = [ "async-trait", "futures 0.3.13", @@ -2963,9 +2993,9 @@ dependencies = [ [[package]] name = "jsonrpsee-utils" -version = "0.2.0-alpha" +version = "0.2.0-alpha.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0e45394ec3175a767c3c5bac584560e6ad9b56ebd73216c85ec8bab49619244" +checksum = "e65c77838fce96bc554b4a3a159d0b9a2497319ae9305c66ee853998c7ed2fd3" dependencies = [ "futures 0.3.13", "globset", @@ -3089,9 +3119,9 @@ checksum = "3576a87f2ba00f6f106fdfcd16db1d698d648a26ad8e0573cad8537c3c362d2a" [[package]] name = "libc" -version = "0.2.90" +version = "0.2.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba4aede83fc3617411dc6993bc8c70919750c1c257c6ca6a502aed6e0e2394ae" +checksum = "8916b1f6ca17130ec6568feccee27c156ad12037880833a3b842a823236502e7" [[package]] name = "libloading" @@ -3143,7 +3173,7 @@ dependencies = [ "libp2p-yamux", "parity-multiaddr", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "smallvec 1.6.1", "wasm-timer", ] 
@@ -3168,7 +3198,7 @@ dependencies = [ "multistream-select", "parity-multiaddr", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "prost", "prost-build", "rand 0.7.3", @@ -3287,7 +3317,7 @@ dependencies = [ "rand 0.7.3", "sha2 0.9.3", "smallvec 1.6.1", - "uint", + "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -3394,7 +3424,7 @@ checksum = "6ce3374f3b28162db9d3442c9347c4f14cb01e8290052615c7d341d40eae0599" dependencies = [ "futures 0.3.13", "log", - "pin-project 1.0.5", + "pin-project 1.0.6", "rand 0.7.3", "salsa20", "sha3", @@ -3413,7 +3443,7 @@ dependencies = [ "libp2p-core", "libp2p-swarm", "log", - "pin-project 1.0.5", + "pin-project 1.0.6", "prost", "prost-build", "rand 0.7.3", @@ -3758,9 +3788,9 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" +checksum = "f83fb6581e8ed1f85fd45c116db8405483899489e38406156c25eb743554361d" dependencies = [ "autocfg", ] @@ -3863,7 +3893,7 @@ checksum = "0840c1c50fd55e521b247f949c241c9997709f23bd7f023b9762cd561e935656" dependencies = [ "log", "mio", - "miow 0.3.6", + "miow 0.3.7", "winapi 0.3.9", ] @@ -3892,11 +3922,10 @@ dependencies = [ [[package]] name = "miow" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a33c1b55807fbed163481b5ba66db4b2fa6cde694a5027be10fb724206c5897" +checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" dependencies = [ - "socket2 0.3.19", "winapi 0.3.9", ] @@ -3950,9 +3979,9 @@ dependencies = [ [[package]] name = "multimap" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1255076139a83bb467426e7f8d0134968a8118844faa755985e077cf31850333" +checksum = 
"e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] name = "multistream-select" @@ -3963,7 +3992,7 @@ dependencies = [ "bytes 1.0.1", "futures 0.3.13", "log", - "pin-project 1.0.5", + "pin-project 1.0.6", "smallvec 1.6.1", "unsigned-varint 0.7.0", ] @@ -3997,12 +4026,12 @@ dependencies = [ [[package]] name = "nb-connect" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670361df1bc2399ee1ff50406a0d422587dd3bb0da596e1978fe8e05dabddf4f" +checksum = "a19900e7eee95eb2b3c2e26d12a874cc80aaf750e31be6fcbe743ead369fa45d" dependencies = [ "libc", - "socket2 0.3.19", + "socket2 0.4.0", ] [[package]] @@ -4222,6 +4251,7 @@ dependencies = [ "frame-system", "parity-scale-codec 2.0.1", "pretty_assertions", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4329,6 +4359,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4408,6 +4439,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4628,6 +4660,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4643,6 +4676,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4661,6 +4695,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec 2.0.1", "parking_lot 0.11.1", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-aura", @@ -4678,6 +4713,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4696,6 +4732,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec 2.0.1", + "scale-info", 
"serde", "sp-authorship", "sp-core", @@ -4722,6 +4759,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4744,6 +4782,7 @@ dependencies = [ "log", "pallet-transaction-payment", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4761,6 +4800,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4780,6 +4820,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4804,11 +4845,12 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec 2.0.1", "parity-wasm 0.41.0", - "paste 1.0.4", + "paste 1.0.5", "pretty_assertions", "pwasm-utils 0.16.0", "rand 0.7.3", "rand_pcg", + "scale-info", "serde", "sp-core", "sp-io", @@ -4825,6 +4867,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec 2.0.1", + "scale-info", "sp-runtime", "sp-std", ] @@ -4863,6 +4906,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec 2.0.1", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4879,6 +4923,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4901,8 +4946,9 @@ dependencies = [ "pallet-balances", "parity-scale-codec 2.0.1", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "rand 0.7.3", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -4924,6 +4970,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4942,6 +4989,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4961,6 +5009,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", 
"sp-io", @@ -4977,6 +5026,7 @@ dependencies = [ "lite-json", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4992,6 +5042,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5009,6 +5060,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -5035,6 +5087,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5057,6 +5110,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5075,6 +5129,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5093,6 +5148,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5111,6 +5167,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5125,6 +5182,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5144,6 +5202,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5194,6 +5253,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5209,6 +5269,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5224,6 +5285,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5240,6 +5302,7 @@ dependencies = [ "log", 
"pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5284,6 +5347,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5299,6 +5363,7 @@ dependencies = [ "frame-system", "parity-scale-codec 2.0.1", "safe-mix", + "scale-info", "serde", "sp-core", "sp-io", @@ -5315,6 +5380,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5331,6 +5397,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5347,6 +5414,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5364,6 +5432,7 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5408,6 +5477,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec 2.0.1", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-core", "sp-io", @@ -5432,8 +5502,9 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec 2.0.1", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5473,6 +5544,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5488,6 +5560,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5504,6 +5577,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5523,6 +5597,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5539,6 +5614,7 @@ dependencies = [ "frame-system", 
"pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5585,6 +5661,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5602,6 +5679,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5620,6 +5698,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5717,7 +5796,7 @@ dependencies = [ "libc", "log", "mio-named-pipes", - "miow 0.3.6", + "miow 0.3.7", "rand 0.7.3", "tokio 0.1.22", "tokio-named-pipes", @@ -5728,13 +5807,12 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "cfg-if 1.0.0", "hashbrown", "impl-trait-for-tuples", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.11.1", "primitive-types", "smallvec 1.6.1", @@ -5752,6 +5830,16 @@ dependencies = [ "synstructure", ] +[[package]] +name = "parity-util-mem-derive" +version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +dependencies = [ + "proc-macro2", + "syn", + "synstructure", +] + [[package]] name = "parity-wasm" version = "0.32.0" @@ -5878,9 +5966,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5d65c4d95931acda4498f675e332fcbdc9a06705cd07086c510e9b6009cd1c1" +checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" [[package]] name = "paste-impl" @@ 
-5989,27 +6077,27 @@ dependencies = [ [[package]] name = "pin-project" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffbc8e94b38ea3d2d8ba92aea2983b503cd75d0888d75b86bb37970b5698e15" +checksum = "918192b5c59119d51e0cd221f4d49dde9112824ba717369e903c97d076083d0f" dependencies = [ - "pin-project-internal 0.4.27", + "pin-project-internal 0.4.28", ] [[package]] name = "pin-project" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96fa8ebb90271c4477f144354485b8068bd8f6b78b428b01ba892ca26caf0b63" +checksum = "bc174859768806e91ae575187ada95c91a29e96a98dc5d2cd9a1fed039501ba6" dependencies = [ - "pin-project-internal 1.0.5", + "pin-project-internal 1.0.6", ] [[package]] name = "pin-project-internal" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895" +checksum = "3be26700300be6d9d23264c73211d8190e755b6b5ca7a1b28230025511b52a5e" dependencies = [ "proc-macro2", "quote", @@ -6018,9 +6106,9 @@ dependencies = [ [[package]] name = "pin-project-internal" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "758669ae3558c6f74bd2a18b41f7ac0b5a195aea6639d6a9b5e5d1ad5ba24c0b" +checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5" dependencies = [ "proc-macro2", "quote", @@ -6087,11 +6175,11 @@ dependencies = [ [[package]] name = "polling" -version = "2.0.2" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2a7bc6b2a29e632e45451c941832803a18cce6781db04de8a04696cdca8bde4" +checksum = "4fc12d774e799ee9ebae13f4076ca003b40d18a11ac0f3641e6f899618580b7b" dependencies = [ - "cfg-if 0.1.10", + "cfg-if 1.0.0", "libc", "log", "wepoll-sys", @@ -6166,13 +6254,14 @@ dependencies = [ [[package]] name = 
"primitive-types" version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "fixed-hash", "impl-codec", - "impl-serde", - "uint", + "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", + "parity-scale-codec 2.0.1", + "scale-info", + "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -6278,7 +6367,7 @@ dependencies = [ "prost", "prost-types", "tempfile", - "which 4.0.2", + "which 4.1.0", ] [[package]] @@ -6648,14 +6737,13 @@ dependencies = [ [[package]] name = "regex" -version = "1.4.3" +version = "1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9251239e129e16308e70d853559389de218ac275b515068abc96829d05b948a" +checksum = "957056ecddbeba1b26965114e191d2e8589ce74db242b6ea25fc4062427a5c19" dependencies = [ "aho-corasick", "memchr", "regex-syntax", - "thread_local", ] [[package]] @@ -6670,9 +6758,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.22" +version = "0.6.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581" +checksum = "24d5f089152e60f62d28b835fbff2cd2e8dc0baf1ac13343bef92ab7eed84548" [[package]] name = "region" @@ -6853,7 +6941,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4da5fcb054c46f5a5dff833b129285a93d3f0179531735e6c866e8cc307d2020" dependencies = [ "futures 0.3.13", - "pin-project 0.4.27", + "pin-project 0.4.28", "static_assertions", ] @@ -7363,7 +7451,7 @@ dependencies = [ "parity-scale-codec 2.0.1", "parity-wasm 0.41.0", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "sc-executor-common", "sc-executor-wasmi", "sc-executor-wasmtime", @@ -7452,7 +7540,7 @@ dependencies = [ "log", 
"parity-scale-codec 2.0.1", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "rand 0.7.3", "sc-block-builder", "sc-client-api", @@ -7624,7 +7712,7 @@ dependencies = [ "nohash-hasher", "parity-scale-codec 2.0.1", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "prost", "prost-build", "quickcheck", @@ -7873,7 +7961,7 @@ dependencies = [ "parity-scale-codec 2.0.1", "parity-util-mem", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "rand 0.7.3", "sc-block-builder", "sc-chain-spec", @@ -7967,7 +8055,7 @@ dependencies = [ "log", "parity-scale-codec 2.0.1", "parity-util-mem", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -8002,7 +8090,7 @@ dependencies = [ "libp2p", "log", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "rand 0.7.3", "serde", "serde_json", @@ -8105,6 +8193,28 @@ dependencies = [ "wasm-timer", ] +[[package]] +name = "scale-info" +version = "0.6.0" +source = "git+https://github.com/paritytech/scale-info?branch=master#c51a1d584092f27cd479b059e9de665e5da8c40e" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec 2.0.1", + "scale-info-derive", +] + +[[package]] +name = "scale-info-derive" +version = "0.4.0" +source = "git+https://github.com/paritytech/scale-info?branch=master#c51a1d584092f27cd479b059e9de665e5da8c40e" +dependencies = [ + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" @@ -8265,9 +8375,9 @@ checksum = "930c0acf610d3fdb5e2ab6213019aaa04e227ebe9547b0649ba599b16d788bd7" [[package]] name = "serde" -version = "1.0.124" +version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd761ff957cb2a45fbb9ab3da6512de9de55872866160b23c25f1a841e99d29f" +checksum = "558dc50e1a5a5fa7112ca2ce4effcb321b0300c0d4ccf0776a9f60cd89031171" 
dependencies = [ "serde_derive", ] @@ -8284,9 +8394,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.124" +version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1800f7693e94e186f5e25a28291ae1570da908aff7d97a095dec1e56ff99069b" +checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d" dependencies = [ "proc-macro2", "quote", @@ -8383,9 +8493,9 @@ checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" [[package]] name = "signal-hook" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a7f3f92a1da3d6b1d32245d0cbcbbab0cfc45996d8df619c42bccfa6d2bbb5f" +checksum = "6aa894ef3fade0ee7243422f4fbbd6c2b48e6de767e621d37ef65f2310f53cea" dependencies = [ "libc", "signal-hook-registry", @@ -8559,6 +8669,7 @@ name = "sp-application-crypto" version = "3.0.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -8587,6 +8698,7 @@ dependencies = [ "parity-scale-codec 2.0.1", "primitive-types", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8609,6 +8721,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8692,6 +8805,7 @@ name = "sp-consensus-aura" version = "0.9.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8708,6 +8822,7 @@ version = "0.9.0" dependencies = [ "merlin", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8738,6 +8853,7 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8768,7 +8884,7 @@ dependencies = [ "hash256-std-hasher", "hex", "hex-literal", - "impl-serde", + "impl-serde 0.3.1 
(registry+https://github.com/rust-lang/crates.io-index)", "lazy_static", "libsecp256k1", "log", @@ -8782,6 +8898,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -8836,6 +8953,7 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8986,8 +9104,9 @@ dependencies = [ "log", "parity-scale-codec 2.0.1", "parity-util-mem", - "paste 1.0.4", + "paste 1.0.5", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-api", @@ -9099,6 +9218,7 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9147,7 +9267,7 @@ version = "3.0.0" name = "sp-storage" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec 2.0.1", "ref-cast", "serde", @@ -9252,7 +9372,7 @@ dependencies = [ name = "sp-version" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec 2.0.1", "serde", "sp-runtime", @@ -9528,6 +9648,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9646,9 +9767,9 @@ checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2" [[package]] name = "syn" -version = "1.0.62" +version = "1.0.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "123a78a3596b24fee53a6464ce52d8ecbf62241e6294c7e7fe12086cd161f512" +checksum = "6498a9efc342871f91cc2d0d694c674368b4ceb40f62b65a7a08c3792935e702" dependencies = [ "proc-macro2", "quote", @@ -10205,9 +10326,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.13" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a8a9bd1db7706f2373a190b0d067146caa39350c486f3d455b0e33b431f94c07" +checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2" dependencies = [ "proc-macro2", "quote", @@ -10229,7 +10350,7 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" dependencies = [ - "pin-project 1.0.5", + "pin-project 1.0.6", "tracing", ] @@ -10256,9 +10377,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ab8966ac3ca27126141f7999361cc97dd6fb4b71da04c02044fa9045d98bb96" +checksum = "705096c6f83bf68ea5d357a6aa01829ddbdac531b357b45abeca842938085baa" dependencies = [ "ansi_term 0.12.1", "chrono", @@ -10428,9 +10549,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" +checksum = "879f6906492a7cd215bfa4cf595b600146ccfac0c79bcbd1f3000162af5e8b06" [[package]] name = "ucd-trie" @@ -10450,6 +10571,17 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "uint" +version = "0.9.0" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + [[package]] name = "unicase" version = "2.6.0" @@ -10581,9 +10713,9 @@ checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" [[package]] name = "vec-arena" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eafc1b9b2dfc6f5529177b62cf806484db55b32dc7c9658a118e11bbeb33061d" +checksum = "34b2f665b594b07095e3ac3f718e13c2197143416fae4c5706cffb7b1af8d7f1" [[package]] name = "vec_map" @@ -10593,9 +10725,9 @@ 
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" +checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" [[package]] name = "void" @@ -10620,9 +10752,9 @@ checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" [[package]] name = "walkdir" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" dependencies = [ "same-file", "winapi 0.3.9", @@ -10691,9 +10823,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3de431a2910c86679c34283a33f66f4e4abd7e0aec27b6669060148872aadf94" +checksum = "8e67a5806118af01f0d9045915676b22aaebecf4178ae7021bc171dab0b897ab" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -10732,9 +10864,9 @@ checksum = "7d6f8ec44822dd71f5f221a5847fb34acd9060535c1211b70a05844c0f6383b1" [[package]] name = "wasm-bindgen-test" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0d4da138503a4cf86801b94d95781ee3619faa8feca830569cc6b54997b8b5c" +checksum = "2ea9e4f0050d5498a160e6b9d278a9699598e445b51dacd05598da55114c801a" dependencies = [ "console_error_panic_hook", "js-sys", @@ -10746,9 +10878,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3199c33f06500c731d5544664c24d0c2b742b98debc6b1c6f0c6d6e8fb7c19b" +checksum = 
"43f40402f495d92df6cdd0d329e7cc2580c8f99bcd74faff0e468923a764b7d4" dependencies = [ "proc-macro2", "quote", @@ -10988,7 +11120,7 @@ dependencies = [ "lazy_static", "libc", "log", - "memoffset 0.6.1", + "memoffset 0.6.3", "more-asserts", "psm", "region", @@ -10999,9 +11131,9 @@ dependencies = [ [[package]] name = "wast" -version = "35.0.0" +version = "35.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db5ae96da18bb5926341516fd409b5a8ce4e4714da7f0a1063d3b20ac9f9a1e1" +checksum = "1a5800e9f86a1eae935e38bea11e60fd253f6d514d153fb39b3e5535a7b37b56" dependencies = [ "leb128", ] @@ -11017,9 +11149,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.47" +version = "0.3.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c40dc691fc48003eba817c38da7113c15698142da971298003cac3ef175680b3" +checksum = "ec600b26223b2948cedfde2a0aa6756dcf1fef616f43d7b3097aaf53a6c4d92b" dependencies = [ "js-sys", "wasm-bindgen", @@ -11064,12 +11196,12 @@ dependencies = [ [[package]] name = "which" -version = "4.0.2" +version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87c14ef7e1b8b8ecfc75d5eca37949410046e66f15d185c01d70824f1f8111ef" +checksum = "b55551e42cbdf2ce2bedd2203d0cc08dba002c27510f86dab6d0ce304cba3dfe" dependencies = [ + "either", "libc", - "thiserror", ] [[package]] From 603e2cc3b8d4f94010715cc868d7833f971092b6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 29 Mar 2021 16:26:56 +0100 Subject: [PATCH 111/503] Copy Cargo.lock from master --- Cargo.lock | 404 ++++++++++++++++++----------------------------------- 1 file changed, 136 insertions(+), 268 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ec8e280992546..e1a18f3a8120b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,5 +1,7 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
+version = 3 + [[package]] name = "Inflector" version = "0.11.4" @@ -114,9 +116,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.40" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b" +checksum = "afddf7f520a80dbf76e6f50a35bca42a2331ef227a28b3b6dc5c2e2338d114b1" [[package]] name = "approx" @@ -349,9 +351,9 @@ checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" [[package]] name = "async-trait" -version = "0.1.48" +version = "0.1.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36ea56748e10732c49404c153638a15ec3d6211ec5ff35d9bb20e13b93576adf" +checksum = "7e098e9c493fdf92832223594d9a164f96bdf17ba81a42aff86f85c76768726a" dependencies = [ "proc-macro2", "quote", @@ -456,9 +458,9 @@ checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" [[package]] name = "bincode" -version = "1.3.1" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30d3a39baa26f9651f17b375061f3233dde33424a8b72b0dbe93a68a0bc896d" +checksum = "d175dfa69e619905c4c3cdb7c3c203fa3bdd5d51184e3afdb2742c0280493772" dependencies = [ "byteorder", "serde", @@ -687,9 +689,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" [[package]] name = "bytes" @@ -922,12 +924,6 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" -[[package]] -name = "convert_case" -version = "0.4.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" - [[package]] name = "core-foundation" version = "0.7.0" @@ -1161,7 +1157,7 @@ dependencies = [ "cfg-if 1.0.0", "crossbeam-utils 0.8.3", "lazy_static", - "memoffset 0.6.3", + "memoffset 0.6.1", "scopeguard", ] @@ -1257,9 +1253,9 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.20" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e98e2ad1a782e33928b96fc3948e7c355e5af34ba4de7670fe8bac2a3b2006d" +checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19" dependencies = [ "quote", "syn", @@ -1330,11 +1326,10 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.13" +version = "0.99.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f82b1b72f1263f214c0f823371768776c4f5841b942c9883aa8e5ec584fd0ba6" +checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" dependencies = [ - "convert_case", "proc-macro2", "quote", "syn", @@ -1657,7 +1652,8 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.0" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#3d7babf1e43ab630189c2488f3e99c075f0944f4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6447e2f8178843749e8c8003206def83ec124a7859475395777a28b5338647c" dependencies = [ "either", "futures 0.3.13", @@ -1667,13 +1663,13 @@ dependencies = [ "parity-scale-codec 2.0.1", "parking_lot 0.11.1", "rand 0.8.3", - "scale-info", ] [[package]] name = "fixed-hash" version = "0.7.0" -source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" dependencies = [ "byteorder", "rand 0.8.3", @@ -1733,8 
+1729,7 @@ dependencies = [ "linregress", "log", "parity-scale-codec 2.0.1", - "paste 1.0.5", - "scale-info", + "paste 1.0.4", "serde", "sp-api", "sp-io", @@ -1773,7 +1768,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", - "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1791,7 +1785,6 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -1801,22 +1794,11 @@ dependencies = [ "sp-version", ] -[[package]] -name = "frame-metadata" -version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#be8092d7bfb867026836c373d9fc3fdd255cbeb8" -dependencies = [ - "cfg-if 1.0.0", - "parity-scale-codec 2.0.1", - "scale-info", -] - [[package]] name = "frame-metadata" version = "13.0.0" dependencies = [ "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-std", @@ -1827,8 +1809,7 @@ name = "frame-support" version = "3.0.0" dependencies = [ "bitflags", - "frame-metadata 12.0.0", - "frame-metadata 13.0.0", + "frame-metadata", "frame-support-procedural", "frame-system", "impl-trait-for-tuples", @@ -1836,9 +1817,8 @@ dependencies = [ "once_cell", "parity-scale-codec 2.0.1", "parity-util-mem", - "paste 1.0.5", + "paste 1.0.4", "pretty_assertions", - "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1887,13 +1867,12 @@ dependencies = [ name = "frame-support-test" version = "3.0.0" dependencies = [ - "frame-metadata 13.0.0", + "frame-metadata", "frame-support", "frame-system", "parity-scale-codec 2.0.1", "pretty_assertions", "rustversion", - "scale-info", "serde", "sp-core", "sp-inherents", @@ -1913,7 +1892,6 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-externalities", @@ -2072,7 +2050,7 @@ dependencies = [ "lazy_static", "log", "parking_lot 0.9.0", - "pin-project 0.4.28", + "pin-project 0.4.27", "serde", 
"serde_json", ] @@ -2341,9 +2319,9 @@ checksum = "62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3" [[package]] name = "handlebars" -version = "3.5.4" +version = "3.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "580b6f551b29a3a02436318aed09ba1c58eea177dc49e39beac627ad356730a5" +checksum = "cdb0867bbc5a3da37a753e78021d5fcf8a4db00e18dd2dd90fd36e24190e162d" dependencies = [ "log", "pest", @@ -2583,7 +2561,7 @@ dependencies = [ "httparse", "httpdate", "itoa", - "pin-project 1.0.6", + "pin-project 1.0.5", "socket2 0.3.19", "tokio 0.2.25", "tower-service", @@ -2671,7 +2649,8 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" dependencies = [ "parity-scale-codec 2.0.1", ] @@ -2685,14 +2664,6 @@ dependencies = [ "serde", ] -[[package]] -name = "impl-serde" -version = "0.3.1" -source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" -dependencies = [ - "serde", -] - [[package]] name = "impl-trait-for-tuples" version = "0.2.1" @@ -2814,9 +2785,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.48" +version = "0.3.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc9f84f9b115ce7843d60706df1422a916680bfdfcbdb0447c5614ff9d7e4d78" +checksum = "5cfb73131c35423a367daf8cbd24100af0d077668c8c2943f0e7dd775fef0f65" dependencies = [ "wasm-bindgen", ] @@ -2946,14 +2917,13 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.2.0-alpha.3" +version = "0.2.0-alpha" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b15fc3a0ef2e02d770aa1a221d3412443dcaedc43e27d80c957dd5bbd65321b" +checksum = 
"124797a4ea7430d0675db78e065e53316e3f1a3cbf0ee4d6dbdd42db7b08e193" dependencies = [ "async-trait", "futures 0.3.13", "hyper 0.13.10", - "hyper-rustls", "jsonrpsee-types", "jsonrpsee-utils", "log", @@ -2966,9 +2936,9 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.2.0-alpha.3" +version = "0.2.0-alpha.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb4afbda476e2ee11cc6245055c498c116fc8002d2d60fe8338b6ee15d84c3a" +checksum = "3cb3f732ccbeafd15cefb59c7c7b5ac6c553c2653613b63e5e7feb7f06a219e9" dependencies = [ "Inflector", "proc-macro2", @@ -2978,9 +2948,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.2.0-alpha.3" +version = "0.2.0-alpha.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42a82588b5f7830e94341bb7e79d15f46070ab6f64dde1e3b3719721b61c5bf" +checksum = "5a8cd20c190e75dc56f7543b9d5713c3186351b301b5507ea6b85d8c403aac78" dependencies = [ "async-trait", "futures 0.3.13", @@ -2993,9 +2963,9 @@ dependencies = [ [[package]] name = "jsonrpsee-utils" -version = "0.2.0-alpha.3" +version = "0.2.0-alpha" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e65c77838fce96bc554b4a3a159d0b9a2497319ae9305c66ee853998c7ed2fd3" +checksum = "c0e45394ec3175a767c3c5bac584560e6ad9b56ebd73216c85ec8bab49619244" dependencies = [ "futures 0.3.13", "globset", @@ -3119,9 +3089,9 @@ checksum = "3576a87f2ba00f6f106fdfcd16db1d698d648a26ad8e0573cad8537c3c362d2a" [[package]] name = "libc" -version = "0.2.91" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8916b1f6ca17130ec6568feccee27c156ad12037880833a3b842a823236502e7" +checksum = "ba4aede83fc3617411dc6993bc8c70919750c1c257c6ca6a502aed6e0e2394ae" [[package]] name = "libloading" @@ -3173,7 +3143,7 @@ dependencies = [ "libp2p-yamux", "parity-multiaddr", "parking_lot 0.11.1", - "pin-project 1.0.6", + "pin-project 1.0.5", "smallvec 1.6.1", "wasm-timer", ] 
@@ -3198,7 +3168,7 @@ dependencies = [ "multistream-select", "parity-multiaddr", "parking_lot 0.11.1", - "pin-project 1.0.6", + "pin-project 1.0.5", "prost", "prost-build", "rand 0.7.3", @@ -3317,7 +3287,7 @@ dependencies = [ "rand 0.7.3", "sha2 0.9.3", "smallvec 1.6.1", - "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "uint", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -3424,7 +3394,7 @@ checksum = "6ce3374f3b28162db9d3442c9347c4f14cb01e8290052615c7d341d40eae0599" dependencies = [ "futures 0.3.13", "log", - "pin-project 1.0.6", + "pin-project 1.0.5", "rand 0.7.3", "salsa20", "sha3", @@ -3443,7 +3413,7 @@ dependencies = [ "libp2p-core", "libp2p-swarm", "log", - "pin-project 1.0.6", + "pin-project 1.0.5", "prost", "prost-build", "rand 0.7.3", @@ -3788,9 +3758,9 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.6.3" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83fb6581e8ed1f85fd45c116db8405483899489e38406156c25eb743554361d" +checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" dependencies = [ "autocfg", ] @@ -3893,7 +3863,7 @@ checksum = "0840c1c50fd55e521b247f949c241c9997709f23bd7f023b9762cd561e935656" dependencies = [ "log", "mio", - "miow 0.3.7", + "miow 0.3.6", "winapi 0.3.9", ] @@ -3922,10 +3892,11 @@ dependencies = [ [[package]] name = "miow" -version = "0.3.7" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" +checksum = "5a33c1b55807fbed163481b5ba66db4b2fa6cde694a5027be10fb724206c5897" dependencies = [ + "socket2 0.3.19", "winapi 0.3.9", ] @@ -3979,9 +3950,9 @@ dependencies = [ [[package]] name = "multimap" -version = "0.8.3" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +checksum = 
"1255076139a83bb467426e7f8d0134968a8118844faa755985e077cf31850333" [[package]] name = "multistream-select" @@ -3992,7 +3963,7 @@ dependencies = [ "bytes 1.0.1", "futures 0.3.13", "log", - "pin-project 1.0.6", + "pin-project 1.0.5", "smallvec 1.6.1", "unsigned-varint 0.7.0", ] @@ -4026,12 +3997,12 @@ dependencies = [ [[package]] name = "nb-connect" -version = "1.1.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19900e7eee95eb2b3c2e26d12a874cc80aaf750e31be6fcbe743ead369fa45d" +checksum = "670361df1bc2399ee1ff50406a0d422587dd3bb0da596e1978fe8e05dabddf4f" dependencies = [ "libc", - "socket2 0.4.0", + "socket2 0.3.19", ] [[package]] @@ -4251,7 +4222,6 @@ dependencies = [ "frame-system", "parity-scale-codec 2.0.1", "pretty_assertions", - "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4359,7 +4329,6 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4439,7 +4408,6 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4660,7 +4628,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -4676,7 +4643,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -4695,7 +4661,6 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec 2.0.1", "parking_lot 0.11.1", - "scale-info", "serde", "sp-application-crypto", "sp-consensus-aura", @@ -4713,7 +4678,6 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4732,7 +4696,6 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec 2.0.1", - "scale-info", 
"serde", "sp-authorship", "sp-core", @@ -4759,7 +4722,6 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4782,7 +4744,6 @@ dependencies = [ "log", "pallet-transaction-payment", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -4800,7 +4761,6 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -4820,7 +4780,6 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -4845,12 +4804,11 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec 2.0.1", "parity-wasm 0.41.0", - "paste 1.0.5", + "paste 1.0.4", "pretty_assertions", "pwasm-utils 0.16.0", "rand 0.7.3", "rand_pcg", - "scale-info", "serde", "sp-core", "sp-io", @@ -4867,7 +4825,6 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec 2.0.1", - "scale-info", "sp-runtime", "sp-std", ] @@ -4906,7 +4863,6 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec 2.0.1", - "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4923,7 +4879,6 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -4946,9 +4901,8 @@ dependencies = [ "pallet-balances", "parity-scale-codec 2.0.1", "parking_lot 0.11.1", - "paste 1.0.5", + "paste 1.0.4", "rand 0.7.3", - "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -4970,7 +4924,6 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -4989,7 +4942,6 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5009,7 +4961,6 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", 
"sp-io", @@ -5026,7 +4977,6 @@ dependencies = [ "lite-json", "log", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5042,7 +4992,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5060,7 +5009,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -5087,7 +5035,6 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5110,7 +5057,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5129,7 +5075,6 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5148,7 +5093,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5167,7 +5111,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5182,7 +5125,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5202,7 +5144,6 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5253,7 +5194,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5269,7 +5209,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5285,7 +5224,6 @@ dependencies = [ "frame-system", "log", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5302,7 +5240,6 @@ dependencies = [ "log", 
"pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5347,7 +5284,6 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5363,7 +5299,6 @@ dependencies = [ "frame-system", "parity-scale-codec 2.0.1", "safe-mix", - "scale-info", "serde", "sp-core", "sp-io", @@ -5380,7 +5315,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5397,7 +5331,6 @@ dependencies = [ "frame-system", "log", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5414,7 +5347,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5432,7 +5364,6 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5477,7 +5408,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec 2.0.1", "rand_chacha 0.2.2", - "scale-info", "serde", "sp-core", "sp-io", @@ -5502,9 +5432,8 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec 2.0.1", "parking_lot 0.11.1", - "paste 1.0.5", + "paste 1.0.4", "rand_chacha 0.2.2", - "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5544,7 +5473,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5560,7 +5488,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5577,7 +5504,6 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-inherents", @@ -5597,7 +5523,6 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5614,7 +5539,6 @@ dependencies = [ "frame-system", 
"pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5661,7 +5585,6 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5679,7 +5602,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5698,7 +5620,6 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -5796,7 +5717,7 @@ dependencies = [ "libc", "log", "mio-named-pipes", - "miow 0.3.7", + "miow 0.3.6", "rand 0.7.3", "tokio 0.1.22", "tokio-named-pipes", @@ -5807,12 +5728,13 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" dependencies = [ "cfg-if 1.0.0", "hashbrown", "impl-trait-for-tuples", - "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common)", + "parity-util-mem-derive", "parking_lot 0.11.1", "primitive-types", "smallvec 1.6.1", @@ -5830,16 +5752,6 @@ dependencies = [ "synstructure", ] -[[package]] -name = "parity-util-mem-derive" -version = "0.1.0" -source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" -dependencies = [ - "proc-macro2", - "syn", - "synstructure", -] - [[package]] name = "parity-wasm" version = "0.32.0" @@ -5966,9 +5878,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.5" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" +checksum = "c5d65c4d95931acda4498f675e332fcbdc9a06705cd07086c510e9b6009cd1c1" [[package]] name = "paste-impl" @@ 
-6077,27 +5989,27 @@ dependencies = [ [[package]] name = "pin-project" -version = "0.4.28" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "918192b5c59119d51e0cd221f4d49dde9112824ba717369e903c97d076083d0f" +checksum = "2ffbc8e94b38ea3d2d8ba92aea2983b503cd75d0888d75b86bb37970b5698e15" dependencies = [ - "pin-project-internal 0.4.28", + "pin-project-internal 0.4.27", ] [[package]] name = "pin-project" -version = "1.0.6" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc174859768806e91ae575187ada95c91a29e96a98dc5d2cd9a1fed039501ba6" +checksum = "96fa8ebb90271c4477f144354485b8068bd8f6b78b428b01ba892ca26caf0b63" dependencies = [ - "pin-project-internal 1.0.6", + "pin-project-internal 1.0.5", ] [[package]] name = "pin-project-internal" -version = "0.4.28" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be26700300be6d9d23264c73211d8190e755b6b5ca7a1b28230025511b52a5e" +checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895" dependencies = [ "proc-macro2", "quote", @@ -6106,9 +6018,9 @@ dependencies = [ [[package]] name = "pin-project-internal" -version = "1.0.6" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5" +checksum = "758669ae3558c6f74bd2a18b41f7ac0b5a195aea6639d6a9b5e5d1ad5ba24c0b" dependencies = [ "proc-macro2", "quote", @@ -6175,11 +6087,11 @@ dependencies = [ [[package]] name = "polling" -version = "2.0.3" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fc12d774e799ee9ebae13f4076ca003b40d18a11ac0f3641e6f899618580b7b" +checksum = "a2a7bc6b2a29e632e45451c941832803a18cce6781db04de8a04696cdca8bde4" dependencies = [ - "cfg-if 1.0.0", + "cfg-if 0.1.10", "libc", "log", "wepoll-sys", @@ -6254,14 +6166,13 @@ dependencies = [ [[package]] name = 
"primitive-types" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" dependencies = [ "fixed-hash", "impl-codec", - "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", - "parity-scale-codec 2.0.1", - "scale-info", - "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", + "impl-serde", + "uint", ] [[package]] @@ -6367,7 +6278,7 @@ dependencies = [ "prost", "prost-types", "tempfile", - "which 4.1.0", + "which 4.0.2", ] [[package]] @@ -6737,13 +6648,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.4.5" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957056ecddbeba1b26965114e191d2e8589ce74db242b6ea25fc4062427a5c19" +checksum = "d9251239e129e16308e70d853559389de218ac275b515068abc96829d05b948a" dependencies = [ "aho-corasick", "memchr", "regex-syntax", + "thread_local", ] [[package]] @@ -6758,9 +6670,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.23" +version = "0.6.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5f089152e60f62d28b835fbff2cd2e8dc0baf1ac13343bef92ab7eed84548" +checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581" [[package]] name = "region" @@ -6941,7 +6853,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4da5fcb054c46f5a5dff833b129285a93d3f0179531735e6c866e8cc307d2020" dependencies = [ "futures 0.3.13", - "pin-project 0.4.28", + "pin-project 0.4.27", "static_assertions", ] @@ -7451,7 +7363,7 @@ dependencies = [ "parity-scale-codec 2.0.1", "parity-wasm 0.41.0", "parking_lot 0.11.1", - "paste 1.0.5", + "paste 1.0.4", "sc-executor-common", "sc-executor-wasmi", "sc-executor-wasmtime", @@ -7540,7 +7452,7 @@ dependencies = [ "log", 
"parity-scale-codec 2.0.1", "parking_lot 0.11.1", - "pin-project 1.0.6", + "pin-project 1.0.5", "rand 0.7.3", "sc-block-builder", "sc-client-api", @@ -7712,7 +7624,7 @@ dependencies = [ "nohash-hasher", "parity-scale-codec 2.0.1", "parking_lot 0.11.1", - "pin-project 1.0.6", + "pin-project 1.0.5", "prost", "prost-build", "quickcheck", @@ -7961,7 +7873,7 @@ dependencies = [ "parity-scale-codec 2.0.1", "parity-util-mem", "parking_lot 0.11.1", - "pin-project 1.0.6", + "pin-project 1.0.5", "rand 0.7.3", "sc-block-builder", "sc-chain-spec", @@ -8055,7 +7967,7 @@ dependencies = [ "log", "parity-scale-codec 2.0.1", "parity-util-mem", - "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-util-mem-derive", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -8090,7 +8002,7 @@ dependencies = [ "libp2p", "log", "parking_lot 0.11.1", - "pin-project 1.0.6", + "pin-project 1.0.5", "rand 0.7.3", "serde", "serde_json", @@ -8193,28 +8105,6 @@ dependencies = [ "wasm-timer", ] -[[package]] -name = "scale-info" -version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=master#c51a1d584092f27cd479b059e9de665e5da8c40e" -dependencies = [ - "cfg-if 1.0.0", - "derive_more", - "parity-scale-codec 2.0.1", - "scale-info-derive", -] - -[[package]] -name = "scale-info-derive" -version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=master#c51a1d584092f27cd479b059e9de665e5da8c40e" -dependencies = [ - "proc-macro-crate 1.0.0", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "schannel" version = "0.1.19" @@ -8375,9 +8265,9 @@ checksum = "930c0acf610d3fdb5e2ab6213019aaa04e227ebe9547b0649ba599b16d788bd7" [[package]] name = "serde" -version = "1.0.125" +version = "1.0.124" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "558dc50e1a5a5fa7112ca2ce4effcb321b0300c0d4ccf0776a9f60cd89031171" +checksum = "bd761ff957cb2a45fbb9ab3da6512de9de55872866160b23c25f1a841e99d29f" 
dependencies = [ "serde_derive", ] @@ -8394,9 +8284,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.125" +version = "1.0.124" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d" +checksum = "1800f7693e94e186f5e25a28291ae1570da908aff7d97a095dec1e56ff99069b" dependencies = [ "proc-macro2", "quote", @@ -8493,9 +8383,9 @@ checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" [[package]] name = "signal-hook" -version = "0.3.7" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aa894ef3fade0ee7243422f4fbbd6c2b48e6de767e621d37ef65f2310f53cea" +checksum = "8a7f3f92a1da3d6b1d32245d0cbcbbab0cfc45996d8df619c42bccfa6d2bbb5f" dependencies = [ "libc", "signal-hook-registry", @@ -8669,7 +8559,6 @@ name = "sp-application-crypto" version = "3.0.0" dependencies = [ "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-core", "sp-io", @@ -8698,7 +8587,6 @@ dependencies = [ "parity-scale-codec 2.0.1", "primitive-types", "rand 0.7.3", - "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8721,7 +8609,6 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec 2.0.1", - "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8805,7 +8692,6 @@ name = "sp-consensus-aura" version = "0.9.0" dependencies = [ "parity-scale-codec 2.0.1", - "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8822,7 +8708,6 @@ version = "0.9.0" dependencies = [ "merlin", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8853,7 +8738,6 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec 2.0.1", - "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8884,7 +8768,7 @@ dependencies = [ "hash256-std-hasher", "hex", "hex-literal", - "impl-serde 0.3.1 
(registry+https://github.com/rust-lang/crates.io-index)", + "impl-serde", "lazy_static", "libsecp256k1", "log", @@ -8898,7 +8782,6 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info", "schnorrkel", "secrecy", "serde", @@ -8953,7 +8836,6 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec 2.0.1", - "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9104,9 +8986,8 @@ dependencies = [ "log", "parity-scale-codec 2.0.1", "parity-util-mem", - "paste 1.0.5", + "paste 1.0.4", "rand 0.7.3", - "scale-info", "serde", "serde_json", "sp-api", @@ -9218,7 +9099,6 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec 2.0.1", - "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9267,7 +9147,7 @@ version = "3.0.0" name = "sp-storage" version = "3.0.0" dependencies = [ - "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-serde", "parity-scale-codec 2.0.1", "ref-cast", "serde", @@ -9372,7 +9252,7 @@ dependencies = [ name = "sp-version" version = "3.0.0" dependencies = [ - "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-serde", "parity-scale-codec 2.0.1", "serde", "sp-runtime", @@ -9648,7 +9528,6 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9767,9 +9646,9 @@ checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2" [[package]] name = "syn" -version = "1.0.67" +version = "1.0.62" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6498a9efc342871f91cc2d0d694c674368b4ceb40f62b65a7a08c3792935e702" +checksum = "123a78a3596b24fee53a6464ce52d8ecbf62241e6294c7e7fe12086cd161f512" dependencies = [ "proc-macro2", "quote", @@ -10326,9 +10205,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.15" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2" +checksum = "a8a9bd1db7706f2373a190b0d067146caa39350c486f3d455b0e33b431f94c07" dependencies = [ "proc-macro2", "quote", @@ -10350,7 +10229,7 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" dependencies = [ - "pin-project 1.0.6", + "pin-project 1.0.5", "tracing", ] @@ -10377,9 +10256,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.2.17" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "705096c6f83bf68ea5d357a6aa01829ddbdac531b357b45abeca842938085baa" +checksum = "8ab8966ac3ca27126141f7999361cc97dd6fb4b71da04c02044fa9045d98bb96" dependencies = [ "ansi_term 0.12.1", "chrono", @@ -10549,9 +10428,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.13.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879f6906492a7cd215bfa4cf595b600146ccfac0c79bcbd1f3000162af5e8b06" +checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" [[package]] name = "ucd-trie" @@ -10571,17 +10450,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "uint" -version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - [[package]] name = "unicase" version = "2.6.0" @@ -10713,9 +10581,9 @@ checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" [[package]] name = "vec-arena" -version = "1.1.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34b2f665b594b07095e3ac3f718e13c2197143416fae4c5706cffb7b1af8d7f1" +checksum = "eafc1b9b2dfc6f5529177b62cf806484db55b32dc7c9658a118e11bbeb33061d" [[package]] name = "vec_map" @@ -10725,9 +10593,9 @@ 
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" -version = "0.9.3" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" +checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" [[package]] name = "void" @@ -10752,9 +10620,9 @@ checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" [[package]] name = "walkdir" -version = "2.3.2" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" dependencies = [ "same-file", "winapi 0.3.9", @@ -10823,9 +10691,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.21" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e67a5806118af01f0d9045915676b22aaebecf4178ae7021bc171dab0b897ab" +checksum = "3de431a2910c86679c34283a33f66f4e4abd7e0aec27b6669060148872aadf94" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -10864,9 +10732,9 @@ checksum = "7d6f8ec44822dd71f5f221a5847fb34acd9060535c1211b70a05844c0f6383b1" [[package]] name = "wasm-bindgen-test" -version = "0.3.21" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ea9e4f0050d5498a160e6b9d278a9699598e445b51dacd05598da55114c801a" +checksum = "f0d4da138503a4cf86801b94d95781ee3619faa8feca830569cc6b54997b8b5c" dependencies = [ "console_error_panic_hook", "js-sys", @@ -10878,9 +10746,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.21" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f40402f495d92df6cdd0d329e7cc2580c8f99bcd74faff0e468923a764b7d4" +checksum = 
"c3199c33f06500c731d5544664c24d0c2b742b98debc6b1c6f0c6d6e8fb7c19b" dependencies = [ "proc-macro2", "quote", @@ -11120,7 +10988,7 @@ dependencies = [ "lazy_static", "libc", "log", - "memoffset 0.6.3", + "memoffset 0.6.1", "more-asserts", "psm", "region", @@ -11131,9 +10999,9 @@ dependencies = [ [[package]] name = "wast" -version = "35.0.1" +version = "35.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a5800e9f86a1eae935e38bea11e60fd253f6d514d153fb39b3e5535a7b37b56" +checksum = "db5ae96da18bb5926341516fd409b5a8ce4e4714da7f0a1063d3b20ac9f9a1e1" dependencies = [ "leb128", ] @@ -11149,9 +11017,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.48" +version = "0.3.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec600b26223b2948cedfde2a0aa6756dcf1fef616f43d7b3097aaf53a6c4d92b" +checksum = "c40dc691fc48003eba817c38da7113c15698142da971298003cac3ef175680b3" dependencies = [ "js-sys", "wasm-bindgen", @@ -11196,12 +11064,12 @@ dependencies = [ [[package]] name = "which" -version = "4.1.0" +version = "4.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b55551e42cbdf2ce2bedd2203d0cc08dba002c27510f86dab6d0ce304cba3dfe" +checksum = "87c14ef7e1b8b8ecfc75d5eca37949410046e66f15d185c01d70824f1f8111ef" dependencies = [ - "either", "libc", + "thiserror", ] [[package]] From ab1bb0344501b01a523e9af28af4347f4dea420c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 29 Mar 2021 16:30:00 +0100 Subject: [PATCH 112/503] cargo update -p wasmtime, to fix byteorder resolver --- Cargo.lock | 176 +++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 152 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e1a18f3a8120b..459118b545eb6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -458,9 +458,9 @@ checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" [[package]] name = "bincode" -version = "1.3.2" +version = "1.3.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d175dfa69e619905c4c3cdb7c3c203fa3bdd5d51184e3afdb2742c0280493772" +checksum = "f30d3a39baa26f9651f17b375061f3233dde33424a8b72b0dbe93a68a0bc896d" dependencies = [ "byteorder", "serde", @@ -689,9 +689,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "byteorder" -version = "1.3.4" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" @@ -1652,8 +1652,7 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6447e2f8178843749e8c8003206def83ec124a7859475395777a28b5338647c" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#3d7babf1e43ab630189c2488f3e99c075f0944f4" dependencies = [ "either", "futures 0.3.13", @@ -1663,13 +1662,13 @@ dependencies = [ "parity-scale-codec 2.0.1", "parking_lot 0.11.1", "rand 0.8.3", + "scale-info", ] [[package]] name = "fixed-hash" version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "byteorder", "rand 0.8.3", @@ -1730,6 +1729,7 @@ dependencies = [ "log", "parity-scale-codec 2.0.1", "paste 1.0.4", + "scale-info", "serde", "sp-api", "sp-io", @@ -1768,6 +1768,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1785,6 +1786,7 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec 2.0.1", + "scale-info", "serde", 
"sp-core", "sp-io", @@ -1794,11 +1796,22 @@ dependencies = [ "sp-version", ] +[[package]] +name = "frame-metadata" +version = "12.0.0" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#be8092d7bfb867026836c373d9fc3fdd255cbeb8" +dependencies = [ + "cfg-if 1.0.0", + "parity-scale-codec 2.0.1", + "scale-info", +] + [[package]] name = "frame-metadata" version = "13.0.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-std", @@ -1809,7 +1822,8 @@ name = "frame-support" version = "3.0.0" dependencies = [ "bitflags", - "frame-metadata", + "frame-metadata 12.0.0", + "frame-metadata 13.0.0", "frame-support-procedural", "frame-system", "impl-trait-for-tuples", @@ -1819,6 +1833,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "pretty_assertions", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1867,12 +1882,13 @@ dependencies = [ name = "frame-support-test" version = "3.0.0" dependencies = [ - "frame-metadata", + "frame-metadata 13.0.0", "frame-support", "frame-system", "parity-scale-codec 2.0.1", "pretty_assertions", "rustversion", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -1892,6 +1908,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -2649,8 +2666,7 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "parity-scale-codec 2.0.1", ] @@ -2664,6 +2680,14 @@ dependencies = [ "serde", ] +[[package]] +name = "impl-serde" +version = "0.3.1" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +dependencies = [ + "serde", +] + [[package]] name = "impl-trait-for-tuples" version = 
"0.2.1" @@ -3287,7 +3311,7 @@ dependencies = [ "rand 0.7.3", "sha2 0.9.3", "smallvec 1.6.1", - "uint", + "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -4222,6 +4246,7 @@ dependencies = [ "frame-system", "parity-scale-codec 2.0.1", "pretty_assertions", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4329,6 +4354,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4408,6 +4434,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4628,6 +4655,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4643,6 +4671,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4661,6 +4690,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec 2.0.1", "parking_lot 0.11.1", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-aura", @@ -4678,6 +4708,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4696,6 +4727,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-authorship", "sp-core", @@ -4722,6 +4754,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4744,6 +4777,7 @@ dependencies = [ "log", "pallet-transaction-payment", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4761,6 +4795,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", 
"parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4780,6 +4815,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4809,6 +4845,7 @@ dependencies = [ "pwasm-utils 0.16.0", "rand 0.7.3", "rand_pcg", + "scale-info", "serde", "sp-core", "sp-io", @@ -4825,6 +4862,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec 2.0.1", + "scale-info", "sp-runtime", "sp-std", ] @@ -4863,6 +4901,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec 2.0.1", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4879,6 +4918,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4903,6 +4943,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.4", "rand 0.7.3", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -4924,6 +4965,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4942,6 +4984,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4961,6 +5004,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4977,6 +5021,7 @@ dependencies = [ "lite-json", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -4992,6 +5037,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5009,6 +5055,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -5035,6 +5082,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5057,6 +5105,7 @@ 
dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5075,6 +5124,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5093,6 +5143,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5111,6 +5162,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5125,6 +5177,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5144,6 +5197,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5194,6 +5248,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5209,6 +5264,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5224,6 +5280,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5240,6 +5297,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5284,6 +5342,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5299,6 +5358,7 @@ dependencies = [ "frame-system", "parity-scale-codec 2.0.1", "safe-mix", + "scale-info", "serde", "sp-core", "sp-io", @@ -5315,6 +5375,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5331,6 +5392,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec 2.0.1", + "scale-info", 
"serde", "sp-core", "sp-io", @@ -5347,6 +5409,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5364,6 +5427,7 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5408,6 +5472,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec 2.0.1", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-core", "sp-io", @@ -5434,6 +5499,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.4", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5473,6 +5539,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5488,6 +5555,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5504,6 +5572,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5523,6 +5592,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5539,6 +5609,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5585,6 +5656,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5602,6 +5674,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5620,6 +5693,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -5728,13 +5802,12 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "cfg-if 1.0.0", "hashbrown", "impl-trait-for-tuples", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.11.1", "primitive-types", "smallvec 1.6.1", @@ -5752,6 +5825,16 @@ dependencies = [ "synstructure", ] +[[package]] +name = "parity-util-mem-derive" +version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +dependencies = [ + "proc-macro2", + "syn", + "synstructure", +] + [[package]] name = "parity-wasm" version = "0.32.0" @@ -6166,13 +6249,14 @@ dependencies = [ [[package]] name = "primitive-types" version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "fixed-hash", "impl-codec", - "impl-serde", - "uint", + "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", + "parity-scale-codec 2.0.1", + "scale-info", + "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -7967,7 +8051,7 @@ dependencies = [ "log", "parity-scale-codec 2.0.1", "parity-util-mem", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -8105,6 +8189,28 @@ dependencies = [ "wasm-timer", ] +[[package]] +name = "scale-info" +version = "0.6.0" +source = "git+https://github.com/paritytech/scale-info?branch=master#c51a1d584092f27cd479b059e9de665e5da8c40e" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec 2.0.1", + 
"scale-info-derive", +] + +[[package]] +name = "scale-info-derive" +version = "0.4.0" +source = "git+https://github.com/paritytech/scale-info?branch=master#c51a1d584092f27cd479b059e9de665e5da8c40e" +dependencies = [ + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" @@ -8559,6 +8665,7 @@ name = "sp-application-crypto" version = "3.0.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-core", "sp-io", @@ -8587,6 +8694,7 @@ dependencies = [ "parity-scale-codec 2.0.1", "primitive-types", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8609,6 +8717,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8692,6 +8801,7 @@ name = "sp-consensus-aura" version = "0.9.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8708,6 +8818,7 @@ version = "0.9.0" dependencies = [ "merlin", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8738,6 +8849,7 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8768,7 +8880,7 @@ dependencies = [ "hash256-std-hasher", "hex", "hex-literal", - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static", "libsecp256k1", "log", @@ -8782,6 +8894,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -8836,6 +8949,7 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec 2.0.1", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8988,6 +9102,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-api", @@ -9099,6 +9214,7 @@ name = 
"sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec 2.0.1", + "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9147,7 +9263,7 @@ version = "3.0.0" name = "sp-storage" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec 2.0.1", "ref-cast", "serde", @@ -9252,7 +9368,7 @@ dependencies = [ name = "sp-version" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec 2.0.1", "serde", "sp-runtime", @@ -9528,6 +9644,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -10450,6 +10567,17 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "uint" +version = "0.9.0" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + [[package]] name = "unicase" version = "2.6.0" From 53b2914e606346f4067be3db779ca88f9a6b0452 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 29 Mar 2021 16:49:05 +0100 Subject: [PATCH 113/503] Add some more TypeInfo bounds --- frame/support/src/traits/tokens/misc.rs | 6 +++--- primitives/runtime/src/lib.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/frame/support/src/traits/tokens/misc.rs b/frame/support/src/traits/tokens/misc.rs index 02f7ba384bd00..479506df4575b 100644 --- a/frame/support/src/traits/tokens/misc.rs +++ b/frame/support/src/traits/tokens/misc.rs @@ -115,7 +115,7 @@ pub enum ExistenceRequirement { } /// Status of funds. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum BalanceStatus { /// Funds are free, as corresponding to `free` item in Balances. 
Free, @@ -164,5 +164,5 @@ pub trait AssetId: FullCodec + Copy + Default + Eq + PartialEq {} impl AssetId for T {} /// Simple amalgamation trait to collect together properties for a Balance under one roof. -pub trait Balance: AtLeast32BitUnsigned + FullCodec + Copy + Default {} -impl Balance for T {} +pub trait Balance: AtLeast32BitUnsigned + FullCodec + Copy + Default + scale_info::TypeInfo {} +impl Balance for T {} diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index 7df523cd62f69..c3c0b305d83e0 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -537,7 +537,7 @@ impl From for DispatchError { } /// Description of what went wrong when trying to complete an operation on a token. -#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum TokenError { /// Funds are unavailable. From 73822c61273cb5e65d426fc11ee08c3e495d8d70 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 29 Mar 2021 17:32:48 +0100 Subject: [PATCH 114/503] Derive type infos now with scale-info const generics --- primitives/core/src/ecdsa.rs | 23 ++--------------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/primitives/core/src/ecdsa.rs b/primitives/core/src/ecdsa.rs index 7f9492e9e7ecb..19a1f497bb8e2 100644 --- a/primitives/core/src/ecdsa.rs +++ b/primitives/core/src/ecdsa.rs @@ -52,19 +52,9 @@ pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"ecds"); type Seed = [u8; 32]; /// The ECDSA compressed public key. -#[derive(Clone, Encode, Decode, PassByInner)] +#[derive(Clone, Encode, Decode, PassByInner, scale_info::TypeInfo)] pub struct Public(pub [u8; 33]); -// todo: remove this once https://github.com/paritytech/scale-info/pull/54 is merged, which -// introduces const generics for arrays and should support a 33 element array. 
-impl scale_info::TypeInfo for Public { - type Identity = Self; - - fn type_info() -> scale_info::Type { - scale_info::TypeDefArray::new(33, scale_info::MetaType::new::()).into() - } -} - impl PartialOrd for Public { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) @@ -237,18 +227,9 @@ impl sp_std::hash::Hash for Public { } /// A signature (a 512-bit value, plus 8 bits for recovery ID). -#[derive(Encode, Decode, PassByInner)] +#[derive(Encode, Decode, PassByInner, scale_info::TypeInfo)] pub struct Signature(pub [u8; 65]); -// todo: remove this once https://github.com/paritytech/scale-info/pull/54 is merged, which -// introduces const generics for arrays and should support a 65 element array. -impl scale_info::TypeInfo for Signature { - type Identity = Self; - - fn type_info() -> scale_info::Type { - scale_info::TypeDefArray::new(65, scale_info::MetaType::new::()).into() - } -} impl sp_std::convert::TryFrom<&[u8]> for Signature { type Error = (); From 3571c18767890762d7e61fde9a1ce36947874f01 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 30 Mar 2021 12:57:31 +0100 Subject: [PATCH 115/503] Add bounds TypeInfo to legacy RawEvent impl generics expansion --- frame/support/src/event.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 27ce4ac872214..35767aaa63cb6 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -299,7 +299,10 @@ macro_rules! __decl_generic_event { impl<$( $generic_param ),* $(, $instance)? 
> From> for () { fn from(_: RawEvent<$( $generic_param ),* $(, $instance)?>) -> () { () } } - impl<$( $generic_param ),* $(, $instance)?> RawEvent<$( $generic_param ),* $(, $instance)?> { + impl<$( $generic_param ),* $(, $instance)?> RawEvent<$( $generic_param ),* $(, $instance)?> + where + $( $generic_param: $crate::scale_info::TypeInfo + 'static ),* + { #[allow(dead_code)] #[doc(hidden)] pub fn metadata() -> &'static [$crate::event::EventMetadata] { From 5bf59961adbb616b880bed4b2af68c4a7833adad Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 30 Mar 2021 13:02:40 +0100 Subject: [PATCH 116/503] Add some compact Type bounds --- frame/indices/src/lib.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index 19697f2d941bb..b2d46deb8a39f 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -335,7 +335,10 @@ impl Pallet { } } -impl StaticLookup for Pallet { +impl StaticLookup for Pallet +where + ::Type: scale_info::TypeInfo // todo: [AJ] this is a result of the derived compact TypeInfo impl, can we get rid of it? 
+{ type Source = MultiAddress; type Target = T::AccountId; From a88f1f17550076f96f649faf1ea2117a72391ac3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 30 Mar 2021 13:08:14 +0100 Subject: [PATCH 117/503] Derive TypeInfo for types --- frame/assets/src/types.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/frame/assets/src/types.rs b/frame/assets/src/types.rs index 7e0e235b1b7e6..e23c59aefd6b7 100644 --- a/frame/assets/src/types.rs +++ b/frame/assets/src/types.rs @@ -21,7 +21,7 @@ use super::*; pub(super) type DepositBalanceOf = <::Currency as Currency<::AccountId>>::Balance; -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub struct AssetDetails< Balance, AccountId, @@ -65,7 +65,7 @@ impl AssetDetails { /// The owner of the funds that are being approved. pub(super) owner: AccountId, @@ -74,7 +74,7 @@ pub struct ApprovalKey { } /// Data concerning an approval. -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct Approval { /// The amount of funds approved for the balance transfer from the owner to some delegated /// target. @@ -83,7 +83,7 @@ pub struct Approval { pub(super) deposit: DepositBalance, } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct AssetBalance { /// The balance. pub(super) balance: Balance, @@ -95,7 +95,7 @@ pub struct AssetBalance { pub(super) extra: Extra, } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct AssetMetadata { /// The balance deposited for this metadata. 
/// @@ -112,7 +112,7 @@ pub struct AssetMetadata { } /// Witness data for the destroy transactions. -#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub struct DestroyWitness { /// The number of accounts holding the asset. #[codec(compact)] From 078a7a786c62a2e395c954b624b09f51d35864d5 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 30 Mar 2021 13:27:48 +0100 Subject: [PATCH 118/503] Temp custom TypeInfo impl for NposCompactSolution16 --- bin/node/runtime/src/lib.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 66db76c5be15c..1c4bd8c84bfcb 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -526,6 +526,17 @@ sp_npos_elections::generate_solution_type!( >(16) ); +// todo [AJ] probably need to generate custom TypeInfo in generate_solution_type!, see polkadot.js +impl scale_info::TypeInfo for NposCompactSolution16 { + type Identity = (); + + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("NposCompactSolution16", module_path!())) + .composite(scale_info::build::Fields::unit()) + } +} + impl pallet_election_provider_multi_phase::Config for Runtime { type Event = Event; type Currency = Balances; From 9ce874449b59321c9bcb626ef9870fe96408a22a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Apr 2021 16:07:28 +0100 Subject: [PATCH 119/503] cargo generate-lockfile --- Cargo.lock | 423 +++++++++++++++++++++++++++++++++++------------------ 1 file changed, 277 insertions(+), 146 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1824e956ced36..68c55ac14edd2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,7 +1,5 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
-version = 3 - [[package]] name = "Inflector" version = "0.11.4" @@ -116,9 +114,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afddf7f520a80dbf76e6f50a35bca42a2331ef227a28b3b6dc5c2e2338d114b1" +checksum = "28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b" [[package]] name = "approx" @@ -357,9 +355,9 @@ checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" [[package]] name = "async-trait" -version = "0.1.47" +version = "0.1.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e098e9c493fdf92832223594d9a164f96bdf17ba81a42aff86f85c76768726a" +checksum = "36ea56748e10732c49404c153638a15ec3d6211ec5ff35d9bb20e13b93576adf" dependencies = [ "proc-macro2", "quote", @@ -464,9 +462,9 @@ checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" [[package]] name = "bincode" -version = "1.3.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d175dfa69e619905c4c3cdb7c3c203fa3bdd5d51184e3afdb2742c0280493772" +checksum = "f30d3a39baa26f9651f17b375061f3233dde33424a8b72b0dbe93a68a0bc896d" dependencies = [ "byteorder", "serde", @@ -679,9 +677,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "byteorder" -version = "1.3.4" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" @@ -914,6 +912,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "convert_case" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation" version = "0.7.0" @@ -1149,7 +1153,7 @@ dependencies = [ "cfg-if 1.0.0", "crossbeam-utils 0.8.3", "lazy_static", - "memoffset 0.6.1", + "memoffset 0.6.3", "scopeguard", ] @@ -1245,9 +1249,9 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19" +checksum = "5e98e2ad1a782e33928b96fc3948e7c355e5af34ba4de7670fe8bac2a3b2006d" dependencies = [ "quote", "syn", @@ -1318,10 +1322,11 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.11" +version = "0.99.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" +checksum = "f82b1b72f1263f214c0f823371768776c4f5841b942c9883aa8e5ec584fd0ba6" dependencies = [ + "convert_case", "proc-macro2", "quote", "syn", @@ -1644,8 +1649,7 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6447e2f8178843749e8c8003206def83ec124a7859475395777a28b5338647c" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#3d7babf1e43ab630189c2488f3e99c075f0944f4" dependencies = [ "either", "futures 0.3.13", @@ -1655,13 +1659,13 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", + "scale-info", ] [[package]] name = "fixed-hash" version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "byteorder", "rand 0.8.3", @@ -1721,7 +1725,8 
@@ dependencies = [ "linregress", "log", "parity-scale-codec", - "paste 1.0.4", + "paste 1.0.5", + "scale-info", "serde", "sp-api", "sp-io", @@ -1760,6 +1765,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1777,6 +1783,7 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -1786,11 +1793,22 @@ dependencies = [ "sp-version", ] +[[package]] +name = "frame-metadata" +version = "12.0.0" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#be8092d7bfb867026836c373d9fc3fdd255cbeb8" +dependencies = [ + "cfg-if 1.0.0", + "parity-scale-codec", + "scale-info", +] + [[package]] name = "frame-metadata" version = "13.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-std", @@ -1801,7 +1819,8 @@ name = "frame-support" version = "3.0.0" dependencies = [ "bitflags", - "frame-metadata", + "frame-metadata 12.0.0", + "frame-metadata 13.0.0", "frame-support-procedural", "frame-system", "impl-trait-for-tuples", @@ -1809,8 +1828,9 @@ dependencies = [ "once_cell", "parity-scale-codec", "parity-util-mem", - "paste 1.0.4", + "paste 1.0.5", "pretty_assertions", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1859,12 +1879,13 @@ dependencies = [ name = "frame-support-test" version = "3.0.0" dependencies = [ - "frame-metadata", + "frame-metadata 13.0.0", "frame-support", "frame-system", "parity-scale-codec", "pretty_assertions", "rustversion", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -1884,6 +1905,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -2042,7 +2064,7 @@ dependencies = [ "lazy_static", "log", "parking_lot 0.9.0", - "pin-project 0.4.27", + "pin-project 0.4.28", "serde", "serde_json", ] @@ -2311,9 +2333,9 @@ checksum = 
"62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3" [[package]] name = "handlebars" -version = "3.5.3" +version = "3.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb0867bbc5a3da37a753e78021d5fcf8a4db00e18dd2dd90fd36e24190e162d" +checksum = "580b6f551b29a3a02436318aed09ba1c58eea177dc49e39beac627ad356730a5" dependencies = [ "log", "pest", @@ -2553,7 +2575,7 @@ dependencies = [ "httparse", "httpdate", "itoa", - "pin-project 1.0.5", + "pin-project 1.0.6", "socket2 0.3.19", "tokio 0.2.25", "tower-service", @@ -2641,8 +2663,7 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "parity-scale-codec", ] @@ -2656,6 +2677,14 @@ dependencies = [ "serde", ] +[[package]] +name = "impl-serde" +version = "0.3.1" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +dependencies = [ + "serde", +] + [[package]] name = "impl-trait-for-tuples" version = "0.2.1" @@ -2777,9 +2806,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.47" +version = "0.3.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cfb73131c35423a367daf8cbd24100af0d077668c8c2943f0e7dd775fef0f65" +checksum = "dc9f84f9b115ce7843d60706df1422a916680bfdfcbdb0447c5614ff9d7e4d78" dependencies = [ "wasm-bindgen", ] @@ -3082,9 +3111,9 @@ checksum = "3576a87f2ba00f6f106fdfcd16db1d698d648a26ad8e0573cad8537c3c362d2a" [[package]] name = "libc" -version = "0.2.90" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba4aede83fc3617411dc6993bc8c70919750c1c257c6ca6a502aed6e0e2394ae" +checksum = "9385f66bf6105b241aa65a61cb923ef20efc665cb9f9bb50ac2f0c4b7f378d41" 
[[package]] name = "libloading" @@ -3136,7 +3165,7 @@ dependencies = [ "libp2p-yamux", "parity-multiaddr", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "smallvec 1.6.1", "wasm-timer", ] @@ -3161,7 +3190,7 @@ dependencies = [ "multistream-select", "parity-multiaddr", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "prost", "prost-build", "rand 0.7.3", @@ -3188,9 +3217,9 @@ dependencies = [ [[package]] name = "libp2p-dns" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9712eb3e9f7dcc77cc5ca7d943b6a85ce4b1faaf91a67e003442412a26d6d6f8" +checksum = "62e63dab8b5ff35e0c101a3e51e843ba782c07bbb1682f5fd827622e0d02b98b" dependencies = [ "async-std-resolver", "futures 0.3.13", @@ -3280,7 +3309,7 @@ dependencies = [ "rand 0.7.3", "sha2 0.9.3", "smallvec 1.6.1", - "uint", + "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -3387,7 +3416,7 @@ checksum = "6ce3374f3b28162db9d3442c9347c4f14cb01e8290052615c7d341d40eae0599" dependencies = [ "futures 0.3.13", "log", - "pin-project 1.0.5", + "pin-project 1.0.6", "rand 0.7.3", "salsa20", "sha3", @@ -3406,7 +3435,7 @@ dependencies = [ "libp2p-core", "libp2p-swarm", "log", - "pin-project 1.0.5", + "pin-project 1.0.6", "prost", "prost-build", "rand 0.7.3", @@ -3629,9 +3658,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +checksum = "5a3c91c24eae6777794bb1997ad98bbb87daf92890acab859f7eaa4320333176" dependencies = [ "scopeguard", ] @@ -3733,9 +3762,9 @@ dependencies = [ [[package]] name = "memmap2" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e3e85b970d650e2ae6d70592474087051c11c54da7f7b4949725c5735fbcc6" 
+checksum = "397d1a6d6d0563c0f5462bbdae662cf6c784edf5e828e40c7257f85d82bf56dd" dependencies = [ "libc", ] @@ -3751,9 +3780,9 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.6.1" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" +checksum = "f83fb6581e8ed1f85fd45c116db8405483899489e38406156c25eb743554361d" dependencies = [ "autocfg", ] @@ -3856,7 +3885,7 @@ checksum = "0840c1c50fd55e521b247f949c241c9997709f23bd7f023b9762cd561e935656" dependencies = [ "log", "mio", - "miow 0.3.6", + "miow 0.3.7", "winapi 0.3.9", ] @@ -3885,11 +3914,10 @@ dependencies = [ [[package]] name = "miow" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a33c1b55807fbed163481b5ba66db4b2fa6cde694a5027be10fb724206c5897" +checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" dependencies = [ - "socket2 0.3.19", "winapi 0.3.9", ] @@ -3943,9 +3971,9 @@ dependencies = [ [[package]] name = "multimap" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1255076139a83bb467426e7f8d0134968a8118844faa755985e077cf31850333" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] name = "multistream-select" @@ -3956,7 +3984,7 @@ dependencies = [ "bytes 1.0.1", "futures 0.3.13", "log", - "pin-project 1.0.5", + "pin-project 1.0.6", "smallvec 1.6.1", "unsigned-varint 0.7.0", ] @@ -3990,12 +4018,12 @@ dependencies = [ [[package]] name = "nb-connect" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670361df1bc2399ee1ff50406a0d422587dd3bb0da596e1978fe8e05dabddf4f" +checksum = "a19900e7eee95eb2b3c2e26d12a874cc80aaf750e31be6fcbe743ead369fa45d" dependencies = [ "libc", - "socket2 0.3.19", + "socket2 0.4.0", ] [[package]] @@ -4216,6 
+4244,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "pretty_assertions", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4322,6 +4351,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4401,6 +4431,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4615,6 +4646,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4630,6 +4662,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4648,6 +4681,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-aura", @@ -4665,6 +4699,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", + "scale-info", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4683,6 +4718,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", + "scale-info", "serde", "sp-authorship", "sp-core", @@ -4709,6 +4745,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4731,6 +4768,7 @@ dependencies = [ "log", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4748,6 +4786,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4767,6 +4806,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4791,11 +4831,12 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parity-wasm 0.41.0", - 
"paste 1.0.4", + "paste 1.0.5", "pretty_assertions", "pwasm-utils 0.16.0", "rand 0.7.3", "rand_pcg", + "scale-info", "serde", "sp-core", "sp-io", @@ -4812,6 +4853,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec", + "scale-info", "sp-runtime", "sp-std", ] @@ -4850,6 +4892,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4866,6 +4909,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4888,8 +4932,9 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "rand 0.7.3", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -4911,6 +4956,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4929,6 +4975,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4948,6 +4995,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4964,6 +5012,7 @@ dependencies = [ "lite-json", "log", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4979,6 +5028,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4996,6 +5046,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -5022,6 +5073,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5044,6 +5096,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5062,6 +5115,7 @@ dependencies = [ "pallet-authorship", "pallet-session", 
"parity-scale-codec", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5080,6 +5134,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5098,6 +5153,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5112,6 +5168,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5131,6 +5188,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5181,6 +5239,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5196,6 +5255,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5211,6 +5271,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5227,6 +5288,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5271,6 +5333,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5286,6 +5349,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", + "scale-info", "serde", "sp-core", "sp-io", @@ -5302,6 +5366,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5318,6 +5383,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5334,6 +5400,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5351,6 +5418,7 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec", + "scale-info", 
"serde", "sp-application-crypto", "sp-core", @@ -5395,6 +5463,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-core", "sp-io", @@ -5419,8 +5488,9 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5460,6 +5530,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5475,6 +5546,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5491,6 +5563,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5510,6 +5583,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5526,6 +5600,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5572,6 +5647,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5589,6 +5665,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5607,6 +5684,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5692,7 +5770,7 @@ dependencies = [ "libc", "log", "mio-named-pipes", - "miow 0.3.6", + "miow 0.3.7", "rand 0.7.3", "tokio 0.1.22", "tokio-named-pipes", @@ -5703,13 +5781,12 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" +source = 
"git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "cfg-if 1.0.0", "hashbrown", "impl-trait-for-tuples", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.11.1", "primitive-types", "smallvec 1.6.1", @@ -5727,6 +5804,16 @@ dependencies = [ "synstructure", ] +[[package]] +name = "parity-util-mem-derive" +version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +dependencies = [ + "proc-macro2", + "syn", + "synstructure", +] + [[package]] name = "parity-wasm" version = "0.32.0" @@ -5794,7 +5881,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" dependencies = [ "instant", - "lock_api 0.4.2", + "lock_api 0.4.3", "parking_lot_core 0.8.3", ] @@ -5853,9 +5940,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5d65c4d95931acda4498f675e332fcbdc9a06705cd07086c510e9b6009cd1c1" +checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" [[package]] name = "paste-impl" @@ -5964,27 +6051,27 @@ dependencies = [ [[package]] name = "pin-project" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffbc8e94b38ea3d2d8ba92aea2983b503cd75d0888d75b86bb37970b5698e15" +checksum = "918192b5c59119d51e0cd221f4d49dde9112824ba717369e903c97d076083d0f" dependencies = [ - "pin-project-internal 0.4.27", + "pin-project-internal 0.4.28", ] [[package]] name = "pin-project" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96fa8ebb90271c4477f144354485b8068bd8f6b78b428b01ba892ca26caf0b63" +checksum = 
"bc174859768806e91ae575187ada95c91a29e96a98dc5d2cd9a1fed039501ba6" dependencies = [ - "pin-project-internal 1.0.5", + "pin-project-internal 1.0.6", ] [[package]] name = "pin-project-internal" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895" +checksum = "3be26700300be6d9d23264c73211d8190e755b6b5ca7a1b28230025511b52a5e" dependencies = [ "proc-macro2", "quote", @@ -5993,9 +6080,9 @@ dependencies = [ [[package]] name = "pin-project-internal" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "758669ae3558c6f74bd2a18b41f7ac0b5a195aea6639d6a9b5e5d1ad5ba24c0b" +checksum = "a490329918e856ed1b083f244e3bfe2d8c4f336407e4ea9e1a9f479ff09049e5" dependencies = [ "proc-macro2", "quote", @@ -6062,11 +6149,11 @@ dependencies = [ [[package]] name = "polling" -version = "2.0.2" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2a7bc6b2a29e632e45451c941832803a18cce6781db04de8a04696cdca8bde4" +checksum = "4fc12d774e799ee9ebae13f4076ca003b40d18a11ac0f3641e6f899618580b7b" dependencies = [ - "cfg-if 0.1.10", + "cfg-if 1.0.0", "libc", "log", "wepoll-sys", @@ -6141,13 +6228,14 @@ dependencies = [ [[package]] name = "primitive-types" version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" dependencies = [ "fixed-hash", "impl-codec", - "impl-serde", - "uint", + "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", + "parity-scale-codec", + "scale-info", + "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -6207,9 +6295,9 @@ checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" 
[[package]] name = "proc-macro2" -version = "1.0.24" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e0704ee1a7e00d7bb417d0770ea303c1bccbabf0ef1667dae92b5967f5f8a71" +checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec" dependencies = [ "unicode-xid", ] @@ -6253,7 +6341,7 @@ dependencies = [ "prost", "prost-types", "tempfile", - "which 4.0.2", + "which 4.1.0", ] [[package]] @@ -6607,14 +6695,13 @@ dependencies = [ [[package]] name = "regex" -version = "1.4.3" +version = "1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9251239e129e16308e70d853559389de218ac275b515068abc96829d05b948a" +checksum = "957056ecddbeba1b26965114e191d2e8589ce74db242b6ea25fc4062427a5c19" dependencies = [ "aho-corasick", "memchr", "regex-syntax", - "thread_local", ] [[package]] @@ -6629,9 +6716,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.22" +version = "0.6.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581" +checksum = "24d5f089152e60f62d28b835fbff2cd2e8dc0baf1ac13343bef92ab7eed84548" [[package]] name = "region" @@ -6812,7 +6899,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4da5fcb054c46f5a5dff833b129285a93d3f0179531735e6c866e8cc307d2020" dependencies = [ "futures 0.3.13", - "pin-project 0.4.27", + "pin-project 0.4.28", "static_assertions", ] @@ -7327,7 +7414,7 @@ dependencies = [ "parity-scale-codec", "parity-wasm 0.41.0", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "sc-executor-common", "sc-executor-wasmi", "sc-executor-wasmtime", @@ -7418,7 +7505,7 @@ dependencies = [ "log", "parity-scale-codec", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "rand 0.7.3", "sc-block-builder", "sc-client-api", @@ -7590,7 +7677,7 @@ dependencies = [ "nohash-hasher", "parity-scale-codec", "parking_lot 
0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "prost", "prost-build", "quickcheck", @@ -7841,7 +7928,7 @@ dependencies = [ "parity-scale-codec", "parity-util-mem", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "rand 0.7.3", "sc-block-builder", "sc-chain-spec", @@ -7935,7 +8022,7 @@ dependencies = [ "log", "parity-scale-codec", "parity-util-mem", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -7970,7 +8057,7 @@ dependencies = [ "libp2p", "log", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.6", "rand 0.7.3", "serde", "serde_json", @@ -8073,6 +8160,28 @@ dependencies = [ "wasm-timer", ] +[[package]] +name = "scale-info" +version = "0.6.0" +source = "git+https://github.com/paritytech/scale-info?branch=master#a02b1516a8657c850f1c97e73f7402eb1304af62" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive", +] + +[[package]] +name = "scale-info-derive" +version = "0.4.0" +source = "git+https://github.com/paritytech/scale-info?branch=master#a02b1516a8657c850f1c97e73f7402eb1304af62" +dependencies = [ + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" @@ -8233,9 +8342,9 @@ checksum = "930c0acf610d3fdb5e2ab6213019aaa04e227ebe9547b0649ba599b16d788bd7" [[package]] name = "serde" -version = "1.0.124" +version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd761ff957cb2a45fbb9ab3da6512de9de55872866160b23c25f1a841e99d29f" +checksum = "558dc50e1a5a5fa7112ca2ce4effcb321b0300c0d4ccf0776a9f60cd89031171" dependencies = [ "serde_derive", ] @@ -8252,9 +8361,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.124" +version = "1.0.125" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1800f7693e94e186f5e25a28291ae1570da908aff7d97a095dec1e56ff99069b" +checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d" dependencies = [ "proc-macro2", "quote", @@ -8351,9 +8460,9 @@ checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" [[package]] name = "signal-hook" -version = "0.3.6" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a7f3f92a1da3d6b1d32245d0cbcbbab0cfc45996d8df619c42bccfa6d2bbb5f" +checksum = "ef33d6d0cd06e0840fba9985aab098c147e67e05cee14d412d3345ed14ff30ac" dependencies = [ "libc", "signal-hook-registry", @@ -8527,6 +8636,7 @@ name = "sp-application-crypto" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -8555,6 +8665,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8577,6 +8688,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8661,6 +8773,7 @@ name = "sp-consensus-aura" version = "0.9.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8677,6 +8790,7 @@ version = "0.9.0" dependencies = [ "merlin", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8707,6 +8821,7 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8737,7 +8852,7 @@ dependencies = [ "hash256-std-hasher", "hex", "hex-literal", - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static", "libsecp256k1", "log", @@ -8751,6 +8866,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -8805,6 +8921,7 @@ dependencies = [ "finality-grandpa", 
"log", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8955,8 +9072,9 @@ dependencies = [ "log", "parity-scale-codec", "parity-util-mem", - "paste 1.0.4", + "paste 1.0.5", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-api", @@ -9068,6 +9186,7 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9116,7 +9235,7 @@ version = "3.0.0" name = "sp-storage" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec", "ref-cast", "serde", @@ -9221,7 +9340,7 @@ dependencies = [ name = "sp-version" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec", "serde", "sp-runtime", @@ -9499,6 +9618,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9617,9 +9737,9 @@ checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2" [[package]] name = "syn" -version = "1.0.62" +version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "123a78a3596b24fee53a6464ce52d8ecbf62241e6294c7e7fe12086cd161f512" +checksum = "3ce15dd3ed8aa2f8eeac4716d6ef5ab58b6b9256db41d7e1a0224c2788e8fd87" dependencies = [ "proc-macro2", "quote", @@ -9849,9 +9969,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.1.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317cca572a0e89c3ce0ca1f1bdc9369547fe318a683418e42ac8f59d14701023" +checksum = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342" dependencies = [ "tinyvec_macros", ] @@ -10168,9 +10288,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.13" +version = "0.1.15" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8a9bd1db7706f2373a190b0d067146caa39350c486f3d455b0e33b431f94c07" +checksum = "c42e6fa53307c8a17e4ccd4dc81cf5ec38db9209f59b222210375b54ee40d1e2" dependencies = [ "proc-macro2", "quote", @@ -10192,7 +10312,7 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" dependencies = [ - "pin-project 1.0.5", + "pin-project 1.0.6", "tracing", ] @@ -10219,9 +10339,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ab8966ac3ca27126141f7999361cc97dd6fb4b71da04c02044fa9045d98bb96" +checksum = "705096c6f83bf68ea5d357a6aa01829ddbdac531b357b45abeca842938085baa" dependencies = [ "ansi_term 0.12.1", "chrono", @@ -10391,9 +10511,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" +checksum = "879f6906492a7cd215bfa4cf595b600146ccfac0c79bcbd1f3000162af5e8b06" [[package]] name = "ucd-trie" @@ -10413,6 +10533,17 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "uint" +version = "0.9.0" +source = "git+https://github.com/paritytech/parity-common#50fcd2317dc3b970cefaf523c5529bfffd9a5e2a" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + [[package]] name = "unicase" version = "2.6.0" @@ -10424,9 +10555,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" +checksum = "eeb8be209bb1c96b7c177c7420d26e04eccacb0eeae6b980e35fcb74678107e0" dependencies = [ "matches", ] @@ -10544,9 +10675,9 
@@ checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" [[package]] name = "vec-arena" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eafc1b9b2dfc6f5529177b62cf806484db55b32dc7c9658a118e11bbeb33061d" +checksum = "34b2f665b594b07095e3ac3f718e13c2197143416fae4c5706cffb7b1af8d7f1" [[package]] name = "vec_map" @@ -10556,9 +10687,9 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" +checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" [[package]] name = "void" @@ -10583,9 +10714,9 @@ checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" [[package]] name = "walkdir" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" dependencies = [ "same-file", "winapi 0.3.9", @@ -10654,9 +10785,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3de431a2910c86679c34283a33f66f4e4abd7e0aec27b6669060148872aadf94" +checksum = "8e67a5806118af01f0d9045915676b22aaebecf4178ae7021bc171dab0b897ab" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -10695,9 +10826,9 @@ checksum = "7d6f8ec44822dd71f5f221a5847fb34acd9060535c1211b70a05844c0f6383b1" [[package]] name = "wasm-bindgen-test" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0d4da138503a4cf86801b94d95781ee3619faa8feca830569cc6b54997b8b5c" +checksum = 
"2ea9e4f0050d5498a160e6b9d278a9699598e445b51dacd05598da55114c801a" dependencies = [ "console_error_panic_hook", "js-sys", @@ -10709,9 +10840,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3199c33f06500c731d5544664c24d0c2b742b98debc6b1c6f0c6d6e8fb7c19b" +checksum = "43f40402f495d92df6cdd0d329e7cc2580c8f99bcd74faff0e468923a764b7d4" dependencies = [ "proc-macro2", "quote", @@ -10787,7 +10918,7 @@ dependencies = [ "indexmap", "libc", "log", - "paste 1.0.4", + "paste 1.0.5", "region", "rustc-demangle", "serde", @@ -10965,7 +11096,7 @@ dependencies = [ "lazy_static", "libc", "log", - "memoffset 0.6.1", + "memoffset 0.6.3", "more-asserts", "psm", "region", @@ -10976,27 +11107,27 @@ dependencies = [ [[package]] name = "wast" -version = "35.0.0" +version = "35.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db5ae96da18bb5926341516fd409b5a8ce4e4714da7f0a1063d3b20ac9f9a1e1" +checksum = "1a5800e9f86a1eae935e38bea11e60fd253f6d514d153fb39b3e5535a7b37b56" dependencies = [ "leb128", ] [[package]] name = "wat" -version = "1.0.36" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b0fa059022c5dabe129f02b429d67086400deb8277f89c975555dacc1dadbcc" +checksum = "8ec280a739b69173e0ffd12c1658507996836ba4e992ed9bc1e5385a0bd72a02" dependencies = [ "wast", ] [[package]] name = "web-sys" -version = "0.3.47" +version = "0.3.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c40dc691fc48003eba817c38da7113c15698142da971298003cac3ef175680b3" +checksum = "ec600b26223b2948cedfde2a0aa6756dcf1fef616f43d7b3097aaf53a6c4d92b" dependencies = [ "js-sys", "wasm-bindgen", @@ -11014,9 +11145,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.21.0" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"82015b7e0b8bad8185994674a13a93306bea76cf5a16c5a181382fd3a5ec2376" +checksum = "aabe153544e473b775453675851ecc86863d2a81d786d741f6b76778f2a48940" dependencies = [ "webpki", ] @@ -11041,12 +11172,12 @@ dependencies = [ [[package]] name = "which" -version = "4.0.2" +version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87c14ef7e1b8b8ecfc75d5eca37949410046e66f15d185c01d70824f1f8111ef" +checksum = "b55551e42cbdf2ce2bedd2203d0cc08dba002c27510f86dab6d0ce304cba3dfe" dependencies = [ + "either", "libc", - "thiserror", ] [[package]] From 7cb1b2a81400d1b394267d7a731a4549737ec4bf Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Apr 2021 17:12:15 +0100 Subject: [PATCH 120/503] Upgrade libp2p-wasm-ext and wasm-bindgen to fix compiler errors --- Cargo.lock | 28 ++++++++++++++-------------- bin/node/browser-testing/Cargo.toml | 2 +- bin/node/cli/Cargo.toml | 4 ++-- client/tracing/Cargo.toml | 2 +- utils/browser/Cargo.toml | 4 ++-- 5 files changed, 20 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 68c55ac14edd2..d44e61f5f9328 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2806,9 +2806,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.48" +version = "0.3.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc9f84f9b115ce7843d60706df1422a916680bfdfcbdb0447c5614ff9d7e4d78" +checksum = "2d99f9e3e84b8f67f846ef5b4cbbc3b1c29f6c759fcbce6f01aa0e73d932a24c" dependencies = [ "wasm-bindgen", ] @@ -3522,9 +3522,9 @@ dependencies = [ [[package]] name = "libp2p-wasm-ext" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6df65fc13f6188edf7e6927b086330448b3ca27af86b49748c6d299d7c8d9040" +checksum = "cef45d61e43c313531b5e903e4e8415212ff6338e0c54c47da5b9b412b5760de" dependencies = [ "futures 0.3.13", "js-sys", @@ -10758,9 +10758,9 @@ checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" 
[[package]] name = "wasm-bindgen" -version = "0.2.71" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ee1280240b7c461d6a0071313e08f34a60b0365f14260362e5a2b17d1d31aa7" +checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9" dependencies = [ "cfg-if 1.0.0", "serde", @@ -10770,9 +10770,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.71" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b7d8b6942b8bb3a9b0e73fc79b98095a27de6fa247615e59d096754a3bc2aa8" +checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae" dependencies = [ "bumpalo", "lazy_static", @@ -10797,9 +10797,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.71" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ac38da8ef716661f0f36c0d8320b89028efe10c7c0afde65baffb496ce0d3b" +checksum = "3e734d91443f177bfdb41969de821e15c516931c3c3db3d318fa1b68975d0f6f" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -10807,9 +10807,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.71" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc053ec74d454df287b9374ee8abb36ffd5acb95ba87da3ba5b7d3fe20eb401e" +checksum = "d53739ff08c8a68b0fdbcd54c372b8ab800b1449ab3c9d706503bc7dd1621b2c" dependencies = [ "proc-macro2", "quote", @@ -10820,9 +10820,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.71" +version = "0.2.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d6f8ec44822dd71f5f221a5847fb34acd9060535c1211b70a05844c0f6383b1" +checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489" [[package]] name = "wasm-bindgen-test" diff --git a/bin/node/browser-testing/Cargo.toml b/bin/node/browser-testing/Cargo.toml index 
292ee2cab6bf7..8c4e1958d60ed 100644 --- a/bin/node/browser-testing/Cargo.toml +++ b/bin/node/browser-testing/Cargo.toml @@ -12,7 +12,7 @@ libp2p = { version = "0.36.0", default-features = false } jsonrpc-core = "15.0.0" serde = "1.0.106" serde_json = "1.0.48" -wasm-bindgen = { version = "=0.2.71", features = ["serde-serialize"] } +wasm-bindgen = { version = "0.2.73", features = ["serde-serialize"] } wasm-bindgen-futures = "0.4.18" wasm-bindgen-test = "0.3.18" futures = "0.3.9" diff --git a/bin/node/cli/Cargo.toml b/bin/node/cli/Cargo.toml index 9449edfbf6e08..f5c80a45c7061 100644 --- a/bin/node/cli/Cargo.toml +++ b/bin/node/cli/Cargo.toml @@ -102,10 +102,10 @@ node-inspect = { version = "0.8.0", optional = true, path = "../inspect" } try-runtime-cli = { version = "0.9.0", optional = true, path = "../../../utils/frame/try-runtime/cli" } # WASM-specific dependencies -wasm-bindgen = { version = "0.2.57", optional = true } +wasm-bindgen = { version = "0.2.73", optional = true } wasm-bindgen-futures = { version = "0.4.18", optional = true } browser-utils = { package = "substrate-browser-utils", path = "../../../utils/browser", optional = true, version = "0.9.0"} -libp2p-wasm-ext = { version = "0.28", features = ["websocket"], optional = true } +libp2p-wasm-ext = { version = "0.28.1", features = ["websocket"], optional = true } [target.'cfg(target_arch="x86_64")'.dependencies] node-executor = { version = "2.0.0", path = "../executor", features = [ "wasmtime" ] } diff --git a/client/tracing/Cargo.toml b/client/tracing/Cargo.toml index d84f89b9bce7d..793f38d8833c0 100644 --- a/client/tracing/Cargo.toml +++ b/client/tracing/Cargo.toml @@ -33,5 +33,5 @@ sp-tracing = { version = "3.0.0", path = "../../primitives/tracing" } sc-tracing-proc-macro = { version = "3.0.0", path = "./proc-macro" } [target.'cfg(target_os = "unknown")'.dependencies] -wasm-bindgen = "0.2.67" +wasm-bindgen = "0.2.73" web-sys = { version = "0.3.44", features = ["console"] } diff --git 
a/utils/browser/Cargo.toml b/utils/browser/Cargo.toml index 3a11df62dc254..31403a5e6fa96 100644 --- a/utils/browser/Cargo.toml +++ b/utils/browser/Cargo.toml @@ -16,10 +16,10 @@ targets = ["x86_64-unknown-linux-gnu"] futures = { version = "0.3", features = ["compat"] } futures01 = { package = "futures", version = "0.1.29" } log = "0.4.8" -libp2p-wasm-ext = { version = "0.28", features = ["websocket"] } +libp2p-wasm-ext = { version = "0.28.1", features = ["websocket"] } console_error_panic_hook = "0.1.6" js-sys = "0.3.34" -wasm-bindgen = "0.2.57" +wasm-bindgen = "0.2.73" wasm-bindgen-futures = "0.4.18" kvdb-web = "0.9.0" sp-database = { version = "3.0.0", path = "../../primitives/database" } From ebf5896a5fcdbe4a0f7830d86408d803a04f30b9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 8 Apr 2021 09:25:21 +0100 Subject: [PATCH 121/503] Add TypeInfo bound to StaticLookup impl --- frame/indices/src/lib.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index 19697f2d941bb..1c838e483087a 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -335,7 +335,10 @@ impl Pallet { } } -impl StaticLookup for Pallet { +impl StaticLookup for Pallet +where + <::AccountIndex as codec::HasCompact>::Type: scale_info::TypeInfo, +{ type Source = MultiAddress; type Target = T::AccountId; From 07991889a7ee3b77ef5489a3250de426e5ce4c1c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Apr 2021 10:37:43 +0100 Subject: [PATCH 122/503] WIP replace legacy metadata with v13 --- bin/node-template/runtime/src/lib.rs | 6 +- bin/node/runtime/src/lib.rs | 6 +- client/rpc/src/state/state_full.rs | 2 +- .../procedural/src/construct_runtime/mod.rs | 6 - .../procedural/src/pallet/expand/call.rs | 43 +- .../procedural/src/pallet/expand/event.rs | 29 +- frame/support/src/dispatch.rs | 8 - frame/support/src/event.rs | 117 +-- frame/support/src/lib.rs | 2 - frame/support/src/metadata.rs | 979 
+++++++++--------- frame/support/src/metadata_vnext.rs | 205 ---- frame/support/test/tests/construct_runtime.rs | 457 ++++---- frame/support/test/tests/pallet.rs | 485 ++++----- primitives/api/src/lib.rs | 3 - test-utils/runtime/src/lib.rs | 8 - 15 files changed, 978 insertions(+), 1378 deletions(-) delete mode 100644 frame/support/src/metadata_vnext.rs diff --git a/bin/node-template/runtime/src/lib.rs b/bin/node-template/runtime/src/lib.rs index 95345989874a1..9b6707b9520f0 100644 --- a/bin/node-template/runtime/src/lib.rs +++ b/bin/node-template/runtime/src/lib.rs @@ -344,12 +344,8 @@ impl_runtime_apis! { impl sp_api::Metadata for Runtime { fn metadata() -> OpaqueMetadata { - Runtime::metadata().into() - } - - fn metadata_vnext() -> OpaqueMetadata { use codec::Encode as _; - OpaqueMetadata::new(Runtime::metadata_vnext().encode()) + OpaqueMetadata::new(Runtime::metadata().encode()) } } diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index c7d35142a8ae0..3d5e426022c3d 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -1213,11 +1213,7 @@ impl_runtime_apis! 
{ impl sp_api::Metadata for Runtime { fn metadata() -> OpaqueMetadata { - Runtime::metadata().into() - } - - fn metadata_vnext() -> OpaqueMetadata { - OpaqueMetadata::new(Runtime::metadata_vnext().encode()) + OpaqueMetadata::new(Runtime::metadata().encode()) } } diff --git a/client/rpc/src/state/state_full.rs b/client/rpc/src/state/state_full.rs index 20a6186a28313..a55903484adc2 100644 --- a/client/rpc/src/state/state_full.rs +++ b/client/rpc/src/state/state_full.rs @@ -346,7 +346,7 @@ impl StateBackend for FullState( ) }) .collect::>(); - let pallets_tokens_vnext = pallets_tokens.clone(); quote!( #scrate::impl_runtime_metadata!{ for #runtime with pallets where Extrinsic = #extrinsic #(#pallets_tokens)* } - - #scrate::impl_runtime_metadata_vnext!{ - for #runtime with pallets where Extrinsic = #extrinsic - #(#pallets_tokens_vnext)* - } ) } diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 14869906860ee..38c5f1b4f8605 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -16,7 +16,6 @@ // limitations under the License. use crate::pallet::Def; -use frame_support_procedural_tools::clean_type_string; use syn::spanned::Spanned; /// * Generate enum call and implement various trait on it. 
@@ -57,19 +56,6 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { .collect::>() }); - let args_metadata_type = def.call.methods.iter().map(|method| { - method.args.iter() - .map(|(is_compact, _, type_)| { - let final_type = if *is_compact { - quote::quote_spanned!(type_.span() => Compact<#type_>) - } else { - quote::quote!(#type_) - }; - clean_type_string(&final_type.to_string()) - }) - .collect::>() - }); - let args_is_compact = def.call.methods.iter().map(|method| { method.args.iter() .map(|(is_compact, _, _)| is_compact) @@ -193,34 +179,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { #[doc(hidden)] - #[allow(dead_code)] - pub fn call_functions() -> &'static [#frame_support::dispatch::FunctionMetadata] { - &[ #( - #frame_support::dispatch::FunctionMetadata { - name: #frame_support::dispatch::DecodeDifferent::Encode( - stringify!(#fn_name) - ), - arguments: #frame_support::dispatch::DecodeDifferent::Encode( - &[ #( - #frame_support::dispatch::FunctionArgumentMetadata { - name: #frame_support::dispatch::DecodeDifferent::Encode( - stringify!(#args_name) - ), - ty: #frame_support::dispatch::DecodeDifferent::Encode( - #args_metadata_type - ), - }, - )* ] - ), - documentation: #frame_support::dispatch::DecodeDifferent::Encode( - &[ #( #fn_doc ),* ] - ), - }, - )* ] - } - - #[doc(hidden)] - pub fn call_functions_vnext() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::v13::FunctionMetadata> { + pub fn call_functions() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::v13::FunctionMetadata> { #frame_support::scale_info::prelude::vec![ #( #frame_support::metadata::v13::FunctionMetadata { name: stringify!(#fn_name), diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 1850eabc457c8..4a3e93773a6ca 100644 --- 
a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -41,25 +41,8 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { let frame_support = &def.frame_support; let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); let event_impl_gen= &event.gen_kind.type_impl_gen(event.attr_span); - let metadata = event.metadata.iter() - .map(|event_def| { - let name = format!("{}", event_def.name); - let args = event_def.args.iter().map(|arg| arg.1.clone()); - let docs = &event_def.docs; - quote::quote_spanned!(event.attr_span => - #frame_support::event::EventMetadata { - name: #frame_support::event::DecodeDifferent::Encode(#name), - arguments: #frame_support::event::DecodeDifferent::Encode(&[ - #( #args, )* - ]), - documentation: #frame_support::event::DecodeDifferent::Encode(&[ - #( #docs, )* - ]), - }, - ) - }); - let metadata_vnext = event.metadata.iter() + let metadata = event.metadata.iter() .map(|event| { let name = format!("{}", event.name); let args = event.args @@ -162,14 +145,8 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { impl<#event_impl_gen> #event_ident<#event_use_gen> #event_where_clause { #[allow(dead_code)] #[doc(hidden)] - pub fn metadata() -> &'static [#frame_support::event::EventMetadata] { - &[ #( #metadata )* ] - } - - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata_vnext() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::v13::EventMetadata> { - #frame_support::scale_info::prelude::vec![ #( #metadata_vnext )* ] + pub fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::v13::EventMetadata> { + #frame_support::scale_info::prelude::vec![ #( #metadata )* ] } } ) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 7064e54bd3121..98f373ad1b38c 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2331,14 +2331,6 @@ macro_rules! 
__dispatch_impl_metadata { pub fn call_functions() -> &'static [$crate::dispatch::FunctionMetadata] { $crate::__call_to_functions!($($rest)*) } - - /// vnext metadata implemented in new frame support proc macros. - /// Returns empty vec for now to allow mixing of old style and new style pallets. - #[doc(hidden)] - #[allow(dead_code)] - pub fn call_functions_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::FunctionMetadata> { - $crate::scale_info::prelude::vec![] - } } } } diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 35767aaa63cb6..a26a2ec03ff76 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -146,16 +146,9 @@ macro_rules! decl_event { impl Event { #[allow(dead_code)] #[doc(hidden)] - pub fn metadata() -> &'static [ $crate::event::EventMetadata ] { + pub fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { $crate::__events_to_metadata!(; $( $events )* ) } - - /// Metadata vnext only supported by new frame support macros - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { - $crate::__events_to_metadata_vnext!(; $( $events )* ) - } } } } @@ -305,15 +298,9 @@ macro_rules! __decl_generic_event { { #[allow(dead_code)] #[doc(hidden)] - pub fn metadata() -> &'static [$crate::event::EventMetadata] { + pub fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { $crate::__events_to_metadata!(; $( $events )* ) } - - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata_vnext() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { - $crate::__events_to_metadata_vnext!(; $( $events )* ) - } } }; (@cannot_parse $ty:ty) => { @@ -331,36 +318,6 @@ macro_rules! 
__events_to_metadata { $( $rest:tt )* ) => { $crate::__events_to_metadata!( - $( $metadata, )* - $crate::event::EventMetadata { - name: $crate::event::DecodeDifferent::Encode(stringify!($event)), - arguments: $crate::event::DecodeDifferent::Encode(&[ - $( $( stringify!($param) ),* )* - ]), - documentation: $crate::event::DecodeDifferent::Encode(&[ - $( $doc_attr ),* - ]), - }; - $( $rest )* - ) - }; - ( - $( $metadata:expr ),*; - ) => { - &[ $( $metadata ),* ] - } -} - -#[macro_export] -#[doc(hidden)] -macro_rules! __events_to_metadata_vnext { - ( - $( $metadata:expr ),*; - $( #[doc = $doc_attr:tt] )* - $event:ident $( ( $( $param:path ),* $(,)? ) )*, - $( $rest:tt )* - ) => { - $crate::__events_to_metadata_vnext!( $( $metadata, )* $crate::metadata::v13::EventMetadata { name: stringify!($event), @@ -542,15 +499,6 @@ macro_rules! impl_outer_event { $( $generic_instance )?, )*; ); - $crate::__impl_outer_event_json_metadata_vnext!( - $runtime; - $name; - $( - $module_name::Event - < $( $generic_param )? $(, $module_name::$generic_instance )? > - $( $generic_instance )?, - )*; - ); } } @@ -564,72 +512,21 @@ macro_rules! __impl_outer_event_json_metadata { ) => { impl $runtime { #[allow(dead_code)] - pub fn outer_event_metadata() -> $crate::event::OuterEventMetadata { - $crate::event::OuterEventMetadata { - name: $crate::event::DecodeDifferent::Encode(stringify!($event_name)), - events: $crate::event::DecodeDifferent::Encode(&[ - $( - ( - stringify!($module_name), - $crate::event::FnEncode( - $module_name::Event ::< $( $generic_params ),* > ::metadata - ) - ) - ),* - ]) - } - } - - $crate::__impl_outer_event_json_metadata! { - @DECL_MODULE_EVENT_FNS - $( $module_name < $( $generic_params ),* > $( $instance )? ; )* - } - } - }; - - (@DECL_MODULE_EVENT_FNS - $( - $module_name:ident < $( $generic_params:path ),* > $( $instance:ident )? ; - )* - ) => { - $crate::paste::item! { - $( - #[allow(dead_code)] - pub fn [< __module_events_ $module_name $( _ $instance )? 
>] () -> - &'static [$crate::event::EventMetadata] - { - $module_name::Event ::< $( $generic_params ),* > ::metadata() - } - )* - } - } -} - -#[macro_export] -#[doc(hidden)] -macro_rules! __impl_outer_event_json_metadata_vnext { - ( - $runtime:ident; - $event_name:ident; - $( $module_name:ident::Event < $( $generic_params:path ),* > $( $instance:ident )?, )*; - ) => { - impl $runtime { - #[allow(dead_code)] - pub fn outer_event_metadata_vnext() -> $crate::metadata::v13::OuterEventMetadata { + pub fn outer_event_metadata() -> $crate::metadata::v13::OuterEventMetadata { $crate::metadata::v13::OuterEventMetadata { name: stringify!($event_name), events: $crate::scale_info::prelude::vec![ $( $crate::metadata::v13::ModuleEventMetadata { name: stringify!($module_name), - events: $module_name::Event ::< $( $generic_params ),* > ::metadata_vnext() + events: $module_name::Event ::< $( $generic_params ),* > ::metadata() } ),* ] } } - $crate::__impl_outer_event_json_metadata_vnext! { + $crate::__impl_outer_event_json_metadata! { @DECL_MODULE_EVENT_FNS $( $module_name < $( $generic_params ),* > $( $instance )? ; )* } @@ -644,10 +541,10 @@ macro_rules! __impl_outer_event_json_metadata_vnext { $crate::paste::item! { $( #[allow(dead_code)] - pub fn [< __module_events_vnext_ $module_name $( _ $instance )? >] () -> + pub fn [< __module_events_ $module_name $( _ $instance )? 
>] () -> Vec<$crate::metadata::v13::EventMetadata> { - $module_name::Event ::< $( $generic_params ),* > ::metadata_vnext() + $module_name::Event ::< $( $generic_params ),* > ::metadata() } )* } diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 4904ae596873a..4e862b0f5950c 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -60,8 +60,6 @@ pub mod event; #[macro_use] pub mod metadata; #[macro_use] -pub mod metadata_vnext; -#[macro_use] pub mod genesis_config; #[macro_use] pub mod inherent; diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index b9597e6435709..52e712313bce6 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -21,8 +21,10 @@ pub use frame_metadata::{ StorageEntryType, StorageEntryModifier, DefaultByte, StorageHasher, ModuleErrorMetadata, ExtrinsicMetadata, }; +pub use frame_metadata2; pub use frame_metadata2::v13; +/// todo: [AJ] update docs /// Implements the metadata support for the given runtime and all its modules. /// /// Example: @@ -74,21 +76,25 @@ macro_rules! 
impl_runtime_metadata { $( $rest:tt )* ) => { impl $runtime { - pub fn metadata() -> $crate::metadata::RuntimeMetadataPrefixed { - $crate::metadata::RuntimeMetadataLastVersion { - modules: $crate::__runtime_modules_to_metadata!($runtime;; $( $rest )*), - extrinsic: $crate::metadata::ExtrinsicMetadata { - version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, - signed_extensions: < - < - $ext as $crate::sp_runtime::traits::ExtrinsicMetadata - >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension - >::identifier() - .into_iter() - .map(|(id, _)| $crate::metadata::DecodeDifferent::Encode(id)) - .collect(), - }, - }.into() + pub fn metadata() -> $crate::metadata::frame_metadata2::RuntimeMetadataPrefixed { + $crate::metadata::v13::RuntimeMetadataLastVersion::new( + $crate::__runtime_modules_to_metadata!($runtime;; $( $rest )*), + $crate::metadata::v13::ExtrinsicMetadata { + ty: $crate::scale_info::meta_type::<$ext>(), + version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, + signed_extensions: < + < + $ext as $crate::sp_runtime::traits::ExtrinsicMetadata + >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension + >::identifier() + .into_iter() + .map(|(id, ty)| $crate::metadata::v13::SignedExtensionMetadata { + identifier: id, + ty, + }) + .collect(), + }, + ).into() } } } @@ -108,28 +114,34 @@ macro_rules! 
__runtime_modules_to_metadata { ) => { $crate::__runtime_modules_to_metadata!( $runtime; - $( $metadata, )* $crate::metadata::ModuleMetadata { - name: $crate::metadata::DecodeDifferent::Encode(stringify!($name)), + $( $metadata, )* $crate::metadata::v13::ModuleMetadata { + name: stringify!($name), index: $index, - storage: $crate::__runtime_modules_to_metadata_calls_storage!( - $mod, $module $( <$instance> )?, $runtime, $(with $kw)* - ), + // todo: [AJ] storage + storage: None, + // storage: $crate::__runtime_modules_to_metadata_calls_storage!( + // $mod, $module $( <$instance> )?, $runtime, $(with $kw)* + // ), calls: $crate::__runtime_modules_to_metadata_calls_call!( $mod, $module $( <$instance> )?, $runtime, $(with $kw)* ), event: $crate::__runtime_modules_to_metadata_calls_event!( $mod, $module $( <$instance> )?, $runtime, $(with $kw)* ), - constants: $crate::metadata::DecodeDifferent::Encode( - $crate::metadata::FnEncode( - $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata - ) - ), - errors: $crate::metadata::DecodeDifferent::Encode( - $crate::metadata::FnEncode( - <$mod::$module::<$runtime $(, $mod::$instance )?> as $crate::metadata::ModuleErrorMetadata>::metadata - ) - ) + // todo: [AJ] constants + constants: None, + // constants: $crate::metadata::DecodeDifferent::Encode( + // $crate::metadata::FnEncode( + // $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata + // ) + // ), + // todo: [AJ] errors + errors: vec![], + // errors: $crate::metadata::DecodeDifferent::Encode( + // $crate::metadata::FnEncode( + // <$mod::$module::<$runtime $(, $mod::$instance )?> as $crate::metadata::ModuleErrorMetadata>::metadata + // ) + // ) }; $( $rest )* ) @@ -152,11 +164,7 @@ macro_rules! 
__runtime_modules_to_metadata_calls_call { with Call $(with $kws:ident)* ) => { - Some($crate::metadata::DecodeDifferent::Encode( - $crate::metadata::FnEncode( - $mod::$module::<$runtime $(, $mod::$instance )?>::call_functions - ) - )) + Some($mod::$module::<$runtime $(, $mod::$instance )?>::call_functions()) }; ( $mod: ident, @@ -178,7 +186,6 @@ macro_rules! __runtime_modules_to_metadata_calls_call { }; } - #[macro_export] #[doc(hidden)] macro_rules! __runtime_modules_to_metadata_calls_event { @@ -189,13 +196,10 @@ macro_rules! __runtime_modules_to_metadata_calls_event { with Event $(with $kws:ident)* ) => { - Some($crate::metadata::DecodeDifferent::Encode( - $crate::metadata::FnEncode( - $crate::paste::expr!{ - $runtime:: [< __module_events_ $mod $(_ $instance)?>] - } - ) - )) + Some($crate::paste::expr!{ + $runtime:: [< __module_events_ $mod $(_ $instance)?>]() + } + ) }; ( $mod: ident, @@ -215,447 +219,448 @@ macro_rules! __runtime_modules_to_metadata_calls_event { }; } -#[macro_export] -#[doc(hidden)] -macro_rules! __runtime_modules_to_metadata_calls_storage { - ( - $mod: ident, - $module: ident $( <$instance:ident> )?, - $runtime: ident, - with Storage - $(with $kws:ident)* - ) => { - Some($crate::metadata::DecodeDifferent::Encode( - $crate::metadata::FnEncode( - $mod::$module::<$runtime $(, $mod::$instance )?>::storage_metadata - ) - )) - }; - ( - $mod: ident, - $module: ident $( <$instance:ident> )?, - $runtime: ident, - with $_:ident - $(with $kws:ident)* - ) => { - $crate::__runtime_modules_to_metadata_calls_storage! { - $mod, $module $( <$instance> )?, $runtime, $(with $kws)* - }; - }; - ( - $mod: ident, - $module: ident $( <$instance:ident> )?, - $runtime: ident, - ) => { - None - }; -} - - -#[cfg(test)] -// Do not complain about unused `dispatch` and `dispatch_aux`. 
-#[allow(dead_code)] -mod tests { - use super::*; - use frame_metadata::{ - EventMetadata, StorageEntryModifier, StorageEntryType, FunctionMetadata, StorageEntryMetadata, - ModuleMetadata, RuntimeMetadataPrefixed, DefaultByte, ModuleConstantMetadata, DefaultByteGetter, - ErrorMetadata, ExtrinsicMetadata, - }; - use codec::{Encode, Decode}; - use crate::traits::Get; - use sp_runtime::transaction_validity::TransactionValidityError; - - #[derive(Clone, Eq, Debug, PartialEq, Encode, Decode)] - struct TestExtension; - impl sp_runtime::traits::SignedExtension for TestExtension { - type AccountId = u32; - type Call = (); - type AdditionalSigned = u32; - type Pre = (); - const IDENTIFIER: &'static str = "testextension"; - fn additional_signed(&self) -> Result { - Ok(1) - } - } - - #[derive(Clone, Eq, Debug, PartialEq, Encode, Decode)] - struct TestExtension2; - impl sp_runtime::traits::SignedExtension for TestExtension2 { - type AccountId = u32; - type Call = (); - type AdditionalSigned = u32; - type Pre = (); - const IDENTIFIER: &'static str = "testextension2"; - fn additional_signed(&self) -> Result { - Ok(1) - } - } - - struct TestExtrinsic; - - impl sp_runtime::traits::ExtrinsicMetadata for TestExtrinsic { - const VERSION: u8 = 1; - type SignedExtensions = (TestExtension, TestExtension2); - } - - mod system { - use super::*; - - pub trait Config: 'static { - type BaseCallFilter; - const ASSOCIATED_CONST: u64 = 500; - type Origin: Into, Self::Origin>> - + From>; - type AccountId: From + Encode; - type BlockNumber: From + Encode; - type SomeValue: Get; - type PalletInfo: crate::traits::PalletInfo; - type DbWeight: crate::traits::Get; - type Call; - } - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=self { - /// Hi, I am a comment. 
- const BlockNumber: T::BlockNumber = 100.into(); - const GetType: T::AccountId = T::SomeValue::get().into(); - const ASSOCIATED_CONST: u64 = T::ASSOCIATED_CONST.into(); - } - } - - decl_event!( - pub enum Event { - SystemEvent, - } - ); - - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] - pub enum RawOrigin { - Root, - Signed(AccountId), - None, - } - - impl From> for RawOrigin { - fn from(s: Option) -> RawOrigin { - match s { - Some(who) => RawOrigin::Signed(who), - None => RawOrigin::None, - } - } - } - - pub type Origin = RawOrigin<::AccountId>; - } - - mod event_module { - use crate::dispatch::DispatchResult; - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_event!( - pub enum Event where ::Balance - { - /// Hi, I am a comment. - TestEvent(Balance), - } - ); - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=system { - type Error = Error; - - #[weight = 0] - fn aux_0(_origin) -> DispatchResult { unreachable!() } - } - } - - crate::decl_error! { - pub enum Error for Module { - /// Some user input error - UserInputError, - /// Something bad happened - /// this could be due to many reasons - BadThingHappened, - } - } - } - - mod event_module2 { - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_event!( - pub enum Event where ::Balance - { - TestEvent(Balance), - } - ); - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=system {} - } - - crate::decl_storage! { - trait Store for Module as TestStorage { - StorageMethod : Option; - } - add_extra_genesis { - build(|_| {}); - } - } - } - - type EventModule = event_module::Module; - type EventModule2 = event_module2::Module; - - #[derive(Debug, Clone, PartialEq, Eq, Encode, Decode)] - pub struct TestRuntime; - - impl crate::traits::PalletInfo for TestRuntime { - fn index() -> Option { - let type_id = sp_std::any::TypeId::of::

(); - if type_id == sp_std::any::TypeId::of::>() { - return Some(0) - } - if type_id == sp_std::any::TypeId::of::() { - return Some(1) - } - if type_id == sp_std::any::TypeId::of::() { - return Some(2) - } - - None - } - fn name() -> Option<&'static str> { - let type_id = sp_std::any::TypeId::of::

(); - if type_id == sp_std::any::TypeId::of::>() { - return Some("System") - } - if type_id == sp_std::any::TypeId::of::() { - return Some("EventModule") - } - if type_id == sp_std::any::TypeId::of::() { - return Some("EventModule2") - } - - None - } - } - - impl_outer_event! { - pub enum TestEvent for TestRuntime { - system, - event_module, - event_module2, - } - } - - impl_outer_origin! { - pub enum Origin for TestRuntime where system = system {} - } - - impl_outer_dispatch! { - pub enum Call for TestRuntime where origin: Origin { - event_module::EventModule, - event_module2::EventModule2, - } - } - - impl event_module::Config for TestRuntime { - type Balance = u32; - } - - impl event_module2::Config for TestRuntime { - type Balance = u32; - } - - crate::parameter_types! { - pub const SystemValue: u32 = 600; - } - - impl system::Config for TestRuntime { - type BaseCallFilter = (); - type Origin = Origin; - type AccountId = u32; - type BlockNumber = u32; - type SomeValue = SystemValue; - type PalletInfo = Self; - type DbWeight = (); - type Call = Call; - } - - impl_runtime_metadata!( - for TestRuntime with pallets where Extrinsic = TestExtrinsic - system::Pallet as System { index 0 } with Event, - event_module::Module as Module { index 1 } with Event Call, - event_module2::Module as Module2 { index 2 } with Event Storage Call, - ); - - struct ConstantBlockNumberByteGetter; - impl DefaultByte for ConstantBlockNumberByteGetter { - fn default_byte(&self) -> Vec { - 100u32.encode() - } - } - - struct ConstantGetTypeByteGetter; - impl DefaultByte for ConstantGetTypeByteGetter { - fn default_byte(&self) -> Vec { - SystemValue::get().encode() - } - } - - struct ConstantAssociatedConstByteGetter; - impl DefaultByte for ConstantAssociatedConstByteGetter { - fn default_byte(&self) -> Vec { - ::ASSOCIATED_CONST.encode() - } - } - - #[test] - fn runtime_metadata() { - let expected_metadata: RuntimeMetadataLastVersion = RuntimeMetadataLastVersion { - modules: 
DecodeDifferent::Encode(&[ - ModuleMetadata { - name: DecodeDifferent::Encode("System"), - index: 0, - storage: None, - calls: None, - event: Some(DecodeDifferent::Encode( - FnEncode(||&[ - EventMetadata { - name: DecodeDifferent::Encode("SystemEvent"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]) - } - ]) - )), - constants: DecodeDifferent::Encode( - FnEncode(|| &[ - ModuleConstantMetadata { - name: DecodeDifferent::Encode("BlockNumber"), - ty: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode( - DefaultByteGetter(&ConstantBlockNumberByteGetter) - ), - documentation: DecodeDifferent::Encode(&[" Hi, I am a comment."]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Encode("GetType"), - ty: DecodeDifferent::Encode("T::AccountId"), - value: DecodeDifferent::Encode( - DefaultByteGetter(&ConstantGetTypeByteGetter) - ), - documentation: DecodeDifferent::Encode(&[]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Encode("ASSOCIATED_CONST"), - ty: DecodeDifferent::Encode("u64"), - value: DecodeDifferent::Encode( - DefaultByteGetter(&ConstantAssociatedConstByteGetter) - ), - documentation: DecodeDifferent::Encode(&[]), - } - ]) - ), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module"), - index: 1, - storage: None, - calls: Some( - DecodeDifferent::Encode(FnEncode(|| &[ - FunctionMetadata { - name: DecodeDifferent::Encode("aux_0"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - } - ]))), - event: Some(DecodeDifferent::Encode( - FnEncode(||&[ - EventMetadata { - name: DecodeDifferent::Encode("TestEvent"), - arguments: DecodeDifferent::Encode(&["Balance"]), - documentation: DecodeDifferent::Encode(&[" Hi, I am a comment."]) - } - ]) - )), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[ - ErrorMetadata { - name: 
DecodeDifferent::Encode("UserInputError"), - documentation: DecodeDifferent::Encode(&[" Some user input error"]), - }, - ErrorMetadata { - name: DecodeDifferent::Encode("BadThingHappened"), - documentation: DecodeDifferent::Encode(&[ - " Something bad happened", - " this could be due to many reasons", - ]), - }, - ])), - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module2"), - index: 2, - storage: Some(DecodeDifferent::Encode( - FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("TestStorage"), - entries: DecodeDifferent::Encode( - &[ - StorageEntryMetadata { - name: DecodeDifferent::Encode("StorageMethod"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter( - &event_module2::__GetByteStructStorageMethod( - std::marker::PhantomData:: - ) - ) - ), - documentation: DecodeDifferent::Encode(&[]), - } - ] - ) - }), - )), - calls: Some(DecodeDifferent::Encode(FnEncode(|| &[]))), - event: Some(DecodeDifferent::Encode( - FnEncode(||&[ - EventMetadata { - name: DecodeDifferent::Encode("TestEvent"), - arguments: DecodeDifferent::Encode(&["Balance"]), - documentation: DecodeDifferent::Encode(&[]) - } - ]) - )), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - }, - ]), - extrinsic: ExtrinsicMetadata { - version: 1, - signed_extensions: vec![ - DecodeDifferent::Encode("testextension"), - DecodeDifferent::Encode("testextension2"), - ], - } - }; - - let metadata_encoded = TestRuntime::metadata().encode(); - let metadata_decoded = RuntimeMetadataPrefixed::decode(&mut &metadata_encoded[..]); - let expected_metadata: RuntimeMetadataPrefixed = expected_metadata.into(); - - pretty_assertions::assert_eq!(expected_metadata, metadata_decoded.unwrap()); - } -} +// todo: [AJ] implement storage metadata vnext +// #[macro_export] +// #[doc(hidden)] +// macro_rules! 
__runtime_modules_to_metadata_calls_storage { +// ( +// $mod: ident, +// $module: ident $( <$instance:ident> )?, +// $runtime: ident, +// with Storage +// $(with $kws:ident)* +// ) => { +// Some($crate::metadata::DecodeDifferent::Encode( +// $crate::metadata::FnEncode( +// $mod::$module::<$runtime $(, $mod::$instance )?>::storage_metadata +// ) +// )) +// }; +// ( +// $mod: ident, +// $module: ident $( <$instance:ident> )?, +// $runtime: ident, +// with $_:ident +// $(with $kws:ident)* +// ) => { +// $crate::__runtime_modules_to_metadata_calls_storage! { +// $mod, $module $( <$instance> )?, $runtime, $(with $kws)* +// }; +// }; +// ( +// $mod: ident, +// $module: ident $( <$instance:ident> )?, +// $runtime: ident, +// ) => { +// None +// }; +// } + +// todo: [AJ] restore metadata tests +// #[cfg(test)] +// // Do not complain about unused `dispatch` and `dispatch_aux`. +// #[allow(dead_code)] +// mod tests { +// use super::*; +// use frame_metadata::{ +// EventMetadata, StorageEntryModifier, StorageEntryType, FunctionMetadata, StorageEntryMetadata, +// ModuleMetadata, RuntimeMetadataPrefixed, DefaultByte, ModuleConstantMetadata, DefaultByteGetter, +// ErrorMetadata, ExtrinsicMetadata, +// }; +// use codec::{Encode, Decode}; +// use crate::traits::Get; +// use sp_runtime::transaction_validity::TransactionValidityError; +// +// #[derive(Clone, Eq, Debug, PartialEq, Encode, Decode)] +// struct TestExtension; +// impl sp_runtime::traits::SignedExtension for TestExtension { +// type AccountId = u32; +// type Call = (); +// type AdditionalSigned = u32; +// type Pre = (); +// const IDENTIFIER: &'static str = "testextension"; +// fn additional_signed(&self) -> Result { +// Ok(1) +// } +// } +// +// #[derive(Clone, Eq, Debug, PartialEq, Encode, Decode)] +// struct TestExtension2; +// impl sp_runtime::traits::SignedExtension for TestExtension2 { +// type AccountId = u32; +// type Call = (); +// type AdditionalSigned = u32; +// type Pre = (); +// const IDENTIFIER: &'static str 
= "testextension2"; +// fn additional_signed(&self) -> Result { +// Ok(1) +// } +// } +// +// struct TestExtrinsic; +// +// impl sp_runtime::traits::ExtrinsicMetadata for TestExtrinsic { +// const VERSION: u8 = 1; +// type SignedExtensions = (TestExtension, TestExtension2); +// } +// +// mod system { +// use super::*; +// +// pub trait Config: 'static { +// type BaseCallFilter; +// const ASSOCIATED_CONST: u64 = 500; +// type Origin: Into, Self::Origin>> +// + From>; +// type AccountId: From + Encode; +// type BlockNumber: From + Encode; +// type SomeValue: Get; +// type PalletInfo: crate::traits::PalletInfo; +// type DbWeight: crate::traits::Get; +// type Call; +// } +// +// decl_module! { +// pub struct Module for enum Call where origin: T::Origin, system=self { +// /// Hi, I am a comment. +// const BlockNumber: T::BlockNumber = 100.into(); +// const GetType: T::AccountId = T::SomeValue::get().into(); +// const ASSOCIATED_CONST: u64 = T::ASSOCIATED_CONST.into(); +// } +// } +// +// decl_event!( +// pub enum Event { +// SystemEvent, +// } +// ); +// +// #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] +// pub enum RawOrigin { +// Root, +// Signed(AccountId), +// None, +// } +// +// impl From> for RawOrigin { +// fn from(s: Option) -> RawOrigin { +// match s { +// Some(who) => RawOrigin::Signed(who), +// None => RawOrigin::None, +// } +// } +// } +// +// pub type Origin = RawOrigin<::AccountId>; +// } +// +// mod event_module { +// use crate::dispatch::DispatchResult; +// use super::system; +// +// pub trait Config: system::Config { +// type Balance; +// } +// +// decl_event!( +// pub enum Event where ::Balance +// { +// /// Hi, I am a comment. +// TestEvent(Balance), +// } +// ); +// +// decl_module! { +// pub struct Module for enum Call where origin: T::Origin, system=system { +// type Error = Error; +// +// #[weight = 0] +// fn aux_0(_origin) -> DispatchResult { unreachable!() } +// } +// } +// +// crate::decl_error! 
{ +// pub enum Error for Module { +// /// Some user input error +// UserInputError, +// /// Something bad happened +// /// this could be due to many reasons +// BadThingHappened, +// } +// } +// } +// +// mod event_module2 { +// use super::system; +// +// pub trait Config: system::Config { +// type Balance; +// } +// +// decl_event!( +// pub enum Event where ::Balance +// { +// TestEvent(Balance), +// } +// ); +// +// decl_module! { +// pub struct Module for enum Call where origin: T::Origin, system=system {} +// } +// +// crate::decl_storage! { +// trait Store for Module as TestStorage { +// StorageMethod : Option; +// } +// add_extra_genesis { +// build(|_| {}); +// } +// } +// } +// +// type EventModule = event_module::Module; +// type EventModule2 = event_module2::Module; +// +// #[derive(Debug, Clone, PartialEq, Eq, Encode, Decode)] +// pub struct TestRuntime; +// +// impl crate::traits::PalletInfo for TestRuntime { +// fn index() -> Option { +// let type_id = sp_std::any::TypeId::of::

(); +// if type_id == sp_std::any::TypeId::of::>() { +// return Some(0) +// } +// if type_id == sp_std::any::TypeId::of::() { +// return Some(1) +// } +// if type_id == sp_std::any::TypeId::of::() { +// return Some(2) +// } +// +// None +// } +// fn name() -> Option<&'static str> { +// let type_id = sp_std::any::TypeId::of::

(); +// if type_id == sp_std::any::TypeId::of::>() { +// return Some("System") +// } +// if type_id == sp_std::any::TypeId::of::() { +// return Some("EventModule") +// } +// if type_id == sp_std::any::TypeId::of::() { +// return Some("EventModule2") +// } +// +// None +// } +// } +// +// impl_outer_event! { +// pub enum TestEvent for TestRuntime { +// system, +// event_module, +// event_module2, +// } +// } +// +// impl_outer_origin! { +// pub enum Origin for TestRuntime where system = system {} +// } +// +// impl_outer_dispatch! { +// pub enum Call for TestRuntime where origin: Origin { +// event_module::EventModule, +// event_module2::EventModule2, +// } +// } +// +// impl event_module::Config for TestRuntime { +// type Balance = u32; +// } +// +// impl event_module2::Config for TestRuntime { +// type Balance = u32; +// } +// +// crate::parameter_types! { +// pub const SystemValue: u32 = 600; +// } +// +// impl system::Config for TestRuntime { +// type BaseCallFilter = (); +// type Origin = Origin; +// type AccountId = u32; +// type BlockNumber = u32; +// type SomeValue = SystemValue; +// type PalletInfo = Self; +// type DbWeight = (); +// type Call = Call; +// } +// +// impl_runtime_metadata!( +// for TestRuntime with pallets where Extrinsic = TestExtrinsic +// system::Pallet as System { index 0 } with Event, +// event_module::Module as Module { index 1 } with Event Call, +// event_module2::Module as Module2 { index 2 } with Event Storage Call, +// ); +// +// struct ConstantBlockNumberByteGetter; +// impl DefaultByte for ConstantBlockNumberByteGetter { +// fn default_byte(&self) -> Vec { +// 100u32.encode() +// } +// } +// +// struct ConstantGetTypeByteGetter; +// impl DefaultByte for ConstantGetTypeByteGetter { +// fn default_byte(&self) -> Vec { +// SystemValue::get().encode() +// } +// } +// +// struct ConstantAssociatedConstByteGetter; +// impl DefaultByte for ConstantAssociatedConstByteGetter { +// fn default_byte(&self) -> Vec { +// 
::ASSOCIATED_CONST.encode() +// } +// } +// +// #[test] +// fn runtime_metadata() { +// let expected_metadata: RuntimeMetadataLastVersion = RuntimeMetadataLastVersion { +// modules: DecodeDifferent::Encode(&[ +// ModuleMetadata { +// name: DecodeDifferent::Encode("System"), +// index: 0, +// storage: None, +// calls: None, +// event: Some(DecodeDifferent::Encode( +// FnEncode(||&[ +// EventMetadata { +// name: DecodeDifferent::Encode("SystemEvent"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]) +// } +// ]) +// )), +// constants: DecodeDifferent::Encode( +// FnEncode(|| &[ +// ModuleConstantMetadata { +// name: DecodeDifferent::Encode("BlockNumber"), +// ty: DecodeDifferent::Encode("T::BlockNumber"), +// value: DecodeDifferent::Encode( +// DefaultByteGetter(&ConstantBlockNumberByteGetter) +// ), +// documentation: DecodeDifferent::Encode(&[" Hi, I am a comment."]), +// }, +// ModuleConstantMetadata { +// name: DecodeDifferent::Encode("GetType"), +// ty: DecodeDifferent::Encode("T::AccountId"), +// value: DecodeDifferent::Encode( +// DefaultByteGetter(&ConstantGetTypeByteGetter) +// ), +// documentation: DecodeDifferent::Encode(&[]), +// }, +// ModuleConstantMetadata { +// name: DecodeDifferent::Encode("ASSOCIATED_CONST"), +// ty: DecodeDifferent::Encode("u64"), +// value: DecodeDifferent::Encode( +// DefaultByteGetter(&ConstantAssociatedConstByteGetter) +// ), +// documentation: DecodeDifferent::Encode(&[]), +// } +// ]) +// ), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module"), +// index: 1, +// storage: None, +// calls: Some( +// DecodeDifferent::Encode(FnEncode(|| &[ +// FunctionMetadata { +// name: DecodeDifferent::Encode("aux_0"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// } +// ]))), +// event: Some(DecodeDifferent::Encode( +// FnEncode(||&[ +// EventMetadata { +// name: 
DecodeDifferent::Encode("TestEvent"), +// arguments: DecodeDifferent::Encode(&["Balance"]), +// documentation: DecodeDifferent::Encode(&[" Hi, I am a comment."]) +// } +// ]) +// )), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[ +// ErrorMetadata { +// name: DecodeDifferent::Encode("UserInputError"), +// documentation: DecodeDifferent::Encode(&[" Some user input error"]), +// }, +// ErrorMetadata { +// name: DecodeDifferent::Encode("BadThingHappened"), +// documentation: DecodeDifferent::Encode(&[ +// " Something bad happened", +// " this could be due to many reasons", +// ]), +// }, +// ])), +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module2"), +// index: 2, +// storage: Some(DecodeDifferent::Encode( +// FnEncode(|| StorageMetadata { +// prefix: DecodeDifferent::Encode("TestStorage"), +// entries: DecodeDifferent::Encode( +// &[ +// StorageEntryMetadata { +// name: DecodeDifferent::Encode("StorageMethod"), +// modifier: StorageEntryModifier::Optional, +// ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), +// default: DecodeDifferent::Encode( +// DefaultByteGetter( +// &event_module2::__GetByteStructStorageMethod( +// std::marker::PhantomData:: +// ) +// ) +// ), +// documentation: DecodeDifferent::Encode(&[]), +// } +// ] +// ) +// }), +// )), +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[]))), +// event: Some(DecodeDifferent::Encode( +// FnEncode(||&[ +// EventMetadata { +// name: DecodeDifferent::Encode("TestEvent"), +// arguments: DecodeDifferent::Encode(&["Balance"]), +// documentation: DecodeDifferent::Encode(&[]) +// } +// ]) +// )), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// }, +// ]), +// extrinsic: ExtrinsicMetadata { +// version: 1, +// signed_extensions: vec![ +// DecodeDifferent::Encode("testextension"), +// DecodeDifferent::Encode("testextension2"), +// ], +// } +// }; +// +// 
let metadata_encoded = TestRuntime::metadata().encode(); +// let metadata_decoded = RuntimeMetadataPrefixed::decode(&mut &metadata_encoded[..]); +// let expected_metadata: RuntimeMetadataPrefixed = expected_metadata.into(); +// +// pretty_assertions::assert_eq!(expected_metadata, metadata_decoded.unwrap()); +// } +// } diff --git a/frame/support/src/metadata_vnext.rs b/frame/support/src/metadata_vnext.rs deleted file mode 100644 index 6e72a25a7db7a..0000000000000 --- a/frame/support/src/metadata_vnext.rs +++ /dev/null @@ -1,205 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) 2018-2020 Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: Apache-2.0 - -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -pub use frame_metadata2; - -#[macro_export] -macro_rules! 
impl_runtime_metadata_vnext { - ( - for $runtime:ident with pallets where Extrinsic = $ext:ident - $( $rest:tt )* - ) => { - impl $runtime { - pub fn metadata_vnext() -> $crate::metadata_vnext::frame_metadata2::RuntimeMetadataPrefixed { - $crate::metadata::v13::RuntimeMetadataLastVersion::new( - $crate::__runtime_modules_to_metadata_vnext!($runtime;; $( $rest )*), - $crate::metadata::v13::ExtrinsicMetadata { - ty: $crate::scale_info::meta_type::<$ext>(), - version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, - signed_extensions: < - < - $ext as $crate::sp_runtime::traits::ExtrinsicMetadata - >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension - >::identifier() - .into_iter() - .map(|(id, ty)| $crate::metadata::v13::SignedExtensionMetadata { - identifier: id, - ty, - }) - .collect(), - }, - ).into() - } - } - } -} - -#[macro_export] -#[doc(hidden)] -macro_rules! __runtime_modules_to_metadata_vnext { - ( - $runtime: ident; - $( $metadata:expr ),*; - $mod:ident::$module:ident $( < $instance:ident > )? 
as $name:ident - { index $index:tt } - $(with)+ $($kw:ident)* - , - $( $rest:tt )* - ) => { - $crate::__runtime_modules_to_metadata_vnext!( - $runtime; - $( $metadata, )* $crate::metadata::v13::ModuleMetadata { - name: stringify!($name), - index: $index, - // todo: [AJ] storage - storage: None, - // storage: $crate::__runtime_modules_to_metadata_calls_storage!( - // $mod, $module $( <$instance> )?, $runtime, $(with $kw)* - // ), - calls: $crate::__runtime_modules_to_metadata_calls_call_vnext!( - $mod, $module $( <$instance> )?, $runtime, $(with $kw)* - ), - event: $crate::__runtime_modules_to_metadata_calls_event_vnext!( - $mod, $module $( <$instance> )?, $runtime, $(with $kw)* - ), - // todo: [AJ] constants - constants: None, - // constants: $crate::metadata::DecodeDifferent::Encode( - // $crate::metadata::FnEncode( - // $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata - // ) - // ), - // todo: [AJ] errors - errors: vec![], - // errors: $crate::metadata::DecodeDifferent::Encode( - // $crate::metadata::FnEncode( - // <$mod::$module::<$runtime $(, $mod::$instance )?> as $crate::metadata::ModuleErrorMetadata>::metadata - // ) - // ) - }; - $( $rest )* - ) - }; - ( - $runtime:ident; - $( $metadata:expr ),*; - ) => { - vec![$( $metadata ),* ] - }; -} - -#[macro_export] -#[doc(hidden)] -macro_rules! __runtime_modules_to_metadata_calls_call_vnext { - ( - $mod: ident, - $module: ident $( <$instance:ident> )?, - $runtime: ident, - with Call - $(with $kws:ident)* - ) => { - Some($mod::$module::<$runtime $(, $mod::$instance )?>::call_functions_vnext()) - }; - ( - $mod: ident, - $module: ident $( <$instance:ident> )?, - $runtime: ident, - with $_:ident - $(with $kws:ident)* - ) => { - $crate::__runtime_modules_to_metadata_calls_call_vnext! 
{ - $mod, $module $( <$instance> )?, $runtime, $(with $kws)* - }; - }; - ( - $mod: ident, - $module: ident $( <$instance:ident> )?, - $runtime: ident, - ) => { - None - }; -} - - -#[macro_export] -#[doc(hidden)] -macro_rules! __runtime_modules_to_metadata_calls_event_vnext { - ( - $mod: ident, - $module: ident $( <$instance:ident> )?, - $runtime: ident, - with Event - $(with $kws:ident)* - ) => { - Some($crate::paste::expr!{ - $runtime:: [< __module_events_vnext_ $mod $(_ $instance)?>]() - } - ) - }; - ( - $mod: ident, - $module: ident $( <$instance:ident> )?, - $runtime: ident, - with $_:ident - $(with $kws:ident)* - ) => { - $crate::__runtime_modules_to_metadata_calls_event_vnext!( $mod, $module $( <$instance> )?, $runtime, $(with $kws)* ); - }; - ( - $mod: ident, - $module: ident $( <$instance:ident> )?, - $runtime: ident, - ) => { - None - }; -} - -// #[macro_export] -// #[doc(hidden)] -// macro_rules! __runtime_modules_to_metadata_calls_storage_vnext { -// ( -// $mod: ident, -// $module: ident $( <$instance:ident> )?, -// $runtime: ident, -// with Storage -// $(with $kws:ident)* -// ) => { -// Some($crate::metadata::DecodeDifferent::Encode( -// $crate::metadata::FnEncode( -// $mod::$module::<$runtime $(, $mod::$instance )?>::storage_metadata -// ) -// )) -// }; -// ( -// $mod: ident, -// $module: ident $( <$instance:ident> )?, -// $runtime: ident, -// with $_:ident -// $(with $kws:ident)* -// ) => { -// $crate::__runtime_modules_to_metadata_calls_storage! 
{ -// $mod, $module $( <$instance> )?, $runtime, $(with $kws)* -// }; -// }; -// ( -// $mod: ident, -// $module: ident $( <$instance:ident> )?, -// $runtime: ident, -// ) => { -// None -// }; -// } diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 445a408834bb5..64c656729719c 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -281,235 +281,236 @@ fn call_codec() { assert_eq!(Call::Module1_9(module1::Call::fail()).encode()[0], 13); } +// #[test] +// fn test_metadata() { +// use frame_metadata::*; +// let expected_metadata: RuntimeMetadataLastVersion = RuntimeMetadataLastVersion { +// modules: DecodeDifferent::Encode(&[ +// ModuleMetadata { +// name: DecodeDifferent::Encode("System"), +// storage: None, +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { +// name: DecodeDifferent::Encode("noop"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// event: Some(DecodeDifferent::Encode(FnEncode(|| &[ +// EventMetadata { +// name: DecodeDifferent::Encode("ExtrinsicSuccess"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }, +// EventMetadata { +// name: DecodeDifferent::Encode("ExtrinsicFailed"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }, +// EventMetadata { +// name: DecodeDifferent::Encode("Ignore"), +// arguments: DecodeDifferent::Encode(&["BlockNumber"]), +// documentation: DecodeDifferent::Encode(&[]), +// }, +// ]))), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 30, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module1_1"), +// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { +// prefix: DecodeDifferent::Encode("Instance1Module"), +// entries: 
DecodeDifferent::Encode(&[]), +// }))), +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ +// FunctionMetadata { +// name: DecodeDifferent::Encode("fail"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }, +// ]))), +// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { +// name: DecodeDifferent::Encode("A"), +// arguments: DecodeDifferent::Encode(&["AccountId"]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 31, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module2"), +// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { +// prefix: DecodeDifferent::Encode("Module"), +// entries: DecodeDifferent::Encode(&[]), +// }))), +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ +// FunctionMetadata { +// name: DecodeDifferent::Encode("fail"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }, +// ]))), +// event: Some(DecodeDifferent::Encode(FnEncode(|| &[ +// EventMetadata { +// name: DecodeDifferent::Encode("A"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }, +// ]))), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 32, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module1_2"), +// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { +// prefix: DecodeDifferent::Encode("Instance2Module"), +// entries: DecodeDifferent::Encode(&[]), +// }))), +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { +// name: DecodeDifferent::Encode("fail"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// event: Some(DecodeDifferent::Encode(FnEncode(|| 
&[EventMetadata { +// name: DecodeDifferent::Encode("A"), +// arguments: DecodeDifferent::Encode(&["AccountId"]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 33, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module1_3"), +// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { +// prefix: DecodeDifferent::Encode("Instance3Module"), +// entries: DecodeDifferent::Encode(&[]), +// }))), +// calls: None, +// event: None, +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 6, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module1_4"), +// storage: None, +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { +// name: DecodeDifferent::Encode("fail"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// event: None, +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 3, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module1_5"), +// storage: None, +// calls: None, +// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { +// name: DecodeDifferent::Encode("A"), +// arguments: DecodeDifferent::Encode(&["AccountId"]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 4, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module1_6"), +// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { +// prefix: DecodeDifferent::Encode("Instance6Module"), +// entries: DecodeDifferent::Encode(&[]), +// }))), +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { +// name: 
DecodeDifferent::Encode("fail"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { +// name: DecodeDifferent::Encode("A"), +// arguments: DecodeDifferent::Encode(&["AccountId"]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 1, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module1_7"), +// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { +// prefix: DecodeDifferent::Encode("Instance7Module"), +// entries: DecodeDifferent::Encode(&[]), +// }))), +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { +// name: DecodeDifferent::Encode("fail"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { +// name: DecodeDifferent::Encode("A"), +// arguments: DecodeDifferent::Encode(&["AccountId"]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 2, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module1_8"), +// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { +// prefix: DecodeDifferent::Encode("Instance8Module"), +// entries: DecodeDifferent::Encode(&[]), +// }))), +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { +// name: DecodeDifferent::Encode("fail"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { +// name: DecodeDifferent::Encode("A"), +// arguments: DecodeDifferent::Encode(&["AccountId"]), +// documentation: 
DecodeDifferent::Encode(&[]), +// }]))), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 12, +// }, +// ModuleMetadata { +// name: DecodeDifferent::Encode("Module1_9"), +// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { +// prefix: DecodeDifferent::Encode("Instance9Module"), +// entries: DecodeDifferent::Encode(&[]), +// }))), +// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { +// name: DecodeDifferent::Encode("fail"), +// arguments: DecodeDifferent::Encode(&[]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { +// name: DecodeDifferent::Encode("A"), +// arguments: DecodeDifferent::Encode(&["AccountId"]), +// documentation: DecodeDifferent::Encode(&[]), +// }]))), +// constants: DecodeDifferent::Encode(FnEncode(|| &[])), +// errors: DecodeDifferent::Encode(FnEncode(|| &[])), +// index: 13, +// }, +// ]), +// extrinsic: ExtrinsicMetadata { +// version: 4, +// signed_extensions: vec![DecodeDifferent::Encode("UnitSignedExtension")], +// }, +// }; +// pretty_assertions::assert_eq!(Runtime::metadata().1, RuntimeMetadata::V12(expected_metadata)); +// } + +// todo: [AJ] update test below with v13 metadata from above #[test] fn test_metadata() { - use frame_metadata::*; - let expected_metadata: RuntimeMetadataLastVersion = RuntimeMetadataLastVersion { - modules: DecodeDifferent::Encode(&[ - ModuleMetadata { - name: DecodeDifferent::Encode("System"), - storage: None, - calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - name: DecodeDifferent::Encode("noop"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - event: Some(DecodeDifferent::Encode(FnEncode(|| &[ - EventMetadata { - name: DecodeDifferent::Encode("ExtrinsicSuccess"), - arguments: DecodeDifferent::Encode(&[]), - documentation: 
DecodeDifferent::Encode(&[]), - }, - EventMetadata { - name: DecodeDifferent::Encode("ExtrinsicFailed"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - EventMetadata { - name: DecodeDifferent::Encode("Ignore"), - arguments: DecodeDifferent::Encode(&["BlockNumber"]), - documentation: DecodeDifferent::Encode(&[]), - }, - ]))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 30, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_1"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance1Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ - FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - ]))), - event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 31, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module2"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ - FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - ]))), - event: Some(DecodeDifferent::Encode(FnEncode(|| &[ - EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }, - ]))), - constants: 
DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 32, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_2"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance2Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 33, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_3"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance3Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: None, - event: None, - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 6, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_4"), - storage: None, - calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - event: None, - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 3, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_5"), - storage: None, - calls: None, - event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - 
documentation: DecodeDifferent::Encode(&[]), - }]))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 4, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_6"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance6Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 1, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_7"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance7Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 2, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_8"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance8Module"), - entries: DecodeDifferent::Encode(&[]), - }))), 
- calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 12, - }, - ModuleMetadata { - name: DecodeDifferent::Encode("Module1_9"), - storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { - prefix: DecodeDifferent::Encode("Instance9Module"), - entries: DecodeDifferent::Encode(&[]), - }))), - calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { - name: DecodeDifferent::Encode("fail"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { - name: DecodeDifferent::Encode("A"), - arguments: DecodeDifferent::Encode(&["AccountId"]), - documentation: DecodeDifferent::Encode(&[]), - }]))), - constants: DecodeDifferent::Encode(FnEncode(|| &[])), - errors: DecodeDifferent::Encode(FnEncode(|| &[])), - index: 13, - }, - ]), - extrinsic: ExtrinsicMetadata { - version: 4, - signed_extensions: vec![DecodeDifferent::Encode("UnitSignedExtension")], - }, - }; - pretty_assertions::assert_eq!(Runtime::metadata().1, RuntimeMetadata::V12(expected_metadata)); -} - -#[test] -fn test_metadata_vnext() { use frame_metadata::*; use frame_support::scale_info::{form::MetaForm, IntoPortable, Registry}; // vnext modules defined with legacy macros have empty metadata @@ -579,7 +580,7 @@ fn test_metadata_vnext() { }; let mut registry = Registry::new(); let expected_metadata = expected_metadata.into_portable(&mut registry); - 
pretty_assertions::assert_eq!(Runtime::metadata_vnext().1, vnext::RuntimeMetadata::V12(expected_metadata)); + pretty_assertions::assert_eq!(Runtime::metadata().1, vnext::RuntimeMetadata::V12(expected_metadata)); } #[test] diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 57d8ae8db0d1f..e82610ef7ad7a 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -601,248 +601,249 @@ fn pallet_on_genesis() { }) } -#[test] -fn metadata() { - use frame_metadata::*; - use codec::{Decode, Encode}; - - let expected_pallet_metadata = ModuleMetadata { - index: 1, - name: DecodeDifferent::Decoded("Example".to_string()), - storage: Some(DecodeDifferent::Decoded(StorageMetadata { - prefix: DecodeDifferent::Decoded("Example".to_string()), - entries: DecodeDifferent::Decoded(vec![ - StorageEntryMetadata { - name: DecodeDifferent::Decoded("ValueWhereClause".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain( - DecodeDifferent::Decoded( - "::_2".to_string() - ), - ), - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("Value".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("Map".to_string()), - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u8".to_string()), - value: DecodeDifferent::Decoded("u16".to_string()), - hasher: StorageHasher::Blake2_128Concat, - unused: false, - }, - default: DecodeDifferent::Decoded(vec![4, 0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("Map2".to_string()), - modifier: 
StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u16".to_string()), - value: DecodeDifferent::Decoded("u32".to_string()), - hasher: StorageHasher::Twox64Concat, - unused: false, - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("DoubleMap".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u32".to_string()), - key1: DecodeDifferent::Decoded("u8".to_string()), - key2: DecodeDifferent::Decoded("u16".to_string()), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Decoded("DoubleMap2".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u64".to_string()), - key1: DecodeDifferent::Decoded("u16".to_string()), - key2: DecodeDifferent::Decoded("u32".to_string()), - hasher: StorageHasher::Twox64Concat, - key2_hasher: StorageHasher::Blake2_128Concat, - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { - name: DecodeDifferent::Decoded("ConditionalValue".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { - name: DecodeDifferent::Decoded("ConditionalMap".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u16".to_string()), - value: 
DecodeDifferent::Decoded("u32".to_string()), - hasher: StorageHasher::Twox64Concat, - unused: false, - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { - name: DecodeDifferent::Decoded("ConditionalDoubleMap".to_string()), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u32".to_string()), - key1: DecodeDifferent::Decoded("u8".to_string()), - key2: DecodeDifferent::Decoded("u16".to_string()), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, - }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - ]), - })), - calls: Some(DecodeDifferent::Decoded(vec![ - FunctionMetadata { - name: DecodeDifferent::Decoded("foo".to_string()), - arguments: DecodeDifferent::Decoded(vec![ - FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("_foo".to_string()), - ty: DecodeDifferent::Decoded("Compact".to_string()), - }, - FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("_bar".to_string()), - ty: DecodeDifferent::Decoded("u32".to_string()), - } - ]), - documentation: DecodeDifferent::Decoded(vec![ - " Doc comment put in metadata".to_string(), - ]), - }, - FunctionMetadata { - name: DecodeDifferent::Decoded("foo_transactional".to_string()), - arguments: DecodeDifferent::Decoded(vec![ - FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("foo".to_string()), - ty: DecodeDifferent::Decoded("Compact".to_string()), - } - ]), - documentation: DecodeDifferent::Decoded(vec![ - " Doc comment put in metadata".to_string(), - ]), - }, - FunctionMetadata { - name: DecodeDifferent::Decoded("foo_no_post_info".to_string()), - arguments: DecodeDifferent::Decoded(vec![]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - ])), - event: Some(DecodeDifferent::Decoded(vec![ - EventMetadata { - name: 
DecodeDifferent::Decoded("Proposed".to_string()), - arguments: DecodeDifferent::Decoded(vec!["::AccountId".to_string()]), - documentation: DecodeDifferent::Decoded(vec![ - " doc comment put in metadata".to_string() - ]), - }, - EventMetadata { - name: DecodeDifferent::Decoded("Spending".to_string()), - arguments: DecodeDifferent::Decoded(vec!["Balance".to_string()]), - documentation: DecodeDifferent::Decoded(vec![ - " doc".to_string() - ]), - }, - EventMetadata { - name: DecodeDifferent::Decoded("Something".to_string()), - arguments: DecodeDifferent::Decoded(vec!["Other".to_string()]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - EventMetadata { - name: DecodeDifferent::Decoded("SomethingElse".to_string()), - arguments: DecodeDifferent::Decoded(vec!["::_1".to_string()]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - ])), - constants: DecodeDifferent::Decoded(vec![ - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("MyGetParam".to_string()), - ty: DecodeDifferent::Decoded("u32".to_string()), - value: DecodeDifferent::Decoded(vec![10, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![ - " Some comment".to_string(), - " Some comment".to_string(), - ]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("MyGetParam2".to_string()), - ty: DecodeDifferent::Decoded("u32".to_string()), - value: DecodeDifferent::Decoded(vec![11, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![ - " Some comment".to_string(), - " Some comment".to_string(), - ]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("MyGetParam3".to_string()), - ty: DecodeDifferent::Decoded("::_1".to_string()), - value: DecodeDifferent::Decoded(vec![12, 0, 0, 0, 0, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("some_extra".to_string()), - ty: DecodeDifferent::Decoded("T::AccountId".to_string()), - value: DecodeDifferent::Decoded(vec![100, 0, 0, 0, 0, 0, 0, 
0]), - documentation: DecodeDifferent::Decoded(vec![ - " Some doc".to_string(), - " Some doc".to_string(), - ]), - }, - ModuleConstantMetadata { - name: DecodeDifferent::Decoded("some_extra_extra".to_string()), - ty: DecodeDifferent::Decoded("T::AccountId".to_string()), - value: DecodeDifferent::Decoded(vec![0, 0, 0, 0, 0, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![ - " Some doc".to_string(), - ]), - }, - ]), - errors: DecodeDifferent::Decoded(vec![ - ErrorMetadata { - name: DecodeDifferent::Decoded("InsufficientProposersBalance".to_string()), - documentation: DecodeDifferent::Decoded(vec![ - " doc comment put into metadata".to_string(), - ]), - }, - ]), - }; - - let metadata = match Runtime::metadata().1 { - RuntimeMetadata::V12(metadata) => metadata, - _ => panic!("metadata has been bump, test needs to be updated"), - }; - - let modules_metadata = match metadata.modules { - DecodeDifferent::Encode(modules_metadata) => modules_metadata, - _ => unreachable!(), - }; - - let pallet_metadata = ModuleMetadata::decode(&mut &modules_metadata[1].encode()[..]).unwrap(); - - pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); -} +// #[test] +// fn metadata() { +// use frame_metadata::*; +// use codec::{Decode, Encode}; +// +// let expected_pallet_metadata = ModuleMetadata { +// index: 1, +// name: DecodeDifferent::Decoded("Example".to_string()), +// storage: Some(DecodeDifferent::Decoded(StorageMetadata { +// prefix: DecodeDifferent::Decoded("Example".to_string()), +// entries: DecodeDifferent::Decoded(vec![ +// StorageEntryMetadata { +// name: DecodeDifferent::Decoded("ValueWhereClause".to_string()), +// modifier: StorageEntryModifier::Optional, +// ty: StorageEntryType::Plain( +// DecodeDifferent::Decoded( +// "::_2".to_string() +// ), +// ), +// default: DecodeDifferent::Decoded(vec![0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// StorageEntryMetadata { +// name: DecodeDifferent::Decoded("Value".to_string()), +// 
modifier: StorageEntryModifier::Optional, +// ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), +// default: DecodeDifferent::Decoded(vec![0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// StorageEntryMetadata { +// name: DecodeDifferent::Decoded("Map".to_string()), +// modifier: StorageEntryModifier::Default, +// ty: StorageEntryType::Map { +// key: DecodeDifferent::Decoded("u8".to_string()), +// value: DecodeDifferent::Decoded("u16".to_string()), +// hasher: StorageHasher::Blake2_128Concat, +// unused: false, +// }, +// default: DecodeDifferent::Decoded(vec![4, 0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// StorageEntryMetadata { +// name: DecodeDifferent::Decoded("Map2".to_string()), +// modifier: StorageEntryModifier::Optional, +// ty: StorageEntryType::Map { +// key: DecodeDifferent::Decoded("u16".to_string()), +// value: DecodeDifferent::Decoded("u32".to_string()), +// hasher: StorageHasher::Twox64Concat, +// unused: false, +// }, +// default: DecodeDifferent::Decoded(vec![0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// StorageEntryMetadata { +// name: DecodeDifferent::Decoded("DoubleMap".to_string()), +// modifier: StorageEntryModifier::Optional, +// ty: StorageEntryType::DoubleMap { +// value: DecodeDifferent::Decoded("u32".to_string()), +// key1: DecodeDifferent::Decoded("u8".to_string()), +// key2: DecodeDifferent::Decoded("u16".to_string()), +// hasher: StorageHasher::Blake2_128Concat, +// key2_hasher: StorageHasher::Twox64Concat, +// }, +// default: DecodeDifferent::Decoded(vec![0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// StorageEntryMetadata { +// name: DecodeDifferent::Decoded("DoubleMap2".to_string()), +// modifier: StorageEntryModifier::Optional, +// ty: StorageEntryType::DoubleMap { +// value: DecodeDifferent::Decoded("u64".to_string()), +// key1: DecodeDifferent::Decoded("u16".to_string()), +// key2: 
DecodeDifferent::Decoded("u32".to_string()), +// hasher: StorageHasher::Twox64Concat, +// key2_hasher: StorageHasher::Blake2_128Concat, +// }, +// default: DecodeDifferent::Decoded(vec![0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// #[cfg(feature = "conditional-storage")] StorageEntryMetadata { +// name: DecodeDifferent::Decoded("ConditionalValue".to_string()), +// modifier: StorageEntryModifier::Optional, +// ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), +// default: DecodeDifferent::Decoded(vec![0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// #[cfg(feature = "conditional-storage")] StorageEntryMetadata { +// name: DecodeDifferent::Decoded("ConditionalMap".to_string()), +// modifier: StorageEntryModifier::Optional, +// ty: StorageEntryType::Map { +// key: DecodeDifferent::Decoded("u16".to_string()), +// value: DecodeDifferent::Decoded("u32".to_string()), +// hasher: StorageHasher::Twox64Concat, +// unused: false, +// }, +// default: DecodeDifferent::Decoded(vec![0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// #[cfg(feature = "conditional-storage")] StorageEntryMetadata { +// name: DecodeDifferent::Decoded("ConditionalDoubleMap".to_string()), +// modifier: StorageEntryModifier::Optional, +// ty: StorageEntryType::DoubleMap { +// value: DecodeDifferent::Decoded("u32".to_string()), +// key1: DecodeDifferent::Decoded("u8".to_string()), +// key2: DecodeDifferent::Decoded("u16".to_string()), +// hasher: StorageHasher::Blake2_128Concat, +// key2_hasher: StorageHasher::Twox64Concat, +// }, +// default: DecodeDifferent::Decoded(vec![0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// ]), +// })), +// calls: Some(DecodeDifferent::Decoded(vec![ +// FunctionMetadata { +// name: DecodeDifferent::Decoded("foo".to_string()), +// arguments: DecodeDifferent::Decoded(vec![ +// FunctionArgumentMetadata { +// name: DecodeDifferent::Decoded("_foo".to_string()), +// ty: 
DecodeDifferent::Decoded("Compact".to_string()), +// }, +// FunctionArgumentMetadata { +// name: DecodeDifferent::Decoded("_bar".to_string()), +// ty: DecodeDifferent::Decoded("u32".to_string()), +// } +// ]), +// documentation: DecodeDifferent::Decoded(vec![ +// " Doc comment put in metadata".to_string(), +// ]), +// }, +// FunctionMetadata { +// name: DecodeDifferent::Decoded("foo_transactional".to_string()), +// arguments: DecodeDifferent::Decoded(vec![ +// FunctionArgumentMetadata { +// name: DecodeDifferent::Decoded("foo".to_string()), +// ty: DecodeDifferent::Decoded("Compact".to_string()), +// } +// ]), +// documentation: DecodeDifferent::Decoded(vec![ +// " Doc comment put in metadata".to_string(), +// ]), +// }, +// FunctionMetadata { +// name: DecodeDifferent::Decoded("foo_no_post_info".to_string()), +// arguments: DecodeDifferent::Decoded(vec![]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// ])), +// event: Some(DecodeDifferent::Decoded(vec![ +// EventMetadata { +// name: DecodeDifferent::Decoded("Proposed".to_string()), +// arguments: DecodeDifferent::Decoded(vec!["::AccountId".to_string()]), +// documentation: DecodeDifferent::Decoded(vec![ +// " doc comment put in metadata".to_string() +// ]), +// }, +// EventMetadata { +// name: DecodeDifferent::Decoded("Spending".to_string()), +// arguments: DecodeDifferent::Decoded(vec!["Balance".to_string()]), +// documentation: DecodeDifferent::Decoded(vec![ +// " doc".to_string() +// ]), +// }, +// EventMetadata { +// name: DecodeDifferent::Decoded("Something".to_string()), +// arguments: DecodeDifferent::Decoded(vec!["Other".to_string()]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// EventMetadata { +// name: DecodeDifferent::Decoded("SomethingElse".to_string()), +// arguments: DecodeDifferent::Decoded(vec!["::_1".to_string()]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// ])), +// constants: DecodeDifferent::Decoded(vec![ +// ModuleConstantMetadata { +// 
name: DecodeDifferent::Decoded("MyGetParam".to_string()), +// ty: DecodeDifferent::Decoded("u32".to_string()), +// value: DecodeDifferent::Decoded(vec![10, 0, 0, 0]), +// documentation: DecodeDifferent::Decoded(vec![ +// " Some comment".to_string(), +// " Some comment".to_string(), +// ]), +// }, +// ModuleConstantMetadata { +// name: DecodeDifferent::Decoded("MyGetParam2".to_string()), +// ty: DecodeDifferent::Decoded("u32".to_string()), +// value: DecodeDifferent::Decoded(vec![11, 0, 0, 0]), +// documentation: DecodeDifferent::Decoded(vec![ +// " Some comment".to_string(), +// " Some comment".to_string(), +// ]), +// }, +// ModuleConstantMetadata { +// name: DecodeDifferent::Decoded("MyGetParam3".to_string()), +// ty: DecodeDifferent::Decoded("::_1".to_string()), +// value: DecodeDifferent::Decoded(vec![12, 0, 0, 0, 0, 0, 0, 0]), +// documentation: DecodeDifferent::Decoded(vec![]), +// }, +// ModuleConstantMetadata { +// name: DecodeDifferent::Decoded("some_extra".to_string()), +// ty: DecodeDifferent::Decoded("T::AccountId".to_string()), +// value: DecodeDifferent::Decoded(vec![100, 0, 0, 0, 0, 0, 0, 0]), +// documentation: DecodeDifferent::Decoded(vec![ +// " Some doc".to_string(), +// " Some doc".to_string(), +// ]), +// }, +// ModuleConstantMetadata { +// name: DecodeDifferent::Decoded("some_extra_extra".to_string()), +// ty: DecodeDifferent::Decoded("T::AccountId".to_string()), +// value: DecodeDifferent::Decoded(vec![0, 0, 0, 0, 0, 0, 0, 0]), +// documentation: DecodeDifferent::Decoded(vec![ +// " Some doc".to_string(), +// ]), +// }, +// ]), +// errors: DecodeDifferent::Decoded(vec![ +// ErrorMetadata { +// name: DecodeDifferent::Decoded("InsufficientProposersBalance".to_string()), +// documentation: DecodeDifferent::Decoded(vec![ +// " doc comment put into metadata".to_string(), +// ]), +// }, +// ]), +// }; +// +// let metadata = match Runtime::metadata().1 { +// RuntimeMetadata::V12(metadata) => metadata, +// _ => panic!("metadata has been bump, test 
needs to be updated"), +// }; +// +// let modules_metadata = match metadata.modules { +// DecodeDifferent::Encode(modules_metadata) => modules_metadata, +// _ => unreachable!(), +// }; +// +// let pallet_metadata = ModuleMetadata::decode(&mut &modules_metadata[1].encode()[..]).unwrap(); +// +// pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); +// } +// todo: [AJ] update this test #[test] fn metadata_vnext() { use frame_metadata::*; @@ -915,7 +916,7 @@ fn metadata_vnext() { // errors: }; - let metadata = match Runtime::metadata_vnext().1 { + let metadata = match Runtime::metadata().1 { vnext::RuntimeMetadata::V12(metadata) => metadata, }; diff --git a/primitives/api/src/lib.rs b/primitives/api/src/lib.rs index 50ff09b803895..afb9af343ba6c 100644 --- a/primitives/api/src/lib.rs +++ b/primitives/api/src/lib.rs @@ -671,8 +671,5 @@ decl_runtime_apis! { pub trait Metadata { /// Returns the metadata of a runtime. fn metadata() -> OpaqueMetadata; - - /// Returns the vnext metadata of a runtime. - fn metadata_vnext() -> OpaqueMetadata; } } diff --git a/test-utils/runtime/src/lib.rs b/test-utils/runtime/src/lib.rs index efaa4fe21883c..a8cb294b53dc6 100644 --- a/test-utils/runtime/src/lib.rs +++ b/test-utils/runtime/src/lib.rs @@ -628,10 +628,6 @@ cfg_if! { fn metadata() -> OpaqueMetadata { unimplemented!() } - - fn metadata_vnext() -> OpaqueMetadata { - unimplemented!() - } } impl sp_transaction_pool::runtime_api::TaggedTransactionQueue for Runtime { @@ -889,10 +885,6 @@ cfg_if! 
{ fn metadata() -> OpaqueMetadata { unimplemented!() } - - fn metadata_vnext() -> OpaqueMetadata { - unimplemented!() - } } impl sp_transaction_pool::runtime_api::TaggedTransactionQueue for Runtime { From f1d0714fba40f2cb23a0e05f378caa6e1b19c62c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Apr 2021 11:57:14 +0100 Subject: [PATCH 123/503] Fix up legacy frame macros metadata call function metadata --- frame/support/src/dispatch.rs | 37 +++++++++------------ frame/support/src/metadata.rs | 2 +- primitives/application-crypto/src/traits.rs | 2 +- primitives/runtime/src/testing.rs | 2 +- 4 files changed, 19 insertions(+), 24 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 98f373ad1b38c..30f6301bf4f45 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2328,7 +2328,7 @@ macro_rules! __dispatch_impl_metadata { { #[doc(hidden)] #[allow(dead_code)] - pub fn call_functions() -> &'static [$crate::dispatch::FunctionMetadata] { + pub fn call_functions() -> $crate::dispatch::Vec<$crate::metadata::v13::FunctionMetadata> { $crate::__call_to_functions!($($rest)*) } } @@ -2512,7 +2512,7 @@ macro_rules! __functions_to_metadata{ $origin_type:ty; $( $function_metadata:expr ),*; ) => { - &[ $( $function_metadata ),* ] + $crate::scale_info::prelude::vec![ $( $function_metadata ),* ] } } @@ -2527,31 +2527,26 @@ macro_rules! 
__function_to_metadata { $( $fn_doc:expr ),*; $fn_id:expr; ) => { - $crate::dispatch::FunctionMetadata { - name: $crate::dispatch::DecodeDifferent::Encode(stringify!($fn_name)), - arguments: $crate::dispatch::DecodeDifferent::Encode(&[ + $crate::metadata::v13::FunctionMetadata { + name: stringify!($fn_name), + arguments: $crate::scale_info::prelude::vec![ $( - $crate::dispatch::FunctionArgumentMetadata { - name: $crate::dispatch::DecodeDifferent::Encode(stringify!($param_name)), - ty: $crate::dispatch::DecodeDifferent::Encode( - $crate::__function_to_metadata!(@stringify_expand_attr - $(#[$codec_attr])* $param_name: $param - ) - ), + $crate::metadata::v13::FunctionArgumentMetadata { + name: stringify!($param_name), + ty: $crate::scale_info::meta_type::<$param>(), + is_compact: $crate::__function_to_metadata!(@has_compact_attr + $(#[$codec_attr])* $param_name + ) } ),* - ]), - documentation: $crate::dispatch::DecodeDifferent::Encode(&[ $( $fn_doc ),* ]), + ], + documentation: $crate::scale_info::prelude::vec![ $( $fn_doc ),* ], } }; - (@stringify_expand_attr #[compact] $param_name:ident : $param:ty) => { - concat!("Compact<", stringify!($param), ">") - }; - - (@stringify_expand_attr $param_name:ident : $param:ty) => { stringify!($param) }; - - (@stringify_expand_attr $(#[codec_attr:ident])* $param_name:ident : $param:ty) => { + (@has_compact_attr #[compact] $param_name:ident) => { true }; + (@has_compact_attr $param_name:ident) => { false }; + (@has_compact_attr $(#[codec_attr:ident])* $param_name:ident) => { compile_error!(concat!( "Invalid attribute for parameter `", stringify!($param_name), "`, the following attributes are supported: `#[compact]`" diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 52e712313bce6..37442961ed6f8 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -150,7 +150,7 @@ macro_rules! 
__runtime_modules_to_metadata { $runtime:ident; $( $metadata:expr ),*; ) => { - $crate::metadata::DecodeDifferent::Encode(&[ $( $metadata ),* ]) + vec![ $( $metadata ),* ] }; } diff --git a/primitives/application-crypto/src/traits.rs b/primitives/application-crypto/src/traits.rs index 8daa866af63ed..e64b976b65b68 100644 --- a/primitives/application-crypto/src/traits.rs +++ b/primitives/application-crypto/src/traits.rs @@ -122,7 +122,7 @@ pub trait RuntimeAppPublic: Sized { const CRYPTO_ID: CryptoTypeId; /// The signature that will be generated when signing with the corresponding private key. - type Signature: Codec + Debug + MaybeHash + Eq + PartialEq + Clone; + type Signature: Codec + Debug + MaybeHash + Eq + PartialEq + Clone + scale_info::TypeInfo; /// Returns all public keys for this application in the keystore. fn all() -> crate::Vec; diff --git a/primitives/runtime/src/testing.rs b/primitives/runtime/src/testing.rs index 3296f02a468a8..fe84fdbf57f94 100644 --- a/primitives/runtime/src/testing.rs +++ b/primitives/runtime/src/testing.rs @@ -142,7 +142,7 @@ impl traits::IdentifyAccount for UintAuthorityId { } /// A dummy signature type, to match `UintAuthorityId`. 
-#[derive(Eq, PartialEq, Clone, Debug, Hash, Serialize, Deserialize, Encode, Decode)] +#[derive(Eq, PartialEq, Clone, Debug, Hash, Serialize, Deserialize, Encode, Decode, scale_info::TypeInfo)] pub struct TestSignature(pub u64, pub Vec); impl traits::Verify for TestSignature { From 9b2b71dbfa15c155d45208627464f596d339a4e9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Apr 2021 17:08:52 +0100 Subject: [PATCH 124/503] Generate v13 metadata for storage --- Cargo.lock | 2 +- frame/support/Cargo.toml | 2 +- .../procedural/src/pallet/expand/storage.rs | 62 +++++++-------- frame/support/src/hash.rs | 16 ++-- frame/support/src/metadata.rs | 75 +++++++++---------- frame/support/src/storage/types/double_map.rs | 11 +-- frame/support/src/storage/types/map.rs | 7 +- frame/support/src/storage/types/mod.rs | 3 +- frame/support/src/storage/types/value.rs | 3 +- 9 files changed, 83 insertions(+), 98 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6a8b614c85c91..b66cac6657db9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1796,7 +1796,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#be8092d7bfb867026836c373d9fc3fdd255cbeb8" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-storage-types#e06c331e33914d13601373000f1fe5a19631efeb" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index b7726d283a7ae..3bf253c449b1b 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } frame-metadata = { version = "13.0.0", default-features = false, path = 
"../metadata" } -frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v13"] } +frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-storage-types", default-features = false, features = ["v13"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index 86fb84b339b24..5c0e2fbe3aaf3 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -17,7 +17,6 @@ use crate::pallet::Def; use crate::pallet::parse::storage::{Metadata, QueryKind}; -use frame_support_procedural_tools::clean_type_string; /// Generate the prefix_ident related the the storage. /// prefix_ident is used for the prefix struct to be given to storage as first generic param. 
@@ -94,54 +93,45 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { let ty = match &storage.metadata { Metadata::Value { value } => { - let value = clean_type_string("e::quote!(#value).to_string()); quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::Plain( - #frame_support::metadata::DecodeDifferent::Encode(#value) + #frame_support::metadata::v13::StorageEntryType::Plain( + #frame_support::scale_info::meta_type::<#value>() ) ) }, Metadata::Map { key, value } => { - let value = clean_type_string("e::quote!(#value).to_string()); - let key = clean_type_string("e::quote!(#key).to_string()); quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::Map { + #frame_support::metadata::v13::StorageEntryType::Map { hasher: <#full_ident as #metadata_trait>::HASHER, - key: #frame_support::metadata::DecodeDifferent::Encode(#key), - value: #frame_support::metadata::DecodeDifferent::Encode(#value), + key: #frame_support::scale_info::meta_type::<#key>(), + value: #frame_support::scale_info::meta_type::<#value>(), unused: false, } ) }, Metadata::DoubleMap { key1, key2, value } => { - let value = clean_type_string("e::quote!(#value).to_string()); - let key1 = clean_type_string("e::quote!(#key1).to_string()); - let key2 = clean_type_string("e::quote!(#key2).to_string()); quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::DoubleMap { + #frame_support::metadata::v13::StorageEntryType::DoubleMap { hasher: <#full_ident as #metadata_trait>::HASHER1, key2_hasher: <#full_ident as #metadata_trait>::HASHER2, - key1: #frame_support::metadata::DecodeDifferent::Encode(#key1), - key2: #frame_support::metadata::DecodeDifferent::Encode(#key2), - value: #frame_support::metadata::DecodeDifferent::Encode(#value), + key1: #frame_support::scale_info::meta_type::<#key1>(), + key2: #frame_support::scale_info::meta_type::<#key2>(), + value: 
#frame_support::scale_info::meta_type::<#value>(), } ) } }; quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* #frame_support::metadata::StorageEntryMetadata { - name: #frame_support::metadata::DecodeDifferent::Encode( - <#full_ident as #metadata_trait>::NAME - ), + #(#cfg_attrs)* #frame_support::metadata::v13::StorageEntryMetadata { + name: <#full_ident as #metadata_trait>::NAME, modifier: <#full_ident as #metadata_trait>::MODIFIER, ty: #ty, - default: #frame_support::metadata::DecodeDifferent::Encode( - <#full_ident as #metadata_trait>::DEFAULT - ), - documentation: #frame_support::metadata::DecodeDifferent::Encode(&[ + // todo: [AJ] do we need the ByteGetter stuff or is a Vec okay? + default: <#full_ident as #metadata_trait>::DEFAULT.0.default_byte(), + documentation: #frame_support::scale_info::prelude::vec![ #( #docs, )* - ]), + ], } ) }); @@ -275,18 +265,16 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { #completed_where_clause { #[doc(hidden)] - pub fn storage_metadata() -> #frame_support::metadata::StorageMetadata { - #frame_support::metadata::StorageMetadata { - prefix: #frame_support::metadata::DecodeDifferent::Encode( - < - ::PalletInfo as - #frame_support::traits::PalletInfo - >::name::<#pallet_ident<#type_use_gen>>() - .expect("Every active pallet has a name in the runtime; qed") - ), - entries: #frame_support::metadata::DecodeDifferent::Encode( - &[ #( #entries, )* ] - ), + pub fn storage_metadata() -> #frame_support::metadata::v13::StorageMetadata { + #frame_support::metadata::v13::StorageMetadata { + prefix: < + ::PalletInfo as + #frame_support::traits::PalletInfo + >::name::<#pallet_ident<#type_use_gen>>() + .expect("Every active pallet has a name in the runtime; qed"), + entries: #frame_support::scale_info::prelude::vec![ + #( #entries, )* + ], } } } diff --git a/frame/support/src/hash.rs b/frame/support/src/hash.rs index 22ccbeb6ceee3..458c8dad6e1d6 100644 --- a/frame/support/src/hash.rs +++ 
b/frame/support/src/hash.rs @@ -56,7 +56,7 @@ impl Hashable for T { /// Hasher to use to hash keys to insert to storage. pub trait StorageHasher: 'static { - const METADATA: frame_metadata::StorageHasher; + const METADATA: frame_metadata2::v13::StorageHasher; type Output: AsRef<[u8]>; fn hash(x: &[u8]) -> Self::Output; } @@ -74,7 +74,7 @@ pub trait ReversibleStorageHasher: StorageHasher { /// Store the key directly. pub struct Identity; impl StorageHasher for Identity { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Identity; + const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Identity; type Output = Vec; fn hash(x: &[u8]) -> Vec { x.to_vec() @@ -89,7 +89,7 @@ impl ReversibleStorageHasher for Identity { /// Hash storage keys with `concat(twox64(key), key)` pub struct Twox64Concat; impl StorageHasher for Twox64Concat { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Twox64Concat; + const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Twox64Concat; type Output = Vec; fn hash(x: &[u8]) -> Vec { twox_64(x) @@ -112,7 +112,7 @@ impl ReversibleStorageHasher for Twox64Concat { /// Hash storage keys with `concat(blake2_128(key), key)` pub struct Blake2_128Concat; impl StorageHasher for Blake2_128Concat { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Blake2_128Concat; + const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Blake2_128Concat; type Output = Vec; fn hash(x: &[u8]) -> Vec { blake2_128(x) @@ -135,7 +135,7 @@ impl ReversibleStorageHasher for Blake2_128Concat { /// Hash storage keys with blake2 128 pub struct Blake2_128; impl StorageHasher for Blake2_128 { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Blake2_128; + const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Blake2_128; 
type Output = [u8; 16]; fn hash(x: &[u8]) -> [u8; 16] { blake2_128(x) @@ -145,7 +145,7 @@ impl StorageHasher for Blake2_128 { /// Hash storage keys with blake2 256 pub struct Blake2_256; impl StorageHasher for Blake2_256 { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Blake2_256; + const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Blake2_256; type Output = [u8; 32]; fn hash(x: &[u8]) -> [u8; 32] { blake2_256(x) @@ -155,7 +155,7 @@ impl StorageHasher for Blake2_256 { /// Hash storage keys with twox 128 pub struct Twox128; impl StorageHasher for Twox128 { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Twox128; + const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Twox128; type Output = [u8; 16]; fn hash(x: &[u8]) -> [u8; 16] { twox_128(x) @@ -165,7 +165,7 @@ impl StorageHasher for Twox128 { /// Hash storage keys with twox 256 pub struct Twox256; impl StorageHasher for Twox256 { - const METADATA: frame_metadata::StorageHasher = frame_metadata::StorageHasher::Twox256; + const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Twox256; type Output = [u8; 32]; fn hash(x: &[u8]) -> [u8; 32] { twox_256(x) diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 37442961ed6f8..e7c697b0bf19b 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -117,11 +117,9 @@ macro_rules! 
__runtime_modules_to_metadata { $( $metadata, )* $crate::metadata::v13::ModuleMetadata { name: stringify!($name), index: $index, - // todo: [AJ] storage - storage: None, - // storage: $crate::__runtime_modules_to_metadata_calls_storage!( - // $mod, $module $( <$instance> )?, $runtime, $(with $kw)* - // ), + storage: $crate::__runtime_modules_to_metadata_calls_storage!( + $mod, $module $( <$instance> )?, $runtime, $(with $kw)* + ), calls: $crate::__runtime_modules_to_metadata_calls_call!( $mod, $module $( <$instance> )?, $runtime, $(with $kw)* ), @@ -219,42 +217,37 @@ macro_rules! __runtime_modules_to_metadata_calls_event { }; } -// todo: [AJ] implement storage metadata vnext -// #[macro_export] -// #[doc(hidden)] -// macro_rules! __runtime_modules_to_metadata_calls_storage { -// ( -// $mod: ident, -// $module: ident $( <$instance:ident> )?, -// $runtime: ident, -// with Storage -// $(with $kws:ident)* -// ) => { -// Some($crate::metadata::DecodeDifferent::Encode( -// $crate::metadata::FnEncode( -// $mod::$module::<$runtime $(, $mod::$instance )?>::storage_metadata -// ) -// )) -// }; -// ( -// $mod: ident, -// $module: ident $( <$instance:ident> )?, -// $runtime: ident, -// with $_:ident -// $(with $kws:ident)* -// ) => { -// $crate::__runtime_modules_to_metadata_calls_storage! { -// $mod, $module $( <$instance> )?, $runtime, $(with $kws)* -// }; -// }; -// ( -// $mod: ident, -// $module: ident $( <$instance:ident> )?, -// $runtime: ident, -// ) => { -// None -// }; -// } +#[macro_export] +#[doc(hidden)] +macro_rules! __runtime_modules_to_metadata_calls_storage { + ( + $mod: ident, + $module: ident $( <$instance:ident> )?, + $runtime: ident, + with Storage + $(with $kws:ident)* + ) => { + Some($mod::$module::<$runtime $(, $mod::$instance )?>::storage_metadata()) + }; + ( + $mod: ident, + $module: ident $( <$instance:ident> )?, + $runtime: ident, + with $_:ident + $(with $kws:ident)* + ) => { + $crate::__runtime_modules_to_metadata_calls_storage! 
{ + $mod, $module $( <$instance> )?, $runtime, $(with $kws)* + }; + }; + ( + $mod: ident, + $module: ident $( <$instance:ident> )?, + $runtime: ident, + ) => { + None + }; +} // todo: [AJ] restore metadata tests // #[cfg(test)] diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index f0b5f66eff058..762513e9a7efb 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -26,7 +26,8 @@ use crate::{ }, traits::{GetDefault, StorageInstance}, }; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; +use frame_metadata::DefaultByteGetter; +use frame_metadata2::v13::StorageEntryModifier; use sp_std::vec::Vec; /// A type that allow to store values for `(key1, key2)` couple. Similar to `StorageMap` but allow @@ -391,8 +392,8 @@ pub trait StorageDoubleMapMetadata { const MODIFIER: StorageEntryModifier; const NAME: &'static str; const DEFAULT: DefaultByteGetter; - const HASHER1: frame_metadata::StorageHasher; - const HASHER2: frame_metadata::StorageHasher; + const HASHER1: frame_metadata2::v13::StorageHasher; + const HASHER2: frame_metadata2::v13::StorageHasher; } impl StorageDoubleMapMetadata @@ -407,8 +408,8 @@ impl StorageDou OnEmpty: crate::traits::Get + 'static { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const HASHER1: frame_metadata::StorageHasher = Hasher1::METADATA; - const HASHER2: frame_metadata::StorageHasher = Hasher2::METADATA; + const HASHER1: frame_metadata2::v13::StorageHasher = Hasher1::METADATA; + const HASHER2: frame_metadata2::v13::StorageHasher = Hasher2::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; const DEFAULT: DefaultByteGetter = DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index 4af28a77cf2b6..69160ffaca003 100644 --- a/frame/support/src/storage/types/map.rs +++ 
b/frame/support/src/storage/types/map.rs @@ -26,7 +26,8 @@ use crate::{ }, traits::{GetDefault, StorageInstance}, }; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; +use frame_metadata::{DefaultByteGetter}; +use frame_metadata2::v13::StorageEntryModifier; use sp_std::prelude::*; /// A type that allow to store value for given key. Allowing to insert/remove/iterate on values. @@ -295,7 +296,7 @@ pub trait StorageMapMetadata { const MODIFIER: StorageEntryModifier; const NAME: &'static str; const DEFAULT: DefaultByteGetter; - const HASHER: frame_metadata::StorageHasher; + const HASHER: frame_metadata2::v13::StorageHasher; } impl StorageMapMetadata @@ -308,7 +309,7 @@ impl StorageMapMetadata OnEmpty: crate::traits::Get + 'static, { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const HASHER: frame_metadata::StorageHasher = Hasher::METADATA; + const HASHER: frame_metadata2::v13::StorageHasher = Hasher::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; const DEFAULT: DefaultByteGetter = DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); diff --git a/frame/support/src/storage/types/mod.rs b/frame/support/src/storage/types/mod.rs index 5bb6684b79259..6cf58a9c0e26d 100644 --- a/frame/support/src/storage/types/mod.rs +++ b/frame/support/src/storage/types/mod.rs @@ -19,7 +19,8 @@ //! StorageMap and others. 
use codec::FullCodec; -use frame_metadata::{DefaultByte, StorageEntryModifier}; +use frame_metadata::{DefaultByte}; +use frame_metadata2::v13::StorageEntryModifier; mod value; mod map; diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index 39f718956eb64..4313714bf3981 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -25,7 +25,8 @@ use crate::{ }, traits::{GetDefault, StorageInstance}, }; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; +use frame_metadata::{DefaultByteGetter}; +use frame_metadata2::v13::StorageEntryModifier; /// A type that allow to store a value. /// From d1355d461df56a59f1ca146243054f5be56f3ff0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Apr 2021 17:34:01 +0100 Subject: [PATCH 125/503] Derive TypeInfo for storage types --- Cargo.lock | 1 + frame/balances/src/lib.rs | 8 ++++---- frame/contracts/src/storage.rs | 8 ++++---- frame/contracts/src/wasm/mod.rs | 2 +- frame/election-provider-multi-phase/src/lib.rs | 6 +++--- frame/gilt/src/lib.rs | 6 +++--- frame/proxy/src/lib.rs | 4 ++-- frame/support/src/weights.rs | 2 +- frame/system/src/lib.rs | 12 ++++++------ primitives/consensus/babe/src/lib.rs | 2 +- primitives/npos-elections/Cargo.toml | 1 + primitives/npos-elections/src/lib.rs | 2 +- primitives/runtime/src/generic/digest.rs | 6 +++--- primitives/runtime/src/runtime_string.rs | 8 ++++++++ 14 files changed, 39 insertions(+), 29 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b66cac6657db9..4e5cbe2a5dc64 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9018,6 +9018,7 @@ version = "3.0.0" dependencies = [ "parity-scale-codec", "rand 0.7.3", + "scale-info", "serde", "sp-arithmetic", "sp-core", diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 87cdf5923b136..f0445a4e40a06 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -519,7 +519,7 @@ impl, I: 'static> GenesisConfig { } 
/// Simplified reasons for withdrawing balance. -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub enum Reasons { /// Paying system transaction fees. Fee = 0, @@ -551,7 +551,7 @@ impl BitOr for Reasons { /// A single lock on a balance. There can be many of these on an account and they "overlap", so the /// same balance is frozen by multiple locks. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct BalanceLock { /// An identifier for this lock. Only one lock may be in existence for each identifier. pub id: LockIdentifier, @@ -562,7 +562,7 @@ pub struct BalanceLock { } /// All balance information for an account. -#[derive(Encode, Decode, Clone, PartialEq, Eq, Default, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct AccountData { /// Non-reserved part of the balance. There may still be restrictions on this, but it is the /// total pool what may in principle be transferred, reserved and used for tipping. @@ -608,7 +608,7 @@ impl AccountData { // A value placed in storage that represents the current version of the Balances storage. // This value is used by the `on_runtime_upgrade` logic to determine whether we run // storage migration logic. This should match directly with the semantic versions of the Rust crate. 
-#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] enum Releases { V1_0_0, V2_0_0, diff --git a/frame/contracts/src/storage.rs b/frame/contracts/src/storage.rs index d78551f8f170e..13749b2b722eb 100644 --- a/frame/contracts/src/storage.rs +++ b/frame/contracts/src/storage.rs @@ -46,7 +46,7 @@ pub type TombstoneContractInfo = /// Information for managing an account and its sub trie abstraction. /// This is the required info to cache for an account -#[derive(Encode, Decode, RuntimeDebug)] +#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum ContractInfo { Alive(AliveContractInfo), Tombstone(TombstoneContractInfo), @@ -82,7 +82,7 @@ impl ContractInfo { /// Information for managing an account and its sub trie abstraction. /// This is the required info to cache for an account. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct RawAliveContractInfo { /// Unique ID for the subtree encoded as a bytes vector. 
pub trie_id: TrieId, @@ -120,7 +120,7 @@ fn child_trie_info(trie_id: &[u8]) -> ChildInfo { ChildInfo::new_default(trie_id) } -#[derive(Encode, Decode, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct RawTombstoneContractInfo(H, PhantomData); impl RawTombstoneContractInfo @@ -149,7 +149,7 @@ impl From> for ContractInfo { #[cfg_attr(test, derive(PartialEq, Eq, Debug))] pub struct ContractAbsentError; -#[derive(Encode, Decode)] +#[derive(Encode, Decode, scale_info::TypeInfo)] pub struct DeletedContract { pair_count: u32, trie_id: TrieId, diff --git a/frame/contracts/src/wasm/mod.rs b/frame/contracts/src/wasm/mod.rs index 3f92320b94b77..f0409338cc19e 100644 --- a/frame/contracts/src/wasm/mod.rs +++ b/frame/contracts/src/wasm/mod.rs @@ -51,7 +51,7 @@ pub use tests::MockExt; /// `schedule_version` and `code` when a contract with an outdated instrumention is called. /// Therefore one must be careful when holding any in-memory representation of this type while /// calling into a contract as those fields can get out of date. -#[derive(Clone, Encode, Decode)] +#[derive(Clone, Encode, Decode, scale_info::TypeInfo)] pub struct PrefabWasmModule { /// Version of the schedule with which the code was instrumented. #[codec(compact)] diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 8144b7478d6f0..e1ed67057f6f4 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -298,7 +298,7 @@ impl BenchmarkingConfig for () { } /// Current phase of the pallet. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum Phase { /// Nothing, the election is not happening. Off, @@ -396,7 +396,7 @@ impl Default for RawSolution { } /// A checked solution, ready to be enacted. 
-#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct ReadySolution { /// The final supports of the solution. /// @@ -415,7 +415,7 @@ pub struct ReadySolution { /// [`ElectionDataProvider`] and are kept around until the round is finished. /// /// These are stored together because they are often accessed together. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct RoundSnapshot { /// All of the voters. pub voters: Vec<(A, VoteWeight, Vec)>, diff --git a/frame/gilt/src/lib.rs b/frame/gilt/src/lib.rs index 50629d119bd02..b9b3d959b4dda 100644 --- a/frame/gilt/src/lib.rs +++ b/frame/gilt/src/lib.rs @@ -172,7 +172,7 @@ pub mod pallet { pub struct Pallet(_); /// A single bid on a gilt, an item of a *queue* in `Queues`. - #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug)] + #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct GiltBid { /// The amount bid. pub amount: Balance, @@ -181,7 +181,7 @@ pub mod pallet { } /// Information representing an active gilt. - #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug)] + #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct ActiveGilt { /// The proportion of the effective total issuance (i.e. accounting for any eventual gilt /// expansion or contraction that may eventually be claimed). 
@@ -205,7 +205,7 @@ pub mod pallet { /// `issuance - frozen + proportion * issuance` /// /// where `issuance = total_issuance - IgnoredIssuance` - #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug)] + #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct ActiveGiltsTotal { /// The total amount of funds held in reserve for all active gilts. pub frozen: Balance, diff --git a/frame/proxy/src/lib.rs b/frame/proxy/src/lib.rs index 5e63e0cd8d3d9..ccdddd4371d6f 100644 --- a/frame/proxy/src/lib.rs +++ b/frame/proxy/src/lib.rs @@ -58,7 +58,7 @@ type BalanceOf = <::Currency as Currency< { /// The account which may act on behalf of another. delegate: AccountId, @@ -70,7 +70,7 @@ pub struct ProxyDefinition { } /// Details surrounding a specific instance of an announcement to make a call. -#[derive(Encode, Decode, Clone, Copy, Eq, PartialEq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Copy, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub struct Announcement { /// The account which made the announcement. real: AccountId, diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 38d1be93dcce2..87874d4a30e52 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -730,7 +730,7 @@ impl WeightToFeePolynomial for IdentityFee where } /// A struct holding value for each `DispatchClass`. -#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub struct PerDispatchClass { /// Value for `Normal` extrinsics. 
normal: T, diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 314d3f1265852..d0cd75eb7cd63 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -202,7 +202,7 @@ pub mod pallet { + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + MaybeMallocSizeOf; /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). - type Hashing: Hash; + type Hashing: Hash + scale_info::TypeInfo; /// The user account identifier type for the runtime. type AccountId: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord @@ -247,7 +247,7 @@ pub mod pallet { /// Data to be associated with an account (other than nonce/transaction counter, which this /// pallet does regardless). - type AccountData: Member + FullCodec + Clone + Default; + type AccountData: Member + FullCodec + Clone + Default + scale_info::TypeInfo; /// Handler for when a new account has just been created. type OnNewAccount: OnNewAccount; @@ -714,7 +714,7 @@ pub type Key = Vec; pub type KeyValue = (Vec, Vec); /// A phase of a block's execution. -#[derive(Encode, Decode, RuntimeDebug)] +#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] pub enum Phase { /// Applying an extrinsic. @@ -732,7 +732,7 @@ impl Default for Phase { } /// Record of an event happening. -#[derive(Encode, Decode, RuntimeDebug)] +#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] pub struct EventRecord { /// The phase of the block it happened in. @@ -784,7 +784,7 @@ type EventIndex = u32; pub type RefCount = u32; /// Information of an account. -#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub struct AccountInfo { /// The number of transactions this account has sent. 
pub nonce: Index, @@ -804,7 +804,7 @@ pub struct AccountInfo { /// Stores the `spec_version` and `spec_name` of when the last runtime upgrade /// happened. -#[derive(sp_runtime::RuntimeDebug, Encode, Decode)] +#[derive(sp_runtime::RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(PartialEq))] pub struct LastRuntimeUpgradeInfo { pub spec_version: codec::Compact, diff --git a/primitives/consensus/babe/src/lib.rs b/primitives/consensus/babe/src/lib.rs index 62e52c2f41715..1f883bf06d64a 100644 --- a/primitives/consensus/babe/src/lib.rs +++ b/primitives/consensus/babe/src/lib.rs @@ -250,7 +250,7 @@ impl sp_consensus::SlotData for BabeGenesisConfiguration { } /// Configuration data used by the BABE consensus engine. -#[derive(Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct BabeEpochConfiguration { /// A constant value that is used in the threshold calculation formula. 
diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 79d46743cd758..bdb6a4d707b3f 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-npos-elections-compact = { version = "3.0.0", path = "./compact" } diff --git a/primitives/npos-elections/src/lib.rs b/primitives/npos-elections/src/lib.rs index 05505d06f201e..eb26268eb8c5c 100644 --- a/primitives/npos-elections/src/lib.rs +++ b/primitives/npos-elections/src/lib.rs @@ -605,7 +605,7 @@ impl StakedAssignment { /// /// This, at the current version, resembles the `Exposure` defined in the Staking pallet, yet they /// do not necessarily have to be the same. -#[derive(Default, RuntimeDebug, Encode, Decode, Clone, Eq, PartialEq)] +#[derive(Default, RuntimeDebug, Encode, Decode, Clone, Eq, PartialEq, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct Support { /// Total support. diff --git a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index 8594393c7cdea..ca5bc2e26433f 100644 --- a/primitives/runtime/src/generic/digest.rs +++ b/primitives/runtime/src/generic/digest.rs @@ -27,7 +27,7 @@ use crate::codec::{Decode, Encode, Input, Error}; use sp_core::{ChangesTrieConfiguration, RuntimeDebug}; /// Generic header digest. 
-#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize, parity_util_mem::MallocSizeOf))] pub struct Digest { /// A list of logs in the digest. @@ -73,7 +73,7 @@ impl Digest { /// Digest item that is able to encode/decode 'system' digest items and /// provide opaque access to other items. -#[derive(PartialEq, Eq, Clone, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(parity_util_mem::MallocSizeOf))] pub enum DigestItem { /// System digest item that contains the root of changes trie at given @@ -113,7 +113,7 @@ pub enum DigestItem { } /// Available changes trie signals. -#[derive(PartialEq, Eq, Clone, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Debug, parity_util_mem::MallocSizeOf))] pub enum ChangesTrieSignal { /// New changes trie configuration is enacted, starting from **next block**. diff --git a/primitives/runtime/src/runtime_string.rs b/primitives/runtime/src/runtime_string.rs index e315de430c12d..d2e961de5c5c4 100644 --- a/primitives/runtime/src/runtime_string.rs +++ b/primitives/runtime/src/runtime_string.rs @@ -32,6 +32,14 @@ pub enum RuntimeString { Owned(Vec), } +impl scale_info::TypeInfo for RuntimeString { + type Identity = str; + + fn type_info() -> scale_info::Type { + Self::Identity::type_info() + } +} + /// Convenience macro to use the format! interface to get a `RuntimeString::Owned` #[macro_export] macro_rules! 
format_runtime_string { From af346dac75ae8060407268f0c0b69b84dfa74c28 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Apr 2021 10:35:21 +0100 Subject: [PATCH 126/503] Derive TypeInfo for hashers --- primitives/runtime/src/traits.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index a228bad49f289..0b57e21fefb7d 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -406,7 +406,7 @@ pub trait Hash: 'static + MaybeSerializeDeserialize + Debug + Clone + Eq + Parti } /// Blake2-256 Hash implementation. -#[derive(PartialEq, Eq, Clone, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct BlakeTwo256; @@ -433,7 +433,7 @@ impl Hash for BlakeTwo256 { } /// Keccak-256 Hash implementation. -#[derive(PartialEq, Eq, Clone, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct Keccak256; From 46079aa5350c419d9e74f55c96ba956633c0b07b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Apr 2021 10:35:35 +0100 Subject: [PATCH 127/503] Update to latest frame-metadata --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index aaf2a36e59059..29f876e0e0580 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1796,7 +1796,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-storage-types#e06c331e33914d13601373000f1fe5a19631efeb" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-storage-types#477d5e2c8f348c9cf67790423d3a1c6af2943c9b" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", From bf43566799b8b9f088a994dd84f3c30dc8d1ead1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Apr 
2021 11:38:45 +0100 Subject: [PATCH 128/503] Fix up legacy expansion for v13 metadata --- .../procedural/src/storage/metadata.rs | 47 ++++++++----------- 1 file changed, 19 insertions(+), 28 deletions(-) diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index c321386ae1dc4..c0c37bc9454cf 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -17,31 +17,26 @@ //! Implementation of `storage_metadata` on module structure, used by construct_runtime. -use frame_support_procedural_tools::clean_type_string; use proc_macro2::TokenStream; use quote::quote; use super::{DeclStorageDefExt, StorageLineDefExt, StorageLineTypeDef}; fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> TokenStream { let value_type = &line.value_type; - let value_type = clean_type_string("e!( #value_type ).to_string()); match &line.storage_type { StorageLineTypeDef::Simple(_) => { quote!{ - #scrate::metadata::StorageEntryType::Plain( - #scrate::metadata::DecodeDifferent::Encode(#value_type), - ) + #scrate::scale_info::meta_type::<#value_type>() } }, StorageLineTypeDef::Map(map) => { let hasher = map.hasher.into_metadata(); let key = &map.key; - let key = clean_type_string("e!(#key).to_string()); quote!{ #scrate::metadata::StorageEntryType::Map { - hasher: #scrate::metadata::#hasher, - key: #scrate::metadata::DecodeDifferent::Encode(#key), - value: #scrate::metadata::DecodeDifferent::Encode(#value_type), + hasher: #scrate::metadata::v13::#hasher, + key: #scrate::scale_info::meta_type::<#key>(), + value: #scrate::scale_info::meta_type::<#value_type>(), unused: false, } } @@ -50,16 +45,14 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> let hasher1 = map.hasher1.into_metadata(); let hasher2 = map.hasher2.into_metadata(); let key1 = &map.key1; - let key1 = clean_type_string("e!(#key1).to_string()); let key2 = 
&map.key2; - let key2 = clean_type_string("e!(#key2).to_string()); quote!{ #scrate::metadata::StorageEntryType::DoubleMap { - hasher: #scrate::metadata::#hasher1, - key1: #scrate::metadata::DecodeDifferent::Encode(#key1), - key2: #scrate::metadata::DecodeDifferent::Encode(#key2), - value: #scrate::metadata::DecodeDifferent::Encode(#value_type), - key2_hasher: #scrate::metadata::#hasher2, + hasher: #scrate::metadata::v13::#hasher1, + key1: #scrate::scale_info::meta_type::<#key1>(), + key2: #scrate::scale_info::meta_type::<#key2>(), + value: #scrate::scale_info::meta_type::<#value_type>(), + key2_hasher: #scrate::metadata::v13::#hasher2, } } }, @@ -148,9 +141,9 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre let str_name = line.name.to_string(); let modifier = if line.is_option { - quote!(#scrate::metadata::StorageEntryModifier::Optional) + quote!(#scrate::metadata::v13::StorageEntryModifier::Optional) } else { - quote!(#scrate::metadata::StorageEntryModifier::Default) + quote!(#scrate::metadata::v13::StorageEntryModifier::Default) }; let ty = storage_line_metadata_type(scrate, line); @@ -171,14 +164,12 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre } let entry = quote! 
{ - #scrate::metadata::StorageEntryMetadata { - name: #scrate::metadata::DecodeDifferent::Encode(#str_name), + #scrate::metadata::v13::StorageEntryMetadata { + name: #str_name, modifier: #modifier, ty: #ty, - default: #scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::DefaultByteGetter(&#default_byte_getter_struct_instance) - ), - documentation: #scrate::metadata::DecodeDifferent::Encode(&[ #docs ]), + default: #scrate::metadata::DefaultByteGetter(&#default_byte_getter_struct_instance).0.default_byte(), + documentation: #scrate::scale_info::prelude::vec![ #docs ], }, }; @@ -195,9 +186,9 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre }; let store_metadata = quote!( - #scrate::metadata::StorageMetadata { - prefix: #scrate::metadata::DecodeDifferent::Encode(#prefix), - entries: #scrate::metadata::DecodeDifferent::Encode(&[ #entries ][..]), + #scrate::metadata::v13::StorageMetadata { + prefix: #prefix, + entries: #scrate::scale_info::prelude::vec![ #entries ], } ); @@ -210,7 +201,7 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre impl#module_impl #module_struct #where_clause { #[doc(hidden)] - pub fn storage_metadata() -> #scrate::metadata::StorageMetadata { + pub fn storage_metadata() -> #scrate::metadata::v13::StorageMetadata { #store_metadata } } From 6781d2aed39a5a3f347e225abbe02a2aa5f46eeb Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Apr 2021 12:27:25 +0100 Subject: [PATCH 129/503] Add missing scale-info dependencies --- Cargo.lock | 1 + frame/authorship/src/lib.rs | 2 +- frame/collective/src/lib.rs | 2 +- frame/democracy/src/lib.rs | 4 ++-- frame/democracy/src/types.rs | 8 ++++---- frame/lottery/src/lib.rs | 2 +- frame/recovery/src/lib.rs | 4 ++-- frame/scheduler/src/lib.rs | 4 ++-- frame/society/src/lib.rs | 2 +- frame/support/procedural/src/storage/metadata.rs | 8 +++++--- frame/transaction-payment/src/lib.rs | 2 +- frame/treasury/src/lib.rs | 2 +- 
primitives/arithmetic/src/fixed_point.rs | 2 +- primitives/core/src/crypto.rs | 2 +- primitives/staking/Cargo.toml | 1 + primitives/staking/src/offence.rs | 2 +- 16 files changed, 26 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 29f876e0e0580..f714d25ee9b91 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9220,6 +9220,7 @@ name = "sp-staking" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-runtime", "sp-std", ] diff --git a/frame/authorship/src/lib.rs b/frame/authorship/src/lib.rs index 5e9955f59f9df..94638ec2a2244 100644 --- a/frame/authorship/src/lib.rs +++ b/frame/authorship/src/lib.rs @@ -144,7 +144,7 @@ where } } -#[derive(Encode, Decode, sp_runtime::RuntimeDebug)] +#[derive(Encode, Decode, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(any(feature = "std", test), derive(PartialEq))] enum UncleEntryItem { InclusionHeight(BlockNumber), diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index 5c33bff3006b7..5c3fa5ff04653 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -177,7 +177,7 @@ impl GetBacking for RawOrigin { /// Origin for the collective module. pub type Origin = RawOrigin<::AccountId, I>; -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] /// Info for keeping track of a motion being voted on. pub struct Votes { /// The proposal's unique index. diff --git a/frame/democracy/src/lib.rs b/frame/democracy/src/lib.rs index b3b37b0b34b68..3b98568dd7e84 100644 --- a/frame/democracy/src/lib.rs +++ b/frame/democracy/src/lib.rs @@ -305,7 +305,7 @@ pub trait Config: frame_system::Config + Sized { type MaxProposals: Get; } -#[derive(Clone, Encode, Decode, RuntimeDebug)] +#[derive(Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum PreimageStatus { /// The preimage is imminently needed at the argument. 
Missing(BlockNumber), @@ -332,7 +332,7 @@ impl PreimageStatus { /// The number of aye votes, expressed in terms of post-conviction lock-vote. pub (crate) ayes: Balance, @@ -34,7 +34,7 @@ pub struct Tally { } /// Amount of votes and capital placed in delegation for an account. -#[derive(Encode, Decode, Default, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Default, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct Delegations { /// The number of votes (this is post-conviction). pub (crate) votes: Balance, @@ -161,7 +161,7 @@ impl< } /// Info regarding an ongoing referendum. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct ReferendumStatus { /// When voting on this referendum will end. pub (crate) end: BlockNumber, @@ -176,7 +176,7 @@ pub struct ReferendumStatus { } /// Info regarding a referendum, present or past. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub enum ReferendumInfo { /// Referendum is happening, the arg is the block number at which it will end. Ongoing(ReferendumStatus), diff --git a/frame/lottery/src/lib.rs b/frame/lottery/src/lib.rs index a37238a2d9f85..f04b85b209f3d 100644 --- a/frame/lottery/src/lib.rs +++ b/frame/lottery/src/lib.rs @@ -116,7 +116,7 @@ pub trait Config: frame_system::Config { // We use this to uniquely match someone's incoming call with the calls configured for the lottery. type CallIndex = (u8, u8); -#[derive(Encode, Decode, Default, Eq, PartialEq, RuntimeDebug)] +#[derive(Encode, Decode, Default, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] pub struct LotteryConfig { /// Price per entry. 
price: Balance, diff --git a/frame/recovery/src/lib.rs b/frame/recovery/src/lib.rs index ceb2f5a688742..3615c3bf645af 100644 --- a/frame/recovery/src/lib.rs +++ b/frame/recovery/src/lib.rs @@ -210,7 +210,7 @@ pub trait Config: frame_system::Config { } /// An active recovery process. -#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct ActiveRecovery { /// The block number when the recovery process started. created: BlockNumber, @@ -222,7 +222,7 @@ pub struct ActiveRecovery { } /// Configuration for recovering an account. -#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct RecoveryConfig { /// The minimum number of blocks since the start of the recovery process before the account /// can be recovered. diff --git a/frame/scheduler/src/lib.rs b/frame/scheduler/src/lib.rs index 5332aedf7f136..6f43e71ee9d3d 100644 --- a/frame/scheduler/src/lib.rs +++ b/frame/scheduler/src/lib.rs @@ -116,7 +116,7 @@ struct ScheduledV1 { /// Information regarding an item to be executed in the future. #[cfg_attr(any(feature = "std", test), derive(PartialEq, Eq))] -#[derive(Clone, RuntimeDebug, Encode, Decode)] +#[derive(Clone, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub struct ScheduledV2 { /// The unique identity for this task, if there is one. maybe_id: Option>, @@ -137,7 +137,7 @@ pub type Scheduled = ScheduledV2: system::Config { } /// A vote by a member on a candidate application. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub enum Vote { /// The member has been chosen to be skeptic and has not yet taken any action. 
Skeptic, diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index c0c37bc9454cf..7df29b051bc23 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -26,14 +26,16 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> match &line.storage_type { StorageLineTypeDef::Simple(_) => { quote!{ - #scrate::scale_info::meta_type::<#value_type>() + #scrate::metadata::v13::StorageEntryType::Plain( + #scrate::scale_info::meta_type::<#value_type>() + ) } }, StorageLineTypeDef::Map(map) => { let hasher = map.hasher.into_metadata(); let key = &map.key; quote!{ - #scrate::metadata::StorageEntryType::Map { + #scrate::metadata::v13::StorageEntryType::Map { hasher: #scrate::metadata::v13::#hasher, key: #scrate::scale_info::meta_type::<#key>(), value: #scrate::scale_info::meta_type::<#value_type>(), @@ -47,7 +49,7 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> let key1 = &map.key1; let key2 = &map.key2; quote!{ - #scrate::metadata::StorageEntryType::DoubleMap { + #scrate::metadata::v13::StorageEntryType::DoubleMap { hasher: #scrate::metadata::v13::#hasher1, key1: #scrate::scale_info::meta_type::<#key1>(), key2: #scrate::scale_info::meta_type::<#key2>(), diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index d4d201d6813eb..0917c3c59f566 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -216,7 +216,7 @@ impl Convert for TargetedFeeAdjustment { /// The account proposing it. proposer: AccountId, diff --git a/primitives/arithmetic/src/fixed_point.rs b/primitives/arithmetic/src/fixed_point.rs index 3dd8b9a1f7ad0..3a1ce9282ac15 100644 --- a/primitives/arithmetic/src/fixed_point.rs +++ b/primitives/arithmetic/src/fixed_point.rs @@ -339,7 +339,7 @@ macro_rules! 
implement_fixed { /// A fixed point number representation in the range. /// #[doc = $title] - #[derive(Encode, Decode, CompactAs, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] + #[derive(Encode, Decode, CompactAs, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, scale_info::TypeInfo)] pub struct $name($inner_type); impl From<$inner_type> for $name { diff --git a/primitives/core/src/crypto.rs b/primitives/core/src/crypto.rs index 332c976bf41d9..ad51c58f86df3 100644 --- a/primitives/core/src/crypto.rs +++ b/primitives/core/src/crypto.rs @@ -1097,7 +1097,7 @@ pub trait CryptoType { /// public modules. #[derive( Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Encode, Decode, PassByInner, - crate::RuntimeDebug + crate::RuntimeDebug, scale_info::TypeInfo )] #[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] pub struct KeyTypeId(pub [u8; 4]); diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index cf2347082a885..b01de2588670d 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../std" } diff --git a/primitives/staking/src/offence.rs b/primitives/staking/src/offence.rs index 0212d1bd8f2f7..1531727d1097a 100644 --- a/primitives/staking/src/offence.rs +++ b/primitives/staking/src/offence.rs @@ -181,7 +181,7 @@ impl OnOffenceHandler } /// A details about an offending authority for a particular kind of offence. 
-#[derive(Clone, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] pub struct OffenceDetails { /// The offending authority id pub offender: Offender, From 91c5ea4451ce13197b267534bf10168139504f98 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Apr 2021 12:39:47 +0100 Subject: [PATCH 130/503] More TypeInfo derives --- frame/bounties/src/lib.rs | 4 ++-- frame/grandpa/src/lib.rs | 6 +++--- frame/merkle-mountain-range/src/lib.rs | 2 +- frame/scheduler/src/lib.rs | 3 ++- frame/staking/src/slashing.rs | 6 +++--- frame/tips/src/lib.rs | 2 +- 6 files changed, 12 insertions(+), 11 deletions(-) diff --git a/frame/bounties/src/lib.rs b/frame/bounties/src/lib.rs index 419713ab5eff5..c15ed88581dd7 100644 --- a/frame/bounties/src/lib.rs +++ b/frame/bounties/src/lib.rs @@ -137,7 +137,7 @@ pub trait Config: frame_system::Config + pallet_treasury::Config { pub type BountyIndex = u32; /// A bounty proposal. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub struct Bounty { /// The account proposing it. proposer: AccountId, @@ -154,7 +154,7 @@ pub struct Bounty { } /// The status of a bounty proposal. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] pub enum BountyStatus { /// The bounty is proposed and waiting for approval. Proposed, diff --git a/frame/grandpa/src/lib.rs b/frame/grandpa/src/lib.rs index eb3dc4f110acb..763d4738b1908 100644 --- a/frame/grandpa/src/lib.rs +++ b/frame/grandpa/src/lib.rs @@ -110,7 +110,7 @@ pub trait WeightInfo { /// A stored pending change, old format. 
// TODO: remove shim // https://github.com/paritytech/substrate/issues/1614 -#[derive(Encode, Decode)] +#[derive(Encode, Decode, scale_info::TypeInfo)] pub struct OldStoredPendingChange { /// The block number this was scheduled at. pub scheduled_at: N, @@ -121,7 +121,7 @@ pub struct OldStoredPendingChange { } /// A stored pending change. -#[derive(Encode)] +#[derive(Encode, scale_info::TypeInfo)] // todo: [AJ] manual TypeInfo impl to express conversion somehow? pub struct StoredPendingChange { /// The block number this was scheduled at. pub scheduled_at: N, @@ -151,7 +151,7 @@ impl Decode for StoredPendingChange { /// Current state of the GRANDPA authority set. State transitions must happen in /// the same order of states defined below, e.g. `Paused` implies a prior /// `PendingPause`. -#[derive(Decode, Encode)] +#[derive(Decode, Encode, scale_info::TypeInfo)] #[cfg_attr(test, derive(Debug, PartialEq))] pub enum StoredState { /// The current authority set is live, and GRANDPA is enabled. diff --git a/frame/merkle-mountain-range/src/lib.rs b/frame/merkle-mountain-range/src/lib.rs index 6992341f6bbd1..fedf9aca3cbbd 100644 --- a/frame/merkle-mountain-range/src/lib.rs +++ b/frame/merkle-mountain-range/src/lib.rs @@ -109,7 +109,7 @@ pub trait Config: frame_system::Config { /// Required to be provided again, to satisfy trait bounds for storage items. type Hash: traits::Member + traits::MaybeSerializeDeserialize + sp_std::fmt::Debug + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + Copy + Default + codec::Codec - + codec::EncodeLike; + + codec::EncodeLike + scale_info::TypeInfo; /// Data stored in the leaf nodes. /// diff --git a/frame/scheduler/src/lib.rs b/frame/scheduler/src/lib.rs index 6f43e71ee9d3d..3207716f9f927 100644 --- a/frame/scheduler/src/lib.rs +++ b/frame/scheduler/src/lib.rs @@ -80,7 +80,8 @@ pub trait Config: system::Config { + From + IsType<::Origin>; /// The caller origin, overarching type of all pallets origins. 
- type PalletsOrigin: From> + Codec + Clone + Eq; + type PalletsOrigin: From> + Codec + Clone + Eq + + scale_info::TypeInfo; /// The aggregated call type. type Call: Parameter + Dispatchable::Origin> + GetDispatchInfo + From>; diff --git a/frame/staking/src/slashing.rs b/frame/staking/src/slashing.rs index 2b2ac61356c47..836626bb41c69 100644 --- a/frame/staking/src/slashing.rs +++ b/frame/staking/src/slashing.rs @@ -69,7 +69,7 @@ const REWARD_F1: Perbill = Perbill::from_percent(50); pub type SpanIndex = u32; // A range of start..end eras for a slashing span. -#[derive(Encode, Decode)] +#[derive(Encode, Decode, scale_info::TypeInfo)] #[cfg_attr(test, derive(Debug, PartialEq))] pub(crate) struct SlashingSpan { pub(crate) index: SpanIndex, @@ -84,7 +84,7 @@ impl SlashingSpan { } /// An encoding of all of a nominator's slashing spans. -#[derive(Encode, Decode, RuntimeDebug)] +#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct SlashingSpans { // the index of the current slashing span of the nominator. different for // every stash, resets when the account hits free balance 0. @@ -174,7 +174,7 @@ impl SlashingSpans { } /// A slashing-span record for a particular stash. -#[derive(Encode, Decode, Default)] +#[derive(Encode, Decode, Default, scale_info::TypeInfo)] pub(crate) struct SpanRecord { slashed: Balance, paid_out: Balance, diff --git a/frame/tips/src/lib.rs b/frame/tips/src/lib.rs index b31468797ce41..b108f664a00d0 100644 --- a/frame/tips/src/lib.rs +++ b/frame/tips/src/lib.rs @@ -106,7 +106,7 @@ pub trait Config: frame_system::Config + pallet_treasury::Config { /// An open tipping "motion". Retains all details of a tip including information on the finder /// and the members who have voted. 
-#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub struct OpenTip< AccountId: Parameter, Balance: Parameter, From e2839ec82d8d42f94f81d10dc2fef11688a4ad6b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Apr 2021 15:43:53 +0100 Subject: [PATCH 131/503] Use branch to test removing compact type bounds --- Cargo.lock | 170 ++++++++++-------- Cargo.toml | 2 +- bin/node-template/pallets/template/Cargo.toml | 2 +- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/atomic-swap/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- frame/democracy/Cargo.toml | 2 +- .../election-provider-multi-phase/Cargo.toml | 2 +- frame/election-provider-support/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/gilt/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/merkle-mountain-range/Cargo.toml | 2 +- frame/metadata/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- frame/node-authorization/Cargo.toml | 2 +- frame/offences/Cargo.toml | 2 +- frame/proxy/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- 
frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 2 +- frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- frame/staking/Cargo.toml | 2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/support/test/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authority-discovery/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/npos-elections/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/session/Cargo.toml | 2 +- primitives/staking/Cargo.toml | 2 +- test-utils/runtime/Cargo.toml | 2 +- 72 files changed, 167 insertions(+), 145 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f714d25ee9b91..42891f8c9f185 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1659,7 +1659,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info)", ] [[package]] @@ -1726,7 +1726,7 @@ dependencies = [ "log", "parity-scale-codec", "paste 1.0.5", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-api", "sp-io", @@ -1765,7 +1765,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1783,7 +1783,7 @@ dependencies = [ "pallet-indices", 
"pallet-transaction-payment", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -1800,7 +1800,7 @@ source = "git+https://github.com/paritytech/frame-metadata?branch=aj-storage-typ dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", ] [[package]] @@ -1808,7 +1808,7 @@ name = "frame-metadata" version = "13.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-std", @@ -1830,7 +1830,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.5", "pretty_assertions", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1885,7 +1885,7 @@ dependencies = [ "parity-scale-codec", "pretty_assertions", "rustversion", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-inherents", @@ -1905,7 +1905,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-externalities", @@ -4244,7 +4244,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "pretty_assertions", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4351,7 +4351,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-api", "sp-authority-discovery", @@ -4431,7 +4431,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-api", "sp-block-builder", @@ -4646,7 +4646,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -4662,7 +4662,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -4681,7 +4681,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-application-crypto", "sp-consensus-aura", @@ -4699,7 +4699,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4718,7 +4718,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-authorship", "sp-core", @@ -4745,7 +4745,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", 
"sp-application-crypto", "sp-consensus-babe", @@ -4768,7 +4768,7 @@ dependencies = [ "log", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -4786,7 +4786,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -4806,7 +4806,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -4836,7 +4836,7 @@ dependencies = [ "pwasm-utils 0.16.0", "rand 0.7.3", "rand_pcg", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -4853,7 +4853,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "sp-runtime", "sp-std", ] @@ -4892,7 +4892,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "sp-api", "sp-runtime", "sp-std", @@ -4909,7 +4909,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -4934,7 +4934,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.5", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", 
"sp-arithmetic", "sp-core", @@ -4956,7 +4956,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -4975,7 +4975,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -4995,7 +4995,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5012,7 +5012,7 @@ dependencies = [ "lite-json", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5028,7 +5028,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5046,7 +5046,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-arithmetic", "sp-core", @@ -5073,7 +5073,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-application-crypto", "sp-core", @@ -5096,7 +5096,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", 
"sp-io", @@ -5115,7 +5115,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-application-crypto", "sp-core", @@ -5134,7 +5134,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5153,7 +5153,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5168,7 +5168,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5188,7 +5188,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5239,7 +5239,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5255,7 +5255,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5271,7 +5271,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ 
-5288,7 +5288,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5333,7 +5333,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5349,7 +5349,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5366,7 +5366,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5383,7 +5383,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5400,7 +5400,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5418,7 +5418,7 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-application-crypto", "sp-core", @@ -5463,7 +5463,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5490,7 +5490,7 @@ 
dependencies = [ "parking_lot 0.11.1", "paste 1.0.5", "rand_chacha 0.2.2", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-application-crypto", "sp-core", @@ -5530,7 +5530,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5546,7 +5546,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5563,7 +5563,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-inherents", @@ -5583,7 +5583,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5600,7 +5600,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "serde_json", "smallvec 1.6.1", @@ -5647,7 +5647,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -5665,7 +5665,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ 
-5684,7 +5684,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -6234,7 +6234,7 @@ dependencies = [ "impl-codec", "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", ] @@ -8171,6 +8171,17 @@ dependencies = [ "wasm-timer", ] +[[package]] +name = "scale-info" +version = "0.6.0" +source = "git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds#53d0d64b26a23ed99700646d98e31e3d7780cc02" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive 0.4.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", +] + [[package]] name = "scale-info" version = "0.6.0" @@ -8179,7 +8190,18 @@ dependencies = [ "cfg-if 1.0.0", "derive_more", "parity-scale-codec", - "scale-info-derive", + "scale-info-derive 0.4.0 (git+https://github.com/paritytech/scale-info)", +] + +[[package]] +name = "scale-info-derive" +version = "0.4.0" +source = "git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds#53d0d64b26a23ed99700646d98e31e3d7780cc02" +dependencies = [ + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -8647,7 +8669,7 @@ name = "sp-application-crypto" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-core", "sp-io", @@ -8676,7 +8698,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "serde_json", "sp-debug-derive", @@ -8700,7 +8722,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8785,7 +8807,7 @@ name = "sp-consensus-aura" version = "0.9.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8802,7 +8824,7 @@ version = "0.9.0" dependencies = [ "merlin", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-api", "sp-application-crypto", @@ -8833,7 +8855,7 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "sp-arithmetic", "sp-runtime", ] @@ -8878,7 +8900,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "schnorrkel", "secrecy", "serde", @@ -8933,7 +8955,7 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-api", "sp-application-crypto", @@ -9019,7 +9041,7 @@ version = "3.0.0" dependencies = [ "parity-scale-codec", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-arithmetic", "sp-core", @@ -9095,7 +9117,7 @@ dependencies = [ 
"parity-util-mem", "paste 1.0.5", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "serde_json", "sp-api", @@ -9207,7 +9229,7 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "sp-api", "sp-core", "sp-runtime", @@ -9220,7 +9242,7 @@ name = "sp-staking" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "sp-runtime", "sp-std", ] @@ -9641,7 +9663,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", "serde", "sp-api", "sp-application-crypto", diff --git a/Cargo.toml b/Cargo.toml index d5089f5f4e25a..39719516106d5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -273,5 +273,5 @@ panic = "unwind" [patch.crates-io] parity-util-mem = { git = "https://github.com/paritytech/parity-common" } primitive-types = { git = "https://github.com/paritytech/parity-common" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master" } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds" } finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index cf78129c36046..61d580dcab679 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = 
["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { default-features = false, version = "3.0.0", path = "../../../../frame/support" } frame-system = { default-features = false, version = "3.0.0", path = "../../../../frame/system" } frame-benchmarking = { default-features = false, version = "3.1.0", path = "../../../../frame/benchmarking", optional = true } diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index 672c7759b042c..d8664e0e51419 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } pallet-aura = { version = "3.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "3.0.0", default-features = false, path = "../../../frame/balances" } frame-support = { version = "3.0.0", default-features = false, path = "../../../frame/support" } diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index 5981c44f8042e..6d8fec9024d38 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } 
-scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-system = { version = "3.0.0", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = "../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 7f2f0f6f8906e..2c2c71cae7249 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = true } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index 2c0d49ff0dd8c..2a333f5de86c8 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = 
"aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index 25314c40805db..01700bbd37b90 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index 5320090cb8f8a..7b6ef22d760b3 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = 
false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } pallet-session = { version = "3.0.0", default-features = false, path = "../session" } diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index dc1d0a8694fc9..7587aab79e064 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-authority-discovery = { version = "3.0.0", default-features = false, path = "../../primitives/authority-discovery" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } pallet-session = { version = "3.0.0", features = ["historical" ], path = "../session", default-features = false } diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index 67dac4d845f03..0233e2a934797 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = 
"aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-authorship = { version = "3.0.0", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index 3adb29fd9bfce..7a7326a50ad80 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index fb22d04232e13..32eb3f0ee7406 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { 
version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index 80087eefa91b5..3b85391ab0636 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.0", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-api = { version = "3.0.0", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "3.0.0", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "3.0.0", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index 356c285657964..85ff03d784af3 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } 
sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index cf1c0a165b2d7..a8c6dd9d2e80e 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index 43d90f1484955..74bcbe5d8e1ab 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = 
true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index 1d745f1f90e03..f853bbb031c41 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] # This crate should not rely on any of the frame primitives. bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index df5ef91e5a6a0..9cb33f2e10727 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-api = { version = "3.0.0", default-features = false, path = "../../../../primitives/api" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = 
"../../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../../primitives/runtime" } pallet-contracts-primitives = { version = "3.0.0", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index 98e8ffb67eb06..7b7f120674e10 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index efd504997c40a..9cbd2e9970750 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, 
features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/election-provider-support/Cargo.toml b/frame/election-provider-support/Cargo.toml index bd343d073b36f..3e94e0b7a058f 100644 --- a/frame/election-provider-support/Cargo.toml +++ b/frame/election-provider-support/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index 6be8f7fbe7b75..c30910e4fcc3a 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = 
"../../primitives/runtime" } sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 0675f919e4d7a..432877ef6643d 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index 6791c4b2f81f3..b0fc75b576f41 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } serde = { version = "1.0.101", optional 
= true } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index fe7da1805efae..350d3894e26cb 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 5cedfc62564ee..2a7512441854e 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index deb9ce637d877..bcdfc08df7a0b 100644 --- a/frame/executive/Cargo.toml +++ 
b/frame/executive/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } serde = { version = "1.0.101", optional = true } diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index d8b308d76ff61..84bb57de547e0 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index b15a42da20de1..dae1297a0c29e 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = 
["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "3.0.0", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 91aa0a59b0365..719d889d3947e 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index 2528e9f8e0850..04f05fc45909a 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } 
pallet-authorship = { version = "3.0.0", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index b7fa9eee82eb1..d7c4d7ea84d1e 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index ef1746a8d958d..676b5aac1c3dc 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = 
false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 3bcff87279ee9..ee15dd984df61 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index 5d0615c46e0eb..dfba3af948819 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = 
"master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml index fdf9c01103185..8cd8d800c466e 100644 --- a/frame/metadata/Cargo.toml +++ b/frame/metadata/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index 5d142bd31ecf1..ba98138de59fa 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = 
"aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 0aae8fac3d6d4..0528c1e8e4b7e 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index 817c8c03d1ac1..215fcaf100de8 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", 
default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index dc8b9d62cca9f..6c22575eee5ff 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index 5d1278651ee89..c73e7d0ffedff 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = 
false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index acb2af94ded4b..60d7e7252b673 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index 46a5367c8a175..a43bcf4de4504 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = 
"3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index 3112be3126e77..cc573a68a623f 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -12,7 +12,7 @@ readme = "README.md" [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 86b80d1909402..7637277289b19 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git 
a/frame/session/Cargo.toml b/frame/session/Cargo.toml index e378138ed794d..113c57164b9ee 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index 33f3542694e37..16bcba753f8ed 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index 
3d5c3a11f12ef..957b0ab373450 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io ={ version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index 00d2f48d0fb71..8b5c7815119d2 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 3bf253c449b1b..3fa5872d33dbb 100644 --- a/frame/support/Cargo.toml +++ 
b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-metadata = { version = "13.0.0", default-features = false, path = "../metadata" } frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-storage-types", default-features = false, features = ["v13"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index e827ac4b3ec2d..5118069584bb1 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-io = { version = "3.0.0", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.9.0", optional = true, path = "../../../primitives/state-machine" } frame-support = { version = "3.0.0", default-features = false, path = "../" } diff --git a/frame/system/Cargo.toml 
b/frame/system/Cargo.toml index 5a31816d13281..de2d2c91bd278 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 688ca856f54a6..0db616d160258 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index 
b0b5dced9a0b2..53aa0f3ce06f0 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 97f82cafb4f0e..c22c36586cdfd 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index 3f39da86b290c..7d6e14815ef44 100644 --- a/frame/treasury/Cargo.toml +++ 
b/frame/treasury/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index f91804bdfc7cd..ccd2740361139 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index 6cdece3e8036c..6e341fb2fc2b3 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] 
[dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index cdb0f6e4d79d1..52f9281e577e5 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "3.0.0", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-io = { version = "3.0.0", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index b4b64d36e931a..a6d1621fb5df1 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = 
"parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" static_assertions = "1.1.0" num-traits = { version = "0.2.8", default-features = false } diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index 9596a00b6ff62..79a099d8db453 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-api = { version = "3.0.0", default-features = false, path = "../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index d326b6e6dd5e1..38a8c1ab58a4e 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = 
false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = { version = "3.0.0", default-features = false, path = "../../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index 4a6d8bf1179b5..63fa7795281ef 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = { version = "3.0.0", default-features = false, path = "../../api" } diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 263572721839c..8774ff1c40e8c 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", 
branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../arithmetic" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 1375bfd1abb48..92deb6e50791a 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "3.0.0", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 28c52bbe75f1e..f41c0d12012f9 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, 
features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.0", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index bdb6a4d707b3f..7cec778d4b741 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-npos-elections-compact = { version = "3.0.0", path = "./compact" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 2c8ab4e7a4758..19931f8b342f0 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../core" } 
sp-application-crypto = { version = "3.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../arithmetic" } diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index ff782c925e4a0..9218e44d243a7 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-api = { version = "3.0.0", default-features = false, path = "../api" } sp-core = { version = "3.0.0", default-features = false, path = "../core" } sp-std = { version = "3.0.0", default-features = false, path = "../std" } diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index b01de2588670d..0a1544028e4e5 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../std" } diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 
77580e154c85f..bcd9b0c0b456c 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.9.0", default-features = false, path = "../.. sp-consensus-babe = { version = "0.9.0", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "3.0.0", default-features = false, path = "../../primitives/block-builder" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "master", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.26.0", default-features = false } From ddb6114a64be0b7d5b1cd48cdc321490c97fd9da Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Apr 2021 15:49:38 +0100 Subject: [PATCH 132/503] Add Balance TypeInfo bound --- frame/transaction-payment/src/payment.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/transaction-payment/src/payment.rs b/frame/transaction-payment/src/payment.rs index 7292ef4dfee7e..f1db7bd4e1062 100644 --- a/frame/transaction-payment/src/payment.rs +++ b/frame/transaction-payment/src/payment.rs @@ -17,7 +17,8 @@ type NegativeImbalanceOf = /// Handle withdrawing, refunding and depositing of transaction fees. pub trait OnChargeTransaction { /// The underlying integer type in which fees are calculated. 
- type Balance: AtLeast32BitUnsigned + FullCodec + Copy + MaybeSerializeDeserialize + Debug + Default; + type Balance: AtLeast32BitUnsigned + FullCodec + Copy + MaybeSerializeDeserialize + Debug + Default + + scale_info::TypeInfo; type LiquidityInfo: Default; /// Before the transaction is executed the payment of the transaction fees From 7da3bb82351b2f45f12a06b5be64c87f256a9cc8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Apr 2021 15:52:52 +0100 Subject: [PATCH 133/503] Remove some HasCompact::Type bounds to test scale-info change --- frame/indices/src/lib.rs | 5 +---- frame/system/src/extensions/check_nonce.rs | 3 +-- frame/transaction-payment/src/lib.rs | 1 - 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index 1c838e483087a..19697f2d941bb 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -335,10 +335,7 @@ impl Pallet { } } -impl StaticLookup for Pallet -where - <::AccountIndex as codec::HasCompact>::Type: scale_info::TypeInfo, -{ +impl StaticLookup for Pallet { type Source = MultiAddress; type Target = T::AccountId; diff --git a/frame/system/src/extensions/check_nonce.rs b/frame/system/src/extensions/check_nonce.rs index 88bd64d166f1b..c0065cebb27a0 100644 --- a/frame/system/src/extensions/check_nonce.rs +++ b/frame/system/src/extensions/check_nonce.rs @@ -55,8 +55,7 @@ impl sp_std::fmt::Debug for CheckNonce { } impl SignedExtension for CheckNonce where - T::Call: Dispatchable, - <::Index as codec::HasCompact>::Type: TypeInfo, // todo: [AJ] this is a result of the derived compact TypeInfo impl, can we get rid of it? 
+ T::Call: Dispatchable { type AccountId = T::AccountId; type Call = T::Call; diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index 0917c3c59f566..80bb9e179bd97 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -539,7 +539,6 @@ impl sp_std::fmt::Debug for ChargeTransactionPayment { impl SignedExtension for ChargeTransactionPayment where BalanceOf: Send + Sync + From + FixedPointOperand + TypeInfo, T::Call: Dispatchable, - <<::OnChargeTransaction as payment::OnChargeTransaction>::Balance as codec::HasCompact>::Type: TypeInfo, // todo: [AJ] this is a result of the derived compact TypeInfo impl, can we get rid of it? { const IDENTIFIER: &'static str = "ChargeTransactionPayment"; type AccountId = T::AccountId; From a39f3ac5d17bad6d09deaa2aa4c012948add94cf Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Apr 2021 17:01:46 +0100 Subject: [PATCH 134/503] Remaining TypeInfo bounds? --- Cargo.lock | 172 ++++++++++++++++-------------------- frame/collective/src/lib.rs | 2 +- frame/support/src/origin.rs | 2 +- frame/system/src/lib.rs | 2 +- 4 files changed, 78 insertions(+), 100 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 42891f8c9f185..357a6077799fc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1649,7 +1649,7 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.0" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#3d7babf1e43ab630189c2488f3e99c075f0944f4" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#d30be8b4b31be144e2d0199b9c9ac1a0db6e05c9" dependencies = [ "either", "futures 0.3.13", @@ -1659,7 +1659,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info)", + "scale-info", ] [[package]] @@ -1726,7 +1726,7 @@ dependencies = [ "log", "parity-scale-codec", "paste 1.0.5", - "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-api", "sp-io", @@ -1765,7 +1765,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1783,7 +1783,7 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -1800,7 +1800,7 @@ source = "git+https://github.com/paritytech/frame-metadata?branch=aj-storage-typ dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", ] [[package]] @@ -1808,7 +1808,7 @@ name = "frame-metadata" version = "13.0.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-std", @@ -1830,7 +1830,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.5", "pretty_assertions", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1885,7 +1885,7 @@ dependencies = [ "parity-scale-codec", "pretty_assertions", "rustversion", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -1905,7 +1905,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -4244,7 +4244,7 @@ 
dependencies = [ "frame-system", "parity-scale-codec", "pretty_assertions", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4351,7 +4351,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4431,7 +4431,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4646,7 +4646,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4662,7 +4662,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4681,7 +4681,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-aura", @@ -4699,7 +4699,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4718,7 +4718,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-authorship", "sp-core", @@ -4745,7 +4745,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4768,7 +4768,7 @@ dependencies = [ "log", "pallet-transaction-payment", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4786,7 +4786,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4806,7 +4806,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4836,7 +4836,7 @@ dependencies = [ "pwasm-utils 0.16.0", "rand 0.7.3", "rand_pcg", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4853,7 +4853,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "sp-runtime", "sp-std", ] @@ -4892,7 +4892,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4909,7 +4909,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", 
"parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4934,7 +4934,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.5", "rand 0.7.3", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -4956,7 +4956,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4975,7 +4975,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4995,7 +4995,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5012,7 +5012,7 @@ dependencies = [ "lite-json", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5028,7 +5028,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5046,7 +5046,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -5073,7 +5073,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - 
"scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5096,7 +5096,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5115,7 +5115,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5134,7 +5134,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5153,7 +5153,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5168,7 +5168,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5188,7 +5188,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5239,7 +5239,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5255,7 +5255,7 @@ dependencies = [ "frame-system", "pallet-balances", 
"parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5271,7 +5271,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5288,7 +5288,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5333,7 +5333,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5349,7 +5349,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5366,7 +5366,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5383,7 +5383,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5400,7 +5400,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5418,7 +5418,7 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5463,7 +5463,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5490,7 +5490,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.5", "rand_chacha 0.2.2", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5530,7 +5530,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5546,7 +5546,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5563,7 +5563,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5583,7 +5583,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5600,7 +5600,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5647,7 +5647,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", 
"parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5665,7 +5665,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5684,7 +5684,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -6234,7 +6234,7 @@ dependencies = [ "impl-codec", "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", ] @@ -8179,18 +8179,7 @@ dependencies = [ "cfg-if 1.0.0", "derive_more", "parity-scale-codec", - "scale-info-derive 0.4.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", -] - -[[package]] -name = "scale-info" -version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=master#a02b1516a8657c850f1c97e73f7402eb1304af62" -dependencies = [ - "cfg-if 1.0.0", - "derive_more", - "parity-scale-codec", - "scale-info-derive 0.4.0 (git+https://github.com/paritytech/scale-info)", + "scale-info-derive", ] [[package]] @@ -8204,17 +8193,6 @@ dependencies = [ "syn", ] -[[package]] -name = "scale-info-derive" -version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=master#a02b1516a8657c850f1c97e73f7402eb1304af62" -dependencies = [ - "proc-macro-crate 1.0.0", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "schannel" version = "0.1.19" @@ -8669,7 +8647,7 @@ name = "sp-application-crypto" version = "3.0.0" 
dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-core", "sp-io", @@ -8698,7 +8676,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8722,7 +8700,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8807,7 +8785,7 @@ name = "sp-consensus-aura" version = "0.9.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8824,7 +8802,7 @@ version = "0.9.0" dependencies = [ "merlin", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8855,7 +8833,7 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8900,7 +8878,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -8955,7 +8933,7 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-api", 
"sp-application-crypto", @@ -9041,7 +9019,7 @@ version = "3.0.0" dependencies = [ "parity-scale-codec", "rand 0.7.3", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -9117,7 +9095,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.5", "rand 0.7.3", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "serde_json", "sp-api", @@ -9229,7 +9207,7 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9242,7 +9220,7 @@ name = "sp-staking" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "sp-runtime", "sp-std", ] @@ -9663,7 +9641,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", "serde", "sp-api", "sp-application-crypto", diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index 5c3fa5ff04653..2d80cd4f74888 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -155,7 +155,7 @@ pub trait Config: frame_system::Config { } /// Origin for the collective module. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub enum RawOrigin { /// It has been condoned by a given number of members of the collective from a given total. 
Members(MemberCount, MemberCount), diff --git a/frame/support/src/origin.rs b/frame/support/src/origin.rs index 6dd38eb1b2ab4..a5a528288c788 100644 --- a/frame/support/src/origin.rs +++ b/frame/support/src/origin.rs @@ -271,7 +271,7 @@ macro_rules! impl_outer_origin { } $crate::paste::item! { - #[derive(Clone, PartialEq, Eq, $crate::RuntimeDebug, $crate::codec::Encode, $crate::codec::Decode)] + #[derive(Clone, PartialEq, Eq, $crate::RuntimeDebug, $crate::codec::Encode, $crate::codec::Decode, $crate::scale_info::TypeInfo)] $(#[$attr])* #[allow(non_camel_case_types)] pub enum $caller_name { diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 50d78748010af..da8e1614ca63a 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -744,7 +744,7 @@ pub struct EventRecord { } /// Origin for the System pallet. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub enum RawOrigin { /// The system itself ordained this dispatch to happen: this is the highest privilege level. 
Root, From 925c94ecc5ed303f8626deb8f20f8090a7eb6525 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Apr 2021 10:11:40 +0100 Subject: [PATCH 135/503] Use main branch of frame-metadata --- Cargo.lock | 2 +- frame/support/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 357a6077799fc..dd22c6b2dadaa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1796,7 +1796,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-storage-types#477d5e2c8f348c9cf67790423d3a1c6af2943c9b" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#86bf2b374c9aa7fee82ad19df8b5569b0d1b8c58" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 3fa5872d33dbb..cb60559262420 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } frame-metadata = { version = "13.0.0", default-features = false, path = "../metadata" } -frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-storage-types", default-features = false, features = ["v13"] } +frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v13"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = 
"../../primitives/runtime" } From bf0ae4acfd394e66222eaa76b7307094d783db88 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Apr 2021 12:17:50 +0100 Subject: [PATCH 136/503] Use scale-info aj-substrate branch --- Cargo.lock | 171 ++++++++++-------- Cargo.toml | 2 +- bin/node-template/pallets/template/Cargo.toml | 2 +- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/atomic-swap/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- frame/democracy/Cargo.toml | 2 +- .../election-provider-multi-phase/Cargo.toml | 2 +- frame/election-provider-support/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/gilt/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/merkle-mountain-range/Cargo.toml | 2 +- frame/metadata/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- frame/node-authorization/Cargo.toml | 2 +- frame/offences/Cargo.toml | 2 +- frame/proxy/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 2 +- frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- frame/staking/Cargo.toml | 
2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- .../procedural/src/pallet/expand/constants.rs | 22 +-- frame/support/src/dispatch.rs | 6 +- frame/support/src/metadata.rs | 8 +- frame/support/src/weights.rs | 2 +- frame/support/test/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/system/src/limits.rs | 4 +- frame/timestamp/Cargo.toml | 2 +- frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authority-discovery/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/npos-elections/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/session/Cargo.toml | 2 +- primitives/staking/Cargo.toml | 2 +- primitives/version/Cargo.toml | 1 + primitives/version/src/lib.rs | 2 +- test-utils/runtime/Cargo.toml | 2 +- 79 files changed, 186 insertions(+), 172 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dd22c6b2dadaa..29d4949384d85 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1659,7 +1659,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", ] [[package]] @@ -1726,7 +1726,7 @@ dependencies = [ "log", "parity-scale-codec", "paste 1.0.5", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-api", "sp-io", @@ -1765,7 +1765,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "sp-arithmetic", 
"sp-npos-elections", "sp-runtime", @@ -1783,7 +1783,7 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -1800,7 +1800,7 @@ source = "git+https://github.com/paritytech/frame-metadata?branch=main#86bf2b374 dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", ] [[package]] @@ -1808,7 +1808,7 @@ name = "frame-metadata" version = "13.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-std", @@ -1830,7 +1830,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.5", "pretty_assertions", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1885,7 +1885,7 @@ dependencies = [ "parity-scale-codec", "pretty_assertions", "rustversion", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-inherents", @@ -1905,7 +1905,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-externalities", @@ -4244,7 +4244,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "pretty_assertions", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4351,7 +4351,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-api", 
"sp-authority-discovery", @@ -4431,7 +4431,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-api", "sp-block-builder", @@ -4646,7 +4646,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -4662,7 +4662,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -4681,7 +4681,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-application-crypto", "sp-consensus-aura", @@ -4699,7 +4699,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4718,7 +4718,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-authorship", "sp-core", @@ -4745,7 +4745,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4768,7 +4768,7 @@ dependencies = [ "log", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -4786,7 +4786,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -4806,7 +4806,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -4836,7 +4836,7 @@ dependencies = [ "pwasm-utils 0.16.0", "rand 0.7.3", "rand_pcg", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -4853,7 +4853,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "sp-runtime", "sp-std", ] @@ -4892,7 +4892,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "sp-api", "sp-runtime", "sp-std", @@ -4909,7 +4909,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -4934,7 +4934,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.5", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-arithmetic", "sp-core", @@ -4956,7 +4956,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -4975,7 +4975,7 @@ dependencies = [ "log", 
"pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -4995,7 +4995,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5012,7 +5012,7 @@ dependencies = [ "lite-json", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5028,7 +5028,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5046,7 +5046,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-arithmetic", "sp-core", @@ -5073,7 +5073,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-application-crypto", "sp-core", @@ -5096,7 +5096,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5115,7 +5115,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-application-crypto", "sp-core", @@ -5134,7 +5134,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5153,7 +5153,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5168,7 +5168,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5188,7 +5188,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5239,7 +5239,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5255,7 +5255,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5271,7 +5271,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5288,7 +5288,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5333,7 +5333,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5349,7 +5349,7 @@ dependencies = [ "frame-system", "parity-scale-codec", 
"safe-mix", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5366,7 +5366,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5383,7 +5383,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5400,7 +5400,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5418,7 +5418,7 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-application-crypto", "sp-core", @@ -5463,7 +5463,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5490,7 +5490,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.5", "rand_chacha 0.2.2", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-application-crypto", "sp-core", @@ -5530,7 +5530,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5546,7 +5546,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", 
"sp-io", @@ -5563,7 +5563,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-inherents", @@ -5583,7 +5583,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5600,7 +5600,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "serde_json", "smallvec 1.6.1", @@ -5647,7 +5647,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5665,7 +5665,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -5684,7 +5684,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -6234,7 +6234,7 @@ dependencies = [ "impl-codec", "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", ] @@ -8179,7 +8179,18 @@ dependencies = [ "cfg-if 1.0.0", "derive_more", "parity-scale-codec", - "scale-info-derive", + "scale-info-derive 0.4.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", +] + +[[package]] +name = "scale-info" 
+version = "0.6.0" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#7809ad7d09fb6af3164507c8d6136f25badce43a" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive 0.4.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", ] [[package]] @@ -8193,6 +8204,17 @@ dependencies = [ "syn", ] +[[package]] +name = "scale-info-derive" +version = "0.4.0" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#7809ad7d09fb6af3164507c8d6136f25badce43a" +dependencies = [ + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" @@ -8647,7 +8669,7 @@ name = "sp-application-crypto" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-core", "sp-io", @@ -8676,7 +8698,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "serde_json", "sp-debug-derive", @@ -8700,7 +8722,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8785,7 +8807,7 @@ name = "sp-consensus-aura" version = "0.9.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8802,7 +8824,7 @@ version = "0.9.0" dependencies = [ "merlin", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-api", "sp-application-crypto", @@ -8833,7 +8855,7 @@ name = "sp-consensus-slots" version = 
"0.9.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "sp-arithmetic", "sp-runtime", ] @@ -8878,7 +8900,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "schnorrkel", "secrecy", "serde", @@ -8933,7 +8955,7 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-api", "sp-application-crypto", @@ -9019,7 +9041,7 @@ version = "3.0.0" dependencies = [ "parity-scale-codec", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-arithmetic", "sp-core", @@ -9095,7 +9117,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.5", "rand 0.7.3", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "serde_json", "sp-api", @@ -9207,7 +9229,7 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "sp-api", "sp-core", "sp-runtime", @@ -9220,7 +9242,7 @@ name = "sp-staking" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "sp-runtime", "sp-std", ] @@ -9364,6 +9386,7 @@ version = "3.0.0" dependencies = [ "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec", + "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-runtime", "sp-std", @@ -9641,7 +9664,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info", + "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-substrate)", "serde", "sp-api", "sp-application-crypto", diff --git a/Cargo.toml b/Cargo.toml index 39719516106d5..e536ace8e6214 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -273,5 +273,5 @@ panic = "unwind" [patch.crates-io] parity-util-mem = { git = "https://github.com/paritytech/parity-common" } primitive-types = { git = "https://github.com/paritytech/parity-common" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds" } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate" } finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index 61d580dcab679..b28cf1cbf3242 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { default-features = false, version = "3.0.0", path = "../../../../frame/support" } frame-system = { default-features = false, version = "3.0.0", path = "../../../../frame/system" } frame-benchmarking = { default-features = false, version = "3.1.0", path = "../../../../frame/benchmarking", optional = true } diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index d8664e0e51419..ebf6fa805bdc0 100644 --- a/bin/node-template/runtime/Cargo.toml +++ 
b/bin/node-template/runtime/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } pallet-aura = { version = "3.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "3.0.0", default-features = false, path = "../../../frame/balances" } frame-support = { version = "3.0.0", default-features = false, path = "../../../frame/support" } diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index 6d8fec9024d38..00dfc861bea2c 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-system = { version = "3.0.0", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = "../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 2c2c71cae7249..ad1097ef76971 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ 
targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } serde = { version = "1.0.102", optional = true } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index 2a333f5de86c8..bd60e15b70908 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. 
sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index 01700bbd37b90..81c4ce7918651 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index 7b6ef22d760b3..cf9353531d86e 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } pallet-session = { version = "3.0.0", 
default-features = false, path = "../session" } diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index 7587aab79e064..7aa079e85e119 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-authority-discovery = { version = "3.0.0", default-features = false, path = "../../primitives/authority-discovery" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } pallet-session = { version = "3.0.0", features = ["historical" ], path = "../session", default-features = false } diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index 0233e2a934797..43189054f1c2f 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-authorship = 
{ version = "3.0.0", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index 7a7326a50ad80..20555b55080a7 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 32eb3f0ee7406..c25067e42e4cf 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking 
= { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index 3b85391ab0636..9a84ed581a2a0 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.0", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-api = { version = "3.0.0", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "3.0.0", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "3.0.0", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index 85ff03d784af3..ff89636241d7a 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = 
"../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index a8c6dd9d2e80e..b216ccdf539cd 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index 74bcbe5d8e1ab..a83883e33172c 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features 
= false, path = "../system" } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index f853bbb031c41..3017c42be6112 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] # This crate should not rely on any of the frame primitives. bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index 9cb33f2e10727..473c93616cec8 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-api = { version = "3.0.0", default-features = false, path = "../../../../primitives/api" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../../primitives/runtime" } 
pallet-contracts-primitives = { version = "3.0.0", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index 7b7f120674e10..b3461f9b640d0 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index 9cbd2e9970750..1b6da38985f8b 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "3.0.0", 
default-features = false, path = "../support" } diff --git a/frame/election-provider-support/Cargo.toml b/frame/election-provider-support/Cargo.toml index 3e94e0b7a058f..792d00604ed2a 100644 --- a/frame/election-provider-support/Cargo.toml +++ b/frame/election-provider-support/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index c30910e4fcc3a..fcb3803914f60 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "3.0.0", default-features = false, path = 
"../../primitives/npos-elections" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 432877ef6643d..59609f7ace81a 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index b0fc75b576f41..5a0ddcdc6b443 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } serde = { version = "1.0.101", optional = true } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml 
index 350d3894e26cb..0dab42f2514a2 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 2a7512441854e..7d9c64c5016e4 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index bcdfc08df7a0b..8447830e0b815 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -14,7 +14,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } serde = { version = "1.0.101", optional = true } diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index 84bb57de547e0..78f4b942115c4 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index dae1297a0c29e..6feb118c03931 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", 
version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "3.0.0", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 719d889d3947e..3506f48ec9f5d 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index 04f05fc45909a..821a8e5589ad3 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "3.0.0", 
default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index d7c4d7ea84d1e..58160ccdef981 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index 676b5aac1c3dc..7a31611037dcd 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] 
} -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index ee15dd984df61..eddb9b2354071 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index dfba3af948819..5f3b9297803fd 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = 
"aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml index 8cd8d800c466e..9232057e58874 100644 --- a/frame/metadata/Cargo.toml +++ b/frame/metadata/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index ba98138de59fa..4799036b2e0ca 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = 
"https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 0528c1e8e4b7e..63c8ab32c43af 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index 215fcaf100de8..91699d0a89e68 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = 
"https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index 6c22575eee5ff..10acdb0b15a9b 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index c73e7d0ffedff..3590cf50fea97 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, 
features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index 60d7e7252b673..d850b8677cea3 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index a43bcf4de4504..4b9d29aad147d 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", 
default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index cc573a68a623f..eef38e09174dc 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -12,7 +12,7 @@ readme = "README.md" [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 7637277289b19..0588e4b76cf05 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } 
sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index 113c57164b9ee..e3c831f4586b2 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index 16bcba753f8ed..9076b8405e8a4 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } frame-support = { version = "3.0.0", default-features = 
false, path = "../support" } diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index 957b0ab373450..3fdc59b16a86f 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io ={ version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index 8b5c7815119d2..ade25decfd17f 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git 
a/frame/support/Cargo.toml b/frame/support/Cargo.toml index cb60559262420..a1b19f74c3f6f 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-metadata = { version = "13.0.0", default-features = false, path = "../metadata" } frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v13"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index e5acf42270aa7..a8f21ee58660b 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -16,7 +16,6 @@ // limitations under the License. 
use crate::pallet::Def; -use frame_support_procedural_tools::clean_type_string; use quote::ToTokens; struct ConstDef { @@ -74,7 +73,6 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { let consts = config_consts.chain(extra_consts) .map(|const_| { let const_type = &const_.type_; - let const_type_str = clean_type_string(&const_type.to_token_stream().to_string()); let ident = &const_.ident; let ident_str = format!("{}", ident); let doc = const_.doc.clone().into_iter(); @@ -108,18 +106,14 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { {} #frame_support::dispatch::ModuleConstantMetadata { - name: #frame_support::dispatch::DecodeDifferent::Encode(#ident_str), - ty: #frame_support::dispatch::DecodeDifferent::Encode(#const_type_str), - value: #frame_support::dispatch::DecodeDifferent::Encode( - #frame_support::dispatch::DefaultByteGetter( - &#default_byte_getter::<#type_use_gen>( - #frame_support::sp_std::marker::PhantomData - ) + name: #ident_str, + ty: #frame_support::scale_info::meta_type::<#const_type>(), + value: #frame_support::dispatch::DefaultByteGetter( + &#default_byte_getter::<#type_use_gen>( + #frame_support::sp_std::marker::PhantomData ) ), - documentation: #frame_support::dispatch::DecodeDifferent::Encode( - &[ #( #doc ),* ] - ), + documentation: #frame_support::scale_info::prelude::vec![ #( #doc ),* ], } }) }); @@ -129,9 +123,9 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { #[doc(hidden)] pub fn module_constants_metadata() - -> &'static [#frame_support::dispatch::ModuleConstantMetadata] + -> #frame_support::sp_std::vec::Vec<#frame_support::dispatch::ModuleConstantMetadata> { - &[ #( #consts ),* ] + #frame_support::scale_info::prelude::vec![ #( #consts ),* ] } } ) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 30f6301bf4f45..3b0ff3e4222d0 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -21,8 +21,10 @@ pub use 
crate::sp_std::{result, fmt, prelude::{Vec, Clone, Eq, PartialEq}, marker}; pub use crate::codec::{Codec, EncodeLike, Decode, Encode, Input, Output, HasCompact, EncodeAsRef}; pub use frame_metadata::{ - FunctionMetadata, DecodeDifferent, DecodeDifferentArray, FunctionArgumentMetadata, - ModuleConstantMetadata, DefaultByte, DefaultByteGetter, ModuleErrorMetadata, ErrorMetadata + DefaultByte, DefaultByteGetter, ModuleErrorMetadata, ErrorMetadata +}; +pub use frame_metadata2::v13::{ + FunctionMetadata, FunctionArgumentMetadata, ModuleConstantMetadata, }; pub use crate::weights::{ GetDispatchInfo, DispatchInfo, WeighData, ClassifyDispatch, TransactionPriority, Weight, diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index e7c697b0bf19b..3d1a3faa569cc 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -126,13 +126,7 @@ macro_rules! __runtime_modules_to_metadata { event: $crate::__runtime_modules_to_metadata_calls_event!( $mod, $module $( <$instance> )?, $runtime, $(with $kw)* ), - // todo: [AJ] constants - constants: None, - // constants: $crate::metadata::DecodeDifferent::Encode( - // $crate::metadata::FnEncode( - // $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata - // ) - // ), + constants: $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata(), // todo: [AJ] errors errors: vec![], // errors: $crate::metadata::DecodeDifferent::Encode( diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 87874d4a30e52..2bf0b5a420a4c 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -623,7 +623,7 @@ impl GetDispatchInfo for sp_runtime::testing::TestX } /// The weight of database operations that the runtime can invoke. 
-#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode)] +#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub struct RuntimeDbWeight { pub read: Weight, pub write: Weight, diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 5118069584bb1..38e8e87d82805 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-io = { version = "3.0.0", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.9.0", optional = true, path = "../../../primitives/state-machine" } frame-support = { version = "3.0.0", default-features = false, path = "../" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index de2d2c91bd278..dae558701f796 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-core = { 
version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/system/src/limits.rs b/frame/system/src/limits.rs index 49a458224020c..6716b2a8a2b51 100644 --- a/frame/system/src/limits.rs +++ b/frame/system/src/limits.rs @@ -29,7 +29,7 @@ use frame_support::weights::{Weight, DispatchClass, constants, PerDispatchClass, use sp_runtime::{RuntimeDebug, Perbill}; /// Block length limit configuration. -#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode)] +#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, scale_info::TypeInfo)] pub struct BlockLength { /// Maximal total length in bytes for each extrinsic class. /// @@ -194,7 +194,7 @@ pub struct WeightsPerClass { /// /// As a consequence of `reserved` space, total consumed block weight might exceed `max_block` /// value, so this parameter should rather be thought of as "target block weight" than a hard limit. -#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode)] +#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, scale_info::TypeInfo)] pub struct BlockWeights { /// Base weight of block execution. 
pub base_block: Weight, diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 0db616d160258..9c810f15409e9 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index 53aa0f3ce06f0..c66439de753e5 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } 
diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index c22c36586cdfd..467df50d0db02 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index 7d6e14815ef44..b73f81f1e381e 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index 
ccd2740361139..5b50de5b0476d 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index 6e341fb2fc2b3..51216769e138d 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index 52f9281e577e5..b568c069deef1 100644 --- a/primitives/application-crypto/Cargo.toml +++ 
b/primitives/application-crypto/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "3.0.0", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-io = { version = "3.0.0", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index a6d1621fb5df1..31e9cee01a2e1 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" static_assertions = "1.1.0" num-traits = { version = "0.2.8", default-features = false } diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index 79a099d8db453..23cfcdd78bdae 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path 
= "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-api = { version = "3.0.0", default-features = false, path = "../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index 38a8c1ab58a4e..98d27dc34a4ce 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = { version = "3.0.0", default-features = false, path = "../../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index 63fa7795281ef..800fed5261877 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = 
"3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = { version = "3.0.0", default-features = false, path = "../../api" } diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 8774ff1c40e8c..91548875fa239 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../arithmetic" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 92deb6e50791a..06f1a61cc9588 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "3.0.0", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git 
= "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index f41c0d12012f9..aaf95b52a82a4 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.0", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 7cec778d4b741..4af9819a28fa7 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = 
"https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-npos-elections-compact = { version = "3.0.0", path = "./compact" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 19931f8b342f0..e34a18bc4622a 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../arithmetic" } diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index 9218e44d243a7..9d1f60fc272c2 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, 
features = ["derive"] } sp-api = { version = "3.0.0", default-features = false, path = "../api" } sp-core = { version = "3.0.0", default-features = false, path = "../core" } sp-std = { version = "3.0.0", default-features = false, path = "../std" } diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index 0a1544028e4e5..2d88f029072a5 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../std" } diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index bfb9a742c8689..5bc95e5eec1f4 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -18,6 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] impl-serde = { version = "0.3.1", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } diff --git a/primitives/version/src/lib.rs b/primitives/version/src/lib.rs index 24a1b85ed0c38..fcd66cb780e2a 100644 --- 
a/primitives/version/src/lib.rs +++ b/primitives/version/src/lib.rs @@ -52,7 +52,7 @@ macro_rules! create_apis_vec { /// This triplet have different semantics and mis-interpretation could cause problems. /// In particular: bug fixes should result in an increment of `spec_version` and possibly `authoring_version`, /// absolutely not `impl_version` since they change the semantics of the runtime. -#[derive(Clone, PartialEq, Eq, Encode, Decode, Default, sp_runtime::RuntimeDebug)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, Default, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] pub struct RuntimeVersion { diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index bcd9b0c0b456c..206ba516591db 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.9.0", default-features = false, path = "../.. 
sp-consensus-babe = { version = "0.9.0", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "3.0.0", default-features = false, path = "../../primitives/block-builder" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-remove-compact-type-bounds", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.26.0", default-features = false } From dff848886cdc73d8f8bd77e7461f6626b6f5069d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Apr 2021 12:43:45 +0100 Subject: [PATCH 137/503] Fix up constants metadata generation --- .../procedural/src/pallet/expand/constants.rs | 3 +-- frame/support/src/dispatch.rs | 26 ++++++++----------- frame/system/src/limits.rs | 2 +- 3 files changed, 13 insertions(+), 18 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index a8f21ee58660b..f297068255b6a 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -16,7 +16,6 @@ // limitations under the License. use crate::pallet::Def; -use quote::ToTokens; struct ConstDef { /// Name of the associated type. 
@@ -112,7 +111,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { &#default_byte_getter::<#type_use_gen>( #frame_support::sp_std::marker::PhantomData ) - ), + ).0.default_byte(), // todo: [AJ] unify DefaultByteGetter documentation: #frame_support::scale_info::prelude::vec![ #( #doc ),* ], } }) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 3b0ff3e4222d0..67d13b5d791ec 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2404,7 +2404,7 @@ macro_rules! __impl_module_constants_metadata { { #[doc(hidden)] #[allow(dead_code)] - pub fn module_constants_metadata() -> &'static [$crate::dispatch::ModuleConstantMetadata] { + pub fn module_constants_metadata() -> ::sp_std::vec::Vec<$crate::dispatch::ModuleConstantMetadata> { // Create the `ByteGetter`s $( #[allow(non_upper_case_types)] @@ -2435,23 +2435,19 @@ macro_rules! __impl_module_constants_metadata { , $const_instance: $const_instantiable)? > Sync for $default_byte_name <$const_trait_instance $(, $const_instance)?> {} )* - &[ + $crate::scale_info::prelude::vec![ $( $crate::dispatch::ModuleConstantMetadata { - name: $crate::dispatch::DecodeDifferent::Encode(stringify!($name)), - ty: $crate::dispatch::DecodeDifferent::Encode(stringify!($type)), - value: $crate::dispatch::DecodeDifferent::Encode( - $crate::dispatch::DefaultByteGetter( - &$default_byte_name::< - $const_trait_instance $(, $const_instance)? - >( - $crate::dispatch::marker::PhantomData - ) + name: stringify!($name), + ty: $crate::scale_info::meta_type::<$type>(), + value: $crate::dispatch::DefaultByteGetter( + &$default_byte_name::< + $const_trait_instance $(, $const_instance)? 
+ >( + $crate::dispatch::marker::PhantomData ) - ), - documentation: $crate::dispatch::DecodeDifferent::Encode( - &[ $( $doc_attr ),* ] - ), + ).0.default_byte(), // todo: [AJ] unify DefaultByteGetter, + documentation: $crate::scale_info::prelude::vec![ $( $doc_attr ),* ], } ),* ] diff --git a/frame/system/src/limits.rs b/frame/system/src/limits.rs index 6716b2a8a2b51..c6a2a5c13214d 100644 --- a/frame/system/src/limits.rs +++ b/frame/system/src/limits.rs @@ -94,7 +94,7 @@ pub type ValidationResult = Result; const DEFAULT_NORMAL_RATIO: Perbill = Perbill::from_percent(75); /// `DispatchClass`-specific weight configuration. -#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode)] +#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, scale_info::TypeInfo)] pub struct WeightsPerClass { /// Base weight of single extrinsic of given class. pub base_extrinsic: Weight, From 18d522e1f5d215b2859a8ec49f649c586bd5527d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Apr 2021 12:59:54 +0100 Subject: [PATCH 138/503] More TypeInfo derives for constants --- frame/support/src/lib.rs | 2 +- frame/support/src/weights.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 0bfa1b72b519e..1e904b41997e0 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -93,7 +93,7 @@ pub const LOG_TARGET: &'static str = "runtime::frame-support"; pub enum Never {} /// A pallet identifier. These are per pallet and should be stored in a registry somewhere. 
-#[derive(Clone, Copy, Eq, PartialEq, Encode, Decode)] +#[derive(Clone, Copy, Eq, PartialEq, Encode, Decode, scale_info::TypeInfo)] pub struct PalletId(pub [u8; 8]); impl TypeId for PalletId { diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 2bf0b5a420a4c..e2f0f7dfd9c3c 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -655,7 +655,7 @@ impl RuntimeDbWeight { /// /// The `negative` value encodes whether the term is added or substracted from the /// overall polynomial result. -#[derive(Clone, Encode, Decode)] +#[derive(Clone, Encode, Decode, scale_info::TypeInfo)] pub struct WeightToFeeCoefficient { /// The integral part of the coefficient. pub coeff_integer: Balance, From 788426e5a2c1f832517afc3e07b58608c7ee70de Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Apr 2021 13:00:31 +0100 Subject: [PATCH 139/503] Use frame-metadata2 aj-constants --- Cargo.lock | 176 +++++++++++++++++---------------------- frame/support/Cargo.toml | 2 +- 2 files changed, 78 insertions(+), 100 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 29d4949384d85..5b925a0be5bd9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1649,7 +1649,7 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.0" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#d30be8b4b31be144e2d0199b9c9ac1a0db6e05c9" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2e8d2ab5bbf20b0ed97085a1aedf83cdd313e21d" dependencies = [ "either", "futures 0.3.13", @@ -1659,7 +1659,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", + "scale-info", ] [[package]] @@ -1726,7 +1726,7 @@ dependencies = [ "log", "parity-scale-codec", "paste 1.0.5", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", 
"sp-api", "sp-io", @@ -1765,7 +1765,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1783,7 +1783,7 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -1796,11 +1796,11 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#86bf2b374c9aa7fee82ad19df8b5569b0d1b8c58" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-constants#616aba05e84c022345da37dae1ff11eb0d873afb" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", ] [[package]] @@ -1808,7 +1808,7 @@ name = "frame-metadata" version = "13.0.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-std", @@ -1830,7 +1830,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.5", "pretty_assertions", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1885,7 +1885,7 @@ dependencies = [ "parity-scale-codec", "pretty_assertions", "rustversion", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -1905,7 +1905,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -4244,7 +4244,7 @@ 
dependencies = [ "frame-system", "parity-scale-codec", "pretty_assertions", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4351,7 +4351,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-api", "sp-authority-discovery", @@ -4431,7 +4431,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-api", "sp-block-builder", @@ -4646,7 +4646,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4662,7 +4662,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4681,7 +4681,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-aura", @@ -4699,7 +4699,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-application-crypto", "sp-authority-discovery", @@ -4718,7 +4718,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-authorship", "sp-core", @@ -4745,7 
+4745,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-application-crypto", "sp-consensus-babe", @@ -4768,7 +4768,7 @@ dependencies = [ "log", "pallet-transaction-payment", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4786,7 +4786,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4806,7 +4806,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4836,7 +4836,7 @@ dependencies = [ "pwasm-utils 0.16.0", "rand 0.7.3", "rand_pcg", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4853,7 +4853,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "sp-runtime", "sp-std", ] @@ -4892,7 +4892,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4909,7 +4909,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4934,7 +4934,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.5", "rand 0.7.3", - "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -4956,7 +4956,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4975,7 +4975,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -4995,7 +4995,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5012,7 +5012,7 @@ dependencies = [ "lite-json", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5028,7 +5028,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5046,7 +5046,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -5073,7 +5073,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5096,7 +5096,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5115,7 +5115,7 @@ 
dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5134,7 +5134,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5153,7 +5153,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5168,7 +5168,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5188,7 +5188,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5239,7 +5239,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5255,7 +5255,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5271,7 +5271,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5288,7 +5288,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5333,7 +5333,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5349,7 +5349,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5366,7 +5366,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5383,7 +5383,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5400,7 +5400,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5418,7 +5418,7 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5463,7 +5463,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5490,7 +5490,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.5", "rand_chacha 0.2.2", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5530,7 +5530,7 @@ 
dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5546,7 +5546,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5563,7 +5563,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5583,7 +5583,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5600,7 +5600,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5647,7 +5647,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5665,7 +5665,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -5684,7 +5684,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -6234,7 +6234,7 @@ dependencies = [ "impl-codec", "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", "parity-scale-codec", - "scale-info 
0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", ] @@ -8171,17 +8171,6 @@ dependencies = [ "wasm-timer", ] -[[package]] -name = "scale-info" -version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds#53d0d64b26a23ed99700646d98e31e3d7780cc02" -dependencies = [ - "cfg-if 1.0.0", - "derive_more", - "parity-scale-codec", - "scale-info-derive 0.4.0 (git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds)", -] - [[package]] name = "scale-info" version = "0.6.0" @@ -8190,18 +8179,7 @@ dependencies = [ "cfg-if 1.0.0", "derive_more", "parity-scale-codec", - "scale-info-derive 0.4.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", -] - -[[package]] -name = "scale-info-derive" -version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-remove-compact-type-bounds#53d0d64b26a23ed99700646d98e31e3d7780cc02" -dependencies = [ - "proc-macro-crate 1.0.0", - "proc-macro2", - "quote", - "syn", + "scale-info-derive", ] [[package]] @@ -8669,7 +8647,7 @@ name = "sp-application-crypto" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-core", "sp-io", @@ -8698,7 +8676,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8722,7 +8700,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8807,7 +8785,7 @@ name = "sp-consensus-aura" version = "0.9.0" dependencies = [ 
"parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8824,7 +8802,7 @@ version = "0.9.0" dependencies = [ "merlin", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8855,7 +8833,7 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8900,7 +8878,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -8955,7 +8933,7 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9041,7 +9019,7 @@ version = "3.0.0" dependencies = [ "parity-scale-codec", "rand 0.7.3", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -9117,7 +9095,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.5", "rand 0.7.3", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "serde_json", "sp-api", @@ -9229,7 +9207,7 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9242,7 +9220,7 @@ name = "sp-staking" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info 0.6.0 
(git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "sp-runtime", "sp-std", ] @@ -9386,7 +9364,7 @@ version = "3.0.0" dependencies = [ "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-runtime", "sp-std", @@ -9664,7 +9642,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info 0.6.0 (git+https://github.com/paritytech/scale-info?branch=aj-substrate)", + "scale-info", "serde", "sp-api", "sp-application-crypto", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index a1b19f74c3f6f..3cf97d478a65c 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-metadata = { version = "13.0.0", default-features = false, path = "../metadata" } -frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v13"] } +frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-constants", default-features = false, features = ["v13"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } From b96d17d37ffd198628f1356bd19b1f7d09dbfb22 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Apr 2021 12:35:29 +0100 Subject: [PATCH 
140/503] Error metadata --- Cargo.lock | 2 +- frame/support/Cargo.toml | 2 +- .../procedural/src/pallet/expand/error.rs | 8 +++---- .../src/pallet/expand/pallet_struct.rs | 6 ++--- frame/support/src/dispatch.rs | 10 ++++---- frame/support/src/error.rs | 23 ++++++++++++++----- 6 files changed, 30 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5418b4de70710..43ba0083d8fe8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1791,7 +1791,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-constants#616aba05e84c022345da37dae1ff11eb0d873afb" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#eb8166882db7603b8b7eea1ac87061af0651d02f" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 3cf97d478a65c..6d83584cfd925 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-metadata = { version = "13.0.0", default-features = false, path = "../metadata" } -frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-constants", default-features = false, features = ["v13"] } +frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-substrate", default-features = false, features = ["v13"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", 
default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index 000f476d94d8b..97e768737ab88 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -57,8 +57,8 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { let variant_str = format!("{}", variant); quote::quote_spanned!(error.attr_span => #frame_support::error::ErrorMetadata { - name: #frame_support::error::DecodeDifferent::Encode(#variant_str), - documentation: #frame_support::error::DecodeDifferent::Encode(&[ #( #doc, )* ]), + name: #variant_str, + documentation: #frame_support::scale_info::prelude::vec![ #( #doc, )* ], }, ) }); @@ -141,8 +141,8 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { for #error_ident<#type_use_gen> #config_where_clause { - fn metadata() -> &'static [#frame_support::error::ErrorMetadata] { - &[ #( #metadata )* ] + fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::error::ErrorMetadata> { + #frame_support::scale_info::prelude::vec![ #( #metadata )* ] } } ) diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index fd3230edd1e74..6990ea37298e2 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -76,7 +76,7 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { for #pallet_ident<#type_use_gen> #config_where_clause { - fn metadata() -> &'static [#frame_support::error::ErrorMetadata] { + fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::error::ErrorMetadata> { < #error_ident<#type_use_gen> as #frame_support::error::ModuleErrorMetadata >::metadata() @@ -89,8 +89,8 @@ pub fn expand_pallet_struct(def: &mut Def) -> 
proc_macro2::TokenStream { for #pallet_ident<#type_use_gen> #config_where_clause { - fn metadata() -> &'static [#frame_support::error::ErrorMetadata] { - &[] + fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::error::ErrorMetadata> { + #frame_support::scale_info::prelude::vec::Vec::new() } } ) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 67d13b5d791ec..ab90364b1a2df 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -20,9 +20,7 @@ pub use crate::sp_std::{result, fmt, prelude::{Vec, Clone, Eq, PartialEq}, marker}; pub use crate::codec::{Codec, EncodeLike, Decode, Encode, Input, Output, HasCompact, EncodeAsRef}; -pub use frame_metadata::{ - DefaultByte, DefaultByteGetter, ModuleErrorMetadata, ErrorMetadata -}; +pub use frame_metadata::{DefaultByte, DefaultByteGetter}; pub use frame_metadata2::v13::{ FunctionMetadata, FunctionArgumentMetadata, ModuleConstantMetadata, }; @@ -2154,11 +2152,11 @@ macro_rules! 
decl_module { $( $constants )* } - impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $crate::dispatch::ModuleErrorMetadata + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $crate::error::ModuleErrorMetadata for $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* { - fn metadata() -> &'static [$crate::dispatch::ErrorMetadata] { - <$error_type as $crate::dispatch::ModuleErrorMetadata>::metadata() + fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::error::ErrorMetadata> { + <$error_type as $crate::error::ModuleErrorMetadata>::metadata() } } } diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index 508de49e949c2..52cc6f1f9057b 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -20,7 +20,7 @@ #[doc(hidden)] pub use sp_runtime::traits::{LookupError, BadOrigin}; #[doc(hidden)] -pub use frame_metadata::{ModuleErrorMetadata, ErrorMetadata, DecodeDifferent}; +pub use frame_metadata2::v13::ErrorMetadata; /// Declare an error type for a runtime module. /// @@ -164,14 +164,14 @@ macro_rules! decl_error { for $error<$generic $(, $inst_generic)?> $( where $( $where_ty: $where_bound ),* )? { - fn metadata() -> &'static [$crate::error::ErrorMetadata] { - &[ + fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::error::ErrorMetadata> { + $crate::scale_info::prelude::vec![ $( $crate::error::ErrorMetadata { - name: $crate::error::DecodeDifferent::Encode(stringify!($name)), - documentation: $crate::error::DecodeDifferent::Encode(&[ + name: stringify!($name), + documentation: $crate::scale_info::prelude::vec![ $( $doc_attr ),* - ]), + ], } ),* ] @@ -210,3 +210,14 @@ macro_rules! decl_error { } } } + +/// All the metadata about errors in a module. 
+pub trait ModuleErrorMetadata { + fn metadata() -> sp_std::prelude::Vec; +} + +impl ModuleErrorMetadata for &'static str { + fn metadata() -> sp_std::prelude::Vec { + sp_std::prelude::Vec::new() + } +} From 34560b8cb2ba34952b15ef3c18bffbbdf0b4e05a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Apr 2021 12:42:24 +0100 Subject: [PATCH 141/503] Add error metadata to metadata struct --- frame/support/src/metadata.rs | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 3d1a3faa569cc..5494f30d2f223 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -127,13 +127,7 @@ macro_rules! __runtime_modules_to_metadata { $mod, $module $( <$instance> )?, $runtime, $(with $kw)* ), constants: $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata(), - // todo: [AJ] errors - errors: vec![], - // errors: $crate::metadata::DecodeDifferent::Encode( - // $crate::metadata::FnEncode( - // <$mod::$module::<$runtime $(, $mod::$instance )?> as $crate::metadata::ModuleErrorMetadata>::metadata - // ) - // ) + errors: <$mod::$module::<$runtime $(, $mod::$instance )?> as $crate::metadata::ModuleErrorMetadata>::metadata(), }; $( $rest )* ) From 2ff2515204012b84e5b8d942c5f7dca5a54acceb Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Apr 2021 13:10:21 +0100 Subject: [PATCH 142/503] Use correct ModuleErrorMetadata trait --- frame/support/src/metadata.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 5494f30d2f223..1be6669399f69 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -18,7 +18,7 @@ pub use frame_metadata::{ DecodeDifferent, FnEncode, RuntimeMetadata, ModuleMetadata, RuntimeMetadataLastVersion, DefaultByteGetter, RuntimeMetadataPrefixed, StorageEntryMetadata, StorageMetadata, - StorageEntryType, 
StorageEntryModifier, DefaultByte, StorageHasher, ModuleErrorMetadata, + StorageEntryType, StorageEntryModifier, DefaultByte, StorageHasher, ExtrinsicMetadata, }; pub use frame_metadata2; @@ -127,7 +127,7 @@ macro_rules! __runtime_modules_to_metadata { $mod, $module $( <$instance> )?, $runtime, $(with $kw)* ), constants: $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata(), - errors: <$mod::$module::<$runtime $(, $mod::$instance )?> as $crate::metadata::ModuleErrorMetadata>::metadata(), + errors: <$mod::$module::<$runtime $(, $mod::$instance )?> as $crate::error::ModuleErrorMetadata>::metadata(), }; $( $rest )* ) From 6cbc3fb364fa70d508690a8052200416954cf47e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Apr 2021 16:04:22 +0100 Subject: [PATCH 143/503] Update frame/balances/src/tests_composite.rs Co-authored-by: Guillaume Thiolliere --- frame/balances/src/tests_composite.rs | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index d585798704bde..8a589ae000610 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -46,16 +46,6 @@ frame_support::construct_runtime!( } ); -impl_outer_event! { - pub enum Event for Test { - system, - balances, - } -} - -// Workaround for https://github.com/rust-lang/rust/issues/26925 . Remove when sorted. -#[derive(Clone, PartialEq, Eq, Debug, scale_info::TypeInfo)] -pub struct Test; parameter_types! 
{ pub const BlockHashCount: u64 = 250; pub BlockWeights: frame_system::limits::BlockWeights = From d04f074dd7a34133491507d7e3d541d10fc86f37 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Apr 2021 16:04:34 +0100 Subject: [PATCH 144/503] Update frame/balances/src/tests_composite.rs Co-authored-by: Guillaume Thiolliere --- frame/balances/src/tests_composite.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index 8a589ae000610..e7fbc618f8ab1 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -60,7 +60,7 @@ impl frame_system::Config for Test { type Origin = Origin; type Index = u64; type BlockNumber = u64; - type Call = CallWithDispatchInfo; + type Call = Call; type Hash = H256; type Hashing = ::sp_runtime::traits::BlakeTwo256; type AccountId = u64; From 5a8fe52ebc0aa35dbae15bd50d213a51d9305d80 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Apr 2021 16:04:43 +0100 Subject: [PATCH 145/503] Update frame/balances/src/tests_composite.rs Co-authored-by: Guillaume Thiolliere --- frame/balances/src/tests_composite.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index e7fbc618f8ab1..57a730d754002 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -69,7 +69,7 @@ impl frame_system::Config for Test { type Event = Event; type BlockHashCount = BlockHashCount; type Version = (); - type PalletInfo = (); + type PalletInfo = PalletInfo; type AccountData = super::AccountData; type OnNewAccount = (); type OnKilledAccount = (); From 54aef1a301fef5fc0a6bb94c044d29f3914ae979 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Apr 2021 16:05:00 +0100 Subject: [PATCH 146/503] Update frame/balances/src/tests_composite.rs Co-authored-by: Guillaume Thiolliere --- 
frame/balances/src/tests_composite.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index 57a730d754002..ea7d5dc63c5db 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -124,7 +124,7 @@ impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { self.set_associated_consts(); let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - GenesisConfig:: { + pallet_balances::GenesisConfig:: { balances: if self.monied { vec![ (1, 10 * self.existential_deposit), From 256ced39c90fb2367253c7b474bd4596391f0355 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Apr 2021 17:17:48 +0100 Subject: [PATCH 147/503] Delete legacy frame-metadata crate and consolidate imports --- Cargo.lock | 15 +- Cargo.toml | 1 - frame/metadata/Cargo.toml | 29 -- frame/metadata/README.md | 7 - frame/metadata/src/lib.rs | 430 ------------------ frame/support/Cargo.toml | 3 +- .../procedural/src/pallet/expand/call.rs | 6 +- .../procedural/src/pallet/expand/error.rs | 4 +- .../procedural/src/pallet/expand/event.rs | 6 +- .../src/pallet/expand/pallet_struct.rs | 4 +- .../procedural/src/pallet/expand/storage.rs | 12 +- frame/support/src/dispatch.rs | 9 +- frame/support/src/error.rs | 2 - frame/support/src/event.rs | 18 +- frame/support/src/hash.rs | 17 +- frame/support/src/metadata.rs | 18 +- frame/support/src/storage/types/double_map.rs | 11 +- frame/support/src/storage/types/map.rs | 7 +- frame/support/src/storage/types/mod.rs | 3 +- frame/support/src/storage/types/value.rs | 3 +- frame/support/test/Cargo.toml | 1 - 21 files changed, 57 insertions(+), 549 deletions(-) delete mode 100644 frame/metadata/Cargo.toml delete mode 100644 frame/metadata/README.md delete mode 100644 frame/metadata/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 43ba0083d8fe8..2e3b5dc6b75be 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1796,17 
+1796,7 @@ dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", "scale-info", -] - -[[package]] -name = "frame-metadata" -version = "13.0.0" -dependencies = [ - "parity-scale-codec", - "scale-info", "serde", - "sp-core", - "sp-std", ] [[package]] @@ -1814,8 +1804,7 @@ name = "frame-support" version = "3.0.0" dependencies = [ "bitflags", - "frame-metadata 12.0.0", - "frame-metadata 13.0.0", + "frame-metadata", "frame-support-procedural", "frame-system", "impl-trait-for-tuples", @@ -1874,7 +1863,6 @@ dependencies = [ name = "frame-support-test" version = "3.0.0" dependencies = [ - "frame-metadata 13.0.0", "frame-support", "frame-system", "parity-scale-codec", @@ -8179,6 +8167,7 @@ dependencies = [ "derive_more", "parity-scale-codec", "scale-info-derive", + "serde", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index e536ace8e6214..7ba4f5f4119c7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -94,7 +94,6 @@ members = [ "frame/merkle-mountain-range", "frame/merkle-mountain-range/primitives", "frame/merkle-mountain-range/rpc", - "frame/metadata", "frame/multisig", "frame/nicks", "frame/node-authorization", diff --git a/frame/metadata/Cargo.toml b/frame/metadata/Cargo.toml deleted file mode 100644 index 9232057e58874..0000000000000 --- a/frame/metadata/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -[package] -name = "frame-metadata" -version = "13.0.0" -authors = ["Parity Technologies "] -edition = "2018" -license = "Apache-2.0" -homepage = "https://substrate.dev" -repository = "https://github.com/paritytech/substrate/" -description = "Decodable variant of the RuntimeMetadata." 
-readme = "README.md" - -[package.metadata.docs.rs] -targets = ["x86_64-unknown-linux-gnu"] - -[dependencies] -codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } -serde = { version = "1.0.101", optional = true, features = ["derive"] } -sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } -sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } - -[features] -default = ["std"] -std = [ - "codec/std", - "sp-std/std", - "sp-core/std", - "serde", -] diff --git a/frame/metadata/README.md b/frame/metadata/README.md deleted file mode 100644 index 423af8602e3f0..0000000000000 --- a/frame/metadata/README.md +++ /dev/null @@ -1,7 +0,0 @@ -Decodable variant of the RuntimeMetadata. - -This really doesn't belong here, but is necessary for the moment. In the future -it should be removed entirely to an external module for shimming on to the -codec-encoded metadata. - -License: Apache-2.0 \ No newline at end of file diff --git a/frame/metadata/src/lib.rs b/frame/metadata/src/lib.rs deleted file mode 100644 index a63da82ca00db..0000000000000 --- a/frame/metadata/src/lib.rs +++ /dev/null @@ -1,430 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) 2018-2021 Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: Apache-2.0 - -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -//! Decodable variant of the RuntimeMetadata. -//! -//! This really doesn't belong here, but is necessary for the moment. In the future -//! it should be removed entirely to an external module for shimming on to the -//! codec-encoded metadata. - -#![cfg_attr(not(feature = "std"), no_std)] - -#[cfg(feature = "std")] -use serde::Serialize; -#[cfg(feature = "std")] -use codec::{Decode, Input, Error}; -use codec::{Encode, Output}; -use sp_std::vec::Vec; -use sp_core::RuntimeDebug; - -#[cfg(feature = "std")] -type StringBuf = String; - -/// Current prefix of metadata -pub const META_RESERVED: u32 = 0x6174656d; // 'meta' warn endianness - -/// On `no_std` we do not support `Decode` and thus `StringBuf` is just `&'static str`. -/// So, if someone tries to decode this stuff on `no_std`, they will get a compilation error. -#[cfg(not(feature = "std"))] -type StringBuf = &'static str; - -/// A type that decodes to a different type than it encodes. -/// The user needs to make sure that both types use the same encoding. -/// -/// For example a `&'static [ &'static str ]` can be decoded to a `Vec`. 
-#[derive(Clone)] -pub enum DecodeDifferent where B: 'static, O: 'static { - Encode(B), - Decoded(O), -} - -impl Encode for DecodeDifferent where B: Encode + 'static, O: Encode + 'static { - fn encode_to(&self, dest: &mut W) { - match self { - DecodeDifferent::Encode(b) => b.encode_to(dest), - DecodeDifferent::Decoded(o) => o.encode_to(dest), - } - } -} - -impl codec::EncodeLike for DecodeDifferent where B: Encode + 'static, O: Encode + 'static {} - -#[cfg(feature = "std")] -impl Decode for DecodeDifferent where B: 'static, O: Decode + 'static { - fn decode(input: &mut I) -> Result { - ::decode(input).map(|val| { - DecodeDifferent::Decoded(val) - }) - } -} - -impl PartialEq for DecodeDifferent -where - B: Encode + Eq + PartialEq + 'static, - O: Encode + Eq + PartialEq + 'static, -{ - fn eq(&self, other: &Self) -> bool { - self.encode() == other.encode() - } -} - -impl Eq for DecodeDifferent - where B: Encode + Eq + PartialEq + 'static, O: Encode + Eq + PartialEq + 'static -{} - -impl sp_std::fmt::Debug for DecodeDifferent - where - B: sp_std::fmt::Debug + Eq + 'static, - O: sp_std::fmt::Debug + Eq + 'static, -{ - fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { - match self { - DecodeDifferent::Encode(b) => b.fmt(f), - DecodeDifferent::Decoded(o) => o.fmt(f), - } - } -} - -#[cfg(feature = "std")] -impl serde::Serialize for DecodeDifferent - where - B: serde::Serialize + 'static, - O: serde::Serialize + 'static, -{ - fn serialize(&self, serializer: S) -> Result where S: serde::Serializer { - match self { - DecodeDifferent::Encode(b) => b.serialize(serializer), - DecodeDifferent::Decoded(o) => o.serialize(serializer), - } - } -} - -pub type DecodeDifferentArray = DecodeDifferent<&'static [B], Vec>; - -type DecodeDifferentStr = DecodeDifferent<&'static str, StringBuf>; - -/// All the metadata about a function. 
-#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct FunctionMetadata { - pub name: DecodeDifferentStr, - pub arguments: DecodeDifferentArray, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about a function argument. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct FunctionArgumentMetadata { - pub name: DecodeDifferentStr, - pub ty: DecodeDifferentStr, -} - -/// Newtype wrapper for support encoding functions (actual the result of the function). -#[derive(Clone, Eq)] -pub struct FnEncode(pub fn() -> E) where E: Encode + 'static; - -impl Encode for FnEncode { - fn encode_to(&self, dest: &mut W) { - self.0().encode_to(dest); - } -} - -impl codec::EncodeLike for FnEncode {} - -impl PartialEq for FnEncode { - fn eq(&self, other: &Self) -> bool { - self.0().eq(&other.0()) - } -} - -impl sp_std::fmt::Debug for FnEncode { - fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { - self.0().fmt(f) - } -} - -#[cfg(feature = "std")] -impl serde::Serialize for FnEncode { - fn serialize(&self, serializer: S) -> Result where S: serde::Serializer { - self.0().serialize(serializer) - } -} - -/// All the metadata about an outer event. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct OuterEventMetadata { - pub name: DecodeDifferentStr, - pub events: DecodeDifferentArray< - (&'static str, FnEncode<&'static [EventMetadata]>), - (StringBuf, Vec) - >, -} - -/// All the metadata about an event. 
-#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct EventMetadata { - pub name: DecodeDifferentStr, - pub arguments: DecodeDifferentArray<&'static str, StringBuf>, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about one storage entry. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct StorageEntryMetadata { - pub name: DecodeDifferentStr, - pub modifier: StorageEntryModifier, - pub ty: StorageEntryType, - pub default: ByteGetter, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about one module constant. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct ModuleConstantMetadata { - pub name: DecodeDifferentStr, - pub ty: DecodeDifferentStr, - pub value: ByteGetter, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about a module error. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct ErrorMetadata { - pub name: DecodeDifferentStr, - pub documentation: DecodeDifferentArray<&'static str, StringBuf>, -} - -/// All the metadata about errors in a module. -pub trait ModuleErrorMetadata { - fn metadata() -> &'static [ErrorMetadata]; -} - -impl ModuleErrorMetadata for &'static str { - fn metadata() -> &'static [ErrorMetadata] { - &[] - } -} - -/// A technical trait to store lazy initiated vec value as static dyn pointer. -pub trait DefaultByte: Send + Sync { - fn default_byte(&self) -> Vec; -} - -/// Wrapper over dyn pointer for accessing a cached once byte value. -#[derive(Clone)] -pub struct DefaultByteGetter(pub &'static dyn DefaultByte); - -/// Decode different for static lazy initiated byte value. 
-pub type ByteGetter = DecodeDifferent>; - -impl Encode for DefaultByteGetter { - fn encode_to(&self, dest: &mut W) { - self.0.default_byte().encode_to(dest) - } -} - -impl codec::EncodeLike for DefaultByteGetter {} - -impl PartialEq for DefaultByteGetter { - fn eq(&self, other: &DefaultByteGetter) -> bool { - let left = self.0.default_byte(); - let right = other.0.default_byte(); - left.eq(&right) - } -} - -impl Eq for DefaultByteGetter { } - -#[cfg(feature = "std")] -impl serde::Serialize for DefaultByteGetter { - fn serialize(&self, serializer: S) -> Result where S: serde::Serializer { - self.0.default_byte().serialize(serializer) - } -} - -impl sp_std::fmt::Debug for DefaultByteGetter { - fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { - self.0.default_byte().fmt(f) - } -} - -/// Hasher used by storage maps -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub enum StorageHasher { - Blake2_128, - Blake2_256, - Blake2_128Concat, - Twox128, - Twox256, - Twox64Concat, - Identity, -} - -/// A storage entry type. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub enum StorageEntryType { - Plain(DecodeDifferentStr), - Map { - hasher: StorageHasher, - key: DecodeDifferentStr, - value: DecodeDifferentStr, - // is_linked flag previously, unused now to keep backwards compat - unused: bool, - }, - DoubleMap { - hasher: StorageHasher, - key1: DecodeDifferentStr, - key2: DecodeDifferentStr, - value: DecodeDifferentStr, - key2_hasher: StorageHasher, - }, -} - -/// A storage entry modifier. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub enum StorageEntryModifier { - Optional, - Default, -} - -/// All metadata of the storage. 
-#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct StorageMetadata { - /// The common prefix used by all storage entries. - pub prefix: DecodeDifferent<&'static str, StringBuf>, - pub entries: DecodeDifferent<&'static [StorageEntryMetadata], Vec>, -} - -/// Metadata prefixed by a u32 for reserved usage -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct RuntimeMetadataPrefixed(pub u32, pub RuntimeMetadata); - -/// Metadata of the extrinsic used by the runtime. -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct ExtrinsicMetadata { - /// Extrinsic version. - pub version: u8, - /// The signed extensions in the order they appear in the extrinsic. - pub signed_extensions: Vec, -} - -/// The metadata of a runtime. -/// The version ID encoded/decoded through -/// the enum nature of `RuntimeMetadata`. -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub enum RuntimeMetadata { - /// Unused; enum filler. - V0(RuntimeMetadataDeprecated), - /// Version 1 for runtime metadata. No longer used. - V1(RuntimeMetadataDeprecated), - /// Version 2 for runtime metadata. No longer used. - V2(RuntimeMetadataDeprecated), - /// Version 3 for runtime metadata. No longer used. - V3(RuntimeMetadataDeprecated), - /// Version 4 for runtime metadata. No longer used. - V4(RuntimeMetadataDeprecated), - /// Version 5 for runtime metadata. No longer used. - V5(RuntimeMetadataDeprecated), - /// Version 6 for runtime metadata. No longer used. - V6(RuntimeMetadataDeprecated), - /// Version 7 for runtime metadata. No longer used. - V7(RuntimeMetadataDeprecated), - /// Version 8 for runtime metadata. No longer used. - V8(RuntimeMetadataDeprecated), - /// Version 9 for runtime metadata. No longer used. 
- V9(RuntimeMetadataDeprecated), - /// Version 10 for runtime metadata. No longer used. - V10(RuntimeMetadataDeprecated), - /// Version 11 for runtime metadata. No longer used. - V11(RuntimeMetadataDeprecated), - /// Version 12 for runtime metadata. - V12(RuntimeMetadataV12), -} - -/// Enum that should fail. -#[derive(Eq, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Serialize))] -pub enum RuntimeMetadataDeprecated { } - -impl Encode for RuntimeMetadataDeprecated { - fn encode_to(&self, _dest: &mut W) {} -} - -impl codec::EncodeLike for RuntimeMetadataDeprecated {} - -#[cfg(feature = "std")] -impl Decode for RuntimeMetadataDeprecated { - fn decode(_input: &mut I) -> Result { - Err("Decoding is not supported".into()) - } -} - -/// The metadata of a runtime. -#[derive(Eq, Encode, PartialEq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct RuntimeMetadataV12 { - /// Metadata of all the modules. - pub modules: DecodeDifferentArray, - /// Metadata of the extrinsic. - pub extrinsic: ExtrinsicMetadata, -} - -/// The latest version of the metadata. -pub type RuntimeMetadataLastVersion = RuntimeMetadataV12; - -/// All metadata about an runtime module. -#[derive(Clone, PartialEq, Eq, Encode, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Decode, Serialize))] -pub struct ModuleMetadata { - pub name: DecodeDifferentStr, - pub storage: Option, StorageMetadata>>, - pub calls: ODFnA, - pub event: ODFnA, - pub constants: DFnA, - pub errors: DFnA, - /// Define the index of the module, this index will be used for the encoding of module event, - /// call and origin variants. 
- pub index: u8, -} - -type ODFnA = Option>; -type DFnA = DecodeDifferent, Vec>; - -impl Into for RuntimeMetadataPrefixed { - fn into(self) -> sp_core::OpaqueMetadata { - sp_core::OpaqueMetadata::new(self.encode()) - } -} - -impl Into for RuntimeMetadataLastVersion { - fn into(self) -> RuntimeMetadataPrefixed { - RuntimeMetadataPrefixed(META_RESERVED, RuntimeMetadata::V12(self)) - } -} diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 6d83584cfd925..de9d379c98936 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,8 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } -frame-metadata = { version = "13.0.0", default-features = false, path = "../metadata" } -frame-metadata2 = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-substrate", default-features = false, features = ["v13"] } +frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-substrate", default-features = false, features = ["v13"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 38c5f1b4f8605..a90823edc4587 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -179,12 +179,12 @@ pub fn expand_call(def: &mut Def) -> 
proc_macro2::TokenStream { impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { #[doc(hidden)] - pub fn call_functions() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::v13::FunctionMetadata> { + pub fn call_functions() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::FunctionMetadata> { #frame_support::scale_info::prelude::vec![ #( - #frame_support::metadata::v13::FunctionMetadata { + #frame_support::metadata::FunctionMetadata { name: stringify!(#fn_name), arguments: #frame_support::scale_info::prelude::vec![ #( - #frame_support::metadata::v13::FunctionArgumentMetadata { + #frame_support::metadata::FunctionArgumentMetadata { name: stringify!(#args_name), ty: #frame_support::scale_info::meta_type::<#args_type>(), is_compact: #args_is_compact, diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index 97e768737ab88..47ce7f54222bd 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -56,7 +56,7 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { .map(|(variant, doc)| { let variant_str = format!("{}", variant); quote::quote_spanned!(error.attr_span => - #frame_support::error::ErrorMetadata { + #frame_support::metadata::ErrorMetadata { name: #variant_str, documentation: #frame_support::scale_info::prelude::vec![ #( #doc, )* ], }, @@ -141,7 +141,7 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { for #error_ident<#type_use_gen> #config_where_clause { - fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::error::ErrorMetadata> { + fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::ErrorMetadata> { #frame_support::scale_info::prelude::vec![ #( #metadata )* ] } } diff --git a/frame/support/procedural/src/pallet/expand/event.rs 
b/frame/support/procedural/src/pallet/expand/event.rs index 4a3e93773a6ca..41abdfb32b8ef 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -49,12 +49,12 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { .iter() .map(|(ty, name)| { quote::quote!( - #frame_support::metadata::v13::TypeSpec::new::<#ty>(#name) + #frame_support::metadata::TypeSpec::new::<#ty>(#name) ) }); let docs = &event.docs; quote::quote!( - #frame_support::metadata::v13::EventMetadata { + #frame_support::metadata::EventMetadata { name: #name, arguments: #frame_support::scale_info::prelude::vec![ #( #args, )* ], documentation: #frame_support::scale_info::prelude::vec![ #( #docs, )* ], @@ -145,7 +145,7 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { impl<#event_impl_gen> #event_ident<#event_use_gen> #event_where_clause { #[allow(dead_code)] #[doc(hidden)] - pub fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::v13::EventMetadata> { + pub fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::EventMetadata> { #frame_support::scale_info::prelude::vec![ #( #metadata )* ] } } diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index 6990ea37298e2..1db96c0d1647a 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -76,7 +76,7 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { for #pallet_ident<#type_use_gen> #config_where_clause { - fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::error::ErrorMetadata> { + fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::ErrorMetadata> { < #error_ident<#type_use_gen> as #frame_support::error::ModuleErrorMetadata >::metadata() 
@@ -89,7 +89,7 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { for #pallet_ident<#type_use_gen> #config_where_clause { - fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::error::ErrorMetadata> { + fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::ErrorMetadata> { #frame_support::scale_info::prelude::vec::Vec::new() } } diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index 5c0e2fbe3aaf3..b20fa30a5335c 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -94,14 +94,14 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { let ty = match &storage.metadata { Metadata::Value { value } => { quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::v13::StorageEntryType::Plain( + #frame_support::metadata::StorageEntryType::Plain( #frame_support::scale_info::meta_type::<#value>() ) ) }, Metadata::Map { key, value } => { quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::v13::StorageEntryType::Map { + #frame_support::metadata::StorageEntryType::Map { hasher: <#full_ident as #metadata_trait>::HASHER, key: #frame_support::scale_info::meta_type::<#key>(), value: #frame_support::scale_info::meta_type::<#value>(), @@ -111,7 +111,7 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { }, Metadata::DoubleMap { key1, key2, value } => { quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::v13::StorageEntryType::DoubleMap { + #frame_support::metadata::StorageEntryType::DoubleMap { hasher: <#full_ident as #metadata_trait>::HASHER1, key2_hasher: <#full_ident as #metadata_trait>::HASHER2, key1: #frame_support::scale_info::meta_type::<#key1>(), @@ -123,7 +123,7 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { }; 
quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* #frame_support::metadata::v13::StorageEntryMetadata { + #(#cfg_attrs)* #frame_support::metadata::StorageEntryMetadata { name: <#full_ident as #metadata_trait>::NAME, modifier: <#full_ident as #metadata_trait>::MODIFIER, ty: #ty, @@ -265,8 +265,8 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { #completed_where_clause { #[doc(hidden)] - pub fn storage_metadata() -> #frame_support::metadata::v13::StorageMetadata { - #frame_support::metadata::v13::StorageMetadata { + pub fn storage_metadata() -> #frame_support::metadata::StorageMetadata { + #frame_support::metadata::StorageMetadata { prefix: < ::PalletInfo as #frame_support::traits::PalletInfo diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index ab90364b1a2df..5fcc94940a8b0 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -20,8 +20,7 @@ pub use crate::sp_std::{result, fmt, prelude::{Vec, Clone, Eq, PartialEq}, marker}; pub use crate::codec::{Codec, EncodeLike, Decode, Encode, Input, Output, HasCompact, EncodeAsRef}; -pub use frame_metadata::{DefaultByte, DefaultByteGetter}; -pub use frame_metadata2::v13::{ +pub use frame_metadata::v13::{ FunctionMetadata, FunctionArgumentMetadata, ModuleConstantMetadata, }; pub use crate::weights::{ @@ -2328,7 +2327,7 @@ macro_rules! __dispatch_impl_metadata { { #[doc(hidden)] #[allow(dead_code)] - pub fn call_functions() -> $crate::dispatch::Vec<$crate::metadata::v13::FunctionMetadata> { + pub fn call_functions() -> $crate::dispatch::Vec<$crate::metadata::FunctionMetadata> { $crate::__call_to_functions!($($rest)*) } } @@ -2523,11 +2522,11 @@ macro_rules! 
__function_to_metadata { $( $fn_doc:expr ),*; $fn_id:expr; ) => { - $crate::metadata::v13::FunctionMetadata { + $crate::metadata::FunctionMetadata { name: stringify!($fn_name), arguments: $crate::scale_info::prelude::vec![ $( - $crate::metadata::v13::FunctionArgumentMetadata { + $crate::metadata::FunctionArgumentMetadata { name: stringify!($param_name), ty: $crate::scale_info::meta_type::<$param>(), is_compact: $crate::__function_to_metadata!(@has_compact_attr diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index 52cc6f1f9057b..db2f4576e5f3f 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -19,8 +19,6 @@ #[doc(hidden)] pub use sp_runtime::traits::{LookupError, BadOrigin}; -#[doc(hidden)] -pub use frame_metadata2::v13::ErrorMetadata; /// Declare an error type for a runtime module. /// diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index a26a2ec03ff76..9d38edf41934c 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -21,8 +21,6 @@ // You should have received a copy of the GNU General Public License // along with Substrate. If not, see . -pub use frame_metadata::{EventMetadata, DecodeDifferent, OuterEventMetadata, FnEncode}; - /// Implement the `Event` for a module. /// /// # Simple Event Example: @@ -146,7 +144,7 @@ macro_rules! decl_event { impl Event { #[allow(dead_code)] #[doc(hidden)] - pub fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { + pub fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::EventMetadata> { $crate::__events_to_metadata!(; $( $events )* ) } } @@ -298,7 +296,7 @@ macro_rules! 
__decl_generic_event { { #[allow(dead_code)] #[doc(hidden)] - pub fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::v13::EventMetadata> { + pub fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::EventMetadata> { $crate::__events_to_metadata!(; $( $events )* ) } } @@ -319,10 +317,10 @@ macro_rules! __events_to_metadata { ) => { $crate::__events_to_metadata!( $( $metadata, )* - $crate::metadata::v13::EventMetadata { + $crate::metadata::EventMetadata { name: stringify!($event), arguments: $crate::scale_info::prelude::vec![ - $( $( $crate::metadata::v13::TypeSpec::new::<$param>(stringify!($param)) ),* )* + $( $( $crate::metadata::TypeSpec::new::<$param>(stringify!($param)) ),* )* ], documentation: $crate::scale_info::prelude::vec![ $( $doc_attr ),* @@ -512,12 +510,12 @@ macro_rules! __impl_outer_event_json_metadata { ) => { impl $runtime { #[allow(dead_code)] - pub fn outer_event_metadata() -> $crate::metadata::v13::OuterEventMetadata { - $crate::metadata::v13::OuterEventMetadata { + pub fn outer_event_metadata() -> $crate::metadata::OuterEventMetadata { + $crate::metadata::OuterEventMetadata { name: stringify!($event_name), events: $crate::scale_info::prelude::vec![ $( - $crate::metadata::v13::ModuleEventMetadata { + $crate::metadata::ModuleEventMetadata { name: stringify!($module_name), events: $module_name::Event ::< $( $generic_params ),* > ::metadata() } @@ -542,7 +540,7 @@ macro_rules! __impl_outer_event_json_metadata { $( #[allow(dead_code)] pub fn [< __module_events_ $module_name $( _ $instance )? >] () -> - Vec<$crate::metadata::v13::EventMetadata> + Vec<$crate::metadata::EventMetadata> { $module_name::Event ::< $( $generic_params ),* > ::metadata() } diff --git a/frame/support/src/hash.rs b/frame/support/src/hash.rs index 458c8dad6e1d6..e8a51e94722f1 100644 --- a/frame/support/src/hash.rs +++ b/frame/support/src/hash.rs @@ -17,6 +17,7 @@ //! Hash utilities. 
+use crate::metadata; use codec::Codec; use sp_std::prelude::Vec; use sp_io::hashing::{blake2_128, blake2_256, twox_64, twox_128, twox_256}; @@ -56,7 +57,7 @@ impl Hashable for T { /// Hasher to use to hash keys to insert to storage. pub trait StorageHasher: 'static { - const METADATA: frame_metadata2::v13::StorageHasher; + const METADATA: metadata::StorageHasher; type Output: AsRef<[u8]>; fn hash(x: &[u8]) -> Self::Output; } @@ -74,7 +75,7 @@ pub trait ReversibleStorageHasher: StorageHasher { /// Store the key directly. pub struct Identity; impl StorageHasher for Identity { - const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Identity; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Identity; type Output = Vec; fn hash(x: &[u8]) -> Vec { x.to_vec() @@ -89,7 +90,7 @@ impl ReversibleStorageHasher for Identity { /// Hash storage keys with `concat(twox64(key), key)` pub struct Twox64Concat; impl StorageHasher for Twox64Concat { - const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Twox64Concat; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Twox64Concat; type Output = Vec; fn hash(x: &[u8]) -> Vec { twox_64(x) @@ -112,7 +113,7 @@ impl ReversibleStorageHasher for Twox64Concat { /// Hash storage keys with `concat(blake2_128(key), key)` pub struct Blake2_128Concat; impl StorageHasher for Blake2_128Concat { - const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Blake2_128Concat; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Blake2_128Concat; type Output = Vec; fn hash(x: &[u8]) -> Vec { blake2_128(x) @@ -135,7 +136,7 @@ impl ReversibleStorageHasher for Blake2_128Concat { /// Hash storage keys with blake2 128 pub struct Blake2_128; impl StorageHasher for Blake2_128 { - const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Blake2_128; + const METADATA: 
metadata::StorageHasher = metadata::StorageHasher::Blake2_128; type Output = [u8; 16]; fn hash(x: &[u8]) -> [u8; 16] { blake2_128(x) @@ -145,7 +146,7 @@ impl StorageHasher for Blake2_128 { /// Hash storage keys with blake2 256 pub struct Blake2_256; impl StorageHasher for Blake2_256 { - const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Blake2_256; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Blake2_256; type Output = [u8; 32]; fn hash(x: &[u8]) -> [u8; 32] { blake2_256(x) @@ -155,7 +156,7 @@ impl StorageHasher for Blake2_256 { /// Hash storage keys with twox 128 pub struct Twox128; impl StorageHasher for Twox128 { - const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Twox128; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Twox128; type Output = [u8; 16]; fn hash(x: &[u8]) -> [u8; 16] { twox_128(x) @@ -165,7 +166,7 @@ impl StorageHasher for Twox128 { /// Hash storage keys with twox 256 pub struct Twox256; impl StorageHasher for Twox256 { - const METADATA: frame_metadata2::v13::StorageHasher = frame_metadata2::v13::StorageHasher::Twox256; + const METADATA: metadata::StorageHasher = metadata::StorageHasher::Twox256; type Output = [u8; 32]; fn hash(x: &[u8]) -> [u8; 32] { twox_256(x) diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 1be6669399f69..203bfacfe8ac4 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -15,14 +15,10 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-pub use frame_metadata::{ - DecodeDifferent, FnEncode, RuntimeMetadata, ModuleMetadata, RuntimeMetadataLastVersion, - DefaultByteGetter, RuntimeMetadataPrefixed, StorageEntryMetadata, StorageMetadata, - StorageEntryType, StorageEntryModifier, DefaultByte, StorageHasher, - ExtrinsicMetadata, +pub use frame_metadata::v13::{ + ModuleMetadata, RuntimeMetadataLastVersion, StorageEntryMetadata, StorageMetadata, + StorageEntryType, StorageEntryModifier, StorageHasher, ExtrinsicMetadata, }; -pub use frame_metadata2; -pub use frame_metadata2::v13; /// todo: [AJ] update docs /// Implements the metadata support for the given runtime and all its modules. @@ -77,9 +73,9 @@ macro_rules! impl_runtime_metadata { ) => { impl $runtime { pub fn metadata() -> $crate::metadata::frame_metadata2::RuntimeMetadataPrefixed { - $crate::metadata::v13::RuntimeMetadataLastVersion::new( + $crate::metadata::RuntimeMetadataLastVersion::new( $crate::__runtime_modules_to_metadata!($runtime;; $( $rest )*), - $crate::metadata::v13::ExtrinsicMetadata { + $crate::metadata::ExtrinsicMetadata { ty: $crate::scale_info::meta_type::<$ext>(), version: <$ext as $crate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, signed_extensions: < @@ -88,7 +84,7 @@ macro_rules! impl_runtime_metadata { >::SignedExtensions as $crate::sp_runtime::traits::SignedExtension >::identifier() .into_iter() - .map(|(id, ty)| $crate::metadata::v13::SignedExtensionMetadata { + .map(|(id, ty)| $crate::metadata::SignedExtensionMetadata { identifier: id, ty, }) @@ -114,7 +110,7 @@ macro_rules! 
__runtime_modules_to_metadata { ) => { $crate::__runtime_modules_to_metadata!( $runtime; - $( $metadata, )* $crate::metadata::v13::ModuleMetadata { + $( $metadata, )* $crate::metadata::ModuleMetadata { name: stringify!($name), index: $index, storage: $crate::__runtime_modules_to_metadata_calls_storage!( diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index 762513e9a7efb..7397e095a401c 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -20,14 +20,13 @@ use codec::{FullCodec, Decode, EncodeLike, Encode}; use crate::{ + metadata::{self, StorageEntryModifier}, storage::{ StorageAppend, StorageDecodeLength, types::{OptionQuery, QueryKindTrait, OnEmptyGetter}, }, traits::{GetDefault, StorageInstance}, }; -use frame_metadata::DefaultByteGetter; -use frame_metadata2::v13::StorageEntryModifier; use sp_std::vec::Vec; /// A type that allow to store values for `(key1, key2)` couple. 
Similar to `StorageMap` but allow @@ -392,8 +391,8 @@ pub trait StorageDoubleMapMetadata { const MODIFIER: StorageEntryModifier; const NAME: &'static str; const DEFAULT: DefaultByteGetter; - const HASHER1: frame_metadata2::v13::StorageHasher; - const HASHER2: frame_metadata2::v13::StorageHasher; + const HASHER1: metadata::StorageHasher; + const HASHER2: metadata::StorageHasher; } impl StorageDoubleMapMetadata @@ -408,8 +407,8 @@ impl StorageDou OnEmpty: crate::traits::Get + 'static { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const HASHER1: frame_metadata2::v13::StorageHasher = Hasher1::METADATA; - const HASHER2: frame_metadata2::v13::StorageHasher = Hasher2::METADATA; + const HASHER1: metadata::StorageHasher = Hasher1::METADATA; + const HASHER2: metadata::StorageHasher = Hasher2::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; const DEFAULT: DefaultByteGetter = DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index 69160ffaca003..eaba4a05a0d6f 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -20,14 +20,13 @@ use codec::{FullCodec, Decode, EncodeLike, Encode}; use crate::{ + metadata::{self, StorageEntryModifier}, storage::{ StorageAppend, StorageDecodeLength, types::{OptionQuery, QueryKindTrait, OnEmptyGetter}, }, traits::{GetDefault, StorageInstance}, }; -use frame_metadata::{DefaultByteGetter}; -use frame_metadata2::v13::StorageEntryModifier; use sp_std::prelude::*; /// A type that allow to store value for given key. Allowing to insert/remove/iterate on values. 
@@ -296,7 +295,7 @@ pub trait StorageMapMetadata { const MODIFIER: StorageEntryModifier; const NAME: &'static str; const DEFAULT: DefaultByteGetter; - const HASHER: frame_metadata2::v13::StorageHasher; + const HASHER: metadata::StorageHasher; } impl StorageMapMetadata @@ -309,7 +308,7 @@ impl StorageMapMetadata OnEmpty: crate::traits::Get + 'static, { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const HASHER: frame_metadata2::v13::StorageHasher = Hasher::METADATA; + const HASHER: metadata::StorageHasher = Hasher::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; const DEFAULT: DefaultByteGetter = DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); diff --git a/frame/support/src/storage/types/mod.rs b/frame/support/src/storage/types/mod.rs index 6cf58a9c0e26d..7ba805262fcb6 100644 --- a/frame/support/src/storage/types/mod.rs +++ b/frame/support/src/storage/types/mod.rs @@ -19,8 +19,7 @@ //! StorageMap and others. use codec::FullCodec; -use frame_metadata::{DefaultByte}; -use frame_metadata2::v13::StorageEntryModifier; +use frame_metadata::v13::StorageEntryModifier; mod value; mod map; diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index 4313714bf3981..02d2b7cd20f75 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -25,8 +25,7 @@ use crate::{ }, traits::{GetDefault, StorageInstance}, }; -use frame_metadata::{DefaultByteGetter}; -use frame_metadata2::v13::StorageEntryModifier; +use frame_metadata::v13::StorageEntryModifier; /// A type that allow to store a value. 
/// diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 38e8e87d82805..98a31f2c068c8 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -25,7 +25,6 @@ sp-std = { version = "3.0.0", default-features = false, path = "../../../primiti trybuild = "1.0.38" pretty_assertions = "0.6.1" rustversion = "1.0.0" -frame-metadata = { version = "13.0.0", default-features = false, path = "../../metadata" } frame-system = { version = "3.0.0", default-features = false, path = "../../system" } [features] From b792fd295fd00644860d7f8c3c886b3408a478d1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Apr 2021 17:25:04 +0100 Subject: [PATCH 148/503] Add ErrorMetadata import --- frame/support/src/error.rs | 9 ++++++--- frame/support/src/metadata.rs | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index db2f4576e5f3f..30a4d0bb21c4a 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -20,6 +20,9 @@ #[doc(hidden)] pub use sp_runtime::traits::{LookupError, BadOrigin}; +use crate::metadata::ErrorMetadata; +use sp_std::prelude::Vec; + /// Declare an error type for a runtime module. /// /// `decl_error!` supports only variants that do not hold any data. The dispatchable @@ -211,11 +214,11 @@ macro_rules! decl_error { /// All the metadata about errors in a module. 
pub trait ModuleErrorMetadata { - fn metadata() -> sp_std::prelude::Vec; + fn metadata() -> Vec; } impl ModuleErrorMetadata for &'static str { - fn metadata() -> sp_std::prelude::Vec { - sp_std::prelude::Vec::new() + fn metadata() -> Vec { + Vec::new() } } diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 203bfacfe8ac4..bc47b2a291054 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -17,7 +17,7 @@ pub use frame_metadata::v13::{ ModuleMetadata, RuntimeMetadataLastVersion, StorageEntryMetadata, StorageMetadata, - StorageEntryType, StorageEntryModifier, StorageHasher, ExtrinsicMetadata, + StorageEntryType, StorageEntryModifier, StorageHasher, ExtrinsicMetadata, ErrorMetadata, }; /// todo: [AJ] update docs From 5b65cbf7462f5a65dc233e31032ef9f5bb59f52c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 15 Apr 2021 11:56:43 +0100 Subject: [PATCH 149/503] Unify storage metadata traits --- .../procedural/src/pallet/expand/storage.rs | 52 ++----------------- frame/support/src/metadata.rs | 1 + frame/support/src/storage/mod.rs | 1 + frame/support/src/storage/types/double_map.rs | 42 +++++++-------- frame/support/src/storage/types/map.rs | 36 ++++++------- frame/support/src/storage/types/mod.rs | 27 +++++----- frame/support/src/storage/types/value.rs | 25 +++++---- 7 files changed, 70 insertions(+), 114 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index b20fa30a5335c..f9795fe9095ce 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -79,56 +79,12 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { let cfg_attrs = &storage.cfg_attrs; - let metadata_trait = match &storage.metadata { - Metadata::Value { .. 
} => quote::quote_spanned!(storage.attr_span => - #frame_support::storage::types::StorageValueMetadata - ), - Metadata::Map { .. } => quote::quote_spanned!(storage.attr_span => - #frame_support::storage::types::StorageMapMetadata - ), - Metadata::DoubleMap { .. } => quote::quote_spanned!(storage.attr_span => - #frame_support::storage::types::StorageDoubleMapMetadata - ), - }; - - let ty = match &storage.metadata { - Metadata::Value { value } => { - quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::Plain( - #frame_support::scale_info::meta_type::<#value>() - ) - ) - }, - Metadata::Map { key, value } => { - quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::Map { - hasher: <#full_ident as #metadata_trait>::HASHER, - key: #frame_support::scale_info::meta_type::<#key>(), - value: #frame_support::scale_info::meta_type::<#value>(), - unused: false, - } - ) - }, - Metadata::DoubleMap { key1, key2, value } => { - quote::quote_spanned!(storage.attr_span => - #frame_support::metadata::StorageEntryType::DoubleMap { - hasher: <#full_ident as #metadata_trait>::HASHER1, - key2_hasher: <#full_ident as #metadata_trait>::HASHER2, - key1: #frame_support::scale_info::meta_type::<#key1>(), - key2: #frame_support::scale_info::meta_type::<#key2>(), - value: #frame_support::scale_info::meta_type::<#value>(), - } - ) - } - }; - quote::quote_spanned!(storage.attr_span => #(#cfg_attrs)* #frame_support::metadata::StorageEntryMetadata { - name: <#full_ident as #metadata_trait>::NAME, - modifier: <#full_ident as #metadata_trait>::MODIFIER, - ty: #ty, - // todo: [AJ] do we need the ByteGetter stuff or is a Vec okay? 
- default: <#full_ident as #metadata_trait>::DEFAULT.0.default_byte(), + name: <#full_ident as #frame_support::storage::StorageEntryMetadata>::NAME, + modifier: <#full_ident as #frame_support::storage::StorageEntryMetadata>::MODIFIER, + ty: <#full_ident as #frame_support::storage::StorageEntryMetadata>::ty(), + default: <#full_ident as #frame_support::storage::StorageEntryMetadata>::default(), documentation: #frame_support::scale_info::prelude::vec![ #( #docs, )* ], diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index bc47b2a291054..8d9f1620b36c6 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -18,6 +18,7 @@ pub use frame_metadata::v13::{ ModuleMetadata, RuntimeMetadataLastVersion, StorageEntryMetadata, StorageMetadata, StorageEntryType, StorageEntryModifier, StorageHasher, ExtrinsicMetadata, ErrorMetadata, + TypeSpec, }; /// todo: [AJ] update docs diff --git a/frame/support/src/storage/mod.rs b/frame/support/src/storage/mod.rs index e00a3fe831829..fbe5b9096b32f 100644 --- a/frame/support/src/storage/mod.rs +++ b/frame/support/src/storage/mod.rs @@ -23,6 +23,7 @@ use codec::{FullCodec, FullEncode, Encode, EncodeLike, Decode}; use crate::hash::{Twox128, StorageHasher, ReversibleStorageHasher}; use sp_runtime::generic::{Digest, DigestItem}; pub use sp_runtime::TransactionOutcome; +pub use self::types::StorageEntryMetadata; pub mod unhashed; pub mod hashed; diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index 7397e095a401c..fb8ba965f92bd 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -20,14 +20,15 @@ use codec::{FullCodec, Decode, EncodeLike, Encode}; use crate::{ - metadata::{self, StorageEntryModifier}, + metadata::StorageEntryModifier, storage::{ StorageAppend, StorageDecodeLength, - types::{OptionQuery, QueryKindTrait, OnEmptyGetter}, + types::{OptionQuery, StorageEntryMetadata, 
QueryKindTrait}, }, traits::{GetDefault, StorageInstance}, }; use sp_std::vec::Vec; +use crate::metadata::StorageEntryType; /// A type that allow to store values for `(key1, key2)` couple. Similar to `StorageMap` but allow /// to iterate and remove value associated to first key. @@ -384,34 +385,33 @@ where } } -/// Part of storage metadata for a storage double map. -/// -/// NOTE: Generic hashers is supported. -pub trait StorageDoubleMapMetadata { - const MODIFIER: StorageEntryModifier; - const NAME: &'static str; - const DEFAULT: DefaultByteGetter; - const HASHER1: metadata::StorageHasher; - const HASHER2: metadata::StorageHasher; -} - -impl StorageDoubleMapMetadata +impl StorageEntryMetadata for StorageDoubleMap where Prefix: StorageInstance, Hasher1: crate::hash::StorageHasher, Hasher2: crate::hash::StorageHasher, - Key1: FullCodec, - Key2: FullCodec, - Value: FullCodec, + Key1: FullCodec + scale_info::TypeInfo + 'static, + Key2: FullCodec + scale_info::TypeInfo + 'static, + Value: FullCodec + scale_info::TypeInfo + 'static, QueryKind: QueryKindTrait, OnEmpty: crate::traits::Get + 'static { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const HASHER1: metadata::StorageHasher = Hasher1::METADATA; - const HASHER2: metadata::StorageHasher = Hasher2::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; - const DEFAULT: DefaultByteGetter = - DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); + + fn ty() -> StorageEntryType { + StorageEntryType::DoubleMap { + hasher: Hasher1::METADATA, + key2_hasher: Hasher2::METADATA, + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + value: scale_info::meta_type::(), + } + } + + fn default() -> Vec { + OnEmpty::get().encode() + } } #[cfg(test)] diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index eaba4a05a0d6f..d9681e677287a 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -20,10 
+20,10 @@ use codec::{FullCodec, Decode, EncodeLike, Encode}; use crate::{ - metadata::{self, StorageEntryModifier}, + metadata::{StorageEntryModifier, StorageEntryType}, storage::{ StorageAppend, StorageDecodeLength, - types::{OptionQuery, QueryKindTrait, OnEmptyGetter}, + types::{OptionQuery, StorageEntryMetadata, QueryKindTrait}, }, traits::{GetDefault, StorageInstance}, }; @@ -288,30 +288,30 @@ where } } -/// Part of storage metadata for a storage map. -/// -/// NOTE: Generic hasher is supported. -pub trait StorageMapMetadata { - const MODIFIER: StorageEntryModifier; - const NAME: &'static str; - const DEFAULT: DefaultByteGetter; - const HASHER: metadata::StorageHasher; -} - -impl StorageMapMetadata +impl StorageEntryMetadata for StorageMap where Prefix: StorageInstance, Hasher: crate::hash::StorageHasher, - Key: FullCodec, - Value: FullCodec, + Key: FullCodec + scale_info::TypeInfo + 'static, + Value: FullCodec + scale_info::TypeInfo + 'static, QueryKind: QueryKindTrait, OnEmpty: crate::traits::Get + 'static, { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; - const HASHER: metadata::StorageHasher = Hasher::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; - const DEFAULT: DefaultByteGetter = - DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); + + fn ty() -> StorageEntryType { + StorageEntryType::Map { + hasher: Hasher::METADATA, + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + unused: false + } + } + + fn default() -> Vec { + OnEmpty::get().encode() + } } #[cfg(test)] diff --git a/frame/support/src/storage/types/mod.rs b/frame/support/src/storage/types/mod.rs index 7ba805262fcb6..51d06d0676480 100644 --- a/frame/support/src/storage/types/mod.rs +++ b/frame/support/src/storage/types/mod.rs @@ -18,16 +18,16 @@ //! Storage types to build abstraction on storage, they implements storage traits such as //! StorageMap and others. 
+use crate::metadata::{StorageEntryType, StorageEntryModifier}; use codec::FullCodec; -use frame_metadata::v13::StorageEntryModifier; mod value; mod map; mod double_map; -pub use value::{StorageValue, StorageValueMetadata}; -pub use map::{StorageMap, StorageMapMetadata}; -pub use double_map::{StorageDoubleMap, StorageDoubleMapMetadata}; +pub use value::StorageValue; +pub use map::StorageMap; +pub use double_map::StorageDoubleMap; /// Trait implementing how the storage optional value is converted into the queried type. /// @@ -95,14 +95,13 @@ where } } -/// A helper struct which implements DefaultByte using `Get` and encode it. -struct OnEmptyGetter(core::marker::PhantomData<(Value, OnEmpty)>); -impl> DefaultByte - for OnEmptyGetter -{ - fn default_byte(&self) -> sp_std::vec::Vec { - OnEmpty::get().encode() - } +/// Provide metadata for a storage entry. +/// +/// Implemented by each of the storage entry kinds: value, map and doublemap. +pub trait StorageEntryMetadata { + const MODIFIER: StorageEntryModifier; + const NAME: &'static str; + + fn ty() -> StorageEntryType; + fn default() -> Vec; } -unsafe impl > Send for OnEmptyGetter {} -unsafe impl > Sync for OnEmptyGetter {} diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index 02d2b7cd20f75..d52853a87a8ba 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -19,13 +19,13 @@ use codec::{FullCodec, Decode, EncodeLike, Encode}; use crate::{ + metadata::{StorageEntryModifier, StorageEntryType}, storage::{ StorageAppend, StorageDecodeLength, - types::{OptionQuery, QueryKindTrait, OnEmptyGetter}, + types::{OptionQuery, StorageEntryMetadata, QueryKindTrait}, }, traits::{GetDefault, StorageInstance}, }; -use frame_metadata::v13::StorageEntryModifier; /// A type that allow to store a value. /// @@ -171,24 +171,23 @@ where } } -/// Part of storage metadata for storage value. 
-pub trait StorageValueMetadata { - const MODIFIER: StorageEntryModifier; - const NAME: &'static str; - const DEFAULT: DefaultByteGetter; -} - -impl StorageValueMetadata +impl StorageEntryMetadata for StorageValue where Prefix: StorageInstance, - Value: FullCodec, + Value: FullCodec + scale_info::TypeInfo + 'static, QueryKind: QueryKindTrait, OnEmpty: crate::traits::Get + 'static, { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; - const DEFAULT: DefaultByteGetter = - DefaultByteGetter(&OnEmptyGetter::(core::marker::PhantomData)); + + fn ty() -> StorageEntryType { + StorageEntryType::Plain(scale_info::meta_type::()) + } + + fn default() -> Vec { + OnEmpty::get().encode() + } } #[cfg(test)] From 40b3c730d7d0bec62af0c652a3d92fcbad845c69 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 15 Apr 2021 13:01:38 +0100 Subject: [PATCH 150/503] Add missing imports --- frame/support/src/metadata.rs | 2 +- frame/support/src/storage/types/value.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 8d9f1620b36c6..6bcdc5ba5f11a 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -18,7 +18,7 @@ pub use frame_metadata::v13::{ ModuleMetadata, RuntimeMetadataLastVersion, StorageEntryMetadata, StorageMetadata, StorageEntryType, StorageEntryModifier, StorageHasher, ExtrinsicMetadata, ErrorMetadata, - TypeSpec, + EventMetadata, TypeSpec, FunctionMetadata, FunctionArgumentMetadata }; /// todo: [AJ] update docs diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index d52853a87a8ba..e9a5b60d96dec 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -26,6 +26,7 @@ use crate::{ }, traits::{GetDefault, StorageInstance}, }; +use sp_std::prelude::*; /// A type that allow to store a value. 
/// From 697aaf3921c1df0474059bdccc3a2db0646ecb94 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 15 Apr 2021 13:07:20 +0100 Subject: [PATCH 151/503] Fix up constants following removal of DefaultByte trait --- .../procedural/src/pallet/expand/constants.rs | 21 +++++++------------ 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index f297068255b6a..5b71b75e32f5e 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -88,8 +88,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::sp_std::marker::PhantomData<(#type_use_gen)> ); - impl<#type_impl_gen> #frame_support::dispatch::DefaultByte for - #default_byte_getter<#type_use_gen> + impl<#type_impl_gen> #default_byte_getter<#type_use_gen> #completed_where_clause { fn default_byte(&self) -> #frame_support::sp_std::vec::Vec { @@ -97,21 +96,17 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { } } - unsafe impl<#type_impl_gen> Send for #default_byte_getter<#type_use_gen> - #completed_where_clause - {} - unsafe impl<#type_impl_gen> Sync for #default_byte_getter<#type_use_gen> - #completed_where_clause - {} + // unsafe impl<#type_impl_gen> Send for #default_byte_getter<#type_use_gen> + // #completed_where_clause + // {} + // unsafe impl<#type_impl_gen> Sync for #default_byte_getter<#type_use_gen> + // #completed_where_clause + // {} #frame_support::dispatch::ModuleConstantMetadata { name: #ident_str, ty: #frame_support::scale_info::meta_type::<#const_type>(), - value: #frame_support::dispatch::DefaultByteGetter( - &#default_byte_getter::<#type_use_gen>( - #frame_support::sp_std::marker::PhantomData - ) - ).0.default_byte(), // todo: [AJ] unify DefaultByteGetter + value: #default_byte_getter::<#type_use_gen>(Default::default()).default_byte(), documentation: 
#frame_support::scale_info::prelude::vec![ #( #doc ),* ], } }) From 37787468f6908140e248da71a97141529ac93561 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 15 Apr 2021 13:16:09 +0100 Subject: [PATCH 152/503] Move some dispatch type reexports to metadata --- .../procedural/src/pallet/expand/constants.rs | 4 ++-- .../procedural/src/storage/metadata.rs | 22 +++++++++---------- frame/support/src/dispatch.rs | 7 ++---- frame/support/src/metadata.rs | 2 +- 4 files changed, 16 insertions(+), 19 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index 5b71b75e32f5e..126201cc10316 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -103,7 +103,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { // #completed_where_clause // {} - #frame_support::dispatch::ModuleConstantMetadata { + #frame_support::metadata::ModuleConstantMetadata { name: #ident_str, ty: #frame_support::scale_info::meta_type::<#const_type>(), value: #default_byte_getter::<#type_use_gen>(Default::default()).default_byte(), @@ -117,7 +117,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { #[doc(hidden)] pub fn module_constants_metadata() - -> #frame_support::sp_std::vec::Vec<#frame_support::dispatch::ModuleConstantMetadata> + -> #frame_support::sp_std::vec::Vec<#frame_support::metadata::ModuleConstantMetadata> { #frame_support::scale_info::prelude::vec![ #( #consts ),* ] } diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index 7df29b051bc23..4d2f5a53341d4 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -26,7 +26,7 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> match &line.storage_type { StorageLineTypeDef::Simple(_) => { 
quote!{ - #scrate::metadata::v13::StorageEntryType::Plain( + #scrate::metadata::StorageEntryType::Plain( #scrate::scale_info::meta_type::<#value_type>() ) } @@ -35,8 +35,8 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> let hasher = map.hasher.into_metadata(); let key = &map.key; quote!{ - #scrate::metadata::v13::StorageEntryType::Map { - hasher: #scrate::metadata::v13::#hasher, + #scrate::metadata::StorageEntryType::Map { + hasher: #scrate::metadata::#hasher, key: #scrate::scale_info::meta_type::<#key>(), value: #scrate::scale_info::meta_type::<#value_type>(), unused: false, @@ -49,12 +49,12 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> let key1 = &map.key1; let key2 = &map.key2; quote!{ - #scrate::metadata::v13::StorageEntryType::DoubleMap { - hasher: #scrate::metadata::v13::#hasher1, + #scrate::metadata::StorageEntryType::DoubleMap { + hasher: #scrate::metadata::#hasher1, key1: #scrate::scale_info::meta_type::<#key1>(), key2: #scrate::scale_info::meta_type::<#key2>(), value: #scrate::scale_info::meta_type::<#value_type>(), - key2_hasher: #scrate::metadata::v13::#hasher2, + key2_hasher: #scrate::metadata::#hasher2, } } }, @@ -143,9 +143,9 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre let str_name = line.name.to_string(); let modifier = if line.is_option { - quote!(#scrate::metadata::v13::StorageEntryModifier::Optional) + quote!(#scrate::metadata::StorageEntryModifier::Optional) } else { - quote!(#scrate::metadata::v13::StorageEntryModifier::Default) + quote!(#scrate::metadata::StorageEntryModifier::Default) }; let ty = storage_line_metadata_type(scrate, line); @@ -166,7 +166,7 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre } let entry = quote! 
{ - #scrate::metadata::v13::StorageEntryMetadata { + #scrate::metadata::StorageEntryMetadata { name: #str_name, modifier: #modifier, ty: #ty, @@ -188,7 +188,7 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre }; let store_metadata = quote!( - #scrate::metadata::v13::StorageMetadata { + #scrate::metadata::StorageMetadata { prefix: #prefix, entries: #scrate::scale_info::prelude::vec![ #entries ], } @@ -203,7 +203,7 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre impl#module_impl #module_struct #where_clause { #[doc(hidden)] - pub fn storage_metadata() -> #scrate::metadata::v13::StorageMetadata { + pub fn storage_metadata() -> #scrate::metadata::StorageMetadata { #store_metadata } } diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 5fcc94940a8b0..948db6f0f7471 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -20,9 +20,6 @@ pub use crate::sp_std::{result, fmt, prelude::{Vec, Clone, Eq, PartialEq}, marker}; pub use crate::codec::{Codec, EncodeLike, Decode, Encode, Input, Output, HasCompact, EncodeAsRef}; -pub use frame_metadata::v13::{ - FunctionMetadata, FunctionArgumentMetadata, ModuleConstantMetadata, -}; pub use crate::weights::{ GetDispatchInfo, DispatchInfo, WeighData, ClassifyDispatch, TransactionPriority, Weight, PaysFee, PostDispatchInfo, WithPostDispatchInfo, @@ -2401,7 +2398,7 @@ macro_rules! __impl_module_constants_metadata { { #[doc(hidden)] #[allow(dead_code)] - pub fn module_constants_metadata() -> ::sp_std::vec::Vec<$crate::dispatch::ModuleConstantMetadata> { + pub fn module_constants_metadata() -> ::sp_std::vec::Vec<$crate::metadata::ModuleConstantMetadata> { // Create the `ByteGetter`s $( #[allow(non_upper_case_types)] @@ -2434,7 +2431,7 @@ macro_rules! 
__impl_module_constants_metadata { )* $crate::scale_info::prelude::vec![ $( - $crate::dispatch::ModuleConstantMetadata { + $crate::metadata::ModuleConstantMetadata { name: stringify!($name), ty: $crate::scale_info::meta_type::<$type>(), value: $crate::dispatch::DefaultByteGetter( diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 6bcdc5ba5f11a..0b5389f5082bb 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -18,7 +18,7 @@ pub use frame_metadata::v13::{ ModuleMetadata, RuntimeMetadataLastVersion, StorageEntryMetadata, StorageMetadata, StorageEntryType, StorageEntryModifier, StorageHasher, ExtrinsicMetadata, ErrorMetadata, - EventMetadata, TypeSpec, FunctionMetadata, FunctionArgumentMetadata + EventMetadata, TypeSpec, FunctionMetadata, FunctionArgumentMetadata, ModuleConstantMetadata, }; /// todo: [AJ] update docs From 69beff270439b3801df30fb44968f0d9227fee46 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 15 Apr 2021 13:16:30 +0100 Subject: [PATCH 153/503] Don't need Send and Sync --- frame/support/procedural/src/pallet/expand/constants.rs | 7 ------- 1 file changed, 7 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index 126201cc10316..faec6ea58d846 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -96,13 +96,6 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { } } - // unsafe impl<#type_impl_gen> Send for #default_byte_getter<#type_use_gen> - // #completed_where_clause - // {} - // unsafe impl<#type_impl_gen> Sync for #default_byte_getter<#type_use_gen> - // #completed_where_clause - // {} - #frame_support::metadata::ModuleConstantMetadata { name: #ident_str, ty: #frame_support::scale_info::meta_type::<#const_type>(), From 195e4777fd3908e4947f29735232f6b469450a5a Mon Sep 17 00:00:00 2001 From: Andrew 
Jones Date: Thu, 15 Apr 2021 15:20:22 +0100 Subject: [PATCH 154/503] Fix up ErrorMetadata import --- frame/support/src/error.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index 30a4d0bb21c4a..7d8b1871edd48 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -165,10 +165,10 @@ macro_rules! decl_error { for $error<$generic $(, $inst_generic)?> $( where $( $where_ty: $where_bound ),* )? { - fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::error::ErrorMetadata> { + fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::ErrorMetadata> { $crate::scale_info::prelude::vec![ $( - $crate::error::ErrorMetadata { + $crate::metadata::ErrorMetadata { name: stringify!($name), documentation: $crate::scale_info::prelude::vec![ $( $doc_attr ),* From 317f3ef0122b9209739c9d3dbd397ec3645fa303 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 15 Apr 2021 15:25:04 +0100 Subject: [PATCH 155/503] Replace DefaultByteGetter for consts in legacy macro --- frame/support/src/dispatch.rs | 23 +++++------------------ 1 file changed, 5 insertions(+), 18 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 948db6f0f7471..807e493e5c6c7 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2151,7 +2151,7 @@ macro_rules! decl_module { impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $crate::error::ModuleErrorMetadata for $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* { - fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::error::ErrorMetadata> { + fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::ErrorMetadata> { <$error_type as $crate::error::ModuleErrorMetadata>::metadata() } } @@ -2412,35 +2412,22 @@ macro_rules! 
__impl_module_constants_metadata { >); impl<$const_trait_instance: 'static + $const_trait_name $( , $const_instance: $const_instantiable)? - > $crate::dispatch::DefaultByte - for $default_byte_name <$const_trait_instance $(, $const_instance)?> + > $default_byte_name <$const_trait_instance $(, $const_instance)?> { fn default_byte(&self) -> $crate::dispatch::Vec { let value: $type = $value; $crate::dispatch::Encode::encode(&value) } } - - unsafe impl<$const_trait_instance: 'static + $const_trait_name $( - , $const_instance: $const_instantiable)? - > Send for $default_byte_name <$const_trait_instance $(, $const_instance)?> {} - - unsafe impl<$const_trait_instance: 'static + $const_trait_name $( - , $const_instance: $const_instantiable)? - > Sync for $default_byte_name <$const_trait_instance $(, $const_instance)?> {} )* $crate::scale_info::prelude::vec![ $( $crate::metadata::ModuleConstantMetadata { name: stringify!($name), ty: $crate::scale_info::meta_type::<$type>(), - value: $crate::dispatch::DefaultByteGetter( - &$default_byte_name::< - $const_trait_instance $(, $const_instance)? 
- >( - $crate::dispatch::marker::PhantomData - ) - ).0.default_byte(), // todo: [AJ] unify DefaultByteGetter, + value: $default_byte_name::<$const_trait_instance $(, $const_instance)?>( + Default::default() + ).default_byte(), documentation: $crate::scale_info::prelude::vec![ $( $doc_attr ),* ], } ),* From d178883cbbcf7296003d49b9ffb59b1e29afb2be Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 15 Apr 2021 15:25:21 +0100 Subject: [PATCH 156/503] Add missing import --- frame/support/src/storage/types/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/support/src/storage/types/mod.rs b/frame/support/src/storage/types/mod.rs index 51d06d0676480..96a60d9507cfe 100644 --- a/frame/support/src/storage/types/mod.rs +++ b/frame/support/src/storage/types/mod.rs @@ -20,6 +20,7 @@ use crate::metadata::{StorageEntryType, StorageEntryModifier}; use codec::FullCodec; +use sp_std::prelude::*; mod value; mod map; From 15461b59f36c5760332a6c591ec20d99d7b7c189 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 15 Apr 2021 16:13:51 +0100 Subject: [PATCH 157/503] Fix up DefaultByte in legacy macro --- frame/support/procedural/src/storage/metadata.rs | 14 +++----------- frame/support/test/tests/instance.rs | 2 +- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index 4d2f5a53341d4..495cf57cb9cfd 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -96,8 +96,7 @@ fn default_byte_getter( #[cfg(feature = "std")] impl<#runtime_generic: #runtime_trait, #optional_instance_bound> - #scrate::metadata::DefaultByte - for #struct_name<#runtime_generic, #optional_instance> + #struct_name<#runtime_generic, #optional_instance> #where_clause { fn default_byte(&self) -> #scrate::sp_std::vec::Vec { @@ -109,16 +108,9 @@ fn default_byte_getter( } } - unsafe impl<#runtime_generic: #runtime_trait, 
#optional_instance_bound> Send - for #struct_name<#runtime_generic, #optional_instance> #where_clause {} - - unsafe impl<#runtime_generic: #runtime_trait, #optional_instance_bound> Sync - for #struct_name<#runtime_generic, #optional_instance> #where_clause {} - #[cfg(not(feature = "std"))] impl<#runtime_generic: #runtime_trait, #optional_instance_bound> - #scrate::metadata::DefaultByte - for #struct_name<#runtime_generic, #optional_instance> + #struct_name<#runtime_generic, #optional_instance> #where_clause { fn default_byte(&self) -> #scrate::sp_std::vec::Vec { @@ -170,7 +162,7 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre name: #str_name, modifier: #modifier, ty: #ty, - default: #scrate::metadata::DefaultByteGetter(&#default_byte_getter_struct_instance).0.default_byte(), + default: #default_byte_getter_struct_instance.default_byte(), documentation: #scrate::scale_info::prelude::vec![ #docs ], }, }; diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index dbffead8ad2b0..b8cfe85c61695 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -22,7 +22,7 @@ use sp_runtime::{generic, BuildStorage, traits::{BlakeTwo256, Verify}}; use frame_support::{ Parameter, traits::Get, parameter_types, metadata::{ - DecodeDifferent, StorageMetadata, StorageEntryModifier, StorageEntryType, DefaultByteGetter, + StorageMetadata, StorageEntryModifier, StorageEntryType, StorageEntryMetadata, StorageHasher, }, StorageValue, StorageMap, StorageDoubleMap, From 6e59cc7809c3a5dadef68efeb09ef03c3e0167e5 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 15 Apr 2021 16:14:17 +0100 Subject: [PATCH 158/503] Fix up frame_metadata reexports --- frame/support/src/metadata.rs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 0b5389f5082bb..9f3334babe83a 100644 --- 
a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -15,10 +15,15 @@ // See the License for the specific language governing permissions and // limitations under the License. -pub use frame_metadata::v13::{ - ModuleMetadata, RuntimeMetadataLastVersion, StorageEntryMetadata, StorageMetadata, - StorageEntryType, StorageEntryModifier, StorageHasher, ExtrinsicMetadata, ErrorMetadata, - EventMetadata, TypeSpec, FunctionMetadata, FunctionArgumentMetadata, ModuleConstantMetadata, +pub use frame_metadata::{ + v13::{ + ErrorMetadata, EventMetadata, ExtrinsicMetadata, FunctionArgumentMetadata, + FunctionMetadata, ModuleConstantMetadata, ModuleEventMetadata, ModuleMetadata, + OuterEventMetadata, RuntimeMetadataLastVersion, SignedExtensionMetadata, + StorageEntryMetadata, StorageEntryModifier, StorageEntryType, StorageHasher, + StorageMetadata, TypeSpec, + }, + RuntimeMetadataPrefixed, }; /// todo: [AJ] update docs @@ -73,7 +78,7 @@ macro_rules! impl_runtime_metadata { $( $rest:tt )* ) => { impl $runtime { - pub fn metadata() -> $crate::metadata::frame_metadata2::RuntimeMetadataPrefixed { + pub fn metadata() -> $crate::metadata::RuntimeMetadataPrefixed { $crate::metadata::RuntimeMetadataLastVersion::new( $crate::__runtime_modules_to_metadata!($runtime;; $( $rest )*), $crate::metadata::ExtrinsicMetadata { From 224490716d0d92ff5664372c6d3cf44710167db6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 19 Apr 2021 12:02:40 +0100 Subject: [PATCH 159/503] Fix up tests --- Cargo.lock | 5 +- frame/support/Cargo.toml | 1 + frame/support/src/dispatch.rs | 77 ++++++++-------- frame/support/src/event.rs | 88 ++++++++++--------- frame/support/src/lib.rs | 6 +- frame/support/src/storage/types/double_map.rs | 35 ++++---- frame/support/src/storage/types/map.rs | 14 +-- frame/support/src/storage/types/value.rs | 10 ++- frame/support/test/tests/construct_runtime.rs | 2 +- 9 files changed, 125 insertions(+), 113 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 
2e3b5dc6b75be..ec5fdd9dc8152 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1803,6 +1803,7 @@ dependencies = [ name = "frame-support" version = "3.0.0" dependencies = [ + "assert_matches", "bitflags", "frame-metadata", "frame-support-procedural", @@ -8161,7 +8162,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#e183a586a9f5b0c7360b8a40d06c8ed437222269" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#91633dd5f52fd7770299f3f551cb5b38f8fd0b32" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -8173,7 +8174,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#e183a586a9f5b0c7360b8a40d06c8ed437222269" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#91633dd5f52fd7770299f3f551cb5b38f8fd0b32" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index de9d379c98936..d021d0f382a36 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -35,6 +35,7 @@ smallvec = "1.4.1" log = { version = "0.4.14", default-features = false } [dev-dependencies] +assert_matches = "1.3.0" pretty_assertions = "0.6.1" frame-system = { version = "3.0.0", path = "../system" } parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 807e493e5c6c7..a21095b0e4d08 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2595,6 +2595,7 @@ mod tests { CallMetadata, GetCallMetadata, GetCallName, OnInitialize, OnFinalize, OnIdle, OnRuntimeUpgrade, IntegrityTest, Get, PalletInfo, }; + use crate::metadata::*; pub trait Config: system::Config + Sized where Self::AccountId: From { } @@ -2669,69 +2670,69 @@ mod tests { const 
EXPECTED_METADATA: &'static [FunctionMetadata] = &[ FunctionMetadata { - name: DecodeDifferent::Encode("aux_0"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[ + name: "aux_0", + arguments: vec![], + documentation: vec![ " Hi, this is a comment." - ]) + ] }, FunctionMetadata { - name: DecodeDifferent::Encode("aux_1"), - arguments: DecodeDifferent::Encode(&[ + name: "aux_1", + arguments: vec![ FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: DecodeDifferent::Encode("Compact") + name: "_data", + ty: scale_info::meta_type::>(), } - ]), - documentation: DecodeDifferent::Encode(&[]), + ], + documentation: vec![], }, FunctionMetadata { - name: DecodeDifferent::Encode("aux_2"), - arguments: DecodeDifferent::Encode(&[ + name: "aux_2", + arguments: vec![ FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: DecodeDifferent::Encode("i32"), + name: "_data", + ty: "i32", }, FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data2"), - ty: DecodeDifferent::Encode("String"), + name: "_data2", + ty: "String", } - ]), - documentation: DecodeDifferent::Encode(&[]), + ], + documentation: vec![], }, FunctionMetadata { - name: DecodeDifferent::Encode("aux_3"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), + name: "aux_3", + arguments: vec![], + documentation: vec![], }, FunctionMetadata { - name: DecodeDifferent::Encode("aux_4"), - arguments: DecodeDifferent::Encode(&[ + name: "aux_4", + arguments: vec![ FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: DecodeDifferent::Encode("i32"), + name: "_data", + ty: "i32", } - ]), - documentation: DecodeDifferent::Encode(&[]), + ], + documentation: vec![], }, FunctionMetadata { - name: DecodeDifferent::Encode("aux_5"), - arguments: DecodeDifferent::Encode(&[ + name: "aux_5", + arguments: vec![ FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data"), - ty: 
DecodeDifferent::Encode("i32"), + name: "_data", + ty: "i32", }, FunctionArgumentMetadata { - name: DecodeDifferent::Encode("_data2"), - ty: DecodeDifferent::Encode("Compact") + name: "_data2", + ty: scale_info::meta_type::>() } - ]), - documentation: DecodeDifferent::Encode(&[]), + ], + documentation: vec![], }, FunctionMetadata { - name: DecodeDifferent::Encode("operational"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), + name: "operational", + arguments: vec![], + documentation: vec![], }, ]; diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 9d38edf41934c..cca266180ae50 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -553,6 +553,7 @@ macro_rules! __impl_outer_event_json_metadata { #[allow(dead_code)] mod tests { use super::*; + use crate::metadata::*; use serde::Serialize; use codec::{Encode, Decode}; @@ -758,59 +759,60 @@ mod tests { } const EXPECTED_METADATA: OuterEventMetadata = OuterEventMetadata { - name: DecodeDifferent::Encode("TestEvent"), - events: DecodeDifferent::Encode(&[ - ( - "system", - FnEncode(|| &[ + name: "TestEvent", + events: vec![ + ModuleEventMetadata { + name: "system", + events: vec![EventMetadata { + name: "SystemEvent", + arguments: vec![], + documentation: vec![], + }] + }, + ModuleEventMetadata { + name: "event_module", + events: vec![ EventMetadata { - name: DecodeDifferent::Encode("SystemEvent"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]), - } - ]) - ), - ( - "event_module", - FnEncode(|| &[ - EventMetadata { - name: DecodeDifferent::Encode("TestEvent"), - arguments: DecodeDifferent::Encode(&[ "Balance", "Origin" ]), - documentation: DecodeDifferent::Encode(&[ " Hi, I am a comment." ]) + name: "TestEvent", + arguments: vec![ + TypeSpec::new::("Balance"), + TypeSpec::new::("Origin"), + ], + documentation: vec![ " Hi, I am a comment." 
] }, EventMetadata { - name: DecodeDifferent::Encode("EventWithoutParams"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[ " Dog" ]), + name: "EventWithoutParams", + arguments: vec![], + documentation: vec![ " Dog" ], }, - ]) - ), - ( - "event_module2", - FnEncode(|| &[ + ] + }, + ModuleEventMetadata { + name: "event_module2", + events: vec![ EventMetadata { - name: DecodeDifferent::Encode("TestEvent"), - arguments: DecodeDifferent::Encode(&[ "BalanceRenamed" ]), - documentation: DecodeDifferent::Encode(&[]) + name: "TestEvent", + arguments: vec![TypeSpec::new::("BalanceRenamed")], + documentation: vec![] }, EventMetadata { - name: DecodeDifferent::Encode("TestOrigin"), - arguments: DecodeDifferent::Encode(&[ "OriginRenamed" ]), - documentation: DecodeDifferent::Encode(&[]), + name: "TestOrigin", + arguments: vec![TypeSpec::new::("OriginRenamed")], + documentation: vec![], }, - ]) - ), - ( - "event_module3", - FnEncode(|| &[ + ] + }, + ModuleEventMetadata { + name: "event_module3", + events: vec![ EventMetadata { - name: DecodeDifferent::Encode("HiEvent"), - arguments: DecodeDifferent::Encode(&[]), - documentation: DecodeDifferent::Encode(&[]) + name: "HiEvent", + arguments: vec![], + documentation: vec![] } - ]) - ) - ]) + ] + } + ] }; #[test] diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 7bbfc80fa5f37..97d141aa38336 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -754,9 +754,9 @@ pub use serde::{Serialize, Deserialize}; pub mod tests { use super::*; use codec::{Codec, EncodeLike}; - use frame_metadata::{ - DecodeDifferent, StorageEntryMetadata, StorageMetadata, StorageEntryType, - StorageEntryModifier, DefaultByteGetter, StorageHasher, + use crate::metadata::{ + StorageEntryMetadata, StorageMetadata, StorageEntryType, StorageEntryModifier, + StorageHasher, }; use sp_std::{marker::PhantomData, result}; use sp_io::TestExternalities; diff --git 
a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index fb8ba965f92bd..b4f10e5900dac 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -20,7 +20,7 @@ use codec::{FullCodec, Decode, EncodeLike, Encode}; use crate::{ - metadata::StorageEntryModifier, + metadata::{StorageEntryModifier, StorageEntryType}, storage::{ StorageAppend, StorageDecodeLength, types::{OptionQuery, StorageEntryMetadata, QueryKindTrait}, @@ -28,7 +28,6 @@ use crate::{ traits::{GetDefault, StorageInstance}, }; use sp_std::vec::Vec; -use crate::metadata::StorageEntryType; /// A type that allow to store values for `(key1, key2)` couple. Similar to `StorageMap` but allow /// to iterate and remove value associated to first key. @@ -417,10 +416,13 @@ impl StorageEnt #[cfg(test)] mod test { use super::*; + use assert_matches::assert_matches; use sp_io::{TestExternalities, hashing::twox_128}; use crate::hash::*; - use crate::storage::types::ValueQuery; - use frame_metadata::StorageEntryModifier; + use crate::{ + metadata::{StorageEntryModifier, StorageEntryType, StorageHasher}, + storage::types::ValueQuery, + }; struct Prefix; impl StorageInstance for Prefix { @@ -573,19 +575,20 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_eq!(A::HASHER1, frame_metadata::StorageHasher::Blake2_128Concat); - assert_eq!(A::HASHER2, frame_metadata::StorageHasher::Twox64Concat); - assert_eq!( - AValueQueryWithAnOnEmpty::HASHER1, - frame_metadata::StorageHasher::Blake2_128Concat - ); - assert_eq!( - AValueQueryWithAnOnEmpty::HASHER2, - frame_metadata::StorageHasher::Twox64Concat - ); + assert_matches!(A::ty(), StorageEntryType::DoubleMap { + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + .. 
+ }); + assert_matches!(AValueQueryWithAnOnEmpty::ty(), StorageEntryType::DoubleMap { + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + .. + }); + assert_eq!(A::NAME, "foo"); - assert_eq!(AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), 97u32.encode()); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(); assert_eq!(WithLen::decode_len(3, 30), None); diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index d9681e677287a..0b9e4fa3cef0a 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -319,8 +319,10 @@ mod test { use super::*; use sp_io::{TestExternalities, hashing::twox_128}; use crate::hash::*; - use crate::storage::types::ValueQuery; - use frame_metadata::StorageEntryModifier; + use crate::{ + metadata::{StorageEntryModifier, StorageHasher}, + storage::types::ValueQuery + }; struct Prefix; impl StorageInstance for Prefix { @@ -470,14 +472,14 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_eq!(A::HASHER, frame_metadata::StorageHasher::Blake2_128Concat); + assert_eq!(A::HASHER, StorageHasher::Blake2_128Concat); assert_eq!( AValueQueryWithAnOnEmpty::HASHER, - frame_metadata::StorageHasher::Blake2_128Concat + StorageHasher::Blake2_128Concat ); assert_eq!(A::NAME, "foo"); - assert_eq!(AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), 97u32.encode()); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(); assert_eq!(WithLen::decode_len(3), None); diff --git a/frame/support/src/storage/types/value.rs 
b/frame/support/src/storage/types/value.rs index e9a5b60d96dec..63c1bf594b7bd 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -195,8 +195,10 @@ impl StorageEntryMetadata mod test { use super::*; use sp_io::{TestExternalities, hashing::twox_128}; - use crate::storage::types::ValueQuery; - use frame_metadata::StorageEntryModifier; + use crate::{ + metadata::StorageEntryModifier, + storage::types::ValueQuery, + }; struct Prefix; impl StorageInstance for Prefix { @@ -268,8 +270,8 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); assert_eq!(A::NAME, "foo"); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); - assert_eq!(AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), 97u32.encode()); + assert_eq!(A::default(), Option::::None.encode()); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); WithLen::kill(); assert_eq!(WithLen::decode_len(), None); diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 64c656729719c..0d4cd45fb7739 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -511,7 +511,7 @@ fn call_codec() { // todo: [AJ] update test below with v13 metadata from above #[test] fn test_metadata() { - use frame_metadata::*; + use crate::metadata::*; use frame_support::scale_info::{form::MetaForm, IntoPortable, Registry}; // vnext modules defined with legacy macros have empty metadata let expected_metadata: vnext::RuntimeMetadataLastVersion = vnext::RuntimeMetadataLastVersion { From fec091f9bcc5e9da44c69c239cd2b85b53b56044 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 19 Apr 2021 15:26:41 +0100 Subject: [PATCH 160/503] Remove is_compact flag, fixing up more tests --- Cargo.lock | 2 +- .../procedural/src/pallet/expand/call.rs | 7 - 
frame/support/src/dispatch.rs | 13 +- frame/support/src/event.rs | 9 +- frame/support/src/lib.rs | 136 ++++++++---------- frame/support/test/tests/pallet.rs | 9 +- 6 files changed, 69 insertions(+), 107 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ec5fdd9dc8152..667a9144c9b46 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1791,7 +1791,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#eb8166882db7603b8b7eea1ac87061af0651d02f" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#4a3b60f513a865887a4ae663806e6b0ebedf5942" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index a90823edc4587..2c4de3ee1e337 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -56,12 +56,6 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { .collect::>() }); - let args_is_compact = def.call.methods.iter().map(|method| { - method.args.iter() - .map(|(is_compact, _, _)| is_compact) - .collect::>() - }); - let default_docs = [syn::parse_quote!( r"Contains one variant per dispatchable that can be called by an extrinsic." )]; @@ -187,7 +181,6 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::metadata::FunctionArgumentMetadata { name: stringify!(#args_name), ty: #frame_support::scale_info::meta_type::<#args_type>(), - is_compact: #args_is_compact, }, )* ], documentation: #frame_support::scale_info::prelude::vec![ #( #fn_doc ),* ], diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index a21095b0e4d08..40166ac11fe5f 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2513,24 +2513,12 @@ macro_rules! 
__function_to_metadata { $crate::metadata::FunctionArgumentMetadata { name: stringify!($param_name), ty: $crate::scale_info::meta_type::<$param>(), - is_compact: $crate::__function_to_metadata!(@has_compact_attr - $(#[$codec_attr])* $param_name - ) } ),* ], documentation: $crate::scale_info::prelude::vec![ $( $fn_doc ),* ], } }; - - (@has_compact_attr #[compact] $param_name:ident) => { true }; - (@has_compact_attr $param_name:ident) => { false }; - (@has_compact_attr $(#[codec_attr:ident])* $param_name:ident) => { - compile_error!(concat!( - "Invalid attribute for parameter `", stringify!($param_name), - "`, the following attributes are supported: `#[compact]`" - )); - } } #[macro_export] @@ -2596,6 +2584,7 @@ mod tests { IntegrityTest, Get, PalletInfo, }; use crate::metadata::*; + use codec::Compact; pub trait Config: system::Config + Sized where Self::AccountId: From { } diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index cca266180ae50..90d5468e52b7f 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -552,7 +552,6 @@ macro_rules! __impl_outer_event_json_metadata { #[cfg(test)] #[allow(dead_code)] mod tests { - use super::*; use crate::metadata::*; use serde::Serialize; use codec::{Encode, Decode}; @@ -775,8 +774,8 @@ mod tests { EventMetadata { name: "TestEvent", arguments: vec![ - TypeSpec::new::("Balance"), - TypeSpec::new::("Origin"), + TypeSpec::new::("Balance"), + TypeSpec::new::("Origin"), ], documentation: vec![ " Hi, I am a comment." 
] }, @@ -792,12 +791,12 @@ mod tests { events: vec![ EventMetadata { name: "TestEvent", - arguments: vec![TypeSpec::new::("BalanceRenamed")], + arguments: vec![TypeSpec::new::("BalanceRenamed")], documentation: vec![] }, EventMetadata { name: "TestOrigin", - arguments: vec![TypeSpec::new::("OriginRenamed")], + arguments: vec![TypeSpec::new::("OriginRenamed")], documentation: vec![], }, ] diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 97d141aa38336..6fcc44cc4ccb3 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -774,7 +774,7 @@ pub mod tests { } pub trait Config: 'static { - type BlockNumber: Codec + EncodeLike + Default; + type BlockNumber: Codec + EncodeLike + Default + scale_info::TypeInfo; type Origin; type PalletInfo: crate::traits::PalletInfo; type DbWeight: crate::traits::Get; @@ -1074,134 +1074,118 @@ pub mod tests { }); } - const EXPECTED_METADATA: StorageMetadata = StorageMetadata { - prefix: DecodeDifferent::Encode("Test"), - entries: DecodeDifferent::Encode( - &[ + fn expected_metadata() -> StorageMetadata { + StorageMetadata { + prefix: "Test", + entries: vec![ StorageEntryMetadata { - name: DecodeDifferent::Encode("Data"), + name: "Data", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map{ + ty: StorageEntryType::Map { hasher: StorageHasher::Twox64Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("u64"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructData(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("OptionLinkedMap"), + name: "OptionLinkedMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: 
DecodeDifferent::Encode("u32"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructOptionLinkedMap(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GenericData"), + name: "GenericData", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map{ + ty: StorageEntryType::Map { hasher: StorageHasher::Identity, - key: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("T::BlockNumber"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGenericData(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GenericData2"), + name: "GenericData2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map{ + ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("T::BlockNumber"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGenericData2(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("DataDM"), + name: "DataDM", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap{ + ty: StorageEntryType::DoubleMap { hasher: StorageHasher::Twox64Concat, - key1: DecodeDifferent::Encode("u32"), - key2: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("u64"), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + value: 
scale_info::meta_type::(), key2_hasher: StorageHasher::Blake2_128Concat, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructDataDM(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GenericDataDM"), + name: "GenericDataDM", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap{ + ty: StorageEntryType::DoubleMap { hasher: StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("T::BlockNumber"), - key2: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("T::BlockNumber"), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + value: scale_info::meta_type::(), key2_hasher: StorageHasher::Identity, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGenericDataDM(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GenericData2DM"), + name: "GenericData2DM", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap{ + ty: StorageEntryType::DoubleMap { hasher: StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("T::BlockNumber"), - key2: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("T::BlockNumber"), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + value: scale_info::meta_type::(), key2_hasher: StorageHasher::Twox64Concat, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGenericData2DM(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("AppendableDM"), + name: "AppendableDM", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap{ + ty: StorageEntryType::DoubleMap { hasher: 
StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("u32"), - key2: DecodeDifferent::Encode("T::BlockNumber"), - value: DecodeDifferent::Encode("Vec"), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + value: scale_info::meta_type::>(), key2_hasher: StorageHasher::Blake2_128Concat, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGenericData2DM(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, - ] - ), - }; + ], + } + } #[test] fn store_metadata() { let metadata = Module::::storage_metadata(); - pretty_assertions::assert_eq!(EXPECTED_METADATA, metadata); + pretty_assertions::assert_eq!(expected_metadata(), metadata); } parameter_types! { diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 8a0ea7b686c30..09aeef5330b7d 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -882,13 +882,11 @@ fn metadata_vnext() { arguments: vec![ vnext::FunctionArgumentMetadata { name: "_foo", - ty: meta_type::(), - is_compact: true, + ty: meta_type::>(), }, vnext::FunctionArgumentMetadata { name: "_bar", - ty: meta_type::(), - is_compact: false, + ty: meta_type::>(), }, ], documentation: vec![" Doc comment put in metadata"], @@ -898,8 +896,7 @@ fn metadata_vnext() { arguments: vec![ vnext::FunctionArgumentMetadata { name: "foo", - ty: meta_type::(), - is_compact: true, + ty: meta_type::>(), }, ], documentation: vec![" Doc comment put in metadata"], From 94dd3cac4bb4d8c5270d9c2bbe1939deaffc06c3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 19 Apr 2021 16:16:22 +0100 Subject: [PATCH 161/503] More test fixes --- Cargo.lock | 1 + frame/support/src/dispatch.rs | 11 ++++++----- frame/support/src/lib.rs | 2 +- frame/support/src/origin.rs | 8 ++++---- frame/support/src/storage/types/map.rs | 16 ++++++++++------ frame/system/benchmarking/Cargo.toml | 1 + 6 files changed, 23 
insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 667a9144c9b46..f0187e6b4b6d7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1908,6 +1908,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 40166ac11fe5f..11b29071e54b5 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2601,7 +2601,7 @@ mod tests { type DbWeight: Get; } - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] pub enum RawOrigin { Root, Signed(AccountId), @@ -2680,11 +2680,11 @@ mod tests { arguments: vec![ FunctionArgumentMetadata { name: "_data", - ty: "i32", + ty: scale_info::meta_type::(), }, FunctionArgumentMetadata { name: "_data2", - ty: "String", + ty: scale_info::meta_type::(), } ], documentation: vec![], @@ -2699,7 +2699,7 @@ mod tests { arguments: vec![ FunctionArgumentMetadata { name: "_data", - ty: "i32", + ty: scale_info::meta_type::(), } ], documentation: vec![], @@ -2709,7 +2709,7 @@ mod tests { arguments: vec![ FunctionArgumentMetadata { name: "_data", - ty: "i32", + ty: scale_info::meta_type::(), }, FunctionArgumentMetadata { name: "_data2", @@ -2725,6 +2725,7 @@ mod tests { }, ]; + #[derive(scale_info::TypeInfo)] pub struct TraitImpl {} impl Config for TraitImpl { } diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 6fcc44cc4ccb3..e53ad5c17206f 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -758,7 +758,7 @@ pub mod tests { StorageEntryMetadata, StorageMetadata, StorageEntryType, StorageEntryModifier, StorageHasher, }; - use sp_std::{marker::PhantomData, result}; + use sp_std::result; use sp_io::TestExternalities; /// A PalletInfo implementation which just panics. 
diff --git a/frame/support/src/origin.rs b/frame/support/src/origin.rs index a5a528288c788..db93e69a59bdb 100644 --- a/frame/support/src/origin.rs +++ b/frame/support/src/origin.rs @@ -434,7 +434,7 @@ mod tests { type BaseCallFilter; } - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] pub enum RawOrigin { Root, Signed(AccountId), @@ -456,20 +456,20 @@ mod tests { mod origin_without_generic { use super::*; - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] pub struct Origin; } mod origin_with_generic { use super::*; - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] pub struct Origin { t: T } } - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] pub struct TestRuntime; pub struct BaseCallFilter; diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index 0b9e4fa3cef0a..123ea30ddcb0c 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -317,10 +317,11 @@ impl StorageEntryMetadata #[cfg(test)] mod test { use super::*; + use assert_matches::assert_matches; use sp_io::{TestExternalities, hashing::twox_128}; use crate::hash::*; use crate::{ - metadata::{StorageEntryModifier, StorageHasher}, + metadata::{StorageEntryModifier, StorageHasher, StorageEntryType}, storage::types::ValueQuery }; @@ -472,11 +473,14 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_eq!(A::HASHER, StorageHasher::Blake2_128Concat); - assert_eq!( - AValueQueryWithAnOnEmpty::HASHER, - StorageHasher::Blake2_128Concat - ); + assert_matches!(A::ty(), StorageEntryType::Map { + 
hasher: StorageHasher::Blake2_128Concat, + .. + }); + assert_matches!(AValueQueryWithAnOnEmpty::ty(), StorageEntryType::Map { + hasher: StorageHasher::Blake2_128Concat, + .. + }); assert_eq!(A::NAME, "foo"); assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); assert_eq!(A::default(), Option::::None.encode()); diff --git a/frame/system/benchmarking/Cargo.toml b/frame/system/benchmarking/Cargo.toml index 1a9317c69bf43..a1c652178e2d2 100644 --- a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../../benchmarking" } From d16089dc770b3a8368c8001f1be29cba0da80f3f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 19 Apr 2021 17:28:31 +0100 Subject: [PATCH 162/503] Fix up more tests --- frame/balances/src/tests_composite.rs | 2 +- frame/support/src/dispatch.rs | 138 +++++++------- frame/support/test/tests/construct_runtime.rs | 178 +++++++++++------- 3 files changed, 180 insertions(+), 138 deletions(-) diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index ea7d5dc63c5db..4975ae6b8ec60 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -25,7 +25,7 @@ use sp_runtime::{ }; use sp_core::H256; use sp_io; -use frame_support::{impl_outer_origin, impl_outer_event, parameter_types}; +use frame_support::parameter_types; use frame_support::weights::{Weight, DispatchInfo, IdentityFee}; use 
pallet_transaction_payment::CurrencyAdapter; use crate::{ diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 11b29071e54b5..3565fe2bea821 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2657,73 +2657,75 @@ mod tests { } } - const EXPECTED_METADATA: &'static [FunctionMetadata] = &[ - FunctionMetadata { - name: "aux_0", - arguments: vec![], - documentation: vec![ - " Hi, this is a comment." - ] - }, - FunctionMetadata { - name: "aux_1", - arguments: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::>(), - } - ], - documentation: vec![], - }, - FunctionMetadata { - name: "aux_2", - arguments: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::(), - }, - FunctionArgumentMetadata { - name: "_data2", - ty: scale_info::meta_type::(), - } - ], - documentation: vec![], - }, - FunctionMetadata { - name: "aux_3", - arguments: vec![], - documentation: vec![], - }, - FunctionMetadata { - name: "aux_4", - arguments: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::(), - } - ], - documentation: vec![], - }, - FunctionMetadata { - name: "aux_5", - arguments: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::(), - }, - FunctionArgumentMetadata { - name: "_data2", - ty: scale_info::meta_type::>() - } - ], - documentation: vec![], - }, - FunctionMetadata { - name: "operational", - arguments: vec![], - documentation: vec![], - }, - ]; + fn expected_metadata() -> Vec { + vec![ + FunctionMetadata { + name: "aux_0", + arguments: vec![], + documentation: vec![ + " Hi, this is a comment." 
+ ] + }, + FunctionMetadata { + name: "aux_1", + arguments: vec![ + FunctionArgumentMetadata { + name: "_data", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![], + }, + FunctionMetadata { + name: "aux_2", + arguments: vec![ + FunctionArgumentMetadata { + name: "_data", + ty: scale_info::meta_type::(), + }, + FunctionArgumentMetadata { + name: "_data2", + ty: scale_info::meta_type::(), + } + ], + documentation: vec![], + }, + FunctionMetadata { + name: "aux_3", + arguments: vec![], + documentation: vec![], + }, + FunctionMetadata { + name: "aux_4", + arguments: vec![ + FunctionArgumentMetadata { + name: "_data", + ty: scale_info::meta_type::(), + } + ], + documentation: vec![], + }, + FunctionMetadata { + name: "aux_5", + arguments: vec![ + FunctionArgumentMetadata { + name: "_data", + ty: scale_info::meta_type::(), + }, + FunctionArgumentMetadata { + name: "_data2", + ty: scale_info::meta_type::>() + } + ], + documentation: vec![], + }, + FunctionMetadata { + name: "operational", + arguments: vec![], + documentation: vec![], + }, + ] + } #[derive(scale_info::TypeInfo)] pub struct TraitImpl {} @@ -2773,7 +2775,7 @@ mod tests { #[test] fn module_json_metadata() { let metadata = Module::::call_functions(); - assert_eq!(EXPECTED_METADATA, metadata); + assert_eq!(expected_metadata(), metadata); } #[test] diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 0d4cd45fb7739..61f7963418017 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -511,76 +511,116 @@ fn call_codec() { // todo: [AJ] update test below with v13 metadata from above #[test] fn test_metadata() { - use crate::metadata::*; - use frame_support::scale_info::{form::MetaForm, IntoPortable, Registry}; - // vnext modules defined with legacy macros have empty metadata - let expected_metadata: vnext::RuntimeMetadataLastVersion = vnext::RuntimeMetadataLastVersion { - modules: 
vec![ - vnext::ModuleMetadata { - name: "System", - calls: Some(vec![]), - event: Some(vec![]), - }, - vnext::ModuleMetadata { - name: "Module1_1", - calls: Some(vec![]), - event: Some(vec![]), - }, - vnext::ModuleMetadata { - name: "Module2", - calls: Some(vec![]), - event: Some(vec![]), - }, - vnext::ModuleMetadata { - name: "Module1_2", - calls: Some(vec![]), - event: Some(vec![]), - }, - vnext::ModuleMetadata { - name: "Module1_3", - calls: None, - event: None, - }, - vnext::ModuleMetadata { - name: "Module1_4", - calls: Some(vec![]), - event: None, - }, - vnext::ModuleMetadata { - name: "Module1_5", - calls: None, - event: Some(vec![]), - }, - vnext::ModuleMetadata { - name: "Module1_6", - calls: Some(vec![]), - event: Some(vec![]), - }, - vnext::ModuleMetadata { - name: "Module1_7", - calls: Some(vec![]), - event: Some(vec![]), - }, - vnext::ModuleMetadata { - name: "Module1_8", - calls: Some(vec![]), - event: Some(vec![]), - }, - vnext::ModuleMetadata { - name: "Module1_9", - calls: Some(vec![]), - event: Some(vec![]), - } - ], - // todo [AJ]: add back extrinsic - // extrinsic: ExtrinsicMetadata { - // version: 4, - // signed_extensions: vec![DecodeDifferent::Encode("UnitSignedExtension")], - // }, + use scale_info::{form::MetaForm, IntoPortable, Registry}; + use frame_support::metadata::*; + let modules = vec![ + ModuleMetadata { + name: "System", + storage: None, + calls: Some(vec![]), + event: Some(vec![]), + constants: vec![], + errors: vec![], + index: 0 + }, + ModuleMetadata { + name: "Module1_1", + storage: None, + calls: Some(vec![]), + event: Some(vec![]), + constants: vec![], + errors: vec![], + index: 1 + }, + ModuleMetadata { + name: "Module2", + storage: None, + calls: Some(vec![]), + event: Some(vec![]), + constants: vec![], + errors: vec![], + index: 2 + }, + ModuleMetadata { + name: "Module1_2", + storage: None, + calls: Some(vec![]), + event: Some(vec![]), + constants: vec![], + errors: vec![], + index: 3 + }, + ModuleMetadata {
name: "Module1_3", + storage: None, + calls: None, + event: None, + constants: vec![], + errors: vec![], + index: 4 + }, + ModuleMetadata { + name: "Module1_4", + storage: None, + calls: Some(vec![]), + event: None, + constants: vec![], + errors: vec![], + index: 5 + }, + ModuleMetadata { + name: "Module1_5", + storage: None, + calls: None, + event: Some(vec![]), + constants: vec![], + errors: vec![], + index: 6 + }, + ModuleMetadata { + name: "Module1_6", + storage: None, + calls: Some(vec![]), + event: Some(vec![]), + constants: vec![], + errors: vec![], + index: 7 + }, + ModuleMetadata { + name: "Module1_7", + storage: None, + calls: Some(vec![]), + event: Some(vec![]), + constants: vec![], + errors: vec![], + index: 8 + }, + ModuleMetadata { + name: "Module1_8", + storage: None, + calls: Some(vec![]), + event: Some(vec![]), + constants: vec![], + errors: vec![], + index: 9 + }, + ModuleMetadata { + name: "Module1_9", + storage: None, + calls: Some(vec![]), + event: Some(vec![]), + constants: vec![], + errors: vec![], + index: 10 + } + ]; + let extrinsic = ExtrinsicMetadata { + ty: scale_info::meta_type::(), + version: 0, + signed_extensions: vec![] }; - let mut registry = Registry::new(); - let expected_metadata = expected_metadata.into_portable(&mut registry); - pretty_assertions::assert_eq!(Runtime::metadata().1, vnext::RuntimeMetadata::V12(expected_metadata)); + let expected_metadata = RuntimeMetadataLastVersion::new(modules, extrinsic); + pretty_assertions::assert_eq!(Runtime::metadata().1, RuntimeMetadata::V13(expected_metadata)); } #[test] From 5053a70630ef4a0b6abd02c853122a7ce81631ea Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 19 Apr 2021 17:40:27 +0100 Subject: [PATCH 163/503] Event tests, add TypeInfo derive to Instance* structs --- frame/support/src/event.rs | 112 +++++++++++++++++---------------- frame/support/src/instances.rs | 32 +++++----- frame/support/test/src/lib.rs | 2 +- 3 files changed, 74 insertions(+), 72 deletions(-) diff --git 
a/frame/support/src/event.rs b/frame/support/src/event.rs index 90d5468e52b7f..329bd17254d83 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -757,66 +757,68 @@ mod tests { type DbWeight = (); } - const EXPECTED_METADATA: OuterEventMetadata = OuterEventMetadata { - name: "TestEvent", - events: vec![ - ModuleEventMetadata { - name: "system", - events: vec![EventMetadata { - name: "SystemEvent", - arguments: vec![], - documentation: vec![], - }] - }, - ModuleEventMetadata { - name: "event_module", - events: vec![ - EventMetadata { - name: "TestEvent", - arguments: vec![ - TypeSpec::new::("Balance"), - TypeSpec::new::("Origin"), - ], - documentation: vec![ " Hi, I am a comment." ] - }, - EventMetadata { - name: "EventWithoutParams", + fn expected_metadata() -> OuterEventMetadata { + OuterEventMetadata { + name: "TestEvent", + events: vec![ + ModuleEventMetadata { + name: "system", + events: vec![EventMetadata { + name: "SystemEvent", arguments: vec![], - documentation: vec![ " Dog" ], - }, - ] - }, - ModuleEventMetadata { - name: "event_module2", - events: vec![ - EventMetadata { - name: "TestEvent", - arguments: vec![TypeSpec::new::("BalanceRenamed")], - documentation: vec![] - }, - EventMetadata { - name: "TestOrigin", - arguments: vec![TypeSpec::new::("OriginRenamed")], documentation: vec![], - }, - ] - }, - ModuleEventMetadata { - name: "event_module3", - events: vec![ - EventMetadata { - name: "HiEvent", - arguments: vec![], - documentation: vec![] - } - ] - } - ] - }; + }] + }, + ModuleEventMetadata { + name: "event_module", + events: vec![ + EventMetadata { + name: "TestEvent", + arguments: vec![ + TypeSpec::new::("Balance"), + TypeSpec::new::("Origin"), + ], + documentation: vec![" Hi, I am a comment."] + }, + EventMetadata { + name: "EventWithoutParams", + arguments: vec![], + documentation: vec![" Dog"], + }, + ] + }, + ModuleEventMetadata { + name: "event_module2", + events: vec![ + EventMetadata { + name: "TestEvent", + 
arguments: vec![TypeSpec::new::("BalanceRenamed")], + documentation: vec![] + }, + EventMetadata { + name: "TestOrigin", + arguments: vec![TypeSpec::new::("OriginRenamed")], + documentation: vec![], + }, + ] + }, + ModuleEventMetadata { + name: "event_module3", + events: vec![ + EventMetadata { + name: "HiEvent", + arguments: vec![], + documentation: vec![] + } + ] + } + ] + } + } #[test] fn outer_event_metadata() { - assert_eq!(EXPECTED_METADATA, TestRuntime::outer_event_metadata()); + assert_eq!(expected_metadata(), TestRuntime::outer_event_metadata()); } #[test] diff --git a/frame/support/src/instances.rs b/frame/support/src/instances.rs index 9908d16076a08..192ea3ea36ec9 100644 --- a/frame/support/src/instances.rs +++ b/frame/support/src/instances.rs @@ -32,65 +32,65 @@ //! accessible to [`frame_support::construct_runtime`]. /// Instance1 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] // todo: [AJ] can we remove these TypeInfo derives? pub struct Instance1; /// Instance2 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance2; /// Instance3 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance3; /// Instance4 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance4; /// Instance5 to be used for instantiable pallet define with `pallet` macro. 
-#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance5; /// Instance6 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance6; /// Instance7 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance7; /// Instance8 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance8; /// Instance9 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance9; /// Instance10 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance10; /// Instance11 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance11; /// Instance12 to be used for instantiable pallet define with `pallet` macro. 
-#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance12; /// Instance13 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance13; /// Instance14 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance14; /// Instance15 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance15; /// Instance16 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] pub struct Instance16; diff --git a/frame/support/test/src/lib.rs b/frame/support/test/src/lib.rs index d40031c149d90..65272d82b9352 100644 --- a/frame/support/test/src/lib.rs +++ b/frame/support/test/src/lib.rs @@ -30,7 +30,7 @@ pub trait Config: 'static { /// The runtime origin type. type Origin: codec::Codec + codec::EncodeLike + Default; /// The block number type. - type BlockNumber: codec::Codec + codec::EncodeLike + Default; + type BlockNumber: codec::Codec + codec::EncodeLike + Default + scale_info::TypeInfo; /// The information about the pallet setup in the runtime. type PalletInfo: frame_support::traits::PalletInfo; /// The db weights. 
From dbb674bfad1311c58011cde498cef77529b7ccd7 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 20 Apr 2021 11:15:55 +0100 Subject: [PATCH 164/503] Fix up decl_storage tests --- frame/support/test/src/lib.rs | 2 +- frame/support/test/tests/decl_storage.rs | 303 ++++++++++------------- 2 files changed, 125 insertions(+), 180 deletions(-) diff --git a/frame/support/test/src/lib.rs b/frame/support/test/src/lib.rs index 65272d82b9352..142827ec19f6c 100644 --- a/frame/support/test/src/lib.rs +++ b/frame/support/test/src/lib.rs @@ -28,7 +28,7 @@ mod pallet_version; /// The configuration trait pub trait Config: 'static { /// The runtime origin type. - type Origin: codec::Codec + codec::EncodeLike + Default; + type Origin: codec::Codec + codec::EncodeLike + Default + scale_info::TypeInfo; /// The block number type. type BlockNumber: codec::Codec + codec::EncodeLike + Default + scale_info::TypeInfo; /// The information about the pallet setup in the runtime. diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index a2690b1379db5..d3141ff46893c 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -21,7 +21,6 @@ mod tests { use frame_support::metadata::*; use sp_io::TestExternalities; - use std::marker::PhantomData; frame_support::decl_module! 
{ pub struct Module for enum Call where origin: T::Origin, system=frame_support_test {} @@ -90,301 +89,247 @@ mod tests { impl Config for TraitImpl {} - const EXPECTED_METADATA: StorageMetadata = StorageMetadata { - prefix: DecodeDifferent::Encode("TestStorage"), - entries: DecodeDifferent::Encode( - &[ + fn expected_metadata() -> StorageMetadata { + StorageMetadata { + prefix: "TestStorage", + entries: vec![ StorageEntryMetadata { - name: DecodeDifferent::Encode("U32"), + name: "U32", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructU32(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[ " Hello, this is doc!" ]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![" Hello, this is doc!"], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBU32"), + name: "PUBU32", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBU32(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("U32MYDEF"), + name: "U32MYDEF", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructU32MYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBU32MYDEF"), + name: "PUBU32MYDEF", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: 
DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBU32MYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GETU32"), + name: "GETU32", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("T::Origin")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGETU32(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32"), + name: "PUBGETU32", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBGETU32(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GETU32WITHCONFIG"), + name: "GETU32WITHCONFIG", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGETU32WITHCONFIG(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32WITHCONFIG"), + name: "PUBGETU32WITHCONFIG", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBGETU32WITHCONFIG(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: 
StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GETU32MYDEF"), + name: "GETU32MYDEF", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGETU32MYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32MYDEF"), + name: "PUBGETU32MYDEF", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBGETU32MYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GETU32WITHCONFIGMYDEF"), + name: "GETU32WITHCONFIGMYDEF", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGETU32WITHCONFIGMYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32WITHCONFIGMYDEF"), + name: "PUBGETU32WITHCONFIGMYDEF", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBGETU32WITHCONFIGMYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, 
StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETU32WITHCONFIGMYDEFOPT"), + name: "PUBGETU32WITHCONFIGMYDEFOPT", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBGETU32WITHCONFIGMYDEFOPT(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GetU32WithBuilder"), + name: "GetU32WithBuilder", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGetU32WithBuilder(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GetOptU32WithBuilderSome"), + name: "GetOptU32WithBuilderSome", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGetOptU32WithBuilderSome(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GetOptU32WithBuilderNone"), + name: "GetOptU32WithBuilderNone", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGetOptU32WithBuilderNone(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: 
DecodeDifferent::Encode("MAPU32"), + name: "MAPU32", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructMAPU32(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBMAPU32"), + name: "PUBMAPU32", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBMAPU32(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("MAPU32MYDEF"), + name: "MAPU32MYDEF", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructMAPU32MYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBMAPU32MYDEF"), + name: "PUBMAPU32MYDEF", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), + key: 
scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBMAPU32MYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GETMAPU32"), + name: "GETMAPU32", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGETMAPU32(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETMAPU32"), + name: "PUBGETMAPU32", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBGETMAPU32(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("GETMAPU32MYDEF"), + name: "GETMAPU32MYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructGETMAPU32MYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: 
vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBGETMAPU32MYDEF"), + name: "PUBGETMAPU32MYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBGETMAPU32MYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("COMPLEXTYPE1"), + name: "COMPLEXTYPE1", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("::std::vec::Vec")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructCOMPLEXTYPE1(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::>()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("COMPLEXTYPE2"), + name: "COMPLEXTYPE2", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("(Vec)>>, u32)")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructCOMPLEXTYPE2(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::<(Vec)>>, u32)>()), + default: vec![], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("COMPLEXTYPE3"), + name: "COMPLEXTYPE3", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("[u32; 25]")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructCOMPLEXTYPE3(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::<[u32; 25]>()), + default: 
vec![], + documentation: vec![], }, ] - ), - }; + } + } #[test] fn store_metadata() { let metadata = Module::::storage_metadata(); - pretty_assertions::assert_eq!(EXPECTED_METADATA, metadata); + pretty_assertions::assert_eq!(expected_metadata(), metadata); } #[test] @@ -488,7 +433,7 @@ mod test_append_and_len { pub struct Module for enum Call where origin: T::Origin, system=frame_support_test {} } - #[derive(PartialEq, Eq, Clone, Encode, Decode)] + #[derive(PartialEq, Eq, Clone, Encode, Decode, scale_info::TypeInfo)] struct NoDef(u32); frame_support::decl_storage! { From 32a0b82c56348d03e631be260498ba98dc492c2f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 20 Apr 2021 12:11:10 +0100 Subject: [PATCH 165/503] More test fixes --- frame/balances/src/tests_composite.rs | 2 +- frame/scored-pool/src/lib.rs | 3 ++- frame/support/src/metadata.rs | 2 +- frame/support/test/tests/construct_runtime.rs | 4 ++-- frame/support/test/tests/pallet_compatibility.rs | 4 ++-- frame/support/test/tests/pallet_compatibility_instance.rs | 4 ++-- 6 files changed, 10 insertions(+), 9 deletions(-) diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index 4975ae6b8ec60..2896afa7e999e 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -92,7 +92,7 @@ impl Config for Test { type DustRemoval = (); type Event = Event; type ExistentialDeposit = ExistentialDeposit; - type AccountStore = system::Module; + type AccountStore = system::Pallet; type MaxLocks = (); type WeightInfo = (); } diff --git a/frame/scored-pool/src/lib.rs b/frame/scored-pool/src/lib.rs index da26872a0071a..24bc10e02f568 100644 --- a/frame/scored-pool/src/lib.rs +++ b/frame/scored-pool/src/lib.rs @@ -122,7 +122,8 @@ pub trait Config: frame_system::Config { /// The score attributed to a member or candidate. 
type Score: - AtLeast32Bit + Clone + Copy + Default + FullCodec + MaybeSerializeDeserialize + Debug; + AtLeast32Bit + Clone + Copy + Default + FullCodec + MaybeSerializeDeserialize + Debug + + scale_info::TypeInfo; /// The overarching event type. type Event: From> + Into<::Event>; diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 9f3334babe83a..92ff3a1a56565 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -23,7 +23,7 @@ pub use frame_metadata::{ StorageEntryMetadata, StorageEntryModifier, StorageEntryType, StorageHasher, StorageMetadata, TypeSpec, }, - RuntimeMetadataPrefixed, + RuntimeMetadata, RuntimeMetadataPrefixed, }; /// todo: [AJ] update docs diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 61f7963418017..5c1be674f9331 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -92,7 +92,7 @@ mod module2 { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, scale_info::TypeInfo)] pub struct Origin; frame_support::decl_event! 
{ @@ -513,7 +513,7 @@ fn call_codec() { fn test_metadata() { use scale_info::{form::MetaForm, IntoPortable, Registry}; use frame_support::metadata::*; - let modules = modules: vec![ + let modules = vec![ ModuleMetadata { name: "System", storage: None, diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index fdf9c0ceaa0a9..555dea3105479 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -267,8 +267,8 @@ mod test { fn metadata() { let metadata = Runtime::metadata(); let modules = match metadata.1 { - frame_metadata::RuntimeMetadata::V12(frame_metadata::RuntimeMetadataV12 { - modules: frame_metadata::DecodeDifferent::Encode(m), + frame_support::metadata::RuntimeMetadata::V12(frame_support::metadata::RuntimeMetadataV12 { + modules: m, .. }) => m, _ => unreachable!(), diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index 736f879f956b0..f3b89c7b2e5ba 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -282,8 +282,8 @@ mod test { fn metadata() { let metadata = Runtime::metadata(); let modules = match metadata.1 { - frame_metadata::RuntimeMetadata::V12(frame_metadata::RuntimeMetadataV12 { - modules: frame_metadata::DecodeDifferent::Encode(m), + frame_support::metadata::RuntimeMetadata::V13(frame_support::metadata::RuntimeMetadataV13 { + modules: m, .. 
}) => m, _ => unreachable!(), From bda85be1a69fc21578356eb27e566844f1f8865d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 20 Apr 2021 12:15:34 +0100 Subject: [PATCH 166/503] Fix up pallet tests --- frame/support/test/tests/pallet.rs | 33 ++++++++++--------- .../test/tests/pallet_compatibility.rs | 2 +- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 09aeef5330b7d..5d2ab52f79070 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -869,32 +869,33 @@ fn pallet_on_genesis() { // todo: [AJ] update this test #[test] fn metadata_vnext() { - use frame_metadata::*; use crate::scale_info::{meta_type, Registry, IntoPortable}; + use codec::Compact; + use frame_support::metadata::*; - let expected_pallet_metadata = vnext::ModuleMetadata { + let expected_pallet_metadata = ModuleMetadata { // index: 1, name: "Example", // storage: calls: Some(vec![ - vnext::FunctionMetadata { + FunctionMetadata { name: "foo", arguments: vec![ - vnext::FunctionArgumentMetadata { + FunctionArgumentMetadata { name: "_foo", ty: meta_type::>(), }, - vnext::FunctionArgumentMetadata { + FunctionArgumentMetadata { name: "_bar", ty: meta_type::>(), }, ], documentation: vec![" Doc comment put in metadata"], }, - vnext::FunctionMetadata { + FunctionMetadata { name: "foo_transactional", arguments: vec![ - vnext::FunctionArgumentMetadata { + FunctionArgumentMetadata { name: "foo", ty: meta_type::>(), }, @@ -903,31 +904,31 @@ fn metadata_vnext() { }, ]), event: Some(vec![ - vnext::EventMetadata { + EventMetadata { name: "Proposed", arguments: vec![ - vnext::TypeSpec::new::<::AccountId>("::AccountId"), + TypeSpec::new::<::AccountId>("::AccountId"), ], documentation: vec![" doc comment put in metadata"], }, - vnext::EventMetadata { + EventMetadata { name: "Spending", arguments: vec![ - vnext::TypeSpec::new::<::Balance>("Balance"), + TypeSpec::new::<::Balance>("Balance"), 
], documentation: vec![" doc"], }, - vnext::EventMetadata { + EventMetadata { name: "Something", arguments: vec![ - vnext::TypeSpec::new::("Other"), + TypeSpec::new::("Other"), ], documentation: vec![], }, - vnext::EventMetadata { + EventMetadata { name: "SomethingElse", arguments: vec![ - vnext::TypeSpec::new::<<::AccountId as SomeAssociation1>::_1>("::_1"), + TypeSpec::new::<<::AccountId as SomeAssociation1>::_1>("::_1"), ], documentation: vec![], }, @@ -937,7 +938,7 @@ fn metadata_vnext() { }; let metadata = match Runtime::metadata().1 { - vnext::RuntimeMetadata::V12(metadata) => metadata, + RuntimeMetadata::V12(metadata) => metadata, }; let mut registry = Registry::new(); diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 555dea3105479..e8c6573d5f8fd 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -267,7 +267,7 @@ mod test { fn metadata() { let metadata = Runtime::metadata(); let modules = match metadata.1 { - frame_support::metadata::RuntimeMetadata::V12(frame_support::metadata::RuntimeMetadataV12 { + frame_support::metadata::RuntimeMetadata::V12(frame_support::metadata::RuntimeMetadataLastVersion { modules: m, .. 
}) => m, From b8e235d71851a06552708634221acc4b5366ba9e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 20 Apr 2021 14:34:35 +0100 Subject: [PATCH 167/503] Fix more metadata tests --- frame/support/test/tests/construct_runtime.rs | 3 +- .../tests/pallet_compatibility_instance.rs | 2 +- frame/support/test/tests/pallet_instance.rs | 165 +++++++++--------- 3 files changed, 82 insertions(+), 88 deletions(-) diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 5c1be674f9331..742dc67552d3e 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -50,7 +50,7 @@ mod module1 { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, scale_info::TypeInfo)] pub struct Origin(pub core::marker::PhantomData::<(T, I)>); frame_support::decl_event! { @@ -511,7 +511,6 @@ fn call_codec() { // todo: [AJ] update test below with v13 metadata from above #[test] fn test_metadata() { - use scale_info::{form::MetaForm, IntoPortable, Registry}; use frame_support::metadata::*; let modules = vec![ ModuleMetadata { diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index f3b89c7b2e5ba..b929d6b6c65aa 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -282,7 +282,7 @@ mod test { fn metadata() { let metadata = Runtime::metadata(); let modules = match metadata.1 { - frame_support::metadata::RuntimeMetadata::V13(frame_support::metadata::RuntimeMetadataV13 { + frame_support::metadata::RuntimeMetadata::V13(frame_support::metadata::RuntimeMetadataLastVersion { modules: m, .. 
}) => m, diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 45c3a54bc4f27..f7d6e06b50f31 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -552,163 +552,158 @@ fn pallet_on_genesis() { #[test] fn metadata() { - use frame_metadata::*; + use frame_support::metadata::*; use codec::{Decode, Encode}; let expected_pallet_metadata = ModuleMetadata { index: 1, - name: DecodeDifferent::Decoded("Example".to_string()), - storage: Some(DecodeDifferent::Decoded(StorageMetadata { - prefix: DecodeDifferent::Decoded("Example".to_string()), - entries: DecodeDifferent::Decoded(vec![ + name: "Example", + storage: Some(StorageMetadata { + prefix: "Example", + entries: vec![ StorageEntryMetadata { - name: DecodeDifferent::Decoded("Value".to_string()), + name: "Value", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("Map".to_string()), + name: "Map", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u8".to_string()), - value: DecodeDifferent::Decoded("u16".to_string()), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), hasher: StorageHasher::Blake2_128Concat, unused: false, }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("Map2".to_string()), + name: "Map2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: DecodeDifferent::Decoded("u16".to_string()), - value: 
DecodeDifferent::Decoded("u32".to_string()), + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), hasher: StorageHasher::Twox64Concat, unused: false, }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("DoubleMap".to_string()), + name: "DoubleMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u32".to_string()), - key1: DecodeDifferent::Decoded("u8".to_string()), - key2: DecodeDifferent::Decoded("u16".to_string()), + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), hasher: StorageHasher::Blake2_128Concat, key2_hasher: StorageHasher::Twox64Concat, }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Decoded("DoubleMap2".to_string()), + name: "DoubleMap2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::DoubleMap { - value: DecodeDifferent::Decoded("u64".to_string()), - key1: DecodeDifferent::Decoded("u16".to_string()), - key2: DecodeDifferent::Decoded("u32".to_string()), + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), hasher: StorageHasher::Twox64Concat, key2_hasher: StorageHasher::Blake2_128Concat, }, - default: DecodeDifferent::Decoded(vec![0]), - documentation: DecodeDifferent::Decoded(vec![]), + default: vec![0], + documentation: vec![], }, - ]), - })), - calls: Some(DecodeDifferent::Decoded(vec![ + ], + }), + calls: Some(vec![ FunctionMetadata { - name: DecodeDifferent::Decoded("foo".to_string()), - arguments: DecodeDifferent::Decoded(vec![ + name: "foo", + arguments: vec![ FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("_foo".to_string()), - ty: 
DecodeDifferent::Decoded("Compact".to_string()), + name: "_foo", + ty: scale_info::meta_type::>(), } - ]), - documentation: DecodeDifferent::Decoded(vec![ + ], + documentation: vec![ " Doc comment put in metadata".to_string(), - ]), + ], }, FunctionMetadata { - name: DecodeDifferent::Decoded("foo_transactional".to_string()), - arguments: DecodeDifferent::Decoded(vec![ + name: "foo_transactional", + arguments: vec![ FunctionArgumentMetadata { - name: DecodeDifferent::Decoded("_foo".to_string()), - ty: DecodeDifferent::Decoded("Compact".to_string()), + name: "_foo", + ty: scale_info::meta_type::>(), } - ]), - documentation: DecodeDifferent::Decoded(vec![ + ], + documentation: vec![ " Doc comment put in metadata".to_string(), - ]), + ], }, - ])), - event: Some(DecodeDifferent::Decoded(vec![ + ]), + event: Some(vec![ EventMetadata { - name: DecodeDifferent::Decoded("Proposed".to_string()), - arguments: DecodeDifferent::Decoded(vec!["::AccountId".to_string()]), - documentation: DecodeDifferent::Decoded(vec![ - " doc comment put in metadata".to_string() - ]), + name: "Proposed", + arguments: vec![TypeSpec::new::("::AccountId")], + documentation: vec![ + " doc comment put in metadata" + ], }, EventMetadata { - name: DecodeDifferent::Decoded("Spending".to_string()), - arguments: DecodeDifferent::Decoded(vec!["Balance".to_string()]), - documentation: DecodeDifferent::Decoded(vec![ + name: "Spending", + arguments: vec![TypeSpec::new::("Balance")], + documentation: vec![ " doc".to_string() - ]), + ], }, EventMetadata { - name: DecodeDifferent::Decoded("Something".to_string()), - arguments: DecodeDifferent::Decoded(vec!["Other".to_string()]), - documentation: DecodeDifferent::Decoded(vec![]), + name: "Something", + arguments: vec![TypeSpec::new::("Other")], + documentation: vec![], }, - ])), - constants: DecodeDifferent::Decoded(vec![ + ]), + constants: vec![ ModuleConstantMetadata { - name: DecodeDifferent::Decoded("MyGetParam".to_string()), - ty: 
DecodeDifferent::Decoded("u32".to_string()), - value: DecodeDifferent::Decoded(vec![10, 0, 0, 0]), - documentation: DecodeDifferent::Decoded(vec![]), + name: "MyGetParam", + ty: scale_info::meta_type::(), + value: vec![10, 0, 0, 0], + documentation: vec![], }, - ]), - errors: DecodeDifferent::Decoded(vec![ + ], + errors: vec![ ErrorMetadata { - name: DecodeDifferent::Decoded("InsufficientProposersBalance".to_string()), - documentation: DecodeDifferent::Decoded(vec![ + name: "InsufficientProposersBalance", + documentation: vec![ " doc comment put into metadata".to_string(), - ]), + ], }, - ]), + ], }; let mut expected_pallet_instance1_metadata = expected_pallet_metadata.clone(); - expected_pallet_instance1_metadata.name = DecodeDifferent::Decoded("Instance1Example".to_string()); + expected_pallet_instance1_metadata.name = "Instance1Example"; expected_pallet_instance1_metadata.index = 2; match expected_pallet_instance1_metadata.storage { - Some(DecodeDifferent::Decoded(ref mut storage_meta)) => { - storage_meta.prefix = DecodeDifferent::Decoded("Instance1Example".to_string()); + Some(ref mut storage_meta) => { + storage_meta.prefix = "Instance1Example"; }, _ => unreachable!(), } let metadata = match Runtime::metadata().1 { - RuntimeMetadata::V12(metadata) => metadata, + RuntimeMetadata::V13(metadata) => metadata, _ => panic!("metadata has been bump, test needs to be updated"), }; - let modules_metadata = match metadata.modules { - DecodeDifferent::Encode(modules_metadata) => modules_metadata, - _ => unreachable!(), - }; - - let pallet_metadata = ModuleMetadata::decode(&mut &modules_metadata[1].encode()[..]).unwrap(); + let pallet_metadata = ModuleMetadata::decode(&mut &metadata.modules[1].encode()[..]).unwrap(); let pallet_instance1_metadata = - ModuleMetadata::decode(&mut &modules_metadata[2].encode()[..]).unwrap(); + ModuleMetadata::decode(&mut &metadata.modules[2].encode()[..]).unwrap(); pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); 
pretty_assertions::assert_eq!(pallet_instance1_metadata, expected_pallet_instance1_metadata); From 77265755d93b15cf159b6b2f5331b3f7aec85f25 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 20 Apr 2021 17:24:50 +0100 Subject: [PATCH 168/503] Fix more metadata tests --- frame/support/test/tests/instance.rs | 68 +-- frame/support/test/tests/issue2219.rs | 6 +- frame/support/test/tests/pallet.rs | 460 +++++++----------- .../test/tests/pallet_compatibility.rs | 2 +- frame/support/test/tests/pallet_instance.rs | 12 +- frame/support/test/tests/system.rs | 6 +- 6 files changed, 226 insertions(+), 328 deletions(-) diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index b8cfe85c61695..5db392b1e24c6 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -393,66 +393,50 @@ fn storage_with_instance_basic_operation() { }); } -const EXPECTED_METADATA: StorageMetadata = StorageMetadata { - prefix: DecodeDifferent::Encode("Instance2Module2"), - entries: DecodeDifferent::Encode( - &[ - StorageEntryMetadata { - name: DecodeDifferent::Encode("Value"), +fn expected_metadata() -> StorageMetadata { + StorageMetadata { + prefix: "Instance2Module2", + entries: vec![ + StorageEntryMetadata + { + name: "Value", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("T::Amount")), - default: DecodeDifferent::Encode( - DefaultByteGetter( - &module2::__GetByteStructValue( - std::marker::PhantomData::<(Runtime, module2::Instance2)> - ) - ) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("Map"), + name: "Map", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { hasher: StorageHasher::Identity, - key: DecodeDifferent::Encode("u64"), - value: DecodeDifferent::Encode("u64"), + key: 
scale_info::meta_type::(), + value: scale_info::meta_type::(), unused: false, }, - default: DecodeDifferent::Encode( - DefaultByteGetter( - &module2::__GetByteStructMap( - std::marker::PhantomData::<(Runtime, module2::Instance2)> - ) - ) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("DoubleMap"), + name: "DoubleMap", modifier: StorageEntryModifier::Default, ty: StorageEntryType::DoubleMap { hasher: StorageHasher::Identity, key2_hasher: StorageHasher::Identity, - key1: DecodeDifferent::Encode("u64"), - key2: DecodeDifferent::Encode("u64"), - value: DecodeDifferent::Encode("u64"), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + value: scale_info::meta_type::(), }, - default: DecodeDifferent::Encode( - DefaultByteGetter( - &module2::__GetByteStructDoubleMap( - std::marker::PhantomData::<(Runtime, module2::Instance2)> - ) - ) - ), - documentation: DecodeDifferent::Encode(&[]), - } + default: vec![], + documentation: vec![], + }, ] - ) -}; + } +} + #[test] fn test_instance_storage_metadata() { let metadata = Module2_2::storage_metadata(); - pretty_assertions::assert_eq!(EXPECTED_METADATA, metadata); + pretty_assertions::assert_eq!(expected_metadata(), metadata); } diff --git a/frame/support/test/tests/issue2219.rs b/frame/support/test/tests/issue2219.rs index 4525e8c1a1fe2..371376595e483 100644 --- a/frame/support/test/tests/issue2219.rs +++ b/frame/support/test/tests/issue2219.rs @@ -33,12 +33,12 @@ mod module { ); pub type Requests = Vec>; - #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug)] + #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug, scale_info::TypeInfo)] pub enum Role { Storage, } - #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug)] + #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug, scale_info::TypeInfo)] pub struct RoleParameters { // minimum actors to maintain - if role is unstaking 
// and remaining actors would be less that this value - prevent or punish for unstaking @@ -81,7 +81,7 @@ mod module { } } - pub trait Config: system::Config {} + pub trait Config: system::Config + scale_info::TypeInfo {} frame_support::decl_module! { pub struct Module for enum Call where origin: T::Origin, system=system {} diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 5d2ab52f79070..f6ecec9f69dc2 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -24,6 +24,7 @@ use frame_support::{ storage::unhashed, scale_info, }; +use scale_info::form::PortableForm; use sp_runtime::DispatchError; use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; @@ -624,325 +625,236 @@ fn pallet_on_genesis() { }) } -// #[test] -// fn metadata() { -// use frame_metadata::*; -// use codec::{Decode, Encode}; -// -// let expected_pallet_metadata = ModuleMetadata { -// index: 1, -// name: DecodeDifferent::Decoded("Example".to_string()), -// storage: Some(DecodeDifferent::Decoded(StorageMetadata { -// prefix: DecodeDifferent::Decoded("Example".to_string()), -// entries: DecodeDifferent::Decoded(vec![ -// StorageEntryMetadata { -// name: DecodeDifferent::Decoded("ValueWhereClause".to_string()), -// modifier: StorageEntryModifier::Optional, -// ty: StorageEntryType::Plain( -// DecodeDifferent::Decoded( -// "::_2".to_string() -// ), -// ), -// default: DecodeDifferent::Decoded(vec![0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// StorageEntryMetadata { -// name: DecodeDifferent::Decoded("Value".to_string()), -// modifier: StorageEntryModifier::Optional, -// ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), -// default: DecodeDifferent::Decoded(vec![0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// StorageEntryMetadata { -// name: DecodeDifferent::Decoded("Map".to_string()), -// modifier: StorageEntryModifier::Default, -// ty: 
StorageEntryType::Map { -// key: DecodeDifferent::Decoded("u8".to_string()), -// value: DecodeDifferent::Decoded("u16".to_string()), -// hasher: StorageHasher::Blake2_128Concat, -// unused: false, -// }, -// default: DecodeDifferent::Decoded(vec![4, 0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// StorageEntryMetadata { -// name: DecodeDifferent::Decoded("Map2".to_string()), -// modifier: StorageEntryModifier::Optional, -// ty: StorageEntryType::Map { -// key: DecodeDifferent::Decoded("u16".to_string()), -// value: DecodeDifferent::Decoded("u32".to_string()), -// hasher: StorageHasher::Twox64Concat, -// unused: false, -// }, -// default: DecodeDifferent::Decoded(vec![0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// StorageEntryMetadata { -// name: DecodeDifferent::Decoded("DoubleMap".to_string()), -// modifier: StorageEntryModifier::Optional, -// ty: StorageEntryType::DoubleMap { -// value: DecodeDifferent::Decoded("u32".to_string()), -// key1: DecodeDifferent::Decoded("u8".to_string()), -// key2: DecodeDifferent::Decoded("u16".to_string()), -// hasher: StorageHasher::Blake2_128Concat, -// key2_hasher: StorageHasher::Twox64Concat, -// }, -// default: DecodeDifferent::Decoded(vec![0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// StorageEntryMetadata { -// name: DecodeDifferent::Decoded("DoubleMap2".to_string()), -// modifier: StorageEntryModifier::Optional, -// ty: StorageEntryType::DoubleMap { -// value: DecodeDifferent::Decoded("u64".to_string()), -// key1: DecodeDifferent::Decoded("u16".to_string()), -// key2: DecodeDifferent::Decoded("u32".to_string()), -// hasher: StorageHasher::Twox64Concat, -// key2_hasher: StorageHasher::Blake2_128Concat, -// }, -// default: DecodeDifferent::Decoded(vec![0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// #[cfg(feature = "conditional-storage")] StorageEntryMetadata { -// name: DecodeDifferent::Decoded("ConditionalValue".to_string()), -// modifier: 
StorageEntryModifier::Optional, -// ty: StorageEntryType::Plain(DecodeDifferent::Decoded("u32".to_string())), -// default: DecodeDifferent::Decoded(vec![0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// #[cfg(feature = "conditional-storage")] StorageEntryMetadata { -// name: DecodeDifferent::Decoded("ConditionalMap".to_string()), -// modifier: StorageEntryModifier::Optional, -// ty: StorageEntryType::Map { -// key: DecodeDifferent::Decoded("u16".to_string()), -// value: DecodeDifferent::Decoded("u32".to_string()), -// hasher: StorageHasher::Twox64Concat, -// unused: false, -// }, -// default: DecodeDifferent::Decoded(vec![0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// #[cfg(feature = "conditional-storage")] StorageEntryMetadata { -// name: DecodeDifferent::Decoded("ConditionalDoubleMap".to_string()), -// modifier: StorageEntryModifier::Optional, -// ty: StorageEntryType::DoubleMap { -// value: DecodeDifferent::Decoded("u32".to_string()), -// key1: DecodeDifferent::Decoded("u8".to_string()), -// key2: DecodeDifferent::Decoded("u16".to_string()), -// hasher: StorageHasher::Blake2_128Concat, -// key2_hasher: StorageHasher::Twox64Concat, -// }, -// default: DecodeDifferent::Decoded(vec![0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// ]), -// })), -// calls: Some(DecodeDifferent::Decoded(vec![ -// FunctionMetadata { -// name: DecodeDifferent::Decoded("foo".to_string()), -// arguments: DecodeDifferent::Decoded(vec![ -// FunctionArgumentMetadata { -// name: DecodeDifferent::Decoded("_foo".to_string()), -// ty: DecodeDifferent::Decoded("Compact".to_string()), -// }, -// FunctionArgumentMetadata { -// name: DecodeDifferent::Decoded("_bar".to_string()), -// ty: DecodeDifferent::Decoded("u32".to_string()), -// } -// ]), -// documentation: DecodeDifferent::Decoded(vec![ -// " Doc comment put in metadata".to_string(), -// ]), -// }, -// FunctionMetadata { -// name: 
DecodeDifferent::Decoded("foo_transactional".to_string()), -// arguments: DecodeDifferent::Decoded(vec![ -// FunctionArgumentMetadata { -// name: DecodeDifferent::Decoded("foo".to_string()), -// ty: DecodeDifferent::Decoded("Compact".to_string()), -// } -// ]), -// documentation: DecodeDifferent::Decoded(vec![ -// " Doc comment put in metadata".to_string(), -// ]), -// }, -// FunctionMetadata { -// name: DecodeDifferent::Decoded("foo_no_post_info".to_string()), -// arguments: DecodeDifferent::Decoded(vec![]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// ])), -// event: Some(DecodeDifferent::Decoded(vec![ -// EventMetadata { -// name: DecodeDifferent::Decoded("Proposed".to_string()), -// arguments: DecodeDifferent::Decoded(vec!["::AccountId".to_string()]), -// documentation: DecodeDifferent::Decoded(vec![ -// " doc comment put in metadata".to_string() -// ]), -// }, -// EventMetadata { -// name: DecodeDifferent::Decoded("Spending".to_string()), -// arguments: DecodeDifferent::Decoded(vec!["Balance".to_string()]), -// documentation: DecodeDifferent::Decoded(vec![ -// " doc".to_string() -// ]), -// }, -// EventMetadata { -// name: DecodeDifferent::Decoded("Something".to_string()), -// arguments: DecodeDifferent::Decoded(vec!["Other".to_string()]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// EventMetadata { -// name: DecodeDifferent::Decoded("SomethingElse".to_string()), -// arguments: DecodeDifferent::Decoded(vec!["::_1".to_string()]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// ])), -// constants: DecodeDifferent::Decoded(vec![ -// ModuleConstantMetadata { -// name: DecodeDifferent::Decoded("MyGetParam".to_string()), -// ty: DecodeDifferent::Decoded("u32".to_string()), -// value: DecodeDifferent::Decoded(vec![10, 0, 0, 0]), -// documentation: DecodeDifferent::Decoded(vec![ -// " Some comment".to_string(), -// " Some comment".to_string(), -// ]), -// }, -// ModuleConstantMetadata { -// name: 
DecodeDifferent::Decoded("MyGetParam2".to_string()), -// ty: DecodeDifferent::Decoded("u32".to_string()), -// value: DecodeDifferent::Decoded(vec![11, 0, 0, 0]), -// documentation: DecodeDifferent::Decoded(vec![ -// " Some comment".to_string(), -// " Some comment".to_string(), -// ]), -// }, -// ModuleConstantMetadata { -// name: DecodeDifferent::Decoded("MyGetParam3".to_string()), -// ty: DecodeDifferent::Decoded("::_1".to_string()), -// value: DecodeDifferent::Decoded(vec![12, 0, 0, 0, 0, 0, 0, 0]), -// documentation: DecodeDifferent::Decoded(vec![]), -// }, -// ModuleConstantMetadata { -// name: DecodeDifferent::Decoded("some_extra".to_string()), -// ty: DecodeDifferent::Decoded("T::AccountId".to_string()), -// value: DecodeDifferent::Decoded(vec![100, 0, 0, 0, 0, 0, 0, 0]), -// documentation: DecodeDifferent::Decoded(vec![ -// " Some doc".to_string(), -// " Some doc".to_string(), -// ]), -// }, -// ModuleConstantMetadata { -// name: DecodeDifferent::Decoded("some_extra_extra".to_string()), -// ty: DecodeDifferent::Decoded("T::AccountId".to_string()), -// value: DecodeDifferent::Decoded(vec![0, 0, 0, 0, 0, 0, 0, 0]), -// documentation: DecodeDifferent::Decoded(vec![ -// " Some doc".to_string(), -// ]), -// }, -// ]), -// errors: DecodeDifferent::Decoded(vec![ -// ErrorMetadata { -// name: DecodeDifferent::Decoded("InsufficientProposersBalance".to_string()), -// documentation: DecodeDifferent::Decoded(vec![ -// " doc comment put into metadata".to_string(), -// ]), -// }, -// ]), -// }; -// -// let metadata = match Runtime::metadata().1 { -// RuntimeMetadata::V12(metadata) => metadata, -// _ => panic!("metadata has been bump, test needs to be updated"), -// }; -// -// let modules_metadata = match metadata.modules { -// DecodeDifferent::Encode(modules_metadata) => modules_metadata, -// _ => unreachable!(), -// }; -// -// let pallet_metadata = ModuleMetadata::decode(&mut &modules_metadata[1].encode()[..]).unwrap(); -// -// 
pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); -// } - -// todo: [AJ] update this test #[test] -fn metadata_vnext() { - use crate::scale_info::{meta_type, Registry, IntoPortable}; - use codec::Compact; +fn metadata() { use frame_support::metadata::*; + use codec::{Decode, Encode}; let expected_pallet_metadata = ModuleMetadata { - // index: 1, + index: 1, name: "Example", - // storage: + storage: Some(StorageMetadata { + prefix: "Example", + entries: vec![ + StorageEntryMetadata { + name: "ValueWhereClause", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "Value", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "Map", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hasher: StorageHasher::Blake2_128Concat, + unused: false, + }, + default: vec![4, 0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "Map2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hasher: StorageHasher::Twox64Concat, + unused: false, + }, + default: vec![0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::DoubleMap { + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + }, + default: vec![0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::DoubleMap { + value: 
scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + hasher: StorageHasher::Twox64Concat, + key2_hasher: StorageHasher::Blake2_128Concat, + }, + default: vec![0], + documentation: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalValue", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + documentation: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hasher: StorageHasher::Twox64Concat, + unused: false, + }, + default: vec![0], + documentation: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalDoubleMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::DoubleMap { + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + }, + default: vec![0], + documentation: vec![], + }, + ], + }), calls: Some(vec![ FunctionMetadata { name: "foo", arguments: vec![ FunctionArgumentMetadata { name: "_foo", - ty: meta_type::>(), + ty: scale_info::meta_type::>(), }, FunctionArgumentMetadata { name: "_bar", - ty: meta_type::>(), - }, + ty: scale_info::meta_type::(), + } + ], + documentation: vec![ + " Doc comment put in metadata", ], - documentation: vec![" Doc comment put in metadata"], }, FunctionMetadata { name: "foo_transactional", arguments: vec![ FunctionArgumentMetadata { name: "foo", - ty: meta_type::>(), - }, + ty: scale_info::meta_type::>(), + } ], - documentation: vec![" Doc comment put in metadata"], + documentation: vec![ + " Doc comment put in metadata", + ], + }, + FunctionMetadata { + name: 
"foo_no_post_info", + arguments: vec![], + documentation: vec![], }, ]), event: Some(vec![ EventMetadata { name: "Proposed", - arguments: vec![ - TypeSpec::new::<::AccountId>("::AccountId"), + arguments: vec![TypeSpec::new::("::AccountId")], + documentation: vec![ + " doc comment put in metadata" ], - documentation: vec![" doc comment put in metadata"], }, EventMetadata { name: "Spending", - arguments: vec![ - TypeSpec::new::<::Balance>("Balance"), + arguments: vec![TypeSpec::new::("Balance")], + documentation: vec![ + " doc" ], - documentation: vec![" doc"], }, EventMetadata { name: "Something", - arguments: vec![ - TypeSpec::new::("Other"), - ], + arguments: vec![TypeSpec::new::("Other")], documentation: vec![], }, EventMetadata { name: "SomethingElse", - arguments: vec![ - TypeSpec::new::<<::AccountId as SomeAssociation1>::_1>("::_1"), - ], + arguments: vec![TypeSpec::new::("::_1")], documentation: vec![], }, ]), - // constants: , - // errors: + constants: vec![ + ModuleConstantMetadata { + name: "MyGetParam", + ty: scale_info::meta_type::(), + value: vec![10, 0, 0, 0], + documentation: vec![ + " Some comment", + " Some comment", + ], + }, + ModuleConstantMetadata { + name: "MyGetParam2", + ty: scale_info::meta_type::(), + value: vec![11, 0, 0, 0], + documentation: vec![ + " Some comment", + " Some comment", + ], + }, + ModuleConstantMetadata { + name: "MyGetParam3", + ty: scale_info::meta_type::(), + value: vec![12, 0, 0, 0, 0, 0, 0, 0], + documentation: vec![], + }, + ModuleConstantMetadata { + name: "some_extra", + ty: scale_info::meta_type::(), + value: vec![100, 0, 0, 0, 0, 0, 0, 0], + documentation: vec![ + " Some doc", + " Some doc", + ], + }, + ModuleConstantMetadata { + name: "some_extra_extra", + ty: scale_info::meta_type::(), + value: vec![0, 0, 0, 0, 0, 0, 0, 0], + documentation: vec![ + " Some doc", + ], + }, + ], + errors: vec![ + ErrorMetadata { + name: "InsufficientProposersBalance", + documentation: vec![ + " doc comment put into metadata", + ], 
+ }, + ], }; let metadata = match Runtime::metadata().1 { - RuntimeMetadata::V12(metadata) => metadata, + RuntimeMetadata::V13(metadata) => metadata, + _ => panic!("metadata has been bump, test needs to be updated"), }; - let mut registry = Registry::new(); - let expected_pallet_metadata = expected_pallet_metadata.into_portable(&mut registry); - - pretty_assertions::assert_eq!(metadata.modules[1], expected_pallet_metadata); + let pallet_metadata = ModuleMetadata::::decode( + &mut &metadata.modules[1].encode()[..] + ).unwrap(); + pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); } diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index e8c6573d5f8fd..72fd236005107 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -267,7 +267,7 @@ mod test { fn metadata() { let metadata = Runtime::metadata(); let modules = match metadata.1 { - frame_support::metadata::RuntimeMetadata::V12(frame_support::metadata::RuntimeMetadataLastVersion { + frame_support::metadata::RuntimeMetadata::V13(frame_support::metadata::RuntimeMetadataLastVersion { modules: m, .. 
}) => m, diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index f7d6e06b50f31..fd8bc40e0cc96 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -630,7 +630,7 @@ fn metadata() { } ], documentation: vec![ - " Doc comment put in metadata".to_string(), + " Doc comment put in metadata", ], }, FunctionMetadata { @@ -642,7 +642,7 @@ fn metadata() { } ], documentation: vec![ - " Doc comment put in metadata".to_string(), + " Doc comment put in metadata", ], }, ]), @@ -658,7 +658,7 @@ fn metadata() { name: "Spending", arguments: vec![TypeSpec::new::("Balance")], documentation: vec![ - " doc".to_string() + " doc" ], }, EventMetadata { @@ -679,7 +679,7 @@ fn metadata() { ErrorMetadata { name: "InsufficientProposersBalance", documentation: vec![ - " doc comment put into metadata".to_string(), + " doc comment put into metadata", ], }, ], @@ -701,7 +701,9 @@ fn metadata() { _ => panic!("metadata has been bump, test needs to be updated"), }; - let pallet_metadata = ModuleMetadata::decode(&mut &metadata.modules[1].encode()[..]).unwrap(); + let pallet_metadata = ModuleMetadata::::decode( + &mut &metadata.modules[1].encode()[..] 
+ ).unwrap(); let pallet_instance1_metadata = ModuleMetadata::decode(&mut &metadata.modules[2].encode()[..]).unwrap(); diff --git a/frame/support/test/tests/system.rs b/frame/support/test/tests/system.rs index 19858731b3a09..254e48b1a985f 100644 --- a/frame/support/test/tests/system.rs +++ b/frame/support/test/tests/system.rs @@ -24,9 +24,9 @@ pub trait Config: 'static + Eq + Clone { + From>; type BaseCallFilter: frame_support::traits::Filter; - type BlockNumber: Decode + Encode + EncodeLike + Clone + Default; + type BlockNumber: Decode + Encode + EncodeLike + Clone + Default + scale_info::TypeInfo; type Hash; - type AccountId: Encode + EncodeLike + Decode; + type AccountId: Encode + EncodeLike + Decode + scale_info::TypeInfo; type Call; type Event: From>; type PalletInfo: frame_support::traits::PalletInfo; @@ -63,7 +63,7 @@ frame_support::decl_error! { } /// Origin for the system module. -#[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub enum RawOrigin { Root, Signed(AccountId), From ee935d697bc2da0d2830777ca18970f1ccd1a89f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 20 Apr 2021 17:31:44 +0100 Subject: [PATCH 169/503] TypeInfo derives --- frame/atomic-swap/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/atomic-swap/src/lib.rs b/frame/atomic-swap/src/lib.rs index 513a9343a72e1..a7044a74447c2 100644 --- a/frame/atomic-swap/src/lib.rs +++ b/frame/atomic-swap/src/lib.rs @@ -54,7 +54,7 @@ use codec::{Encode, Decode}; use sp_runtime::RuntimeDebug; /// Pending atomic swap operation. -#[derive(Clone, Eq, PartialEq, RuntimeDebugNoBound, Encode, Decode)] +#[derive(Clone, Eq, PartialEq, RuntimeDebugNoBound, Encode, Decode, scale_info::TypeInfo)] pub struct PendingSwap { /// Source of the swap. 
pub source: T::AccountId, @@ -87,7 +87,7 @@ pub trait SwapAction { } /// A swap action that only allows transferring balances. -#[derive(Clone, RuntimeDebug, Eq, PartialEq, Encode, Decode)] +#[derive(Clone, RuntimeDebug, Eq, PartialEq, Encode, Decode, scale_info::TypeInfo)] pub struct BalanceSwapAction> { value: >::Balance, _marker: PhantomData, From 5ed395a640bdb10dbd527e6e04f22fe03df2f35b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 20 Apr 2021 17:31:58 +0100 Subject: [PATCH 170/503] Test fixes --- frame/support/test/tests/instance.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index 5db392b1e24c6..8c3c07f8f24d7 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -45,7 +45,7 @@ mod module1 { type Event: From> + Into<::Event>; type Origin: From>; type SomeParameter: Get; - type GenericType: Default + Clone + Codec + EncodeLike; + type GenericType: Default + Clone + Codec + EncodeLike + scale_info::TypeInfo; } frame_support::decl_module! 
{ @@ -100,7 +100,7 @@ mod module1 { } } - #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode)] + #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub enum Origin, I> where T::BlockNumber: From { Members(u32), _Phantom(std::marker::PhantomData<(T, I)>), @@ -166,7 +166,7 @@ mod module2 { } } - #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode)] + #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] pub enum Origin, I=DefaultInstance> { Members(u32), _Phantom(std::marker::PhantomData<(T, I)>), From 3e5ea3619ef48b6e18acbf66ec0f5241683f0d86 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Apr 2021 10:31:59 +0100 Subject: [PATCH 171/503] Add temporary TypeInfo derives to npos_solution macro --- bin/node/runtime/src/lib.rs | 11 ----------- primitives/npos-elections/compact/src/lib.rs | 4 ++-- primitives/npos-elections/src/lib.rs | 2 ++ 3 files changed, 4 insertions(+), 13 deletions(-) diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index a8fd06e047f08..924cd1eba319f 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -533,17 +533,6 @@ sp_npos_elections::generate_solution_type!( >(16) ); -// todo [AJ] probably need to generate custom TypeInfo in generate_solution_type!, see polkadot.js -impl scale_info::TypeInfo for NposCompactSolution16 { - type Identity = (); - - fn type_info() -> scale_info::Type { - scale_info::Type::builder() - .path(scale_info::Path::new("NposCompactSolution16", module_path!())) - .composite(scale_info::build::Fields::unit()) - } -} - pub const MAX_NOMINATIONS: u32 = ::LIMIT as u32; diff --git a/primitives/npos-elections/compact/src/lib.rs b/primitives/npos-elections/compact/src/lib.rs index e558ae89ca93e..a5f122ef4a0cc 100644 --- a/primitives/npos-elections/compact/src/lib.rs +++ b/primitives/npos-elections/compact/src/lib.rs @@ -168,11 +168,11 @@ fn struct_def( 
); quote!{ #compact_impl - #[derive(Default, PartialEq, Eq, Clone, Debug)] + #[derive(Default, PartialEq, Eq, Clone, Debug, _npos::scale_info::TypeInfo)] // todo: [AJ] manually generate TypeInfo here instead } } else { // automatically derived. - quote!(#[derive(Default, PartialEq, Eq, Clone, Debug, _npos::codec::Encode, _npos::codec::Decode)]) + quote!(#[derive(Default, PartialEq, Eq, Clone, Debug, _npos::codec::Encode, _npos::codec::Decode, _npos::scale_info::TypeInfo)]) }; let from_impl = assignment::from_impl(count); diff --git a/primitives/npos-elections/src/lib.rs b/primitives/npos-elections/src/lib.rs index eb26268eb8c5c..797cd7e77b550 100644 --- a/primitives/npos-elections/src/lib.rs +++ b/primitives/npos-elections/src/lib.rs @@ -118,6 +118,8 @@ pub use pjr::*; #[doc(hidden)] pub use codec; #[doc(hidden)] +pub use scale_info; +#[doc(hidden)] pub use sp_arithmetic; #[doc(hidden)] pub use sp_std; From 39b4e4e6a04ef9558bc26072b8e20c797ab1adf6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Apr 2021 11:58:02 +0100 Subject: [PATCH 172/503] Add temporary instance TypeInfo bound --- frame/support/src/event.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 329bd17254d83..aa468e89ff024 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -293,6 +293,7 @@ macro_rules! __decl_generic_event { impl<$( $generic_param ),* $(, $instance)?> RawEvent<$( $generic_param ),* $(, $instance)?> where $( $generic_param: $crate::scale_info::TypeInfo + 'static ),* + $(, $instance: $crate::scale_info::TypeInfo + 'static)? // todo: [AJ] just adding this to make it compile, look at removing it. 
{ #[allow(dead_code)] #[doc(hidden)] From 9a91246d8d9efa2fb8251c1a4138b1f5de480695 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Apr 2021 11:58:24 +0100 Subject: [PATCH 173/503] Add missing scale-info dependency --- Cargo.lock | 1 + primitives/npos-elections/fuzzer/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index f0187e6b4b6d7..b25881d614791 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9040,6 +9040,7 @@ dependencies = [ "honggfuzz", "parity-scale-codec", "rand 0.7.3", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", diff --git a/primitives/npos-elections/fuzzer/Cargo.toml b/primitives/npos-elections/fuzzer/Cargo.toml index 3154a7861d306..e6213c1c6f014 100644 --- a/primitives/npos-elections/fuzzer/Cargo.toml +++ b/primitives/npos-elections/fuzzer/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } honggfuzz = "0.5" rand = { version = "0.7.3", features = ["std", "small_rng"] } sp-arithmetic = { version = "3.0.0", path = "../../arithmetic" } From e5a89565163b899f8887763725ca726cda941504 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Apr 2021 11:58:56 +0100 Subject: [PATCH 174/503] Add some TypeInfo derives --- frame/elections/src/lib.rs | 2 +- frame/proxy/src/tests.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/elections/src/lib.rs b/frame/elections/src/lib.rs index 46ec62bf75174..792e7cf3878a7 100644 --- a/frame/elections/src/lib.rs +++ b/frame/elections/src/lib.rs @@ -108,7 +108,7 @@ mod tests; // entries before they increase the capacity. /// The activity status of a voter. 
-#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, RuntimeDebug)] +#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] pub struct VoterInfo { /// Last VoteIndex in which this voter assigned (or initialized) approvals. last_active: VoteIndex, diff --git a/frame/proxy/src/tests.rs b/frame/proxy/src/tests.rs index 6f3b1f35e2ada..be9ce06639bc6 100644 --- a/frame/proxy/src/tests.rs +++ b/frame/proxy/src/tests.rs @@ -100,7 +100,7 @@ parameter_types! { pub const AnnouncementDepositBase: u64 = 1; pub const AnnouncementDepositFactor: u64 = 1; } -#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug)] +#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum ProxyType { Any, JustTransfer, From 7ee4754aa28510b0544a21b9b4cf0d8efebfb548 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Apr 2021 12:36:28 +0100 Subject: [PATCH 175/503] More test fixes --- frame/support/test/tests/pallet.rs | 10 ++++++++-- frame/support/test/tests/pallet_instance.rs | 14 ++++++++++++-- primitives/runtime/src/testing.rs | 2 +- 3 files changed, 21 insertions(+), 5 deletions(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index f6ecec9f69dc2..8f5b04ff33b50 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -24,7 +24,10 @@ use frame_support::{ storage::unhashed, scale_info, }; -use scale_info::form::PortableForm; +use scale_info::{ + form::PortableForm, + IntoPortable, +}; use sp_runtime::DispatchError; use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; @@ -261,7 +264,7 @@ pub mod pallet { } #[pallet::origin] - #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode)] + #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, scale_info::TypeInfo)] pub struct Origin(PhantomData); 
#[pallet::validate_unsigned] @@ -856,5 +859,8 @@ fn metadata() { let pallet_metadata = ModuleMetadata::::decode( &mut &metadata.modules[1].encode()[..] ).unwrap(); + + let mut registry = scale_info::Registry::new(); + let expected_pallet_metadata = expected_pallet_metadata.into_portable(&mut registry); pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); } diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index fd8bc40e0cc96..152345fb6e908 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -23,6 +23,10 @@ use frame_support::{ dispatch::UnfilteredDispatchable, storage::unhashed, }; +use scale_info::{ + form::PortableForm, + IntoPortable, +}; use sp_runtime::DispatchError; use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; @@ -147,7 +151,7 @@ pub mod pallet { } #[pallet::origin] - #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode)] + #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, scale_info::TypeInfo)] pub struct Origin(PhantomData<(T, I)>); #[pallet::validate_unsigned] @@ -701,12 +705,18 @@ fn metadata() { _ => panic!("metadata has been bump, test needs to be updated"), }; - let pallet_metadata = ModuleMetadata::::decode( + let pallet_metadata = ModuleMetadata::::decode( &mut &metadata.modules[1].encode()[..] 
).unwrap(); let pallet_instance1_metadata = ModuleMetadata::decode(&mut &metadata.modules[2].encode()[..]).unwrap(); + let mut registry = scale_info::Registry::new(); + let expected_pallet_metadata = expected_pallet_metadata.into_portable(&mut registry); + + let mut registry = scale_info::Registry::new(); + let expected_pallet_instance1_metadata = expected_pallet_instance1_metadata.into_portable(&mut registry); + pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); pretty_assertions::assert_eq!(pallet_instance1_metadata, expected_pallet_instance1_metadata); } diff --git a/primitives/runtime/src/testing.rs b/primitives/runtime/src/testing.rs index b80482ea940db..05cfa4d9b4d7c 100644 --- a/primitives/runtime/src/testing.rs +++ b/primitives/runtime/src/testing.rs @@ -255,7 +255,7 @@ impl<'a, Xt> Deserialize<'a> for Block where Block: Decode { /// with index only used if sender is some. /// /// If sender is some then the transaction is signed otherwise it is unsigned. -#[derive(PartialEq, Eq, Clone, Encode, Decode)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, scale_info::TypeInfo)] pub struct TestXt { /// Signature of the extrinsic. 
pub signature: Option<(u64, Extra)>, From 22e99d65a9b5dc18cd39baaffed2baa5dbcd6351 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Apr 2021 17:14:09 +0100 Subject: [PATCH 176/503] Add TypeInfo derive for Pallet struct --- frame/support/procedural/src/pallet/expand/pallet_struct.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index 1db96c0d1647a..ff28937939234 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -66,6 +66,7 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::EqNoBound, #frame_support::PartialEqNoBound, #frame_support::RuntimeDebugNoBound, + #frame_support::scale_info::TypeInfo, )] )); From 2d0e8bdc70ce2dc8b4a3ceb0e8e3856fafeab3d4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 22 Apr 2021 15:29:10 +0100 Subject: [PATCH 177/503] Add (temporary?) TypeInfo bound to atomic-swap Config --- frame/atomic-swap/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/atomic-swap/src/lib.rs b/frame/atomic-swap/src/lib.rs index a7044a74447c2..b85dd54759133 100644 --- a/frame/atomic-swap/src/lib.rs +++ b/frame/atomic-swap/src/lib.rs @@ -144,7 +144,7 @@ pub mod pallet { /// Atomic swap's pallet configuration trait. #[pallet::config] - pub trait Config: frame_system::Config { + pub trait Config: frame_system::Config + scale_info::TypeInfo { // todo: [AJ] see whether we really need this bound /// The overarching event type. type Event: From> + IsType<::Event>; /// Swap action. 
From e9320cae7bf4d11c88820a388cca49f49d731103 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Apr 2021 15:14:51 +0100 Subject: [PATCH 178/503] Add TypeInfo derive to BoundedVec --- frame/support/src/storage/bounded_vec.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/storage/bounded_vec.rs b/frame/support/src/storage/bounded_vec.rs index 9fcfe4035294f..f49008f7876dd 100644 --- a/frame/support/src/storage/bounded_vec.rs +++ b/frame/support/src/storage/bounded_vec.rs @@ -37,7 +37,7 @@ impl BoundedVecValue for T {} /// /// As the name suggests, the length of the queue is always bounded. All internal operations ensure /// this bound is respected. -#[derive(Encode, Decode, crate::DefaultNoBound, crate::CloneNoBound, crate::DebugNoBound)] +#[derive(Encode, Decode, crate::DefaultNoBound, crate::CloneNoBound, crate::DebugNoBound, scale_info::TypeInfo)] pub struct BoundedVec>(Vec, PhantomData); // NOTE: we could also implement this as: From e3fae39f9e437e50b69b43b9c4ae095f6ce057bf Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Apr 2021 15:15:32 +0100 Subject: [PATCH 179/503] Cargo.lock regenerate after merge --- Cargo.lock | 232 ++++++++++++++++++++++++++--------------------------- 1 file changed, 115 insertions(+), 117 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b25881d614791..643bf0e6ee8fd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -164,22 +164,9 @@ checksum = "5a2f58b0bb10c380af2b26e57212856b8c9a59e0925b4c20f4a174a49734eaf7" [[package]] name = "asn1_der" -version = "0.6.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fce6b6a0ffdafebd82c87e79e3f40e8d2c523e5fea5566ff6b90509bf98d638" -dependencies = [ - "asn1_der_derive", -] - -[[package]] -name = "asn1_der_derive" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d0864d84b8e07b145449be9a8537db86bf9de5ce03b913214694643b4743502" -dependencies = [ - 
"quote", - "syn", -] +checksum = "9d6e24d2cce90c53b948c46271bfb053e4bdc2db9b5d3f65e20f8cf28a1b7fc3" [[package]] name = "assert_cmd" @@ -462,6 +449,12 @@ version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" +[[package]] +name = "beef" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6736e2428df2ca2848d846c43e88745121a6654696e349ce0054a420815a7409" + [[package]] name = "bincode" version = "1.3.3" @@ -1326,6 +1319,12 @@ dependencies = [ "syn", ] +[[package]] +name = "diff" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499" + [[package]] name = "difference" version = "2.0.0" @@ -1778,7 +1777,6 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-inherents", "sp-io", @@ -1814,7 +1812,7 @@ dependencies = [ "parity-scale-codec", "parity-util-mem", "paste 1.0.4", - "pretty_assertions", + "pretty_assertions 0.6.1", "scale-info", "serde", "smallvec 1.6.1", @@ -1867,7 +1865,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "pretty_assertions", + "pretty_assertions 0.6.1", "rustversion", "scale-info", "serde", @@ -2923,12 +2921,12 @@ dependencies = [ [[package]] name = "jsonrpsee-http-client" -version = "0.2.0-alpha.3" +version = "0.2.0-alpha.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b15fc3a0ef2e02d770aa1a221d3412443dcaedc43e27d80c957dd5bbd65321b" +checksum = "7e3a49473ea266be8e9f23e20a7bfa4349109b42319d72cc0b8a101e18fa6466" dependencies = [ "async-trait", - "futures 0.3.13", + "fnv", "hyper 0.13.10", "hyper-rustls", "jsonrpsee-types", @@ -2937,17 +2935,17 @@ dependencies = [ "serde", "serde_json", "thiserror", - "unicase", "url 2.2.1", ] [[package]] name = "jsonrpsee-proc-macros" 
-version = "0.2.0-alpha.3" +version = "0.2.0-alpha.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb4afbda476e2ee11cc6245055c498c116fc8002d2d60fe8338b6ee15d84c3a" +checksum = "b0cbaee9ca6440e191545a68c7bf28db0ff918359a904e37a6e7cf7edd132f5a" dependencies = [ "Inflector", + "proc-macro-crate 1.0.0", "proc-macro2", "quote", "syn", @@ -2955,32 +2953,29 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.2.0-alpha.3" +version = "0.2.0-alpha.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c42a82588b5f7830e94341bb7e79d15f46070ab6f64dde1e3b3719721b61c5bf" +checksum = "e4ce2de6884fb4abee16eca02329a1eec1eb8df8aed751a8e929083820c78ce7" dependencies = [ "async-trait", - "futures 0.3.13", + "beef", + "futures-channel", + "futures-util", "log", "serde", "serde_json", - "smallvec 1.6.1", "thiserror", ] [[package]] name = "jsonrpsee-utils" -version = "0.2.0-alpha.3" +version = "0.2.0-alpha.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e65c77838fce96bc554b4a3a159d0b9a2497319ae9305c66ee853998c7ed2fd3" +checksum = "3b22199cccd81d9ef601be86bedc5bef67aeacbbfddace031d4931c60fca96e9" dependencies = [ - "futures 0.3.13", - "globset", + "futures-util", "hyper 0.13.10", "jsonrpsee-types", - "lazy_static", - "log", - "unicase", ] [[package]] @@ -3118,9 +3113,9 @@ checksum = "c7d73b3f436185384286bd8098d17ec07c9a7d2388a6599f824d8502b529702a" [[package]] name = "libp2p" -version = "0.36.0" +version = "0.37.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe5759b526f75102829c15e4d8566603b4bf502ed19b5f35920d98113873470d" +checksum = "08053fbef67cd777049ef7a95ebaca2ece370b4ed7712c3fa404d69a88cb741b" dependencies = [ "atomic", "bytes 1.0.1", @@ -3157,9 +3152,9 @@ dependencies = [ [[package]] name = "libp2p-core" -version = "0.28.1" +version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c1e1797734bbd4c453664fefb029628f77c356ffc5bce98f06b18a7db3ebb0f7" +checksum = "71dd51b562e14846e65bad00e5808d0644376e6588668c490d3c48e1dfeb4a9a" dependencies = [ "asn1_der", "bs58", @@ -3202,9 +3197,9 @@ dependencies = [ [[package]] name = "libp2p-dns" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9712eb3e9f7dcc77cc5ca7d943b6a85ce4b1faaf91a67e003442412a26d6d6f8" +checksum = "62e63dab8b5ff35e0c101a3e51e843ba782c07bbb1682f5fd827622e0d02b98b" dependencies = [ "async-std-resolver", "futures 0.3.13", @@ -3216,9 +3211,9 @@ dependencies = [ [[package]] name = "libp2p-floodsub" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897645f99e9b396df256a6aa8ba8c4bc019ac6b7c62556f624b5feea9acc82bb" +checksum = "48a9b570f6766301d9c4aa00fce3554cad1598e2f466debbc4dde909028417cf" dependencies = [ "cuckoofilter", "fnv", @@ -3234,9 +3229,9 @@ dependencies = [ [[package]] name = "libp2p-gossipsub" -version = "0.29.0" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "794b0c85f5df1acbc1fc38414d37272594811193b6325c76d3931c3e3f5df8c0" +checksum = "73cb9a89a301afde1e588c73f7e9131e12a5388725f290a9047b878862db1b53" dependencies = [ "asynchronous-codec 0.6.0", "base64 0.13.0", @@ -3260,9 +3255,9 @@ dependencies = [ [[package]] name = "libp2p-identify" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f88ebc841d744979176ab4b8b294a3e655a7ba4ef26a905d073a52b49ed4dff5" +checksum = "5f668f00efd9883e8b7bcc582eaf0164615792608f886f6577da18bcbeea0a46" dependencies = [ "futures 0.3.13", "libp2p-core", @@ -3276,9 +3271,9 @@ dependencies = [ [[package]] name = "libp2p-kad" -version = "0.29.0" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb5b90b6bda749023a85f60b49ea74b387c25f17d8df541ae72a3c75dd52e63" 
+checksum = "b07312ebe5ee4fd2404447a0609814574df55c65d4e20838b957bbd34907d820" dependencies = [ "arrayvec 0.5.2", "asynchronous-codec 0.6.0", @@ -3302,9 +3297,9 @@ dependencies = [ [[package]] name = "libp2p-mdns" -version = "0.29.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be28ca13bb648d249a9baebd750ebc64ce7040ddd5f0ce1035ff1f4549fb596d" +checksum = "41e282f974c4bea56db8acca50387f05189406e346318cb30190b0bde662961e" dependencies = [ "async-io", "data-encoding", @@ -3363,9 +3358,9 @@ dependencies = [ [[package]] name = "libp2p-ping" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dea10fc5209260915ea65b78f612d7ff78a29ab288e7aa3250796866af861c45" +checksum = "bf4bfaffac63bf3c7ec11ed9d8879d455966ddea7e78ee14737f0b6dce0d1cd1" dependencies = [ "futures 0.3.13", "libp2p-core", @@ -3409,9 +3404,9 @@ dependencies = [ [[package]] name = "libp2p-relay" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ff268be6a9d6f3c6cca3b81bbab597b15217f9ad8787c6c40fc548c1af7cd24" +checksum = "0b8786aca3f18671d8776289706a5521f6c9124a820f69e358de214b9939440d" dependencies = [ "asynchronous-codec 0.6.0", "bytes 1.0.1", @@ -3432,9 +3427,9 @@ dependencies = [ [[package]] name = "libp2p-request-response" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "725367dd2318c54c5ab1a6418592e5b01c63b0dedfbbfb8389220b2bcf691899" +checksum = "1cdbe172f08e6d0f95fa8634e273d4c4268c4063de2e33e7435194b0130c62e3" dependencies = [ "async-trait", "bytes 1.0.1", @@ -3452,9 +3447,9 @@ dependencies = [ [[package]] name = "libp2p-swarm" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75c26980cadd7c25d89071cb23e1f7f5df4863128cc91d83c6ddc72338cecafa" +checksum = 
"1e04d8e1eef675029ec728ba14e8d0da7975d84b6679b699b4ae91a1de9c3a92" dependencies = [ "either", "futures 0.3.13", @@ -3468,9 +3463,9 @@ dependencies = [ [[package]] name = "libp2p-swarm-derive" -version = "0.22.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c564ebaa36a64839f51eaddb0243aaaa29ce64affb56129193cc3248b72af273" +checksum = "365b0a699fea5168676840567582a012ea297b1ca02eee467e58301b9c9c5eed" dependencies = [ "quote", "syn", @@ -3539,9 +3534,9 @@ dependencies = [ [[package]] name = "libp2p-yamux" -version = "0.31.0" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96d6144cc94143fb0a8dd1e7c2fbcc32a2808168bcd1d69920635424d5993b7b" +checksum = "f35da42cfc6d5cb0dcf3ad6881bc68d146cdf38f98655e09e33fbba4d13eabc4" dependencies = [ "futures 0.3.13", "libp2p-core", @@ -4229,7 +4224,7 @@ version = "2.0.0" dependencies = [ "frame-system", "parity-scale-codec", - "pretty_assertions", + "pretty_assertions 0.6.1", "scale-info", "sp-application-crypto", "sp-core", @@ -4338,7 +4333,6 @@ dependencies = [ "pallet-vesting", "parity-scale-codec", "scale-info", - "serde", "sp-api", "sp-authority-discovery", "sp-block-builder", @@ -4418,7 +4412,6 @@ dependencies = [ "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", "scale-info", - "serde", "sp-api", "sp-block-builder", "sp-consensus-aura", @@ -4633,7 +4626,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -4649,7 +4641,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -4668,7 +4659,6 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "scale-info", - "serde", "sp-application-crypto", "sp-consensus-aura", "sp-core", @@ -4686,7 +4676,6 @@ dependencies = [ "pallet-session", "parity-scale-codec", "scale-info", - "serde", "sp-application-crypto", 
"sp-authority-discovery", "sp-core", @@ -4732,7 +4721,6 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "serde", "sp-application-crypto", "sp-consensus-babe", "sp-consensus-vrf", @@ -4755,7 +4743,6 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -4773,7 +4760,6 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -4793,7 +4779,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -4816,12 +4801,12 @@ dependencies = [ "pallet-randomness-collective-flip", "pallet-timestamp", "parity-scale-codec", - "parity-wasm 0.41.0", + "parity-wasm 0.42.2", "paste 1.0.4", - "pretty_assertions", - "pwasm-utils 0.16.0", - "rand 0.7.3", - "rand_pcg", + "pretty_assertions 0.7.2", + "pwasm-utils 0.17.1", + "rand 0.8.3", + "rand_pcg 0.3.0", "scale-info", "serde", "sp-core", @@ -4829,7 +4814,7 @@ dependencies = [ "sp-runtime", "sp-sandbox", "sp-std", - "wasmi-validation", + "wasmi-validation 0.4.0", "wat", ] @@ -4923,7 +4908,6 @@ dependencies = [ "paste 1.0.4", "rand 0.7.3", "scale-info", - "serde", "sp-arithmetic", "sp-core", "sp-io", @@ -4945,7 +4929,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -4954,7 +4937,7 @@ dependencies = [ [[package]] name = "pallet-elections-phragmen" -version = "3.0.0" +version = "4.0.0" dependencies = [ "frame-benchmarking", "frame-support", @@ -4964,7 +4947,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-npos-elections", @@ -4984,7 +4966,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5001,7 +4982,6 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "serde", "sp-core", 
"sp-io", "sp-keystore", @@ -5035,7 +5015,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-arithmetic", "sp-core", "sp-io", @@ -5062,7 +5041,6 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "serde", "sp-application-crypto", "sp-core", "sp-finality-grandpa", @@ -5085,7 +5063,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5104,7 +5081,6 @@ dependencies = [ "pallet-session", "parity-scale-codec", "scale-info", - "serde", "sp-application-crypto", "sp-core", "sp-io", @@ -5123,7 +5099,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-keyring", @@ -5153,11 +5128,12 @@ dependencies = [ name = "pallet-membership" version = "3.0.0" dependencies = [ + "frame-benchmarking", "frame-support", "frame-system", + "log", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5177,7 +5153,6 @@ dependencies = [ "pallet-mmr-primitives", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5228,7 +5203,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5244,7 +5218,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5260,7 +5233,6 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5322,7 +5294,6 @@ dependencies = [ "pallet-utility", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5355,7 +5326,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5372,7 +5342,6 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5389,7 +5358,6 @@ dependencies = 
[ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5407,7 +5375,6 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "serde", "sp-application-crypto", "sp-core", "sp-io", @@ -5452,7 +5419,6 @@ dependencies = [ "parity-scale-codec", "rand_chacha 0.2.2", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5519,7 +5485,6 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5552,7 +5517,6 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-inherents", "sp-io", @@ -5654,7 +5618,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5673,7 +5636,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5817,6 +5779,12 @@ version = "0.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddfc878dac00da22f8f61e7af3157988424567ab01d9920b962ef7dcbd7cd865" +[[package]] +name = "parity-wasm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be5e13c266502aadf83426d87d81a0f5d1ef45b8027f5a471c360abfe4bfae92" + [[package]] name = "parity-ws" version = "0.10.0" @@ -6213,6 +6181,18 @@ dependencies = [ "output_vt100", ] +[[package]] +name = "pretty_assertions" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cab0e7c02cf376875e9335e0ba1da535775beb5450d21e1dffca068818ed98b" +dependencies = [ + "ansi_term 0.12.1", + "ctor", + "diff", + "output_vt100", +] + [[package]] name = "primitive-types" version = "0.9.0" @@ -6377,13 +6357,13 @@ dependencies = [ [[package]] name = "pwasm-utils" -version = "0.16.0" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1c8ac87af529432d3a4f0e2b3bbf08af49f28f09cc73ed7e551161bdaef5f78d" +checksum = "78778a25194f953d1766fc8c6a331ed56f070d09a0511267ee2c150cb71ea8c2" dependencies = [ "byteorder", "log", - "parity-wasm 0.41.0", + "parity-wasm 0.42.2", ] [[package]] @@ -6469,7 +6449,7 @@ dependencies = [ "rand_chacha 0.2.2", "rand_core 0.5.1", "rand_hc 0.2.0", - "rand_pcg", + "rand_pcg 0.2.1", ] [[package]] @@ -6573,6 +6553,15 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "rand_pcg" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7de198537002b913568a3847e53535ace266f93526caf5c360ec41d72c5787f0" +dependencies = [ + "rand_core 0.6.2", +] + [[package]] name = "rawpointer" version = "0.2.1" @@ -6729,7 +6718,6 @@ dependencies = [ "hex-literal", "jsonrpsee-http-client", "jsonrpsee-proc-macros", - "jsonrpsee-types", "log", "parity-scale-codec", "sp-core", @@ -6944,6 +6932,7 @@ dependencies = [ "either", "futures 0.3.13", "futures-timer 3.0.2", + "ip_network", "libp2p", "log", "parity-scale-codec", @@ -8863,7 +8852,7 @@ dependencies = [ "parity-scale-codec", "parity-util-mem", "parking_lot 0.11.1", - "pretty_assertions", + "pretty_assertions 0.6.1", "primitive-types", "rand 0.7.3", "rand_chacha 0.2.2", @@ -9226,7 +9215,7 @@ dependencies = [ "num-traits", "parity-scale-codec", "parking_lot 0.11.1", - "pretty_assertions", + "pretty_assertions 0.6.1", "rand 0.7.3", "smallvec 1.6.1", "sp-core", @@ -10902,7 +10891,7 @@ dependencies = [ "num-rational", "num-traits", "parity-wasm 0.41.0", - "wasmi-validation", + "wasmi-validation 0.3.0", ] [[package]] @@ -10914,6 +10903,15 @@ dependencies = [ "parity-wasm 0.41.0", ] +[[package]] +name = "wasmi-validation" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb8e860796d8be48efef530b60eebf84e74a88bce107374fffb0da97d504b8" +dependencies = [ + "parity-wasm 0.42.2", +] + [[package]] name = "wasmparser" version = "0.76.0" @@ -11283,15 +11281,15 
@@ dependencies = [ [[package]] name = "yamux" -version = "0.8.1" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cc7bd8c983209ed5d527f44b01c41b7dc146fd960c61cf9e1d25399841dc271" +checksum = "e7d9028f208dd5e63c614be69f115c1b53cacc1111437d4c765185856666c107" dependencies = [ "futures 0.3.13", "log", "nohash-hasher", "parking_lot 0.11.1", - "rand 0.7.3", + "rand 0.8.3", "static_assertions", ] From 46db8b988147e0cb72135641be166afb1382f6a0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Apr 2021 15:15:59 +0100 Subject: [PATCH 180/503] Add missing import after merge. --- frame/support/src/storage/types/double_map.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index d2ff5b2ee897b..1c55894bfa00a 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -26,7 +26,7 @@ use crate::{ bounded_vec::{BoundedVec, BoundedVecValue}, types::{OptionQuery, StorageEntryMetadata, QueryKindTrait}, }, - traits::{GetDefault, StorageInstance}, + traits::{GetDefault, StorageInstance, Get}, }; use sp_std::vec::Vec; From 09f26026f35eff53f1fa4a484c33ec1afcfd8d1c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Apr 2021 15:52:50 +0100 Subject: [PATCH 181/503] Add some more TypeInfo derives --- frame/election-provider-multi-phase/src/lib.rs | 1 + primitives/runtime/src/curve.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index f2fc9d975a145..5170ce24626c7 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -345,6 +345,7 @@ impl Phase { /// A configuration for the pallet to indicate what should happen in the case of a fallback i.e. 
/// reaching a call to `elect` with no good solution. #[cfg_attr(test, derive(Clone))] +#[derive(scale_info::TypeInfo)] pub enum FallbackStrategy { /// Run a on-chain sequential phragmen. /// diff --git a/primitives/runtime/src/curve.rs b/primitives/runtime/src/curve.rs index 06f7f2c7e3f05..9657a33858832 100644 --- a/primitives/runtime/src/curve.rs +++ b/primitives/runtime/src/curve.rs @@ -21,7 +21,7 @@ use crate::{Perbill, traits::{AtLeast32BitUnsigned, SaturatedConversion}}; use core::ops::Sub; /// Piecewise Linear function in [0, 1] -> [0, 1]. -#[derive(PartialEq, Eq, sp_core::RuntimeDebug)] +#[derive(PartialEq, Eq, sp_core::RuntimeDebug, scale_info::TypeInfo)] pub struct PiecewiseLinear<'a> { /// Array of points. Must be in order from the lowest abscissas to the highest. pub points: &'a [(Perbill, Perbill)], From 3fb80ddcc565b8da688e2cab811fe741114820c6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Apr 2021 20:44:05 +0100 Subject: [PATCH 182/503] Param type const derives, change some usizes to u32 for TypeInfo --- Cargo.lock | 2 ++ bin/node/runtime/src/lib.rs | 2 +- frame/collective/src/lib.rs | 2 +- frame/lottery/src/lib.rs | 6 +++--- frame/lottery/src/mock.rs | 2 +- frame/nicks/src/lib.rs | 16 ++++++++-------- frame/offences/benchmarking/Cargo.toml | 1 + frame/session/benchmarking/Cargo.toml | 1 + frame/support/src/lib.rs | 13 +++++++++++++ 9 files changed, 31 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 643bf0e6ee8fd..c88b531452e4c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5275,6 +5275,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5400,6 +5401,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 09ca7bc118108..1cf4d07117d9f 100644 --- a/bin/node/runtime/src/lib.rs +++ 
b/bin/node/runtime/src/lib.rs @@ -1022,7 +1022,7 @@ impl pallet_mmr::Config for Runtime { parameter_types! { pub const LotteryPalletId: PalletId = PalletId(*b"py/lotto"); - pub const MaxCalls: usize = 10; + pub const MaxCalls: u32 = 10; pub const MaxGenerateRandom: u32 = 10; } diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index e5b920376af20..bf4176cbf45fb 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -137,7 +137,7 @@ pub trait Config: frame_system::Config { type MotionDuration: Get; /// Maximum number of proposals allowed to be active in parallel. - type MaxProposals: Get; + type MaxProposals: Get + scale_info::TypeInfo; /// The maximum number of members supported by the pallet. Used for weight estimation. /// diff --git a/frame/lottery/src/lib.rs b/frame/lottery/src/lib.rs index f04b85b209f3d..e935dc5981e12 100644 --- a/frame/lottery/src/lib.rs +++ b/frame/lottery/src/lib.rs @@ -94,7 +94,7 @@ pub trait Config: frame_system::Config { type ManagerOrigin: EnsureOrigin; /// The max number of calls available in a single lottery. - type MaxCalls: Get; + type MaxCalls: Get; // todo: [AJ] changed from usize -> u32 (see usages) for TypeInfo support. /// Used to determine if a call would be valid for purchasing a ticket. /// @@ -212,7 +212,7 @@ decl_module! { type Error = Error; const PalletId: PalletId = T::PalletId::get(); - const MaxCalls: u32 = T::MaxCalls::get() as u32; + const MaxCalls: u32 = T::MaxCalls::get(); fn deposit_event() = default; @@ -247,7 +247,7 @@ decl_module! 
{ #[weight = T::WeightInfo::set_calls(calls.len() as u32)] fn set_calls(origin, calls: Vec<::Call>) { T::ManagerOrigin::ensure_origin(origin)?; - ensure!(calls.len() <= T::MaxCalls::get(), Error::::TooManyCalls); + ensure!(calls.len() as u32 <= T::MaxCalls::get(), Error::::TooManyCalls); if calls.is_empty() { CallIndices::kill(); } else { diff --git a/frame/lottery/src/mock.rs b/frame/lottery/src/mock.rs index ca372cc37e24e..b668fba85951b 100644 --- a/frame/lottery/src/mock.rs +++ b/frame/lottery/src/mock.rs @@ -97,7 +97,7 @@ impl pallet_balances::Config for Test { parameter_types! { pub const LotteryPalletId: PalletId = PalletId(*b"py/lotto"); - pub const MaxCalls: usize = 2; + pub const MaxCalls: u32 = 2; pub const MaxGenerateRandom: u32 = 10; } diff --git a/frame/nicks/src/lib.rs b/frame/nicks/src/lib.rs index 1afe55756777a..0bad7cb9c1ac9 100644 --- a/frame/nicks/src/lib.rs +++ b/frame/nicks/src/lib.rs @@ -71,10 +71,10 @@ pub trait Config: frame_system::Config { type ForceOrigin: EnsureOrigin; /// The minimum length a name may be. - type MinLength: Get; + type MinLength: Get; /// The maximum length a name may be. - type MaxLength: Get; + type MaxLength: Get; } decl_storage! { @@ -122,10 +122,10 @@ decl_module! { const ReservationFee: BalanceOf = T::ReservationFee::get(); /// The minimum length a name may be. - const MinLength: u32 = T::MinLength::get() as u32; + const MinLength: u32 = T::MinLength::get(); /// The maximum length a name may be. - const MaxLength: u32 = T::MaxLength::get() as u32; + const MaxLength: u32 = T::MaxLength::get(); /// Set an account's name. The name should be a UTF-8-encoded string by convention, though /// we don't check it. @@ -147,8 +147,8 @@ decl_module! 
{ fn set_name(origin, name: Vec) { let sender = ensure_signed(origin)?; - ensure!(name.len() >= T::MinLength::get(), Error::::TooShort); - ensure!(name.len() <= T::MaxLength::get(), Error::::TooLong); + ensure!(name.len() as u32 >= T::MinLength::get(), Error::::TooShort); + ensure!(name.len() as u32 <= T::MaxLength::get(), Error::::TooLong); let deposit = if let Some((_, deposit)) = >::get(&sender) { Self::deposit_event(RawEvent::NameChanged(sender.clone())); @@ -308,8 +308,8 @@ mod tests { } parameter_types! { pub const ReservationFee: u64 = 2; - pub const MinLength: usize = 3; - pub const MaxLength: usize = 16; + pub const MinLength: u32 = 3; + pub const MaxLength: u32 = 16; } ord_parameter_types! { pub const One: u64 = 1; diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index 6c249ebcc61d8..5e0fde0e348a3 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../../benchmarking" } frame-support = { version = "3.0.0", default-features = false, path = "../../support" } frame-system = { version = "3.0.0", default-features = false, path = "../../system" } diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index 0c83347b1991f..714d27a0a6504 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -13,6 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-session 
= { version = "3.0.0", default-features = false, path = "../../../primitives/session" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index eae8d530af5e9..e68399d1118aa 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -333,6 +333,19 @@ macro_rules! parameter_types { I::from($value) } } + + impl $crate::scale_info::TypeInfo for $name { + type Identity = Self; + + fn type_info() -> $crate::scale_info::Type<$crate::scale_info::form::MetaForm> { + $crate::scale_info::Type::builder() + .path($crate::scale_info::Path::new(stringify!($name), module_path!())) + .composite( + scale_info::build::Fields::unnamed() + .field_of::<$type>(stringify!($type)) + ) + } + } }; (IMPL $name:ident, $type:ty, $value:expr) => { impl $name { From c90b0a277e3f3ed667870d025dca5ec132e18701 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Apr 2021 20:48:48 +0100 Subject: [PATCH 183/503] Add missing scale-info dependency --- Cargo.lock | 1 + bin/node/executor/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index c88b531452e4c..fe707140a3ea6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4188,6 +4188,7 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "sc-executor", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-core", diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index 54a44d59c2591..db079b4e18432 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0" } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } node-primitives = { 
version = "2.0.0", path = "../primitives" } node-runtime = { version = "2.0.0", path = "../runtime" } sc-executor = { version = "0.9.0", path = "../../../client/executor" } From d708cf2f6bdd96c8c8e48135b34d3de89becec8a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Apr 2021 20:58:11 +0100 Subject: [PATCH 184/503] Add TypeInfo derive --- primitives/runtime/src/generic/unchecked_extrinsic.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primitives/runtime/src/generic/unchecked_extrinsic.rs b/primitives/runtime/src/generic/unchecked_extrinsic.rs index 079aab455c3cf..4c6e8f921af8e 100644 --- a/primitives/runtime/src/generic/unchecked_extrinsic.rs +++ b/primitives/runtime/src/generic/unchecked_extrinsic.rs @@ -368,7 +368,7 @@ mod tests { const TEST_ACCOUNT: TestAccountId = 0; // NOTE: this is demonstration. One can simply use `()` for testing. - #[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd)] + #[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd, scale_info::TypeInfo)] struct TestExtra; impl SignedExtension for TestExtra { const IDENTIFIER: &'static str = "TestExtra"; From f8e1d8d19a2ad8aec975e7948792aed6cfa97dc1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 26 Apr 2021 11:07:17 +0100 Subject: [PATCH 185/503] Register compact call args wrapped with Compact --- .../procedural/src/pallet/expand/call.rs | 14 +++++++++++- frame/support/src/dispatch.rs | 22 +++++++++++++++---- 2 files changed, 31 insertions(+), 5 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 2c4de3ee1e337..a677250e34adb 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -56,6 +56,18 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { .collect::>() }); + let args_meta_type = def.call.methods.iter().map(|method| { + method.args.iter() + .map(|(is_compact, 
_, type_)| { + if *is_compact { + quote::quote_spanned!(type_.span() => #frame_support::codec::Compact<#type_>) + } else { + quote::quote_spanned!(type_.span() => #type_ ) + } + }) + .collect::>() + }); + let default_docs = [syn::parse_quote!( r"Contains one variant per dispatchable that can be called by an extrinsic." )]; @@ -180,7 +192,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { arguments: #frame_support::scale_info::prelude::vec![ #( #frame_support::metadata::FunctionArgumentMetadata { name: stringify!(#args_name), - ty: #frame_support::scale_info::meta_type::<#args_type>(), + ty: #frame_support::scale_info::meta_type::<#args_meta_type>(), }, )* ], documentation: #frame_support::scale_info::prelude::vec![ #( #fn_doc ),* ], diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 3565fe2bea821..f07bbd8cb3e4e 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2512,13 +2512,28 @@ macro_rules! __function_to_metadata { $( $crate::metadata::FunctionArgumentMetadata { name: stringify!($param_name), - ty: $crate::scale_info::meta_type::<$param>(), + ty: $crate::__function_to_metadata!(@meta_type + $(#[$codec_attr])* $param_name $param + ), } ),* ], documentation: $crate::scale_info::prelude::vec![ $( $fn_doc ),* ], } }; + + (@meta_type #[compact] $param_name:ident $param:ty) => { + $crate::scale_info::meta_type::<$crate::codec::Compact<$param>>() + }; + (@meta_type $param_name:ident $param:ty) => { + $crate::scale_info::meta_type::<$param>() + }; + (@meta_type $(#[codec_attr:ident])* $param_name:ident, $param:ty) => { + compile_error!(concat!( + "Invalid attribute for parameter `", stringify!($param_name), + "`, the following attributes are supported: `#[compact]`" + )); + } } #[macro_export] @@ -2584,7 +2599,6 @@ mod tests { IntegrityTest, Get, PalletInfo, }; use crate::metadata::*; - use codec::Compact; pub trait Config: system::Config + Sized where Self::AccountId: From { } @@ -2671,7 
+2685,7 @@ mod tests { arguments: vec![ FunctionArgumentMetadata { name: "_data", - ty: scale_info::meta_type::>(), + ty: scale_info::meta_type::>(), } ], documentation: vec![], @@ -2714,7 +2728,7 @@ mod tests { }, FunctionArgumentMetadata { name: "_data2", - ty: scale_info::meta_type::>() + ty: scale_info::meta_type::>() } ], documentation: vec![], From 7e16dd971f7397e364234822655fac1511770e05 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Apr 2021 10:47:24 +0100 Subject: [PATCH 186/503] Updating to new v13 metadata types: pallet events --- Cargo.lock | 6 ++-- .../procedural/src/pallet/expand/event.rs | 28 ----------------- .../procedural/src/pallet/parse/event.rs | 30 ------------------- frame/support/src/error.rs | 11 +++---- frame/support/src/event.rs | 23 ++++---------- frame/support/src/lib.rs | 2 +- frame/support/src/metadata.rs | 6 ++-- 7 files changed, 19 insertions(+), 87 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fe707140a3ea6..2cebba9e8ddec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1789,7 +1789,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#4a3b60f513a865887a4ae663806e6b0ebedf5942" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#42c4d9386f9814f0086f41d6a05c36005f7b0069" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", @@ -8155,7 +8155,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#91633dd5f52fd7770299f3f551cb5b38f8fd0b32" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#b856e028748e8b86e84352694681e8510fecfd0a" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -8167,7 +8167,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.4.0" -source = 
"git+https://github.com/paritytech/scale-info?branch=aj-substrate#91633dd5f52fd7770299f3f551cb5b38f8fd0b32" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#b856e028748e8b86e84352694681e8510fecfd0a" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 41abdfb32b8ef..cbfbc7811839c 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -42,26 +42,6 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); let event_impl_gen= &event.gen_kind.type_impl_gen(event.attr_span); - let metadata = event.metadata.iter() - .map(|event| { - let name = format!("{}", event.name); - let args = event.args - .iter() - .map(|(ty, name)| { - quote::quote!( - #frame_support::metadata::TypeSpec::new::<#ty>(#name) - ) - }); - let docs = &event.docs; - quote::quote!( - #frame_support::metadata::EventMetadata { - name: #name, - arguments: #frame_support::scale_info::prelude::vec![ #( #args, )* ], - documentation: #frame_support::scale_info::prelude::vec![ #( #docs, )* ], - }, - ) - }); - let event_item = { let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; if let syn::Item::Enum(item) = item { @@ -141,13 +121,5 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { fn from(_: #event_ident<#event_use_gen>) -> () { () } } - - impl<#event_impl_gen> #event_ident<#event_use_gen> #event_where_clause { - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::EventMetadata> { - #frame_support::scale_info::prelude::vec![ #( #metadata )* ] - } - } ) } diff --git 
a/frame/support/procedural/src/pallet/parse/event.rs b/frame/support/procedural/src/pallet/parse/event.rs index ebaa2293bbe9e..721975fa8efac 100644 --- a/frame/support/procedural/src/pallet/parse/event.rs +++ b/frame/support/procedural/src/pallet/parse/event.rs @@ -18,7 +18,6 @@ use super::helper; use syn::spanned::Spanned; use quote::ToTokens; -use frame_support_procedural_tools::clean_type_string; /// List of additional token to be used for parsing. mod keyword { @@ -35,8 +34,6 @@ pub struct EventDef { pub index: usize, /// The keyword Event used (contains span). pub event: keyword::Event, - /// Event metadatas: `(name, args, docs)`. - pub metadata: Vec, /// A set of usage of instance, must be check for consistency with trait. pub instances: Vec, /// The kind of generic the type `Event` has. @@ -49,13 +46,6 @@ pub struct EventDef { pub attr_span: proc_macro2::Span, } -/// Metadata for a pallet event variant. -pub struct EventDefMetadata { - pub name: syn::Ident, - pub args: Vec<(syn::Type, String)>, - pub docs: Vec, -} - /// Attribute for Event: defines metadata name to use. 
/// /// Syntax is: @@ -172,7 +162,6 @@ impl EventDef { let event_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; - let metadata = attr_info.metadata.unwrap_or_else(Vec::new); let deposit_event = attr_info.deposit_event; if !matches!(item.vis, syn::Visibility::Public(_)) { @@ -202,28 +191,9 @@ impl EventDef { let event = syn::parse2::(item.ident.to_token_stream())?; - let metadata = item.variants.iter() - .map(|variant| { - let name = variant.ident.clone(); - let docs = helper::get_doc_literals(&variant.attrs); - let args = variant.fields.iter() - .map(|field| { - metadata.iter().find(|m| m.0 == field.ty) - .cloned() - .unwrap_or_else(|| { - (field.ty.clone(), clean_type_string(&field.ty.to_token_stream().to_string())) - }) - }) - .collect(); - - EventDefMetadata { name, args, docs } - }) - .collect(); - Ok(EventDef { attr_span, index, - metadata, instances, deposit_event, event, diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index 7d8b1871edd48..1fab557e95b8e 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -20,7 +20,7 @@ #[doc(hidden)] pub use sp_runtime::traits::{LookupError, BadOrigin}; -use crate::metadata::ErrorMetadata; +use crate::metadata::PalletErrorMetadata; use sp_std::prelude::Vec; /// Declare an error type for a runtime module. @@ -212,13 +212,14 @@ macro_rules! decl_error { } } -/// All the metadata about errors in a module. +/// All the metadata about errors in a pallet. +/// todo: rename? 
PalletErrorMetadata would clash pub trait ModuleErrorMetadata { - fn metadata() -> Vec; + fn metadata() -> PalletErrorMetadata; } impl ModuleErrorMetadata for &'static str { - fn metadata() -> Vec { - Vec::new() + fn metadata() -> PalletErrorMetadata { + PalletErrorMetadata { ty: scale_info::meta_type::<&'static str>() } } } diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index aa468e89ff024..63fb9ec8f1d0f 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -503,6 +503,7 @@ macro_rules! impl_outer_event { #[macro_export] #[doc(hidden)] +// todo: [AJ] rename/refactor? macro_rules! __impl_outer_event_json_metadata { ( $runtime:ident; @@ -510,21 +511,6 @@ macro_rules! __impl_outer_event_json_metadata { $( $module_name:ident::Event < $( $generic_params:path ),* > $( $instance:ident )?, )*; ) => { impl $runtime { - #[allow(dead_code)] - pub fn outer_event_metadata() -> $crate::metadata::OuterEventMetadata { - $crate::metadata::OuterEventMetadata { - name: stringify!($event_name), - events: $crate::scale_info::prelude::vec![ - $( - $crate::metadata::ModuleEventMetadata { - name: stringify!($module_name), - events: $module_name::Event ::< $( $generic_params ),* > ::metadata() - } - ),* - ] - } - } - $crate::__impl_outer_event_json_metadata! { @DECL_MODULE_EVENT_FNS $( $module_name < $( $generic_params ),* > $( $instance )? ; )* @@ -541,9 +527,12 @@ macro_rules! __impl_outer_event_json_metadata { $( #[allow(dead_code)] pub fn [< __module_events_ $module_name $( _ $instance )? 
>] () -> - Vec<$crate::metadata::EventMetadata> + $crate::metadata::PalletEventMetadata { - $module_name::Event ::< $( $generic_params ),* > ::metadata() + let ty = $crate::scale_info::meta_type::< + $module_name::Event ::< $( $generic_params ),* > + >(); + $crate::metadata::PalletEventMetadata { ty } } )* } diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index e68399d1118aa..017181abf353a 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -342,7 +342,7 @@ macro_rules! parameter_types { .path($crate::scale_info::Path::new(stringify!($name), module_path!())) .composite( scale_info::build::Fields::unnamed() - .field_of::<$type>(stringify!($type)) + .field_of::<$type>(stringify!($type), &["todo: docs"]) ) } } diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 92ff3a1a56565..fd4d9be7726c6 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -17,9 +17,9 @@ pub use frame_metadata::{ v13::{ - ErrorMetadata, EventMetadata, ExtrinsicMetadata, FunctionArgumentMetadata, - FunctionMetadata, ModuleConstantMetadata, ModuleEventMetadata, ModuleMetadata, - OuterEventMetadata, RuntimeMetadataLastVersion, SignedExtensionMetadata, + PalletErrorMetadata, PalletEventMetadata, ExtrinsicMetadata, FunctionArgumentMetadata, + FunctionMetadata, PalletCallMetadata, PalletMetadata, PalletConstantMetadata, + RuntimeMetadataLastVersion, SignedExtensionMetadata, StorageEntryMetadata, StorageEntryModifier, StorageEntryType, StorageHasher, StorageMetadata, TypeSpec, }, From a1bd0a81aea8a4aca5041528e4ef9f968e637cd5 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Apr 2021 11:44:41 +0100 Subject: [PATCH 187/503] Rename ModuleConstantMetadata to PalletConstantMetadata --- .../support/procedural/src/pallet/expand/constants.rs | 4 ++-- frame/support/src/dispatch.rs | 4 ++-- frame/support/src/metadata.rs | 8 ++++---- frame/support/test/tests/pallet.rs | 10 +++++----- 
frame/support/test/tests/pallet_instance.rs | 2 +- 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index faec6ea58d846..33c88b46d3d26 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -96,7 +96,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { } } - #frame_support::metadata::ModuleConstantMetadata { + #frame_support::metadata::PalletConstantMetadata { name: #ident_str, ty: #frame_support::scale_info::meta_type::<#const_type>(), value: #default_byte_getter::<#type_use_gen>(Default::default()).default_byte(), @@ -110,7 +110,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { #[doc(hidden)] pub fn module_constants_metadata() - -> #frame_support::sp_std::vec::Vec<#frame_support::metadata::ModuleConstantMetadata> + -> #frame_support::sp_std::vec::Vec<#frame_support::metadata::PalletConstantMetadata> { #frame_support::scale_info::prelude::vec![ #( #consts ),* ] } diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index f07bbd8cb3e4e..d00be24c6324e 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2398,7 +2398,7 @@ macro_rules! __impl_module_constants_metadata { { #[doc(hidden)] #[allow(dead_code)] - pub fn module_constants_metadata() -> ::sp_std::vec::Vec<$crate::metadata::ModuleConstantMetadata> { + pub fn module_constants_metadata() -> ::sp_std::vec::Vec<$crate::metadata::PalletConstantMetadata> { // Create the `ByteGetter`s $( #[allow(non_upper_case_types)] @@ -2422,7 +2422,7 @@ macro_rules! 
__impl_module_constants_metadata { )* $crate::scale_info::prelude::vec![ $( - $crate::metadata::ModuleConstantMetadata { + $crate::metadata::PalletConstantMetadata { name: stringify!($name), ty: $crate::scale_info::meta_type::<$type>(), value: $default_byte_name::<$const_trait_instance $(, $const_instance)?>( diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index fd4d9be7726c6..c5f6a1f858067 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -247,7 +247,7 @@ macro_rules! __runtime_modules_to_metadata_calls_storage { // use super::*; // use frame_metadata::{ // EventMetadata, StorageEntryModifier, StorageEntryType, FunctionMetadata, StorageEntryMetadata, -// ModuleMetadata, RuntimeMetadataPrefixed, DefaultByte, ModuleConstantMetadata, DefaultByteGetter, +// ModuleMetadata, RuntimeMetadataPrefixed, DefaultByte, PalletConstantMetadata, DefaultByteGetter, // ErrorMetadata, ExtrinsicMetadata, // }; // use codec::{Encode, Decode}; @@ -528,7 +528,7 @@ macro_rules! __runtime_modules_to_metadata_calls_storage { // )), // constants: DecodeDifferent::Encode( // FnEncode(|| &[ -// ModuleConstantMetadata { +// PalletConstantMetadata { // name: DecodeDifferent::Encode("BlockNumber"), // ty: DecodeDifferent::Encode("T::BlockNumber"), // value: DecodeDifferent::Encode( @@ -536,7 +536,7 @@ macro_rules! __runtime_modules_to_metadata_calls_storage { // ), // documentation: DecodeDifferent::Encode(&[" Hi, I am a comment."]), // }, -// ModuleConstantMetadata { +// PalletConstantMetadata { // name: DecodeDifferent::Encode("GetType"), // ty: DecodeDifferent::Encode("T::AccountId"), // value: DecodeDifferent::Encode( @@ -544,7 +544,7 @@ macro_rules! 
__runtime_modules_to_metadata_calls_storage { // ), // documentation: DecodeDifferent::Encode(&[]), // }, -// ModuleConstantMetadata { +// PalletConstantMetadata { // name: DecodeDifferent::Encode("ASSOCIATED_CONST"), // ty: DecodeDifferent::Encode("u64"), // value: DecodeDifferent::Encode( diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index bb935091e0757..9364abac73a0c 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -799,7 +799,7 @@ fn metadata() { }, ]), constants: vec![ - ModuleConstantMetadata { + PalletConstantMetadata { name: "MyGetParam", ty: scale_info::meta_type::(), value: vec![10, 0, 0, 0], @@ -808,7 +808,7 @@ fn metadata() { " Some comment", ], }, - ModuleConstantMetadata { + PalletConstantMetadata { name: "MyGetParam2", ty: scale_info::meta_type::(), value: vec![11, 0, 0, 0], @@ -817,13 +817,13 @@ fn metadata() { " Some comment", ], }, - ModuleConstantMetadata { + PalletConstantMetadata { name: "MyGetParam3", ty: scale_info::meta_type::(), value: vec![12, 0, 0, 0, 0, 0, 0, 0], documentation: vec![], }, - ModuleConstantMetadata { + PalletConstantMetadata { name: "some_extra", ty: scale_info::meta_type::(), value: vec![100, 0, 0, 0, 0, 0, 0, 0], @@ -832,7 +832,7 @@ fn metadata() { " Some doc", ], }, - ModuleConstantMetadata { + PalletConstantMetadata { name: "some_extra_extra", ty: scale_info::meta_type::(), value: vec![0, 0, 0, 0, 0, 0, 0, 0], diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index fdb4d52b5fc29..1dd3ad51a97ba 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -672,7 +672,7 @@ fn metadata() { }, ]), constants: vec![ - ModuleConstantMetadata { + PalletConstantMetadata { name: "MyGetParam", ty: scale_info::meta_type::(), value: vec![10, 0, 0, 0], From e4e0be66070b128def5edb8a511448f2441c6cee Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 
29 Apr 2021 11:58:12 +0100 Subject: [PATCH 188/503] Add todo about event metadata attributes --- frame/support/procedural/src/pallet/parse/event.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/support/procedural/src/pallet/parse/event.rs b/frame/support/procedural/src/pallet/parse/event.rs index 721975fa8efac..2148b5e425c0a 100644 --- a/frame/support/procedural/src/pallet/parse/event.rs +++ b/frame/support/procedural/src/pallet/parse/event.rs @@ -124,6 +124,7 @@ impl syn::parse::Parse for PalletEventAttr { } struct PalletEventAttrInfo { + // todo: [AJ] this is unused now because of TypeInfo derive for Error, consider removing if compatible with downstream clients metadata: Option>, deposit_event: Option<(syn::Visibility, proc_macro2::Span)>, } From 88f489a3f669cc71040efa9a914d1771d594e0f2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Apr 2021 17:35:50 +0100 Subject: [PATCH 189/503] Use TypeInfo for Error metadata --- Cargo.lock | 2 +- frame/atomic-swap/src/lib.rs | 2 +- frame/contracts/src/lib.rs | 2 +- .../procedural/src/pallet/expand/error.rs | 25 ++--------- .../src/pallet/expand/pallet_struct.rs | 27 +++++------- frame/support/src/dispatch.rs | 44 +++++++++++++++---- frame/support/src/error.rs | 32 +------------- frame/support/src/lib.rs | 5 +-- frame/support/src/metadata.rs | 2 +- frame/system/src/lib.rs | 2 +- 10 files changed, 57 insertions(+), 86 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2cebba9e8ddec..f8c5b45567cc6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1789,7 +1789,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#42c4d9386f9814f0086f41d6a05c36005f7b0069" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#92aced5f3d4605f36f72b875cbdac8be9ac5e430" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/atomic-swap/src/lib.rs b/frame/atomic-swap/src/lib.rs index 
b85dd54759133..a7044a74447c2 100644 --- a/frame/atomic-swap/src/lib.rs +++ b/frame/atomic-swap/src/lib.rs @@ -144,7 +144,7 @@ pub mod pallet { /// Atomic swap's pallet configuration trait. #[pallet::config] - pub trait Config: frame_system::Config + scale_info::TypeInfo { // todo: [AJ] see whether we really need this bound + pub trait Config: frame_system::Config { /// The overarching event type. type Event: From> + IsType<::Event>; /// Swap action. diff --git a/frame/contracts/src/lib.rs b/frame/contracts/src/lib.rs index 491ef6d5518fa..8c7438d53ef2e 100644 --- a/frame/contracts/src/lib.rs +++ b/frame/contracts/src/lib.rs @@ -144,7 +144,7 @@ pub mod pallet { use super::*; #[pallet::config] - pub trait Config: frame_system::Config + scale_info::TypeInfo { // todo: we shouldn't need this TypeInfo bound + pub trait Config: frame_system::Config { /// The time implementation used to supply timestamps to conntracts through `seal_now`. type Time: Time; diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index 47ce7f54222bd..00bfeacbe98e1 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -18,7 +18,6 @@ use crate::pallet::{Def, parse::helper::get_doc_literals}; /// * impl various trait on Error -/// * impl ModuleErrorMetadata for Error pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { let error = if let Some(error) = &def.error { error @@ -52,17 +51,6 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { quote::quote_spanned!(error.attr_span => Self::#variant => #variant_str,) }); - let metadata = error.variants.iter() - .map(|(variant, doc)| { - let variant_str = format!("{}", variant); - quote::quote_spanned!(error.attr_span => - #frame_support::metadata::ErrorMetadata { - name: #variant_str, - documentation: #frame_support::scale_info::prelude::vec![ #( #doc, )* ], - }, - ) - }); - let error_item = { 
let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; if let syn::Item::Enum(item) = item { @@ -73,6 +61,10 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { }; error_item.variants.insert(0, phantom_variant); + // derive TypeInfo for error metadata + error_item.attrs.push( + syn::parse_quote!( #[derive(#frame_support::scale_info::TypeInfo)] ) + ); if get_doc_literals(&error_item.attrs).is_empty() { error_item.attrs.push(syn::parse_quote!( @@ -136,14 +128,5 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { } } } - - impl<#type_impl_gen> #frame_support::error::ModuleErrorMetadata - for #error_ident<#type_use_gen> - #config_where_clause - { - fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::ErrorMetadata> { - #frame_support::scale_info::prelude::vec![ #( #metadata )* ] - } - } ) } diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index adf40b6e71d0c..15f14946ce56f 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -20,7 +20,6 @@ use crate::pallet::{Def, parse::helper::get_doc_literals}; /// * Add derive trait on Pallet /// * Implement GetPalletVersion on Pallet /// * Implement OnGenesis on Pallet -/// * Implement ModuleErrorMetadata on Pallet /// * declare Module type alias for construct_runtime /// * replace the first field type of `struct Pallet` with `PhantomData` if it is `_` /// * implementation of `PalletInfoAccess` information @@ -71,35 +70,29 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { )] )); - let module_error_metadata = if let Some(error_def) = &def.error { + let pallet_error_metadata = if let Some(error_def) = &def.error { let error_ident = &error_def.error; quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> 
#frame_support::error::ModuleErrorMetadata - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::ErrorMetadata> { - < - #error_ident<#type_use_gen> as #frame_support::error::ModuleErrorMetadata - >::metadata() + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { + pub fn error_metadata() -> Option<#frame_support::metadata::PalletErrorMetadata> { + Some(#frame_support::metadata::PalletErrorMetadata { + ty: #frame_support::scale_info::meta_type::<#error_ident<#type_use_gen>>() + }) } } ) } else { quote::quote_spanned!(def.pallet_struct.attr_span => - impl<#type_impl_gen> #frame_support::error::ModuleErrorMetadata - for #pallet_ident<#type_use_gen> - #config_where_clause - { - fn metadata() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::ErrorMetadata> { - #frame_support::scale_info::prelude::vec::Vec::new() + impl<#type_impl_gen> #pallet_ident<#type_use_gen> #config_where_clause { + pub fn error_metadata() -> Option<#frame_support::metadata::PalletErrorMetadata> { + None } } ) }; quote::quote_spanned!(def.pallet_struct.attr_span => - #module_error_metadata + #pallet_error_metadata /// Type alias to `Pallet`, to be used by `construct_runtime`. /// diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index d00be24c6324e..f5fa866f95e82 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2142,19 +2142,16 @@ macro_rules! decl_module { )* } } + $crate::__impl_error_metadata! { + $mod_type<$trait_instance: $trait_name $(, $instance: $instantiable)?> + { $( $other_where_bounds )* } + $error_type + } $crate::__impl_module_constants_metadata ! 
{ $mod_type<$trait_instance: $trait_name $(, $instance: $instantiable)?> { $( $other_where_bounds )* } $( $constants )* } - - impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $crate::error::ModuleErrorMetadata - for $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* - { - fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::ErrorMetadata> { - <$error_type as $crate::error::ModuleErrorMetadata>::metadata() - } - } } } @@ -2331,6 +2328,37 @@ macro_rules! __dispatch_impl_metadata { } } +/// Implement metadata for dispatch. +#[macro_export] +#[doc(hidden)] +macro_rules! __impl_error_metadata { + ( + $mod_type:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?> + { $( $other_where_bounds:tt )* } + $error_type:tt + $($rest:tt)* + ) => { + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> + where $( $other_where_bounds )* + { + #[doc(hidden)] + #[allow(dead_code)] + pub fn error_metadata() -> Option<$crate::metadata::PalletErrorMetadata> { + let ty = $crate::scale_info::meta_type::<$error_type>(); + // If no error type is declared, `&'static str` is the default error type + // todo: [AJ] can this be done rather by the macro? Not an expert with the declarative macros. + if ty == $crate::scale_info::meta_type::<&'static str>() { + None + } else { + Some($crate::metadata::PalletErrorMetadata { + ty: $crate::scale_info::meta_type::<$error_type>() + }) + } + } + } + }; +} + /// Implement metadata for module constants. #[macro_export] #[doc(hidden)] diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index 1fab557e95b8e..b559e21e20537 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -20,7 +20,6 @@ #[doc(hidden)] pub use sp_runtime::traits::{LookupError, BadOrigin}; -use crate::metadata::PalletErrorMetadata; use sp_std::prelude::Vec; /// Declare an error type for a runtime module. 
@@ -88,6 +87,7 @@ macro_rules! decl_error { } ) => { $(#[$attr])* + #[derive($crate::scale_info::TypeInfo)] pub enum $error<$generic: $trait $(, $inst_generic: $instance)?> $( where $( $where_ty: $where_bound ),* )? { @@ -160,24 +160,6 @@ macro_rules! decl_error { } } } - - impl<$generic: $trait $(, $inst_generic: $instance)?> $crate::error::ModuleErrorMetadata - for $error<$generic $(, $inst_generic)?> - $( where $( $where_ty: $where_bound ),* )? - { - fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::ErrorMetadata> { - $crate::scale_info::prelude::vec![ - $( - $crate::metadata::ErrorMetadata { - name: stringify!($name), - documentation: $crate::scale_info::prelude::vec![ - $( $doc_attr ),* - ], - } - ),* - ] - } - } }; (@GENERATE_AS_U8 $self:ident @@ -211,15 +193,3 @@ macro_rules! decl_error { } } } - -/// All the metadata about errors in a pallet. -/// todo: rename? PalletErrorMetadata would clash -pub trait ModuleErrorMetadata { - fn metadata() -> PalletErrorMetadata; -} - -impl ModuleErrorMetadata for &'static str { - fn metadata() -> PalletErrorMetadata { - PalletErrorMetadata { ty: scale_info::meta_type::<&'static str>() } - } -} diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 017181abf353a..298d9ab9e65fe 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1357,7 +1357,7 @@ pub mod pallet_prelude { /// It implements on pallet: /// * [`traits::GetPalletVersion`] /// * [`traits::OnGenesis`]: contains some logic to write pallet version into storage. -/// * `ModuleErrorMetadata`: using error declared or no metadata. +/// * `PalletErrorTypeInfo`: provides the type information for the pallet error, if defined. /// /// It declare `type Module` type alias for `Pallet`, used by [`construct_runtime`]. /// @@ -1486,9 +1486,6 @@ pub mod pallet_prelude { /// The macro implements `From>` for `&'static str`. /// The macro implements `From>` for `DispatchError`. 
/// -/// The macro implements `ModuleErrorMetadata` on `Pallet` defining the `ErrorMetadata` of the -/// pallet. -/// /// # Event: `#[pallet::event]` optional /// /// Allow to define pallet events, pallet events are stored in the block when they deposited (and diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index c5f6a1f858067..5d6da4a5d597a 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -129,7 +129,7 @@ macro_rules! __runtime_modules_to_metadata { $mod, $module $( <$instance> )?, $runtime, $(with $kw)* ), constants: $mod::$module::<$runtime $(, $mod::$instance )?>::module_constants_metadata(), - errors: <$mod::$module::<$runtime $(, $mod::$instance )?> as $crate::error::ModuleErrorMetadata>::metadata(), + error: $mod::$module::<$runtime $(, $mod::$instance )?>::error_metadata(), }; $( $rest )* ) diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index da8e1614ca63a..576de27e072e3 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -161,7 +161,7 @@ pub mod pallet { /// System configuration trait. Implemented by runtime. #[pallet::config] #[pallet::disable_frame_system_supertrait_check] - pub trait Config: 'static + Eq + Clone { + pub trait Config: 'static + Eq + Clone + scale_info::TypeInfo { // todo: [AJ] see whether we really need this bound /// The basic call filter to use in Origin. All origins are built with this filter as base, /// except Root. type BaseCallFilter: Filter; From ab7607885f520b52c33678f7a45639371a9c5e32 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Apr 2021 10:13:50 +0100 Subject: [PATCH 190/503] Add Instance TypeInfo bounds --- frame/support/src/dispatch.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index f5fa866f95e82..fe3b566d2d086 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2338,7 +2338,8 @@ macro_rules! 
__impl_error_metadata { $error_type:tt $($rest:tt)* ) => { - impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> + // todo: [AJ] remove TypeInfo bounds on Instance, depends on skipping the T in PhantomData + impl<$trait_instance: $trait_name $(, $instance: $instantiable + $crate::scale_info::TypeInfo)?> $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* { #[doc(hidden)] From c170b5c054893cdc1837d4cf6438c4622156d41c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Apr 2021 10:14:03 +0100 Subject: [PATCH 191/503] Remove some legacy Events metadata --- frame/support/src/event.rs | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 63fb9ec8f1d0f..60e2f2b2d688f 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -141,13 +141,6 @@ macro_rules! decl_event { impl From for () { fn from(_: Event) -> () { () } } - impl Event { - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::EventMetadata> { - $crate::__events_to_metadata!(; $( $events )* ) - } - } } } @@ -290,17 +283,6 @@ macro_rules! __decl_generic_event { impl<$( $generic_param ),* $(, $instance)? > From> for () { fn from(_: RawEvent<$( $generic_param ),* $(, $instance)?>) -> () { () } } - impl<$( $generic_param ),* $(, $instance)?> RawEvent<$( $generic_param ),* $(, $instance)?> - where - $( $generic_param: $crate::scale_info::TypeInfo + 'static ),* - $(, $instance: $crate::scale_info::TypeInfo + 'static)? // todo: [AJ] just adding this to make it compile, look at removing it. 
- { - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata() -> $crate::scale_info::prelude::vec::Vec<$crate::metadata::EventMetadata> { - $crate::__events_to_metadata!(; $( $events )* ) - } - } }; (@cannot_parse $ty:ty) => { compile_error!(concat!("The type `", stringify!($ty), "` can't be parsed as an unnamed one, please name it `Name = ", stringify!($ty), "`")); From db30e0fb989812406335d6d91be33de0f4d67700 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 4 May 2021 10:07:29 +0100 Subject: [PATCH 192/503] Rename ModuleMetadata to PalletMetadata --- frame/support/src/metadata.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 5d6da4a5d597a..e42bcec6b391d 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -116,7 +116,7 @@ macro_rules! __runtime_modules_to_metadata { ) => { $crate::__runtime_modules_to_metadata!( $runtime; - $( $metadata, )* $crate::metadata::ModuleMetadata { + $( $metadata, )* $crate::metadata::PalletMetadata { name: stringify!($name), index: $index, storage: $crate::__runtime_modules_to_metadata_calls_storage!( From 9a6a3094c4fea8c36eaa32d12ea773564275c2e6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 4 May 2021 10:09:34 +0100 Subject: [PATCH 193/503] Add Call meta type to PalletCallMetadata --- frame/support/src/dispatch.rs | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index fe3b566d2d086..74b9fcf64804c 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2314,15 +2314,21 @@ macro_rules! 
__dispatch_impl_metadata { ( $mod_type:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?> { $( $other_where_bounds:tt )* } + $call_type:ident $($rest:tt)* ) => { - impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> + // todo: [AJ] another Instance: TypeInfo bound to remove + impl<$trait_instance: $trait_name $(, $instance: $instantiable + $crate::scale_info::TypeInfo)? + $crate::scale_info::TypeInfo> $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* { #[doc(hidden)] #[allow(dead_code)] - pub fn call_functions() -> $crate::dispatch::Vec<$crate::metadata::FunctionMetadata> { - $crate::__call_to_functions!($($rest)*) + pub fn call_functions() -> $crate::metadata::PalletCallMetadata { + let ty = $crate::scale_info::meta_type::<$call_type<$trait_instance $(, $instance)?>>(); + $crate::metadata::PalletCallMetadata { + ty, + calls: $crate::__call_to_functions!($($rest)*), + } } } } @@ -2471,7 +2477,7 @@ macro_rules! __impl_module_constants_metadata { #[doc(hidden)] macro_rules! __call_to_functions { ( - $call_type:ident $origin_type:ty + $origin_type:ty { $( $(#[doc = $doc_attr:tt])* From 7733298dbe48059dd10b8abbc604dbe1b96b8c5c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 4 May 2021 17:36:03 +0100 Subject: [PATCH 194/503] Add scale_info bounds for Error metadata, temporarily? 
--- .../procedural/src/pallet/expand/pallet_struct.rs | 2 +- frame/support/procedural/src/pallet/parse/mod.rs | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index 15f14946ce56f..30e8c85d07a9c 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -26,7 +26,7 @@ use crate::pallet::{Def, parse::helper::get_doc_literals}; pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { let frame_support = &def.frame_support; let frame_system = &def.frame_system; - let type_impl_gen = &def.type_impl_generics(def.pallet_struct.attr_span); + let type_impl_gen = &def.type_impl_scale_info_bounded_generics(def.pallet_struct.attr_span); let type_use_gen = &def.type_use_generics(def.pallet_struct.attr_span); let type_decl_gen = &def.type_decl_generics(def.pallet_struct.attr_span); let pallet_ident = &def.pallet_struct.pallet; diff --git a/frame/support/procedural/src/pallet/parse/mod.rs b/frame/support/procedural/src/pallet/parse/mod.rs index 39a40fc148bcd..8281d772f919f 100644 --- a/frame/support/procedural/src/pallet/parse/mod.rs +++ b/frame/support/procedural/src/pallet/parse/mod.rs @@ -267,6 +267,20 @@ impl Def { } } + /// Depending on if pallet is instantiable: + /// * either `T: Config + TypeInfo` + /// * or `T: Config + TypeInfo, I: 'static + TypeInfo` + /// todo: [AJ] see if we can remove this by not requiring TypeInfo on all generic params in scale_info + pub fn type_impl_scale_info_bounded_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { + let frame_support = &self.frame_support; + let bound = quote::quote!(#frame_support::scale_info::TypeInfo); + if self.config.has_instance { + quote::quote_spanned!(span => T: Config + #bound, I: 'static + #bound) + } else { + quote::quote_spanned!(span => T: 
Config + #bound) + } + } + /// Depending on if pallet is instantiable: /// * either `T: Config` /// * or `T: Config, I: 'static = ()` From cdf8b132d24d0c82d0d30c46144d848d6d8ad7ed Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 5 May 2021 09:30:03 +0100 Subject: [PATCH 195/503] Remove test for outer_event_metadata (no longer used) --- frame/support/src/event.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 60e2f2b2d688f..4be7b6da48e99 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -788,11 +788,6 @@ mod tests { } } - #[test] - fn outer_event_metadata() { - assert_eq!(expected_metadata(), TestRuntime::outer_event_metadata()); - } - #[test] fn test_codec() { let runtime_1_event_module_2 = TestEvent::event_module2( From 9d44b06bf2b953c3228e37b14fe00d72da809504 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 5 May 2021 09:30:26 +0100 Subject: [PATCH 196/503] Derive TypeInfo for test runtime --- test-utils/runtime/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test-utils/runtime/src/lib.rs b/test-utils/runtime/src/lib.rs index 07fe1926d1659..ad8a671688d93 100644 --- a/test-utils/runtime/src/lib.rs +++ b/test-utils/runtime/src/lib.rs @@ -424,7 +424,7 @@ cfg_if! { } } -#[derive(Clone, Eq, PartialEq)] +#[derive(Clone, Eq, PartialEq, scale_info::TypeInfo)] pub struct Runtime; impl GetNodeBlockType for Runtime { @@ -488,7 +488,7 @@ parameter_types! 
{ }; pub RuntimeBlockLength: BlockLength = BlockLength::max(4 * 1024 * 1024); - pub RuntimeBlockWeights: BlockWeights = +k pub RuntimeBlockWeights: BlockWeights = BlockWeights::with_sensible_defaults(4 * 1024 * 1024, Perbill::from_percent(75)); } From 30427f37f9353fa4ffdd7147f32e9f8953c53d96 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 5 May 2021 09:30:59 +0100 Subject: [PATCH 197/503] Construct PalletCallMetadata for call functions --- frame/support/procedural/src/pallet/expand/call.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index a677250e34adb..6f302b89bc18a 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -185,8 +185,9 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { #[doc(hidden)] - pub fn call_functions() -> #frame_support::scale_info::prelude::vec::Vec<#frame_support::metadata::FunctionMetadata> { - #frame_support::scale_info::prelude::vec![ #( + pub fn call_functions() -> #frame_support::metadata::PalletCallMetadata { + let ty = #frame_support::scale_info::meta_type::<#call_ident<#type_use_gen>>(); + let calls = #frame_support::scale_info::prelude::vec![ #( #frame_support::metadata::FunctionMetadata { name: stringify!(#fn_name), arguments: #frame_support::scale_info::prelude::vec![ #( @@ -197,7 +198,8 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { )* ], documentation: #frame_support::scale_info::prelude::vec![ #( #fn_doc ),* ], }, - )* ] + )* ]; + #frame_support::metadata::PalletCallMetadata { ty, calls } } } ) From 6313faf5567a1758139012f45619a0eacd0d2910 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 5 May 2021 09:33:26 +0100 Subject: [PATCH 198/503] Use type impl scale-info bounded generics for Call enum --- 
frame/support/procedural/src/pallet/expand/call.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 6f302b89bc18a..e0c2ae225469c 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -23,7 +23,7 @@ use syn::spanned::Spanned; pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { let frame_support = &def.frame_support; let frame_system = &def.frame_system; - let type_impl_gen = &def.type_impl_generics(def.call.attr_span); + let type_impl_gen = &def.type_impl_scale_info_bounded_generics(def.call.attr_span); let type_decl_bounded_gen = &def.type_decl_bounded_generics(def.call.attr_span); let type_use_gen = &def.type_use_generics(def.call.attr_span); let call_ident = syn::Ident::new("Call", def.call.attr_span); From febef22ffb5efa5069767c31885b289462a9abbc Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 5 May 2021 10:42:55 +0100 Subject: [PATCH 199/503] Fix up tests for refactored metadata --- frame/identity/src/lib.rs | 5 +- frame/support/src/dispatch.rs | 10 +- frame/support/src/event.rs | 59 --------- frame/support/test/tests/construct_runtime.rs | 102 ++++++++-------- frame/support/test/tests/pallet.rs | 115 +++++++----------- .../test/tests/pallet_compatibility.rs | 16 +-- .../tests/pallet_compatibility_instance.rs | 16 +-- frame/support/test/tests/pallet_instance.rs | 94 ++++++-------- frame/treasury/src/lib.rs | 2 +- test-utils/runtime/src/lib.rs | 2 +- 10 files changed, 158 insertions(+), 263 deletions(-) diff --git a/frame/identity/src/lib.rs b/frame/identity/src/lib.rs index bdf2296f11611..724d7197aa19d 100644 --- a/frame/identity/src/lib.rs +++ b/frame/identity/src/lib.rs @@ -253,7 +253,10 @@ impl scale_info::TypeInfo for IdentityFields { .path(scale_info::Path::new("IdentityFields", module_path!())) .composite( scale_info::build::Fields::unnamed() 
- .field_of::("BitFlags") + .field_of::( + "BitFlags", + &["Wrapper type for `BitFlags`"] + ) ) } } diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 74b9fcf64804c..1fa94985f4de5 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2318,7 +2318,7 @@ macro_rules! __dispatch_impl_metadata { $($rest:tt)* ) => { // todo: [AJ] another Instance: TypeInfo bound to remove - impl<$trait_instance: $trait_name $(, $instance: $instantiable + $crate::scale_info::TypeInfo)? + $crate::scale_info::TypeInfo> $mod_type<$trait_instance $(, $instance)?> + impl<$trait_instance: $trait_name $( + $crate::scale_info::TypeInfo, $instance: $instantiable + $crate::scale_info::TypeInfo)? + $crate::scale_info::TypeInfo> $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* { #[doc(hidden)] @@ -2706,7 +2706,7 @@ mod tests { } } - fn expected_metadata() -> Vec { + fn expected_calls() -> Vec { vec![ FunctionMetadata { name: "aux_0", @@ -2824,7 +2824,11 @@ mod tests { #[test] fn module_json_metadata() { let metadata = Module::::call_functions(); - assert_eq!(expected_metadata(), metadata); + let expected_metadata = PalletCallMetadata { + calls: expected_calls(), + ty: scale_info::meta_type::>(), + }; + assert_eq!(expected_metadata, metadata); } #[test] diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 4be7b6da48e99..37318675814d3 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -729,65 +729,6 @@ mod tests { type DbWeight = (); } - fn expected_metadata() -> OuterEventMetadata { - OuterEventMetadata { - name: "TestEvent", - events: vec![ - ModuleEventMetadata { - name: "system", - events: vec![EventMetadata { - name: "SystemEvent", - arguments: vec![], - documentation: vec![], - }] - }, - ModuleEventMetadata { - name: "event_module", - events: vec![ - EventMetadata { - name: "TestEvent", - arguments: vec![ - TypeSpec::new::("Balance"), - TypeSpec::new::("Origin"), - 
], - documentation: vec![" Hi, I am a comment."] - }, - EventMetadata { - name: "EventWithoutParams", - arguments: vec![], - documentation: vec![" Dog"], - }, - ] - }, - ModuleEventMetadata { - name: "event_module2", - events: vec![ - EventMetadata { - name: "TestEvent", - arguments: vec![TypeSpec::new::("BalanceRenamed")], - documentation: vec![] - }, - EventMetadata { - name: "TestOrigin", - arguments: vec![TypeSpec::new::("OriginRenamed")], - documentation: vec![], - }, - ] - }, - ModuleEventMetadata { - name: "event_module3", - events: vec![ - EventMetadata { - name: "HiEvent", - arguments: vec![], - documentation: vec![] - } - ] - } - ] - } - } - #[test] fn test_codec() { let runtime_1_event_module_2 = TestEvent::event_module2( diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 742dc67552d3e..8a87bdbbf32c8 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -286,7 +286,7 @@ fn call_codec() { // use frame_metadata::*; // let expected_metadata: RuntimeMetadataLastVersion = RuntimeMetadataLastVersion { // modules: DecodeDifferent::Encode(&[ -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("System"), // storage: None, // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { @@ -315,7 +315,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 30, // }, -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("Module1_1"), // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { // prefix: DecodeDifferent::Encode("Instance1Module"), @@ -337,7 +337,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 31, // }, -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("Module2"), // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { // prefix: 
DecodeDifferent::Encode("Module"), @@ -361,7 +361,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 32, // }, -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("Module1_2"), // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { // prefix: DecodeDifferent::Encode("Instance2Module"), @@ -381,7 +381,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 33, // }, -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("Module1_3"), // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { // prefix: DecodeDifferent::Encode("Instance3Module"), @@ -393,7 +393,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 6, // }, -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("Module1_4"), // storage: None, // calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { @@ -406,7 +406,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 3, // }, -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("Module1_5"), // storage: None, // calls: None, @@ -419,7 +419,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 4, // }, -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("Module1_6"), // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { // prefix: DecodeDifferent::Encode("Instance6Module"), @@ -439,7 +439,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 1, // }, -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("Module1_7"), // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { // prefix: DecodeDifferent::Encode("Instance7Module"), @@ -459,7 +459,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 2, // }, -// ModuleMetadata { 
+// PalletMetadata { // name: DecodeDifferent::Encode("Module1_8"), // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { // prefix: DecodeDifferent::Encode("Instance8Module"), @@ -479,7 +479,7 @@ fn call_codec() { // errors: DecodeDifferent::Encode(FnEncode(|| &[])), // index: 12, // }, -// ModuleMetadata { +// PalletMetadata { // name: DecodeDifferent::Encode("Module1_9"), // storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { // prefix: DecodeDifferent::Encode("Instance9Module"), @@ -513,103 +513,103 @@ fn call_codec() { fn test_metadata() { use frame_support::metadata::*; let modules = vec![ - ModuleMetadata { + PalletMetadata { name: "System", storage: None, - calls: Some(vec![]), - event: Some(vec![]), + calls: None, + event: None, constants: vec![], - errors: vec![], + error: None, index: 0 }, - ModuleMetadata { + PalletMetadata { name: "Module1_1", storage: None, - calls: Some(vec![]), - event: Some(vec![]), + calls: None, + event: None, constants: vec![], - errors: vec![], + error: None, index: 1 }, - ModuleMetadata { + PalletMetadata { name: "Module2", storage: None, - calls: Some(vec![]), - event: Some(vec![]), + calls: None, + event: None, constants: vec![], - errors: vec![], + error: None, index: 2 }, - ModuleMetadata { + PalletMetadata { name: "Module1_2", storage: None, - calls: Some(vec![]), - event: Some(vec![]), + calls: None, + event: None, constants: vec![], - errors: vec![], + error: None, index: 3 }, - ModuleMetadata { + PalletMetadata { name: "Module1_3", storage: None, calls: None, event: None, constants: vec![], - errors: vec![], + error: None, index: 4 }, - ModuleMetadata { + PalletMetadata { name: "Module1_4", storage: None, - calls: Some(vec![]), + calls: None, event: None, constants: vec![], - errors: vec![], + error: None, index: 5 }, - ModuleMetadata { + PalletMetadata { name: "Module1_5", storage: None, calls: None, - event: Some(vec![]), + event: None, constants: vec![], - errors: vec![], + error: 
None, index: 6 }, - ModuleMetadata { + PalletMetadata { name: "Module1_6", storage: None, - calls: Some(vec![]), - event: Some(vec![]), + calls: None, + event: None, constants: vec![], - errors: vec![], + error: None, index: 7 }, - ModuleMetadata { + PalletMetadata { name: "Module1_7", storage: None, - calls: Some(vec![]), - event: Some(vec![]), + calls: None, + event: None, constants: vec![], - errors: vec![], + error: None, index: 8 }, - ModuleMetadata { + PalletMetadata { name: "Module1_8", storage: None, - calls: Some(vec![]), - event: Some(vec![]), + calls: None, + event: None, constants: vec![], - errors: vec![], + error: None, index: 9 }, - ModuleMetadata { + PalletMetadata { name: "Module1_9", storage: None, - calls: Some(vec![]), - event: Some(vec![]), + calls: None, + event: None, constants: vec![], - errors: vec![], + error: None, index: 10 } ]; diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 4b73c3695cd3a..bacc1e72d3003 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -633,7 +633,7 @@ fn metadata() { use frame_support::metadata::*; use codec::{Decode, Encode}; - let expected_pallet_metadata = ModuleMetadata { + let expected_pallet_metadata = PalletMetadata { index: 1, name: "Example", storage: Some(StorageMetadata { @@ -737,67 +737,45 @@ fn metadata() { }, ], }), - calls: Some(vec![ - FunctionMetadata { - name: "foo", - arguments: vec![ - FunctionArgumentMetadata { - name: "_foo", - ty: scale_info::meta_type::>(), - }, - FunctionArgumentMetadata { - name: "_bar", - ty: scale_info::meta_type::(), - } - ], - documentation: vec![ - " Doc comment put in metadata", - ], - }, - FunctionMetadata { - name: "foo_transactional", - arguments: vec![ - FunctionArgumentMetadata { - name: "foo", - ty: scale_info::meta_type::>(), - } - ], - documentation: vec![ - " Doc comment put in metadata", - ], - }, - FunctionMetadata { - name: "foo_no_post_info", - arguments: vec![], - 
documentation: vec![], - }, - ]), - event: Some(vec![ - EventMetadata { - name: "Proposed", - arguments: vec![TypeSpec::new::("::AccountId")], - documentation: vec![ - " doc comment put in metadata" - ], - }, - EventMetadata { - name: "Spending", - arguments: vec![TypeSpec::new::("Balance")], - documentation: vec![ - " doc" - ], - }, - EventMetadata { - name: "Something", - arguments: vec![TypeSpec::new::("Other")], - documentation: vec![], - }, - EventMetadata { - name: "SomethingElse", - arguments: vec![TypeSpec::new::("::_1")], - documentation: vec![], - }, - ]), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![ + FunctionMetadata { + name: "foo", + arguments: vec![ + FunctionArgumentMetadata { + name: "_foo", + ty: scale_info::meta_type::>(), + }, + FunctionArgumentMetadata { + name: "_bar", + ty: scale_info::meta_type::(), + } + ], + documentation: vec![ + " Doc comment put in metadata", + ], + }, + FunctionMetadata { + name: "foo_transactional", + arguments: vec![ + FunctionArgumentMetadata { + name: "foo", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![ + " Doc comment put in metadata", + ], + }, + FunctionMetadata { + name: "foo_no_post_info", + arguments: vec![], + documentation: vec![], + }, + ] + }), + event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), constants: vec![ PalletConstantMetadata { name: "MyGetParam", @@ -841,14 +819,7 @@ fn metadata() { ], }, ], - errors: vec![ - ErrorMetadata { - name: "InsufficientProposersBalance", - documentation: vec![ - " doc comment put into metadata", - ], - }, - ], + error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), }; let metadata = match Runtime::metadata().1 { @@ -856,8 +827,8 @@ fn metadata() { _ => panic!("metadata has been bump, test needs to be updated"), }; - let pallet_metadata = ModuleMetadata::::decode( - &mut &metadata.modules[1].encode()[..] 
+ let pallet_metadata = PalletMetadata::::decode( + &mut &metadata.pallets[1].encode()[..] ).unwrap(); let mut registry = scale_info::Registry::new(); diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 72fd236005107..757d2c7561a32 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -266,18 +266,18 @@ mod test { #[test] fn metadata() { let metadata = Runtime::metadata(); - let modules = match metadata.1 { + let pallets = match metadata.1 { frame_support::metadata::RuntimeMetadata::V13(frame_support::metadata::RuntimeMetadataLastVersion { - modules: m, + pallets: p, .. - }) => m, + }) => p, _ => unreachable!(), }; - pretty_assertions::assert_eq!(modules[1].storage, modules[2].storage); - pretty_assertions::assert_eq!(modules[1].calls, modules[2].calls); - pretty_assertions::assert_eq!(modules[1].event, modules[2].event); - pretty_assertions::assert_eq!(modules[1].constants, modules[2].constants); - pretty_assertions::assert_eq!(modules[1].errors, modules[2].errors); + pretty_assertions::assert_eq!(pallets[1].storage, pallets[2].storage); + pretty_assertions::assert_eq!(pallets[1].calls, pallets[2].calls); + pretty_assertions::assert_eq!(pallets[1].event, pallets[2].event); + pretty_assertions::assert_eq!(pallets[1].constants, pallets[2].constants); + pretty_assertions::assert_eq!(pallets[1].error, pallets[2].error); } #[test] diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index b929d6b6c65aa..ebe9e8ff632c2 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -281,19 +281,19 @@ mod test { #[test] fn metadata() { let metadata = Runtime::metadata(); - let modules = match metadata.1 { + let pallets = match metadata.1 { 
frame_support::metadata::RuntimeMetadata::V13(frame_support::metadata::RuntimeMetadataLastVersion { - modules: m, + pallets: p, .. - }) => m, + }) => p, _ => unreachable!(), }; for i in vec![1, 3, 5].into_iter() { - pretty_assertions::assert_eq!(modules[i].storage, modules[i+1].storage); - pretty_assertions::assert_eq!(modules[i].calls, modules[i+1].calls); - pretty_assertions::assert_eq!(modules[i].event, modules[i+1].event); - pretty_assertions::assert_eq!(modules[i].constants, modules[i+1].constants); - pretty_assertions::assert_eq!(modules[i].errors, modules[i+1].errors); + pretty_assertions::assert_eq!(pallets[i].storage, pallets[i+1].storage); + pretty_assertions::assert_eq!(pallets[i].calls, pallets[i+1].calls); + pretty_assertions::assert_eq!(pallets[i].event, pallets[i+1].event); + pretty_assertions::assert_eq!(pallets[i].constants, pallets[i+1].constants); + pretty_assertions::assert_eq!(pallets[i].error, pallets[i+1].error); } } diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 1a53a2559806e..52ecdbee3686c 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -559,7 +559,7 @@ fn metadata() { use frame_support::metadata::*; use codec::{Decode, Encode}; - let expected_pallet_metadata = ModuleMetadata { + let expected_pallet_metadata = PalletMetadata { index: 1, name: "Example", storage: Some(StorageMetadata { @@ -624,53 +624,36 @@ fn metadata() { }, ], }), - calls: Some(vec![ - FunctionMetadata { - name: "foo", - arguments: vec![ - FunctionArgumentMetadata { - name: "_foo", - ty: scale_info::meta_type::>(), - } - ], - documentation: vec![ - " Doc comment put in metadata", - ], - }, - FunctionMetadata { - name: "foo_transactional", - arguments: vec![ - FunctionArgumentMetadata { - name: "_foo", - ty: scale_info::meta_type::>(), - } - ], - documentation: vec![ - " Doc comment put in metadata", - ], - }, - ]), - event: Some(vec![ - EventMetadata 
{ - name: "Proposed", - arguments: vec![TypeSpec::new::("::AccountId")], - documentation: vec![ - " doc comment put in metadata" - ], - }, - EventMetadata { - name: "Spending", - arguments: vec![TypeSpec::new::("Balance")], - documentation: vec![ - " doc" - ], - }, - EventMetadata { - name: "Something", - arguments: vec![TypeSpec::new::("Other")], - documentation: vec![], - }, - ]), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![ + FunctionMetadata { + name: "foo", + arguments: vec![ + FunctionArgumentMetadata { + name: "_foo", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![ + " Doc comment put in metadata", + ], + }, + FunctionMetadata { + name: "foo_transactional", + arguments: vec![ + FunctionArgumentMetadata { + name: "_foo", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![ + " Doc comment put in metadata", + ], + }, + ] + }), + event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), constants: vec![ PalletConstantMetadata { name: "MyGetParam", @@ -679,14 +662,7 @@ fn metadata() { documentation: vec![], }, ], - errors: vec![ - ErrorMetadata { - name: "InsufficientProposersBalance", - documentation: vec![ - " doc comment put into metadata", - ], - }, - ], + error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), }; let mut expected_pallet_instance1_metadata = expected_pallet_metadata.clone(); @@ -705,11 +681,11 @@ fn metadata() { _ => panic!("metadata has been bump, test needs to be updated"), }; - let pallet_metadata = ModuleMetadata::::decode( - &mut &metadata.modules[1].encode()[..] + let pallet_metadata = PalletMetadata::::decode( + &mut &metadata.pallets[1].encode()[..] 
).unwrap(); let pallet_instance1_metadata = - ModuleMetadata::decode(&mut &metadata.modules[2].encode()[..]).unwrap(); + PalletMetadata::decode(&mut &metadata.pallets[2].encode()[..]).unwrap(); let mut registry = scale_info::Registry::new(); let expected_pallet_metadata = expected_pallet_metadata.into_portable(&mut registry); diff --git a/frame/treasury/src/lib.rs b/frame/treasury/src/lib.rs index 3f2d041ddd912..0d9b1a488feba 100644 --- a/frame/treasury/src/lib.rs +++ b/frame/treasury/src/lib.rs @@ -133,7 +133,7 @@ pub trait Config: frame_system::Config { type SpendFunds: SpendFunds; /// The maximum number of approvals that can wait in the spending queue. - type MaxApprovals: Get; + type MaxApprovals: Get + scale_info::TypeInfo; // todo: [AJ] see if we can remove this bound for Get storages } /// A trait to allow the Treasury Pallet to spend it's funds for other purposes. diff --git a/test-utils/runtime/src/lib.rs b/test-utils/runtime/src/lib.rs index ad8a671688d93..dda78623a340c 100644 --- a/test-utils/runtime/src/lib.rs +++ b/test-utils/runtime/src/lib.rs @@ -488,7 +488,7 @@ parameter_types! { }; pub RuntimeBlockLength: BlockLength = BlockLength::max(4 * 1024 * 1024); -k pub RuntimeBlockWeights: BlockWeights = + pub RuntimeBlockWeights: BlockWeights = BlockWeights::with_sensible_defaults(4 * 1024 * 1024, Perbill::from_percent(75)); } From 6796bb2be33e3c5f78b57427e71b2b5df32567e0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 5 May 2021 10:50:27 +0100 Subject: [PATCH 200/503] Fix unused import warnings --- frame/support/src/error.rs | 2 -- frame/support/src/event.rs | 1 - 2 files changed, 3 deletions(-) diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index b559e21e20537..42d6aba43196c 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -20,8 +20,6 @@ #[doc(hidden)] pub use sp_runtime::traits::{LookupError, BadOrigin}; -use sp_std::prelude::Vec; - /// Declare an error type for a runtime module. 
/// /// `decl_error!` supports only variants that do not hold any data. The dispatchable diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 37318675814d3..6e83c11ef7b8e 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -524,7 +524,6 @@ macro_rules! __impl_outer_event_json_metadata { #[cfg(test)] #[allow(dead_code)] mod tests { - use crate::metadata::*; use serde::Serialize; use codec::{Encode, Decode}; From d97f5382679afd77d0e6d7dc4ea20a4a95c53f1e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 5 May 2021 12:15:57 +0100 Subject: [PATCH 201/503] Update frame-metadata --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 58915a0b47f5b..990991258841f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1799,7 +1799,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#92aced5f3d4605f36f72b875cbdac8be9ac5e430" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#b02894949737a7b1e462af282330c3cf1f43acf9" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", From 6752676e86cd898e6cc2233a46b98196442e979c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 6 May 2021 11:28:47 +0100 Subject: [PATCH 202/503] Update frame-metadata --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 990991258841f..c6ef1a260847e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8163,7 +8163,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#b856e028748e8b86e84352694681e8510fecfd0a" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#ff42886b35a3c5e960a8b2089bdef5fccd0ee245" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -8175,7 +8175,7 @@ dependencies = [ [[package]] name = 
"scale-info-derive" version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#b856e028748e8b86e84352694681e8510fecfd0a" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#ff42886b35a3c5e960a8b2089bdef5fccd0ee245" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", From 44b6cc5a3ad30690993b7daa3650dc24d6579c8b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 6 May 2021 14:49:04 +0100 Subject: [PATCH 203/503] Fix some metadata tests --- frame/support/src/dispatch.rs | 2 +- frame/support/src/lib.rs | 16 ++++++++-------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 1fa94985f4de5..6226adbd744e6 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2759,7 +2759,7 @@ mod tests { arguments: vec![ FunctionArgumentMetadata { name: "_data", - ty: scale_info::meta_type::(), + ty: scale_info::meta_type::(), }, FunctionArgumentMetadata { name: "_data2", diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 42c75fc3adce2..5a3f9943debb2 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1100,7 +1100,7 @@ pub mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![0, 0, 0, 0, 0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { @@ -1112,7 +1112,7 @@ pub mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { @@ -1124,7 +1124,7 @@ pub mod tests { value: scale_info::meta_type::(), unused: false }, - default: vec![], + default: vec![0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { @@ -1136,7 +1136,7 @@ pub mod tests { value: scale_info::meta_type::(), unused: false }, - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { @@ -1149,7 +1149,7 @@ pub mod tests { value: 
scale_info::meta_type::(), key2_hasher: StorageHasher::Blake2_128Concat, }, - default: vec![], + default: vec![0, 0, 0, 0, 0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { @@ -1162,7 +1162,7 @@ pub mod tests { value: scale_info::meta_type::(), key2_hasher: StorageHasher::Identity, }, - default: vec![], + default: vec![0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { @@ -1175,7 +1175,7 @@ pub mod tests { value: scale_info::meta_type::(), key2_hasher: StorageHasher::Twox64Concat, }, - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { @@ -1188,7 +1188,7 @@ pub mod tests { value: scale_info::meta_type::>(), key2_hasher: StorageHasher::Blake2_128Concat, }, - default: vec![], + default: vec![0], documentation: vec![], }, ], From ec579e699b00140394225c20181afa28caf287cd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 7 May 2021 16:07:13 +0100 Subject: [PATCH 204/503] Fix construct_runtime metadata tests --- frame/support/test/tests/construct_runtime.rs | 427 +++++++----------- 1 file changed, 156 insertions(+), 271 deletions(-) diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 8a87bdbbf32c8..95cd5bfcb81c2 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -281,345 +281,230 @@ fn call_codec() { assert_eq!(Call::Module1_9(module1::Call::fail()).encode()[0], 13); } -// #[test] -// fn test_metadata() { -// use frame_metadata::*; -// let expected_metadata: RuntimeMetadataLastVersion = RuntimeMetadataLastVersion { -// modules: DecodeDifferent::Encode(&[ -// PalletMetadata { -// name: DecodeDifferent::Encode("System"), -// storage: None, -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { -// name: DecodeDifferent::Encode("noop"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// event: 
Some(DecodeDifferent::Encode(FnEncode(|| &[ -// EventMetadata { -// name: DecodeDifferent::Encode("ExtrinsicSuccess"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }, -// EventMetadata { -// name: DecodeDifferent::Encode("ExtrinsicFailed"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }, -// EventMetadata { -// name: DecodeDifferent::Encode("Ignore"), -// arguments: DecodeDifferent::Encode(&["BlockNumber"]), -// documentation: DecodeDifferent::Encode(&[]), -// }, -// ]))), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 30, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module1_1"), -// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { -// prefix: DecodeDifferent::Encode("Instance1Module"), -// entries: DecodeDifferent::Encode(&[]), -// }))), -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ -// FunctionMetadata { -// name: DecodeDifferent::Encode("fail"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }, -// ]))), -// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { -// name: DecodeDifferent::Encode("A"), -// arguments: DecodeDifferent::Encode(&["AccountId"]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 31, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module2"), -// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { -// prefix: DecodeDifferent::Encode("Module"), -// entries: DecodeDifferent::Encode(&[]), -// }))), -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[ -// FunctionMetadata { -// name: DecodeDifferent::Encode("fail"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: 
DecodeDifferent::Encode(&[]), -// }, -// ]))), -// event: Some(DecodeDifferent::Encode(FnEncode(|| &[ -// EventMetadata { -// name: DecodeDifferent::Encode("A"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }, -// ]))), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 32, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module1_2"), -// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { -// prefix: DecodeDifferent::Encode("Instance2Module"), -// entries: DecodeDifferent::Encode(&[]), -// }))), -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { -// name: DecodeDifferent::Encode("fail"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { -// name: DecodeDifferent::Encode("A"), -// arguments: DecodeDifferent::Encode(&["AccountId"]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 33, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module1_3"), -// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { -// prefix: DecodeDifferent::Encode("Instance3Module"), -// entries: DecodeDifferent::Encode(&[]), -// }))), -// calls: None, -// event: None, -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 6, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module1_4"), -// storage: None, -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { -// name: DecodeDifferent::Encode("fail"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// event: None, -// constants: 
DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 3, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module1_5"), -// storage: None, -// calls: None, -// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { -// name: DecodeDifferent::Encode("A"), -// arguments: DecodeDifferent::Encode(&["AccountId"]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 4, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module1_6"), -// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { -// prefix: DecodeDifferent::Encode("Instance6Module"), -// entries: DecodeDifferent::Encode(&[]), -// }))), -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { -// name: DecodeDifferent::Encode("fail"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { -// name: DecodeDifferent::Encode("A"), -// arguments: DecodeDifferent::Encode(&["AccountId"]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 1, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module1_7"), -// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { -// prefix: DecodeDifferent::Encode("Instance7Module"), -// entries: DecodeDifferent::Encode(&[]), -// }))), -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { -// name: DecodeDifferent::Encode("fail"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { -// name: DecodeDifferent::Encode("A"), -// 
arguments: DecodeDifferent::Encode(&["AccountId"]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 2, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module1_8"), -// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { -// prefix: DecodeDifferent::Encode("Instance8Module"), -// entries: DecodeDifferent::Encode(&[]), -// }))), -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { -// name: DecodeDifferent::Encode("fail"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { -// name: DecodeDifferent::Encode("A"), -// arguments: DecodeDifferent::Encode(&["AccountId"]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 12, -// }, -// PalletMetadata { -// name: DecodeDifferent::Encode("Module1_9"), -// storage: Some(DecodeDifferent::Encode(FnEncode(|| StorageMetadata { -// prefix: DecodeDifferent::Encode("Instance9Module"), -// entries: DecodeDifferent::Encode(&[]), -// }))), -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[FunctionMetadata { -// name: DecodeDifferent::Encode("fail"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// event: Some(DecodeDifferent::Encode(FnEncode(|| &[EventMetadata { -// name: DecodeDifferent::Encode("A"), -// arguments: DecodeDifferent::Encode(&["AccountId"]), -// documentation: DecodeDifferent::Encode(&[]), -// }]))), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// index: 13, -// }, -// ]), -// extrinsic: ExtrinsicMetadata { -// version: 4, -// signed_extensions: 
vec![DecodeDifferent::Encode("UnitSignedExtension")], -// }, -// }; -// pretty_assertions::assert_eq!(Runtime::metadata().1, RuntimeMetadata::V12(expected_metadata)); -// } - -// todo: [AJ] update test below with v13 metadata from above #[test] fn test_metadata() { use frame_support::metadata::*; - let modules = vec![ + use scale_info::interner::UntrackedSymbol; + use std::marker::PhantomData; + + let pallets = vec![ PalletMetadata { name: "System", storage: None, - calls: None, - event: None, + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "noop", + arguments: vec![], + documentation: vec![], + }], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), constants: vec![], error: None, - index: 0 + index: 30, }, PalletMetadata { name: "Module1_1", - storage: None, - calls: None, - event: None, + storage: Some(StorageMetadata { + prefix: "Instance1Module", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "fail", + arguments: vec![], + documentation: vec![], + }], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), constants: vec![], error: None, - index: 1 + index: 31, }, PalletMetadata { name: "Module2", - storage: None, - calls: None, - event: None, + storage: Some(StorageMetadata { + prefix: "Module", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "fail", + arguments: vec![], + documentation: vec![], + }], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::(), + }), constants: vec![], error: None, - index: 2 + index: 32, }, PalletMetadata { name: "Module1_2", - storage: None, - calls: None, - event: None, + storage: Some(StorageMetadata { + prefix: "Instance2Module", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: 
scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "fail", + arguments: vec![], + documentation: vec![], + }], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), constants: vec![], error: None, - index: 3 + index: 33, }, PalletMetadata { name: "Module1_3", - storage: None, + storage: Some(StorageMetadata { + prefix: "Instance3Module", + entries: vec![], + }), calls: None, event: None, constants: vec![], error: None, - index: 4 + index: 6, }, PalletMetadata { name: "Module1_4", storage: None, - calls: None, + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "fail", + arguments: vec![], + documentation: vec![], + }], + }), event: None, constants: vec![], error: None, - index: 5 + index: 3, }, PalletMetadata { name: "Module1_5", storage: None, calls: None, - event: None, + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), constants: vec![], error: None, - index: 6 + index: 4, }, PalletMetadata { name: "Module1_6", - storage: None, - calls: None, - event: None, + storage: Some(StorageMetadata { + prefix: "Instance6Module", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "fail", + arguments: vec![], + documentation: vec![], + }], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), constants: vec![], error: None, - index: 7 + index: 1, }, PalletMetadata { name: "Module1_7", - storage: None, - calls: None, - event: None, + storage: Some(StorageMetadata { + prefix: "Instance7Module", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "fail", + arguments: vec![], + documentation: vec![], + }], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), constants: vec![], error: None, - index: 8 + index: 2, }, PalletMetadata { 
name: "Module1_8", - storage: None, - calls: None, - event: None, + storage: Some(StorageMetadata { + prefix: "Instance8Module", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "fail", + arguments: vec![], + documentation: vec![], + }], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), constants: vec![], error: None, - index: 9 + index: 12, }, PalletMetadata { name: "Module1_9", - storage: None, - calls: None, - event: None, + storage: Some(StorageMetadata { + prefix: "Instance9Module", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "fail", + arguments: vec![], + documentation: vec![], + }], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), constants: vec![], error: None, - index: 10 - } + index: 13, + }, ]; + let extrinsic = ExtrinsicMetadata { ty: scale_info::meta_type::(), - version: 0, - signed_extensions: vec![] + version: 4, + signed_extensions: vec![ + SignedExtensionMetadata { identifier: "UnitSignedExtension", ty: scale_info::meta_type::<()>() } + ] }; - let expected_metadata = RuntimeMetadataLastVersion::new(modules, extrinsic); - pretty_assertions::assert_eq!(Runtime::metadata().1, RuntimeMetadata::V13(expected_metadata)); + + let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); + let actual_metadata = Runtime::metadata(); + pretty_assertions::assert_eq!(actual_metadata, expected_metadata); } #[test] From 9752983b87580ff482fdf8511877db79137f0b77 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 7 May 2021 16:19:31 +0100 Subject: [PATCH 205/503] Fix warnings --- frame/support/test/tests/construct_runtime.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 
95cd5bfcb81c2..3dfd488cb0bf5 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -284,8 +284,6 @@ fn call_codec() { #[test] fn test_metadata() { use frame_support::metadata::*; - use scale_info::interner::UntrackedSymbol; - use std::marker::PhantomData; let pallets = vec![ PalletMetadata { From bbc5606fb630a28f26d01ae360d67922ae67e36c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 10 May 2021 10:41:27 +0100 Subject: [PATCH 206/503] Remove #[pallet::metadata] attribute --- bin/node-template/pallets/template/src/lib.rs | 1 - frame/assets/src/lib.rs | 5 --- frame/atomic-swap/src/lib.rs | 1 - frame/balances/src/lib.rs | 1 - frame/contracts/src/lib.rs | 1 - .../election-provider-multi-phase/src/lib.rs | 1 - frame/elections-phragmen/src/lib.rs | 5 --- frame/gilt/src/lib.rs | 1 - frame/identity/src/lib.rs | 4 -- frame/im-online/src/lib.rs | 1 - frame/indices/src/lib.rs | 1 - frame/nicks/src/lib.rs | 1 - frame/node-authorization/src/lib.rs | 1 - frame/proxy/src/lib.rs | 1 - frame/sudo/src/lib.rs | 1 - frame/sudo/src/mock.rs | 1 - .../procedural/src/pallet/parse/event.rs | 42 ++----------------- frame/support/src/lib.rs | 22 ---------- frame/support/test/tests/pallet.rs | 1 - .../test/tests/pallet_compatibility.rs | 1 - .../tests/pallet_compatibility_instance.rs | 1 - frame/support/test/tests/pallet_instance.rs | 1 - frame/system/src/lib.rs | 1 - frame/vesting/src/lib.rs | 1 - 24 files changed, 4 insertions(+), 93 deletions(-) diff --git a/bin/node-template/pallets/template/src/lib.rs b/bin/node-template/pallets/template/src/lib.rs index 7b986a5186692..297ab1f823cbc 100644 --- a/bin/node-template/pallets/template/src/lib.rs +++ b/bin/node-template/pallets/template/src/lib.rs @@ -42,7 +42,6 @@ pub mod pallet { // Pallets use events to inform users when important changes are made. 
// https://substrate.dev/docs/en/knowledgebase/runtime/events #[pallet::event] - #[pallet::metadata(T::AccountId = "AccountId")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// Event documentation should end with an array that provides descriptive names for event diff --git a/frame/assets/src/lib.rs b/frame/assets/src/lib.rs index e8dfd50f4086a..34836fe2b3996 100644 --- a/frame/assets/src/lib.rs +++ b/frame/assets/src/lib.rs @@ -259,11 +259,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - T::AccountId = "AccountId", - T::Balance = "Balance", - T::AssetId = "AssetId" - )] pub enum Event, I: 'static = ()> { /// Some asset class was created. \[asset_id, creator, owner\] Created(T::AssetId, T::AccountId, T::AccountId), diff --git a/frame/atomic-swap/src/lib.rs b/frame/atomic-swap/src/lib.rs index a7044a74447c2..8609c24a29b67 100644 --- a/frame/atomic-swap/src/lib.rs +++ b/frame/atomic-swap/src/lib.rs @@ -198,7 +198,6 @@ pub mod pallet { /// Event of atomic swap pallet. #[pallet::event] - #[pallet::metadata(T::AccountId = "AccountId", PendingSwap = "PendingSwap")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// Swap created. \[account, proof, swap\] diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index f0445a4e40a06..cda0f42b84c06 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -369,7 +369,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", T::Balance = "Balance")] pub enum Event, I: 'static = ()> { /// An account was created with some free balance. 
\[account, free_balance\] Endowed(T::AccountId, T::Balance), diff --git a/frame/contracts/src/lib.rs b/frame/contracts/src/lib.rs index 8c7438d53ef2e..0637ea1199490 100644 --- a/frame/contracts/src/lib.rs +++ b/frame/contracts/src/lib.rs @@ -462,7 +462,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", T::Hash = "Hash", BalanceOf = "Balance")] pub enum Event { /// Contract deployed by address at the specified address. \[deployer, contract\] Instantiated(T::AccountId, T::AccountId), diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 40257e2b83017..c74f4869d15aa 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -792,7 +792,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata(::AccountId = "AccountId")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// A solution was stored with the given compute. diff --git a/frame/elections-phragmen/src/lib.rs b/frame/elections-phragmen/src/lib.rs index 98e004efbde2e..260b725434ee4 100644 --- a/frame/elections-phragmen/src/lib.rs +++ b/frame/elections-phragmen/src/lib.rs @@ -531,11 +531,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata( - ::AccountId = "AccountId", - BalanceOf = "Balance", - Vec<(::AccountId, BalanceOf)> = "Vec<(AccountId, Balance)>", - )] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// A new term with \[new_members\]. 
This indicates that enough candidates existed to run diff --git a/frame/gilt/src/lib.rs b/frame/gilt/src/lib.rs index b9b3d959b4dda..bf2057b59440c 100644 --- a/frame/gilt/src/lib.rs +++ b/frame/gilt/src/lib.rs @@ -261,7 +261,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata(T::AccountId = "AccountId")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// A bid was successfully placed. diff --git a/frame/identity/src/lib.rs b/frame/identity/src/lib.rs index 724d7197aa19d..41d26ef92067a 100644 --- a/frame/identity/src/lib.rs +++ b/frame/identity/src/lib.rs @@ -518,10 +518,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - T::AccountId = "AccountId", - BalanceOf = "Balance" - )] pub enum Event { /// A name was set or reset (which will remove all judgements). \[who\] IdentitySet(T::AccountId), diff --git a/frame/im-online/src/lib.rs b/frame/im-online/src/lib.rs index 2b5e55c97aef5..540c078874152 100644 --- a/frame/im-online/src/lib.rs +++ b/frame/im-online/src/lib.rs @@ -288,7 +288,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AuthorityId = "AuthorityId", Vec> = "Vec")] pub enum Event { /// A new heartbeat was received from `AuthorityId` \[authority_id\] HeartbeatReceived(T::AuthorityId), diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index 19697f2d941bb..993d1011bbd99 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -255,7 +255,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", T::AccountIndex = "AccountIndex")] pub enum Event { /// A account index was assigned. 
\[index, who\] IndexAssigned(T::AccountId, T::AccountIndex), diff --git a/frame/nicks/src/lib.rs b/frame/nicks/src/lib.rs index a6d2415ab96ef..cbabc8a8293ae 100644 --- a/frame/nicks/src/lib.rs +++ b/frame/nicks/src/lib.rs @@ -86,7 +86,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", BalanceOf = "Balance")] pub enum Event { /// A name was set. \[who\] NameSet(T::AccountId), diff --git a/frame/node-authorization/src/lib.rs b/frame/node-authorization/src/lib.rs index 5f233549c73ca..be2b5d95551ac 100644 --- a/frame/node-authorization/src/lib.rs +++ b/frame/node-authorization/src/lib.rs @@ -143,7 +143,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { /// The given well known node was added. NodeAdded(PeerId, T::AccountId), diff --git a/frame/proxy/src/lib.rs b/frame/proxy/src/lib.rs index ccdddd4371d6f..1b933ce54764a 100644 --- a/frame/proxy/src/lib.rs +++ b/frame/proxy/src/lib.rs @@ -513,7 +513,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata(T::AccountId = "AccountId", T::ProxyType = "ProxyType", CallHashOf = "Hash")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { diff --git a/frame/sudo/src/lib.rs b/frame/sudo/src/lib.rs index d840d45a7f430..90ece1a7dedeb 100644 --- a/frame/sudo/src/lib.rs +++ b/frame/sudo/src/lib.rs @@ -256,7 +256,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { /// A sudo just took place. 
\[result\] Sudid(DispatchResult), diff --git a/frame/sudo/src/mock.rs b/frame/sudo/src/mock.rs index 568799e1fe632..e416527bbcab9 100644 --- a/frame/sudo/src/mock.rs +++ b/frame/sudo/src/mock.rs @@ -77,7 +77,6 @@ pub mod logger { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { AppendI32(i32, Weight), AppendI32AndAccount(T::AccountId, i32, Weight), diff --git a/frame/support/procedural/src/pallet/parse/event.rs b/frame/support/procedural/src/pallet/parse/event.rs index 2148b5e425c0a..c0ed8eef0fe84 100644 --- a/frame/support/procedural/src/pallet/parse/event.rs +++ b/frame/support/procedural/src/pallet/parse/event.rs @@ -21,7 +21,6 @@ use quote::ToTokens; /// List of additional token to be used for parsing. mod keyword { - syn::custom_keyword!(metadata); syn::custom_keyword!(Event); syn::custom_keyword!(pallet); syn::custom_keyword!(generate_deposit); @@ -46,17 +45,12 @@ pub struct EventDef { pub attr_span: proc_macro2::Span, } -/// Attribute for Event: defines metadata name to use. +/// Attribute for a pallet's Event. /// /// Syntax is: -/// * `#[pallet::metadata(SomeType = MetadataName, ...)]` /// * `#[pallet::generate_deposit($vis fn deposit_event)]` enum PalletEventAttr { - Metadata { - metadata: Vec<(syn::Type, String)>, - // Span of the attribute - span: proc_macro2::Span, - }, + // todo: [AJ] could make this just a struct now it is a single variant DepositEvent { fn_vis: syn::Visibility, // Span for the keyword deposit_event @@ -69,22 +63,11 @@ enum PalletEventAttr { impl PalletEventAttr { fn span(&self) -> proc_macro2::Span { match self { - Self::Metadata { span, .. } => *span, Self::DepositEvent { span, .. } => *span, } } } -/// Parse for syntax `$Type = "$SomeString"`. 
-fn parse_event_metadata_element( - input: syn::parse::ParseStream -) -> syn::Result<(syn::Type, String)> { - let typ = input.parse::()?; - input.parse::()?; - let ident = input.parse::()?; - Ok((typ, ident.value())) -} - impl syn::parse::Parse for PalletEventAttr { fn parse(input: syn::parse::ParseStream) -> syn::Result { input.parse::()?; @@ -94,19 +77,7 @@ impl syn::parse::Parse for PalletEventAttr { content.parse::()?; let lookahead = content.lookahead1(); - if lookahead.peek(keyword::metadata) { - let span = content.parse::()?.span(); - let metadata_content; - syn::parenthesized!(metadata_content in content); - - let metadata = metadata_content - .parse_terminated::<_, syn::Token![,]>(parse_event_metadata_element)? - .into_pairs() - .map(syn::punctuated::Pair::into_value) - .collect(); - - Ok(PalletEventAttr::Metadata { metadata, span }) - } else if lookahead.peek(keyword::generate_deposit) { + if lookahead.peek(keyword::generate_deposit) { let span = content.parse::()?.span(); let generate_content; @@ -124,19 +95,14 @@ impl syn::parse::Parse for PalletEventAttr { } struct PalletEventAttrInfo { - // todo: [AJ] this is unused now because of TypeInfo derive for Error, consider removing if compatible with downstream clients - metadata: Option>, deposit_event: Option<(syn::Visibility, proc_macro2::Span)>, } impl PalletEventAttrInfo { fn from_attrs(attrs: Vec) -> syn::Result { - let mut metadata = None; let mut deposit_event = None; for attr in attrs { match attr { - PalletEventAttr::Metadata { metadata: m, .. } if metadata.is_none() => - metadata = Some(m), PalletEventAttr::DepositEvent { fn_vis, fn_span, .. 
} if deposit_event.is_none() => deposit_event = Some((fn_vis, fn_span)), attr => { @@ -145,7 +111,7 @@ impl PalletEventAttrInfo { } } - Ok(PalletEventAttrInfo { metadata, deposit_event }) + Ok(PalletEventAttrInfo { deposit_event }) } } diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 5a3f9943debb2..12e54d348fb23 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1495,7 +1495,6 @@ pub mod pallet_prelude { /// Item is defined as: /// ```ignore /// #[pallet::event] -/// #[pallet::metadata($SomeType = "$Metadata", $SomeOtherType = "$Metadata", ..)] // Optional /// #[pallet::generate_deposit($visibility fn deposit_event)] // Optional /// pub enum Event<$some_generic> $optional_where_clause { /// /// Some doc @@ -1510,23 +1509,6 @@ pub mod pallet_prelude { /// only). /// For ease of use, bound the trait `Member` available in frame_support::pallet_prelude. /// -/// Variant documentations and field types are put into metadata. -/// The attribute `#[pallet::metadata(..)]` allows to specify the metadata to put for some types. -/// -/// The metadata of a type is defined by: -/// * if matching a type in `#[pallet::metadata(..)]`, then the corresponding metadata. -/// * otherwise the type stringified. -/// -/// E.g.: -/// ```ignore -/// #[pallet::event] -/// #[pallet::metadata(u32 = "SpecialU32")] -/// pub enum Event { -/// Proposed(u32, T::AccountId), -/// } -/// ``` -/// will write in event variant metadata `"SpecialU32"` and `"T::AccountId"`. -/// /// The attribute `#[pallet::generate_deposit($visibility fn deposit_event)]` generate a helper /// function on `Pallet` to deposit event. /// @@ -1851,8 +1833,6 @@ pub mod pallet_prelude { /// // /// // The macro generates event metadata, and derive Clone, Debug, Eq, PartialEq and Codec /// #[pallet::event] -/// // Additional argument to specify the metadata to use for given type. 
-/// #[pallet::metadata(BalanceOf = "Balance", u32 = "Other")] /// // Generate a funciton on Pallet to deposit an event. /// #[pallet::generate_deposit(pub(super) fn deposit_event)] /// pub enum Event { @@ -2014,7 +1994,6 @@ pub mod pallet_prelude { /// } /// /// #[pallet::event] -/// #[pallet::metadata(BalanceOf = "Balance", u32 = "Other")] /// #[pallet::generate_deposit(pub(super) fn deposit_event)] /// pub enum Event, I: 'static = ()> { /// /// doc comment put in metadata @@ -2161,7 +2140,6 @@ pub mod pallet_prelude { /// 7. **migrate event**: /// rewrite as a simple enum under with the attribute `#[pallet::event]`, /// use `#[pallet::generate_deposit($vis fn deposit_event)]` to generate deposit_event, -/// use `#[pallet::metadata(...)]` to configure the metadata for types in order not to break them. /// 8. **migrate error**: rewrite it with attribute `#[pallet::error]`. /// 9. **migrate storage**: /// decl_storage provide an upgrade template (see 3.). All storages, genesis config, genesis diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 3425d2d23be7c..050032f45dab4 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -186,7 +186,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata(BalanceOf = "Balance", u32 = "Other")] #[pallet::generate_deposit(fn deposit_event)] pub enum Event where T::AccountId: SomeAssociation1 + From{ /// doc comment put in metadata diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 757d2c7561a32..dc1752d0f0dd4 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -145,7 +145,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(fn deposit_event)] - #[pallet::metadata(T::Balance = "Balance")] pub enum Event { /// Dummy event, just here so there's a generic type that's used. 
Dummy(T::Balance), diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index ebe9e8ff632c2..c300205512a96 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -135,7 +135,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(fn deposit_event)] - #[pallet::metadata(T::Balance = "Balance")] pub enum Event, I: 'static = ()> { /// Dummy event, just here so there's a generic type that's used. Dummy(T::Balance), diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 52ecdbee3686c..c85b1598236fc 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -112,7 +112,6 @@ pub mod pallet { } #[pallet::event] - #[pallet::metadata(BalanceOf = "Balance", u32 = "Other")] #[pallet::generate_deposit(fn deposit_event)] pub enum Event, I: 'static = ()> { /// doc comment put in metadata diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index c512ea3e2157d..1e6ca8a9566a3 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -483,7 +483,6 @@ pub mod pallet { /// Event for the System pallet. #[pallet::event] - #[pallet::metadata(T::AccountId = "AccountId", T::Hash = "Hash")] pub enum Event { /// An extrinsic completed successfully. \[info\] ExtrinsicSuccess(DispatchInfo), diff --git a/frame/vesting/src/lib.rs b/frame/vesting/src/lib.rs index 151dc72a001ac..9768ea294ced3 100644 --- a/frame/vesting/src/lib.rs +++ b/frame/vesting/src/lib.rs @@ -182,7 +182,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", BalanceOf = "Balance")] pub enum Event { /// The amount vested has been updated. This could indicate more funds are available. 
The /// balance given is the amount which is left unvested (and thus locked). From 271de08ac526f4a84af10acc889a16b3eb92c9ee Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 10 May 2021 10:52:01 +0100 Subject: [PATCH 207/503] Fix errors after merge --- frame/support/src/storage/types/value.rs | 2 +- primitives/runtime/src/lib.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index 37dfc495955a6..e49d594f94391 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -23,7 +23,7 @@ use crate::{ storage::{ StorageAppend, StorageDecodeLength, bounded_vec::BoundedVec, - types::{OptionQuery, QueryKindTrait, OnEmptyGetter}, + types::{OptionQuery, StorageEntryMetadata, QueryKindTrait}, }, traits::{GetDefault, StorageInstance, Get}, }; diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index 5c1f755043fe8..a465a2da144ce 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -568,7 +568,7 @@ impl From for DispatchError { } /// Arithmetic errors. -#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug)] +#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug, scale_info::TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum ArithmeticError { /// Underflow. 
From ed0de79ab97bdea129a1525ed5b1004d8402cf0d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 10 May 2021 11:40:24 +0100 Subject: [PATCH 208/503] Fix storage metadata test --- frame/support/test/tests/decl_storage.rs | 54 ++++++++++++------------ 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index d3141ff46893c..3f49a9158fe28 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -97,112 +97,112 @@ mod tests { name: "U32", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0], documentation: vec![" Hello, this is doc!"], }, StorageEntryMetadata { name: "PUBU32", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { name: "U32MYDEF", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { name: "PUBU32MYDEF", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { name: "GETU32", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { name: "PUBGETU32", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { name: "GETU32WITHCONFIG", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0, 0, 0, 0], 
documentation: vec![], }, StorageEntryMetadata { name: "PUBGETU32WITHCONFIG", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { name: "GETU32MYDEF", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { name: "PUBGETU32MYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![3, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { name: "GETU32WITHCONFIGMYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![2, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { name: "PUBGETU32WITHCONFIGMYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![1, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { name: "PUBGETU32WITHCONFIGMYDEFOPT", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { name: "GetU32WithBuilder", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { name: "GetOptU32WithBuilderSome", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { name: "GetOptU32WithBuilderNone", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![], + default: vec![0], documentation: 
vec![], }, StorageEntryMetadata { @@ -214,7 +214,7 @@ mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { @@ -226,7 +226,7 @@ mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { @@ -238,7 +238,7 @@ mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { @@ -250,7 +250,7 @@ mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { @@ -262,7 +262,7 @@ mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { @@ -274,7 +274,7 @@ mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { @@ -286,7 +286,7 @@ mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![12, 109, 97, 112], // "map" documentation: vec![], }, StorageEntryMetadata { @@ -298,28 +298,28 @@ mod tests { value: scale_info::meta_type::(), unused: false, }, - default: vec![], + default: vec![24, 112, 117, 98, 109, 97, 112], // "pubmap" documentation: vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE1", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::>()), - default: vec![], + default: vec![0], documentation: vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE2", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::<(Vec)>>, u32)>()), - default: vec![], + default: vec![0, 0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE3", modifier: StorageEntryModifier::Default, ty: 
StorageEntryType::Plain(scale_info::meta_type::<[u32; 25]>()), - default: vec![], + default: [0u8; 100].to_vec(), documentation: vec![], }, ] From 29554eda69197f90cb6e0d79d7d1f3cdeb64bab2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 10 May 2021 11:43:37 +0100 Subject: [PATCH 209/503] Fix instance metadata test --- frame/support/test/tests/instance.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index dccdd64c5e344..456238f724887 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -401,8 +401,8 @@ fn expected_metadata() -> StorageMetadata { { name: "Value", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![0], + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { @@ -414,7 +414,7 @@ fn expected_metadata() -> StorageMetadata { value: scale_info::meta_type::(), unused: false, }, - default: vec![0], + default: [0u8; 8].to_vec(), documentation: vec![], }, StorageEntryMetadata { @@ -427,7 +427,7 @@ fn expected_metadata() -> StorageMetadata { key2: scale_info::meta_type::(), value: scale_info::meta_type::(), }, - default: vec![], + default: [0u8; 8].to_vec(), documentation: vec![], }, ] From 564328660b845029215d8aac699a4124d3b19644 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 10 May 2021 17:37:23 +0100 Subject: [PATCH 210/503] WIP fixing pallet metadata test --- Cargo.lock | 1 + frame/support/test/Cargo.toml | 1 + frame/support/test/tests/pallet.rs | 517 +++++++++++++++++++---------- 3 files changed, 342 insertions(+), 177 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ab73258d079fa..02b32ce902e9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1879,6 +1879,7 @@ dependencies = [ "rustversion", "scale-info", "serde", + "sp-arithmetic", "sp-core", "sp-io", 
"sp-runtime", diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index b09b0383a855b..b58cf11c5a541 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -15,6 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.101", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "3.0.0", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.9.0", optional = true, path = "../../../primitives/state-machine" } frame-support = { version = "3.0.0", default-features = false, path = "../" } diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 050032f45dab4..3fd0820ba8f54 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -24,10 +24,6 @@ use frame_support::{ storage::unhashed, scale_info, }; -use scale_info::{ - form::PortableForm, - IntoPortable, -}; use sp_runtime::DispatchError; use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; @@ -641,207 +637,374 @@ fn metadata() { use frame_support::metadata::*; use codec::{Decode, Encode}; - let expected_pallet_metadata = PalletMetadata { - index: 1, - name: "Example", - storage: Some(StorageMetadata { - prefix: "Example", - entries: vec![ - StorageEntryMetadata { - name: "ValueWhereClause", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![0], - documentation: vec![], - }, - StorageEntryMetadata { - name: "Value", - modifier: StorageEntryModifier::Optional, - ty: 
StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![0], - documentation: vec![], - }, - StorageEntryMetadata { - name: "Map", - modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Map { - key: scale_info::meta_type::(), - value: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, - unused: false, + let pallets = vec![ + PalletMetadata { + index: 0, + name: "System", + storage: None, + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![ + FunctionMetadata { + name: "fill_block", + arguments: vec![ + FunctionArgumentMetadata { + name: "_ratio", + ty: scale_info::meta_type::() + } + ], + documentation: vec![ + " A dispatch that will fill the block weight up to the given ratio.a" + ] }, - default: vec![4, 0], - documentation: vec![], - }, - StorageEntryMetadata { - name: "Map2", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - key: scale_info::meta_type::(), - value: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, - unused: false, + FunctionMetadata { + name: "remark", + arguments: vec![ + FunctionArgumentMetadata { + name: "_remark", + ty: scale_info::meta_type::>(), + }], + documentation: vec![] }, - default: vec![0], - documentation: vec![], - }, - StorageEntryMetadata { - name: "DoubleMap", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, + FunctionMetadata { + name: "set_heap_pages", + arguments: vec![ + FunctionArgumentMetadata { + name: "pages", + ty: scale_info::meta_type::(), + } + ], + documentation: vec![] }, - default: vec![0], - documentation: vec![], - }, - StorageEntryMetadata { - name: "DoubleMap2", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: scale_info::meta_type::(), 
- key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, - key2_hasher: StorageHasher::Blake2_128Concat, + FunctionMetadata { + name: "set_code", + arguments: vec![ + FunctionArgumentMetadata { + name: "code", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] }, - default: vec![0], + FunctionMetadata { + name: "set_code_without_checks", + arguments: vec![ + FunctionArgumentMetadata { + name: "code", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] + }, + FunctionMetadata { + name: "set_changes_trie_config", + arguments: vec![ + FunctionArgumentMetadata { + name: "changes_trie_config", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] }, + FunctionMetadata { + name: "set_storage", + arguments: vec![ + FunctionArgumentMetadata { + name: "items", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] }, + FunctionMetadata { name: "kill_storage", + arguments: vec![ + FunctionArgumentMetadata { name: "keys", ty: scale_info::meta_type::>() } + ], + documentation: vec![] }, + FunctionMetadata { + name: "kill_prefix", + arguments: vec![ + FunctionArgumentMetadata { name: "prefix", ty: scale_info::meta_type::() }, + FunctionArgumentMetadata { + name: "_subkeys", + ty: scale_info::meta_type::() + } + ], + documentation: vec![] }, + FunctionMetadata { + name: "remark_with_event", + arguments: vec![ + FunctionArgumentMetadata { + name: "remark", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] } + ] + }), + event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), + constants: vec![ + PalletConstantMetadata { + name: "BlockWeights", + ty: scale_info::meta_type::(), + value: vec![], + documentation: vec![] + }, + PalletConstantMetadata { + name: "BlockLength", + ty: scale_info::meta_type::(), + value: vec![], documentation: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { - name: "ConditionalValue", - 
modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), - default: vec![0], + PalletConstantMetadata { + name: "BlockHashCount", + ty: scale_info::meta_type::(), + value: vec![], documentation: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { - name: "ConditionalMap", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - key: scale_info::meta_type::(), - value: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, - unused: false, - }, - default: vec![0], + PalletConstantMetadata { + name: "DbWeight", + ty: scale_info::meta_type::(), // todo + value: vec![], documentation: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { - name: "ConditionalDoubleMap", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, - }, - default: vec![0], + PalletConstantMetadata { + name: "Version", + ty: scale_info::meta_type::(), // todo + value: vec![], documentation: vec![], }, + PalletConstantMetadata { + name: "SS58Prefix", + ty: scale_info::meta_type::(), // todo + value: vec![], + documentation: vec![] + } ], - }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![ - FunctionMetadata { - name: "foo", - arguments: vec![ - FunctionArgumentMetadata { - name: "_foo", - ty: scale_info::meta_type::>(), + error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), + }, + PalletMetadata { + index: 1, + name: "Example", + storage: Some(StorageMetadata { + prefix: "Example", + entries: vec![ + StorageEntryMetadata { + name: "ValueWhereClause", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + documentation: vec![], + }, + 
StorageEntryMetadata { + name: "Value", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "Map", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hasher: StorageHasher::Blake2_128Concat, + unused: false, }, - FunctionArgumentMetadata { - name: "_bar", - ty: scale_info::meta_type::(), - } - ], + default: vec![4, 0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "Map2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hasher: StorageHasher::Twox64Concat, + unused: false, + }, + default: vec![0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::DoubleMap { + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + }, + default: vec![0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::DoubleMap { + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + hasher: StorageHasher::Twox64Concat, + key2_hasher: StorageHasher::Blake2_128Concat, + }, + default: vec![0], + documentation: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalValue", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + documentation: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalMap", + modifier: 
StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hasher: StorageHasher::Twox64Concat, + unused: false, + }, + default: vec![0], + documentation: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalDoubleMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::DoubleMap { + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + }, + default: vec![0], + documentation: vec![], + }, + ], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![ + FunctionMetadata { + name: "foo", + arguments: vec![ + FunctionArgumentMetadata { + name: "_foo", + ty: scale_info::meta_type::>(), + }, + FunctionArgumentMetadata { + name: "_bar", + ty: scale_info::meta_type::(), + } + ], + documentation: vec![ + " Doc comment put in metadata", + ], + }, + FunctionMetadata { + name: "foo_transactional", + arguments: vec![ + FunctionArgumentMetadata { + name: "foo", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![ + " Doc comment put in metadata", + ], + }, + FunctionMetadata { + name: "foo_no_post_info", + arguments: vec![], + documentation: vec![], + }, + ] + }), + event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), + constants: vec![ + PalletConstantMetadata { + name: "MyGetParam", + ty: scale_info::meta_type::(), + value: vec![10, 0, 0, 0], documentation: vec![ - " Doc comment put in metadata", + " Some comment", + " Some comment", ], }, - FunctionMetadata { - name: "foo_transactional", - arguments: vec![ - FunctionArgumentMetadata { - name: "foo", - ty: scale_info::meta_type::>(), - } - ], + PalletConstantMetadata { + name: "MyGetParam2", + ty: scale_info::meta_type::(), + value: vec![11, 0, 0, 0], documentation: vec![ - " Doc comment put in 
metadata", + " Some comment", + " Some comment", ], }, - FunctionMetadata { - name: "foo_no_post_info", - arguments: vec![], + PalletConstantMetadata { + name: "MyGetParam3", + ty: scale_info::meta_type::(), + value: vec![12, 0, 0, 0, 0, 0, 0, 0], documentation: vec![], }, - ] - }), - event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), - constants: vec![ - PalletConstantMetadata { - name: "MyGetParam", - ty: scale_info::meta_type::(), - value: vec![10, 0, 0, 0], - documentation: vec![ - " Some comment", - " Some comment", - ], - }, - PalletConstantMetadata { - name: "MyGetParam2", - ty: scale_info::meta_type::(), - value: vec![11, 0, 0, 0], - documentation: vec![ - " Some comment", - " Some comment", - ], - }, - PalletConstantMetadata { - name: "MyGetParam3", - ty: scale_info::meta_type::(), - value: vec![12, 0, 0, 0, 0, 0, 0, 0], - documentation: vec![], - }, - PalletConstantMetadata { - name: "some_extra", - ty: scale_info::meta_type::(), - value: vec![100, 0, 0, 0, 0, 0, 0, 0], - documentation: vec![ - " Some doc", - " Some doc", - ], - }, - PalletConstantMetadata { - name: "some_extra_extra", - ty: scale_info::meta_type::(), - value: vec![0, 0, 0, 0, 0, 0, 0, 0], - documentation: vec![ - " Some doc", - ], - }, - ], - error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), + PalletConstantMetadata { + name: "some_extra", + ty: scale_info::meta_type::(), + value: vec![100, 0, 0, 0, 0, 0, 0, 0], + documentation: vec![ + " Some doc", + " Some doc", + ], + }, + PalletConstantMetadata { + name: "some_extra_extra", + ty: scale_info::meta_type::(), + value: vec![0, 0, 0, 0, 0, 0, 0, 0], + documentation: vec![ + " Some doc", + ], + }, + ], + error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), + } + ]; + + let extrinsic = ExtrinsicMetadata { + ty: scale_info::meta_type::(), + version: 4, + signed_extensions: vec![ + SignedExtensionMetadata { identifier: "UnitSignedExtension", ty: scale_info::meta_type::<()>() } + ] + }; + + 
let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); + // match actual_metadata.1 { + // RuntimeMetadata::V13(ref metadata) => { + // println!("{:?}", metadata.pallets); + // }, + // _ => panic!("metadata has been bump, test needs to be updated"), + // }; + let expected_metadata = match expected_metadata.1 { + RuntimeMetadata::V13(metadata) => { + metadata + }, + _ => panic!("metadata has been bump, test needs to be updated"), }; - let metadata = match Runtime::metadata().1 { - RuntimeMetadata::V13(metadata) => metadata, + let actual_metadata = match Runtime::metadata().1 { + RuntimeMetadata::V13(metadata) => { + metadata + }, _ => panic!("metadata has been bump, test needs to be updated"), }; - let pallet_metadata = PalletMetadata::::decode( - &mut &metadata.pallets[1].encode()[..] - ).unwrap(); + // let _ = vec![ + // PalletMetadata { name: "Example2", storage: Some(StorageMetadata { prefix: "Example2", entries: [] }), calls: Some(PalletCallMetadata { ty: UntrackedSymbol { id: 43, marker: PhantomData }, calls: [] }), event: Some(PalletEventMetadata { ty: UntrackedSymbol { id: 44, marker: PhantomData } }), constants: [], error: None, index: 2 } + // ]; - let mut registry = scale_info::Registry::new(); - let expected_pallet_metadata = expected_pallet_metadata.into_portable(&mut registry); - pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); + pretty_assertions::assert_eq!(actual_metadata.pallets[1], expected_metadata.pallets[1]); } #[test] From d834552f1021f4660f839cf885c1ef285ff3ea26 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 11 May 2021 09:28:34 +0100 Subject: [PATCH 211/503] Fix pallet metadata test --- Cargo.lock | 1 + frame/support/test/Cargo.toml | 1 + frame/support/test/tests/pallet.rs | 26 ++++++++------------------ 3 files changed, 10 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 02b32ce902e9f..8d11d6bffd155 100644 --- a/Cargo.lock +++ 
b/Cargo.lock @@ -1885,6 +1885,7 @@ dependencies = [ "sp-runtime", "sp-state-machine", "sp-std", + "sp-version", "trybuild", ] diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index b58cf11c5a541..ef1868a83de54 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -22,6 +22,7 @@ frame-support = { version = "3.0.0", default-features = false, path = "../" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } sp-core = { version = "3.0.0", default-features = false, path = "../../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } +sp-version = { version = "3.0.0", default-features = false, path = "../../../primitives/version" } trybuild = "1.0.38" pretty_assertions = "0.6.1" rustversion = "1.0.0" diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 3fd0820ba8f54..bec167f344ca5 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -762,19 +762,19 @@ fn metadata() { }, PalletConstantMetadata { name: "DbWeight", - ty: scale_info::meta_type::(), // todo + ty: scale_info::meta_type::(), value: vec![], documentation: vec![], }, PalletConstantMetadata { name: "Version", - ty: scale_info::meta_type::(), // todo + ty: scale_info::meta_type::(), value: vec![], documentation: vec![], }, PalletConstantMetadata { name: "SS58Prefix", - ty: scale_info::meta_type::(), // todo + ty: scale_info::meta_type::(), value: vec![], documentation: vec![] } @@ -951,7 +951,7 @@ fn metadata() { }, PalletConstantMetadata { name: "some_extra", - ty: scale_info::meta_type::(), + ty: scale_info::meta_type::(), value: vec![100, 0, 0, 0, 0, 0, 0, 0], documentation: vec![ " Some doc", @@ -960,7 +960,7 @@ fn metadata() { }, PalletConstantMetadata { name: "some_extra_extra", - ty: scale_info::meta_type::(), + ty: scale_info::meta_type::(), value: vec![0, 0, 0, 0, 0, 0, 0, 
0], documentation: vec![ " Some doc", @@ -968,7 +968,7 @@ fn metadata() { }, ], error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), - } + }, ]; let extrinsic = ExtrinsicMetadata { @@ -980,30 +980,20 @@ fn metadata() { }; let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); - // match actual_metadata.1 { - // RuntimeMetadata::V13(ref metadata) => { - // println!("{:?}", metadata.pallets); - // }, - // _ => panic!("metadata has been bump, test needs to be updated"), - // }; let expected_metadata = match expected_metadata.1 { RuntimeMetadata::V13(metadata) => { metadata }, - _ => panic!("metadata has been bump, test needs to be updated"), + _ => panic!("metadata has been bumped, test needs to be updated"), }; let actual_metadata = match Runtime::metadata().1 { RuntimeMetadata::V13(metadata) => { metadata }, - _ => panic!("metadata has been bump, test needs to be updated"), + _ => panic!("metadata has been bumped, test needs to be updated"), }; - // let _ = vec![ - // PalletMetadata { name: "Example2", storage: Some(StorageMetadata { prefix: "Example2", entries: [] }), calls: Some(PalletCallMetadata { ty: UntrackedSymbol { id: 43, marker: PhantomData }, calls: [] }), event: Some(PalletEventMetadata { ty: UntrackedSymbol { id: 44, marker: PhantomData } }), constants: [], error: None, index: 2 } - // ]; - pretty_assertions::assert_eq!(actual_metadata.pallets[1], expected_metadata.pallets[1]); } From aeb363b5c5dfd4d0baae5922e407459423efe633 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 11 May 2021 11:56:20 +0100 Subject: [PATCH 212/503] Fix pallet compatibility test --- frame/support/test/tests/pallet.rs | 1 - .../test/tests/pallet_compatibility.rs | 75 +++++++++++++++++-- 2 files changed, 67 insertions(+), 9 deletions(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index bec167f344ca5..659234ad4868f 100644 --- a/frame/support/test/tests/pallet.rs +++ 
b/frame/support/test/tests/pallet.rs @@ -635,7 +635,6 @@ fn pallet_on_genesis() { #[test] fn metadata() { use frame_support::metadata::*; - use codec::{Decode, Encode}; let pallets = vec![ PalletMetadata { diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index dc1752d0f0dd4..faabdbd2b47e9 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -261,22 +261,81 @@ mod test { use super::pallet; use super::pallet_old; use codec::{Decode, Encode}; + use frame_support::traits::Len; + use scale_info::{ + form::PortableForm, + Variant, + }; #[test] fn metadata() { let metadata = Runtime::metadata(); - let pallets = match metadata.1 { - frame_support::metadata::RuntimeMetadata::V13(frame_support::metadata::RuntimeMetadataLastVersion { - pallets: p, - .. - }) => p, + let (pallets, types) = match metadata.1 { + frame_support::metadata::RuntimeMetadata::V13(metadata) => + (metadata.pallets, metadata.types), _ => unreachable!(), }; + + let assert_meta_types = |ty_id1, ty_id2| { + let ty1 = types.resolve(ty_id1).map(|ty| ty.type_def()); + let ty2 = types.resolve(ty_id2).map(|ty| ty.type_def()); + pretty_assertions::assert_eq!(ty1, ty2); + }; + + let get_enum_variants = |ty_id| { + match types.resolve(ty_id).map(|ty| ty.type_def()) { + Some(ty) => { + match ty { + scale_info::TypeDef::Variant(var) => { + var.variants() + } + _ => panic!("Expected variant type") + } + } + _ => panic!("No type found") + } + }; + + let assert_enum_variants = |vs1: &[scale_info::Variant], vs2: &[scale_info::Variant]| { + assert_eq!(vs1.len(), vs2.len()); + for i in 0..vs1.len() { + let v1 = &vs2[i]; + let v2 = &vs2[i]; + assert_eq!(v1.fields().len(), v2.fields().len()); + for f in 0..v1.fields().len() { + let f1 = &v1.fields()[f]; + let f2 = &v2.fields()[f]; + pretty_assertions::assert_eq!(f1.name(), f2.name()); + pretty_assertions::assert_eq!(f1.ty(), f2.ty()); + } + } + }; 
+ pretty_assertions::assert_eq!(pallets[1].storage, pallets[2].storage); - pretty_assertions::assert_eq!(pallets[1].calls, pallets[2].calls); - pretty_assertions::assert_eq!(pallets[1].event, pallets[2].event); + + let calls1 = pallets[1].calls.as_ref().unwrap(); + let calls2 = pallets[2].calls.as_ref().unwrap(); + pretty_assertions::assert_eq!(calls1.calls, calls2.calls); + assert_meta_types(calls1.ty.id(), calls2.ty.id()); + + // event: check variants and fields but ignore the type name which will be different + let event1_variants = get_enum_variants(pallets[1].event.as_ref().unwrap().ty.id()); + let event2_variants = get_enum_variants(pallets[2].event.as_ref().unwrap().ty.id()); + assert_enum_variants(event1_variants, event2_variants); + + let err1 = get_enum_variants(pallets[1].error.as_ref().unwrap().ty.id()) + .iter() + .filter(|v| v.name() == "__Ignore") + .cloned() + .collect::>(); + let err2 = get_enum_variants(pallets[2].error.as_ref().unwrap().ty.id()) + .iter() + .filter(|v| v.name() == "__Ignore") + .cloned() + .collect::>(); + assert_enum_variants(&err1, &err2); + pretty_assertions::assert_eq!(pallets[1].constants, pallets[2].constants); - pretty_assertions::assert_eq!(pallets[1].error, pallets[2].error); } #[test] From 77a8b08ba333322e5d81454b5081b55bc8b76ea2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 11 May 2021 13:07:31 +0100 Subject: [PATCH 213/503] Fix pallet compatibility instance test --- .../test/tests/pallet_compatibility.rs | 6 +- .../tests/pallet_compatibility_instance.rs | 75 ++++++++++++++++--- 2 files changed, 66 insertions(+), 15 deletions(-) diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index faabdbd2b47e9..08e2975b65804 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -261,11 +261,7 @@ mod test { use super::pallet; use super::pallet_old; use codec::{Decode, Encode}; - use 
frame_support::traits::Len; - use scale_info::{ - form::PortableForm, - Variant, - }; + use scale_info::form::PortableForm; #[test] fn metadata() { diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index c300205512a96..4ff615d1185e9 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -276,23 +276,78 @@ mod test { use super::pallet; use super::pallet_old; use codec::{Decode, Encode}; + use scale_info::form::PortableForm; #[test] fn metadata() { let metadata = Runtime::metadata(); - let pallets = match metadata.1 { - frame_support::metadata::RuntimeMetadata::V13(frame_support::metadata::RuntimeMetadataLastVersion { - pallets: p, - .. - }) => p, + let (pallets, types) = match metadata.1 { + frame_support::metadata::RuntimeMetadata::V13(metadata) => + (metadata.pallets, metadata.types), _ => unreachable!(), }; + + let assert_meta_types = |ty_id1, ty_id2| { + let ty1 = types.resolve(ty_id1).map(|ty| ty.type_def()); + let ty2 = types.resolve(ty_id2).map(|ty| ty.type_def()); + pretty_assertions::assert_eq!(ty1, ty2); + }; + + let get_enum_variants = |ty_id| { + match types.resolve(ty_id).map(|ty| ty.type_def()) { + Some(ty) => { + match ty { + scale_info::TypeDef::Variant(var) => { + var.variants() + } + _ => panic!("Expected variant type") + } + } + _ => panic!("No type found") + } + }; + + let assert_enum_variants = |vs1: &[scale_info::Variant], vs2: &[scale_info::Variant]| { + assert_eq!(vs1.len(), vs2.len()); + for i in 0..vs1.len() { + let v1 = &vs2[i]; + let v2 = &vs2[i]; + assert_eq!(v1.fields().len(), v2.fields().len()); + for f in 0..v1.fields().len() { + let f1 = &v1.fields()[f]; + let f2 = &v2.fields()[f]; + pretty_assertions::assert_eq!(f1.name(), f2.name()); + pretty_assertions::assert_eq!(f1.ty(), f2.ty()); + } + } + }; + for i in vec![1, 3, 5].into_iter() { - 
pretty_assertions::assert_eq!(pallets[i].storage, pallets[i+1].storage); - pretty_assertions::assert_eq!(pallets[i].calls, pallets[i+1].calls); - pretty_assertions::assert_eq!(pallets[i].event, pallets[i+1].event); - pretty_assertions::assert_eq!(pallets[i].constants, pallets[i+1].constants); - pretty_assertions::assert_eq!(pallets[i].error, pallets[i+1].error); + pretty_assertions::assert_eq!(pallets[i].storage, pallets[i + 1].storage); + + let calls1 = pallets[i].calls.as_ref().unwrap(); + let calls2 = pallets[i + 1].calls.as_ref().unwrap(); + pretty_assertions::assert_eq!(calls1.calls, calls2.calls); + assert_meta_types(calls1.ty.id(), calls2.ty.id()); + + // event: check variants and fields but ignore the type name which will be different + let event1_variants = get_enum_variants(pallets[i].event.as_ref().unwrap().ty.id()); + let event2_variants = get_enum_variants(pallets[i + 1].event.as_ref().unwrap().ty.id()); + assert_enum_variants(event1_variants, event2_variants); + + let err1 = get_enum_variants(pallets[i].error.as_ref().unwrap().ty.id()) + .iter() + .filter(|v| v.name() == "__Ignore") + .cloned() + .collect::>(); + let err2 = get_enum_variants(pallets[i + 1].error.as_ref().unwrap().ty.id()) + .iter() + .filter(|v| v.name() == "__Ignore") + .cloned() + .collect::>(); + assert_enum_variants(&err1, &err2); + + pretty_assertions::assert_eq!(pallets[i].constants, pallets[i + 1].constants); } } From 56eedcdcf3f5cbbb9759fd88bfee83566b82d5ed Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 11 May 2021 16:03:04 +0100 Subject: [PATCH 214/503] Fix pallet instance test --- frame/support/test/tests/pallet_instance.rs | 220 +++++++++++++++++--- 1 file changed, 194 insertions(+), 26 deletions(-) diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index c85b1598236fc..64487ff6ded7b 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -23,10 +23,6 @@ use 
frame_support::{ dispatch::UnfilteredDispatchable, storage::unhashed, }; -use scale_info::{ - form::PortableForm, - IntoPortable, -}; use sp_runtime::DispatchError; use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; @@ -556,9 +552,152 @@ fn pallet_on_genesis() { #[test] fn metadata() { use frame_support::metadata::*; - use codec::{Decode, Encode}; - let expected_pallet_metadata = PalletMetadata { + let system_pallet_metadata = PalletMetadata { + index: 0, + name: "System", + storage: None, + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![ + FunctionMetadata { + name: "fill_block", + arguments: vec![ + FunctionArgumentMetadata { + name: "_ratio", + ty: scale_info::meta_type::() + } + ], + documentation: vec![ + " A dispatch that will fill the block weight up to the given ratio.a" + ] + }, + FunctionMetadata { + name: "remark", + arguments: vec![ + FunctionArgumentMetadata { + name: "_remark", + ty: scale_info::meta_type::>(), + }], + documentation: vec![] + }, + FunctionMetadata { + name: "set_heap_pages", + arguments: vec![ + FunctionArgumentMetadata { + name: "pages", + ty: scale_info::meta_type::(), + } + ], + documentation: vec![] + }, + FunctionMetadata { + name: "set_code", + arguments: vec![ + FunctionArgumentMetadata { + name: "code", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] + }, + FunctionMetadata { + name: "set_code_without_checks", + arguments: vec![ + FunctionArgumentMetadata { + name: "code", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] + }, + FunctionMetadata { + name: "set_changes_trie_config", + arguments: vec![ + FunctionArgumentMetadata { + name: "changes_trie_config", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] }, + FunctionMetadata { + name: "set_storage", + arguments: vec![ + FunctionArgumentMetadata { + name: "items", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] }, + FunctionMetadata { name: 
"kill_storage", + arguments: vec![ + FunctionArgumentMetadata { name: "keys", ty: scale_info::meta_type::>() } + ], + documentation: vec![] }, + FunctionMetadata { + name: "kill_prefix", + arguments: vec![ + FunctionArgumentMetadata { name: "prefix", ty: scale_info::meta_type::() }, + FunctionArgumentMetadata { + name: "_subkeys", + ty: scale_info::meta_type::() + } + ], + documentation: vec![] }, + FunctionMetadata { + name: "remark_with_event", + arguments: vec![ + FunctionArgumentMetadata { + name: "remark", + ty: scale_info::meta_type::>(), + } + ], + documentation: vec![] } + ] + }), + event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), + constants: vec![ + PalletConstantMetadata { + name: "BlockWeights", + ty: scale_info::meta_type::(), + value: vec![], + documentation: vec![] + }, + PalletConstantMetadata { + name: "BlockLength", + ty: scale_info::meta_type::(), + value: vec![], + documentation: vec![], + }, + PalletConstantMetadata { + name: "BlockHashCount", + ty: scale_info::meta_type::(), + value: vec![], + documentation: vec![], + }, + PalletConstantMetadata { + name: "DbWeight", + ty: scale_info::meta_type::(), + value: vec![], + documentation: vec![], + }, + PalletConstantMetadata { + name: "Version", + ty: scale_info::meta_type::(), + value: vec![], + documentation: vec![], + }, + PalletConstantMetadata { + name: "SS58Prefix", + ty: scale_info::meta_type::(), + value: vec![], + documentation: vec![] + } + ], + error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), + }; + + let example_pallet_metadata = PalletMetadata { index: 1, name: "Example", storage: Some(StorageMetadata { @@ -664,36 +803,65 @@ fn metadata() { error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), }; - let mut expected_pallet_instance1_metadata = expected_pallet_metadata.clone(); - expected_pallet_instance1_metadata.name = "Instance1Example"; - expected_pallet_instance1_metadata.index = 2; - match 
expected_pallet_instance1_metadata.storage { + let mut example_pallet_instance1_metadata = example_pallet_metadata.clone(); + example_pallet_instance1_metadata.name = "Instance1Example"; + example_pallet_instance1_metadata.index = 2; + match example_pallet_instance1_metadata.calls { + Some(ref mut calls_meta) => { + calls_meta.ty = scale_info::meta_type::>(); + }, + _ => unreachable!(), + } + match example_pallet_instance1_metadata.event { + Some(ref mut event_meta) => { + event_meta.ty = scale_info::meta_type::>(); + }, + _ => unreachable!(), + } + match example_pallet_instance1_metadata.error { + Some(ref mut error_meta) => { + error_meta.ty = scale_info::meta_type::>(); + }, + _ => unreachable!(), + } + match example_pallet_instance1_metadata.storage { Some(ref mut storage_meta) => { storage_meta.prefix = "Instance1Example"; }, _ => unreachable!(), } - - let metadata = match Runtime::metadata().1 { - RuntimeMetadata::V13(metadata) => metadata, - _ => panic!("metadata has been bump, test needs to be updated"), + let pallets = vec![ + system_pallet_metadata, + example_pallet_metadata, + example_pallet_instance1_metadata, + ]; + + let extrinsic = ExtrinsicMetadata { + ty: scale_info::meta_type::(), + version: 4, + signed_extensions: vec![ + SignedExtensionMetadata { identifier: "UnitSignedExtension", ty: scale_info::meta_type::<()>() } + ] }; - let pallet_metadata = PalletMetadata::::decode( - &mut &metadata.pallets[1].encode()[..] 
- ).unwrap(); - let pallet_instance1_metadata = - PalletMetadata::decode(&mut &metadata.pallets[2].encode()[..]).unwrap(); - - let mut registry = scale_info::Registry::new(); - let expected_pallet_metadata = expected_pallet_metadata.into_portable(&mut registry); + let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); + let expected_metadata = match expected_metadata.1 { + RuntimeMetadata::V13(metadata) => { + metadata + }, + _ => panic!("metadata has been bumped, test needs to be updated"), + }; - let mut registry = scale_info::Registry::new(); - let expected_pallet_instance1_metadata = expected_pallet_instance1_metadata.into_portable(&mut registry); + let actual_metadata = match Runtime::metadata().1 { + RuntimeMetadata::V13(metadata) => { + metadata + }, + _ => panic!("metadata has been bumped, test needs to be updated"), + }; - pretty_assertions::assert_eq!(pallet_metadata, expected_pallet_metadata); - pretty_assertions::assert_eq!(pallet_instance1_metadata, expected_pallet_instance1_metadata); + pretty_assertions::assert_eq!(actual_metadata.pallets[1], expected_metadata.pallets[1]); + pretty_assertions::assert_eq!(actual_metadata.pallets[2], expected_metadata.pallets[2]); } #[test] From fb48607b0f588941b6be26de54737cc3b070f988 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 11 May 2021 16:36:31 +0100 Subject: [PATCH 215/503] Fix up UI tests --- .../test/tests/pallet_ui/call_argument_invalid_bound.rs | 2 +- .../test/tests/pallet_ui/call_argument_invalid_bound_2.rs | 3 ++- .../test/tests/pallet_ui/call_argument_invalid_bound_3.rs | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.rs b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.rs index 69d35344d5761..a41528d14dbb3 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.rs +++ 
b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.rs @@ -5,7 +5,7 @@ mod pallet { #[pallet::config] pub trait Config: frame_system::Config { - type Bar: codec::Codec; + type Bar: codec::Codec + scale_info::TypeInfo; } #[pallet::pallet] diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs index 581c72a4240a0..a93ad782995f2 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs @@ -5,7 +5,8 @@ mod pallet { #[pallet::config] pub trait Config: frame_system::Config { - type Bar; + // todo: [AJ] should we rather modify the stderr to expect the missing trait bound error? + type Bar: scale_info::TypeInfo; } #[pallet::pallet] diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.rs b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.rs index 97f362551037d..334869bd50fdc 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.rs +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.rs @@ -13,7 +13,7 @@ mod pallet { #[pallet::hooks] impl Hooks> for Pallet {} - #[derive(Encode, Decode)] + #[derive(Encode, Decode, scale_info::TypeInfo)] struct Bar; #[pallet::call] From 7dbd90b379c8f6c5d63f1516b843289af71ce492 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 12 May 2021 07:53:13 +0100 Subject: [PATCH 216/503] Restore metadata test --- frame/support/src/metadata.rs | 778 ++++++++++++++++------------------ 1 file changed, 370 insertions(+), 408 deletions(-) diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index e42bcec6b391d..75dfe5e48a006 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -239,411 +239,373 @@ macro_rules! 
__runtime_modules_to_metadata_calls_storage { }; } -// todo: [AJ] restore metadata tests -// #[cfg(test)] -// // Do not complain about unused `dispatch` and `dispatch_aux`. -// #[allow(dead_code)] -// mod tests { -// use super::*; -// use frame_metadata::{ -// EventMetadata, StorageEntryModifier, StorageEntryType, FunctionMetadata, StorageEntryMetadata, -// ModuleMetadata, RuntimeMetadataPrefixed, DefaultByte, PalletConstantMetadata, DefaultByteGetter, -// ErrorMetadata, ExtrinsicMetadata, -// }; -// use codec::{Encode, Decode}; -// use crate::traits::Get; -// use sp_runtime::transaction_validity::TransactionValidityError; -// -// #[derive(Clone, Eq, Debug, PartialEq, Encode, Decode)] -// struct TestExtension; -// impl sp_runtime::traits::SignedExtension for TestExtension { -// type AccountId = u32; -// type Call = (); -// type AdditionalSigned = u32; -// type Pre = (); -// const IDENTIFIER: &'static str = "testextension"; -// fn additional_signed(&self) -> Result { -// Ok(1) -// } -// } -// -// #[derive(Clone, Eq, Debug, PartialEq, Encode, Decode)] -// struct TestExtension2; -// impl sp_runtime::traits::SignedExtension for TestExtension2 { -// type AccountId = u32; -// type Call = (); -// type AdditionalSigned = u32; -// type Pre = (); -// const IDENTIFIER: &'static str = "testextension2"; -// fn additional_signed(&self) -> Result { -// Ok(1) -// } -// } -// -// struct TestExtrinsic; -// -// impl sp_runtime::traits::ExtrinsicMetadata for TestExtrinsic { -// const VERSION: u8 = 1; -// type SignedExtensions = (TestExtension, TestExtension2); -// } -// -// mod system { -// use super::*; -// -// pub trait Config: 'static { -// type BaseCallFilter; -// const ASSOCIATED_CONST: u64 = 500; -// type Origin: Into, Self::Origin>> -// + From>; -// type AccountId: From + Encode; -// type BlockNumber: From + Encode; -// type SomeValue: Get; -// type PalletInfo: crate::traits::PalletInfo; -// type DbWeight: crate::traits::Get; -// type Call; -// } -// -// decl_module! 
{ -// pub struct Module for enum Call where origin: T::Origin, system=self { -// /// Hi, I am a comment. -// const BlockNumber: T::BlockNumber = 100.into(); -// const GetType: T::AccountId = T::SomeValue::get().into(); -// const ASSOCIATED_CONST: u64 = T::ASSOCIATED_CONST.into(); -// } -// } -// -// decl_event!( -// pub enum Event { -// SystemEvent, -// } -// ); -// -// #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode)] -// pub enum RawOrigin { -// Root, -// Signed(AccountId), -// None, -// } -// -// impl From> for RawOrigin { -// fn from(s: Option) -> RawOrigin { -// match s { -// Some(who) => RawOrigin::Signed(who), -// None => RawOrigin::None, -// } -// } -// } -// -// pub type Origin = RawOrigin<::AccountId>; -// } -// -// mod event_module { -// use crate::dispatch::DispatchResult; -// use super::system; -// -// pub trait Config: system::Config { -// type Balance; -// } -// -// decl_event!( -// pub enum Event where ::Balance -// { -// /// Hi, I am a comment. -// TestEvent(Balance), -// } -// ); -// -// decl_module! { -// pub struct Module for enum Call where origin: T::Origin, system=system { -// type Error = Error; -// -// #[weight = 0] -// fn aux_0(_origin) -> DispatchResult { unreachable!() } -// } -// } -// -// crate::decl_error! { -// pub enum Error for Module { -// /// Some user input error -// UserInputError, -// /// Something bad happened -// /// this could be due to many reasons -// BadThingHappened, -// } -// } -// } -// -// mod event_module2 { -// use super::system; -// -// pub trait Config: system::Config { -// type Balance; -// } -// -// decl_event!( -// pub enum Event where ::Balance -// { -// TestEvent(Balance), -// } -// ); -// -// decl_module! { -// pub struct Module for enum Call where origin: T::Origin, system=system {} -// } -// -// crate::decl_storage! 
{ -// trait Store for Module as TestStorage { -// StorageMethod : Option; -// } -// add_extra_genesis { -// build(|_| {}); -// } -// } -// } -// -// type EventModule = event_module::Module; -// type EventModule2 = event_module2::Module; -// -// #[derive(Debug, Clone, PartialEq, Eq, Encode, Decode)] -// pub struct TestRuntime; -// -// impl crate::traits::PalletInfo for TestRuntime { -// fn index() -> Option { -// let type_id = sp_std::any::TypeId::of::

(); -// if type_id == sp_std::any::TypeId::of::>() { -// return Some(0) -// } -// if type_id == sp_std::any::TypeId::of::() { -// return Some(1) -// } -// if type_id == sp_std::any::TypeId::of::() { -// return Some(2) -// } -// -// None -// } -// fn name() -> Option<&'static str> { -// let type_id = sp_std::any::TypeId::of::

(); -// if type_id == sp_std::any::TypeId::of::>() { -// return Some("System") -// } -// if type_id == sp_std::any::TypeId::of::() { -// return Some("EventModule") -// } -// if type_id == sp_std::any::TypeId::of::() { -// return Some("EventModule2") -// } -// -// None -// } -// } -// -// impl_outer_event! { -// pub enum TestEvent for TestRuntime { -// system, -// event_module, -// event_module2, -// } -// } -// -// impl_outer_origin! { -// pub enum Origin for TestRuntime where system = system {} -// } -// -// impl_outer_dispatch! { -// pub enum Call for TestRuntime where origin: Origin { -// event_module::EventModule, -// event_module2::EventModule2, -// } -// } -// -// impl event_module::Config for TestRuntime { -// type Balance = u32; -// } -// -// impl event_module2::Config for TestRuntime { -// type Balance = u32; -// } -// -// crate::parameter_types! { -// pub const SystemValue: u32 = 600; -// } -// -// impl system::Config for TestRuntime { -// type BaseCallFilter = (); -// type Origin = Origin; -// type AccountId = u32; -// type BlockNumber = u32; -// type SomeValue = SystemValue; -// type PalletInfo = Self; -// type DbWeight = (); -// type Call = Call; -// } -// -// impl_runtime_metadata!( -// for TestRuntime with pallets where Extrinsic = TestExtrinsic -// system::Pallet as System { index 0 } with Event, -// event_module::Module as Module { index 1 } with Event Call, -// event_module2::Module as Module2 { index 2 } with Event Storage Call, -// ); -// -// struct ConstantBlockNumberByteGetter; -// impl DefaultByte for ConstantBlockNumberByteGetter { -// fn default_byte(&self) -> Vec { -// 100u32.encode() -// } -// } -// -// struct ConstantGetTypeByteGetter; -// impl DefaultByte for ConstantGetTypeByteGetter { -// fn default_byte(&self) -> Vec { -// SystemValue::get().encode() -// } -// } -// -// struct ConstantAssociatedConstByteGetter; -// impl DefaultByte for ConstantAssociatedConstByteGetter { -// fn default_byte(&self) -> Vec { -// 
::ASSOCIATED_CONST.encode() -// } -// } -// -// #[test] -// fn runtime_metadata() { -// let expected_metadata: RuntimeMetadataLastVersion = RuntimeMetadataLastVersion { -// modules: DecodeDifferent::Encode(&[ -// ModuleMetadata { -// name: DecodeDifferent::Encode("System"), -// index: 0, -// storage: None, -// calls: None, -// event: Some(DecodeDifferent::Encode( -// FnEncode(||&[ -// EventMetadata { -// name: DecodeDifferent::Encode("SystemEvent"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]) -// } -// ]) -// )), -// constants: DecodeDifferent::Encode( -// FnEncode(|| &[ -// PalletConstantMetadata { -// name: DecodeDifferent::Encode("BlockNumber"), -// ty: DecodeDifferent::Encode("T::BlockNumber"), -// value: DecodeDifferent::Encode( -// DefaultByteGetter(&ConstantBlockNumberByteGetter) -// ), -// documentation: DecodeDifferent::Encode(&[" Hi, I am a comment."]), -// }, -// PalletConstantMetadata { -// name: DecodeDifferent::Encode("GetType"), -// ty: DecodeDifferent::Encode("T::AccountId"), -// value: DecodeDifferent::Encode( -// DefaultByteGetter(&ConstantGetTypeByteGetter) -// ), -// documentation: DecodeDifferent::Encode(&[]), -// }, -// PalletConstantMetadata { -// name: DecodeDifferent::Encode("ASSOCIATED_CONST"), -// ty: DecodeDifferent::Encode("u64"), -// value: DecodeDifferent::Encode( -// DefaultByteGetter(&ConstantAssociatedConstByteGetter) -// ), -// documentation: DecodeDifferent::Encode(&[]), -// } -// ]) -// ), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// }, -// ModuleMetadata { -// name: DecodeDifferent::Encode("Module"), -// index: 1, -// storage: None, -// calls: Some( -// DecodeDifferent::Encode(FnEncode(|| &[ -// FunctionMetadata { -// name: DecodeDifferent::Encode("aux_0"), -// arguments: DecodeDifferent::Encode(&[]), -// documentation: DecodeDifferent::Encode(&[]), -// } -// ]))), -// event: Some(DecodeDifferent::Encode( -// FnEncode(||&[ -// EventMetadata { -// name: 
DecodeDifferent::Encode("TestEvent"), -// arguments: DecodeDifferent::Encode(&["Balance"]), -// documentation: DecodeDifferent::Encode(&[" Hi, I am a comment."]) -// } -// ]) -// )), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[ -// ErrorMetadata { -// name: DecodeDifferent::Encode("UserInputError"), -// documentation: DecodeDifferent::Encode(&[" Some user input error"]), -// }, -// ErrorMetadata { -// name: DecodeDifferent::Encode("BadThingHappened"), -// documentation: DecodeDifferent::Encode(&[ -// " Something bad happened", -// " this could be due to many reasons", -// ]), -// }, -// ])), -// }, -// ModuleMetadata { -// name: DecodeDifferent::Encode("Module2"), -// index: 2, -// storage: Some(DecodeDifferent::Encode( -// FnEncode(|| StorageMetadata { -// prefix: DecodeDifferent::Encode("TestStorage"), -// entries: DecodeDifferent::Encode( -// &[ -// StorageEntryMetadata { -// name: DecodeDifferent::Encode("StorageMethod"), -// modifier: StorageEntryModifier::Optional, -// ty: StorageEntryType::Plain(DecodeDifferent::Encode("u32")), -// default: DecodeDifferent::Encode( -// DefaultByteGetter( -// &event_module2::__GetByteStructStorageMethod( -// std::marker::PhantomData:: -// ) -// ) -// ), -// documentation: DecodeDifferent::Encode(&[]), -// } -// ] -// ) -// }), -// )), -// calls: Some(DecodeDifferent::Encode(FnEncode(|| &[]))), -// event: Some(DecodeDifferent::Encode( -// FnEncode(||&[ -// EventMetadata { -// name: DecodeDifferent::Encode("TestEvent"), -// arguments: DecodeDifferent::Encode(&["Balance"]), -// documentation: DecodeDifferent::Encode(&[]) -// } -// ]) -// )), -// constants: DecodeDifferent::Encode(FnEncode(|| &[])), -// errors: DecodeDifferent::Encode(FnEncode(|| &[])), -// }, -// ]), -// extrinsic: ExtrinsicMetadata { -// version: 1, -// signed_extensions: vec![ -// DecodeDifferent::Encode("testextension"), -// DecodeDifferent::Encode("testextension2"), -// ], -// } -// }; -// -// 
let metadata_encoded = TestRuntime::metadata().encode(); -// let metadata_decoded = RuntimeMetadataPrefixed::decode(&mut &metadata_encoded[..]); -// let expected_metadata: RuntimeMetadataPrefixed = expected_metadata.into(); -// -// pretty_assertions::assert_eq!(expected_metadata, metadata_decoded.unwrap()); -// } -// } +#[cfg(test)] +// Do not complain about unused `dispatch` and `dispatch_aux`. +#[allow(dead_code)] +mod tests { + use super::*; + use codec::{Encode, Decode}; + use crate::traits::Get; + use sp_runtime::transaction_validity::TransactionValidityError; + + #[derive(Clone, Eq, Debug, PartialEq, Encode, Decode, scale_info::TypeInfo)] + struct TestExtension; + impl sp_runtime::traits::SignedExtension for TestExtension { + type AccountId = u32; + type Call = (); + type AdditionalSigned = u32; + type Pre = (); + const IDENTIFIER: &'static str = "testextension"; + fn additional_signed(&self) -> Result { + Ok(1) + } + } + + #[derive(Clone, Eq, Debug, PartialEq, Encode, Decode, scale_info::TypeInfo)] + struct TestExtension2; + impl sp_runtime::traits::SignedExtension for TestExtension2 { + type AccountId = u32; + type Call = (); + type AdditionalSigned = u32; + type Pre = (); + const IDENTIFIER: &'static str = "testextension2"; + fn additional_signed(&self) -> Result { + Ok(1) + } + } + + #[derive(scale_info::TypeInfo)] + struct TestExtrinsic; + + impl sp_runtime::traits::ExtrinsicMetadata for TestExtrinsic { + const VERSION: u8 = 1; + type SignedExtensions = (TestExtension, TestExtension2); + } + + mod system { + use super::*; + + pub trait Config: scale_info::TypeInfo + 'static { + type BaseCallFilter; + const ASSOCIATED_CONST: u64 = 500; + type Origin: Into, Self::Origin>> + + From>; + type AccountId: From + Encode + scale_info::TypeInfo; + type BlockNumber: From + Encode + scale_info::TypeInfo; + type SomeValue: Get; + type PalletInfo: crate::traits::PalletInfo; + type DbWeight: crate::traits::Get; + type Call; + } + + decl_module! 
{ + pub struct Module for enum Call where origin: T::Origin, system=self { + /// Hi, I am a comment. + const BlockNumber: T::BlockNumber = 100.into(); + const GetType: T::AccountId = T::SomeValue::get().into(); + const ASSOCIATED_CONST: u64 = T::ASSOCIATED_CONST.into(); + } + } + + decl_event!( + pub enum Event { + SystemEvent, + } + ); + + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] + pub enum RawOrigin { + Root, + Signed(AccountId), + None, + } + + impl From> for RawOrigin { + fn from(s: Option) -> RawOrigin { + match s { + Some(who) => RawOrigin::Signed(who), + None => RawOrigin::None, + } + } + } + + pub type Origin = RawOrigin<::AccountId>; + } + + mod event_module { + use crate::dispatch::DispatchResult; + use super::system; + + pub trait Config: system::Config { + type Balance; + } + + decl_event!( + pub enum Event where ::Balance + { + /// Hi, I am a comment. + TestEvent(Balance), + } + ); + + decl_module! { + pub struct Module for enum Call where origin: T::Origin, system=system { + type Error = Error; + + #[weight = 0] + fn aux_0(_origin) -> DispatchResult { unreachable!() } + } + } + + crate::decl_error! { + pub enum Error for Module { + /// Some user input error + UserInputError, + /// Something bad happened + /// this could be due to many reasons + BadThingHappened, + } + } + } + + mod event_module2 { + use super::system; + + pub trait Config: system::Config { + type Balance; + } + + decl_event!( + pub enum Event where ::Balance + { + TestEvent(Balance), + } + ); + + decl_module! { + pub struct Module for enum Call where origin: T::Origin, system=system {} + } + + crate::decl_storage! 
{ + trait Store for Module as TestStorage { + StorageMethod : Option; + } + add_extra_genesis { + build(|_| {}); + } + } + } + + type EventModule = event_module::Module; + type EventModule2 = event_module2::Module; + + #[derive(Debug, Clone, PartialEq, Eq, Encode, Decode, scale_info::TypeInfo)] + pub struct TestRuntime; + + impl crate::traits::PalletInfo for TestRuntime { + fn index() -> Option { + let type_id = sp_std::any::TypeId::of::

(); + if type_id == sp_std::any::TypeId::of::>() { + return Some(0) + } + if type_id == sp_std::any::TypeId::of::() { + return Some(1) + } + if type_id == sp_std::any::TypeId::of::() { + return Some(2) + } + + None + } + fn name() -> Option<&'static str> { + let type_id = sp_std::any::TypeId::of::

(); + if type_id == sp_std::any::TypeId::of::>() { + return Some("System") + } + if type_id == sp_std::any::TypeId::of::() { + return Some("EventModule") + } + if type_id == sp_std::any::TypeId::of::() { + return Some("EventModule2") + } + + None + } + } + + impl_outer_event! { + pub enum TestEvent for TestRuntime { + system, + event_module, + event_module2, + } + } + + impl_outer_origin! { + pub enum Origin for TestRuntime where system = system {} + } + + impl_outer_dispatch! { + pub enum Call for TestRuntime where origin: Origin { + event_module::EventModule, + event_module2::EventModule2, + } + } + + impl event_module::Config for TestRuntime { + type Balance = u32; + } + + impl event_module2::Config for TestRuntime { + type Balance = u32; + } + + crate::parameter_types! { + pub const SystemValue: u32 = 600; + } + + #[test] + fn runtime_metadata() { + let pallets = vec![ + PalletMetadata { + name: "System", + index: 0, + storage: None, + calls: None, + event: Some( + PalletEventMetadata { + ty: scale_info::meta_type::(), + }, + ), + constants: vec![ + PalletConstantMetadata { + name: "BlockNumber", + ty: scale_info::meta_type::(), + value: vec![100, 0, 0, 0], + documentation: vec![ + " Hi, I am a comment.", + ], + }, + PalletConstantMetadata { + name: "GetType", + ty: scale_info::meta_type::(), + value: vec![88, 2, 0, 0], + documentation: vec![], + }, + PalletConstantMetadata { + name: "ASSOCIATED_CONST", + ty: scale_info::meta_type::(), + value: vec![244, 1, 0, 0, 0, 0, 0, 0], + documentation: vec![], + }, + ], + error: None, + }, + PalletMetadata { + name: "Module", + index: 1, + storage: None, + calls: Some( + PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![ + FunctionMetadata { + name: "aux_0", + arguments: vec![], + documentation: vec![], + }, + ], + }, + ), + event: Some( + PalletEventMetadata { + ty: scale_info::meta_type::>(), + }, + ), + constants: vec![], + error: Some( + PalletErrorMetadata { + ty: scale_info::meta_type::>(), + }, 
+ ), + }, + PalletMetadata { + name: "Module2", + storage: Some( + StorageMetadata { + prefix: "TestStorage", + entries: vec![ + StorageEntryMetadata { + name: "StorageMethod", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain( + scale_info::meta_type::(), + ), + default: vec![0], + documentation: vec![], + }, + ], + }, + ), + calls: Some( + PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![], + }, + ), + event: Some( + PalletEventMetadata { + ty: scale_info::meta_type::>(), + }, + ), + constants: vec![], + error: None, + index: 2, + }, + ]; + let extrinsic = ExtrinsicMetadata { + ty: scale_info::meta_type::(), + version: 1, + signed_extensions: vec![ + SignedExtensionMetadata { identifier: "testextension", ty: scale_info::meta_type::() }, + SignedExtensionMetadata { identifier: "testextension2", ty: scale_info::meta_type::() }, + ] + }; + + let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); + let expected_metadata = match expected_metadata.1 { + RuntimeMetadata::V13(metadata) => { + metadata + }, + _ => panic!("metadata has been bumped, test needs to be updated"), + }; + + let actual_metadata = match TestRuntime::metadata().1 { + RuntimeMetadata::V13(metadata) => { + metadata + }, + _ => panic!("metadata has been bumped, test needs to be updated"), + }; + + pretty_assertions::assert_eq!(actual_metadata.pallets, expected_metadata.pallets); + pretty_assertions::assert_eq!(actual_metadata.extrinsic, expected_metadata.extrinsic); + } + + impl system::Config for TestRuntime { + type BaseCallFilter = (); + type Origin = Origin; + type AccountId = u32; + type BlockNumber = u32; + type SomeValue = SystemValue; + type PalletInfo = Self; + type DbWeight = (); + type Call = Call; + } + + impl_runtime_metadata!( + for TestRuntime with pallets where Extrinsic = TestExtrinsic + system::Pallet as System { index 0 } with Event, + event_module::Module as Module { index 1 } with 
Event Call, + event_module2::Module as Module2 { index 2 } with Event Storage Call, + ); +} From a852f42e0842d3dd996d05d2c5ae9a237b03a39c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 12 May 2021 17:33:18 +0100 Subject: [PATCH 217/503] Remove more pallet::metadata attributes --- frame/lottery/src/lib.rs | 1 - frame/recovery/src/lib.rs | 1 - 2 files changed, 2 deletions(-) diff --git a/frame/lottery/src/lib.rs b/frame/lottery/src/lib.rs index b71524f2e0451..d454de5626274 100644 --- a/frame/lottery/src/lib.rs +++ b/frame/lottery/src/lib.rs @@ -164,7 +164,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", BalanceOf = "Balance")] pub enum Event { /// A lottery has been started! LotteryStarted, diff --git a/frame/recovery/src/lib.rs b/frame/recovery/src/lib.rs index 796b826ea7f0d..4b35a09d3f953 100644 --- a/frame/recovery/src/lib.rs +++ b/frame/recovery/src/lib.rs @@ -252,7 +252,6 @@ pub mod pallet { /// Events type. #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId")] pub enum Event { /// A recovery process has been set up for an \[account\]. 
RecoveryCreated(T::AccountId), From 9d6d395f7eafaecc8f869ad9a17795eb2e8714b1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 13 May 2021 15:26:58 +0100 Subject: [PATCH 218/503] Update frame-metadata and use `Into>` for constructing OpaqueMetadata --- Cargo.lock | 2 +- bin/node-template/runtime/src/lib.rs | 3 +-- bin/node/runtime/src/lib.rs | 2 +- frame/support/procedural/src/pallet/expand/storage.rs | 4 ++-- frame/support/procedural/src/storage/metadata.rs | 4 ++-- frame/support/src/metadata.rs | 5 ++--- 6 files changed, 9 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 946b34dd461ec..eeb4c75460c9c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1799,7 +1799,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#b02894949737a7b1e462af282330c3cf1f43acf9" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#f4b1727ec933545f3d34f30f4537c0820671c52c" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/bin/node-template/runtime/src/lib.rs b/bin/node-template/runtime/src/lib.rs index 78e6576b6c073..e9f0bd8deefef 100644 --- a/bin/node-template/runtime/src/lib.rs +++ b/bin/node-template/runtime/src/lib.rs @@ -345,8 +345,7 @@ impl_runtime_apis! { impl sp_api::Metadata for Runtime { fn metadata() -> OpaqueMetadata { - use codec::Encode as _; - OpaqueMetadata::new(Runtime::metadata().encode()) + OpaqueMetadata::new(Runtime::metadata().into()) } } diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index 0318f3fc28b13..4119e3edb77f5 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -1208,7 +1208,7 @@ impl_runtime_apis! 
{ impl sp_api::Metadata for Runtime { fn metadata() -> OpaqueMetadata { - OpaqueMetadata::new(Runtime::metadata().encode()) + OpaqueMetadata::new(Runtime::metadata().into()) } } diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index f9795fe9095ce..62e808472a4c6 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -221,8 +221,8 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { #completed_where_clause { #[doc(hidden)] - pub fn storage_metadata() -> #frame_support::metadata::StorageMetadata { - #frame_support::metadata::StorageMetadata { + pub fn storage_metadata() -> #frame_support::metadata::PalletStorageMetadata { + #frame_support::metadata::PalletStorageMetadata { prefix: < ::PalletInfo as #frame_support::traits::PalletInfo diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index 495cf57cb9cfd..51fd916f1b7d3 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -180,7 +180,7 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre }; let store_metadata = quote!( - #scrate::metadata::StorageMetadata { + #scrate::metadata::PalletStorageMetadata { prefix: #prefix, entries: #scrate::scale_info::prelude::vec![ #entries ], } @@ -195,7 +195,7 @@ pub fn impl_metadata(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStre impl#module_impl #module_struct #where_clause { #[doc(hidden)] - pub fn storage_metadata() -> #scrate::metadata::StorageMetadata { + pub fn storage_metadata() -> #scrate::metadata::PalletStorageMetadata { #store_metadata } } diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 75dfe5e48a006..3ef90b03daef4 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -18,10 +18,9 @@ 
pub use frame_metadata::{ v13::{ PalletErrorMetadata, PalletEventMetadata, ExtrinsicMetadata, FunctionArgumentMetadata, - FunctionMetadata, PalletCallMetadata, PalletMetadata, PalletConstantMetadata, + FunctionMetadata, PalletCallMetadata, PalletMetadata, PalletConstantMetadata, PalletStorageMetadata, RuntimeMetadataLastVersion, SignedExtensionMetadata, - StorageEntryMetadata, StorageEntryModifier, StorageEntryType, StorageHasher, - StorageMetadata, TypeSpec, + StorageEntryMetadata, StorageEntryModifier, StorageEntryType, StorageHasher, TypeSpec, }, RuntimeMetadata, RuntimeMetadataPrefixed, }; From 6f04c1cc9f8f169b75dcb71537971fe39085c034 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 13 May 2021 16:09:34 +0100 Subject: [PATCH 219/503] Rename StorageMetadata -> PalletStorageMetadata --- frame/support/src/lib.rs | 6 +++--- frame/support/src/metadata.rs | 2 +- frame/support/test/tests/construct_runtime.rs | 16 ++++++++-------- frame/support/test/tests/decl_storage.rs | 4 ++-- frame/support/test/tests/instance.rs | 6 +++--- frame/support/test/tests/pallet.rs | 2 +- frame/support/test/tests/pallet_instance.rs | 2 +- 7 files changed, 19 insertions(+), 19 deletions(-) diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 0c1aad9c8c121..9aadd3ee30b94 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -768,7 +768,7 @@ pub mod tests { use super::*; use codec::{Codec, EncodeLike}; use crate::metadata::{ - StorageEntryMetadata, StorageMetadata, StorageEntryType, StorageEntryModifier, + StorageEntryMetadata, PalletStorageMetadata, StorageEntryType, StorageEntryModifier, StorageHasher, }; use sp_std::result; @@ -1087,8 +1087,8 @@ pub mod tests { }); } - fn expected_metadata() -> StorageMetadata { - StorageMetadata { + fn expected_metadata() -> PalletStorageMetadata { + PalletStorageMetadata { prefix: "Test", entries: vec![ StorageEntryMetadata { diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 
3ef90b03daef4..97eab817ebbc6 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -531,7 +531,7 @@ mod tests { PalletMetadata { name: "Module2", storage: Some( - StorageMetadata { + PalletStorageMetadata { prefix: "TestStorage", entries: vec![ StorageEntryMetadata { diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 3dfd488cb0bf5..92ed5ec8c5f9e 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -306,7 +306,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_1", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Instance1Module", entries: vec![], }), @@ -327,7 +327,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module2", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Module", entries: vec![], }), @@ -348,7 +348,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_2", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Instance2Module", entries: vec![], }), @@ -369,7 +369,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_3", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Instance3Module", entries: vec![], }), @@ -408,7 +408,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_6", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Instance6Module", entries: vec![], }), @@ -429,7 +429,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_7", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Instance7Module", entries: vec![], }), @@ -450,7 +450,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_8", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Instance8Module", entries: vec![], }), @@ -471,7 +471,7 @@ fn test_metadata() { }, 
PalletMetadata { name: "Module1_9", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Instance9Module", entries: vec![], }), diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index 3f49a9158fe28..073175b6df2a1 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -89,8 +89,8 @@ mod tests { impl Config for TraitImpl {} - fn expected_metadata() -> StorageMetadata { - StorageMetadata { + fn expected_metadata() -> PalletStorageMetadata { + PalletStorageMetadata { prefix: "TestStorage", entries: vec![ StorageEntryMetadata { diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index 456238f724887..6f008bcc6d517 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -22,7 +22,7 @@ use sp_runtime::{generic, BuildStorage, traits::{BlakeTwo256, Verify}}; use frame_support::{ Parameter, traits::Get, parameter_types, metadata::{ - StorageMetadata, StorageEntryModifier, StorageEntryType, + PalletStorageMetadata, StorageEntryModifier, StorageEntryType, StorageEntryMetadata, StorageHasher, }, StorageValue, StorageMap, StorageDoubleMap, @@ -393,8 +393,8 @@ fn storage_with_instance_basic_operation() { }); } -fn expected_metadata() -> StorageMetadata { - StorageMetadata { +fn expected_metadata() -> PalletStorageMetadata { + PalletStorageMetadata { prefix: "Instance2Module2", entries: vec![ StorageEntryMetadata diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 659234ad4868f..129443c1533c7 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -783,7 +783,7 @@ fn metadata() { PalletMetadata { index: 1, name: "Example", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Example", entries: vec![ StorageEntryMetadata { diff --git a/frame/support/test/tests/pallet_instance.rs 
b/frame/support/test/tests/pallet_instance.rs index 64487ff6ded7b..4f8170f72e67a 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -700,7 +700,7 @@ fn metadata() { let example_pallet_metadata = PalletMetadata { index: 1, name: "Example", - storage: Some(StorageMetadata { + storage: Some(PalletStorageMetadata { prefix: "Example", entries: vec![ StorageEntryMetadata { From 1230851e495efa73c2ac55d591a77cd452bfad7b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 18 May 2021 10:02:41 +0100 Subject: [PATCH 220/503] Update to v14 frame-metadata and adapt nmap metadata --- Cargo.lock | 8 +-- frame/support/Cargo.toml | 2 +- .../procedural/src/storage/metadata.rs | 15 ++---- frame/support/src/metadata.rs | 6 +-- frame/support/src/storage/types/key.rs | 31 +++++------ frame/support/src/storage/types/mod.rs | 8 +-- frame/support/src/storage/types/nmap.rs | 53 +++++++++---------- 7 files changed, 58 insertions(+), 65 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index eeb4c75460c9c..bae336079f6b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1798,8 +1798,8 @@ dependencies = [ [[package]] name = "frame-metadata" -version = "12.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#f4b1727ec933545f3d34f30f4537c0820671c52c" +version = "14.0.0" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#fe50b8a581ae13fa575d53aa2d5dd63a5e5b5321" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", @@ -8178,7 +8178,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#ff42886b35a3c5e960a8b2089bdef5fccd0ee245" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#13e23415b57da3c25a3b4afe8654c649bc11facb" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -8190,7 +8190,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.4.0" 
-source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#ff42886b35a3c5e960a8b2089bdef5fccd0ee245" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#13e23415b57da3c25a3b4afe8654c649bc11facb" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index d021d0f382a36..8c4991ce68f59 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } -frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-substrate", default-features = false, features = ["v13"] } +frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-substrate", default-features = false, features = ["v14"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index dea0290abd89a..ce373cd3481b5 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -59,23 +59,18 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> } }, StorageLineTypeDef::NMap(map) => { - let keys = map.keys - .iter() - .map(|key| clean_type_string("e!(#key).to_string())) - .collect::>(); + let key_tuple = 
&map.to_key_tuple(); let hashers = map.hashers .iter() .map(|hasher| hasher.to_storage_hasher_struct()) .collect::>(); quote!{ #scrate::metadata::StorageEntryType::NMap { - keys: #scrate::metadata::DecodeDifferent::Encode(&[ - #( #keys, )* - ]), - hashers: #scrate::metadata::DecodeDifferent::Encode(&[ + keys: #scrate::scale_info::meta_type::<#key_tuple>(), + hashers: #scrate::scale_info::prelude::vec! [ #( #scrate::metadata::StorageHasher::#hashers, )* - ]), - value: #scrate::metadata::DecodeDifferent::Encode(#value_type), + ], + value: #scrate::scale_info::meta_type::<#value_type>(), } } } diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 97eab817ebbc6..1fc8fb6d9cc97 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -16,7 +16,7 @@ // limitations under the License. pub use frame_metadata::{ - v13::{ + v14::{ PalletErrorMetadata, PalletEventMetadata, ExtrinsicMetadata, FunctionArgumentMetadata, FunctionMetadata, PalletCallMetadata, PalletMetadata, PalletConstantMetadata, PalletStorageMetadata, RuntimeMetadataLastVersion, SignedExtensionMetadata, @@ -573,14 +573,14 @@ mod tests { let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); let expected_metadata = match expected_metadata.1 { - RuntimeMetadata::V13(metadata) => { + RuntimeMetadata::V14(metadata) => { metadata }, _ => panic!("metadata has been bumped, test needs to be updated"), }; let actual_metadata = match TestRuntime::metadata().1 { - RuntimeMetadata::V13(metadata) => { + RuntimeMetadata::V14(metadata) => { metadata }, _ => panic!("metadata has been bumped, test needs to be updated"), diff --git a/frame/support/src/storage/types/key.rs b/frame/support/src/storage/types/key.rs index fb3c69ff20cde..7804d4861dee1 100755 --- a/frame/support/src/storage/types/key.rs +++ b/frame/support/src/storage/types/key.rs @@ -20,6 +20,7 @@ use crate::hash::{ReversibleStorageHasher, StorageHasher}; use 
codec::{Encode, EncodeLike, FullCodec}; use paste::paste; +use scale_info::TypeInfo; use sp_std::prelude::*; /// A type used exclusively by storage maps as their key type. @@ -35,12 +36,12 @@ pub struct Key(core::marker::PhantomData<(Hasher, KeyType)>); /// A trait that contains the current key as an associated type. pub trait KeyGenerator { - type Key: EncodeLike; + type Key: EncodeLike + TypeInfo + 'static; type KArg: Encode; type HashFn: FnOnce(&[u8]) -> Vec; type HArg; - const HASHER_METADATA: &'static [frame_metadata::StorageHasher]; + const HASHER_METADATA: &'static [crate::metadata::StorageHasher]; /// Given a `key` tuple, calculate the final key by encoding each element individuallly and /// hashing them using the corresponding hasher in the `KeyGenerator`. @@ -61,13 +62,13 @@ pub trait KeyGeneratorInner: KeyGenerator { fn final_hash(encoded: &[u8]) -> Vec; } -impl KeyGenerator for Key { +impl KeyGenerator for Key { type Key = K; type KArg = (K,); type HashFn = Box Vec>; type HArg = (Self::HashFn,); - const HASHER_METADATA: &'static [frame_metadata::StorageHasher] = &[H::METADATA]; + const HASHER_METADATA: &'static [crate::metadata::StorageHasher] = &[H::METADATA]; fn final_key + TupleToEncodedIter>(key: KArg) -> Vec { H::hash( @@ -91,7 +92,7 @@ impl KeyGenerator for Key { } } -impl KeyGeneratorInner for Key { +impl KeyGeneratorInner for Key { type Hasher = H; fn final_hash(encoded: &[u8]) -> Vec { @@ -107,7 +108,7 @@ impl KeyGenerator for Tuple { for_tuples!( type HArg = ( #(Tuple::HashFn),* ); ); type HashFn = Box Vec>; - const HASHER_METADATA: &'static [frame_metadata::StorageHasher] = &[ + const HASHER_METADATA: &'static [crate::metadata::StorageHasher] = &[ for_tuples!( #(Tuple::Hasher::METADATA),* ) ]; @@ -203,7 +204,7 @@ pub trait ReversibleKeyGenerator: KeyGenerator { fn decode_final_key(key_material: &[u8]) -> Result<(Self::Key, &[u8]), codec::Error>; } -impl ReversibleKeyGenerator for Key { +impl ReversibleKeyGenerator for Key { type 
ReversibleHasher = H; fn decode_final_key(key_material: &[u8]) -> Result<(Self::Key, &[u8]), codec::Error> { @@ -248,7 +249,7 @@ pub trait HasReversibleKeyPrefix

: ReversibleKeyGenerator + HasKeyPrefix

{ macro_rules! impl_key_prefix_for { (($($keygen:ident),+), ($($prefix:ident),+), ($($suffix:ident),+)) => { paste! { - impl<$($keygen: FullCodec,)+ $( [<$keygen $keygen>]: StorageHasher),+> + impl<$($keygen: FullCodec + TypeInfo + 'static,)+ $( [<$keygen $keygen>]: StorageHasher),+> HasKeyPrefix<($($prefix),+)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -259,7 +260,7 @@ macro_rules! impl_key_prefix_for { } } - impl<$($keygen: FullCodec,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher),+> + impl<$($keygen: FullCodec + TypeInfo + 'static,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher),+> HasReversibleKeyPrefix<($($prefix),+)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -271,7 +272,7 @@ macro_rules! impl_key_prefix_for { }; (($($keygen:ident),+), $prefix:ident, ($($suffix:ident),+)) => { paste! { - impl<$($keygen: FullCodec,)+ $( [<$keygen $keygen>]: StorageHasher),+> + impl<$($keygen: FullCodec + TypeInfo + 'static,)+ $( [<$keygen $keygen>]: StorageHasher),+> HasKeyPrefix<($prefix,)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -282,7 +283,7 @@ macro_rules! impl_key_prefix_for { } } - impl<$($keygen: FullCodec,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher),+> + impl<$($keygen: FullCodec + TypeInfo + 'static,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher),+> HasReversibleKeyPrefix<($prefix,)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -294,7 +295,7 @@ macro_rules! impl_key_prefix_for { }; (($($keygen:ident),+), ($($prefix:ident),+), $suffix:ident) => { paste! { - impl<$($keygen: FullCodec,)+ $( [<$keygen $keygen>]: StorageHasher),+> + impl<$($keygen: FullCodec + TypeInfo + 'static,)+ $( [<$keygen $keygen>]: StorageHasher),+> HasKeyPrefix<($($prefix),+)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -305,7 +306,7 @@ macro_rules! 
impl_key_prefix_for { } } - impl<$($keygen: FullCodec,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher),+> + impl<$($keygen: FullCodec + TypeInfo + 'static,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher),+> HasReversibleKeyPrefix<($($prefix),+)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -317,7 +318,7 @@ macro_rules! impl_key_prefix_for { }; } -impl HasKeyPrefix<(A,)> +impl HasKeyPrefix<(A,)> for (Key, Key) { type Suffix = B; @@ -327,7 +328,7 @@ impl HasKeyPrefi } } -impl +impl HasReversibleKeyPrefix<(A,)> for (Key, Key) { fn decode_partial_key(key_material: &[u8]) -> Result { diff --git a/frame/support/src/storage/types/mod.rs b/frame/support/src/storage/types/mod.rs index 7a2cb02f54148..add5f38c34413 100644 --- a/frame/support/src/storage/types/mod.rs +++ b/frame/support/src/storage/types/mod.rs @@ -28,14 +28,14 @@ mod map; mod nmap; mod value; -pub use double_map::{StorageDoubleMap, StorageDoubleMapMetadata}; +pub use double_map::StorageDoubleMap; pub use key::{ EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, Key, KeyGenerator, ReversibleKeyGenerator, TupleToEncodedIter, }; -pub use map::{StorageMap, StorageMapMetadata}; -pub use nmap::{StorageNMap, StorageNMapMetadata}; -pub use value::{StorageValue, StorageValueMetadata}; +pub use map::StorageMap; +pub use nmap::StorageNMap; +pub use value::StorageValue; /// Trait implementing how the storage optional value is converted into the queried type. /// diff --git a/frame/support/src/storage/types/nmap.rs b/frame/support/src/storage/types/nmap.rs index 1a2b6d4d55dcc..e1e6bae61469a 100755 --- a/frame/support/src/storage/types/nmap.rs +++ b/frame/support/src/storage/types/nmap.rs @@ -19,17 +19,17 @@ //! StoragePrefixedDoubleMap traits and their methods directly. 
use crate::{ + metadata::{StorageEntryModifier, StorageEntryType}, storage::{ types::{ - EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, OnEmptyGetter, - OptionQuery, QueryKindTrait, TupleToEncodedIter, + EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, + OptionQuery, QueryKindTrait, StorageEntryMetadata, TupleToEncodedIter, }, KeyGenerator, PrefixIterator, StorageAppend, StorageDecodeLength, }, traits::{GetDefault, StorageInstance}, }; use codec::{Decode, Encode, EncodeLike, FullCodec}; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; use sp_std::prelude::*; /// A type that allow to store values for an arbitrary number of keys in the form of @@ -345,31 +345,28 @@ where } } -/// Part of storage metadata for a storage n map. -/// -/// NOTE: Generic hashers is supported. -pub trait StorageNMapMetadata { - const MODIFIER: StorageEntryModifier; - const NAME: &'static str; - const DEFAULT: DefaultByteGetter; - const HASHERS: &'static [frame_metadata::StorageHasher]; -} - -impl StorageNMapMetadata - for StorageNMap -where +impl StorageEntryMetadata +for StorageNMap where Prefix: StorageInstance, Key: super::key::KeyGenerator, - Value: FullCodec, + Value: FullCodec + scale_info::TypeInfo + 'static, QueryKind: QueryKindTrait, OnEmpty: crate::traits::Get + 'static, { const MODIFIER: StorageEntryModifier = QueryKind::METADATA; const NAME: &'static str = Prefix::STORAGE_PREFIX; - const DEFAULT: DefaultByteGetter = DefaultByteGetter( - &OnEmptyGetter::(core::marker::PhantomData), - ); - const HASHERS: &'static [frame_metadata::StorageHasher] = Key::HASHER_METADATA; + + fn ty() -> StorageEntryType { + StorageEntryType::NMap { + keys: scale_info::meta_type::(), + hashers: Key::HASHER_METADATA.iter().cloned().collect(), + value: scale_info::meta_type::(), + } + } + + fn default() -> Vec { + OnEmpty::get().encode() + } } #[cfg(test)] @@ -377,7 +374,7 @@ mod test { use super::*; use crate::hash::*; use crate::storage::types::{Key, ValueQuery}; - use 
frame_metadata::StorageEntryModifier; + use crate::metadata::StorageEntryModifier; use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; @@ -539,10 +536,10 @@ mod test { ); assert_eq!(A::NAME, "foo"); assert_eq!( - AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), + AValueQueryWithAnOnEmpty::default(), 98u32.encode() ); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(); assert_eq!(WithLen::decode_len((3,)), None); @@ -725,10 +722,10 @@ mod test { ); assert_eq!(A::NAME, "foo"); assert_eq!( - AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), + AValueQueryWithAnOnEmpty::default(), 98u32.encode() ); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(); assert_eq!(WithLen::decode_len((3, 30)), None); @@ -960,10 +957,10 @@ mod test { ); assert_eq!(A::NAME, "foo"); assert_eq!( - AValueQueryWithAnOnEmpty::DEFAULT.0.default_byte(), + AValueQueryWithAnOnEmpty::default(), 98u32.encode() ); - assert_eq!(A::DEFAULT.0.default_byte(), Option::::None.encode()); + assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(); assert_eq!(WithLen::decode_len((3, 30, 300)), None); From f72a33737e657faa534710b21e0e3d7ba395276d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 18 May 2021 11:23:34 +0100 Subject: [PATCH 221/503] Fix up v14 test compilation errors --- frame/support/test/tests/pallet.rs | 4 ++-- frame/support/test/tests/pallet_compatibility.rs | 2 +- frame/support/test/tests/pallet_compatibility_instance.rs | 2 +- frame/support/test/tests/pallet_instance.rs | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 47ec1e5cdad0e..9c6f10816db44 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1021,14 +1021,14 @@ fn metadata() { let 
expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); let expected_metadata = match expected_metadata.1 { - RuntimeMetadata::V13(metadata) => { + RuntimeMetadata::V14(metadata) => { metadata }, _ => panic!("metadata has been bumped, test needs to be updated"), }; let actual_metadata = match Runtime::metadata().1 { - RuntimeMetadata::V13(metadata) => { + RuntimeMetadata::V14(metadata) => { metadata }, _ => panic!("metadata has been bumped, test needs to be updated"), diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 08e2975b65804..684c5891217e5 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -267,7 +267,7 @@ mod test { fn metadata() { let metadata = Runtime::metadata(); let (pallets, types) = match metadata.1 { - frame_support::metadata::RuntimeMetadata::V13(metadata) => + frame_support::metadata::RuntimeMetadata::V14(metadata) => (metadata.pallets, metadata.types), _ => unreachable!(), }; diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index df011fe2bf1c6..c7e3a8da77d38 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -282,7 +282,7 @@ mod test { fn metadata() { let metadata = Runtime::metadata(); let (pallets, types) = match metadata.1 { - frame_support::metadata::RuntimeMetadata::V13(metadata) => + frame_support::metadata::RuntimeMetadata::V14(metadata) => (metadata.pallets, metadata.types), _ => unreachable!(), }; diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 8a5f37e826982..db8cbfa8579fd 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -888,14 +888,14 @@ fn metadata() { let 
expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); let expected_metadata = match expected_metadata.1 { - RuntimeMetadata::V13(metadata) => { + RuntimeMetadata::V14(metadata) => { metadata }, _ => panic!("metadata has been bumped, test needs to be updated"), }; let actual_metadata = match Runtime::metadata().1 { - RuntimeMetadata::V13(metadata) => { + RuntimeMetadata::V14(metadata) => { metadata }, _ => panic!("metadata has been bumped, test needs to be updated"), From 4c741fbca31a32e20be418af482284c5e81f04f4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 18 May 2021 11:44:53 +0100 Subject: [PATCH 222/503] Use KArg for static NMap metadata, fix pallet tests --- frame/support/src/storage/types/key.rs | 4 +-- frame/support/src/storage/types/nmap.rs | 2 +- frame/support/test/tests/pallet.rs | 33 +++++++++++++++++++++++++ 3 files changed, 36 insertions(+), 3 deletions(-) diff --git a/frame/support/src/storage/types/key.rs b/frame/support/src/storage/types/key.rs index 7804d4861dee1..809047b614d71 100755 --- a/frame/support/src/storage/types/key.rs +++ b/frame/support/src/storage/types/key.rs @@ -37,13 +37,13 @@ pub struct Key(core::marker::PhantomData<(Hasher, KeyType)>); /// A trait that contains the current key as an associated type. pub trait KeyGenerator { type Key: EncodeLike + TypeInfo + 'static; - type KArg: Encode; + type KArg: Encode + TypeInfo + 'static; type HashFn: FnOnce(&[u8]) -> Vec; type HArg; const HASHER_METADATA: &'static [crate::metadata::StorageHasher]; - /// Given a `key` tuple, calculate the final key by encoding each element individuallly and + /// Given a `key` tuple, calculate the final key by encoding each element individually and /// hashing them using the corresponding hasher in the `KeyGenerator`. 
fn final_key + TupleToEncodedIter>(key: KArg) -> Vec; /// Given a `key` tuple, migrate the keys from using the old hashers as given by `hash_fns` diff --git a/frame/support/src/storage/types/nmap.rs b/frame/support/src/storage/types/nmap.rs index e1e6bae61469a..579134880d2f0 100755 --- a/frame/support/src/storage/types/nmap.rs +++ b/frame/support/src/storage/types/nmap.rs @@ -358,7 +358,7 @@ for StorageNMap where fn ty() -> StorageEntryType { StorageEntryType::NMap { - keys: scale_info::meta_type::(), + keys: scale_info::meta_type::(), hashers: Key::HASHER_METADATA.iter().cloned().collect(), value: scale_info::meta_type::(), } diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 9c6f10816db44..e0bd185061d41 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -891,6 +891,28 @@ fn metadata() { default: vec![0], documentation: vec![], }, + StorageEntryMetadata { + name: "NMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::NMap { + keys: scale_info::meta_type::<(u8,)>(), + hashers: vec![StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "NMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::NMap { + keys: scale_info::meta_type::<(u16, u32)>(), + hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + documentation: vec![], + }, #[cfg(feature = "conditional-storage")] StorageEntryMetadata { name: "ConditionalValue", modifier: StorageEntryModifier::Optional, @@ -923,6 +945,17 @@ fn metadata() { default: vec![0], documentation: vec![], }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalNMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::NMap { + keys: scale_info::meta_type::<(u8, u16)>(), + hashers: 
vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + documentation: vec![], + }, ], }), calls: Some(PalletCallMetadata { From d52348aea986b020ce04e131399ea6c6a00c50bf Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 18 May 2021 11:47:42 +0100 Subject: [PATCH 223/503] Fix pallet instance tests --- frame/support/test/tests/pallet_instance.rs | 22 +++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index db8cbfa8579fd..6c7256da9deb1 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -801,6 +801,28 @@ fn metadata() { default: vec![0], documentation: vec![], }, + StorageEntryMetadata { + name: "NMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::NMap { + keys: scale_info::meta_type::<(u8,)>(), + hashers: vec![StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + documentation: vec![], + }, + StorageEntryMetadata { + name: "NMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::NMap { + keys: scale_info::meta_type::<(u16, u32)>(), + hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + documentation: vec![], + }, ], }), calls: Some(PalletCallMetadata { From 6853fb11ea9f2eb987f6a3d49dc7cf262180bb43 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 18 May 2021 11:48:11 +0100 Subject: [PATCH 224/503] Update frame-metadata --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index bae336079f6b0..b68179d954318 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1799,7 +1799,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = 
"git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#fe50b8a581ae13fa575d53aa2d5dd63a5e5b5321" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#385bbbd82a648ed2a60576f87dbf141813f96642" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", From 42c9361f07205602b05d578ba4ad34184c799c87 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 18 May 2021 17:21:54 +0100 Subject: [PATCH 225/503] Fix up errors after merge --- frame/support/src/storage/types/double_map.rs | 1 - frame/support/src/storage/types/key.rs | 14 +- frame/support/src/storage/types/map.rs | 1 - frame/support/src/storage/types/nmap.rs | 1 - frame/support/src/storage/types/value.rs | 1 - frame/support/src/traits/tokens/misc.rs | 2 +- frame/support/test/tests/decl_storage.rs | 166 ++++++++---------- 7 files changed, 83 insertions(+), 103 deletions(-) diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index edf8349505f47..1c28c8d267634 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -28,7 +28,6 @@ use crate::{ }, traits::{GetDefault, StorageInstance, Get, MaxEncodedLen, StorageInfo}, }; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; use sp_arithmetic::traits::SaturatedConversion; use sp_std::prelude::*; diff --git a/frame/support/src/storage/types/key.rs b/frame/support/src/storage/types/key.rs index eb6df5fa5f11d..276f4e0d7c0ef 100755 --- a/frame/support/src/storage/types/key.rs +++ b/frame/support/src/storage/types/key.rs @@ -275,7 +275,7 @@ macro_rules! impl_key_prefix_for { (($($keygen:ident),+), ($($prefix:ident),+), ($($suffix:ident),+)) => { paste! 
{ impl< - $($keygen: FullCodec,)+ + $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ $( [<$keygen $keygen>]: StorageHasher,)+ $( []: EncodeLike<$prefix> ),+ > HasKeyPrefix<($( [] ),+)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -287,7 +287,7 @@ macro_rules! impl_key_prefix_for { } impl< - $($keygen: FullCodec,)+ + $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher,)+ $( []: EncodeLike<$prefix> ),+ > HasReversibleKeyPrefix<($( [] ),+)> for @@ -302,7 +302,7 @@ macro_rules! impl_key_prefix_for { (($($keygen:ident),+), $prefix:ident, ($($suffix:ident),+)) => { paste! { impl< - $($keygen: FullCodec,)+ + $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ $( [<$keygen $keygen>]: StorageHasher,)+ []: EncodeLike<$prefix> > HasKeyPrefix<( [] ,)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -314,7 +314,7 @@ macro_rules! impl_key_prefix_for { } impl< - $($keygen: FullCodec,)+ + $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher,)+ []: EncodeLike<$prefix> > HasReversibleKeyPrefix<( [] ,)> for @@ -329,9 +329,9 @@ macro_rules! impl_key_prefix_for { (($($keygen:ident),+), ($($prefix:ident),+), $suffix:ident) => { paste! { impl< - $($keygen: FullCodec,)+ + $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ $( [<$keygen $keygen>]: StorageHasher,)+ - $( []: EncodeLike<$prefix>),+ + $( []: EncodeLike<$prefix> ),+ > HasKeyPrefix<($( [] ),+)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { type Suffix = $suffix; @@ -341,7 +341,7 @@ macro_rules! 
impl_key_prefix_for { } impl< - $($keygen: FullCodec,)+ + $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher,)+ $( []: EncodeLike<$prefix> ),+ > HasReversibleKeyPrefix<($( [] ),+)> for diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index 66c49859858e8..c31b43cd8f3a7 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -28,7 +28,6 @@ use crate::{ }, traits::{GetDefault, StorageInstance, Get, MaxEncodedLen, StorageInfo}, }; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; use sp_arithmetic::traits::SaturatedConversion; use sp_std::prelude::*; diff --git a/frame/support/src/storage/types/nmap.rs b/frame/support/src/storage/types/nmap.rs index 30b23862f7565..57b2f709c0e8d 100755 --- a/frame/support/src/storage/types/nmap.rs +++ b/frame/support/src/storage/types/nmap.rs @@ -30,7 +30,6 @@ use crate::{ traits::{Get, GetDefault, StorageInstance, StorageInfo, MaxEncodedLen}, }; use codec::{Decode, Encode, EncodeLike, FullCodec}; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; use sp_runtime::SaturatedConversion; use sp_std::prelude::*; diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index 04411481a81f6..e2bf30b1fea64 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -27,7 +27,6 @@ use crate::{ }, traits::{GetDefault, StorageInstance, Get, MaxEncodedLen, StorageInfo}, }; -use frame_metadata::{DefaultByteGetter, StorageEntryModifier}; use sp_arithmetic::traits::SaturatedConversion; use sp_std::prelude::*; diff --git a/frame/support/src/traits/tokens/misc.rs b/frame/support/src/traits/tokens/misc.rs index 8fa4b322b4fee..0bf4432259e89 100644 --- a/frame/support/src/traits/tokens/misc.rs +++ b/frame/support/src/traits/tokens/misc.rs @@ -165,5 +165,5 @@ pub trait AssetId: FullCodec + Copy + 
Default + Eq + PartialEq + Debug {} impl AssetId for T {} /// Simple amalgamation trait to collect together properties for a Balance under one roof. -pub trait Balance: AtLeast32BitUnsigned + FullCodec + Copy + Default + Debug + + scale_info::TypeInfo {} +pub trait Balance: AtLeast32BitUnsigned + FullCodec + Copy + Default + Debug + scale_info::TypeInfo {} impl Balance for T {} diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index 69a2c8ed7295a..5909d6683a03f 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -28,7 +28,7 @@ mod tests { pub trait Config: frame_support_test::Config { type Origin2: codec::Codec + codec::EncodeLike + Default - + frame_support::traits::MaxEncodedLen; + + frame_support::traits::MaxEncodedLen + scale_info::TypeInfo; } frame_support::decl_storage! { @@ -222,8 +222,8 @@ mod tests { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), unused: false, }, default: vec![0], @@ -234,51 +234,47 @@ mod tests { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), unused: false, }, default: vec![0], documentation: vec![], }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("MAPU32MYDEF"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), - unused: false, - }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructMAPU32MYDEF(PhantomData::)) - ), 
- documentation: DecodeDifferent::Encode(&[]), - }, - StorageEntryMetadata { - name: DecodeDifferent::Encode("PUBMAPU32MYDEF"), - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("String"), - unused: false, - }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructPUBMAPU32MYDEF(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), - }, + // StorageEntryMetadata { + // name: "MAPU32MYDEF", + // modifier: StorageEntryModifier::Optional, + // ty: StorageEntryType::Map { + // hasher: StorageHasher::Blake2_128Concat, + // key: scale_info::meta_type::(), + // value: scale_info::meta_type::(), + // unused: false, + // }, + // default: vec![0], + // documentation: vec![], + // }, + // StorageEntryMetadata { + // name: "PUBMAPU32MYDEF", + // modifier: StorageEntryModifier::Optional, + // ty: StorageEntryType::Map { + // hasher: StorageHasher::Blake2_128Concat, + // key: scale_info::meta_type::(), + // value: scale_info::meta_type::(), + // unused: false, + // }, + // default: vec![0], + // documentation: vec![], + // }, StorageEntryMetadata { name: "GETMAPU32", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), unused: false, }, - default: vec![0], + default: vec![0, 0, 0, 0], documentation: vec![], }, StorageEntryMetadata { @@ -286,11 +282,11 @@ mod tests { modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), unused: false, }, - default: vec![0], + default: vec![0, 0, 0, 
0], documentation: vec![], }, StorageEntryMetadata { @@ -298,11 +294,11 @@ mod tests { modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), unused: false, }, - default: vec![12, 109, 97, 112], // "map" + default: vec![109, 97, 112, 100], // "map" documentation: vec![], }, StorageEntryMetadata { @@ -310,60 +306,52 @@ mod tests { modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, - key: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), + key: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), unused: false, }, - default: vec![24, 112, 117, 98, 109, 97, 112], // "pubmap" + default: vec![112, 117, 98, 109], // "pubmap" documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("DOUBLEMAP"), + name: "DOUBLEMAP", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::DoubleMap { hasher: StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("u32"), - key2: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 4]"), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), key2_hasher: StorageHasher::Blake2_128Concat, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructDOUBLEMAP(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("DOUBLEMAP2"), + name: "DOUBLEMAP2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::DoubleMap { hasher: StorageHasher::Blake2_128Concat, - key1: DecodeDifferent::Encode("u32"), - key2: DecodeDifferent::Encode("u32"), - value: DecodeDifferent::Encode("[u8; 
4]"), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + value: scale_info::meta_type::<[u8; 4]>(), key2_hasher: StorageHasher::Blake2_128Concat, }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructDOUBLEMAP2(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE1", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("(::std::option::Option,)")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructCOMPLEXTYPE1(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::>()), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE2", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(DecodeDifferent::Encode("([[(u16, Option<()>); 32]; 12], u32)")), - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructCOMPLEXTYPE2(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), + ty: StorageEntryType::Plain(scale_info::meta_type::<([[(u16, Option<()>); 32]; 12], u32)>()), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE3", @@ -373,34 +361,30 @@ mod tests { documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("NMAP"), + name: "NMAP", modifier: StorageEntryModifier::Default, ty: StorageEntryType::NMap { - keys: DecodeDifferent::Encode(&["u32", "u16"]), - hashers: DecodeDifferent::Encode(&[StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat]), - value: DecodeDifferent::Encode("u8"), + keys: scale_info::meta_type::<(u32, u16)>(), + hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + value: scale_info::meta_type::(), }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructNMAP(PhantomData::)) - ), - 
documentation: DecodeDifferent::Encode(&[]), + default: vec![0], + documentation: vec![], }, StorageEntryMetadata { - name: DecodeDifferent::Encode("NMAP2"), + name: "NMAP2", modifier: StorageEntryModifier::Default, ty: StorageEntryType::NMap { - keys: DecodeDifferent::Encode(&["u32"]), - hashers: DecodeDifferent::Encode(&[StorageHasher::Blake2_128Concat]), - value: DecodeDifferent::Encode("u8"), + keys: scale_info::meta_type::<(u32,)>(), + hashers: vec![StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), }, - default: DecodeDifferent::Encode( - DefaultByteGetter(&__GetByteStructNMAP(PhantomData::)) - ), - documentation: DecodeDifferent::Encode(&[]), - }, - ] - ), - }; + default: vec![0], + documentation: vec![], + }, + ], + } + } #[test] fn storage_info() { From 0f0341101d120b4d1d06a851a93908748b8209b3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 19 May 2021 12:47:02 +0100 Subject: [PATCH 226/503] Use KeyGenerator::Key type for keys for consistency, fix decl_storage tests --- frame/support/src/storage/types/key.rs | 2 +- frame/support/src/storage/types/nmap.rs | 2 +- frame/support/test/tests/decl_storage.rs | 6 +++--- frame/support/test/tests/pallet.rs | 2 +- frame/support/test/tests/pallet_instance.rs | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/frame/support/src/storage/types/key.rs b/frame/support/src/storage/types/key.rs index 276f4e0d7c0ef..b21fafdb7ef5e 100755 --- a/frame/support/src/storage/types/key.rs +++ b/frame/support/src/storage/types/key.rs @@ -37,7 +37,7 @@ pub struct Key(core::marker::PhantomData<(Hasher, KeyType)>); /// A trait that contains the current key as an associated type. 
pub trait KeyGenerator { type Key: EncodeLike + TypeInfo + 'static; - type KArg: Encode + TypeInfo + 'static; + type KArg: Encode; type HashFn: FnOnce(&[u8]) -> Vec; type HArg; diff --git a/frame/support/src/storage/types/nmap.rs b/frame/support/src/storage/types/nmap.rs index 57b2f709c0e8d..45546b37aa987 100755 --- a/frame/support/src/storage/types/nmap.rs +++ b/frame/support/src/storage/types/nmap.rs @@ -371,7 +371,7 @@ where fn ty() -> StorageEntryType { StorageEntryType::NMap { - keys: scale_info::meta_type::(), + keys: scale_info::meta_type::(), hashers: Key::HASHER_METADATA.iter().cloned().collect(), value: scale_info::meta_type::(), } diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index 5909d6683a03f..5b5aa634dac97 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -342,7 +342,7 @@ mod tests { StorageEntryMetadata { name: "COMPLEXTYPE1", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(scale_info::meta_type::>()), + ty: StorageEntryType::Plain(scale_info::meta_type::<(Option,)>()), default: vec![0], documentation: vec![], }, @@ -350,7 +350,7 @@ mod tests { name: "COMPLEXTYPE2", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::<([[(u16, Option<()>); 32]; 12], u32)>()), - default: vec![0], + default: [0u8; 1156].to_vec(), documentation: vec![], }, StorageEntryMetadata { @@ -375,7 +375,7 @@ mod tests { name: "NMAP2", modifier: StorageEntryModifier::Default, ty: StorageEntryType::NMap { - keys: scale_info::meta_type::<(u32,)>(), + keys: scale_info::meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], value: scale_info::meta_type::(), }, diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index a3fd050bd67f3..39a9ecd77d410 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -903,7 +903,7 @@ fn metadata() { name: 
"NMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::NMap { - keys: scale_info::meta_type::<(u8,)>(), + keys: scale_info::meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], value: scale_info::meta_type::(), }, diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 26dd612979220..be9204b039bd6 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -805,7 +805,7 @@ fn metadata() { name: "NMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::NMap { - keys: scale_info::meta_type::<(u8,)>(), + keys: scale_info::meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], value: scale_info::meta_type::(), }, From 25bf1ebe87af73e9cb318f0a9633ed8c9c1cf5b1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 19 May 2021 13:04:08 +0100 Subject: [PATCH 227/503] Use scale_info::StaticTypeInfo instead of TypeInfo + `static --- Cargo.lock | 4 +-- frame/support/src/metadata.rs | 2 +- frame/support/src/storage/types/double_map.rs | 6 ++-- frame/support/src/storage/types/key.rs | 32 +++++++++---------- frame/support/src/storage/types/map.rs | 4 +-- frame/support/src/storage/types/nmap.rs | 2 +- frame/support/src/storage/types/value.rs | 2 +- .../test/tests/pallet_compatibility.rs | 2 +- .../tests/pallet_compatibility_instance.rs | 2 +- frame/support/test/tests/pallet_instance.rs | 2 +- frame/timestamp/src/lib.rs | 2 +- primitives/runtime/src/traits.rs | 4 +-- 12 files changed, 32 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b68179d954318..8db57272f4d07 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8178,7 +8178,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#13e23415b57da3c25a3b4afe8654c649bc11facb" +source = 
"git+https://github.com/paritytech/scale-info?branch=aj-substrate#a8bc702c67f481bd0ed1fbbd4776a3bbcd0d5d03" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -8190,7 +8190,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#13e23415b57da3c25a3b4afe8654c649bc11facb" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#a8bc702c67f481bd0ed1fbbd4776a3bbcd0d5d03" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/frame/support/src/metadata.rs b/frame/support/src/metadata.rs index 1fc8fb6d9cc97..1a22282dc9af1 100644 --- a/frame/support/src/metadata.rs +++ b/frame/support/src/metadata.rs @@ -284,7 +284,7 @@ mod tests { mod system { use super::*; - pub trait Config: scale_info::TypeInfo + 'static { + pub trait Config: scale_info::StaticTypeInfo { type BaseCallFilter; const ASSOCIATED_CONST: u64 = 500; type Origin: Into, Self::Origin>> diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index 1c28c8d267634..62fba8eba299d 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -453,9 +453,9 @@ where Prefix: StorageInstance, Hasher1: crate::hash::StorageHasher, Hasher2: crate::hash::StorageHasher, - Key1: FullCodec + scale_info::TypeInfo + 'static, - Key2: FullCodec + scale_info::TypeInfo + 'static, - Value: FullCodec + scale_info::TypeInfo + 'static, + Key1: FullCodec + scale_info::StaticTypeInfo, + Key2: FullCodec + scale_info::StaticTypeInfo, + Value: FullCodec + scale_info::StaticTypeInfo, QueryKind: QueryKindTrait, OnEmpty: Get + 'static, MaxValues: Get>, diff --git a/frame/support/src/storage/types/key.rs b/frame/support/src/storage/types/key.rs index b21fafdb7ef5e..bd3d1e803265e 100755 --- a/frame/support/src/storage/types/key.rs +++ b/frame/support/src/storage/types/key.rs @@ -20,7 +20,7 @@ use 
crate::{hash::{ReversibleStorageHasher, StorageHasher}, traits::MaxEncodedLen}; use codec::{Encode, EncodeLike, FullCodec}; use paste::paste; -use scale_info::TypeInfo; +use scale_info::StaticTypeInfo; use sp_std::prelude::*; /// A type used exclusively by storage maps as their key type. @@ -36,7 +36,7 @@ pub struct Key(core::marker::PhantomData<(Hasher, KeyType)>); /// A trait that contains the current key as an associated type. pub trait KeyGenerator { - type Key: EncodeLike + TypeInfo + 'static; + type Key: EncodeLike + StaticTypeInfo; type KArg: Encode; type HashFn: FnOnce(&[u8]) -> Vec; type HArg; @@ -67,7 +67,7 @@ pub trait KeyGeneratorInner: KeyGenerator { fn final_hash(encoded: &[u8]) -> Vec; } -impl KeyGenerator for Key { +impl KeyGenerator for Key { type Key = K; type KArg = (K,); type HashFn = Box Vec>; @@ -97,13 +97,13 @@ impl KeyGenerator for Key KeyGeneratorMaxEncodedLen for Key { +impl KeyGeneratorMaxEncodedLen for Key { fn key_max_encoded_len() -> usize { H::max_len::() } } -impl KeyGeneratorInner for Key { +impl KeyGeneratorInner for Key { type Hasher = H; fn final_hash(encoded: &[u8]) -> Vec { @@ -229,7 +229,7 @@ pub trait ReversibleKeyGenerator: KeyGenerator { fn decode_final_key(key_material: &[u8]) -> Result<(Self::Key, &[u8]), codec::Error>; } -impl ReversibleKeyGenerator for Key { +impl ReversibleKeyGenerator for Key { type ReversibleHasher = H; fn decode_final_key(key_material: &[u8]) -> Result<(Self::Key, &[u8]), codec::Error> { @@ -275,7 +275,7 @@ macro_rules! impl_key_prefix_for { (($($keygen:ident),+), ($($prefix:ident),+), ($($suffix:ident),+)) => { paste! { impl< - $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ + $($keygen: FullCodec + StaticTypeInfo,)+ $( [<$keygen $keygen>]: StorageHasher,)+ $( []: EncodeLike<$prefix> ),+ > HasKeyPrefix<($( [] ),+)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -287,7 +287,7 @@ macro_rules! 
impl_key_prefix_for { } impl< - $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ + $($keygen: FullCodec + StaticTypeInfo,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher,)+ $( []: EncodeLike<$prefix> ),+ > HasReversibleKeyPrefix<($( [] ),+)> for @@ -302,7 +302,7 @@ macro_rules! impl_key_prefix_for { (($($keygen:ident),+), $prefix:ident, ($($suffix:ident),+)) => { paste! { impl< - $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ + $($keygen: FullCodec + StaticTypeInfo,)+ $( [<$keygen $keygen>]: StorageHasher,)+ []: EncodeLike<$prefix> > HasKeyPrefix<( [] ,)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -314,7 +314,7 @@ macro_rules! impl_key_prefix_for { } impl< - $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ + $($keygen: FullCodec + StaticTypeInfo,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher,)+ []: EncodeLike<$prefix> > HasReversibleKeyPrefix<( [] ,)> for @@ -329,7 +329,7 @@ macro_rules! impl_key_prefix_for { (($($keygen:ident),+), ($($prefix:ident),+), $suffix:ident) => { paste! { impl< - $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ + $($keygen: FullCodec + StaticTypeInfo,)+ $( [<$keygen $keygen>]: StorageHasher,)+ $( []: EncodeLike<$prefix> ),+ > HasKeyPrefix<($( [] ),+)> for ($(Key<[<$keygen $keygen>], $keygen>),+) { @@ -341,7 +341,7 @@ macro_rules! impl_key_prefix_for { } impl< - $($keygen: FullCodec + $crate::scale_info::TypeInfo + 'static,)+ + $($keygen: FullCodec + StaticTypeInfo,)+ $( [<$keygen $keygen>]: ReversibleStorageHasher,)+ $( []: EncodeLike<$prefix> ),+ > HasReversibleKeyPrefix<($( [] ),+)> for @@ -357,8 +357,8 @@ macro_rules! 
impl_key_prefix_for { impl HasKeyPrefix<(KArg,)> for (Key, Key) where - A: FullCodec + TypeInfo + 'static, - B: FullCodec + TypeInfo + 'static, + A: FullCodec + StaticTypeInfo, + B: FullCodec + StaticTypeInfo, X: StorageHasher, Y: StorageHasher, KArg: EncodeLike, @@ -372,8 +372,8 @@ where impl HasReversibleKeyPrefix<(KArg,)> for (Key, Key) where - A: FullCodec + TypeInfo + 'static, - B: FullCodec + TypeInfo + 'static, + A: FullCodec + StaticTypeInfo, + B: FullCodec + StaticTypeInfo, X: ReversibleStorageHasher, Y: ReversibleStorageHasher, KArg: EncodeLike, diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index c31b43cd8f3a7..ab92108acd961 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -330,8 +330,8 @@ impl StorageEntryMeta for StorageMap where Prefix: StorageInstance, Hasher: crate::hash::StorageHasher, - Key: FullCodec + scale_info::TypeInfo + 'static, - Value: FullCodec + scale_info::TypeInfo + 'static, + Key: FullCodec + scale_info::StaticTypeInfo, + Value: FullCodec + scale_info::StaticTypeInfo, QueryKind: QueryKindTrait, OnEmpty: Get + 'static, MaxValues: Get>, diff --git a/frame/support/src/storage/types/nmap.rs b/frame/support/src/storage/types/nmap.rs index 45546b37aa987..a3cbcea62775c 100755 --- a/frame/support/src/storage/types/nmap.rs +++ b/frame/support/src/storage/types/nmap.rs @@ -361,7 +361,7 @@ impl StorageEntryMetadata where Prefix: StorageInstance, Key: super::key::KeyGenerator, - Value: FullCodec + scale_info::TypeInfo + 'static, + Value: FullCodec + scale_info::StaticTypeInfo, QueryKind: QueryKindTrait, OnEmpty: Get + 'static, MaxValues: Get>, diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index e2bf30b1fea64..9736bfaf5a36c 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -197,7 +197,7 @@ where impl StorageEntryMetadata for StorageValue where 
Prefix: StorageInstance, - Value: FullCodec + scale_info::TypeInfo + 'static, + Value: FullCodec + scale_info::StaticTypeInfo, QueryKind: QueryKindTrait, OnEmpty: crate::traits::Get + 'static, { diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 684c5891217e5..b3f325dbf2ba0 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -100,7 +100,7 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config { type Balance: Parameter + codec::HasCompact + From + Into + Default - + MaybeSerializeDeserialize + SomeAssociation + scale_info::TypeInfo + 'static; + + MaybeSerializeDeserialize + SomeAssociation + scale_info::StaticTypeInfo; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index c7e3a8da77d38..e3fa65e677b43 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -90,7 +90,7 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config { type Balance: Parameter + codec::HasCompact + From + Into + Default - + MaybeSerializeDeserialize + scale_info::TypeInfo + 'static; + + MaybeSerializeDeserialize + scale_info::StaticTypeInfo; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index be9204b039bd6..95a0ba91d832a 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -39,7 +39,7 @@ pub mod pallet { pub trait Config: frame_system::Config { #[pallet::constant] type MyGetParam: Get; - type Balance: Parameter + Default + scale_info::TypeInfo + 'static; + type Balance: Parameter + Default + 
scale_info::StaticTypeInfo; type Event: From> + IsType<::Event>; } diff --git a/frame/timestamp/src/lib.rs b/frame/timestamp/src/lib.rs index 62337e6a7b794..fe76c31504640 100644 --- a/frame/timestamp/src/lib.rs +++ b/frame/timestamp/src/lib.rs @@ -116,7 +116,7 @@ pub mod pallet { pub trait Config: frame_system::Config { /// Type used for expressing timestamp. type Moment: Parameter + Default + AtLeast32Bit - + Scale + Copy + scale_info::TypeInfo + 'static; + + Scale + Copy + scale_info::StaticTypeInfo; /// Something which can be notified when the timestamp is set. Set this to `()` if not needed. type OnTimestampSet: OnTimestampSet; diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 29a41eb9f9ec7..7d83eac31901f 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -233,7 +233,7 @@ impl StaticLookup for AccountIdLookup: Codec + scale_info::TypeInfo + 'static, + crate::MultiAddress: Codec + scale_info::StaticTypeInfo, { type Source = crate::MultiAddress; type Target = AccountId; @@ -740,7 +740,7 @@ impl Dispatchable for () { /// Means by which a transaction may be extended. This type embodies both the data and the logic /// that should be additionally associated with the transaction. It should be plain old data. -pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq + scale_info::TypeInfo + 'static { +pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq + scale_info::StaticTypeInfo { /// Unique identifier of this signed extension. 
/// /// This will be exposed in the metadata to identify the signed extension used From fcd3da2be837d366f463425f2648c8266bdafa5a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 21 May 2021 12:00:37 +0100 Subject: [PATCH 228/503] Fix error after merge --- frame/support/procedural/src/pallet/expand/call.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index bcf08f94b86df..077d6dbde40dd 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -66,7 +66,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { .collect::>() }); - let args_metadata_type = methods.iter().map(|method| { + let args_meta_type = methods.iter().map(|method| { method.args.iter() .map(|(is_compact, _, type_)| { if *is_compact { From a4caa8f69cbf5ccaed0e219e52cb72b214f2fdc4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 7 Jun 2021 11:33:10 +0100 Subject: [PATCH 229/503] Fix frame-metadata dependency --- frame/support/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 9b22cac1b32e8..3e1d797245ad6 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.1.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } -frame-metadata = { version = "13.0.0", default-features = false, path = "../metadata" } +frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-substrate", default-features = false, features = ["v14"] } 
max-encoded-len = { version = "3.0.0", default-features = false, path = "../../max-encoded-len", features = [ "derive" ] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } From 19aba37e9944b18eefe23d8e46445d265464ab1d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 7 Jun 2021 11:33:36 +0100 Subject: [PATCH 230/503] Checkout Cargo.lock from master --- Cargo.lock | 651 ++++++++++++++++++++++++++--------------------------- 1 file changed, 322 insertions(+), 329 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8db57272f4d07..62056dd99b2e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,7 +1,5 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 - [[package]] name = "Inflector" version = "0.11.4" @@ -18,7 +16,16 @@ version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7" dependencies = [ - "gimli", + "gimli 0.23.0", +] + +[[package]] +name = "addr2line" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03345e98af8f3d786b6d9f656ccfa6ac316d954e92bc4841f0bba20789d5fb5a" +dependencies = [ + "gimli 0.24.0", ] [[package]] @@ -342,6 +349,19 @@ version = "4.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" +[[package]] +name = "async-tls" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f23d769dbf1838d5df5156e7b1ad404f4c463d1ac2c6aeb6cd943630f8a8400" +dependencies = [ + "futures-core", + "futures-io", + "rustls 0.19.0", + "webpki 0.21.4", + "webpki-roots", +] + [[package]] name = "async-trait" version = "0.1.48" @@ -417,11 +437,11 @@ version = "0.3.56" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "9d117600f438b1707d4e4ae15d3595657288f8235a0eb593e80ecc98ab34e1bc" dependencies = [ - "addr2line", + "addr2line 0.14.1", "cfg-if 1.0.0", "libc", "miniz_oxide", - "object", + "object 0.23.0", "rustc-demangle", ] @@ -457,35 +477,31 @@ checksum = "6736e2428df2ca2848d846c43e88745121a6654696e349ce0054a420815a7409" [[package]] name = "bincode" -version = "1.3.3" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +checksum = "d175dfa69e619905c4c3cdb7c3c203fa3bdd5d51184e3afdb2742c0280493772" dependencies = [ + "byteorder", "serde", ] [[package]] name = "bindgen" -version = "0.54.0" +version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66c0bb6167449588ff70803f4127f0684f9063097eca5016f37eb52b92c2cf36" +checksum = "fd4865004a46a0aafb2a0a5eb19d3c9fc46ee5f063a6cfc605c69ac9ecf5263d" dependencies = [ "bitflags", "cexpr", - "cfg-if 0.1.10", "clang-sys", - "clap", - "env_logger 0.7.1", "lazy_static", "lazycell", - "log", "peeking_take_while", "proc-macro2", "quote", "regex", "rustc-hash", "shlex", - "which 3.1.1", ] [[package]] @@ -671,9 +687,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" [[package]] name = "bytes" @@ -858,13 +874,13 @@ dependencies = [ [[package]] name = "clang-sys" -version = "0.29.3" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe6837df1d5cba2397b835c8530f51723267e16abbf83892e9e5af4f0e5dd10a" +checksum = "853eda514c284c2287f4bf20ae614f8781f40a81d32ecda6e91449304dfe077c" 
dependencies = [ "glob", "libc", - "libloading", + "libloading 0.7.0", ] [[package]] @@ -956,38 +972,36 @@ checksum = "dcb25d077389e53838a8158c8e99174c5a9d902dee4904320db714f3c653ffba" [[package]] name = "cranelift-bforest" -version = "0.71.0" +version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcee7a5107071484772b89fdf37f0f460b7db75f476e43ea7a684fd942470bcf" +checksum = "c8ca3560686e7c9c7ed7e0fe77469f2410ba5d7781b1acaa9adc8d8deea28e3e" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-codegen" -version = "0.71.0" +version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "654ab96f0f1cab71c0d323618a58360a492da2c341eb2c1f977fc195c664001b" +checksum = "baf9bf1ffffb6ce3d2e5ebc83549bd2436426c99b31cc550d521364cbe35d276" dependencies = [ - "byteorder", "cranelift-bforest", "cranelift-codegen-meta", "cranelift-codegen-shared", "cranelift-entity", - "gimli", + "gimli 0.24.0", "log", "regalloc", "serde", "smallvec 1.6.1", "target-lexicon", - "thiserror", ] [[package]] name = "cranelift-codegen-meta" -version = "0.71.0" +version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65994cfc5be9d5fd10c5fc30bcdddfa50c04bb79c91329287bff846434ff8f14" +checksum = "4cc21936a5a6d07e23849ffe83e5c1f6f50305c074f4b2970ca50c13bf55b821" dependencies = [ "cranelift-codegen-shared", "cranelift-entity", @@ -995,27 +1009,27 @@ dependencies = [ [[package]] name = "cranelift-codegen-shared" -version = "0.71.0" +version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "889d720b688b8b7df5e4903f9b788c3c59396050f5548e516e58ccb7312463ab" +checksum = "ca5b6ffaa87560bebe69a5446449da18090b126037920b0c1c6d5945f72faf6b" dependencies = [ "serde", ] [[package]] name = "cranelift-entity" -version = "0.71.0" +version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1a2e6884a363e42a9ba980193ea8603a4272f8a92bd8bbaf9f57a94dbea0ff96" +checksum = "7d6b4a8bef04f82e4296782646f733c641d09497df2fabf791323fefaa44c64c" dependencies = [ "serde", ] [[package]] name = "cranelift-frontend" -version = "0.71.0" +version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6f41e2f9b57d2c030e249d0958f1cdc2c3cd46accf8c0438b3d1944e9153444" +checksum = "c31b783b351f966fce33e3c03498cb116d16d97a8f9978164a60920bd0d3a99c" dependencies = [ "cranelift-codegen", "log", @@ -1025,9 +1039,9 @@ dependencies = [ [[package]] name = "cranelift-native" -version = "0.71.0" +version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aab70ba7575665375d31cbdea2462916ce58be887834e1b83c860b43b51af637" +checksum = "a77c88d3dd48021ff1e37e978a00098524abd3513444ae252c08d37b310b3d2a" dependencies = [ "cranelift-codegen", "target-lexicon", @@ -1035,9 +1049,9 @@ dependencies = [ [[package]] name = "cranelift-wasm" -version = "0.71.0" +version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fc3d2e70da6439adf97648dcdf81834363154f2907405345b6fbe7ca38918c" +checksum = "edb6d408e2da77cdbbd65466298d44c86ae71c1785d2ab0d8657753cdb4d9d89" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -1422,6 +1436,12 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" +[[package]] +name = "downcast-rs" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650" + [[package]] name = "dyn-clonable" version = "0.9.0" @@ -1652,7 +1672,8 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.0" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2e8d2ab5bbf20b0ed97085a1aedf83cdd313e21d" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6447e2f8178843749e8c8003206def83ec124a7859475395777a28b5338647c" dependencies = [ "either", "futures 0.3.13", @@ -1662,13 +1683,13 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", - "scale-info", ] [[package]] name = "fixed-hash" version = "0.7.0" -source = "git+https://github.com/paritytech/parity-common#61fc474c4f14d2291820282aed081256c2391510" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" dependencies = [ "byteorder", "rand 0.8.3", @@ -1729,7 +1750,6 @@ dependencies = [ "log", "parity-scale-codec", "paste 1.0.4", - "scale-info", "serde", "sp-api", "sp-io", @@ -1768,7 +1788,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1786,7 +1805,6 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -1798,32 +1816,30 @@ dependencies = [ [[package]] name = "frame-metadata" -version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#385bbbd82a648ed2a60576f87dbf141813f96642" +version = "13.0.0" dependencies = [ - "cfg-if 1.0.0", "parity-scale-codec", - "scale-info", "serde", + "sp-core", + "sp-std", ] [[package]] name = "frame-support" version = "3.0.0" dependencies = [ - "assert_matches", "bitflags", "frame-metadata", "frame-support-procedural", "frame-system", "impl-trait-for-tuples", "log", + "max-encoded-len", "once_cell", "parity-scale-codec", "parity-util-mem", "paste 1.0.4", "pretty_assertions 0.6.1", - "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1872,20 +1888,18 @@ dependencies = [ name = "frame-support-test" version = "3.0.0" dependencies = [ + "frame-metadata", "frame-support", "frame-system", "parity-scale-codec", 
"pretty_assertions 0.6.1", "rustversion", - "scale-info", "serde", - "sp-arithmetic", "sp-core", "sp-io", "sp-runtime", "sp-state-machine", "sp-std", - "sp-version", "trybuild", ] @@ -1898,7 +1912,6 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-externalities", @@ -1917,7 +1930,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -1952,7 +1964,7 @@ checksum = "03d47dad3685eceed8488986cad3d5027165ea5edb164331770e2059555f10a5" dependencies = [ "lazy_static", "libc", - "libloading", + "libloading 0.5.2", "winapi 0.3.9", ] @@ -2116,7 +2128,7 @@ checksum = "3a1387e07917c711fb4ee4f48ea0adb04a3c9739e53ef85bf43ae1edc2937a8b" dependencies = [ "futures-io", "rustls 0.19.0", - "webpki", + "webpki 0.21.4", ] [[package]] @@ -2217,9 +2229,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9495705279e7140bf035dde1f6e750c162df8b625267cd52cc44e0b156732c8" +checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -2243,6 +2255,12 @@ name = "gimli" version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce" + +[[package]] +name = "gimli" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4075386626662786ddb0ec9081e7c7eeb1ba31951f447ca780ef9f5d568189" dependencies = [ "fallible-iterator", "indexmap", @@ -2592,7 +2610,7 @@ dependencies = [ "rustls-native-certs", "tokio 0.2.25", "tokio-rustls", - "webpki", + "webpki 0.21.4", ] [[package]] @@ -2657,7 +2675,8 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = 
"git+https://github.com/paritytech/parity-common#61fc474c4f14d2291820282aed081256c2391510" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" dependencies = [ "parity-scale-codec", ] @@ -2671,14 +2690,6 @@ dependencies = [ "serde", ] -[[package]] -name = "impl-serde" -version = "0.3.1" -source = "git+https://github.com/paritytech/parity-common#61fc474c4f14d2291820282aed081256c2391510" -dependencies = [ - "serde", -] - [[package]] name = "impl-trait-for-tuples" version = "0.2.1" @@ -2930,25 +2941,6 @@ dependencies = [ "slab", ] -[[package]] -name = "jsonrpsee-http-client" -version = "0.2.0-alpha.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2737440f37efa10e5ef7beeec43d059d29dc92640978be21fcdcef481a2edb0d" -dependencies = [ - "async-trait", - "fnv", - "hyper 0.13.10", - "hyper-rustls", - "jsonrpsee-types", - "jsonrpsee-utils", - "log", - "serde", - "serde_json", - "thiserror", - "url 2.2.1", -] - [[package]] name = "jsonrpsee-proc-macros" version = "0.2.0-alpha.6" @@ -2979,14 +2971,25 @@ dependencies = [ ] [[package]] -name = "jsonrpsee-utils" +name = "jsonrpsee-ws-client" version = "0.2.0-alpha.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d63cf4d423614e71fd144a8691208539d2b23d8373e069e2fbe023c5eba5e922" +checksum = "d6fdb4390bd25358c62e8b778652a564a1723ba07dca0feb3da439c2253fe59f" dependencies = [ - "futures-util", - "hyper 0.13.10", + "async-std", + "async-tls", + "async-trait", + "fnv", + "futures 0.3.13", "jsonrpsee-types", + "log", + "pin-project 1.0.5", + "serde", + "serde_json", + "soketto", + "thiserror", + "url 2.2.1", + "webpki 0.22.0", ] [[package]] @@ -3116,6 +3119,16 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "libloading" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6f84d96438c15fcd6c3f244c8fce01d1e2b9c6b5623e9c711dc9286d8fc92d6a" +dependencies = [ + "cfg-if 1.0.0", + "winapi 0.3.9", +] + [[package]] name = "libm" version = "0.2.1" @@ -3300,7 +3313,7 @@ dependencies = [ "rand 0.7.3", "sha2 0.9.3", "smallvec 1.6.1", - "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "uint", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -3308,9 +3321,9 @@ dependencies = [ [[package]] name = "libp2p-mdns" -version = "0.30.1" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41e282f974c4bea56db8acca50387f05189406e346318cb30190b0bde662961e" +checksum = "c221897b3fd7f215de7ecfec215c5eba598e5b61c605b5f8b56fe8a4fb507724" dependencies = [ "async-io", "data-encoding", @@ -3558,9 +3571,9 @@ dependencies = [ [[package]] name = "librocksdb-sys" -version = "6.11.4" +version = "6.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb5b56f651c204634b936be2f92dbb42c36867e00ff7fe2405591f3b9fa66f09" +checksum = "5da125e1c0f22c7cae785982115523a0738728498547f415c9054cb17c7e89f9" dependencies = [ "bindgen", "cc", @@ -3729,6 +3742,29 @@ dependencies = [ "rawpointer", ] +[[package]] +name = "max-encoded-len" +version = "3.0.0" +dependencies = [ + "frame-support", + "impl-trait-for-tuples", + "max-encoded-len-derive", + "parity-scale-codec", + "primitive-types", + "rustversion", + "trybuild", +] + +[[package]] +name = "max-encoded-len-derive" +version = "3.0.0" +dependencies = [ + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "maybe-uninit" version = "2.0.0" @@ -4201,7 +4237,6 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "sc-executor", - "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-core", @@ -4239,7 +4274,6 @@ dependencies = [ "frame-system", "parity-scale-codec", "pretty_assertions 0.6.1", - "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4303,6 +4337,7 @@ 
dependencies = [ "frame-try-runtime", "hex-literal", "log", + "max-encoded-len", "node-primitives", "pallet-assets", "pallet-authority-discovery", @@ -4342,11 +4377,12 @@ dependencies = [ "pallet-tips", "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", + "pallet-transaction-storage", "pallet-treasury", + "pallet-uniques", "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info", "sp-api", "sp-authority-discovery", "sp-block-builder", @@ -4426,7 +4462,6 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info", "sp-api", "sp-block-builder", "sp-consensus-aura", @@ -4575,6 +4610,12 @@ name = "object" version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9a7ab5d64814df0fe4a4b5ead45ed6c5f181ee3ff04ba344313a6c80446c5d4" + +[[package]] +name = "object" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a5b3dd1c072ee7963717671d1ca129f1048fda25edea6b752bfc71ac8854170" dependencies = [ "crc32fast", "indexmap", @@ -4638,9 +4679,9 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", + "max-encoded-len", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4655,7 +4696,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4673,7 +4713,6 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", - "scale-info", "sp-application-crypto", "sp-consensus-aura", "sp-core", @@ -4690,7 +4729,6 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "scale-info", "sp-application-crypto", "sp-authority-discovery", "sp-core", @@ -4708,7 +4746,6 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info", "serde", "sp-authorship", "sp-core", @@ -4734,7 +4771,6 
@@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-consensus-vrf", @@ -4754,9 +4790,9 @@ dependencies = [ "frame-support", "frame-system", "log", + "max-encoded-len", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4773,7 +4809,6 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4792,7 +4827,6 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4815,13 +4849,11 @@ dependencies = [ "pallet-randomness-collective-flip", "pallet-timestamp", "parity-scale-codec", - "parity-wasm 0.42.2", "paste 1.0.4", "pretty_assertions 0.7.2", - "pwasm-utils 0.17.1", + "pwasm-utils", "rand 0.8.3", "rand_pcg 0.3.0", - "scale-info", "serde", "smallvec 1.6.1", "sp-core", @@ -4829,7 +4861,7 @@ dependencies = [ "sp-runtime", "sp-sandbox", "sp-std", - "wasmi-validation 0.4.0", + "wasmi-validation", "wat", ] @@ -4839,7 +4871,6 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-runtime", @@ -4880,7 +4911,6 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", - "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4897,7 +4927,6 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -4922,7 +4951,6 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.4", "rand 0.7.3", - "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -4943,7 +4971,6 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4961,7 +4988,6 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-npos-elections", @@ 
-4980,7 +5006,6 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4996,7 +5021,6 @@ dependencies = [ "lite-json", "log", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-keystore", @@ -5011,7 +5035,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -5029,7 +5052,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -5039,7 +5061,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" -version = "3.0.0" +version = "3.1.0" dependencies = [ "finality-grandpa", "frame-benchmarking", @@ -5055,7 +5077,6 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", "sp-application-crypto", "sp-core", "sp-finality-grandpa", @@ -5077,7 +5098,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5095,7 +5115,6 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", - "scale-info", "sp-application-crypto", "sp-core", "sp-io", @@ -5113,7 +5132,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-keyring", @@ -5131,7 +5149,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -5148,7 +5165,6 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5167,7 +5183,6 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5217,7 +5232,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5232,7 +5246,6 @@ dependencies = [ "frame-system", 
"pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5247,7 +5260,6 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5263,7 +5275,6 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -5290,7 +5301,6 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -5306,10 +5316,10 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", + "max-encoded-len", "pallet-balances", "pallet-utility", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5324,7 +5334,6 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", - "scale-info", "serde", "sp-core", "sp-io", @@ -5341,7 +5350,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5357,7 +5365,6 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5373,7 +5380,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5390,7 +5396,6 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec", - "scale-info", "sp-application-crypto", "sp-core", "sp-io", @@ -5416,7 +5421,6 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", - "scale-info", "serde", "sp-core", "sp-io", @@ -5435,7 +5439,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5461,7 +5464,6 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.4", "rand_chacha 0.2.2", - "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5501,7 +5503,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", 
- "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5516,7 +5517,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -5533,7 +5533,6 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -5552,7 +5551,6 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -5569,7 +5567,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5606,6 +5603,26 @@ dependencies = [ "sp-runtime", ] +[[package]] +name = "pallet-transaction-storage" +version = "3.0.0" +dependencies = [ + "frame-benchmarking", + "frame-support", + "frame-support-test", + "frame-system", + "hex-literal", + "pallet-balances", + "parity-scale-codec", + "serde", + "sp-core", + "sp-inherents", + "sp-io", + "sp-runtime", + "sp-std", + "sp-transaction-storage-proof", +] + [[package]] name = "pallet-treasury" version = "3.0.0" @@ -5616,7 +5633,6 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -5625,6 +5641,21 @@ dependencies = [ "sp-storage", ] +[[package]] +name = "pallet-uniques" +version = "3.0.0" +dependencies = [ + "frame-benchmarking", + "frame-support", + "frame-system", + "pallet-balances", + "parity-scale-codec", + "sp-core", + "sp-io", + "sp-runtime", + "sp-std", +] + [[package]] name = "pallet-utility" version = "3.0.0" @@ -5634,7 +5665,6 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5652,7 +5682,6 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5662,9 +5691,9 @@ dependencies = [ [[package]] name = "parity-db" -version = "0.2.3" +version = "0.2.4" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "495197c078e54b8735181aa35c00a327f7f3a3cc00a1ee8c95926dd010f0ec6b" +checksum = "2e337f62db341435f0da05b8f6b97e984ef4ea5800510cd07c2d624688c40b47" dependencies = [ "blake2-rfc", "crc32fast", @@ -5697,9 +5726,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "731f4d179ed52b1c7eeb29baf29c604ea9301b889b23ce93660220a5465d5c6f" +checksum = "e0f518afaa5a47d0d6386229b0a6e01e86427291d643aa4cabb4992219f504f8" dependencies = [ "arrayvec 0.7.0", "bitvec", @@ -5748,12 +5777,13 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#61fc474c4f14d2291820282aed081256c2391510" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" dependencies = [ "cfg-if 1.0.0", "hashbrown", "impl-trait-for-tuples", - "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common)", + "parity-util-mem-derive", "parking_lot 0.11.1", "primitive-types", "smallvec 1.6.1", @@ -5771,16 +5801,6 @@ dependencies = [ "synstructure", ] -[[package]] -name = "parity-util-mem-derive" -version = "0.1.0" -source = "git+https://github.com/paritytech/parity-common#61fc474c4f14d2291820282aed081256c2391510" -dependencies = [ - "proc-macro2", - "syn", - "synstructure", -] - [[package]] name = "parity-wasm" version = "0.32.0" @@ -5790,12 +5810,6 @@ dependencies = [ "byteorder", ] -[[package]] -name = "parity-wasm" -version = "0.41.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddfc878dac00da22f8f61e7af3157988424567ab01d9920b962ef7dcbd7cd865" - [[package]] name = "parity-wasm" version = "0.42.2" @@ -6213,14 +6227,13 @@ dependencies = [ [[package]] name = "primitive-types" version = "0.9.0" -source = 
"git+https://github.com/paritytech/parity-common#61fc474c4f14d2291820282aed081256c2391510" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" dependencies = [ "fixed-hash", "impl-codec", - "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", - "parity-scale-codec", - "scale-info", - "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", + "impl-serde", + "uint", ] [[package]] @@ -6326,7 +6339,7 @@ dependencies = [ "prost", "prost-types", "tempfile", - "which 4.0.2", + "which", ] [[package]] @@ -6363,20 +6376,9 @@ dependencies = [ [[package]] name = "pwasm-utils" -version = "0.14.0" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f53bc2558e8376358ebdc28301546471d67336584f6438ed4b7c7457a055fd7" -dependencies = [ - "byteorder", - "log", - "parity-wasm 0.41.0", -] - -[[package]] -name = "pwasm-utils" -version = "0.17.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78778a25194f953d1766fc8c6a331ed56f070d09a0511267ee2c150cb71ea8c2" +checksum = "a0e517f47d9964362883182404b68d0b6949382c0baa40aa5ffca94f5f1e3481" dependencies = [ "byteorder", "log", @@ -6531,7 +6533,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7" dependencies = [ - "getrandom 0.2.2", + "getrandom 0.2.3", ] [[package]] @@ -6651,7 +6653,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64" dependencies = [ - "getrandom 0.2.2", + "getrandom 0.2.3", "redox_syscall 0.2.5", ] @@ -6732,15 +6734,18 @@ name = "remote-externalities" version = "0.9.0" dependencies = [ "env_logger 0.8.3", - "hex-literal", - "jsonrpsee-http-client", + "frame-support", + "hex", "jsonrpsee-proc-macros", + 
"jsonrpsee-ws-client", "log", + "pallet-elections-phragmen", "parity-scale-codec", + "serde_json", "sp-core", "sp-io", "sp-runtime", - "tokio 0.2.25", + "tokio 1.6.0", ] [[package]] @@ -6764,9 +6769,9 @@ dependencies = [ [[package]] name = "retain_mut" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53552c6c49e1e13f1a203ef0080ab3bbef0beb570a528993e83df057a9d9bba1" +checksum = "e9c17925a9027d298a4603d286befe3f9dc0e8ed02523141914eb628798d6e5b" [[package]] name = "ring" @@ -6852,7 +6857,7 @@ dependencies = [ "log", "ring", "sct", - "webpki", + "webpki 0.21.4", ] [[package]] @@ -6865,7 +6870,7 @@ dependencies = [ "log", "ring", "sct", - "webpki", + "webpki 0.21.4", ] [[package]] @@ -7172,7 +7177,7 @@ dependencies = [ "derive_more", "futures 0.3.13", "futures-timer 3.0.2", - "getrandom 0.2.2", + "getrandom 0.2.3", "log", "parity-scale-codec", "parking_lot 0.11.1", @@ -7414,7 +7419,7 @@ dependencies = [ "libsecp256k1", "log", "parity-scale-codec", - "parity-wasm 0.41.0", + "parity-wasm 0.42.2", "parking_lot 0.11.1", "paste 1.0.4", "sc-executor-common", @@ -7449,8 +7454,7 @@ version = "0.9.0" dependencies = [ "derive_more", "parity-scale-codec", - "parity-wasm 0.41.0", - "pwasm-utils 0.14.0", + "pwasm-utils", "sp-allocator", "sp-core", "sp-maybe-compressed-blob", @@ -7481,8 +7485,7 @@ dependencies = [ "assert_matches", "log", "parity-scale-codec", - "parity-wasm 0.41.0", - "pwasm-utils 0.14.0", + "parity-wasm 0.42.2", "sc-executor-common", "scoped-tls", "sp-allocator", @@ -7538,6 +7541,7 @@ dependencies = [ "substrate-test-runtime-client", "tempfile", "tokio 0.2.25", + "wasm-timer", ] [[package]] @@ -7602,6 +7606,7 @@ version = "0.9.0" dependencies = [ "ansi_term 0.12.1", "futures 0.3.13", + "futures-timer 3.0.2", "log", "parity-util-mem", "sc-client-api", @@ -7609,7 +7614,6 @@ dependencies = [ "sp-blockchain", "sp-runtime", "sp-transaction-pool", - "sp-utils", "wasm-timer", ] @@ -7970,6 +7974,7 @@ 
dependencies = [ "sp-state-machine", "sp-tracing", "sp-transaction-pool", + "sp-transaction-storage-proof", "sp-trie", "sp-utils", "sp-version", @@ -8027,7 +8032,7 @@ dependencies = [ "log", "parity-scale-codec", "parity-util-mem", - "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-util-mem-derive", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -8175,29 +8180,6 @@ dependencies = [ "wasm-timer", ] -[[package]] -name = "scale-info" -version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#a8bc702c67f481bd0ed1fbbd4776a3bbcd0d5d03" -dependencies = [ - "cfg-if 1.0.0", - "derive_more", - "parity-scale-codec", - "scale-info-derive", - "serde", -] - -[[package]] -name = "scale-info-derive" -version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#a8bc702c67f481bd0ed1fbbd4776a3bbcd0d5d03" -dependencies = [ - "proc-macro-crate 1.0.0", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "schannel" version = "0.1.19" @@ -8660,8 +8642,8 @@ dependencies = [ name = "sp-application-crypto" version = "3.0.0" dependencies = [ + "max-encoded-len", "parity-scale-codec", - "scale-info", "serde", "sp-core", "sp-io", @@ -8690,7 +8672,6 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8714,7 +8695,6 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8801,7 +8781,6 @@ version = "0.9.0" dependencies = [ "async-trait", "parity-scale-codec", - "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8819,7 +8798,6 @@ dependencies = [ "async-trait", "merlin", "parity-scale-codec", - "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8850,7 +8828,6 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec", - 
"scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8881,10 +8858,11 @@ dependencies = [ "hash256-std-hasher", "hex", "hex-literal", - "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-serde", "lazy_static", "libsecp256k1", "log", + "max-encoded-len", "merlin", "num-traits", "parity-scale-codec", @@ -8895,7 +8873,6 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", - "scale-info", "schnorrkel", "secrecy", "serde", @@ -8950,7 +8927,6 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec", - "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9040,7 +9016,6 @@ version = "3.0.0" dependencies = [ "parity-scale-codec", "rand 0.7.3", - "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -9071,7 +9046,6 @@ dependencies = [ "honggfuzz", "parity-scale-codec", "rand 0.7.3", - "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -9115,11 +9089,11 @@ dependencies = [ "hash256-std-hasher", "impl-trait-for-tuples", "log", + "max-encoded-len", "parity-scale-codec", "parity-util-mem", "paste 1.0.4", "rand 0.7.3", - "scale-info", "serde", "serde_json", "sp-api", @@ -9232,7 +9206,6 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9245,7 +9218,6 @@ name = "sp-staking" version = "3.0.0" dependencies = [ "parity-scale-codec", - "scale-info", "sp-runtime", "sp-std", ] @@ -9283,7 +9255,7 @@ version = "3.0.0" name = "sp-storage" version = "3.0.0" dependencies = [ - "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-serde", "parity-scale-codec", "ref-cast", "serde", @@ -9364,6 +9336,20 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sp-transaction-storage-proof" +version = "3.0.0" +dependencies = [ + "async-trait", + "log", + "parity-scale-codec", + "sp-core", + "sp-inherents", + "sp-runtime", + "sp-std", + "sp-trie", +] + [[package]] name = "sp-trie" version = "3.0.0" @@ 
-9397,9 +9383,8 @@ dependencies = [ name = "sp-version" version = "3.0.0" dependencies = [ - "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-serde", "parity-scale-codec", - "scale-info", "serde", "sp-runtime", "sp-std", @@ -9555,7 +9540,7 @@ dependencies = [ "futures 0.1.31", "futures 0.3.13", "futures-timer 3.0.2", - "getrandom 0.2.2", + "getrandom 0.2.3", "js-sys", "kvdb-web", "libp2p-wasm-ext", @@ -9690,7 +9675,6 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9845,9 +9829,9 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "target-lexicon" -version = "0.11.2" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "422045212ea98508ae3d28025bc5aaa2bd4a9cdaecd442a08da2ee620ee9ea95" +checksum = "64ae3b39281e4b14b8123bdbaddd472b7dfe215e444181f2f9d2443c2444f834" [[package]] name = "tempfile" @@ -10100,10 +10084,21 @@ dependencies = [ "pin-project-lite 0.1.12", "signal-hook-registry", "slab", - "tokio-macros", + "tokio-macros 0.2.6", "winapi 0.3.9", ] +[[package]] +name = "tokio" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd3076b5c8cc18138b8f8814895c11eb4de37114a5d127bafdc5e55798ceef37" +dependencies = [ + "autocfg", + "pin-project-lite 0.2.6", + "tokio-macros 1.2.0", +] + [[package]] name = "tokio-buf" version = "0.1.1" @@ -10179,6 +10174,17 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-macros" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c49e3df43841dafb86046472506755d8501c5615673955f6aa17181125d13c37" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "tokio-named-pipes" version = "0.1.0" @@ -10220,7 +10226,7 @@ dependencies = [ "futures-core", "rustls 0.18.1", "tokio 0.2.25", - "webpki", + "webpki 0.21.4", ] 
[[package]] @@ -10559,9 +10565,9 @@ dependencies = [ [[package]] name = "trybuild" -version = "1.0.41" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99471a206425fba51842a9186315f32d91c56eadc21ea4c21f847b59cf778f8b" +checksum = "1768998d9a3b179411618e377dbb134c58a88cda284b0aa71c42c40660127d46" dependencies = [ "dissimilar", "glob", @@ -10607,17 +10613,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "uint" -version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#61fc474c4f14d2291820282aed081256c2391510" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - [[package]] name = "unicase" version = "2.6.0" @@ -10950,26 +10945,18 @@ dependencies = [ [[package]] name = "wasmi" -version = "0.6.2" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf617d864d25af3587aa745529f7aaa541066c876d57e050c0d0c85c61c92aff" +checksum = "d2ee05bba3d1d994652079893941a2ef9324d2b58a63c31b40678fb7eddd7a5a" dependencies = [ + "downcast-rs", "errno", "libc", "memory_units", "num-rational", "num-traits", - "parity-wasm 0.41.0", - "wasmi-validation 0.3.0", -] - -[[package]] -name = "wasmi-validation" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea78c597064ba73596099281e2f4cfc019075122a65cdda3205af94f0b264d93" -dependencies = [ - "parity-wasm 0.41.0", + "parity-wasm 0.42.2", + "wasmi-validation", ] [[package]] @@ -10983,15 +10970,15 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.76.0" +version = "0.78.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755a9a4afe3f6cccbbe6d7e965eef44cf260b001f93e547eba84255c1d0187d8" +checksum = "52144d4c78e5cf8b055ceab8e5fa22814ce4315d6002ad32cfd914f37c12fd65" [[package]] name = "wasmtime" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "718cb52a9fdb7ab12471e9b9d051c9adfa6b5c504e0a1fea045e5eabc81eedd9" +checksum = "b310b9d20fcf59385761d1ade7a3ef06aecc380e3d3172035b919eaf7465d9f7" dependencies = [ "anyhow", "backtrace", @@ -10999,9 +10986,11 @@ dependencies = [ "cfg-if 1.0.0", "cpp_demangle", "indexmap", + "lazy_static", "libc", "log", "paste 1.0.4", + "psm", "region", "rustc-demangle", "serde", @@ -11020,9 +11009,9 @@ dependencies = [ [[package]] name = "wasmtime-cache" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f984df56c4adeba91540f9052db9f7a8b3b00cfaac1a023bee50a972f588b0c" +checksum = "d14d500d5c3dc5f5c097158feee123d64b3097f0d836a2a27dff9c761c73c843" dependencies = [ "anyhow", "base64 0.13.0", @@ -11041,28 +11030,29 @@ dependencies = [ [[package]] name = "wasmtime-cranelift" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a05abbf94e03c2c8ee02254b1949320c4d45093de5d9d6ed4d9351d536075c9" +checksum = "c525b39f062eada7db3c1298287b96dcb6e472b9f6b22501300b28d9fa7582f6" dependencies = [ "cranelift-codegen", "cranelift-entity", "cranelift-frontend", "cranelift-wasm", + "target-lexicon", "wasmparser", "wasmtime-environ", ] [[package]] name = "wasmtime-debug" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "382eecd6281c6c1d1f3c904c3c143e671fc1a9573820cbfa777fba45ce2eda9c" +checksum = "c5d2a763e7a6fc734218e0e463196762a4f409c483063d81e0e85f96343b2e0a" dependencies = [ "anyhow", - "gimli", + "gimli 0.24.0", "more-asserts", - "object", + "object 0.24.0", "target-lexicon", "thiserror", "wasmparser", @@ -11071,16 +11061,15 @@ dependencies = [ [[package]] name = "wasmtime-environ" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81011b2b833663d7e0ce34639459a0e301e000fc7331e0298b3a27c78d0cec60" +checksum = 
"f64d0c2d881c31b0d65c1f2695e022d71eb60b9fbdd336aacca28208b58eac90" dependencies = [ - "anyhow", "cfg-if 1.0.0", "cranelift-codegen", "cranelift-entity", "cranelift-wasm", - "gimli", + "gimli 0.24.0", "indexmap", "log", "more-asserts", @@ -11091,9 +11080,9 @@ dependencies = [ [[package]] name = "wasmtime-fiber" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d92da32e31af2e3d828f485f5f24651ed4d3b7f03a46ea6555eae6940d1402cd" +checksum = "a089d44cd7e2465d41a53b840a5b4fca1bf6d1ecfebc970eac9592b34ea5f0b3" dependencies = [ "cc", "libc", @@ -11102,11 +11091,11 @@ dependencies = [ [[package]] name = "wasmtime-jit" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b5f649623859a12d361fe4cc4793de44f7c3ff34c322c5714289787e89650bb" +checksum = "4d4539ea734422b7c868107e2187d7746d8affbcaa71916d72639f53757ad707" dependencies = [ - "addr2line", + "addr2line 0.15.1", "anyhow", "cfg-if 1.0.0", "cranelift-codegen", @@ -11114,10 +11103,10 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "cranelift-wasm", - "gimli", + "gimli 0.24.0", "log", "more-asserts", - "object", + "object 0.24.0", "rayon", "region", "serde", @@ -11135,13 +11124,13 @@ dependencies = [ [[package]] name = "wasmtime-obj" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2e99cd9858f57fd062e9351e07881cedfc8597928385e02a48d9333b9e15a1" +checksum = "8e1a8ff85246d091828e2225af521a6208ed28c997bb5c39eb697366dc2e2f2b" dependencies = [ "anyhow", "more-asserts", - "object", + "object 0.24.0", "target-lexicon", "wasmtime-debug", "wasmtime-environ", @@ -11149,16 +11138,16 @@ dependencies = [ [[package]] name = "wasmtime-profiling" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e46c0a590e49278ba7f79ef217af9db4ecc671b50042c185093e22d73524abb2" 
+checksum = "e24364d522dcd67c897c8fffc42e5bdfc57207bbb6d7eeade0da9d4a7d70105b" dependencies = [ "anyhow", "cfg-if 1.0.0", - "gimli", + "gimli 0.24.0", "lazy_static", "libc", - "object", + "object 0.24.0", "scroll", "serde", "target-lexicon", @@ -11168,10 +11157,11 @@ dependencies = [ [[package]] name = "wasmtime-runtime" -version = "0.24.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1438a09185fc7ca067caf1a80d7e5b398eefd4fb7630d94841448ade60feb3d0" +checksum = "c51e57976e8a19a18a18e002c6eb12e5769554204238e47ff155fda1809ef0f7" dependencies = [ + "anyhow", "backtrace", "cc", "cfg-if 1.0.0", @@ -11179,12 +11169,14 @@ dependencies = [ "lazy_static", "libc", "log", + "mach", "memoffset 0.6.1", "more-asserts", - "psm", + "rand 0.8.3", "region", "thiserror", "wasmtime-environ", + "wasmtime-fiber", "winapi 0.3.9", ] @@ -11199,9 +11191,9 @@ dependencies = [ [[package]] name = "wat" -version = "1.0.36" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b0fa059022c5dabe129f02b429d67086400deb8277f89c975555dacc1dadbcc" +checksum = "8ec280a739b69173e0ffd12c1658507996836ba4e992ed9bc1e5385a0bd72a02" dependencies = [ "wast", ] @@ -11226,13 +11218,23 @@ dependencies = [ "untrusted", ] +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "webpki-roots" version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82015b7e0b8bad8185994674a13a93306bea76cf5a16c5a181382fd3a5ec2376" dependencies = [ - "webpki", + "webpki 0.21.4", ] [[package]] @@ -11244,15 +11246,6 @@ dependencies = [ "cc", ] -[[package]] -name = "which" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d011071ae14a2f6671d0b74080ae0cd8ebf3a6f8c9589a2cd45f23126fe29724" -dependencies = [ - "libc", -] - [[package]] name = "which" version = "4.0.2" From a4a1717c514fe89483639415c78ccbad587b17ce Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 7 Jun 2021 17:54:05 +0100 Subject: [PATCH 231/503] Update bincode for compat with primitive types branch --- Cargo.lock | 181 +++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 154 insertions(+), 27 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 62056dd99b2e0..dd368c12e75b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -477,11 +477,10 @@ checksum = "6736e2428df2ca2848d846c43e88745121a6654696e349ce0054a420815a7409" [[package]] name = "bincode" -version = "1.3.2" +version = "1.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d175dfa69e619905c4c3cdb7c3c203fa3bdd5d51184e3afdb2742c0280493772" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" dependencies = [ - "byteorder", "serde", ] @@ -687,9 +686,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "byteorder" -version = "1.3.4" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" @@ -1672,8 +1671,7 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6447e2f8178843749e8c8003206def83ec124a7859475395777a28b5338647c" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2e8d2ab5bbf20b0ed97085a1aedf83cdd313e21d" dependencies = [ "either", "futures 0.3.13", @@ -1683,13 +1681,13 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", + "scale-info", ] [[package]] name = "fixed-hash" 
version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" dependencies = [ "byteorder", "rand 0.8.3", @@ -1750,6 +1748,7 @@ dependencies = [ "log", "parity-scale-codec", "paste 1.0.4", + "scale-info", "serde", "sp-api", "sp-io", @@ -1788,6 +1787,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1805,6 +1805,7 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -1816,18 +1817,20 @@ dependencies = [ [[package]] name = "frame-metadata" -version = "13.0.0" +version = "14.0.0" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#385bbbd82a648ed2a60576f87dbf141813f96642" dependencies = [ + "cfg-if 1.0.0", "parity-scale-codec", + "scale-info", "serde", - "sp-core", - "sp-std", ] [[package]] name = "frame-support" version = "3.0.0" dependencies = [ + "assert_matches", "bitflags", "frame-metadata", "frame-support-procedural", @@ -1840,6 +1843,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "pretty_assertions 0.6.1", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1888,18 +1892,20 @@ dependencies = [ name = "frame-support-test" version = "3.0.0" dependencies = [ - "frame-metadata", "frame-support", "frame-system", "parity-scale-codec", "pretty_assertions 0.6.1", "rustversion", + "scale-info", "serde", + "sp-arithmetic", "sp-core", "sp-io", "sp-runtime", "sp-state-machine", "sp-std", + "sp-version", "trybuild", ] @@ -1912,6 +1918,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -1930,6 +1937,7 @@ dependencies = [ "frame-support", "frame-system", 
"parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -2675,8 +2683,7 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" +source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" dependencies = [ "parity-scale-codec", ] @@ -2690,6 +2697,14 @@ dependencies = [ "serde", ] +[[package]] +name = "impl-serde" +version = "0.3.1" +source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +dependencies = [ + "serde", +] + [[package]] name = "impl-trait-for-tuples" version = "0.2.1" @@ -3313,7 +3328,7 @@ dependencies = [ "rand 0.7.3", "sha2 0.9.3", "smallvec 1.6.1", - "uint", + "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -4237,6 +4252,7 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "sc-executor", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-core", @@ -4274,6 +4290,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "pretty_assertions 0.6.1", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4383,6 +4400,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", + "scale-info", "sp-api", "sp-authority-discovery", "sp-block-builder", @@ -4462,6 +4480,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", + "scale-info", "sp-api", "sp-block-builder", "sp-consensus-aura", @@ -4682,6 +4701,7 @@ dependencies = [ "max-encoded-len", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4696,6 +4716,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4713,6 +4734,7 @@ dependencies = 
[ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", + "scale-info", "sp-application-crypto", "sp-consensus-aura", "sp-core", @@ -4729,6 +4751,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-authority-discovery", "sp-core", @@ -4746,6 +4769,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", + "scale-info", "serde", "sp-authorship", "sp-core", @@ -4771,6 +4795,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-consensus-vrf", @@ -4793,6 +4818,7 @@ dependencies = [ "max-encoded-len", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4809,6 +4835,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4827,6 +4854,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4854,6 +4882,7 @@ dependencies = [ "pwasm-utils", "rand 0.8.3", "rand_pcg 0.3.0", + "scale-info", "serde", "smallvec 1.6.1", "sp-core", @@ -4871,6 +4900,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-runtime", @@ -4911,6 +4941,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4927,6 +4958,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4951,6 +4983,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.4", "rand 0.7.3", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -4971,6 +5004,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4988,6 +5022,7 
@@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-npos-elections", @@ -5006,6 +5041,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5021,6 +5057,7 @@ dependencies = [ "lite-json", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-keystore", @@ -5035,6 +5072,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5052,6 +5090,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -5077,6 +5116,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-core", "sp-finality-grandpa", @@ -5098,6 +5138,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5115,6 +5156,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-core", "sp-io", @@ -5132,6 +5174,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-keyring", @@ -5149,6 +5192,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5165,6 +5209,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5183,6 +5228,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5232,6 +5278,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5246,6 +5293,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + 
"scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5260,6 +5308,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5275,6 +5324,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5301,6 +5351,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5320,6 +5371,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5334,6 +5386,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", + "scale-info", "serde", "sp-core", "sp-io", @@ -5350,6 +5403,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5365,6 +5419,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5380,6 +5435,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5396,6 +5452,7 @@ dependencies = [ "lazy_static", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-core", "sp-io", @@ -5421,6 +5478,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", + "scale-info", "serde", "sp-core", "sp-io", @@ -5439,6 +5497,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5464,6 +5523,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.4", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5503,6 +5563,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5517,6 +5578,7 @@ dependencies = [ "frame-support", 
"frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5533,6 +5595,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -5551,6 +5614,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5567,6 +5631,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5633,6 +5698,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5665,6 +5731,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5682,6 +5749,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5777,13 +5845,12 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" +source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" dependencies = [ "cfg-if 1.0.0", "hashbrown", "impl-trait-for-tuples", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.11.1", "primitive-types", "smallvec 1.6.1", @@ -5801,6 +5868,16 @@ dependencies = [ "synstructure", ] +[[package]] +name = "parity-util-mem-derive" +version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +dependencies = [ + "proc-macro2", + "syn", + "synstructure", +] + [[package]] name = "parity-wasm" version = "0.32.0" @@ -6227,13 +6304,14 @@ dependencies = [ [[package]] name = "primitive-types" version = "0.9.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" +source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" dependencies = [ "fixed-hash", "impl-codec", - "impl-serde", - "uint", + "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", + "parity-scale-codec", + "scale-info", + "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -8032,7 +8110,7 @@ dependencies = [ "log", "parity-scale-codec", "parity-util-mem", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -8180,6 +8258,29 @@ dependencies = [ "wasm-timer", ] +[[package]] +name = "scale-info" +version = "0.6.0" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#a8bc702c67f481bd0ed1fbbd4776a3bbcd0d5d03" +dependencies = [ + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive", + "serde", +] + +[[package]] +name = "scale-info-derive" +version = "0.4.0" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#a8bc702c67f481bd0ed1fbbd4776a3bbcd0d5d03" +dependencies = [ + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" @@ -8644,6 +8745,7 @@ version = "3.0.0" dependencies = [ "max-encoded-len", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -8672,6 +8774,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8695,6 +8798,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8781,6 +8885,7 @@ version = "0.9.0" dependencies = [ "async-trait", "parity-scale-codec", + "scale-info", 
"sp-api", "sp-application-crypto", "sp-consensus", @@ -8798,6 +8903,7 @@ dependencies = [ "async-trait", "merlin", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8828,6 +8934,7 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8858,7 +8965,7 @@ dependencies = [ "hash256-std-hasher", "hex", "hex-literal", - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static", "libsecp256k1", "log", @@ -8873,6 +8980,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -8927,6 +9035,7 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9016,6 +9125,7 @@ version = "3.0.0" dependencies = [ "parity-scale-codec", "rand 0.7.3", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -9046,6 +9156,7 @@ dependencies = [ "honggfuzz", "parity-scale-codec", "rand 0.7.3", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -9094,6 +9205,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-api", @@ -9206,6 +9318,7 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9218,6 +9331,7 @@ name = "sp-staking" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-runtime", "sp-std", ] @@ -9255,7 +9369,7 @@ version = "3.0.0" name = "sp-storage" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "parity-scale-codec", "ref-cast", "serde", @@ -9383,8 +9497,9 @@ dependencies = [ name = "sp-version" version = "3.0.0" dependencies = [ - "impl-serde", + "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", 
"parity-scale-codec", + "scale-info", "serde", "sp-runtime", "sp-std", @@ -9675,6 +9790,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -10613,6 +10729,17 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "uint" +version = "0.9.0" +source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + [[package]] name = "unicase" version = "2.6.0" From 253ea92ae5133ce9b8ba67a4e5f81c3dba3da7e0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 8 Jun 2021 10:07:34 +0100 Subject: [PATCH 232/503] Add some TypeInfo bounds --- frame/support/procedural/src/key_prefix.rs | 4 ++-- frame/support/src/storage/weak_bounded_vec.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frame/support/procedural/src/key_prefix.rs b/frame/support/procedural/src/key_prefix.rs index 17c310c2bcadc..a2c98b0121ad4 100644 --- a/frame/support/procedural/src/key_prefix.rs +++ b/frame/support/procedural/src/key_prefix.rs @@ -44,7 +44,7 @@ pub fn impl_key_prefix_for_tuples(input: proc_macro::TokenStream) -> Result),* > HasKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { @@ -56,7 +56,7 @@ pub fn impl_key_prefix_for_tuples(input: proc_macro::TokenStream) -> Result),* > HasReversibleKeyPrefix<( #( #kargs, )* )> for ( #( Key<#hashers, #current_tuple>, )* ) { diff --git a/frame/support/src/storage/weak_bounded_vec.rs b/frame/support/src/storage/weak_bounded_vec.rs index ca2271df43419..acc8dc76c80f5 100644 --- a/frame/support/src/storage/weak_bounded_vec.rs +++ b/frame/support/src/storage/weak_bounded_vec.rs @@ -37,7 +37,7 @@ use crate::{ /// /// The length of the vec is not strictly bounded. Decoding a vec with more element that the bound /// is accepted, and some method allow to bypass the restriction with warnings. 
-#[derive(Encode)] +#[derive(Encode, scale_info::TypeInfo)] pub struct WeakBoundedVec(Vec, PhantomData); impl> Decode for WeakBoundedVec { From 30db92e558f82408d2e17c547c712b3303bc612b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 8 Jun 2021 12:44:34 +0100 Subject: [PATCH 233/503] Update new runtime metadata proc macros --- .../src/construct_runtime/expand/event.rs | 12 ----- .../src/construct_runtime/expand/metadata.rs | 53 ++++++------------- 2 files changed, 17 insertions(+), 48 deletions(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/event.rs b/frame/support/procedural/src/construct_runtime/expand/event.rs index c2c905e50ff8d..569baf7791346 100644 --- a/frame/support/procedural/src/construct_runtime/expand/event.rs +++ b/frame/support/procedural/src/construct_runtime/expand/event.rs @@ -27,7 +27,6 @@ pub fn expand_outer_event( ) -> syn::Result { let mut event_variants = TokenStream::new(); let mut event_conversions = TokenStream::new(); - let mut events_metadata = TokenStream::new(); for pallet_decl in pallet_decls { if let Some(pallet_entry) = pallet_decl.find_part("Event") { @@ -55,7 +54,6 @@ pub fn expand_outer_event( event_variants.extend(expand_event_variant(runtime, path, index, instance, generics)); event_conversions.extend(expand_event_conversion(scrate, path, instance, &pallet_event)); - events_metadata.extend(expand_event_metadata(scrate, path, &pallet_event)); } } @@ -134,13 +132,3 @@ fn expand_event_conversion( } } } - -fn expand_event_metadata( - scrate: &TokenStream, - path: &PalletPath, - pallet_event: &TokenStream, -) -> TokenStream { - let mod_name = path.mod_name(); - - quote!{(stringify!(#mod_name), #scrate::event::FnEncode(#pallet_event::metadata)),} -} diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index cbabec73d3a6f..f2742563953a8 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs 
+++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -42,15 +42,15 @@ pub fn expand_runtime_metadata( .map(|(decl, filtered_names)| { let name = &decl.name; let index = &decl.index; - let storage = expand_pallet_metadata_storage(&filtered_names, runtime, scrate, decl); - let calls = expand_pallet_metadata_calls(&filtered_names, runtime, scrate, decl); - let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); - let constants = expand_pallet_metadata_constants(runtime, scrate, decl); - let errors = expand_pallet_metadata_errors(runtime, scrate, decl); + let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); + let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); + let event = expand_pallet_metadata_events(&filtered_names, runtime, decl); + let constants = expand_pallet_metadata_constants(runtime, decl); + let errors = expand_pallet_metadata_errors(runtime, decl); quote!{ #scrate::metadata::ModuleMetadata { - name: #scrate::metadata::DecodeDifferent::Encode(stringify!(#name)), + name: stringify!(#name), index: #index, storage: #storage, calls: #calls, @@ -66,8 +66,9 @@ pub fn expand_runtime_metadata( impl #runtime { pub fn metadata() -> #scrate::metadata::RuntimeMetadataPrefixed { #scrate::metadata::RuntimeMetadataLastVersion { - modules: #scrate::metadata::DecodeDifferent::Encode(&[ #(#modules),* ]), + modules: #scrate::::scale_info::prelude::vec![ #(#modules),* ], extrinsic: #scrate::metadata::ExtrinsicMetadata { + ty: $crate::scale_info::meta_type::<#extrinsic>(), version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, signed_extensions: < < @@ -75,7 +76,10 @@ pub fn expand_runtime_metadata( >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension >::identifier() .into_iter() - .map(#scrate::metadata::DecodeDifferent::Encode) + .map(|(id, ty)| #scrate::metadata::SignedExtensionMetadata { + identifier: id, + ty, + }) .collect(), }, 
}.into() @@ -87,7 +91,6 @@ pub fn expand_runtime_metadata( fn expand_pallet_metadata_storage( filtered_names: &[&'static str], runtime: &Ident, - scrate: &TokenStream, decl: &Pallet, ) -> TokenStream { if filtered_names.contains(&"Storage") { @@ -95,11 +98,7 @@ fn expand_pallet_metadata_storage( let path = &decl.pallet; quote!{ - Some(#scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode( - #path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata - ) - )) + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) } } else { quote!(None) @@ -109,7 +108,6 @@ fn expand_pallet_metadata_storage( fn expand_pallet_metadata_calls( filtered_names: &[&'static str], runtime: &Ident, - scrate: &TokenStream, decl: &Pallet, ) -> TokenStream { if filtered_names.contains(&"Call") { @@ -117,11 +115,7 @@ fn expand_pallet_metadata_calls( let path = &decl.pallet; quote!{ - Some(#scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode( - #path::Pallet::<#runtime #(, #path::#instance)*>::call_functions - ) - )) + Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) } } else { quote!(None) @@ -131,7 +125,6 @@ fn expand_pallet_metadata_calls( fn expand_pallet_metadata_events( filtered_names: &[&'static str], runtime: &Ident, - scrate: &TokenStream, decl: &Pallet, ) -> TokenStream { if filtered_names.contains(&"Event") { @@ -146,9 +139,7 @@ fn expand_pallet_metadata_events( }; quote!{ - Some(#scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode(#pallet_event::metadata) - )) + Some(#pallet_event::metadata()) } } else { quote!(None) @@ -157,34 +148,24 @@ fn expand_pallet_metadata_events( fn expand_pallet_metadata_constants( runtime: &Ident, - scrate: &TokenStream, decl: &Pallet, ) -> TokenStream { let path = &decl.pallet; let instance = decl.instance.as_ref().into_iter(); quote!{ - #scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode( - #path::Pallet::<#runtime #(, 
#path::#instance)*>::module_constants_metadata - ) - ) + #path::Pallet::<#runtime #(, #path::#instance)*>::module_constants_metadata() } } fn expand_pallet_metadata_errors( runtime: &Ident, - scrate: &TokenStream, decl: &Pallet, ) -> TokenStream { let path = &decl.pallet; let instance = decl.instance.as_ref().into_iter(); quote!{ - #scrate::metadata::DecodeDifferent::Encode( - #scrate::metadata::FnEncode( - <#path::Pallet::<#runtime #(, #path::#instance)*> as #scrate::metadata::ModuleErrorMetadata>::metadata - ) - ) + #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() } } From ad9c4eddc27777f51f33aa49357ba1b02c820b0d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 8 Jun 2021 13:16:47 +0100 Subject: [PATCH 234/503] Update to latest frame-metadata --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 1ff401977e706..284a9e98150c5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1818,7 +1818,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#385bbbd82a648ed2a60576f87dbf141813f96642" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#f14f5258c40f6b887a9abfb324e1ba5849227293" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", From 4a3e1e070c2dfee58ccddb893c99e1c060aee4e4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 8 Jun 2021 13:17:14 +0100 Subject: [PATCH 235/503] Remove pallet metadata attribute --- frame/multisig/src/lib.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/frame/multisig/src/lib.rs b/frame/multisig/src/lib.rs index 79e9b888224ae..774b11ec2392a 100644 --- a/frame/multisig/src/lib.rs +++ b/frame/multisig/src/lib.rs @@ -197,11 +197,6 @@ pub mod pallet{ #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - T::AccountId = "AccountId", - T::BlockNumber = "BlockNumber", - Timepoint = 
"Timepoint" - )] pub enum Event { /// A new multisig operation has begun. \[approving, multisig, call_hash\] NewMultisig(T::AccountId, T::AccountId, CallHash), From 71a447107c65f4d892d4403184f2b6d675a99562 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Jun 2021 11:49:56 +0100 Subject: [PATCH 236/503] Allow additional constant traits Copied from https://github.com/paritytech/substrate/pull/9050 --- .../procedural/src/pallet/parse/config.rs | 54 +++++++++++++------ 1 file changed, 37 insertions(+), 17 deletions(-) diff --git a/frame/support/procedural/src/pallet/parse/config.rs b/frame/support/procedural/src/pallet/parse/config.rs index 79d4680752b90..7a00dc3a15c2a 100644 --- a/frame/support/procedural/src/pallet/parse/config.rs +++ b/frame/support/procedural/src/pallet/parse/config.rs @@ -15,6 +15,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +use core::convert::TryFrom; use super::helper; use syn::spanned::Spanned; use quote::ToTokens; @@ -25,7 +26,6 @@ mod keyword { syn::custom_keyword!(From); syn::custom_keyword!(T); syn::custom_keyword!(I); - syn::custom_keyword!(Get); syn::custom_keyword!(config); syn::custom_keyword!(IsType); syn::custom_keyword!(Event); @@ -62,22 +62,42 @@ pub struct ConstMetadataDef { pub doc: Vec, } -impl syn::parse::Parse for ConstMetadataDef { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - let doc = helper::get_doc_literals(&syn::Attribute::parse_outer(input)?); - input.parse::()?; - let ident = input.parse::()?; - input.parse::()?; - input.parse::()?; - input.parse::()?; - let mut type_ = input.parse::()?; - type_ = syn::parse2::(replace_self_by_t(type_.to_token_stream())) - .expect("Internal error: replacing `Self` by `T` should result in valid type"); - input.parse::]>()?; - input.parse::()?; +{ +type Error = syn::Error; + +fn try_from(trait_ty: &syn::TraitItemType) -> Result { + let doc = helper::get_doc_literals(&trait_ty.attrs); + let ident = 
trait_ty.ident.clone(); + let bound = trait_ty.bounds + .iter() + .find_map(|b| + if let syn::TypeParamBound::Trait(tb) = b { + tb.path.segments + .last() + .and_then(|s| if s.ident == "Get" { Some(s) } else { None } ) + } else { + None + } + ) + .ok_or_else(|| Error::new(trait_ty.span(), "`Get` trait bound not found"))?; + let type_arg = if let syn::PathArguments::AngleBracketed (ref ab) = bound.arguments { + if ab.args.len() == 1 { + if let syn::GenericArgument::Type(ref ty) = ab.args[0] { + Ok(ty) + } else { + Err(Error::new(ab.args[0].span(), "Expected a type argument")) + } + } else { + Err(Error::new(bound.span(), "Expected a single type argument")) + } + } else { + Err(Error::new(bound.span(), "Expected trait generic args")) + }?; + let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) + .expect("Internal error: replacing `Self` by `T` should result in valid type"); - Ok(Self { ident, type_, doc }) - } + Ok(Self { ident, type_, doc }) +} } /// Parse for `#[pallet::disable_frame_system_supertrait_check]` @@ -323,7 +343,7 @@ impl ConfigDef { if type_attrs_const.len() == 1 { match trait_item { syn::TraitItem::Type(type_) => { - let constant = syn::parse2::(type_.to_token_stream()) + let constant = ConstMetadataDef::try_from(type_.clone()) .map_err(|e| { let error_msg = "Invalid usage of `#[pallet::constant]`, syntax \ must be `type $SomeIdent: Get<$SomeType>;`"; From 96fbea911f539ffc19f6ed8820ab49b318495303 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Jun 2021 11:50:23 +0100 Subject: [PATCH 237/503] Add some TypeInfo bounds --- frame/balances/src/lib.rs | 4 ++-- frame/proxy/src/lib.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 61703ab25bf50..55354b8dba36e 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -214,10 +214,10 @@ pub mod pallet { /// The maximum number of locks that should exist on an account. 
/// Not strictly enforced, but used for weight estimation. - type MaxLocks: Get; + type MaxLocks: Get + scale_info::TypeInfo; /// The maximum number of named reserves that can exist on an account. - type MaxReserves: Get; + type MaxReserves: Get + scale_info::TypeInfo; /// The id type for named reserves. type ReserveIdentifier: Parameter + Member + MaxEncodedLen + Ord + Copy; diff --git a/frame/proxy/src/lib.rs b/frame/proxy/src/lib.rs index 8fea78099a5a3..da3b532a69602 100644 --- a/frame/proxy/src/lib.rs +++ b/frame/proxy/src/lib.rs @@ -136,14 +136,14 @@ pub mod pallet { /// The maximum amount of proxies allowed for a single account. #[pallet::constant] - type MaxProxies: Get; + type MaxProxies: Get + scale_info::TypeInfo; /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; /// The maximum amount of time-delayed announcements that are allowed to be pending. #[pallet::constant] - type MaxPending: Get; + type MaxPending: Get + scale_info::TypeInfo; /// The type of hash used for hashing the call. 
type CallHasher: Hash; From 3c5a6f81491615aeb840f601a2c1952ff60ab47a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Jun 2021 16:56:30 +0100 Subject: [PATCH 238/503] Copy from https://github.com/paritytech/substrate/pull/9050 --- .../procedural/src/pallet/parse/config.rs | 78 +++++++++---------- 1 file changed, 36 insertions(+), 42 deletions(-) diff --git a/frame/support/procedural/src/pallet/parse/config.rs b/frame/support/procedural/src/pallet/parse/config.rs index 7a00dc3a15c2a..802b0ae323b49 100644 --- a/frame/support/procedural/src/pallet/parse/config.rs +++ b/frame/support/procedural/src/pallet/parse/config.rs @@ -62,42 +62,44 @@ pub struct ConstMetadataDef { pub doc: Vec, } -{ -type Error = syn::Error; - -fn try_from(trait_ty: &syn::TraitItemType) -> Result { - let doc = helper::get_doc_literals(&trait_ty.attrs); - let ident = trait_ty.ident.clone(); - let bound = trait_ty.bounds - .iter() - .find_map(|b| - if let syn::TypeParamBound::Trait(tb) = b { - tb.path.segments - .last() - .and_then(|s| if s.ident == "Get" { Some(s) } else { None } ) - } else { - None - } - ) - .ok_or_else(|| Error::new(trait_ty.span(), "`Get` trait bound not found"))?; - let type_arg = if let syn::PathArguments::AngleBracketed (ref ab) = bound.arguments { - if ab.args.len() == 1 { - if let syn::GenericArgument::Type(ref ty) = ab.args[0] { - Ok(ty) +impl TryFrom<&syn::TraitItemType> for ConstMetadataDef { + type Error = syn::Error; + + fn try_from(trait_ty: &syn::TraitItemType) -> Result { + let err = |span, msg| + syn::Error::new(span, format!("Invalid usage of `#[pallet::constant]`: {}", msg)); + let doc = helper::get_doc_literals(&trait_ty.attrs); + let ident = trait_ty.ident.clone(); + let bound = trait_ty.bounds + .iter() + .find_map(|b| + if let syn::TypeParamBound::Trait(tb) = b { + tb.path.segments + .last() + .and_then(|s| if s.ident == "Get" { Some(s) } else { None } ) + } else { + None + } + ) + .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; 
+ let type_arg = if let syn::PathArguments::AngleBracketed (ref ab) = bound.arguments { + if ab.args.len() == 1 { + if let syn::GenericArgument::Type(ref ty) = ab.args[0] { + Ok(ty) + } else { + Err(err(ab.args[0].span(), "Expected a type argument")) + } } else { - Err(Error::new(ab.args[0].span(), "Expected a type argument")) + Err(err(bound.span(), "Expected a single type argument")) } } else { - Err(Error::new(bound.span(), "Expected a single type argument")) - } - } else { - Err(Error::new(bound.span(), "Expected trait generic args")) - }?; - let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) - .expect("Internal error: replacing `Self` by `T` should result in valid type"); + Err(err(bound.span(), "Expected trait generic args")) + }?; + let type_ = syn::parse2::(replace_self_by_t(type_arg.to_token_stream())) + .expect("Internal error: replacing `Self` by `T` should result in valid type"); - Ok(Self { ident, type_, doc }) -} + Ok(Self { ident, type_, doc }) + } } /// Parse for `#[pallet::disable_frame_system_supertrait_check]` @@ -342,16 +344,8 @@ impl ConfigDef { if type_attrs_const.len() == 1 { match trait_item { - syn::TraitItem::Type(type_) => { - let constant = ConstMetadataDef::try_from(type_.clone()) - .map_err(|e| { - let error_msg = "Invalid usage of `#[pallet::constant]`, syntax \ - must be `type $SomeIdent: Get<$SomeType>;`"; - let mut err = syn::Error::new(type_.span(), error_msg); - err.combine(e); - err - })?; - + syn::TraitItem::Type(ref type_) => { + let constant = ConstMetadataDef::try_from(type_)?; consts_metadata.push(constant); }, _ => { From dee3367ce03c56fdd6fb5298f184f9261a297765 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 9 Jun 2021 17:04:55 +0100 Subject: [PATCH 239/503] Some fixes to construct_runtime metadata --- .../procedural/src/construct_runtime/expand/metadata.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs 
b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index f2742563953a8..84fdf8ddadc13 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -26,7 +26,7 @@ pub fn expand_runtime_metadata( scrate: &TokenStream, extrinsic: &TypePath, ) -> TokenStream { - let modules = pallet_declarations + let pallets = pallet_declarations .iter() .filter_map(|pallet_declaration| { pallet_declaration.find_part("Pallet").map(|_| { @@ -66,9 +66,9 @@ pub fn expand_runtime_metadata( impl #runtime { pub fn metadata() -> #scrate::metadata::RuntimeMetadataPrefixed { #scrate::metadata::RuntimeMetadataLastVersion { - modules: #scrate::::scale_info::prelude::vec![ #(#modules),* ], + pallets: #scrate::::scale_info::prelude::vec![ #(#pallets),* ], extrinsic: #scrate::metadata::ExtrinsicMetadata { - ty: $crate::scale_info::meta_type::<#extrinsic>(), + ty: #scrate::scale_info::meta_type::<#extrinsic>(), version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, signed_extensions: < < From 5bd0c5741d4f02a6bb1610deb38a5d9c9413b274 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Jun 2021 15:26:30 +0100 Subject: [PATCH 240/503] Fix metadata construction --- .../procedural/src/construct_runtime/expand/metadata.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index 929454654d869..828aba59c3588 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -65,9 +65,9 @@ pub fn expand_runtime_metadata( quote!{ impl #runtime { pub fn metadata() -> #scrate::metadata::RuntimeMetadataPrefixed { - #scrate::metadata::RuntimeMetadataLastVersion { - pallets: #scrate::::scale_info::prelude::vec![ #(#pallets),* ], - 
extrinsic: #scrate::metadata::ExtrinsicMetadata { + #scrate::metadata::RuntimeMetadataLastVersion::new( + #scrate::::scale_info::prelude::vec![ #(#pallets),* ], + #scrate::metadata::ExtrinsicMetadata { ty: #scrate::scale_info::meta_type::<#extrinsic>(), version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, signed_extensions: < @@ -82,7 +82,7 @@ pub fn expand_runtime_metadata( }) .collect(), }, - }.into() + ).into() } } } From 2beff006bff94fcc439fb45a5dd5947fde5f5e3d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Jun 2021 16:06:59 +0100 Subject: [PATCH 241/503] Fix error in metadata construction --- .../procedural/src/construct_runtime/expand/metadata.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index 828aba59c3588..28b1c9c70f354 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -49,7 +49,7 @@ pub fn expand_runtime_metadata( let errors = expand_pallet_metadata_errors(runtime, decl); quote!{ - #scrate::metadata::ModuleMetadata { + #scrate::metadata::PalletMetadata { name: stringify!(#name), index: #index, storage: #storage, @@ -66,7 +66,7 @@ pub fn expand_runtime_metadata( impl #runtime { pub fn metadata() -> #scrate::metadata::RuntimeMetadataPrefixed { #scrate::metadata::RuntimeMetadataLastVersion::new( - #scrate::::scale_info::prelude::vec![ #(#pallets),* ], + #scrate::scale_info::prelude::vec![ #(#pallets),* ], #scrate::metadata::ExtrinsicMetadata { ty: #scrate::scale_info::meta_type::<#extrinsic>(), version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, From bf8d874fdfc57d173c8f4e7055a553636d15cbb3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Jun 2021 16:10:28 +0100 Subject: [PATCH 242/503] Derive TypeInfo for outer Event 
--- frame/support/procedural/src/construct_runtime/expand/event.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/support/procedural/src/construct_runtime/expand/event.rs b/frame/support/procedural/src/construct_runtime/expand/event.rs index afedb3ed92508..4dc985557b980 100644 --- a/frame/support/procedural/src/construct_runtime/expand/event.rs +++ b/frame/support/procedural/src/construct_runtime/expand/event.rs @@ -62,6 +62,7 @@ pub fn expand_outer_event( Clone, PartialEq, Eq, #scrate::codec::Encode, #scrate::codec::Decode, + #scrate::scale_info::TypeInfo, #scrate::RuntimeDebug, )] #[allow(non_camel_case_types)] From 37423ab01cf565627f8e3a2d485dc72bcfb22af1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Jun 2021 16:22:49 +0100 Subject: [PATCH 243/503] Fix metadata errors field rename --- .../support/procedural/src/construct_runtime/expand/metadata.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index 28b1c9c70f354..dbf3d8df3dab2 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -56,7 +56,7 @@ pub fn expand_runtime_metadata( calls: #calls, event: #event, constants: #constants, - errors: #errors, + error: #errors, } } }) From 23231d12ccc186807c3f913240e04116313f2e54 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Jun 2021 16:23:06 +0100 Subject: [PATCH 244/503] Remove metadata attributes --- frame/democracy/src/lib.rs | 7 ------- frame/scheduler/src/lib.rs | 1 - frame/uniques/src/lib.rs | 5 ----- 3 files changed, 13 deletions(-) diff --git a/frame/democracy/src/lib.rs b/frame/democracy/src/lib.rs index 70b943bf00d54..7785ebb438eb6 100644 --- a/frame/democracy/src/lib.rs +++ b/frame/democracy/src/lib.rs @@ -488,13 +488,6 @@ pub mod pallet { #[pallet::event] 
#[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - T::AccountId = "AccountId", - Vec = "Vec", - BalanceOf = "Balance", - T::BlockNumber = "BlockNumber", - T::Hash = "Hash", - )] pub enum Event { /// A motion has been proposed by a public account. \[proposal_index, deposit\] Proposed(PropIndex, BalanceOf), diff --git a/frame/scheduler/src/lib.rs b/frame/scheduler/src/lib.rs index 7a5a6f827d2d1..0e2444d37acaa 100644 --- a/frame/scheduler/src/lib.rs +++ b/frame/scheduler/src/lib.rs @@ -185,7 +185,6 @@ pub mod pallet { /// Events type. #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::BlockNumber = "BlockNumber", TaskAddress = "TaskAddress")] pub enum Event { /// Scheduled some task. \[when, index\] Scheduled(T::BlockNumber, u32), diff --git a/frame/uniques/src/lib.rs b/frame/uniques/src/lib.rs index f4a0228de4a89..73952bf831f84 100644 --- a/frame/uniques/src/lib.rs +++ b/frame/uniques/src/lib.rs @@ -179,11 +179,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - T::AccountId = "AccountId", - T::ClassId = "ClassId", - T::InstanceId = "InstanceId", - )] pub enum Event, I: 'static = ()> { /// An asset class was created. 
\[ class, creator, owner \] Created(T::ClassId, T::AccountId, T::AccountId), From b5b719648b23de42d92f885006c6cdc790ca7696 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Jun 2021 16:38:38 +0100 Subject: [PATCH 245/503] Restore Event metadata fn --- frame/support/procedural/src/pallet/expand/event.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index c77ec937bd432..9dd2dd200e6d5 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -121,5 +121,15 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { fn from(_: #event_ident<#event_use_gen>) {} } + + impl<#event_impl_gen> #event_ident<#event_use_gen> #event_where_clause { + #[allow(dead_code)] + #[doc(hidden)] + pub fn metadata() -> #frame_support::metadata::PalletEventMetadata { + #frame_support::metadata::PalletEventMetadata { + ty: #frame_support::scale_info::meta_type::() + } + } + } ) } From 265f78832435b85a9ba4c0c8767dfa74b13aeca4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Jun 2021 17:17:52 +0100 Subject: [PATCH 246/503] Update scale_info and skip Event type params --- Cargo.lock | 4 ++-- frame/support/procedural/src/pallet/expand/event.rs | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2a09bfc7fef8d..08a64ba1925b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8319,7 +8319,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#a8bc702c67f481bd0ed1fbbd4776a3bbcd0d5d03" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#62213f82460fd1c7ee245e5819c65a09ae0ea16f" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -8331,7 +8331,7 @@ 
dependencies = [ [[package]] name = "scale-info-derive" version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#a8bc702c67f481bd0ed1fbbd4776a3bbcd0d5d03" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#62213f82460fd1c7ee245e5819c65a09ae0ea16f" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 9dd2dd200e6d5..979a06818daa3 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -88,6 +88,9 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { )] )); + // skip requirement for type params to implement `TypeInfo` + event_item.attrs.push(syn::parse_quote!( #[scale_info(skip_type_params(#event_use_gen))] )); + let deposit_event = if let Some((fn_vis, fn_span)) = &event.deposit_event { let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); let trait_use_gen = &def.trait_use_generics(event.attr_span); From 909b3ce54fecc9bf0bab36efd7a00dcfe938d421 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Jun 2021 17:24:13 +0100 Subject: [PATCH 247/503] Skip runtime and instance TypeInfo requirements --- frame/support/procedural/src/pallet/expand/call.rs | 3 ++- .../procedural/src/pallet/expand/pallet_struct.rs | 5 ++++- frame/support/procedural/src/pallet/parse/mod.rs | 14 -------------- 3 files changed, 6 insertions(+), 16 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 077d6dbde40dd..be3717d83be1e 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -34,7 +34,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { }; let frame_support = &def.frame_support; let frame_system = &def.frame_system; - let type_impl_gen = 
&def.type_impl_scale_info_bounded_generics(span); + let type_impl_gen = &def.type_impl_generics(span); let type_decl_bounded_gen = &def.type_decl_bounded_generics(span); let type_use_gen = &def.type_use_generics(span); let call_ident = syn::Ident::new("Call", span); @@ -100,6 +100,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { )] #[codec(encode_bound())] #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_gen))] #[allow(non_camel_case_types)] pub enum #call_ident<#type_decl_bounded_gen> #where_clause { #[doc(hidden)] diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index 779d89f8feb61..c6bd3a74705a0 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -27,7 +27,7 @@ use crate::pallet::{Def, expand::merge_where_clauses, parse::helper::get_doc_lit pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { let frame_support = &def.frame_support; let frame_system = &def.frame_system; - let type_impl_gen = &def.type_impl_scale_info_bounded_generics(def.pallet_struct.attr_span); + let type_impl_gen = &def.type_impl_generics(def.pallet_struct.attr_span); let type_use_gen = &def.type_use_generics(def.pallet_struct.attr_span); let type_decl_gen = &def.type_decl_generics(def.pallet_struct.attr_span); let pallet_ident = &def.pallet_struct.pallet; @@ -75,6 +75,9 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { )] )); + // skip requirement for type params to implement `TypeInfo` + pallet_item.attrs.push(syn::parse_quote!( #[scale_info(skip_type_params(#type_use_gen))] )); + let pallet_error_metadata = if let Some(error_def) = &def.error { let error_ident = &error_def.error; quote::quote_spanned!(def.pallet_struct.attr_span => diff --git a/frame/support/procedural/src/pallet/parse/mod.rs 
b/frame/support/procedural/src/pallet/parse/mod.rs index 7be98fa5bf37e..2f378c52e8b33 100644 --- a/frame/support/procedural/src/pallet/parse/mod.rs +++ b/frame/support/procedural/src/pallet/parse/mod.rs @@ -270,20 +270,6 @@ impl Def { } } - /// Depending on if pallet is instantiable: - /// * either `T: Config + TypeInfo` - /// * or `T: Config + TypeInfo, I: 'static + TypeInfo` - /// todo: [AJ] see if we can remove this by not requiring TypeInfo on all generic params in scale_info - pub fn type_impl_scale_info_bounded_generics(&self, span: proc_macro2::Span) -> proc_macro2::TokenStream { - let frame_support = &self.frame_support; - let bound = quote::quote!(#frame_support::scale_info::TypeInfo); - if self.config.has_instance { - quote::quote_spanned!(span => T: Config + #bound, I: 'static + #bound) - } else { - quote::quote_spanned!(span => T: Config + #bound) - } - } - /// Depending on if pallet is instantiable: /// * either `T: Config` /// * or `T: Config, I: 'static = ()` From aa027a40420797038a16965d182f265955e1e4a1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 10 Jun 2021 17:26:36 +0100 Subject: [PATCH 248/503] Fix manual TypeInfo impl --- frame/support/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 0d644565f047f..cef0f2382eecb 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -343,7 +343,7 @@ macro_rules! 
parameter_types { .path($crate::scale_info::Path::new(stringify!($name), module_path!())) .composite( scale_info::build::Fields::unnamed() - .field_of::<$type>(stringify!($type), &["todo: docs"]) + .field(|f| f.ty::<$type>().type_name(stringify!($type))) ) } } From b695b9487eda940ac4f94ba28e0513d10a6b01bb Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 11 Jun 2021 08:23:29 +0100 Subject: [PATCH 249/503] Skip type params for Error --- frame/support/procedural/src/pallet/expand/error.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index 00bfeacbe98e1..aa7099c4bf441 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -65,6 +65,9 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { error_item.attrs.push( syn::parse_quote!( #[derive(#frame_support::scale_info::TypeInfo)] ) ); + error_item.attrs.push( + syn::parse_quote!( #[scale_info(skip_type_params(#type_use_gen))] ) + ); if get_doc_literals(&error_item.attrs).is_empty() { error_item.attrs.push(syn::parse_quote!( From ad6c4c708e14ad9d70834fc0194e79c1ee038255 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 11 Jun 2021 08:27:38 +0100 Subject: [PATCH 250/503] Update scale-info to include TypeParams fix --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 08a64ba1925b0..b4c0a2f11dcf9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8319,7 +8319,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#62213f82460fd1c7ee245e5819c65a09ae0ea16f" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#1e99eb88bbd84de0d18a224075a7c53412eb49cf" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -8331,7 +8331,7 @@ dependencies = [ [[package]] name = 
"scale-info-derive" version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#62213f82460fd1c7ee245e5819c65a09ae0ea16f" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#1e99eb88bbd84de0d18a224075a7c53412eb49cf" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", From d4605822e3e4538e347ca94ecb9db39336d0bc3a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 11 Jun 2021 08:33:02 +0100 Subject: [PATCH 251/503] Add some scale-info deps and derives --- Cargo.lock | 1 + frame/grandpa/src/lib.rs | 4 ++-- frame/identity/src/lib.rs | 6 +++--- frame/scheduler/src/lib.rs | 3 ++- frame/uniques/Cargo.toml | 1 + 5 files changed, 9 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b4c0a2f11dcf9..3a59c9012ae00 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5762,6 +5762,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", diff --git a/frame/grandpa/src/lib.rs b/frame/grandpa/src/lib.rs index 952e0d646135b..b1eb4af897c1e 100644 --- a/frame/grandpa/src/lib.rs +++ b/frame/grandpa/src/lib.rs @@ -361,7 +361,7 @@ pub trait WeightInfo { } /// A stored pending change. -#[derive(Encode, Decode)] +#[derive(Encode, Decode, scale_info::TypeInfo)] pub struct StoredPendingChange { /// The block number this was scheduled at. pub scheduled_at: N, @@ -377,7 +377,7 @@ pub struct StoredPendingChange { /// Current state of the GRANDPA authority set. State transitions must happen in /// the same order of states defined below, e.g. `Paused` implies a prior /// `PendingPause`. -#[derive(Decode, Encode)] +#[derive(Decode, Encode, scale_info::TypeInfo)] #[cfg_attr(test, derive(Debug, PartialEq))] pub enum StoredState { /// The current authority set is live, and GRANDPA is enabled. 
diff --git a/frame/identity/src/lib.rs b/frame/identity/src/lib.rs index d3f21c9cc1a31..07cc40d974508 100644 --- a/frame/identity/src/lib.rs +++ b/frame/identity/src/lib.rs @@ -253,9 +253,9 @@ impl scale_info::TypeInfo for IdentityFields { .path(scale_info::Path::new("IdentityFields", module_path!())) .composite( scale_info::build::Fields::unnamed() - .field_of::( - "BitFlags", - &["Wrapper type for `BitFlags`"] + .field(|f| f.ty::() + .type_name("BitFlags") + .docs(&["Wrapper type for `BitFlags`"]) ) ) } diff --git a/frame/scheduler/src/lib.rs b/frame/scheduler/src/lib.rs index 0e2444d37acaa..66fad78256b17 100644 --- a/frame/scheduler/src/lib.rs +++ b/frame/scheduler/src/lib.rs @@ -138,7 +138,8 @@ pub mod pallet { + IsType<::Origin>; /// The caller origin, overarching type of all pallets origins. - type PalletsOrigin: From> + Codec + Clone + Eq; + type PalletsOrigin: From> + Codec + Clone + Eq + + scale_info::TypeInfo; /// The aggregated call type. type Call: Parameter diff --git a/frame/uniques/Cargo.toml b/frame/uniques/Cargo.toml index f007744dc64a2..74103ded00bd7 100644 --- a/frame/uniques/Cargo.toml +++ b/frame/uniques/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } From b594cf54e25e8e6e4849476c5aade64a83987cd1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 11 Jun 2021 09:45:43 +0100 Subject: [PATCH 252/503] Build event metadata at construct_runtime level --- .../src/construct_runtime/expand/metadata.rs | 9 +++++++-- 
frame/support/procedural/src/pallet/expand/event.rs | 10 ---------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index dbf3d8df3dab2..47a29e80a41e9 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -44,7 +44,7 @@ pub fn expand_runtime_metadata( let index = &decl.index; let storage = expand_pallet_metadata_storage(&filtered_names, runtime, decl); let calls = expand_pallet_metadata_calls(&filtered_names, runtime, decl); - let event = expand_pallet_metadata_events(&filtered_names, runtime, decl); + let event = expand_pallet_metadata_events(&filtered_names, runtime, scrate, decl); let constants = expand_pallet_metadata_constants(runtime, decl); let errors = expand_pallet_metadata_errors(runtime, decl); @@ -125,6 +125,7 @@ fn expand_pallet_metadata_calls( fn expand_pallet_metadata_events( filtered_names: &[&'static str], runtime: &Ident, + scrate: &TokenStream, decl: &Pallet, ) -> TokenStream { if filtered_names.contains(&"Event") { @@ -139,7 +140,11 @@ fn expand_pallet_metadata_events( }; quote!{ - Some(#pallet_event::metadata()) + Some( + #scrate::metadata::PalletEventMetadata { + ty: #scrate::scale_info::meta_type::<#pallet_event>() + } + ) } } else { quote!(None) diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 979a06818daa3..de6498afc6295 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -124,15 +124,5 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { impl<#event_impl_gen> From<#event_ident<#event_use_gen>> for () #event_where_clause { fn from(_: #event_ident<#event_use_gen>) {} } - - impl<#event_impl_gen> #event_ident<#event_use_gen> 
#event_where_clause { - #[allow(dead_code)] - #[doc(hidden)] - pub fn metadata() -> #frame_support::metadata::PalletEventMetadata { - #frame_support::metadata::PalletEventMetadata { - ty: #frame_support::scale_info::meta_type::() - } - } - } ) } From 58b74cb5ec1514b2cc9247293632d3f0add77daa Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 11 Jun 2021 09:55:34 +0100 Subject: [PATCH 253/503] Add some more derives and bounds --- frame/assets/src/lib.rs | 2 +- frame/democracy/src/lib.rs | 4 ++-- frame/uniques/src/lib.rs | 7 ++++--- frame/uniques/src/types.rs | 11 ++++++----- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/frame/assets/src/lib.rs b/frame/assets/src/lib.rs index 8e67ee8de62e8..f65ec2c48b6f4 100644 --- a/frame/assets/src/lib.rs +++ b/frame/assets/src/lib.rs @@ -202,7 +202,7 @@ pub mod pallet { type ApprovalDeposit: Get>; /// The maximum length of a name or symbol stored on-chain. - type StringLimit: Get; + type StringLimit: Get + scale_info::TypeInfo; /// A hook to allow a per-asset, per-account minimum balance to be enforced. This must be /// respected in all permissionless operations. diff --git a/frame/democracy/src/lib.rs b/frame/democracy/src/lib.rs index 7785ebb438eb6..963bb36fe6e22 100644 --- a/frame/democracy/src/lib.rs +++ b/frame/democracy/src/lib.rs @@ -201,7 +201,7 @@ type BalanceOf = <::Currency as Currency< = <::Currency as Currency<::AccountId>>::NegativeImbalance; -#[derive(Clone, Encode, Decode, RuntimeDebug)] +#[derive(Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] pub enum PreimageStatus { /// The preimage is imminently needed at the argument. Missing(BlockNumber), @@ -228,7 +228,7 @@ impl PreimageStatus>; /// The maximum length of data stored on-chain. - type StringLimit: Get; + type StringLimit: Get + TypeInfo; /// The maximum length of an attribute key. - type KeyLimit: Get; + type KeyLimit: Get + TypeInfo; /// The maximum length of an attribute value. 
- type ValueLimit: Get; + type ValueLimit: Get + TypeInfo; /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; diff --git a/frame/uniques/src/types.rs b/frame/uniques/src/types.rs index f73a18c7f3f3d..3f4169b2c3af1 100644 --- a/frame/uniques/src/types.rs +++ b/frame/uniques/src/types.rs @@ -18,6 +18,7 @@ //! Various basic types for use in the assets pallet. use super::*; +use scale_info::TypeInfo; use frame_support::{traits::Get, BoundedVec}; pub(super) type DepositBalanceOf = @@ -28,7 +29,7 @@ pub(super) type InstanceDetailsFor = InstanceDetails<::AccountId, DepositBalanceOf>; -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub struct ClassDetails< AccountId, DepositBalance, @@ -57,7 +58,7 @@ pub struct ClassDetails< } /// Witness data for the destroy transactions. -#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug)] +#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub struct DestroyWitness { /// The total number of outstanding instances of this asset class. #[codec(compact)] @@ -81,7 +82,7 @@ impl ClassDetails { } /// Information concerning the ownership of a single unique asset. -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, TypeInfo)] pub struct InstanceDetails { /// The owner of this asset. pub(super) owner: AccountId, @@ -94,7 +95,7 @@ pub struct InstanceDetails { pub(super) deposit: DepositBalance, } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, TypeInfo)] pub struct ClassMetadata> { /// The balance deposited for this metadata. 
/// @@ -108,7 +109,7 @@ pub struct ClassMetadata> { pub(super) is_frozen: bool, } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, TypeInfo)] pub struct InstanceMetadata> { /// The balance deposited for this metadata. /// From 4015226f7d7197b45604d638b74c2d8d7cbdc225 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 11 Jun 2021 10:13:29 +0100 Subject: [PATCH 254/503] Add some more derives and bounds --- Cargo.lock | 2 ++ .../procedural/src/construct_runtime/expand/origin.rs | 5 ++++- frame/support/test/tests/construct_runtime.rs | 4 ++-- frame/transaction-storage/Cargo.toml | 1 + frame/transaction-storage/src/lib.rs | 2 +- primitives/transaction-storage-proof/Cargo.toml | 1 + primitives/transaction-storage-proof/src/lib.rs | 2 +- 7 files changed, 12 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3a59c9012ae00..0d961fa67a41f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5725,6 +5725,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -9554,6 +9555,7 @@ dependencies = [ "async-trait", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-inherents", "sp-runtime", diff --git a/frame/support/procedural/src/construct_runtime/expand/origin.rs b/frame/support/procedural/src/construct_runtime/expand/origin.rs index 2d0cc8300cb76..11786233b7a40 100644 --- a/frame/support/procedural/src/construct_runtime/expand/origin.rs +++ b/frame/support/procedural/src/construct_runtime/expand/origin.rs @@ -154,7 +154,10 @@ pub fn expand_outer_origin( } } - #[derive(Clone, PartialEq, Eq, #scrate::RuntimeDebug, #scrate::codec::Encode, #scrate::codec::Decode)] + #[derive( + Clone, PartialEq, Eq, #scrate::RuntimeDebug, #scrate::codec::Encode, + #scrate::codec::Decode, #scrate::scale_info::TypeInfo, + )] #[allow(non_camel_case_types)] pub enum OriginCaller { #[codec(index = 
#system_index)] diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index ba250e9d77238..ffc38fee654e5 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -135,7 +135,7 @@ mod nested { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, scale_info::TypeInfo)] pub struct Origin; frame_support::decl_event! { @@ -175,7 +175,7 @@ pub mod module3 { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, scale_info::TypeInfo)] pub struct Origin(pub core::marker::PhantomData); frame_support::decl_event! { diff --git a/frame/transaction-storage/Cargo.toml b/frame/transaction-storage/Cargo.toml index 8892e234d436f..7aea92c122f08 100644 --- a/frame/transaction-storage/Cargo.toml +++ b/frame/transaction-storage/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.101", optional = true } hex-literal = { version = "0.3.1", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } diff --git a/frame/transaction-storage/src/lib.rs b/frame/transaction-storage/src/lib.rs index ef824a8399f57..80dba3a248fb5 100644 --- a/frame/transaction-storage/src/lib.rs +++ b/frame/transaction-storage/src/lib.rs @@ -57,7 +57,7 @@ pub const DEFAULT_MAX_TRANSACTION_SIZE: u32 = 8 * 1024 * 1024; pub const 
DEFAULT_MAX_BLOCK_TRANSACTIONS: u32 = 512; /// State data for a stored transaction. -#[derive(Encode, Decode, Clone, sp_runtime::RuntimeDebug, PartialEq, Eq)] +#[derive(Encode, Decode, Clone, sp_runtime::RuntimeDebug, PartialEq, Eq, scale_info::TypeInfo)] pub struct TransactionInfo { /// Chunk trie root. chunk_root: ::Output, diff --git a/primitives/transaction-storage-proof/Cargo.toml b/primitives/transaction-storage-proof/Cargo.toml index bbdcb9f989f0b..7b48fc6f437a6 100644 --- a/primitives/transaction-storage-proof/Cargo.toml +++ b/primitives/transaction-storage-proof/Cargo.toml @@ -19,6 +19,7 @@ sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-trie = { version = "3.0.0", optional = true, path = "../trie" } sp-core = { version = "3.0.0", path = "../core", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } log = { version = "0.4.8", optional = true } async-trait = { version = "0.1.48", optional = true } diff --git a/primitives/transaction-storage-proof/src/lib.rs b/primitives/transaction-storage-proof/src/lib.rs index 825de27b2a5a9..5c56010475f3e 100644 --- a/primitives/transaction-storage-proof/src/lib.rs +++ b/primitives/transaction-storage-proof/src/lib.rs @@ -49,7 +49,7 @@ impl IsFatalError for InherentError { } } -#[derive(Encode, Decode, Clone, PartialEq, Debug)] +#[derive(Encode, Decode, Clone, PartialEq, Debug, scale_info::TypeInfo)] pub struct TransactionStorageProof { /// Data chunk that is proved to exist. 
pub chunk: Vec, From afaeacd03f42522841fc6f51568eef86163be650 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 11 Jun 2021 10:33:01 +0100 Subject: [PATCH 255/503] Remove some unnecessary runtime and instance bounds --- frame/support/src/dispatch.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 6226adbd744e6..7fbc393b7df8b 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -1804,6 +1804,7 @@ macro_rules! decl_module { /// /// Each variant of this enum maps to a dispatchable function from the associated module. #[derive($crate::codec::Encode, $crate::codec::Decode, $crate::scale_info::TypeInfo)] + #[scale_info(skip_type_params($trait_instance, $($instance)?))] pub enum $call_type<$trait_instance: $trait_name$(, $instance: $instantiable $( = $module_default_instance)?)?> where $( $other_where_bounds )* { @@ -2317,8 +2318,7 @@ macro_rules! __dispatch_impl_metadata { $call_type:ident $($rest:tt)* ) => { - // todo: [AJ] another Instance: TypeInfo bound to remove - impl<$trait_instance: $trait_name $( + $crate::scale_info::TypeInfo, $instance: $instantiable + $crate::scale_info::TypeInfo)? 
+ $crate::scale_info::TypeInfo> $mod_type<$trait_instance $(, $instance)?> + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* { #[doc(hidden)] From 60c55be5e3939add7a819fb48154180f07c959e0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 11 Jun 2021 10:50:41 +0100 Subject: [PATCH 256/503] Remove some more instance trait bounds --- frame/support/src/dispatch.rs | 3 +-- frame/support/src/error.rs | 1 + frame/support/src/instances.rs | 32 ++++++++++++++++---------------- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 7fbc393b7df8b..af4a3e4769fb5 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2344,8 +2344,7 @@ macro_rules! __impl_error_metadata { $error_type:tt $($rest:tt)* ) => { - // todo: [AJ] remove TypeInfo bounds on Instance, depends on skipping the T in PhantomData - impl<$trait_instance: $trait_name $(, $instance: $instantiable + $crate::scale_info::TypeInfo)?> $mod_type<$trait_instance $(, $instance)?> + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* { #[doc(hidden)] diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index 42d6aba43196c..a5df4f7d4cbc3 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -86,6 +86,7 @@ macro_rules! decl_error { ) => { $(#[$attr])* #[derive($crate::scale_info::TypeInfo)] + #[scale_info(skip_type_params($generic, $($inst_generic)?))] pub enum $error<$generic: $trait $(, $inst_generic: $instance)?> $( where $( $where_ty: $where_bound ),* )? { diff --git a/frame/support/src/instances.rs b/frame/support/src/instances.rs index 192ea3ea36ec9..9908d16076a08 100644 --- a/frame/support/src/instances.rs +++ b/frame/support/src/instances.rs @@ -32,65 +32,65 @@ //! 
accessible to [`frame_support::construct_runtime`]. /// Instance1 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] // todo: [AJ] can we remove these TypeInfo derives? +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance1; /// Instance2 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance2; /// Instance3 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance3; /// Instance4 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance4; /// Instance5 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance5; /// Instance6 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance6; /// Instance7 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance7; /// Instance8 to be used for instantiable pallet define with `pallet` macro. 
-#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance8; /// Instance9 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance9; /// Instance10 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance10; /// Instance11 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance11; /// Instance12 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance12; /// Instance13 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance13; /// Instance14 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance14; /// Instance15 to be used for instantiable pallet define with `pallet` macro. 
-#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance15; /// Instance16 to be used for instantiable pallet define with `pallet` macro. -#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance16; From 273250f5a0438bac7a22e412c2824335f6b2b293 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 11 Jun 2021 11:01:04 +0100 Subject: [PATCH 257/503] Skip type params for instance origin --- frame/support/test/tests/pallet_instance.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 0f4e2c433db8e..47e880a9cbd62 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -162,6 +162,7 @@ pub mod pallet { #[pallet::origin] #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, scale_info::TypeInfo)] + #[scale_info(skip_type_params(T, I))] // todo: [AJ] could/should this be added automatically? 
pub struct Origin(PhantomData<(T, I)>); #[pallet::validate_unsigned] From 65ed74e45de2a5b99618f7877ed4edf39b386553 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 15 Jun 2021 10:41:08 +0100 Subject: [PATCH 258/503] Fix errors --- Cargo.lock | 1 - frame/support/procedural/src/pallet/parse/config.rs | 1 - 2 files changed, 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a6b37d880cafd..8e79cad184e7a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1913,7 +1913,6 @@ dependencies = [ "sp-runtime", "sp-state-machine", "sp-std", - "sp-version", "trybuild", ] diff --git a/frame/support/procedural/src/pallet/parse/config.rs b/frame/support/procedural/src/pallet/parse/config.rs index 532b1b4583ac8..69dfaeb7f9e9b 100644 --- a/frame/support/procedural/src/pallet/parse/config.rs +++ b/frame/support/procedural/src/pallet/parse/config.rs @@ -15,7 +15,6 @@ // See the License for the specific language governing permissions and // limitations under the License. -use core::convert::TryFrom; use super::helper; use core::convert::TryFrom; use syn::spanned::Spanned; From 016bb2f4922307b1ccf0990ce35c25238b93ed28 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 17 Jun 2021 10:39:58 +0100 Subject: [PATCH 259/503] Update scale-info --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b922bc535a3ed..bc1516077aa5d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8310,7 +8310,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#1e99eb88bbd84de0d18a224075a7c53412eb49cf" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#91799c7cfa2db7b8d8c8c11da4bc3c4d71a50e27" dependencies = [ "cfg-if 1.0.0", "derive_more", @@ -8322,7 +8322,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.4.0" -source = 
"git+https://github.com/paritytech/scale-info?branch=aj-substrate#1e99eb88bbd84de0d18a224075a7c53412eb49cf" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#91799c7cfa2db7b8d8c8c11da4bc3c4d71a50e27" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", From 192f8d8c5cbadeec9347966dc72bd45f505297d5 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 17 Jun 2021 10:43:31 +0100 Subject: [PATCH 260/503] Update Cargo.lock after merge --- Cargo.lock | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bc1516077aa5d..e19ed3c678dc2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1678,8 +1678,9 @@ dependencies = [ [[package]] name = "finality-grandpa" -version = "0.14.0" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2e8d2ab5bbf20b0ed97085a1aedf83cdd313e21d" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74a1bfdcc776e63e49f741c7ce6116fa1b887e8ac2e3ccb14dd4aa113e54feb9" dependencies = [ "either", "futures 0.3.15", @@ -1689,7 +1690,6 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", - "scale-info", ] [[package]] @@ -11612,3 +11612,8 @@ dependencies = [ "cc", "libc", ] + +[[patch.unused]] +name = "finality-grandpa" +version = "0.14.0" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2e8d2ab5bbf20b0ed97085a1aedf83cdd313e21d" From 4b20388dc2f76660a488336641f4248974f00146 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 17 Jun 2021 12:05:28 +0100 Subject: [PATCH 261/503] Add missing TypeInfo derive and fix grandpa github dependency --- Cargo.lock | 30 ++++++++++++------- .../src/construct_runtime/expand/call.rs | 1 + primitives/finality-grandpa/Cargo.toml | 2 +- 3 files changed, 22 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e19ed3c678dc2..f3cfbeae12e15 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1676,6 +1676,21 
@@ dependencies = [ "log", ] +[[package]] +name = "finality-grandpa" +version = "0.14.0" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2e8d2ab5bbf20b0ed97085a1aedf83cdd313e21d" +dependencies = [ + "either", + "futures 0.3.15", + "futures-timer 3.0.2", + "log", + "num-traits", + "parity-scale-codec", + "parking_lot 0.11.1", + "scale-info", +] + [[package]] name = "finality-grandpa" version = "0.14.1" @@ -5134,7 +5149,7 @@ dependencies = [ name = "pallet-grandpa" version = "3.1.0" dependencies = [ - "finality-grandpa", + "finality-grandpa 0.14.1", "frame-benchmarking", "frame-election-provider-support", "frame-support", @@ -7631,7 +7646,7 @@ dependencies = [ "async-trait", "derive_more", "dyn-clone", - "finality-grandpa", + "finality-grandpa 0.14.1", "fork-tree", "futures 0.3.15", "futures-timer 3.0.2", @@ -7677,7 +7692,7 @@ name = "sc-finality-grandpa-rpc" version = "0.9.0" dependencies = [ "derive_more", - "finality-grandpa", + "finality-grandpa 0.14.1", "futures 0.3.15", "jsonrpc-core", "jsonrpc-core-client", @@ -7707,7 +7722,7 @@ name = "sc-finality-grandpa-warp-sync" version = "0.9.0" dependencies = [ "derive_more", - "finality-grandpa", + "finality-grandpa 0.14.1", "futures 0.3.15", "log", "num-traits", @@ -9118,7 +9133,7 @@ dependencies = [ name = "sp-finality-grandpa" version = "3.0.0" dependencies = [ - "finality-grandpa", + "finality-grandpa 0.14.0", "log", "parity-scale-codec", "scale-info", @@ -11612,8 +11627,3 @@ dependencies = [ "cc", "libc", ] - -[[patch.unused]] -name = "finality-grandpa" -version = "0.14.0" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2e8d2ab5bbf20b0ed97085a1aedf83cdd313e21d" diff --git a/frame/support/procedural/src/construct_runtime/expand/call.rs b/frame/support/procedural/src/construct_runtime/expand/call.rs index 6a44468f25b2c..b6d886abff332 100644 --- a/frame/support/procedural/src/construct_runtime/expand/call.rs +++ 
b/frame/support/procedural/src/construct_runtime/expand/call.rs @@ -54,6 +54,7 @@ pub fn expand_outer_dispatch( Clone, PartialEq, Eq, #scrate::codec::Encode, #scrate::codec::Decode, + #scrate::scale_info::TypeInfo, #scrate::RuntimeDebug, )] pub enum Call { diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 3076f500f56c4..16c5516cb419d 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } -grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } +grandpa = { package = "finality-grandpa", git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-api = { version = "3.0.0", default-features = false, path = "../api" } From 2545482491daedc2945cb298e08ec303fcbbe0fc Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 17 Jun 2021 13:17:18 +0100 Subject: [PATCH 262/503] Update finality-grandpa branch --- Cargo.lock | 29 +++++++------------------- primitives/finality-grandpa/Cargo.toml | 2 +- 2 files changed, 8 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f3cfbeae12e15..3bb1ea6831b9c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1676,26 +1676,10 @@ dependencies = [ "log", ] -[[package]] -name = "finality-grandpa" -version = "0.14.0" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2e8d2ab5bbf20b0ed97085a1aedf83cdd313e21d" -dependencies = [ - 
"either", - "futures 0.3.15", - "futures-timer 3.0.2", - "log", - "num-traits", - "parity-scale-codec", - "parking_lot 0.11.1", - "scale-info", -] - [[package]] name = "finality-grandpa" version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74a1bfdcc776e63e49f741c7ce6116fa1b887e8ac2e3ccb14dd4aa113e54feb9" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#520e638ca4375ee6268b04b1d84de89c060c4f5d" dependencies = [ "either", "futures 0.3.15", @@ -1705,6 +1689,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", + "scale-info", ] [[package]] @@ -5149,7 +5134,7 @@ dependencies = [ name = "pallet-grandpa" version = "3.1.0" dependencies = [ - "finality-grandpa 0.14.1", + "finality-grandpa", "frame-benchmarking", "frame-election-provider-support", "frame-support", @@ -7646,7 +7631,7 @@ dependencies = [ "async-trait", "derive_more", "dyn-clone", - "finality-grandpa 0.14.1", + "finality-grandpa", "fork-tree", "futures 0.3.15", "futures-timer 3.0.2", @@ -7692,7 +7677,7 @@ name = "sc-finality-grandpa-rpc" version = "0.9.0" dependencies = [ "derive_more", - "finality-grandpa 0.14.1", + "finality-grandpa", "futures 0.3.15", "jsonrpc-core", "jsonrpc-core-client", @@ -7722,7 +7707,7 @@ name = "sc-finality-grandpa-warp-sync" version = "0.9.0" dependencies = [ "derive_more", - "finality-grandpa 0.14.1", + "finality-grandpa", "futures 0.3.15", "log", "num-traits", @@ -9133,7 +9118,7 @@ dependencies = [ name = "sp-finality-grandpa" version = "3.0.0" dependencies = [ - "finality-grandpa 0.14.0", + "finality-grandpa", "log", "parity-scale-codec", "scale-info", diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 16c5516cb419d..3076f500f56c4 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = 
"parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } -grandpa = { package = "finality-grandpa", git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info", default-features = false, features = ["derive-codec"] } +grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-api = { version = "3.0.0", default-features = false, path = "../api" } From 1934a34c092cbd884976543bbaaf3ed5e766038a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 17 Jun 2021 16:25:27 +0100 Subject: [PATCH 263/503] Remove some pallet metadata attrs --- frame/elections/src/lib.rs | 1 - frame/staking/src/lib.rs | 1 - 2 files changed, 2 deletions(-) diff --git a/frame/elections/src/lib.rs b/frame/elections/src/lib.rs index 4160f3ae4bd7b..1065afa019aa0 100644 --- a/frame/elections/src/lib.rs +++ b/frame/elections/src/lib.rs @@ -466,7 +466,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", Vec = "Vec")] pub enum Event { /// Reaped \[voter, reaper\]. VoterReaped(T::AccountId, T::AccountId), diff --git a/frame/staking/src/lib.rs b/frame/staking/src/lib.rs index bd93f48009fdb..4b6d51b7a43b1 100644 --- a/frame/staking/src/lib.rs +++ b/frame/staking/src/lib.rs @@ -1293,7 +1293,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", BalanceOf = "Balance")] pub enum Event { /// The era payout has been set; the first balance is the validator-payout; the second is /// the remainder from the maximum amount of reward. 
From 4a7f46a05224db7b70dc45d33e8c86039f9fa6aa Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 18 Jun 2021 09:34:14 +0100 Subject: [PATCH 264/503] Update primitive-types --- Cargo.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3bb1ea6831b9c..265764eb74910 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1695,7 +1695,7 @@ dependencies = [ [[package]] name = "fixed-hash" version = "0.7.0" -source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" dependencies = [ "byteorder", "rand 0.8.3", @@ -2709,7 +2709,7 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" dependencies = [ "parity-scale-codec", ] @@ -2726,7 +2726,7 @@ dependencies = [ [[package]] name = "impl-serde" version = "0.3.1" -source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" dependencies = [ "serde", ] @@ -5879,7 +5879,7 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" dependencies = [ "cfg-if 1.0.0", "hashbrown", @@ -5905,7 +5905,7 @@ dependencies = [ [[package]] name = "parity-util-mem-derive" version = "0.1.0" -source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +source = 
"git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" dependencies = [ "proc-macro2", "syn", @@ -6338,7 +6338,7 @@ dependencies = [ [[package]] name = "primitive-types" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" dependencies = [ "fixed-hash", "impl-codec", @@ -10823,7 +10823,7 @@ dependencies = [ [[package]] name = "uint" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#0ac86ef7159e9fc7695de280f7095c97e5e5ca81" +source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" dependencies = [ "byteorder", "crunchy", From 3419a75b15804fcf503b2a8df0d05dc6f63ceb70 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 18 Jun 2021 12:01:42 +0100 Subject: [PATCH 265/503] Use primitive types master branch --- primitives/core/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index e65f7f21e18f7..2b721f128158c 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -19,7 +19,7 @@ scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-su log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } -primitive-types = { version = "0.9.0", default-features = false, features = ["codec", "scale-info"] } +primitive-types = { git = "https://github.com/paritytech/parity-common", default-features = false, features = ["codec", "scale-info"] } impl-serde = { version = "0.3.0", optional = true } wasmi = { version = "0.9.0", optional = true } hash-db = { version = "0.15.2", default-features = false } From 44aa8ffdcbc9d4a176d2581cb111977ee1ed7c41 Mon Sep 17 00:00:00 
2001 From: Andrew Jones Date: Tue, 22 Jun 2021 12:46:09 +0100 Subject: [PATCH 266/503] Use frame-metadata master branch --- frame/support/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 3e1d797245ad6..cede2edea96b7 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.1.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } -frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-substrate", default-features = false, features = ["v14"] } +frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "master", default-features = false, features = ["v14"] } max-encoded-len = { version = "3.0.0", default-features = false, path = "../../max-encoded-len", features = [ "derive" ] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } From 390e058e5fb9a3d93ce465dd27b2f94931defc4a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 22 Jun 2021 12:46:36 +0100 Subject: [PATCH 267/503] Use frame-metadata main branch --- frame/support/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index cede2edea96b7..92c9f777d7946 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", 
version = "2.1.0", default-features = false, features = ["derive"] } scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } -frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "master", default-features = false, features = ["v14"] } +frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v14"] } max-encoded-len = { version = "3.0.0", default-features = false, path = "../../max-encoded-len", features = [ "derive" ] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } From 23496f02a8ba483fb1192620ad391be51cdbb05b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 22 Jun 2021 15:09:45 +0100 Subject: [PATCH 268/503] Cargo.lock --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 29df9c1dd8b1c..1573d90ab2d6c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1826,7 +1826,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-substrate#f14f5258c40f6b887a9abfb324e1ba5849227293" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#78d3456e4f057d808e78163c2f4954f32b071000" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", From 83696bea0c049bb5a0802b28da75e2af46688f85 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 25 Jun 2021 09:41:36 +0100 Subject: [PATCH 269/503] Remove `TypeInfo` constraint from `frame_system::Config` --- frame/contracts/src/schedule.rs | 3 +++ frame/contracts/src/storage.rs | 1 + frame/contracts/src/wasm/mod.rs | 1 + frame/system/src/lib.rs | 2 +- 4 files changed, 6 insertions(+), 1 deletion(-) diff --git 
a/frame/contracts/src/schedule.rs b/frame/contracts/src/schedule.rs index 2447f599ca7b2..704be2c1953f8 100644 --- a/frame/contracts/src/schedule.rs +++ b/frame/contracts/src/schedule.rs @@ -73,6 +73,7 @@ pub const INSTR_BENCHMARK_BATCH_SIZE: u32 = 1_000; #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(bound(serialize = "", deserialize = "")))] #[derive(Clone, Encode, Decode, PartialEq, Eq, ScheduleDebug, DefaultNoBound, scale_info::TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct Schedule { /// Describes the upper limits on various metrics. pub limits: Limits, @@ -175,6 +176,7 @@ impl Limits { /// and dropping return values in order to maintain a valid module. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, scale_info::TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct InstructionWeights { /// Version of the instruction weights. /// @@ -248,6 +250,7 @@ pub struct InstructionWeights { /// Describes the weight for each imported function that a contract is allowed to call. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, scale_info::TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct HostFnWeights { /// Weight of calling `seal_caller`. pub caller: Weight, diff --git a/frame/contracts/src/storage.rs b/frame/contracts/src/storage.rs index 0623355cf90fd..a75c1b6501aa5 100644 --- a/frame/contracts/src/storage.rs +++ b/frame/contracts/src/storage.rs @@ -46,6 +46,7 @@ pub type TombstoneContractInfo = /// Information for managing an account and its sub trie abstraction. 
/// This is the required info to cache for an account #[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[scale_info(skip_type_params(T))] pub enum ContractInfo { Alive(AliveContractInfo), Tombstone(TombstoneContractInfo), diff --git a/frame/contracts/src/wasm/mod.rs b/frame/contracts/src/wasm/mod.rs index 24ac85e098b89..88e15f1ef9ee6 100644 --- a/frame/contracts/src/wasm/mod.rs +++ b/frame/contracts/src/wasm/mod.rs @@ -51,6 +51,7 @@ pub use tests::MockExt; /// called. Therefore one must be careful when holding any in-memory representation of this /// type while calling into a contract as those fields can get out of date. #[derive(Clone, Encode, Decode, scale_info::TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct PrefabWasmModule { /// Version of the instruction weights with which the code was instrumented. #[codec(compact)] diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index d1790561d5ec7..9ef9070b85158 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -161,7 +161,7 @@ pub mod pallet { /// System configuration trait. Implemented by runtime. #[pallet::config] #[pallet::disable_frame_system_supertrait_check] - pub trait Config: 'static + Eq + Clone + scale_info::TypeInfo { // todo: [AJ] see whether we really need this bound + pub trait Config: 'static + Eq + Clone { /// The basic call filter to use in Origin. All origins are built with this filter as base, /// except Root. 
type BaseCallFilter: Filter; From 9e1ea7c3d76a36711608be9dbd6a23b405fff5f4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 25 Jun 2021 11:07:41 +0100 Subject: [PATCH 270/503] Skip type param for ChargeTransactionPayment --- frame/transaction-payment/src/lib.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index 6fa8ec332f177..3e1129f87f2b0 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -515,6 +515,7 @@ impl Convert> for Pallet where /// Require the transactor pay for themselves and maybe include a tip to gain additional priority /// in the queue. #[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct ChargeTransactionPayment(#[codec(compact)] BalanceOf); impl ChargeTransactionPayment where @@ -565,7 +566,7 @@ impl ChargeTransactionPayment where } } -impl sp_std::fmt::Debug for ChargeTransactionPayment { +impl sp_std::fmt::Debug for ChargeTransactionPayment { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "ChargeTransactionPayment<{:?}>", self.0) From 49c511d156c8032a5401dcab14a8b3f53027b9d4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 28 Jun 2021 10:39:59 +0100 Subject: [PATCH 271/503] UncheckedExtrinsic: manual `TypeInfo` implementation --- .../src/generic/unchecked_extrinsic.rs | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/primitives/runtime/src/generic/unchecked_extrinsic.rs b/primitives/runtime/src/generic/unchecked_extrinsic.rs index 4c6e8f921af8e..d983afc77a425 100644 --- a/primitives/runtime/src/generic/unchecked_extrinsic.rs +++ b/primitives/runtime/src/generic/unchecked_extrinsic.rs @@ -20,6 +20,7 @@ use sp_std::{fmt, prelude::*}; use sp_io::hashing::blake2_256; use codec::{Decode, Encode, EncodeLike, Input, Error}; +use scale_info::{build::Fields, 
TypeInfo, Type, Path}; use crate::{ traits::{ self, Member, MaybeDisplay, SignedExtension, Checkable, Extrinsic, ExtrinsicMetadata, @@ -48,21 +49,22 @@ where pub function: Call, } -// todo: [AJ] remove this manual impl once the top level runtime Call implements TypeInfo... -// ...which it should be able to once all pallets are converted to frame v2 macros and all types have -// scale_info support. It does mean for now that we won't have enough metadata to decode the raw -// UncheckedExtrinsic bytes until all Pallet Calls are converted. -impl scale_info::TypeInfo for UncheckedExtrinsic +/// Manual [`TypeInfo`] implementation because of custom encoding. The data is a valid encoded +/// `Vec`, but requires some logic to extract the signature and payload. +/// +/// See [`UncheckedExtrinsic::encode`] and [`UncheckedExtrinsic::decode`]. +impl TypeInfo + for UncheckedExtrinsic where - Extra: SignedExtension + scale_info::TypeInfo, + Extra: SignedExtension + TypeInfo { - type Identity = (); + type Identity = UncheckedExtrinsic<(), (), (), Extra>; - fn type_info() -> scale_info::Type { - scale_info::Type::builder() - .path(scale_info::Path::new("UncheckedExtrinsic", module_path!())) - // dummy impl because we can't bound `Call` type param to `TypeInfo` yet (see above todo - .composite(scale_info::build::Fields::unit()) + fn type_info() -> Type { + Type::builder() + .path(Path::new("UncheckedExtrinsic", module_path!())) + .docs(&["UncheckedExtrinsic raw bytes, requires custom decoding routine"]) + .composite(Fields::unnamed().field(|f| f.ty::>())) } } From d0812d21cee386044c8e8fd3686ac8016534831e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 28 Jun 2021 12:36:12 +0100 Subject: [PATCH 272/503] Remove todo for removing `TypeInfo` from `Never` --- frame/support/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 82bcfb57c8e20..80cc8a97da55b 100644 --- a/frame/support/src/lib.rs +++ 
b/frame/support/src/lib.rs @@ -90,7 +90,7 @@ use sp_runtime::TypeId; pub const LOG_TARGET: &'static str = "runtime::frame-support"; /// A type that cannot be instantiated. -#[derive(Debug, PartialEq, Eq, Clone, scale_info::TypeInfo)] // todo: [AJ] remove this TypeInfo derive once we support skip +#[derive(Debug, PartialEq, Eq, Clone, scale_info::TypeInfo)] pub enum Never {} /// A pallet identifier. These are per pallet and should be stored in a registry somewhere. From c386343786677a7d92c41abe98b2782bff034a26 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 28 Jun 2021 13:22:01 +0100 Subject: [PATCH 273/503] Skip CheckGenesis T: TypeInfo requirement --- frame/system/src/extensions/check_genesis.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/frame/system/src/extensions/check_genesis.rs b/frame/system/src/extensions/check_genesis.rs index 0191d07eec7b0..4a7997bb2447a 100644 --- a/frame/system/src/extensions/check_genesis.rs +++ b/frame/system/src/extensions/check_genesis.rs @@ -25,9 +25,10 @@ use sp_runtime::{ /// Genesis hash check to provide replay protection between different networks. 
#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] -pub struct CheckGenesis(sp_std::marker::PhantomData); +#[scale_info(skip_type_params(T))] +pub struct CheckGenesis(sp_std::marker::PhantomData); -impl sp_std::fmt::Debug for CheckGenesis { +impl sp_std::fmt::Debug for CheckGenesis { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckGenesis") @@ -46,7 +47,7 @@ impl CheckGenesis { } } -impl SignedExtension for CheckGenesis { +impl SignedExtension for CheckGenesis { type AccountId = T::AccountId; type Call = ::Call; type AdditionalSigned = T::Hash; From e0b79df4e0ce07ded6d31a3538116fc4dadeb707 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 29 Jun 2021 08:26:29 +0100 Subject: [PATCH 274/503] Add some TypeInfo derives --- frame/election-provider-multi-phase/src/signed.rs | 2 +- frame/support/src/storage/bounded_btree_map.rs | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/frame/election-provider-multi-phase/src/signed.rs b/frame/election-provider-multi-phase/src/signed.rs index ba1123c1331ad..7c11fce726964 100644 --- a/frame/election-provider-multi-phase/src/signed.rs +++ b/frame/election-provider-multi-phase/src/signed.rs @@ -43,7 +43,7 @@ use sp_std::{ /// A raw, unchecked signed submission. /// /// This is just a wrapper around [`RawSolution`] and some additional info. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, scale_info::TypeInfo)] pub struct SignedSubmission { /// Who submitted this solution. 
pub who: AccountId, diff --git a/frame/support/src/storage/bounded_btree_map.rs b/frame/support/src/storage/bounded_btree_map.rs index 0c1994d63a35d..a5620dbaf60a2 100644 --- a/frame/support/src/storage/bounded_btree_map.rs +++ b/frame/support/src/storage/bounded_btree_map.rs @@ -34,7 +34,8 @@ use codec::{Encode, Decode}; /// /// Unlike a standard `BTreeMap`, there is an enforced upper limit to the number of items in the /// map. All internal operations ensure this bound is respected. -#[derive(Encode)] +#[derive(Encode, scale_info::TypeInfo)] +#[scale_info(skip_type_params(S))] pub struct BoundedBTreeMap(BTreeMap, PhantomData); impl Decode for BoundedBTreeMap From 9ffc69dba7a03b80bf0645c50264c159363b3801 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 29 Jun 2021 09:55:02 +0100 Subject: [PATCH 275/503] Generate TypeInfo impls for compact npos solutions --- .../npos-elections/compact/src/codec.rs | 88 +++++++++++++++++-- primitives/npos-elections/compact/src/lib.rs | 4 +- 2 files changed, 81 insertions(+), 11 deletions(-) diff --git a/primitives/npos-elections/compact/src/codec.rs b/primitives/npos-elections/compact/src/codec.rs index f75f99682711c..9e6ea6b365521 100644 --- a/primitives/npos-elections/compact/src/codec.rs +++ b/primitives/npos-elections/compact/src/codec.rs @@ -15,33 +15,35 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! Code generation for the ratio assignment type' encode/decode impl. +//! Code generation for the ratio assignment type' encode/decode/info impl. 
use crate::field_name_for; use proc_macro2::TokenStream as TokenStream2; use quote::quote; -pub(crate) fn codec_impl( +pub(crate) fn codec_and_info_impl( ident: syn::Ident, voter_type: syn::Type, target_type: syn::Type, weight_type: syn::Type, count: usize, ) -> TokenStream2 { - let encode = encode_impl(ident.clone(), count); - let decode = decode_impl(ident, voter_type, target_type, weight_type, count); + let encode = encode_impl(&ident, count); + let decode = decode_impl(&ident, &voter_type, &target_type, &weight_type, count); + let scale_info = scale_info_impl(&ident, &voter_type, &target_type, &weight_type, count); quote! { #encode #decode + #scale_info } } fn decode_impl( - ident: syn::Ident, - voter_type: syn::Type, - target_type: syn::Type, - weight_type: syn::Type, + ident: &syn::Ident, + voter_type: &syn::Type, + target_type: &syn::Type, + weight_type: &syn::Type, count: usize, ) -> TokenStream2 { let decode_impl_single = { @@ -132,7 +134,7 @@ fn decode_impl( // General attitude is that we will convert inner values to `Compact` and then use the normal // `Encode` implementation. -fn encode_impl(ident: syn::Ident, count: usize) -> TokenStream2 { +fn encode_impl(ident: &syn::Ident, count: usize) -> TokenStream2 { let encode_impl_single = { let name = field_name_for(1); quote! { @@ -201,3 +203,71 @@ fn encode_impl(ident: syn::Ident, count: usize) -> TokenStream2 { } ) } + +fn scale_info_impl( + ident: &syn::Ident, + voter_type: &syn::Type, + target_type: &syn::Type, + weight_type: &syn::Type, + count: usize, +) -> TokenStream2 { + let scale_info_impl_single = { + let name = format!("{}", field_name_for(1)); + quote! { + .field(|f| + f.ty::<_npos::sp_std::prelude::Vec< + (_npos::codec::Compact<#voter_type>, _npos::codec::Compact<#target_type>) + >>() + .name(#name) + ) + } + }; + + let scale_info_impl_double = { + let name = format!("{}", field_name_for(2)); + quote! 
{ + .field(|f| + f.ty::<_npos::sp_std::prelude::Vec<( + _npos::codec::Compact<#voter_type>, + (_npos::codec::Compact<#target_type>, _npos::codec::Compact<#weight_type>), + _npos::codec::Compact<#target_type> + )>>() + .name(#name) + ) + } + }; + + let scale_info_impl_rest = (3..=count).map(|c| { + let name = format!("{}", field_name_for(c)); + quote! { + .field(|f| + f.ty::<_npos::sp_std::prelude::Vec<( + _npos::codec::Compact<#voter_type>, + [ + (_npos::codec::Compact<#target_type>, _npos::codec::Compact<#weight_type>); + #c - 1 + ], + _npos::codec::Compact<#target_type> + )>>() + .name(#name) + ) + } + }).collect::(); + + quote!( + impl _npos::scale_info::TypeInfo for #ident { + type Identity = Self; + + fn type_info() -> _npos::scale_info::Type<_npos::scale_info::form::MetaForm> { + _npos::scale_info::Type::builder() + .path(_npos::scale_info::Path::new(stringify!(#ident), module_path!())) + .composite( + _npos::scale_info::build::Fields::named() + #scale_info_impl_single + #scale_info_impl_double + #scale_info_impl_rest + ) + } + } + ) +} diff --git a/primitives/npos-elections/compact/src/lib.rs b/primitives/npos-elections/compact/src/lib.rs index 0e6237d9c5f99..96e7e088a6538 100644 --- a/primitives/npos-elections/compact/src/lib.rs +++ b/primitives/npos-elections/compact/src/lib.rs @@ -160,7 +160,7 @@ fn struct_def( let derives_and_maybe_compact_encoding = if compact_encoding { // custom compact encoding. - let compact_impl = codec::codec_impl( + let compact_impl = codec::codec_and_info_impl( ident.clone(), voter_type.clone(), target_type.clone(), @@ -169,7 +169,7 @@ fn struct_def( ); quote!{ #compact_impl - #[derive(Default, PartialEq, Eq, Clone, Debug, PartialOrd, Ord, _npos::scale_info::TypeInfo)] // todo: [AJ] manually generate TypeInfo here instead + #[derive(Default, PartialEq, Eq, Clone, Debug, PartialOrd, Ord)] } } else { // automatically derived. 
From c78588589f992b8d3d2279e940022bfc8fab54cb Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 29 Jun 2021 10:06:09 +0100 Subject: [PATCH 276/503] Fix up impl_error_metadata default --- frame/support/src/dispatch.rs | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 984ce7dda6798..c802f04866609 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2352,16 +2352,25 @@ macro_rules! __impl_error_metadata { #[doc(hidden)] #[allow(dead_code)] pub fn error_metadata() -> Option<$crate::metadata::PalletErrorMetadata> { - let ty = $crate::scale_info::meta_type::<$error_type>(); - // If no error type is declared, `&'static str` is the default error type - // todo: [AJ] can this be done rather by the macro? Not an expert with the declarative macros. - if ty == $crate::scale_info::meta_type::<&'static str>() { - None - } else { - Some($crate::metadata::PalletErrorMetadata { - ty: $crate::scale_info::meta_type::<$error_type>() - }) - } + Some($crate::metadata::PalletErrorMetadata { + ty: $crate::scale_info::meta_type::<$error_type>() + }) + } + } + }; + ( + $mod_type:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?> + { $( $other_where_bounds:tt )* } + { &'static str } + $($rest:tt)* + ) => { + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> + where $( $other_where_bounds )* + { + #[doc(hidden)] + #[allow(dead_code)] + pub fn error_metadata() -> Option<$crate::metadata::PalletErrorMetadata> { + None } } }; From 031f7134b704e906f8a4b28b6a1012d31a6e5f6f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 29 Jun 2021 10:29:49 +0100 Subject: [PATCH 277/503] Remove todo about `Get` requiring `TypeInfo` --- frame/treasury/src/lib.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frame/treasury/src/lib.rs 
b/frame/treasury/src/lib.rs index c4c1a649715d7..952b64b10703b 100644 --- a/frame/treasury/src/lib.rs +++ b/frame/treasury/src/lib.rs @@ -81,6 +81,7 @@ use sp_runtime::{ use frame_support::weights::{Weight, DispatchClass}; use frame_support::traits::EnsureOrigin; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use frame_system::ensure_signed; pub use weights::WeightInfo; @@ -133,7 +134,7 @@ pub trait Config: frame_system::Config { type SpendFunds: SpendFunds; /// The maximum number of approvals that can wait in the spending queue. - type MaxApprovals: Get + scale_info::TypeInfo; // todo: [AJ] see if we can remove this bound for Get storages + type MaxApprovals: Get + TypeInfo; } /// A trait to allow the Treasury Pallet to spend it's funds for other purposes. @@ -163,7 +164,7 @@ pub type ProposalIndex = u32; /// A spending proposal. #[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Proposal { /// The account proposing it. proposer: AccountId, From 7788dc2583e137e176514a2f05b92a34a0c1c4f6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 29 Jun 2021 10:32:01 +0100 Subject: [PATCH 278/503] Rename outer event metadata macro --- frame/support/src/event.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 6e83c11ef7b8e..442c3d5456e98 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -471,7 +471,7 @@ macro_rules! impl_outer_event { } )* } - $crate::__impl_outer_event_json_metadata!( + $crate::__impl_outer_event_metadata!( $runtime; $name; $( @@ -485,15 +485,14 @@ macro_rules! impl_outer_event { #[macro_export] #[doc(hidden)] -// todo: [AJ] rename/refactor? -macro_rules! __impl_outer_event_json_metadata { +macro_rules! 
__impl_outer_event_metadata { ( $runtime:ident; $event_name:ident; $( $module_name:ident::Event < $( $generic_params:path ),* > $( $instance:ident )?, )*; ) => { impl $runtime { - $crate::__impl_outer_event_json_metadata! { + $crate::__impl_outer_event_metadata! { @DECL_MODULE_EVENT_FNS $( $module_name < $( $generic_params ),* > $( $instance )? ; )* } From a7322d323086f13618d68496a3f51b4132822de0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 30 Jun 2021 12:15:07 +0100 Subject: [PATCH 279/503] Update to scale-info 0.9 --- Cargo.lock | 47 ++++++++----------- Cargo.toml | 6 +-- bin/node-template/pallets/template/Cargo.toml | 2 +- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/executor/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/atomic-swap/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- frame/democracy/Cargo.toml | 2 +- .../election-provider-multi-phase/Cargo.toml | 2 +- frame/election-provider-support/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/gilt/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/merkle-mountain-range/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- 
frame/node-authorization/Cargo.toml | 2 +- frame/offences/Cargo.toml | 2 +- frame/offences/benchmarking/Cargo.toml | 2 +- frame/proxy/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 2 +- frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/session/benchmarking/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- frame/staking/Cargo.toml | 2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/support/test/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/system/benchmarking/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/transaction-storage/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/uniques/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authority-discovery/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 4 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/npos-elections/Cargo.toml | 2 +- primitives/npos-elections/fuzzer/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/session/Cargo.toml | 2 +- primitives/staking/Cargo.toml | 2 +- .../transaction-storage-proof/Cargo.toml | 2 +- primitives/version/Cargo.toml | 2 +- test-utils/runtime/Cargo.toml | 2 +- 80 files changed, 101 insertions(+), 110 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 18c98fc826fd0..7096e54e37bab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1679,7 +1679,7 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.1" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#520e638ca4375ee6268b04b1d84de89c060c4f5d" +source = 
"git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#177760823e17b3582986b6c0f2fa1d3b90b474ef" dependencies = [ "either", "futures 0.3.15", @@ -1695,7 +1695,7 @@ dependencies = [ [[package]] name = "fixed-hash" version = "0.7.0" -source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" dependencies = [ "byteorder", "rand 0.8.3", @@ -1826,7 +1826,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#78d3456e4f057d808e78163c2f4954f32b071000" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#32584873b4fc87044869bd55aa6908feb1d0084d" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", @@ -2709,7 +2709,7 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" dependencies = [ "parity-scale-codec", ] @@ -2726,7 +2726,7 @@ dependencies = [ [[package]] name = "impl-serde" version = "0.3.1" -source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" dependencies = [ "serde", ] @@ -5879,12 +5879,13 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" dependencies = [ "cfg-if 1.0.0", "hashbrown", 
"impl-trait-for-tuples", - "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common)", + "parity-util-mem-derive", "parking_lot 0.11.1", "primitive-types", "smallvec 1.6.1", @@ -5902,16 +5903,6 @@ dependencies = [ "synstructure", ] -[[package]] -name = "parity-util-mem-derive" -version = "0.1.0" -source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" -dependencies = [ - "proc-macro2", - "syn", - "synstructure", -] - [[package]] name = "parity-wasm" version = "0.32.0" @@ -6338,14 +6329,13 @@ dependencies = [ [[package]] name = "primitive-types" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" dependencies = [ "fixed-hash", "impl-codec", - "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common)", - "parity-scale-codec", + "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", "scale-info", - "uint 0.9.0 (git+https://github.com/paritytech/parity-common)", + "uint 0.9.0 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", ] [[package]] @@ -8162,7 +8152,7 @@ dependencies = [ "log", "parity-scale-codec", "parity-util-mem", - "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-util-mem-derive", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -8311,9 +8301,11 @@ dependencies = [ [[package]] name = "scale-info" -version = "0.6.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#91799c7cfa2db7b8d8c8c11da4bc3c4d71a50e27" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc96e61bb609c3ff7fd758e3b3b78a0d07797b942bdd88140c9078ccf7df700b" dependencies = [ + "bitvec", "cfg-if 1.0.0", "derive_more", 
"parity-scale-codec", @@ -8323,8 +8315,9 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "0.4.0" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#91799c7cfa2db7b8d8c8c11da4bc3c4d71a50e27" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "777bae32436ada4e11677dc898a8a3f82f7792942022b0b127ba62049d36d26f" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", @@ -10827,7 +10820,7 @@ dependencies = [ [[package]] name = "uint" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common#7c2a9b28e266d0b0adbad4869e2559500fad4859" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" dependencies = [ "byteorder", "crunchy", diff --git a/Cargo.toml b/Cargo.toml index 3f27a574dc00b..04e241a115106 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -277,7 +277,5 @@ zeroize = { opt-level = 3 } panic = "unwind" [patch.crates-io] -parity-util-mem = { git = "https://github.com/paritytech/parity-common" } -primitive-types = { git = "https://github.com/paritytech/parity-common" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate" } -finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } +primitive-types = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } +finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } \ No newline at end of file diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index b28cf1cbf3242..c9d2f1527e946 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", 
default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { default-features = false, version = "3.0.0", path = "../../../../frame/support" } frame-system = { default-features = false, version = "3.0.0", path = "../../../../frame/system" } frame-benchmarking = { default-features = false, version = "3.1.0", path = "../../../../frame/benchmarking", optional = true } diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index 44f8aaaaa00c8..b9130562bbd1a 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } pallet-aura = { version = "3.0.0", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "3.0.0", default-features = false, path = "../../../frame/balances" } frame-support = { version = "3.0.0", default-features = false, path = "../../../frame/support" } diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index 6558e6b23caf2..cf14f94337b31 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version 
= "0.9.0", default-features = false, features = ["derive"] } node-primitives = { version = "2.0.0", path = "../primitives" } node-runtime = { version = "2.0.0", path = "../runtime" } sc-executor = { version = "0.9.0", path = "../../../client/executor" } diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index 00dfc861bea2c..136e2509af329 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-system = { version = "3.0.0", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = "../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 3e7861f8edf0d..5606dbcab5350 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } log = { version = "0.4.14", default-features = false } diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index 
ccf244e8e2afa..3bea0fb8108bd 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index ffacbc828f48a..2b19ce0e9981d 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index ec65429fe09fe..a1bf822a8f0cd 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features 
= ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } pallet-session = { version = "3.0.0", default-features = false, path = "../session" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index 0a6f8315ad008..1139b01c4d9f7 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-authority-discovery = { version = "3.0.0", default-features = false, path = "../../primitives/authority-discovery" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } pallet-session = { version = "3.0.0", features = ["historical" ], path = "../session", default-features = false } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index 1f52de2606cb6..a5456e83e9203 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, 
features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-authorship = { version = "3.0.0", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index de45df16873f5..d9b847201fef3 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index c5e66ef65ef91..c9de70020e468 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", 
default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index 9a84ed581a2a0..1c2466f5cf17d 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.0", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-api = { version = "3.0.0", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "3.0.0", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "3.0.0", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index 7e77acc47cf9e..f516b8990e736 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", 
default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index e7c88fe5f8f13..51c84badd4bce 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index 76723a6421017..d2dd0621efa28 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } pwasm-utils = { version = "0.18", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index 5ce7c827bffe4..fd91fb9539f6d 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] bitflags = "1.0" codec = { package = 
"parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } serde = { version = "1", features = ["derive"], optional = true } # Substrate Dependencies (This crate should not rely on frame) diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index ebc559a17b97b..9d3dd62fc0dca 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } # Substrate Dependencies pallet-contracts-primitives = { version = "3.0.0", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index b3461f9b640d0..478d669058d0f 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", 
default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index 7881cf9cf1ef7..31e8aff368062 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] static_assertions = "1.1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/election-provider-support/Cargo.toml b/frame/election-provider-support/Cargo.toml index 792d00604ed2a..48aac6a54b321 100644 --- a/frame/election-provider-support/Cargo.toml +++ b/frame/election-provider-support/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } diff --git a/frame/elections-phragmen/Cargo.toml 
b/frame/elections-phragmen/Cargo.toml index 7044809cf9b93..ccbc91e2546d7 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "3.0.0", default-features = false, path = "../../primitives/npos-elections" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 55264494bf439..5542fbd4768d2 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index c583fcbdc8861..f97ead4a86dc0 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,7 +14,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index 0dab42f2514a2..ae13c51827b77 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 7fbb2cba45e59..94ef20ed61674 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, 
features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index 9c0b9258f1332..d78bbf9232f85 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index 4fe8275ed7988..65f091542d656 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../primitives/arithmetic" } diff --git 
a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 69e1819e202bd..5b6d890cac612 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "3.0.0", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index c0df8cde654f4..306a19f392ae2 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index 5c5b74be50d5d..69248d92673b6 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "3.0.0", default-features = false, path = 
"../../primitives/application-crypto" } pallet-authorship = { version = "3.0.0", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index 0cb332ad39d13..98f09ac5cca48 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index 7a31611037dcd..a6b2e9f4a1f1b 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = 
"aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 35e374931ef40..a4771599f3a5d 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } log = { version = "0.4.0", default-features = false } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index 33b6e12601ad0..c111d1dabbbbf 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "3.0.0", default-features = 
false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index fcf0f8fcbcdff..a6bce677ddce5 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 1742840377b74..ffbde3eccb86f 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index da4b61422327e..a48b22b74dff1 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -13,7 +13,7 @@ 
targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index 10acdb0b15a9b..017cc2a91f19b 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.101", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index df32e58363efe..62e3fb8cee27d 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = 
"https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../../benchmarking" } frame-support = { version = "3.0.0", default-features = false, path = "../../support" } frame-system = { version = "3.0.0", default-features = false, path = "../../system" } diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index 44dbef4d5be74..8842659598075 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index e7c33a8661ae8..6d3a982c669e6 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } 
sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index 8f8b15064ccbb..8ab7db76b6926 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index 7bdf6df5b9e3a..6dc7df252023a 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -11,7 +11,7 @@ readme = "README.md" [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 5bed3a673b6d3..5dbed3f4e0116 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index 08f6fb7bb6b32..c28980a6c547e 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index 7abffce01476f..802adfdb5cbe2 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -13,7 +13,7 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features 
= false, features = ["derive"] } sp-session = { version = "3.0.0", default-features = false, path = "../../../primitives/session" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index c86af259dfccd..0e4eaf8f47f7e 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index 3fdc59b16a86f..f99c6ebe256a5 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.101", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io ={ version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { 
version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index 66a30822d6a81..acb5f82ccbde3 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 92c9f777d7946..48760dcfc46c4 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.1.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v14"] } max-encoded-len = { version = "3.0.0", default-features = false, path = "../../max-encoded-len", features = [ "derive" ] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } diff --git a/frame/support/test/Cargo.toml 
b/frame/support/test/Cargo.toml index 86e5f626c0d25..e8740d9447945 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "3.0.0", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.9.0", optional = true, path = "../../../primitives/state-machine" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index dae558701f796..273515e997b94 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", path = "../../primitives/io", default-features = false } diff --git a/frame/system/benchmarking/Cargo.toml b/frame/system/benchmarking/Cargo.toml index a1c652178e2d2..095be2c265a37 100644 --- 
a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } frame-benchmarking = { version = "3.1.0", default-features = false, path = "../../benchmarking" } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 4b6ca17a1bb33..f6ba881b2c109 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index c66439de753e5..f003bc4d06746 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", 
default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 72ae74e55c932..2a6e5671d4beb 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true } smallvec = "1.4.1" diff --git a/frame/transaction-storage/Cargo.toml b/frame/transaction-storage/Cargo.toml index 7aea92c122f08..94677d9388685 100644 --- a/frame/transaction-storage/Cargo.toml +++ b/frame/transaction-storage/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.101", optional = true } hex-literal = { version = "0.3.1", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = 
"../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } pallet-balances = { version = "3.0.0", default-features = false, path = "../balances" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index b73f81f1e381e..490bd8462fba0 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "3.0.0", default-features = false, path = "../support" } diff --git a/frame/uniques/Cargo.toml b/frame/uniques/Cargo.toml index 74103ded00bd7..8787ac67a5d7c 100644 --- a/frame/uniques/Cargo.toml +++ b/frame/uniques/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/utility/Cargo.toml 
b/frame/utility/Cargo.toml index 8f28aee44762a..2a3904ccdb8d5 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } frame-support = { version = "3.0.0", default-features = false, path = "../support" } frame-system = { version = "3.0.0", default-features = false, path = "../system" } sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index ff7dac70e77ed..d738cb1f100dc 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index 68c7ec42a77ab..8afc491b977bb 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "3.0.0", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", 
version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-io = { version = "3.0.0", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 31e9cee01a2e1..df77dd6285432 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" static_assertions = "1.1.0" num-traits = { version = "0.2.8", default-features = false } diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index 23cfcdd78bdae..2b730e80f1bca 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", 
default-features = false, path = "../std" } sp-api = { version = "3.0.0", default-features = false, path = "../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index c730a6836addb..b1237ae2412d9 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = { version = "3.0.0", default-features = false, path = "../../api" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index 2c33450b6a7ab..b4db91a0df7e3 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "3.0.0", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "3.0.0", default-features = false, path = "../../std" } sp-api = 
{ version = "3.0.0", default-features = false, path = "../../api" } diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 91548875fa239..a86e597936a1d 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../arithmetic" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 2b721f128158c..bc97ca6c1e157 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,11 +15,11 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "3.0.0", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } -primitive-types = { git = "https://github.com/paritytech/parity-common", default-features = false, features = ["codec", "scale-info"] } +primitive-types = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info", default-features = false, 
features = ["codec", "scale-info"] } impl-serde = { version = "0.3.0", optional = true } wasmi = { version = "0.9.0", optional = true } hash-db = { version = "0.15.2", default-features = false } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 3076f500f56c4..8feb5cabf50b4 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 0a3d85f4fcaf0..ba78387d908d0 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-npos-elections-compact = { version = "3.0.0", path = "./compact" } diff --git a/primitives/npos-elections/fuzzer/Cargo.toml 
b/primitives/npos-elections/fuzzer/Cargo.toml index e6213c1c6f014..31a86def3ebb3 100644 --- a/primitives/npos-elections/fuzzer/Cargo.toml +++ b/primitives/npos-elections/fuzzer/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } honggfuzz = "0.5" rand = { version = "0.7.3", features = ["std", "small_rng"] } sp-arithmetic = { version = "3.0.0", path = "../../arithmetic" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 0f941adf9d52a..ee31598e4465f 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-core = { version = "3.0.0", default-features = false, path = "../core" } sp-application-crypto = { version = "3.0.0", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "3.0.0", default-features = false, path = "../arithmetic" } diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index 9d1f60fc272c2..2844a294b3997 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = 
"2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-api = { version = "3.0.0", default-features = false, path = "../api" } sp-core = { version = "3.0.0", default-features = false, path = "../core" } sp-std = { version = "3.0.0", default-features = false, path = "../std" } diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index 2d88f029072a5..c2f08e1408052 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } sp-std = { version = "3.0.0", default-features = false, path = "../std" } diff --git a/primitives/transaction-storage-proof/Cargo.toml b/primitives/transaction-storage-proof/Cargo.toml index 7b48fc6f437a6..66d866bd2ae11 100644 --- a/primitives/transaction-storage-proof/Cargo.toml +++ b/primitives/transaction-storage-proof/Cargo.toml @@ -19,7 +19,7 @@ sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-trie = { version = "3.0.0", optional = true, path = "../trie" } sp-core = { version = "3.0.0", path = "../core", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] 
} +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } log = { version = "0.4.8", optional = true } async-trait = { version = "0.1.48", optional = true } diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index fcfe8118dc5c2..e10e855fe26ea 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] impl-serde = { version = "0.3.1", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "3.0.0", default-features = false, path = "../std" } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } sp-version-proc-macro = { version = "3.0.0", default-features = false, path = "proc-macro" } diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 206ba516591db..da5ddb295b5af 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.9.0", default-features = false, path = "../.. 
sp-consensus-babe = { version = "0.9.0", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "3.0.0", default-features = false, path = "../../primitives/block-builder" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.26.0", default-features = false } From b61db7f052ceeffa4d55c1b77c0f5e048270ca67 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 30 Jun 2021 16:45:47 +0100 Subject: [PATCH 280/503] Add metadata-docs feature for reducing runtime Wasm size --- frame/support/Cargo.toml | 5 +++ frame/support/procedural/Cargo.toml | 3 +- .../procedural/src/pallet/expand/config.rs | 3 +- .../procedural/src/pallet/expand/error.rs | 3 +- .../procedural/src/pallet/expand/event.rs | 3 +- .../src/pallet/expand/genesis_config.rs | 3 +- .../procedural/src/pallet/expand/mod.rs | 3 +- .../src/pallet/expand/pallet_struct.rs | 3 +- .../procedural/src/pallet/parse/call.rs | 5 ++- .../procedural/src/pallet/parse/config.rs | 3 +- .../procedural/src/pallet/parse/error.rs | 3 +- .../src/pallet/parse/extra_constants.rs | 3 +- .../procedural/src/pallet/parse/helper.rs | 17 -------- .../procedural/src/pallet/parse/storage.rs | 3 +- .../procedural/src/storage/metadata.rs | 13 ++---- frame/support/procedural/tools/src/docs.rs | 40 +++++++++++++++++++ frame/support/procedural/tools/src/lib.rs | 3 ++ 17 files changed, 76 insertions(+), 40 deletions(-) create mode 100644 frame/support/procedural/tools/src/docs.rs diff --git a/frame/support/Cargo.toml 
b/frame/support/Cargo.toml index 48760dcfc46c4..fc739d21db035 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -44,6 +44,7 @@ parity-util-mem = { version = "0.9.0", default-features = false, features = ["pr [features] default = ["std"] std = [ + "metadata-docs", "once_cell", "serde", "scale-info/std", @@ -61,5 +62,9 @@ std = [ "log/std", "max-encoded-len/std", ] +metadata-docs = [ + "frame-support-procedural/metadata-docs", + "scale-info/docs" +] runtime-benchmarks = [] try-runtime = [] diff --git a/frame/support/procedural/Cargo.toml b/frame/support/procedural/Cargo.toml index 4a00a24e3849d..ff36b3ab56d3a 100644 --- a/frame/support/procedural/Cargo.toml +++ b/frame/support/procedural/Cargo.toml @@ -23,4 +23,5 @@ syn = { version = "1.0.58", features = ["full"] } [features] default = ["std"] -std = [] +std = ["metadata-docs"] +metadata-docs = [] diff --git a/frame/support/procedural/src/pallet/expand/config.rs b/frame/support/procedural/src/pallet/expand/config.rs index 1e60313c55317..96dae031c9173 100644 --- a/frame/support/procedural/src/pallet/expand/config.rs +++ b/frame/support/procedural/src/pallet/expand/config.rs @@ -15,7 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::pallet::{Def, parse::helper::get_doc_literals}; +use crate::pallet::Def; +use frame_support_procedural_tools::get_doc_literals; /// * Generate default rust doc pub fn expand_config(def: &mut Def) -> proc_macro2::TokenStream { diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index aa7099c4bf441..d5ef8b08d99b6 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -15,7 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::pallet::{Def, parse::helper::get_doc_literals}; +use crate::pallet::Def; +use frame_support_procedural_tools::get_doc_literals; /// * impl various trait on Error pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 8ec687b1ba57d..304547e99ce2b 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -15,7 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::pallet::{Def, parse::helper::get_doc_literals}; +use crate::pallet::Def; +use frame_support_procedural_tools::get_doc_literals; use crate::COUNTER; use syn::{spanned::Spanned, Ident}; diff --git a/frame/support/procedural/src/pallet/expand/genesis_config.rs b/frame/support/procedural/src/pallet/expand/genesis_config.rs index ac0bdacefc772..1f65cb6a079cb 100644 --- a/frame/support/procedural/src/pallet/expand/genesis_config.rs +++ b/frame/support/procedural/src/pallet/expand/genesis_config.rs @@ -15,7 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::pallet::{Def, parse::helper::get_doc_literals}; +use crate::pallet::Def; +use frame_support_procedural_tools::get_doc_literals; use crate::COUNTER; use syn::{Ident, spanned::Spanned}; diff --git a/frame/support/procedural/src/pallet/expand/mod.rs b/frame/support/procedural/src/pallet/expand/mod.rs index f3a42dfa868b2..97ae2f61ae83b 100644 --- a/frame/support/procedural/src/pallet/expand/mod.rs +++ b/frame/support/procedural/src/pallet/expand/mod.rs @@ -32,7 +32,8 @@ mod type_value; mod origin; mod validate_unsigned; -use crate::pallet::{Def, parse::helper::get_doc_literals}; +use crate::pallet::Def; +use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; /// Merge where clause together, `where` token span is taken from the first not none one. diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index c6bd3a74705a0..a73f2716b8c44 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -15,7 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::pallet::{Def, expand::merge_where_clauses, parse::helper::get_doc_literals}; +use crate::pallet::{Def, expand::merge_where_clauses}; +use frame_support_procedural_tools::get_doc_literals; /// * Add derive trait on Pallet /// * Implement GetPalletVersion on Pallet diff --git a/frame/support/procedural/src/pallet/parse/call.rs b/frame/support/procedural/src/pallet/parse/call.rs index 299b86cf6f84e..380f01d9eecd6 100644 --- a/frame/support/procedural/src/pallet/parse/call.rs +++ b/frame/support/procedural/src/pallet/parse/call.rs @@ -16,6 +16,7 @@ // limitations under the License. 
use super::helper; +use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; use syn::spanned::Spanned; @@ -223,7 +224,7 @@ impl CallDef { args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); } - let docs = helper::get_doc_literals(&method.attrs); + let docs = get_doc_literals(&method.attrs); methods.push(CallVariantDef { name: method.sig.ident.clone(), @@ -243,7 +244,7 @@ impl CallDef { instances, methods, where_clause: item.generics.where_clause.clone(), - docs: helper::get_doc_literals(&item.attrs), + docs: get_doc_literals(&item.attrs), }) } } diff --git a/frame/support/procedural/src/pallet/parse/config.rs b/frame/support/procedural/src/pallet/parse/config.rs index 69dfaeb7f9e9b..6a476165de036 100644 --- a/frame/support/procedural/src/pallet/parse/config.rs +++ b/frame/support/procedural/src/pallet/parse/config.rs @@ -16,6 +16,7 @@ // limitations under the License. use super::helper; +use frame_support_procedural_tools::get_doc_literals; use core::convert::TryFrom; use syn::spanned::Spanned; use quote::ToTokens; @@ -68,7 +69,7 @@ impl TryFrom<&syn::TraitItemType> for ConstMetadataDef { fn try_from(trait_ty: &syn::TraitItemType) -> Result { let err = |span, msg| syn::Error::new(span, format!("Invalid usage of `#[pallet::constant]`: {}", msg)); - let doc = helper::get_doc_literals(&trait_ty.attrs); + let doc = get_doc_literals(&trait_ty.attrs); let ident = trait_ty.ident.clone(); let bound = trait_ty.bounds .iter() diff --git a/frame/support/procedural/src/pallet/parse/error.rs b/frame/support/procedural/src/pallet/parse/error.rs index 49aaebc87f428..969123eb637f0 100644 --- a/frame/support/procedural/src/pallet/parse/error.rs +++ b/frame/support/procedural/src/pallet/parse/error.rs @@ -16,6 +16,7 @@ // limitations under the License. 
use super::helper; +use frame_support_procedural_tools::get_doc_literals; use syn::spanned::Spanned; use quote::ToTokens; @@ -78,7 +79,7 @@ impl ErrorDef { return Err(syn::Error::new(span, msg)); } - Ok((variant.ident.clone(), helper::get_doc_literals(&variant.attrs))) + Ok((variant.ident.clone(), get_doc_literals(&variant.attrs))) }) .collect::>()?; diff --git a/frame/support/procedural/src/pallet/parse/extra_constants.rs b/frame/support/procedural/src/pallet/parse/extra_constants.rs index 430bf94783774..d56bcf51b6aac 100644 --- a/frame/support/procedural/src/pallet/parse/extra_constants.rs +++ b/frame/support/procedural/src/pallet/parse/extra_constants.rs @@ -16,6 +16,7 @@ // limitations under the License. use super::helper; +use frame_support_procedural_tools::get_doc_literals; use syn::spanned::Spanned; /// List of additional token to be used for parsing. @@ -107,7 +108,7 @@ impl ExtraConstantsDef { extra_constants.push(ExtraConstantDef { ident: method.sig.ident.clone(), type_, - doc: helper::get_doc_literals(&method.attrs), + doc: get_doc_literals(&method.attrs), }); } diff --git a/frame/support/procedural/src/pallet/parse/helper.rs b/frame/support/procedural/src/pallet/parse/helper.rs index 3a7729c47e1d7..30a2ccd9ee012 100644 --- a/frame/support/procedural/src/pallet/parse/helper.rs +++ b/frame/support/procedural/src/pallet/parse/helper.rs @@ -141,23 +141,6 @@ impl MutItemAttrs for syn::ItemMod { } } -/// Return all doc attributes literals found. 
-pub fn get_doc_literals(attrs: &Vec) -> Vec { - attrs.iter() - .filter_map(|attr| { - if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() { - if meta.path.get_ident().map_or(false, |ident| ident == "doc") { - Some(meta.lit) - } else { - None - } - } else { - None - } - }) - .collect() -} - /// Parse for `()` struct Unit; impl syn::parse::Parse for Unit { diff --git a/frame/support/procedural/src/pallet/parse/storage.rs b/frame/support/procedural/src/pallet/parse/storage.rs index 9ec890e66e57a..82317200dcdd1 100644 --- a/frame/support/procedural/src/pallet/parse/storage.rs +++ b/frame/support/procedural/src/pallet/parse/storage.rs @@ -16,6 +16,7 @@ // limitations under the License. use super::helper; +use frame_support_procedural_tools::get_doc_literals; use syn::spanned::Spanned; use quote::ToTokens; use std::collections::HashMap; @@ -635,7 +636,7 @@ impl StorageDef { instances.push(helper::check_type_def_gen(&item.generics, item.ident.span())?); let where_clause = item.generics.where_clause.clone(); - let docs = helper::get_doc_literals(&item.attrs); + let docs = get_doc_literals(&item.attrs); let typ = if let syn::Type::Path(typ) = &*item.ty { typ diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index ce373cd3481b5..124ea20f4d559 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -17,6 +17,7 @@ //! Implementation of `storage_metadata` on module structure, used by construct_runtime. 
+use frame_support_procedural_tools::get_doc_literals; use proc_macro2::TokenStream; use quote::quote; use super::{DeclStorageDefExt, StorageLineDefExt, StorageLineTypeDef}; @@ -164,15 +165,7 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { default_byte_getter_struct_instance, ) = default_byte_getter(scrate, line, def); - let mut docs = TokenStream::new(); - for attr in line.attrs.iter().filter_map(|v| v.parse_meta().ok()) { - if let syn::Meta::NameValue(meta) = attr { - if meta.path.is_ident("doc") { - let lit = meta.lit; - docs.extend(quote!(#lit,)); - } - } - } + let docs = get_doc_literals(&line.attrs); let entry = quote! { #scrate::metadata::StorageEntryMetadata { @@ -180,7 +173,7 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { modifier: #modifier, ty: #ty, default: #default_byte_getter_struct_instance.default_byte(), - documentation: #scrate::scale_info::prelude::vec![ #docs ], + documentation: #scrate::scale_info::prelude::vec![ #( #docs ),* ], }, }; diff --git a/frame/support/procedural/tools/src/docs.rs b/frame/support/procedural/tools/src/docs.rs new file mode 100644 index 0000000000000..85715a66dbd73 --- /dev/null +++ b/frame/support/procedural/tools/src/docs.rs @@ -0,0 +1,40 @@ +// This file is part of Substrate. + +// Copyright (C) 2020-2021 Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#[cfg(feature = "metadata-docs")] +/// Return all doc attributes literals found. +pub fn get_doc_literals(attrs: &Vec) -> Vec { + attrs.iter() + .filter_map(|attr| { + if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() { + if meta.path.get_ident().map_or(false, |ident| ident == "doc") { + Some(meta.lit) + } else { + None + } + } else { + None + } + }) + .collect() +} + +#[cfg(not(feature = "metadata-docs"))] +/// No-op, returns empty Vec unless the "metadata-docs" feature is enabled. +pub fn get_doc_literals(_attrs: &Vec) -> Vec { + Vec::new() +} \ No newline at end of file diff --git a/frame/support/procedural/tools/src/lib.rs b/frame/support/procedural/tools/src/lib.rs index 64f21d66391c2..8b8887f110f13 100644 --- a/frame/support/procedural/tools/src/lib.rs +++ b/frame/support/procedural/tools/src/lib.rs @@ -26,8 +26,11 @@ use proc_macro_crate::{crate_name, FoundCrate}; use syn::parse::Error; use quote::quote; +mod docs; pub mod syn_ext; +pub use docs::get_doc_literals; + // FIXME #1569, remove the following functions, which are copied from sp-api-macros use proc_macro2::{TokenStream, Span}; use syn::Ident; From 2e75a85b5f364c72d06b167605467d630f166284 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 30 Jun 2021 16:56:36 +0100 Subject: [PATCH 281/503] Add metadata-docs feature for reducing runtime Wasm size --- frame/support/src/dispatch.rs | 6 ++++++ frame/support/src/event.rs | 7 ++++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index c802f04866609..f2176af10b4eb 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2473,7 +2473,10 @@ macro_rules! 
__impl_module_constants_metadata { value: $default_byte_name::<$const_trait_instance $(, $const_instance)?>( Default::default() ).default_byte(), + #[cfg(feature = "metadata-docs")] documentation: $crate::scale_info::prelude::vec![ $( $doc_attr ),* ], + #[cfg(not(feature = "metadata-docs"))] + documentation: $crate::scale_info::prelude::vec![], } ),* ] @@ -2563,7 +2566,10 @@ macro_rules! __function_to_metadata { } ),* ], + #[cfg(feature = "metadata-docs")] documentation: $crate::scale_info::prelude::vec![ $( $fn_doc ),* ], + #[cfg(not(feature = "metadata-docs"))] + documentation: $crate::scale_info::prelude::vec![], } }; diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 442c3d5456e98..10dccae8427a6 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -305,9 +305,10 @@ macro_rules! __events_to_metadata { arguments: $crate::scale_info::prelude::vec![ $( $( $crate::metadata::TypeSpec::new::<$param>(stringify!($param)) ),* )* ], - documentation: $crate::scale_info::prelude::vec![ - $( $doc_attr ),* - ], + #[cfg(feature = "metadata-docs")] + documentation: $crate::scale_info::prelude::vec![ $( $doc_attr ),* ], + #[cfg(not(feature = "metadata-docs"))] + documentation: $crate::scale_info::prelude::vec![], }; $( $rest )* ) From a13c9d2ced1fc24b06e427ee4c9a79f116ef8eef Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 1 Jul 2021 15:17:16 +0100 Subject: [PATCH 282/503] Update all parity-common dependencies --- Cargo.lock | 145 +++++++++++++++-------- Cargo.toml | 8 +- bin/node/bench/Cargo.toml | 6 +- client/api/Cargo.toml | 4 +- client/db/Cargo.toml | 10 +- client/informant/Cargo.toml | 2 +- client/service/Cargo.toml | 2 +- client/state-db/Cargo.toml | 2 +- client/transaction-pool/Cargo.toml | 2 +- client/transaction-pool/graph/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- max-encoded-len/Cargo.toml | 2 +- primitives/core/Cargo.toml | 4 +- primitives/database/Cargo.toml | 2 +- primitives/runtime-interface/Cargo.toml | 
2 +- primitives/runtime/Cargo.toml | 2 +- primitives/test-primitives/Cargo.toml | 2 +- test-utils/runtime/Cargo.toml | 2 +- utils/browser/Cargo.toml | 2 +- 19 files changed, 125 insertions(+), 78 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 31e4d36e91c23..3f7d17793e5b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1695,7 +1695,19 @@ dependencies = [ [[package]] name = "fixed-hash" version = "0.7.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" +dependencies = [ + "byteorder", + "rand 0.8.3", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fixed-hash" +version = "0.7.0" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ "byteorder", "rand 0.8.3", @@ -2364,6 +2376,11 @@ version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" +[[package]] +name = "hash-db" +version = "0.15.2" +source = "git+https://github.com/paritytech/trie?branch=aj-update-parity-common#7d1265578a23e26de7a1e40e4f593a2c0562f9a9" + [[package]] name = "hash256-std-hasher" version = "0.15.2" @@ -2709,7 +2726,16 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-codec" +version = "0.5.0" +source = 
"git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ "parity-scale-codec", ] @@ -2726,7 +2752,7 @@ dependencies = [ [[package]] name = "impl-serde" version = "0.3.1" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ "serde", ] @@ -3049,7 +3075,7 @@ version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711adba9940a039f4374fc5724c0a5eaca84a2d558cce62256bfe26f0dbef05e" dependencies = [ - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "hash256-std-hasher", "tiny-keccak", ] @@ -3075,9 +3101,8 @@ dependencies = [ [[package]] name = "kvdb" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8891bd853eff90e33024195d79d578dc984c82f9e0715fcd2b525a0c19d52811" +version = "0.10.0" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ "parity-util-mem", "smallvec 1.6.1", @@ -3085,9 +3110,8 @@ dependencies = [ [[package]] name = "kvdb-memorydb" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a0da8e08caf08d384a620ec19bb6c9b85c84137248e202617fb91881f25912" +version = "0.10.0" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ "kvdb", "parity-util-mem", @@ -3096,9 +3120,8 @@ dependencies = [ [[package]] name = "kvdb-rocksdb" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34446c373ccc494c2124439281c198c7636ccdc2752c06722bbffd56d459c1e4" +version = "0.12.0" +source = 
"git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ "fs-swap", "kvdb", @@ -3114,9 +3137,8 @@ dependencies = [ [[package]] name = "kvdb-web" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb1e98ba343d0b35f9009a8844cd2b87fa3192f7e79033ac05b00aeae0f3b0b5" +version = "0.10.0" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ "futures 0.3.15", "js-sys", @@ -3795,7 +3817,7 @@ dependencies = [ "impl-trait-for-tuples", "max-encoded-len-derive", "parity-scale-codec", - "primitive-types", + "primitive-types 0.10.0", "rustversion", "trybuild", ] @@ -3862,10 +3884,9 @@ dependencies = [ [[package]] name = "memory-db" version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "814bbecfc0451fc314eeea34f05bbcd5b98a7ad7af37faee088b86a1e633f1d4" +source = "git+https://github.com/paritytech/trie?branch=aj-update-parity-common#7d1265578a23e26de7a1e40e4f593a2c0562f9a9" dependencies = [ - "hash-db", + "hash-db 0.15.2 (git+https://github.com/paritytech/trie?branch=aj-update-parity-common)", "hashbrown", "parity-util-mem", ] @@ -4129,7 +4150,7 @@ dependencies = [ "derive_more", "fs_extra", "futures 0.3.15", - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "hex", "kvdb", "kvdb-rocksdb", @@ -5878,16 +5899,15 @@ dependencies = [ [[package]] name = "parity-util-mem" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" +version = "0.10.0" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ "cfg-if 1.0.0", "hashbrown", "impl-trait-for-tuples", - "parity-util-mem-derive", + 
"parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", "parking_lot 0.11.1", - "primitive-types", + "primitive-types 0.10.0", "smallvec 1.6.1", "winapi 0.3.9", ] @@ -5903,6 +5923,16 @@ dependencies = [ "synstructure", ] +[[package]] +name = "parity-util-mem-derive" +version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +dependencies = [ + "proc-macro2", + "syn", + "synstructure", +] + [[package]] name = "parity-wasm" version = "0.32.0" @@ -6329,10 +6359,21 @@ dependencies = [ [[package]] name = "primitive-types" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" +dependencies = [ + "fixed-hash 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-codec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "primitive-types" +version = "0.10.0" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ - "fixed-hash", - "impl-codec", + "fixed-hash 0.7.0 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", + "impl-codec 0.5.0 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", "scale-info", "uint 0.9.0 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", @@ -6893,9 +6934,9 @@ dependencies = [ [[package]] name = "rocksdb" -version = "0.15.0" +version = "0.16.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d83c02c429044d58474eaf5ae31e062d0de894e21125b47437ec0edc1397e6" +checksum = "c749134fda8bfc90d0de643d59bfc841dcb3ac8a1062e12b6754bd60235c48b3" dependencies = [ "libc", "librocksdb-sys", @@ -7219,7 +7260,7 @@ dependencies = [ "derive_more", "fnv", "futures 0.3.15", - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "kvdb", "kvdb-memorydb", "lazy_static", @@ -7254,7 +7295,7 @@ name = "sc-client-db" version = "0.9.0" dependencies = [ "blake2-rfc", - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "kvdb", "kvdb-memorydb", "kvdb-rocksdb", @@ -7770,7 +7811,7 @@ dependencies = [ name = "sc-light" version = "3.0.0" dependencies = [ - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static", "parity-scale-codec", "parking_lot 0.11.1", @@ -7956,7 +7997,7 @@ dependencies = [ "assert_matches", "futures 0.1.31", "futures 0.3.15", - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core", "jsonrpc-pubsub", "lazy_static", @@ -8058,7 +8099,7 @@ dependencies = [ "futures 0.1.31", "futures 0.3.15", "futures-timer 3.0.2", - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core", "jsonrpc-pubsub", "lazy_static", @@ -8162,7 +8203,7 @@ dependencies = [ "log", "parity-scale-codec", "parity-util-mem", - "parity-util-mem-derive", + "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -8774,7 +8815,7 @@ dependencies = [ name = "sp-api" version = "3.0.0" dependencies = [ - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "log", "parity-scale-codec", "sp-api-proc-macro", @@ -8853,7 +8894,7 @@ dependencies = [ "integer-sqrt", "num-traits", "parity-scale-codec", - "primitive-types", + 
"primitive-types 0.9.0", "rand 0.7.3", "scale-info", "serde", @@ -8870,7 +8911,7 @@ dependencies = [ "honggfuzz", "num-bigint", "num-traits", - "primitive-types", + "primitive-types 0.9.0", "sp-arithmetic", ] @@ -9034,7 +9075,7 @@ dependencies = [ "dyn-clonable", "ed25519-dalek", "futures 0.3.15", - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "hash256-std-hasher", "hex", "hex-literal", @@ -9049,7 +9090,7 @@ dependencies = [ "parity-util-mem", "parking_lot 0.11.1", "pretty_assertions 0.6.1", - "primitive-types", + "primitive-types 0.10.0", "rand 0.7.3", "rand_chacha 0.2.2", "regex", @@ -9137,7 +9178,7 @@ name = "sp-io" version = "3.0.0" dependencies = [ "futures 0.3.15", - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "libsecp256k1", "log", "parity-scale-codec", @@ -9298,7 +9339,7 @@ version = "3.0.0" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", - "primitive-types", + "primitive-types 0.10.0", "rustversion", "sp-core", "sp-externalities", @@ -9413,7 +9454,7 @@ dependencies = [ name = "sp-state-machine" version = "0.9.0" dependencies = [ - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "hex-literal", "log", "num-traits", @@ -9543,7 +9584,7 @@ name = "sp-trie" version = "3.0.0" dependencies = [ "criterion", - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "hex-literal", "memory-db", "parity-scale-codec", @@ -9825,7 +9866,7 @@ dependencies = [ "async-trait", "futures 0.1.31", "futures 0.3.15", - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "hex", "parity-scale-codec", "sc-client-api", @@ -10643,7 +10684,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "568257edb909a5c532b1f4ab38ee6b5dedfbf8775be6a55a29020513ebe3e072" dependencies = [ "criterion", - "hash-db", + "hash-db 0.15.2 
(registry+https://github.com/rust-lang/crates.io-index)", "keccak-hasher", "memory-db", "parity-scale-codec", @@ -10658,7 +10699,7 @@ version = "0.22.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd81fe0c8bc2b528a51c9d2c31dae4483367a26a723a3c9a4a8120311d7774e3" dependencies = [ - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "hashbrown", "log", "rustc-hex", @@ -10671,7 +10712,7 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "652931506d2c1244d7217a70b99f56718a7b4161b37f04e7cd868072a99f68cd" dependencies = [ - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -10680,7 +10721,7 @@ version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3161ba520ab28cd8e6b68e1126f1009f6e335339d1a73b978139011703264c8" dependencies = [ - "hash-db", + "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", "keccak-hasher", ] @@ -10811,7 +10852,7 @@ dependencies = [ [[package]] name = "uint" version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#8e5e7131f7b245196742ab125cf3c18ffccba408" +source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" dependencies = [ "byteorder", "crunchy", diff --git a/Cargo.toml b/Cargo.toml index 2abb0b5dc4090..46f9a3eb5cf0b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -277,4 +277,10 @@ panic = "unwind" [patch.crates-io] primitive-types = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } -finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } \ No newline at end of file +parity-util-mem = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } +kvdb = { git = 
"https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } +kvdb-memorydb = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } +kvdb-web = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } +kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } +finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } +memory-db = { git = "https://github.com/paritytech/trie", branch = "aj-update-parity-common" } \ No newline at end of file diff --git a/bin/node/bench/Cargo.toml b/bin/node/bench/Cargo.toml index 93ee35d98f98d..65c6a562b18b7 100644 --- a/bin/node/bench/Cargo.toml +++ b/bin/node/bench/Cargo.toml @@ -21,8 +21,8 @@ serde = "1.0.101" serde_json = "1.0.41" structopt = "0.3" derive_more = "0.99.2" -kvdb = "0.9.0" -kvdb-rocksdb = "0.11.0" +kvdb = "0.10.0" +kvdb-rocksdb = "0.12.0" sp-trie = { version = "3.0.0", path = "../../../primitives/trie" } sp-core = { version = "3.0.0", path = "../../../primitives/core" } sp-consensus = { version = "0.9.0", path = "../../../primitives/consensus/common" } @@ -37,7 +37,7 @@ fs_extra = "1" hex = "0.4.0" rand = { version = "0.7.2", features = ["small_rng"] } lazy_static = "1.4.0" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } parity-db = { version = "0.2.4" } sc-transaction-pool = { version = "3.0.0", path = "../../../client/transaction-pool" } futures = { version = "0.3.4", features = ["thread-pool"] } diff --git a/client/api/Cargo.toml b/client/api/Cargo.toml index 637dae4a29abd..65a48954c4900 100644 --- a/client/api/Cargo.toml +++ b/client/api/Cargo.toml @@ -24,7 +24,7 @@ futures = "0.3.1" hash-db = { version = "0.15.2", default-features = false } sp-blockchain = { version = "3.0.0", path = 
"../../primitives/blockchain" } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } -kvdb = "0.9.0" +kvdb = "0.10.0" log = "0.4.8" parking_lot = "0.11.1" lazy_static = "1.4.0" @@ -43,7 +43,7 @@ sp-transaction-pool = { version = "3.0.0", path = "../../primitives/transaction- prometheus-endpoint = { package = "substrate-prometheus-endpoint", version = "0.9.0", path = "../../utils/prometheus" } [dev-dependencies] -kvdb-memorydb = "0.9.0" +kvdb-memorydb = "0.10.0" sp-test-primitives = { version = "2.0.0", path = "../../primitives/test-primitives" } substrate-test-runtime = { version = "2.0.0", path = "../../test-utils/runtime" } thiserror = "1.0.21" diff --git a/client/db/Cargo.toml b/client/db/Cargo.toml index 43bae63f09c2b..2145b988891d7 100644 --- a/client/db/Cargo.toml +++ b/client/db/Cargo.toml @@ -15,12 +15,12 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] parking_lot = "0.11.1" log = "0.4.8" -kvdb = "0.9.0" -kvdb-rocksdb = { version = "0.11.0", optional = true } -kvdb-memorydb = "0.9.0" +kvdb = "0.10.0" +kvdb-rocksdb = { version = "0.12.0", optional = true } +kvdb-memorydb = "0.10.0" linked-hash-map = "0.5.2" hash-db = "0.15.2" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["std"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["std"] } codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } blake2-rfc = "0.2.18" @@ -43,7 +43,7 @@ sp-keyring = { version = "3.0.0", path = "../../primitives/keyring" } sp-tracing = { version = "3.0.0", path = "../../primitives/tracing" } substrate-test-runtime-client = { version = "2.0.0", path = "../../test-utils/runtime/client" } quickcheck = "1.0.3" -kvdb-rocksdb = "0.11.0" +kvdb-rocksdb = "0.12.0" tempfile = "3" [features] diff --git a/client/informant/Cargo.toml b/client/informant/Cargo.toml index 139a5ce19a000..4238243ef96ee 100644 --- a/client/informant/Cargo.toml +++ 
b/client/informant/Cargo.toml @@ -17,7 +17,7 @@ ansi_term = "0.12.1" futures = "0.3.9" futures-timer = "3.0.1" log = "0.4.8" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } sc-client-api = { version = "3.0.0", path = "../api" } sc-network = { version = "0.9.0", path = "../network" } sp-blockchain = { version = "3.0.0", path = "../../primitives/blockchain" } diff --git a/client/service/Cargo.toml b/client/service/Cargo.toml index a90efb02dc5f2..e4756b1880f37 100644 --- a/client/service/Cargo.toml +++ b/client/service/Cargo.toml @@ -79,7 +79,7 @@ sc-tracing = { version = "3.0.0", path = "../tracing" } sp-tracing = { version = "3.0.0", path = "../../primitives/tracing" } tracing = "0.1.25" tracing-futures = { version = "0.2.4" } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } async-trait = "0.1.42" [target.'cfg(not(target_os = "unknown"))'.dependencies] diff --git a/client/state-db/Cargo.toml b/client/state-db/Cargo.toml index d61dd7fc125a1..ca538f9d651f2 100644 --- a/client/state-db/Cargo.toml +++ b/client/state-db/Cargo.toml @@ -19,5 +19,5 @@ log = "0.4.11" sc-client-api = { version = "3.0.0", path = "../api" } sp-core = { version = "3.0.0", path = "../../primitives/core" } codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } parity-util-mem-derive = "0.1.0" diff --git a/client/transaction-pool/Cargo.toml b/client/transaction-pool/Cargo.toml index 6b105520baec5..77b551915ce13 100644 --- a/client/transaction-pool/Cargo.toml +++ 
b/client/transaction-pool/Cargo.toml @@ -18,7 +18,7 @@ thiserror = "1.0.21" futures = { version = "0.3.1", features = ["compat"] } intervalier = "0.4.0" log = "0.4.8" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } parking_lot = "0.11.1" prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus", version = "0.9.0"} sc-client-api = { version = "3.0.0", path = "../api" } diff --git a/client/transaction-pool/graph/Cargo.toml b/client/transaction-pool/graph/Cargo.toml index 4b134c7080963..9af2b152d8c6a 100644 --- a/client/transaction-pool/graph/Cargo.toml +++ b/client/transaction-pool/graph/Cargo.toml @@ -25,7 +25,7 @@ sp-utils = { version = "3.0.0", path = "../../../primitives/utils" } sp-core = { version = "3.0.0", path = "../../../primitives/core" } sp-runtime = { version = "3.0.0", path = "../../../primitives/runtime" } sp-transaction-pool = { version = "3.0.0", path = "../../../primitives/transaction-pool" } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } linked-hash-map = "0.5.2" retain_mut = "0.1.3" diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index fc739d21db035..9c851b4f5e645 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -39,7 +39,7 @@ log = { version = "0.4.14", default-features = false } assert_matches = "1.3.0" pretty_assertions = "0.6.1" frame-system = { version = "3.0.0", path = "../system" } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } [features] default = ["std"] diff --git a/max-encoded-len/Cargo.toml 
b/max-encoded-len/Cargo.toml index 994a3c6a5e132..9c0f559224812 100644 --- a/max-encoded-len/Cargo.toml +++ b/max-encoded-len/Cargo.toml @@ -13,7 +13,7 @@ description = "Trait MaxEncodedLen bounds the max encoded length of an item." codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } impl-trait-for-tuples = "0.2.1" max-encoded-len-derive = { package = "max-encoded-len-derive", version = "3.0.0", path = "derive", default-features = false, optional = true } -primitive-types = { version = "0.9.0", default-features = false, features = ["codec"] } +primitive-types = { version = "0.10.0", default-features = false, features = ["codec"] } [dev-dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive" ] } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index bc97ca6c1e157..1159dc0cf5720 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -19,7 +19,7 @@ scale-info = { version = "0.9.0", default-features = false, features = ["derive" log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } -primitive-types = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info", default-features = false, features = ["codec", "scale-info"] } +primitive-types = { version = "0.10.0", features = ["codec", "scale-info"] } impl-serde = { version = "0.3.0", optional = true } wasmi = { version = "0.9.0", optional = true } hash-db = { version = "0.15.2", default-features = false } @@ -37,7 +37,7 @@ parking_lot = { version = "0.11.1", optional = true } sp-debug-derive = { version = "3.0.0", path = "../debug-derive" } sp-externalities = { version = "0.9.0", optional = true, path = "../externalities" } sp-storage = { version = "3.0.0", default-features = false, path = "../storage" } -parity-util-mem = 
{ version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } futures = { version = "0.3.1", optional = true } dyn-clonable = { version = "0.9.0", optional = true } thiserror = { version = "1.0.21", optional = true } diff --git a/primitives/database/Cargo.toml b/primitives/database/Cargo.toml index aae7668b5ec80..f8693449af8cd 100644 --- a/primitives/database/Cargo.toml +++ b/primitives/database/Cargo.toml @@ -12,5 +12,5 @@ readme = "README.md" [dependencies] parking_lot = "0.11.1" -kvdb = "0.9.0" +kvdb = "0.10.0" diff --git a/primitives/runtime-interface/Cargo.toml b/primitives/runtime-interface/Cargo.toml index 4099e89933880..c418ef44cef61 100644 --- a/primitives/runtime-interface/Cargo.toml +++ b/primitives/runtime-interface/Cargo.toml @@ -21,7 +21,7 @@ sp-runtime-interface-proc-macro = { version = "3.0.0", path = "proc-macro" } sp-externalities = { version = "0.9.0", optional = true, path = "../externalities" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } static_assertions = "1.0.0" -primitive-types = { version = "0.9.0", default-features = false } +primitive-types = { version = "0.10.0", default-features = false } sp-storage = { version = "3.0.0", default-features = false, path = "../storage" } impl-trait-for-tuples = "0.2.1" diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index ee31598e4465f..68cdbf5d2db7c 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -27,7 +27,7 @@ log = { version = "0.4.14", default-features = false } paste = "1.0" rand = { version = "0.7.2", optional = true } impl-trait-for-tuples = "0.2.1" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } hash256-std-hasher = { version = 
"0.15.2", default-features = false } either = { version = "1.5", default-features = false } max-encoded-len = { version = "3.0.0", default-features = false, path = "../../max-encoded-len", features = [ "derive" ] } diff --git a/primitives/test-primitives/Cargo.toml b/primitives/test-primitives/Cargo.toml index fbf29db96fa46..f1f2c70bf2315 100644 --- a/primitives/test-primitives/Cargo.toml +++ b/primitives/test-primitives/Cargo.toml @@ -17,7 +17,7 @@ codec = { package = "parity-scale-codec", version = "2.0.0", default-features = sp-core = { version = "3.0.0", default-features = false, path = "../core" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } [features] default = [ diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index da5ddb295b5af..294c786511376 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -40,7 +40,7 @@ sp-finality-grandpa = { version = "3.0.0", default-features = false, path = "../ sp-trie = { version = "3.0.0", default-features = false, path = "../../primitives/trie" } sp-transaction-pool = { version = "3.0.0", default-features = false, path = "../../primitives/transaction-pool" } trie-db = { version = "0.22.2", default-features = false } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } sc-service = { version = "0.9.0", default-features = false, optional = true, features = ["test-helpers"], path = "../../client/service" } sp-state-machine = { version = "0.9.0", default-features = false, path = "../../primitives/state-machine" } sp-externalities 
= { version = "0.9.0", default-features = false, path = "../../primitives/externalities" } diff --git a/utils/browser/Cargo.toml b/utils/browser/Cargo.toml index 31403a5e6fa96..65755f293e174 100644 --- a/utils/browser/Cargo.toml +++ b/utils/browser/Cargo.toml @@ -21,7 +21,7 @@ console_error_panic_hook = "0.1.6" js-sys = "0.3.34" wasm-bindgen = "0.2.73" wasm-bindgen-futures = "0.4.18" -kvdb-web = "0.9.0" +kvdb-web = "0.10.0" sp-database = { version = "3.0.0", path = "../../primitives/database" } sc-informant = { version = "0.9.0", path = "../../client/informant" } sc-service = { version = "0.9.0", path = "../../client/service", default-features = false } From 96807e15e2209ceaba1cc07d889ebdd6396d08cd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 1 Jul 2021 15:52:36 +0100 Subject: [PATCH 283/503] Modify parity-common patches to use master --- Cargo.lock | 70 +++++++++++++++++++++++++++++++++++------------------- Cargo.toml | 12 +++++----- 2 files changed, 51 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3f7d17793e5b8..54ce9fd6f2a2d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -96,6 +96,17 @@ version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "739f4a8db6605981345c5654f3a85b056ce52f37a39d34da03f25bf2151ea16e" +[[package]] +name = "ahash" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43bb833f0bf979d8475d38fbf09ed3b8a55e1885fe93ad3f93239fc6a4f17b98" +dependencies = [ + "getrandom 0.2.3", + "once_cell", + "version_check", +] + [[package]] name = "aho-corasick" version = "0.7.15" @@ -1707,7 +1718,7 @@ dependencies = [ [[package]] name = "fixed-hash" version = "0.7.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "byteorder", "rand 
0.8.3", @@ -2396,7 +2407,16 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04" dependencies = [ - "ahash", + "ahash 0.4.7", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash 0.7.4", ] [[package]] @@ -2735,7 +2755,7 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "parity-scale-codec", ] @@ -2752,7 +2772,7 @@ dependencies = [ [[package]] name = "impl-serde" version = "0.3.1" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "serde", ] @@ -2775,7 +2795,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "824845a0bf897a9042383849b02c1bc219c2383772efcd5c6f9766fa4b81aef3" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.9.1", "serde", ] @@ -3102,7 +3122,7 @@ dependencies = [ [[package]] name = "kvdb" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "parity-util-mem", "smallvec 1.6.1", @@ -3111,7 +3131,7 @@ dependencies = [ [[package]] name = "kvdb-memorydb" version = "0.10.0" -source = 
"git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "kvdb", "parity-util-mem", @@ -3121,7 +3141,7 @@ dependencies = [ [[package]] name = "kvdb-rocksdb" version = "0.12.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "fs-swap", "kvdb", @@ -3138,7 +3158,7 @@ dependencies = [ [[package]] name = "kvdb-web" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "futures 0.3.15", "js-sys", @@ -3380,7 +3400,7 @@ dependencies = [ "rand 0.7.3", "sha2 0.9.3", "smallvec 1.6.1", - "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "uint 0.9.0", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -3752,7 +3772,7 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f374d42cdfc1d7dbf3d3dec28afab2eb97ffbf43a3234d795b5986dbf4b90ba" dependencies = [ - "hashbrown", + "hashbrown 0.9.1", ] [[package]] @@ -3887,7 +3907,7 @@ version = "0.26.0" source = "git+https://github.com/paritytech/trie?branch=aj-update-parity-common#7d1265578a23e26de7a1e40e4f593a2c0562f9a9" dependencies = [ "hash-db 0.15.2 (git+https://github.com/paritytech/trie?branch=aj-update-parity-common)", - "hashbrown", + "hashbrown 0.9.1", "parity-util-mem", ] @@ -5900,12 +5920,12 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.10.0" -source = 
"git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "cfg-if 1.0.0", - "hashbrown", + "hashbrown 0.11.2", "impl-trait-for-tuples", - "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", + "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common?branch=master)", "parking_lot 0.11.1", "primitive-types 0.10.0", "smallvec 1.6.1", @@ -5926,7 +5946,7 @@ dependencies = [ [[package]] name = "parity-util-mem-derive" version = "0.1.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "proc-macro2", "syn", @@ -6364,19 +6384,19 @@ checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" dependencies = [ "fixed-hash 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "impl-codec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "uint 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", + "uint 0.9.0", ] [[package]] name = "primitive-types" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ - "fixed-hash 0.7.0 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", - "impl-codec 0.5.0 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", - "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", + "fixed-hash 0.7.0 
(git+https://github.com/paritytech/parity-common?branch=master)", + "impl-codec 0.5.0 (git+https://github.com/paritytech/parity-common?branch=master)", + "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common?branch=master)", "scale-info", - "uint 0.9.0 (git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info)", + "uint 0.9.1", ] [[package]] @@ -10700,7 +10720,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd81fe0c8bc2b528a51c9d2c31dae4483367a26a723a3c9a4a8120311d7774e3" dependencies = [ "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", - "hashbrown", + "hashbrown 0.9.1", "log", "rustc-hex", "smallvec 1.6.1", @@ -10851,8 +10871,8 @@ dependencies = [ [[package]] name = "uint" -version = "0.9.0" -source = "git+https://github.com/paritytech/parity-common?branch=aj-update-scale-info#d95576e3a77561a54e384a83136cd7aa25743683" +version = "0.9.1" +source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" dependencies = [ "byteorder", "crunchy", diff --git a/Cargo.toml b/Cargo.toml index 46f9a3eb5cf0b..24a0335972d9c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -276,11 +276,11 @@ zeroize = { opt-level = 3 } panic = "unwind" [patch.crates-io] -primitive-types = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } -parity-util-mem = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } -kvdb = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } -kvdb-memorydb = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } -kvdb-web = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } -kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common", branch = "aj-update-scale-info" } +primitive-types = { git = "https://github.com/paritytech/parity-common", 
branch = "master" } +parity-util-mem = { git = "https://github.com/paritytech/parity-common", branch = "master" } +kvdb = { git = "https://github.com/paritytech/parity-common", branch = "master" } +kvdb-memorydb = { git = "https://github.com/paritytech/parity-common", branch = "master" } +kvdb-web = { git = "https://github.com/paritytech/parity-common", branch = "master" } +kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common", branch = "master" } finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } memory-db = { git = "https://github.com/paritytech/trie", branch = "aj-update-parity-common" } \ No newline at end of file From c7facabf8681ea917a038c23be2784aa174e9763 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 1 Jul 2021 16:34:40 +0100 Subject: [PATCH 284/503] Patch hash-db --- Cargo.lock | 42 ++++++++++++++++++------------------------ Cargo.toml | 3 ++- 2 files changed, 20 insertions(+), 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 54ce9fd6f2a2d..eb456727f8c60 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2381,12 +2381,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "hash-db" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" - [[package]] name = "hash-db" version = "0.15.2" @@ -3095,7 +3089,7 @@ version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711adba9940a039f4374fc5724c0a5eaca84a2d558cce62256bfe26f0dbef05e" dependencies = [ - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "hash256-std-hasher", "tiny-keccak", ] @@ -3906,7 +3900,7 @@ name = "memory-db" version = "0.26.0" source = "git+https://github.com/paritytech/trie?branch=aj-update-parity-common#7d1265578a23e26de7a1e40e4f593a2c0562f9a9" dependencies = [ - "hash-db 0.15.2 
(git+https://github.com/paritytech/trie?branch=aj-update-parity-common)", + "hash-db", "hashbrown 0.9.1", "parity-util-mem", ] @@ -4170,7 +4164,7 @@ dependencies = [ "derive_more", "fs_extra", "futures 0.3.15", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "hex", "kvdb", "kvdb-rocksdb", @@ -7280,7 +7274,7 @@ dependencies = [ "derive_more", "fnv", "futures 0.3.15", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "kvdb", "kvdb-memorydb", "lazy_static", @@ -7315,7 +7309,7 @@ name = "sc-client-db" version = "0.9.0" dependencies = [ "blake2-rfc", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "kvdb", "kvdb-memorydb", "kvdb-rocksdb", @@ -7831,7 +7825,7 @@ dependencies = [ name = "sc-light" version = "3.0.0" dependencies = [ - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "lazy_static", "parity-scale-codec", "parking_lot 0.11.1", @@ -8017,7 +8011,7 @@ dependencies = [ "assert_matches", "futures 0.1.31", "futures 0.3.15", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "jsonrpc-core", "jsonrpc-pubsub", "lazy_static", @@ -8119,7 +8113,7 @@ dependencies = [ "futures 0.1.31", "futures 0.3.15", "futures-timer 3.0.2", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "jsonrpc-core", "jsonrpc-pubsub", "lazy_static", @@ -8835,7 +8829,7 @@ dependencies = [ name = "sp-api" version = "3.0.0" dependencies = [ - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "log", "parity-scale-codec", "sp-api-proc-macro", @@ -9095,7 +9089,7 @@ dependencies = [ "dyn-clonable", "ed25519-dalek", "futures 0.3.15", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "hash256-std-hasher", "hex", "hex-literal", @@ -9198,7 +9192,7 @@ name = "sp-io" version = "3.0.0" dependencies = [ 
"futures 0.3.15", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "libsecp256k1", "log", "parity-scale-codec", @@ -9474,7 +9468,7 @@ dependencies = [ name = "sp-state-machine" version = "0.9.0" dependencies = [ - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "hex-literal", "log", "num-traits", @@ -9604,7 +9598,7 @@ name = "sp-trie" version = "3.0.0" dependencies = [ "criterion", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "hex-literal", "memory-db", "parity-scale-codec", @@ -9886,7 +9880,7 @@ dependencies = [ "async-trait", "futures 0.1.31", "futures 0.3.15", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "hex", "parity-scale-codec", "sc-client-api", @@ -10704,7 +10698,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "568257edb909a5c532b1f4ab38ee6b5dedfbf8775be6a55a29020513ebe3e072" dependencies = [ "criterion", - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "keccak-hasher", "memory-db", "parity-scale-codec", @@ -10719,7 +10713,7 @@ version = "0.22.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd81fe0c8bc2b528a51c9d2c31dae4483367a26a723a3c9a4a8120311d7774e3" dependencies = [ - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "hashbrown 0.9.1", "log", "rustc-hex", @@ -10732,7 +10726,7 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "652931506d2c1244d7217a70b99f56718a7b4161b37f04e7cd868072a99f68cd" dependencies = [ - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", ] [[package]] @@ -10741,7 +10735,7 @@ version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3161ba520ab28cd8e6b68e1126f1009f6e335339d1a73b978139011703264c8" 
dependencies = [ - "hash-db 0.15.2 (registry+https://github.com/rust-lang/crates.io-index)", + "hash-db", "keccak-hasher", ] diff --git a/Cargo.toml b/Cargo.toml index 24a0335972d9c..27372808996f7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -283,4 +283,5 @@ kvdb-memorydb = { git = "https://github.com/paritytech/parity-common", branch = kvdb-web = { git = "https://github.com/paritytech/parity-common", branch = "master" } kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common", branch = "master" } finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } -memory-db = { git = "https://github.com/paritytech/trie", branch = "aj-update-parity-common" } \ No newline at end of file +memory-db = { git = "https://github.com/paritytech/trie", branch = "aj-update-parity-common" } +hash-db = { git = "https://github.com/paritytech/trie", branch = "aj-update-parity-common" } \ No newline at end of file From 35560ba67d02fe6d4ecb186ad1247c1f4327be5a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 1 Jul 2021 16:49:43 +0100 Subject: [PATCH 285/503] Bump finality-grandpa version --- Cargo.lock | 1130 +++++++++--------- client/finality-grandpa-warp-sync/Cargo.toml | 2 +- client/finality-grandpa/Cargo.toml | 4 +- client/finality-grandpa/rpc/Cargo.toml | 2 +- 4 files changed, 599 insertions(+), 539 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index eb456727f8c60..a5336eb15ff1c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14,20 +14,11 @@ dependencies = [ [[package]] name = "addr2line" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7" -dependencies = [ - "gimli 0.23.0", -] - -[[package]] -name = "addr2line" -version = "0.15.1" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03345e98af8f3d786b6d9f656ccfa6ac316d954e92bc4841f0bba20789d5fb5a" +checksum = 
"e7a2e47a1fbe209ee101dd6d61285226744c6c8d3c21c8dc878ba6cb9f467f3a" dependencies = [ - "gimli 0.24.0", + "gimli", ] [[package]] @@ -109,9 +100,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "0.7.15" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7404febffaa47dac81aa44dba71523c9d069b1bdc50a77db41195149e17f68e5" +checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" dependencies = [ "memchr", ] @@ -136,24 +127,33 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.38" +version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afddf7f520a80dbf76e6f50a35bca42a2331ef227a28b3b6dc5c2e2338d114b1" +checksum = "15af2628f6890fe2609a3b91bef4c83450512802e59489f9c1cb1fa5df064a61" [[package]] name = "approx" -version = "0.3.2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f2a05fd1bd10b2527e20a2cd32d8873d115b8b39fe219ee25f42a8aca6ba278" +dependencies = [ + "num-traits", +] + +[[package]] +name = "approx" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0e60b75072ecd4168020818c0107f2857bb6c4e64252d8d3983f6263b40a5c3" +checksum = "072df7202e63b127ab55acfe16ce97013d5b97bf160489336d3f1840fd78e99e" dependencies = [ "num-traits", ] [[package]] name = "arbitrary" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "698b65a961a9d730fb45b6b0327e20207810c9f61ee421b082b27ba003f49e2b" +checksum = "237430fd6ed3740afe94eefcc278ae21e050285be882804e0d6e8695f0c94691" [[package]] name = "arrayref" @@ -178,9 +178,9 @@ checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] name = "arrayvec" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a2f58b0bb10c380af2b26e57212856b8c9a59e0925b4c20f4a174a49734eaf7" +checksum = "be4dc07131ffa69b8072d35f5007352af944213cde02545e2103680baed38fcd" [[package]] name = "asn1_der" @@ -190,9 +190,9 @@ checksum = "9d6e24d2cce90c53b948c46271bfb053e4bdc2db9b5d3f65e20f8cf28a1b7fc3" [[package]] name = "assert_cmd" -version = "1.0.3" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2475b58cd94eb4f70159f4fd8844ba3b807532fe3131b3373fae060bbe30396" +checksum = "a88b6bd5df287567ffdf4ddf4d33060048e1068308e5f62d81c6f9824a045a48" dependencies = [ "bstr", "doc-comment", @@ -231,16 +231,16 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb877970c7b440ead138f6321a3b5395d6061183af779340b65e20c0fede9146" +checksum = "871f9bb5e0a22eeb7e8cf16641feb87c9dc67032ccf8ff49e772eb9941d3a965" dependencies = [ "async-task", "concurrent-queue", "fastrand", "futures-lite", "once_cell", - "vec-arena", + "slab", ] [[package]] @@ -261,29 +261,28 @@ dependencies = [ [[package]] name = "async-io" -version = "1.3.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9315f8f07556761c3e48fec2e6b276004acf426e6dc068b2c2251854d65ee0fd" +checksum = "a811e6a479f2439f0c04038796b5cfb3d2ad56c230e0f2d3f7b04d68cfee607b" dependencies = [ "concurrent-queue", - "fastrand", "futures-lite", "libc", "log", - "nb-connect", "once_cell", "parking", "polling", - "vec-arena", + "slab", + "socket2 0.4.0", "waker-fn", "winapi 0.3.9", ] [[package]] name = "async-lock" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1996609732bde4a9988bc42125f55f2af5f3c36370e27c778d5191a4a1b63bfb" +checksum = "e6a8ea61bf9947a1007c5cada31e647dbc77b103c679858150003ba697ea798b" dependencies = [ "event-listener", ] @@ -299,15 +298,16 @@ dependencies = [ [[package]] name = 
"async-process" -version = "1.0.2" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef37b86e2fa961bae5a4d212708ea0154f904ce31d1a4a7f47e1bbc33a0c040b" +checksum = "a8f38756dd9ac84671c428afbf7c9f7495feff9ec5b0710f17100098e5b354ac" dependencies = [ "async-io", "blocking", "cfg-if 1.0.0", "event-listener", "futures-lite", + "libc", "once_cell", "signal-hook", "winapi 0.3.9", @@ -325,7 +325,7 @@ dependencies = [ "async-io", "async-lock", "async-process", - "crossbeam-utils 0.8.3", + "crossbeam-utils 0.8.5", "futures-channel", "futures-core", "futures-io", @@ -336,7 +336,7 @@ dependencies = [ "memchr", "num_cpus", "once_cell", - "pin-project-lite 0.2.6", + "pin-project-lite 0.2.7", "pin-utils", "slab", "wasm-bindgen-futures", @@ -344,9 +344,9 @@ dependencies = [ [[package]] name = "async-std-resolver" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665c56111e244fe38e7708ee10948a4356ad6a548997c21f5a63a0f4e0edc4d" +checksum = "ed4e2c3da14d8ad45acb1e3191db7a918e9505b6f155b218e70a7c9a1a48c638" dependencies = [ "async-std", "async-trait", @@ -364,9 +364,9 @@ checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" [[package]] name = "async-trait" -version = "0.1.48" +version = "0.1.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36ea56748e10732c49404c153638a15ec3d6211ec5ff35d9bb20e13b93576adf" +checksum = "0b98e84bbb4cbcdd97da190ba0c58a1bb0de2c1fdf67d159e192ed766aeca722" dependencies = [ "proc-macro2", "quote", @@ -383,7 +383,7 @@ dependencies = [ "futures-sink", "futures-util", "memchr", - "pin-project-lite 0.2.6", + "pin-project-lite 0.2.7", ] [[package]] @@ -396,7 +396,7 @@ dependencies = [ "futures-sink", "futures-util", "memchr", - "pin-project-lite 0.2.6", + "pin-project-lite 0.2.7", ] [[package]] @@ -433,15 +433,16 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" 
[[package]] name = "backtrace" -version = "0.3.56" +version = "0.3.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d117600f438b1707d4e4ae15d3595657288f8235a0eb593e80ecc98ab34e1bc" +checksum = "b7815ea54e4d821e791162e078acbebfd6d8c8939cd559c9335dceb1c8ca7282" dependencies = [ - "addr2line 0.14.1", + "addr2line", + "cc", "cfg-if 1.0.0", "libc", "miniz_oxide", - "object 0.23.0", + "object 0.25.3", "rustc-demangle", ] @@ -514,9 +515,9 @@ checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" [[package]] name = "bitvec" -version = "0.20.2" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f682656975d3a682daff957be4ddeb65d6ad656737cd821f2d00685ae466af1" +checksum = "7774144344a4faa177370406a7ff5f1da24303817368584c6206c8303eb07848" dependencies = [ "funty", "radium", @@ -569,9 +570,9 @@ dependencies = [ [[package]] name = "blake3" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9ff35b701f3914bdb8fad3368d822c766ef2858b2583198e41639b936f09d3f" +checksum = "b64485778c4f16a6a5a9d335e80d449ac6c70cdd6a06d2af18a6f6f775a125b3" dependencies = [ "arrayref", "arrayvec 0.5.2", @@ -650,9 +651,9 @@ checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" [[package]] name = "bstr" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a40b47ad93e1a5404e6c18dec46b628214fee441c70f4ab5d6942142cc268a3d" +checksum = "90682c8d613ad3373e66de8c6411e0ae2ab2571e879d2efbf73558cc66f21279" dependencies = [ "lazy_static", "memchr", @@ -671,9 +672,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.6.1" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63396b8a4b9de3f4fdfb320ab6080762242f66a8ef174c49d8e19b674db4cdbe" +checksum = 
"9c59e7af012c713f529e7a3ee57ce9b31ddd858d4b512923602f74608b009631" [[package]] name = "byte-slice-cast" @@ -756,18 +757,18 @@ dependencies = [ [[package]] name = "cast" -version = "0.2.3" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b9434b9a5aa1450faa3f9cb14ea0e8c53bb5d2b3c1bfd1ab4fc03e9f33fbfb0" +checksum = "57cdfa5d50aad6cb4d44dcab6101a7f79925bd59d82ca42f38a9856a28865374" dependencies = [ - "rustc_version", + "rustc_version 0.3.3", ] [[package]] name = "cc" -version = "1.0.67" +version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd" +checksum = "4a72c244c1ff497a746a7e1fb3d14bd08420ecda70c8f25c7112f2781652d787" dependencies = [ "jobserver", ] @@ -934,6 +935,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation" version = "0.7.0" @@ -977,10 +984,13 @@ dependencies = [ ] [[package]] -name = "cpuid-bool" -version = "0.1.2" +name = "cpufeatures" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8aebca1129a03dc6dc2b127edd729435bbc4a37e1d5f4d7513165089ceb02634" +checksum = "66c99696f6c9dd7f35d486b9d04d7e6e202aa3e8c40d553f2fdf5e7e0c6a71ef" +dependencies = [ + "libc", +] [[package]] name = "cpuid-bool" @@ -1007,7 +1017,7 @@ dependencies = [ "cranelift-codegen-meta", "cranelift-codegen-shared", "cranelift-entity", - "gimli 0.24.0", + "gimli", "log", "regalloc", "serde", @@ -1074,7 +1084,7 @@ dependencies = [ "cranelift-codegen", "cranelift-entity", "cranelift-frontend", - "itertools 0.10.0", + "itertools 0.10.1", "log", 
"serde", "smallvec 1.6.1", @@ -1102,7 +1112,7 @@ dependencies = [ "clap", "criterion-plot", "csv", - "itertools 0.10.0", + "itertools 0.10.1", "lazy_static", "num-traits", "oorandom", @@ -1129,12 +1139,12 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dca26ee1f8d361640700bde38b2c37d8c22b3ce2d360e1fc1c74ea4b0aa7d775" +checksum = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.3", + "crossbeam-utils 0.8.5", ] [[package]] @@ -1155,8 +1165,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94af6efb46fef72616855b036a624cf27ba656ffc9be1b9a3c931cfc7749a9a9" dependencies = [ "cfg-if 1.0.0", - "crossbeam-epoch 0.9.3", - "crossbeam-utils 0.8.3", + "crossbeam-epoch 0.9.5", + "crossbeam-utils 0.8.5", ] [[package]] @@ -1176,14 +1186,14 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.3" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2584f639eb95fea8c798496315b297cf81b9b58b6d30ab066a75455333cf4b12" +checksum = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.3", + "crossbeam-utils 0.8.5", "lazy_static", - "memoffset 0.6.1", + "memoffset 0.6.4", "scopeguard", ] @@ -1211,11 +1221,10 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7e9d99fa91428effe99c5c6d4634cdeba32b8cf784fc428a2a687f61a952c49" +checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" dependencies = [ - "autocfg", "cfg-if 1.0.0", "lazy_static", ] @@ -1279,9 +1288,9 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.19" +version = "0.1.20" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8f45d9ad417bcef4817d614a501ab55cdd96a6fdb24f49aab89a54acfd66b19" +checksum = "5e98e2ad1a782e33928b96fc3948e7c355e5af34ba4de7670fe8bac2a3b2006d" dependencies = [ "quote", "syn", @@ -1313,9 +1322,9 @@ dependencies = [ [[package]] name = "curve25519-dalek" -version = "3.0.2" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f627126b946c25a4638eec0ea634fc52506dea98db118aae985118ce7c3d723f" +checksum = "639891fde0dbea823fc3d798a0fdf9d2f9440a42d64a78ab3488b0ca025117b3" dependencies = [ "byteorder", "digest 0.9.0", @@ -1332,9 +1341,9 @@ checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57" [[package]] name = "data-encoding-macro" -version = "0.1.10" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a94feec3d2ba66c0b6621bca8bc6f68415b1e5c69af3586fdd0af9fd9f29b17" +checksum = "86927b7cd2fe88fa698b87404b287ab98d1a0063a34071d92e575b72d3029aca" dependencies = [ "data-encoding", "data-encoding-macro-internal", @@ -1342,9 +1351,9 @@ dependencies = [ [[package]] name = "data-encoding-macro-internal" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f83e699727abca3c56e187945f303389590305ab2f0185ea445aa66e8d5f2a" +checksum = "a5bbed42daaa95e780b60a50546aa345b8413a1e46f9a40a12907d3598f038db" dependencies = [ "data-encoding", "syn", @@ -1352,10 +1361,11 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.11" +version = "0.99.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41cb0e6161ad61ed084a36ba71fbba9e3ac5aee3606fb607fe08da6acbcf3d8c" +checksum = "5cc7b9cef1e351660e5443924e4f43ab25fbbed3e9a5f052df3677deb4d6b320" dependencies = [ + "convert_case", "proc-macro2", "quote", "syn", @@ -1393,9 +1403,9 @@ dependencies = [ [[package]] name = "directories" -version = "3.0.1" +version = 
"3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8fed639d60b58d0f53498ab13d26f621fd77569cc6edb031f4cc36a2ad9da0f" +checksum = "e69600ff1703123957937708eb27f7a564e48885c537782722ed0ba3189ce1d7" dependencies = [ "dirs-sys", ] @@ -1412,12 +1422,12 @@ dependencies = [ [[package]] name = "dirs-sys" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e93d7f5705de3e49895a2b5e0b8855a1c27f080192ae9c32a6432d50741a57a" +checksum = "03d86534ed367a67548dc68113a0f5db55432fdfbb6e6f9d77704397d95d5780" dependencies = [ "libc", - "redox_users 0.3.5", + "redox_users", "winapi 0.3.9", ] @@ -1428,7 +1438,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", - "redox_users 0.4.0", + "redox_users", "winapi 0.3.9", ] @@ -1489,9 +1499,9 @@ checksum = "ee2626afccd7561a06cf1367e2950c4718ea04565e20fb5029b6c7d8ad09abcf" [[package]] name = "ed25519" -version = "1.0.3" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37c66a534cbb46ab4ea03477eae19d5c22c01da8258030280b7bd9d8433fb6ef" +checksum = "8d0860415b12243916284c67a9be413e044ee6668247b99ba26d94b2bc06c8f6" dependencies = [ "signature", ] @@ -1502,11 +1512,11 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ - "curve25519-dalek 3.0.2", + "curve25519-dalek 3.1.0", "ed25519", "rand 0.7.3", "serde", - "sha2 0.9.3", + "sha2 0.9.5", "zeroize", ] @@ -1563,9 +1573,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17392a012ea30ef05a610aa97dfb49496e71c9f676b27879922ea5bdf60d9d3f" +checksum = 
"a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" dependencies = [ "atty", "humantime 2.1.0", @@ -1582,9 +1592,9 @@ checksum = "68b91989ae21441195d7d9b9993a2f9295c7e1a8c96255d8b729accddc124797" [[package]] name = "erased-serde" -version = "0.3.13" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0465971a8cc1fa2455c8465aaa377131e1f1cf4983280f474a13e68793aa770c" +checksum = "3de9ad4541d99dc22b59134e7ff8dc3d6c988c89ecd7324bf10a8362b07a2afa" dependencies = [ "serde", ] @@ -1661,9 +1671,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca5faf057445ce5c9d4329e382b2ce7ca38550ef3b73a5348362d5f24e0c7fe3" +checksum = "77b705829d1e87f762c2df6da140b26af5839e1033aa84aa5f56bb688e4e1bdb" dependencies = [ "instant", ] @@ -1690,7 +1700,8 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.1" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#177760823e17b3582986b6c0f2fa1d3b90b474ef" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74a1bfdcc776e63e49f741c7ce6116fa1b887e8ac2e3ccb14dd4aa113e54feb9" dependencies = [ "either", "futures 0.3.15", @@ -1699,7 +1710,21 @@ dependencies = [ "num-traits", "parity-scale-codec", "parking_lot 0.11.1", - "rand 0.8.3", +] + +[[package]] +name = "finality-grandpa" +version = "0.15.0" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#642d7a1b33e8f14317e0561b51c37324f027dfc6" +dependencies = [ + "either", + "futures 0.3.15", + "futures-timer 3.0.2", + "log", + "num-traits", + "parity-scale-codec", + "parking_lot 0.11.1", + "rand 0.8.4", "scale-info", ] @@ -1710,7 +1735,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cfcf0ed7fe52a17a03854ec54a9f76d6d84508d1c0e66bc1793301c73fc8493c" dependencies = [ "byteorder", - "rand 0.8.3", + "rand 0.8.4", "rustc-hex", "static_assertions", ] @@ -1718,10 +1743,10 @@ dependencies = [ [[package]] name = "fixed-hash" version = "0.7.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "byteorder", - "rand 0.8.3", + "rand 0.8.4", "rustc-hex", "static_assertions", ] @@ -1778,7 +1803,7 @@ dependencies = [ "linregress", "log", "parity-scale-codec", - "paste 1.0.4", + "paste 1.0.5", "scale-info", "serde", "sp-api", @@ -1872,7 +1897,7 @@ dependencies = [ "once_cell", "parity-scale-codec", "parity-util-mem", - "paste 1.0.4", + "paste 1.0.5", "pretty_assertions 0.6.1", "scale-info", "serde", @@ -2117,16 +2142,16 @@ checksum = "acc499defb3b348f8d8f3f66415835a9131856ff7714bf10dadfc4ec4bdb29a1" [[package]] name = "futures-lite" -version = "1.11.3" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4481d0cd0de1d204a4fa55e7d45f07b1d958abcb06714b3446438e2eff695fb" +checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" dependencies = [ "fastrand", "futures-core", "futures-io", "memchr", "parking", - "pin-project-lite 0.2.6", + "pin-project-lite 0.2.7", "waker-fn", ] @@ -2197,7 +2222,7 @@ dependencies = [ "futures-sink", "futures-task", "memchr", - "pin-project-lite 0.2.6", + "pin-project-lite 0.2.7", "pin-utils", "proc-macro-hack", "proc-macro-nested", @@ -2219,15 +2244,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "generic-array" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f797e67af32588215eaaab8327027ee8e71b9dd0b2b26996aedf20c030fce309" -dependencies = [ - "typenum", -] - [[package]] name = "generic-array" version = "0.14.4" @@ 
-2274,12 +2290,6 @@ dependencies = [ "polyval", ] -[[package]] -name = "gimli" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce" - [[package]] name = "gimli" version = "0.24.0" @@ -2299,9 +2309,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "globset" -version = "0.4.6" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c152169ef1e421390738366d2f796655fec62621dabbd0fd476f905934061e4a" +checksum = "10463d9ff00a2a068db14231982f5132edebad0d7660cd956a1c30292dbcbfbd" dependencies = [ "aho-corasick", "bstr", @@ -2352,7 +2362,7 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 0.2.3", + "http 0.2.4", "indexmap", "slab", "tokio 0.2.25", @@ -2369,14 +2379,14 @@ checksum = "62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3" [[package]] name = "handlebars" -version = "3.5.3" +version = "3.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb0867bbc5a3da37a753e78021d5fcf8a4db00e18dd2dd90fd36e24190e162d" +checksum = "4498fc115fa7d34de968184e473529abb40eeb6be8bc5f7faba3d08c316cb3e3" dependencies = [ "log", "pest", "pest_derive", - "quick-error 2.0.0", + "quick-error 2.0.1", "serde", "serde_json", ] @@ -2415,18 +2425,18 @@ dependencies = [ [[package]] name = "heck" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cbf45460356b7deeb5e3415b5563308c0a9b057c85e12b06ad551f98d0a6ac" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" dependencies = [ "unicode-segmentation", ] [[package]] name = "hermit-abi" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c" +checksum = 
"62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ "libc", ] @@ -2515,9 +2525,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7245cd7449cc792608c3c8a9eaf69bd4eabbabf802713748fd739c98b82f0747" +checksum = "527e8c9ac747e28542699a951517aa9a6945af506cd1f2e1b53a576c17b6cc11" dependencies = [ "bytes 1.0.1", "fnv", @@ -2543,7 +2553,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b" dependencies = [ "bytes 0.5.6", - "http 0.2.3", + "http 0.2.4", ] [[package]] @@ -2553,15 +2563,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60daa14be0e0786db0f03a9e57cb404c9d756eed2b6c62b9ea98ec5743ec75a9" dependencies = [ "bytes 1.0.1", - "http 0.2.3", - "pin-project-lite 0.2.6", + "http 0.2.4", + "pin-project-lite 0.2.7", ] [[package]] name = "httparse" -version = "1.3.5" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "615caabe2c3160b313d52ccc905335f4ed5f10881dd63dc5699d47e90be85691" +checksum = "f3a87b616e37e93c22fb19bcd386f02f3af5ea98a25670ad0fce773de23c5e68" [[package]] name = "httpdate" @@ -2569,6 +2579,12 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" +[[package]] +name = "httpdate" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6456b8a6c8f33fee7d958fcd1b60d55b11940a79e63ae87013e6d22e26034440" + [[package]] name = "humantime" version = "1.3.0" @@ -2601,7 +2617,7 @@ dependencies = [ "itoa", "log", "net2", - "rustc_version", + "rustc_version 0.2.3", "time", "tokio 0.1.22", "tokio-buf", @@ -2625,12 +2641,12 @@ dependencies = [ "futures-core", "futures-util", "h2 0.2.7", - "http 0.2.3", + "http 
0.2.4", "http-body 0.3.1", "httparse", - "httpdate", + "httpdate 0.3.2", "itoa", - "pin-project 1.0.5", + "pin-project 1.0.7", "socket2 0.3.19", "tokio 0.2.25", "tower-service", @@ -2640,21 +2656,21 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.5" +version = "0.14.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bf09f61b52cfcf4c00de50df88ae423d6c02354e385a86341133b5338630ad1" +checksum = "07d6baa1b441335f3ce5098ac421fb6547c46dda735ca1bc6d0153c838f9dd83" dependencies = [ "bytes 1.0.1", "futures-channel", "futures-core", "futures-util", - "http 0.2.3", + "http 0.2.4", "http-body 0.4.2", "httparse", - "httpdate", + "httpdate 1.0.1", "itoa", - "pin-project 1.0.5", - "tokio 1.6.0", + "pin-project-lite 0.2.7", + "tokio 1.7.1", "tower-service", "tracing", "want 0.3.0", @@ -2691,9 +2707,9 @@ dependencies = [ [[package]] name = "idna" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89829a5d69c23d348314a7ac337fe39173b61149a9864deabd260983aed48c21" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" dependencies = [ "matches", "unicode-bidi", @@ -2723,9 +2739,9 @@ dependencies = [ [[package]] name = "if-watch" -version = "0.2.0" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6d52908d4ea4ab2bc22474ba149bf1011c8e2c3ebc1ff593ae28ac44f494b6" +checksum = "ae8ab7f67bad3240049cb24fb9cb0b4c2c6af4c245840917fbbdededeee91179" dependencies = [ "async-io", "futures 0.3.15", @@ -2749,7 +2765,7 @@ dependencies = [ [[package]] name = "impl-codec" version = "0.5.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "parity-scale-codec", ] @@ -2766,7 +2782,7 @@ dependencies = [ [[package]] name = 
"impl-serde" version = "0.3.1" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "serde", ] @@ -2784,12 +2800,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.6.2" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "824845a0bf897a9042383849b02c1bc219c2383772efcd5c6f9766fa4b81aef3" +checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" dependencies = [ "autocfg", - "hashbrown 0.9.1", + "hashbrown 0.11.2", "serde", ] @@ -2853,9 +2869,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47be2f14c678be2fdcab04ab1171db51b2762ce6f0a8ee87c8dd4a04ed216135" +checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9" [[package]] name = "itertools" @@ -2868,9 +2884,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d572918e350e82412fe766d24b15e6682fb2ed2bbe018280caa810397cb319" +checksum = "69ddb889f9d0d08a67338271fa9b62996bc788c7796a5c18cf057420aaed5eaf" dependencies = [ "either", ] @@ -2883,9 +2899,9 @@ checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736" [[package]] name = "jobserver" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c71313ebb9439f74b00d9d2dcec36440beaf57a6aa0623068441dd7cd81a7f2" +checksum = "972f5ae5d1cb9c6ae417789196c803205313edde988685da5e3aae0827b9e7fd" dependencies = [ "libc", ] @@ -3045,7 +3061,7 @@ dependencies = [ "beef", "futures-channel", "futures-util", - "hyper 0.14.5", + "hyper 0.14.9", "log", "serde", "serde_json", @@ 
-3064,7 +3080,7 @@ dependencies = [ "futures 0.3.15", "jsonrpsee-types", "log", - "pin-project 1.0.5", + "pin-project 1.0.7", "rustls 0.19.1", "rustls-native-certs 0.5.0", "serde", @@ -3074,7 +3090,7 @@ dependencies = [ "tokio 0.2.25", "tokio-rustls 0.15.0", "tokio-util", - "url 2.2.1", + "url 2.2.2", ] [[package]] @@ -3116,7 +3132,7 @@ dependencies = [ [[package]] name = "kvdb" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "parity-util-mem", "smallvec 1.6.1", @@ -3125,7 +3141,7 @@ dependencies = [ [[package]] name = "kvdb-memorydb" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "kvdb", "parity-util-mem", @@ -3135,7 +3151,7 @@ dependencies = [ [[package]] name = "kvdb-rocksdb" version = "0.12.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "fs-swap", "kvdb", @@ -3152,7 +3168,7 @@ dependencies = [ [[package]] name = "kvdb-web" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "futures 0.3.15", "js-sys", @@ -3186,9 +3202,9 @@ checksum = "3576a87f2ba00f6f106fdfcd16db1d698d648a26ad8e0573cad8537c3c362d2a" [[package]] name = "libc" -version = "0.2.95" +version = "0.2.97" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "789da6d93f1b866ffe175afc5322a4d76c038605a1c3319bb57b06967ca98a36" +checksum = "12b8adadd720df158f4d70dfe7ccc6adb0472d7c55ca83445f6a5ab3e36f8fb6" [[package]] name = "libloading" @@ -3250,16 +3266,16 @@ dependencies = [ "libp2p-yamux", "parity-multiaddr", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.7", "smallvec 1.6.1", "wasm-timer", ] [[package]] name = "libp2p-core" -version = "0.28.2" +version = "0.28.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71dd51b562e14846e65bad00e5808d0644376e6588668c490d3c48e1dfeb4a9a" +checksum = "554d3e7e9e65f939d66b75fd6a4c67f258fe250da61b91f46c545fc4a89b51d9" dependencies = [ "asn1_der", "bs58", @@ -3275,13 +3291,13 @@ dependencies = [ "multistream-select", "parity-multiaddr", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.7", "prost", "prost-build", "rand 0.7.3", "ring", "rw-stream-sink", - "sha2 0.9.3", + "sha2 0.9.5", "smallvec 1.6.1", "thiserror", "unsigned-varint 0.7.0", @@ -3334,9 +3350,9 @@ dependencies = [ [[package]] name = "libp2p-gossipsub" -version = "0.30.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73cb9a89a301afde1e588c73f7e9131e12a5388725f290a9047b878862db1b53" +checksum = "e7b0c8506a6ec3344b9e706d7c7a6dba826f8ede735cfe13dde12a8c263c4af9" dependencies = [ "asynchronous-codec 0.6.0", "base64 0.13.0", @@ -3352,7 +3368,7 @@ dependencies = [ "prost-build", "rand 0.7.3", "regex", - "sha2 0.9.3", + "sha2 0.9.5", "smallvec 1.6.1", "unsigned-varint 0.7.0", "wasm-timer", @@ -3392,9 +3408,9 @@ dependencies = [ "prost", "prost-build", "rand 0.7.3", - "sha2 0.9.3", + "sha2 0.9.5", "smallvec 1.6.1", - "uint 0.9.0", + "uint 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -3402,9 +3418,9 @@ dependencies = [ [[package]] name = "libp2p-mdns" -version = "0.30.0" +version = 
"0.30.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c221897b3fd7f215de7ecfec215c5eba598e5b61c605b5f8b56fe8a4fb507724" +checksum = "4efa70c1c3d2d91237f8546e27aeb85e287d62c066a7b4f3ea6a696d43ced714" dependencies = [ "async-io", "data-encoding", @@ -3415,7 +3431,7 @@ dependencies = [ "libp2p-core", "libp2p-swarm", "log", - "rand 0.8.3", + "rand 0.8.4", "smallvec 1.6.1", "socket2 0.4.0", "void", @@ -3446,7 +3462,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36db0f0db3b0433f5b9463f1c0cd9eadc0a3734a9170439ce501ff99733a88bd" dependencies = [ "bytes 1.0.1", - "curve25519-dalek 3.0.2", + "curve25519-dalek 3.1.0", "futures 0.3.15", "lazy_static", "libp2p-core", @@ -3454,7 +3470,7 @@ dependencies = [ "prost", "prost-build", "rand 0.7.3", - "sha2 0.9.3", + "sha2 0.9.5", "snow", "static_assertions", "x25519-dalek", @@ -3501,7 +3517,7 @@ checksum = "6ce3374f3b28162db9d3442c9347c4f14cb01e8290052615c7d341d40eae0599" dependencies = [ "futures 0.3.15", "log", - "pin-project 1.0.5", + "pin-project 1.0.7", "rand 0.7.3", "salsa20", "sha3", @@ -3520,7 +3536,7 @@ dependencies = [ "libp2p-core", "libp2p-swarm", "log", - "pin-project 1.0.5", + "pin-project 1.0.7", "prost", "prost-build", "rand 0.7.3", @@ -3607,9 +3623,9 @@ dependencies = [ [[package]] name = "libp2p-wasm-ext" -version = "0.28.1" +version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cef45d61e43c313531b5e903e4e8415212ff6338e0c54c47da5b9b412b5760de" +checksum = "e2d413e4cf9b8e5dfbcd2a60d3dc5a3391308bdb463684093d4f67137b7113de" dependencies = [ "futures 0.3.15", "js-sys", @@ -3633,7 +3649,7 @@ dependencies = [ "quicksink", "rw-stream-sink", "soketto 0.4.2", - "url 2.2.1", + "url 2.2.2", "webpki-roots", ] @@ -3680,9 +3696,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" +checksum = "de5435b8549c16d423ed0c03dbaafe57cf6c3344744f1242520d59c9d8ecec66" dependencies = [ "cc", "pkg-config", @@ -3706,11 +3722,11 @@ dependencies = [ [[package]] name = "linregress" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d0ad4b5cc8385a881c561fac3501353d63d2a2b7a357b5064d71815c9a92724" +checksum = "b1ff7f341d23e1275eec0656a9a07225fcc86216c4322392868adffe59023d1a" dependencies = [ - "nalgebra", + "nalgebra 0.27.1", "statrs", ] @@ -3743,9 +3759,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.2" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96ffd135b2fd7b973ac026d28085defbe8983df057ced3eb4f2130b0831312" +checksum = "0382880606dff6d15c9476c416d18690b72742aa7b605bb6dd6ec9030fbf07eb" dependencies = [ "scopeguard", ] @@ -3816,9 +3832,9 @@ checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08" [[package]] name = "matrixmultiply" -version = "0.2.4" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "916806ba0031cd542105d916a97c8572e1fa6dd79c9c51e7eb43a09ec2dd84c1" +checksum = "5a8a15b776d9dfaecd44b03c5828c2199cddff5247215858aac14624f8d6b741" dependencies = [ "rawpointer", ] @@ -3854,9 +3870,9 @@ checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" [[package]] name = "memchr" -version = "2.3.4" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" +checksum = "b16bd47d9e329435e309c58469fe0791c2d0d1ba96ec0954152a5ae2b04387dc" [[package]] name = "memmap" @@ -3870,9 +3886,9 @@ dependencies = [ [[package]] name = "memmap2" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"04e3e85b970d650e2ae6d70592474087051c11c54da7f7b4949725c5735fbcc6" +checksum = "723e3ebdcdc5c023db1df315364573789f8857c11b631a2fdfad7c00f5c046b4" dependencies = [ "libc", ] @@ -3888,9 +3904,9 @@ dependencies = [ [[package]] name = "memoffset" -version = "0.6.1" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" +checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9" dependencies = [ "autocfg", ] @@ -3925,18 +3941,18 @@ dependencies = [ [[package]] name = "minicbor" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea79ce4ab9f445ec6b71833a2290ac0a29c9dde0fa7cae4c481eecae021d9bd9" +checksum = "51aa5bb0ca22415daca596a227b507f880ad1b2318a87fa9325312a5d285ca0d" dependencies = [ "minicbor-derive", ] [[package]] name = "minicbor-derive" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce18b5423c573a13e80cb3046ea0af6379ef725dc3af4886bdb8f4e5093068" +checksum = "7f2b9e8883d58e34b18facd16c4564a77ea50fce028ad3d0ee6753440e37acc8" dependencies = [ "proc-macro2", "quote", @@ -3992,7 +4008,7 @@ checksum = "0840c1c50fd55e521b247f949c241c9997709f23bd7f023b9762cd561e935656" dependencies = [ "log", "mio", - "miow 0.3.6", + "miow 0.3.7", "winapi 0.3.9", ] @@ -4021,11 +4037,10 @@ dependencies = [ [[package]] name = "miow" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a33c1b55807fbed163481b5ba66db4b2fa6cde694a5027be10fb724206c5897" +checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" dependencies = [ - "socket2 0.3.19", "winapi 0.3.9", ] @@ -4058,18 +4073,18 @@ dependencies = [ "digest 0.9.0", "generic-array 0.14.4", "multihash-derive", - "sha2 0.9.3", + "sha2 0.9.5", "sha3", "unsigned-varint 0.5.1", ] [[package]] name = 
"multihash-derive" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85ee3c48cb9d9b275ad967a0e96715badc13c6029adb92f34fa17b9ff28fd81f" +checksum = "424f6e86263cd5294cbd7f1e95746b95aca0e0d66bff31e5a40d6baa87b4aa99" dependencies = [ - "proc-macro-crate 0.1.5", + "proc-macro-crate 1.0.0", "proc-macro-error", "proc-macro2", "quote", @@ -4079,9 +4094,9 @@ dependencies = [ [[package]] name = "multimap" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1255076139a83bb467426e7f8d0134968a8118844faa755985e077cf31850333" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] name = "multistream-select" @@ -4092,46 +4107,62 @@ dependencies = [ "bytes 1.0.1", "futures 0.3.15", "log", - "pin-project 1.0.5", + "pin-project 1.0.7", "smallvec 1.6.1", "unsigned-varint 0.7.0", ] [[package]] name = "nalgebra" -version = "0.21.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6b6147c3d50b4f3cdabfe2ecc94a0191fd3d6ad58aefd9664cf396285883486" +checksum = "476d1d59fe02fe54c86356e91650cd892f392782a1cb9fc524ec84f7aa9e1d06" dependencies = [ - "approx", - "generic-array 0.13.3", + "approx 0.4.0", "matrixmultiply", - "num-complex", - "num-rational", + "num-complex 0.3.1", + "num-rational 0.3.2", "num-traits", - "rand 0.7.3", + "rand 0.8.4", "rand_distr", - "simba", + "simba 0.4.0", "typenum", ] [[package]] -name = "names" -version = "0.11.0" +name = "nalgebra" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef320dab323286b50fb5cdda23f61c796a72a89998ab565ca32525c5c556f2da" +checksum = "462fffe4002f4f2e1f6a9dcf12cc1a6fc0e15989014efc02a941d3e0f5dc2120" dependencies = [ - "rand 0.3.23", + "approx 0.5.0", + "matrixmultiply", + "nalgebra-macros", + "num-complex 0.4.0", + "num-rational 0.4.0", + "num-traits", + "simba 0.5.1", + 
"typenum", ] [[package]] -name = "nb-connect" -version = "1.0.3" +name = "nalgebra-macros" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "670361df1bc2399ee1ff50406a0d422587dd3bb0da596e1978fe8e05dabddf4f" +checksum = "01fcc0b8149b4632adc89ac3b7b31a12fb6099a0317a4eb2ebff574ef7de7218" dependencies = [ - "libc", - "socket2 0.3.19", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "names" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef320dab323286b50fb5cdda23f61c796a72a89998ab565ca32525c5c556f2da" +dependencies = [ + "rand 0.3.23", ] [[package]] @@ -4640,11 +4671,19 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.2.4" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6b19411a9719e753aff12e5187b74d60d3dc449ec3f4dc21e3989c3f554bc95" +checksum = "747d632c0c558b87dbabbe6a82f3b4ae03720d0646ac5b7b4dae89394be5f2c5" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-complex" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26873667bbbb7c5182d4a37c1add32cdf09f841af72da53318fdb81543c15085" dependencies = [ - "autocfg", "num-traits", ] @@ -4670,6 +4709,28 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-rational" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.14" @@ -4690,12 +4751,6 @@ dependencies = [ "libc", ] -[[package]] -name = 
"object" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a7ab5d64814df0fe4a4b5ead45ed6c5f181ee3ff04ba344313a6c80446c5d4" - [[package]] name = "object" version = "0.24.0" @@ -4706,11 +4761,20 @@ dependencies = [ "indexmap", ] +[[package]] +name = "object" +version = "0.25.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a38f2be3697a57b4060074ff41b44c16870d916ad7877c17696e063257482bc7" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" -version = "1.7.2" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af8b08b04175473088b46763e51ee54da5f9a164bc162f615b91bc179dbf15a3" +checksum = "692fcb63b64b1758029e0a96ee63e049ce8c5948587f2f7208df04625e5f6b56" dependencies = [ "parking_lot 0.11.1", ] @@ -4735,9 +4799,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl-probe" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" +checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" [[package]] name = "output_vt100" @@ -4944,11 +5008,11 @@ dependencies = [ "pallet-randomness-collective-flip", "pallet-timestamp", "parity-scale-codec", - "paste 1.0.4", + "paste 1.0.5", "pretty_assertions 0.7.2", "pwasm-utils", - "rand 0.8.3", - "rand_pcg 0.3.0", + "rand 0.8.4", + "rand_pcg 0.3.1", "scale-info", "serde", "smallvec 1.6.1", @@ -5048,7 +5112,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "rand 0.7.3", "scale-info", "sp-arithmetic", @@ -5169,7 +5233,7 @@ dependencies = [ name = "pallet-grandpa" version = "3.1.0" dependencies = [ - "finality-grandpa", + "finality-grandpa 0.14.1", "frame-benchmarking", "frame-election-provider-support", "frame-support", @@ -5288,7 
+5352,7 @@ name = "pallet-mmr" version = "3.0.0" dependencies = [ "ckb-merkle-mountain-range", - "env_logger 0.8.3", + "env_logger 0.8.4", "frame-benchmarking", "frame-support", "frame-system", @@ -5588,7 +5652,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "rand_chacha 0.2.2", "scale-info", "serde", @@ -5840,7 +5904,7 @@ dependencies = [ "log", "memmap2", "parking_lot 0.11.1", - "rand 0.8.3", + "rand 0.8.4", ] [[package]] @@ -5858,16 +5922,16 @@ dependencies = [ "serde", "static_assertions", "unsigned-varint 0.7.0", - "url 2.2.1", + "url 2.2.2", ] [[package]] name = "parity-scale-codec" -version = "2.1.1" +version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0f518afaa5a47d0d6386229b0a6e01e86427291d643aa4cabb4992219f504f8" +checksum = "b310f220c335f9df1b3d2e9fbe3890bbfeef5030dad771620f48c5c229877cd3" dependencies = [ - "arrayvec 0.7.0", + "arrayvec 0.7.1", "bitvec", "byte-slice-cast", "parity-scale-codec-derive", @@ -5876,11 +5940,11 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "2.1.0" +version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f44c5f94427bd0b5076e8f7e15ca3f60a4d8ac0077e4793884e6fdfd8915344e" +checksum = "81038e13ca2c32587201d544ea2e6b6c47120f1e4eae04478f9f60b6bcb89145" dependencies = [ - "proc-macro-crate 0.1.5", + "proc-macro-crate 1.0.0", "proc-macro2", "quote", "syn", @@ -5903,7 +5967,7 @@ dependencies = [ "libc", "log", "mio-named-pipes", - "miow 0.3.6", + "miow 0.3.7", "rand 0.7.3", "tokio 0.1.22", "tokio-named-pipes", @@ -5914,7 +5978,7 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ 
"cfg-if 1.0.0", "hashbrown 0.11.2", @@ -5940,7 +6004,7 @@ dependencies = [ [[package]] name = "parity-util-mem-derive" version = "0.1.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "proc-macro2", "syn", @@ -5977,7 +6041,7 @@ dependencies = [ "rand 0.7.3", "sha-1 0.8.2", "slab", - "url 2.2.1", + "url 2.2.2", ] [[package]] @@ -5994,7 +6058,7 @@ checksum = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252" dependencies = [ "lock_api 0.3.4", "parking_lot_core 0.6.2", - "rustc_version", + "rustc_version 0.2.3", ] [[package]] @@ -6014,7 +6078,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d7744ac029df22dca6284efe4e898991d28e3085c706c972bcd7da4a27a15eb" dependencies = [ "instant", - "lock_api 0.4.2", + "lock_api 0.4.4", "parking_lot_core 0.8.3", ] @@ -6028,7 +6092,7 @@ dependencies = [ "cloudabi", "libc", "redox_syscall 0.1.57", - "rustc_version", + "rustc_version 0.2.3", "smallvec 0.6.14", "winapi 0.3.9", ] @@ -6056,7 +6120,7 @@ dependencies = [ "cfg-if 1.0.0", "instant", "libc", - "redox_syscall 0.2.5", + "redox_syscall 0.2.9", "smallvec 1.6.1", "winapi 0.3.9", ] @@ -6073,9 +6137,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5d65c4d95931acda4498f675e332fcbdc9a06705cd07086c510e9b6009cd1c1" +checksum = "acbf547ad0c65e31259204bd90935776d1c693cec2f4ff7abb7a1bbbd40dfe58" [[package]] name = "paste-impl" @@ -6184,27 +6248,27 @@ dependencies = [ [[package]] name = "pin-project" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffbc8e94b38ea3d2d8ba92aea2983b503cd75d0888d75b86bb37970b5698e15" +checksum = 
"918192b5c59119d51e0cd221f4d49dde9112824ba717369e903c97d076083d0f" dependencies = [ - "pin-project-internal 0.4.27", + "pin-project-internal 0.4.28", ] [[package]] name = "pin-project" -version = "1.0.5" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96fa8ebb90271c4477f144354485b8068bd8f6b78b428b01ba892ca26caf0b63" +checksum = "c7509cc106041c40a4518d2af7a61530e1eed0e6285296a3d8c5472806ccc4a4" dependencies = [ - "pin-project-internal 1.0.5", + "pin-project-internal 1.0.7", ] [[package]] name = "pin-project-internal" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65ad2ae56b6abe3a1ee25f15ee605bacadb9a764edaba9c2bf4103800d4a1895" +checksum = "3be26700300be6d9d23264c73211d8190e755b6b5ca7a1b28230025511b52a5e" dependencies = [ "proc-macro2", "quote", @@ -6213,9 +6277,9 @@ dependencies = [ [[package]] name = "pin-project-internal" -version = "1.0.5" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "758669ae3558c6f74bd2a18b41f7ac0b5a195aea6639d6a9b5e5d1ad5ba24c0b" +checksum = "48c950132583b500556b1efd71d45b319029f2b71518d979fcc208e16b42426f" dependencies = [ "proc-macro2", "quote", @@ -6230,9 +6294,9 @@ checksum = "257b64915a082f7811703966789728173279bdebb956b143dbcd23f6f970a777" [[package]] name = "pin-project-lite" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc0e1f259c92177c30a4c9d177246edd0a3568b25756a977d0632cf8fa37e905" +checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" [[package]] name = "pin-utils" @@ -6267,29 +6331,29 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b07fffcddc1cb3a1de753caa4e4df03b79922ba43cf882acc1bdd7e8df9f4590" +checksum = 
"fd8be10f7485c8a323ea100b20d6052c27cf5968f08f8e3a56ee9f0cf38ebd3d" [[package]] name = "plotters-svg" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b38a02e23bd9604b842a812063aec4ef702b57989c37b655254bb61c471ad211" +checksum = "521fa9638fa597e1dc53e9412a4f9cefb01187ee1f7413076f9e6749e2885ba9" dependencies = [ "plotters-backend", ] [[package]] name = "polling" -version = "2.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2a7bc6b2a29e632e45451c941832803a18cce6781db04de8a04696cdca8bde4" +checksum = "92341d779fa34ea8437ef4d82d440d5e1ce3f3ff7f824aa64424cd481f9a1f25" dependencies = [ - "cfg-if 0.1.10", + "cfg-if 1.0.0", "libc", "log", - "wepoll-sys", + "wepoll-ffi", "winapi 0.3.9", ] @@ -6299,7 +6363,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b7456bc1ad2d4cf82b3a016be4c2ac48daf11bf990c1603ebd447fe6f30fca8" dependencies = [ - "cpuid-bool 0.2.0", + "cpuid-bool", "universal-hash", ] @@ -6309,7 +6373,7 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eebcc4aa140b9abd2bc40d9c3f7ccec842679cd79045ac3a7ac698c1a064b7cd" dependencies = [ - "cpuid-bool 0.2.0", + "cpuid-bool", "opaque-debug 0.3.0", "universal-hash", ] @@ -6322,9 +6386,9 @@ checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" [[package]] name = "predicates" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eeb433456c1a57cc93554dea3ce40b4c19c4057e41c55d4a0f3d84ea71c325aa" +checksum = "f49cfaf7fdaa3bfacc6fa3e7054e65148878354a5cfddcf661df4c851f8021df" dependencies = [ "difference", "predicates-core", @@ -6378,19 +6442,19 @@ checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" dependencies = [ "fixed-hash 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "impl-codec 
0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "uint 0.9.0", + "uint 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "primitive-types" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "fixed-hash 0.7.0 (git+https://github.com/paritytech/parity-common?branch=master)", "impl-codec 0.5.0 (git+https://github.com/paritytech/parity-common?branch=master)", "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common?branch=master)", "scale-info", - "uint 0.9.1", + "uint 0.9.1 (git+https://github.com/paritytech/parity-common?branch=master)", ] [[package]] @@ -6450,9 +6514,9 @@ checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" [[package]] name = "proc-macro2" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec" +checksum = "f0d8caf72986c1a598726adc988bb5984792ef84f5ee5aa50209145ee8077038" dependencies = [ "unicode-xid", ] @@ -6524,18 +6588,18 @@ dependencies = [ [[package]] name = "psm" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3abf49e5417290756acfd26501536358560c4a5cc4a0934d390939acb3e7083a" +checksum = "21ff0279b4a85e576b97e4a21d13e437ebcd56612706cde5d3f0d5c9399490c0" dependencies = [ "cc", ] [[package]] name = "pwasm-utils" -version = "0.18.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0e517f47d9964362883182404b68d0b6949382c0baa40aa5ffca94f5f1e3481" +checksum = "f0c1a2f10b47d446372a4f397c58b329aaea72b2daf9395a623a411cb8ccb54f" dependencies = [ "byteorder", "log", @@ -6550,9 +6614,9 @@ checksum = 
"a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quick-error" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ac73b1112776fc109b2e61909bc46c7e1bf0d7f690ffb1676553acce16d5cda" +checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" [[package]] name = "quickcheck" @@ -6560,9 +6624,9 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" dependencies = [ - "env_logger 0.8.3", + "env_logger 0.8.4", "log", - "rand 0.8.3", + "rand 0.8.4", ] [[package]] @@ -6630,14 +6694,14 @@ dependencies = [ [[package]] name = "rand" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ef9e7e66b4468674bfcb0c81af8b7fa0bb154fa9f28eb840da5c447baeb8d7e" +checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" dependencies = [ "libc", - "rand_chacha 0.3.0", - "rand_core 0.6.2", - "rand_hc 0.3.0", + "rand_chacha 0.3.1", + "rand_core 0.6.3", + "rand_hc 0.3.1", ] [[package]] @@ -6652,12 +6716,12 @@ dependencies = [ [[package]] name = "rand_chacha" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.2", + "rand_core 0.6.3", ] [[package]] @@ -6686,20 +6750,21 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34cf66eb183df1c5876e2dcf6b13d57340741e8dc255b48e40a26de954d06ae7" +checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" dependencies = [ "getrandom 0.2.3", ] [[package]] name = 
"rand_distr" -version = "0.2.2" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96977acbdd3a6576fb1d27391900035bf3863d4a16422973a409b488cf29ffb2" +checksum = "051b398806e42b9cd04ad9ec8f81e355d0a382c543ac6672c62f5a5b452ef142" dependencies = [ - "rand 0.7.3", + "num-traits", + "rand 0.8.4", ] [[package]] @@ -6713,11 +6778,11 @@ dependencies = [ [[package]] name = "rand_hc" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73" +checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" dependencies = [ - "rand_core 0.6.2", + "rand_core 0.6.3", ] [[package]] @@ -6731,11 +6796,11 @@ dependencies = [ [[package]] name = "rand_pcg" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7de198537002b913568a3847e53535ace266f93526caf5c360ec41d72c5787f0" +checksum = "59cad018caf63deb318e5a4586d99a24424a364f40f1e5778c29aca23f4fc73e" dependencies = [ - "rand_core 0.6.2", + "rand_core 0.6.3", ] [[package]] @@ -6746,9 +6811,9 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" [[package]] name = "rayon" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674" +checksum = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90" dependencies = [ "autocfg", "crossbeam-deque 0.8.0", @@ -6758,13 +6823,13 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.9.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ab346ac5921dc62ffa9f89b7a773907511cdfa5490c572ae9be1be33e8afa4a" +checksum = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e" dependencies = [ "crossbeam-channel", "crossbeam-deque 0.8.0", 
- "crossbeam-utils 0.8.3", + "crossbeam-utils 0.8.5", "lazy_static", "num_cpus", ] @@ -6786,24 +6851,13 @@ checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "redox_syscall" -version = "0.2.5" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94341e4e44e24f6b591b59e47a8a027df12e008d73fd5672dbea9cc22f4507d9" +checksum = "5ab49abadf3f9e1c4bc499e8845e152ad87d2ad2d30371841171169e9d75feee" dependencies = [ "bitflags", ] -[[package]] -name = "redox_users" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" -dependencies = [ - "getrandom 0.1.16", - "redox_syscall 0.1.57", - "rust-argon2", -] - [[package]] name = "redox_users" version = "0.4.0" @@ -6811,7 +6865,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64" dependencies = [ "getrandom 0.2.3", - "redox_syscall 0.2.5", + "redox_syscall 0.2.9", ] [[package]] @@ -6848,31 +6902,29 @@ dependencies = [ [[package]] name = "regex" -version = "1.4.3" +version = "1.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9251239e129e16308e70d853559389de218ac275b515068abc96829d05b948a" +checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" dependencies = [ "aho-corasick", "memchr", "regex-syntax", - "thread_local", ] [[package]] name = "regex-automata" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1ded71d66a4a97f5e961fd0cb25a5f366a42a41570d16a763a69c092c26ae4" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" dependencies = [ - "byteorder", "regex-syntax", ] [[package]] name = "regex-syntax" -version = "0.6.22" +version = "0.6.25" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581" +checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" [[package]] name = "region" @@ -6890,7 +6942,7 @@ dependencies = [ name = "remote-externalities" version = "0.9.0" dependencies = [ - "env_logger 0.8.3", + "env_logger 0.8.4", "frame-support", "hex", "jsonrpsee-proc-macros", @@ -6966,23 +7018,11 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "rust-argon2" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb" -dependencies = [ - "base64 0.13.0", - "blake2b_simd", - "constant_time_eq", - "crossbeam-utils 0.8.3", -] - [[package]] name = "rustc-demangle" -version = "0.1.18" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e3bad0ee36814ca07d7968269dd4b7ec89ec2da10c4bb613928d3077083c232" +checksum = "dead70b0b5e03e9c814bcb6b01e03e68f7c57a80aa48c72ec92152ab3e818d49" [[package]] name = "rustc-hash" @@ -7005,6 +7045,15 @@ dependencies = [ "semver 0.9.0", ] +[[package]] +name = "rustc_version" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver 0.11.0", +] + [[package]] name = "rustls" version = "0.18.1" @@ -7052,20 +7101,20 @@ dependencies = [ "openssl-probe", "rustls 0.19.1", "schannel", - "security-framework 2.3.0", + "security-framework 2.3.1", ] [[package]] name = "rustversion" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb5d2a036dc6d2d8fd16fde3498b04306e29bd193bf306a57427019b823d5acd" +checksum = "61b3909d758bb75c79f23d4736fac9433868679d3ad2ea7a61e3c25cfda9a088" [[package]] name = "ruzstd" -version = "0.2.2" +version = "0.2.4" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d425143485a37727c7a46e689bbe3b883a00f42b4a52c4ac0f44855c1009b00" +checksum = "8cada0ef59efa6a5f4dc5e491f93d9f31e3fc7758df421ff1de8a706338e1100" dependencies = [ "byteorder", "twox-hash", @@ -7078,7 +7127,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4da5fcb054c46f5a5dff833b129285a93d3f0179531735e6c866e8cc307d2020" dependencies = [ "futures 0.3.15", - "pin-project 0.4.27", + "pin-project 0.4.28", "static_assertions", ] @@ -7094,7 +7143,7 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d3d055a2582e6b00ed7a31c1524040aa391092bf636328350813f3a0605215c" dependencies = [ - "rustc_version", + "rustc_version 0.2.3", ] [[package]] @@ -7404,7 +7453,7 @@ dependencies = [ "log", "merlin", "num-bigint", - "num-rational", + "num-rational 0.2.4", "num-traits", "parity-scale-codec", "parking_lot 0.11.1", @@ -7602,7 +7651,7 @@ dependencies = [ "parity-scale-codec", "parity-wasm 0.42.2", "parking_lot 0.11.1", - "paste 1.0.4", + "paste 1.0.5", "regex", "sc-executor-common", "sc-executor-wasmi", @@ -7687,7 +7736,7 @@ dependencies = [ "async-trait", "derive_more", "dyn-clone", - "finality-grandpa", + "finality-grandpa 0.15.0", "fork-tree", "futures 0.3.15", "futures-timer 3.0.2", @@ -7695,7 +7744,7 @@ dependencies = [ "log", "parity-scale-codec", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.7", "rand 0.7.3", "sc-block-builder", "sc-client-api", @@ -7733,7 +7782,7 @@ name = "sc-finality-grandpa-rpc" version = "0.9.0" dependencies = [ "derive_more", - "finality-grandpa", + "finality-grandpa 0.15.0", "futures 0.3.15", "jsonrpc-core", "jsonrpc-core-client", @@ -7763,14 +7812,14 @@ name = "sc-finality-grandpa-warp-sync" version = "0.9.0" dependencies = [ "derive_more", - "finality-grandpa", + "finality-grandpa 0.15.0", "futures 0.3.15", "log", "num-traits", "parity-scale-codec", "parking_lot 0.11.1", "prost", 
- "rand 0.8.3", + "rand 0.8.4", "sc-block-builder", "sc-client-api", "sc-finality-grandpa", @@ -7868,7 +7917,7 @@ dependencies = [ "nohash-hasher", "parity-scale-codec", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.7", "prost", "prost-build", "quickcheck", @@ -8121,7 +8170,7 @@ dependencies = [ "parity-scale-codec", "parity-util-mem", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.7", "rand 0.7.3", "sc-block-builder", "sc-chain-spec", @@ -8252,7 +8301,7 @@ dependencies = [ "libp2p", "log", "parking_lot 0.11.1", - "pin-project 1.0.5", + "pin-project 1.0.7", "rand 0.7.3", "serde", "serde_json", @@ -8452,9 +8501,9 @@ dependencies = [ [[package]] name = "sct" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3042af939fca8c3453b7af0f1c66e533a15a86169e39de2657310ade8f98d3c" +checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" dependencies = [ "ring", "untrusted", @@ -8484,9 +8533,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b239a3d5db51252f6f48f42172c65317f37202f4a21021bf5f9d40a408f4592c" +checksum = "23a2ac85147a3a11d77ecf1bc7166ec0b92febfa4461c37944e180f319ece467" dependencies = [ "bitflags", "core-foundation 0.9.1", @@ -8572,9 +8621,9 @@ checksum = "930c0acf610d3fdb5e2ab6213019aaa04e227ebe9547b0649ba599b16d788bd7" [[package]] name = "serde" -version = "1.0.124" +version = "1.0.126" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd761ff957cb2a45fbb9ab3da6512de9de55872866160b23c25f1a841e99d29f" +checksum = "ec7505abeacaec74ae4778d9d9328fe5a5d04253220a85c4ee022239fc996d03" dependencies = [ "serde_derive", ] @@ -8591,9 +8640,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.124" +version = "1.0.126" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "1800f7693e94e186f5e25a28291ae1570da908aff7d97a095dec1e56ff99069b" +checksum = "963a7dbc9895aeac7ac90e74f34a5d5261828f79df35cbed41e10189d3804d43" dependencies = [ "proc-macro2", "quote", @@ -8625,13 +8674,13 @@ dependencies = [ [[package]] name = "sha-1" -version = "0.9.4" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfebf75d25bd900fd1e7d11501efab59bc846dbc76196839663e6637bba9f25f" +checksum = "8c4cfa741c5832d0ef7fab46cabed29c2aae926db0b11bb2069edd8db5e64e16" dependencies = [ "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpuid-bool 0.1.2", + "cpufeatures", "digest 0.9.0", "opaque-debug 0.3.0", ] @@ -8650,13 +8699,13 @@ dependencies = [ [[package]] name = "sha2" -version = "0.9.3" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa827a14b29ab7f44778d14a88d3cb76e949c45083f7dbfa507d0cb699dc12de" +checksum = "b362ae5752fd2137731f9fa25fd4d9058af34666ca1966fb969119cc35719f12" dependencies = [ "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpuid-bool 0.1.2", + "cpufeatures", "digest 0.9.0", "opaque-debug 0.3.0", ] @@ -8690,9 +8739,9 @@ checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" [[package]] name = "signal-hook" -version = "0.3.6" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a7f3f92a1da3d6b1d32245d0cbcbbab0cfc45996d8df619c42bccfa6d2bbb5f" +checksum = "470c5a6397076fae0094aaf06a08e6ba6f37acb77d3b1b91ea92b4d6c8650c39" dependencies = [ "libc", "signal-hook-registry", @@ -8700,36 +8749,48 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f1d0fef1604ba8f7a073c7e701f213e056707210e9020af4528e0101ce11a6" +checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" dependencies = [ "libc", ] [[package]] name = "signature" -version = "1.3.0" +version = 
"1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0242b8e50dd9accdd56170e94ca1ebd223b098eb9c83539a6e367d0f36ae68" +checksum = "c19772be3c4dd2ceaacf03cb41d5885f2a02c4d8804884918e3a258480803335" [[package]] name = "simba" -version = "0.1.5" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb931b1367faadea6b1ab1c306a860ec17aaa5fa39f367d0c744e69d971a1fb2" +checksum = "5132a955559188f3d13c9ba831e77c802ddc8782783f050ed0c52f5988b95f4c" dependencies = [ - "approx", - "num-complex", + "approx 0.4.0", + "num-complex 0.3.1", "num-traits", - "paste 0.1.18", + "paste 1.0.5", +] + +[[package]] +name = "simba" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e82063457853d00243beda9952e910b82593e4b07ae9f721b9278a99a0d3d5c" +dependencies = [ + "approx 0.5.0", + "num-complex 0.4.0", + "num-traits", + "paste 1.0.5", ] [[package]] name = "slab" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8" +checksum = "f173ac3d1a7e3b28003f40de0b5ce7fe2710f9b9dc3fc38664cebee46b3b6527" [[package]] name = "slog" @@ -8767,8 +8828,8 @@ dependencies = [ "rand 0.7.3", "rand_core 0.5.1", "ring", - "rustc_version", - "sha2 0.9.3", + "rustc_version 0.2.3", + "sha2 0.9.5", "subtle 2.4.0", "x25519-dalek", ] @@ -8807,7 +8868,7 @@ dependencies = [ "httparse", "log", "rand 0.7.3", - "sha-1 0.9.4", + "sha-1 0.9.6", ] [[package]] @@ -8821,8 +8882,8 @@ dependencies = [ "futures 0.3.15", "httparse", "log", - "rand 0.8.3", - "sha-1 0.9.4", + "rand 0.8.4", + "sha-1 0.9.6", ] [[package]] @@ -9113,7 +9174,7 @@ dependencies = [ "secrecy", "serde", "serde_json", - "sha2 0.9.3", + "sha2 0.9.5", "sp-debug-derive", "sp-externalities", "sp-runtime-interface", @@ -9160,7 +9221,7 @@ dependencies = [ name = "sp-finality-grandpa" version = "3.0.0" dependencies = [ - 
"finality-grandpa", + "finality-grandpa 0.14.1", "log", "parity-scale-codec", "scale-info", @@ -9331,7 +9392,7 @@ dependencies = [ "max-encoded-len", "parity-scale-codec", "parity-util-mem", - "paste 1.0.4", + "paste 1.0.5", "rand 0.7.3", "scale-info", "serde", @@ -9677,11 +9738,15 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "statrs" -version = "0.12.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cce16f6de653e88beca7bd13780d08e09d4489dbca1f9210e041bc4852481382" +checksum = "1e0c1f144861fbfd2a8cc82d564ccbf7fb3b7834d4fa128b84e9c2a73371aead" dependencies = [ - "rand 0.7.3", + "approx 0.4.0", + "lazy_static", + "nalgebra 0.26.2", + "num-traits", + "rand 0.8.4", ] [[package]] @@ -10039,9 +10104,9 @@ checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2" [[package]] name = "syn" -version = "1.0.69" +version = "1.0.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48fe99c6bd8b1cc636890bcc071842de909d902c81ac7dab53ba33c421ab8ffb" +checksum = "f71489ff30030d2ae598524f61326b902466f72a0fb1a8564c001cc63425bcc7" dependencies = [ "proc-macro2", "quote", @@ -10086,8 +10151,8 @@ checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" dependencies = [ "cfg-if 1.0.0", "libc", - "rand 0.8.3", - "redox_syscall 0.2.5", + "rand 0.8.4", + "redox_syscall 0.2.9", "remove_dir_all", "winapi 0.3.9", ] @@ -10186,18 +10251,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e" +checksum = "fa6f76457f59514c7eeb4e59d891395fab0b2fd1d40723ae737d64153392e9c6" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" +checksum = "8a36768c0fbf1bb15eca10defa29526bda730a2376c2ab4393ccfa16fb1a318d" dependencies = [ "proc-macro2", "quote", @@ -10245,7 +10310,7 @@ dependencies = [ "pbkdf2 0.4.0", "rand 0.7.3", "rustc-hash", - "sha2 0.9.3", + "sha2 0.9.5", "thiserror", "unicode-normalization", "zeroize", @@ -10272,9 +10337,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.1.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317cca572a0e89c3ce0ca1f1bdc9369547fe318a683418e42ac8f59d14701023" +checksum = "5b5220f05bb7de7f3f53c7c065e1199b3172696fe2db9f9c4d8ad9b4ee74c342" dependencies = [ "tinyvec_macros", ] @@ -10335,12 +10400,12 @@ dependencies = [ [[package]] name = "tokio" -version = "1.6.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd3076b5c8cc18138b8f8814895c11eb4de37114a5d127bafdc5e55798ceef37" +checksum = "5fb2ed024293bb19f7a5dc54fe83bf86532a44c12a2bb8ba40d64a4509395ca2" dependencies = [ "autocfg", - "pin-project-lite 0.2.6", + "pin-project-lite 0.2.7", ] [[package]] @@ -10601,13 +10666,13 @@ checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" [[package]] name = "tracing" -version = "0.1.25" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ebdc2bb4498ab1ab5f5b73c5803825e60199229ccba0698170e3be0e7f959f" +checksum = "09adeb8c97449311ccd28a427f96fb563e7fd31aabf994189879d9da2394b89d" dependencies = [ "cfg-if 1.0.0", "log", - "pin-project-lite 0.2.6", + "pin-project-lite 0.2.7", "tracing-attributes", "tracing-core", ] @@ -10625,9 +10690,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.17" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f50de3927f93d202783f4513cda820ab47ef17f624b03c096e86ef00c67e6b5f" +checksum = 
"a9ff14f98b1a4b289c6248a023c1c2fa1491062964e9fed67ab29c4e4da4a052" dependencies = [ "lazy_static", ] @@ -10638,7 +10703,7 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" dependencies = [ - "pin-project 1.0.5", + "pin-project 1.0.7", "tracing", ] @@ -10665,9 +10730,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa5553bf0883ba7c9cbe493b085c29926bd41b66afc31ff72cf17ff4fb60dcd5" +checksum = "ab69019741fca4d98be3c62d2b75254528b5432233fd8a4d2739fec20278de48" dependencies = [ "ansi_term 0.12.1", "chrono", @@ -10741,9 +10806,9 @@ dependencies = [ [[package]] name = "trust-dns-proto" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d57e219ba600dd96c2f6d82eb79645068e14edbc5c7e27514af40436b88150c" +checksum = "ad0d7f5db438199a6e2609debe3f69f808d074e0a2888ee0bccb45fe234d03f4" dependencies = [ "async-trait", "cfg-if 1.0.0", @@ -10752,22 +10817,22 @@ dependencies = [ "futures-channel", "futures-io", "futures-util", - "idna 0.2.2", + "idna 0.2.3", "ipnet", "lazy_static", "log", - "rand 0.8.3", + "rand 0.8.4", "smallvec 1.6.1", "thiserror", "tinyvec", - "url 2.2.1", + "url 2.2.2", ] [[package]] name = "trust-dns-resolver" -version = "0.20.1" +version = "0.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0437eea3a6da51acc1e946545ff53d5b8fb2611ff1c3bed58522dde100536ae" +checksum = "f6ad17b608a64bd0735e67bde16b0636f8aa8591f831a25d18443ed00a699770" dependencies = [ "cfg-if 1.0.0", "futures-util", @@ -10841,9 +10906,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"373c8a200f9e67a0c95e62a4f52fbf80c23b4381c05a17845531982fa99e6b33" +checksum = "879f6906492a7cd215bfa4cf595b600146ccfac0c79bcbd1f3000162af5e8b06" [[package]] name = "ucd-trie" @@ -10853,9 +10918,9 @@ checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" [[package]] name = "uint" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e11fe9a9348741cf134085ad57c249508345fe16411b3d7fb4ff2da2f1d6382e" +checksum = "6470ab50f482bde894a037a57064480a246dbfdd5960bd65a44824693f08da5f" dependencies = [ "byteorder", "crunchy", @@ -10866,7 +10931,7 @@ dependencies = [ [[package]] name = "uint" version = "0.9.1" -source = "git+https://github.com/paritytech/parity-common?branch=master#075a34ee77b02f917ef350d3044acd95102ecc61" +source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" dependencies = [ "byteorder", "crunchy", @@ -10885,18 +10950,18 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" +checksum = "eeb8be209bb1c96b7c177c7420d26e04eccacb0eeae6b980e35fcb74678107e0" dependencies = [ "matches", ] [[package]] name = "unicode-normalization" -version = "0.1.17" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07fbfce1c8a97d547e8b5334978438d9d6ec8c20e38f56d4a4374d181493eaef" +checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" dependencies = [ "tinyvec", ] @@ -10915,9 +10980,9 @@ checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3" [[package]] name = "unicode-xid" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7fe0bb3479651439c9112f72b6c505038574c9fbb575ed1bf3b797fa39dd564" +checksum = 
"8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" [[package]] name = "universal-hash" @@ -10978,36 +11043,31 @@ dependencies = [ [[package]] name = "url" -version = "2.2.1" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ccd964113622c8e9322cfac19eb1004a07e636c545f325da085d5cdde6f1f8b" +checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" dependencies = [ "form_urlencoded", - "idna 0.2.2", + "idna 0.2.3", "matches", "percent-encoding 2.1.0", ] [[package]] name = "value-bag" -version = "1.0.0-alpha.6" +version = "1.0.0-alpha.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b676010e055c99033117c2343b33a40a30b91fecd6c49055ac9cd2d6c305ab1" +checksum = "dd320e1520f94261153e96f7534476ad869c14022aee1e59af7c778075d840ae" dependencies = [ "ctor", + "version_check", ] [[package]] name = "vcpkg" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b00bca6106a5e23f3eee943593759b7fcddb00554332e856d990c893966879fb" - -[[package]] -name = "vec-arena" -version = "1.0.0" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eafc1b9b2dfc6f5529177b62cf806484db55b32dc7c9658a118e11bbeb33061d" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vec_map" @@ -11017,9 +11077,9 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5a972e5669d67ba988ce3dc826706fb0a8b01471c088cb0b6110b805cc36aed" +checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" [[package]] name = "void" @@ -11044,9 +11104,9 @@ checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" [[package]] name = "walkdir" -version = "2.3.1" +version = 
"2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "777182bc735b6424e1a57516d35ed72cb8019d85c8c9bf536dccb3445c1a2f7d" +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" dependencies = [ "same-file", "winapi 0.3.9", @@ -11115,9 +11175,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.20" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3de431a2910c86679c34283a33f66f4e4abd7e0aec27b6669060148872aadf94" +checksum = "81b8b767af23de6ac18bf2168b690bed2902743ddf0fb39252e36f9e2bfc63ea" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -11156,9 +11216,9 @@ checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489" [[package]] name = "wasm-bindgen-test" -version = "0.3.20" +version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0d4da138503a4cf86801b94d95781ee3619faa8feca830569cc6b54997b8b5c" +checksum = "e972e914de63aa53bd84865e54f5c761bd274d48e5be3a6329a662c0386aa67a" dependencies = [ "console_error_panic_hook", "js-sys", @@ -11170,9 +11230,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.20" +version = "0.3.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3199c33f06500c731d5544664c24d0c2b742b98debc6b1c6f0c6d6e8fb7c19b" +checksum = "ea6153a8f9bf24588e9f25c87223414fff124049f68d3a442a0f0eab4768a8b6" dependencies = [ "proc-macro2", "quote", @@ -11214,7 +11274,7 @@ dependencies = [ "errno", "libc", "memory_units", - "num-rational", + "num-rational 0.2.4", "num-traits", "parity-wasm 0.42.2", "wasmi-validation", @@ -11250,7 +11310,7 @@ dependencies = [ "lazy_static", "libc", "log", - "paste 1.0.4", + "paste 1.0.5", "psm", "region", "rustc-demangle", @@ -11283,7 +11343,7 @@ dependencies = [ "libc", "log", "serde", - "sha2 0.9.3", + "sha2 0.9.5", "toml", "winapi 0.3.9", "zstd", @@ -11311,7 +11371,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c5d2a763e7a6fc734218e0e463196762a4f409c483063d81e0e85f96343b2e0a" dependencies = [ "anyhow", - "gimli 0.24.0", + "gimli", "more-asserts", "object 0.24.0", "target-lexicon", @@ -11330,7 +11390,7 @@ dependencies = [ "cranelift-codegen", "cranelift-entity", "cranelift-wasm", - "gimli 0.24.0", + "gimli", "indexmap", "log", "more-asserts", @@ -11356,7 +11416,7 @@ version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d4539ea734422b7c868107e2187d7746d8affbcaa71916d72639f53757ad707" dependencies = [ - "addr2line 0.15.1", + "addr2line", "anyhow", "cfg-if 1.0.0", "cranelift-codegen", @@ -11364,7 +11424,7 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "cranelift-wasm", - "gimli 0.24.0", + "gimli", "log", "more-asserts", "object 0.24.0", @@ -11405,7 +11465,7 @@ checksum = "e24364d522dcd67c897c8fffc42e5bdfc57207bbb6d7eeade0da9d4a7d70105b" dependencies = [ "anyhow", "cfg-if 1.0.0", - "gimli 0.24.0", + "gimli", "lazy_static", "libc", "object 0.24.0", @@ -11431,9 +11491,9 @@ dependencies = [ "libc", "log", "mach", - "memoffset 0.6.1", + "memoffset 0.6.4", "more-asserts", - "rand 0.8.3", + "rand 0.8.4", "region", "thiserror", "wasmtime-environ", @@ -11443,27 +11503,27 @@ dependencies = [ [[package]] name = "wast" -version = "35.0.0" +version = "36.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db5ae96da18bb5926341516fd409b5a8ce4e4714da7f0a1063d3b20ac9f9a1e1" +checksum = "8b5d7ba374a364571da1cb0a379a3dc302582a2d9937a183bfe35b68ad5bb9c4" dependencies = [ "leb128", ] [[package]] name = "wat" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ec280a739b69173e0ffd12c1658507996836ba4e992ed9bc1e5385a0bd72a02" +checksum = "16383df7f0e3901484c2dda6294ed6895caa3627ce4f6584141dcf30a33a23e6" dependencies = [ "wast", ] [[package]] name = "web-sys" -version = "0.3.47" 
+version = "0.3.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c40dc691fc48003eba817c38da7113c15698142da971298003cac3ef175680b3" +checksum = "a905d57e488fec8861446d3393670fb50d27a262344013181c2cdf9fff5481be" dependencies = [ "js-sys", "wasm-bindgen", @@ -11481,30 +11541,30 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.21.0" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82015b7e0b8bad8185994674a13a93306bea76cf5a16c5a181382fd3a5ec2376" +checksum = "aabe153544e473b775453675851ecc86863d2a81d786d741f6b76778f2a48940" dependencies = [ "webpki", ] [[package]] -name = "wepoll-sys" -version = "3.0.1" +name = "wepoll-ffi" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcb14dea929042224824779fbc82d9fab8d2e6d3cbc0ac404de8edf489e77ff" +checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb" dependencies = [ "cc", ] [[package]] name = "which" -version = "4.0.2" +version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87c14ef7e1b8b8ecfc75d5eca37949410046e66f15d185c01d70824f1f8111ef" +checksum = "b55551e42cbdf2ce2bedd2203d0cc08dba002c27510f86dab6d0ce304cba3dfe" dependencies = [ + "either", "libc", - "thiserror", ] [[package]] @@ -11583,11 +11643,11 @@ checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" [[package]] name = "x25519-dalek" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc614d95359fd7afc321b66d2107ede58b246b844cf5d8a0adcca413e439f088" +checksum = "5a0c105152107e3b96f6a00a65e86ce82d9b125230e1c4302940eca58ff71f4f" dependencies = [ - "curve25519-dalek 3.0.2", + "curve25519-dalek 3.1.0", "rand_core 0.5.1", "zeroize", ] @@ -11602,24 +11662,24 @@ dependencies = [ "log", "nohash-hasher", "parking_lot 0.11.1", - "rand 0.8.3", + "rand 0.8.4", "static_assertions", ] 
[[package]] name = "zeroize" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81a974bcdd357f0dca4d41677db03436324d45a4c9ed2d0b873a5a360ce41c36" +checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" dependencies = [ "zeroize_derive", ] [[package]] name = "zeroize_derive" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3f369ddb18862aba61aa49bf31e74d29f0f162dec753063200e1dc084345d16" +checksum = "a2c1e130bebaeab2f23886bf9acbaca14b092408c452543c857f66399cd6dab1" dependencies = [ "proc-macro2", "quote", diff --git a/client/finality-grandpa-warp-sync/Cargo.toml b/client/finality-grandpa-warp-sync/Cargo.toml index 27728e159c762..16ce463119719 100644 --- a/client/finality-grandpa-warp-sync/Cargo.toml +++ b/client/finality-grandpa-warp-sync/Cargo.toml @@ -28,7 +28,7 @@ sp-finality-grandpa = { version = "3.0.0", path = "../../primitives/finality-gra sp-runtime = { version = "3.0.0", path = "../../primitives/runtime" } [dev-dependencies] -finality-grandpa = { version = "0.14.1" } +finality-grandpa = { version = "0.15.0" } rand = "0.8" sc-block-builder = { version = "0.9.0", path = "../block-builder" } sp-consensus = { version = "0.9.0", path = "../../primitives/consensus/common" } diff --git a/client/finality-grandpa/Cargo.toml b/client/finality-grandpa/Cargo.toml index 3cb577aee5db8..5f876cc49dc9a 100644 --- a/client/finality-grandpa/Cargo.toml +++ b/client/finality-grandpa/Cargo.toml @@ -44,7 +44,7 @@ sc-network-gossip = { version = "0.9.0", path = "../network-gossip" } sp-finality-grandpa = { version = "3.0.0", path = "../../primitives/finality-grandpa" } prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus", version = "0.9.0"} sc-block-builder = { version = "0.9.0", path = "../block-builder" } -finality-grandpa = { version = "0.14.1", features = ["derive-codec"] } 
+finality-grandpa = { version = "0.15.0", features = ["derive-codec"] } pin-project = "1.0.4" linked-hash-map = "0.5.2" async-trait = "0.1.42" @@ -52,7 +52,7 @@ wasm-timer = "0.2" [dev-dependencies] assert_matches = "1.3.0" -finality-grandpa = { version = "0.14.1", features = ["derive-codec", "test-helpers"] } +finality-grandpa = { version = "0.15.0", features = ["derive-codec", "test-helpers"] } sc-network = { version = "0.9.0", path = "../network" } sc-network-test = { version = "0.8.0", path = "../network/test" } sp-keyring = { version = "3.0.0", path = "../../primitives/keyring" } diff --git a/client/finality-grandpa/rpc/Cargo.toml b/client/finality-grandpa/rpc/Cargo.toml index 97359120fcaae..e34e0947d56d7 100644 --- a/client/finality-grandpa/rpc/Cargo.toml +++ b/client/finality-grandpa/rpc/Cargo.toml @@ -14,7 +14,7 @@ sc-rpc = { version = "3.0.0", path = "../../rpc" } sp-blockchain = { version = "3.0.0", path = "../../../primitives/blockchain" } sp-core = { version = "3.0.0", path = "../../../primitives/core" } sp-runtime = { version = "3.0.0", path = "../../../primitives/runtime" } -finality-grandpa = { version = "0.14.1", features = ["derive-codec"] } +finality-grandpa = { version = "0.15.0", features = ["derive-codec"] } jsonrpc-core = "15.1.0" jsonrpc-core-client = "15.1.0" jsonrpc-derive = "15.1.0" From 3a809f60bbb9cdd3b744f1355656ce82e30f7e57 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 1 Jul 2021 17:05:43 +0100 Subject: [PATCH 286/503] Use sp_std::vec! 
macro instead of scale_info --- .../src/construct_runtime/expand/metadata.rs | 2 +- frame/support/procedural/src/pallet/expand/call.rs | 6 +++--- .../procedural/src/pallet/expand/constants.rs | 4 ++-- .../procedural/src/pallet/expand/storage.rs | 4 ++-- frame/support/procedural/src/storage/metadata.rs | 6 +++--- frame/support/src/dispatch.rs | 14 +++++++------- frame/support/src/event.rs | 8 ++++---- 7 files changed, 22 insertions(+), 22 deletions(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index 47a29e80a41e9..73b96e2f50274 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -66,7 +66,7 @@ pub fn expand_runtime_metadata( impl #runtime { pub fn metadata() -> #scrate::metadata::RuntimeMetadataPrefixed { #scrate::metadata::RuntimeMetadataLastVersion::new( - #scrate::scale_info::prelude::vec![ #(#pallets),* ], + #scrate::frame_support::sp_std::vec![ #(#pallets),* ], #scrate::metadata::ExtrinsicMetadata { ty: #scrate::scale_info::meta_type::<#extrinsic>(), version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 24944f66aaaf3..dc9c113e0e494 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -231,16 +231,16 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #[doc(hidden)] pub fn call_functions() -> #frame_support::metadata::PalletCallMetadata { let ty = #frame_support::scale_info::meta_type::<#call_ident<#type_use_gen>>(); - let calls = #frame_support::scale_info::prelude::vec![ #( + let calls = #frame_support::frame_support::sp_std::vec![ #( #frame_support::metadata::FunctionMetadata { name: stringify!(#fn_name), - arguments: 
#frame_support::scale_info::prelude::vec![ #( + arguments: #frame_support::frame_support::sp_std::vec![ #( #frame_support::metadata::FunctionArgumentMetadata { name: stringify!(#args_name), ty: #frame_support::scale_info::meta_type::<#args_meta_type>(), }, )* ], - documentation: #frame_support::scale_info::prelude::vec![ #( #fn_doc ),* ], + documentation: #frame_support::frame_support::sp_std::vec![ #( #fn_doc ),* ], }, )* ]; #frame_support::metadata::PalletCallMetadata { ty, calls } diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index 33c88b46d3d26..eb74348ccca18 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -100,7 +100,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { name: #ident_str, ty: #frame_support::scale_info::meta_type::<#const_type>(), value: #default_byte_getter::<#type_use_gen>(Default::default()).default_byte(), - documentation: #frame_support::scale_info::prelude::vec![ #( #doc ),* ], + documentation: #frame_support::frame_support::sp_std::vec![ #( #doc ),* ], } }) }); @@ -112,7 +112,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { pub fn module_constants_metadata() -> #frame_support::sp_std::vec::Vec<#frame_support::metadata::PalletConstantMetadata> { - #frame_support::scale_info::prelude::vec![ #( #consts ),* ] + #frame_support::frame_support::sp_std::vec![ #( #consts ),* ] } } ) diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index 81672aa469a01..a9a52374838b4 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -177,7 +177,7 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { modifier: <#full_ident as #frame_support::storage::StorageEntryMetadata>::MODIFIER, ty: 
<#full_ident as #frame_support::storage::StorageEntryMetadata>::ty(), default: <#full_ident as #frame_support::storage::StorageEntryMetadata>::default(), - documentation: #frame_support::scale_info::prelude::vec![ + documentation: #frame_support::frame_support::sp_std::vec![ #( #docs, )* ], } @@ -346,7 +346,7 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::traits::PalletInfo >::name::<#pallet_ident<#type_use_gen>>() .expect("Every active pallet has a name in the runtime; qed"), - entries: #frame_support::scale_info::prelude::vec![ + entries: #frame_support::frame_support::sp_std::vec![ #( #entries, )* ], } diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index 124ea20f4d559..57204e24c70ff 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -68,7 +68,7 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> quote!{ #scrate::metadata::StorageEntryType::NMap { keys: #scrate::scale_info::meta_type::<#key_tuple>(), - hashers: #scrate::scale_info::prelude::vec! [ + hashers: #scrate::frame_support::sp_std::vec! 
[ #( #scrate::metadata::StorageHasher::#hashers, )* ], value: #scrate::scale_info::meta_type::<#value_type>(), @@ -173,7 +173,7 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { modifier: #modifier, ty: #ty, default: #default_byte_getter_struct_instance.default_byte(), - documentation: #scrate::scale_info::prelude::vec![ #( #docs ),* ], + documentation: #scrate::frame_support::sp_std::vec![ #( #docs ),* ], }, }; @@ -192,7 +192,7 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { let store_metadata = quote!( #scrate::metadata::PalletStorageMetadata { prefix: #prefix, - entries: #scrate::scale_info::prelude::vec![ #entries ], + entries: #scrate::frame_support::sp_std::vec![ #entries ], } ); diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index f2176af10b4eb..70beeb502113e 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2465,7 +2465,7 @@ macro_rules! __impl_module_constants_metadata { } } )* - $crate::scale_info::prelude::vec![ + $crate::frame_support::sp_std::vec![ $( $crate::metadata::PalletConstantMetadata { name: stringify!($name), @@ -2474,9 +2474,9 @@ macro_rules! __impl_module_constants_metadata { Default::default() ).default_byte(), #[cfg(feature = "metadata-docs")] - documentation: $crate::scale_info::prelude::vec![ $( $doc_attr ),* ], + documentation: $crate::frame_support::sp_std::vec![ $( $doc_attr ),* ], #[cfg(not(feature = "metadata-docs"))] - documentation: $crate::scale_info::prelude::vec![], + documentation: $crate::frame_support::sp_std::vec![], } ),* ] @@ -2539,7 +2539,7 @@ macro_rules! __functions_to_metadata{ $origin_type:ty; $( $function_metadata:expr ),*; ) => { - $crate::scale_info::prelude::vec![ $( $function_metadata ),* ] + $crate::frame_support::sp_std::vec![ $( $function_metadata ),* ] } } @@ -2556,7 +2556,7 @@ macro_rules! 
__function_to_metadata { ) => { $crate::metadata::FunctionMetadata { name: stringify!($fn_name), - arguments: $crate::scale_info::prelude::vec![ + arguments: $crate::frame_support::sp_std::vec![ $( $crate::metadata::FunctionArgumentMetadata { name: stringify!($param_name), @@ -2567,9 +2567,9 @@ macro_rules! __function_to_metadata { ),* ], #[cfg(feature = "metadata-docs")] - documentation: $crate::scale_info::prelude::vec![ $( $fn_doc ),* ], + documentation: $crate::frame_support::sp_std::vec![ $( $fn_doc ),* ], #[cfg(not(feature = "metadata-docs"))] - documentation: $crate::scale_info::prelude::vec![], + documentation: $crate::frame_support::sp_std::vec![], } }; diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 10dccae8427a6..42b723493adb4 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -302,13 +302,13 @@ macro_rules! __events_to_metadata { $( $metadata, )* $crate::metadata::EventMetadata { name: stringify!($event), - arguments: $crate::scale_info::prelude::vec![ + arguments: $crate::frame_support::sp_std::vec![ $( $( $crate::metadata::TypeSpec::new::<$param>(stringify!($param)) ),* )* ], #[cfg(feature = "metadata-docs")] - documentation: $crate::scale_info::prelude::vec![ $( $doc_attr ),* ], + documentation: $crate::frame_support::sp_std::vec![ $( $doc_attr ),* ], #[cfg(not(feature = "metadata-docs"))] - documentation: $crate::scale_info::prelude::vec![], + documentation: $crate::frame_support::sp_std::vec![], }; $( $rest )* ) @@ -316,7 +316,7 @@ macro_rules! __events_to_metadata { ( $( $metadata:expr ),*; ) => { - $crate::scale_info::prelude::vec![ $( $metadata ),* ] + $crate::frame_support::sp_std::vec![ $( $metadata ),* ] } } From a734b3f2ed462a60a1898abbbfab942d326c020f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 1 Jul 2021 17:39:17 +0100 Subject: [PATCH 287/503] Fix up vec! 
imports --- frame/support/procedural/src/pallet/expand/call.rs | 6 +++--- frame/support/procedural/src/pallet/expand/constants.rs | 4 ++-- frame/support/procedural/src/pallet/expand/storage.rs | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index dc9c113e0e494..e6da9fbc8aa70 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -231,16 +231,16 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #[doc(hidden)] pub fn call_functions() -> #frame_support::metadata::PalletCallMetadata { let ty = #frame_support::scale_info::meta_type::<#call_ident<#type_use_gen>>(); - let calls = #frame_support::frame_support::sp_std::vec![ #( + let calls = #frame_support::sp_std::vec![ #( #frame_support::metadata::FunctionMetadata { name: stringify!(#fn_name), - arguments: #frame_support::frame_support::sp_std::vec![ #( + arguments: #frame_support::sp_std::vec![ #( #frame_support::metadata::FunctionArgumentMetadata { name: stringify!(#args_name), ty: #frame_support::scale_info::meta_type::<#args_meta_type>(), }, )* ], - documentation: #frame_support::frame_support::sp_std::vec![ #( #fn_doc ),* ], + documentation: #frame_support::sp_std::vec![ #( #fn_doc ),* ], }, )* ]; #frame_support::metadata::PalletCallMetadata { ty, calls } diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index eb74348ccca18..b92d80e212094 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -100,7 +100,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { name: #ident_str, ty: #frame_support::scale_info::meta_type::<#const_type>(), value: #default_byte_getter::<#type_use_gen>(Default::default()).default_byte(), - documentation: 
#frame_support::frame_support::sp_std::vec![ #( #doc ),* ], + documentation: #frame_support::sp_std::vec![ #( #doc ),* ], } }) }); @@ -112,7 +112,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { pub fn module_constants_metadata() -> #frame_support::sp_std::vec::Vec<#frame_support::metadata::PalletConstantMetadata> { - #frame_support::frame_support::sp_std::vec![ #( #consts ),* ] + #frame_support::sp_std::vec![ #( #consts ),* ] } } ) diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index a9a52374838b4..69291db7b7486 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -177,7 +177,7 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { modifier: <#full_ident as #frame_support::storage::StorageEntryMetadata>::MODIFIER, ty: <#full_ident as #frame_support::storage::StorageEntryMetadata>::ty(), default: <#full_ident as #frame_support::storage::StorageEntryMetadata>::default(), - documentation: #frame_support::frame_support::sp_std::vec![ + documentation: #frame_support::sp_std::vec![ #( #docs, )* ], } @@ -346,7 +346,7 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::traits::PalletInfo >::name::<#pallet_ident<#type_use_gen>>() .expect("Every active pallet has a name in the runtime; qed"), - entries: #frame_support::frame_support::sp_std::vec![ + entries: #frame_support::sp_std::vec![ #( #entries, )* ], } From 20ce8f621106700f33984f5a93559594aa4a2186 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 1 Jul 2021 17:43:42 +0100 Subject: [PATCH 288/503] Fix up more vec! 
imports --- frame/benchmarking/src/lib.rs | 4 ++-- .../src/construct_runtime/expand/metadata.rs | 2 +- frame/support/procedural/src/storage/metadata.rs | 6 +++--- frame/support/src/dispatch.rs | 14 +++++++------- frame/support/src/event.rs | 8 ++++---- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/frame/benchmarking/src/lib.rs b/frame/benchmarking/src/lib.rs index 8160bd5d1dd21..cb0bad89cf726 100644 --- a/frame/benchmarking/src/lib.rs +++ b/frame/benchmarking/src/lib.rs @@ -304,7 +304,7 @@ macro_rules! benchmarks_iter { ( $( $names_extra )* ) $name { $( $code )* }: { < - Call as $crate::frame_support::traits::UnfilteredDispatchable + Call as $crate::traits::UnfilteredDispatchable >::dispatch_bypass_filter( Call::::$dispatch($($arg),*), $origin.into() )?; @@ -722,7 +722,7 @@ macro_rules! impl_benchmark { // Add whitelist to DB including whitelisted caller let mut whitelist = whitelist.to_vec(); let whitelisted_caller_key = - as $crate::frame_support::storage::StorageMap<_,_>>::hashed_key_for( + as $crate::storage::StorageMap<_,_>>::hashed_key_for( $crate::whitelisted_caller::() ); whitelist.push(whitelisted_caller_key.into()); diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index 73b96e2f50274..c1fe66d45ba3d 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -66,7 +66,7 @@ pub fn expand_runtime_metadata( impl #runtime { pub fn metadata() -> #scrate::metadata::RuntimeMetadataPrefixed { #scrate::metadata::RuntimeMetadataLastVersion::new( - #scrate::frame_support::sp_std::vec![ #(#pallets),* ], + #scrate::sp_std::vec![ #(#pallets),* ], #scrate::metadata::ExtrinsicMetadata { ty: #scrate::scale_info::meta_type::<#extrinsic>(), version: <#extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata>::VERSION, diff --git 
a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index 57204e24c70ff..18d30cfa1b7ae 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -68,7 +68,7 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> quote!{ #scrate::metadata::StorageEntryType::NMap { keys: #scrate::scale_info::meta_type::<#key_tuple>(), - hashers: #scrate::frame_support::sp_std::vec! [ + hashers: #scrate::sp_std::vec! [ #( #scrate::metadata::StorageHasher::#hashers, )* ], value: #scrate::scale_info::meta_type::<#value_type>(), @@ -173,7 +173,7 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { modifier: #modifier, ty: #ty, default: #default_byte_getter_struct_instance.default_byte(), - documentation: #scrate::frame_support::sp_std::vec![ #( #docs ),* ], + documentation: #scrate::sp_std::vec![ #( #docs ),* ], }, }; @@ -192,7 +192,7 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { let store_metadata = quote!( #scrate::metadata::PalletStorageMetadata { prefix: #prefix, - entries: #scrate::frame_support::sp_std::vec![ #entries ], + entries: #scrate::sp_std::vec![ #entries ], } ); diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 70beeb502113e..c7c1b12db6d64 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2465,7 +2465,7 @@ macro_rules! __impl_module_constants_metadata { } } )* - $crate::frame_support::sp_std::vec![ + $crate::sp_std::vec![ $( $crate::metadata::PalletConstantMetadata { name: stringify!($name), @@ -2474,9 +2474,9 @@ macro_rules! 
__impl_module_constants_metadata { Default::default() ).default_byte(), #[cfg(feature = "metadata-docs")] - documentation: $crate::frame_support::sp_std::vec![ $( $doc_attr ),* ], + documentation: $crate::sp_std::vec![ $( $doc_attr ),* ], #[cfg(not(feature = "metadata-docs"))] - documentation: $crate::frame_support::sp_std::vec![], + documentation: $crate::sp_std::vec![], } ),* ] @@ -2539,7 +2539,7 @@ macro_rules! __functions_to_metadata{ $origin_type:ty; $( $function_metadata:expr ),*; ) => { - $crate::frame_support::sp_std::vec![ $( $function_metadata ),* ] + $crate::sp_std::vec![ $( $function_metadata ),* ] } } @@ -2556,7 +2556,7 @@ macro_rules! __function_to_metadata { ) => { $crate::metadata::FunctionMetadata { name: stringify!($fn_name), - arguments: $crate::frame_support::sp_std::vec![ + arguments: $crate::sp_std::vec![ $( $crate::metadata::FunctionArgumentMetadata { name: stringify!($param_name), @@ -2567,9 +2567,9 @@ macro_rules! __function_to_metadata { ),* ], #[cfg(feature = "metadata-docs")] - documentation: $crate::frame_support::sp_std::vec![ $( $fn_doc ),* ], + documentation: $crate::sp_std::vec![ $( $fn_doc ),* ], #[cfg(not(feature = "metadata-docs"))] - documentation: $crate::frame_support::sp_std::vec![], + documentation: $crate::sp_std::vec![], } }; diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 42b723493adb4..007d69a27e7a9 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -302,13 +302,13 @@ macro_rules! 
__events_to_metadata { $( $metadata, )* $crate::metadata::EventMetadata { name: stringify!($event), - arguments: $crate::frame_support::sp_std::vec![ + arguments: $crate::sp_std::vec![ $( $( $crate::metadata::TypeSpec::new::<$param>(stringify!($param)) ),* )* ], #[cfg(feature = "metadata-docs")] - documentation: $crate::frame_support::sp_std::vec![ $( $doc_attr ),* ], + documentation: $crate::sp_std::vec![ $( $doc_attr ),* ], #[cfg(not(feature = "metadata-docs"))] - documentation: $crate::frame_support::sp_std::vec![], + documentation: $crate::sp_std::vec![], }; $( $rest )* ) @@ -316,7 +316,7 @@ macro_rules! __events_to_metadata { ( $( $metadata:expr ),*; ) => { - $crate::frame_support::sp_std::vec![ $( $metadata ),* ] + $crate::sp_std::vec![ $( $metadata ),* ] } } From 196abf1c97b0d527b5b1b26ba80a464fdfb4cb55 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 2 Jul 2021 09:09:27 +0100 Subject: [PATCH 289/503] Undo some unnecessary changes --- bin/node/cli/Cargo.toml | 2 +- client/tracing/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/node/cli/Cargo.toml b/bin/node/cli/Cargo.toml index 90567a4b1eaff..9fcd0875e8dca 100644 --- a/bin/node/cli/Cargo.toml +++ b/bin/node/cli/Cargo.toml @@ -107,7 +107,7 @@ try-runtime-cli = { version = "0.9.0", optional = true, path = "../../../utils/f wasm-bindgen = { version = "0.2.73", optional = true } wasm-bindgen-futures = { version = "0.4.18", optional = true } browser-utils = { package = "substrate-browser-utils", path = "../../../utils/browser", optional = true, version = "0.9.0"} -libp2p-wasm-ext = { version = "0.28.1", features = ["websocket"], optional = true } +libp2p-wasm-ext = { version = "0.28", features = ["websocket"], optional = true } [target.'cfg(target_arch="x86_64")'.dependencies] node-executor = { version = "2.0.0", path = "../executor", features = [ "wasmtime" ] } diff --git a/client/tracing/Cargo.toml b/client/tracing/Cargo.toml index 6b597a0695235..1121b922494c9 100644 
--- a/client/tracing/Cargo.toml +++ b/client/tracing/Cargo.toml @@ -43,5 +43,5 @@ sc-rpc-server = { version = "3.0.0", path = "../rpc-servers" } wasm-timer = "0.2" [target.'cfg(target_os = "unknown")'.dependencies] -wasm-bindgen = "0.2.73" +wasm-bindgen = "0.2.67" web-sys = { version = "0.3.44", features = ["console"] } From 8afc0f9164ab19701a2413acf4f05bc9cb794771 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 2 Jul 2021 10:03:59 +0100 Subject: [PATCH 290/503] Import scale_info::TypeInfo derive --- frame/atomic-swap/src/lib.rs | 5 ++-- frame/balances/src/lib.rs | 18 ++++++------- frame/bounties/src/lib.rs | 5 ++-- frame/collective/src/lib.rs | 7 ++--- frame/contracts/src/schedule.rs | 9 ++++--- frame/contracts/src/storage.rs | 9 ++++--- frame/democracy/src/lib.rs | 5 ++-- frame/democracy/src/types.rs | 9 ++++--- frame/democracy/src/vote.rs | 9 ++++--- .../election-provider-multi-phase/src/lib.rs | 17 ++++++------ frame/elections-phragmen/src/lib.rs | 7 ++--- frame/gilt/src/lib.rs | 9 ++++--- frame/grandpa/src/lib.rs | 6 +++-- frame/identity/src/lib.rs | 15 ++++++----- frame/im-online/src/lib.rs | 7 ++--- frame/multisig/src/lib.rs | 5 ++-- frame/recovery/src/lib.rs | 5 ++-- frame/scheduler/src/lib.rs | 7 ++--- frame/society/src/lib.rs | 12 ++++----- frame/staking/src/lib.rs | 27 ++++++++++--------- frame/staking/src/slashing.rs | 7 ++--- frame/support/src/lib.rs | 9 ++++--- frame/support/src/origin.rs | 11 ++++---- frame/support/src/weights.rs | 13 ++++----- frame/support/test/tests/construct_runtime.rs | 9 ++++--- frame/support/test/tests/instance.rs | 7 ++--- frame/support/test/tests/issue2219.rs | 6 ++--- frame/system/src/lib.rs | 15 ++++++----- frame/system/src/limits.rs | 7 ++--- primitives/consensus/babe/src/lib.rs | 5 ++-- primitives/consensus/slots/src/lib.rs | 5 ++-- primitives/core/src/crypto.rs | 5 ++-- primitives/core/src/ecdsa.rs | 5 ++-- primitives/core/src/ed25519.rs | 5 ++-- primitives/core/src/lib.rs | 7 +++-- 
primitives/core/src/offchain/mod.rs | 5 ++-- primitives/core/src/sr25519.rs | 5 ++-- primitives/finality-grandpa/src/lib.rs | 5 ++-- primitives/runtime/src/generic/digest.rs | 7 ++--- primitives/runtime/src/lib.rs | 11 ++++---- primitives/runtime/src/testing.rs | 9 ++++--- primitives/runtime/src/traits.rs | 17 ++++++------ test-utils/runtime/src/lib.rs | 5 ++-- 43 files changed, 208 insertions(+), 165 deletions(-) diff --git a/frame/atomic-swap/src/lib.rs b/frame/atomic-swap/src/lib.rs index 0a219a5fbb187..b37862bae74c3 100644 --- a/frame/atomic-swap/src/lib.rs +++ b/frame/atomic-swap/src/lib.rs @@ -51,10 +51,11 @@ use frame_support::{ dispatch::DispatchResult, }; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::RuntimeDebug; /// Pending atomic swap operation. -#[derive(Clone, Eq, PartialEq, RuntimeDebugNoBound, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, RuntimeDebugNoBound, Encode, Decode, TypeInfo)] pub struct PendingSwap { /// Source of the swap. pub source: T::AccountId, @@ -87,7 +88,7 @@ pub trait SwapAction { } /// A swap action that only allows transferring balances. -#[derive(Clone, RuntimeDebug, Eq, PartialEq, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, RuntimeDebug, Eq, PartialEq, Encode, Decode, TypeInfo)] pub struct BalanceSwapAction> { value: >::Balance, _marker: PhantomData, diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 13fbcc37230a6..178bd1a1fb4a3 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -194,7 +194,7 @@ pub mod pallet { pub trait Config: frame_system::Config { /// The balance of an account. type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + - MaybeSerializeDeserialize + Debug + MaxEncodedLen + scale_info::TypeInfo; + MaybeSerializeDeserialize + Debug + MaxEncodedLen + TypeInfo; /// Handler for the unbalanced reduction when removing a dust account. 
type DustRemoval: OnUnbalanced>; @@ -214,10 +214,10 @@ pub mod pallet { /// The maximum number of locks that should exist on an account. /// Not strictly enforced, but used for weight estimation. - type MaxLocks: Get + scale_info::TypeInfo; + type MaxLocks: Get + TypeInfo; /// The maximum number of named reserves that can exist on an account. - type MaxReserves: Get + scale_info::TypeInfo; + type MaxReserves: Get + TypeInfo; /// The id type for named reserves. type ReserveIdentifier: Parameter + Member + MaxEncodedLen + Ord + Copy; @@ -570,9 +570,7 @@ impl, I: 'static> GenesisConfig { } /// Simplified reasons for withdrawing balance. -#[derive( - Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, scale_info::TypeInfo -)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub enum Reasons { /// Paying system transaction fees. Fee = 0, @@ -604,7 +602,7 @@ impl BitOr for Reasons { /// A single lock on a balance. There can be many of these on an account and they "overlap", so the /// same balance is frozen by multiple locks. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct BalanceLock { /// An identifier for this lock. Only one lock may be in existence for each identifier. pub id: LockIdentifier, @@ -615,7 +613,7 @@ pub struct BalanceLock { } /// Store named reserved balance. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct ReserveData { /// The identifier for the named reserve. pub id: ReserveIdentifier, @@ -624,7 +622,7 @@ pub struct ReserveData { } /// All balance information for an account. 
-#[derive(Encode, Decode, Clone, PartialEq, Eq, Default, RuntimeDebug, MaxEncodedLen, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, Default, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct AccountData { /// Non-reserved part of the balance. There may still be restrictions on this, but it is the /// total pool what may in principle be transferred, reserved and used for tipping. @@ -671,7 +669,7 @@ impl AccountData { // A value placed in storage that represents the current version of the Balances storage. // This value is used by the `on_runtime_upgrade` logic to determine whether we run // storage migration logic. This should match directly with the semantic versions of the Rust crate. -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] enum Releases { V1_0_0, V2_0_0, diff --git a/frame/bounties/src/lib.rs b/frame/bounties/src/lib.rs index c15ed88581dd7..03fb358ecfb9f 100644 --- a/frame/bounties/src/lib.rs +++ b/frame/bounties/src/lib.rs @@ -96,6 +96,7 @@ use frame_support::traits::{EnsureOrigin}; use frame_support::weights::{Weight}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use frame_system::{self as system, ensure_signed}; pub use weights::WeightInfo; @@ -137,7 +138,7 @@ pub trait Config: frame_system::Config + pallet_treasury::Config { pub type BountyIndex = u32; /// A bounty proposal. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Bounty { /// The account proposing it. proposer: AccountId, @@ -154,7 +155,7 @@ pub struct Bounty { } /// The status of a bounty proposal. 
-#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum BountyStatus { /// The bounty is proposed and waiting for approval. Proposed, diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index c269bb20e7ca0..f51bd3661c607 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -50,6 +50,7 @@ use sp_runtime::{RuntimeDebug, traits::Hash}; use frame_support::{ decl_error, decl_event, decl_module, decl_storage, ensure, BoundedVec, codec::{Decode, Encode}, + scale_info::TypeInfo, dispatch::{ DispatchError, DispatchResult, DispatchResultWithPostInfo, Dispatchable, Parameter, PostDispatchInfo, @@ -137,7 +138,7 @@ pub trait Config: frame_system::Config { type MotionDuration: Get; /// Maximum number of proposals allowed to be active in parallel. - type MaxProposals: Get + scale_info::TypeInfo; + type MaxProposals: Get + TypeInfo; /// The maximum number of members supported by the pallet. Used for weight estimation. /// @@ -154,7 +155,7 @@ pub trait Config: frame_system::Config { } /// Origin for the collective module. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, TypeInfo)] pub enum RawOrigin { /// It has been condoned by a given number of members of the collective from a given total. Members(MemberCount, MemberCount), @@ -176,7 +177,7 @@ impl GetBacking for RawOrigin { /// Origin for the collective module. pub type Origin = RawOrigin<::AccountId, I>; -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] /// Info for keeping track of a motion being voted on. pub struct Votes { /// The proposal's unique index. 
diff --git a/frame/contracts/src/schedule.rs b/frame/contracts/src/schedule.rs index da1f8acc94d18..f17b05a2d3abb 100644 --- a/frame/contracts/src/schedule.rs +++ b/frame/contracts/src/schedule.rs @@ -26,6 +26,7 @@ use pallet_contracts_proc_macro::{ScheduleDebug, WeightDebug}; use frame_support::{DefaultNoBound, weights::Weight}; use sp_std::{marker::PhantomData, vec::Vec}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use pwasm_utils::{parity_wasm::elements, rules}; use sp_runtime::RuntimeDebug; @@ -72,7 +73,7 @@ pub const INSTR_BENCHMARK_BATCH_SIZE: u32 = 1_000; /// changes are made to its values. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(bound(serialize = "", deserialize = "")))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, ScheduleDebug, DefaultNoBound, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, ScheduleDebug, DefaultNoBound, TypeInfo)] #[scale_info(skip_type_params(T))] pub struct Schedule { /// Describes the upper limits on various metrics. @@ -93,7 +94,7 @@ pub struct Schedule { /// values will break existing contracts which are above the new limits when a /// re-instrumentation is triggered. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Limits { /// The maximum number of topics supported by an event. pub event_topics: u32, @@ -175,7 +176,7 @@ impl Limits { /// that use them as supporting instructions. Supporting means mainly pushing arguments /// and dropping return values in order to maintain a valid module. 
#[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, TypeInfo)] #[scale_info(skip_type_params(T))] pub struct InstructionWeights { /// Version of the instruction weights. @@ -249,7 +250,7 @@ pub struct InstructionWeights { /// Describes the weight for each imported function that a contract is allowed to call. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, PartialEq, Eq, WeightDebug, TypeInfo)] #[scale_info(skip_type_params(T))] pub struct HostFnWeights { /// Weight of calling `seal_caller`. diff --git a/frame/contracts/src/storage.rs b/frame/contracts/src/storage.rs index a75c1b6501aa5..a7c46cbe37ff5 100644 --- a/frame/contracts/src/storage.rs +++ b/frame/contracts/src/storage.rs @@ -23,6 +23,7 @@ use crate::{ weights::WeightInfo, }; use codec::{Codec, Encode, Decode}; +use scale_info::TypeInfo; use sp_std::prelude::*; use sp_std::{marker::PhantomData, fmt::Debug}; use sp_io::hashing::blake2_256; @@ -45,7 +46,7 @@ pub type TombstoneContractInfo = /// Information for managing an account and its sub trie abstraction. /// This is the required info to cache for an account -#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, RuntimeDebug, TypeInfo)] #[scale_info(skip_type_params(T))] pub enum ContractInfo { Alive(AliveContractInfo), @@ -84,7 +85,7 @@ impl ContractInfo { /// Information for managing an account and its sub trie abstraction. /// This is the required info to cache for an account. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct RawAliveContractInfo { /// Unique ID for the subtree encoded as a bytes vector. 
pub trie_id: TrieId, @@ -122,7 +123,7 @@ fn child_trie_info(trie_id: &[u8]) -> ChildInfo { ChildInfo::new_default(trie_id) } -#[derive(Encode, Decode, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct RawTombstoneContractInfo(H, PhantomData); impl RawTombstoneContractInfo @@ -146,7 +147,7 @@ impl From> for ContractInfo { } } -#[derive(Encode, Decode, scale_info::TypeInfo)] +#[derive(Encode, Decode, TypeInfo)] pub struct DeletedContract { pair_count: u32, trie_id: TrieId, diff --git a/frame/democracy/src/lib.rs b/frame/democracy/src/lib.rs index 1c688c1159611..b5c285b01358b 100644 --- a/frame/democracy/src/lib.rs +++ b/frame/democracy/src/lib.rs @@ -158,6 +158,7 @@ use sp_runtime::{ traits::{Zero, Hash, Dispatchable, Saturating, Bounded}, }; use codec::{Encode, Decode, Input}; +use scale_info::TypeInfo; use frame_support::{ ensure, weights::Weight, traits::{ @@ -201,7 +202,7 @@ type BalanceOf = <::Currency as Currency< = <::Currency as Currency<::AccountId>>::NegativeImbalance; -#[derive(Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum PreimageStatus { /// The preimage is imminently needed at the argument. Missing(BlockNumber), @@ -228,7 +229,7 @@ impl PreimageStatus { /// The number of aye votes, expressed in terms of post-conviction lock-vote. pub (crate) ayes: Balance, @@ -34,7 +35,7 @@ pub struct Tally { } /// Amount of votes and capital placed in delegation for an account. -#[derive(Encode, Decode, Default, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Default, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Delegations { /// The number of votes (this is post-conviction). pub (crate) votes: Balance, @@ -161,7 +162,7 @@ impl< } /// Info regarding an ongoing referendum. 
-#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct ReferendumStatus { /// When voting on this referendum will end. pub (crate) end: BlockNumber, @@ -176,7 +177,7 @@ pub struct ReferendumStatus { } /// Info regarding a referendum, present or past. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum ReferendumInfo { /// Referendum is happening, the arg is the block number at which it will end. Ongoing(ReferendumStatus), diff --git a/frame/democracy/src/vote.rs b/frame/democracy/src/vote.rs index dbf55249578e3..1f28d3ee10824 100644 --- a/frame/democracy/src/vote.rs +++ b/frame/democracy/src/vote.rs @@ -19,11 +19,12 @@ use sp_std::{prelude::*, result::Result, convert::TryFrom}; use codec::{Encode, EncodeLike, Decode, Output, Input}; +use scale_info::TypeInfo; use sp_runtime::{RuntimeDebug, traits::{Saturating, Zero}}; use crate::{Conviction, ReferendumIndex, Delegations}; /// A number of lock periods, plus a vote, one way or the other. -#[derive(Copy, Clone, Eq, PartialEq, Default, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Copy, Clone, Eq, PartialEq, Default, RuntimeDebug, TypeInfo)] pub struct Vote { pub aye: bool, pub conviction: Conviction, @@ -49,7 +50,7 @@ impl Decode for Vote { } /// A vote for a referendum of a particular account. -#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub enum AccountVote { /// A standard vote, one-way (approve or reject) with a given amount of conviction. Standard { vote: Vote, balance: Balance }, @@ -89,7 +90,7 @@ impl AccountVote { } /// A "prior" lock, i.e. a lock for some now-forgotten reason. 
-#[derive(Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, TypeInfo)] pub struct PriorLock(BlockNumber, Balance); impl PriorLock { @@ -112,7 +113,7 @@ impl PriorLock { /// The account is voting directly. `delegations` is the total amount of post-conviction voting /// weight that it controls from those that have delegated to it. diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 11131932c1093..f68b8d82c38ea 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -228,6 +228,7 @@ #![cfg_attr(not(feature = "std"), no_std)] use codec::{Decode, Encode}; +use scale_info::TypeInfo; use frame_support::{ dispatch::DispatchResultWithPostInfo, ensure, @@ -318,7 +319,7 @@ impl BenchmarkingConfig for () { } /// Current phase of the pallet. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum Phase { /// Nothing, the election is not happening. Off, @@ -382,7 +383,7 @@ impl Phase { /// A configuration for the pallet to indicate what should happen in the case of a fallback i.e. /// reaching a call to `elect` with no good solution. #[cfg_attr(test, derive(Clone))] -#[derive(scale_info::TypeInfo)] +#[derive(TypeInfo)] pub enum FallbackStrategy { /// Run a on-chain sequential phragmen. /// @@ -394,7 +395,7 @@ pub enum FallbackStrategy { } /// The type of `Computation` that provided this election data. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum ElectionCompute { /// Election was computed on-chain. 
OnChain, @@ -416,7 +417,7 @@ impl Default for ElectionCompute { /// /// Such a solution should never become effective in anyway before being checked by the /// `Pallet::feasibility_check` -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, PartialOrd, Ord, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, PartialOrd, Ord, TypeInfo)] pub struct RawSolution { /// Compact election edges. pub compact: C, @@ -434,7 +435,7 @@ impl Default for RawSolution { } /// A checked solution, ready to be enacted. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, TypeInfo)] pub struct ReadySolution { /// The final supports of the solution. /// @@ -453,7 +454,7 @@ pub struct ReadySolution { /// [`ElectionDataProvider`] and are kept around until the round is finished. /// /// These are stored together because they are often accessed together. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, Default, TypeInfo)] pub struct RoundSnapshot { /// All of the voters. pub voters: Vec<(A, VoteWeight, Vec)>, @@ -466,7 +467,7 @@ pub struct RoundSnapshot { /// This is stored automatically on-chain, and it contains the **size of the entire snapshot**. /// This is also used in dispatchables as weight witness data and should **only contain the size of /// the presented solution**, not the entire snapshot. -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, Default, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, Default, TypeInfo)] pub struct SolutionOrSnapshotSize { /// The length of voters. #[codec(compact)] @@ -644,7 +645,7 @@ pub mod pallet { + sp_std::fmt::Debug + Ord + CompactSolution - + scale_info::TypeInfo; + + TypeInfo; /// Accuracy used for fallback on-chain election. 
type OnChainAccuracy: PerThing128; diff --git a/frame/elections-phragmen/src/lib.rs b/frame/elections-phragmen/src/lib.rs index 77c5e26683783..96142ed9d33a0 100644 --- a/frame/elections-phragmen/src/lib.rs +++ b/frame/elections-phragmen/src/lib.rs @@ -108,6 +108,7 @@ use frame_support::{ }, weights::Weight, }; +use scale_info::TypeInfo; use sp_npos_elections::{ElectionResult, ExtendedBalance}; use sp_runtime::{ traits::{Saturating, StaticLookup, Zero}, @@ -131,7 +132,7 @@ type NegativeImbalanceOf = <::Currency as Currency<::AccountId>>::NegativeImbalance; /// An indication that the renouncing account currently has which of the below roles. -#[derive(Encode, Decode, Clone, PartialEq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, RuntimeDebug, TypeInfo)] pub enum Renouncing { /// A member is renouncing. Member, @@ -142,7 +143,7 @@ pub enum Renouncing { } /// An active voter. -#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq, TypeInfo)] pub struct Voter { /// The members being backed. pub votes: Vec, @@ -155,7 +156,7 @@ pub struct Voter { } /// A holder of a seat as either a member or a runner-up. -#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq, TypeInfo)] pub struct SeatHolder { /// The holder. 
pub who: AccountId, diff --git a/frame/gilt/src/lib.rs b/frame/gilt/src/lib.rs index bf2057b59440c..b74dc949b0a2e 100644 --- a/frame/gilt/src/lib.rs +++ b/frame/gilt/src/lib.rs @@ -76,6 +76,7 @@ pub mod weights; #[frame_support::pallet] pub mod pallet { + use scale_info::TypeInfo; use sp_std::prelude::*; use sp_arithmetic::{Perquintill, PerThing}; use sp_runtime::traits::{Zero, Saturating}; @@ -103,7 +104,7 @@ pub mod pallet { type CurrencyBalance: sp_runtime::traits::AtLeast32BitUnsigned + codec::FullCodec + Copy + MaybeSerializeDeserialize + sp_std::fmt::Debug + Default + From - + scale_info::TypeInfo; + + TypeInfo; /// Origin required for setting the target proportion to be under gilt. type AdminOrigin: EnsureOrigin; @@ -172,7 +173,7 @@ pub mod pallet { pub struct Pallet(_); /// A single bid on a gilt, an item of a *queue* in `Queues`. - #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] + #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct GiltBid { /// The amount bid. pub amount: Balance, @@ -181,7 +182,7 @@ pub mod pallet { } /// Information representing an active gilt. - #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] + #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct ActiveGilt { /// The proportion of the effective total issuance (i.e. accounting for any eventual gilt /// expansion or contraction that may eventually be claimed). @@ -205,7 +206,7 @@ pub mod pallet { /// `issuance - frozen + proportion * issuance` /// /// where `issuance = total_issuance - IgnoredIssuance` - #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] + #[derive(Clone, Eq, PartialEq, Default, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct ActiveGiltsTotal { /// The total amount of funds held in reserve for all active gilts. 
pub frozen: Balance, diff --git a/frame/grandpa/src/lib.rs b/frame/grandpa/src/lib.rs index 4bad5c1427ffd..72c80a8773631 100644 --- a/frame/grandpa/src/lib.rs +++ b/frame/grandpa/src/lib.rs @@ -69,6 +69,8 @@ pub use equivocation::{ pub use pallet::*; +use scale_info::TypeInfo; + #[frame_support::pallet] pub mod pallet { use frame_support::pallet_prelude::*; @@ -361,7 +363,7 @@ pub trait WeightInfo { } /// A stored pending change. -#[derive(Encode, Decode, scale_info::TypeInfo)] +#[derive(Encode, Decode, TypeInfo)] pub struct StoredPendingChange { /// The block number this was scheduled at. pub scheduled_at: N, @@ -377,7 +379,7 @@ pub struct StoredPendingChange { /// Current state of the GRANDPA authority set. State transitions must happen in /// the same order of states defined below, e.g. `Paused` implies a prior /// `PendingPause`. -#[derive(Decode, Encode, scale_info::TypeInfo)] +#[derive(Decode, Encode, TypeInfo)] #[cfg_attr(test, derive(Debug, PartialEq))] pub enum StoredState { /// The current authority set is live, and GRANDPA is enabled. diff --git a/frame/identity/src/lib.rs b/frame/identity/src/lib.rs index 310ee4ec90e64..2fb976fc0d17f 100644 --- a/frame/identity/src/lib.rs +++ b/frame/identity/src/lib.rs @@ -81,6 +81,7 @@ use sp_std::prelude::*; use sp_std::{fmt::Debug, ops::Add, iter::once}; use enumflags2::BitFlags; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::RuntimeDebug; use sp_runtime::traits::{StaticLookup, Zero, AppendZerosInput, Saturating}; use frame_support::traits::{Currency, ReservableCurrency, OnUnbalanced, BalanceStatus}; @@ -95,7 +96,7 @@ type NegativeImbalanceOf = <::Currency as Currency< { @@ -217,7 +218,7 @@ impl< /// The fields that we use to identify the owner of an account with. Each corresponds to a field /// in the `IdentityInfo` struct. 
#[repr(u64)] -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, BitFlags, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, BitFlags, RuntimeDebug, TypeInfo)] pub enum IdentityField { Display = 0b0000000000000000000000000000000000000000000000000000000000000001, Legal = 0b0000000000000000000000000000000000000000000000000000000000000010, @@ -245,7 +246,7 @@ impl Decode for IdentityFields { Ok(Self(>::from_bits(field as u64).map_err(|_| "invalid value")?)) } } -impl scale_info::TypeInfo for IdentityFields { +impl TypeInfo for IdentityFields { type Identity = Self; fn type_info() -> scale_info::Type { @@ -265,7 +266,7 @@ impl scale_info::TypeInfo for IdentityFields { /// /// NOTE: This should be stored at the end of the storage item to facilitate the addition of extra /// fields in a backwards compatible way through a specialized `Decode` impl. -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo)] #[cfg_attr(test, derive(Default))] pub struct IdentityInfo { /// Additional fields of the identity that are not catered for with the struct's explicit @@ -317,7 +318,7 @@ pub struct IdentityInfo { /// /// NOTE: This is stored separately primarily to facilitate the addition of extra fields in a /// backwards compatible way through a specialized `Decode` impl. -#[derive(Clone, Encode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Encode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub struct Registration< Balance: Encode + Decode + Copy + Clone + Debug + Eq + PartialEq > { @@ -352,7 +353,7 @@ impl< } /// Information concerning a registrar. 
-#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub struct RegistrarInfo< Balance: Encode + Decode + Clone + Debug + Eq + PartialEq, AccountId: Encode + Decode + Clone + Debug + Eq + PartialEq diff --git a/frame/im-online/src/lib.rs b/frame/im-online/src/lib.rs index 65f0c14eec033..df01e402eed3b 100644 --- a/frame/im-online/src/lib.rs +++ b/frame/im-online/src/lib.rs @@ -76,6 +76,7 @@ pub mod weights; use sp_application_crypto::RuntimeAppPublic; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_core::offchain::OpaqueNetworkState; use sp_std::prelude::*; use sp_std::convert::TryInto; @@ -141,7 +142,7 @@ const INCLUDE_THRESHOLD: u32 = 3; /// This stores the block number at which heartbeat was requested and when the worker /// has actually managed to produce it. /// Note we store such status for every `authority_index` separately. -#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] struct HeartbeatStatus { /// An index of the session that we are supposed to send heartbeat for. pub session_index: SessionIndex, @@ -202,7 +203,7 @@ impl sp_std::fmt::Debug for OffchainErr where BlockNumber: PartialEq + Eq + Decode + Encode, { @@ -834,7 +835,7 @@ impl OneSessionHandler for Pallet { } /// An offence that is filed if a validator didn't send a heartbeat message. -#[derive(RuntimeDebug, scale_info::TypeInfo)] +#[derive(RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Clone, PartialEq, Eq))] pub struct UnresponsivenessOffence { /// The current session index in which we report the unresponsive validators. 
diff --git a/frame/multisig/src/lib.rs b/frame/multisig/src/lib.rs index 99fa108eb9539..73fe2729eab18 100644 --- a/frame/multisig/src/lib.rs +++ b/frame/multisig/src/lib.rs @@ -52,6 +52,7 @@ pub mod weights; use sp_std::prelude::*; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_io::hashing::blake2_256; use frame_support::{ensure, RuntimeDebug}; use frame_support::{traits::{Get, ReservableCurrency, Currency}, @@ -71,7 +72,7 @@ pub type OpaqueCall = Vec; /// A global extrinsic index, formed as the extrinsic index within a block, together with that /// block's height. This allows a transaction in which a multisig operation of a particular /// composite was created to be uniquely identified. -#[derive(Copy, Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Copy, Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct Timepoint { /// The height of the chain at the point in time. height: BlockNumber, @@ -80,7 +81,7 @@ pub struct Timepoint { } /// An open multisig operation. -#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct Multisig { /// The extrinsic when the multisig operation was opened. when: Timepoint, diff --git a/frame/recovery/src/lib.rs b/frame/recovery/src/lib.rs index af5e196240f84..15ae0953b6633 100644 --- a/frame/recovery/src/lib.rs +++ b/frame/recovery/src/lib.rs @@ -154,6 +154,7 @@ use sp_std::prelude::*; use sp_runtime::traits::{Dispatchable, SaturatedConversion, CheckedAdd, CheckedMul}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use frame_support::{ RuntimeDebug, weights::GetDispatchInfo, @@ -172,7 +173,7 @@ type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; /// An active recovery process. 
-#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct ActiveRecovery { /// The block number when the recovery process started. created: BlockNumber, @@ -184,7 +185,7 @@ pub struct ActiveRecovery { } /// Configuration for recovering an account. -#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct RecoveryConfig { /// The minimum number of blocks since the start of the recovery process before the account /// can be recovered. diff --git a/frame/scheduler/src/lib.rs b/frame/scheduler/src/lib.rs index 3bbe35dc4d9ba..dd7dc74dfb46c 100644 --- a/frame/scheduler/src/lib.rs +++ b/frame/scheduler/src/lib.rs @@ -56,6 +56,7 @@ pub mod weights; use sp_std::{prelude::*, marker::PhantomData, borrow::Borrow}; use codec::{Encode, Decode, Codec}; +use scale_info::TypeInfo; use sp_runtime::{RuntimeDebug, traits::{Zero, One, BadOrigin, Saturating}}; use frame_support::{ dispatch::{Dispatchable, DispatchError, DispatchResult, Parameter}, @@ -82,7 +83,7 @@ struct ScheduledV1 { /// Information regarding an item to be executed in the future. #[cfg_attr(any(feature = "std", test), derive(PartialEq, Eq))] -#[derive(Clone, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct ScheduledV2 { /// The unique identity for this task, if there is one. maybe_id: Option>, @@ -104,7 +105,7 @@ pub type Scheduled = // A value placed in storage that represents the current version of the Scheduler storage. // This value is used by the `on_runtime_upgrade` logic to determine whether we run // storage migration logic. 
-#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, TypeInfo)] enum Releases { V1, V2, @@ -139,7 +140,7 @@ pub mod pallet { /// The caller origin, overarching type of all pallets origins. type PalletsOrigin: From> + Codec + Clone + Eq - + scale_info::TypeInfo; + + TypeInfo; /// The aggregated call type. type Call: Parameter diff --git a/frame/society/src/lib.rs b/frame/society/src/lib.rs index 002c31dfa7d02..b04eb99d9842d 100644 --- a/frame/society/src/lib.rs +++ b/frame/society/src/lib.rs @@ -322,7 +322,7 @@ pub trait Config: system::Config { } /// A vote by a member on a candidate application. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum Vote { /// The member has been chosen to be skeptic and has not yet taken any action. Skeptic, @@ -333,7 +333,7 @@ pub enum Vote { } /// A judgement by the suspension judgement origin on a suspended candidate. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum Judgement { /// The suspension judgement origin takes no direct judgment /// and places the candidate back into the bid pool. @@ -345,7 +345,7 @@ pub enum Judgement { } /// Details of a payout given as a per-block linear "trickle". -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, Default, scale_info::TypeInfo)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, Default, TypeInfo)] pub struct Payout { /// Total value of the payout. value: Balance, @@ -358,7 +358,7 @@ pub struct Payout { } /// Status of a vouching member. 
-#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum VouchingStatus { /// Member is currently vouching for a user. Vouching, @@ -370,7 +370,7 @@ pub enum VouchingStatus { pub type StrikeCount = u32; /// A bid for entry into society. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Bid { /// The bidder/candidate trying to enter society who: AccountId, @@ -381,7 +381,7 @@ pub struct Bid { } /// A vote by a member on a candidate application. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub enum BidKind { /// The CandidateDeposit was paid for this bid. Deposit(Balance), diff --git a/frame/staking/src/lib.rs b/frame/staking/src/lib.rs index 55a32d82aa912..2c7a97db45f44 100644 --- a/frame/staking/src/lib.rs +++ b/frame/staking/src/lib.rs @@ -287,6 +287,7 @@ use sp_std::{ convert::From, }; use codec::{HasCompact, Encode, Decode}; +use scale_info::TypeInfo; use frame_support::{ pallet_prelude::*, weights::{ @@ -353,7 +354,7 @@ type NegativeImbalanceOf = <::Currency as Currency< >>::NegativeImbalance; /// Information regarding the active era (era in used in session). -#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, RuntimeDebug, TypeInfo)] pub struct ActiveEraInfo { /// Index of era. pub index: EraIndex, @@ -367,7 +368,7 @@ pub struct ActiveEraInfo { /// Reward points of an era. Used to split era total payout between validators. /// /// This points will be used to reward validators and their respective nominators. 
-#[derive(PartialEq, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct EraRewardPoints { /// Total number of points. Equals the sum of reward points for each validator. total: RewardPoint, @@ -376,7 +377,7 @@ pub struct EraRewardPoints { } /// Indicates the initial status of the staker. -#[derive(RuntimeDebug, scale_info::TypeInfo)] +#[derive(RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] pub enum StakerStatus { /// Chilling. @@ -388,7 +389,7 @@ pub enum StakerStatus { } /// A destination account for payment. -#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum RewardDestination { /// Pay into the stash account, increasing the amount at stake accordingly. Staked, @@ -409,7 +410,7 @@ impl Default for RewardDestination { } /// Preference of what happens regarding validation. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct ValidatorPrefs { /// Reward that validator takes up-front; only the rest is split between themselves and /// nominators. @@ -431,7 +432,7 @@ impl Default for ValidatorPrefs { } /// Just a Balance/BlockNumber tuple to encode when a chunk of funds will be unlocked. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct UnlockChunk { /// Amount of funds to be unlocked. #[codec(compact)] @@ -442,7 +443,7 @@ pub struct UnlockChunk { } /// The ledger of a (bonded) stash. 
-#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct StakingLedger { /// The stash account whose balance is actually locked and at stake. pub stash: AccountId, @@ -571,7 +572,7 @@ impl StakingLedger where } /// A record of the nominations made by a specific account. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct Nominations { /// The targets of nomination. pub targets: Vec, @@ -587,7 +588,7 @@ pub struct Nominations { } /// The amount of exposure (to slashing) than an individual nominator has. -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub struct IndividualExposure { /// The stash account of the nominator in question. pub who: AccountId, @@ -597,7 +598,7 @@ pub struct IndividualExposure { } /// A snapshot of the stake backing a single validator in the system. -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct Exposure { /// The total balance backing this validator. #[codec(compact)] @@ -611,7 +612,7 @@ pub struct Exposure { /// A pending slash record. The value of the slash has been computed but not applied yet, /// rather deferred for several eras. -#[derive(Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Default, RuntimeDebug, TypeInfo)] pub struct UnappliedSlash { /// The stash ID of the offending validator. validator: AccountId, @@ -713,7 +714,7 @@ impl< } /// Mode of era-forcing. 
-#[derive(Copy, Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Copy, Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] pub enum Forcing { /// Not forcing anything - just let whatever happen. @@ -737,7 +738,7 @@ impl Default for Forcing { // A value placed in storage that represents the current version of the Staking storage. This value // is used by the `on_runtime_upgrade` logic to determine whether we run storage migration logic. // This should match directly with the semantic versions of the Rust crate. -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, RuntimeDebug, TypeInfo)] enum Releases { V1_0_0Ancient, V2_0_0, diff --git a/frame/staking/src/slashing.rs b/frame/staking/src/slashing.rs index 1061b10831d0b..e81d59c27c81b 100644 --- a/frame/staking/src/slashing.rs +++ b/frame/staking/src/slashing.rs @@ -60,6 +60,7 @@ use frame_support::{ }; use sp_std::vec::Vec; use codec::{Encode, Decode}; +use scale_info::TypeInfo; /// The proportion of the slashing reward to be paid out on the first slashing detection. /// This is f_1 in the paper. @@ -69,7 +70,7 @@ const REWARD_F1: Perbill = Perbill::from_percent(50); pub type SpanIndex = u32; // A range of start..end eras for a slashing span. -#[derive(Encode, Decode, scale_info::TypeInfo)] +#[derive(Encode, Decode, TypeInfo)] #[cfg_attr(test, derive(Debug, PartialEq))] pub(crate) struct SlashingSpan { pub(crate) index: SpanIndex, @@ -84,7 +85,7 @@ impl SlashingSpan { } /// An encoding of all of a nominator's slashing spans. -#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, RuntimeDebug, TypeInfo)] pub struct SlashingSpans { // the index of the current slashing span of the nominator. different for // every stash, resets when the account hits free balance 0. 
@@ -174,7 +175,7 @@ impl SlashingSpans { } /// A slashing-span record for a particular stash. -#[derive(Encode, Decode, Default, scale_info::TypeInfo)] +#[derive(Encode, Decode, Default, TypeInfo)] pub(crate) struct SpanRecord { slashed: Balance, paid_out: Balance, diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index d083107e16601..253db017fbcfd 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -84,17 +84,18 @@ pub use self::dispatch::{Parameter, Callable}; pub use sp_runtime::{self, ConsensusEngineId, print, traits::Printable}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::TypeId; /// A unified log target for support operations. pub const LOG_TARGET: &'static str = "runtime::frame-support"; /// A type that cannot be instantiated. -#[derive(Debug, PartialEq, Eq, Clone, scale_info::TypeInfo)] +#[derive(Debug, PartialEq, Eq, Clone, TypeInfo)] pub enum Never {} /// A pallet identifier. These are per pallet and should be stored in a registry somewhere. -#[derive(Clone, Copy, Eq, PartialEq, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, Copy, Eq, PartialEq, Encode, Decode, TypeInfo)] pub struct PalletId(pub [u8; 8]); impl TypeId for PalletId { @@ -364,7 +365,7 @@ macro_rules! parameter_types { } } - impl $crate::scale_info::TypeInfo for $name { + impl $crate::TypeInfo for $name { type Identity = Self; fn type_info() -> $crate::scale_info::Type<$crate::scale_info::form::MetaForm> { @@ -822,7 +823,7 @@ pub mod tests { } pub trait Config: 'static { - type BlockNumber: Codec + EncodeLike + Default + scale_info::TypeInfo; + type BlockNumber: Codec + EncodeLike + Default + TypeInfo; type Origin; type PalletInfo: crate::traits::PalletInfo; type DbWeight: crate::traits::Get; diff --git a/frame/support/src/origin.rs b/frame/support/src/origin.rs index 4d64437c91472..dd775fb27524f 100644 --- a/frame/support/src/origin.rs +++ b/frame/support/src/origin.rs @@ -271,7 +271,7 @@ macro_rules! 
impl_outer_origin { } $crate::paste::item! { - #[derive(Clone, PartialEq, Eq, $crate::RuntimeDebug, $crate::codec::Encode, $crate::codec::Decode, $crate::scale_info::TypeInfo)] + #[derive(Clone, PartialEq, Eq, $crate::RuntimeDebug, $crate::codec::Encode, $crate::codec::Decode, $crate::TypeInfo)] $(#[$attr])* #[allow(non_camel_case_types)] pub enum $caller_name { @@ -420,6 +420,7 @@ macro_rules! impl_outer_origin { #[cfg(test)] mod tests { use codec::{Encode, Decode}; + use scale_info::TypeInfo; use crate::traits::{Filter, OriginTrait}; mod frame_system { use super::*; @@ -430,7 +431,7 @@ mod tests { type BaseCallFilter; } - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, TypeInfo)] pub enum RawOrigin { Root, Signed(AccountId), @@ -452,20 +453,20 @@ mod tests { mod origin_without_generic { use super::*; - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, TypeInfo)] pub struct Origin; } mod origin_with_generic { use super::*; - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, TypeInfo)] pub struct Origin { t: T } } - #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, scale_info::TypeInfo)] + #[derive(Clone, PartialEq, Eq, Debug, Encode, Decode, TypeInfo)] pub struct TestRuntime; pub struct BaseCallFilter; diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 89143f2a4f71a..5aeecbfd5d5d3 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -130,6 +130,7 @@ #[cfg(feature = "std")] use serde::{Serialize, Deserialize}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::{RuntimeDebug, traits::SignedExtension}; use sp_runtime::generic::{CheckedExtrinsic, UncheckedExtrinsic}; use crate::dispatch::{DispatchErrorWithPostInfo, DispatchResultWithPostInfo, 
DispatchError}; @@ -196,7 +197,7 @@ pub trait PaysFee { } /// Explicit enum to denote if a transaction pays fee or not. -#[derive(Clone, Copy, Eq, PartialEq, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, Copy, Eq, PartialEq, RuntimeDebug, Encode, Decode, TypeInfo)] pub enum Pays { /// Transactor will pay related fees. Yes, @@ -216,7 +217,7 @@ impl Default for Pays { /// [DispatchClass::all] and [DispatchClass::non_mandatory] helper functions. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] -#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Copy, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum DispatchClass { /// A normal dispatch. Normal, @@ -301,7 +302,7 @@ pub mod priority { } /// A bundle of static information collected from the `#[weight = $x]` attributes. -#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct DispatchInfo { /// Weight of this transaction. pub weight: Weight, @@ -623,7 +624,7 @@ impl GetDispatchInfo for sp_runtime::testing::TestX } /// The weight of database operations that the runtime can invoke. -#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct RuntimeDbWeight { pub read: Weight, pub write: Weight, @@ -655,7 +656,7 @@ impl RuntimeDbWeight { /// /// The `negative` value encodes whether the term is added or substracted from the /// overall polynomial result. -#[derive(Clone, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, TypeInfo)] pub struct WeightToFeeCoefficient { /// The integral part of the coefficient. 
pub coeff_integer: Balance, @@ -730,7 +731,7 @@ impl WeightToFeePolynomial for IdentityFee where } /// A struct holding value for each `DispatchClass`. -#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct PerDispatchClass { /// Value for `Normal` extrinsics. normal: T, diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index a8528617d1f5e..b6cbc881b151a 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -21,6 +21,7 @@ #![recursion_limit="128"] +use scale_info::TypeInfo; use sp_runtime::{generic, traits::{BlakeTwo256, Verify}, DispatchError}; use sp_core::{H256, sr25519}; use sp_std::cell::RefCell; @@ -50,7 +51,7 @@ mod module1 { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, scale_info::TypeInfo)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, TypeInfo)] pub struct Origin(pub core::marker::PhantomData::<(T, I)>); frame_support::decl_event! { @@ -92,7 +93,7 @@ mod module2 { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, scale_info::TypeInfo)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, TypeInfo)] pub struct Origin; frame_support::decl_event! { @@ -135,7 +136,7 @@ mod nested { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, scale_info::TypeInfo)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, TypeInfo)] pub struct Origin; frame_support::decl_event! { @@ -191,7 +192,7 @@ pub mod module3 { } } - #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, scale_info::TypeInfo)] + #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, TypeInfo)] pub struct Origin(pub core::marker::PhantomData); frame_support::decl_event! 
{ diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index 14e4c3d5032b6..1d74232aab518 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -18,6 +18,7 @@ #![recursion_limit="128"] use codec::{Codec, EncodeLike, Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::{generic, BuildStorage, traits::{BlakeTwo256, Verify}}; use frame_support::{ Parameter, traits::Get, parameter_types, @@ -45,7 +46,7 @@ mod module1 { type Event: From> + Into<::Event>; type Origin: From>; type SomeParameter: Get; - type GenericType: Default + Clone + Codec + EncodeLike + scale_info::TypeInfo; + type GenericType: Default + Clone + Codec + EncodeLike + TypeInfo; } frame_support::decl_module! { @@ -100,7 +101,7 @@ mod module1 { } } - #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] + #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, TypeInfo)] pub enum Origin, I> where T::BlockNumber: From { Members(u32), _Phantom(std::marker::PhantomData<(T, I)>), @@ -166,7 +167,7 @@ mod module2 { } } - #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] + #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, TypeInfo)] pub enum Origin, I=DefaultInstance> { Members(u32), _Phantom(std::marker::PhantomData<(T, I)>), diff --git a/frame/support/test/tests/issue2219.rs b/frame/support/test/tests/issue2219.rs index 371376595e483..d39a9dd16f7f2 100644 --- a/frame/support/test/tests/issue2219.rs +++ b/frame/support/test/tests/issue2219.rs @@ -33,12 +33,12 @@ mod module { ); pub type Requests = Vec>; - #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug, scale_info::TypeInfo)] + #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug, TypeInfo)] pub enum Role { Storage, } - #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug, scale_info::TypeInfo)] + #[derive(Encode, Decode, Copy, 
Clone, Eq, PartialEq, Debug, TypeInfo)] pub struct RoleParameters { // minimum actors to maintain - if role is unstaking // and remaining actors would be less that this value - prevent or punish for unstaking @@ -81,7 +81,7 @@ mod module { } } - pub trait Config: system::Config + scale_info::TypeInfo {} + pub trait Config: system::Config + TypeInfo {} frame_support::decl_module! { pub struct Module for enum Call where origin: T::Origin, system=system {} diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 38b17224942f1..1f6f0a2b83317 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -97,6 +97,7 @@ use frame_support::{ dispatch::{DispatchResultWithPostInfo, DispatchResult}, }; use codec::{Encode, Decode, FullCodec, EncodeLike}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use frame_support::traits::GenesisBuild; @@ -203,7 +204,7 @@ pub mod pallet { + MaybeMallocSizeOf + MaxEncodedLen; /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). - type Hashing: Hash + scale_info::TypeInfo; + type Hashing: Hash + TypeInfo; /// The user account identifier type for the runtime. type AccountId: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord @@ -248,7 +249,7 @@ pub mod pallet { /// Data to be associated with an account (other than nonce/transaction counter, which this /// pallet does regardless). - type AccountData: Member + FullCodec + Clone + Default + scale_info::TypeInfo; + type AccountData: Member + FullCodec + Clone + Default + TypeInfo; /// Handler for when a new account has just been created. type OnNewAccount: OnNewAccount; @@ -714,7 +715,7 @@ pub type Key = Vec; pub type KeyValue = (Vec, Vec); /// A phase of a block's execution. -#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] pub enum Phase { /// Applying an extrinsic. 
@@ -732,7 +733,7 @@ impl Default for Phase { } /// Record of an event happening. -#[derive(Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))] pub struct EventRecord { /// The phase of the block it happened in. @@ -744,7 +745,7 @@ pub struct EventRecord { } /// Origin for the System pallet. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, TypeInfo)] pub enum RawOrigin { /// The system itself ordained this dispatch to happen: this is the highest privilege level. Root, @@ -784,7 +785,7 @@ type EventIndex = u32; pub type RefCount = u32; /// Information of an account. -#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct AccountInfo { /// The number of transactions this account has sent. pub nonce: Index, @@ -804,7 +805,7 @@ pub struct AccountInfo { /// Stores the `spec_version` and `spec_name` of when the last runtime upgrade /// happened. -#[derive(sp_runtime::RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive(sp_runtime::RuntimeDebug, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "std", derive(PartialEq))] pub struct LastRuntimeUpgradeInfo { pub spec_version: codec::Compact, diff --git a/frame/system/src/limits.rs b/frame/system/src/limits.rs index c6a2a5c13214d..3d1b26fe3df42 100644 --- a/frame/system/src/limits.rs +++ b/frame/system/src/limits.rs @@ -26,10 +26,11 @@ //! which should be passed to `frame_system` configuration when runtime is being set up. use frame_support::weights::{Weight, DispatchClass, constants, PerDispatchClass, OneOrMany}; +use scale_info::TypeInfo; use sp_runtime::{RuntimeDebug, Perbill}; /// Block length limit configuration. 
-#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, scale_info::TypeInfo)] +#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, TypeInfo)] pub struct BlockLength { /// Maximal total length in bytes for each extrinsic class. /// @@ -94,7 +95,7 @@ pub type ValidationResult = Result; const DEFAULT_NORMAL_RATIO: Perbill = Perbill::from_percent(75); /// `DispatchClass`-specific weight configuration. -#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, scale_info::TypeInfo)] +#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, TypeInfo)] pub struct WeightsPerClass { /// Base weight of single extrinsic of given class. pub base_extrinsic: Weight, @@ -194,7 +195,7 @@ pub struct WeightsPerClass { /// /// As a consequence of `reserved` space, total consumed block weight might exceed `max_block` /// value, so this parameter should rather be thought of as "target block weight" than a hard limit. -#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, scale_info::TypeInfo)] +#[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, TypeInfo)] pub struct BlockWeights { /// Base weight of block execution. pub base_block: Weight, diff --git a/primitives/consensus/babe/src/lib.rs b/primitives/consensus/babe/src/lib.rs index 7b9d6744e6ffa..67871db4fee5d 100644 --- a/primitives/consensus/babe/src/lib.rs +++ b/primitives/consensus/babe/src/lib.rs @@ -29,6 +29,7 @@ pub use sp_consensus_vrf::schnorrkel::{ }; use codec::{Decode, Encode}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use serde::{Serialize, Deserialize}; #[cfg(feature = "std")] @@ -221,7 +222,7 @@ pub struct BabeGenesisConfiguration { } /// Types of allowed slots. -#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum AllowedSlots { /// Only allow primary slots. 
@@ -254,7 +255,7 @@ impl sp_consensus::SlotData for BabeGenesisConfiguration { } /// Configuration data used by the BABE consensus engine. -#[derive(Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct BabeEpochConfiguration { /// A constant value that is used in the threshold calculation formula. diff --git a/primitives/consensus/slots/src/lib.rs b/primitives/consensus/slots/src/lib.rs index 9c839430c9c06..97099c14d1439 100644 --- a/primitives/consensus/slots/src/lib.rs +++ b/primitives/consensus/slots/src/lib.rs @@ -20,9 +20,10 @@ #![cfg_attr(not(feature = "std"), no_std)] use codec::{Decode, Encode}; +use scale_info::TypeInfo; /// Unit type wrapper that represents a slot. -#[derive(Debug, Encode, Decode, scale_info::TypeInfo, Eq, Clone, Copy, Default, Ord)] +#[derive(Debug, Encode, Decode, Eq, Clone, Copy, Default, Ord, TypeInfo)] pub struct Slot(u64); impl core::ops::Deref for Slot { @@ -96,7 +97,7 @@ impl From for u64 { /// produces more than one block on the same slot. The proof of equivocation /// are the given distinct headers that were signed by the validator and which /// include the slot number. -#[derive(Clone, Debug, Decode, Encode, PartialEq, scale_info::TypeInfo)] +#[derive(Clone, Debug, Decode, Encode, PartialEq, TypeInfo)] pub struct EquivocationProof { /// Returns the authority id of the equivocator. 
pub offender: Id, diff --git a/primitives/core/src/crypto.rs b/primitives/core/src/crypto.rs index 821e4b21be5b4..89afec0effc12 100644 --- a/primitives/core/src/crypto.rs +++ b/primitives/core/src/crypto.rs @@ -32,6 +32,7 @@ use parking_lot::Mutex; #[cfg(feature = "std")] use rand::{RngCore, rngs::OsRng}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; #[cfg(feature = "std")] use regex::Regex; #[cfg(feature = "std")] @@ -700,7 +701,7 @@ pub trait Public: } /// An opaque 32-byte cryptographic identifier. -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode, MaxEncodedLen, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode, MaxEncodedLen, TypeInfo)] #[cfg_attr(feature = "std", derive(Hash))] pub struct AccountId32([u8; 32]); @@ -1108,7 +1109,7 @@ pub trait CryptoType { /// public modules. #[derive( Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Encode, Decode, PassByInner, - crate::RuntimeDebug, scale_info::TypeInfo + crate::RuntimeDebug, TypeInfo )] #[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] pub struct KeyTypeId(pub [u8; 4]); diff --git a/primitives/core/src/ecdsa.rs b/primitives/core/src/ecdsa.rs index 051a6547f8049..886d9e1776f4d 100644 --- a/primitives/core/src/ecdsa.rs +++ b/primitives/core/src/ecdsa.rs @@ -24,6 +24,7 @@ use sp_std::vec::Vec; use sp_std::cmp::Ordering; use codec::{Encode, Decode}; +use scale_info::TypeInfo; #[cfg(feature = "full_crypto")] use core::convert::{TryFrom, TryInto}; @@ -52,7 +53,7 @@ pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"ecds"); type Seed = [u8; 32]; /// The ECDSA compressed public key. 
-#[derive(Clone, Encode, Decode, PassByInner, max_encoded_len::MaxEncodedLen, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, PassByInner, max_encoded_len::MaxEncodedLen, TypeInfo)] pub struct Public(pub [u8; 33]); impl PartialOrd for Public { @@ -227,7 +228,7 @@ impl sp_std::hash::Hash for Public { } /// A signature (a 512-bit value, plus 8 bits for recovery ID). -#[derive(Encode, Decode, PassByInner, scale_info::TypeInfo)] +#[derive(Encode, Decode, PassByInner, TypeInfo)] pub struct Signature(pub [u8; 65]); impl sp_std::convert::TryFrom<&[u8]> for Signature { diff --git a/primitives/core/src/ed25519.rs b/primitives/core/src/ed25519.rs index 2e0e18446156d..9089905e54d60 100644 --- a/primitives/core/src/ed25519.rs +++ b/primitives/core/src/ed25519.rs @@ -24,6 +24,7 @@ use sp_std::vec::Vec; use crate::{hash::H256, hash::H512}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; #[cfg(feature = "full_crypto")] use core::convert::TryFrom; @@ -56,7 +57,7 @@ type Seed = [u8; 32]; #[cfg_attr(feature = "full_crypto", derive(Hash))] #[derive( PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, - max_encoded_len::MaxEncodedLen, scale_info::TypeInfo, + max_encoded_len::MaxEncodedLen, TypeInfo, )] pub struct Public(pub [u8; 32]); @@ -191,7 +192,7 @@ impl<'de> Deserialize<'de> for Public { } /// A signature (a 512-bit value). -#[derive(Encode, Decode, PassByInner, scale_info::TypeInfo)] +#[derive(Encode, Decode, PassByInner, TypeInfo)] pub struct Signature(pub [u8; 64]); impl sp_std::convert::TryFrom<&[u8]> for Signature { diff --git a/primitives/core/src/lib.rs b/primitives/core/src/lib.rs index 89f8aa02c2bab..e646c4309c5b3 100644 --- a/primitives/core/src/lib.rs +++ b/primitives/core/src/lib.rs @@ -183,7 +183,10 @@ impl sp_std::ops::Deref for OpaqueMetadata { } /// Simple blob to hold a `PeerId` without committing to its format. 
-#[derive(Default, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, PassByInner, scale_info::TypeInfo)] +#[derive( + Default, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, PassByInner, + TypeInfo +)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct OpaquePeerId(pub Vec); @@ -405,7 +408,7 @@ pub fn to_substrate_wasm_fn_return_value(value: &impl Encode) -> u64 { /// The void type - it cannot exist. // Oh rust, you crack me up... -#[derive(Clone, Decode, Encode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Decode, Encode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub enum Void {} /// Macro for creating `Maybe*` marker traits. diff --git a/primitives/core/src/offchain/mod.rs b/primitives/core/src/offchain/mod.rs index 0ccf2e7deca40..3004284173780 100644 --- a/primitives/core/src/offchain/mod.rs +++ b/primitives/core/src/offchain/mod.rs @@ -18,6 +18,7 @@ //! Offchain workers types use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_std::{prelude::{Vec, Box}, convert::TryFrom}; use crate::{OpaquePeerId, RuntimeDebug}; use sp_runtime_interface::pass_by::{PassByCodec, PassByInner, PassByEnum}; @@ -181,7 +182,7 @@ impl TryFrom for HttpRequestStatus { /// A blob to hold information about the local node's network state /// without committing to its format. -#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByCodec, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByCodec, TypeInfo)] #[cfg_attr(feature = "std", derive(Default))] pub struct OpaqueNetworkState { /// PeerId of the local node in SCALE encoded. @@ -191,7 +192,7 @@ pub struct OpaqueNetworkState { } /// Simple blob to hold a `Multiaddr` without committing to its format. 
-#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByInner, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, Encode, Decode, RuntimeDebug, PassByInner, TypeInfo)] pub struct OpaqueMultiaddr(pub Vec); impl OpaqueMultiaddr { diff --git a/primitives/core/src/sr25519.rs b/primitives/core/src/sr25519.rs index e97c2b00e682e..4e8ded9af93ab 100644 --- a/primitives/core/src/sr25519.rs +++ b/primitives/core/src/sr25519.rs @@ -43,6 +43,7 @@ use crate::crypto::Ss58Codec; use crate::crypto::{Public as TraitPublic, CryptoTypePublicPair, UncheckedFrom, CryptoType, Derive, CryptoTypeId}; use crate::hash::{H256, H512}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_std::ops::Deref; #[cfg(feature = "std")] @@ -62,7 +63,7 @@ pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"sr25"); #[cfg_attr(feature = "full_crypto", derive(Hash))] #[derive( PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, - max_encoded_len::MaxEncodedLen, scale_info::TypeInfo, + max_encoded_len::MaxEncodedLen, TypeInfo, )] pub struct Public(pub [u8; 32]); @@ -192,7 +193,7 @@ impl<'de> Deserialize<'de> for Public { /// An Schnorrkel/Ristretto x25519 ("sr25519") signature. 
/// /// Instead of importing it for the local module, alias it to be available as a public type -#[derive(Encode, Decode, PassByInner, scale_info::TypeInfo)] +#[derive(Encode, Decode, PassByInner, TypeInfo)] pub struct Signature(pub [u8; 64]); impl sp_std::convert::TryFrom<&[u8]> for Signature { diff --git a/primitives/finality-grandpa/src/lib.rs b/primitives/finality-grandpa/src/lib.rs index 1ea63bc28a851..a3bd89fab01d5 100644 --- a/primitives/finality-grandpa/src/lib.rs +++ b/primitives/finality-grandpa/src/lib.rs @@ -26,6 +26,7 @@ extern crate alloc; use serde::Serialize; use codec::{Encode, Decode, Input, Codec}; +use scale_info::TypeInfo; use sp_runtime::{ConsensusEngineId, RuntimeDebug, traits::NumberFor}; use sp_std::borrow::Cow; use sp_std::vec::Vec; @@ -171,7 +172,7 @@ impl ConsensusLog { /// GRANDPA happens when a voter votes on the same round (either at prevote or /// precommit stage) for different blocks. Proving is achieved by collecting the /// signed messages of conflicting votes. -#[derive(Clone, Debug, Decode, Encode, PartialEq, scale_info::TypeInfo)] +#[derive(Clone, Debug, Decode, Encode, PartialEq, TypeInfo)] pub struct EquivocationProof { set_id: SetId, equivocation: Equivocation, @@ -208,7 +209,7 @@ impl EquivocationProof { /// Wrapper object for GRANDPA equivocation proofs, useful for unifying prevote /// and precommit equivocations under a common type. -#[derive(Clone, Debug, Decode, Encode, PartialEq, scale_info::TypeInfo)] +#[derive(Clone, Debug, Decode, Encode, PartialEq, TypeInfo)] pub enum Equivocation { /// Proof of equivocation at prevote stage. 
Prevote(grandpa::Equivocation, AuthoritySignature>), diff --git a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index ca5bc2e26433f..03c6c4ee26680 100644 --- a/primitives/runtime/src/generic/digest.rs +++ b/primitives/runtime/src/generic/digest.rs @@ -24,10 +24,11 @@ use sp_std::prelude::*; use crate::ConsensusEngineId; use crate::codec::{Decode, Encode, Input, Error}; +use crate::scale_info::TypeInfo; use sp_core::{ChangesTrieConfiguration, RuntimeDebug}; /// Generic header digest. -#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize, parity_util_mem::MallocSizeOf))] pub struct Digest { /// A list of logs in the digest. @@ -73,7 +74,7 @@ impl Digest { /// Digest item that is able to encode/decode 'system' digest items and /// provide opaque access to other items. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(parity_util_mem::MallocSizeOf))] pub enum DigestItem { /// System digest item that contains the root of changes trie at given @@ -113,7 +114,7 @@ pub enum DigestItem { } /// Available changes trie signals. -#[derive(PartialEq, Eq, Clone, Encode, Decode, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "std", derive(Debug, parity_util_mem::MallocSizeOf))] pub enum ChangesTrieSignal { /// New changes trie configuration is enacted, starting from **next block**. 
diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index 9141d3bd98e0a..6883adeaa89f4 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -48,6 +48,7 @@ use sp_std::convert::TryFrom; use sp_core::{crypto::{self, Public}, ed25519, sr25519, ecdsa, hash::{H256, H512}}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; pub mod curve; pub mod generic; @@ -224,7 +225,7 @@ pub type ConsensusEngineId = [u8; 4]; /// Signature verify that can work with any known signature types.. #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] -#[derive(Eq, PartialEq, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Eq, PartialEq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum MultiSignature { /// An Ed25519 signature. Ed25519(ed25519::Signature), @@ -280,7 +281,7 @@ impl Default for MultiSignature { } /// Public key for any known crypto algorithm. -#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Encode, Decode, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum MultiSigner { /// An Ed25519 identity. @@ -441,7 +442,7 @@ pub type DispatchResult = sp_std::result::Result<(), DispatchError>; pub type DispatchResultWithInfo = sp_std::result::Result>; /// Reason why a dispatch call failed. -#[derive(Eq, Clone, Copy, Encode, Decode, Debug, scale_info::TypeInfo)] +#[derive(Eq, Clone, Copy, Encode, Decode, Debug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum DispatchError { /// Some error occurred. @@ -528,7 +529,7 @@ impl From for DispatchError { } /// Description of what went wrong when trying to complete an operation on a token. 
-#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug, scale_info::TypeInfo)] +#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum TokenError { /// Funds are unavailable. @@ -568,7 +569,7 @@ impl From for DispatchError { } /// Arithmetic errors. -#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug, scale_info::TypeInfo)] +#[derive(Eq, PartialEq, Clone, Copy, Encode, Decode, Debug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum ArithmeticError { /// Underflow. diff --git a/primitives/runtime/src/testing.rs b/primitives/runtime/src/testing.rs index 05cfa4d9b4d7c..ddb884dd4db6e 100644 --- a/primitives/runtime/src/testing.rs +++ b/primitives/runtime/src/testing.rs @@ -36,7 +36,10 @@ use crate::transaction_validity::{TransactionValidity, TransactionValidityError, /// 2. Can be converted to any `Public` key. /// 3. Implements `RuntimeAppPublic` so it can be used instead of regular application-specific /// crypto. -#[derive(Default, PartialEq, Eq, Clone, Encode, Decode, Debug, Hash, Serialize, Deserialize, PartialOrd, Ord, scale_info::TypeInfo)] +#[derive( + Default, PartialEq, Eq, Clone, Encode, Decode, Debug, Hash, Serialize, Deserialize, PartialOrd, + Ord, TypeInfo +)] pub struct UintAuthorityId(pub u64); impl From for UintAuthorityId { @@ -142,7 +145,7 @@ impl traits::IdentifyAccount for UintAuthorityId { } /// A dummy signature type, to match `UintAuthorityId`. -#[derive(Eq, PartialEq, Clone, Debug, Hash, Serialize, Deserialize, Encode, Decode, scale_info::TypeInfo)] +#[derive(Eq, PartialEq, Clone, Debug, Hash, Serialize, Deserialize, Encode, Decode, TypeInfo)] pub struct TestSignature(pub u64, pub Vec); impl traits::Verify for TestSignature { @@ -255,7 +258,7 @@ impl<'a, Xt> Deserialize<'a> for Block where Block: Decode { /// with index only used if sender is some. 
/// /// If sender is some then the transaction is signed otherwise it is unsigned. -#[derive(PartialEq, Eq, Clone, Encode, Decode, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] pub struct TestXt { /// Signature of the extrinsic. pub signature: Option<(u64, Extra)>, diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index d6238bf7ad4cb..6d1e27cbb682c 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -27,6 +27,7 @@ use std::str::FromStr; use serde::{Serialize, Deserialize, de::DeserializeOwned}; use sp_core::{self, Hasher, TypeId, RuntimeDebug}; use crate::codec::{Codec, Encode, Decode}; +use crate::scale_info::TypeInfo; use crate::transaction_validity::{ ValidTransaction, TransactionSource, TransactionValidity, TransactionValidityError, UnknownTransaction, @@ -203,7 +204,7 @@ pub trait Lookup { /// context. pub trait StaticLookup { /// Type to lookup from. - type Source: Codec + Clone + PartialEq + Debug + scale_info::TypeInfo; + type Source: Codec + Clone + PartialEq + Debug + TypeInfo; /// Type to lookup into. type Target; /// Attempt a lookup. @@ -215,7 +216,7 @@ pub trait StaticLookup { /// A lookup implementation returning the input value. #[derive(Default)] pub struct IdentityLookup(PhantomData); -impl StaticLookup for IdentityLookup { +impl StaticLookup for IdentityLookup { type Source = T; type Target = T; fn lookup(x: T) -> Result { Ok(x) } @@ -388,7 +389,7 @@ pub trait Hash: 'static + MaybeSerializeDeserialize + Debug + Clone + Eq + Parti /// The hash type produced. type Output: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + Copy + Default + Encode + Decode + MaxEncodedLen - + scale_info::TypeInfo; + + TypeInfo; /// Produce the hash of some byte-slice. 
fn hash(s: &[u8]) -> Self::Output { @@ -408,7 +409,7 @@ pub trait Hash: 'static + MaybeSerializeDeserialize + Debug + Clone + Eq + Parti } /// Blake2-256 Hash implementation. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct BlakeTwo256; @@ -435,7 +436,7 @@ impl Hash for BlakeTwo256 { } /// Keccak-256 Hash implementation. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, scale_info::TypeInfo)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct Keccak256; @@ -553,7 +554,7 @@ pub trait Header: /// Header hash type type Hash: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash + Ord + Copy + MaybeDisplay + Default + SimpleBitOps + Codec + AsRef<[u8]> - + AsMut<[u8]> + MaybeMallocSizeOf + scale_info::TypeInfo; + + AsMut<[u8]> + MaybeMallocSizeOf + TypeInfo; /// Hashing algorithm type Hashing: Hash; @@ -609,7 +610,7 @@ pub trait Block: Clone + Send + Sync + Codec + Eq + MaybeSerialize + Debug + May /// Block hash type. type Hash: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash + Ord + Copy + MaybeDisplay + Default + SimpleBitOps + Codec + AsRef<[u8]> + AsMut<[u8]> - + MaybeMallocSizeOf + scale_info::TypeInfo; + + MaybeMallocSizeOf + TypeInfo; /// Returns a reference to the header. fn header(&self) -> &Self::Header; @@ -1202,7 +1203,7 @@ macro_rules! 
impl_opaque_keys_inner { Default, Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, - $crate::scale_info::TypeInfo, + $crate::TypeInfo, $crate::RuntimeDebug, )] pub struct $name { diff --git a/test-utils/runtime/src/lib.rs b/test-utils/runtime/src/lib.rs index be70f1e0c5592..8156e2b23737c 100644 --- a/test-utils/runtime/src/lib.rs +++ b/test-utils/runtime/src/lib.rs @@ -25,6 +25,7 @@ pub mod system; use sp_std::{prelude::*, marker::PhantomData}; use codec::{Encode, Decode, Input, Error}; +use scale_info::TypeInfo; use sp_core::{offchain::KeyTypeId, ChangesTrieConfiguration, OpaqueMetadata, RuntimeDebug}; use sp_application_crypto::{ed25519, sr25519, ecdsa, RuntimeAppPublic}; @@ -419,7 +420,7 @@ cfg_if! { } } -#[derive(Clone, Eq, PartialEq, scale_info::TypeInfo)] +#[derive(Clone, Eq, PartialEq, TypeInfo)] pub struct Runtime; impl GetNodeBlockType for Runtime { @@ -434,7 +435,7 @@ impl_outer_origin!{ pub enum Origin for Runtime where system = frame_system {} } -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub struct Event; impl From> for Event { From b9227dec85ad1d6a8c34865517bac6b4fee7841b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 2 Jul 2021 10:09:16 +0100 Subject: [PATCH 291/503] Revert accidental change --- frame/benchmarking/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/benchmarking/src/lib.rs b/frame/benchmarking/src/lib.rs index cb0bad89cf726..8160bd5d1dd21 100644 --- a/frame/benchmarking/src/lib.rs +++ b/frame/benchmarking/src/lib.rs @@ -304,7 +304,7 @@ macro_rules! benchmarks_iter { ( $( $names_extra )* ) $name { $( $code )* }: { < - Call as $crate::traits::UnfilteredDispatchable + Call as $crate::frame_support::traits::UnfilteredDispatchable >::dispatch_bypass_filter( Call::::$dispatch($($arg),*), $origin.into() )?; @@ -722,7 +722,7 @@ macro_rules! 
impl_benchmark { // Add whitelist to DB including whitelisted caller let mut whitelist = whitelist.to_vec(); let whitelisted_caller_key = - as $crate::storage::StorageMap<_,_>>::hashed_key_for( + as $crate::frame_support::storage::StorageMap<_,_>>::hashed_key_for( $crate::whitelisted_caller::() ); whitelist.push(whitelisted_caller_key.into()); From 86f3c152aebfdc09686b62a76c9bc7505ec1f555 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 2 Jul 2021 10:13:19 +0100 Subject: [PATCH 292/503] review: remove unnecessary bounds --- frame/contracts/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/contracts/src/lib.rs b/frame/contracts/src/lib.rs index 9ae05893a567e..685f1536d6e2f 100644 --- a/frame/contracts/src/lib.rs +++ b/frame/contracts/src/lib.rs @@ -266,7 +266,7 @@ pub mod pallet { impl Pallet where T::AccountId: UncheckedFrom, - T::AccountId: AsRef<[u8]> + scale_info::TypeInfo, + T::AccountId: AsRef<[u8]>, { /// Makes a call to an account, optionally transferring some balance. /// @@ -623,7 +623,7 @@ pub mod pallet { impl Pallet where - T::AccountId: UncheckedFrom + AsRef<[u8]> + scale_info::TypeInfo, + T::AccountId: UncheckedFrom + AsRef<[u8]>, { /// Perform a call to a specified contract. 
/// From a50981b135e8084004b5942618c8bafce7eda26e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 2 Jul 2021 10:13:33 +0100 Subject: [PATCH 293/503] Add missing TypeInfo imports --- primitives/core/src/lib.rs | 1 + primitives/runtime/src/testing.rs | 1 + 2 files changed, 2 insertions(+) diff --git a/primitives/core/src/lib.rs b/primitives/core/src/lib.rs index e646c4309c5b3..afa97143873ad 100644 --- a/primitives/core/src/lib.rs +++ b/primitives/core/src/lib.rs @@ -43,6 +43,7 @@ use serde::{Serialize, Deserialize}; pub use serde; #[doc(hidden)] pub use codec::{Encode, Decode}; +use scale_info::TypeInfo; pub use sp_debug_derive::RuntimeDebug; diff --git a/primitives/runtime/src/testing.rs b/primitives/runtime/src/testing.rs index ddb884dd4db6e..72390269d3023 100644 --- a/primitives/runtime/src/testing.rs +++ b/primitives/runtime/src/testing.rs @@ -20,6 +20,7 @@ use serde::{Serialize, Serializer, Deserialize, de::Error as DeError, Deserializer}; use std::{fmt::{self, Debug}, ops::Deref, cell::RefCell}; use crate::codec::{Codec, Encode, Decode}; +use crate::scale_info::TypeInfo; use crate::traits::{ self, Checkable, Applyable, BlakeTwo256, OpaqueKeys, SignedExtension, Dispatchable, DispatchInfoOf, PostDispatchInfoOf, From f508b0d03cb13210fa3fa9e615df1da47e1dbd5b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 2 Jul 2021 11:02:26 +0100 Subject: [PATCH 294/503] Fix some test errors --- Cargo.lock | 1 + frame/support/src/lib.rs | 2 +- frame/support/test/Cargo.toml | 1 + frame/support/test/tests/issue2219.rs | 1 + 4 files changed, 4 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index a5336eb15ff1c..0417156c5227e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1961,6 +1961,7 @@ dependencies = [ "sp-runtime", "sp-state-machine", "sp-std", + "sp-version", "trybuild", ] diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 253db017fbcfd..96564d67fe1a8 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -365,7 
+365,7 @@ macro_rules! parameter_types { } } - impl $crate::TypeInfo for $name { + impl $crate::scale_info::TypeInfo for $name { type Identity = Self; fn type_info() -> $crate::scale_info::Type<$crate::scale_info::form::MetaForm> { diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index e8740d9447945..6a12ad92ca86e 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -22,6 +22,7 @@ frame-support = { version = "3.0.0", default-features = false, path = "../" } sp-runtime = { version = "3.0.0", default-features = false, path = "../../../primitives/runtime" } sp-core = { version = "3.0.0", default-features = false, path = "../../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../../primitives/std" } +sp-version = { version = "3.0.0", default-features = false, path = "../../../primitives/version" } trybuild = "1.0.42" pretty_assertions = "0.6.1" rustversion = "1.0.0" diff --git a/frame/support/test/tests/issue2219.rs b/frame/support/test/tests/issue2219.rs index d39a9dd16f7f2..b9d21045e00f3 100644 --- a/frame/support/test/tests/issue2219.rs +++ b/frame/support/test/tests/issue2219.rs @@ -18,6 +18,7 @@ use frame_support::sp_runtime::generic; use frame_support::sp_runtime::traits::{BlakeTwo256, Verify}; use frame_support::codec::{Encode, Decode}; +use frame_support::scale_info::TypeInfo; use sp_core::{H256, sr25519}; use serde::{Serialize, Deserialize}; From 17f0f6812618c45ca8e1eb27788dfe520af81e56 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 2 Jul 2021 11:35:03 +0100 Subject: [PATCH 295/503] Remove unnecessary TypeInfo bound --- frame/example/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/example/src/lib.rs b/frame/example/src/lib.rs index 6cbcf7695d2c0..522dd0fb6ab1d 100644 --- a/frame/example/src/lib.rs +++ b/frame/example/src/lib.rs @@ -700,7 +700,7 @@ impl sp_std::fmt::Debug for WatchDummy { } } -impl SignedExtension for 
WatchDummy +impl SignedExtension for WatchDummy where ::Call: IsSubType>, { From 24f7f31c34033bf9b879dd69e2dd3ed595081267 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 2 Jul 2021 11:37:15 +0100 Subject: [PATCH 296/503] Remove some fully qualified TypeInfo bounds --- frame/proxy/src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/proxy/src/lib.rs b/frame/proxy/src/lib.rs index 06b93803b0de7..f6c1132fb0f05 100644 --- a/frame/proxy/src/lib.rs +++ b/frame/proxy/src/lib.rs @@ -136,14 +136,14 @@ pub mod pallet { /// The maximum amount of proxies allowed for a single account. #[pallet::constant] - type MaxProxies: Get + scale_info::TypeInfo; + type MaxProxies: Get + TypeInfo; /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; /// The maximum amount of time-delayed announcements that are allowed to be pending. #[pallet::constant] - type MaxPending: Get + scale_info::TypeInfo; + type MaxPending: Get + TypeInfo; /// The type of hash used for hashing the call. 
type CallHasher: Hash; From 91f997c623839150d2cb68b2d6e7288e568eab85 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 2 Jul 2021 12:33:45 +0100 Subject: [PATCH 297/503] Add missing import --- frame/society/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/society/src/lib.rs b/frame/society/src/lib.rs index b04eb99d9842d..75ee130b2c290 100644 --- a/frame/society/src/lib.rs +++ b/frame/society/src/lib.rs @@ -254,6 +254,7 @@ mod tests; use rand_chacha::{rand_core::{RngCore, SeedableRng}, ChaChaRng}; use sp_std::prelude::*; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::{Percent, RuntimeDebug, traits::{ StaticLookup, AccountIdConversion, Saturating, Zero, IntegerSquareRoot, Hash, From 8c097ea99963b0963e48b81540f0192ac21128e6 Mon Sep 17 00:00:00 2001 From: Andronik Ordian Date: Fri, 2 Jul 2021 21:18:54 +0200 Subject: [PATCH 298/503] bump a bunch of deps in parity-common --- Cargo.lock | 114 +++++++++++------------ bin/node/bench/Cargo.toml | 6 +- client/api/Cargo.toml | 4 +- client/db/Cargo.toml | 10 +- client/informant/Cargo.toml | 2 +- client/service/Cargo.toml | 2 +- client/state-db/Cargo.toml | 2 +- client/transaction-pool/Cargo.toml | 2 +- client/transaction-pool/graph/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- max-encoded-len/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/arithmetic/fuzzer/Cargo.toml | 2 +- primitives/core/Cargo.toml | 4 +- primitives/database/Cargo.toml | 2 +- primitives/runtime-interface/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/state-machine/Cargo.toml | 2 +- primitives/test-primitives/Cargo.toml | 2 +- primitives/trie/Cargo.toml | 6 +- test-utils/runtime/Cargo.toml | 6 +- utils/browser/Cargo.toml | 2 +- utils/browser/src/lib.rs | 2 +- 23 files changed, 88 insertions(+), 94 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 737a762d88f0d..ff0a26810758c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -96,6 +96,17 @@ version = "0.4.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "739f4a8db6605981345c5654f3a85b056ce52f37a39d34da03f25bf2151ea16e" +[[package]] +name = "ahash" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43bb833f0bf979d8475d38fbf09ed3b8a55e1885fe93ad3f93239fc6a4f17b98" +dependencies = [ + "getrandom 0.2.3", + "once_cell", + "version_check", +] + [[package]] name = "aho-corasick" version = "0.7.15" @@ -2149,7 +2160,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" dependencies = [ "gloo-timers", - "send_wrapper 0.4.0", + "send_wrapper", ] [[package]] @@ -2372,7 +2383,16 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04" dependencies = [ - "ahash", + "ahash 0.4.7", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash 0.7.4", ] [[package]] @@ -2701,9 +2721,9 @@ dependencies = [ [[package]] name = "impl-codec" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" +checksum = "161ebdfec3c8e3b52bf61c4f3550a1eea4f9579d10dc1b936f3171ebdcd6c443" dependencies = [ "parity-scale-codec", ] @@ -2735,7 +2755,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "824845a0bf897a9042383849b02c1bc219c2383772efcd5c6f9766fa4b81aef3" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.9.1", "serde", ] @@ -2746,9 +2766,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" dependencies = [ "cfg-if 
1.0.0", - "js-sys", - "wasm-bindgen", - "web-sys", ] [[package]] @@ -3061,9 +3078,9 @@ dependencies = [ [[package]] name = "kvdb" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8891bd853eff90e33024195d79d578dc984c82f9e0715fcd2b525a0c19d52811" +checksum = "45a3f58dc069ec0e205a27f5b45920722a46faed802a0541538241af6228f512" dependencies = [ "parity-util-mem", "smallvec 1.6.1", @@ -3071,9 +3088,9 @@ dependencies = [ [[package]] name = "kvdb-memorydb" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a0da8e08caf08d384a620ec19bb6c9b85c84137248e202617fb91881f25912" +checksum = "c3b6b85fc643f5acd0bffb2cc8a6d150209379267af0d41db72170021841f9f5" dependencies = [ "kvdb", "parity-util-mem", @@ -3082,9 +3099,9 @@ dependencies = [ [[package]] name = "kvdb-rocksdb" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34446c373ccc494c2124439281c198c7636ccdc2752c06722bbffd56d459c1e4" +checksum = "431ca65516efab86e65d96281f750ebb54277dec656fcf6c027f3d1c0cb69e4c" dependencies = [ "fs-swap", "kvdb", @@ -3098,24 +3115,6 @@ dependencies = [ "smallvec 1.6.1", ] -[[package]] -name = "kvdb-web" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb1e98ba343d0b35f9009a8844cd2b87fa3192f7e79033ac05b00aeae0f3b0b5" -dependencies = [ - "futures 0.3.15", - "js-sys", - "kvdb", - "kvdb-memorydb", - "log", - "parity-util-mem", - "parking_lot 0.11.1", - "send_wrapper 0.5.0", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "lazy_static" version = "1.4.0" @@ -3716,7 +3715,7 @@ version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f374d42cdfc1d7dbf3d3dec28afab2eb97ffbf43a3234d795b5986dbf4b90ba" dependencies = [ - "hashbrown", + "hashbrown 0.9.1", ] [[package]] @@ -3847,12 +3846,12 @@ dependencies = [ 
[[package]] name = "memory-db" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "814bbecfc0451fc314eeea34f05bbcd5b98a7ad7af37faee088b86a1e633f1d4" +checksum = "de006e09d04fc301a5f7e817b75aa49801c4479a8af753764416b085337ddcc5" dependencies = [ "hash-db", - "hashbrown", + "hashbrown 0.11.2", "parity-util-mem", ] @@ -5759,24 +5758,25 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0f518afaa5a47d0d6386229b0a6e01e86427291d643aa4cabb4992219f504f8" +checksum = "8975095a2a03bbbdc70a74ab11a4f76a6d0b84680d87c68d722531b0ac28e8a9" dependencies = [ "arrayvec 0.7.0", "bitvec", "byte-slice-cast", + "impl-trait-for-tuples", "parity-scale-codec-derive", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f44c5f94427bd0b5076e8f7e15ca3f60a4d8ac0077e4793884e6fdfd8915344e" +checksum = "40dbbfef7f0a1143c5b06e0d76a6278e25dac0bc1af4be51a0fbb73f07e7ad09" dependencies = [ - "proc-macro-crate 0.1.5", + "proc-macro-crate 1.0.0", "proc-macro2", "quote", "syn", @@ -5809,12 +5809,12 @@ dependencies = [ [[package]] name = "parity-util-mem" -version = "0.9.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "664a8c6b8e62d8f9f2f937e391982eb433ab285b4cd9545b342441e04a906e42" +checksum = "7ad6f1acec69b95caf435bbd158d486e5a0a44fcf51531e84922c59ff09e8457" dependencies = [ "cfg-if 1.0.0", - "hashbrown", + "hashbrown 0.11.2", "impl-trait-for-tuples", "parity-util-mem-derive", "parking_lot 0.11.1", @@ -6259,9 +6259,9 @@ dependencies = [ [[package]] name = "primitive-types" -version = "0.9.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" +checksum = "05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" dependencies = [ "fixed-hash", "impl-codec", @@ -6824,9 +6824,9 @@ dependencies = [ [[package]] name = "rocksdb" -version = "0.15.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23d83c02c429044d58474eaf5ae31e062d0de894e21125b47437ec0edc1397e6" +checksum = "c749134fda8bfc90d0de643d59bfc841dcb3ac8a1062e12b6754bd60235c48b3" dependencies = [ "libc", "librocksdb-sys", @@ -8414,12 +8414,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" -[[package]] -name = "send_wrapper" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "930c0acf610d3fdb5e2ab6213019aaa04e227ebe9547b0649ba599b16d788bd7" - [[package]] name = "serde" version = "1.0.124" @@ -9621,7 +9615,7 @@ dependencies = [ "futures-timer 3.0.2", "getrandom 0.2.3", "js-sys", - "kvdb-web", + "kvdb-memorydb", "libp2p-wasm-ext", "log", "rand 0.7.3", @@ -10527,9 +10521,9 @@ checksum = "a7f741b240f1a48843f9b8e0444fb55fb2a4ff67293b50a9179dfd5ea67f8d41" [[package]] name = "trie-bench" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "568257edb909a5c532b1f4ab38ee6b5dedfbf8775be6a55a29020513ebe3e072" +checksum = "4edd9bdf0c2e08fd77c0fb2608179cac7ebed997ae18f58d47a2d96425ff51f0" dependencies = [ "criterion", "hash-db", @@ -10543,12 +10537,12 @@ dependencies = [ [[package]] name = "trie-db" -version = "0.22.5" +version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd81fe0c8bc2b528a51c9d2c31dae4483367a26a723a3c9a4a8120311d7774e3" +checksum = "9eac131e334e81b6b3be07399482042838adcd7957aa0010231d0813e39e02fa" dependencies = [ "hash-db", - "hashbrown", + "hashbrown 0.11.2", 
"log", "rustc-hex", "smallvec 1.6.1", diff --git a/bin/node/bench/Cargo.toml b/bin/node/bench/Cargo.toml index 93ee35d98f98d..65c6a562b18b7 100644 --- a/bin/node/bench/Cargo.toml +++ b/bin/node/bench/Cargo.toml @@ -21,8 +21,8 @@ serde = "1.0.101" serde_json = "1.0.41" structopt = "0.3" derive_more = "0.99.2" -kvdb = "0.9.0" -kvdb-rocksdb = "0.11.0" +kvdb = "0.10.0" +kvdb-rocksdb = "0.12.0" sp-trie = { version = "3.0.0", path = "../../../primitives/trie" } sp-core = { version = "3.0.0", path = "../../../primitives/core" } sp-consensus = { version = "0.9.0", path = "../../../primitives/consensus/common" } @@ -37,7 +37,7 @@ fs_extra = "1" hex = "0.4.0" rand = { version = "0.7.2", features = ["small_rng"] } lazy_static = "1.4.0" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } parity-db = { version = "0.2.4" } sc-transaction-pool = { version = "3.0.0", path = "../../../client/transaction-pool" } futures = { version = "0.3.4", features = ["thread-pool"] } diff --git a/client/api/Cargo.toml b/client/api/Cargo.toml index 637dae4a29abd..65a48954c4900 100644 --- a/client/api/Cargo.toml +++ b/client/api/Cargo.toml @@ -24,7 +24,7 @@ futures = "0.3.1" hash-db = { version = "0.15.2", default-features = false } sp-blockchain = { version = "3.0.0", path = "../../primitives/blockchain" } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } -kvdb = "0.9.0" +kvdb = "0.10.0" log = "0.4.8" parking_lot = "0.11.1" lazy_static = "1.4.0" @@ -43,7 +43,7 @@ sp-transaction-pool = { version = "3.0.0", path = "../../primitives/transaction- prometheus-endpoint = { package = "substrate-prometheus-endpoint", version = "0.9.0", path = "../../utils/prometheus" } [dev-dependencies] -kvdb-memorydb = "0.9.0" +kvdb-memorydb = "0.10.0" sp-test-primitives = { version = "2.0.0", path = 
"../../primitives/test-primitives" } substrate-test-runtime = { version = "2.0.0", path = "../../test-utils/runtime" } thiserror = "1.0.21" diff --git a/client/db/Cargo.toml b/client/db/Cargo.toml index 43bae63f09c2b..2145b988891d7 100644 --- a/client/db/Cargo.toml +++ b/client/db/Cargo.toml @@ -15,12 +15,12 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] parking_lot = "0.11.1" log = "0.4.8" -kvdb = "0.9.0" -kvdb-rocksdb = { version = "0.11.0", optional = true } -kvdb-memorydb = "0.9.0" +kvdb = "0.10.0" +kvdb-rocksdb = { version = "0.12.0", optional = true } +kvdb-memorydb = "0.10.0" linked-hash-map = "0.5.2" hash-db = "0.15.2" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["std"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["std"] } codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } blake2-rfc = "0.2.18" @@ -43,7 +43,7 @@ sp-keyring = { version = "3.0.0", path = "../../primitives/keyring" } sp-tracing = { version = "3.0.0", path = "../../primitives/tracing" } substrate-test-runtime-client = { version = "2.0.0", path = "../../test-utils/runtime/client" } quickcheck = "1.0.3" -kvdb-rocksdb = "0.11.0" +kvdb-rocksdb = "0.12.0" tempfile = "3" [features] diff --git a/client/informant/Cargo.toml b/client/informant/Cargo.toml index 139a5ce19a000..4238243ef96ee 100644 --- a/client/informant/Cargo.toml +++ b/client/informant/Cargo.toml @@ -17,7 +17,7 @@ ansi_term = "0.12.1" futures = "0.3.9" futures-timer = "3.0.1" log = "0.4.8" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } sc-client-api = { version = "3.0.0", path = "../api" } sc-network = { version = "0.9.0", path = "../network" } sp-blockchain = { version = "3.0.0", path = "../../primitives/blockchain" } diff --git a/client/service/Cargo.toml 
b/client/service/Cargo.toml index a90efb02dc5f2..e4756b1880f37 100644 --- a/client/service/Cargo.toml +++ b/client/service/Cargo.toml @@ -79,7 +79,7 @@ sc-tracing = { version = "3.0.0", path = "../tracing" } sp-tracing = { version = "3.0.0", path = "../../primitives/tracing" } tracing = "0.1.25" tracing-futures = { version = "0.2.4" } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } async-trait = "0.1.42" [target.'cfg(not(target_os = "unknown"))'.dependencies] diff --git a/client/state-db/Cargo.toml b/client/state-db/Cargo.toml index d61dd7fc125a1..ca538f9d651f2 100644 --- a/client/state-db/Cargo.toml +++ b/client/state-db/Cargo.toml @@ -19,5 +19,5 @@ log = "0.4.11" sc-client-api = { version = "3.0.0", path = "../api" } sp-core = { version = "3.0.0", path = "../../primitives/core" } codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } parity-util-mem-derive = "0.1.0" diff --git a/client/transaction-pool/Cargo.toml b/client/transaction-pool/Cargo.toml index 6b105520baec5..77b551915ce13 100644 --- a/client/transaction-pool/Cargo.toml +++ b/client/transaction-pool/Cargo.toml @@ -18,7 +18,7 @@ thiserror = "1.0.21" futures = { version = "0.3.1", features = ["compat"] } intervalier = "0.4.0" log = "0.4.8" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } parking_lot = "0.11.1" prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus", version = "0.9.0"} sc-client-api = { version = "3.0.0", path = "../api" } 
diff --git a/client/transaction-pool/graph/Cargo.toml b/client/transaction-pool/graph/Cargo.toml index 4b134c7080963..9af2b152d8c6a 100644 --- a/client/transaction-pool/graph/Cargo.toml +++ b/client/transaction-pool/graph/Cargo.toml @@ -25,7 +25,7 @@ sp-utils = { version = "3.0.0", path = "../../../primitives/utils" } sp-core = { version = "3.0.0", path = "../../../primitives/core" } sp-runtime = { version = "3.0.0", path = "../../../primitives/runtime" } sp-transaction-pool = { version = "3.0.0", path = "../../../primitives/transaction-pool" } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } linked-hash-map = "0.5.2" retain_mut = "0.1.3" diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 0c9aacaf307b1..80b3d7f0c4ec2 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -37,7 +37,7 @@ log = { version = "0.4.14", default-features = false } [dev-dependencies] pretty_assertions = "0.6.1" frame-system = { version = "3.0.0", path = "../system" } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } [features] default = ["std"] diff --git a/max-encoded-len/Cargo.toml b/max-encoded-len/Cargo.toml index 994a3c6a5e132..36234660e49cc 100644 --- a/max-encoded-len/Cargo.toml +++ b/max-encoded-len/Cargo.toml @@ -13,7 +13,7 @@ description = "Trait MaxEncodedLen bounds the max encoded length of an item." 
codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } impl-trait-for-tuples = "0.2.1" max-encoded-len-derive = { package = "max-encoded-len-derive", version = "3.0.0", path = "derive", default-features = false, optional = true } -primitive-types = { version = "0.9.0", default-features = false, features = ["codec"] } +primitive-types = { version = "0.10.1", default-features = false, features = ["codec"] } [dev-dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive" ] } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 3c3b5a35c164a..055d6fc33a5f4 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -27,7 +27,7 @@ sp-debug-derive = { version = "3.0.0", default-features = false, path = "../debu rand = "0.7.2" criterion = "0.3" serde_json = "1.0" -primitive-types = "0.9.0" +primitive-types = "0.10.1" [features] default = ["std"] diff --git a/primitives/arithmetic/fuzzer/Cargo.toml b/primitives/arithmetic/fuzzer/Cargo.toml index 2666dde9016a8..0c1cf731cd692 100644 --- a/primitives/arithmetic/fuzzer/Cargo.toml +++ b/primitives/arithmetic/fuzzer/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-arithmetic = { version = "3.0.0", path = ".." 
} honggfuzz = "0.5.49" -primitive-types = "0.9.0" +primitive-types = "0.10.1" num-bigint = "0.2" num-traits = "0.2" diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 0c724d61ae0cc..7ae89623b5f29 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.0.0", default-features = log = { version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } -primitive-types = { version = "0.9.0", default-features = false, features = ["codec"] } +primitive-types = { version = "0.10.1", default-features = false, features = ["codec"] } impl-serde = { version = "0.3.0", optional = true } wasmi = { version = "0.9.0", optional = true } hash-db = { version = "0.15.2", default-features = false } @@ -36,7 +36,7 @@ parking_lot = { version = "0.11.1", optional = true } sp-debug-derive = { version = "3.0.0", path = "../debug-derive" } sp-externalities = { version = "0.9.0", optional = true, path = "../externalities" } sp-storage = { version = "3.0.0", default-features = false, path = "../storage" } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } futures = { version = "0.3.1", optional = true } dyn-clonable = { version = "0.9.0", optional = true } thiserror = { version = "1.0.21", optional = true } diff --git a/primitives/database/Cargo.toml b/primitives/database/Cargo.toml index aae7668b5ec80..f8693449af8cd 100644 --- a/primitives/database/Cargo.toml +++ b/primitives/database/Cargo.toml @@ -12,5 +12,5 @@ readme = "README.md" [dependencies] parking_lot = "0.11.1" -kvdb = "0.9.0" +kvdb = "0.10.0" diff --git a/primitives/runtime-interface/Cargo.toml b/primitives/runtime-interface/Cargo.toml index 
4099e89933880..f22afb0a85c92 100644 --- a/primitives/runtime-interface/Cargo.toml +++ b/primitives/runtime-interface/Cargo.toml @@ -21,7 +21,7 @@ sp-runtime-interface-proc-macro = { version = "3.0.0", path = "proc-macro" } sp-externalities = { version = "0.9.0", optional = true, path = "../externalities" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } static_assertions = "1.0.0" -primitive-types = { version = "0.9.0", default-features = false } +primitive-types = { version = "0.10.1", default-features = false } sp-storage = { version = "3.0.0", default-features = false, path = "../storage" } impl-trait-for-tuples = "0.2.1" diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index aec2bc416ee37..e0fc2ed461833 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -26,7 +26,7 @@ log = { version = "0.4.14", default-features = false } paste = "1.0" rand = { version = "0.7.2", optional = true } impl-trait-for-tuples = "0.2.1" -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } hash256-std-hasher = { version = "0.15.2", default-features = false } either = { version = "1.5", default-features = false } max-encoded-len = { version = "3.0.0", default-features = false, path = "../../max-encoded-len", features = [ "derive" ] } diff --git a/primitives/state-machine/Cargo.toml b/primitives/state-machine/Cargo.toml index 79fccef08c199..00050116280e8 100644 --- a/primitives/state-machine/Cargo.toml +++ b/primitives/state-machine/Cargo.toml @@ -18,7 +18,7 @@ log = { version = "0.4.11", optional = true } thiserror = { version = "1.0.21", optional = true } parking_lot = { version = "0.11.1", optional = true } hash-db = { version = "0.15.2", default-features = false } -trie-db = { version = "0.22.2", default-features = false } +trie-db = { version = 
"0.22.6", default-features = false } trie-root = { version = "0.16.0", default-features = false } sp-trie = { version = "3.0.0", path = "../trie", default-features = false } sp-core = { version = "3.0.0", path = "../core", default-features = false } diff --git a/primitives/test-primitives/Cargo.toml b/primitives/test-primitives/Cargo.toml index fbf29db96fa46..f1f2c70bf2315 100644 --- a/primitives/test-primitives/Cargo.toml +++ b/primitives/test-primitives/Cargo.toml @@ -17,7 +17,7 @@ codec = { package = "parity-scale-codec", version = "2.0.0", default-features = sp-core = { version = "3.0.0", default-features = false, path = "../core" } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-runtime = { version = "3.0.0", default-features = false, path = "../runtime" } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } [features] default = [ diff --git a/primitives/trie/Cargo.toml b/primitives/trie/Cargo.toml index bf91fff31b8b6..e13bc68fb96b0 100644 --- a/primitives/trie/Cargo.toml +++ b/primitives/trie/Cargo.toml @@ -21,13 +21,13 @@ harness = false codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } sp-std = { version = "3.0.0", default-features = false, path = "../std" } hash-db = { version = "0.15.2", default-features = false } -trie-db = { version = "0.22.5", default-features = false } +trie-db = { version = "0.22.6", default-features = false } trie-root = { version = "0.16.0", default-features = false } -memory-db = { version = "0.26.0", default-features = false } +memory-db = { version = "0.27.0", default-features = false } sp-core = { version = "3.0.0", default-features = false, path = "../core" } [dev-dependencies] -trie-bench = "0.27.0" +trie-bench = "0.28.0" trie-standardmap = "0.15.2" criterion = "0.3.3" hex-literal = "0.3.1" diff --git 
a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 96b7efff83380..60283bb97d183 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -20,7 +20,7 @@ sp-block-builder = { version = "3.0.0", default-features = false, path = "../../ codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } -memory-db = { version = "0.26.0", default-features = false } +memory-db = { version = "0.27.0", default-features = false } sp-offchain = { path = "../../primitives/offchain", default-features = false, version = "3.0.0"} sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } @@ -38,8 +38,8 @@ pallet-timestamp = { version = "3.0.0", default-features = false, path = "../../ sp-finality-grandpa = { version = "3.0.0", default-features = false, path = "../../primitives/finality-grandpa" } sp-trie = { version = "3.0.0", default-features = false, path = "../../primitives/trie" } sp-transaction-pool = { version = "3.0.0", default-features = false, path = "../../primitives/transaction-pool" } -trie-db = { version = "0.22.2", default-features = false } -parity-util-mem = { version = "0.9.0", default-features = false, features = ["primitive-types"] } +trie-db = { version = "0.22.6", default-features = false } +parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } sc-service = { version = "0.9.0", default-features = false, optional = true, features = ["test-helpers"], path = "../../client/service" } sp-state-machine = { version = "0.9.0", default-features = false, path = "../../primitives/state-machine" } sp-externalities = { version = "0.9.0", 
default-features = false, path = "../../primitives/externalities" } diff --git a/utils/browser/Cargo.toml b/utils/browser/Cargo.toml index 31403a5e6fa96..eac1730d5ce2d 100644 --- a/utils/browser/Cargo.toml +++ b/utils/browser/Cargo.toml @@ -21,7 +21,7 @@ console_error_panic_hook = "0.1.6" js-sys = "0.3.34" wasm-bindgen = "0.2.73" wasm-bindgen-futures = "0.4.18" -kvdb-web = "0.9.0" +kvdb-memorydb = "0.10.0" sp-database = { version = "3.0.0", path = "../../primitives/database" } sc-informant = { version = "0.9.0", path = "../../client/informant" } sc-service = { version = "0.9.0", path = "../../client/service", default-features = false } diff --git a/utils/browser/src/lib.rs b/utils/browser/src/lib.rs index d9d77210b9305..16553c63a7cf2 100644 --- a/utils/browser/src/lib.rs +++ b/utils/browser/src/lib.rs @@ -78,7 +78,7 @@ where role: Role::Light, database: { info!("Opening Indexed DB database '{}'...", name); - let db = kvdb_web::Database::open(name, 10).await?; + let db = kvdb_memorydb::create(10); DatabaseConfig::Custom(sp_database::as_database(db)) }, From 6a97d46adf0608b6479cb18258ccb95352819f2f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 09:32:06 +0100 Subject: [PATCH 299/503] Add missing imports --- frame/balances/src/lib.rs | 1 + frame/democracy/src/types.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 178bd1a1fb4a3..da0fa1698a5e3 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -159,6 +159,7 @@ pub mod weights; use sp_std::prelude::*; use sp_std::{cmp, result, mem, fmt::Debug, ops::BitOr}; use codec::{Codec, Encode, Decode}; +use scale_info::TypeInfo; use frame_support::{ ensure, WeakBoundedVec, traits::{ diff --git a/frame/democracy/src/types.rs b/frame/democracy/src/types.rs index 58992a37387fa..7cc748a0a0e28 100644 --- a/frame/democracy/src/types.rs +++ b/frame/democracy/src/types.rs @@ -18,7 +18,7 @@ //! 
Miscellaneous additional datatypes. use codec::{Encode, Decode}; -use scale_info::TypeInfo +use scale_info::TypeInfo; use sp_runtime::RuntimeDebug; use sp_runtime::traits::{Zero, Bounded, CheckedAdd, CheckedSub, CheckedMul, CheckedDiv, Saturating}; use crate::{Vote, VoteThreshold, AccountVote, Conviction}; From 0b281df852783c4926e22c8281c2f16149ea9489 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 09:32:31 +0100 Subject: [PATCH 300/503] Remove parity-common patches --- Cargo.toml | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 27372808996f7..859085e79724f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -276,12 +276,4 @@ zeroize = { opt-level = 3 } panic = "unwind" [patch.crates-io] -primitive-types = { git = "https://github.com/paritytech/parity-common", branch = "master" } -parity-util-mem = { git = "https://github.com/paritytech/parity-common", branch = "master" } -kvdb = { git = "https://github.com/paritytech/parity-common", branch = "master" } -kvdb-memorydb = { git = "https://github.com/paritytech/parity-common", branch = "master" } -kvdb-web = { git = "https://github.com/paritytech/parity-common", branch = "master" } -kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common", branch = "master" } -finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } -memory-db = { git = "https://github.com/paritytech/trie", branch = "aj-update-parity-common" } -hash-db = { git = "https://github.com/paritytech/trie", branch = "aj-update-parity-common" } \ No newline at end of file +finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } \ No newline at end of file From f5868c7e467e0a0cbd51d0521a475c9bc9ce725f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 09:41:17 +0100 Subject: [PATCH 301/503] Use kvdb-memrydb for broser demo (kvdb-web has been removed) --- 
utils/browser/Cargo.toml | 2 +- utils/browser/src/lib.rs | 4 +--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/utils/browser/Cargo.toml b/utils/browser/Cargo.toml index 65755f293e174..eac1730d5ce2d 100644 --- a/utils/browser/Cargo.toml +++ b/utils/browser/Cargo.toml @@ -21,7 +21,7 @@ console_error_panic_hook = "0.1.6" js-sys = "0.3.34" wasm-bindgen = "0.2.73" wasm-bindgen-futures = "0.4.18" -kvdb-web = "0.10.0" +kvdb-memorydb = "0.10.0" sp-database = { version = "3.0.0", path = "../../primitives/database" } sc-informant = { version = "0.9.0", path = "../../client/informant" } sc-service = { version = "0.9.0", path = "../../client/service", default-features = false } diff --git a/utils/browser/src/lib.rs b/utils/browser/src/lib.rs index d9d77210b9305..bd8969df4165c 100644 --- a/utils/browser/src/lib.rs +++ b/utils/browser/src/lib.rs @@ -77,9 +77,7 @@ where telemetry_external_transport: Some(transport), role: Role::Light, database: { - info!("Opening Indexed DB database '{}'...", name); - let db = kvdb_web::Database::open(name, 10).await?; - + let db = kvdb_memorydb::create(10); DatabaseConfig::Custom(sp_database::as_database(db)) }, keystore_remote: Default::default(), From 2200c4450ad477dd6e7e8d87bd8b733ec8f5438d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 09:41:41 +0100 Subject: [PATCH 302/503] Update memory-db and trie stuff --- primitives/trie/Cargo.toml | 4 ++-- test-utils/runtime/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/primitives/trie/Cargo.toml b/primitives/trie/Cargo.toml index bf91fff31b8b6..b5fff84e818ed 100644 --- a/primitives/trie/Cargo.toml +++ b/primitives/trie/Cargo.toml @@ -23,11 +23,11 @@ sp-std = { version = "3.0.0", default-features = false, path = "../std" } hash-db = { version = "0.15.2", default-features = false } trie-db = { version = "0.22.5", default-features = false } trie-root = { version = "0.16.0", default-features = false } -memory-db = { version = "0.26.0", 
default-features = false } +memory-db = { version = "0.27.0", default-features = false } sp-core = { version = "3.0.0", default-features = false, path = "../core" } [dev-dependencies] -trie-bench = "0.27.0" +trie-bench = "0.28.0" trie-standardmap = "0.15.2" criterion = "0.3.3" hex-literal = "0.3.1" diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 294c786511376..1f496d5c5ac8b 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -21,7 +21,7 @@ codec = { package = "parity-scale-codec", version = "2.0.0", default-features = scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-inherents = { version = "3.0.0", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "3.0.0", optional = true, path = "../../primitives/keyring" } -memory-db = { version = "0.26.0", default-features = false } +memory-db = { version = "0.27.0", default-features = false } sp-offchain = { path = "../../primitives/offchain", default-features = false, version = "3.0.0"} sp-core = { version = "3.0.0", default-features = false, path = "../../primitives/core" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } From a4b889a996299e72161358a84e4fbe3e210f33fe Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 09:42:13 +0100 Subject: [PATCH 303/503] Bump finality-grandpa --- Cargo.lock | 250 +++++++------------------ primitives/finality-grandpa/Cargo.toml | 2 +- 2 files changed, 65 insertions(+), 187 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0417156c5227e..4c900239814cd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -131,15 +131,6 @@ version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "15af2628f6890fe2609a3b91bef4c83450512802e59489f9c1cb1fa5df064a61" -[[package]] -name = "approx" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3f2a05fd1bd10b2527e20a2cd32d8873d115b8b39fe219ee25f42a8aca6ba278" -dependencies = [ - "num-traits", -] - [[package]] name = "approx" version = "0.5.0" @@ -190,9 +181,9 @@ checksum = "9d6e24d2cce90c53b948c46271bfb053e4bdc2db9b5d3f65e20f8cf28a1b7fc3" [[package]] name = "assert_cmd" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a88b6bd5df287567ffdf4ddf4d33060048e1068308e5f62d81c6f9824a045a48" +checksum = "aa2dfc8228c6260bf620fc5a341afa8e27edcde388b19ffc5732320bfe657eb2" dependencies = [ "bstr", "doc-comment", @@ -1740,17 +1731,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "fixed-hash" -version = "0.7.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" -dependencies = [ - "byteorder", - "rand 0.8.4", - "rustc-hex", - "static_assertions", -] - [[package]] name = "fixedbitset" version = "0.2.0" @@ -2205,7 +2185,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" dependencies = [ "gloo-timers", - "send_wrapper 0.4.0", + "send_wrapper", ] [[package]] @@ -2395,7 +2375,8 @@ dependencies = [ [[package]] name = "hash-db" version = "0.15.2" -source = "git+https://github.com/paritytech/trie?branch=aj-update-parity-common#7d1265578a23e26de7a1e40e4f593a2c0562f9a9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" [[package]] name = "hash256-std-hasher" @@ -2756,17 +2737,9 @@ dependencies = [ [[package]] name = "impl-codec" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df170efa359aebdd5cb7fe78edcc67107748e4737bdca8a8fb40d15ea7a877ed" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-codec" -version = "0.5.0" -source = 
"git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" +checksum = "161ebdfec3c8e3b52bf61c4f3550a1eea4f9579d10dc1b936f3171ebdcd6c443" dependencies = [ "parity-scale-codec", ] @@ -2780,14 +2753,6 @@ dependencies = [ "serde", ] -[[package]] -name = "impl-serde" -version = "0.3.1" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" -dependencies = [ - "serde", -] - [[package]] name = "impl-trait-for-tuples" version = "0.2.1" @@ -2817,9 +2782,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" dependencies = [ "cfg-if 1.0.0", - "js-sys", - "wasm-bindgen", - "web-sys", ] [[package]] @@ -3133,7 +3095,8 @@ dependencies = [ [[package]] name = "kvdb" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45a3f58dc069ec0e205a27f5b45920722a46faed802a0541538241af6228f512" dependencies = [ "parity-util-mem", "smallvec 1.6.1", @@ -3142,7 +3105,8 @@ dependencies = [ [[package]] name = "kvdb-memorydb" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3b6b85fc643f5acd0bffb2cc8a6d150209379267af0d41db72170021841f9f5" dependencies = [ "kvdb", "parity-util-mem", @@ -3152,7 +3116,8 @@ dependencies = [ [[package]] name = "kvdb-rocksdb" version = "0.12.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "431ca65516efab86e65d96281f750ebb54277dec656fcf6c027f3d1c0cb69e4c" dependencies = [ "fs-swap", "kvdb", @@ -3166,23 +3131,6 @@ 
dependencies = [ "smallvec 1.6.1", ] -[[package]] -name = "kvdb-web" -version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" -dependencies = [ - "futures 0.3.15", - "js-sys", - "kvdb", - "kvdb-memorydb", - "log", - "parity-util-mem", - "parking_lot 0.11.1", - "send_wrapper 0.5.0", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "lazy_static" version = "1.4.0" @@ -3411,7 +3359,7 @@ dependencies = [ "rand 0.7.3", "sha2 0.9.5", "smallvec 1.6.1", - "uint 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", + "uint", "unsigned-varint 0.7.0", "void", "wasm-timer", @@ -3723,11 +3671,11 @@ dependencies = [ [[package]] name = "linregress" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1ff7f341d23e1275eec0656a9a07225fcc86216c4322392868adffe59023d1a" +checksum = "1e6e407dadb4ca4b31bc69c27aff00e7ca4534fdcee855159b039a7cebb5f395" dependencies = [ - "nalgebra 0.27.1", + "nalgebra", "statrs", ] @@ -3848,7 +3796,7 @@ dependencies = [ "impl-trait-for-tuples", "max-encoded-len-derive", "parity-scale-codec", - "primitive-types 0.10.0", + "primitive-types 0.10.1", "rustversion", "trybuild", ] @@ -3914,11 +3862,12 @@ dependencies = [ [[package]] name = "memory-db" -version = "0.26.0" -source = "git+https://github.com/paritytech/trie?branch=aj-update-parity-common#7d1265578a23e26de7a1e40e4f593a2c0562f9a9" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de006e09d04fc301a5f7e817b75aa49801c4479a8af753764416b085337ddcc5" dependencies = [ "hash-db", - "hashbrown 0.9.1", + "hashbrown 0.11.2", "parity-util-mem", ] @@ -4113,36 +4062,21 @@ dependencies = [ "unsigned-varint 0.7.0", ] -[[package]] -name = "nalgebra" -version = "0.26.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "476d1d59fe02fe54c86356e91650cd892f392782a1cb9fc524ec84f7aa9e1d06" 
-dependencies = [ - "approx 0.4.0", - "matrixmultiply", - "num-complex 0.3.1", - "num-rational 0.3.2", - "num-traits", - "rand 0.8.4", - "rand_distr", - "simba 0.4.0", - "typenum", -] - [[package]] name = "nalgebra" version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "462fffe4002f4f2e1f6a9dcf12cc1a6fc0e15989014efc02a941d3e0f5dc2120" dependencies = [ - "approx 0.5.0", + "approx", "matrixmultiply", "nalgebra-macros", - "num-complex 0.4.0", + "num-complex", "num-rational 0.4.0", "num-traits", - "simba 0.5.1", + "rand 0.8.4", + "rand_distr", + "simba", "typenum", ] @@ -4670,15 +4604,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-complex" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "747d632c0c558b87dbabbe6a82f3b4ae03720d0646ac5b7b4dae89394be5f2c5" -dependencies = [ - "num-traits", -] - [[package]] name = "num-complex" version = "0.4.0" @@ -4710,17 +4635,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "num-rational" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - [[package]] name = "num-rational" version = "0.4.0" @@ -5928,22 +5842,23 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "2.1.3" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b310f220c335f9df1b3d2e9fbe3890bbfeef5030dad771620f48c5c229877cd3" +checksum = "8975095a2a03bbbdc70a74ab11a4f76a6d0b84680d87c68d722531b0ac28e8a9" dependencies = [ "arrayvec 0.7.1", "bitvec", "byte-slice-cast", + "impl-trait-for-tuples", "parity-scale-codec-derive", "serde", ] [[package]] name = "parity-scale-codec-derive" -version = "2.1.3" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"81038e13ca2c32587201d544ea2e6b6c47120f1e4eae04478f9f60b6bcb89145" +checksum = "40dbbfef7f0a1143c5b06e0d76a6278e25dac0bc1af4be51a0fbb73f07e7ad09" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", @@ -5979,14 +5894,15 @@ dependencies = [ [[package]] name = "parity-util-mem" version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ad6f1acec69b95caf435bbd158d486e5a0a44fcf51531e84922c59ff09e8457" dependencies = [ "cfg-if 1.0.0", "hashbrown 0.11.2", "impl-trait-for-tuples", - "parity-util-mem-derive 0.1.0 (git+https://github.com/paritytech/parity-common?branch=master)", + "parity-util-mem-derive", "parking_lot 0.11.1", - "primitive-types 0.10.0", + "primitive-types 0.10.1", "smallvec 1.6.1", "winapi 0.3.9", ] @@ -6002,16 +5918,6 @@ dependencies = [ "synstructure", ] -[[package]] -name = "parity-util-mem-derive" -version = "0.1.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" -dependencies = [ - "proc-macro2", - "syn", - "synstructure", -] - [[package]] name = "parity-wasm" version = "0.32.0" @@ -6441,21 +6347,22 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" dependencies = [ - "fixed-hash 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "impl-codec 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", - "uint 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", + "fixed-hash", + "impl-codec", + "uint", ] [[package]] name = "primitive-types" -version = "0.10.0" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"05e4722c697a58a99d5d06a08c30821d7c082a4632198de1eaa5a6c22ef42373" dependencies = [ - "fixed-hash 0.7.0 (git+https://github.com/paritytech/parity-common?branch=master)", - "impl-codec 0.5.0 (git+https://github.com/paritytech/parity-common?branch=master)", - "impl-serde 0.3.1 (git+https://github.com/paritytech/parity-common?branch=master)", + "fixed-hash", + "impl-codec", + "impl-serde", "scale-info", - "uint 0.9.1 (git+https://github.com/paritytech/parity-common?branch=master)", + "uint", ] [[package]] @@ -8267,7 +8174,7 @@ dependencies = [ "log", "parity-scale-codec", "parity-util-mem", - "parity-util-mem-derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-util-mem-derive", "parking_lot 0.11.1", "sc-client-api", "sp-core", @@ -8614,12 +8521,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" -[[package]] -name = "send_wrapper" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "930c0acf610d3fdb5e2ab6213019aaa04e227ebe9547b0649ba599b16d788bd7" - [[package]] name = "serde" version = "1.0.126" @@ -8763,26 +8664,14 @@ version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c19772be3c4dd2ceaacf03cb41d5885f2a02c4d8804884918e3a258480803335" -[[package]] -name = "simba" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5132a955559188f3d13c9ba831e77c802ddc8782783f050ed0c52f5988b95f4c" -dependencies = [ - "approx 0.4.0", - "num-complex 0.3.1", - "num-traits", - "paste 1.0.5", -] - [[package]] name = "simba" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e82063457853d00243beda9952e910b82593e4b07ae9f721b9278a99a0d3d5c" dependencies = [ - "approx 0.5.0", - "num-complex 0.4.0", + "approx", + "num-complex", "num-traits", "paste 1.0.5", ] @@ -9155,7 +9044,7 @@ 
dependencies = [ "hash256-std-hasher", "hex", "hex-literal", - "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-serde", "lazy_static", "libsecp256k1", "log", @@ -9166,7 +9055,7 @@ dependencies = [ "parity-util-mem", "parking_lot 0.11.1", "pretty_assertions 0.6.1", - "primitive-types 0.10.0", + "primitive-types 0.10.1", "rand 0.7.3", "rand_chacha 0.2.2", "regex", @@ -9222,7 +9111,7 @@ dependencies = [ name = "sp-finality-grandpa" version = "3.0.0" dependencies = [ - "finality-grandpa 0.14.1", + "finality-grandpa 0.15.0", "log", "parity-scale-codec", "scale-info", @@ -9415,7 +9304,7 @@ version = "3.0.0" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", - "primitive-types 0.10.0", + "primitive-types 0.10.1", "rustversion", "sp-core", "sp-externalities", @@ -9559,7 +9448,7 @@ version = "3.0.0" name = "sp-storage" version = "3.0.0" dependencies = [ - "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-serde", "parity-scale-codec", "ref-cast", "serde", @@ -9688,7 +9577,7 @@ dependencies = [ name = "sp-version" version = "3.0.0" dependencies = [ - "impl-serde 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", + "impl-serde", "parity-scale-codec", "scale-info", "serde", @@ -9739,13 +9628,13 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "statrs" -version = "0.14.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e0c1f144861fbfd2a8cc82d564ccbf7fb3b7834d4fa128b84e9c2a73371aead" +checksum = "05bdbb8e4e78216a85785a85d3ec3183144f98d0097b9281802c019bb07a6f05" dependencies = [ - "approx 0.4.0", + "approx", "lazy_static", - "nalgebra 0.26.2", + "nalgebra", "num-traits", "rand 0.8.4", ] @@ -9852,7 +9741,7 @@ dependencies = [ "futures-timer 3.0.2", "getrandom 0.2.3", "js-sys", - "kvdb-web", + "kvdb-memorydb", "libp2p-wasm-ext", "log", "rand 0.7.3", @@ -10759,9 +10648,9 @@ checksum = 
"a7f741b240f1a48843f9b8e0444fb55fb2a4ff67293b50a9179dfd5ea67f8d41" [[package]] name = "trie-bench" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "568257edb909a5c532b1f4ab38ee6b5dedfbf8775be6a55a29020513ebe3e072" +checksum = "4edd9bdf0c2e08fd77c0fb2608179cac7ebed997ae18f58d47a2d96425ff51f0" dependencies = [ "criterion", "hash-db", @@ -10929,17 +10818,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "uint" -version = "0.9.1" -source = "git+https://github.com/paritytech/parity-common?branch=master#9e0e53a473aba2226a5c115f802762378fdf9fe7" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - [[package]] name = "unicase" version = "2.6.0" @@ -10969,9 +10847,9 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb0d2e7be6ae3a5fa87eed5fb451aff96f2573d2694942e40543ae0bbe19c796" +checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" [[package]] name = "unicode-width" diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 8feb5cabf50b4..46e2737e6cd2b 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } -grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } +grandpa = { package = "finality-grandpa", version = "0.15.0", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-api = { 
version = "3.0.0", default-features = false, path = "../api" } From bc3d2cfd594f5e40d487ceb2917603fb73e86a3a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 10:45:38 +0100 Subject: [PATCH 304/503] Update finality-grandpa --- Cargo.lock | 27 ++++++--------------------- frame/grandpa/Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4c900239814cd..9530428ebd288 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1688,25 +1688,10 @@ dependencies = [ "log", ] -[[package]] -name = "finality-grandpa" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74a1bfdcc776e63e49f741c7ce6116fa1b887e8ac2e3ccb14dd4aa113e54feb9" -dependencies = [ - "either", - "futures 0.3.15", - "futures-timer 3.0.2", - "log", - "num-traits", - "parity-scale-codec", - "parking_lot 0.11.1", -] - [[package]] name = "finality-grandpa" version = "0.15.0" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#642d7a1b33e8f14317e0561b51c37324f027dfc6" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#bc16739749bb1024cb172cf462f15d7a8db8952d" dependencies = [ "either", "futures 0.3.15", @@ -5148,7 +5133,7 @@ dependencies = [ name = "pallet-grandpa" version = "3.1.0" dependencies = [ - "finality-grandpa 0.14.1", + "finality-grandpa", "frame-benchmarking", "frame-election-provider-support", "frame-support", @@ -7644,7 +7629,7 @@ dependencies = [ "async-trait", "derive_more", "dyn-clone", - "finality-grandpa 0.15.0", + "finality-grandpa", "fork-tree", "futures 0.3.15", "futures-timer 3.0.2", @@ -7690,7 +7675,7 @@ name = "sc-finality-grandpa-rpc" version = "0.9.0" dependencies = [ "derive_more", - "finality-grandpa 0.15.0", + "finality-grandpa", "futures 0.3.15", "jsonrpc-core", "jsonrpc-core-client", @@ -7720,7 +7705,7 @@ name = "sc-finality-grandpa-warp-sync" version = "0.9.0" dependencies = [ "derive_more", - "finality-grandpa 
0.15.0", + "finality-grandpa", "futures 0.3.15", "log", "num-traits", @@ -9111,7 +9096,7 @@ dependencies = [ name = "sp-finality-grandpa" version = "3.0.0" dependencies = [ - "finality-grandpa 0.15.0", + "finality-grandpa", "log", "parity-scale-codec", "scale-info", diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 5b6d890cac612..8956c152a6bfd 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -32,7 +32,7 @@ log = { version = "0.4.14", default-features = false } [dev-dependencies] frame-benchmarking = { version = "3.1.0", path = "../benchmarking" } -grandpa = { package = "finality-grandpa", version = "0.14.1", features = ["derive-codec"] } +grandpa = { package = "finality-grandpa", version = "0.15.0", features = ["derive-codec"] } sp-keyring = { version = "3.0.0", path = "../../primitives/keyring" } pallet-balances = { version = "3.0.0", path = "../balances" } pallet-offences = { version = "3.0.0", path = "../offences" } From 6217f9212e5ba370bc3307f8a75c403b9de14fde Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 11:52:08 +0100 Subject: [PATCH 305/503] Remove origin todo --- frame/support/test/tests/pallet_instance.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index c5ea43f95432f..960cfaaabd290 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -162,7 +162,7 @@ pub mod pallet { #[pallet::origin] #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, scale_info::TypeInfo)] - #[scale_info(skip_type_params(T, I))] // todo: [AJ] could/should this be added automatically? 
+ #[scale_info(skip_type_params(T, I))] pub struct Origin(PhantomData<(T, I)>); #[pallet::validate_unsigned] From bc8f37543d7e02ae71934aa6bfb9c1ed07513896 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 11:56:20 +0100 Subject: [PATCH 306/503] Remove ui test todo --- .../test/tests/pallet_ui/call_argument_invalid_bound_2.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs index 25ac0bcb4b393..d981b55c48620 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.rs @@ -5,7 +5,6 @@ mod pallet { #[pallet::config] pub trait Config: frame_system::Config { - // todo: [AJ] should we rather modify the stderr to expect the missing trait bound error? type Bar: scale_info::TypeInfo; } From 717bd722dec256afc287894a8aa30d3dee31e461 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 15:26:53 +0100 Subject: [PATCH 307/503] Fix up incorrect scale_info path --- Cargo.lock | 29 +++++++++-------------------- primitives/runtime/src/traits.rs | 2 +- 2 files changed, 10 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9530428ebd288..39c61709784c7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3781,7 +3781,7 @@ dependencies = [ "impl-trait-for-tuples", "max-encoded-len-derive", "parity-scale-codec", - "primitive-types 0.10.1", + "primitive-types", "rustversion", "trybuild", ] @@ -5887,7 +5887,7 @@ dependencies = [ "impl-trait-for-tuples", "parity-util-mem-derive", "parking_lot 0.11.1", - "primitive-types 0.10.1", + "primitive-types", "smallvec 1.6.1", "winapi 0.3.9", ] @@ -6326,17 +6326,6 @@ dependencies = [ "output_vt100", ] -[[package]] -name = "primitive-types" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2415937401cb030a2a0a4d922483f945fa068f52a7dbb22ce0fe5f2b6f6adace" -dependencies = [ - "fixed-hash", - "impl-codec", - "uint", -] - [[package]] name = "primitive-types" version = "0.10.1" @@ -8844,7 +8833,7 @@ dependencies = [ "integer-sqrt", "num-traits", "parity-scale-codec", - "primitive-types 0.9.0", + "primitive-types", "rand 0.7.3", "scale-info", "serde", @@ -8861,7 +8850,7 @@ dependencies = [ "honggfuzz", "num-bigint", "num-traits", - "primitive-types 0.9.0", + "primitive-types", "sp-arithmetic", ] @@ -9040,7 +9029,7 @@ dependencies = [ "parity-util-mem", "parking_lot 0.11.1", "pretty_assertions 0.6.1", - "primitive-types 0.10.1", + "primitive-types", "rand 0.7.3", "rand_chacha 0.2.2", "regex", @@ -9289,7 +9278,7 @@ version = "3.0.0" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", - "primitive-types 0.10.1", + "primitive-types", "rustversion", "sp-core", "sp-externalities", @@ -10649,12 +10638,12 @@ dependencies = [ [[package]] name = "trie-db" -version = "0.22.5" +version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd81fe0c8bc2b528a51c9d2c31dae4483367a26a723a3c9a4a8120311d7774e3" +checksum = "9eac131e334e81b6b3be07399482042838adcd7957aa0010231d0813e39e02fa" dependencies = [ "hash-db", - "hashbrown 0.9.1", + "hashbrown 0.11.2", "log", "rustc-hex", "smallvec 1.6.1", diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 6d1e27cbb682c..c72526aaaf9c0 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -1203,7 +1203,7 @@ macro_rules! 
impl_opaque_keys_inner { Default, Clone, PartialEq, Eq, $crate::codec::Encode, $crate::codec::Decode, - $crate::TypeInfo, + $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] pub struct $name { From 41311f854a778c0499a2c93c4b0612a620a3570e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 5 Jul 2021 17:12:19 +0100 Subject: [PATCH 308/503] Remove unused Call metadata stuff --- .../src/construct_runtime/expand/call.rs | 32 ------- .../procedural/src/pallet/expand/call.rs | 15 ---- frame/support/src/dispatch.rs | 84 +------------------ frame/support/src/lib.rs | 2 +- frame/support/src/traits.rs | 2 +- frame/support/src/traits/metadata.rs | 27 ------ frame/support/test/tests/construct_runtime.rs | 33 -------- frame/support/test/tests/pallet.rs | 7 +- frame/support/test/tests/pallet_instance.rs | 12 +-- 9 files changed, 6 insertions(+), 208 deletions(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/call.rs b/frame/support/procedural/src/construct_runtime/expand/call.rs index b6d886abff332..eb335e11d5b76 100644 --- a/frame/support/procedural/src/construct_runtime/expand/call.rs +++ b/frame/support/procedural/src/construct_runtime/expand/call.rs @@ -67,38 +67,6 @@ pub fn expand_outer_dispatch( } } } - impl #scrate::dispatch::GetCallMetadata for Call { - fn get_call_metadata(&self) -> #scrate::dispatch::CallMetadata { - use #scrate::dispatch::GetCallName; - match self { - #( - #variant_patterns => { - let function_name = call.get_call_name(); - let pallet_name = stringify!(#pallet_names); - #scrate::dispatch::CallMetadata { function_name, pallet_name } - } - )* - } - } - - fn get_module_names() -> &'static [&'static str] { - &[#( - stringify!(#pallet_names), - )*] - } - - fn get_call_names(module: &str) -> &'static [&'static str] { - use #scrate::dispatch::{Callable, GetCallName}; - match module { - #( - stringify!(#pallet_names) => - <<#pallet_names as Callable<#runtime>>::Call - as GetCallName>::get_call_names(), - )* - _ => unreachable!(), 
- } - } - } impl #scrate::dispatch::Dispatchable for Call { type Origin = Origin; type Config = Call; diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index e6da9fbc8aa70..5f98ac5cdcd3b 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -179,21 +179,6 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { } } - impl<#type_impl_gen> #frame_support::dispatch::GetCallName for #call_ident<#type_use_gen> - #where_clause - { - fn get_call_name(&self) -> &'static str { - match *self { - #( Self::#fn_name(..) => stringify!(#fn_name), )* - Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), - } - } - - fn get_call_names() -> &'static [&'static str] { - &[ #( stringify!(#fn_name), )* ] - } - } - impl<#type_impl_gen> #frame_support::traits::UnfilteredDispatchable for #call_ident<#type_use_gen> #where_clause diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index c7c1b12db6d64..fc52db6df07c1 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -25,9 +25,7 @@ pub use crate::weights::{ PaysFee, PostDispatchInfo, WithPostDispatchInfo, }; pub use sp_runtime::{traits::Dispatchable, DispatchError}; -pub use crate::traits::{ - CallMetadata, GetCallMetadata, GetCallName, UnfilteredDispatchable, GetPalletVersion, -}; +pub use crate::traits::{UnfilteredDispatchable, GetPalletVersion}; /// The return typ of a `Dispatchable` in frame. When returned explicitly from /// a dispatchable function it allows overriding the default `PostDispatchInfo` @@ -1989,32 +1987,6 @@ macro_rules! decl_module { } } - // Implement GetCallName for the Call. 
- impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $crate::dispatch::GetCallName - for $call_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* - { - fn get_call_name(&self) -> &'static str { - match *self { - $( - $call_type::$fn_name( $( ref $param_name ),* ) => { - // Don't generate any warnings for unused variables - let _ = ( $( $param_name ),* ); - stringify!($fn_name) - }, - )* - $call_type::__PhantomItem(_, _) => unreachable!("__PhantomItem should never be used."), - } - } - - fn get_call_names() -> &'static [&'static str] { - &[ - $( - stringify!($fn_name), - )* - ] - } - } - // Bring `GetPalletVersion` into scope to make it easily usable. pub use $crate::traits::GetPalletVersion as _; // Implement `GetPalletVersion` for `Module` @@ -2190,36 +2162,7 @@ macro_rules! impl_outer_dispatch { } } } - impl $crate::dispatch::GetCallMetadata for $call_type { - fn get_call_metadata(&self) -> $crate::dispatch::CallMetadata { - use $crate::dispatch::GetCallName; - match self { - $( $call_type::$camelcase(call) => { - let function_name = call.get_call_name(); - let pallet_name = stringify!($camelcase); - $crate::dispatch::CallMetadata { function_name, pallet_name } - }, )* - } - } - - fn get_module_names() -> &'static [&'static str] { - &[$( - stringify!($camelcase), - )*] - } - fn get_call_names(module: &str) -> &'static [&'static str] { - use $crate::dispatch::{Callable, GetCallName}; - match module { - $( - stringify!($camelcase) => - <<$camelcase as Callable<$runtime>>::Call - as GetCallName>::get_call_names(), - )* - _ => unreachable!(), - } - } - } impl $crate::dispatch::Dispatchable for $call_type { type Origin = $origin; type Config = $call_type; @@ -2645,10 +2588,7 @@ macro_rules! 
__check_reserved_fn_name { mod tests { use super::*; use crate::weights::{DispatchInfo, DispatchClass, Pays, RuntimeDbWeight}; - use crate::traits::{ - CallMetadata, GetCallMetadata, GetCallName, OnInitialize, OnFinalize, OnIdle, OnRuntimeUpgrade, - IntegrityTest, Get, PalletInfo, - }; + use crate::traits::{OnInitialize, OnFinalize, OnIdle, OnRuntimeUpgrade, IntegrityTest, Get, PalletInfo}; use crate::metadata::*; pub trait Config: system::Config + Sized where Self::AccountId: From { } @@ -2930,26 +2870,6 @@ mod tests { ); } - #[test] - fn call_name() { - let name = Call::::aux_3().get_call_name(); - assert_eq!("aux_3", name); - } - - #[test] - fn call_metadata() { - let call = OuterCall::Test(Call::::aux_3()); - let metadata = call.get_call_metadata(); - let expected = CallMetadata { function_name: "aux_3".into(), pallet_name: "Test".into() }; - assert_eq!(metadata, expected); - } - - #[test] - fn get_call_names() { - let call_names = Call::::get_call_names(); - assert_eq!(["aux_0", "aux_1", "aux_2", "aux_3", "aux_4", "aux_5", "operational"], call_names); - } - #[test] fn get_module_names() { let module_names = OuterCall::get_module_names(); diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 56ff81078ed21..b7684e7b964e0 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1513,7 +1513,7 @@ pub mod pallet_prelude { /// /// The macro create an enum `Call` with one variant per dispatchable. This enum implements: /// `Clone`, `Eq`, `PartialEq`, `Debug` (with stripped implementation in `not("std")`), `Encode`, -/// `Decode`, `GetDispatchInfo`, `GetCallName`, `UnfilteredDispatchable`. +/// `Decode`, `GetDispatchInfo`, `UnfilteredDispatchable`. /// /// The macro implement on `Pallet`, the `Callable` trait and a function `call_functions` which /// returns the dispatchable metadatas. 
diff --git a/frame/support/src/traits.rs b/frame/support/src/traits.rs index 4eb630c6d9d79..a05cad1867dae 100644 --- a/frame/support/src/traits.rs +++ b/frame/support/src/traits.rs @@ -61,7 +61,7 @@ pub use randomness::Randomness; mod metadata; pub use metadata::{ - CallMetadata, GetCallMetadata, GetCallName, PalletInfo, PalletVersion, GetPalletVersion, + PalletInfo, PalletVersion, GetPalletVersion, PALLET_VERSION_STORAGE_KEY_POSTFIX, PalletInfoAccess, }; diff --git a/frame/support/src/traits/metadata.rs b/frame/support/src/traits/metadata.rs index b13a0464b30c0..a95690d8ac02d 100644 --- a/frame/support/src/traits/metadata.rs +++ b/frame/support/src/traits/metadata.rs @@ -41,33 +41,6 @@ pub trait PalletInfoAccess { fn name() -> &'static str; } -/// The function and pallet name of the Call. -#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug)] -pub struct CallMetadata { - /// Name of the function. - pub function_name: &'static str, - /// Name of the pallet to which the function belongs. - pub pallet_name: &'static str, -} - -/// Gets the function name of the Call. -pub trait GetCallName { - /// Return all function names. - fn get_call_names() -> &'static [&'static str]; - /// Return the function name of the Call. - fn get_call_name(&self) -> &'static str; -} - -/// Gets the metadata for the Call - function name and pallet name. -pub trait GetCallMetadata { - /// Return all module names. - fn get_module_names() -> &'static [&'static str]; - /// Return all function names for the given `module`. - fn get_call_names(module: &str) -> &'static [&'static str]; - /// Return a [`CallMetadata`], containing function and pallet name of the Call. - fn get_call_metadata(&self) -> CallMetadata; -} - /// The storage key postfix that is used to store the [`PalletVersion`] per pallet. 
/// /// The full storage key is built by using: diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index b6cbc881b151a..2f5ab8b11f881 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -530,39 +530,6 @@ fn call_weight_should_attach_to_call_enum() { ); } -#[test] -fn call_name() { - use frame_support::dispatch::GetCallName; - let name = module3::Call::::aux_4().get_call_name(); - assert_eq!("aux_4", name); -} - -#[test] -fn call_metadata() { - use frame_support::dispatch::{CallMetadata, GetCallMetadata}; - let call = Call::Module3(module3::Call::::aux_4()); - let metadata = call.get_call_metadata(); - let expected = CallMetadata { function_name: "aux_4".into(), pallet_name: "Module3".into() }; - assert_eq!(metadata, expected); -} - -#[test] -fn get_call_names() { - use frame_support::dispatch::GetCallName; - let call_names = module3::Call::::get_call_names(); - assert_eq!(["fail", "aux_1", "aux_2", "aux_3", "aux_4", "operational"], call_names); -} - -#[test] -fn get_module_names() { - use frame_support::dispatch::GetCallMetadata; - let module_names = Call::get_module_names(); - assert_eq!([ - "System", "Module1_1", "Module2", "Module1_2", "NestedModule3", "Module3", - "Module1_4", "Module1_6", "Module1_7", "Module1_8", "Module1_9", - ], module_names); -} - #[test] fn call_subtype_conversion() { use frame_support::{dispatch::CallableCallFor, traits::IsSubType}; diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 6a3b744facb4c..d70aa54eb3d2c 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -18,7 +18,7 @@ use frame_support::{ weights::{DispatchInfo, DispatchClass, Pays, GetDispatchInfo}, traits::{ - GetCallName, OnInitialize, OnFinalize, OnRuntimeUpgrade, GetPalletVersion, OnGenesis, + OnInitialize, OnFinalize, OnRuntimeUpgrade, GetPalletVersion, OnGenesis, MaxEncodedLen, 
}, dispatch::{UnfilteredDispatchable, Parameter}, @@ -532,11 +532,6 @@ fn call_expand() { pays_fee: Pays::Yes, } ); - assert_eq!(call_foo.get_call_name(), "foo"); - assert_eq!( - pallet::Call::::get_call_names(), - &["foo", "foo_transactional", "foo_no_post_info"], - ); } #[test] diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 960cfaaabd290..81192530ac31d 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -18,7 +18,7 @@ use frame_support::{ weights::{DispatchInfo, DispatchClass, Pays, GetDispatchInfo}, traits::{ - GetCallName, GetPalletVersion, OnInitialize, OnFinalize, OnRuntimeUpgrade, OnGenesis, + GetPalletVersion, OnInitialize, OnFinalize, OnRuntimeUpgrade, OnGenesis, }, dispatch::UnfilteredDispatchable, storage::unhashed, @@ -323,11 +323,6 @@ fn call_expand() { pays_fee: Pays::Yes, } ); - assert_eq!(call_foo.get_call_name(), "foo"); - assert_eq!( - pallet::Call::::get_call_names(), - &["foo", "foo_transactional"], - ); let call_foo = pallet::Call::::foo(3); assert_eq!( @@ -338,11 +333,6 @@ fn call_expand() { pays_fee: Pays::Yes, } ); - assert_eq!(call_foo.get_call_name(), "foo"); - assert_eq!( - pallet::Call::::get_call_names(), - &["foo", "foo_transactional"], - ); } #[test] From 8ebb9802ddcf47dacab5c44652c85edf2ea3cd33 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 6 Jul 2021 09:40:56 +0100 Subject: [PATCH 309/503] Remove TypeInfo derive on Pallet --- frame/support/procedural/src/pallet/expand/pallet_struct.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index 595a79b3853b0..a0eb6130ff4ea 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -72,13 +72,9 @@ pub fn expand_pallet_struct(def: &mut Def) -> 
proc_macro2::TokenStream { #frame_support::EqNoBound, #frame_support::PartialEqNoBound, #frame_support::RuntimeDebugNoBound, - #frame_support::scale_info::TypeInfo, )] )); - // skip requirement for type params to implement `TypeInfo` - pallet_item.attrs.push(syn::parse_quote!( #[scale_info(skip_type_params(#type_use_gen))] )); - let pallet_error_metadata = if let Some(error_def) = &def.error { let error_ident = &error_def.error; quote::quote_spanned!(def.pallet_struct.attr_span => From bdfc96d829c33ca9cf12029e5ee9def9d9121ca6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 6 Jul 2021 10:34:51 +0100 Subject: [PATCH 310/503] Cargo.lock after merge --- Cargo.lock | 36 ++++-------------------------------- 1 file changed, 4 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 39c61709784c7..90d75803f4d0f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1858,7 +1858,6 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "log", - "max-encoded-len", "once_cell", "parity-scale-codec", "parity-util-mem", @@ -2767,6 +2766,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" dependencies = [ "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", ] [[package]] @@ -3773,29 +3775,6 @@ dependencies = [ "rawpointer", ] -[[package]] -name = "max-encoded-len" -version = "3.0.0" -dependencies = [ - "frame-support", - "impl-trait-for-tuples", - "max-encoded-len-derive", - "parity-scale-codec", - "primitive-types", - "rustversion", - "trybuild", -] - -[[package]] -name = "max-encoded-len-derive" -version = "3.0.0" -dependencies = [ - "proc-macro-crate 1.0.0", - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "maybe-uninit" version = "2.0.0" @@ -4155,6 +4134,7 @@ dependencies = [ "jsonrpc-core", "libp2p", "node-cli", + "parking_lot 0.11.1", "sc-rpc-api", "serde", "serde_json", @@ -4371,7 +4351,6 @@ dependencies = [ "frame-try-runtime", 
"hex-literal", "log", - "max-encoded-len", "node-primitives", "pallet-assets", "pallet-authority-discovery", @@ -4728,7 +4707,6 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "max-encoded-len", "pallet-balances", "parity-scale-codec", "scale-info", @@ -4845,7 +4823,6 @@ dependencies = [ "frame-support", "frame-system", "log", - "max-encoded-len", "pallet-transaction-payment", "parity-scale-codec", "scale-info", @@ -5398,7 +5375,6 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "max-encoded-len", "pallet-balances", "pallet-utility", "parity-scale-codec", @@ -6335,7 +6311,6 @@ dependencies = [ "fixed-hash", "impl-codec", "impl-serde", - "scale-info", "uint", ] @@ -8804,7 +8779,6 @@ dependencies = [ name = "sp-application-crypto" version = "3.0.0" dependencies = [ - "max-encoded-len", "parity-scale-codec", "scale-info", "serde", @@ -9022,7 +8996,6 @@ dependencies = [ "lazy_static", "libsecp256k1", "log", - "max-encoded-len", "merlin", "num-traits", "parity-scale-codec", @@ -9253,7 +9226,6 @@ dependencies = [ "hash256-std-hasher", "impl-trait-for-tuples", "log", - "max-encoded-len", "parity-scale-codec", "parity-util-mem", "paste 1.0.5", From 4a550a10ed6e13d708920645313fa472dc96838b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 6 Jul 2021 11:26:15 +0100 Subject: [PATCH 311/503] Restore scale-info feature --- Cargo.lock | 1 + primitives/core/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 90d75803f4d0f..7d5ff582f94e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6311,6 +6311,7 @@ dependencies = [ "fixed-hash", "impl-codec", "impl-serde", + "scale-info", "uint", ] diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index fe100c729c0da..37809f151f909 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -19,7 +19,7 @@ scale-info = { version = "0.9.0", default-features = false, features = ["derive" log = { 
version = "0.4.11", default-features = false } serde = { version = "1.0.101", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } -primitive-types = { version = "0.10.0", default-features = false, features = ["codec"] } +primitive-types = { version = "0.10.0", default-features = false, features = ["codec", "scale-info"] } impl-serde = { version = "0.3.0", optional = true } wasmi = { version = "0.9.0", optional = true } hash-db = { version = "0.15.2", default-features = false } From 5ec82c1ce64228d86555b23e81aef8e4e8687a41 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 6 Jul 2021 11:43:04 +0100 Subject: [PATCH 312/503] Fully qualify TypeInfo derive --- frame/support/src/origin.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/origin.rs b/frame/support/src/origin.rs index dd775fb27524f..b013011a635cc 100644 --- a/frame/support/src/origin.rs +++ b/frame/support/src/origin.rs @@ -271,7 +271,7 @@ macro_rules! impl_outer_origin { } $crate::paste::item! { - #[derive(Clone, PartialEq, Eq, $crate::RuntimeDebug, $crate::codec::Encode, $crate::codec::Decode, $crate::TypeInfo)] + #[derive(Clone, PartialEq, Eq, $crate::RuntimeDebug, $crate::codec::Encode, $crate::codec::Decode, $crate::scale_info::TypeInfo)] $(#[$attr])* #[allow(non_camel_case_types)] pub enum $caller_name { From 8b5ba7c1859ccf772492286385b629a94486eac0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 6 Jul 2021 11:52:49 +0100 Subject: [PATCH 313/503] Skip PendingSwap T --- frame/atomic-swap/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/atomic-swap/src/lib.rs b/frame/atomic-swap/src/lib.rs index b37862bae74c3..ed5013fe3baa9 100644 --- a/frame/atomic-swap/src/lib.rs +++ b/frame/atomic-swap/src/lib.rs @@ -56,6 +56,7 @@ use sp_runtime::RuntimeDebug; /// Pending atomic swap operation. 
#[derive(Clone, Eq, PartialEq, RuntimeDebugNoBound, Encode, Decode, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct PendingSwap { /// Source of the swap. pub source: T::AccountId, From e6d7970c8ebad63152698bdeeae42c9611c114bd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 6 Jul 2021 11:55:33 +0100 Subject: [PATCH 314/503] Add missing skip_type_params attr --- frame/example/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/example/src/lib.rs b/frame/example/src/lib.rs index 522dd0fb6ab1d..6f046d3009cab 100644 --- a/frame/example/src/lib.rs +++ b/frame/example/src/lib.rs @@ -692,6 +692,7 @@ impl Pallet { /// Additionally, it drops any transaction with an encoded length higher than 200 bytes. No /// particular reason why, just to demonstrate the power of signed extensions. #[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct WatchDummy(PhantomData); impl sp_std::fmt::Debug for WatchDummy { From 833c821216f635e154b7740bb4513a11d208a512 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 6 Jul 2021 16:23:23 +0100 Subject: [PATCH 315/503] metadata docs features --- bin/node/runtime/Cargo.toml | 5 +++++ frame/support/Cargo.toml | 3 +-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 3a05cbec085a6..62b1f281e0722 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -252,3 +252,8 @@ try-runtime = [ contracts-unstable-interface = [ "pallet-contracts/unstable-interface" ] +# Enable docs to be included in metadata +metadata-docs = [ + "frame-support/metadata-docs", + "scale-info/docs" +] diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 6eb701e586a59..3f0f294d52c8b 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -41,9 +41,8 @@ frame-system = { version = "3.0.0", path = "../system" } parity-util-mem = { version = "0.10.0", default-features = 
false, features = ["primitive-types"] } [features] -default = ["std"] +default = ["std", "metadata-docs"] std = [ - "metadata-docs", "once_cell", "serde", "scale-info/std", From 6558710d6db4d9b4ef548a1816a18a113c71d66f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 6 Jul 2021 16:52:13 +0100 Subject: [PATCH 316/503] Reduce pallet event attribute to struct --- .../procedural/src/pallet/expand/event.rs | 11 +++- .../procedural/src/pallet/parse/event.rs | 64 +++++++------------ 2 files changed, 30 insertions(+), 45 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 304547e99ce2b..12554fb8781e5 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -15,9 +15,12 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::pallet::Def; -use frame_support_procedural_tools::get_doc_literals; use crate::COUNTER; +use crate::pallet::{ + Def, + parse::event::PalletEventDepositAttr, +}; +use frame_support_procedural_tools::get_doc_literals; use syn::{spanned::Spanned, Ident}; /// * Add __Ignore variant on Event @@ -121,12 +124,14 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { // skip requirement for type params to implement `TypeInfo` event_item.attrs.push(syn::parse_quote!( #[scale_info(skip_type_params(#event_use_gen))] )); - let deposit_event = if let Some((fn_vis, fn_span)) = &event.deposit_event { + let deposit_event = if let Some(deposit_event) = &event.deposit_event { let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); let trait_use_gen = &def.trait_use_generics(event.attr_span); let type_impl_gen = &def.type_impl_generics(event.attr_span); let type_use_gen = &def.type_use_generics(event.attr_span); + let PalletEventDepositAttr { fn_vis, fn_span, .. 
} = deposit_event; + quote::quote_spanned!(*fn_span => impl<#type_impl_gen> Pallet<#type_use_gen> #completed_where_clause { #fn_vis fn deposit_event(event: Event<#event_use_gen>) { diff --git a/frame/support/procedural/src/pallet/parse/event.rs b/frame/support/procedural/src/pallet/parse/event.rs index c0ed8eef0fe84..ce4b90b16030e 100644 --- a/frame/support/procedural/src/pallet/parse/event.rs +++ b/frame/support/procedural/src/pallet/parse/event.rs @@ -38,7 +38,7 @@ pub struct EventDef { /// The kind of generic the type `Event` has. pub gen_kind: super::GenericKind, /// Whether the function `deposit_event` must be generated. - pub deposit_event: Option<(syn::Visibility, proc_macro2::Span)>, + pub deposit_event: Option, /// Where clause used in event definition. pub where_clause: Option, /// The span of the pallet::event attribute. @@ -49,26 +49,15 @@ pub struct EventDef { /// /// Syntax is: /// * `#[pallet::generate_deposit($vis fn deposit_event)]` -enum PalletEventAttr { - // todo: [AJ] could make this just a struct now it is a single variant - DepositEvent { - fn_vis: syn::Visibility, - // Span for the keyword deposit_event - fn_span: proc_macro2::Span, - // Span of the attribute - span: proc_macro2::Span, - }, +pub struct PalletEventDepositAttr { + pub fn_vis: syn::Visibility, + // Span for the keyword deposit_event + pub fn_span: proc_macro2::Span, + // Span of the attribute + pub span: proc_macro2::Span, } -impl PalletEventAttr { - fn span(&self) -> proc_macro2::Span { - match self { - Self::DepositEvent { span, .. 
} => *span, - } - } -} - -impl syn::parse::Parse for PalletEventAttr { +impl syn::parse::Parse for PalletEventDepositAttr { fn parse(input: syn::parse::ParseStream) -> syn::Result { input.parse::()?; let content; @@ -76,38 +65,29 @@ impl syn::parse::Parse for PalletEventAttr { content.parse::()?; content.parse::()?; - let lookahead = content.lookahead1(); - if lookahead.peek(keyword::generate_deposit) { - let span = content.parse::()?.span(); + let span = content.parse::()?.span(); + let generate_content; + syn::parenthesized!(generate_content in content); + let fn_vis = generate_content.parse::()?; + generate_content.parse::()?; + let fn_span = generate_content.parse::()?.span(); - let generate_content; - syn::parenthesized!(generate_content in content); - let fn_vis = generate_content.parse::()?; - generate_content.parse::()?; - let fn_span = generate_content.parse::()?.span(); - - - Ok(PalletEventAttr::DepositEvent { fn_vis, span, fn_span }) - } else { - Err(lookahead.error()) - } + Ok(PalletEventDepositAttr { fn_vis, span, fn_span }) } } struct PalletEventAttrInfo { - deposit_event: Option<(syn::Visibility, proc_macro2::Span)>, + deposit_event: Option, } impl PalletEventAttrInfo { - fn from_attrs(attrs: Vec) -> syn::Result { + fn from_attrs(attrs: Vec) -> syn::Result { let mut deposit_event = None; for attr in attrs { - match attr { - PalletEventAttr::DepositEvent { fn_vis, fn_span, .. 
} if deposit_event.is_none() => - deposit_event = Some((fn_vis, fn_span)), - attr => { - return Err(syn::Error::new(attr.span(), "Duplicate attribute")); - } + if deposit_event.is_none() { + deposit_event = Some(attr) + } else { + return Err(syn::Error::new(attr.span, "Duplicate attribute")); } } @@ -127,7 +107,7 @@ impl EventDef { return Err(syn::Error::new(item.span(), "Invalid pallet::event, expected item enum")) }; - let event_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + let event_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; let deposit_event = attr_info.deposit_event; From cb0af26d341aed5dfed1182500583f81b79b2ecd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 6 Jul 2021 16:55:05 +0100 Subject: [PATCH 317/503] Cargo.lock --- Cargo.lock | 3 +++ 1 file changed, 3 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 7d5ff582f94e0..1a14600f64cb3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7520,6 +7520,7 @@ dependencies = [ "sp-core", "sp-externalities", "sp-io", + "sp-maybe-compressed-blob", "sp-panic-handler", "sp-runtime", "sp-runtime-interface", @@ -9526,11 +9527,13 @@ version = "3.0.0" dependencies = [ "impl-serde", "parity-scale-codec", + "parity-wasm 0.42.2", "scale-info", "serde", "sp-runtime", "sp-std", "sp-version-proc-macro", + "thiserror", ] [[package]] From 2a6dc3f73d3f7f034fb69ddb2f8f5f997457ac11 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 08:03:02 +0100 Subject: [PATCH 318/503] Update frame/balances/src/tests_composite.rs Co-authored-by: Guillaume Thiolliere --- frame/balances/src/tests_composite.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index 4d864183c39dc..07ec0f377ecfc 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -97,7 +97,7 @@ impl Config for Test { type 
DustRemoval = (); type Event = Event; type ExistentialDeposit = ExistentialDeposit; - type AccountStore = system::Pallet; + type AccountStore = frame_system::Pallet; type MaxLocks = (); type MaxReserves = MaxReserves; type ReserveIdentifier = [u8; 8]; From 737250547357b19bd0c1845d144c9a07ea265a62 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 09:42:10 +0100 Subject: [PATCH 319/503] Line widths check --- frame/democracy/src/conviction.rs | 3 ++- frame/democracy/src/vote.rs | 4 +++- frame/democracy/src/vote_threshold.rs | 3 ++- frame/support/procedural/src/construct_runtime/mod.rs | 5 ++++- frame/support/src/origin.rs | 5 ++++- frame/support/src/storage/types/key.rs | 3 ++- frame/support/test/tests/construct_runtime.rs | 3 ++- frame/support/test/tests/pallet_compatibility.rs | 7 +++++-- frame/support/test/tests/pallet_compatibility_instance.rs | 7 +++++-- frame/support/test/tests/pallet_instance.rs | 8 ++++++-- primitives/arithmetic/src/fixed_point.rs | 5 ++++- primitives/npos-elections/compact/src/lib.rs | 7 ++++++- primitives/runtime/src/traits.rs | 6 +++--- primitives/version/src/lib.rs | 3 ++- 14 files changed, 50 insertions(+), 19 deletions(-) diff --git a/frame/democracy/src/conviction.rs b/frame/democracy/src/conviction.rs index a31cdc99cd172..7f9714a7d5ce4 100644 --- a/frame/democracy/src/conviction.rs +++ b/frame/democracy/src/conviction.rs @@ -20,10 +20,11 @@ use sp_std::{result::Result, convert::TryFrom}; use sp_runtime::{RuntimeDebug, traits::{Zero, Bounded, CheckedMul, CheckedDiv}}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use crate::types::Delegations; /// A value denoting the strength of conviction of a vote. -#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, scale_info::TypeInfo)] +#[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, TypeInfo)] pub enum Conviction { /// 0.1x votes, unlocked. 
None, diff --git a/frame/democracy/src/vote.rs b/frame/democracy/src/vote.rs index 1f28d3ee10824..0b607376930ad 100644 --- a/frame/democracy/src/vote.rs +++ b/frame/democracy/src/vote.rs @@ -90,7 +90,9 @@ impl AccountVote { } /// A "prior" lock, i.e. a lock for some now-forgotten reason. -#[derive(Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, TypeInfo)] +#[derive( + Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, TypeInfo +)] pub struct PriorLock(BlockNumber, Balance); impl PriorLock { diff --git a/frame/democracy/src/vote_threshold.rs b/frame/democracy/src/vote_threshold.rs index fd1736afa680d..87e6ded0dd554 100644 --- a/frame/democracy/src/vote_threshold.rs +++ b/frame/democracy/src/vote_threshold.rs @@ -20,12 +20,13 @@ #[cfg(feature = "std")] use serde::{Serialize, Deserialize}; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::traits::{Zero, IntegerSquareRoot}; use sp_std::ops::{Add, Mul, Div, Rem}; use crate::Tally; /// A means of determining if a vote is past pass threshold. -#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum VoteThreshold { /// A supermajority of approvals is needed to pass this vote. 
diff --git a/frame/support/procedural/src/construct_runtime/mod.rs b/frame/support/procedural/src/construct_runtime/mod.rs index f16612f6323e5..00b4cfa56e41d 100644 --- a/frame/support/procedural/src/construct_runtime/mod.rs +++ b/frame/support/procedural/src/construct_runtime/mod.rs @@ -167,7 +167,10 @@ fn construct_runtime_parsed(definition: RuntimeDefinition) -> Result KeyGenerator for Key } } -impl KeyGeneratorMaxEncodedLen for Key { +impl KeyGeneratorMaxEncodedLen +for Key { fn key_max_encoded_len() -> usize { H::max_len::() } diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 2f5ab8b11f881..0f3a683530bef 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -762,7 +762,8 @@ fn test_metadata() { ] }; - let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); + let expected_metadata: RuntimeMetadataPrefixed = + RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); let actual_metadata = Runtime::metadata(); pretty_assertions::assert_eq!(actual_metadata, expected_metadata); } diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index b6c0eaadbbf13..8b41f4a5c322a 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -261,7 +261,10 @@ mod test { use super::pallet; use super::pallet_old; use codec::{Decode, Encode}; - use scale_info::form::PortableForm; + use scale_info::{ + form::PortableForm, + Variant, + }; #[test] fn metadata() { @@ -292,7 +295,7 @@ mod test { } }; - let assert_enum_variants = |vs1: &[scale_info::Variant], vs2: &[scale_info::Variant]| { + let assert_enum_variants = |vs1: &[Variant], vs2: &[Variant]| { assert_eq!(vs1.len(), vs2.len()); for i in 0..vs1.len() { let v1 = &vs2[i]; diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs 
b/frame/support/test/tests/pallet_compatibility_instance.rs index 837e31c78d08d..946bab3b05310 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -276,7 +276,10 @@ mod test { use super::pallet; use super::pallet_old; use codec::{Decode, Encode}; - use scale_info::form::PortableForm; + use scale_info::{ + form::PortableForm, + Variant, + }; #[test] fn metadata() { @@ -307,7 +310,7 @@ mod test { } }; - let assert_enum_variants = |vs1: &[scale_info::Variant], vs2: &[scale_info::Variant]| { + let assert_enum_variants = |vs1: &[Variant], vs2: &[Variant]| { assert_eq!(vs1.len(), vs2.len()); for i in 0..vs1.len() { let v1 = &vs2[i]; diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 81192530ac31d..33533fe0f23bb 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -161,7 +161,10 @@ pub mod pallet { } #[pallet::origin] - #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, scale_info::TypeInfo)] + #[derive( + EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, + scale_info::TypeInfo + )] #[scale_info(skip_type_params(T, I))] pub struct Origin(PhantomData<(T, I)>); @@ -892,7 +895,8 @@ fn metadata() { ] }; - let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); + let expected_metadata: RuntimeMetadataPrefixed = + RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); let expected_metadata = match expected_metadata.1 { RuntimeMetadata::V14(metadata) => { metadata diff --git a/primitives/arithmetic/src/fixed_point.rs b/primitives/arithmetic/src/fixed_point.rs index e25d353b7a6ed..52609a3830771 100644 --- a/primitives/arithmetic/src/fixed_point.rs +++ b/primitives/arithmetic/src/fixed_point.rs @@ -324,7 +324,10 @@ macro_rules! 
implement_fixed { /// A fixed point number representation in the range. /// #[doc = $title] - #[derive(Encode, Decode, CompactAs, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, scale_info::TypeInfo)] + #[derive( + Encode, Decode, CompactAs, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, + scale_info::TypeInfo, + )] pub struct $name($inner_type); impl From<$inner_type> for $name { diff --git a/primitives/npos-elections/compact/src/lib.rs b/primitives/npos-elections/compact/src/lib.rs index 96e7e088a6538..557b812d7bbfc 100644 --- a/primitives/npos-elections/compact/src/lib.rs +++ b/primitives/npos-elections/compact/src/lib.rs @@ -173,7 +173,12 @@ fn struct_def( } } else { // automatically derived. - quote!(#[derive(Default, PartialEq, Eq, Clone, Debug, _npos::codec::Encode, _npos::codec::Decode, _npos::scale_info::TypeInfo)]) + quote! { + #[derive( + Default, PartialEq, Eq, Clone, Debug, _npos::codec::Encode, _npos::codec::Decode, + _npos::scale_info::TypeInfo, + )] + } }; let from_impl = assignment::from_impl(count); diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 6c19f36670e0c..9ba8c90e3d2b6 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -27,7 +27,7 @@ use std::str::FromStr; use serde::{Serialize, Deserialize, de::DeserializeOwned}; use sp_core::{self, Hasher, TypeId, RuntimeDebug}; use crate::codec::{Codec, Encode, Decode, MaxEncodedLen}; -use crate::scale_info::TypeInfo; +use crate::scale_info::{TypeInfo, StaticTypeInfo}; use crate::transaction_validity::{ ValidTransaction, TransactionSource, TransactionValidity, TransactionValidityError, UnknownTransaction, @@ -234,7 +234,7 @@ impl StaticLookup for AccountIdLookup: Codec + scale_info::StaticTypeInfo, + crate::MultiAddress: Codec + StaticTypeInfo, { type Source = crate::MultiAddress; type Target = AccountId; @@ -742,7 +742,7 @@ impl Dispatchable for () { /// Means by which a transaction may be extended. 
This type embodies both the data and the logic /// that should be additionally associated with the transaction. It should be plain old data. -pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq + scale_info::StaticTypeInfo { +pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq + StaticTypeInfo { /// Unique identifier of this signed extension. /// /// This will be exposed in the metadata to identify the signed extension used diff --git a/primitives/version/src/lib.rs b/primitives/version/src/lib.rs index 907e052a4cd66..c4f1378e81e87 100644 --- a/primitives/version/src/lib.rs +++ b/primitives/version/src/lib.rs @@ -27,6 +27,7 @@ use std::fmt; use std::collections::HashSet; use codec::{Encode, Decode}; +use scale_info::TypeInfo; use sp_runtime::RuntimeString; pub use sp_runtime::create_runtime_str; #[doc(hidden)] @@ -114,7 +115,7 @@ macro_rules! create_apis_vec { /// This triplet have different semantics and mis-interpretation could cause problems. /// In particular: bug fixes should result in an increment of `spec_version` and possibly `authoring_version`, /// absolutely not `impl_version` since they change the semantics of the runtime. 
-#[derive(Clone, PartialEq, Eq, Encode, Decode, Default, sp_runtime::RuntimeDebug, scale_info::TypeInfo)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, Default, sp_runtime::RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] pub struct RuntimeVersion { From 2cd71b650d284fd6f110ab4465d213f95a5e371c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 09:51:17 +0100 Subject: [PATCH 320/503] Cargo.lock --- Cargo.lock | 122 ++++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 115 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0662c3da6d6a7..9e74a88f0687d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1690,9 +1690,8 @@ dependencies = [ [[package]] name = "finality-grandpa" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74a1bfdcc776e63e49f741c7ce6116fa1b887e8ac2e3ccb14dd4aa113e54feb9" +version = "0.15.0" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#c6e1612b93a20574513d542f2a4e628c693a455c" dependencies = [ "either", "futures 0.3.15", @@ -1702,6 +1701,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.3", + "scale-info", ] [[package]] @@ -1769,6 +1769,7 @@ dependencies = [ "log", "parity-scale-codec", "paste 1.0.4", + "scale-info", "serde", "sp-api", "sp-io", @@ -1807,6 +1808,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -1824,6 +1826,7 @@ dependencies = [ "pallet-indices", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -1835,18 +1838,20 @@ dependencies = [ [[package]] name = "frame-metadata" -version = "13.0.0" +version = "14.0.0" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#32584873b4fc87044869bd55aa6908feb1d0084d" 
dependencies = [ + "cfg-if 1.0.0", "parity-scale-codec", + "scale-info", "serde", - "sp-core", - "sp-std", ] [[package]] name = "frame-support" version = "3.0.0" dependencies = [ + "assert_matches", "bitflags", "frame-metadata", "frame-support-procedural", @@ -1858,6 +1863,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "pretty_assertions 0.6.1", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -1906,18 +1912,20 @@ dependencies = [ name = "frame-support-test" version = "3.0.0" dependencies = [ - "frame-metadata", "frame-support", "frame-system", "parity-scale-codec", "pretty_assertions 0.6.1", "rustversion", + "scale-info", "serde", + "sp-arithmetic", "sp-core", "sp-io", "sp-runtime", "sp-state-machine", "sp-std", + "sp-version", "trybuild", ] @@ -1930,6 +1938,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -1948,6 +1957,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4249,6 +4259,7 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "sc-executor", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-core", @@ -4286,6 +4297,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "pretty_assertions 0.6.1", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4395,6 +4407,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", + "scale-info", "sp-api", "sp-authority-discovery", "sp-block-builder", @@ -4474,6 +4487,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", + "scale-info", "sp-api", "sp-block-builder", "sp-consensus-aura", @@ -4703,6 +4717,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4717,6 +4732,7 @@ dependencies = [ "frame-system", "pallet-balances", 
"parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4734,6 +4750,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "parking_lot 0.11.1", + "scale-info", "sp-application-crypto", "sp-consensus-aura", "sp-core", @@ -4750,6 +4767,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-authority-discovery", "sp-core", @@ -4767,6 +4785,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", + "scale-info", "serde", "sp-authorship", "sp-core", @@ -4792,6 +4811,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-consensus-vrf", @@ -4813,6 +4833,7 @@ dependencies = [ "log", "pallet-transaction-payment", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4829,6 +4850,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4847,6 +4869,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4875,6 +4898,7 @@ dependencies = [ "pwasm-utils", "rand 0.8.3", "rand_pcg 0.3.0", + "scale-info", "serde", "smallvec 1.6.1", "sp-core", @@ -4892,6 +4916,7 @@ version = "3.0.0" dependencies = [ "bitflags", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-runtime", @@ -4932,6 +4957,7 @@ version = "3.0.0" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -4948,6 +4974,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -4972,6 +4999,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.4", "rand 0.7.3", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -4992,6 +5020,7 @@ dependencies = [ "hex-literal", 
"pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5009,6 +5038,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-npos-elections", @@ -5027,6 +5057,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5042,6 +5073,7 @@ dependencies = [ "lite-json", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-keystore", @@ -5056,6 +5088,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5073,6 +5106,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -5098,6 +5132,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-core", "sp-finality-grandpa", @@ -5119,6 +5154,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5136,6 +5172,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-core", "sp-io", @@ -5153,6 +5190,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-keyring", @@ -5170,6 +5208,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5186,6 +5225,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5204,6 +5244,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5253,6 +5294,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", 
"sp-runtime", @@ -5267,6 +5309,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5281,6 +5324,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5296,6 +5340,7 @@ dependencies = [ "log", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5322,6 +5367,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5340,6 +5386,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5354,6 +5401,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5369,6 +5417,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5384,6 +5433,7 @@ dependencies = [ "frame-system", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5399,6 +5449,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5416,6 +5467,7 @@ dependencies = [ "log", "pallet-timestamp", "parity-scale-codec", + "scale-info", "sp-application-crypto", "sp-core", "sp-io", @@ -5441,6 +5493,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", + "scale-info", "serde", "sp-core", "sp-io", @@ -5459,6 +5512,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5484,6 +5538,7 @@ dependencies = [ "parking_lot 0.11.1", "paste 1.0.4", "rand_chacha 0.2.2", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5523,6 +5578,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + 
"scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5537,6 +5593,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5553,6 +5610,7 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -5571,6 +5629,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5587,6 +5646,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5634,6 +5694,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5653,6 +5714,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -5670,6 +5732,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5685,6 +5748,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5702,6 +5766,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -6254,6 +6319,7 @@ dependencies = [ "fixed-hash", "impl-codec", "impl-serde", + "scale-info", "uint", ] @@ -8234,6 +8300,32 @@ dependencies = [ "wasm-timer", ] +[[package]] +name = "scale-info" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8522f54f50117bc4281f42b611bb71b9cc4b9c2f40cf2b725fd518e1c0d0f160" +dependencies = [ + "bitvec", + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive", + "serde", +] + +[[package]] +name = "scale-info-derive" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"113a247fe47d242d24f4795cbc069d0a168a70da9e1cff98b5645edcc21f1eed" +dependencies = [ + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "schannel" version = "0.1.19" @@ -8698,6 +8790,7 @@ name = "sp-application-crypto" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "serde", "sp-core", "sp-io", @@ -8726,6 +8819,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-debug-derive", @@ -8749,6 +8843,7 @@ name = "sp-authority-discovery" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -8827,6 +8922,7 @@ version = "0.9.0" dependencies = [ "async-trait", "parity-scale-codec", + "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -8844,6 +8940,7 @@ dependencies = [ "async-trait", "merlin", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -8874,6 +8971,7 @@ name = "sp-consensus-slots" version = "0.9.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -8918,6 +9016,7 @@ dependencies = [ "rand 0.7.3", "rand_chacha 0.2.2", "regex", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -8972,6 +9071,7 @@ dependencies = [ "finality-grandpa", "log", "parity-scale-codec", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9061,6 +9161,7 @@ version = "3.0.0" dependencies = [ "parity-scale-codec", "rand 0.7.3", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -9091,6 +9192,7 @@ dependencies = [ "honggfuzz", "parity-scale-codec", "rand 0.7.3", + "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", @@ -9138,6 +9240,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "rand 0.7.3", + "scale-info", "serde", "serde_json", "sp-api", @@ -9250,6 +9353,7 @@ name = "sp-session" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-api", 
"sp-core", "sp-runtime", @@ -9262,6 +9366,7 @@ name = "sp-staking" version = "3.0.0" dependencies = [ "parity-scale-codec", + "scale-info", "sp-runtime", "sp-std", ] @@ -9387,6 +9492,7 @@ dependencies = [ "async-trait", "log", "parity-scale-codec", + "scale-info", "sp-core", "sp-inherents", "sp-runtime", @@ -9430,6 +9536,7 @@ dependencies = [ "impl-serde", "parity-scale-codec", "parity-wasm 0.42.2", + "scale-info", "serde", "sp-runtime", "sp-std", @@ -9725,6 +9832,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", + "scale-info", "serde", "sp-api", "sp-application-crypto", From 848d3b6f178e9df71c9d93c2a05451dfffaad07c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 10:52:43 +0100 Subject: [PATCH 321/503] Add scale-info/std --- bin/node/primitives/Cargo.toml | 1 + bin/node/runtime/Cargo.toml | 2 +- frame/assets/Cargo.toml | 1 + frame/aura/Cargo.toml | 1 + frame/authority-discovery/Cargo.toml | 1 + frame/authorship/Cargo.toml | 1 + frame/babe/Cargo.toml | 1 + frame/benchmarking/Cargo.toml | 1 + frame/bounties/Cargo.toml | 1 + frame/collective/Cargo.toml | 1 + frame/contracts/Cargo.toml | 1 + frame/contracts/common/Cargo.toml | 1 + frame/contracts/rpc/runtime-api/Cargo.toml | 1 + frame/democracy/Cargo.toml | 1 + frame/election-provider-multi-phase/Cargo.toml | 1 + frame/elections-phragmen/Cargo.toml | 1 + frame/elections/Cargo.toml | 1 + frame/example-offchain-worker/Cargo.toml | 1 + frame/example-parallel/Cargo.toml | 1 + frame/example/Cargo.toml | 1 + frame/executive/Cargo.toml | 1 + frame/gilt/Cargo.toml | 1 + frame/grandpa/Cargo.toml | 1 + frame/identity/Cargo.toml | 1 + frame/im-online/Cargo.toml | 1 + frame/indices/Cargo.toml | 1 + frame/lottery/Cargo.toml | 1 + frame/membership/Cargo.toml | 1 + frame/multisig/Cargo.toml | 1 + frame/nicks/Cargo.toml | 1 + frame/node-authorization/Cargo.toml | 1 + frame/offences/Cargo.toml | 1 + frame/offences/benchmarking/Cargo.toml | 1 + frame/randomness-collective-flip/Cargo.toml | 1 + 
frame/recovery/Cargo.toml | 1 + frame/scheduler/Cargo.toml | 1 + frame/scored-pool/Cargo.toml | 1 + frame/session/Cargo.toml | 1 + frame/society/Cargo.toml | 1 + frame/staking/Cargo.toml | 1 + frame/sudo/Cargo.toml | 1 + frame/support/Cargo.toml | 1 + frame/support/test/Cargo.toml | 1 + frame/system/benchmarking/Cargo.toml | 1 + frame/timestamp/Cargo.toml | 1 + frame/tips/Cargo.toml | 1 + frame/transaction-payment/Cargo.toml | 1 - frame/transaction-storage/Cargo.toml | 1 + frame/treasury/Cargo.toml | 1 + frame/uniques/Cargo.toml | 1 + frame/utility/Cargo.toml | 1 + frame/vesting/Cargo.toml | 1 + primitives/application-crypto/Cargo.toml | 1 + primitives/arithmetic/Cargo.toml | 1 + primitives/authority-discovery/Cargo.toml | 1 + primitives/consensus/aura/Cargo.toml | 1 + primitives/consensus/babe/Cargo.toml | 1 + primitives/consensus/slots/Cargo.toml | 1 + primitives/core/Cargo.toml | 1 + primitives/npos-elections/Cargo.toml | 1 + primitives/runtime/Cargo.toml | 1 + primitives/staking/Cargo.toml | 1 + primitives/version/Cargo.toml | 1 + test-utils/runtime/Cargo.toml | 1 + 64 files changed, 63 insertions(+), 2 deletions(-) diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index 136e2509af329..27c8843b63753 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -26,6 +26,7 @@ pretty_assertions = "0.6.1" default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-system/std", "sp-application-crypto/std", "sp-core/std", diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 62b1f281e0722..8b2c4497f7f5e 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -101,7 +101,6 @@ sp-io = { version = "3.0.0", path = "../../../primitives/io" } default = ["std"] with-tracing = [ "frame-executive/with-tracing" ] std = [ - "scale-info/std", "sp-authority-discovery/std", "pallet-assets/std", "pallet-authority-discovery/std", @@ -112,6 +111,7 @@ std = [ "pallet-bounties/std", 
"sp-block-builder/std", "codec/std", + "scale-info/std", "pallet-collective/std", "pallet-contracts/std", "pallet-contracts-primitives/std", diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index b1e43aa1316cc..7ae77294d7246 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -34,6 +34,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index a1bf822a8f0cd..4bfa130a9d4b8 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -35,6 +35,7 @@ default = ["std"] std = [ "sp-application-crypto/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index 1139b01c4d9f7..38d9ba9d4f73e 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -34,6 +34,7 @@ std = [ "sp-application-crypto/std", "sp-authority-discovery/std", "codec/std", + "scale-info/std", "sp-std/std", "pallet-session/std", "sp-runtime/std", diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index a5456e83e9203..8c36241836b25 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -31,6 +31,7 @@ serde = { version = "1.0.101" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "sp-std/std", "frame-support/std", diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index d9b847201fef3..b66bb776276e2 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -43,6 +43,7 @@ frame-election-provider-support = { version = "3.0.0", path = "../election-provi default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-benchmarking/std", "frame-support/std", "frame-system/std", diff --git a/frame/benchmarking/Cargo.toml 
b/frame/benchmarking/Cargo.toml index 0b62883dfc446..ea014117675ad 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -35,6 +35,7 @@ serde = "1.0.101" default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime-interface/std", "sp-runtime/std", "sp-api/std", diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index f516b8990e736..5be11e98adf29 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -33,6 +33,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index 51c84badd4bce..fe566d8aafb48 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -32,6 +32,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-core/std", "sp-std/std", "sp-io/std", diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index 5fd01c613d7b0..a744bf2a86dde 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -55,6 +55,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-core/std", "sp-runtime/std", "sp-io/std", diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index fd91fb9539f6d..2db5f7ebca2f2 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -27,6 +27,7 @@ sp-runtime = { version = "3.0.0", default-features = false, path = "../../../pri default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-core/std", "sp-runtime/std", "sp-std/std", diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index 9d3dd62fc0dca..cac8db67ef680 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -27,6 
+27,7 @@ default = ["std"] std = [ "sp-api/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "pallet-contracts-primitives/std", diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index 478d669058d0f..d48b92b32c60e 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -36,6 +36,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "frame-benchmarking/std", diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index 31e8aff368062..2cb7e5027377f 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -51,6 +51,7 @@ frame-benchmarking = { version = "3.1.0", path = "../benchmarking" } default = ["std"] std = [ "codec/std", + "scale-info/std", "log/std", "frame-support/std", diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index ccbc91e2546d7..0ec152123c3be 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -35,6 +35,7 @@ substrate-test-utils = { version = "3.0.0", path = "../../test-utils" } default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "sp-runtime/std", "sp-npos-elections/std", diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 5542fbd4768d2..28ab33c52134b 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -30,6 +30,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-core/std", "sp-std/std", "sp-io/std", diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index f97ead4a86dc0..6912d7c25f263 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -29,6 +29,7 @@ log = { version = "0.4.14", default-features = false } default = 
["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", "lite-json/std", diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index ae13c51827b77..d4e54bbd9d89e 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -29,6 +29,7 @@ serde = { version = "1.0.101" } default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", "sp-core/std", diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 94ef20ed61674..2747a2a674d4d 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -31,6 +31,7 @@ sp-core = { version = "3.0.0", path = "../../primitives/core", default-features default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-benchmarking/std", "frame-support/std", "frame-system/std", diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index d78bbf9232f85..e562d677dd075 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -40,6 +40,7 @@ with-tracing = [ ] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", "sp-core/std", diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index 65f091542d656..12ff2df3fa4ec 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -31,6 +31,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "sp-arithmetic/std", diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 8956c152a6bfd..2b99b800fd952 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -45,6 +45,7 @@ frame-election-provider-support = { version = "3.0.0", path = "../election-provi default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-benchmarking/std", "sp-application-crypto/std", "sp-core/std", diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 
306a19f392ae2..f055a2fbc27dd 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -31,6 +31,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index 69248d92673b6..522f1314e2530 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -37,6 +37,7 @@ std = [ "sp-application-crypto/std", "pallet-authorship/std", "codec/std", + "scale-info/std", "sp-core/std", "sp-std/std", "sp-io/std", diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index 98f09ac5cca48..92ee2b01e994b 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -33,6 +33,7 @@ default = ["std"] std = [ "sp-keyring", "codec/std", + "scale-info/std", "sp-core/std", "sp-std/std", "sp-io/std", diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index a6b2e9f4a1f1b..68581e4b735f2 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -33,6 +33,7 @@ serde = { version = "1.0.101" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "frame-support/std", "sp-runtime/std", diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index a4771599f3a5d..93d10b49c48eb 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -31,6 +31,7 @@ sp-core = { version = "3.0.0", path = "../../primitives/core" } default = ["std"] std = [ "codec/std", + "scale-info/std", "log/std", "sp-std/std", "sp-io/std", diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index a6bce677ddce5..b2ba9d6c89aa7 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -32,6 +32,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "frame-support/std", "frame-system/std", diff --git 
a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index ffbde3eccb86f..d070d67602177 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -29,6 +29,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index a48b22b74dff1..5a3fce0a78aaf 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -26,6 +26,7 @@ log = { version = "0.4.14", default-features = false } default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", "sp-core/std", diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index 017cc2a91f19b..3af5696bae21a 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -33,6 +33,7 @@ default = ["std"] std = [ "pallet-balances/std", "codec/std", + "scale-info/std", "sp-std/std", "serde", "sp-runtime/std", diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index 62e3fb8cee27d..f8997b59f1b67 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ -55,4 +55,5 @@ std = [ "frame-election-provider-support/std", "sp-std/std", "codec/std", + "scale-info/std", ] diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index 6d3a982c669e6..1cd58fe42e208 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -31,6 +31,7 @@ default = ["std"] std = [ "safe-mix/std", "codec/std", + "scale-info/std", "sp-runtime/std", "sp-std/std", "frame-system/std", diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index 8ab7db76b6926..bd27028aef791 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -30,6 +30,7 @@ pallet-balances = { version = 
"3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index 6dc7df252023a..b2c6b1cc80d74 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -29,6 +29,7 @@ substrate-test-utils = { version = "3.0.0", path = "../../test-utils" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "frame-benchmarking/std", "frame-support/std", diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 5dbed3f4e0116..e8e5aa1ca2e7c 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -29,6 +29,7 @@ sp-core = { version = "3.0.0", path = "../../primitives/core" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-io/std", "sp-runtime/std", "sp-std/std", diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index c28980a6c547e..bc2fefae4a095 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -37,6 +37,7 @@ default = ["std", "historical"] historical = ["sp-trie"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "frame-support/std", diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index 0e4eaf8f47f7e..da1f8bbb11cae 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -31,6 +31,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "rand_chacha/std", "sp-std/std", diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index f99c6ebe256a5..a82a51817f15a 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -53,6 +53,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "frame-support/std", diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index acb5f82ccbde3..efb429196ad42 100644 --- 
a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -28,6 +28,7 @@ sp-core = { version = "3.0.0", path = "../../primitives/core" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 3f0f294d52c8b..e79b41b126774 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -48,6 +48,7 @@ std = [ "scale-info/std", "sp-io/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "sp-tracing/std", diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 6a12ad92ca86e..aa9359aeb2a12 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -33,6 +33,7 @@ default = ["std"] std = [ "serde/std", "codec/std", + "scale-info/std", "sp-io/std", "frame-support/std", "frame-system/std", diff --git a/frame/system/benchmarking/Cargo.toml b/frame/system/benchmarking/Cargo.toml index 095be2c265a37..c64103fa0d1c2 100644 --- a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -30,6 +30,7 @@ sp-io ={ version = "3.0.0", path = "../../../primitives/io" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "sp-std/std", "frame-benchmarking/std", diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 8bb6bde606a64..881fafc8bfcf2 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -37,6 +37,7 @@ default = ["std"] std = [ "sp-inherents/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-benchmarking/std", diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index f003bc4d06746..7f8a61aea1655 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -35,6 +35,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git a/frame/transaction-payment/Cargo.toml 
b/frame/transaction-payment/Cargo.toml index 2a6e5671d4beb..79b6ddfc83b47 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -36,7 +36,6 @@ default = ["std"] std = [ "serde", "codec/std", - "scale-info/std", "sp-core/std", "sp-io/std", "sp-runtime/std", diff --git a/frame/transaction-storage/Cargo.toml b/frame/transaction-storage/Cargo.toml index 94677d9388685..6415f825e329a 100644 --- a/frame/transaction-storage/Cargo.toml +++ b/frame/transaction-storage/Cargo.toml @@ -41,6 +41,7 @@ runtime-benchmarks = [ std = [ "serde", "codec/std", + "scale-info/std", "sp-runtime/std", "frame-support/std", "frame-system/std", diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index 490bd8462fba0..764142828dc59 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -35,6 +35,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "frame-support/std", diff --git a/frame/uniques/Cargo.toml b/frame/uniques/Cargo.toml index 8787ac67a5d7c..053d65bfe06ce 100644 --- a/frame/uniques/Cargo.toml +++ b/frame/uniques/Cargo.toml @@ -32,6 +32,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-core/std", "sp-runtime/std", diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index 2a3904ccdb8d5..30795e7c2c149 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -32,6 +32,7 @@ pallet-balances = { version = "3.0.0", path = "../balances" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "frame-support/std", "frame-system/std", diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index d738cb1f100dc..49ca173c79e85 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -33,6 +33,7 @@ hex-literal = "0.3.1" default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", 
"frame-support/std", diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index fde37a12b4c10..a7bc1baee5ab4 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -28,6 +28,7 @@ std = [ "full_crypto", "sp-core/std", "codec/std", + "scale-info/std", "serde", "sp-std/std", "sp-io/std", diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 55a426eb28599..f474466b1e273 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -34,6 +34,7 @@ primitive-types = "0.10.0" default = ["std"] std = [ "codec/std", + "scale-info/std", "num-traits/std", "sp-std/std", "serde", diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index 2b730e80f1bca..49f0dc908889d 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -25,6 +25,7 @@ default = ["std"] std = [ "sp-application-crypto/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-api/std", "sp-runtime/std" diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index b1237ae2412d9..4caa1397bc8fe 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -30,6 +30,7 @@ default = ["std"] std = [ "sp-application-crypto/std", "codec/std", + "scale-info/std", "sp-std/std", "sp-api/std", "sp-runtime/std", diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index b4db91a0df7e3..3d0502b81e3b2 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -35,6 +35,7 @@ default = ["std"] std = [ "sp-application-crypto/std", "codec/std", + "scale-info/std", "merlin/std", "sp-std/std", "sp-api/std", diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index a86e597936a1d..d1fd3fd819593 100644 --- 
a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -22,6 +22,7 @@ sp-arithmetic = { version = "3.0.0", default-features = false, path = "../../ari default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "sp-arithmetic/std", ] diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 37809f151f909..2e919d67a4312 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -86,6 +86,7 @@ std = [ "primitive-types/rustc-hex", "impl-serde", "codec/std", + "scale-info/std", "hash256-std-hasher/std", "hash-db/std", "sp-std/std", diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index ba78387d908d0..066b0629b869e 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -32,6 +32,7 @@ bench = [] mocks = [] std = [ "codec/std", + "scale-info/std", "serde", "sp-std/std", "sp-arithmetic/std", diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 213d836f82b89..47598ab9ef17a 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -47,6 +47,7 @@ std = [ "sp-application-crypto/std", "sp-arithmetic/std", "codec/std", + "scale-info/std", "log/std", "sp-core/std", "rand", diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index c2f08e1408052..6e60180ee804e 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -22,6 +22,7 @@ sp-std = { version = "3.0.0", default-features = false, path = "../std" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-runtime/std", "sp-std/std", ] diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index dcdd09b50b3cd..eea370b4f696b 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -31,6 +31,7 @@ std = [ "impl-serde", "serde", "codec/std", + "scale-info/std", "sp-std/std", "sp-runtime/std", "parity-wasm", diff --git 
a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index dfd764c01beb2..d050617953db9 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -69,6 +69,7 @@ std = [ "sp-consensus-babe/std", "sp-block-builder/std", "codec/std", + "scale-info/std", "sp-inherents/std", "sp-keyring", "log/std", From 6e2313c2d15b47cd776f3d41daf308052714827d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 11:42:53 +0100 Subject: [PATCH 322/503] Update frame/system/src/lib.rs Co-authored-by: Guillaume Thiolliere --- frame/system/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 27b006cfc06b9..1a72ece7284b8 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -297,7 +297,7 @@ pub mod pallet { #[pallet::call] impl Pallet { - /// A dispatch that will fill the block weight up to the given ratio.a + /// A dispatch that will fill the block weight up to the given ratio. // TODO: This should only be available for testing, rather than in general usage, but // that's not possible at present (since it's within the pallet macro). 
#[pallet::weight(*_ratio * T::BlockWeights::get().max_block)] From 8a2bbdbffaba974fb3195e18eb57f303bf946fe8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 11:48:08 +0100 Subject: [PATCH 323/503] Use `skip_type_params` to remove `TypeInfo` requirements on checks --- frame/system/src/extensions/check_genesis.rs | 2 +- frame/system/src/extensions/check_mortality.rs | 9 +++++---- frame/system/src/extensions/check_nonce.rs | 7 ++++--- frame/system/src/extensions/check_spec_version.rs | 11 ++++++----- frame/system/src/extensions/check_tx_version.rs | 9 +++++---- frame/system/src/extensions/check_weight.rs | 9 +++++---- 6 files changed, 26 insertions(+), 21 deletions(-) diff --git a/frame/system/src/extensions/check_genesis.rs b/frame/system/src/extensions/check_genesis.rs index 4a7997bb2447a..d5b3a49c9f326 100644 --- a/frame/system/src/extensions/check_genesis.rs +++ b/frame/system/src/extensions/check_genesis.rs @@ -40,7 +40,7 @@ impl sp_std::fmt::Debug for CheckGenesis { } } -impl CheckGenesis { +impl CheckGenesis { /// Creates new `SignedExtension` to check genesis hash. pub fn new() -> Self { Self(sp_std::marker::PhantomData) diff --git a/frame/system/src/extensions/check_mortality.rs b/frame/system/src/extensions/check_mortality.rs index f1261d18a2a28..d3d27f09606a4 100644 --- a/frame/system/src/extensions/check_mortality.rs +++ b/frame/system/src/extensions/check_mortality.rs @@ -28,16 +28,17 @@ use sp_runtime::{ /// Check for transaction mortality. #[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] -pub struct CheckMortality(Era, sp_std::marker::PhantomData); +#[scale_info(skip_type_params(T))] +pub struct CheckMortality(Era, sp_std::marker::PhantomData); -impl CheckMortality { +impl CheckMortality { /// utility constructor. Used only in client/factory code. 
pub fn from(era: Era) -> Self { Self(era, sp_std::marker::PhantomData) } } -impl sp_std::fmt::Debug for CheckMortality { +impl sp_std::fmt::Debug for CheckMortality { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckMortality({:?})", self.0) @@ -49,7 +50,7 @@ impl sp_std::fmt::Debug for CheckMortality SignedExtension for CheckMortality { +impl SignedExtension for CheckMortality { type AccountId = T::AccountId; type Call = T::Call; type AdditionalSigned = T::Hash; diff --git a/frame/system/src/extensions/check_nonce.rs b/frame/system/src/extensions/check_nonce.rs index 776d93eac1649..9012ad58aeb4a 100644 --- a/frame/system/src/extensions/check_nonce.rs +++ b/frame/system/src/extensions/check_nonce.rs @@ -33,16 +33,17 @@ use sp_std::vec; /// Note that this does not set any priority by default. Make sure that AT LEAST one of the signed /// extension sets some kind of priority upon validating transactions. #[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] pub struct CheckNonce(#[codec(compact)] T::Index); -impl CheckNonce { +impl CheckNonce { /// utility constructor. Used only in client/factory code. 
pub fn from(nonce: T::Index) -> Self { Self(nonce) } } -impl sp_std::fmt::Debug for CheckNonce { +impl sp_std::fmt::Debug for CheckNonce { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckNonce({})", self.0) @@ -54,7 +55,7 @@ impl sp_std::fmt::Debug for CheckNonce { } } -impl SignedExtension for CheckNonce where +impl SignedExtension for CheckNonce where T::Call: Dispatchable { type AccountId = T::AccountId; diff --git a/frame/system/src/extensions/check_spec_version.rs b/frame/system/src/extensions/check_spec_version.rs index 4f94e0227bf1d..ad95b57147dc9 100644 --- a/frame/system/src/extensions/check_spec_version.rs +++ b/frame/system/src/extensions/check_spec_version.rs @@ -24,10 +24,11 @@ use sp_runtime::{ }; /// Ensure the runtime version registered in the transaction is the same as at present. -#[derive(Encode, Decode, Clone, Eq, PartialEq, scale_info::TypeInfo)] -pub struct CheckSpecVersion(sp_std::marker::PhantomData); +#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] +#[scale_info(skip_type_params(T))] +pub struct CheckSpecVersion(sp_std::marker::PhantomData); -impl sp_std::fmt::Debug for CheckSpecVersion { +impl sp_std::fmt::Debug for CheckSpecVersion { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckSpecVersion") @@ -39,14 +40,14 @@ impl sp_std::fmt::Debug for CheckSpecVersion } } -impl CheckSpecVersion { +impl CheckSpecVersion { /// Create new `SignedExtension` to check runtime version. 
pub fn new() -> Self { Self(sp_std::marker::PhantomData) } } -impl SignedExtension for CheckSpecVersion { +impl SignedExtension for CheckSpecVersion { type AccountId = T::AccountId; type Call = ::Call; type AdditionalSigned = u32; diff --git a/frame/system/src/extensions/check_tx_version.rs b/frame/system/src/extensions/check_tx_version.rs index f2468e7b0755a..13043cf6d33a7 100644 --- a/frame/system/src/extensions/check_tx_version.rs +++ b/frame/system/src/extensions/check_tx_version.rs @@ -25,9 +25,10 @@ use sp_runtime::{ /// Ensure the transaction version registered in the transaction is the same as at present. #[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] -pub struct CheckTxVersion(sp_std::marker::PhantomData); +#[scale_info(skip_type_params(T))] +pub struct CheckTxVersion(sp_std::marker::PhantomData); -impl sp_std::fmt::Debug for CheckTxVersion { +impl sp_std::fmt::Debug for CheckTxVersion { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckTxVersion") @@ -39,14 +40,14 @@ impl sp_std::fmt::Debug for CheckTxVersion CheckTxVersion { +impl CheckTxVersion { /// Create new `SignedExtension` to check transaction version. pub fn new() -> Self { Self(sp_std::marker::PhantomData) } } -impl SignedExtension for CheckTxVersion { +impl SignedExtension for CheckTxVersion { type AccountId = T::AccountId; type Call = ::Call; type AdditionalSigned = u32; diff --git a/frame/system/src/extensions/check_weight.rs b/frame/system/src/extensions/check_weight.rs index ec6beb4327141..3569eea51dbb7 100644 --- a/frame/system/src/extensions/check_weight.rs +++ b/frame/system/src/extensions/check_weight.rs @@ -33,9 +33,10 @@ use frame_support::{ /// Block resource (weight) limit check. 
#[derive(Encode, Decode, Clone, Eq, PartialEq, Default, TypeInfo)] -pub struct CheckWeight(sp_std::marker::PhantomData); +#[scale_info(skip_type_params(T))] +pub struct CheckWeight(sp_std::marker::PhantomData); -impl CheckWeight where +impl CheckWeight where T::Call: Dispatchable, { /// Checks if the current extrinsic does not exceed the maximum weight a single extrinsic @@ -185,7 +186,7 @@ pub fn calculate_consumed_weight( Ok(all_weight) } -impl SignedExtension for CheckWeight where +impl SignedExtension for CheckWeight where T::Call: Dispatchable { type AccountId = T::AccountId; @@ -264,7 +265,7 @@ impl SignedExtension for CheckWeight wher } } -impl sp_std::fmt::Debug for CheckWeight { +impl sp_std::fmt::Debug for CheckWeight { #[cfg(feature = "std")] fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { write!(f, "CheckWeight") From 5de104ed5743307c2ee56f901f0cb972c17905d8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 12:20:08 +0100 Subject: [PATCH 324/503] Revert "Remove unused Call metadata stuff" This reverts commit 41311f85 --- .../src/construct_runtime/expand/call.rs | 32 +++++++ .../procedural/src/pallet/expand/call.rs | 15 ++++ frame/support/src/dispatch.rs | 84 ++++++++++++++++++- frame/support/src/lib.rs | 2 +- frame/support/src/traits.rs | 2 +- frame/support/src/traits/metadata.rs | 27 ++++++ frame/support/test/tests/construct_runtime.rs | 33 ++++++++ frame/support/test/tests/pallet.rs | 7 +- frame/support/test/tests/pallet_instance.rs | 12 ++- 9 files changed, 208 insertions(+), 6 deletions(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/call.rs b/frame/support/procedural/src/construct_runtime/expand/call.rs index eb335e11d5b76..b6d886abff332 100644 --- a/frame/support/procedural/src/construct_runtime/expand/call.rs +++ b/frame/support/procedural/src/construct_runtime/expand/call.rs @@ -67,6 +67,38 @@ pub fn expand_outer_dispatch( } } } + impl #scrate::dispatch::GetCallMetadata for Call { + fn 
get_call_metadata(&self) -> #scrate::dispatch::CallMetadata { + use #scrate::dispatch::GetCallName; + match self { + #( + #variant_patterns => { + let function_name = call.get_call_name(); + let pallet_name = stringify!(#pallet_names); + #scrate::dispatch::CallMetadata { function_name, pallet_name } + } + )* + } + } + + fn get_module_names() -> &'static [&'static str] { + &[#( + stringify!(#pallet_names), + )*] + } + + fn get_call_names(module: &str) -> &'static [&'static str] { + use #scrate::dispatch::{Callable, GetCallName}; + match module { + #( + stringify!(#pallet_names) => + <<#pallet_names as Callable<#runtime>>::Call + as GetCallName>::get_call_names(), + )* + _ => unreachable!(), + } + } + } impl #scrate::dispatch::Dispatchable for Call { type Origin = Origin; type Config = Call; diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 5f98ac5cdcd3b..e6da9fbc8aa70 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -179,6 +179,21 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { } } + impl<#type_impl_gen> #frame_support::dispatch::GetCallName for #call_ident<#type_use_gen> + #where_clause + { + fn get_call_name(&self) -> &'static str { + match *self { + #( Self::#fn_name(..) 
=> stringify!(#fn_name), )* + Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), + } + } + + fn get_call_names() -> &'static [&'static str] { + &[ #( stringify!(#fn_name), )* ] + } + } + impl<#type_impl_gen> #frame_support::traits::UnfilteredDispatchable for #call_ident<#type_use_gen> #where_clause diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index fc52db6df07c1..c7c1b12db6d64 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -25,7 +25,9 @@ pub use crate::weights::{ PaysFee, PostDispatchInfo, WithPostDispatchInfo, }; pub use sp_runtime::{traits::Dispatchable, DispatchError}; -pub use crate::traits::{UnfilteredDispatchable, GetPalletVersion}; +pub use crate::traits::{ + CallMetadata, GetCallMetadata, GetCallName, UnfilteredDispatchable, GetPalletVersion, +}; /// The return typ of a `Dispatchable` in frame. When returned explicitly from /// a dispatchable function it allows overriding the default `PostDispatchInfo` @@ -1987,6 +1989,32 @@ macro_rules! decl_module { } } + // Implement GetCallName for the Call. + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $crate::dispatch::GetCallName + for $call_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* + { + fn get_call_name(&self) -> &'static str { + match *self { + $( + $call_type::$fn_name( $( ref $param_name ),* ) => { + // Don't generate any warnings for unused variables + let _ = ( $( $param_name ),* ); + stringify!($fn_name) + }, + )* + $call_type::__PhantomItem(_, _) => unreachable!("__PhantomItem should never be used."), + } + } + + fn get_call_names() -> &'static [&'static str] { + &[ + $( + stringify!($fn_name), + )* + ] + } + } + // Bring `GetPalletVersion` into scope to make it easily usable. pub use $crate::traits::GetPalletVersion as _; // Implement `GetPalletVersion` for `Module` @@ -2162,7 +2190,36 @@ macro_rules! 
impl_outer_dispatch { } } } + impl $crate::dispatch::GetCallMetadata for $call_type { + fn get_call_metadata(&self) -> $crate::dispatch::CallMetadata { + use $crate::dispatch::GetCallName; + match self { + $( $call_type::$camelcase(call) => { + let function_name = call.get_call_name(); + let pallet_name = stringify!($camelcase); + $crate::dispatch::CallMetadata { function_name, pallet_name } + }, )* + } + } + + fn get_module_names() -> &'static [&'static str] { + &[$( + stringify!($camelcase), + )*] + } + fn get_call_names(module: &str) -> &'static [&'static str] { + use $crate::dispatch::{Callable, GetCallName}; + match module { + $( + stringify!($camelcase) => + <<$camelcase as Callable<$runtime>>::Call + as GetCallName>::get_call_names(), + )* + _ => unreachable!(), + } + } + } impl $crate::dispatch::Dispatchable for $call_type { type Origin = $origin; type Config = $call_type; @@ -2588,7 +2645,10 @@ macro_rules! __check_reserved_fn_name { mod tests { use super::*; use crate::weights::{DispatchInfo, DispatchClass, Pays, RuntimeDbWeight}; - use crate::traits::{OnInitialize, OnFinalize, OnIdle, OnRuntimeUpgrade, IntegrityTest, Get, PalletInfo}; + use crate::traits::{ + CallMetadata, GetCallMetadata, GetCallName, OnInitialize, OnFinalize, OnIdle, OnRuntimeUpgrade, + IntegrityTest, Get, PalletInfo, + }; use crate::metadata::*; pub trait Config: system::Config + Sized where Self::AccountId: From { } @@ -2870,6 +2930,26 @@ mod tests { ); } + #[test] + fn call_name() { + let name = Call::::aux_3().get_call_name(); + assert_eq!("aux_3", name); + } + + #[test] + fn call_metadata() { + let call = OuterCall::Test(Call::::aux_3()); + let metadata = call.get_call_metadata(); + let expected = CallMetadata { function_name: "aux_3".into(), pallet_name: "Test".into() }; + assert_eq!(metadata, expected); + } + + #[test] + fn get_call_names() { + let call_names = Call::::get_call_names(); + assert_eq!(["aux_0", "aux_1", "aux_2", "aux_3", "aux_4", "aux_5", "operational"], 
call_names); + } + #[test] fn get_module_names() { let module_names = OuterCall::get_module_names(); diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 1c8c39c22d135..ab1e39620bdd8 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1513,7 +1513,7 @@ pub mod pallet_prelude { /// /// The macro create an enum `Call` with one variant per dispatchable. This enum implements: /// `Clone`, `Eq`, `PartialEq`, `Debug` (with stripped implementation in `not("std")`), `Encode`, -/// `Decode`, `GetDispatchInfo`, `UnfilteredDispatchable`. +/// `Decode`, `GetDispatchInfo`, `GetCallName`, `UnfilteredDispatchable`. /// /// The macro implement on `Pallet`, the `Callable` trait and a function `call_functions` which /// returns the dispatchable metadatas. diff --git a/frame/support/src/traits.rs b/frame/support/src/traits.rs index 84811cd97b6f1..e8ce07528c8a0 100644 --- a/frame/support/src/traits.rs +++ b/frame/support/src/traits.rs @@ -61,7 +61,7 @@ pub use randomness::Randomness; mod metadata; pub use metadata::{ - PalletInfo, PalletVersion, GetPalletVersion, + CallMetadata, GetCallMetadata, GetCallName, PalletInfo, PalletVersion, GetPalletVersion, PALLET_VERSION_STORAGE_KEY_POSTFIX, PalletInfoAccess, }; diff --git a/frame/support/src/traits/metadata.rs b/frame/support/src/traits/metadata.rs index a95690d8ac02d..b13a0464b30c0 100644 --- a/frame/support/src/traits/metadata.rs +++ b/frame/support/src/traits/metadata.rs @@ -41,6 +41,33 @@ pub trait PalletInfoAccess { fn name() -> &'static str; } +/// The function and pallet name of the Call. +#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug)] +pub struct CallMetadata { + /// Name of the function. + pub function_name: &'static str, + /// Name of the pallet to which the function belongs. + pub pallet_name: &'static str, +} + +/// Gets the function name of the Call. +pub trait GetCallName { + /// Return all function names. 
+ fn get_call_names() -> &'static [&'static str]; + /// Return the function name of the Call. + fn get_call_name(&self) -> &'static str; +} + +/// Gets the metadata for the Call - function name and pallet name. +pub trait GetCallMetadata { + /// Return all module names. + fn get_module_names() -> &'static [&'static str]; + /// Return all function names for the given `module`. + fn get_call_names(module: &str) -> &'static [&'static str]; + /// Return a [`CallMetadata`], containing function and pallet name of the Call. + fn get_call_metadata(&self) -> CallMetadata; +} + /// The storage key postfix that is used to store the [`PalletVersion`] per pallet. /// /// The full storage key is built by using: diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 0f3a683530bef..0222d09639074 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -530,6 +530,39 @@ fn call_weight_should_attach_to_call_enum() { ); } +#[test] +fn call_name() { + use frame_support::dispatch::GetCallName; + let name = module3::Call::::aux_4().get_call_name(); + assert_eq!("aux_4", name); +} + +#[test] +fn call_metadata() { + use frame_support::dispatch::{CallMetadata, GetCallMetadata}; + let call = Call::Module3(module3::Call::::aux_4()); + let metadata = call.get_call_metadata(); + let expected = CallMetadata { function_name: "aux_4".into(), pallet_name: "Module3".into() }; + assert_eq!(metadata, expected); +} + +#[test] +fn get_call_names() { + use frame_support::dispatch::GetCallName; + let call_names = module3::Call::::get_call_names(); + assert_eq!(["fail", "aux_1", "aux_2", "aux_3", "aux_4", "operational"], call_names); +} + +#[test] +fn get_module_names() { + use frame_support::dispatch::GetCallMetadata; + let module_names = Call::get_module_names(); + assert_eq!([ + "System", "Module1_1", "Module2", "Module1_2", "NestedModule3", "Module3", + "Module1_4", "Module1_6", 
"Module1_7", "Module1_8", "Module1_9", + ], module_names); +} + #[test] fn call_subtype_conversion() { use frame_support::{dispatch::CallableCallFor, traits::IsSubType}; diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index dd2be49b5b7f9..14ceee4c70343 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -17,7 +17,7 @@ use frame_support::{ weights::{DispatchInfo, DispatchClass, Pays, GetDispatchInfo}, - traits::{OnInitialize, OnFinalize, OnRuntimeUpgrade, GetPalletVersion, OnGenesis}, + traits::{GetCallName, OnInitialize, OnFinalize, OnRuntimeUpgrade, GetPalletVersion, OnGenesis}, dispatch::{UnfilteredDispatchable, Parameter}, storage::unhashed, scale_info, @@ -529,6 +529,11 @@ fn call_expand() { pays_fee: Pays::Yes, } ); + assert_eq!(call_foo.get_call_name(), "foo"); + assert_eq!( + pallet::Call::::get_call_names(), + &["foo", "foo_transactional", "foo_no_post_info"], + ); } #[test] diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 33533fe0f23bb..2c4e3ac9f0b36 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -18,7 +18,7 @@ use frame_support::{ weights::{DispatchInfo, DispatchClass, Pays, GetDispatchInfo}, traits::{ - GetPalletVersion, OnInitialize, OnFinalize, OnRuntimeUpgrade, OnGenesis, + GetCallName, GetPalletVersion, OnInitialize, OnFinalize, OnRuntimeUpgrade, OnGenesis, }, dispatch::UnfilteredDispatchable, storage::unhashed, @@ -326,6 +326,11 @@ fn call_expand() { pays_fee: Pays::Yes, } ); + assert_eq!(call_foo.get_call_name(), "foo"); + assert_eq!( + pallet::Call::::get_call_names(), + &["foo", "foo_transactional"], + ); let call_foo = pallet::Call::::foo(3); assert_eq!( @@ -336,6 +341,11 @@ fn call_expand() { pays_fee: Pays::Yes, } ); + assert_eq!(call_foo.get_call_name(), "foo"); + assert_eq!( + pallet::Call::::get_call_names(), + &["foo", "foo_transactional"], + 
); } #[test] From b825c085d087907b40e80f36abdc555bde306525 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 12:54:23 +0100 Subject: [PATCH 325/503] Skip BalanceSwapAction type parameter --- frame/atomic-swap/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/atomic-swap/src/lib.rs b/frame/atomic-swap/src/lib.rs index ed5013fe3baa9..8189e77f7b0ed 100644 --- a/frame/atomic-swap/src/lib.rs +++ b/frame/atomic-swap/src/lib.rs @@ -90,6 +90,7 @@ pub trait SwapAction { /// A swap action that only allows transferring balances. #[derive(Clone, RuntimeDebug, Eq, PartialEq, Encode, Decode, TypeInfo)] +#[scale_info(skip_type_params(C))] pub struct BalanceSwapAction> { value: >::Balance, _marker: PhantomData, From 72c8e8e2550155a13047d76ca6ab4ddf81f53107 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 12:54:57 +0100 Subject: [PATCH 326/503] Remove unused event metadata macro --- frame/support/src/event.rs | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 007d69a27e7a9..35b77800b99e4 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -289,37 +289,6 @@ macro_rules! __decl_generic_event { } } -#[macro_export] -#[doc(hidden)] -macro_rules! __events_to_metadata { - ( - $( $metadata:expr ),*; - $( #[doc = $doc_attr:tt] )* - $event:ident $( ( $( $param:path ),* $(,)? 
) )*, - $( $rest:tt )* - ) => { - $crate::__events_to_metadata!( - $( $metadata, )* - $crate::metadata::EventMetadata { - name: stringify!($event), - arguments: $crate::sp_std::vec![ - $( $( $crate::metadata::TypeSpec::new::<$param>(stringify!($param)) ),* )* - ], - #[cfg(feature = "metadata-docs")] - documentation: $crate::sp_std::vec![ $( $doc_attr ),* ], - #[cfg(not(feature = "metadata-docs"))] - documentation: $crate::sp_std::vec![], - }; - $( $rest )* - ) - }; - ( - $( $metadata:expr ),*; - ) => { - $crate::sp_std::vec![ $( $metadata ),* ] - } -} - /// Constructs an Event type for a runtime. This is usually called automatically by the /// construct_runtime macro. #[macro_export] From 8c7abdcee6b9abf93447f7abc4e1aa58558b1203 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 14:58:03 +0100 Subject: [PATCH 327/503] Update frame-metadata --- Cargo.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index 9e74a88f0687d..7ef116f8a770a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1839,7 +1839,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#32584873b4fc87044869bd55aa6908feb1d0084d" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#7bd13ff5059718cd0a6dbcac4dd22e1843aa5fff" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", From 6a199e51ce1a2be904e695be74c1994bd58b8694 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 16:03:54 +0100 Subject: [PATCH 328/503] Update primitives/npos-elections/compact/src/codec.rs Co-authored-by: Guillaume Thiolliere --- primitives/npos-elections/compact/src/codec.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/primitives/npos-elections/compact/src/codec.rs b/primitives/npos-elections/compact/src/codec.rs index 9e6ea6b365521..5416587f2044f 100644 --- a/primitives/npos-elections/compact/src/codec.rs +++ 
b/primitives/npos-elections/compact/src/codec.rs @@ -260,13 +260,13 @@ fn scale_info_impl( fn type_info() -> _npos::scale_info::Type<_npos::scale_info::form::MetaForm> { _npos::scale_info::Type::builder() - .path(_npos::scale_info::Path::new(stringify!(#ident), module_path!())) - .composite( - _npos::scale_info::build::Fields::named() - #scale_info_impl_single - #scale_info_impl_double - #scale_info_impl_rest - ) + .path(_npos::scale_info::Path::new(stringify!(#ident), module_path!())) + .composite( + _npos::scale_info::build::Fields::named() + #scale_info_impl_single + #scale_info_impl_double + #scale_info_impl_rest + ) } } ) From 876d77b3d943783dc6aa61304a80d3f08023eb15 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 16:38:19 +0100 Subject: [PATCH 329/503] Manual TypeInfo for Header --- primitives/runtime/src/generic/header.rs | 27 +++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/primitives/runtime/src/generic/header.rs b/primitives/runtime/src/generic/header.rs index 18ae3d45efb5c..021ae85b038a9 100644 --- a/primitives/runtime/src/generic/header.rs +++ b/primitives/runtime/src/generic/header.rs @@ -20,6 +20,7 @@ #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; use crate::codec::{Decode, Encode, Codec, Input, Output, HasCompact, EncodeAsRef, Error}; +use crate::scale_info::TypeInfo; use crate::traits::{ self, Member, AtLeast32BitUnsigned, SimpleBitOps, Hash as HashT, MaybeSerializeDeserialize, MaybeSerialize, MaybeDisplay, @@ -115,18 +116,34 @@ impl Encode for Header where } } -impl scale_info::TypeInfo for Header where - Number: HasCompact + Copy + Into + TryFrom + 'static, +impl TypeInfo for Header where + Number: HasCompact + Copy + Into + TryFrom + TypeInfo + 'static, Hash: HashT, - Hash::Output: scale_info::TypeInfo, + Hash::Output: TypeInfo, { type Identity = Self; fn type_info() -> scale_info::Type { - // todo [AJ] provide accurate custom TypeInfo impl scale_info::Type::builder() 
.path(scale_info::Path::new("Header", module_path!())) - .composite(scale_info::build::Fields::unit()) + .docs(&["Abstraction over a block header for a substrate chain."]) + .composite(scale_info::build::Fields::named() + .field(|f| f + .name("parent_hash").ty::().type_name("Hash::Output") + ) + .field(|f| f + .name("number").compact::().type_name("Number") + ) + .field(|f| f + .name("state_root").ty::().type_name("Hash::Output") + ) + .field(|f| f + .name("extrinsics_root").ty::().type_name("Hash::Output") + ) + .field(|f| f + .name("digest").ty::>().type_name("Digest") + ) + ) } } From b7a2a782b757038b25fe26d0bb56a3f88cf60c30 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 16:50:45 +0100 Subject: [PATCH 330/503] Remove TypeInfo requirement for consts in BoundedVec etc. --- frame/balances/src/lib.rs | 4 ++-- frame/collective/src/lib.rs | 2 +- frame/proxy/src/lib.rs | 4 ++-- frame/support/src/lib.rs | 13 ------------- frame/support/src/storage/bounded_vec.rs | 1 + frame/support/src/storage/weak_bounded_vec.rs | 1 + frame/treasury/src/lib.rs | 2 +- frame/uniques/src/lib.rs | 7 +++---- frame/uniques/src/types.rs | 2 ++ 9 files changed, 13 insertions(+), 23 deletions(-) diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 6ab41df662640..e293f2c5d3047 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -215,10 +215,10 @@ pub mod pallet { /// The maximum number of locks that should exist on an account. /// Not strictly enforced, but used for weight estimation. - type MaxLocks: Get + TypeInfo; + type MaxLocks: Get; /// The maximum number of named reserves that can exist on an account. - type MaxReserves: Get + TypeInfo; + type MaxReserves: Get; /// The id type for named reserves. 
type ReserveIdentifier: Parameter + Member + MaxEncodedLen + Ord + Copy; diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index f51bd3661c607..ace4a3dae111a 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -138,7 +138,7 @@ pub trait Config: frame_system::Config { type MotionDuration: Get; /// Maximum number of proposals allowed to be active in parallel. - type MaxProposals: Get + TypeInfo; + type MaxProposals: Get; /// The maximum number of members supported by the pallet. Used for weight estimation. /// diff --git a/frame/proxy/src/lib.rs b/frame/proxy/src/lib.rs index 6c252e5a53f67..10236f5ec5902 100644 --- a/frame/proxy/src/lib.rs +++ b/frame/proxy/src/lib.rs @@ -136,14 +136,14 @@ pub mod pallet { /// The maximum amount of proxies allowed for a single account. #[pallet::constant] - type MaxProxies: Get + TypeInfo; + type MaxProxies: Get; /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; /// The maximum amount of time-delayed announcements that are allowed to be pending. #[pallet::constant] - type MaxPending: Get + TypeInfo; + type MaxPending: Get; /// The type of hash used for hashing the call. type CallHasher: Hash; diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index ab1e39620bdd8..edea1a3243eee 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -364,19 +364,6 @@ macro_rules! 
parameter_types { I::from($value) } } - - impl $crate::scale_info::TypeInfo for $name { - type Identity = Self; - - fn type_info() -> $crate::scale_info::Type<$crate::scale_info::form::MetaForm> { - $crate::scale_info::Type::builder() - .path($crate::scale_info::Path::new(stringify!($name), module_path!())) - .composite( - scale_info::build::Fields::unnamed() - .field(|f| f.ty::<$type>().type_name(stringify!($type))) - ) - } - } }; (IMPL $name:ident, $type:ty, $value:expr) => { impl $name { diff --git a/frame/support/src/storage/bounded_vec.rs b/frame/support/src/storage/bounded_vec.rs index 00e09d6662795..bdf1b020cebf2 100644 --- a/frame/support/src/storage/bounded_vec.rs +++ b/frame/support/src/storage/bounded_vec.rs @@ -38,6 +38,7 @@ use crate::{ /// As the name suggests, the length of the queue is always bounded. All internal operations ensure /// this bound is respected. #[derive(Encode, scale_info::TypeInfo)] +#[scale_info(skip_type_params(S))] pub struct BoundedVec(Vec, PhantomData); /// A bounded slice. diff --git a/frame/support/src/storage/weak_bounded_vec.rs b/frame/support/src/storage/weak_bounded_vec.rs index feac9ecb8a6a4..ed30781ad78bf 100644 --- a/frame/support/src/storage/weak_bounded_vec.rs +++ b/frame/support/src/storage/weak_bounded_vec.rs @@ -38,6 +38,7 @@ use crate::{ /// The length of the vec is not strictly bounded. Decoding a vec with more element that the bound /// is accepted, and some method allow to bypass the restriction with warnings. #[derive(Encode, scale_info::TypeInfo)] +#[scale_info(skip_type_params(S))] pub struct WeakBoundedVec(Vec, PhantomData); impl> Decode for WeakBoundedVec { diff --git a/frame/treasury/src/lib.rs b/frame/treasury/src/lib.rs index 952b64b10703b..500f8de687dc3 100644 --- a/frame/treasury/src/lib.rs +++ b/frame/treasury/src/lib.rs @@ -134,7 +134,7 @@ pub trait Config: frame_system::Config { type SpendFunds: SpendFunds; /// The maximum number of approvals that can wait in the spending queue. 
- type MaxApprovals: Get + TypeInfo; + type MaxApprovals: Get; } /// A trait to allow the Treasury Pallet to spend it's funds for other purposes. diff --git a/frame/uniques/src/lib.rs b/frame/uniques/src/lib.rs index c021584216c71..fce7ba29c2a24 100644 --- a/frame/uniques/src/lib.rs +++ b/frame/uniques/src/lib.rs @@ -40,7 +40,6 @@ mod functions; mod impl_nonfungibles; pub use types::*; -use scale_info::TypeInfo; use sp_std::prelude::*; use sp_runtime::{RuntimeDebug, ArithmeticError, traits::{Zero, StaticLookup, Saturating}}; use codec::{Encode, Decode, HasCompact}; @@ -96,13 +95,13 @@ pub mod pallet { type DepositPerByte: Get>; /// The maximum length of data stored on-chain. - type StringLimit: Get + TypeInfo; + type StringLimit: Get; /// The maximum length of an attribute key. - type KeyLimit: Get + TypeInfo; + type KeyLimit: Get; /// The maximum length of an attribute value. - type ValueLimit: Get + TypeInfo; + type ValueLimit: Get; /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; diff --git a/frame/uniques/src/types.rs b/frame/uniques/src/types.rs index 3f4169b2c3af1..55f206f117d0e 100644 --- a/frame/uniques/src/types.rs +++ b/frame/uniques/src/types.rs @@ -96,6 +96,7 @@ pub struct InstanceDetails { } #[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, TypeInfo)] +#[scale_info(skip_type_params(StringLimit))] pub struct ClassMetadata> { /// The balance deposited for this metadata. /// @@ -110,6 +111,7 @@ pub struct ClassMetadata> { } #[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, Default, TypeInfo)] +#[scale_info(skip_type_params(StringLimit))] pub struct InstanceMetadata> { /// The balance deposited for this metadata. 
/// From b63835a5234c168313f78e35e7de2a9ea86ccae8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 16:52:44 +0100 Subject: [PATCH 331/503] Another TypeInfo bound removed --- frame/assets/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/assets/src/lib.rs b/frame/assets/src/lib.rs index 89ec77a0eb8e2..8b8384ff8a5f5 100644 --- a/frame/assets/src/lib.rs +++ b/frame/assets/src/lib.rs @@ -202,7 +202,7 @@ pub mod pallet { type ApprovalDeposit: Get>; /// The maximum length of a name or symbol stored on-chain. - type StringLimit: Get + scale_info::TypeInfo; + type StringLimit: Get; /// A hook to allow a per-asset, per-account minimum balance to be enforced. This must be /// respected in all permissionless operations. From e9e60c08d77aa1cc382c9c13670a5b4e6debc241 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 7 Jul 2021 17:15:56 +0100 Subject: [PATCH 332/503] review: fix indentation --- frame/support/procedural/tools/src/docs.rs | 28 +++++++++++----------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/frame/support/procedural/tools/src/docs.rs b/frame/support/procedural/tools/src/docs.rs index 85715a66dbd73..4f865135262ab 100644 --- a/frame/support/procedural/tools/src/docs.rs +++ b/frame/support/procedural/tools/src/docs.rs @@ -18,23 +18,23 @@ #[cfg(feature = "metadata-docs")] /// Return all doc attributes literals found. 
pub fn get_doc_literals(attrs: &Vec) -> Vec { - attrs.iter() - .filter_map(|attr| { - if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() { - if meta.path.get_ident().map_or(false, |ident| ident == "doc") { - Some(meta.lit) - } else { - None - } - } else { - None - } - }) - .collect() + attrs.iter() + .filter_map(|attr| { + if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() { + if meta.path.get_ident().map_or(false, |ident| ident == "doc") { + Some(meta.lit) + } else { + None + } + } else { + None + } + }) + .collect() } #[cfg(not(feature = "metadata-docs"))] /// No-op, returns empty Vec unless the "metadata-docs" feature is enabled. pub fn get_doc_literals(_attrs: &Vec) -> Vec { - Vec::new() + Vec::new() } \ No newline at end of file From 68f465917546c52df0b5dab077724fc62509f11d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 8 Jul 2021 10:40:21 +0100 Subject: [PATCH 333/503] TypeInfo impls for Identity types --- frame/identity/src/types.rs | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index 59781aadbd31e..9b5b9c262c9eb 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -16,6 +16,7 @@ // limitations under the License. use codec::{Encode, Decode, MaxEncodedLen}; +use scale_info::TypeInfo; use enumflags2::BitFlags; use frame_support::{ traits::{ConstU32, Get}, @@ -33,7 +34,7 @@ use super::*; /// than 32-bytes then it will be truncated when encoding. /// /// Can also be `None`. -#[derive(Clone, Eq, PartialEq, RuntimeDebug, MaxEncodedLen)] +#[derive(Clone, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] // todo: [AJ] custom TypeInfo pub enum Data { /// No data here. None, @@ -106,7 +107,7 @@ pub type RegistrarIndex = u32; /// /// NOTE: Registrars may pay little attention to some fields. Registrars may want to make clear /// which fields their attestation is relevant for by off-chain means. 
-#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen)] +#[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub enum Judgement< Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq > { @@ -157,7 +158,7 @@ impl< /// The fields that we use to identify the owner of an account with. Each corresponds to a field /// in the `IdentityInfo` struct. #[repr(u64)] -#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, BitFlags, RuntimeDebug)] +#[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, BitFlags, RuntimeDebug, TypeInfo)] pub enum IdentityField { Display = 0b0000000000000000000000000000000000000000000000000000000000000001, Legal = 0b0000000000000000000000000000000000000000000000000000000000000010, @@ -197,14 +198,32 @@ impl Decode for IdentityFields { Ok(Self(>::from_bits(field as u64).map_err(|_| "invalid value")?)) } } +impl TypeInfo for IdentityFields { + type Identity = Self; + + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("IdentityFields", module_path!())) + .composite( + scale_info::build::Fields::unnamed() + .field(|f| f.ty::() + .type_name("BitFlags") + ) + ) + } +} /// Information concerning the identity of the controller of an account. /// /// NOTE: This should be stored at the end of the storage item to facilitate the addition of extra /// fields in a backwards compatible way through a specialized `Decode` impl. -#[derive(CloneNoBound, Encode, Decode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound)] +#[derive( + CloneNoBound, Encode, Decode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound, + TypeInfo, +)] #[codec(mel_bound(FieldLimit: Get))] #[cfg_attr(test, derive(frame_support::DefaultNoBound))] +#[scale_info(skip_type_params(FieldLimit))] pub struct IdentityInfo> { /// Additional fields of the identity that are not catered for with the struct's explicit /// fields. 
@@ -255,12 +274,13 @@ pub struct IdentityInfo> { /// /// NOTE: This is stored separately primarily to facilitate the addition of extra fields in a /// backwards compatible way through a specialized `Decode` impl. -#[derive(CloneNoBound, Encode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound)] +#[derive(CloneNoBound, Encode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound, TypeInfo)] #[codec(mel_bound( Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq + Zero + Add, MaxJudgements: Get, MaxAdditionalFields: Get, ))] +#[scale_info(skip_type_params(MaxJudgements, MaxAdditionalFields))] pub struct Registration< Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq, MaxJudgements: Get, @@ -301,7 +321,7 @@ impl< } /// Information concerning a registrar. -#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen)] +#[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct RegistrarInfo< Balance: Encode + Decode + Clone + Debug + Eq + PartialEq, AccountId: Encode + Decode + Clone + Debug + Eq + PartialEq From 24adee55267c5db34d7c4b4b2c48e4f0c34c6b8e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 8 Jul 2021 10:49:33 +0100 Subject: [PATCH 334/503] Add some todos to add custom TypeInfo impls --- frame/democracy/src/vote.rs | 2 +- primitives/runtime/src/generic/era.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/democracy/src/vote.rs b/frame/democracy/src/vote.rs index 0b607376930ad..a1e2a647a7fd6 100644 --- a/frame/democracy/src/vote.rs +++ b/frame/democracy/src/vote.rs @@ -24,7 +24,7 @@ use sp_runtime::{RuntimeDebug, traits::{Saturating, Zero}}; use crate::{Conviction, ReferendumIndex, Delegations}; /// A number of lock periods, plus a vote, one way or the other. 
-#[derive(Copy, Clone, Eq, PartialEq, Default, RuntimeDebug, TypeInfo)] +#[derive(Copy, Clone, Eq, PartialEq, Default, RuntimeDebug, TypeInfo)] // todo: [AJ] custom TypeInfo pub struct Vote { pub aye: bool, pub conviction: Conviction, diff --git a/primitives/runtime/src/generic/era.rs b/primitives/runtime/src/generic/era.rs index adf3661bf2649..db8118cec0898 100644 --- a/primitives/runtime/src/generic/era.rs +++ b/primitives/runtime/src/generic/era.rs @@ -29,7 +29,7 @@ pub type Period = u64; pub type Phase = u64; /// An era to describe the longevity of a transaction. -#[derive(PartialEq, Eq, Clone, Copy, scale_info::TypeInfo, sp_core::RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, Copy, sp_core::RuntimeDebug, scale_info::TypeInfo)] // todo: [AJ] custom TypeInfo #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum Era { /// The transaction is valid forever. The genesis hash must be present in the signed content. From ea6ce170e81393f88a835722b9ede8925f0651f4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 8 Jul 2021 15:32:48 +0100 Subject: [PATCH 335/503] Update frame/support/procedural/src/pallet/expand/pallet_struct.rs Co-authored-by: Guillaume Thiolliere --- frame/support/procedural/src/pallet/expand/pallet_struct.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index a0eb6130ff4ea..bb0b2ba2cb6cc 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -21,6 +21,7 @@ use frame_support_procedural_tools::get_doc_literals; /// * Add derive trait on Pallet /// * Implement GetPalletVersion on Pallet /// * Implement OnGenesis on Pallet +/// * Implement `fn error_metadata` on Pallet /// * declare Module type alias for construct_runtime /// * replace the first field type of `struct Pallet` with `PhantomData` if it is `_` /// * implementation 
of `PalletInfoAccess` information From f56a2c19bc2d424fd816008be842c519b3cb75d4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 8 Jul 2021 16:39:07 +0100 Subject: [PATCH 336/503] Add some todos to add custom TypeInfo impls --- frame/identity/src/types.rs | 61 +++++++++++++++++++++++++++++++++---- 1 file changed, 55 insertions(+), 6 deletions(-) diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index 9b5b9c262c9eb..72d310d6e0121 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -16,7 +16,7 @@ // limitations under the License. use codec::{Encode, Decode, MaxEncodedLen}; -use scale_info::TypeInfo; +use scale_info::{build::{Fields, Variants}, Path, Type, TypeInfo}; use enumflags2::BitFlags; use frame_support::{ traits::{ConstU32, Get}, @@ -34,7 +34,7 @@ use super::*; /// than 32-bytes then it will be truncated when encoding. /// /// Can also be `None`. -#[derive(Clone, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] // todo: [AJ] custom TypeInfo +#[derive(Clone, Eq, PartialEq, RuntimeDebug, MaxEncodedLen)] pub enum Data { /// No data here. None, @@ -94,6 +94,55 @@ impl Encode for Data { } impl codec::EncodeLike for Data {} +/// Add a Raw variant with the given index and a fixed sized byte array +macro_rules! 
data_raw_variants { + ($variants:ident, $(($index:literal, $size:literal)),* ) => { + $variants + $( + .variant(stringify!(Raw$size), |v| v + .index($index) + .fields(Fields::unnamed().field(|f| f.ty::<[u8; $size]>())) + ) + )* + } +} + +impl TypeInfo for Data { + type Identity = Self; + + fn type_info() -> Type { + let variants = Variants::new() + .variant("None", |v| v.index(0)); + + // create a variant for all sizes of Raw data from 0-32 + let variants = data_raw_variants!(variants, + (1, 0), (2, 1), (3, 2), (4, 3), (5, 4), (6, 5), (7, 6), (8, 7), + (9, 8), (10, 9), (11, 10), (12, 11), (13, 12), (14, 13), (15, 14), (16, 15), + (17, 16), (18, 17), (19, 18), (20, 19), (21, 20), (22, 21), (23, 22), (24, 23), + (25, 24), (26, 25), (27, 26), (28, 27), (29, 28), (30, 29), (31, 30), (32, 31), (33, 32) + ); + + let variants = + variants + .variant("BlakeTwo256", |v| v + .index(34) + .fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>()))) + .variant("Sha256", |v| v + .index(35) + .fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>()))) + .variant("Keccak256", |v| v + .index(36) + .fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>()))) + .variant("ShaThree256", |v| v + .index(37) + .fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>()))); + + Type::builder() + .path(Path::new("Data", module_path!())) + .variant(variants) + } +} + impl Default for Data { fn default() -> Self { Self::None @@ -201,11 +250,11 @@ impl Decode for IdentityFields { impl TypeInfo for IdentityFields { type Identity = Self; - fn type_info() -> scale_info::Type { - scale_info::Type::builder() - .path(scale_info::Path::new("IdentityFields", module_path!())) + fn type_info() -> Type { + Type::builder() + .path(Path::new("IdentityFields", module_path!())) .composite( - scale_info::build::Fields::unnamed() + Fields::unnamed() .field(|f| f.ty::() .type_name("BitFlags") ) From 6fbce22b1063a64af134c51b82299d99559686b4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Jul 2021 10:09:49 +0100 
Subject: [PATCH 337/503] Add a test for manual Data TypeInfo impl --- Cargo.lock | 4 +- frame/identity/Cargo.toml | 2 +- frame/identity/src/types.rs | 75 ++++++++++++++++++++++++++++++++++++- 3 files changed, 76 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 14122a3618b36..54aa1a3700358 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8301,9 +8301,9 @@ dependencies = [ [[package]] name = "scale-info" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8522f54f50117bc4281f42b611bb71b9cc4b9c2f40cf2b725fd518e1c0d0f160" +checksum = "0af18b6ba34ebe0429199eb77e64f04c98508b8afb420aaac8fc187bb1c8ff02" dependencies = [ "bitvec", "cfg-if 1.0.0", diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index d38b7c7531885..def71efaa2c95 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.2", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "3.0.0", default-features = false, path = "../../primitives/std" } sp-io = { version = "3.0.0", default-features = false, path = "../../primitives/io" } diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index 72d310d6e0121..49b29423ec170 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -99,7 +99,7 @@ macro_rules! 
data_raw_variants { ($variants:ident, $(($index:literal, $size:literal)),* ) => { $variants $( - .variant(stringify!(Raw$size), |v| v + .variant(concat!(stringify!(Raw), stringify!($size)), |v| v .index($index) .fields(Fields::unnamed().field(|f| f.ty::<[u8; $size]>())) ) @@ -119,7 +119,8 @@ impl TypeInfo for Data { (1, 0), (2, 1), (3, 2), (4, 3), (5, 4), (6, 5), (7, 6), (8, 7), (9, 8), (10, 9), (11, 10), (12, 11), (13, 12), (14, 13), (15, 14), (16, 15), (17, 16), (18, 17), (19, 18), (20, 19), (21, 20), (22, 21), (23, 22), (24, 23), - (25, 24), (26, 25), (27, 26), (28, 27), (29, 28), (30, 29), (31, 30), (32, 31), (33, 32) + (25, 24), (26, 25), (27, 26), (28, 27), (29, 28), (30, 29), (31, 30), (32, 31), + (33, 32) ); let variants = @@ -385,3 +386,73 @@ pub struct RegistrarInfo< /// these fields. pub fields: IdentityFields, } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn manual_data_type_info() { + let mut registry = scale_info::Registry::new(); + let type_id = registry.register_type(&scale_info::meta_type::()); + let registry: scale_info::PortableRegistry = registry.into(); + let type_info = registry.resolve(type_id.id()).unwrap(); + + let check_type_info = |data: &Data| { + let variant_name = + match data { + Data::None => "None".to_string(), + Data::BlakeTwo256(_) => "BlakeTwo256".to_string(), + Data::Sha256(_) => "Sha256".to_string(), + Data::Keccak256(_) => "Keccak256".to_string(), + Data::ShaThree256(_) => "ShaThree256".to_string(), + Data::Raw(bytes) => format!("Raw{}", bytes.len()), + }; + if let scale_info::TypeDef::Variant(variant) = type_info.type_def() { + let variant = variant + .variants() + .iter() + .find(|v| v.name() == &variant_name) + .expect(&format!("Expected to find variant {}", variant_name)); + + let index = variant.index().expect("index for all variants should be set"); + + let field_arr_len = variant + .fields() + .first() + .and_then(|f| registry.resolve(f.ty().id())) + .map(|ty| + if let scale_info::TypeDef::Array(arr) = 
ty.type_def() { + arr.len() + } else { + panic!("Should be an array type") + }) + .unwrap_or(0); + + let encoded = data.encode(); + assert_eq!(encoded[0], index); + assert_eq!(encoded.len() as u32 - 1, field_arr_len); + } else { + panic!("Should be a variant type") + }; + }; + + let mut data = vec! [ + Data::None, + Data::BlakeTwo256(Default::default()), + Data::Sha256(Default::default()), + Data::Keccak256(Default::default()), + Data::ShaThree256(Default::default()), + ]; + + // A Raw instance for all possible sizes of the Raw data + for n in 0..32 { + data.push(Data::Raw(vec![0u8; n as usize].try_into().unwrap())) + } + + for d in data.iter() { + check_type_info(d); + } + } +} + From 39073c17df361324fa75cace23b7d4229741f0b1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Jul 2021 10:40:19 +0100 Subject: [PATCH 338/503] Add custom TypeInfo impl for Vote --- frame/democracy/src/vote.rs | 14 +++++++++++++- primitives/runtime/src/generic/header.rs | 2 +- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/frame/democracy/src/vote.rs b/frame/democracy/src/vote.rs index a1e2a647a7fd6..1f2baa81833a8 100644 --- a/frame/democracy/src/vote.rs +++ b/frame/democracy/src/vote.rs @@ -24,7 +24,7 @@ use sp_runtime::{RuntimeDebug, traits::{Saturating, Zero}}; use crate::{Conviction, ReferendumIndex, Delegations}; /// A number of lock periods, plus a vote, one way or the other. 
-#[derive(Copy, Clone, Eq, PartialEq, Default, RuntimeDebug, TypeInfo)] // todo: [AJ] custom TypeInfo +#[derive(Copy, Clone, Eq, PartialEq, Default, RuntimeDebug)] pub struct Vote { pub aye: bool, pub conviction: Conviction, @@ -49,6 +49,18 @@ impl Decode for Vote { } } +impl TypeInfo for Vote { + type Identity = Self; + + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("Vote", module_path!())) + .composite(scale_info::build::Fields::unnamed() + .field(|f| f.ty::().docs(&["Raw vote byte, encodes aye + conviction"])) + ) + } +} + /// A vote for a referendum of a particular account. #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, RuntimeDebug, TypeInfo)] pub enum AccountVote { diff --git a/primitives/runtime/src/generic/header.rs b/primitives/runtime/src/generic/header.rs index 021ae85b038a9..d924415f926d9 100644 --- a/primitives/runtime/src/generic/header.rs +++ b/primitives/runtime/src/generic/header.rs @@ -123,7 +123,7 @@ impl TypeInfo for Header where { type Identity = Self; - fn type_info() -> scale_info::Type { + fn type_info() -> scale_info::Type { scale_info::Type::builder() .path(scale_info::Path::new("Header", module_path!())) .docs(&["Abstraction over a block header for a substrate chain."]) From 481d77e545f65e114cd82f9d08bb40a0da0f89cd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Jul 2021 13:26:35 +0100 Subject: [PATCH 339/503] Era custom TypeInfo crimes --- primitives/runtime/src/generic/era.rs | 49 ++++++++++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/primitives/runtime/src/generic/era.rs b/primitives/runtime/src/generic/era.rs index db8118cec0898..6b365f221b3bd 100644 --- a/primitives/runtime/src/generic/era.rs +++ b/primitives/runtime/src/generic/era.rs @@ -29,7 +29,7 @@ pub type Period = u64; pub type Phase = u64; /// An era to describe the longevity of a transaction. 
-#[derive(PartialEq, Eq, Clone, Copy, sp_core::RuntimeDebug, scale_info::TypeInfo)] // todo: [AJ] custom TypeInfo +#[derive(PartialEq, Eq, Clone, Copy, sp_core::RuntimeDebug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub enum Era { /// The transaction is valid forever. The genesis hash must be present in the signed content. @@ -137,6 +137,53 @@ impl Decode for Era { } } +/// Add Mortal{N}(u8) variants with the given indices, to describe custom encoding. +macro_rules! mortal_variants { + ($variants:ident, $($index:literal),* ) => { + $variants + $( + .variant(concat!(stringify!(Mortal), stringify!($index)), |v| v + .index($index) + .fields(scale_info::build::Fields::unnamed().field(|f| f.ty::())) + ) + )* + } +} + +impl scale_info::TypeInfo for Era { + type Identity = Self; + + fn type_info() -> scale_info::Type { + let variants = scale_info::build::Variants::new() + .variant("Immortal", |v| v.index(0)); + + // this is necessary since the size of the encoded Mortal variant is `u16`, conditional on + // the value of the first byte being > 0. 
+ let variants = mortal_variants!(variants, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, + 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, + 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, + 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, + 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, + 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, + 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, + 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, + 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, + 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, + 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, + 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, + 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255 + ); + + scale_info::Type::builder() + .path(scale_info::Path::new("Era", module_path!())) + .variant(variants) + } +} + #[cfg(test)] mod tests { use super::*; From 214aa2399ea2b7eddf11c5358a73ef17a42841a3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Jul 2021 16:49:05 +0100 Subject: [PATCH 340/503] Revert finality-grandpa version to 0.14.z --- Cargo.lock | 4 ++-- client/finality-grandpa-warp-sync/Cargo.toml | 2 +- client/finality-grandpa/Cargo.toml | 4 ++-- client/finality-grandpa/rpc/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 54aa1a3700358..274e21aeb7d11 100644 --- a/Cargo.lock +++ 
b/Cargo.lock @@ -1690,8 +1690,8 @@ dependencies = [ [[package]] name = "finality-grandpa" -version = "0.15.0" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#c6e1612b93a20574513d542f2a4e628c693a455c" +version = "0.14.3" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#e7cdb8781c51f2f02d1dc24de5b50f811be1309f" dependencies = [ "either", "futures 0.3.15", diff --git a/client/finality-grandpa-warp-sync/Cargo.toml b/client/finality-grandpa-warp-sync/Cargo.toml index 16ce463119719..27728e159c762 100644 --- a/client/finality-grandpa-warp-sync/Cargo.toml +++ b/client/finality-grandpa-warp-sync/Cargo.toml @@ -28,7 +28,7 @@ sp-finality-grandpa = { version = "3.0.0", path = "../../primitives/finality-gra sp-runtime = { version = "3.0.0", path = "../../primitives/runtime" } [dev-dependencies] -finality-grandpa = { version = "0.15.0" } +finality-grandpa = { version = "0.14.1" } rand = "0.8" sc-block-builder = { version = "0.9.0", path = "../block-builder" } sp-consensus = { version = "0.9.0", path = "../../primitives/consensus/common" } diff --git a/client/finality-grandpa/Cargo.toml b/client/finality-grandpa/Cargo.toml index 5f876cc49dc9a..3cb577aee5db8 100644 --- a/client/finality-grandpa/Cargo.toml +++ b/client/finality-grandpa/Cargo.toml @@ -44,7 +44,7 @@ sc-network-gossip = { version = "0.9.0", path = "../network-gossip" } sp-finality-grandpa = { version = "3.0.0", path = "../../primitives/finality-grandpa" } prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus", version = "0.9.0"} sc-block-builder = { version = "0.9.0", path = "../block-builder" } -finality-grandpa = { version = "0.15.0", features = ["derive-codec"] } +finality-grandpa = { version = "0.14.1", features = ["derive-codec"] } pin-project = "1.0.4" linked-hash-map = "0.5.2" async-trait = "0.1.42" @@ -52,7 +52,7 @@ wasm-timer = "0.2" [dev-dependencies] assert_matches = "1.3.0" -finality-grandpa = { 
version = "0.15.0", features = ["derive-codec", "test-helpers"] } +finality-grandpa = { version = "0.14.1", features = ["derive-codec", "test-helpers"] } sc-network = { version = "0.9.0", path = "../network" } sc-network-test = { version = "0.8.0", path = "../network/test" } sp-keyring = { version = "3.0.0", path = "../../primitives/keyring" } diff --git a/client/finality-grandpa/rpc/Cargo.toml b/client/finality-grandpa/rpc/Cargo.toml index e34e0947d56d7..97359120fcaae 100644 --- a/client/finality-grandpa/rpc/Cargo.toml +++ b/client/finality-grandpa/rpc/Cargo.toml @@ -14,7 +14,7 @@ sc-rpc = { version = "3.0.0", path = "../../rpc" } sp-blockchain = { version = "3.0.0", path = "../../../primitives/blockchain" } sp-core = { version = "3.0.0", path = "../../../primitives/core" } sp-runtime = { version = "3.0.0", path = "../../../primitives/runtime" } -finality-grandpa = { version = "0.15.0", features = ["derive-codec"] } +finality-grandpa = { version = "0.14.1", features = ["derive-codec"] } jsonrpc-core = "15.1.0" jsonrpc-core-client = "15.1.0" jsonrpc-derive = "15.1.0" diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 2b99b800fd952..cc2776a1b4f46 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -32,7 +32,7 @@ log = { version = "0.4.14", default-features = false } [dev-dependencies] frame-benchmarking = { version = "3.1.0", path = "../benchmarking" } -grandpa = { package = "finality-grandpa", version = "0.15.0", features = ["derive-codec"] } +grandpa = { package = "finality-grandpa", version = "0.14.1", features = ["derive-codec"] } sp-keyring = { version = "3.0.0", path = "../../primitives/keyring" } pallet-balances = { version = "3.0.0", path = "../balances" } pallet-offences = { version = "3.0.0", path = "../offences" } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 46e2737e6cd2b..8feb5cabf50b4 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ 
b/primitives/finality-grandpa/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } -grandpa = { package = "finality-grandpa", version = "0.15.0", default-features = false, features = ["derive-codec"] } +grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.101", optional = true, features = ["derive"] } sp-api = { version = "3.0.0", default-features = false, path = "../api" } From 90315e619881ceba9998e689977fa610250714af Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Jul 2021 16:54:35 +0100 Subject: [PATCH 341/503] review: renamed module to pallet_constants_metadata --- .../support/procedural/src/construct_runtime/expand/metadata.rs | 2 +- frame/support/procedural/src/pallet/expand/constants.rs | 2 +- frame/support/src/dispatch.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index c1fe66d45ba3d..315091e3d9807 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -159,7 +159,7 @@ fn expand_pallet_metadata_constants( let instance = decl.instance.as_ref().into_iter(); quote!{ - #path::Pallet::<#runtime #(, #path::#instance)*>::module_constants_metadata() + #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() } } diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index b92d80e212094..f5b5de0251827 100644 --- 
a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -109,7 +109,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ #[doc(hidden)] - pub fn module_constants_metadata() + pub fn pallet_constants_metadata() -> #frame_support::sp_std::vec::Vec<#frame_support::metadata::PalletConstantMetadata> { #frame_support::sp_std::vec![ #( #consts ),* ] diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index c7c1b12db6d64..c3cb732bf328a 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2443,7 +2443,7 @@ macro_rules! __impl_module_constants_metadata { { #[doc(hidden)] #[allow(dead_code)] - pub fn module_constants_metadata() -> ::sp_std::vec::Vec<$crate::metadata::PalletConstantMetadata> { + pub fn pallet_constants_metadata() -> ::sp_std::vec::Vec<$crate::metadata::PalletConstantMetadata> { // Create the `ByteGetter`s $( #[allow(non_upper_case_types)] From 38de8362c9ead803b1d7b3708a9a6a4a22f99958 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Jul 2021 16:56:01 +0100 Subject: [PATCH 342/503] New line at end of file --- frame/support/procedural/tools/src/docs.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/procedural/tools/src/docs.rs b/frame/support/procedural/tools/src/docs.rs index 4f865135262ab..d51313a10804f 100644 --- a/frame/support/procedural/tools/src/docs.rs +++ b/frame/support/procedural/tools/src/docs.rs @@ -37,4 +37,4 @@ pub fn get_doc_literals(attrs: &Vec) -> Vec { /// No-op, returns empty Vec unless the "metadata-docs" feature is enabled. 
pub fn get_doc_literals(_attrs: &Vec) -> Vec { Vec::new() -} \ No newline at end of file +} From 7ea7943c4f0ec6c02eacaa69629cb8fc925b499b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 9 Jul 2021 16:58:08 +0100 Subject: [PATCH 343/503] Add missing scale-info/std --- frame/transaction-payment/Cargo.toml | 1 + primitives/transaction-storage-proof/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 79b6ddfc83b47..2a6e5671d4beb 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -36,6 +36,7 @@ default = ["std"] std = [ "serde", "codec/std", + "scale-info/std", "sp-core/std", "sp-io/std", "sp-runtime/std", diff --git a/primitives/transaction-storage-proof/Cargo.toml b/primitives/transaction-storage-proof/Cargo.toml index 66d866bd2ae11..df0cb48a204e9 100644 --- a/primitives/transaction-storage-proof/Cargo.toml +++ b/primitives/transaction-storage-proof/Cargo.toml @@ -27,6 +27,7 @@ async-trait = { version = "0.1.48", optional = true } default = [ "std" ] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-inherents/std", "sp-runtime/std", From ce2ecc8c5bd046ab04f71e62bd12436a7e9be394 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Jul 2021 09:54:36 +0100 Subject: [PATCH 344/503] Update frame/support/src/storage/types/mod.rs Co-authored-by: Guillaume Thiolliere --- frame/support/src/storage/types/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/storage/types/mod.rs b/frame/support/src/storage/types/mod.rs index 4457fad21eb69..46824c26ded3e 100644 --- a/frame/support/src/storage/types/mod.rs +++ b/frame/support/src/storage/types/mod.rs @@ -105,7 +105,7 @@ where /// Provide metadata for a storage entry. /// -/// Implemented by each of the storage entry kinds: value, map and doublemap. +/// Implemented by each of the storage entry kinds: value, map, doublemap and nmap. 
pub trait StorageEntryMetadata { const MODIFIER: StorageEntryModifier; const NAME: &'static str; From 60b5887091414db281212285ddcf86b8ad5ab1e4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Jul 2021 10:02:12 +0100 Subject: [PATCH 345/503] Remove StorageEntryType::Map unused flag --- Cargo.lock | 2 +- frame/support/procedural/src/storage/metadata.rs | 1 - frame/support/src/lib.rs | 4 ---- frame/support/src/storage/types/map.rs | 1 - frame/support/test/tests/decl_storage.rs | 8 -------- frame/support/test/tests/instance.rs | 1 - frame/support/test/tests/pallet.rs | 3 --- frame/support/test/tests/pallet_instance.rs | 2 -- 8 files changed, 1 insertion(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 13e9a6c59a09a..74ca592d5482e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1840,7 +1840,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#7bd13ff5059718cd0a6dbcac4dd22e1843aa5fff" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#8c37923bdfc7853a3b8521ced643afb61a193878" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index 18d30cfa1b7ae..91e6e94684322 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -40,7 +40,6 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> hasher: #scrate::metadata::#hasher, key: #scrate::scale_info::meta_type::<#key>(), value: #scrate::scale_info::meta_type::<#value_type>(), - unused: false, } } }, diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index edea1a3243eee..dfcd39cdcd1a1 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1124,7 +1124,6 @@ pub mod tests { hasher: StorageHasher::Twox64Concat, key: scale_info::meta_type::(), value: 
scale_info::meta_type::(), - unused: false, }, default: vec![0, 0, 0, 0, 0, 0, 0, 0], documentation: vec![], @@ -1136,7 +1135,6 @@ pub mod tests { hasher: StorageHasher::Blake2_128Concat, key: scale_info::meta_type::(), value: scale_info::meta_type::(), - unused: false, }, default: vec![0], documentation: vec![], @@ -1148,7 +1146,6 @@ pub mod tests { hasher: StorageHasher::Identity, key: scale_info::meta_type::(), value: scale_info::meta_type::(), - unused: false }, default: vec![0, 0, 0, 0], documentation: vec![], @@ -1160,7 +1157,6 @@ pub mod tests { hasher: StorageHasher::Blake2_128Concat, key: scale_info::meta_type::(), value: scale_info::meta_type::(), - unused: false }, default: vec![0], documentation: vec![], diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index 9ccfddf97b21a..e15cc1b1550ae 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -339,7 +339,6 @@ impl StorageEntryMeta hasher: Hasher::METADATA, key: scale_info::meta_type::(), value: scale_info::meta_type::(), - unused: false } } diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index 3abf6f6000ac3..30f86cfd92b93 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -224,7 +224,6 @@ mod tests { hasher: StorageHasher::Blake2_128Concat, key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), - unused: false, }, default: vec![0], documentation: vec![], @@ -236,7 +235,6 @@ mod tests { hasher: StorageHasher::Blake2_128Concat, key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), - unused: false, }, default: vec![0], documentation: vec![], @@ -248,7 +246,6 @@ mod tests { // hasher: StorageHasher::Blake2_128Concat, // key: scale_info::meta_type::(), // value: scale_info::meta_type::(), - // unused: false, // }, // default: vec![0], // documentation: vec![], @@ -260,7 +257,6 @@ 
mod tests { // hasher: StorageHasher::Blake2_128Concat, // key: scale_info::meta_type::(), // value: scale_info::meta_type::(), - // unused: false, // }, // default: vec![0], // documentation: vec![], @@ -272,7 +268,6 @@ mod tests { hasher: StorageHasher::Blake2_128Concat, key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), - unused: false, }, default: vec![0, 0, 0, 0], documentation: vec![], @@ -284,7 +279,6 @@ mod tests { hasher: StorageHasher::Blake2_128Concat, key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), - unused: false, }, default: vec![0, 0, 0, 0], documentation: vec![], @@ -296,7 +290,6 @@ mod tests { hasher: StorageHasher::Blake2_128Concat, key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), - unused: false, }, default: vec![109, 97, 112, 100], // "map" documentation: vec![], @@ -308,7 +301,6 @@ mod tests { hasher: StorageHasher::Blake2_128Concat, key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), - unused: false, }, default: vec![112, 117, 98, 109], // "pubmap" documentation: vec![], diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index 1d74232aab518..d1e540fbc821a 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -413,7 +413,6 @@ fn expected_metadata() -> PalletStorageMetadata { hasher: StorageHasher::Identity, key: scale_info::meta_type::(), value: scale_info::meta_type::(), - unused: false, }, default: [0u8; 8].to_vec(), documentation: vec![], diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 404eb251a8bb9..48d2afd500690 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1024,7 +1024,6 @@ fn metadata() { key: scale_info::meta_type::(), value: scale_info::meta_type::(), hasher: StorageHasher::Blake2_128Concat, - unused: false, }, default: vec![4, 0], documentation: vec![], @@ -1036,7 
+1035,6 @@ fn metadata() { key: scale_info::meta_type::(), value: scale_info::meta_type::(), hasher: StorageHasher::Twox64Concat, - unused: false, }, default: vec![0], documentation: vec![], @@ -1103,7 +1101,6 @@ fn metadata() { key: scale_info::meta_type::(), value: scale_info::meta_type::(), hasher: StorageHasher::Twox64Concat, - unused: false, }, default: vec![0], documentation: vec![], diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 2c4e3ac9f0b36..8dfb53209de1b 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -755,7 +755,6 @@ fn metadata() { key: scale_info::meta_type::(), value: scale_info::meta_type::(), hasher: StorageHasher::Blake2_128Concat, - unused: false, }, default: vec![0], documentation: vec![], @@ -767,7 +766,6 @@ fn metadata() { key: scale_info::meta_type::(), value: scale_info::meta_type::(), hasher: StorageHasher::Twox64Concat, - unused: false, }, default: vec![0], documentation: vec![], From 3050faccabbf030449ab0dfc0bd2646cda01e8fc Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 12 Jul 2021 12:01:41 +0100 Subject: [PATCH 346/503] Add missing scale-info dependency after merge --- Cargo.lock | 348 +++++++++++++++++----------------- frame/support/test/Cargo.toml | 1 + 2 files changed, 175 insertions(+), 174 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 74ca592d5482e..04d5563bffb38 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1760,7 +1760,7 @@ dependencies = [ [[package]] name = "frame-benchmarking" -version = "3.1.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -1781,7 +1781,7 @@ dependencies = [ [[package]] name = "frame-benchmarking-cli" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "Inflector", "chrono", @@ -1804,7 +1804,7 @@ dependencies = [ [[package]] name = "frame-election-provider-support" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ 
"frame-support", "frame-system", @@ -1818,7 +1818,7 @@ dependencies = [ [[package]] name = "frame-executive" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -1850,7 +1850,7 @@ dependencies = [ [[package]] name = "frame-support" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "assert_matches", "bitflags", @@ -1880,7 +1880,7 @@ dependencies = [ [[package]] name = "frame-support-procedural" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "Inflector", "frame-support-procedural-tools", @@ -1891,7 +1891,7 @@ dependencies = [ [[package]] name = "frame-support-procedural-tools" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support-procedural-tools-derive", "proc-macro-crate 1.0.0", @@ -1932,7 +1932,7 @@ dependencies = [ [[package]] name = "frame-system" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "criterion", "frame-support", @@ -1952,7 +1952,7 @@ dependencies = [ [[package]] name = "frame-system-benchmarking" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -1968,7 +1968,7 @@ dependencies = [ [[package]] name = "frame-system-rpc-runtime-api" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "sp-api", @@ -1976,7 +1976,7 @@ dependencies = [ [[package]] name = "frame-try-runtime" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "frame-support", "parity-scale-codec", @@ -4101,7 +4101,7 @@ dependencies = [ [[package]] name = "node-bench" -version = "0.8.0" +version = "0.9.0-dev" dependencies = [ "derive_more", "fs_extra", @@ -4139,7 +4139,7 @@ dependencies = [ [[package]] name = "node-browser-testing" -version = "2.0.0" +version = "3.0.0-dev" dependencies = [ "futures 0.3.15", "futures-timer 3.0.2", @@ -4157,7 +4157,7 @@ dependencies = [ [[package]] name = "node-cli" -version = "2.0.0" +version = "3.0.0-dev" dependencies = [ "assert_cmd", "async-std", @@ -4226,6 +4226,7 @@ 
dependencies = [ "sp-keystore", "sp-runtime", "sp-timestamp", + "sp-transaction-pool", "sp-trie", "structopt", "substrate-browser-utils", @@ -4239,7 +4240,7 @@ dependencies = [ [[package]] name = "node-executor" -version = "2.0.0" +version = "3.0.0-dev" dependencies = [ "criterion", "frame-benchmarking", @@ -4277,7 +4278,7 @@ dependencies = [ [[package]] name = "node-inspect" -version = "0.8.0" +version = "0.9.0-dev" dependencies = [ "derive_more", "log", @@ -4307,7 +4308,7 @@ dependencies = [ [[package]] name = "node-rpc" -version = "2.0.0" +version = "3.0.0-dev" dependencies = [ "jsonrpc-core", "node-primitives", @@ -4351,7 +4352,7 @@ dependencies = [ [[package]] name = "node-runtime" -version = "2.0.1" +version = "3.0.0-dev" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -4505,7 +4506,7 @@ dependencies = [ [[package]] name = "node-testing" -version = "2.0.0" +version = "3.0.0-dev" dependencies = [ "criterion", "frame-support", @@ -4711,7 +4712,7 @@ dependencies = [ [[package]] name = "pallet-assets" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -4727,7 +4728,7 @@ dependencies = [ [[package]] name = "pallet-atomic-swap" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -4742,7 +4743,7 @@ dependencies = [ [[package]] name = "pallet-aura" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -4762,7 +4763,7 @@ dependencies = [ [[package]] name = "pallet-authority-discovery" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -4780,7 +4781,7 @@ dependencies = [ [[package]] name = "pallet-authorship" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -4797,7 +4798,7 @@ dependencies = [ [[package]] name = "pallet-babe" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", 
"frame-election-provider-support", @@ -4826,7 +4827,7 @@ dependencies = [ [[package]] name = "pallet-balances" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -4843,7 +4844,7 @@ dependencies = [ [[package]] name = "pallet-bounties" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -4861,7 +4862,7 @@ dependencies = [ [[package]] name = "pallet-collective" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -4879,7 +4880,7 @@ dependencies = [ [[package]] name = "pallet-contracts" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "assert_matches", "bitflags", @@ -4913,7 +4914,7 @@ dependencies = [ [[package]] name = "pallet-contracts-primitives" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "bitflags", "parity-scale-codec", @@ -4926,7 +4927,7 @@ dependencies = [ [[package]] name = "pallet-contracts-proc-macro" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "proc-macro2", "quote", @@ -4935,7 +4936,7 @@ dependencies = [ [[package]] name = "pallet-contracts-rpc" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "jsonrpc-core", "jsonrpc-core-client", @@ -4954,7 +4955,7 @@ dependencies = [ [[package]] name = "pallet-contracts-rpc-runtime-api" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", @@ -4966,7 +4967,7 @@ dependencies = [ [[package]] name = "pallet-democracy" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -4987,7 +4988,7 @@ dependencies = [ [[package]] name = "pallet-election-provider-multi-phase" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -5014,7 +5015,7 @@ dependencies = [ [[package]] name = "pallet-elections" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ 
"frame-support", "frame-system", @@ -5030,7 +5031,7 @@ dependencies = [ [[package]] name = "pallet-elections-phragmen" -version = "4.0.0" +version = "5.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5050,7 +5051,7 @@ dependencies = [ [[package]] name = "pallet-example" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5067,7 +5068,7 @@ dependencies = [ [[package]] name = "pallet-example-offchain-worker" -version = "2.0.1" +version = "3.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5084,7 +5085,7 @@ dependencies = [ [[package]] name = "pallet-example-parallel" -version = "2.0.1" +version = "3.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5100,7 +5101,7 @@ dependencies = [ [[package]] name = "pallet-gilt" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5117,7 +5118,7 @@ dependencies = [ [[package]] name = "pallet-grandpa" -version = "3.1.0" +version = "4.0.0-dev" dependencies = [ "finality-grandpa", "frame-benchmarking", @@ -5147,7 +5148,7 @@ dependencies = [ [[package]] name = "pallet-identity" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "enumflags2", "frame-benchmarking", @@ -5164,7 +5165,7 @@ dependencies = [ [[package]] name = "pallet-im-online" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5184,7 +5185,7 @@ dependencies = [ [[package]] name = "pallet-indices" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5201,7 +5202,7 @@ dependencies = [ [[package]] name = "pallet-lottery" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5219,7 +5220,7 @@ dependencies = [ [[package]] name = "pallet-membership" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5235,7 +5236,7 @@ dependencies = 
[ [[package]] name = "pallet-mmr" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "ckb-merkle-mountain-range", "env_logger 0.8.3", @@ -5254,7 +5255,7 @@ dependencies = [ [[package]] name = "pallet-mmr-primitives" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5288,7 +5289,7 @@ dependencies = [ [[package]] name = "pallet-multisig" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5304,7 +5305,7 @@ dependencies = [ [[package]] name = "pallet-nicks" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5319,7 +5320,7 @@ dependencies = [ [[package]] name = "pallet-node-authorization" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5334,7 +5335,7 @@ dependencies = [ [[package]] name = "pallet-offences" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5352,7 +5353,7 @@ dependencies = [ [[package]] name = "pallet-offences-benchmarking" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -5379,7 +5380,7 @@ dependencies = [ [[package]] name = "pallet-proxy" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5396,7 +5397,7 @@ dependencies = [ [[package]] name = "pallet-randomness-collective-flip" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5411,7 +5412,7 @@ dependencies = [ [[package]] name = "pallet-recovery" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "enumflags2", "frame-support", @@ -5427,7 +5428,7 @@ dependencies = [ [[package]] name = "pallet-scheduler" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5444,7 +5445,7 @@ dependencies = [ [[package]] name = "pallet-scored-pool" -version = 
"3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5459,7 +5460,7 @@ dependencies = [ [[package]] name = "pallet-session" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5481,7 +5482,7 @@ dependencies = [ [[package]] name = "pallet-session-benchmarking" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -5505,7 +5506,7 @@ dependencies = [ [[package]] name = "pallet-society" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-support-test", @@ -5522,7 +5523,7 @@ dependencies = [ [[package]] name = "pallet-staking" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-election-provider-support", @@ -5555,7 +5556,7 @@ dependencies = [ [[package]] name = "pallet-staking-reward-curve" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", @@ -5566,7 +5567,7 @@ dependencies = [ [[package]] name = "pallet-staking-reward-fn" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "log", "sp-arithmetic", @@ -5574,7 +5575,7 @@ dependencies = [ [[package]] name = "pallet-sudo" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5603,7 +5604,7 @@ dependencies = [ [[package]] name = "pallet-timestamp" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5622,7 +5623,7 @@ dependencies = [ [[package]] name = "pallet-tips" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5641,7 +5642,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -5660,7 +5661,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc" -version = "3.0.0" +version = "4.0.0-dev" 
dependencies = [ "jsonrpc-core", "jsonrpc-core-client", @@ -5676,7 +5677,7 @@ dependencies = [ [[package]] name = "pallet-transaction-payment-rpc-runtime-api" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "pallet-transaction-payment", "parity-scale-codec", @@ -5686,7 +5687,7 @@ dependencies = [ [[package]] name = "pallet-transaction-storage" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5707,7 +5708,7 @@ dependencies = [ [[package]] name = "pallet-treasury" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5726,7 +5727,7 @@ dependencies = [ [[package]] name = "pallet-uniques" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5742,7 +5743,7 @@ dependencies = [ [[package]] name = "pallet-utility" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", @@ -5758,7 +5759,7 @@ dependencies = [ [[package]] name = "pallet-vesting" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "enumflags2", "frame-benchmarking", @@ -6820,7 +6821,7 @@ dependencies = [ [[package]] name = "remote-externalities" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "env_logger 0.8.3", "frame-support", @@ -7049,7 +7050,7 @@ dependencies = [ [[package]] name = "sc-allocator" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "log", "sp-core", @@ -7059,7 +7060,7 @@ dependencies = [ [[package]] name = "sc-authority-discovery" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-trait", "derive_more", @@ -7091,7 +7092,7 @@ dependencies = [ [[package]] name = "sc-basic-authorship" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "futures 0.3.15", "futures-timer 3.0.2", @@ -7116,7 +7117,7 @@ dependencies = [ [[package]] name = "sc-block-builder" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "parity-scale-codec", 
"sc-client-api", @@ -7133,7 +7134,7 @@ dependencies = [ [[package]] name = "sc-chain-spec" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -7152,7 +7153,7 @@ dependencies = [ [[package]] name = "sc-chain-spec-derive" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", @@ -7162,7 +7163,7 @@ dependencies = [ [[package]] name = "sc-cli" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "chrono", "fdlimit", @@ -7200,7 +7201,7 @@ dependencies = [ [[package]] name = "sc-client-api" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "derive_more", "fnv", @@ -7237,7 +7238,7 @@ dependencies = [ [[package]] name = "sc-client-db" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "blake2-rfc", "hash-db", @@ -7271,7 +7272,7 @@ dependencies = [ [[package]] name = "sc-consensus" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-trait", "parking_lot 0.11.1", @@ -7283,7 +7284,7 @@ dependencies = [ [[package]] name = "sc-consensus-aura" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-trait", "derive_more", @@ -7325,7 +7326,7 @@ dependencies = [ [[package]] name = "sc-consensus-babe" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-trait", "derive_more", @@ -7381,7 +7382,7 @@ dependencies = [ [[package]] name = "sc-consensus-babe-rpc" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "derive_more", "futures 0.3.15", @@ -7410,7 +7411,7 @@ dependencies = [ [[package]] name = "sc-consensus-epochs" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "fork-tree", "parity-scale-codec", @@ -7422,7 +7423,7 @@ dependencies = [ [[package]] name = "sc-consensus-manual-seal" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "assert_matches", "async-trait", @@ -7461,7 +7462,7 @@ dependencies = [ [[package]] name = "sc-consensus-pow" -version = "0.9.0" +version = "0.10.0-dev" 
dependencies = [ "async-trait", "derive_more", @@ -7484,7 +7485,7 @@ dependencies = [ [[package]] name = "sc-consensus-slots" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-trait", "futures 0.3.15", @@ -7512,7 +7513,7 @@ dependencies = [ [[package]] name = "sc-consensus-uncles" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "sc-client-api", "sp-authorship", @@ -7522,7 +7523,7 @@ dependencies = [ [[package]] name = "sc-executor" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "assert_matches", "derive_more", @@ -7564,7 +7565,7 @@ dependencies = [ [[package]] name = "sc-executor-common" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "derive_more", "parity-scale-codec", @@ -7580,7 +7581,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmi" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "log", "parity-scale-codec", @@ -7594,7 +7595,7 @@ dependencies = [ [[package]] name = "sc-executor-wasmtime" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "assert_matches", "cfg-if 1.0.0", @@ -7617,7 +7618,7 @@ dependencies = [ [[package]] name = "sc-finality-grandpa" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "assert_matches", "async-trait", @@ -7666,7 +7667,7 @@ dependencies = [ [[package]] name = "sc-finality-grandpa-rpc" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "derive_more", "finality-grandpa", @@ -7696,7 +7697,7 @@ dependencies = [ [[package]] name = "sc-finality-grandpa-warp-sync" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "derive_more", "finality-grandpa", @@ -7722,7 +7723,7 @@ dependencies = [ [[package]] name = "sc-informant" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "ansi_term 0.12.1", "futures 0.3.15", @@ -7739,7 +7740,7 @@ dependencies = [ [[package]] name = "sc-keystore" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "async-trait", "derive_more", @@ -7759,7 +7760,7 @@ dependencies = [ [[package]] 
name = "sc-light" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "hash-db", "lazy_static", @@ -7777,7 +7778,7 @@ dependencies = [ [[package]] name = "sc-network" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "assert_matches", "async-std", @@ -7837,7 +7838,7 @@ dependencies = [ [[package]] name = "sc-network-gossip" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-std", "futures 0.3.15", @@ -7885,7 +7886,7 @@ dependencies = [ [[package]] name = "sc-offchain" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "bytes 0.5.6", "fnv", @@ -7921,7 +7922,7 @@ dependencies = [ [[package]] name = "sc-peerset" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "futures 0.3.15", "libp2p", @@ -7942,7 +7943,7 @@ dependencies = [ [[package]] name = "sc-rpc" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "assert_matches", "futures 0.1.31", @@ -7985,7 +7986,7 @@ dependencies = [ [[package]] name = "sc-rpc-api" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "derive_more", "futures 0.3.15", @@ -8009,7 +8010,7 @@ dependencies = [ [[package]] name = "sc-rpc-server" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "futures 0.1.31", "jsonrpc-core", @@ -8039,7 +8040,7 @@ dependencies = [ [[package]] name = "sc-service" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-std", "async-trait", @@ -8148,7 +8149,7 @@ dependencies = [ [[package]] name = "sc-state-db" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "log", "parity-scale-codec", @@ -8162,7 +8163,7 @@ dependencies = [ [[package]] name = "sc-sync-state-rpc" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "jsonrpc-core", "jsonrpc-core-client", @@ -8181,7 +8182,7 @@ dependencies = [ [[package]] name = "sc-telemetry" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "chrono", "futures 0.3.15", @@ -8200,7 +8201,7 @@ dependencies = [ [[package]] name = "sc-tracing" -version = "3.0.0" +version = 
"4.0.0-dev" dependencies = [ "ansi_term 0.12.1", "atty", @@ -8236,7 +8237,7 @@ dependencies = [ [[package]] name = "sc-tracing-proc-macro" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", @@ -8246,7 +8247,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "assert_matches", "criterion", @@ -8282,7 +8283,7 @@ dependencies = [ [[package]] name = "sc-transaction-pool-api" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "derive_more", "futures 0.3.15", @@ -8731,7 +8732,7 @@ dependencies = [ [[package]] name = "sp-api" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "hash-db", "log", @@ -8748,7 +8749,7 @@ dependencies = [ [[package]] name = "sp-api-proc-macro" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "blake2-rfc", "proc-macro-crate 1.0.0", @@ -8781,7 +8782,7 @@ dependencies = [ [[package]] name = "sp-application-crypto" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "scale-info", @@ -8805,7 +8806,7 @@ dependencies = [ [[package]] name = "sp-arithmetic" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "criterion", "integer-sqrt", @@ -8834,7 +8835,7 @@ dependencies = [ [[package]] name = "sp-authority-discovery" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "scale-info", @@ -8846,7 +8847,7 @@ dependencies = [ [[package]] name = "sp-authorship" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "async-trait", "parity-scale-codec", @@ -8857,7 +8858,7 @@ dependencies = [ [[package]] name = "sp-block-builder" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "sp-api", @@ -8868,7 +8869,7 @@ dependencies = [ [[package]] name = "sp-blockchain" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "futures 0.3.15", "log", @@ -8885,7 +8886,7 @@ dependencies = [ [[package]] name = "sp-consensus" 
-version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-trait", "futures 0.3.15", @@ -8912,7 +8913,7 @@ dependencies = [ [[package]] name = "sp-consensus-aura" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-trait", "parity-scale-codec", @@ -8929,7 +8930,7 @@ dependencies = [ [[package]] name = "sp-consensus-babe" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-trait", "merlin", @@ -8951,7 +8952,7 @@ dependencies = [ [[package]] name = "sp-consensus-pow" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "parity-scale-codec", "sp-api", @@ -8962,7 +8963,7 @@ dependencies = [ [[package]] name = "sp-consensus-slots" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "parity-scale-codec", "scale-info", @@ -8972,7 +8973,7 @@ dependencies = [ [[package]] name = "sp-consensus-vrf" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "parity-scale-codec", "schnorrkel", @@ -8983,7 +8984,7 @@ dependencies = [ [[package]] name = "sp-core" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "base58", "blake2-rfc", @@ -9033,7 +9034,7 @@ dependencies = [ [[package]] name = "sp-database" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "kvdb", "parking_lot 0.11.1", @@ -9050,7 +9051,7 @@ dependencies = [ [[package]] name = "sp-externalities" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "environmental", "parity-scale-codec", @@ -9060,7 +9061,7 @@ dependencies = [ [[package]] name = "sp-finality-grandpa" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "finality-grandpa", "log", @@ -9077,7 +9078,7 @@ dependencies = [ [[package]] name = "sp-inherents" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "async-trait", "futures 0.3.15", @@ -9091,7 +9092,7 @@ dependencies = [ [[package]] name = "sp-io" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "futures 0.3.15", "hash-db", @@ -9115,7 +9116,7 @@ dependencies = [ [[package]] name = "sp-keyring" 
-version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "lazy_static", "sp-core", @@ -9125,7 +9126,7 @@ dependencies = [ [[package]] name = "sp-keystore" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "async-trait", "derive_more", @@ -9143,7 +9144,7 @@ dependencies = [ [[package]] name = "sp-maybe-compressed-blob" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "ruzstd", "zstd", @@ -9151,7 +9152,7 @@ dependencies = [ [[package]] name = "sp-npos-elections" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "rand 0.7.3", @@ -9167,7 +9168,7 @@ dependencies = [ [[package]] name = "sp-npos-elections-compact" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "proc-macro-crate 1.0.0", @@ -9196,7 +9197,7 @@ dependencies = [ [[package]] name = "sp-offchain" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "sp-api", "sp-core", @@ -9213,7 +9214,7 @@ dependencies = [ [[package]] name = "sp-rpc" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "rustc-hash", "serde", @@ -9224,7 +9225,7 @@ dependencies = [ [[package]] name = "sp-runtime" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "either", "hash256-std-hasher", @@ -9250,7 +9251,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -9272,7 +9273,7 @@ dependencies = [ [[package]] name = "sp-runtime-interface-proc-macro" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "Inflector", "proc-macro-crate 1.0.0", @@ -9322,7 +9323,7 @@ dependencies = [ [[package]] name = "sp-sandbox" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "assert_matches", "parity-scale-codec", @@ -9344,7 +9345,7 @@ dependencies = [ [[package]] name = "sp-session" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "scale-info", @@ -9357,7 +9358,7 @@ dependencies = [ 
[[package]] name = "sp-staking" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "scale-info", @@ -9367,7 +9368,7 @@ dependencies = [ [[package]] name = "sp-state-machine" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "hash-db", "hex-literal", @@ -9392,11 +9393,11 @@ dependencies = [ [[package]] name = "sp-std" -version = "3.0.0" +version = "4.0.0-dev" [[package]] name = "sp-storage" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "impl-serde", "parity-scale-codec", @@ -9408,7 +9409,7 @@ dependencies = [ [[package]] name = "sp-tasks" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "log", "parity-scale-codec", @@ -9433,7 +9434,7 @@ dependencies = [ [[package]] name = "sp-timestamp" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "async-trait", "futures-timer 3.0.2", @@ -9449,7 +9450,7 @@ dependencies = [ [[package]] name = "sp-tracing" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "erased-serde", "log", @@ -9466,7 +9467,7 @@ dependencies = [ [[package]] name = "sp-transaction-pool" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "sp-api", "sp-runtime", @@ -9474,7 +9475,7 @@ dependencies = [ [[package]] name = "sp-transaction-storage-proof" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "async-trait", "log", @@ -9489,7 +9490,7 @@ dependencies = [ [[package]] name = "sp-trie" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "criterion", "hash-db", @@ -9507,7 +9508,7 @@ dependencies = [ [[package]] name = "sp-utils" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "futures 0.3.15", "futures-core", @@ -9518,7 +9519,7 @@ dependencies = [ [[package]] name = "sp-version" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "impl-serde", "parity-scale-codec", @@ -9533,7 +9534,7 @@ dependencies = [ [[package]] name = "sp-version-proc-macro" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "parity-scale-codec", 
"proc-macro-crate 1.0.0", @@ -9545,7 +9546,7 @@ dependencies = [ [[package]] name = "sp-wasm-interface" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", @@ -9677,7 +9678,7 @@ dependencies = [ [[package]] name = "substrate-browser-utils" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "chrono", "console_error_panic_hook", @@ -9709,7 +9710,7 @@ dependencies = [ [[package]] name = "substrate-frame-cli" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", @@ -9737,7 +9738,7 @@ dependencies = [ [[package]] name = "substrate-frame-rpc-system" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "frame-system-rpc-runtime-api", "futures 0.3.15", @@ -9883,7 +9884,7 @@ dependencies = [ [[package]] name = "substrate-test-utils" -version = "3.0.0" +version = "4.0.0-dev" dependencies = [ "futures 0.3.15", "sc-service", @@ -9894,7 +9895,7 @@ dependencies = [ [[package]] name = "substrate-test-utils-derive" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "proc-macro-crate 1.0.0", "quote", @@ -9912,7 +9913,7 @@ dependencies = [ [[package]] name = "substrate-wasm-builder" -version = "4.0.0" +version = "5.0.0-dev" dependencies = [ "ansi_term 0.12.1", "atty", @@ -10690,7 +10691,7 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" [[package]] name = "try-runtime-cli" -version = "0.9.0" +version = "0.10.0-dev" dependencies = [ "frame-try-runtime", "log", @@ -10706,7 +10707,6 @@ dependencies = [ "sp-blockchain", "sp-core", "sp-externalities", - "sp-io", "sp-keystore", "sp-runtime", "sp-state-machine", diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 4c17f3af91eab..fbb6c4346050f 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.101", default-features = false, features = 
["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.10.0-dev", optional = true, path = "../../../primitives/state-machine" } From aa5417b2b20d8816701fe3becdac7eada3268e37 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Jul 2021 10:26:32 +0100 Subject: [PATCH 347/503] SignedExtension::AdditionalSigned metadata --- Cargo.lock | 2 +- frame/support/Cargo.toml | 2 +- .../src/construct_runtime/expand/metadata.rs | 9 ++--- frame/support/test/tests/construct_runtime.rs | 6 +++- frame/support/test/tests/pallet.rs | 6 +++- frame/support/test/tests/pallet_instance.rs | 6 +++- primitives/runtime/src/traits.rs | 35 +++++++++++++------ 7 files changed, 47 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 04d5563bffb38..cae296addc08b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1840,7 +1840,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#8c37923bdfc7853a3b8521ced643afb61a193878" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-additional-signed#54f626afbb22db34f4f495121547ce3ca9e9471d" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 90fcba2c6ab93..982832481d6f9 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.101", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", 
"max-encoded-len"] } scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } -frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v14"] } +frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-additional-signed", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index 315091e3d9807..a4498521127fa 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -74,11 +74,12 @@ pub fn expand_runtime_metadata( < #extrinsic as #scrate::sp_runtime::traits::ExtrinsicMetadata >::SignedExtensions as #scrate::sp_runtime::traits::SignedExtension - >::identifier() + >::metadata() .into_iter() - .map(|(id, ty)| #scrate::metadata::SignedExtensionMetadata { - identifier: id, - ty, + .map(|meta| #scrate::metadata::SignedExtensionMetadata { + identifier: meta.identifier, + ty: meta.ty, + additional_signed: meta.additional_signed, }) .collect(), }, diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 0222d09639074..e9b49775785bd 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -791,7 +791,11 @@ fn test_metadata() { ty: scale_info::meta_type::(), version: 4, signed_extensions: vec![ - SignedExtensionMetadata { identifier: "UnitSignedExtension", ty: 
scale_info::meta_type::<()>() } + SignedExtensionMetadata { + identifier: "UnitSignedExtension", + ty: scale_info::meta_type::<()>(), + additional_signed: scale_info::meta_type::<()>(), + } ] }; diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 48d2afd500690..2dcee38451ccc 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1221,7 +1221,11 @@ fn metadata() { ty: scale_info::meta_type::(), version: 4, signed_extensions: vec![ - SignedExtensionMetadata { identifier: "UnitSignedExtension", ty: scale_info::meta_type::<()>() } + SignedExtensionMetadata { + identifier: "UnitSignedExtension", + ty: scale_info::meta_type::<()>(), + additional_signed: scale_info::meta_type::<()>(), + } ] }; diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 8dfb53209de1b..4f98580745385 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -899,7 +899,11 @@ fn metadata() { ty: scale_info::meta_type::(), version: 4, signed_extensions: vec![ - SignedExtensionMetadata { identifier: "UnitSignedExtension", ty: scale_info::meta_type::<()>() } + SignedExtensionMetadata { + identifier: "UnitSignedExtension", + ty: scale_info::meta_type::<()>(), + additional_signed: scale_info::meta_type::<()>(), + } ] }; diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index 9ba8c90e3d2b6..fd4aed1b43362 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -27,7 +27,7 @@ use std::str::FromStr; use serde::{Serialize, Deserialize, de::DeserializeOwned}; use sp_core::{self, Hasher, TypeId, RuntimeDebug}; use crate::codec::{Codec, Encode, Decode, MaxEncodedLen}; -use crate::scale_info::{TypeInfo, StaticTypeInfo}; +use crate::scale_info::{MetaType, TypeInfo, StaticTypeInfo}; use crate::transaction_validity::{ ValidTransaction, TransactionSource, 
TransactionValidity, TransactionValidityError, UnknownTransaction, @@ -757,7 +757,7 @@ pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq /// Any additional data that will go into the signed payload. This may be created dynamically /// from the transaction using the `additional_signed` function. - type AdditionalSigned: Encode; + type AdditionalSigned: Encode + TypeInfo; /// The type that encodes information that can be passed from pre_dispatch to post-dispatch. type Pre: Default; @@ -862,18 +862,33 @@ pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq Ok(()) } - /// Returns the list of unique identifier for this signed extension. + /// Returns the metadata for this signed extension. /// /// As a [`SignedExtension`] can be a tuple of [`SignedExtension`]s we need to return a `Vec` - /// that holds all the unique identifiers. Each individual `SignedExtension` must return - /// *exactly* one identifier. + /// that holds the metadata of each one. Each individual `SignedExtension` must return + /// *exactly* one [`SignedExtensionMetadata`]. /// - /// This method provides a default implementation that returns `vec![SELF::IDENTIFIER]`. - fn identifier() -> Vec<(&'static str, scale_info::MetaType)> { - sp_std::vec![(Self::IDENTIFIER, scale_info::meta_type::())] + /// This method provides a default implementation that returns a vec containing a single + /// [`SignedExtensionMetadata`]. + fn metadata() -> Vec { + sp_std::vec![SignedExtensionMetadata { + identifier: Self::IDENTIFIER, + ty: scale_info::meta_type::(), + additional_signed: scale_info::meta_type::() + }] } } +/// Information about a [`SignedExtension`] for the runtime metadata. +pub struct SignedExtensionMetadata { + /// The unique identifier of the [`SignedExtension`]. + pub identifier: &'static str, + /// The type of the [`SignedExtension`]. + pub ty: MetaType, + /// The type of the [`SignedExtension`] additional signed data for the payload. 
+ pub additional_signed: MetaType, +} + #[impl_for_tuples(1, 12)] impl SignedExtension for Tuple { for_tuples!( where #( Tuple: SignedExtension )* ); @@ -934,9 +949,9 @@ impl SignedExtension for Tuple { Ok(()) } - fn identifier() -> Vec<(&'static str, scale_info::MetaType)> { + fn metadata() -> Vec { let mut ids = Vec::new(); - for_tuples!( #( ids.extend(Tuple::identifier()); )* ); + for_tuples!( #( ids.extend(Tuple::metadata()); )* ); ids } } From e42fe3e47dcdfecb674aae533b0bfc5167ad41fd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Jul 2021 11:29:31 +0100 Subject: [PATCH 348/503] Update frame-metadata, use abbreviated docs and args fields --- Cargo.lock | 2 +- frame/support/Cargo.toml | 2 +- .../procedural/src/pallet/expand/call.rs | 4 +- .../procedural/src/pallet/expand/constants.rs | 2 +- .../procedural/src/pallet/expand/storage.rs | 2 +- .../procedural/src/storage/metadata.rs | 2 +- frame/support/src/dispatch.rs | 38 +++---- frame/support/src/lib.rs | 16 +-- frame/support/test/tests/construct_runtime.rs | 36 +++---- frame/support/test/tests/decl_storage.rs | 62 ++++++------ frame/support/test/tests/instance.rs | 6 +- frame/support/test/tests/pallet.rs | 98 +++++++++---------- frame/support/test/tests/pallet_instance.rs | 76 +++++++------- 13 files changed, 173 insertions(+), 173 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 75a77d8152104..4eebd246b6b57 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1840,7 +1840,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-additional-signed#54f626afbb22db34f4f495121547ce3ca9e9471d" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#9cfeeeeab8ec01f2acc370e6132adc5ed48cd920" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index dab7de891c043..6ad90ec846692 100644 --- a/frame/support/Cargo.toml +++ 
b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } -frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-additional-signed", default-features = false, features = ["v14"] } +frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index e6da9fbc8aa70..4875947cb2c30 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -234,13 +234,13 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { let calls = #frame_support::sp_std::vec![ #( #frame_support::metadata::FunctionMetadata { name: stringify!(#fn_name), - arguments: #frame_support::sp_std::vec![ #( + args: #frame_support::sp_std::vec![ #( #frame_support::metadata::FunctionArgumentMetadata { name: stringify!(#args_name), ty: #frame_support::scale_info::meta_type::<#args_meta_type>(), }, )* ], - documentation: #frame_support::sp_std::vec![ #( #fn_doc ),* ], + docs: #frame_support::sp_std::vec![ #( #fn_doc ),* ], }, )* ]; #frame_support::metadata::PalletCallMetadata { ty, calls } diff --git a/frame/support/procedural/src/pallet/expand/constants.rs 
b/frame/support/procedural/src/pallet/expand/constants.rs index f5b5de0251827..0d7e6f8d714ef 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -100,7 +100,7 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { name: #ident_str, ty: #frame_support::scale_info::meta_type::<#const_type>(), value: #default_byte_getter::<#type_use_gen>(Default::default()).default_byte(), - documentation: #frame_support::sp_std::vec![ #( #doc ),* ], + docs: #frame_support::sp_std::vec![ #( #doc ),* ], } }) }); diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index 69291db7b7486..f3b8e91219abd 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -177,7 +177,7 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { modifier: <#full_ident as #frame_support::storage::StorageEntryMetadata>::MODIFIER, ty: <#full_ident as #frame_support::storage::StorageEntryMetadata>::ty(), default: <#full_ident as #frame_support::storage::StorageEntryMetadata>::default(), - documentation: #frame_support::sp_std::vec![ + docs: #frame_support::sp_std::vec![ #( #docs, )* ], } diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index 91e6e94684322..3438500dc7903 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -172,7 +172,7 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { modifier: #modifier, ty: #ty, default: #default_byte_getter_struct_instance.default_byte(), - documentation: #scrate::sp_std::vec![ #( #docs ),* ], + docs: #scrate::sp_std::vec![ #( #docs ),* ], }, }; diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 71c43b425384e..69e22c2f60142 100644 --- 
a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2321,9 +2321,9 @@ macro_rules! __impl_module_constants_metadata { Default::default() ).default_byte(), #[cfg(feature = "metadata-docs")] - documentation: $crate::sp_std::vec![ $( $doc_attr ),* ], + docs: $crate::sp_std::vec![ $( $doc_attr ),* ], #[cfg(not(feature = "metadata-docs"))] - documentation: $crate::sp_std::vec![], + docs: $crate::sp_std::vec![], } ),* ] @@ -2403,7 +2403,7 @@ macro_rules! __function_to_metadata { ) => { $crate::metadata::FunctionMetadata { name: stringify!($fn_name), - arguments: $crate::sp_std::vec![ + args: $crate::sp_std::vec![ $( $crate::metadata::FunctionArgumentMetadata { name: stringify!($param_name), @@ -2414,9 +2414,9 @@ macro_rules! __function_to_metadata { ),* ], #[cfg(feature = "metadata-docs")] - documentation: $crate::sp_std::vec![ $( $fn_doc ),* ], + docs: $crate::sp_std::vec![ $( $fn_doc ),* ], #[cfg(not(feature = "metadata-docs"))] - documentation: $crate::sp_std::vec![], + docs: $crate::sp_std::vec![], } }; @@ -2573,24 +2573,24 @@ mod tests { vec![ FunctionMetadata { name: "aux_0", - arguments: vec![], - documentation: vec![ + args: vec![], + docs: vec![ " Hi, this is a comment." 
] }, FunctionMetadata { name: "aux_1", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_data", ty: scale_info::meta_type::>(), } ], - documentation: vec![], + docs: vec![], }, FunctionMetadata { name: "aux_2", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_data", ty: scale_info::meta_type::(), @@ -2600,26 +2600,26 @@ mod tests { ty: scale_info::meta_type::(), } ], - documentation: vec![], + docs: vec![], }, FunctionMetadata { name: "aux_3", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }, FunctionMetadata { name: "aux_4", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_data", ty: scale_info::meta_type::(), } ], - documentation: vec![], + docs: vec![], }, FunctionMetadata { name: "aux_5", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_data", ty: scale_info::meta_type::(), @@ -2629,12 +2629,12 @@ mod tests { ty: scale_info::meta_type::>() } ], - documentation: vec![], + docs: vec![], }, FunctionMetadata { name: "operational", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }, ] } diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index f8e809a40872e..8d5827efb8513 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1129,7 +1129,7 @@ pub mod tests { value: scale_info::meta_type::(), }, default: vec![0, 0, 0, 0, 0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "OptionLinkedMap", @@ -1140,7 +1140,7 @@ pub mod tests { value: scale_info::meta_type::(), }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "GenericData", @@ -1151,7 +1151,7 @@ pub mod tests { value: scale_info::meta_type::(), }, default: vec![0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "GenericData2", @@ -1162,7 +1162,7 @@ pub mod tests { value: scale_info::meta_type::(), }, default: vec![0], - documentation: 
vec![], + docs: vec![], }, StorageEntryMetadata { name: "DataDM", @@ -1175,7 +1175,7 @@ pub mod tests { key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0, 0, 0, 0, 0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "GenericDataDM", @@ -1188,7 +1188,7 @@ pub mod tests { key2_hasher: StorageHasher::Identity, }, default: vec![0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "GenericData2DM", @@ -1201,7 +1201,7 @@ pub mod tests { key2_hasher: StorageHasher::Twox64Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "AppendableDM", @@ -1214,7 +1214,7 @@ pub mod tests { key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, ], } diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 3574dd8c95183..2f06a350bb85b 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -588,8 +588,8 @@ fn test_metadata() { ty: scale_info::meta_type::>(), calls: vec![FunctionMetadata { name: "noop", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }], }), event: Some(PalletEventMetadata { @@ -609,8 +609,8 @@ fn test_metadata() { ty: scale_info::meta_type::>(), calls: vec![FunctionMetadata { name: "fail", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }], }), event: Some(PalletEventMetadata { @@ -630,8 +630,8 @@ fn test_metadata() { ty: scale_info::meta_type::>(), calls: vec![FunctionMetadata { name: "fail", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }], }), event: Some(PalletEventMetadata { @@ -651,8 +651,8 @@ fn test_metadata() { ty: scale_info::meta_type::>(), calls: vec![FunctionMetadata { name: "fail", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }], }), 
event: Some(PalletEventMetadata { @@ -681,8 +681,8 @@ fn test_metadata() { ty: scale_info::meta_type::>(), calls: vec![FunctionMetadata { name: "fail", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }], }), event: None, @@ -711,8 +711,8 @@ fn test_metadata() { ty: scale_info::meta_type::>(), calls: vec![FunctionMetadata { name: "fail", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }], }), event: Some(PalletEventMetadata { @@ -732,8 +732,8 @@ fn test_metadata() { ty: scale_info::meta_type::>(), calls: vec![FunctionMetadata { name: "fail", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }], }), event: Some(PalletEventMetadata { @@ -753,8 +753,8 @@ fn test_metadata() { ty: scale_info::meta_type::>(), calls: vec![FunctionMetadata { name: "fail", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }], }), event: Some(PalletEventMetadata { @@ -774,8 +774,8 @@ fn test_metadata() { ty: scale_info::meta_type::>(), calls: vec![FunctionMetadata { name: "fail", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }], }), event: Some(PalletEventMetadata { diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index 30f86cfd92b93..ae7e6f07f7923 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -110,112 +110,112 @@ mod tests { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![" Hello, this is doc!"], + docs: vec![" Hello, this is doc!"], }, StorageEntryMetadata { name: "PUBU32", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "U32MYDEF", modifier: StorageEntryModifier::Optional, ty: 
StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "PUBU32MYDEF", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "GETU32", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "PUBGETU32", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "GETU32WITHCONFIG", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "PUBGETU32WITHCONFIG", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "GETU32MYDEF", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "PUBGETU32MYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![3, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "GETU32WITHCONFIGMYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![2, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "PUBGETU32WITHCONFIGMYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![1, 0, 0, 0], - 
documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "PUBGETU32WITHCONFIGMYDEFOPT", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "GetU32WithBuilder", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0, 0, 0, 0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "GetOptU32WithBuilderSome", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "GetOptU32WithBuilderNone", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "MAPU32", @@ -226,7 +226,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "PUBMAPU32", @@ -237,7 +237,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![0], - documentation: vec![], + docs:vec![], }, // StorageEntryMetadata { // name: "MAPU32MYDEF", @@ -248,7 +248,7 @@ mod tests { // value: scale_info::meta_type::(), // }, // default: vec![0], - // documentation: vec![], + // docs:vec![], // }, // StorageEntryMetadata { // name: "PUBMAPU32MYDEF", @@ -259,7 +259,7 @@ mod tests { // value: scale_info::meta_type::(), // }, // default: vec![0], - // documentation: vec![], + // docs:vec![], // }, StorageEntryMetadata { name: "GETMAPU32", @@ -270,7 +270,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![0, 0, 0, 0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "PUBGETMAPU32", @@ -281,7 +281,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![0, 0, 
0, 0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "GETMAPU32MYDEF", @@ -292,7 +292,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![109, 97, 112, 100], // "map" - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "PUBGETMAPU32MYDEF", @@ -303,7 +303,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![112, 117, 98, 109], // "pubmap" - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "DOUBLEMAP", @@ -316,7 +316,7 @@ mod tests { key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "DOUBLEMAP2", @@ -329,28 +329,28 @@ mod tests { key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE1", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::<(Option,)>()), default: vec![0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE2", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::<([[(u16, Option<()>); 32]; 12], u32)>()), default: [0u8; 1156].to_vec(), - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE3", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::<[u32; 25]>()), default: [0u8; 100].to_vec(), - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "NMAP", @@ -361,7 +361,7 @@ mod tests { value: scale_info::meta_type::(), }, default: vec![0], - documentation: vec![], + docs:vec![], }, StorageEntryMetadata { name: "NMAP2", @@ -372,7 +372,7 @@ mod tests { value: scale_info::meta_type::(), }, default: vec![0], - documentation: vec![], + docs:vec![], }, ], } diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index 
810b39a21ab72..3cb4a7ee8a95e 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -404,7 +404,7 @@ fn expected_metadata() -> PalletStorageMetadata { modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "Map", @@ -415,7 +415,7 @@ fn expected_metadata() -> PalletStorageMetadata { value: scale_info::meta_type::(), }, default: [0u8; 8].to_vec(), - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "DoubleMap", @@ -428,7 +428,7 @@ fn expected_metadata() -> PalletStorageMetadata { value: scale_info::meta_type::(), }, default: [0u8; 8].to_vec(), - documentation: vec![], + docs: vec![], }, ] } diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 1781525d2dd12..8cd8644e80bf7 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -863,97 +863,97 @@ fn metadata() { calls: vec![ FunctionMetadata { name: "fill_block", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_ratio", ty: scale_info::meta_type::() } ], - documentation: vec![ + docs: vec![ " A dispatch that will fill the block weight up to the given ratio.a" ] }, FunctionMetadata { name: "remark", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_remark", ty: scale_info::meta_type::>(), }], - documentation: vec![] + docs: vec![] }, FunctionMetadata { name: "set_heap_pages", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "pages", ty: scale_info::meta_type::(), } ], - documentation: vec![] + docs: vec![] }, FunctionMetadata { name: "set_code", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "code", ty: scale_info::meta_type::>(), } ], - documentation: vec![] + docs: vec![] }, FunctionMetadata { name: "set_code_without_checks", - arguments: vec![ + args: vec![ 
FunctionArgumentMetadata { name: "code", ty: scale_info::meta_type::>(), } ], - documentation: vec![] + docs: vec![] }, FunctionMetadata { name: "set_changes_trie_config", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "changes_trie_config", ty: scale_info::meta_type::>(), } ], - documentation: vec![] }, + docs: vec![] }, FunctionMetadata { name: "set_storage", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "items", ty: scale_info::meta_type::>(), } ], - documentation: vec![] }, + docs: vec![] }, FunctionMetadata { name: "kill_storage", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "keys", ty: scale_info::meta_type::>() } ], - documentation: vec![] }, + docs: vec![] }, FunctionMetadata { name: "kill_prefix", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "prefix", ty: scale_info::meta_type::() }, FunctionArgumentMetadata { name: "_subkeys", ty: scale_info::meta_type::() } ], - documentation: vec![] }, + docs: vec![] }, FunctionMetadata { name: "remark_with_event", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "remark", ty: scale_info::meta_type::>(), } ], - documentation: vec![] } + docs: vec![] } ] }), event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), @@ -962,37 +962,37 @@ fn metadata() { name: "BlockWeights", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![] + docs: vec![] }, PalletConstantMetadata { name: "BlockLength", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![], + docs: vec![], }, PalletConstantMetadata { name: "BlockHashCount", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![], + docs: vec![], }, PalletConstantMetadata { name: "DbWeight", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![], + docs: vec![], }, PalletConstantMetadata { name: "Version", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![], + docs: vec![], }, PalletConstantMetadata { 
name: "SS58Prefix", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![] + docs: vec![] } ], error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), @@ -1008,14 +1008,14 @@ fn metadata() { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "Value", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "Map", @@ -1026,7 +1026,7 @@ fn metadata() { hasher: StorageHasher::Blake2_128Concat, }, default: vec![4, 0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "Map2", @@ -1037,7 +1037,7 @@ fn metadata() { hasher: StorageHasher::Twox64Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "DoubleMap", @@ -1050,7 +1050,7 @@ fn metadata() { key2_hasher: StorageHasher::Twox64Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "DoubleMap2", @@ -1063,7 +1063,7 @@ fn metadata() { key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "NMap", @@ -1074,7 +1074,7 @@ fn metadata() { value: scale_info::meta_type::(), }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "NMap2", @@ -1085,14 +1085,14 @@ fn metadata() { value: scale_info::meta_type::(), }, default: vec![0], - documentation: vec![], + docs: vec![], }, #[cfg(feature = "conditional-storage")] StorageEntryMetadata { name: "ConditionalValue", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs: vec![], }, #[cfg(feature = "conditional-storage")] StorageEntryMetadata { name: 
"ConditionalMap", @@ -1103,7 +1103,7 @@ fn metadata() { hasher: StorageHasher::Twox64Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, #[cfg(feature = "conditional-storage")] StorageEntryMetadata { name: "ConditionalDoubleMap", @@ -1116,7 +1116,7 @@ fn metadata() { key2_hasher: StorageHasher::Twox64Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, #[cfg(feature = "conditional-storage")] StorageEntryMetadata { name: "ConditionalNMap", @@ -1127,7 +1127,7 @@ fn metadata() { value: scale_info::meta_type::(), }, default: vec![0], - documentation: vec![], + docs: vec![], }, ], }), @@ -1136,7 +1136,7 @@ fn metadata() { calls: vec![ FunctionMetadata { name: "foo", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_foo", ty: scale_info::meta_type::>(), @@ -1146,26 +1146,26 @@ fn metadata() { ty: scale_info::meta_type::(), } ], - documentation: vec![ + docs: vec![ " Doc comment put in metadata", ], }, FunctionMetadata { name: "foo_transactional", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "foo", ty: scale_info::meta_type::>(), } ], - documentation: vec![ + docs: vec![ " Doc comment put in metadata", ], }, FunctionMetadata { name: "foo_no_post_info", - arguments: vec![], - documentation: vec![], + args: vec![], + docs: vec![], }, ] }), @@ -1175,7 +1175,7 @@ fn metadata() { name: "MyGetParam", ty: scale_info::meta_type::(), value: vec![10, 0, 0, 0], - documentation: vec![ + docs: vec![ " Some comment", " Some comment", ], @@ -1184,7 +1184,7 @@ fn metadata() { name: "MyGetParam2", ty: scale_info::meta_type::(), value: vec![11, 0, 0, 0], - documentation: vec![ + docs: vec![ " Some comment", " Some comment", ], @@ -1193,13 +1193,13 @@ fn metadata() { name: "MyGetParam3", ty: scale_info::meta_type::(), value: vec![12, 0, 0, 0, 0, 0, 0, 0], - documentation: vec![], + docs: vec![], }, PalletConstantMetadata { name: "some_extra", ty: scale_info::meta_type::(), value: vec![100, 0, 0, 0, 0, 0, 0, 0], 
- documentation: vec![ + docs: vec![ " Some doc", " Some doc", ], @@ -1208,7 +1208,7 @@ fn metadata() { name: "some_extra_extra", ty: scale_info::meta_type::(), value: vec![0, 0, 0, 0, 0, 0, 0, 0], - documentation: vec![ + docs: vec![ " Some doc", ], }, diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 94508318d66a7..4fa4ae0d9c097 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -600,97 +600,97 @@ fn metadata() { calls: vec![ FunctionMetadata { name: "fill_block", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_ratio", ty: scale_info::meta_type::() } ], - documentation: vec![ + docs: vec![ " A dispatch that will fill the block weight up to the given ratio.a" ] }, FunctionMetadata { name: "remark", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_remark", ty: scale_info::meta_type::>(), }], - documentation: vec![] + docs: vec![] }, FunctionMetadata { name: "set_heap_pages", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "pages", ty: scale_info::meta_type::(), } ], - documentation: vec![] + docs: vec![] }, FunctionMetadata { name: "set_code", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "code", ty: scale_info::meta_type::>(), } ], - documentation: vec![] + docs: vec![] }, FunctionMetadata { name: "set_code_without_checks", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "code", ty: scale_info::meta_type::>(), } ], - documentation: vec![] + docs: vec![] }, FunctionMetadata { name: "set_changes_trie_config", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "changes_trie_config", ty: scale_info::meta_type::>(), } ], - documentation: vec![] }, + docs: vec![] }, FunctionMetadata { name: "set_storage", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "items", ty: scale_info::meta_type::>(), } ], - documentation: vec![] }, + 
docs: vec![] }, FunctionMetadata { name: "kill_storage", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "keys", ty: scale_info::meta_type::>() } ], - documentation: vec![] }, + docs: vec![] }, FunctionMetadata { name: "kill_prefix", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "prefix", ty: scale_info::meta_type::() }, FunctionArgumentMetadata { name: "_subkeys", ty: scale_info::meta_type::() } ], - documentation: vec![] }, + docs: vec![] }, FunctionMetadata { name: "remark_with_event", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "remark", ty: scale_info::meta_type::>(), } ], - documentation: vec![] } + docs: vec![] } ] }), event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), @@ -699,37 +699,37 @@ fn metadata() { name: "BlockWeights", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![] + docs: vec![] }, PalletConstantMetadata { name: "BlockLength", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![], + docs: vec![], }, PalletConstantMetadata { name: "BlockHashCount", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![], + docs: vec![], }, PalletConstantMetadata { name: "DbWeight", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![], + docs: vec![], }, PalletConstantMetadata { name: "Version", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![], + docs: vec![], }, PalletConstantMetadata { name: "SS58Prefix", ty: scale_info::meta_type::(), value: vec![], - documentation: vec![] + docs: vec![] } ], error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), @@ -746,7 +746,7 @@ fn metadata() { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "Map", @@ -757,7 +757,7 @@ fn metadata() { hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], - 
documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "Map2", @@ -768,7 +768,7 @@ fn metadata() { hasher: StorageHasher::Twox64Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "DoubleMap", @@ -781,7 +781,7 @@ fn metadata() { key2_hasher: StorageHasher::Twox64Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "DoubleMap2", @@ -794,7 +794,7 @@ fn metadata() { key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "NMap", @@ -805,7 +805,7 @@ fn metadata() { value: scale_info::meta_type::(), }, default: vec![0], - documentation: vec![], + docs: vec![], }, StorageEntryMetadata { name: "NMap2", @@ -816,7 +816,7 @@ fn metadata() { value: scale_info::meta_type::(), }, default: vec![0], - documentation: vec![], + docs: vec![], }, ], }), @@ -825,25 +825,25 @@ fn metadata() { calls: vec![ FunctionMetadata { name: "foo", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_foo", ty: scale_info::meta_type::>(), } ], - documentation: vec![ + docs: vec![ " Doc comment put in metadata", ], }, FunctionMetadata { name: "foo_transactional", - arguments: vec![ + args: vec![ FunctionArgumentMetadata { name: "_foo", ty: scale_info::meta_type::>(), } ], - documentation: vec![ + docs: vec![ " Doc comment put in metadata", ], }, @@ -855,7 +855,7 @@ fn metadata() { name: "MyGetParam", ty: scale_info::meta_type::(), value: vec![10, 0, 0, 0], - documentation: vec![], + docs: vec![], }, ], error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), From 1dd29908a4ec9aa0c326a36d657b4b578c245ff2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Jul 2021 11:38:22 +0100 Subject: [PATCH 349/503] Update frame/example/Cargo.toml Co-authored-by: Keith Yeung --- frame/example/Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/frame/example/Cargo.toml 
b/frame/example/Cargo.toml index 037a542bb4b82..32ead0bc01dfa 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -31,7 +31,6 @@ sp-core = { version = "4.0.0-dev", path = "../../primitives/core", default-featu default = ["std"] std = [ "codec/std", - "scale-info/std", "frame-benchmarking/std", "frame-support/std", "frame-system/std", From 166c9bb7016244d345802087d559e4a49d9a082c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Jul 2021 11:41:13 +0100 Subject: [PATCH 350/503] Add scale_info/std and remove unused scale-info dependency --- Cargo.lock | 1 - frame/election-provider-support/Cargo.toml | 1 + frame/session/benchmarking/Cargo.toml | 1 - 3 files changed, 1 insertion(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4eebd246b6b57..666dd9b7e730f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5496,7 +5496,6 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", - "scale-info", "serde", "sp-core", "sp-io", diff --git a/frame/election-provider-support/Cargo.toml b/frame/election-provider-support/Cargo.toml index 557b62a12ee30..c50a050bd5768 100644 --- a/frame/election-provider-support/Cargo.toml +++ b/frame/election-provider-support/Cargo.toml @@ -29,6 +29,7 @@ sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime" } default = ["std"] std = [ "codec/std", + "scale-info/std", "sp-std/std", "sp-npos-elections/std", "sp-arithmetic/std", diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index 696cfdfdeb2d0..2f8e069347bc1 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -13,7 +13,6 @@ readme = "README.md" targets = ["x86_64-unknown-linux-gnu"] [dependencies] -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-session = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/session" } sp-runtime = { version = "4.0.0-dev", default-features = 
false, path = "../../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } From 4bee35601be25f5cf3c948abd1f4bf274a8ac8e2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Jul 2021 11:44:41 +0100 Subject: [PATCH 351/503] Remove scale-info dependency --- Cargo.lock | 1 - frame/election-provider-support/Cargo.toml | 2 -- 2 files changed, 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 666dd9b7e730f..4623ed445be13 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1809,7 +1809,6 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", "sp-arithmetic", "sp-npos-elections", "sp-runtime", diff --git a/frame/election-provider-support/Cargo.toml b/frame/election-provider-support/Cargo.toml index c50a050bd5768..c0d332315b020 100644 --- a/frame/election-provider-support/Cargo.toml +++ b/frame/election-provider-support/Cargo.toml @@ -14,7 +14,6 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../primitives/arithmetic" } sp-npos-elections = { version = "4.0.0-dev", default-features = false, path = "../../primitives/npos-elections" } @@ -29,7 +28,6 @@ sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime" } default = ["std"] std = [ "codec/std", - "scale-info/std", "sp-std/std", "sp-npos-elections/std", "sp-arithmetic/std", From cab3bb5fc6779283b6f788178fcdda2cc121e691 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Jul 2021 11:49:52 +0100 Subject: [PATCH 352/503] Remove treasury pallet::metadata --- frame/treasury/src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git 
a/frame/treasury/src/lib.rs b/frame/treasury/src/lib.rs index 93fae5bf9b038..c33e62c9c09a4 100644 --- a/frame/treasury/src/lib.rs +++ b/frame/treasury/src/lib.rs @@ -251,7 +251,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", BalanceOf = "Balance")] pub enum Event, I: 'static = ()> { /// New proposal. \[proposal_index\] Proposed(ProposalIndex), From bacc0002a18f9601b84040e78d16d7be3c188612 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 13 Jul 2021 12:00:07 +0100 Subject: [PATCH 353/503] Remove redundant Event test --- frame/support/src/event.rs | 210 +------------------------------------ 1 file changed, 1 insertion(+), 209 deletions(-) diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 37651d52fb2b6..ac881fbe301b7 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -287,212 +287,4 @@ macro_rules! __decl_generic_event { (@cannot_parse $ty:ty) => { compile_error!(concat!("The type `", stringify!($ty), "` can't be parsed as an unnamed one, please name it `Name = ", stringify!($ty), "`")); } -} - -#[cfg(test)] -#[allow(dead_code)] -mod tests { - use serde::Serialize; - use codec::{Encode, Decode}; - - mod system { - pub trait Config: 'static { - type Origin; - type BlockNumber; - type PalletInfo: crate::traits::PalletInfo; - type DbWeight: crate::traits::Get; - } - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=self {} - } - - decl_event!( - pub enum Event { - SystemEvent, - } - ); - } - - mod system_renamed { - pub trait Config: 'static { - type Origin; - type BlockNumber; - type PalletInfo: crate::traits::PalletInfo; - type DbWeight: crate::traits::Get; - } - - decl_module! 
{ - pub struct Module for enum Call where origin: T::Origin, system=self {} - } - - decl_event!( - pub enum Event { - SystemEvent, - } - ); - } - - mod event_module { - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=system {} - } - - decl_event!( - /// Event without renaming the generic parameter `Balance` and `Origin`. - pub enum Event where ::Balance, ::Origin - { - /// Hi, I am a comment. - TestEvent(Balance, Origin), - /// Dog - EventWithoutParams, - } - ); - } - - mod event_module2 { - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=system {} - } - - decl_event!( - /// Event with renamed generic parameter - pub enum Event where - BalanceRenamed = ::Balance, - OriginRenamed = ::Origin - { - TestEvent(BalanceRenamed), - TestOrigin(OriginRenamed), - } - ); - } - - mod event_module3 { - decl_event!( - pub enum Event { - HiEvent, - } - ); - } - - mod event_module4 { - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_module! { - pub struct Module for enum Call where origin: T::Origin, system=system {} - } - - decl_event!( - /// Event finish formatting on an unnamed one with trailing comma - pub enum Event where - ::Balance, - ::Origin, - { - TestEvent(Balance, Origin), - } - ); - } - - mod event_module5 { - use super::system; - - pub trait Config: system::Config { - type Balance; - } - - decl_module! 
{ - pub struct Module for enum Call where origin: T::Origin, system=system {} - } - - decl_event!( - /// Event finish formatting on an named one with trailing comma - pub enum Event where - BalanceRenamed = ::Balance, - OriginRenamed = ::Origin, - { - TestEvent(BalanceRenamed, OriginRenamed), - TrailingCommaInArgs( - u32, - u32, - ), - } - ); - } - - #[derive(Debug, Clone, PartialEq, Eq, Encode, Decode, Serialize)] - pub struct TestRuntime; - - #[derive(Debug, Clone, PartialEq, Eq, Encode, Decode, Serialize)] - pub struct TestRuntime2; - - impl event_module::Config for TestRuntime { - type Balance = u32; - } - - impl event_module2::Config for TestRuntime { - type Balance = u32; - } - - impl system::Config for TestRuntime { - type Origin = u32; - type BlockNumber = u32; - type PalletInfo = crate::tests::PanicPalletInfo; - type DbWeight = (); - } - - impl event_module::Config for TestRuntime2 { - type Balance = u32; - } - - impl event_module2::Config for TestRuntime2 { - type Balance = u32; - } - - impl system_renamed::Config for TestRuntime2 { - type Origin = u32; - type BlockNumber = u32; - type PalletInfo = crate::tests::PanicPalletInfo; - type DbWeight = (); - } - - impl system::Config for TestRuntime2 { - type Origin = u32; - type BlockNumber = u32; - type PalletInfo = crate::tests::PanicPalletInfo; - type DbWeight = (); - } - - #[test] - fn test_codec() { - let runtime_1_event_module_2 = TestEvent::event_module2( - event_module2::Event::::TestEvent(3) - ); - assert_eq!(runtime_1_event_module_2.encode()[0], 2); - - let runtime_2_event_module_2 = TestEventSystemRenamed::event_module2( - event_module2::Event::::TestEvent(3) - ); - assert_eq!(runtime_2_event_module_2.encode()[0], 5); - - let runtime_2_event_module_3 = TestEventSystemRenamed::event_module3( - event_module3::Event::HiEvent - ); - assert_eq!(runtime_2_event_module_3.encode()[0], 3); - } -} +} \ No newline at end of file From c91215c20a428f32c01911461207f070d488bcf7 Mon Sep 17 00:00:00 2001 From: 
Andrew Jones Date: Wed, 14 Jul 2021 10:37:56 +0100 Subject: [PATCH 354/503] Add back scale-info as dev dependency --- Cargo.lock | 1 + frame/session/benchmarking/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 4623ed445be13..74e8501bf352f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5495,6 +5495,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", + "scale-info", "serde", "sp-core", "sp-io", diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index 2f8e069347bc1..a5a9c9a361ef4 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -26,6 +26,7 @@ rand = { version = "0.7.2", default-features = false } [dev-dependencies] serde = { version = "1.0.126" } codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } +scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } pallet-staking-reward-curve = { version = "4.0.0-dev", path = "../../staking/reward-curve" } sp-io ={ version = "4.0.0-dev", path = "../../../primitives/io" } From 27fa7e234d2169a40d18dfac7f10624552b770c2 Mon Sep 17 00:00:00 2001 From: thiolliere Date: Wed, 14 Jul 2021 11:42:29 +0200 Subject: [PATCH 355/503] fix error metadata when no error defined in decl_module --- frame/support/src/dispatch.rs | 27 +++++++++++---------------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 69e22c2f60142..3d88cd419537a 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -1125,7 +1125,7 @@ macro_rules! decl_module { { $( $on_finalize )* } { $( $offchain )* } { $( $constants )* } - { &'static str } + { __NO_ERROR_DEFINED } { $( $integrity_test)* } [ $($t)* ] $($rest)* @@ -1148,7 +1148,7 @@ macro_rules! 
decl_module { { $( $on_finalize:tt )* } { $( $offchain:tt )* } { $( $constants:tt )* } - { $error_type:ty } + { $( $error_type:tt )* } { $( $integrity_test:tt )* } [ $( $dispatchables:tt )* ] $(#[doc = $doc_attr:tt])* @@ -1173,7 +1173,7 @@ macro_rules! decl_module { { $( $on_finalize )* } { $( $offchain )* } { $( $constants )* } - { $error_type } + { $( $error_type )* } { $( $integrity_test)* } [ $( $dispatchables )* @@ -1664,7 +1664,6 @@ macro_rules! decl_module { (@impl_function $module:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?>; $origin_ty:ty; - $error_type:ty; $ignore:ident; $(#[$fn_attr:meta])* $vis:vis fn $name:ident ( @@ -1686,7 +1685,6 @@ macro_rules! decl_module { (@impl_function $module:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?>; $origin_ty:ty; - $error_type:ty; $ignore:ident; $(#[$fn_attr:meta])* $vis:vis fn $name:ident ( @@ -1841,7 +1839,7 @@ macro_rules! decl_module { { $( $on_finalize:tt )* } { $( $offchain:tt )* } { $( $constants:tt )* } - { $error_type:ty } + { $( $error_type:tt )* } { $( $integrity_test:tt )* } ) => { $crate::__check_reserved_fn_name! { $( $fn_name )* } @@ -1925,7 +1923,6 @@ macro_rules! decl_module { @impl_function $mod_type<$trait_instance: $trait_name $(, $fn_instance: $fn_instantiable)?>; $origin_type; - $error_type; $from; $(#[doc = $doc_attr])* /// @@ -2145,7 +2142,7 @@ macro_rules! decl_module { $crate::__impl_error_metadata! { $mod_type<$trait_instance: $trait_name $(, $instance: $instantiable)?> { $( $other_where_bounds )* } - $error_type + $( $error_type )* } $crate::__impl_module_constants_metadata ! { $mod_type<$trait_instance: $trait_name $(, $instance: $instantiable)?> @@ -2190,8 +2187,7 @@ macro_rules! 
__impl_error_metadata { ( $mod_type:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?> { $( $other_where_bounds:tt )* } - $error_type:tt - $($rest:tt)* + __NO_ERROR_DEFINED ) => { impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* @@ -2199,17 +2195,14 @@ macro_rules! __impl_error_metadata { #[doc(hidden)] #[allow(dead_code)] pub fn error_metadata() -> Option<$crate::metadata::PalletErrorMetadata> { - Some($crate::metadata::PalletErrorMetadata { - ty: $crate::scale_info::meta_type::<$error_type>() - }) + None } } }; ( $mod_type:ident<$trait_instance:ident: $trait_name:ident$(, $instance:ident: $instantiable:path)?> { $( $other_where_bounds:tt )* } - { &'static str } - $($rest:tt)* + $( $error_type:tt )* ) => { impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $mod_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* @@ -2217,7 +2210,9 @@ macro_rules! 
__impl_error_metadata { #[doc(hidden)] #[allow(dead_code)] pub fn error_metadata() -> Option<$crate::metadata::PalletErrorMetadata> { - None + Some($crate::metadata::PalletErrorMetadata { + ty: $crate::scale_info::meta_type::<$( $error_type )*>() + }) } } }; From 82fb538a602922c58604cdf3d28f1b9f8912c950 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Jul 2021 10:47:44 +0100 Subject: [PATCH 356/503] Add Module3 to tests --- frame/support/test/tests/construct_runtime.rs | 92 +++++++++++++++++++ 1 file changed, 92 insertions(+) diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 2f06a350bb85b..af3b8f9d35027 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -662,6 +662,98 @@ fn test_metadata() { error: None, index: 33, }, + PalletMetadata { + name: "NestedModule3", + storage: Some(PalletStorageMetadata { + prefix: "Module", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![FunctionMetadata { + name: "fail", + args: vec![], + docs: vec![], + }], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::(), + }), + constants: vec![], + error: None, + index: 34, + }, + PalletMetadata { + name: "Module3", + storage: Some(PalletStorageMetadata { + prefix: "Module", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![ + FunctionMetadata { + name: "fail", + args: vec![], + docs: vec![], + }, + FunctionMetadata { + name: "aux_1", + args: vec![ + FunctionArgumentMetadata { + name: "_data", + ty: scale_info::meta_type::>() + } + ], + docs: vec![], + }, + FunctionMetadata { + name: "aux_2", + args: vec![ + FunctionArgumentMetadata { + name: "_data", + ty: scale_info::meta_type::() + }, + FunctionArgumentMetadata { + name: "_data2", + ty: scale_info::meta_type::>() + } + ], + docs: vec![], + }, + FunctionMetadata { + name: 
"aux_3", + args: vec![ + FunctionArgumentMetadata { + name: "_data", + ty: scale_info::meta_type::() + }, + FunctionArgumentMetadata { + name: "_data2", + ty: scale_info::meta_type::() + } + ], + docs: vec![], + }, + FunctionMetadata { + name: "aux_4", + args: vec![], + docs: vec![], + }, + FunctionMetadata { + name: "operational", + args: vec![], + docs: vec![], + }, + ], + }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::(), + }), + constants: vec![], + error: None, + index: 33, + }, PalletMetadata { name: "Module1_3", storage: Some(PalletStorageMetadata { From e3865c2eba675b5301433be881c44800c84b0e9d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Jul 2021 11:12:30 +0100 Subject: [PATCH 357/503] Fix metadata test --- frame/support/test/tests/construct_runtime.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index af3b8f9d35027..3a217118f8dc5 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -752,7 +752,7 @@ fn test_metadata() { }), constants: vec![], error: None, - index: 33, + index: 35, }, PalletMetadata { name: "Module1_3", From b2d9c25e6d399306236b53dfd9ccd0f94773e564 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Jul 2021 16:09:21 +0100 Subject: [PATCH 358/503] Add docs feature to frame-support test --- frame/support/test/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 826162032ad60..a1782241d8cac 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -18,7 +18,7 @@ scale-info = { version = "0.9.0", default-features = false, features = ["derive" sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io", 
default-features = false } sp-state-machine = { version = "0.10.0-dev", optional = true, path = "../../../primitives/state-machine" } -frame-support = { version = "4.0.0-dev", default-features = false, path = "../" } +frame-support = { version = "4.0.0-dev", default-features = false, features = ["metadata-docs"], path = "../" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/runtime" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } From 9fcccb828742f743bb584959992cd45e5f14b7f0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Jul 2021 16:12:36 +0100 Subject: [PATCH 359/503] WIP fixing pallet metadata test --- frame/support/test/tests/pallet.rs | 249 ++++++++++++++++++++++++++++- 1 file changed, 248 insertions(+), 1 deletion(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 8cd8644e80bf7..e2969130f1ec2 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1017,6 +1017,238 @@ fn metadata() { default: vec![0], docs: vec![], }, + StorageEntryMetadata { + name: "Value2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "Map", + modifier: StorageEntryModifier::Default, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hasher: StorageHasher::Blake2_128Concat, + }, + default: vec![4, 0], + docs: vec![], + }, + StorageEntryMetadata { + name: "Map2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hasher: StorageHasher::Twox64Concat, + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap", + 
modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::DoubleMap { + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "DoubleMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::DoubleMap { + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + hasher: StorageHasher::Twox64Concat, + key2_hasher: StorageHasher::Blake2_128Concat, + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "NMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::NMap { + keys: scale_info::meta_type::(), + hashers: vec![StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "NMap2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::NMap { + keys: scale_info::meta_type::<(u16, u32)>(), + hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalValue", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + hasher: StorageHasher::Twox64Concat, + }, + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalDoubleMap", + modifier: StorageEntryModifier::Optional, + ty: 
StorageEntryType::DoubleMap { + value: scale_info::meta_type::(), + key1: scale_info::meta_type::(), + key2: scale_info::meta_type::(), + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + }, + default: vec![0], + docs: vec![], + }, + #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + name: "ConditionalNMap", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::NMap { + keys: scale_info::meta_type::<(u8, u16)>(), + hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + value: scale_info::meta_type::(), + }, + default: vec![0], + docs: vec![], + }, + ], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![ + FunctionMetadata { + name: "foo", + args: vec![ + FunctionArgumentMetadata { + name: "_foo", + ty: scale_info::meta_type::>(), + }, + FunctionArgumentMetadata { + name: "_bar", + ty: scale_info::meta_type::(), + } + ], + docs: vec![ + " Doc comment put in metadata", + ], + }, + FunctionMetadata { + name: "foo_transactional", + args: vec![ + FunctionArgumentMetadata { + name: "foo", + ty: scale_info::meta_type::>(), + } + ], + docs: vec![ + " Doc comment put in metadata", + ], + }, + FunctionMetadata { + name: "foo_no_post_info", + args: vec![], + docs: vec![], + }, + ] + }), + event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), + constants: vec![ + PalletConstantMetadata { + name: "MyGetParam", + ty: scale_info::meta_type::(), + value: vec![10, 0, 0, 0], + docs: vec![ + " Some comment", + " Some comment", + ], + }, + PalletConstantMetadata { + name: "MyGetParam2", + ty: scale_info::meta_type::(), + value: vec![11, 0, 0, 0], + docs: vec![ + " Some comment", + " Some comment", + ], + }, + PalletConstantMetadata { + name: "MyGetParam3", + ty: scale_info::meta_type::(), + value: vec![12, 0, 0, 0, 0, 0, 0, 0], + docs: vec![], + }, + PalletConstantMetadata { + name: "some_extra", + ty: scale_info::meta_type::(), + value: 
vec![100, 0, 0, 0, 0, 0, 0, 0], + docs: vec![ + " Some doc", + " Some doc", + ], + }, + PalletConstantMetadata { + name: "some_extra_extra", + ty: scale_info::meta_type::(), + value: vec![0, 0, 0, 0, 0, 0, 0, 0], + docs: vec![ + " Some doc", + ], + }, + ], + error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), + }, + PalletMetadata { + index: 1, + name: "Example", + storage: Some(PalletStorageMetadata { + prefix: "Example", + entries: vec![ + StorageEntryMetadata { + name: "ValueWhereClause", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "Value", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, + StorageEntryMetadata { + name: "Value2", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::()), + default: vec![0], + docs: vec![], + }, StorageEntryMetadata { name: "Map", modifier: StorageEntryModifier::Default, @@ -1215,6 +1447,21 @@ fn metadata() { ], error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), }, + PalletMetadata { + index: 2, + name: "Example2", + storage: Some(PalletStorageMetadata { + prefix: "Example2", + entries: vec![], + }), + calls: Some(PalletCallMetadata { + ty: scale_info::meta_type::>(), + calls: vec![] + }), + event: Some(PalletEventMetadata { ty: scale_info::meta_type::() }), + constants: vec![], + error: None, + } ]; let extrinsic = ExtrinsicMetadata { @@ -1244,7 +1491,7 @@ fn metadata() { _ => panic!("metadata has been bumped, test needs to be updated"), }; - pretty_assertions::assert_eq!(actual_metadata.pallets[1], expected_metadata.pallets[1]); + pretty_assertions::assert_eq!(actual_metadata, expected_metadata); } #[test] From 3c3945c7c1462ab16b0392c69587b9f868d1d5ea Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Jul 2021 17:09:00 
+0100 Subject: [PATCH 360/503] Remove redundant FunctionMetadata, FunctionArgumentMetadata as per https://github.com/paritytech/frame-metadata/pull/20 --- Cargo.lock | 2 +- frame/support/Cargo.toml | 2 +- .../procedural/src/pallet/expand/call.rs | 27 +-- frame/support/src/dispatch.rs | 108 +--------- frame/support/test/tests/construct_runtime.rs | 198 +++--------------- frame/support/test/tests/pallet.rs | 190 +---------------- .../test/tests/pallet_compatibility.rs | 1 - .../tests/pallet_compatibility_instance.rs | 13 +- frame/support/test/tests/pallet_instance.rs | 129 +----------- 9 files changed, 43 insertions(+), 627 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1132ddf6055e8..5310af10aab75 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1852,7 +1852,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#9cfeeeeab8ec01f2acc370e6132adc5ed48cd920" +source = "git+https://github.com/paritytech/frame-metadata?branch=aj-remove-function-metadata#cefd26e7f007a6f37e0902e1fc14bb0c25be3e4e" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 6ad90ec846692..d214171d615cc 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } -frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v14"] } +frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-remove-function-metadata", default-features = 
false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 4875947cb2c30..b9ed2685b3018 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -67,18 +67,6 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { .collect::>() }); - let args_meta_type = methods.iter().map(|method| { - method.args.iter() - .map(|(is_compact, _, type_)| { - if *is_compact { - quote::quote_spanned!(type_.span() => #frame_support::codec::Compact<#type_>) - } else { - quote::quote_spanned!(type_.span() => #type_ ) - } - }) - .collect::>() - }); - let default_docs = [syn::parse_quote!( r"Contains one variant per dispatchable that can be called by an extrinsic." 
)]; @@ -230,20 +218,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { impl<#type_impl_gen> #pallet_ident<#type_use_gen> #where_clause { #[doc(hidden)] pub fn call_functions() -> #frame_support::metadata::PalletCallMetadata { - let ty = #frame_support::scale_info::meta_type::<#call_ident<#type_use_gen>>(); - let calls = #frame_support::sp_std::vec![ #( - #frame_support::metadata::FunctionMetadata { - name: stringify!(#fn_name), - args: #frame_support::sp_std::vec![ #( - #frame_support::metadata::FunctionArgumentMetadata { - name: stringify!(#args_name), - ty: #frame_support::scale_info::meta_type::<#args_meta_type>(), - }, - )* ], - docs: #frame_support::sp_std::vec![ #( #fn_doc ),* ], - }, - )* ]; - #frame_support::metadata::PalletCallMetadata { ty, calls } + #frame_support::scale_info::meta_type::<#call_ident<#type_use_gen>>().into() } } ) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 3d88cd419537a..58e0683ad1d54 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2170,11 +2170,7 @@ macro_rules! __dispatch_impl_metadata { #[doc(hidden)] #[allow(dead_code)] pub fn call_functions() -> $crate::metadata::PalletCallMetadata { - let ty = $crate::scale_info::meta_type::<$call_type<$trait_instance $(, $instance)?>>(); - $crate::metadata::PalletCallMetadata { - ty, - calls: $crate::__call_to_functions!($($rest)*), - } + $crate::scale_info::meta_type::<$call_type<$trait_instance $(, $instance)?>>().into() } } } @@ -2327,108 +2323,6 @@ macro_rules! __impl_module_constants_metadata { } } -/// Convert the list of calls into their JSON representation, joined by ",". -#[macro_export] -#[doc(hidden)] -macro_rules! 
__call_to_functions { - ( - $origin_type:ty - { - $( - $(#[doc = $doc_attr:tt])* - fn $fn_name:ident($from:ident - $( - , $(#[$codec_attr:ident])* $param_name:ident : $param:ty - )* - ); - )* - } - ) => { - $crate::__functions_to_metadata!(0; $origin_type;; $( - fn $fn_name( $($(#[$codec_attr])* $param_name: $param ),* ); - $( $doc_attr ),*; - )*) - }; -} - - -/// Convert a list of functions into a list of `FunctionMetadata` items. -#[macro_export] -#[doc(hidden)] -macro_rules! __functions_to_metadata{ - ( - $fn_id:expr; - $origin_type:ty; - $( $function_metadata:expr ),*; - fn $fn_name:ident( - $( - $(#[$codec_attr:ident])* $param_name:ident : $param:ty - ),* - ); - $( $fn_doc:expr ),*; - $( $rest:tt )* - ) => { - $crate::__functions_to_metadata!( - $fn_id + 1; $origin_type; - $( $function_metadata, )* $crate::__function_to_metadata!( - fn $fn_name($( $(#[$codec_attr])* $param_name : $param ),*); $( $fn_doc ),*; $fn_id; - ); - $($rest)* - ) - }; - ( - $fn_id:expr; - $origin_type:ty; - $( $function_metadata:expr ),*; - ) => { - $crate::sp_std::vec![ $( $function_metadata ),* ] - } -} - -/// Convert a function into its metadata representation. -#[macro_export] -#[doc(hidden)] -macro_rules! 
__function_to_metadata { - ( - fn $fn_name:ident( - $( $(#[$codec_attr:ident])* $param_name:ident : $param:ty),* - ); - $( $fn_doc:expr ),*; - $fn_id:expr; - ) => { - $crate::metadata::FunctionMetadata { - name: stringify!($fn_name), - args: $crate::sp_std::vec![ - $( - $crate::metadata::FunctionArgumentMetadata { - name: stringify!($param_name), - ty: $crate::__function_to_metadata!(@meta_type - $(#[$codec_attr])* $param_name $param - ), - } - ),* - ], - #[cfg(feature = "metadata-docs")] - docs: $crate::sp_std::vec![ $( $fn_doc ),* ], - #[cfg(not(feature = "metadata-docs"))] - docs: $crate::sp_std::vec![], - } - }; - - (@meta_type #[compact] $param_name:ident $param:ty) => { - $crate::scale_info::meta_type::<$crate::codec::Compact<$param>>() - }; - (@meta_type $param_name:ident $param:ty) => { - $crate::scale_info::meta_type::<$param>() - }; - (@meta_type $(#[codec_attr:ident])* $param_name:ident, $param:ty) => { - compile_error!(concat!( - "Invalid attribute for parameter `", stringify!($param_name), - "`, the following attributes are supported: `#[compact]`" - )); - } -} - #[macro_export] #[doc(hidden)] macro_rules! 
__check_reserved_fn_name { diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 3a217118f8dc5..35b8096c29bee 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -578,23 +578,15 @@ fn call_subtype_conversion() { #[test] fn test_metadata() { + use scale_info::meta_type; use frame_support::metadata::*; let pallets = vec![ PalletMetadata { name: "System", storage: None, - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "noop", - args: vec![], - docs: vec![], - }], - }), - event: Some(PalletEventMetadata { - ty: scale_info::meta_type::>(), - }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), constants: vec![], error: None, index: 30, @@ -605,17 +597,8 @@ fn test_metadata() { prefix: "Instance1Module", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "fail", - args: vec![], - docs: vec![], - }], - }), - event: Some(PalletEventMetadata { - ty: scale_info::meta_type::>(), - }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), constants: vec![], error: None, index: 31, @@ -626,17 +609,8 @@ fn test_metadata() { prefix: "Module", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "fail", - args: vec![], - docs: vec![], - }], - }), - event: Some(PalletEventMetadata { - ty: scale_info::meta_type::(), - }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::().into()), constants: vec![], error: None, index: 32, @@ -647,17 +621,8 @@ fn test_metadata() { prefix: "Instance2Module", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "fail", - args: vec![], - docs: vec![], - }], - }), - event: 
Some(PalletEventMetadata { - ty: scale_info::meta_type::>(), - }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), constants: vec![], error: None, index: 33, @@ -668,17 +633,8 @@ fn test_metadata() { prefix: "Module", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "fail", - args: vec![], - docs: vec![], - }], - }), - event: Some(PalletEventMetadata { - ty: scale_info::meta_type::(), - }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::().into()), constants: vec![], error: None, index: 34, @@ -689,67 +645,8 @@ fn test_metadata() { prefix: "Module", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![ - FunctionMetadata { - name: "fail", - args: vec![], - docs: vec![], - }, - FunctionMetadata { - name: "aux_1", - args: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::>() - } - ], - docs: vec![], - }, - FunctionMetadata { - name: "aux_2", - args: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::() - }, - FunctionArgumentMetadata { - name: "_data2", - ty: scale_info::meta_type::>() - } - ], - docs: vec![], - }, - FunctionMetadata { - name: "aux_3", - args: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::() - }, - FunctionArgumentMetadata { - name: "_data2", - ty: scale_info::meta_type::() - } - ], - docs: vec![], - }, - FunctionMetadata { - name: "aux_4", - args: vec![], - docs: vec![], - }, - FunctionMetadata { - name: "operational", - args: vec![], - docs: vec![], - }, - ], - }), - event: Some(PalletEventMetadata { - ty: scale_info::meta_type::(), - }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::().into()), constants: vec![], error: None, index: 35, @@ -769,14 +666,7 @@ fn test_metadata() { PalletMetadata { name: "Module1_4", storage: None, - calls: Some(PalletCallMetadata { 
- ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "fail", - args: vec![], - docs: vec![], - }], - }), + calls: Some(meta_type::>().into()), event: None, constants: vec![], error: None, @@ -786,9 +676,7 @@ fn test_metadata() { name: "Module1_5", storage: None, calls: None, - event: Some(PalletEventMetadata { - ty: scale_info::meta_type::>(), - }), + event: Some(meta_type::>().into()), constants: vec![], error: None, index: 4, @@ -799,17 +687,8 @@ fn test_metadata() { prefix: "Instance6Module", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "fail", - args: vec![], - docs: vec![], - }], - }), - event: Some(PalletEventMetadata { - ty: scale_info::meta_type::>(), - }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), constants: vec![], error: None, index: 1, @@ -820,16 +699,9 @@ fn test_metadata() { prefix: "Instance7Module", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "fail", - args: vec![], - docs: vec![], - }], - }), + calls: Some(meta_type::>().into()), event: Some(PalletEventMetadata { - ty: scale_info::meta_type::>(), + ty: meta_type::>(), }), constants: vec![], error: None, @@ -841,17 +713,8 @@ fn test_metadata() { prefix: "Instance8Module", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "fail", - args: vec![], - docs: vec![], - }], - }), - event: Some(PalletEventMetadata { - ty: scale_info::meta_type::>(), - }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), constants: vec![], error: None, index: 12, @@ -862,17 +725,8 @@ fn test_metadata() { prefix: "Instance9Module", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![FunctionMetadata { - name: "fail", - args: vec![], - docs: 
vec![], - }], - }), - event: Some(PalletEventMetadata { - ty: scale_info::meta_type::>(), - }), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), constants: vec![], error: None, index: 13, @@ -880,13 +734,13 @@ fn test_metadata() { ]; let extrinsic = ExtrinsicMetadata { - ty: scale_info::meta_type::(), + ty: meta_type::(), version: 4, signed_extensions: vec![ SignedExtensionMetadata { identifier: "UnitSignedExtension", - ty: scale_info::meta_type::<()>(), - additional_signed: scale_info::meta_type::<()>(), + ty: meta_type::<()>(), + additional_signed: meta_type::<()>(), } ] }; diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index e2969130f1ec2..18a23cfb59cc2 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -858,105 +858,8 @@ fn metadata() { index: 0, name: "System", storage: None, - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![ - FunctionMetadata { - name: "fill_block", - args: vec![ - FunctionArgumentMetadata { - name: "_ratio", - ty: scale_info::meta_type::() - } - ], - docs: vec![ - " A dispatch that will fill the block weight up to the given ratio.a" - ] - }, - FunctionMetadata { - name: "remark", - args: vec![ - FunctionArgumentMetadata { - name: "_remark", - ty: scale_info::meta_type::>(), - }], - docs: vec![] - }, - FunctionMetadata { - name: "set_heap_pages", - args: vec![ - FunctionArgumentMetadata { - name: "pages", - ty: scale_info::meta_type::(), - } - ], - docs: vec![] - }, - FunctionMetadata { - name: "set_code", - args: vec![ - FunctionArgumentMetadata { - name: "code", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] - }, - FunctionMetadata { - name: "set_code_without_checks", - args: vec![ - FunctionArgumentMetadata { - name: "code", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] - }, - FunctionMetadata { - name: "set_changes_trie_config", - args: vec![ - FunctionArgumentMetadata { - name: 
"changes_trie_config", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] }, - FunctionMetadata { - name: "set_storage", - args: vec![ - FunctionArgumentMetadata { - name: "items", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] }, - FunctionMetadata { name: "kill_storage", - args: vec![ - FunctionArgumentMetadata { name: "keys", ty: scale_info::meta_type::>() } - ], - docs: vec![] }, - FunctionMetadata { - name: "kill_prefix", - args: vec![ - FunctionArgumentMetadata { name: "prefix", ty: scale_info::meta_type::() }, - FunctionArgumentMetadata { - name: "_subkeys", - ty: scale_info::meta_type::() - } - ], - docs: vec![] }, - FunctionMetadata { - name: "remark_with_event", - args: vec![ - FunctionArgumentMetadata { - name: "remark", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] } - ] - }), - event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), + calls: Some(scale_info::meta_type::>().into()), + event: Some(scale_info::meta_type::>().into()), constants: vec![ PalletConstantMetadata { name: "BlockWeights", @@ -995,7 +898,7 @@ fn metadata() { docs: vec![] } ], - error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), + error: Some(scale_info::meta_type::>().into()), }, PalletMetadata { index: 1, @@ -1138,45 +1041,8 @@ fn metadata() { }, ], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![ - FunctionMetadata { - name: "foo", - args: vec![ - FunctionArgumentMetadata { - name: "_foo", - ty: scale_info::meta_type::>(), - }, - FunctionArgumentMetadata { - name: "_bar", - ty: scale_info::meta_type::(), - } - ], - docs: vec![ - " Doc comment put in metadata", - ], - }, - FunctionMetadata { - name: "foo_transactional", - args: vec![ - FunctionArgumentMetadata { - name: "foo", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![ - " Doc comment put in metadata", - ], - }, - FunctionMetadata { - name: "foo_no_post_info", - args: vec![], - docs: vec![], - }, - ] - }), - event: 
Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), + calls: Some(scale_info::meta_type::>().into()), + event: Some(scale_info::meta_type::>().into()), constants: vec![ PalletConstantMetadata { name: "MyGetParam", @@ -1363,45 +1229,8 @@ fn metadata() { }, ], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![ - FunctionMetadata { - name: "foo", - args: vec![ - FunctionArgumentMetadata { - name: "_foo", - ty: scale_info::meta_type::>(), - }, - FunctionArgumentMetadata { - name: "_bar", - ty: scale_info::meta_type::(), - } - ], - docs: vec![ - " Doc comment put in metadata", - ], - }, - FunctionMetadata { - name: "foo_transactional", - args: vec![ - FunctionArgumentMetadata { - name: "foo", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![ - " Doc comment put in metadata", - ], - }, - FunctionMetadata { - name: "foo_no_post_info", - args: vec![], - docs: vec![], - }, - ] - }), - event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), + calls: Some(scale_info::meta_type::>().into()), + event: Some(scale_info::meta_type::>().into()), constants: vec![ PalletConstantMetadata { name: "MyGetParam", @@ -1454,10 +1283,7 @@ fn metadata() { prefix: "Example2", entries: vec![], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![] - }), + calls: Some(scale_info::meta_type::>().into()), event: Some(PalletEventMetadata { ty: scale_info::meta_type::() }), constants: vec![], error: None, diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 586f050d6388e..27e6aaba9e47f 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -314,7 +314,6 @@ mod test { let calls1 = pallets[1].calls.as_ref().unwrap(); let calls2 = pallets[2].calls.as_ref().unwrap(); - pretty_assertions::assert_eq!(calls1.calls, calls2.calls); assert_meta_types(calls1.ty.id(), 
calls2.ty.id()); // event: check variants and fields but ignore the type name which will be different diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index 94263a78580a9..3fbc6e49192b6 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -290,12 +290,6 @@ mod test { _ => unreachable!(), }; - let assert_meta_types = |ty_id1, ty_id2| { - let ty1 = types.resolve(ty_id1).map(|ty| ty.type_def()); - let ty2 = types.resolve(ty_id2).map(|ty| ty.type_def()); - pretty_assertions::assert_eq!(ty1, ty2); - }; - let get_enum_variants = |ty_id| { match types.resolve(ty_id).map(|ty| ty.type_def()) { Some(ty) => { @@ -328,10 +322,9 @@ mod test { for i in vec![1, 3, 5].into_iter() { pretty_assertions::assert_eq!(pallets[i].storage, pallets[i + 1].storage); - let calls1 = pallets[i].calls.as_ref().unwrap(); - let calls2 = pallets[i + 1].calls.as_ref().unwrap(); - pretty_assertions::assert_eq!(calls1.calls, calls2.calls); - assert_meta_types(calls1.ty.id(), calls2.ty.id()); + let call1_variants = get_enum_variants(pallets[i].calls.as_ref().unwrap().ty.id()); + let call2_variants = get_enum_variants(pallets[i + 1].calls.as_ref().unwrap().ty.id()); + assert_enum_variants(call1_variants, call2_variants); // event: check variants and fields but ignore the type name which will be different let event1_variants = get_enum_variants(pallets[i].event.as_ref().unwrap().ty.id()); diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 4fa4ae0d9c097..902bdc7061139 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -595,104 +595,7 @@ fn metadata() { index: 0, name: "System", storage: None, - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![ - FunctionMetadata { - name: "fill_block", - args: 
vec![ - FunctionArgumentMetadata { - name: "_ratio", - ty: scale_info::meta_type::() - } - ], - docs: vec![ - " A dispatch that will fill the block weight up to the given ratio.a" - ] - }, - FunctionMetadata { - name: "remark", - args: vec![ - FunctionArgumentMetadata { - name: "_remark", - ty: scale_info::meta_type::>(), - }], - docs: vec![] - }, - FunctionMetadata { - name: "set_heap_pages", - args: vec![ - FunctionArgumentMetadata { - name: "pages", - ty: scale_info::meta_type::(), - } - ], - docs: vec![] - }, - FunctionMetadata { - name: "set_code", - args: vec![ - FunctionArgumentMetadata { - name: "code", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] - }, - FunctionMetadata { - name: "set_code_without_checks", - args: vec![ - FunctionArgumentMetadata { - name: "code", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] - }, - FunctionMetadata { - name: "set_changes_trie_config", - args: vec![ - FunctionArgumentMetadata { - name: "changes_trie_config", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] }, - FunctionMetadata { - name: "set_storage", - args: vec![ - FunctionArgumentMetadata { - name: "items", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] }, - FunctionMetadata { name: "kill_storage", - args: vec![ - FunctionArgumentMetadata { name: "keys", ty: scale_info::meta_type::>() } - ], - docs: vec![] }, - FunctionMetadata { - name: "kill_prefix", - args: vec![ - FunctionArgumentMetadata { name: "prefix", ty: scale_info::meta_type::() }, - FunctionArgumentMetadata { - name: "_subkeys", - ty: scale_info::meta_type::() - } - ], - docs: vec![] }, - FunctionMetadata { - name: "remark_with_event", - args: vec![ - FunctionArgumentMetadata { - name: "remark", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![] } - ] - }), + calls: Some(scale_info::meta_type::>().into()), event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), constants: vec![ PalletConstantMetadata { @@ -820,35 +723,7 @@ fn metadata() { }, 
], }), - calls: Some(PalletCallMetadata { - ty: scale_info::meta_type::>(), - calls: vec![ - FunctionMetadata { - name: "foo", - args: vec![ - FunctionArgumentMetadata { - name: "_foo", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![ - " Doc comment put in metadata", - ], - }, - FunctionMetadata { - name: "foo_transactional", - args: vec![ - FunctionArgumentMetadata { - name: "_foo", - ty: scale_info::meta_type::>(), - } - ], - docs: vec![ - " Doc comment put in metadata", - ], - }, - ] - }), + calls: Some(scale_info::meta_type::>().into()), event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), constants: vec![ PalletConstantMetadata { From c849732355d1cbf631dc4926cb753ec916a0500c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 14 Jul 2021 17:14:10 +0100 Subject: [PATCH 361/503] Use main branch of frame-metadata --- Cargo.lock | 2 +- frame/support/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5310af10aab75..2a3fec20c8590 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1852,7 +1852,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=aj-remove-function-metadata#cefd26e7f007a6f37e0902e1fc14bb0c25be3e4e" +source = "git+https://github.com/paritytech/frame-metadata?branch=main#7bb2935f9c2a51193708d5cd4b190deee726c663" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index d214171d615cc..6ad90ec846692 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } -frame-metadata 
= { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "aj-remove-function-metadata", default-features = false, features = ["v14"] } +frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } From fa4b2d77b40e59f322984803130c1587e012236f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 16 Jul 2021 09:57:11 +0100 Subject: [PATCH 362/503] Use patch of scale-info for latest changes --- Cargo.lock | 6 ++---- Cargo.toml | 3 ++- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2a3fec20c8590..25ddd7a9946b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8338,8 +8338,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0af18b6ba34ebe0429199eb77e64f04c98508b8afb420aaac8fc187bb1c8ff02" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#340d9008d9b1b5cffabb78cc50d00b4465b923cd" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -8352,8 +8351,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "113a247fe47d242d24f4795cbc069d0a168a70da9e1cff98b5645edcc21f1eed" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#340d9008d9b1b5cffabb78cc50d00b4465b923cd" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/Cargo.toml b/Cargo.toml index 376002842a463..54303f7b82543 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -273,4 +273,5 @@ zeroize = { opt-level = 3 } 
panic = "unwind" [patch.crates-io] -finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } \ No newline at end of file +finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", features = ["derive"] } From 22ab7d286ba10608604110a4ca1861cb0d07cc4a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 16 Jul 2021 12:12:58 +0100 Subject: [PATCH 363/503] Use latest patched scale-info --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 25ddd7a9946b8..3d3f9220372ec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8338,7 +8338,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.9.2" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#340d9008d9b1b5cffabb78cc50d00b4465b923cd" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#25161eb6cc7570120ec04a82a512a8ade54aa757" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -8351,7 +8351,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.6.1" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#340d9008d9b1b5cffabb78cc50d00b4465b923cd" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#25161eb6cc7570120ec04a82a512a8ade54aa757" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", From 9d67a31af8880a839a0b2d8b04c63399a7463afd Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 16 Jul 2021 16:40:39 +0100 Subject: [PATCH 364/503] Manual TypeInfo for DigestItem --- primitives/runtime/src/generic/digest.rs | 113 ++++++++++++++++++++++- 1 file changed, 111 insertions(+), 2 deletions(-) diff --git a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index 03c6c4ee26680..871c52bcb1b53 100644 --- 
a/primitives/runtime/src/generic/digest.rs +++ b/primitives/runtime/src/generic/digest.rs @@ -24,7 +24,7 @@ use sp_std::prelude::*; use crate::ConsensusEngineId; use crate::codec::{Decode, Encode, Input, Error}; -use crate::scale_info::TypeInfo; +use crate::scale_info::{Path, TypeInfo, Type, TypeParameter, build::{Fields, Variants}, meta_type}; use sp_core::{ChangesTrieConfiguration, RuntimeDebug}; /// Generic header digest. @@ -74,7 +74,7 @@ impl Digest { /// Digest item that is able to encode/decode 'system' digest items and /// provide opaque access to other items. -#[derive(PartialEq, Eq, Clone, RuntimeDebug, TypeInfo)] +#[derive(PartialEq, Eq, Clone, RuntimeDebug)] #[cfg_attr(feature = "std", derive(parity_util_mem::MallocSizeOf))] pub enum DigestItem { /// System digest item that contains the root of changes trie at given @@ -151,6 +151,71 @@ impl<'a, Hash: Decode> serde::Deserialize<'a> for DigestItem { } } +impl TypeInfo for DigestItem +where + Hash: TypeInfo + 'static +{ + type Identity = Self; + + fn type_info() -> Type { + Type::builder() + .path(Path::new("DigestItem", module_path!())) + .type_params(vec![TypeParameter::new("Hash", Some(meta_type::()))]) + .variant( + Variants::new() + .variant("ChangesTrieRoot", |v| { + v.index(DigestItemType::ChangesTrieRoot as u8).fields( + Fields::unnamed() + .field(|f| f.ty::().type_name("Hash")), + ) + }) + .variant("PreRuntime", |v| { + v.index(DigestItemType::PreRuntime as u8).fields( + Fields::unnamed() + .field(|f| { + f.ty::() + .type_name("ConsensusEngineId") + }) + .field(|f| f.ty::>().type_name("Vec")), + ) + }) + .variant("Consensus", |v| { + v.index(DigestItemType::Consensus as u8).fields( + Fields::unnamed() + .field(|f| { + f.ty::() + .type_name("ConsensusEngineId") + }) + .field(|f| f.ty::>().type_name("Vec")), + ) + }) + .variant("Seal", |v| { + v.index(DigestItemType::Seal as u8).fields( + Fields::unnamed() + .field(|f| { + f.ty::() + .type_name("ConsensusEngineId") + }) + .field(|f| 
f.ty::>().type_name("Vec")), + ) + }) + .variant("ChangesTrieSignal", |v| { + v.index(DigestItemType::ChangesTrieSignal as u8).fields( + Fields::unnamed().field(|f| { + f.ty::().type_name("ChangesTrieSignal") + }), + ) + }) + .variant("Other", |v| { + v.index(DigestItemType::Other as u8).fields( + Fields::unnamed() + .field(|f| f.ty::>().type_name("Vec")), + ) + }), + ) + } +} + /// A 'referencing view' for digest item. Does not own its contents. Used by /// final runtime implementations for encoding/decoding its log items. #[derive(PartialEq, Eq, Clone, RuntimeDebug)] @@ -492,4 +557,48 @@ mod tests { r#"{"logs":["0x0204000000","0x000c010203","0x05746573740c010203"]}"# ); } + + #[test] + fn digest_item_type_info() { + let type_info = DigestItem::::type_info(); + let variants = + if let scale_info::TypeDef::Variant(variant) = type_info.type_def() { + variant.variants() + } else { + panic!("Should be a TypeDef::TypeDefVariant") + }; + + // ensure that all variants are covered by manual TypeInfo impl + let check = |digest_item_type: DigestItemType| { + let (variant_name, digest_item) = + match digest_item_type { + DigestItemType::Other => + ("Other", DigestItem::Other(Default::default())), + DigestItemType::ChangesTrieRoot => + ("ChangesTrieRoot", DigestItem::ChangesTrieRoot(Default::default())), + DigestItemType::Consensus => + ("Consensus", DigestItem::Consensus(Default::default(), Default::default())), + DigestItemType::Seal => + ("Seal", DigestItem::Seal(Default::default(), Default::default())), + DigestItemType::PreRuntime => + ("PreRuntime", DigestItem::PreRuntime(Default::default(), Default::default())), + DigestItemType::ChangesTrieSignal => + ("ChangesTrieSignal", DigestItem::ChangesTrieSignal( + ChangesTrieSignal::NewConfiguration(Default::default()) + )), + }; + let encoded = digest_item.encode(); + let variant = variants.iter().find(|v| &v.name() == variant_name) + .expect(&format!("Variant {} not found", variant_name)); + + assert_eq!(digest_item_type as 
u8, variant.index()) + }; + + check(DigestItemType::Other); + check(DigestItemType::ChangesTrieRoot); + check(DigestItemType::Consensus); + check(DigestItemType::Seal); + check(DigestItemType::PreRuntime); + check(DigestItemType::ChangesTrieSignal); + } } From 4491a421f05e22ecafff957a948d7c172f174b34 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 16 Jul 2021 16:42:43 +0100 Subject: [PATCH 365/503] Manual TypeInfo for DigestItem --- primitives/runtime/src/generic/digest.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index 871c52bcb1b53..24bd7fbe94528 100644 --- a/primitives/runtime/src/generic/digest.rs +++ b/primitives/runtime/src/generic/digest.rs @@ -573,7 +573,7 @@ mod tests { let (variant_name, digest_item) = match digest_item_type { DigestItemType::Other => - ("Other", DigestItem::Other(Default::default())), + ("Other", DigestItem::::Other(Default::default())), DigestItemType::ChangesTrieRoot => ("ChangesTrieRoot", DigestItem::ChangesTrieRoot(Default::default())), DigestItemType::Consensus => @@ -588,10 +588,10 @@ mod tests { )), }; let encoded = digest_item.encode(); - let variant = variants.iter().find(|v| &v.name() == variant_name) + let variant = variants.iter().find(|v| v.name() == &variant_name) .expect(&format!("Variant {} not found", variant_name)); - assert_eq!(digest_item_type as u8, variant.index()) + assert_eq!(encoded[0], variant.index()) }; check(DigestItemType::Other); From 094f0da39f538ac2ea6a53770742ec73933c2fde Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 19 Jul 2021 10:47:00 +0100 Subject: [PATCH 366/503] Update scale-info --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3d3f9220372ec..eafcea049598f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8338,7 +8338,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.9.2" -source = 
"git+https://github.com/paritytech/scale-info?branch=aj-substrate#25161eb6cc7570120ec04a82a512a8ade54aa757" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#639627a0d93de088cebdb7dd1d4910bd56fe8f3e" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -8351,7 +8351,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.6.1" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#25161eb6cc7570120ec04a82a512a8ade54aa757" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#639627a0d93de088cebdb7dd1d4910bd56fe8f3e" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", From eb43e7bacc086bc892026abd21f16693570f9eee Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 19 Jul 2021 16:42:32 +0100 Subject: [PATCH 367/503] Skip __Ignore variants for Error, depends on https://github.com/paritytech/scale-info/pull/117 --- Cargo.lock | 4 ++-- frame/support/procedural/src/pallet/expand/error.rs | 1 + frame/support/src/error.rs | 1 + 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index be459bfdc328e..1d2808d1edc06 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8348,7 +8348,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.9.2" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#639627a0d93de088cebdb7dd1d4910bd56fe8f3e" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#21b6d3515ef140912f8b6e86bbb6a6d8ba5b6fea" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -8361,7 +8361,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.6.1" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#639627a0d93de088cebdb7dd1d4910bd56fe8f3e" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#21b6d3515ef140912f8b6e86bbb6a6d8ba5b6fea" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git 
a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index d5ef8b08d99b6..208efa068967f 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -35,6 +35,7 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { let phantom_variant: syn::Variant = syn::parse_quote!( #[doc(hidden)] + #[codec(skip)] __Ignore( #frame_support::sp_std::marker::PhantomData<(#type_use_gen)>, #frame_support::Never, diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index a5df4f7d4cbc3..d51189057eb56 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -91,6 +91,7 @@ macro_rules! decl_error { $( where $( $where_ty: $where_bound ),* )? { #[doc(hidden)] + #[codec(skip)] __Ignore( $crate::sp_std::marker::PhantomData<($generic, $( $inst_generic)?)>, $crate::Never, From a83096ef7fa2a6d1b67eab1a7b00e23911651688 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 20 Jul 2021 17:08:07 +0100 Subject: [PATCH 368/503] Named fields for FRAME v2 pallet Call variants --- bin/node/runtime/src/lib.rs | 4 ++-- frame/authorship/src/lib.rs | 16 ++++++++-------- frame/babe/src/equivocation.rs | 6 +++--- frame/democracy/src/lib.rs | 2 +- frame/election-provider-multi-phase/src/lib.rs | 4 ++-- .../src/unsigned.rs | 8 ++++---- frame/grandpa/src/equivocation.rs | 6 +++--- frame/im-online/src/lib.rs | 8 ++++---- frame/proxy/src/lib.rs | 6 +++--- .../support/procedural/src/pallet/expand/call.rs | 13 +++++++++---- frame/timestamp/src/lib.rs | 6 +++--- frame/transaction-storage/src/lib.rs | 4 ++-- frame/utility/src/lib.rs | 2 +- 13 files changed, 45 insertions(+), 40 deletions(-) diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index b9f9b5750864e..c27af62c55d74 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -276,8 +276,8 @@ impl InstanceFilter for ProxyType { Call::Balances(..) 
| Call::Assets(..) | Call::Uniques(..) | - Call::Vesting(pallet_vesting::Call::vested_transfer(..)) | - Call::Indices(pallet_indices::Call::transfer(..)) + Call::Vesting(pallet_vesting::Call::vested_transfer { .. }) | + Call::Indices(pallet_indices::Call::transfer { .. }) ), ProxyType::Governance => matches!( c, diff --git a/frame/authorship/src/lib.rs b/frame/authorship/src/lib.rs index 0a1c4d22d57ac..de96a1cead7ad 100644 --- a/frame/authorship/src/lib.rs +++ b/frame/authorship/src/lib.rs @@ -247,7 +247,7 @@ pub mod pallet { fn create_inherent(data: &InherentData) -> Option { let uncles = data.uncles().unwrap_or_default(); - let mut set_uncles = Vec::new(); + let mut new_uncles = Vec::new(); if !uncles.is_empty() { let prev_uncles = >::get(); @@ -264,10 +264,10 @@ pub mod pallet { match Self::verify_uncle(&uncle, &existing_hashes, &mut acc) { Ok(_) => { let hash = uncle.hash(); - set_uncles.push(uncle); + new_uncles.push(uncle); existing_hashes.push(hash); - if set_uncles.len() == MAX_UNCLES { + if new_uncles.len() == MAX_UNCLES { break } } @@ -278,18 +278,18 @@ pub mod pallet { } } - if set_uncles.is_empty() { + if new_uncles.is_empty() { None } else { - Some(Call::set_uncles(set_uncles)) + Some(Call::set_uncles { new_uncles }) } } fn check_inherent(call: &Self::Call, _data: &InherentData) -> result::Result<(), Self::Error> { match call { - Call::set_uncles(ref uncles) if uncles.len() > MAX_UNCLES => { + Call::set_uncles { ref new_uncles } if new_uncles.len() > MAX_UNCLES => { Err(InherentError::Uncles(Error::::TooManyUncles.as_str().into())) - }, + } _ => { Ok(()) }, @@ -297,7 +297,7 @@ pub mod pallet { } fn is_inherent(call: &Self::Call) -> bool { - matches!(call, Call::set_uncles(_)) + matches!(call, Call::set_uncles { .. 
}) } } } diff --git a/frame/babe/src/equivocation.rs b/frame/babe/src/equivocation.rs index e9017205c6b58..e4a1e81becad3 100644 --- a/frame/babe/src/equivocation.rs +++ b/frame/babe/src/equivocation.rs @@ -157,7 +157,7 @@ where ) -> DispatchResult { use frame_system::offchain::SubmitTransaction; - let call = Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof); + let call = Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof }; match SubmitTransaction::>::submit_unsigned_transaction(call.into()) { Ok(()) => log::info!( @@ -185,7 +185,7 @@ where /// unsigned equivocation reports. impl Pallet { pub fn validate_unsigned(source: TransactionSource, call: &Call) -> TransactionValidity { - if let Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof) = call { + if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { // discard equivocation report not coming from the local node match source { TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ } @@ -222,7 +222,7 @@ impl Pallet { } pub fn pre_dispatch(call: &Call) -> Result<(), TransactionValidityError> { - if let Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof) = call { + if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { is_known_offence::(equivocation_proof, key_owner_proof) } else { Err(InvalidTransaction::Call.into()) diff --git a/frame/democracy/src/lib.rs b/frame/democracy/src/lib.rs index 50e03bdcacce9..99920e98fb1b1 100644 --- a/frame/democracy/src/lib.rs +++ b/frame/democracy/src/lib.rs @@ -1690,7 +1690,7 @@ impl Pallet { None, 63, frame_system::RawOrigin::Root.into(), - Call::enact_proposal(status.proposal_hash, index).into(), + Call::enact_proposal { proposal_hash: status.proposal_hash, index }.into(), ).is_err() { frame_support::print("LOGIC ERROR: bake_referendum/schedule_named failed"); } diff --git 
a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index ac341c622724a..5555ad4ac6560 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -1064,7 +1064,7 @@ pub mod pallet { impl ValidateUnsigned for Pallet { type Call = Call; fn validate_unsigned(source: TransactionSource, call: &Self::Call) -> TransactionValidity { - if let Call::submit_unsigned(solution, _) = call { + if let Call::submit_unsigned { solution, .. } = call { // Discard solution not coming from the local OCW. match source { TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ } @@ -1101,7 +1101,7 @@ pub mod pallet { } fn pre_dispatch(call: &Self::Call) -> Result<(), TransactionValidityError> { - if let Call::submit_unsigned(solution, _) = call { + if let Call::submit_unsigned { solution, .. } = call { Self::unsigned_pre_dispatch_checks(solution) .map_err(dispatch_error_to_invalid) .map_err(Into::into) diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index aaeb5e4c0c9e4..6b1c0368fc575 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -156,7 +156,7 @@ impl Pallet { let call = restore_solution::() .and_then(|call| { // ensure the cached call is still current before submitting - if let Call::submit_unsigned(solution, _) = &call { + if let Call::submit_unsigned { solution, .. } = &call { // prevent errors arising from state changes in a forkful chain Self::basic_checks(solution, "restored")?; Ok(call) @@ -206,10 +206,10 @@ impl Pallet { pub fn mine_checked_call() -> Result, MinerError> { let iters = Self::get_balancing_iters(); // get the solution, with a load of checks to ensure if submitted, IT IS ABSOLUTELY VALID. 
- let (raw_solution, witness) = Self::mine_and_check(iters)?; + let (solution, witness) = Self::mine_and_check(iters)?; - let score = raw_solution.score.clone(); - let call: Call = Call::submit_unsigned(raw_solution, witness).into(); + let score = solution.score.clone(); + let call: Call = Call::submit_unsigned { solution, witness }.into(); log!( debug, diff --git a/frame/grandpa/src/equivocation.rs b/frame/grandpa/src/equivocation.rs index 0383d2d9a9be6..f1d4363645356 100644 --- a/frame/grandpa/src/equivocation.rs +++ b/frame/grandpa/src/equivocation.rs @@ -168,7 +168,7 @@ where ) -> DispatchResult { use frame_system::offchain::SubmitTransaction; - let call = Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof); + let call = Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof }; match SubmitTransaction::>::submit_unsigned_transaction(call.into()) { Ok(()) => log::info!( @@ -206,7 +206,7 @@ pub struct GrandpaTimeSlot { /// unsigned equivocation reports. 
impl Pallet { pub fn validate_unsigned(source: TransactionSource, call: &Call) -> TransactionValidity { - if let Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof) = call { + if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { // discard equivocation report not coming from the local node match source { TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ } @@ -244,7 +244,7 @@ impl Pallet { } pub fn pre_dispatch(call: &Call) -> Result<(), TransactionValidityError> { - if let Call::report_equivocation_unsigned(equivocation_proof, key_owner_proof) = call { + if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { is_known_offence::(equivocation_proof, key_owner_proof) } else { Err(InvalidTransaction::Call.into()) diff --git a/frame/im-online/src/lib.rs b/frame/im-online/src/lib.rs index 79d33bfa073ed..37c1a89cf7f9f 100644 --- a/frame/im-online/src/lib.rs +++ b/frame/im-online/src/lib.rs @@ -460,7 +460,7 @@ pub mod pallet { type Call = Call; fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { - if let Call::heartbeat(heartbeat, signature) = call { + if let Call::heartbeat { heartbeat, _signature: signature } = call { if >::is_online(heartbeat.authority_index) { // we already received a heartbeat for this authority return InvalidTransaction::Stale.into(); @@ -642,7 +642,7 @@ impl Pallet { let prepare_heartbeat = || -> OffchainResult> { let network_state = sp_io::offchain::network_state() .map_err(|_| OffchainErr::NetworkState)?; - let heartbeat_data = Heartbeat { + let heartbeat = Heartbeat { block_number, network_state, session_index, @@ -650,9 +650,9 @@ impl Pallet { validators_len, }; - let signature = key.sign(&heartbeat_data.encode()).ok_or(OffchainErr::FailedSigning)?; + let signature = key.sign(&heartbeat.encode()).ok_or(OffchainErr::FailedSigning)?; - Ok(Call::heartbeat(heartbeat_data, signature)) + 
Ok(Call::heartbeat { heartbeat, _signature: signature }) }; if Self::is_online(authority_index) { diff --git a/frame/proxy/src/lib.rs b/frame/proxy/src/lib.rs index 10236f5ec5902..8f4c42b76c6eb 100644 --- a/frame/proxy/src/lib.rs +++ b/frame/proxy/src/lib.rs @@ -760,10 +760,10 @@ impl Pallet { // We make sure the proxy call does access this pallet to change modify proxies. match c.is_sub_type() { // Proxy call cannot add or remove a proxy with more permissions than it already has. - Some(Call::add_proxy(_, ref pt, _)) | Some(Call::remove_proxy(_, ref pt, _)) - if !def.proxy_type.is_superset(&pt) => false, + Some(Call::add_proxy { ref proxy_type, .. } | Call::remove_proxy { ref proxy_type, .. }) + if !def.proxy_type.is_superset(&proxy_type) => false, // Proxy call cannot remove all proxies or kill anonymous proxies unless it has full permissions. - Some(Call::remove_proxies(..)) | Some(Call::kill_anonymous(..)) + Some(Call::remove_proxies { .. } | Call::kill_anonymous { .. }) if def.proxy_type != T::ProxyType::default() => false, _ => def.proxy_type.filter(c) } diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index b9ed2685b3018..13573154891f6 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -128,7 +128,12 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::sp_std::marker::PhantomData<(#type_use_gen,)>, #frame_support::Never, ), - #( #( #[doc = #fn_doc] )* #fn_name( #( #args_compact_attr #args_type ),* ), )* + #( + #( #[doc = #fn_doc] )* + #fn_name { + #( #args_compact_attr #args_name: #args_type ),* + }, + )* } impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo @@ -138,7 +143,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { match *self { #( - Self::#fn_name ( #( ref #args_name, )* ) => { + 
Self::#fn_name { #( ref #args_name, )* } => { let __pallet_base_weight = #fn_weight; let __pallet_weight = < @@ -172,7 +177,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { { fn get_call_name(&self) -> &'static str { match *self { - #( Self::#fn_name(..) => stringify!(#fn_name), )* + #( Self::#fn_name { .. } => stringify!(#fn_name), )* Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), } } @@ -193,7 +198,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { ) -> #frame_support::dispatch::DispatchResultWithPostInfo { match self { #( - Self::#fn_name( #( #args_name, )* ) => { + Self::#fn_name { #( #args_name, )* } => { #frame_support::sp_tracing::enter_span!( #frame_support::sp_tracing::trace_span!(stringify!(#fn_name)) ); diff --git a/frame/timestamp/src/lib.rs b/frame/timestamp/src/lib.rs index 5da14fc6437b3..9d8c0d6c9b801 100644 --- a/frame/timestamp/src/lib.rs +++ b/frame/timestamp/src/lib.rs @@ -215,7 +215,7 @@ pub mod pallet { let data = (*inherent_data).saturated_into::(); let next_time = cmp::max(data, Self::now() + T::MinimumPeriod::get()); - Some(Call::set(next_time.into())) + Some(Call::set { now: next_time.into() }) } fn check_inherent(call: &Self::Call, data: &InherentData) -> result::Result<(), Self::Error> { @@ -223,7 +223,7 @@ pub mod pallet { sp_timestamp::Timestamp::new(30 * 1000); let t: u64 = match call { - Call::set(ref t) => t.clone().saturated_into::(), + Call::set { ref now } => now.clone().saturated_into::(), _ => return Ok(()), }; @@ -242,7 +242,7 @@ pub mod pallet { } fn is_inherent(call: &Self::Call) -> bool { - matches!(call, Call::set(_)) + matches!(call, Call::set { .. 
}) } } } diff --git a/frame/transaction-storage/src/lib.rs b/frame/transaction-storage/src/lib.rs index 00ec7decd3ea2..6661637d57edc 100644 --- a/frame/transaction-storage/src/lib.rs +++ b/frame/transaction-storage/src/lib.rs @@ -410,7 +410,7 @@ pub mod pallet { fn create_inherent(data: &InherentData) -> Option { let proof = data.get_data::(&Self::INHERENT_IDENTIFIER).unwrap_or(None); - proof.map(Call::check_proof) + proof.map(|proof| Call::check_proof { proof }) } fn check_inherent(_call: &Self::Call, _data: &InherentData) -> result::Result<(), Self::Error> { @@ -418,7 +418,7 @@ pub mod pallet { } fn is_inherent(call: &Self::Call) -> bool { - matches!(call, Call::check_proof(_)) + matches!(call, Call::check_proof { .. }) } } diff --git a/frame/utility/src/lib.rs b/frame/utility/src/lib.rs index b8170ac8ba002..96e83a20a6420 100644 --- a/frame/utility/src/lib.rs +++ b/frame/utility/src/lib.rs @@ -272,7 +272,7 @@ pub mod pallet { // Don't allow users to nest `batch_all` calls. filtered_origin.add_filter(move |c: &::Call| { let c = ::Call::from_ref(c); - !matches!(c.is_sub_type(), Some(Call::batch_all(_))) + !matches!(c.is_sub_type(), Some(Call::batch_all { .. })) }); call.dispatch(filtered_origin) }; From 42752e6f455ae65c7dd9924f9d78efe139462d30 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Jul 2021 08:15:12 +0100 Subject: [PATCH 369/503] Named fields for FRAME v1 pallet Call variants --- frame/support/src/dispatch.rs | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 58e0683ad1d54..8452730788aa2 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -1708,7 +1708,7 @@ macro_rules! decl_module { variant $fn_name:ident; $( #[doc = $doc_attr:tt] )* #[compact] - $type:ty; + $name:ident : $type:ty; $( $rest:tt )* ) => { $crate::decl_module! { @@ -1720,7 +1720,7 @@ macro_rules! 
decl_module { { $( $current_params )* #[codec(compact)] - $type, + $name: $type, } variant $fn_name; $( #[doc = $doc_attr] )* @@ -1737,7 +1737,7 @@ macro_rules! decl_module { { $( $current_params:tt )* } variant $fn_name:ident; $(#[doc = $doc_attr:tt])* - $type:ty; + $name:ident : $type:ty; $( $rest:tt )* ) => { $crate::decl_module! { @@ -1748,7 +1748,7 @@ macro_rules! decl_module { { $( $generated_variants )* } { $( $current_params )* - $type, + $name: $type, } variant $fn_name; $( #[doc = $doc_attr] )* @@ -1778,9 +1778,9 @@ macro_rules! decl_module { $( $generated_variants )* #[allow(non_camel_case_types)] $(#[doc = $doc_attr])* - $fn_name ( + $fn_name { $( $current_params )* - ), + }, } {} $( @@ -1947,7 +1947,7 @@ macro_rules! decl_module { $(#[doc = $doc_attr])* $( $(#[$codec_attr])* - $param; + $param_name : $param; )* )* } @@ -1959,7 +1959,7 @@ macro_rules! decl_module { fn get_dispatch_info(&self) -> $crate::dispatch::DispatchInfo { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => { + $call_type::$fn_name { $( ref $param_name ),* } => { let __pallet_base_weight = $weight; let __pallet_weight = >::weigh_data( &__pallet_base_weight, @@ -1992,7 +1992,7 @@ macro_rules! decl_module { fn get_call_name(&self) -> &'static str { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => { + $call_type::$fn_name { $( ref $param_name ),* } => { // Don't generate any warnings for unused variables let _ = ( $( $param_name ),* ); stringify!($fn_name) @@ -2048,8 +2048,8 @@ macro_rules! decl_module { fn clone(&self) -> Self { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => - $call_type::$fn_name( $( (*$param_name).clone() ),* ) + $call_type::$fn_name { $( ref $param_name ),* } => + $call_type::$fn_name { $( $param_name: (*$param_name).clone() ),* } ,)* _ => unreachable!(), } @@ -2062,9 +2062,9 @@ macro_rules! 
decl_module { fn eq(&self, _other: &Self) -> bool { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => { + $call_type::$fn_name { $( ref $param_name ),* } => { let self_params = ( $( $param_name, )* ); - if let $call_type::$fn_name( $( ref $param_name ),* ) = *_other { + if let $call_type::$fn_name { $( ref $param_name ),* } = *_other { self_params == ( $( $param_name, )* ) } else { match *_other { @@ -2092,7 +2092,7 @@ macro_rules! decl_module { ) -> $crate::dispatch::result::Result<(), $crate::dispatch::fmt::Error> { match *self { $( - $call_type::$fn_name( $( ref $param_name ),* ) => + $call_type::$fn_name { $( ref $param_name ),* } => write!(_f, "{}{:?}", stringify!($fn_name), ( $( $param_name.clone(), )* ) @@ -2110,7 +2110,7 @@ macro_rules! decl_module { fn dispatch_bypass_filter(self, _origin: Self::Origin) -> $crate::dispatch::DispatchResultWithPostInfo { match self { $( - $call_type::$fn_name( $( $param_name ),* ) => { + $call_type::$fn_name { $( $param_name ),* } => { $crate::decl_module!( @call $from From 6053b08ece6bc6de333e31d53f375b990c6e586b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Jul 2021 10:20:46 +0100 Subject: [PATCH 370/503] Add missing scale-info dependency --- Cargo.lock | 1 + frame/support/test/pallet/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 1d2808d1edc06..db273bab1f9f4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1950,6 +1950,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", ] [[package]] diff --git a/frame/support/test/pallet/Cargo.toml b/frame/support/test/pallet/Cargo.toml index 3a421ecc461fe..7970fb9b76760 100644 --- a/frame/support/test/pallet/Cargo.toml +++ b/frame/support/test/pallet/Cargo.toml @@ -13,6 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.9.0", 
default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../system" } From 0397626215dfcbc2d7db68e69134166700c161d3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Jul 2021 13:07:12 +0100 Subject: [PATCH 371/503] WIP expand benchmark call variant --- frame/benchmarking/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/benchmarking/src/lib.rs b/frame/benchmarking/src/lib.rs index fb4fd0801a245..731daa70b8173 100644 --- a/frame/benchmarking/src/lib.rs +++ b/frame/benchmarking/src/lib.rs @@ -306,7 +306,7 @@ macro_rules! benchmarks_iter { < Call as $crate::frame_support::traits::UnfilteredDispatchable >::dispatch_bypass_filter( - Call::::$dispatch($($arg),*), $origin.into() + Call::::$dispatch { $($arg),* }, $origin.into() )?; } verify $postcode From dbb0c1e7ceb2035b3189e1e3b0510112f111db5c Mon Sep 17 00:00:00 2001 From: thiolliere Date: Wed, 21 Jul 2021 14:56:03 +0200 Subject: [PATCH 372/503] fix benchmark with new function create a new function for each variant of a pallet call. This function is called by benchmarking macro in order not to break call creation with unnamed argument --- frame/benchmarking/src/lib.rs | 33 +++++++++++-------- .../procedural/src/pallet/expand/call.rs | 20 +++++++++++ frame/support/src/dispatch.rs | 22 +++++++++++++ frame/support/test/tests/pallet.rs | 5 +++ 4 files changed, 67 insertions(+), 13 deletions(-) diff --git a/frame/benchmarking/src/lib.rs b/frame/benchmarking/src/lib.rs index 731daa70b8173..98867d9b7b9c3 100644 --- a/frame/benchmarking/src/lib.rs +++ b/frame/benchmarking/src/lib.rs @@ -297,20 +297,27 @@ macro_rules! benchmarks_iter { verify $postcode:block $( $rest:tt )* ) => { - $crate::benchmarks_iter! { - { $( $instance: $instance_bound )? 
} - { $( $where_clause )* } - ( $( $names )* ) - ( $( $names_extra )* ) - $name { $( $code )* }: { - < - Call as $crate::frame_support::traits::UnfilteredDispatchable - >::dispatch_bypass_filter( - Call::::$dispatch { $($arg),* }, $origin.into() - )?; + $crate::paste::paste! { + $crate::benchmarks_iter! { + { $( $instance: $instance_bound )? } + { $( $where_clause )* } + ( $( $names )* ) + ( $( $names_extra )* ) + $name { $( $code )* }: { + let call = Call::< + T + $( , $instance )? + >:: [< new_call_variant_ $dispatch >] ( + $($arg),* + ); + + < + Call as $crate::frame_support::traits::UnfilteredDispatchable + >::dispatch_bypass_filter(call, $origin.into())?; + } + verify $postcode + $( $rest )* } - verify $postcode - $( $rest )* } }; // iteration arm: diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 13573154891f6..758703afb2fae 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -42,6 +42,13 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { let pallet_ident = &def.pallet_struct.pallet; let fn_name = methods.iter().map(|method| &method.name).collect::>(); + let new_call_variant_fn_name = fn_name.iter() + .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) + .collect::>(); + + let new_call_variant_doc = fn_name.iter() + .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) + .collect::>(); let fn_weight = methods.iter().map(|method| &method.weight); @@ -136,6 +143,19 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { )* } + impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { + #( + #[doc = #new_call_variant_doc] + pub fn #new_call_variant_fn_name( + #( #args_name: #args_type ),* + ) -> Self { + Self::#fn_name { + #( #args_name ),* + } + } + )* + } + impl<#type_impl_gen> #frame_support::dispatch::GetDispatchInfo for #call_ident<#type_use_gen> 
#where_clause diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 8452730788aa2..5d5ec0ac364b6 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -1952,6 +1952,23 @@ macro_rules! decl_module { )* } + $crate::paste::paste! { + impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> + $call_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* + { + $( + #[doc = "Create a call with the variant `" $fn_name "`."] + pub fn [< new_call_variant_ $fn_name >]( + $( $param_name: $param ),* + ) -> Self { + Self::$fn_name { + $( $param_name ),* + } + } + )* + } + } + // Implement weight calculation function for Call impl<$trait_instance: $trait_name $(, $instance: $instantiable)?> $crate::dispatch::GetDispatchInfo for $call_type<$trait_instance $(, $instance)?> where $( $other_where_bounds )* @@ -2719,4 +2736,9 @@ mod tests { fn integrity_test_should_work() { as IntegrityTest>::integrity_test(); } + + #[test] + fn test_new_call_variant() { + Call::::new_call_variant_aux_0(); + } } diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 18a23cfb59cc2..9e7a03f825854 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -732,6 +732,11 @@ fn pallet_expand_deposit_event() { }) } +#[test] +fn pallet_new_call_variant() { + Call::Example(Example::Call::new_call_variant_foo(3, 4)); +} + #[test] fn storage_expand() { use frame_support::pallet_prelude::*; From fe85997de9ddd05b51bb9c81ca92e9208b0e6fc2 Mon Sep 17 00:00:00 2001 From: thiolliere Date: Wed, 21 Jul 2021 15:03:31 +0200 Subject: [PATCH 373/503] fix tests --- frame/support/test/tests/pallet_instance.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 902bdc7061139..7a8356bb73ffb 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ 
b/frame/support/test/tests/pallet_instance.rs @@ -317,7 +317,7 @@ frame_support::construct_runtime!( #[test] fn call_expand() { - let call_foo = pallet::Call::::foo(3); + let call_foo = pallet::Call::::foo { _foo: 3 }; assert_eq!( call_foo.get_dispatch_info(), DispatchInfo { @@ -332,7 +332,7 @@ fn call_expand() { &["foo", "foo_transactional"], ); - let call_foo = pallet::Call::::foo(3); + let call_foo = pallet::Call::::foo { _foo: 3 }; assert_eq!( call_foo.get_dispatch_info(), DispatchInfo { @@ -395,7 +395,7 @@ fn instance_expand() { fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo(3).dispatch_bypass_filter(None.into()).unwrap(); + pallet::Call::::foo { _foo: 3 }.dispatch_bypass_filter(None.into()).unwrap(); assert_eq!( frame_system::Pallet::::events()[0].event, Event::Example(pallet::Event::Something(3)), @@ -404,7 +404,8 @@ fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo(3).dispatch_bypass_filter(None.into()).unwrap(); + pallet::Call::::foo { _foo: 3 } + .dispatch_bypass_filter(None.into()).unwrap(); assert_eq!( frame_system::Pallet::::events()[0].event, Event::Instance1Example(pallet::Event::Something(3)), From 5e499eae0b912aee79bbd59ce86d1a26e20939b5 Mon Sep 17 00:00:00 2001 From: thiolliere Date: Wed, 21 Jul 2021 15:09:57 +0200 Subject: [PATCH 374/503] more fix --- frame/support/src/dispatch.rs | 14 +++++++------- frame/support/src/weights.rs | 16 ++++++++-------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 5d5ec0ac364b6..6a67c5f0148b6 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2638,12 +2638,12 @@ mod tests { #[test] fn compact_attr() { - let call: Call = Call::aux_1(1); + let call: Call = Call::aux_1 { _data: 1 }; let encoded = 
call.encode(); assert_eq!(2, encoded.len()); assert_eq!(vec![1, 4], encoded); - let call: Call = Call::aux_5(1, 2); + let call: Call = Call::aux_5 { _data: 1, _data2: 2 }; let encoded = call.encode(); assert_eq!(6, encoded.len()); assert_eq!(vec![5, 1, 0, 0, 0, 8], encoded); @@ -2651,13 +2651,13 @@ mod tests { #[test] fn encode_is_correct_and_decode_works() { - let call: Call = Call::aux_0(); + let call: Call = Call::aux_0 {}; let encoded = call.encode(); assert_eq!(vec![0], encoded); let decoded = Call::::decode(&mut &encoded[..]).unwrap(); assert_eq!(decoded, call); - let call: Call = Call::aux_2(32, "hello".into()); + let call: Call = Call::aux_2 { _data: 32, _data2: "hello".into() }; let encoded = call.encode(); assert_eq!(vec![2, 32, 0, 0, 0, 20, 104, 101, 108, 108, 111], encoded); let decoded = Call::::decode(&mut &encoded[..]).unwrap(); @@ -2709,19 +2709,19 @@ mod tests { fn weight_should_attach_to_call_enum() { // operational. assert_eq!( - Call::::operational().get_dispatch_info(), + Call::::operational {}.get_dispatch_info(), DispatchInfo { weight: 5, class: DispatchClass::Operational, pays_fee: Pays::Yes }, ); // custom basic assert_eq!( - Call::::aux_3().get_dispatch_info(), + Call::::aux_3 {}.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes }, ); } #[test] fn call_name() { - let name = Call::::aux_3().get_call_name(); + let name = Call::::aux_3 {}.get_call_name(); assert_eq!("aux_3", name); } diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 5aeecbfd5d5d3..ce9008302deb8 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -876,49 +876,49 @@ mod tests { #[test] fn weights_are_correct() { // #[weight = 1000] - let info = Call::::f00().get_dispatch_info(); + let info = Call::::f00 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (1000, 
DispatchClass::Mandatory)] - let info = Call::::f01().get_dispatch_info(); + let info = Call::::f01 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Mandatory); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (1000, Pays::No)] - let info = Call::::f02().get_dispatch_info(); + let info = Call::::f02 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::No); // #[weight = (1000, DispatchClass::Operational, Pays::No)] - let info = Call::::f03().get_dispatch_info(); + let info = Call::::f03 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Operational); assert_eq!(info.pays_fee, Pays::No); // #[weight = ((_a * 10 + _eb * 1) as Weight, DispatchClass::Normal, Pays::Yes)] - let info = Call::::f11(13, 20).get_dispatch_info(); + let info = Call::::f11 { _a: 13, _eb: 20 }.get_dispatch_info(); assert_eq!(info.weight, 150); // 13*10 + 20 assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (0, DispatchClass::Operational, Pays::Yes)] - let info = Call::::f12(10, 20).get_dispatch_info(); + let info = Call::::f12 { _a: 10, _eb: 20 }.get_dispatch_info(); assert_eq!(info.weight, 0); assert_eq!(info.class, DispatchClass::Operational); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = T::DbWeight::get().reads(3) + T::DbWeight::get().writes(2) + 10_000] - let info = Call::::f20().get_dispatch_info(); + let info = Call::::f20 {}.get_dispatch_info(); assert_eq!(info.weight, 12300); // 100*3 + 1000*2 + 10_1000 assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = T::DbWeight::get().reads_writes(6, 5) + 40_000] - let info = Call::::f21().get_dispatch_info(); + let info = Call::::f21 {}.get_dispatch_info(); assert_eq!(info.weight, 45600); // 100*6 + 1000*5 + 40_1000 assert_eq!(info.class, DispatchClass::Normal); 
assert_eq!(info.pays_fee, Pays::Yes); From 9b426acf2105c3372fd7fc5ab72ede4c1fbfb057 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Jul 2021 15:25:06 +0100 Subject: [PATCH 375/503] Fix staking tests --- frame/staking/src/tests.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/frame/staking/src/tests.rs b/frame/staking/src/tests.rs index bbb0d5522fcc6..0a71681fe7fd3 100644 --- a/frame/staking/src/tests.rs +++ b/frame/staking/src/tests.rs @@ -3411,7 +3411,7 @@ fn payout_stakers_handles_weight_refund() { start_active_era(2); // Collect payouts when there are no nominators - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 1)); + let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 1 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); @@ -3426,7 +3426,7 @@ fn payout_stakers_handles_weight_refund() { start_active_era(3); // Collect payouts for an era where the validator did not receive any points. - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 2)); + let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 2 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); @@ -3439,7 +3439,7 @@ fn payout_stakers_handles_weight_refund() { start_active_era(4); // Collect payouts when the validator has `half_max_nom_rewarded` nominators. - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 3)); + let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 3 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); @@ -3462,14 +3462,14 @@ fn payout_stakers_handles_weight_refund() { start_active_era(6); // Collect payouts when the validator had `half_max_nom_rewarded` nominators. 
- let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 5)); + let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 5 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); assert_eq!(extract_actual_weight(&result, &info), max_nom_rewarded_weight); // Try and collect payouts for an era that has already been collected. - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers(11, 5)); + let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 5 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert!(result.is_err()); From 11f74190d257b4270c3242654c42ba349954c747 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Jul 2021 15:51:18 +0100 Subject: [PATCH 376/503] Fix offchain workers calls --- frame/example-offchain-worker/src/lib.rs | 16 ++++++++-------- frame/example-offchain-worker/src/tests.rs | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/frame/example-offchain-worker/src/lib.rs b/frame/example-offchain-worker/src/lib.rs index adac79be45b31..a705b5f9b3b73 100644 --- a/frame/example-offchain-worker/src/lib.rs +++ b/frame/example-offchain-worker/src/lib.rs @@ -288,15 +288,15 @@ pub mod pallet { call: &Self::Call, ) -> TransactionValidity { // Firstly let's check that we call the right function. 
- if let Call::submit_price_unsigned_with_signed_payload( - ref payload, ref signature - ) = call { + if let Call::submit_price_unsigned_with_signed_payload { + price_payload: ref payload, _signature: ref signature, + } = call { let signature_valid = SignedPayload::::verify::(payload, signature.clone()); if !signature_valid { return InvalidTransaction::BadProof.into(); } Self::validate_transaction_parameters(&payload.block_number, &payload.price) - } else if let Call::submit_price_unsigned(block_number, new_price) = call { + } else if let Call::submit_price_unsigned { block_number, new_price } = call { Self::validate_transaction_parameters(block_number, new_price) } else { InvalidTransaction::Call.into() @@ -433,7 +433,7 @@ impl Pallet { // Received price is wrapped into a call to `submit_price` public function of this pallet. // This means that the transaction, when executed, will simply call that function passing // `price` as an argument. - Call::submit_price(price) + Call::submit_price { price } } ); @@ -463,7 +463,7 @@ impl Pallet { // Received price is wrapped into a call to `submit_price_unsigned` public function of this // pallet. This means that the transaction, when executed, will simply call that function // passing `price` as an argument. - let call = Call::submit_price_unsigned(block_number, price); + let call = Call::submit_price_unsigned { block_number, price }; // Now let's create a transaction out of this call and submit it to the pool. 
// Here we showcase two ways to send an unsigned transaction / unsigned payload (raw) @@ -500,7 +500,7 @@ impl Pallet { public: account.public.clone() }, |payload, signature| { - Call::submit_price_unsigned_with_signed_payload(payload, signature) + Call::submit_price_unsigned_with_signed_payload { payload, signature } } ).ok_or("No local accounts accounts available.")?; result.map_err(|()| "Unable to submit transaction")?; @@ -530,7 +530,7 @@ impl Pallet { public: account.public.clone() }, |payload, signature| { - Call::submit_price_unsigned_with_signed_payload(payload, signature) + Call::submit_price_unsigned_with_signed_payload { payload, signature } } ); for (_account_id, result) in transaction_results.into_iter() { diff --git a/frame/example-offchain-worker/src/tests.rs b/frame/example-offchain-worker/src/tests.rs index 7d16e59490342..e2ad3d8561170 100644 --- a/frame/example-offchain-worker/src/tests.rs +++ b/frame/example-offchain-worker/src/tests.rs @@ -232,7 +232,7 @@ fn should_submit_signed_transaction_on_chain() { assert!(pool_state.read().transactions.is_empty()); let tx = Extrinsic::decode(&mut &*tx).unwrap(); assert_eq!(tx.signature.unwrap().0, 0); - assert_eq!(tx.call, Call::Example(crate::Call::submit_price(15523))); + assert_eq!(tx.call, Call::Example(crate::Call::submit_price { price: 15523 })); }); } @@ -276,7 +276,7 @@ fn should_submit_unsigned_transaction_on_chain_for_any_account() { let tx = pool_state.write().transactions.pop().unwrap(); let tx = Extrinsic::decode(&mut &*tx).unwrap(); assert_eq!(tx.signature, None); - if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload(body, signature)) = tx.call { + if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload { price_payload: body, _signature: signature }) = tx.call { assert_eq!(body, price_payload); let signature_valid = Date: Wed, 21 Jul 2021 16:06:35 +0100 Subject: [PATCH 377/503] Cherry pick rustfmt.toml from master --- rustfmt.toml | 2 -- 1 file 
changed, 2 deletions(-) diff --git a/rustfmt.toml b/rustfmt.toml index 1c9ebe03c02e3..15e9bdcdf10f1 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -7,8 +7,6 @@ imports_granularity = "Crate" reorder_imports = true # Consistency newline_style = "Unix" -normalize_comments = true -normalize_doc_attributes = true # Misc chain_width = 80 spaces_around_ranges = false From 3b74c20a9bd53d6ed0f593ade9d0b78c2e7d78d8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 21 Jul 2021 16:26:44 +0100 Subject: [PATCH 378/503] cargo +nightly-2021-06-22 fmt --all --- bin/node-template/node/src/chain_spec.rs | 103 +- bin/node-template/node/src/cli.rs | 2 +- bin/node-template/node/src/command.rs | 47 +- bin/node-template/node/src/lib.rs | 2 +- bin/node-template/node/src/rpc.rs | 30 +- bin/node-template/node/src/service.rs | 154 +- .../pallets/template/src/benchmarking.rs | 10 +- bin/node-template/pallets/template/src/lib.rs | 3 +- .../pallets/template/src/mock.rs | 7 +- .../pallets/template/src/tests.rs | 9 +- bin/node-template/runtime/src/lib.rs | 46 +- bin/node/bench/src/construct.rs | 120 +- bin/node/bench/src/core.rs | 22 +- bin/node/bench/src/generator.rs | 7 +- bin/node/bench/src/import.rs | 32 +- bin/node/bench/src/main.rs | 37 +- bin/node/bench/src/simple_trie.rs | 8 +- bin/node/bench/src/state_sizes.rs | 2 +- bin/node/bench/src/tempdb.rs | 52 +- bin/node/bench/src/trie.rs | 47 +- bin/node/bench/src/txpool.rs | 42 +- bin/node/browser-testing/src/lib.rs | 20 +- bin/node/cli/build.rs | 11 +- bin/node/cli/src/browser.rs | 15 +- bin/node/cli/src/chain_spec.rs | 313 +- bin/node/cli/src/cli.rs | 2 +- bin/node/cli/src/command.rs | 75 +- bin/node/cli/src/service.rs | 480 ++- bin/node/cli/tests/common.rs | 22 +- bin/node/cli/tests/export_import_flow.rs | 38 +- .../tests/running_the_node_and_interrupt.rs | 9 +- bin/node/cli/tests/telemetry.rs | 35 +- bin/node/cli/tests/temp_base_path_works.rs | 39 +- bin/node/cli/tests/version.rs | 34 +- bin/node/cli/tests/websocket_server.rs | 9 +- 
bin/node/executor/benches/bench.rs | 200 +- bin/node/executor/src/lib.rs | 2 +- bin/node/executor/tests/basic.rs | 304 +- bin/node/executor/tests/common.rs | 95 +- bin/node/executor/tests/fees.rs | 75 +- bin/node/executor/tests/submit_transaction.rs | 108 +- bin/node/inspect/src/cli.rs | 2 +- bin/node/inspect/src/command.rs | 10 +- bin/node/inspect/src/lib.rs | 135 +- bin/node/primitives/src/lib.rs | 5 +- bin/node/rpc-client/src/main.rs | 23 +- bin/node/rpc/src/lib.rs | 143 +- bin/node/runtime/src/constants.rs | 4 +- bin/node/runtime/src/impls.rs | 79 +- bin/node/runtime/src/lib.rs | 196 +- bin/node/test-runner-example/src/lib.rs | 44 +- bin/node/testing/src/bench.rs | 260 +- bin/node/testing/src/client.rs | 20 +- bin/node/testing/src/genesis.rs | 71 +- bin/node/testing/src/keyring.rs | 37 +- bin/node/testing/src/lib.rs | 2 +- bin/utils/chain-spec-builder/src/main.rs | 102 +- bin/utils/subkey/src/lib.rs | 8 +- client/allocator/src/error.rs | 2 +- client/allocator/src/freeing_bump.rs | 37 +- client/allocator/src/lib.rs | 2 +- client/api/src/backend.rs | 164 +- client/api/src/call_executor.rs | 46 +- client/api/src/cht.rs | 224 +- client/api/src/client.rs | 41 +- client/api/src/execution_extensions.rs | 68 +- client/api/src/in_mem.rs | 360 ++- client/api/src/leaves.rs | 79 +- client/api/src/lib.rs | 26 +- client/api/src/light.rs | 131 +- client/api/src/notifications.rs | 284 +- client/api/src/proof_provider.rs | 18 +- client/authority-discovery/src/interval.rs | 18 +- client/authority-discovery/src/lib.rs | 26 +- client/authority-discovery/src/service.rs | 15 +- client/authority-discovery/src/tests.rs | 24 +- client/authority-discovery/src/worker.rs | 271 +- .../src/worker/addr_cache.rs | 77 +- .../authority-discovery/src/worker/tests.rs | 435 +-- .../basic-authorship/src/basic_authorship.rs | 463 +-- client/basic-authorship/src/lib.rs | 2 +- client/block-builder/src/lib.rs | 91 +- client/chain-spec/derive/src/impls.rs | 73 +- client/chain-spec/src/chain_spec.rs | 
190 +- client/chain-spec/src/extension.rs | 130 +- client/chain-spec/src/lib.rs | 8 +- client/cli/src/arg_enums.rs | 17 +- client/cli/src/commands/build_spec_cmd.rs | 16 +- client/cli/src/commands/check_block_cmd.rs | 10 +- client/cli/src/commands/export_blocks_cmd.rs | 23 +- client/cli/src/commands/export_state_cmd.rs | 8 +- client/cli/src/commands/generate.rs | 19 +- client/cli/src/commands/generate_node_key.rs | 9 +- client/cli/src/commands/import_blocks_cmd.rs | 31 +- client/cli/src/commands/insert_key.rs | 75 +- client/cli/src/commands/inspect_key.rs | 7 +- client/cli/src/commands/inspect_node_key.rs | 16 +- client/cli/src/commands/key.rs | 7 +- client/cli/src/commands/mod.rs | 41 +- client/cli/src/commands/purge_chain_cmd.rs | 25 +- client/cli/src/commands/revert_cmd.rs | 20 +- client/cli/src/commands/run_cmd.rs | 69 +- client/cli/src/commands/sign.rs | 24 +- client/cli/src/commands/utils.rs | 49 +- client/cli/src/commands/vanity.rs | 77 +- client/cli/src/commands/verify.rs | 33 +- client/cli/src/config.rs | 64 +- client/cli/src/lib.rs | 2 +- client/cli/src/params/database_params.rs | 2 +- client/cli/src/params/import_params.rs | 39 +- client/cli/src/params/keystore_params.rs | 13 +- client/cli/src/params/mod.rs | 37 +- client/cli/src/params/network_params.rs | 27 +- client/cli/src/params/node_key_params.rs | 54 +- .../cli/src/params/offchain_worker_params.rs | 13 +- client/cli/src/params/pruning_params.rs | 6 +- client/cli/src/params/shared_params.rs | 7 +- client/cli/src/runner.rs | 57 +- client/consensus/aura/src/import_queue.rs | 177 +- client/consensus/aura/src/lib.rs | 365 ++- client/consensus/babe/rpc/src/lib.rs | 112 +- client/consensus/babe/src/authorship.rs | 115 +- client/consensus/babe/src/aux_schema.rs | 150 +- client/consensus/babe/src/lib.rs | 641 ++-- client/consensus/babe/src/migration.rs | 18 +- client/consensus/babe/src/tests.rs | 413 +-- client/consensus/babe/src/verification.rs | 143 +- client/consensus/common/src/longest_chain.rs | 27 +- 
client/consensus/common/src/shared_data.rs | 15 +- client/consensus/epochs/src/lib.rs | 501 ++- client/consensus/epochs/src/migration.rs | 12 +- client/consensus/manual-seal/src/consensus.rs | 14 +- .../manual-seal/src/consensus/babe.rs | 183 +- client/consensus/manual-seal/src/error.rs | 10 +- .../manual-seal/src/finalize_block.rs | 30 +- client/consensus/manual-seal/src/lib.rs | 370 +-- client/consensus/manual-seal/src/rpc.rs | 40 +- .../consensus/manual-seal/src/seal_block.rs | 55 +- client/consensus/pow/src/lib.rs | 198 +- client/consensus/pow/src/worker.rs | 55 +- client/consensus/slots/src/aux_schema.rs | 152 +- client/consensus/slots/src/lib.rs | 309 +- client/consensus/slots/src/slots.rs | 29 +- client/consensus/uncles/src/lib.rs | 5 +- client/db/src/bench.rs | 201 +- client/db/src/cache/list_cache.rs | 1483 ++++++--- client/db/src/cache/list_entry.rs | 88 +- client/db/src/cache/list_storage.rs | 145 +- client/db/src/cache/mod.rs | 124 +- client/db/src/changes_tries_storage.rs | 499 ++- client/db/src/children.rs | 18 +- client/db/src/lib.rs | 1282 ++++---- client/db/src/light.rs | 495 +-- client/db/src/offchain.rs | 17 +- client/db/src/parity_db.rs | 32 +- client/db/src/stats.rs | 11 +- client/db/src/storage_cache.rs | 630 ++-- client/db/src/upgrade.rs | 70 +- client/db/src/utils.rs | 176 +- client/executor/common/src/lib.rs | 2 +- .../runtime_blob/data_segments_snapshot.rs | 10 +- .../src/runtime_blob/globals_snapshot.rs | 6 +- .../executor/common/src/runtime_blob/mod.rs | 2 +- .../common/src/runtime_blob/runtime_blob.rs | 52 +- client/executor/common/src/sandbox.rs | 149 +- client/executor/runtime-test/src/lib.rs | 699 ++-- .../executor/src/integration_tests/linux.rs | 12 +- .../src/integration_tests/linux/smaps.rs | 6 +- client/executor/src/integration_tests/mod.rs | 307 +- .../executor/src/integration_tests/sandbox.rs | 155 +- client/executor/src/lib.rs | 19 +- client/executor/src/native_executor.rs | 243 +- client/executor/src/wasm_runtime.rs | 177 +- 
client/executor/wasmi/src/lib.rs | 181 +- client/executor/wasmtime/src/host.rs | 50 +- client/executor/wasmtime/src/imports.rs | 93 +- .../executor/wasmtime/src/instance_wrapper.rs | 106 +- client/executor/wasmtime/src/lib.rs | 4 +- client/executor/wasmtime/src/runtime.rs | 81 +- client/executor/wasmtime/src/tests.rs | 39 +- client/finality-grandpa-warp-sync/src/lib.rs | 59 +- .../finality-grandpa-warp-sync/src/proof.rs | 58 +- client/finality-grandpa/rpc/src/finality.rs | 5 +- client/finality-grandpa/rpc/src/lib.rs | 100 +- .../finality-grandpa/rpc/src/notification.rs | 4 +- client/finality-grandpa/rpc/src/report.rs | 13 +- client/finality-grandpa/src/authorities.rs | 507 ++- client/finality-grandpa/src/aux_schema.rs | 348 +- .../src/communication/gossip.rs | 792 ++--- .../finality-grandpa/src/communication/mod.rs | 297 +- .../src/communication/periodic.rs | 40 +- .../src/communication/tests.rs | 247 +- client/finality-grandpa/src/environment.rs | 279 +- client/finality-grandpa/src/finality_proof.rs | 117 +- client/finality-grandpa/src/import.rs | 110 +- client/finality-grandpa/src/justification.rs | 82 +- client/finality-grandpa/src/lib.rs | 305 +- client/finality-grandpa/src/notification.rs | 17 +- client/finality-grandpa/src/observer.rs | 72 +- client/finality-grandpa/src/tests.rs | 513 +-- client/finality-grandpa/src/until_imported.rs | 474 ++- client/finality-grandpa/src/voting_rule.rs | 109 +- client/informant/src/display.rs | 71 +- client/informant/src/lib.rs | 30 +- client/keystore/src/lib.rs | 18 +- client/keystore/src/local.rs | 296 +- client/light/src/backend.rs | 185 +- client/light/src/blockchain.rs | 84 +- client/light/src/call_executor.rs | 93 +- client/light/src/fetcher.rs | 226 +- client/light/src/lib.rs | 17 +- client/network-gossip/src/bridge.rs | 241 +- client/network-gossip/src/lib.rs | 27 +- client/network-gossip/src/state_machine.rs | 180 +- client/network-gossip/src/validator.rs | 13 +- client/network/build.rs | 7 +- 
client/network/src/behaviour.rs | 184 +- client/network/src/bitswap.rs | 94 +- client/network/src/block_request_handler.rs | 103 +- client/network/src/chain.rs | 34 +- client/network/src/config.rs | 160 +- client/network/src/discovery.rs | 443 ++- client/network/src/error.rs | 6 +- client/network/src/lib.rs | 19 +- client/network/src/light_client_requests.rs | 122 +- .../src/light_client_requests/handler.rs | 160 +- .../src/light_client_requests/sender.rs | 456 ++- client/network/src/network_state.rs | 13 +- client/network/src/on_demand_layer.rs | 36 +- client/network/src/peer_info.rs | 116 +- client/network/src/protocol.rs | 720 +++-- client/network/src/protocol/event.rs | 3 +- client/network/src/protocol/message.rs | 63 +- client/network/src/protocol/notifications.rs | 8 +- .../src/protocol/notifications/behaviour.rs | 763 +++-- .../src/protocol/notifications/handler.rs | 324 +- .../src/protocol/notifications/tests.rs | 167 +- .../src/protocol/notifications/upgrade.rs | 17 +- .../protocol/notifications/upgrade/collec.rs | 14 +- .../notifications/upgrade/notifications.rs | 187 +- client/network/src/protocol/sync.rs | 1108 +++---- client/network/src/protocol/sync/blocks.rs | 192 +- .../src/protocol/sync/extra_requests.rs | 132 +- client/network/src/protocol/sync/state.rs | 67 +- client/network/src/request_responses.rs | 645 ++-- client/network/src/service.rs | 858 +++-- client/network/src/service/metrics.rs | 52 +- client/network/src/service/out_events.rs | 71 +- client/network/src/service/tests.rs | 305 +- client/network/src/state_request_handler.rs | 55 +- client/network/src/transactions.rs | 121 +- client/network/src/transport.rs | 45 +- client/network/src/utils.rs | 10 +- client/network/test/src/block_import.rs | 67 +- client/network/test/src/lib.rs | 469 +-- client/network/test/src/sync.rs | 192 +- client/offchain/src/api.rs | 113 +- client/offchain/src/api/http.rs | 414 +-- client/offchain/src/api/http_dummy.rs | 62 +- client/offchain/src/api/timestamp.rs | 
18 +- client/offchain/src/lib.rs | 171 +- client/peerset/src/lib.rs | 182 +- client/peerset/src/peersstate.rs | 241 +- client/peerset/tests/fuzz.rs | 105 +- client/proposer-metrics/src/lib.rs | 18 +- client/rpc-api/src/author/error.rs | 20 +- client/rpc-api/src/author/hash.rs | 2 +- client/rpc-api/src/author/mod.rs | 26 +- client/rpc-api/src/chain/error.rs | 2 +- client/rpc-api/src/chain/mod.rs | 4 +- client/rpc-api/src/child_state/mod.rs | 12 +- client/rpc-api/src/helpers.rs | 2 +- client/rpc-api/src/lib.rs | 2 +- client/rpc-api/src/metadata.rs | 6 +- client/rpc-api/src/offchain/error.rs | 4 +- client/rpc-api/src/offchain/mod.rs | 4 +- client/rpc-api/src/state/error.rs | 2 +- client/rpc-api/src/state/helpers.rs | 2 +- client/rpc-api/src/state/mod.rs | 49 +- client/rpc-api/src/system/error.rs | 4 +- client/rpc-api/src/system/helpers.rs | 20 +- client/rpc-api/src/system/mod.rs | 36 +- client/rpc-servers/src/lib.rs | 64 +- client/rpc-servers/src/middleware.rs | 36 +- client/rpc/src/author/mod.rs | 132 +- client/rpc/src/author/tests.rs | 103 +- client/rpc/src/chain/chain_full.rs | 38 +- client/rpc/src/chain/chain_light.rs | 56 +- client/rpc/src/chain/mod.rs | 139 +- client/rpc/src/chain/tests.rs | 35 +- client/rpc/src/lib.rs | 4 +- client/rpc/src/offchain/mod.rs | 11 +- client/rpc/src/offchain/tests.rs | 2 +- client/rpc/src/state/mod.rs | 196 +- client/rpc/src/state/state_full.rs | 386 ++- client/rpc/src/state/state_light.rs | 534 +-- client/rpc/src/state/tests.rs | 242 +- client/rpc/src/system/mod.rs | 67 +- client/rpc/src/system/tests.rs | 180 +- client/rpc/src/testing.rs | 4 +- client/service/src/builder.rs | 573 ++-- client/service/src/chain_ops/check_block.rs | 14 +- client/service/src/chain_ops/export_blocks.rs | 25 +- .../service/src/chain_ops/export_raw_state.rs | 17 +- client/service/src/chain_ops/import_blocks.rs | 201 +- client/service/src/chain_ops/revert_chain.rs | 4 +- client/service/src/client/block_rules.rs | 15 +- 
client/service/src/client/call_executor.rs | 155 +- client/service/src/client/client.rs | 955 +++--- client/service/src/client/genesis.rs | 17 +- client/service/src/client/light.rs | 43 +- client/service/src/client/mod.rs | 10 +- client/service/src/client/wasm_override.rs | 71 +- client/service/src/client/wasm_substitutes.rs | 45 +- client/service/src/config.rs | 51 +- client/service/src/error.rs | 4 +- client/service/src/lib.rs | 259 +- client/service/src/metrics.rs | 161 +- client/service/src/task_manager/mod.rs | 72 +- .../src/task_manager/prometheus_future.rs | 14 +- client/service/src/task_manager/tests.rs | 11 +- client/service/test/src/client/db.rs | 7 +- client/service/test/src/client/light.rs | 575 ++-- client/service/test/src/client/mod.rs | 1333 ++++---- client/service/test/src/lib.rs | 278 +- client/state-db/src/lib.rs | 165 +- client/state-db/src/noncanonical.rs | 254 +- client/state-db/src/pruning.rs | 73 +- client/state-db/src/test.rs | 18 +- client/sync-state-rpc/src/lib.rs | 59 +- client/telemetry/src/endpoints.rs | 25 +- client/telemetry/src/lib.rs | 67 +- client/telemetry/src/node.rs | 57 +- client/telemetry/src/transport.rs | 8 +- client/tracing/proc-macro/src/lib.rs | 9 +- client/tracing/src/block/mod.rs | 121 +- client/tracing/src/lib.rs | 79 +- client/tracing/src/logging/directives.rs | 18 +- client/tracing/src/logging/event_format.rs | 27 +- .../tracing/src/logging/layers/console_log.rs | 10 +- .../src/logging/layers/prefix_layer.rs | 6 +- client/tracing/src/logging/mod.rs | 58 +- client/transaction-pool/api/src/error.rs | 12 +- client/transaction-pool/api/src/lib.rs | 41 +- client/transaction-pool/benches/basics.rs | 53 +- client/transaction-pool/src/api.rs | 158 +- client/transaction-pool/src/error.rs | 1 - .../transaction-pool/src/graph/base_pool.rs | 428 +-- client/transaction-pool/src/graph/future.rs | 68 +- client/transaction-pool/src/graph/listener.rs | 19 +- client/transaction-pool/src/graph/mod.rs | 14 +- 
client/transaction-pool/src/graph/pool.rs | 509 +-- client/transaction-pool/src/graph/ready.rs | 147 +- client/transaction-pool/src/graph/rotator.rs | 35 +- .../transaction-pool/src/graph/tracked_map.rs | 32 +- .../src/graph/validated_pool.rs | 171 +- client/transaction-pool/src/graph/watcher.rs | 14 +- client/transaction-pool/src/lib.rs | 280 +- client/transaction-pool/src/metrics.rs | 14 +- client/transaction-pool/src/revalidation.rs | 108 +- client/transaction-pool/tests/pool.rs | 317 +- client/transaction-pool/tests/revalidation.rs | 40 +- frame/assets/src/benchmarking.rs | 66 +- frame/assets/src/extra_mutator.rs | 5 +- frame/assets/src/functions.rs | 31 +- frame/assets/src/impl_fungibles.rs | 59 +- frame/assets/src/lib.rs | 216 +- frame/assets/src/mock.rs | 10 +- frame/assets/src/tests.rs | 133 +- frame/assets/src/types.rs | 22 +- frame/assets/src/weights.rs | 24 +- frame/atomic-swap/src/lib.rs | 76 +- frame/atomic-swap/src/tests.rs | 27 +- frame/aura/src/lib.rs | 51 +- frame/aura/src/migrations.rs | 6 +- frame/aura/src/mock.rs | 13 +- frame/aura/src/tests.rs | 2 +- frame/authority-discovery/src/lib.rs | 71 +- frame/authorship/src/lib.rs | 198 +- frame/babe/src/benchmarking.rs | 5 +- frame/babe/src/default_weights.rs | 3 +- frame/babe/src/equivocation.rs | 35 +- frame/babe/src/lib.rs | 203 +- frame/babe/src/mock.rs | 82 +- frame/babe/src/randomness.rs | 2 +- frame/babe/src/tests.rs | 223 +- frame/balances/src/benchmarking.rs | 5 +- frame/balances/src/lib.rs | 706 ++-- frame/balances/src/tests_composite.rs | 29 +- frame/balances/src/tests_local.rs | 100 +- frame/balances/src/tests_reentrancy.rs | 230 +- frame/balances/src/weights.rs | 6 +- frame/benchmarking/src/analysis.rs | 401 ++- frame/benchmarking/src/lib.rs | 28 +- frame/benchmarking/src/tests.rs | 42 +- frame/benchmarking/src/utils.rs | 50 +- frame/bounties/src/benchmarking.rs | 31 +- frame/bounties/src/lib.rs | 42 +- frame/bounties/src/tests.rs | 318 +- frame/bounties/src/weights.rs | 18 +- 
frame/collective/src/benchmarking.rs | 16 +- frame/collective/src/lib.rs | 985 ++++-- frame/collective/src/weights.rs | 66 +- frame/contracts/common/src/lib.rs | 4 +- frame/contracts/proc-macro/src/lib.rs | 24 +- frame/contracts/rpc/runtime-api/src/lib.rs | 4 +- frame/contracts/rpc/src/lib.rs | 72 +- frame/contracts/src/benchmarking/code.rs | 261 +- frame/contracts/src/benchmarking/mod.rs | 64 +- frame/contracts/src/benchmarking/sandbox.rs | 11 +- frame/contracts/src/chain_extension.rs | 56 +- frame/contracts/src/exec.rs | 718 ++--- frame/contracts/src/gas.rs | 27 +- frame/contracts/src/lib.rs | 217 +- frame/contracts/src/migration.rs | 6 +- frame/contracts/src/rent.rs | 179 +- frame/contracts/src/schedule.rs | 124 +- frame/contracts/src/storage.rs | 100 +- frame/contracts/src/tests.rs | 2860 ++++++++--------- frame/contracts/src/wasm/code_cache.rs | 72 +- frame/contracts/src/wasm/env_def/macros.rs | 33 +- frame/contracts/src/wasm/env_def/mod.rs | 11 +- frame/contracts/src/wasm/mod.rs | 447 +-- frame/contracts/src/wasm/prepare.rs | 296 +- frame/contracts/src/wasm/runtime.rs | 193 +- frame/contracts/src/weights.rs | 650 ++-- frame/democracy/src/benchmarking.rs | 48 +- frame/democracy/src/conviction.rs | 18 +- frame/democracy/src/lib.rs | 298 +- frame/democracy/src/tests.rs | 39 +- frame/democracy/src/tests/cancellation.rs | 6 +- frame/democracy/src/tests/decoders.rs | 8 +- .../democracy/src/tests/external_proposing.rs | 65 +- frame/democracy/src/tests/fast_tracking.rs | 15 +- frame/democracy/src/tests/lock_voting.rs | 53 +- frame/democracy/src/tests/preimage.rs | 72 +- frame/democracy/src/tests/public_proposals.rs | 5 +- frame/democracy/src/tests/scheduling.rs | 10 +- frame/democracy/src/tests/voting.rs | 11 +- frame/democracy/src/types.rs | 76 +- frame/democracy/src/vote.rs | 43 +- frame/democracy/src/vote_threshold.rs | 53 +- frame/democracy/src/weights.rs | 138 +- .../src/benchmarking.rs | 34 +- .../src/helpers.rs | 16 +- 
.../election-provider-multi-phase/src/lib.rs | 160 +- .../election-provider-multi-phase/src/mock.rs | 14 +- .../src/signed.rs | 158 +- .../src/unsigned.rs | 356 +- .../src/weights.rs | 36 +- frame/election-provider-support/src/lib.rs | 7 +- .../election-provider-support/src/onchain.rs | 29 +- frame/elections-phragmen/src/benchmarking.rs | 70 +- frame/elections-phragmen/src/lib.rs | 763 ++--- frame/elections-phragmen/src/migrations/v3.rs | 91 +- frame/elections-phragmen/src/migrations/v4.rs | 16 +- frame/elections-phragmen/src/weights.rs | 54 +- frame/elections/src/lib.rs | 270 +- frame/elections/src/mock.rs | 56 +- frame/elections/src/tests.rs | 679 ++-- frame/example-offchain-worker/src/lib.rs | 154 +- frame/example-offchain-worker/src/tests.rs | 88 +- frame/example-parallel/src/lib.rs | 22 +- frame/example-parallel/src/tests.rs | 5 +- frame/example/src/benchmarking.rs | 4 +- frame/example/src/lib.rs | 46 +- frame/example/src/tests.rs | 14 +- frame/example/src/weights.rs | 24 +- frame/executive/src/lib.rs | 350 +- frame/gilt/src/benchmarking.rs | 22 +- frame/gilt/src/lib.rs | 159 +- frame/gilt/src/mock.rs | 15 +- frame/gilt/src/tests.rs | 376 ++- frame/gilt/src/weights.rs | 36 +- frame/grandpa/src/benchmarking.rs | 7 +- frame/grandpa/src/default_weights.rs | 6 +- frame/grandpa/src/equivocation.rs | 20 +- frame/grandpa/src/lib.rs | 99 +- frame/grandpa/src/migrations/v3_1.rs | 31 +- frame/grandpa/src/mock.rs | 46 +- frame/grandpa/src/tests.rs | 240 +- frame/identity/src/benchmarking.rs | 40 +- frame/identity/src/lib.rs | 234 +- frame/identity/src/tests.rs | 122 +- frame/identity/src/types.rs | 199 +- frame/identity/src/weights.rs | 102 +- frame/im-online/src/benchmarking.rs | 32 +- frame/im-online/src/lib.rs | 238 +- frame/im-online/src/mock.rs | 32 +- frame/im-online/src/tests.rs | 151 +- frame/im-online/src/weights.rs | 12 +- frame/indices/src/benchmarking.rs | 9 +- frame/indices/src/lib.rs | 42 +- frame/indices/src/mock.rs | 12 +- frame/indices/src/tests.rs | 10 
+- frame/indices/src/weights.rs | 6 +- frame/lottery/src/benchmarking.rs | 10 +- frame/lottery/src/lib.rs | 129 +- frame/lottery/src/mock.rs | 8 +- frame/lottery/src/tests.rs | 21 +- frame/lottery/src/weights.rs | 12 +- frame/membership/src/lib.rs | 63 +- frame/membership/src/weights.rs | 48 +- .../primitives/src/lib.rs | 92 +- frame/merkle-mountain-range/rpc/src/lib.rs | 49 +- .../merkle-mountain-range/src/benchmarking.rs | 8 +- .../src/default_weights.rs | 8 +- frame/merkle-mountain-range/src/lib.rs | 55 +- frame/merkle-mountain-range/src/mmr/mmr.rs | 72 +- frame/merkle-mountain-range/src/mmr/mod.rs | 4 +- .../merkle-mountain-range/src/mmr/storage.rs | 27 +- frame/merkle-mountain-range/src/mmr/utils.rs | 9 +- frame/merkle-mountain-range/src/mock.rs | 18 +- frame/merkle-mountain-range/src/tests.rs | 190 +- frame/multisig/src/benchmarking.rs | 16 +- frame/multisig/src/lib.rs | 213 +- frame/multisig/src/tests.rs | 424 ++- frame/multisig/src/weights.rs | 66 +- frame/nicks/src/lib.rs | 51 +- frame/node-authorization/src/lib.rs | 64 +- frame/node-authorization/src/mock.rs | 14 +- frame/node-authorization/src/tests.rs | 118 +- frame/node-authorization/src/weights.rs | 41 +- frame/offences/benchmarking/src/lib.rs | 129 +- frame/offences/benchmarking/src/mock.rs | 19 +- frame/offences/src/lib.rs | 69 +- frame/offences/src/migration.rs | 9 +- frame/offences/src/mock.rs | 40 +- frame/offences/src/tests.rs | 135 +- frame/proxy/src/benchmarking.rs | 12 +- frame/proxy/src/lib.rs | 177 +- frame/proxy/src/tests.rs | 163 +- frame/proxy/src/weights.rs | 66 +- frame/randomness-collective-flip/src/lib.rs | 44 +- frame/recovery/src/lib.rs | 76 +- frame/recovery/src/mock.rs | 14 +- frame/recovery/src/tests.rs | 114 +- frame/scheduler/src/benchmarking.rs | 14 +- frame/scheduler/src/lib.rs | 289 +- frame/scheduler/src/weights.rs | 30 +- frame/scored-pool/src/lib.rs | 92 +- frame/scored-pool/src/mock.rs | 37 +- frame/scored-pool/src/tests.rs | 51 +- frame/session/benchmarking/src/lib.rs 
| 29 +- frame/session/benchmarking/src/mock.rs | 5 +- frame/session/src/historical/mod.rs | 90 +- frame/session/src/historical/offchain.rs | 92 +- frame/session/src/historical/onchain.rs | 7 +- frame/session/src/historical/shared.rs | 8 +- frame/session/src/lib.rs | 128 +- frame/session/src/mock.rs | 44 +- frame/session/src/tests.rs | 121 +- frame/session/src/weights.rs | 6 +- frame/society/src/lib.rs | 269 +- frame/society/src/mock.rs | 29 +- frame/society/src/tests.rs | 74 +- frame/staking/reward-curve/src/lib.rs | 138 +- frame/staking/reward-curve/src/log.rs | 8 +- frame/staking/reward-fn/src/lib.rs | 41 +- frame/staking/reward-fn/tests/test.rs | 4 +- frame/staking/src/benchmarking.rs | 52 +- frame/staking/src/inflation.rs | 19 +- frame/staking/src/lib.rs | 584 ++-- frame/staking/src/mock.rs | 215 +- frame/staking/src/slashing.rs | 132 +- frame/staking/src/testing_utils.rs | 81 +- frame/staking/src/tests.rs | 1639 +++++----- frame/staking/src/weights.rs | 132 +- frame/sudo/src/lib.rs | 17 +- frame/sudo/src/mock.rs | 40 +- frame/sudo/src/tests.rs | 14 +- .../support/procedural/src/clone_no_bound.rs | 82 +- .../src/construct_runtime/expand/call.rs | 8 +- .../src/construct_runtime/expand/config.rs | 23 +- .../src/construct_runtime/expand/event.rs | 20 +- .../src/construct_runtime/expand/metadata.rs | 36 +- .../src/construct_runtime/expand/origin.rs | 47 +- .../procedural/src/construct_runtime/mod.rs | 116 +- .../procedural/src/construct_runtime/parse.rs | 69 +- .../support/procedural/src/debug_no_bound.rs | 93 +- .../procedural/src/default_no_bound.rs | 55 +- .../procedural/src/dummy_part_checker.rs | 14 +- frame/support/procedural/src/key_prefix.rs | 14 +- frame/support/procedural/src/lib.rs | 33 +- .../procedural/src/pallet/expand/call.rs | 29 +- .../procedural/src/pallet/expand/constants.rs | 59 +- .../procedural/src/pallet/expand/error.rs | 34 +- .../procedural/src/pallet/expand/event.rs | 31 +- .../src/pallet/expand/genesis_build.rs | 4 +- 
.../src/pallet/expand/genesis_config.rs | 25 +- .../procedural/src/pallet/expand/hooks.rs | 2 +- .../procedural/src/pallet/expand/inherent.rs | 7 +- .../procedural/src/pallet/expand/instances.rs | 7 +- .../procedural/src/pallet/expand/mod.rs | 20 +- .../procedural/src/pallet/expand/origin.rs | 4 +- .../src/pallet/expand/pallet_struct.rs | 40 +- .../procedural/src/pallet/expand/storage.rs | 97 +- .../src/pallet/expand/store_trait.rs | 10 +- .../src/pallet/expand/validate_unsigned.rs | 10 +- frame/support/procedural/src/pallet/mod.rs | 9 +- .../procedural/src/pallet/parse/call.rs | 45 +- .../procedural/src/pallet/parse/config.rs | 106 +- .../procedural/src/pallet/parse/error.rs | 24 +- .../procedural/src/pallet/parse/event.rs | 24 +- .../src/pallet/parse/extra_constants.rs | 17 +- .../src/pallet/parse/genesis_build.rs | 18 +- .../src/pallet/parse/genesis_config.rs | 15 +- .../procedural/src/pallet/parse/helper.rs | 139 +- .../procedural/src/pallet/parse/hooks.rs | 17 +- .../procedural/src/pallet/parse/inherent.rs | 10 +- .../procedural/src/pallet/parse/mod.rs | 74 +- .../procedural/src/pallet/parse/origin.rs | 20 +- .../src/pallet/parse/pallet_struct.rs | 22 +- .../procedural/src/pallet/parse/storage.rs | 243 +- .../procedural/src/pallet/parse/type_value.rs | 17 +- .../src/pallet/parse/validate_unsigned.rs | 10 +- .../support/procedural/src/pallet_version.rs | 6 +- .../procedural/src/partial_eq_no_bound.rs | 53 +- .../src/storage/genesis_config/builder_def.rs | 26 +- .../genesis_config/genesis_config_def.rs | 96 +- .../src/storage/genesis_config/mod.rs | 35 +- .../support/procedural/src/storage/getters.rs | 16 +- .../procedural/src/storage/instance_trait.rs | 27 +- .../procedural/src/storage/metadata.rs | 34 +- frame/support/procedural/src/storage/mod.rs | 164 +- frame/support/procedural/src/storage/parse.rs | 164 +- .../src/storage/print_pallet_upgrade.rs | 139 +- .../procedural/src/storage/storage_info.rs | 2 +- .../procedural/src/storage/storage_struct.rs | 29 +- 
.../procedural/src/storage/store_trait.rs | 24 +- frame/support/procedural/src/transactional.rs | 2 +- .../procedural/tools/derive/src/lib.rs | 32 +- frame/support/procedural/tools/src/docs.rs | 3 +- frame/support/procedural/tools/src/lib.rs | 16 +- frame/support/procedural/tools/src/syn_ext.rs | 62 +- frame/support/src/dispatch.rs | 112 +- frame/support/src/error.rs | 2 +- frame/support/src/event.rs | 2 +- frame/support/src/hash.rs | 18 +- frame/support/src/inherent.rs | 10 +- frame/support/src/lib.rs | 168 +- .../support/src/storage/bounded_btree_map.rs | 15 +- .../support/src/storage/bounded_btree_set.rs | 19 +- frame/support/src/storage/bounded_vec.rs | 17 +- frame/support/src/storage/child.rs | 135 +- .../src/storage/generator/double_map.rs | 158 +- frame/support/src/storage/generator/map.rs | 78 +- frame/support/src/storage/generator/mod.rs | 56 +- frame/support/src/storage/generator/nmap.rs | 48 +- frame/support/src/storage/generator/value.rs | 9 +- frame/support/src/storage/hashed.rs | 2 +- frame/support/src/storage/migration.rs | 64 +- frame/support/src/storage/mod.rs | 168 +- frame/support/src/storage/types/double_map.rs | 203 +- frame/support/src/storage/types/key.rs | 34 +- frame/support/src/storage/types/map.rs | 146 +- frame/support/src/storage/types/mod.rs | 4 +- frame/support/src/storage/types/nmap.rs | 227 +- frame/support/src/storage/types/value.rs | 125 +- frame/support/src/storage/unhashed.rs | 2 +- frame/support/src/storage/weak_bounded_vec.rs | 15 +- frame/support/src/traits.rs | 53 +- frame/support/src/traits/filter.rs | 31 +- frame/support/src/traits/hooks.rs | 30 +- frame/support/src/traits/members.rs | 40 +- frame/support/src/traits/metadata.rs | 17 +- frame/support/src/traits/misc.rs | 54 +- frame/support/src/traits/schedule.rs | 10 +- frame/support/src/traits/stored_map.rs | 33 +- frame/support/src/traits/tokens.rs | 10 +- frame/support/src/traits/tokens/currency.rs | 37 +- .../src/traits/tokens/currency/lockable.rs | 14 +- 
.../src/traits/tokens/currency/reservable.rs | 37 +- frame/support/src/traits/tokens/fungible.rs | 131 +- .../src/traits/tokens/fungible/balanced.rs | 58 +- .../src/traits/tokens/fungible/imbalance.rs | 56 +- frame/support/src/traits/tokens/fungibles.rs | 65 +- .../src/traits/tokens/fungibles/balanced.rs | 58 +- .../src/traits/tokens/fungibles/imbalance.rs | 50 +- frame/support/src/traits/tokens/imbalance.rs | 27 +- .../traits/tokens/imbalance/on_unbalanced.rs | 9 +- .../tokens/imbalance/signed_imbalance.rs | 17 +- .../traits/tokens/imbalance/split_two_ways.rs | 29 +- frame/support/src/traits/tokens/misc.rs | 20 +- .../support/src/traits/tokens/nonfungible.rs | 57 +- .../support/src/traits/tokens/nonfungibles.rs | 50 +- frame/support/src/traits/validation.rs | 30 +- frame/support/src/traits/voting.rs | 7 +- frame/support/src/weights.rs | 145 +- frame/support/test/src/pallet_version.rs | 5 +- frame/support/test/tests/construct_runtime.rs | 100 +- frame/support/test/tests/decl_storage.rs | 59 +- frame/support/test/tests/derive_no_bound.rs | 66 +- frame/support/test/tests/final_keys.rs | 14 +- frame/support/test/tests/genesisconfig.rs | 4 +- frame/support/test/tests/instance.rs | 91 +- frame/support/test/tests/issue2219.rs | 34 +- frame/support/test/tests/pallet.rs | 476 +-- .../test/tests/pallet_compatibility.rs | 82 +- .../tests/pallet_compatibility_instance.rs | 70 +- frame/support/test/tests/pallet_instance.rs | 177 +- frame/support/test/tests/pallet_version.rs | 41 +- .../tests/pallet_with_name_trait_is_valid.rs | 5 +- .../support/test/tests/storage_transaction.rs | 10 +- frame/support/test/tests/system.rs | 12 +- frame/system/benches/bench.rs | 23 +- frame/system/benchmarking/src/lib.rs | 21 +- frame/system/src/extensions/check_genesis.rs | 2 +- .../system/src/extensions/check_mortality.rs | 18 +- frame/system/src/extensions/check_nonce.rs | 49 +- .../src/extensions/check_spec_version.rs | 7 +- .../system/src/extensions/check_tx_version.rs | 7 +- 
frame/system/src/extensions/check_weight.rs | 229 +- frame/system/src/extensions/mod.rs | 1 - frame/system/src/lib.rs | 380 ++- frame/system/src/limits.rs | 57 +- frame/system/src/mock.rs | 27 +- frame/system/src/mocking.rs | 5 +- frame/system/src/offchain.rs | 276 +- frame/system/src/tests.rs | 194 +- frame/system/src/weights.rs | 42 +- frame/timestamp/src/benchmarking.rs | 10 +- frame/timestamp/src/lib.rs | 44 +- frame/timestamp/src/weights.rs | 6 +- frame/tips/src/benchmarking.rs | 29 +- frame/tips/src/lib.rs | 48 +- frame/tips/src/tests.rs | 99 +- frame/tips/src/weights.rs | 36 +- frame/transaction-payment/rpc/src/lib.rs | 50 +- frame/transaction-payment/src/lib.rs | 821 ++--- frame/transaction-payment/src/payment.rs | 31 +- frame/transaction-payment/src/types.rs | 23 +- frame/transaction-storage/src/benchmarking.rs | 29 +- frame/transaction-storage/src/lib.rs | 84 +- frame/transaction-storage/src/mock.rs | 17 +- frame/transaction-storage/src/tests.rs | 63 +- frame/transaction-storage/src/weights.rs | 12 +- frame/treasury/src/benchmarking.rs | 28 +- frame/treasury/src/lib.rs | 84 +- frame/treasury/src/tests.rs | 33 +- frame/treasury/src/weights.rs | 18 +- frame/try-runtime/src/lib.rs | 2 +- frame/uniques/src/benchmarking.rs | 58 +- frame/uniques/src/functions.rs | 44 +- frame/uniques/src/impl_nonfungibles.rs | 25 +- frame/uniques/src/lib.rs | 71 +- frame/uniques/src/mock.rs | 7 +- frame/uniques/src/tests.rs | 134 +- frame/uniques/src/types.rs | 8 +- frame/uniques/src/weights.rs | 18 +- frame/utility/src/benchmarking.rs | 8 +- frame/utility/src/lib.rs | 43 +- frame/utility/src/tests.rs | 148 +- frame/utility/src/weights.rs | 18 +- frame/vesting/src/benchmarking.rs | 7 +- frame/vesting/src/lib.rs | 90 +- frame/vesting/src/tests.rs | 568 ++-- frame/vesting/src/weights.rs | 42 +- .../api/proc-macro/src/decl_runtime_apis.rs | 340 +- .../api/proc-macro/src/impl_runtime_apis.rs | 265 +- primitives/api/proc-macro/src/lib.rs | 2 +- 
.../proc-macro/src/mock_impl_runtime_apis.rs | 109 +- primitives/api/proc-macro/src/utils.rs | 121 +- primitives/api/src/lib.rs | 93 +- primitives/api/test/benches/bench.rs | 19 +- primitives/api/test/tests/decl_and_impl.rs | 35 +- primitives/api/test/tests/runtime_calls.rs | 85 +- primitives/application-crypto/src/ecdsa.rs | 4 +- primitives/application-crypto/src/ed25519.rs | 4 +- primitives/application-crypto/src/lib.rs | 162 +- primitives/application-crypto/src/sr25519.rs | 4 +- primitives/application-crypto/src/traits.rs | 20 +- .../application-crypto/test/src/ecdsa.rs | 22 +- .../application-crypto/test/src/ed25519.rs | 22 +- primitives/application-crypto/test/src/lib.rs | 4 +- .../application-crypto/test/src/sr25519.rs | 22 +- primitives/arithmetic/benches/bench.rs | 6 +- primitives/arithmetic/fuzzer/src/biguint.rs | 58 +- .../arithmetic/fuzzer/src/fixed_point.rs | 8 +- .../fuzzer/src/multiply_by_rational.rs | 2 +- primitives/arithmetic/fuzzer/src/normalize.rs | 13 +- .../fuzzer/src/per_thing_rational.rs | 10 +- primitives/arithmetic/src/biguint.rs | 94 +- primitives/arithmetic/src/fixed_point.rs | 342 +- primitives/arithmetic/src/helpers_128bit.rs | 17 +- primitives/arithmetic/src/lib.rs | 148 +- primitives/arithmetic/src/per_things.rs | 202 +- primitives/arithmetic/src/rational.rs | 96 +- primitives/arithmetic/src/traits.rs | 223 +- primitives/authority-discovery/src/lib.rs | 6 +- primitives/authorship/src/lib.rs | 8 +- primitives/blockchain/src/backend.rs | 70 +- primitives/blockchain/src/error.rs | 13 +- primitives/blockchain/src/header_metadata.rs | 55 +- primitives/blockchain/src/lib.rs | 4 +- primitives/consensus/aura/src/digests.rs | 9 +- primitives/consensus/aura/src/inherents.rs | 25 +- primitives/consensus/aura/src/lib.rs | 6 +- primitives/consensus/babe/src/digests.rs | 12 +- primitives/consensus/babe/src/inherents.rs | 11 +- primitives/consensus/babe/src/lib.rs | 33 +- .../consensus/common/src/block_import.rs | 58 +- 
.../consensus/common/src/block_validation.rs | 5 +- primitives/consensus/common/src/error.rs | 10 +- primitives/consensus/common/src/evaluation.rs | 8 +- .../consensus/common/src/import_queue.rs | 139 +- .../common/src/import_queue/basic_queue.rs | 81 +- .../common/src/import_queue/buffered_link.rs | 31 +- primitives/consensus/common/src/lib.rs | 59 +- primitives/consensus/common/src/metrics.rs | 44 +- .../consensus/common/src/select_chain.rs | 1 - primitives/consensus/pow/src/lib.rs | 4 +- primitives/consensus/vrf/src/schnorrkel.rs | 38 +- primitives/core/benches/bench.rs | 81 +- primitives/core/src/changes_trie.rs | 123 +- primitives/core/src/crypto.rs | 457 ++- primitives/core/src/ecdsa.rs | 138 +- primitives/core/src/ed25519.rs | 128 +- primitives/core/src/hash.rs | 53 +- primitives/core/src/hasher.rs | 8 +- primitives/core/src/hexdisplay.rs | 26 +- primitives/core/src/lib.rs | 75 +- primitives/core/src/offchain/mod.rs | 108 +- primitives/core/src/offchain/storage.rs | 21 +- primitives/core/src/offchain/testing.rs | 103 +- primitives/core/src/sandbox.rs | 44 +- primitives/core/src/sr25519.rs | 176 +- primitives/core/src/testing.rs | 6 +- primitives/core/src/traits.rs | 18 +- primitives/core/src/u32_trait.rs | 546 +++- primitives/core/src/uint.rs | 47 +- primitives/database/src/kvdb.rs | 59 +- primitives/database/src/lib.rs | 11 +- primitives/database/src/mem.rs | 32 +- primitives/debug-derive/src/impls.rs | 126 +- primitives/debug-derive/src/lib.rs | 3 +- primitives/debug-derive/tests/tests.rs | 26 +- primitives/externalities/src/extensions.rs | 27 +- primitives/externalities/src/lib.rs | 63 +- primitives/externalities/src/scope_limited.rs | 3 +- primitives/finality-grandpa/src/lib.rs | 40 +- primitives/inherents/src/client_side.rs | 6 +- primitives/inherents/src/lib.rs | 45 +- primitives/io/src/batch_verifier.rs | 34 +- primitives/io/src/lib.rs | 357 +- primitives/keyring/src/ed25519.rs | 56 +- primitives/keyring/src/sr25519.rs | 59 +- 
primitives/keystore/src/lib.rs | 59 +- primitives/keystore/src/testing.rs | 243 +- primitives/keystore/src/vrf.rs | 12 +- primitives/maybe-compressed-blob/src/lib.rs | 11 +- primitives/npos-elections/benches/phragmen.rs | 41 +- .../npos-elections/compact/src/assignment.rs | 106 +- .../npos-elections/compact/src/codec.rs | 171 +- .../compact/src/index_assignment.rs | 2 +- primitives/npos-elections/compact/src/lib.rs | 98 +- .../npos-elections/fuzzer/src/common.rs | 6 +- .../npos-elections/fuzzer/src/compact.rs | 8 +- .../fuzzer/src/phragmen_balancing.rs | 39 +- .../fuzzer/src/phragmms_balancing.rs | 39 +- .../npos-elections/fuzzer/src/reduce.rs | 64 +- primitives/npos-elections/src/assignments.rs | 46 +- primitives/npos-elections/src/balancing.rs | 46 +- primitives/npos-elections/src/helpers.rs | 17 +- primitives/npos-elections/src/lib.rs | 94 +- primitives/npos-elections/src/mock.rs | 132 +- primitives/npos-elections/src/node.rs | 43 +- primitives/npos-elections/src/phragmen.rs | 19 +- primitives/npos-elections/src/phragmms.rs | 171 +- primitives/npos-elections/src/pjr.rs | 224 +- primitives/npos-elections/src/reduce.rs | 430 +-- primitives/npos-elections/src/tests.rs | 776 ++--- primitives/panic-handler/src/lib.rs | 48 +- primitives/rpc/src/lib.rs | 18 +- primitives/rpc/src/list.rs | 2 +- primitives/rpc/src/number.rs | 7 +- primitives/rpc/src/tracing.rs | 6 +- .../runtime-interface/proc-macro/src/lib.rs | 18 +- .../proc-macro/src/pass_by/codec.rs | 6 +- .../proc-macro/src/pass_by/enum_.rs | 19 +- .../proc-macro/src/pass_by/inner.rs | 19 +- .../bare_function_interface.rs | 110 +- .../host_function_interface.rs | 326 +- .../src/runtime_interface/trait_decl_impl.rs | 63 +- .../runtime-interface/proc-macro/src/utils.rs | 135 +- primitives/runtime-interface/src/impls.rs | 38 +- primitives/runtime-interface/src/lib.rs | 9 +- primitives/runtime-interface/src/pass_by.rs | 60 +- primitives/runtime-interface/src/wasm.rs | 5 +- .../test-wasm-deprecated/src/lib.rs | 6 +- 
.../runtime-interface/test-wasm/src/lib.rs | 8 +- primitives/runtime-interface/test/src/lib.rs | 63 +- primitives/runtime/src/curve.rs | 37 +- primitives/runtime/src/generic/block.rs | 16 +- .../runtime/src/generic/checked_extrinsic.rs | 19 +- primitives/runtime/src/generic/digest.rs | 137 +- primitives/runtime/src/generic/era.rs | 55 +- primitives/runtime/src/generic/header.rs | 184 +- primitives/runtime/src/generic/mod.rs | 22 +- primitives/runtime/src/generic/tests.rs | 41 +- .../src/generic/unchecked_extrinsic.rs | 148 +- primitives/runtime/src/lib.rs | 222 +- primitives/runtime/src/multiaddress.rs | 8 +- primitives/runtime/src/offchain/http.rs | 103 +- primitives/runtime/src/offchain/storage.rs | 48 +- .../runtime/src/offchain/storage_lock.rs | 37 +- primitives/runtime/src/runtime_logger.rs | 17 +- primitives/runtime/src/runtime_string.rs | 7 +- primitives/runtime/src/testing.rs | 104 +- primitives/runtime/src/traits.rs | 314 +- .../runtime/src/transaction_validity.rs | 51 +- primitives/sandbox/src/lib.rs | 22 +- primitives/serializer/src/lib.rs | 7 +- primitives/session/src/lib.rs | 9 +- primitives/staking/src/offence.rs | 9 +- primitives/state-machine/src/backend.rs | 106 +- primitives/state-machine/src/basic.rs | 153 +- .../state-machine/src/changes_trie/build.rs | 1144 ++++--- .../src/changes_trie/build_cache.rs | 73 +- .../src/changes_trie/build_iterator.rs | 160 +- .../src/changes_trie/changes_iterator.rs | 463 +-- .../state-machine/src/changes_trie/input.rs | 8 +- .../state-machine/src/changes_trie/mod.rs | 197 +- .../state-machine/src/changes_trie/prune.rs | 85 +- .../state-machine/src/changes_trie/storage.rs | 51 +- .../src/changes_trie/surface_iterator.rs | 164 +- primitives/state-machine/src/error.rs | 1 - primitives/state-machine/src/ext.rs | 300 +- .../state-machine/src/in_memory_backend.rs | 64 +- primitives/state-machine/src/lib.rs | 611 ++-- .../src/overlayed_changes/changeset.rs | 164 +- .../src/overlayed_changes/mod.rs | 267 +- 
.../src/overlayed_changes/offchain.rs | 22 +- .../state-machine/src/proving_backend.rs | 205 +- primitives/state-machine/src/read_only.rs | 72 +- primitives/state-machine/src/stats.rs | 4 +- primitives/state-machine/src/testing.rs | 95 +- primitives/state-machine/src/trie_backend.rs | 96 +- .../state-machine/src/trie_backend_essence.rs | 171 +- primitives/std/src/lib.rs | 26 +- primitives/storage/src/lib.rs | 49 +- primitives/tasks/src/async_externalities.rs | 48 +- primitives/tasks/src/lib.rs | 154 +- primitives/test-primitives/src/lib.rs | 16 +- primitives/timestamp/src/lib.rs | 16 +- primitives/tracing/src/lib.rs | 21 +- primitives/tracing/src/types.rs | 269 +- .../transaction-pool/src/runtime_api.rs | 6 +- .../transaction-storage-proof/src/lib.rs | 72 +- primitives/trie/benches/bench.rs | 2 +- primitives/trie/src/error.rs | 10 +- primitives/trie/src/lib.rs | 355 +- primitives/trie/src/node_codec.rs | 87 +- primitives/trie/src/node_header.rs | 26 +- primitives/trie/src/storage_proof.rs | 39 +- primitives/trie/src/trie_codec.rs | 67 +- primitives/trie/src/trie_stream.rs | 53 +- primitives/utils/src/metrics.rs | 19 +- primitives/utils/src/mpsc.rs | 84 +- primitives/utils/src/status_sinks.rs | 29 +- .../proc-macro/src/decl_runtime_version.rs | 78 +- primitives/version/src/embed.rs | 5 +- primitives/version/src/lib.rs | 67 +- primitives/wasm-interface/src/lib.rs | 40 +- primitives/wasm-interface/src/wasmi_impl.rs | 2 +- test-utils/client/src/client_ext.rs | 67 +- test-utils/client/src/lib.rs | 180 +- test-utils/derive/src/lib.rs | 6 +- .../runtime/client/src/block_builder_ext.rs | 22 +- test-utils/runtime/client/src/lib.rs | 152 +- test-utils/runtime/client/src/trait_tests.rs | 471 ++- test-utils/runtime/src/genesismap.rs | 62 +- test-utils/runtime/src/lib.rs | 187 +- test-utils/runtime/src/system.rs | 193 +- .../runtime/transaction-pool/src/lib.rs | 139 +- test-utils/src/lib.rs | 8 +- test-utils/test-runner/src/client.rs | 376 +-- 
test-utils/test-runner/src/host_functions.rs | 20 +- test-utils/test-runner/src/lib.rs | 13 +- test-utils/test-runner/src/node.rs | 97 +- test-utils/test-runner/src/utils.rs | 40 +- utils/browser/src/lib.rs | 45 +- utils/build-script-utils/src/git.rs | 10 +- utils/build-script-utils/src/lib.rs | 2 +- utils/build-script-utils/src/version.rs | 6 +- utils/fork-tree/src/lib.rs | 441 +-- utils/frame/benchmarking-cli/src/command.rs | 52 +- utils/frame/benchmarking-cli/src/writer.rs | 201 +- utils/frame/frame-utilities-cli/src/lib.rs | 1 - .../frame-utilities-cli/src/pallet_id.rs | 28 +- utils/frame/remote-externalities/src/lib.rs | 71 +- .../frame/remote-externalities/src/rpc_api.rs | 21 +- utils/frame/rpc/support/src/lib.rs | 23 +- utils/frame/rpc/system/src/lib.rs | 164 +- utils/frame/try-runtime/cli/src/lib.rs | 146 +- utils/frame/try-runtime/cli/src/parse.rs | 7 +- utils/prometheus/src/lib.rs | 55 +- utils/prometheus/src/networking.rs | 28 +- utils/prometheus/src/sourced.rs | 45 +- utils/wasm-builder/src/builder.rs | 40 +- utils/wasm-builder/src/lib.rs | 26 +- utils/wasm-builder/src/prerequisites.rs | 65 +- utils/wasm-builder/src/wasm_project.rs | 188 +- 1003 files changed, 53615 insertions(+), 51338 deletions(-) diff --git a/bin/node-template/node/src/chain_spec.rs b/bin/node-template/node/src/chain_spec.rs index 5093a77b571e9..7009b3be5c279 100644 --- a/bin/node-template/node/src/chain_spec.rs +++ b/bin/node-template/node/src/chain_spec.rs @@ -1,12 +1,12 @@ -use sp_core::{Pair, Public, sr25519}; use node_template_runtime::{ - AccountId, AuraConfig, BalancesConfig, GenesisConfig, GrandpaConfig, - SudoConfig, SystemConfig, WASM_BINARY, Signature + AccountId, AuraConfig, BalancesConfig, GenesisConfig, GrandpaConfig, Signature, SudoConfig, + SystemConfig, WASM_BINARY, }; +use sc_service::ChainType; use sp_consensus_aura::sr25519::AuthorityId as AuraId; +use sp_core::{sr25519, Pair, Public}; use sp_finality_grandpa::AuthorityId as GrandpaId; -use 
sp_runtime::traits::{Verify, IdentifyAccount}; -use sc_service::ChainType; +use sp_runtime::traits::{IdentifyAccount, Verify}; // The URL for the telemetry server. // const STAGING_TELEMETRY_URL: &str = "wss://telemetry.polkadot.io/submit/"; @@ -24,18 +24,16 @@ pub fn get_from_seed(seed: &str) -> ::Pu type AccountPublic = ::Signer; /// Generate an account ID from seed. -pub fn get_account_id_from_seed(seed: &str) -> AccountId where - AccountPublic: From<::Public> +pub fn get_account_id_from_seed(seed: &str) -> AccountId +where + AccountPublic: From<::Public>, { AccountPublic::from(get_from_seed::(seed)).into_account() } /// Generate an Aura authority key. pub fn authority_keys_from_seed(s: &str) -> (AuraId, GrandpaId) { - ( - get_from_seed::(s), - get_from_seed::(s), - ) + (get_from_seed::(s), get_from_seed::(s)) } pub fn development_config() -> Result { @@ -47,23 +45,23 @@ pub fn development_config() -> Result { // ID "dev", ChainType::Development, - move || testnet_genesis( - wasm_binary, - // Initial PoA authorities - vec![ - authority_keys_from_seed("Alice"), - ], - // Sudo account - get_account_id_from_seed::("Alice"), - // Pre-funded accounts - vec![ + move || { + testnet_genesis( + wasm_binary, + // Initial PoA authorities + vec![authority_keys_from_seed("Alice")], + // Sudo account get_account_id_from_seed::("Alice"), - get_account_id_from_seed::("Bob"), - get_account_id_from_seed::("Alice//stash"), - get_account_id_from_seed::("Bob//stash"), - ], - true, - ), + // Pre-funded accounts + vec![ + get_account_id_from_seed::("Alice"), + get_account_id_from_seed::("Bob"), + get_account_id_from_seed::("Alice//stash"), + get_account_id_from_seed::("Bob//stash"), + ], + true, + ) + }, // Bootnodes vec![], // Telemetry @@ -86,32 +84,31 @@ pub fn local_testnet_config() -> Result { // ID "local_testnet", ChainType::Local, - move || testnet_genesis( - wasm_binary, - // Initial PoA authorities - vec![ - authority_keys_from_seed("Alice"), - 
authority_keys_from_seed("Bob"), - ], - // Sudo account - get_account_id_from_seed::("Alice"), - // Pre-funded accounts - vec![ + move || { + testnet_genesis( + wasm_binary, + // Initial PoA authorities + vec![authority_keys_from_seed("Alice"), authority_keys_from_seed("Bob")], + // Sudo account get_account_id_from_seed::("Alice"), - get_account_id_from_seed::("Bob"), - get_account_id_from_seed::("Charlie"), - get_account_id_from_seed::("Dave"), - get_account_id_from_seed::("Eve"), - get_account_id_from_seed::("Ferdie"), - get_account_id_from_seed::("Alice//stash"), - get_account_id_from_seed::("Bob//stash"), - get_account_id_from_seed::("Charlie//stash"), - get_account_id_from_seed::("Dave//stash"), - get_account_id_from_seed::("Eve//stash"), - get_account_id_from_seed::("Ferdie//stash"), - ], - true, - ), + // Pre-funded accounts + vec![ + get_account_id_from_seed::("Alice"), + get_account_id_from_seed::("Bob"), + get_account_id_from_seed::("Charlie"), + get_account_id_from_seed::("Dave"), + get_account_id_from_seed::("Eve"), + get_account_id_from_seed::("Ferdie"), + get_account_id_from_seed::("Alice//stash"), + get_account_id_from_seed::("Bob//stash"), + get_account_id_from_seed::("Charlie//stash"), + get_account_id_from_seed::("Dave//stash"), + get_account_id_from_seed::("Eve//stash"), + get_account_id_from_seed::("Ferdie//stash"), + ], + true, + ) + }, // Bootnodes vec![], // Telemetry @@ -141,7 +138,7 @@ fn testnet_genesis( }, balances: BalancesConfig { // Configure endowed accounts with initial balance of 1 << 60. 
- balances: endowed_accounts.iter().cloned().map(|k|(k, 1 << 60)).collect(), + balances: endowed_accounts.iter().cloned().map(|k| (k, 1 << 60)).collect(), }, aura: AuraConfig { authorities: initial_authorities.iter().map(|x| (x.0.clone())).collect(), diff --git a/bin/node-template/node/src/cli.rs b/bin/node-template/node/src/cli.rs index 947123a6bbf5b..8b551051c1b19 100644 --- a/bin/node-template/node/src/cli.rs +++ b/bin/node-template/node/src/cli.rs @@ -1,5 +1,5 @@ -use structopt::StructOpt; use sc_cli::RunCmd; +use structopt::StructOpt; #[derive(Debug, StructOpt)] pub struct Cli { diff --git a/bin/node-template/node/src/command.rs b/bin/node-template/node/src/command.rs index e61dd86418825..d3a04e0ae91e8 100644 --- a/bin/node-template/node/src/command.rs +++ b/bin/node-template/node/src/command.rs @@ -15,11 +15,14 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::{chain_spec, service}; -use crate::cli::{Cli, Subcommand}; -use sc_cli::{SubstrateCli, RuntimeVersion, Role, ChainSpec}; -use sc_service::PartialComponents; +use crate::{ + chain_spec, + cli::{Cli, Subcommand}, + service, +}; use node_template_runtime::Block; +use sc_cli::{ChainSpec, Role, RuntimeVersion, SubstrateCli}; +use sc_service::PartialComponents; impl SubstrateCli for Cli { fn impl_name() -> String { @@ -50,9 +53,8 @@ impl SubstrateCli for Cli { Ok(match id { "dev" => Box::new(chain_spec::development_config()?), "" | "local" => Box::new(chain_spec::local_testnet_config()?), - path => Box::new(chain_spec::ChainSpec::from_json_file( - std::path::PathBuf::from(path), - )?), + path => + Box::new(chain_spec::ChainSpec::from_json_file(std::path::PathBuf::from(path))?), }) } @@ -74,32 +76,30 @@ pub fn run() -> sc_cli::Result<()> { Some(Subcommand::CheckBlock(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, import_queue, ..} - = 
service::new_partial(&config)?; + let PartialComponents { client, task_manager, import_queue, .. } = + service::new_partial(&config)?; Ok((cmd.run(client, import_queue), task_manager)) }) }, Some(Subcommand::ExportBlocks(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, ..} - = service::new_partial(&config)?; + let PartialComponents { client, task_manager, .. } = service::new_partial(&config)?; Ok((cmd.run(client, config.database), task_manager)) }) }, Some(Subcommand::ExportState(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, ..} - = service::new_partial(&config)?; + let PartialComponents { client, task_manager, .. } = service::new_partial(&config)?; Ok((cmd.run(client, config.chain_spec), task_manager)) }) }, Some(Subcommand::ImportBlocks(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, import_queue, ..} - = service::new_partial(&config)?; + let PartialComponents { client, task_manager, import_queue, .. } = + service::new_partial(&config)?; Ok((cmd.run(client, import_queue), task_manager)) }) }, @@ -110,29 +110,30 @@ pub fn run() -> sc_cli::Result<()> { Some(Subcommand::Revert(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, backend, ..} - = service::new_partial(&config)?; + let PartialComponents { client, task_manager, backend, .. } = + service::new_partial(&config)?; Ok((cmd.run(client, backend), task_manager)) }) }, - Some(Subcommand::Benchmark(cmd)) => { + Some(Subcommand::Benchmark(cmd)) => if cfg!(feature = "runtime-benchmarks") { let runner = cli.create_runner(cmd)?; runner.sync_run(|config| cmd.run::(config)) } else { Err("Benchmarking wasn't enabled when building the node. 
\ - You can enable it with `--features runtime-benchmarks`.".into()) - } - }, + You can enable it with `--features runtime-benchmarks`." + .into()) + }, None => { let runner = cli.create_runner(&cli.run)?; runner.run_node_until_exit(|config| async move { match config.role { Role::Light => service::new_light(config), _ => service::new_full(config), - }.map_err(sc_cli::Error::Service) + } + .map_err(sc_cli::Error::Service) }) - } + }, } } diff --git a/bin/node-template/node/src/lib.rs b/bin/node-template/node/src/lib.rs index 777c4f0a77147..f117b8aae6192 100644 --- a/bin/node-template/node/src/lib.rs +++ b/bin/node-template/node/src/lib.rs @@ -1,3 +1,3 @@ pub mod chain_spec; -pub mod service; pub mod rpc; +pub mod service; diff --git a/bin/node-template/node/src/rpc.rs b/bin/node-template/node/src/rpc.rs index a03d1aad2a883..d23b23178ec2a 100644 --- a/bin/node-template/node/src/rpc.rs +++ b/bin/node-template/node/src/rpc.rs @@ -8,12 +8,11 @@ use std::sync::Arc; use node_template_runtime::{opaque::Block, AccountId, Balance, Index}; -use sp_api::ProvideRuntimeApi; -use sp_blockchain::{Error as BlockChainError, HeaderMetadata, HeaderBackend}; -use sp_block_builder::BlockBuilder; pub use sc_rpc_api::DenyUnsafe; use sc_transaction_pool_api::TransactionPool; - +use sp_api::ProvideRuntimeApi; +use sp_block_builder::BlockBuilder; +use sp_blockchain::{Error as BlockChainError, HeaderBackend, HeaderMetadata}; /// Full client dependencies. pub struct FullDeps { @@ -26,34 +25,25 @@ pub struct FullDeps { } /// Instantiate all full RPC extensions. 
-pub fn create_full( - deps: FullDeps, -) -> jsonrpc_core::IoHandler where +pub fn create_full(deps: FullDeps) -> jsonrpc_core::IoHandler +where C: ProvideRuntimeApi, - C: HeaderBackend + HeaderMetadata + 'static, + C: HeaderBackend + HeaderMetadata + 'static, C: Send + Sync + 'static, C::Api: substrate_frame_rpc_system::AccountNonceApi, C::Api: pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi, C::Api: BlockBuilder, P: TransactionPool + 'static, { - use substrate_frame_rpc_system::{FullSystem, SystemApi}; use pallet_transaction_payment_rpc::{TransactionPayment, TransactionPaymentApi}; + use substrate_frame_rpc_system::{FullSystem, SystemApi}; let mut io = jsonrpc_core::IoHandler::default(); - let FullDeps { - client, - pool, - deny_unsafe, - } = deps; + let FullDeps { client, pool, deny_unsafe } = deps; - io.extend_with( - SystemApi::to_delegate(FullSystem::new(client.clone(), pool, deny_unsafe)) - ); + io.extend_with(SystemApi::to_delegate(FullSystem::new(client.clone(), pool, deny_unsafe))); - io.extend_with( - TransactionPaymentApi::to_delegate(TransactionPayment::new(client.clone())) - ); + io.extend_with(TransactionPaymentApi::to_delegate(TransactionPayment::new(client.clone()))); // Extend this RPC with a custom API by using the following syntax. // `YourRpcStruct` should have a reference to a client, which is needed diff --git a/bin/node-template/node/src/service.rs b/bin/node-template/node/src/service.rs index c19824e9eaa38..d97f29c00bca4 100644 --- a/bin/node-template/node/src/service.rs +++ b/bin/node-template/node/src/service.rs @@ -1,17 +1,17 @@ //! Service and ServiceFactory implementation. Specialized wrapper over substrate service. 
-use std::{sync::Arc, time::Duration}; -use sc_client_api::{ExecutorProvider, RemoteBackend}; use node_template_runtime::{self, opaque::Block, RuntimeApi}; -use sc_service::{error::Error as ServiceError, Configuration, TaskManager}; +use sc_client_api::{ExecutorProvider, RemoteBackend}; +use sc_consensus_aura::{ImportQueueParams, SlotProportion, StartAuraParams}; use sc_executor::native_executor_instance; pub use sc_executor::NativeExecutor; -use sp_consensus_aura::sr25519::AuthorityPair as AuraPair; -use sc_consensus_aura::{ImportQueueParams, StartAuraParams, SlotProportion}; use sc_finality_grandpa::SharedVoterState; use sc_keystore::LocalKeystore; +use sc_service::{error::Error as ServiceError, Configuration, TaskManager}; use sc_telemetry::{Telemetry, TelemetryWorker}; use sp_consensus::SlotData; +use sp_consensus_aura::sr25519::AuthorityPair as AuraPair; +use std::{sync::Arc, time::Duration}; // Our native executor instance. native_executor_instance!( @@ -25,22 +25,35 @@ type FullClient = sc_service::TFullClient; type FullBackend = sc_service::TFullBackend; type FullSelectChain = sc_consensus::LongestChain; -pub fn new_partial(config: &Configuration) -> Result, - sc_transaction_pool::FullPool, - ( - sc_finality_grandpa::GrandpaBlockImport, - sc_finality_grandpa::LinkHalf, - Option, - ) ->, ServiceError> { +pub fn new_partial( + config: &Configuration, +) -> Result< + sc_service::PartialComponents< + FullClient, + FullBackend, + FullSelectChain, + sp_consensus::DefaultImportQueue, + sc_transaction_pool::FullPool, + ( + sc_finality_grandpa::GrandpaBlockImport< + FullBackend, + Block, + FullClient, + FullSelectChain, + >, + sc_finality_grandpa::LinkHalf, + Option, + ), + >, + ServiceError, +> { if config.keystore_remote.is_some() { - return Err(ServiceError::Other( - format!("Remote Keystores are not supported."))) + return Err(ServiceError::Other(format!("Remote Keystores are not supported."))) } - let telemetry = config.telemetry_endpoints.clone() + let 
telemetry = config + .telemetry_endpoints + .clone() .filter(|x| !x.is_empty()) .map(|endpoints| -> Result<_, sc_telemetry::Error> { let worker = TelemetryWorker::new(16)?; @@ -56,11 +69,10 @@ pub fn new_partial(config: &Configuration) -> Result Result( - ImportQueueParams { + let import_queue = + sc_consensus_aura::import_queue::(ImportQueueParams { block_import: grandpa_block_import.clone(), justification_import: Some(Box::new(grandpa_block_import.clone())), client: client.clone(), @@ -98,12 +110,13 @@ pub fn new_partial(config: &Configuration) -> Result Result if let Some(url) = &config.keystore_remote { match remote_keystore(url) { Ok(k) => keystore_container.set_remote_keystore(k), - Err(e) => { - return Err(ServiceError::Other( - format!("Error hooking up remote keystore for {}: {}", url, e))) - } + Err(e) => + return Err(ServiceError::Other(format!( + "Error hooking up remote keystore for {}: {}", + url, e + ))), }; } @@ -162,7 +176,10 @@ pub fn new_full(mut config: Configuration) -> Result if config.offchain_worker.enabled { sc_service::build_offchain_workers( - &config, task_manager.spawn_handle(), client.clone(), network.clone(), + &config, + task_manager.spawn_handle(), + client.clone(), + network.clone(), ); } @@ -178,32 +195,27 @@ pub fn new_full(mut config: Configuration) -> Result let pool = transaction_pool.clone(); Box::new(move |deny_unsafe, _| { - let deps = crate::rpc::FullDeps { - client: client.clone(), - pool: pool.clone(), - deny_unsafe, - }; + let deps = + crate::rpc::FullDeps { client: client.clone(), pool: pool.clone(), deny_unsafe }; crate::rpc::create_full(deps) }) }; - let _rpc_handlers = sc_service::spawn_tasks( - sc_service::SpawnTasksParams { - network: network.clone(), - client: client.clone(), - keystore: keystore_container.sync_keystore(), - task_manager: &mut task_manager, - transaction_pool: transaction_pool.clone(), - rpc_extensions_builder, - on_demand: None, - remote_blockchain: None, - backend, - system_rpc_tx, - config, - 
telemetry: telemetry.as_mut(), - }, - )?; + let _rpc_handlers = sc_service::spawn_tasks(sc_service::SpawnTasksParams { + network: network.clone(), + client: client.clone(), + keystore: keystore_container.sync_keystore(), + task_manager: &mut task_manager, + transaction_pool: transaction_pool.clone(), + rpc_extensions_builder, + on_demand: None, + remote_blockchain: None, + backend, + system_rpc_tx, + config, + telemetry: telemetry.as_mut(), + })?; if role.is_authority() { let proposer_factory = sc_basic_authorship::ProposerFactory::new( @@ -257,11 +269,8 @@ pub fn new_full(mut config: Configuration) -> Result // if the node isn't actively participating in consensus then it doesn't // need a keystore, regardless of which protocol we use below. - let keystore = if role.is_authority() { - Some(keystore_container.sync_keystore()) - } else { - None - }; + let keystore = + if role.is_authority() { Some(keystore_container.sync_keystore()) } else { None }; let grandpa_config = sc_finality_grandpa::Config { // FIXME #1578 make this available through chainspec @@ -295,7 +304,7 @@ pub fn new_full(mut config: Configuration) -> Result // if it fails we take down the service with it. task_manager.spawn_essential_handle().spawn_blocking( "grandpa-voter", - sc_finality_grandpa::run_grandpa_voter(grandpa_config)? + sc_finality_grandpa::run_grandpa_voter(grandpa_config)?, ); } @@ -305,7 +314,9 @@ pub fn new_full(mut config: Configuration) -> Result /// Builds a new service for a light client. 
pub fn new_light(mut config: Configuration) -> Result { - let telemetry = config.telemetry_endpoints.clone() + let telemetry = config + .telemetry_endpoints + .clone() .filter(|x| !x.is_empty()) .map(|endpoints| -> Result<_, sc_telemetry::Error> { let worker = TelemetryWorker::new(16)?; @@ -320,11 +331,10 @@ pub fn new_light(mut config: Configuration) -> Result telemetry.as_ref().map(|(_, telemetry)| telemetry.handle()), )?; - let mut telemetry = telemetry - .map(|(worker, telemetry)| { - task_manager.spawn_handle().spawn("telemetry", worker.run()); - telemetry - }); + let mut telemetry = telemetry.map(|(worker, telemetry)| { + task_manager.spawn_handle().spawn("telemetry", worker.run()); + telemetry + }); config.network.extra_sets.push(sc_finality_grandpa::grandpa_peers_set_config()); @@ -347,8 +357,8 @@ pub fn new_light(mut config: Configuration) -> Result let slot_duration = sc_consensus_aura::slot_duration(&*client)?.slot_duration(); - let import_queue = sc_consensus_aura::import_queue::( - ImportQueueParams { + let import_queue = + sc_consensus_aura::import_queue::(ImportQueueParams { block_import: grandpa_block_import.clone(), justification_import: Some(Box::new(grandpa_block_import.clone())), client: client.clone(), @@ -368,8 +378,7 @@ pub fn new_light(mut config: Configuration) -> Result registry: config.prometheus_registry(), check_for_equivocation: Default::default(), telemetry: telemetry.as_ref().map(|x| x.handle()), - }, - )?; + })?; let (network, system_rpc_tx, network_starter) = sc_service::build_network(sc_service::BuildNetworkParams { @@ -384,7 +393,10 @@ pub fn new_light(mut config: Configuration) -> Result if config.offchain_worker.enabled { sc_service::build_offchain_workers( - &config, task_manager.spawn_handle(), client.clone(), network.clone(), + &config, + task_manager.spawn_handle(), + client.clone(), + network.clone(), ); } diff --git a/bin/node-template/pallets/template/src/benchmarking.rs 
b/bin/node-template/pallets/template/src/benchmarking.rs index 93d7fa395ad6b..fea9e65969b99 100644 --- a/bin/node-template/pallets/template/src/benchmarking.rs +++ b/bin/node-template/pallets/template/src/benchmarking.rs @@ -2,10 +2,10 @@ use super::*; -use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, whitelisted_caller, impl_benchmark_test_suite}; #[allow(unused)] use crate::Pallet as Template; +use frame_benchmarking::{benchmarks, impl_benchmark_test_suite, whitelisted_caller}; +use frame_system::RawOrigin; benchmarks! { do_something { @@ -17,8 +17,4 @@ benchmarks! { } } -impl_benchmark_test_suite!( - Template, - crate::mock::new_test_ext(), - crate::mock::Test, -); +impl_benchmark_test_suite!(Template, crate::mock::new_test_ext(), crate::mock::Test,); diff --git a/bin/node-template/pallets/template/src/lib.rs b/bin/node-template/pallets/template/src/lib.rs index b70068faf241d..ee3ca695b64da 100644 --- a/bin/node-template/pallets/template/src/lib.rs +++ b/bin/node-template/pallets/template/src/lib.rs @@ -3,7 +3,6 @@ /// Edit this file to define custom logic or remove it if it is not needed. /// Learn more about FRAME and the core library of Substrate FRAME pallets: /// - pub use pallet::*; #[cfg(test)] @@ -62,7 +61,7 @@ pub mod pallet { // These functions materialize as "extrinsics", which are often compared to transactions. // Dispatchable functions must be annotated with a weight and must return a DispatchResult. #[pallet::call] - impl Pallet { + impl Pallet { /// An example dispatchable that takes a singles value as a parameter, writes the value to /// storage and emits an event. This function must be dispatched by a signed extrinsic. 
#[pallet::weight(10_000 + T::DbWeight::get().writes(1))] diff --git a/bin/node-template/pallets/template/src/mock.rs b/bin/node-template/pallets/template/src/mock.rs index 9bea61df22edb..76742477000fb 100644 --- a/bin/node-template/pallets/template/src/mock.rs +++ b/bin/node-template/pallets/template/src/mock.rs @@ -1,10 +1,11 @@ use crate as pallet_template; -use sp_core::H256; use frame_support::parameter_types; +use frame_system as system; +use sp_core::H256; use sp_runtime::{ - traits::{BlakeTwo256, IdentityLookup}, testing::Header, + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, }; -use frame_system as system; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; diff --git a/bin/node-template/pallets/template/src/tests.rs b/bin/node-template/pallets/template/src/tests.rs index 3356b29ff3598..2205658601721 100644 --- a/bin/node-template/pallets/template/src/tests.rs +++ b/bin/node-template/pallets/template/src/tests.rs @@ -1,5 +1,5 @@ -use crate::{Error, mock::*}; -use frame_support::{assert_ok, assert_noop}; +use crate::{mock::*, Error}; +use frame_support::{assert_noop, assert_ok}; #[test] fn it_works_for_default_value() { @@ -15,9 +15,6 @@ fn it_works_for_default_value() { fn correct_error_for_none_value() { new_test_ext().execute_with(|| { // Ensure the expected error is thrown when no value is present. - assert_noop!( - TemplateModule::cause_error(Origin::signed(1)), - Error::::NoneValue - ); + assert_noop!(TemplateModule::cause_error(Origin::signed(1)), Error::::NoneValue); }); } diff --git a/bin/node-template/runtime/src/lib.rs b/bin/node-template/runtime/src/lib.rs index 1c89b7b86f22d..9cd1ccf0b61b2 100644 --- a/bin/node-template/runtime/src/lib.rs +++ b/bin/node-template/runtime/src/lib.rs @@ -1,43 +1,44 @@ #![cfg_attr(not(feature = "std"), no_std)] // `construct_runtime!` does a lot of recursion and requires us to increase the limit to 256. 
-#![recursion_limit="256"] +#![recursion_limit = "256"] // Make the WASM binary available. #[cfg(feature = "std")] include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); -use sp_std::prelude::*; -use sp_core::{crypto::KeyTypeId, OpaqueMetadata}; -use sp_runtime::{ - ApplyExtrinsicResult, generic, create_runtime_str, impl_opaque_keys, MultiSignature, - transaction_validity::{TransactionValidity, TransactionSource}, -}; -use sp_runtime::traits::{ - BlakeTwo256, Block as BlockT, AccountIdLookup, Verify, IdentifyAccount, NumberFor, +use pallet_grandpa::{ + fg_primitives, AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList, }; use sp_api::impl_runtime_apis; use sp_consensus_aura::sr25519::AuthorityId as AuraId; -use pallet_grandpa::{AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList}; -use pallet_grandpa::fg_primitives; -use sp_version::RuntimeVersion; +use sp_core::{crypto::KeyTypeId, OpaqueMetadata}; +use sp_runtime::{ + create_runtime_str, generic, impl_opaque_keys, + traits::{AccountIdLookup, BlakeTwo256, Block as BlockT, IdentifyAccount, NumberFor, Verify}, + transaction_validity::{TransactionSource, TransactionValidity}, + ApplyExtrinsicResult, MultiSignature, +}; +use sp_std::prelude::*; #[cfg(feature = "std")] use sp_version::NativeVersion; +use sp_version::RuntimeVersion; // A few exports that help ease life for downstream crates. 
-#[cfg(any(feature = "std", test))] -pub use sp_runtime::BuildStorage; -pub use pallet_timestamp::Call as TimestampCall; -pub use pallet_balances::Call as BalancesCall; -pub use sp_runtime::{Permill, Perbill}; pub use frame_support::{ - construct_runtime, parameter_types, StorageValue, + construct_runtime, parameter_types, traits::{KeyOwnerProofSystem, Randomness, StorageInfo}, weights::{ - Weight, IdentityFee, constants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight, WEIGHT_PER_SECOND}, + IdentityFee, Weight, }, + StorageValue, }; +pub use pallet_balances::Call as BalancesCall; +pub use pallet_timestamp::Call as TimestampCall; use pallet_transaction_payment::CurrencyAdapter; +#[cfg(any(feature = "std", test))] +pub use sp_runtime::BuildStorage; +pub use sp_runtime::{Perbill, Permill}; /// Import the template pallet. pub use pallet_template; @@ -123,10 +124,7 @@ pub const DAYS: BlockNumber = HOURS * 24; /// The version information used to identify this runtime when compiled natively. #[cfg(feature = "std")] pub fn native_version() -> NativeVersion { - NativeVersion { - runtime_version: VERSION, - can_author_with: Default::default(), - } + NativeVersion { runtime_version: VERSION, can_author_with: Default::default() } } const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75); @@ -306,7 +304,7 @@ pub type SignedExtra = ( frame_system::CheckEra, frame_system::CheckNonce, frame_system::CheckWeight, - pallet_transaction_payment::ChargeTransactionPayment + pallet_transaction_payment::ChargeTransactionPayment, ); /// Unchecked extrinsic type as expected by this runtime. pub type UncheckedExtrinsic = generic::UncheckedExtrinsic; diff --git a/bin/node/bench/src/construct.rs b/bin/node/bench/src/construct.rs index 491b261518a4f..eeeb833c1ff12 100644 --- a/bin/node/bench/src/construct.rs +++ b/bin/node/bench/src/construct.rs @@ -24,36 +24,22 @@ //! DO NOT depend on user input). Thus transaction generation should be //! based on randomized data. 
-use std::{ - borrow::Cow, - collections::HashMap, - pin::Pin, - sync::Arc, -}; use futures::Future; +use std::{borrow::Cow, collections::HashMap, pin::Pin, sync::Arc}; use node_primitives::Block; -use node_testing::bench::{BenchDb, Profile, BlockType, KeyTypes, DatabaseType}; -use sp_runtime::{ - generic::BlockId, - traits::NumberFor, - OpaqueExtrinsic, -}; +use node_testing::bench::{BenchDb, BlockType, DatabaseType, KeyTypes, Profile}; use sc_transaction_pool_api::{ - ImportNotificationStream, - PoolFuture, - PoolStatus, - TransactionFor, - TransactionSource, - TransactionStatusStreamFor, - TxHash, + ImportNotificationStream, PoolFuture, PoolStatus, TransactionFor, TransactionSource, + TransactionStatusStreamFor, TxHash, }; use sp_consensus::{Environment, Proposer}; use sp_inherents::InherentDataProvider; +use sp_runtime::{generic::BlockId, traits::NumberFor, OpaqueExtrinsic}; use crate::{ common::SizeType, - core::{self, Path, Mode}, + core::{self, Mode, Path}, }; pub struct ConstructionBenchmarkDescription { @@ -72,7 +58,6 @@ pub struct ConstructionBenchmark { impl core::BenchmarkDescription for ConstructionBenchmarkDescription { fn path(&self) -> Path { - let mut path = Path::new(&["node", "proposer"]); match self.profile { @@ -104,11 +89,7 @@ impl core::BenchmarkDescription for ConstructionBenchmarkDescription { fn setup(self: Box) -> Box { let mut extrinsics: Vec> = Vec::new(); - let mut bench_db = BenchDb::with_key_types( - self.database_type, - 50_000, - self.key_types - ); + let mut bench_db = BenchDb::with_key_types(self.database_type, 50_000, self.key_types); let client = bench_db.client(); @@ -127,11 +108,9 @@ impl core::BenchmarkDescription for ConstructionBenchmarkDescription { fn name(&self) -> Cow<'static, str> { format!( "Block construction ({:?}/{}, {:?}, {:?} backend)", - self.block_type, - self.size, - self.profile, - self.database_type, - ).into() + self.block_type, self.size, self.profile, self.database_type, + ) + .into() } } @@ -139,7 
+118,9 @@ impl core::Benchmark for ConstructionBenchmark { fn run(&mut self, mode: Mode) -> std::time::Duration { let context = self.database.create_context(self.profile); - let _ = context.client.runtime_version_at(&BlockId::Number(0)) + let _ = context + .client + .runtime_version_at(&BlockId::Number(0)) .expect("Failed to get runtime version") .spec_version; @@ -158,20 +139,25 @@ impl core::Benchmark for ConstructionBenchmark { let start = std::time::Instant::now(); - let proposer = futures::executor::block_on(proposer_factory.init( - &context.client.header(&BlockId::number(0)) - .expect("Database error querying block #0") - .expect("Block #0 should exist"), - )).expect("Proposer initialization failed"); - - let _block = futures::executor::block_on( - proposer.propose( - timestamp_provider.create_inherent_data().expect("Create inherent data failed"), - Default::default(), - std::time::Duration::from_secs(20), - None, + let proposer = futures::executor::block_on( + proposer_factory.init( + &context + .client + .header(&BlockId::number(0)) + .expect("Database error querying block #0") + .expect("Block #0 should exist"), ), - ).map(|r| r.block).expect("Proposing failed"); + ) + .expect("Proposer initialization failed"); + + let _block = futures::executor::block_on(proposer.propose( + timestamp_provider.create_inherent_data().expect("Create inherent data failed"), + Default::default(), + std::time::Duration::from_secs(20), + None, + )) + .map(|r| r.block) + .expect("Proposing failed"); let elapsed = start.elapsed(); @@ -191,10 +177,7 @@ pub struct PoolTransaction { impl From for PoolTransaction { fn from(e: OpaqueExtrinsic) -> Self { - PoolTransaction { - data: e, - hash: node_primitives::Hash::zero(), - } + PoolTransaction { data: e, hash: node_primitives::Hash::zero() } } } @@ -210,15 +193,25 @@ impl sc_transaction_pool_api::InPoolTransaction for PoolTransaction { &self.hash } - fn priority(&self) -> &u64 { unimplemented!() } + fn priority(&self) -> &u64 { + 
unimplemented!() + } - fn longevity(&self) -> &u64 { unimplemented!() } + fn longevity(&self) -> &u64 { + unimplemented!() + } - fn requires(&self) -> &[Vec] { unimplemented!() } + fn requires(&self) -> &[Vec] { + unimplemented!() + } - fn provides(&self) -> &[Vec] { unimplemented!() } + fn provides(&self) -> &[Vec] { + unimplemented!() + } - fn is_propagable(&self) -> bool { unimplemented!() } + fn is_propagable(&self) -> bool { + unimplemented!() + } } #[derive(Clone, Debug)] @@ -236,7 +229,7 @@ impl sc_transaction_pool_api::TransactionPool for Transactions { _at: &BlockId, _source: TransactionSource, _xts: Vec>, - ) -> PoolFuture>, Self::Error> { + ) -> PoolFuture>, Self::Error> { unimplemented!() } @@ -259,14 +252,21 @@ impl sc_transaction_pool_api::TransactionPool for Transactions { unimplemented!() } - fn ready_at(&self, _at: NumberFor) - -> Pin> + Send>> + Send>> - { - let iter: Box> + Send> = Box::new(self.0.clone().into_iter()); + fn ready_at( + &self, + _at: NumberFor, + ) -> Pin< + Box< + dyn Future> + Send>> + + Send, + >, + > { + let iter: Box> + Send> = + Box::new(self.0.clone().into_iter()); Box::pin(futures::future::ready(iter)) } - fn ready(&self) -> Box> + Send> { + fn ready(&self) -> Box> + Send> { unimplemented!() } diff --git a/bin/node/bench/src/core.rs b/bin/node/bench/src/core.rs index 26b7f92b14483..56c0f3526a4dc 100644 --- a/bin/node/bench/src/core.rs +++ b/bin/node/bench/src/core.rs @@ -16,8 +16,11 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use std::{fmt, borrow::{Cow, ToOwned}}; use serde::Serialize; +use std::{ + borrow::{Cow, ToOwned}, + fmt, +}; pub struct Path(Vec); @@ -33,7 +36,11 @@ impl Path { } pub fn full(&self) -> String { - self.0.iter().fold(String::new(), |mut val, next| { val.push_str("::"); val.push_str(next); val }) + self.0.iter().fold(String::new(), |mut val, next| { + val.push_str("::"); + val.push_str(next); + val + }) } pub fn has(&self, path: &str) -> bool { @@ -115,10 +122,7 @@ impl fmt::Display for BenchmarkOutput { } } -pub fn run_benchmark( - benchmark: Box, - mode: Mode, -) -> BenchmarkOutput { +pub fn run_benchmark(benchmark: Box, mode: Mode) -> BenchmarkOutput { let name = benchmark.name().to_owned(); let mut benchmark = benchmark.setup(); @@ -133,11 +137,7 @@ pub fn run_benchmark( let raw_average = (durations.iter().sum::() / (durations.len() as u128)) as u64; let average = (durations.iter().skip(10).take(30).sum::() / 30) as u64; - BenchmarkOutput { - name: name.into(), - raw_average, - average, - } + BenchmarkOutput { name: name.into(), raw_average, average } } macro_rules! matrix( diff --git a/bin/node/bench/src/generator.rs b/bin/node/bench/src/generator.rs index c540ae147c9f0..e3aa1192b5d1f 100644 --- a/bin/node/bench/src/generator.rs +++ b/bin/node/bench/src/generator.rs @@ -30,14 +30,15 @@ use crate::simple_trie::SimpleTrie; /// return root. 
pub fn generate_trie( db: Arc, - key_values: impl IntoIterator, Vec)>, + key_values: impl IntoIterator, Vec)>, ) -> Hash { let mut root = Hash::default(); let (db, overlay) = { let mut overlay = HashMap::new(); overlay.insert( - hex::decode("03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314").expect("null key is valid"), + hex::decode("03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314") + .expect("null key is valid"), Some(vec![0]), ); let mut trie = SimpleTrie { db, overlay: &mut overlay }; @@ -50,7 +51,7 @@ pub fn generate_trie( trie_db.commit(); } - ( trie.db, overlay ) + (trie.db, overlay) }; let mut transaction = db.transaction(); diff --git a/bin/node/bench/src/import.rs b/bin/node/bench/src/import.rs index b4fee58dac025..a4056b49f7f44 100644 --- a/bin/node/bench/src/import.rs +++ b/bin/node/bench/src/import.rs @@ -32,15 +32,15 @@ use std::borrow::Cow; -use node_testing::bench::{BenchDb, Profile, BlockType, KeyTypes, DatabaseType}; use node_primitives::Block; +use node_testing::bench::{BenchDb, BlockType, DatabaseType, KeyTypes, Profile}; use sc_client_api::backend::Backend; use sp_runtime::generic::BlockId; use sp_state_machine::InspectState; use crate::{ common::SizeType, - core::{self, Path, Mode}, + core::{self, Mode, Path}, }; pub struct ImportBenchmarkDescription { @@ -60,7 +60,6 @@ pub struct ImportBenchmark { impl core::BenchmarkDescription for ImportBenchmarkDescription { fn path(&self) -> Path { - let mut path = Path::new(&["node", "import"]); match self.profile { @@ -91,11 +90,7 @@ impl core::BenchmarkDescription for ImportBenchmarkDescription { fn setup(self: Box) -> Box { let profile = self.profile; - let mut bench_db = BenchDb::with_key_types( - self.database_type, - 50_000, - self.key_types - ); + let mut bench_db = BenchDb::with_key_types(self.database_type, 50_000, self.key_types); let block = bench_db.generate_block(self.block_type.to_content(self.size.transactions())); Box::new(ImportBenchmark { database: 
bench_db, @@ -108,11 +103,9 @@ impl core::BenchmarkDescription for ImportBenchmarkDescription { fn name(&self) -> Cow<'static, str> { format!( "Block import ({:?}/{}, {:?}, {:?} backend)", - self.block_type, - self.size, - self.profile, - self.database_type, - ).into() + self.block_type, self.size, self.profile, self.database_type, + ) + .into() } } @@ -120,7 +113,9 @@ impl core::Benchmark for ImportBenchmark { fn run(&mut self, mode: Mode) -> std::time::Duration { let mut context = self.database.create_context(self.profile); - let _ = context.client.runtime_version_at(&BlockId::Number(0)) + let _ = context + .client + .runtime_version_at(&BlockId::Number(0)) .expect("Failed to get runtime version") .spec_version; @@ -133,7 +128,8 @@ impl core::Benchmark for ImportBenchmark { let elapsed = start.elapsed(); // Sanity checks. - context.client + context + .client .state_at(&BlockId::number(1)) .expect("state_at failed for block#1") .inspect_state(|| { @@ -155,19 +151,17 @@ impl core::Benchmark for ImportBenchmark { BlockType::Noop => { assert_eq!( node_runtime::System::events().len(), - // should be 2 per signed extrinsic + 1 per unsigned // we have 1 unsigned and the rest are signed in the block // those 2 events per signed are: // - deposit event for charging transaction fee // - extrinsic success - (self.block.extrinsics.len() - 1) * 2 + 1, + (self.block.extrinsics.len() - 1) * 2 + 1, ); }, _ => {}, } - } - ); + }); if mode == Mode::Profile { std::thread::park_timeout(std::time::Duration::from_secs(1)); diff --git a/bin/node/bench/src/main.rs b/bin/node/bench/src/main.rs index 40e9e1577777e..4b006b387d0ea 100644 --- a/bin/node/bench/src/main.rs +++ b/bin/node/bench/src/main.rs @@ -18,9 +18,10 @@ mod common; mod construct; -#[macro_use] mod core; -mod import; +#[macro_use] +mod core; mod generator; +mod import; mod simple_trie; mod state_sizes; mod tempdb; @@ -29,15 +30,15 @@ mod txpool; use structopt::StructOpt; -use node_testing::bench::{Profile, KeyTypes, 
BlockType, DatabaseType as BenchDataBaseType}; +use node_testing::bench::{BlockType, DatabaseType as BenchDataBaseType, KeyTypes, Profile}; use crate::{ common::SizeType, + construct::ConstructionBenchmarkDescription, core::{run_benchmark, Mode as BenchmarkMode}, - tempdb::DatabaseType, import::ImportBenchmarkDescription, - trie::{TrieReadBenchmarkDescription, TrieWriteBenchmarkDescription, DatabaseSize}, - construct::ConstructionBenchmarkDescription, + tempdb::DatabaseType, + trie::{DatabaseSize, TrieReadBenchmarkDescription, TrieWriteBenchmarkDescription}, txpool::PoolBenchmarkDescription, }; @@ -92,14 +93,25 @@ fn main() { SizeType::Large, SizeType::Full, SizeType::Custom(opt.transactions.unwrap_or(0)), - ].iter() { + ] + .iter() + { for block_type in [ BlockType::RandomTransfersKeepAlive, BlockType::RandomTransfersReaping, BlockType::Noop, - ].iter() { - for database_type in [BenchDataBaseType::RocksDb, BenchDataBaseType::ParityDb].iter() { - import_benchmarks.push((profile, size.clone(), block_type.clone(), database_type)); + ] + .iter() + { + for database_type in + [BenchDataBaseType::RocksDb, BenchDataBaseType::ParityDb].iter() + { + import_benchmarks.push(( + profile, + size.clone(), + block_type.clone(), + database_type, + )); } } } @@ -163,7 +175,7 @@ fn main() { println!("{}: {}", benchmark.name(), benchmark.path().full()) } } - return; + return } let mut results = Vec::new(); @@ -183,7 +195,8 @@ fn main() { } if opt.json { - let json_result: String = serde_json::to_string(&results).expect("Failed to construct json"); + let json_result: String = + serde_json::to_string(&results).expect("Failed to construct json"); println!("{}", json_result); } } diff --git a/bin/node/bench/src/simple_trie.rs b/bin/node/bench/src/simple_trie.rs index a29b51a38af58..651772c71575f 100644 --- a/bin/node/bench/src/simple_trie.rs +++ b/bin/node/bench/src/simple_trie.rs @@ -18,10 +18,10 @@ use std::{collections::HashMap, sync::Arc}; +use hash_db::{AsHashDB, HashDB, Hasher as 
_, Prefix}; use kvdb::KeyValueDB; use node_primitives::Hash; use sp_trie::DBValue; -use hash_db::{HashDB, AsHashDB, Prefix, Hasher as _}; pub type Hasher = sp_core::Blake2Hasher; @@ -32,7 +32,9 @@ pub struct SimpleTrie<'a> { } impl<'a> AsHashDB for SimpleTrie<'a> { - fn as_hash_db(&self) -> &dyn hash_db::HashDB { &*self } + fn as_hash_db(&self) -> &dyn hash_db::HashDB { + &*self + } fn as_hash_db_mut<'b>(&'b mut self) -> &'b mut (dyn HashDB + 'b) { &mut *self @@ -43,7 +45,7 @@ impl<'a> HashDB for SimpleTrie<'a> { fn get(&self, key: &Hash, prefix: Prefix) -> Option { let key = sp_trie::prefixed_key::(key, prefix); if let Some(value) = self.overlay.get(&key) { - return value.clone(); + return value.clone() } self.db.get(0, &key).expect("Database backend error") } diff --git a/bin/node/bench/src/state_sizes.rs b/bin/node/bench/src/state_sizes.rs index f9288c1054898..27112ed42d455 100644 --- a/bin/node/bench/src/state_sizes.rs +++ b/bin/node/bench/src/state_sizes.rs @@ -17,7 +17,7 @@ // along with this program. If not, see . /// Kusama value size distribution -pub const KUSAMA_STATE_DISTRIBUTION: &'static[(u32, u32)] = &[ +pub const KUSAMA_STATE_DISTRIBUTION: &'static [(u32, u32)] = &[ (32, 35), (33, 20035), (34, 5369), diff --git a/bin/node/bench/src/tempdb.rs b/bin/node/bench/src/tempdb.rs index 31ef71fba7b5e..3c1c0f250e49f 100644 --- a/bin/node/bench/src/tempdb.rs +++ b/bin/node/bench/src/tempdb.rs @@ -16,9 +16,9 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use kvdb::{DBTransaction, KeyValueDB}; +use kvdb_rocksdb::{Database, DatabaseConfig}; use std::{io, path::PathBuf, sync::Arc}; -use kvdb::{KeyValueDB, DBTransaction}; -use kvdb_rocksdb::{DatabaseConfig, Database}; #[derive(Debug, Clone, Copy, derive_more::Display)] pub enum DatabaseType { @@ -44,13 +44,14 @@ impl KeyValueDB for ParityDbWrapper { /// Write a transaction of changes to the buffer. 
fn write(&self, transaction: DBTransaction) -> io::Result<()> { - self.0.commit( - transaction.ops.iter().map(|op| match op { - kvdb::DBOp::Insert { col, key, value } => (*col as u8, &key[key.len() - 32..], Some(value.to_vec())), + self.0 + .commit(transaction.ops.iter().map(|op| match op { + kvdb::DBOp::Insert { col, key, value } => + (*col as u8, &key[key.len() - 32..], Some(value.to_vec())), kvdb::DBOp::Delete { col, key } => (*col as u8, &key[key.len() - 32..], None), - kvdb::DBOp::DeletePrefix { col: _, prefix: _ } => unimplemented!() - }) - ).expect("db error"); + kvdb::DBOp::DeletePrefix { col: _, prefix: _ } => unimplemented!(), + })) + .expect("db error"); Ok(()) } @@ -90,21 +91,19 @@ impl TempDatabase { match db_type { DatabaseType::RocksDb => { let db_cfg = DatabaseConfig::with_columns(1); - let db = Database::open(&db_cfg, &self.0.path().to_string_lossy()).expect("Database backend error"); + let db = Database::open(&db_cfg, &self.0.path().to_string_lossy()) + .expect("Database backend error"); Arc::new(db) }, - DatabaseType::ParityDb => { - Arc::new(ParityDbWrapper({ - let mut options = parity_db::Options::with_columns(self.0.path(), 1); - let mut column_options = &mut options.columns[0]; - column_options.ref_counted = true; - column_options.preimage = true; - column_options.uniform = true; - parity_db::Db::open(&options).expect("db open error") - })) - } + DatabaseType::ParityDb => Arc::new(ParityDbWrapper({ + let mut options = parity_db::Options::with_columns(self.0.path(), 1); + let mut column_options = &mut options.columns[0]; + column_options.ref_counted = true; + column_options.preimage = true; + column_options.uniform = true; + parity_db::Db::open(&options).expect("db open error") + })), } - } } @@ -121,15 +120,10 @@ impl Clone for TempDatabase { ); let self_db_files = std::fs::read_dir(self_dir) .expect("failed to list file in seed dir") - .map(|f_result| - f_result.expect("failed to read file in seed db") - .path() - ).collect::>(); - 
fs_extra::copy_items( - &self_db_files, - new_dir.path(), - &fs_extra::dir::CopyOptions::new(), - ).expect("Copy of seed database is ok"); + .map(|f_result| f_result.expect("failed to read file in seed db").path()) + .collect::>(); + fs_extra::copy_items(&self_db_files, new_dir.path(), &fs_extra::dir::CopyOptions::new()) + .expect("Copy of seed database is ok"); TempDatabase(new_dir) } diff --git a/bin/node/bench/src/trie.rs b/bin/node/bench/src/trie.rs index a3e7620473d98..a17e386ca879b 100644 --- a/bin/node/bench/src/trie.rs +++ b/bin/node/bench/src/trie.rs @@ -18,13 +18,13 @@ //! Trie benchmark (integrated). -use std::{borrow::Cow, collections::HashMap, sync::Arc}; +use hash_db::Prefix; use kvdb::KeyValueDB; use lazy_static::lazy_static; use rand::Rng; -use hash_db::Prefix; use sp_state_machine::Backend as _; use sp_trie::{trie_types::TrieDBMut, TrieMut as _}; +use std::{borrow::Cow, collections::HashMap, sync::Arc}; use node_primitives::Hash; @@ -32,7 +32,7 @@ use crate::{ core::{self, Mode, Path}, generator::generate_trie, simple_trie::SimpleTrie, - tempdb::{TempDatabase, DatabaseType}, + tempdb::{DatabaseType, TempDatabase}, }; pub const SAMPLE_SIZE: usize = 100; @@ -142,10 +142,7 @@ impl core::BenchmarkDescription for TrieReadBenchmarkDescription { assert_eq!(warmup_keys.len(), SAMPLE_SIZE); assert_eq!(query_keys.len(), SAMPLE_SIZE); - let root = generate_trie( - database.open(self.database_type), - key_values, - ); + let root = generate_trie(database.open(self.database_type), key_values); Box::new(TrieReadBenchmark { database, @@ -162,7 +159,8 @@ impl core::BenchmarkDescription for TrieReadBenchmarkDescription { self.database_size, pretty_print(self.database_size.keys()), self.database_type, - ).into() + ) + .into() } } @@ -182,12 +180,10 @@ impl core::Benchmark for TrieReadBenchmark { let storage: Arc> = Arc::new(Storage(db.open(self.database_type))); - let trie_backend = sp_state_machine::TrieBackend::new( - storage, - self.root, - ); + let trie_backend = 
sp_state_machine::TrieBackend::new(storage, self.root); for (warmup_key, warmup_value) in self.warmup_keys.iter() { - let value = trie_backend.storage(&warmup_key[..]) + let value = trie_backend + .storage(&warmup_key[..]) .expect("Failed to get key: db error") .expect("Warmup key should exist"); @@ -218,7 +214,6 @@ pub struct TrieWriteBenchmarkDescription { pub database_type: DatabaseType, } - impl core::BenchmarkDescription for TrieWriteBenchmarkDescription { fn path(&self) -> Path { let mut path = Path::new(&["trie", "write"]); @@ -253,10 +248,7 @@ impl core::BenchmarkDescription for TrieWriteBenchmarkDescription { assert_eq!(warmup_keys.len(), SAMPLE_SIZE); - let root = generate_trie( - database.open(self.database_type), - key_values, - ); + let root = generate_trie(database.open(self.database_type), key_values); Box::new(TrieWriteBenchmark { database, @@ -272,7 +264,8 @@ impl core::BenchmarkDescription for TrieWriteBenchmarkDescription { self.database_size, pretty_print(self.database_size.keys()), self.database_type, - ).into() + ) + .into() } } @@ -292,15 +285,13 @@ impl core::Benchmark for TrieWriteBenchmark { let mut new_root = self.root.clone(); let mut overlay = HashMap::new(); - let mut trie = SimpleTrie { - db: kvdb.clone(), - overlay: &mut overlay, - }; - let mut trie_db_mut = TrieDBMut::from_existing(&mut trie, &mut new_root) - .expect("Failed to create TrieDBMut"); + let mut trie = SimpleTrie { db: kvdb.clone(), overlay: &mut overlay }; + let mut trie_db_mut = + TrieDBMut::from_existing(&mut trie, &mut new_root).expect("Failed to create TrieDBMut"); for (warmup_key, warmup_value) in self.warmup_keys.iter() { - let value = trie_db_mut.get(&warmup_key[..]) + let value = trie_db_mut + .get(&warmup_key[..]) .expect("Failed to get key: db error") .expect("Warmup key should exist"); @@ -367,7 +358,9 @@ impl SizePool { fn value(&self, rng: &mut R) -> Vec { let sr = (rng.next_u64() % self.total as u64) as u32; - let mut range = 
self.distribution.range((std::ops::Bound::Included(sr), std::ops::Bound::Unbounded)); + let mut range = self + .distribution + .range((std::ops::Bound::Included(sr), std::ops::Bound::Unbounded)); let size = *range.next().unwrap().1 as usize; random_vec(rng, size) } diff --git a/bin/node/bench/src/txpool.rs b/bin/node/bench/src/txpool.rs index ef1c816109c8d..b0db734534855 100644 --- a/bin/node/bench/src/txpool.rs +++ b/bin/node/bench/src/txpool.rs @@ -23,13 +23,13 @@ use std::borrow::Cow; -use node_testing::bench::{BenchDb, Profile, BlockType, KeyTypes, DatabaseType}; +use node_testing::bench::{BenchDb, BlockType, DatabaseType, KeyTypes, Profile}; use sc_transaction_pool::BasicPool; -use sp_runtime::generic::BlockId; use sc_transaction_pool_api::{TransactionPool, TransactionSource}; +use sp_runtime::generic::BlockId; -use crate::core::{self, Path, Mode}; +use crate::core::{self, Mode, Path}; pub struct PoolBenchmarkDescription { pub database_type: DatabaseType, @@ -46,11 +46,7 @@ impl core::BenchmarkDescription for PoolBenchmarkDescription { fn setup(self: Box) -> Box { Box::new(PoolBenchmark { - database: BenchDb::with_key_types( - self.database_type, - 50_000, - KeyTypes::Sr25519, - ), + database: BenchDb::with_key_types(self.database_type, 50_000, KeyTypes::Sr25519), }) } @@ -63,7 +59,9 @@ impl core::Benchmark for PoolBenchmark { fn run(&mut self, mode: Mode) -> std::time::Duration { let context = self.database.create_context(Profile::Wasm); - let _ = context.client.runtime_version_at(&BlockId::Number(0)) + let _ = context + .client + .runtime_version_at(&BlockId::Number(0)) .expect("Failed to get runtime version") .spec_version; @@ -80,22 +78,20 @@ impl core::Benchmark for PoolBenchmark { context.client.clone(), ); - let generated_transactions = self.database.block_content( - BlockType::RandomTransfersKeepAlive.to_content(Some(100)), - &context.client, - ).into_iter().collect::>(); + let generated_transactions = self + .database + .block_content( + 
BlockType::RandomTransfersKeepAlive.to_content(Some(100)), + &context.client, + ) + .into_iter() + .collect::>(); let start = std::time::Instant::now(); - let submissions = generated_transactions.into_iter().map(|tx| { - txpool.submit_one( - &BlockId::Number(0), - TransactionSource::External, - tx, - ) - }); - futures::executor::block_on( - futures::future::join_all(submissions) - ); + let submissions = generated_transactions + .into_iter() + .map(|tx| txpool.submit_one(&BlockId::Number(0), TransactionSource::External, tx)); + futures::executor::block_on(futures::future::join_all(submissions)); let elapsed = start.elapsed(); if mode == Mode::Profile { diff --git a/bin/node/browser-testing/src/lib.rs b/bin/node/browser-testing/src/lib.rs index a269e9cab21e4..35804bef2168e 100644 --- a/bin/node/browser-testing/src/lib.rs +++ b/bin/node/browser-testing/src/lib.rs @@ -28,11 +28,11 @@ //! flag and open a browser to the url that `wasm-pack test` outputs. //! For more infomation see . -use wasm_bindgen_test::{wasm_bindgen_test, wasm_bindgen_test_configure}; -use wasm_bindgen_futures::JsFuture; -use wasm_bindgen::JsValue; -use jsonrpc_core::types::{MethodCall, Success, Version, Params, Id}; +use jsonrpc_core::types::{Id, MethodCall, Params, Success, Version}; use serde::de::DeserializeOwned; +use wasm_bindgen::JsValue; +use wasm_bindgen_futures::JsFuture; +use wasm_bindgen_test::{wasm_bindgen_test, wasm_bindgen_test_configure}; wasm_bindgen_test_configure!(run_in_browser); @@ -41,8 +41,9 @@ fn rpc_call(method: &str) -> String { jsonrpc: Some(Version::V2), method: method.into(), params: Params::None, - id: Id::Num(1) - }).unwrap() + id: Id::Num(1), + }) + .unwrap() } fn deserialize_rpc_result(js_value: JsValue) -> T { @@ -55,15 +56,12 @@ fn deserialize_rpc_result(js_value: JsValue) -> T { #[wasm_bindgen_test] async fn runs() { - let mut client = node_cli::start_client(None, "info".into()) - .unwrap(); + let mut client = node_cli::start_client(None, "info".into()).unwrap(); 
// Check that the node handles rpc calls. // TODO: Re-add the code that checks if the node is syncing. let chain_name: String = deserialize_rpc_result( - JsFuture::from(client.rpc_send(&rpc_call("system_chain"))) - .await - .unwrap() + JsFuture::from(client.rpc_send(&rpc_call("system_chain"))).await.unwrap(), ); assert_eq!(chain_name, "Development"); } diff --git a/bin/node/cli/build.rs b/bin/node/cli/build.rs index befcdaea6d9cf..90aec2222c9ec 100644 --- a/bin/node/cli/build.rs +++ b/bin/node/cli/build.rs @@ -25,8 +25,8 @@ fn main() { mod cli { include!("src/cli.rs"); - use std::{fs, env, path::Path}; use sc_cli::structopt::clap::Shell; + use std::{env, fs, path::Path}; use substrate_build_script_utils::{generate_cargo_keys, rerun_if_git_head_changed}; pub fn main() { @@ -51,9 +51,12 @@ mod cli { Some(dir) => dir, }; let path = Path::new(&outdir) - .parent().unwrap() - .parent().unwrap() - .parent().unwrap() + .parent() + .unwrap() + .parent() + .unwrap() + .parent() + .unwrap() .join("completion-scripts"); fs::create_dir(&path).ok(); diff --git a/bin/node/cli/src/browser.rs b/bin/node/cli/src/browser.rs index 82f1921d2a6b5..dee93180e70d5 100644 --- a/bin/node/cli/src/browser.rs +++ b/bin/node/cli/src/browser.rs @@ -17,18 +17,14 @@ // along with this program. If not, see . use crate::chain_spec::ChainSpec; +use browser_utils::{browser_configuration, init_logging, set_console_error_panic_hook, Client}; use log::info; use wasm_bindgen::prelude::*; -use browser_utils::{ - Client, - browser_configuration, init_logging, set_console_error_panic_hook, -}; /// Starts the client. #[wasm_bindgen] pub fn start_client(chain_spec: Option, log_level: String) -> Result { - start_inner(chain_spec, log_level) - .map_err(|err| JsValue::from_str(&err.to_string())) + start_inner(chain_spec, log_level).map_err(|err| JsValue::from_str(&err.to_string())) } fn start_inner( @@ -53,10 +49,9 @@ fn start_inner( info!("👤 Role: {:?}", config.role); // Create the service. 
This is the most heavy initialization step. - let (task_manager, rpc_handlers) = - crate::service::new_light_base(config) - .map(|(components, rpc_handlers, _, _, _)| (components, rpc_handlers)) - .map_err(|e| format!("{:?}", e))?; + let (task_manager, rpc_handlers) = crate::service::new_light_base(config) + .map(|(components, rpc_handlers, _, _, _)| (components, rpc_handlers)) + .map_err(|e| format!("{:?}", e))?; Ok(browser_utils::start_client(task_manager, rpc_handlers)) } diff --git a/bin/node/cli/src/chain_spec.rs b/bin/node/cli/src/chain_spec.rs index e3ba16b9de6f3..2891736e5c225 100644 --- a/bin/node/cli/src/chain_spec.rs +++ b/bin/node/cli/src/chain_spec.rs @@ -18,25 +18,26 @@ //! Substrate chain configurations. -use sc_chain_spec::ChainSpecExtension; -use sp_core::{Pair, Public, crypto::UncheckedInto, sr25519}; -use serde::{Serialize, Deserialize}; +use grandpa_primitives::AuthorityId as GrandpaId; +use hex_literal::hex; use node_runtime::{ - AuthorityDiscoveryConfig, BabeConfig, BalancesConfig, CouncilConfig, - DemocracyConfig, GrandpaConfig, ImOnlineConfig, SessionConfig, SessionKeys, StakerStatus, - StakingConfig, ElectionsConfig, IndicesConfig, SocietyConfig, SudoConfig, SystemConfig, - TechnicalCommitteeConfig, wasm_binary_unwrap, MAX_NOMINATIONS, + constants::currency::*, wasm_binary_unwrap, AuthorityDiscoveryConfig, BabeConfig, + BalancesConfig, Block, CouncilConfig, DemocracyConfig, ElectionsConfig, GrandpaConfig, + ImOnlineConfig, IndicesConfig, SessionConfig, SessionKeys, SocietyConfig, StakerStatus, + StakingConfig, SudoConfig, SystemConfig, TechnicalCommitteeConfig, MAX_NOMINATIONS, }; -use node_runtime::Block; -use node_runtime::constants::currency::*; +use pallet_im_online::sr25519::AuthorityId as ImOnlineId; +use sc_chain_spec::ChainSpecExtension; use sc_service::ChainType; -use hex_literal::hex; use sc_telemetry::TelemetryEndpoints; -use grandpa_primitives::{AuthorityId as GrandpaId}; -use sp_consensus_babe::{AuthorityId as BabeId}; -use 
pallet_im_online::sr25519::{AuthorityId as ImOnlineId}; +use serde::{Deserialize, Serialize}; use sp_authority_discovery::AuthorityId as AuthorityDiscoveryId; -use sp_runtime::{Perbill, traits::{Verify, IdentifyAccount}}; +use sp_consensus_babe::AuthorityId as BabeId; +use sp_core::{crypto::UncheckedInto, sr25519, Pair, Public}; +use sp_runtime::{ + traits::{IdentifyAccount, Verify}, + Perbill, +}; pub use node_primitives::{AccountId, Balance, Signature}; pub use node_runtime::GenesisConfig; @@ -59,10 +60,7 @@ pub struct Extensions { } /// Specialized `ChainSpec`. -pub type ChainSpec = sc_service::GenericChainSpec< - GenesisConfig, - Extensions, ->; +pub type ChainSpec = sc_service::GenericChainSpec; /// Flaming Fir testnet generator pub fn flaming_fir_config() -> Result { ChainSpec::from_json_bytes(&include_bytes!("../res/flaming-fir.json")[..]) @@ -84,65 +82,94 @@ fn staging_testnet_config_genesis() -> GenesisConfig { // and // for i in 1 2 3 4 ; do for j in session; do subkey --ed25519 inspect "$secret"//fir//$j//$i; done; done - let initial_authorities: Vec<(AccountId, AccountId, GrandpaId, BabeId, ImOnlineId, AuthorityDiscoveryId)> = vec![( - // 5Fbsd6WXDGiLTxunqeK5BATNiocfCqu9bS1yArVjCgeBLkVy - hex!["9c7a2ee14e565db0c69f78c7b4cd839fbf52b607d867e9e9c5a79042898a0d12"].into(), - // 5EnCiV7wSHeNhjW3FSUwiJNkcc2SBkPLn5Nj93FmbLtBjQUq - hex!["781ead1e2fa9ccb74b44c19d29cb2a7a4b5be3972927ae98cd3877523976a276"].into(), - // 5Fb9ayurnxnaXj56CjmyQLBiadfRCqUbL2VWNbbe1nZU6wiC - hex!["9becad03e6dcac03cee07edebca5475314861492cdfc96a2144a67bbe9699332"].unchecked_into(), - // 5EZaeQ8djPcq9pheJUhgerXQZt9YaHnMJpiHMRhwQeinqUW8 - hex!["6e7e4eb42cbd2e0ab4cae8708ce5509580b8c04d11f6758dbf686d50fe9f9106"].unchecked_into(), - // 5EZaeQ8djPcq9pheJUhgerXQZt9YaHnMJpiHMRhwQeinqUW8 - hex!["6e7e4eb42cbd2e0ab4cae8708ce5509580b8c04d11f6758dbf686d50fe9f9106"].unchecked_into(), - // 5EZaeQ8djPcq9pheJUhgerXQZt9YaHnMJpiHMRhwQeinqUW8 - 
hex!["6e7e4eb42cbd2e0ab4cae8708ce5509580b8c04d11f6758dbf686d50fe9f9106"].unchecked_into(), - ),( - // 5ERawXCzCWkjVq3xz1W5KGNtVx2VdefvZ62Bw1FEuZW4Vny2 - hex!["68655684472b743e456907b398d3a44c113f189e56d1bbfd55e889e295dfde78"].into(), - // 5Gc4vr42hH1uDZc93Nayk5G7i687bAQdHHc9unLuyeawHipF - hex!["c8dc79e36b29395413399edaec3e20fcca7205fb19776ed8ddb25d6f427ec40e"].into(), - // 5EockCXN6YkiNCDjpqqnbcqd4ad35nU4RmA1ikM4YeRN4WcE - hex!["7932cff431e748892fa48e10c63c17d30f80ca42e4de3921e641249cd7fa3c2f"].unchecked_into(), - // 5DhLtiaQd1L1LU9jaNeeu9HJkP6eyg3BwXA7iNMzKm7qqruQ - hex!["482dbd7297a39fa145c570552249c2ca9dd47e281f0c500c971b59c9dcdcd82e"].unchecked_into(), - // 5DhLtiaQd1L1LU9jaNeeu9HJkP6eyg3BwXA7iNMzKm7qqruQ - hex!["482dbd7297a39fa145c570552249c2ca9dd47e281f0c500c971b59c9dcdcd82e"].unchecked_into(), - // 5DhLtiaQd1L1LU9jaNeeu9HJkP6eyg3BwXA7iNMzKm7qqruQ - hex!["482dbd7297a39fa145c570552249c2ca9dd47e281f0c500c971b59c9dcdcd82e"].unchecked_into(), - ),( - // 5DyVtKWPidondEu8iHZgi6Ffv9yrJJ1NDNLom3X9cTDi98qp - hex!["547ff0ab649283a7ae01dbc2eb73932eba2fb09075e9485ff369082a2ff38d65"].into(), - // 5FeD54vGVNpFX3PndHPXJ2MDakc462vBCD5mgtWRnWYCpZU9 - hex!["9e42241d7cd91d001773b0b616d523dd80e13c6c2cab860b1234ef1b9ffc1526"].into(), - // 5E1jLYfLdUQKrFrtqoKgFrRvxM3oQPMbf6DfcsrugZZ5Bn8d - hex!["5633b70b80a6c8bb16270f82cca6d56b27ed7b76c8fd5af2986a25a4788ce440"].unchecked_into(), - // 5DhKqkHRkndJu8vq7pi2Q5S3DfftWJHGxbEUNH43b46qNspH - hex!["482a3389a6cf42d8ed83888cfd920fec738ea30f97e44699ada7323f08c3380a"].unchecked_into(), - // 5DhKqkHRkndJu8vq7pi2Q5S3DfftWJHGxbEUNH43b46qNspH - hex!["482a3389a6cf42d8ed83888cfd920fec738ea30f97e44699ada7323f08c3380a"].unchecked_into(), - // 5DhKqkHRkndJu8vq7pi2Q5S3DfftWJHGxbEUNH43b46qNspH - hex!["482a3389a6cf42d8ed83888cfd920fec738ea30f97e44699ada7323f08c3380a"].unchecked_into(), - ),( - // 5HYZnKWe5FVZQ33ZRJK1rG3WaLMztxWrrNDb1JRwaHHVWyP9 - hex!["f26cdb14b5aec7b2789fd5ca80f979cef3761897ae1f37ffb3e154cbcc1c2663"].into(), - // 
5EPQdAQ39WQNLCRjWsCk5jErsCitHiY5ZmjfWzzbXDoAoYbn - hex!["66bc1e5d275da50b72b15de072a2468a5ad414919ca9054d2695767cf650012f"].into(), - // 5DMa31Hd5u1dwoRKgC4uvqyrdK45RHv3CpwvpUC1EzuwDit4 - hex!["3919132b851ef0fd2dae42a7e734fe547af5a6b809006100f48944d7fae8e8ef"].unchecked_into(), - // 5C4vDQxA8LTck2xJEy4Yg1hM9qjDt4LvTQaMo4Y8ne43aU6x - hex!["00299981a2b92f878baaf5dbeba5c18d4e70f2a1fcd9c61b32ea18daf38f4378"].unchecked_into(), - // 5C4vDQxA8LTck2xJEy4Yg1hM9qjDt4LvTQaMo4Y8ne43aU6x - hex!["00299981a2b92f878baaf5dbeba5c18d4e70f2a1fcd9c61b32ea18daf38f4378"].unchecked_into(), - // 5C4vDQxA8LTck2xJEy4Yg1hM9qjDt4LvTQaMo4Y8ne43aU6x - hex!["00299981a2b92f878baaf5dbeba5c18d4e70f2a1fcd9c61b32ea18daf38f4378"].unchecked_into(), - )]; + let initial_authorities: Vec<( + AccountId, + AccountId, + GrandpaId, + BabeId, + ImOnlineId, + AuthorityDiscoveryId, + )> = vec![ + ( + // 5Fbsd6WXDGiLTxunqeK5BATNiocfCqu9bS1yArVjCgeBLkVy + hex!["9c7a2ee14e565db0c69f78c7b4cd839fbf52b607d867e9e9c5a79042898a0d12"].into(), + // 5EnCiV7wSHeNhjW3FSUwiJNkcc2SBkPLn5Nj93FmbLtBjQUq + hex!["781ead1e2fa9ccb74b44c19d29cb2a7a4b5be3972927ae98cd3877523976a276"].into(), + // 5Fb9ayurnxnaXj56CjmyQLBiadfRCqUbL2VWNbbe1nZU6wiC + hex!["9becad03e6dcac03cee07edebca5475314861492cdfc96a2144a67bbe9699332"] + .unchecked_into(), + // 5EZaeQ8djPcq9pheJUhgerXQZt9YaHnMJpiHMRhwQeinqUW8 + hex!["6e7e4eb42cbd2e0ab4cae8708ce5509580b8c04d11f6758dbf686d50fe9f9106"] + .unchecked_into(), + // 5EZaeQ8djPcq9pheJUhgerXQZt9YaHnMJpiHMRhwQeinqUW8 + hex!["6e7e4eb42cbd2e0ab4cae8708ce5509580b8c04d11f6758dbf686d50fe9f9106"] + .unchecked_into(), + // 5EZaeQ8djPcq9pheJUhgerXQZt9YaHnMJpiHMRhwQeinqUW8 + hex!["6e7e4eb42cbd2e0ab4cae8708ce5509580b8c04d11f6758dbf686d50fe9f9106"] + .unchecked_into(), + ), + ( + // 5ERawXCzCWkjVq3xz1W5KGNtVx2VdefvZ62Bw1FEuZW4Vny2 + hex!["68655684472b743e456907b398d3a44c113f189e56d1bbfd55e889e295dfde78"].into(), + // 5Gc4vr42hH1uDZc93Nayk5G7i687bAQdHHc9unLuyeawHipF + 
hex!["c8dc79e36b29395413399edaec3e20fcca7205fb19776ed8ddb25d6f427ec40e"].into(), + // 5EockCXN6YkiNCDjpqqnbcqd4ad35nU4RmA1ikM4YeRN4WcE + hex!["7932cff431e748892fa48e10c63c17d30f80ca42e4de3921e641249cd7fa3c2f"] + .unchecked_into(), + // 5DhLtiaQd1L1LU9jaNeeu9HJkP6eyg3BwXA7iNMzKm7qqruQ + hex!["482dbd7297a39fa145c570552249c2ca9dd47e281f0c500c971b59c9dcdcd82e"] + .unchecked_into(), + // 5DhLtiaQd1L1LU9jaNeeu9HJkP6eyg3BwXA7iNMzKm7qqruQ + hex!["482dbd7297a39fa145c570552249c2ca9dd47e281f0c500c971b59c9dcdcd82e"] + .unchecked_into(), + // 5DhLtiaQd1L1LU9jaNeeu9HJkP6eyg3BwXA7iNMzKm7qqruQ + hex!["482dbd7297a39fa145c570552249c2ca9dd47e281f0c500c971b59c9dcdcd82e"] + .unchecked_into(), + ), + ( + // 5DyVtKWPidondEu8iHZgi6Ffv9yrJJ1NDNLom3X9cTDi98qp + hex!["547ff0ab649283a7ae01dbc2eb73932eba2fb09075e9485ff369082a2ff38d65"].into(), + // 5FeD54vGVNpFX3PndHPXJ2MDakc462vBCD5mgtWRnWYCpZU9 + hex!["9e42241d7cd91d001773b0b616d523dd80e13c6c2cab860b1234ef1b9ffc1526"].into(), + // 5E1jLYfLdUQKrFrtqoKgFrRvxM3oQPMbf6DfcsrugZZ5Bn8d + hex!["5633b70b80a6c8bb16270f82cca6d56b27ed7b76c8fd5af2986a25a4788ce440"] + .unchecked_into(), + // 5DhKqkHRkndJu8vq7pi2Q5S3DfftWJHGxbEUNH43b46qNspH + hex!["482a3389a6cf42d8ed83888cfd920fec738ea30f97e44699ada7323f08c3380a"] + .unchecked_into(), + // 5DhKqkHRkndJu8vq7pi2Q5S3DfftWJHGxbEUNH43b46qNspH + hex!["482a3389a6cf42d8ed83888cfd920fec738ea30f97e44699ada7323f08c3380a"] + .unchecked_into(), + // 5DhKqkHRkndJu8vq7pi2Q5S3DfftWJHGxbEUNH43b46qNspH + hex!["482a3389a6cf42d8ed83888cfd920fec738ea30f97e44699ada7323f08c3380a"] + .unchecked_into(), + ), + ( + // 5HYZnKWe5FVZQ33ZRJK1rG3WaLMztxWrrNDb1JRwaHHVWyP9 + hex!["f26cdb14b5aec7b2789fd5ca80f979cef3761897ae1f37ffb3e154cbcc1c2663"].into(), + // 5EPQdAQ39WQNLCRjWsCk5jErsCitHiY5ZmjfWzzbXDoAoYbn + hex!["66bc1e5d275da50b72b15de072a2468a5ad414919ca9054d2695767cf650012f"].into(), + // 5DMa31Hd5u1dwoRKgC4uvqyrdK45RHv3CpwvpUC1EzuwDit4 + hex!["3919132b851ef0fd2dae42a7e734fe547af5a6b809006100f48944d7fae8e8ef"] + .unchecked_into(), + 
// 5C4vDQxA8LTck2xJEy4Yg1hM9qjDt4LvTQaMo4Y8ne43aU6x + hex!["00299981a2b92f878baaf5dbeba5c18d4e70f2a1fcd9c61b32ea18daf38f4378"] + .unchecked_into(), + // 5C4vDQxA8LTck2xJEy4Yg1hM9qjDt4LvTQaMo4Y8ne43aU6x + hex!["00299981a2b92f878baaf5dbeba5c18d4e70f2a1fcd9c61b32ea18daf38f4378"] + .unchecked_into(), + // 5C4vDQxA8LTck2xJEy4Yg1hM9qjDt4LvTQaMo4Y8ne43aU6x + hex!["00299981a2b92f878baaf5dbeba5c18d4e70f2a1fcd9c61b32ea18daf38f4378"] + .unchecked_into(), + ), + ]; // generated with secret: subkey inspect "$secret"/fir let root_key: AccountId = hex![ // 5Ff3iXP75ruzroPWRP2FYBHWnmGGBSb63857BgnzCoXNxfPo "9ee5e5bdc0ec239eb164f865ecc345ce4c88e76ee002e0f7e318097347471809" - ].into(); + ] + .into(); let endowed_accounts: Vec = vec![root_key.clone()]; @@ -158,8 +185,10 @@ pub fn staging_testnet_config() -> ChainSpec { ChainType::Live, staging_testnet_config_genesis, boot_nodes, - Some(TelemetryEndpoints::new(vec![(STAGING_TELEMETRY_URL.to_string(), 0)]) - .expect("Staging telemetry url is valid; qed")), + Some( + TelemetryEndpoints::new(vec![(STAGING_TELEMETRY_URL.to_string(), 0)]) + .expect("Staging telemetry url is valid; qed"), + ), None, None, Default::default(), @@ -174,21 +203,17 @@ pub fn get_from_seed(seed: &str) -> ::Pu } /// Helper function to generate an account ID from seed -pub fn get_account_id_from_seed(seed: &str) -> AccountId where - AccountPublic: From<::Public> +pub fn get_account_id_from_seed(seed: &str) -> AccountId +where + AccountPublic: From<::Public>, { AccountPublic::from(get_from_seed::(seed)).into_account() } /// Helper function to generate stash, controller and session key from seed -pub fn authority_keys_from_seed(seed: &str) -> ( - AccountId, - AccountId, - GrandpaId, - BabeId, - ImOnlineId, - AuthorityDiscoveryId, -) { +pub fn authority_keys_from_seed( + seed: &str, +) -> (AccountId, AccountId, GrandpaId, BabeId, ImOnlineId, AuthorityDiscoveryId) { ( get_account_id_from_seed::(&format!("{}//stash", seed)), get_account_id_from_seed::(seed), @@ -230,11 
+255,15 @@ pub fn testnet_genesis( ] }); // endow all authorities and nominators. - initial_authorities.iter().map(|x| &x.0).chain(initial_nominators.iter()).for_each(|x| { - if !endowed_accounts.contains(&x) { - endowed_accounts.push(x.clone()) - } - }); + initial_authorities + .iter() + .map(|x| &x.0) + .chain(initial_nominators.iter()) + .for_each(|x| { + if !endowed_accounts.contains(&x) { + endowed_accounts.push(x.clone()) + } + }); // stakers: all validators and nominators. let mut rng = rand::thread_rng(); @@ -266,22 +295,20 @@ pub fn testnet_genesis( changes_trie_config: Default::default(), }, balances: BalancesConfig { - balances: endowed_accounts.iter().cloned() - .map(|x| (x, ENDOWMENT)) - .collect() - }, - indices: IndicesConfig { - indices: vec![], + balances: endowed_accounts.iter().cloned().map(|x| (x, ENDOWMENT)).collect(), }, + indices: IndicesConfig { indices: vec![] }, session: SessionConfig { - keys: initial_authorities.iter().map(|x| { - (x.0.clone(), x.0.clone(), session_keys( - x.2.clone(), - x.3.clone(), - x.4.clone(), - x.5.clone(), - )) - }).collect::>(), + keys: initial_authorities + .iter() + .map(|x| { + ( + x.0.clone(), + x.0.clone(), + session_keys(x.2.clone(), x.3.clone(), x.4.clone(), x.5.clone()), + ) + }) + .collect::>(), }, staking: StakingConfig { validator_count: initial_authorities.len() as u32, @@ -289,47 +316,42 @@ pub fn testnet_genesis( invulnerables: initial_authorities.iter().map(|x| x.0.clone()).collect(), slash_reward_fraction: Perbill::from_percent(10), stakers, - .. 
Default::default() + ..Default::default() }, democracy: DemocracyConfig::default(), elections: ElectionsConfig { - members: endowed_accounts.iter() - .take((num_endowed_accounts + 1) / 2) - .cloned() - .map(|member| (member, STASH)) - .collect(), + members: endowed_accounts + .iter() + .take((num_endowed_accounts + 1) / 2) + .cloned() + .map(|member| (member, STASH)) + .collect(), }, council: CouncilConfig::default(), technical_committee: TechnicalCommitteeConfig { - members: endowed_accounts.iter() - .take((num_endowed_accounts + 1) / 2) - .cloned() - .collect(), + members: endowed_accounts + .iter() + .take((num_endowed_accounts + 1) / 2) + .cloned() + .collect(), phantom: Default::default(), }, - sudo: SudoConfig { - key: root_key, - }, + sudo: SudoConfig { key: root_key }, babe: BabeConfig { authorities: vec![], epoch_config: Some(node_runtime::BABE_GENESIS_EPOCH_CONFIG), }, - im_online: ImOnlineConfig { - keys: vec![], - }, - authority_discovery: AuthorityDiscoveryConfig { - keys: vec![], - }, - grandpa: GrandpaConfig { - authorities: vec![], - }, + im_online: ImOnlineConfig { keys: vec![] }, + authority_discovery: AuthorityDiscoveryConfig { keys: vec![] }, + grandpa: GrandpaConfig { authorities: vec![] }, technical_membership: Default::default(), treasury: Default::default(), society: SocietyConfig { - members: endowed_accounts.iter() - .take((num_endowed_accounts + 1) / 2) - .cloned() - .collect(), + members: endowed_accounts + .iter() + .take((num_endowed_accounts + 1) / 2) + .cloned() + .collect(), pot: 0, max_members: 999, }, @@ -341,9 +363,7 @@ pub fn testnet_genesis( fn development_config_genesis() -> GenesisConfig { testnet_genesis( - vec![ - authority_keys_from_seed("Alice"), - ], + vec![authority_keys_from_seed("Alice")], vec![], get_account_id_from_seed::("Alice"), None, @@ -367,10 +387,7 @@ pub fn development_config() -> ChainSpec { fn local_testnet_genesis() -> GenesisConfig { testnet_genesis( - vec![ - authority_keys_from_seed("Alice"), - 
authority_keys_from_seed("Bob"), - ], + vec![authority_keys_from_seed("Alice"), authority_keys_from_seed("Bob")], vec![], get_account_id_from_seed::("Alice"), None, @@ -401,9 +418,7 @@ pub(crate) mod tests { fn local_testnet_genesis_instant_single() -> GenesisConfig { testnet_genesis( - vec![ - authority_keys_from_seed("Alice"), - ], + vec![authority_keys_from_seed("Alice")], vec![], get_account_id_from_seed::("Alice"), None, @@ -446,14 +461,24 @@ pub(crate) mod tests { sc_service_test::connectivity( integration_test_config_with_two_authorities(), |config| { - let NewFullBase { task_manager, client, network, transaction_pool, .. } - = new_full_base(config,|_, _| ())?; - Ok(sc_service_test::TestNetComponents::new(task_manager, client, network, transaction_pool)) + let NewFullBase { task_manager, client, network, transaction_pool, .. } = + new_full_base(config, |_, _| ())?; + Ok(sc_service_test::TestNetComponents::new( + task_manager, + client, + network, + transaction_pool, + )) }, |config| { let (keep_alive, _, client, network, transaction_pool) = new_light_base(config)?; - Ok(sc_service_test::TestNetComponents::new(keep_alive, client, network, transaction_pool)) - } + Ok(sc_service_test::TestNetComponents::new( + keep_alive, + client, + network, + transaction_pool, + )) + }, ); } diff --git a/bin/node/cli/src/cli.rs b/bin/node/cli/src/cli.rs index 11ea58f4068df..850581748fde3 100644 --- a/bin/node/cli/src/cli.rs +++ b/bin/node/cli/src/cli.rs @@ -16,7 +16,7 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use sc_cli::{RunCmd, KeySubcommand, SignCmd, VanityCmd, VerifyCmd}; +use sc_cli::{KeySubcommand, RunCmd, SignCmd, VanityCmd, VerifyCmd}; use structopt::StructOpt; /// An overarching CLI command definition. 
diff --git a/bin/node/cli/src/command.rs b/bin/node/cli/src/command.rs index 1ef1da6ba6819..b904ea99e8f9f 100644 --- a/bin/node/cli/src/command.rs +++ b/bin/node/cli/src/command.rs @@ -16,12 +16,11 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use crate::{chain_spec, service, Cli, Subcommand}; +use crate::{chain_spec, service, service::new_partial, Cli, Subcommand}; use node_executor::Executor; use node_runtime::{Block, RuntimeApi}; -use sc_cli::{Result, SubstrateCli, RuntimeVersion, Role, ChainSpec}; +use sc_cli::{ChainSpec, Result, Role, RuntimeVersion, SubstrateCli}; use sc_service::PartialComponents; -use crate::service::new_partial; impl SubstrateCli for Cli { fn impl_name() -> String { @@ -49,17 +48,19 @@ impl SubstrateCli for Cli { } fn load_spec(&self, id: &str) -> std::result::Result, String> { - let spec = - match id { - "" => return Err("Please specify which chain you want to run, e.g. --dev or --chain=local".into()), - "dev" => Box::new(chain_spec::development_config()), - "local" => Box::new(chain_spec::local_testnet_config()), - "fir" | "flaming-fir" => Box::new(chain_spec::flaming_fir_config()?), - "staging" => Box::new(chain_spec::staging_testnet_config()), - path => Box::new(chain_spec::ChainSpec::from_json_file( - std::path::PathBuf::from(path), - )?), - }; + let spec = match id { + "" => + return Err( + "Please specify which chain you want to run, e.g. 
--dev or --chain=local" + .into(), + ), + "dev" => Box::new(chain_spec::development_config()), + "local" => Box::new(chain_spec::local_testnet_config()), + "fir" | "flaming-fir" => Box::new(chain_spec::flaming_fir_config()?), + "staging" => Box::new(chain_spec::staging_testnet_config()), + path => + Box::new(chain_spec::ChainSpec::from_json_file(std::path::PathBuf::from(path))?), + }; Ok(spec) } @@ -79,24 +80,25 @@ pub fn run() -> Result<()> { match config.role { Role::Light => service::new_light(config), _ => service::new_full(config), - }.map_err(sc_cli::Error::Service) + } + .map_err(sc_cli::Error::Service) }) - } + }, Some(Subcommand::Inspect(cmd)) => { let runner = cli.create_runner(cmd)?; runner.sync_run(|config| cmd.run::(config)) - } - Some(Subcommand::Benchmark(cmd)) => { + }, + Some(Subcommand::Benchmark(cmd)) => if cfg!(feature = "runtime-benchmarks") { let runner = cli.create_runner(cmd)?; runner.sync_run(|config| cmd.run::(config)) } else { Err("Benchmarking wasn't enabled when building the node. \ - You can enable it with `--features runtime-benchmarks`.".into()) - } - } + You can enable it with `--features runtime-benchmarks`." + .into()) + }, Some(Subcommand::Key(cmd)) => cmd.run(&cli), Some(Subcommand::Sign(cmd)) => cmd.run(), Some(Subcommand::Verify(cmd)) => cmd.run(), @@ -108,32 +110,30 @@ pub fn run() -> Result<()> { Some(Subcommand::CheckBlock(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, import_queue, ..} - = new_partial(&config)?; + let PartialComponents { client, task_manager, import_queue, .. } = + new_partial(&config)?; Ok((cmd.run(client, import_queue), task_manager)) }) }, Some(Subcommand::ExportBlocks(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, ..} - = new_partial(&config)?; + let PartialComponents { client, task_manager, .. 
} = new_partial(&config)?; Ok((cmd.run(client, config.database), task_manager)) }) }, Some(Subcommand::ExportState(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, ..} - = new_partial(&config)?; + let PartialComponents { client, task_manager, .. } = new_partial(&config)?; Ok((cmd.run(client, config.chain_spec), task_manager)) }) }, Some(Subcommand::ImportBlocks(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, import_queue, ..} - = new_partial(&config)?; + let PartialComponents { client, task_manager, import_queue, .. } = + new_partial(&config)?; Ok((cmd.run(client, import_queue), task_manager)) }) }, @@ -144,8 +144,7 @@ pub fn run() -> Result<()> { Some(Subcommand::Revert(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { - let PartialComponents { client, task_manager, backend, ..} - = new_partial(&config)?; + let PartialComponents { client, task_manager, backend, .. } = new_partial(&config)?; Ok((cmd.run(client, backend), task_manager)) }) }, @@ -156,18 +155,16 @@ pub fn run() -> Result<()> { // we don't need any of the components of new_partial, just a runtime, or a task // manager to do `async_run`. let registry = config.prometheus_config.as_ref().map(|cfg| &cfg.registry); - let task_manager = sc_service::TaskManager::new( - config.task_executor.clone(), - registry, - ).map_err(|e| sc_cli::Error::Service(sc_service::Error::Prometheus(e)))?; + let task_manager = + sc_service::TaskManager::new(config.task_executor.clone(), registry) + .map_err(|e| sc_cli::Error::Service(sc_service::Error::Prometheus(e)))?; Ok((cmd.run::(config), task_manager)) }) }, #[cfg(not(feature = "try-runtime"))] - Some(Subcommand::TryRuntime) => { - Err("TryRuntime wasn't enabled when building the node. 
\ - You can enable it with `--features try-runtime`.".into()) - }, + Some(Subcommand::TryRuntime) => Err("TryRuntime wasn't enabled when building the node. \ + You can enable it with `--features try-runtime`." + .into()), } } diff --git a/bin/node/cli/src/service.rs b/bin/node/cli/src/service.rs index 4886b798b050c..47bc5f5b021f5 100644 --- a/bin/node/cli/src/service.rs +++ b/bin/node/cli/src/service.rs @@ -20,20 +20,17 @@ //! Service implementation. Specialized wrapper over substrate service. -use std::sync::Arc; -use sc_consensus_babe; +use futures::prelude::*; +use node_executor::Executor; use node_primitives::Block; use node_runtime::RuntimeApi; -use sc_service::{ - config::Configuration, error::Error as ServiceError, RpcHandlers, TaskManager, -}; -use sc_network::{Event, NetworkService}; -use sp_runtime::traits::Block as BlockT; -use futures::prelude::*; use sc_client_api::{ExecutorProvider, RemoteBackend}; -use node_executor::Executor; +use sc_consensus_babe::{self, SlotProportion}; +use sc_network::{Event, NetworkService}; +use sc_service::{config::Configuration, error::Error as ServiceError, RpcHandlers, TaskManager}; use sc_telemetry::{Telemetry, TelemetryWorker}; -use sc_consensus_babe::SlotProportion; +use sp_runtime::traits::Block as BlockT; +use std::sync::Arc; type FullClient = sc_service::TFullClient; type FullBackend = sc_service::TFullBackend; @@ -44,25 +41,29 @@ type LightClient = sc_service::TLightClient; pub fn new_partial( config: &Configuration, -) -> Result, - sc_transaction_pool::FullPool, - ( - impl Fn( - node_rpc::DenyUnsafe, - sc_rpc::SubscriptionTaskExecutor, - ) -> node_rpc::IoHandler, +) -> Result< + sc_service::PartialComponents< + FullClient, + FullBackend, + FullSelectChain, + sp_consensus::DefaultImportQueue, + sc_transaction_pool::FullPool, ( - sc_consensus_babe::BabeBlockImport, - grandpa::LinkHalf, - sc_consensus_babe::BabeLink, + impl Fn(node_rpc::DenyUnsafe, sc_rpc::SubscriptionTaskExecutor) -> node_rpc::IoHandler, + ( + 
sc_consensus_babe::BabeBlockImport, + grandpa::LinkHalf, + sc_consensus_babe::BabeLink, + ), + grandpa::SharedVoterState, + Option, ), - grandpa::SharedVoterState, - Option, - ) ->, ServiceError> { - let telemetry = config.telemetry_endpoints.clone() + >, + ServiceError, +> { + let telemetry = config + .telemetry_endpoints + .clone() .filter(|x| !x.is_empty()) .map(|endpoints| -> Result<_, sc_telemetry::Error> { let worker = TelemetryWorker::new(16)?; @@ -78,11 +79,10 @@ pub fn new_partial( )?; let client = Arc::new(client); - let telemetry = telemetry - .map(|(worker, telemetry)| { - task_manager.spawn_handle().spawn("telemetry", worker.run()); - telemetry - }); + let telemetry = telemetry.map(|(worker, telemetry)| { + task_manager.spawn_handle().spawn("telemetry", worker.run()); + telemetry + }); let select_chain = sc_consensus::LongestChain::new(backend.clone()); @@ -115,21 +115,19 @@ pub fn new_partial( Some(Box::new(justification_import)), client.clone(), select_chain.clone(), - move |_, ()| { - async move { - let timestamp = sp_timestamp::InherentDataProvider::from_system_time(); + move |_, ()| async move { + let timestamp = sp_timestamp::InherentDataProvider::from_system_time(); - let slot = - sp_consensus_babe::inherents::InherentDataProvider::from_timestamp_and_duration( - *timestamp, - slot_duration, - ); + let slot = + sp_consensus_babe::inherents::InherentDataProvider::from_timestamp_and_duration( + *timestamp, + slot_duration, + ); - let uncles = - sp_authorship::InherentDataProvider::<::Header>::check_inherents(); + let uncles = + sp_authorship::InherentDataProvider::<::Header>::check_inherents(); - Ok((timestamp, slot, uncles)) - } + Ok((timestamp, slot, uncles)) }, &task_manager.spawn_essential_handle(), config.prometheus_registry(), @@ -213,7 +211,7 @@ pub fn new_full_base( with_startup_data: impl FnOnce( &sc_consensus_babe::BabeBlockImport, &sc_consensus_babe::BabeLink, - ) + ), ) -> Result { let sc_service::PartialComponents { client, @@ -238,7 
+236,7 @@ pub fn new_full_base( task_manager.spawn_handle(), backend.clone(), import_setup.1.shared_authority_set().clone(), - ) + ), ); let (network, system_rpc_tx, network_starter) = @@ -254,7 +252,10 @@ pub fn new_full_base( if config.offchain_worker.enabled { sc_service::build_offchain_workers( - &config, task_manager.spawn_handle(), client.clone(), network.clone(), + &config, + task_manager.spawn_handle(), + client.clone(), + network.clone(), ); } @@ -266,22 +267,20 @@ pub fn new_full_base( let enable_grandpa = !config.disable_grandpa; let prometheus_registry = config.prometheus_registry().cloned(); - let _rpc_handlers = sc_service::spawn_tasks( - sc_service::SpawnTasksParams { - config, - backend: backend.clone(), - client: client.clone(), - keystore: keystore_container.sync_keystore(), - network: network.clone(), - rpc_extensions_builder: Box::new(rpc_extensions_builder), - transaction_pool: transaction_pool.clone(), - task_manager: &mut task_manager, - on_demand: None, - remote_blockchain: None, - system_rpc_tx, - telemetry: telemetry.as_mut(), - }, - )?; + let _rpc_handlers = sc_service::spawn_tasks(sc_service::SpawnTasksParams { + config, + backend: backend.clone(), + client: client.clone(), + keystore: keystore_container.sync_keystore(), + network: network.clone(), + rpc_extensions_builder: Box::new(rpc_extensions_builder), + transaction_pool: transaction_pool.clone(), + task_manager: &mut task_manager, + on_demand: None, + remote_blockchain: None, + system_rpc_tx, + telemetry: telemetry.as_mut(), + })?; let (block_import, grandpa_link, babe_link) = import_setup; @@ -343,36 +342,37 @@ pub fn new_full_base( // Spawn authority discovery module. 
if role.is_authority() { - let authority_discovery_role = sc_authority_discovery::Role::PublishAndDiscover( - keystore_container.keystore(), - ); - let dht_event_stream = network.event_stream("authority-discovery") - .filter_map(|e| async move { match e { - Event::Dht(e) => Some(e), - _ => None, - }}); - let (authority_discovery_worker, _service) = sc_authority_discovery::new_worker_and_service_with_config( - sc_authority_discovery::WorkerConfig { - publish_non_global_ips: auth_disc_publish_non_global_ips, - ..Default::default() - }, - client.clone(), - network.clone(), - Box::pin(dht_event_stream), - authority_discovery_role, - prometheus_registry.clone(), - ); + let authority_discovery_role = + sc_authority_discovery::Role::PublishAndDiscover(keystore_container.keystore()); + let dht_event_stream = + network.event_stream("authority-discovery").filter_map(|e| async move { + match e { + Event::Dht(e) => Some(e), + _ => None, + } + }); + let (authority_discovery_worker, _service) = + sc_authority_discovery::new_worker_and_service_with_config( + sc_authority_discovery::WorkerConfig { + publish_non_global_ips: auth_disc_publish_non_global_ips, + ..Default::default() + }, + client.clone(), + network.clone(), + Box::pin(dht_event_stream), + authority_discovery_role, + prometheus_registry.clone(), + ); - task_manager.spawn_handle().spawn("authority-discovery-worker", authority_discovery_worker.run()); + task_manager + .spawn_handle() + .spawn("authority-discovery-worker", authority_discovery_worker.run()); } // if the node isn't actively participating in consensus then it doesn't // need a keystore, regardless of which protocol we use below. 
- let keystore = if role.is_authority() { - Some(keystore_container.sync_keystore()) - } else { - None - }; + let keystore = + if role.is_authority() { Some(keystore_container.sync_keystore()) } else { None }; let config = grandpa::Config { // FIXME #1578 make this available through chainspec @@ -404,46 +404,41 @@ pub fn new_full_base( // the GRANDPA voter task is considered infallible, i.e. // if it fails we take down the service with it. - task_manager.spawn_essential_handle().spawn_blocking( - "grandpa-voter", - grandpa::run_grandpa_voter(grandpa_config)? - ); + task_manager + .spawn_essential_handle() + .spawn_blocking("grandpa-voter", grandpa::run_grandpa_voter(grandpa_config)?); } network_starter.start_network(); - Ok(NewFullBase { - task_manager, - client, - network, - transaction_pool, - }) + Ok(NewFullBase { task_manager, client, network, transaction_pool }) } /// Builds a new service for a full client. -pub fn new_full( - config: Configuration, -) -> Result { - new_full_base(config, |_, _| ()).map(|NewFullBase { task_manager, .. }| { - task_manager - }) +pub fn new_full(config: Configuration) -> Result { + new_full_base(config, |_, _| ()).map(|NewFullBase { task_manager, .. 
}| task_manager) } pub fn new_light_base( mut config: Configuration, -) -> Result<( - TaskManager, - RpcHandlers, - Arc, - Arc::Hash>>, - Arc>> -), ServiceError> { - let telemetry = config.telemetry_endpoints.clone() +) -> Result< + ( + TaskManager, + RpcHandlers, + Arc, + Arc::Hash>>, + Arc< + sc_transaction_pool::LightPool>, + >, + ), + ServiceError, +> { + let telemetry = config + .telemetry_endpoints + .clone() .filter(|x| !x.is_empty()) .map(|endpoints| -> Result<_, sc_telemetry::Error> { #[cfg(feature = "browser")] - let transport = Some( - sc_telemetry::ExtTransport::new(libp2p_wasm_ext::ffi::websocket_transport()) - ); + let transport = Some(sc_telemetry::ExtTransport::new(libp2p_wasm_ext::ffi::websocket_transport())); #[cfg(not(feature = "browser"))] let transport = None; @@ -459,11 +454,10 @@ pub fn new_light_base( telemetry.as_ref().map(|(_, telemetry)| telemetry.handle()), )?; - let mut telemetry = telemetry - .map(|(worker, telemetry)| { - task_manager.spawn_handle().spawn("telemetry", worker.run()); - telemetry - }); + let mut telemetry = telemetry.map(|(worker, telemetry)| { + task_manager.spawn_handle().spawn("telemetry", worker.run()); + telemetry + }); config.network.extra_sets.push(grandpa::grandpa_peers_set_config()); @@ -567,71 +561,60 @@ pub fn new_light_base( let rpc_extensions = node_rpc::create_light(light_deps); - let rpc_handlers = - sc_service::spawn_tasks(sc_service::SpawnTasksParams { - on_demand: Some(on_demand), - remote_blockchain: Some(backend.remote_blockchain()), - rpc_extensions_builder: Box::new(sc_service::NoopRpcExtensionBuilder(rpc_extensions)), - client: client.clone(), - transaction_pool: transaction_pool.clone(), - keystore: keystore_container.sync_keystore(), - config, backend, system_rpc_tx, - network: network.clone(), - task_manager: &mut task_manager, - telemetry: telemetry.as_mut(), - })?; + let rpc_handlers = sc_service::spawn_tasks(sc_service::SpawnTasksParams { + on_demand: Some(on_demand), + remote_blockchain: 
Some(backend.remote_blockchain()), + rpc_extensions_builder: Box::new(sc_service::NoopRpcExtensionBuilder(rpc_extensions)), + client: client.clone(), + transaction_pool: transaction_pool.clone(), + keystore: keystore_container.sync_keystore(), + config, + backend, + system_rpc_tx, + network: network.clone(), + task_manager: &mut task_manager, + telemetry: telemetry.as_mut(), + })?; network_starter.start_network(); - Ok(( - task_manager, - rpc_handlers, - client, - network, - transaction_pool, - )) + Ok((task_manager, rpc_handlers, client, network, transaction_pool)) } /// Builds a new service for a light client. -pub fn new_light( - config: Configuration, -) -> Result { - new_light_base(config).map(|(task_manager, _, _, _, _)| { - task_manager - }) +pub fn new_light(config: Configuration) -> Result { + new_light_base(config).map(|(task_manager, _, _, _, _)| task_manager) } #[cfg(test)] mod tests { - use std::{sync::Arc, borrow::Cow, convert::TryInto}; - use sc_consensus_babe::{CompatibleDigestItem, BabeIntermediate, INTERMEDIATE_KEY}; + use crate::service::{new_full_base, new_light_base, NewFullBase}; + use codec::Encode; + use node_primitives::{Block, DigestItem, Signature}; + use node_runtime::{ + constants::{currency::CENTS, time::SLOT_DURATION}, + Address, BalancesCall, Call, UncheckedExtrinsic, + }; + use sc_client_api::BlockBackend; + use sc_consensus_babe::{BabeIntermediate, CompatibleDigestItem, INTERMEDIATE_KEY}; use sc_consensus_epochs::descendent_query; + use sc_keystore::LocalKeystore; + use sc_service_test::TestNetNode; + use sc_transaction_pool_api::{ChainEvent, MaintainedTransactionPool}; use sp_consensus::{ - Environment, Proposer, BlockImportParams, BlockOrigin, ForkChoiceStrategy, BlockImport, - }; - use node_primitives::{Block, DigestItem, Signature}; - use node_runtime::{BalancesCall, Call, UncheckedExtrinsic, Address}; - use node_runtime::constants::{currency::CENTS, time::SLOT_DURATION}; - use codec::Encode; - use sp_core::{ - crypto::Pair as 
CryptoPair, - H256, - Public + BlockImport, BlockImportParams, BlockOrigin, Environment, ForkChoiceStrategy, Proposer, }; - use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; + use sp_core::{crypto::Pair as CryptoPair, Public, H256}; + use sp_inherents::InherentDataProvider; + use sp_keyring::AccountKeyring; + use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; use sp_runtime::{ - generic::{BlockId, Era, Digest, SignedPayload}, - traits::{Block as BlockT, Header as HeaderT}, - traits::Verify, + generic::{BlockId, Digest, Era, SignedPayload}, + key_types::BABE, + traits::{Block as BlockT, Header as HeaderT, IdentifyAccount, Verify}, + RuntimeAppPublic, }; use sp_timestamp; - use sp_keyring::AccountKeyring; - use sc_service_test::TestNetNode; - use crate::service::{new_full_base, new_light_base, NewFullBase}; - use sp_runtime::{key_types::BABE, traits::IdentifyAccount, RuntimeAppPublic}; - use sc_transaction_pool_api::{MaintainedTransactionPool, ChainEvent}; - use sc_client_api::BlockBackend; - use sc_keystore::LocalKeystore; - use sp_inherents::InherentDataProvider; + use std::{borrow::Cow, convert::TryInto, sync::Arc}; type AccountPublic = ::Signer; @@ -641,10 +624,12 @@ mod tests { #[ignore] fn test_sync() { let keystore_path = tempfile::tempdir().expect("Creates keystore path"); - let keystore: SyncCryptoStorePtr = Arc::new(LocalKeystore::open(keystore_path.path(), None) - .expect("Creates keystore")); - let alice: sp_consensus_babe::AuthorityId = SyncCryptoStore::sr25519_generate_new(&*keystore, BABE, Some("//Alice")) - .expect("Creates authority pair").into(); + let keystore: SyncCryptoStorePtr = + Arc::new(LocalKeystore::open(keystore_path.path(), None).expect("Creates keystore")); + let alice: sp_consensus_babe::AuthorityId = + SyncCryptoStore::sr25519_generate_new(&*keystore, BABE, Some("//Alice")) + .expect("Creates authority pair") + .into(); let chain_spec = crate::chain_spec::tests::integration_test_config_with_single_authority(); @@ -660,25 
+645,31 @@ mod tests { chain_spec, |config| { let mut setup_handles = None; - let NewFullBase { - task_manager, client, network, transaction_pool, .. - } = new_full_base(config, - | - block_import: &sc_consensus_babe::BabeBlockImport, - babe_link: &sc_consensus_babe::BabeLink, - | { - setup_handles = Some((block_import.clone(), babe_link.clone())); - } - )?; + let NewFullBase { task_manager, client, network, transaction_pool, .. } = + new_full_base( + config, + |block_import: &sc_consensus_babe::BabeBlockImport, + babe_link: &sc_consensus_babe::BabeLink| { + setup_handles = Some((block_import.clone(), babe_link.clone())); + }, + )?; let node = sc_service_test::TestNetComponents::new( - task_manager, client, network, transaction_pool + task_manager, + client, + network, + transaction_pool, ); Ok((node, setup_handles.unwrap())) }, |config| { let (keep_alive, _, client, network, transaction_pool) = new_light_base(config)?; - Ok(sc_service_test::TestNetComponents::new(keep_alive, client, network, transaction_pool)) + Ok(sc_service_test::TestNetComponents::new( + keep_alive, + client, + network, + transaction_pool, + )) }, |service, &mut (ref mut block_import, ref babe_link)| { let parent_id = BlockId::number(service.client().chain_info().best_number); @@ -686,14 +677,9 @@ mod tests { let parent_hash = parent_header.hash(); let parent_number = *parent_header.number(); - futures::executor::block_on( - service.transaction_pool().maintain( - ChainEvent::NewBestBlock { - hash: parent_header.hash(), - tree_route: None, - }, - ) - ); + futures::executor::block_on(service.transaction_pool().maintain( + ChainEvent::NewBestBlock { hash: parent_header.hash(), tree_route: None }, + )); let mut proposer_factory = sc_basic_authorship::ProposerFactory::new( service.spawn_handle(), @@ -708,23 +694,30 @@ mod tests { // even though there's only one authority some slots might be empty, // so we must keep trying the next slots until we can claim one. 
let (babe_pre_digest, epoch_descriptor) = loop { - let epoch_descriptor = babe_link.epoch_changes().shared_data().epoch_descriptor_for_child_of( - descendent_query(&*service.client()), - &parent_hash, - parent_number, - slot.into(), - ).unwrap().unwrap(); - - let epoch = babe_link.epoch_changes().shared_data().epoch_data( - &epoch_descriptor, - |slot| sc_consensus_babe::Epoch::genesis(&babe_link.config(), slot), - ).unwrap(); - - if let Some(babe_pre_digest) = sc_consensus_babe::authorship::claim_slot( - slot.into(), - &epoch, - &keystore, - ).map(|(digest, _)| digest) { + let epoch_descriptor = babe_link + .epoch_changes() + .shared_data() + .epoch_descriptor_for_child_of( + descendent_query(&*service.client()), + &parent_hash, + parent_number, + slot.into(), + ) + .unwrap() + .unwrap(); + + let epoch = babe_link + .epoch_changes() + .shared_data() + .epoch_data(&epoch_descriptor, |slot| { + sc_consensus_babe::Epoch::genesis(&babe_link.config(), slot) + }) + .unwrap(); + + if let Some(babe_pre_digest) = + sc_consensus_babe::authorship::claim_slot(slot.into(), &epoch, &keystore) + .map(|(digest, _)| digest) + { break (babe_pre_digest, epoch_descriptor) } @@ -736,19 +729,21 @@ mod tests { std::time::Duration::from_millis(SLOT_DURATION * slot).into(), ), sp_consensus_babe::inherents::InherentDataProvider::new(slot.into()), - ).create_inherent_data().expect("Creates inherent data"); + ) + .create_inherent_data() + .expect("Creates inherent data"); digest.push(::babe_pre_digest(babe_pre_digest)); let new_block = futures::executor::block_on(async move { let proposer = proposer_factory.init(&parent_header).await; - proposer.unwrap().propose( - inherent_data, - digest, - std::time::Duration::from_secs(1), - None, - ).await - }).expect("Error making test block").block; + proposer + .unwrap() + .propose(inherent_data, digest, std::time::Duration::from_secs(1), None) + .await + }) + .expect("Error making test block") + .block; let (new_header, new_body) = 
new_block.deconstruct(); let pre_hash = new_header.hash(); @@ -760,10 +755,12 @@ mod tests { sp_consensus_babe::AuthorityId::ID, &alice.to_public_crypto_pair(), &to_sign, - ).unwrap().unwrap().try_into().unwrap(); - let item = ::babe_seal( - signature, - ); + ) + .unwrap() + .unwrap() + .try_into() + .unwrap(); + let item = ::babe_seal(signature); slot += 1; let mut params = BlockImportParams::new(BlockOrigin::File, new_header); @@ -811,19 +808,13 @@ mod tests { let raw_payload = SignedPayload::from_raw( function, extra, - (spec_version, transaction_version, genesis_hash, genesis_hash, (), (), ()) + (spec_version, transaction_version, genesis_hash, genesis_hash, (), (), ()), ); - let signature = raw_payload.using_encoded(|payload| { - signer.sign(payload) - }); + let signature = raw_payload.using_encoded(|payload| signer.sign(payload)); let (function, extra, _) = raw_payload.deconstruct(); index += 1; - UncheckedExtrinsic::new_signed( - function, - from.into(), - signature.into(), - extra, - ).into() + UncheckedExtrinsic::new_signed(function, from.into(), signature.into(), extra) + .into() }, ); } @@ -834,18 +825,25 @@ mod tests { sc_service_test::consensus( crate::chain_spec::tests::integration_test_config_with_two_authorities(), |config| { - let NewFullBase { task_manager, client, network, transaction_pool, .. } - = new_full_base(config,|_, _| ())?; - Ok(sc_service_test::TestNetComponents::new(task_manager, client, network, transaction_pool)) + let NewFullBase { task_manager, client, network, transaction_pool, .. 
} = + new_full_base(config, |_, _| ())?; + Ok(sc_service_test::TestNetComponents::new( + task_manager, + client, + network, + transaction_pool, + )) }, |config| { let (keep_alive, _, client, network, transaction_pool) = new_light_base(config)?; - Ok(sc_service_test::TestNetComponents::new(keep_alive, client, network, transaction_pool)) + Ok(sc_service_test::TestNetComponents::new( + keep_alive, + client, + network, + transaction_pool, + )) }, - vec![ - "//Alice".into(), - "//Bob".into(), - ], + vec!["//Alice".into(), "//Bob".into()], ) } } diff --git a/bin/node/cli/tests/common.rs b/bin/node/cli/tests/common.rs index c3bb96555da56..50776202d79eb 100644 --- a/bin/node/cli/tests/common.rs +++ b/bin/node/cli/tests/common.rs @@ -18,11 +18,18 @@ #![cfg(unix)] -use std::{process::{Child, ExitStatus}, thread, time::Duration, path::Path}; use assert_cmd::cargo::cargo_bin; -use std::{convert::TryInto, process::Command}; -use nix::sys::signal::{kill, Signal::SIGINT}; -use nix::unistd::Pid; +use nix::{ + sys::signal::{kill, Signal::SIGINT}, + unistd::Pid, +}; +use std::{ + convert::TryInto, + path::Path, + process::{Child, Command, ExitStatus}, + thread, + time::Duration, +}; /// Wait for the given `child` the given number of `secs`. /// @@ -50,12 +57,7 @@ pub fn wait_for(child: &mut Child, secs: usize) -> Option { pub fn run_dev_node_for_a_while(base_path: &Path) { let mut cmd = Command::new(cargo_bin("substrate")); - let mut cmd = cmd - .args(&["--dev"]) - .arg("-d") - .arg(base_path) - .spawn() - .unwrap(); + let mut cmd = cmd.args(&["--dev"]).arg("-d").arg(base_path).spawn().unwrap(); // Let it produce some blocks. 
thread::sleep(Duration::from_secs(30)); diff --git a/bin/node/cli/tests/export_import_flow.rs b/bin/node/cli/tests/export_import_flow.rs index 02fba49e834ef..583445434d391 100644 --- a/bin/node/cli/tests/export_import_flow.rs +++ b/bin/node/cli/tests/export_import_flow.rs @@ -19,9 +19,9 @@ #![cfg(unix)] use assert_cmd::cargo::cargo_bin; -use std::{process::Command, fs, path::PathBuf}; -use tempfile::{tempdir, TempDir}; use regex::Regex; +use std::{fs, path::PathBuf, process::Command}; +use tempfile::{tempdir, TempDir}; pub mod common; @@ -63,26 +63,23 @@ impl<'a> ExportImportRevertExecutor<'a> { fn new( base_path: &'a TempDir, exported_blocks_file: &'a PathBuf, - db_path: &'a PathBuf + db_path: &'a PathBuf, ) -> Self { - Self { - base_path, - exported_blocks_file, - db_path, - num_exported_blocks: None, - } + Self { base_path, exported_blocks_file, db_path, num_exported_blocks: None } } /// Helper method to run a command. Returns a string corresponding to what has been logged. - fn run_block_command(&self, + fn run_block_command( + &self, sub_command: SubCommand, format_opt: FormatOpt, - expected_to_fail: bool + expected_to_fail: bool, ) -> String { let sub_command_str = sub_command.to_string(); // Adding "--binary" if need be. let arguments: Vec<&str> = match format_opt { - FormatOpt::Binary => vec![&sub_command_str, "--dev", "--pruning", "archive", "--binary", "-d"], + FormatOpt::Binary => + vec![&sub_command_str, "--dev", "--pruning", "archive", "--binary", "-d"], FormatOpt::Json => vec![&sub_command_str, "--dev", "--pruning", "archive", "-d"], }; @@ -94,7 +91,7 @@ impl<'a> ExportImportRevertExecutor<'a> { SubCommand::ImportBlocks => { tmp = tempdir().unwrap(); tmp.path() - } + }, }; // Running the command and capturing the output. @@ -144,16 +141,13 @@ impl<'a> ExportImportRevertExecutor<'a> { if !expected_to_fail { // Using regex to find out how much block we imported, // and what's the best current block. - let re = Regex::new(r"Imported (?P\d*) blocks. 
Best: #(?P\d*)").unwrap(); + let re = + Regex::new(r"Imported (?P\d*) blocks. Best: #(?P\d*)").unwrap(); let caps = re.captures(&log).expect("capture should have succeeded"); let imported = caps["imported"].parse::().unwrap(); let best = caps["best"].parse::().unwrap(); - assert_eq!( - imported, - best, - "numbers of blocks imported and best number differs" - ); + assert_eq!(imported, best, "numbers of blocks imported and best number differs"); assert_eq!( best, self.num_exported_blocks.expect("number of exported blocks cannot be None; qed"), @@ -195,11 +189,7 @@ fn export_import_revert() { common::run_dev_node_for_a_while(base_path.path()); - let mut executor = ExportImportRevertExecutor::new( - &base_path, - &exported_blocks_file, - &db_path, - ); + let mut executor = ExportImportRevertExecutor::new(&base_path, &exported_blocks_file, &db_path); // Binary and binary should work. executor.run(FormatOpt::Binary, FormatOpt::Binary, false); diff --git a/bin/node/cli/tests/running_the_node_and_interrupt.rs b/bin/node/cli/tests/running_the_node_and_interrupt.rs index 05eb9a7027b71..7a945a30a4166 100644 --- a/bin/node/cli/tests/running_the_node_and_interrupt.rs +++ b/bin/node/cli/tests/running_the_node_and_interrupt.rs @@ -25,8 +25,13 @@ pub mod common; #[test] #[cfg(unix)] fn running_the_node_works_and_can_be_interrupted() { - use nix::sys::signal::{kill, Signal::{self, SIGINT, SIGTERM}}; - use nix::unistd::Pid; + use nix::{ + sys::signal::{ + kill, + Signal::{self, SIGINT, SIGTERM}, + }, + unistd::Pid, + }; fn run_command_and_kill(signal: Signal) { let base_path = tempdir().expect("could not create a temp dir"); diff --git a/bin/node/cli/tests/telemetry.rs b/bin/node/cli/tests/telemetry.rs index 0b90f56a03998..78a306284c4ac 100644 --- a/bin/node/cli/tests/telemetry.rs +++ b/bin/node/cli/tests/telemetry.rs @@ -17,10 +17,11 @@ // along with this program. If not, see . 
use assert_cmd::cargo::cargo_bin; -use nix::sys::signal::{kill, Signal::SIGINT}; -use nix::unistd::Pid; -use std::convert::TryInto; -use std::process; +use nix::{ + sys::signal::{kill, Signal::SIGINT}, + unistd::Pid, +}; +use std::{convert::TryInto, process}; pub mod common; pub mod websocket_server; @@ -45,27 +46,22 @@ async fn telemetry_works() { Event::ConnectionOpen { address } => { println!("New connection from {:?}", address); server.accept(); - } + }, // Received a message from a connection. Event::BinaryFrame { message, .. } => { let json: serde_json::Value = serde_json::from_slice(&message).unwrap(); - let object = json - .as_object() - .unwrap() - .get("payload") - .unwrap() - .as_object() - .unwrap(); + let object = + json.as_object().unwrap().get("payload").unwrap().as_object().unwrap(); if matches!(object.get("best"), Some(serde_json::Value::String(_))) { - break; + break } - } + }, Event::TextFrame { .. } => panic!("Got a TextFrame over the socket, this is a bug"), // Connection has been closed. - Event::ConnectionError { .. } => {} + Event::ConnectionError { .. 
} => {}, } } }); @@ -83,16 +79,11 @@ async fn telemetry_works() { server_task.await; - assert!( - substrate.try_wait().unwrap().is_none(), - "the process should still be running" - ); + assert!(substrate.try_wait().unwrap().is_none(), "the process should still be running"); // Stop the process kill(Pid::from_raw(substrate.id().try_into().unwrap()), SIGINT).unwrap(); - assert!(common::wait_for(&mut substrate, 40) - .map(|x| x.success()) - .unwrap_or_default()); + assert!(common::wait_for(&mut substrate, 40).map(|x| x.success()).unwrap_or_default()); let output = substrate.wait_with_output().unwrap(); diff --git a/bin/node/cli/tests/temp_base_path_works.rs b/bin/node/cli/tests/temp_base_path_works.rs index 0152ddb464dc7..c107740b9b0a5 100644 --- a/bin/node/cli/tests/temp_base_path_works.rs +++ b/bin/node/cli/tests/temp_base_path_works.rs @@ -19,15 +19,19 @@ #![cfg(unix)] use assert_cmd::cargo::cargo_bin; -use nix::sys::signal::{kill, Signal::SIGINT}; -use nix::unistd::Pid; +use nix::{ + sys::signal::{kill, Signal::SIGINT}, + unistd::Pid, +}; use regex::Regex; -use std::convert::TryInto; -use std::io::Read; -use std::path::PathBuf; -use std::process::{Command, Stdio}; -use std::thread; -use std::time::Duration; +use std::{ + convert::TryInto, + io::Read, + path::PathBuf, + process::{Command, Stdio}, + thread, + time::Duration, +}; pub mod common; @@ -44,29 +48,18 @@ fn temp_base_path_works() { // Let it produce some blocks. 
thread::sleep(Duration::from_secs(30)); - assert!( - cmd.try_wait().unwrap().is_none(), - "the process should still be running" - ); + assert!(cmd.try_wait().unwrap().is_none(), "the process should still be running"); // Stop the process kill(Pid::from_raw(cmd.id().try_into().unwrap()), SIGINT).unwrap(); - assert!(common::wait_for(&mut cmd, 40) - .map(|x| x.success()) - .unwrap_or_default()); + assert!(common::wait_for(&mut cmd, 40).map(|x| x.success()).unwrap_or_default()); // Ensure the database has been deleted let mut stderr = String::new(); cmd.stderr.unwrap().read_to_string(&mut stderr).unwrap(); let re = Regex::new(r"Database: .+ at (\S+)").unwrap(); - let db_path = PathBuf::from( - re.captures(stderr.as_str()) - .unwrap() - .get(1) - .unwrap() - .as_str() - .to_string(), - ); + let db_path = + PathBuf::from(re.captures(stderr.as_str()).unwrap().get(1).unwrap().as_str().to_string()); assert!(!db_path.exists()); } diff --git a/bin/node/cli/tests/version.rs b/bin/node/cli/tests/version.rs index 38e4b1fbda72e..5ed3a9a8800c8 100644 --- a/bin/node/cli/tests/version.rs +++ b/bin/node/cli/tests/version.rs @@ -22,61 +22,45 @@ use regex::Regex; use std::process::Command; fn expected_regex() -> Regex { - Regex::new(r"^substrate (\d+\.\d+\.\d+(?:-.+?)?)-([a-f\d]+|unknown)-(.+?)-(.+?)(?:-(.+))?$").unwrap() + Regex::new(r"^substrate (\d+\.\d+\.\d+(?:-.+?)?)-([a-f\d]+|unknown)-(.+?)-(.+?)(?:-(.+))?$") + .unwrap() } #[test] fn version_is_full() { let expected = expected_regex(); - let output = Command::new(cargo_bin("substrate")) - .args(&["--version"]) - .output() - .unwrap(); + let output = Command::new(cargo_bin("substrate")).args(&["--version"]).output().unwrap(); - assert!( - output.status.success(), - "command returned with non-success exit code" - ); + assert!(output.status.success(), "command returned with non-success exit code"); let output = String::from_utf8_lossy(&output.stdout).trim().to_owned(); - let captures = expected - .captures(output.as_str()) - 
.expect("could not parse version in output"); + let captures = expected.captures(output.as_str()).expect("could not parse version in output"); assert_eq!(&captures[1], env!("CARGO_PKG_VERSION")); assert_eq!(&captures[3], TARGET_ARCH.as_str()); assert_eq!(&captures[4], TARGET_OS.as_str()); - assert_eq!( - captures.get(5).map(|x| x.as_str()), - TARGET_ENV.map(|x| x.as_str()) - ); + assert_eq!(captures.get(5).map(|x| x.as_str()), TARGET_ENV.map(|x| x.as_str())); } #[test] fn test_regex_matches_properly() { let expected = expected_regex(); - let captures = expected - .captures("substrate 2.0.0-da487d19d-x86_64-linux-gnu") - .unwrap(); + let captures = expected.captures("substrate 2.0.0-da487d19d-x86_64-linux-gnu").unwrap(); assert_eq!(&captures[1], "2.0.0"); assert_eq!(&captures[2], "da487d19d"); assert_eq!(&captures[3], "x86_64"); assert_eq!(&captures[4], "linux"); assert_eq!(captures.get(5).map(|x| x.as_str()), Some("gnu")); - let captures = expected - .captures("substrate 2.0.0-alpha.5-da487d19d-x86_64-linux-gnu") - .unwrap(); + let captures = expected.captures("substrate 2.0.0-alpha.5-da487d19d-x86_64-linux-gnu").unwrap(); assert_eq!(&captures[1], "2.0.0-alpha.5"); assert_eq!(&captures[2], "da487d19d"); assert_eq!(&captures[3], "x86_64"); assert_eq!(&captures[4], "linux"); assert_eq!(captures.get(5).map(|x| x.as_str()), Some("gnu")); - let captures = expected - .captures("substrate 2.0.0-alpha.5-da487d19d-x86_64-linux") - .unwrap(); + let captures = expected.captures("substrate 2.0.0-alpha.5-da487d19d-x86_64-linux").unwrap(); assert_eq!(&captures[1], "2.0.0-alpha.5"); assert_eq!(&captures[2], "da487d19d"); assert_eq!(&captures[3], "x86_64"); diff --git a/bin/node/cli/tests/websocket_server.rs b/bin/node/cli/tests/websocket_server.rs index a8af1c3599521..9fc9b33bb33bf 100644 --- a/bin/node/cli/tests/websocket_server.rs +++ b/bin/node/cli/tests/websocket_server.rs @@ -129,15 +129,10 @@ impl WsServer { }; match server - .send_response(&{ - Response::Accept { - key: 
&websocket_key, - protocol: None, - } - }) + .send_response(&{ Response::Accept { key: &websocket_key, protocol: None } }) .await { - Ok(()) => {} + Ok(()) => {}, Err(err) => return Err(Box::new(err) as Box<_>), }; diff --git a/bin/node/executor/benches/bench.rs b/bin/node/executor/benches/bench.rs index d21aedd1d1849..8ac4b90150806 100644 --- a/bin/node/executor/benches/bench.rs +++ b/bin/node/executor/benches/bench.rs @@ -16,29 +16,33 @@ // limitations under the License. use codec::{Decode, Encode}; -use criterion::{BatchSize, Criterion, criterion_group, criterion_main}; +use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; +use frame_support::Hashable; use node_executor::Executor; use node_primitives::{BlockNumber, Hash}; use node_runtime::{ - Block, BuildStorage, Call, CheckedExtrinsic, GenesisConfig, Header, UncheckedExtrinsic, + constants::currency::*, Block, BuildStorage, Call, CheckedExtrinsic, GenesisConfig, Header, + UncheckedExtrinsic, }; -use node_runtime::constants::currency::*; use node_testing::keyring::*; -use sp_core::{NativeOrEncoded, NeverNativeValue}; -use sp_core::storage::well_known_keys; -use sp_core::traits::{CodeExecutor, RuntimeCode}; -use frame_support::Hashable; -use sp_state_machine::TestExternalities as CoreTestExternalities; -use sc_executor::{NativeExecutor, RuntimeInfo, WasmExecutionMethod, Externalities}; +use sc_executor::{Externalities, NativeExecutor, RuntimeInfo, WasmExecutionMethod}; +use sp_core::{ + storage::well_known_keys, + traits::{CodeExecutor, RuntimeCode}, + NativeOrEncoded, NeverNativeValue, +}; use sp_runtime::traits::BlakeTwo256; +use sp_state_machine::TestExternalities as CoreTestExternalities; criterion_group!(benches, bench_execute_block); criterion_main!(benches); /// The wasm runtime code. pub fn compact_code_unwrap() -> &'static [u8] { - node_runtime::WASM_BINARY.expect("Development wasm binary is not available. 
\ - Testing is only supported with the flag disabled.") + node_runtime::WASM_BINARY.expect( + "Development wasm binary is not available. \ + Testing is only supported with the flag disabled.", + ) } const GENESIS_HASH: [u8; 32] = [69u8; 32]; @@ -66,7 +70,9 @@ fn new_test_ext(genesis_config: &GenesisConfig) -> TestExternalities( parent_hash: Hash, extrinsics: Vec, ) -> (Vec, Hash) { - use sp_trie::{TrieConfiguration, trie_types::Layout}; + use sp_trie::{trie_types::Layout, TrieConfiguration}; // sign extrinsics. let extrinsics = extrinsics.into_iter().map(sign).collect::>(); // calculate the header fields that we can. - let extrinsics_root = Layout::::ordered_trie_root( - extrinsics.iter().map(Encode::encode) - ).to_fixed_bytes() - .into(); + let extrinsics_root = + Layout::::ordered_trie_root(extrinsics.iter().map(Encode::encode)) + .to_fixed_bytes() + .into(); let header = Header { parent_hash, @@ -103,34 +109,44 @@ fn construct_block( }; // execute the block to get the real header. - executor.call:: _>( - ext, - &runtime_code, - "Core_initialize_block", - &header.encode(), - true, - None, - ).0.unwrap(); - - for i in extrinsics.iter() { - executor.call:: _>( + executor + .call:: _>( ext, &runtime_code, - "BlockBuilder_apply_extrinsic", - &i.encode(), + "Core_initialize_block", + &header.encode(), true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); + + for i in extrinsics.iter() { + executor + .call:: _>( + ext, + &runtime_code, + "BlockBuilder_apply_extrinsic", + &i.encode(), + true, + None, + ) + .0 + .unwrap(); } - let header = match executor.call:: _>( - ext, - &runtime_code, - "BlockBuilder_finalize_block", - &[0u8;0], - true, - None, - ).0.unwrap() { + let header = match executor + .call:: _>( + ext, + &runtime_code, + "BlockBuilder_finalize_block", + &[0u8; 0], + true, + None, + ) + .0 + .unwrap() + { NativeOrEncoded::Native(_) => unreachable!(), NativeOrEncoded::Encoded(h) => Header::decode(&mut &h[..]).unwrap(), }; @@ -139,29 +155,21 @@ fn construct_block( 
(Block { header, extrinsics }.encode(), hash.into()) } -fn test_blocks(genesis_config: &GenesisConfig, executor: &NativeExecutor) - -> Vec<(Vec, Hash)> -{ +fn test_blocks( + genesis_config: &GenesisConfig, + executor: &NativeExecutor, +) -> Vec<(Vec, Hash)> { let mut test_ext = new_test_ext(genesis_config); - let mut block1_extrinsics = vec![ - CheckedExtrinsic { - signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(0)), - }, - ]; - block1_extrinsics.extend((0..20).map(|i| { - CheckedExtrinsic { - signed: Some((alice(), signed_extra(i, 0))), - function: Call::Balances(pallet_balances::Call::transfer(bob().into(), 1 * DOLLARS)), - } + let mut block1_extrinsics = vec![CheckedExtrinsic { + signed: None, + function: Call::Timestamp(pallet_timestamp::Call::set(0)), + }]; + block1_extrinsics.extend((0..20).map(|i| CheckedExtrinsic { + signed: Some((alice(), signed_extra(i, 0))), + function: Call::Balances(pallet_balances::Call::transfer(bob().into(), 1 * DOLLARS)), })); - let block1 = construct_block( - executor, - &mut test_ext.ext(), - 1, - GENESIS_HASH.into(), - block1_extrinsics, - ); + let block1 = + construct_block(executor, &mut test_ext.ext(), 1, GENESIS_HASH.into(), block1_extrinsics); vec![block1] } @@ -176,47 +184,47 @@ fn bench_execute_block(c: &mut Criterion) { ]; for strategy in execution_methods { - group.bench_function( - format!("{:?}", strategy), - |b| { - let genesis_config = node_testing::genesis::config(false, Some(compact_code_unwrap())); - let (use_native, wasm_method) = match strategy { - ExecutionMethod::Native => (true, WasmExecutionMethod::Interpreted), - ExecutionMethod::Wasm(wasm_method) => (false, wasm_method), - }; - - let executor = NativeExecutor::new(wasm_method, None, 8); - let runtime_code = RuntimeCode { - code_fetcher: &sp_core::traits::WrappedRuntimeCode(compact_code_unwrap().into()), - hash: vec![1, 2, 3], - heap_pages: None, - }; - - // Get the runtime version to initialize the runtimes cache. 
- { - let mut test_ext = new_test_ext(&genesis_config); - executor.runtime_version(&mut test_ext.ext(), &runtime_code).unwrap(); - } - - let blocks = test_blocks(&genesis_config, &executor); - - b.iter_batched_ref( - || new_test_ext(&genesis_config), - |test_ext| { - for block in blocks.iter() { - executor.call:: _>( + group.bench_function(format!("{:?}", strategy), |b| { + let genesis_config = node_testing::genesis::config(false, Some(compact_code_unwrap())); + let (use_native, wasm_method) = match strategy { + ExecutionMethod::Native => (true, WasmExecutionMethod::Interpreted), + ExecutionMethod::Wasm(wasm_method) => (false, wasm_method), + }; + + let executor = NativeExecutor::new(wasm_method, None, 8); + let runtime_code = RuntimeCode { + code_fetcher: &sp_core::traits::WrappedRuntimeCode(compact_code_unwrap().into()), + hash: vec![1, 2, 3], + heap_pages: None, + }; + + // Get the runtime version to initialize the runtimes cache. + { + let mut test_ext = new_test_ext(&genesis_config); + executor.runtime_version(&mut test_ext.ext(), &runtime_code).unwrap(); + } + + let blocks = test_blocks(&genesis_config, &executor); + + b.iter_batched_ref( + || new_test_ext(&genesis_config), + |test_ext| { + for block in blocks.iter() { + executor + .call:: _>( &mut test_ext.ext(), &runtime_code, "Core_execute_block", &block.0, use_native, None, - ).0.unwrap(); - } - }, - BatchSize::LargeInput, - ); - }, - ); + ) + .0 + .unwrap(); + } + }, + BatchSize::LargeInput, + ); + }); } } diff --git a/bin/node/executor/src/lib.rs b/bin/node/executor/src/lib.rs index e7fb09a19c514..0f4bfcf2eee26 100644 --- a/bin/node/executor/src/lib.rs +++ b/bin/node/executor/src/lib.rs @@ -18,8 +18,8 @@ //! A `CodeExecutor` specialization which uses natively compiled runtime when the wasm to be //! executed is equivalent to the natively compiled code. 
-pub use sc_executor::NativeExecutor; use sc_executor::native_executor_instance; +pub use sc_executor::NativeExecutor; // Declare an instance of the native executor named `Executor`. Include the wasm binary as the // equivalent wasm code. diff --git a/bin/node/executor/tests/basic.rs b/bin/node/executor/tests/basic.rs index 4e17366795909..f3beb93f598bb 100644 --- a/bin/node/executor/tests/basic.rs +++ b/bin/node/executor/tests/basic.rs @@ -15,30 +15,28 @@ // See the License for the specific language governing permissions and // limitations under the License. -use codec::{Encode, Decode, Joiner}; +use codec::{Decode, Encode, Joiner}; use frame_support::{ traits::Currency, - weights::{GetDispatchInfo, DispatchInfo, DispatchClass}, + weights::{DispatchClass, DispatchInfo, GetDispatchInfo}, }; -use sp_core::{NeverNativeValue, traits::Externalities, storage::well_known_keys}; +use frame_system::{self, AccountInfo, EventRecord, Phase}; +use sp_core::{storage::well_known_keys, traits::Externalities, NeverNativeValue}; use sp_runtime::{ - ApplyExtrinsicResult, - traits::Hash as HashT, - transaction_validity::InvalidTransaction, + traits::Hash as HashT, transaction_validity::InvalidTransaction, ApplyExtrinsicResult, }; -use frame_system::{self, EventRecord, Phase, AccountInfo}; +use node_primitives::{Balance, Hash}; use node_runtime::{ - Header, Block, UncheckedExtrinsic, CheckedExtrinsic, Call, Runtime, Balances, - System, TransactionPayment, Event, - constants::{time::SLOT_DURATION, currency::*}, + constants::{currency::*, time::SLOT_DURATION}, + Balances, Block, Call, CheckedExtrinsic, Event, Header, Runtime, System, TransactionPayment, + UncheckedExtrinsic, }; -use node_primitives::{Balance, Hash}; -use wat; use node_testing::keyring::*; +use wat; pub mod common; -use self::common::{*, sign}; +use self::common::{sign, *}; /// The wasm runtime binary which hasn't undergone the compacting process. 
/// @@ -46,8 +44,10 @@ use self::common::{*, sign}; /// have to execute provided wasm code instead of the native equivalent. This trick is used to /// test code paths that differ between native and wasm versions. pub fn bloaty_code_unwrap() -> &'static [u8] { - node_runtime::WASM_BINARY_BLOATY.expect("Development wasm binary is not available. \ - Testing is only supported with the flag disabled.") + node_runtime::WASM_BINARY_BLOATY.expect( + "Development wasm binary is not available. \ + Testing is only supported with the flag disabled.", + ) } /// Default transfer fee. This will use the same logic that is implemented in transaction-payment module. @@ -87,7 +87,10 @@ fn changes_trie_block() -> (Vec, Hash) { }, CheckedExtrinsic { signed: Some((alice(), signed_extra(0, 0))), - function: Call::Balances(pallet_balances::Call::transfer(bob().into(), 69 * DOLLARS)), + function: Call::Balances(pallet_balances::Call::transfer( + bob().into(), + 69 * DOLLARS, + )), }, ], (time / SLOT_DURATION).into(), @@ -111,7 +114,10 @@ fn blocks() -> ((Vec, Hash), (Vec, Hash)) { }, CheckedExtrinsic { signed: Some((alice(), signed_extra(0, 0))), - function: Call::Balances(pallet_balances::Call::transfer(bob().into(), 69 * DOLLARS)), + function: Call::Balances(pallet_balances::Call::transfer( + bob().into(), + 69 * DOLLARS, + )), }, ], (time1 / SLOT_DURATION).into(), @@ -128,12 +134,18 @@ fn blocks() -> ((Vec, Hash), (Vec, Hash)) { }, CheckedExtrinsic { signed: Some((bob(), signed_extra(0, 0))), - function: Call::Balances(pallet_balances::Call::transfer(alice().into(), 5 * DOLLARS)), + function: Call::Balances(pallet_balances::Call::transfer( + alice().into(), + 5 * DOLLARS, + )), }, CheckedExtrinsic { signed: Some((alice(), signed_extra(1, 0))), - function: Call::Balances(pallet_balances::Call::transfer(bob().into(), 15 * DOLLARS)), - } + function: Call::Balances(pallet_balances::Call::transfer( + bob().into(), + 15 * DOLLARS, + )), + }, ], (time2 / SLOT_DURATION).into(), ); @@ -158,7 
+170,7 @@ fn block_with_size(time: u64, nonce: u32, size: usize) -> (Vec, Hash) { CheckedExtrinsic { signed: Some((alice(), signed_extra(nonce, 0))), function: Call::System(frame_system::Call::remark(vec![0; size])), - } + }, ], (time * 1000 / SLOT_DURATION).into(), ) @@ -169,7 +181,7 @@ fn panic_execution_with_foreign_code_gives_error() { let mut t = new_test_ext(bloaty_code_unwrap(), false); t.insert( >::hashed_key_for(alice()), - (69u128, 0u32, 0u128, 0u128, 0u128).encode() + (69u128, 0u32, 0u128, 0u128, 0u128).encode(), ); t.insert(>::hashed_key().to_vec(), 69_u128.encode()); t.insert(>::hashed_key_for(0), vec![0u8; 32]); @@ -180,7 +192,8 @@ fn panic_execution_with_foreign_code_gives_error() { &vec![].and(&from_block_number(1u32)), true, None, - ).0; + ) + .0; assert!(r.is_ok()); let v = executor_call:: _>( &mut t, @@ -188,7 +201,9 @@ fn panic_execution_with_foreign_code_gives_error() { &vec![].and(&xt()), true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); let r = ApplyExtrinsicResult::decode(&mut &v.as_encoded()[..]).unwrap(); assert_eq!(r, Err(InvalidTransaction::Payment.into())); } @@ -198,7 +213,7 @@ fn bad_extrinsic_with_native_equivalent_code_gives_error() { let mut t = new_test_ext(compact_code_unwrap(), false); t.insert( >::hashed_key_for(alice()), - (0u32, 0u32, 0u32, 69u128, 0u128, 0u128, 0u128).encode() + (0u32, 0u32, 0u32, 69u128, 0u128, 0u128, 0u128).encode(), ); t.insert(>::hashed_key().to_vec(), 69_u128.encode()); t.insert(>::hashed_key_for(0), vec![0u8; 32]); @@ -209,7 +224,8 @@ fn bad_extrinsic_with_native_equivalent_code_gives_error() { &vec![].and(&from_block_number(1u32)), true, None, - ).0; + ) + .0; assert!(r.is_ok()); let v = executor_call:: _>( &mut t, @@ -217,7 +233,9 @@ fn bad_extrinsic_with_native_equivalent_code_gives_error() { &vec![].and(&xt()), true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); let r = ApplyExtrinsicResult::decode(&mut &v.as_encoded()[..]).unwrap(); assert_eq!(r, Err(InvalidTransaction::Payment.into())); } @@ 
-229,19 +247,21 @@ fn successful_execution_with_native_equivalent_code_gives_ok() { >::hashed_key_for(alice()), AccountInfo::<::Index, _> { data: (111 * DOLLARS, 0u128, 0u128, 0u128), - .. Default::default() - }.encode(), + ..Default::default() + } + .encode(), ); t.insert( >::hashed_key_for(bob()), AccountInfo::<::Index, _> { data: (0 * DOLLARS, 0u128, 0u128, 0u128), - .. Default::default() - }.encode(), + ..Default::default() + } + .encode(), ); t.insert( >::hashed_key().to_vec(), - (111 * DOLLARS).encode() + (111 * DOLLARS).encode(), ); t.insert(>::hashed_key_for(0), vec![0u8; 32]); @@ -251,7 +271,8 @@ fn successful_execution_with_native_equivalent_code_gives_ok() { &vec![].and(&from_block_number(1u32)), true, None, - ).0; + ) + .0; assert!(r.is_ok()); let fees = t.execute_with(|| transfer_fee(&xt())); @@ -262,7 +283,8 @@ fn successful_execution_with_native_equivalent_code_gives_ok() { &vec![].and(&xt()), true, None, - ).0; + ) + .0; assert!(r.is_ok()); t.execute_with(|| { @@ -278,19 +300,21 @@ fn successful_execution_with_foreign_code_gives_ok() { >::hashed_key_for(alice()), AccountInfo::<::Index, _> { data: (111 * DOLLARS, 0u128, 0u128, 0u128), - .. Default::default() - }.encode(), + ..Default::default() + } + .encode(), ); t.insert( >::hashed_key_for(bob()), AccountInfo::<::Index, _> { data: (0 * DOLLARS, 0u128, 0u128, 0u128), - .. 
Default::default() - }.encode(), + ..Default::default() + } + .encode(), ); t.insert( >::hashed_key().to_vec(), - (111 * DOLLARS).encode() + (111 * DOLLARS).encode(), ); t.insert(>::hashed_key_for(0), vec![0u8; 32]); @@ -300,7 +324,8 @@ fn successful_execution_with_foreign_code_gives_ok() { &vec![].and(&from_block_number(1u32)), true, None, - ).0; + ) + .0; assert!(r.is_ok()); let fees = t.execute_with(|| transfer_fee(&xt())); @@ -311,7 +336,8 @@ fn successful_execution_with_foreign_code_gives_ok() { &vec![].and(&xt()), true, None, - ).0; + ) + .0; assert!(r.is_ok()); t.execute_with(|| { @@ -330,7 +356,9 @@ fn full_native_block_import_works() { let mut fees = t.execute_with(|| transfer_fee(&xt())); let transfer_weight = default_transfer_call().get_dispatch_info().weight; - let timestamp_weight = pallet_timestamp::Call::set::(Default::default()).get_dispatch_info().weight; + let timestamp_weight = pallet_timestamp::Call::set::(Default::default()) + .get_dispatch_info() + .weight; executor_call:: _>( &mut t, @@ -338,7 +366,9 @@ fn full_native_block_import_works() { &block1.0, true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); t.execute_with(|| { assert_eq!(Balances::total_balance(&alice()), 42 * DOLLARS - fees); @@ -347,9 +377,11 @@ fn full_native_block_import_works() { let events = vec![ EventRecord { phase: Phase::ApplyExtrinsic(0), - event: Event::System(frame_system::Event::ExtrinsicSuccess( - DispatchInfo { weight: timestamp_weight, class: DispatchClass::Mandatory, ..Default::default() } - )), + event: Event::System(frame_system::Event::ExtrinsicSuccess(DispatchInfo { + weight: timestamp_weight, + class: DispatchClass::Mandatory, + ..Default::default() + })), topics: vec![], }, EventRecord { @@ -368,9 +400,10 @@ fn full_native_block_import_works() { }, EventRecord { phase: Phase::ApplyExtrinsic(1), - event: Event::System(frame_system::Event::ExtrinsicSuccess( - DispatchInfo { weight: transfer_weight, ..Default::default() } - )), + event: 
Event::System(frame_system::Event::ExtrinsicSuccess(DispatchInfo { + weight: transfer_weight, + ..Default::default() + })), topics: vec![], }, ]; @@ -385,34 +418,33 @@ fn full_native_block_import_works() { &block2.0, true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); t.execute_with(|| { assert_eq!( Balances::total_balance(&alice()), alice_last_known_balance - 10 * DOLLARS - fees, ); - assert_eq!( - Balances::total_balance(&bob()), - 179 * DOLLARS - fees, - ); + assert_eq!(Balances::total_balance(&bob()), 179 * DOLLARS - fees,); let events = vec![ EventRecord { phase: Phase::ApplyExtrinsic(0), - event: Event::System(frame_system::Event::ExtrinsicSuccess( - DispatchInfo { weight: timestamp_weight, class: DispatchClass::Mandatory, ..Default::default() } - )), + event: Event::System(frame_system::Event::ExtrinsicSuccess(DispatchInfo { + weight: timestamp_weight, + class: DispatchClass::Mandatory, + ..Default::default() + })), topics: vec![], }, EventRecord { phase: Phase::ApplyExtrinsic(1), - event: Event::Balances( - pallet_balances::Event::Transfer( - bob().into(), - alice().into(), - 5 * DOLLARS, - ) - ), + event: Event::Balances(pallet_balances::Event::Transfer( + bob().into(), + alice().into(), + 5 * DOLLARS, + )), topics: vec![], }, EventRecord { @@ -422,20 +454,19 @@ fn full_native_block_import_works() { }, EventRecord { phase: Phase::ApplyExtrinsic(1), - event: Event::System(frame_system::Event::ExtrinsicSuccess( - DispatchInfo { weight: transfer_weight, ..Default::default() } - )), + event: Event::System(frame_system::Event::ExtrinsicSuccess(DispatchInfo { + weight: transfer_weight, + ..Default::default() + })), topics: vec![], }, EventRecord { phase: Phase::ApplyExtrinsic(2), - event: Event::Balances( - pallet_balances::Event::Transfer( - alice().into(), - bob().into(), - 15 * DOLLARS, - ) - ), + event: Event::Balances(pallet_balances::Event::Transfer( + alice().into(), + bob().into(), + 15 * DOLLARS, + )), topics: vec![], }, EventRecord { @@ -445,9 +476,10 @@ 
fn full_native_block_import_works() { }, EventRecord { phase: Phase::ApplyExtrinsic(2), - event: Event::System(frame_system::Event::ExtrinsicSuccess( - DispatchInfo { weight: transfer_weight, ..Default::default() } - )), + event: Event::System(frame_system::Event::ExtrinsicSuccess(DispatchInfo { + weight: transfer_weight, + ..Default::default() + })), topics: vec![], }, ]; @@ -470,7 +502,9 @@ fn full_wasm_block_import_works() { &block1.0, false, None, - ).0.unwrap(); + ) + .0 + .unwrap(); t.execute_with(|| { assert_eq!(Balances::total_balance(&alice()), 42 * DOLLARS - fees); @@ -486,17 +520,16 @@ fn full_wasm_block_import_works() { &block2.0, false, None, - ).0.unwrap(); + ) + .0 + .unwrap(); t.execute_with(|| { assert_eq!( Balances::total_balance(&alice()), alice_last_known_balance - 10 * DOLLARS - fees, ); - assert_eq!( - Balances::total_balance(&bob()), - 179 * DOLLARS - 1 * fees, - ); + assert_eq!(Balances::total_balance(&bob()), 179 * DOLLARS - 1 * fees,); }); } @@ -600,11 +633,7 @@ fn deploying_wasm_contract_should_work() { let transfer_code = wat::parse_str(CODE_TRANSFER).unwrap(); let transfer_ch = ::Hashing::hash(&transfer_code); - let addr = pallet_contracts::Pallet::::contract_address( - &charlie(), - &transfer_ch, - &[], - ); + let addr = pallet_contracts::Pallet::::contract_address(&charlie(), &transfer_ch, &[]); let subsistence = pallet_contracts::Pallet::::subsistence_threshold(); @@ -627,19 +656,17 @@ fn deploying_wasm_contract_should_work() { transfer_code, Vec::new(), Vec::new(), - ) + ), ), }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(1, 0))), - function: Call::Contracts( - pallet_contracts::Call::call::( - sp_runtime::MultiAddress::Id(addr.clone()), - 10, - 500_000_000, - vec![0x00, 0x01, 0x02, 0x03] - ) - ), + function: Call::Contracts(pallet_contracts::Call::call::( + sp_runtime::MultiAddress::Id(addr.clone()), + 10, + 500_000_000, + vec![0x00, 0x01, 0x02, 0x03], + )), }, ], (time / SLOT_DURATION).into(), @@ -647,20 +674,14 @@ 
fn deploying_wasm_contract_should_work() { let mut t = new_test_ext(compact_code_unwrap(), false); - executor_call:: _>( - &mut t, - "Core_execute_block", - &b.0, - false, - None, - ).0.unwrap(); + executor_call:: _>(&mut t, "Core_execute_block", &b.0, false, None) + .0 + .unwrap(); t.execute_with(|| { // Verify that the contract does exist by querying some of its storage items // It does not matter that the storage item itself does not exist. - assert!( - &pallet_contracts::Pallet::::get_storage(addr, Default::default()).is_ok() - ); + assert!(&pallet_contracts::Pallet::::get_storage(addr, Default::default()).is_ok()); }); } @@ -676,7 +697,8 @@ fn wasm_big_block_import_fails() { &block_with_size(42, 0, 120_000).0, false, None, - ).0; + ) + .0; assert!(result.is_err()); // Err(Wasmi(Trap(Trap { kind: Host(AllocatorOutOfSpace) }))) } @@ -690,7 +712,9 @@ fn native_big_block_import_succeeds() { &block_with_size(42, 0, 120_000).0, true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); } #[test] @@ -700,15 +724,15 @@ fn native_big_block_import_fails_on_fallback() { // We set the heap pages to 8 because we know that should give an OOM in WASM with the given block. set_heap_pages(&mut t.ext(), 8); - assert!( - executor_call:: _>( - &mut t, - "Core_execute_block", - &block_with_size(42, 0, 120_000).0, - false, - None, - ).0.is_err() - ); + assert!(executor_call:: _>( + &mut t, + "Core_execute_block", + &block_with_size(42, 0, 120_000).0, + false, + None, + ) + .0 + .is_err()); } #[test] @@ -718,8 +742,9 @@ fn panic_execution_gives_error() { >::hashed_key_for(alice()), AccountInfo::<::Index, _> { data: (0 * DOLLARS, 0u128, 0u128, 0u128), - .. 
Default::default() - }.encode(), + ..Default::default() + } + .encode(), ); t.insert(>::hashed_key().to_vec(), 0_u128.encode()); t.insert(>::hashed_key_for(0), vec![0u8; 32]); @@ -730,7 +755,8 @@ fn panic_execution_gives_error() { &vec![].and(&from_block_number(1u32)), false, None, - ).0; + ) + .0; assert!(r.is_ok()); let r = executor_call:: _>( &mut t, @@ -738,7 +764,10 @@ fn panic_execution_gives_error() { &vec![].and(&xt()), false, None, - ).0.unwrap().into_encoded(); + ) + .0 + .unwrap() + .into_encoded(); let r = ApplyExtrinsicResult::decode(&mut &r[..]).unwrap(); assert_eq!(r, Err(InvalidTransaction::Payment.into())); } @@ -750,19 +779,21 @@ fn successful_execution_gives_ok() { >::hashed_key_for(alice()), AccountInfo::<::Index, _> { data: (111 * DOLLARS, 0u128, 0u128, 0u128), - .. Default::default() - }.encode(), + ..Default::default() + } + .encode(), ); t.insert( >::hashed_key_for(bob()), AccountInfo::<::Index, _> { data: (0 * DOLLARS, 0u128, 0u128, 0u128), - .. Default::default() - }.encode(), + ..Default::default() + } + .encode(), ); t.insert( >::hashed_key().to_vec(), - (111 * DOLLARS).encode() + (111 * DOLLARS).encode(), ); t.insert(>::hashed_key_for(0), vec![0u8; 32]); @@ -772,7 +803,8 @@ fn successful_execution_gives_ok() { &vec![].and(&from_block_number(1u32)), false, None, - ).0; + ) + .0; assert!(r.is_ok()); t.execute_with(|| { assert_eq!(Balances::total_balance(&alice()), 111 * DOLLARS); @@ -786,7 +818,10 @@ fn successful_execution_gives_ok() { &vec![].and(&xt()), false, None, - ).0.unwrap().into_encoded(); + ) + .0 + .unwrap() + .into_encoded(); ApplyExtrinsicResult::decode(&mut &r[..]) .unwrap() .expect("Extrinsic could not be applied") @@ -811,7 +846,9 @@ fn full_native_block_import_works_with_changes_trie() { &block.encode(), true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); assert!(t.ext().storage_changes_root(&GENESIS_HASH).unwrap().is_some()); } @@ -827,7 +864,9 @@ fn full_wasm_block_import_works_with_changes_trie() { &block1.0, false, 
None, - ).0.unwrap(); + ) + .0 + .unwrap(); assert!(t.ext().storage_changes_root(&GENESIS_HASH).unwrap().is_some()); } @@ -835,8 +874,7 @@ fn full_wasm_block_import_works_with_changes_trie() { #[test] fn should_import_block_with_test_client() { use node_testing::client::{ - ClientBlockImportExt, TestClientBuilderExt, TestClientBuilder, - sp_consensus::BlockOrigin, + sp_consensus::BlockOrigin, ClientBlockImportExt, TestClientBuilder, TestClientBuilderExt, }; let mut client = TestClientBuilder::new().build(); diff --git a/bin/node/executor/tests/common.rs b/bin/node/executor/tests/common.rs index 635155b5d00b2..414b335406be8 100644 --- a/bin/node/executor/tests/common.rs +++ b/bin/node/executor/tests/common.rs @@ -15,34 +15,32 @@ // See the License for the specific language governing permissions and // limitations under the License. -use codec::{Encode, Decode}; -use frame_system::offchain::AppCrypto; +use codec::{Decode, Encode}; use frame_support::Hashable; -use sp_state_machine::TestExternalities as CoreTestExternalities; -use sp_consensus_babe::{BABE_ENGINE_ID, Slot, digests::{PreDigest, SecondaryPlainPreDigest}}; +use frame_system::offchain::AppCrypto; +use sc_executor::{error::Result, NativeExecutor, WasmExecutionMethod}; +use sp_consensus_babe::{ + digests::{PreDigest, SecondaryPlainPreDigest}, + Slot, BABE_ENGINE_ID, +}; use sp_core::{ - NeverNativeValue, NativeOrEncoded, crypto::KeyTypeId, sr25519::Signature, traits::{CodeExecutor, RuntimeCode}, + NativeOrEncoded, NeverNativeValue, }; use sp_runtime::{ - ApplyExtrinsicResult, - MultiSigner, - MultiSignature, - Digest, - DigestItem, - traits::{Header as HeaderT, BlakeTwo256}, + traits::{BlakeTwo256, Header as HeaderT}, + ApplyExtrinsicResult, Digest, DigestItem, MultiSignature, MultiSigner, }; -use sc_executor::{NativeExecutor, WasmExecutionMethod}; -use sc_executor::error::Result; +use sp_state_machine::TestExternalities as CoreTestExternalities; use node_executor::Executor; +use 
node_primitives::{BlockNumber, Hash}; use node_runtime::{ - Header, Block, UncheckedExtrinsic, CheckedExtrinsic, Runtime, BuildStorage, - constants::currency::*, + constants::currency::*, Block, BuildStorage, CheckedExtrinsic, Header, Runtime, + UncheckedExtrinsic, }; -use node_primitives::{Hash, BlockNumber}; use node_testing::keyring::*; use sp_externalities::Externalities; @@ -50,8 +48,8 @@ pub const TEST_KEY_TYPE_ID: KeyTypeId = KeyTypeId(*b"test"); pub mod sr25519 { mod app_sr25519 { - use sp_application_crypto::{app_crypto, sr25519}; use super::super::TEST_KEY_TYPE_ID; + use sp_application_crypto::{app_crypto, sr25519}; app_crypto!(sr25519, TEST_KEY_TYPE_ID); } @@ -72,8 +70,10 @@ impl AppCrypto for TestAuthorityId { /// as canonical. This is why `native_executor_instance` also uses the compact version of the /// runtime. pub fn compact_code_unwrap() -> &'static [u8] { - node_runtime::WASM_BINARY.expect("Development wasm binary is not available. \ - Testing is only supported with the flag disabled.") + node_runtime::WASM_BINARY.expect( + "Development wasm binary is not available. 
\ + Testing is only supported with the flag disabled.", + ) } pub const GENESIS_HASH: [u8; 32] = [69u8; 32]; @@ -101,8 +101,9 @@ pub fn executor() -> NativeExecutor { } pub fn executor_call< - R:Decode + Encode + PartialEq, - NC: FnOnce() -> std::result::Result> + std::panic::UnwindSafe + R: Decode + Encode + PartialEq, + NC: FnOnce() -> std::result::Result> + + std::panic::UnwindSafe, >( t: &mut TestExternalities, method: &str, @@ -120,20 +121,15 @@ pub fn executor_call< heap_pages: heap_pages.and_then(|hp| Decode::decode(&mut &hp[..]).ok()), }; - executor().call::( - &mut t, - &runtime_code, - method, - data, - use_native, - native_call, - ) + executor().call::(&mut t, &runtime_code, method, data, use_native, native_call) } pub fn new_test_ext(code: &[u8], support_changes_trie: bool) -> TestExternalities { let mut ext = TestExternalities::new_with_code( code, - node_testing::genesis::config(support_changes_trie, Some(code)).build_storage().unwrap(), + node_testing::genesis::config(support_changes_trie, Some(code)) + .build_storage() + .unwrap(), ); ext.changes_trie_storage().insert(0, GENESIS_HASH.into(), Default::default()); ext @@ -150,7 +146,7 @@ pub fn construct_block( extrinsics: Vec, babe_slot: Slot, ) -> (Vec, Hash) { - use sp_trie::{TrieConfiguration, trie_types::Layout}; + use sp_trie::{trie_types::Layout, TrieConfiguration}; // sign extrinsics. 
let extrinsics = extrinsics.into_iter().map(sign).collect::>(); @@ -167,15 +163,14 @@ pub fn construct_block( extrinsics_root, state_root: Default::default(), digest: Digest { - logs: vec![ - DigestItem::PreRuntime( - BABE_ENGINE_ID, - PreDigest::SecondaryPlain(SecondaryPlainPreDigest { - slot: babe_slot, - authority_index: 42, - }).encode() - ), - ], + logs: vec![DigestItem::PreRuntime( + BABE_ENGINE_ID, + PreDigest::SecondaryPlain(SecondaryPlainPreDigest { + slot: babe_slot, + authority_index: 42, + }) + .encode(), + )], }, }; @@ -186,7 +181,9 @@ pub fn construct_block( &header.encode(), true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); for extrinsic in extrinsics.iter() { // Try to apply the `extrinsic`. It should be valid, in the sense that it passes @@ -197,8 +194,13 @@ pub fn construct_block( &extrinsic.encode(), true, None, - ).0.expect("application of an extrinsic failed").into_encoded(); - match ApplyExtrinsicResult::decode(&mut &r[..]).expect("apply result deserialization failed") { + ) + .0 + .expect("application of an extrinsic failed") + .into_encoded(); + match ApplyExtrinsicResult::decode(&mut &r[..]) + .expect("apply result deserialization failed") + { Ok(_) => {}, Err(e) => panic!("Applying extrinsic failed: {:?}", e), } @@ -207,10 +209,13 @@ pub fn construct_block( let header = match executor_call:: _>( env, "BlockBuilder_finalize_block", - &[0u8;0], + &[0u8; 0], true, None, - ).0.unwrap() { + ) + .0 + .unwrap() + { NativeOrEncoded::Native(_) => unreachable!(), NativeOrEncoded::Encoded(h) => Header::decode(&mut &h[..]).unwrap(), }; diff --git a/bin/node/executor/tests/fees.rs b/bin/node/executor/tests/fees.rs index 5474adbd32a89..3bc9179da2b3d 100644 --- a/bin/node/executor/tests/fees.rs +++ b/bin/node/executor/tests/fees.rs @@ -18,20 +18,21 @@ use codec::{Encode, Joiner}; use frame_support::{ traits::Currency, - weights::{GetDispatchInfo, constants::ExtrinsicBaseWeight, IdentityFee, WeightToFeePolynomial}, + weights::{ + 
constants::ExtrinsicBaseWeight, GetDispatchInfo, IdentityFee, WeightToFeePolynomial, + }, }; -use sp_core::NeverNativeValue; -use sp_runtime::{Perbill, traits::One}; +use node_primitives::Balance; use node_runtime::{ - CheckedExtrinsic, Call, Runtime, Balances, TransactionPayment, Multiplier, - TransactionByteFee, - constants::{time::SLOT_DURATION, currency::*}, + constants::{currency::*, time::SLOT_DURATION}, + Balances, Call, CheckedExtrinsic, Multiplier, Runtime, TransactionByteFee, TransactionPayment, }; -use node_primitives::Balance; use node_testing::keyring::*; +use sp_core::NeverNativeValue; +use sp_runtime::{traits::One, Perbill}; pub mod common; -use self::common::{*, sign}; +use self::common::{sign, *}; #[test] fn fee_multiplier_increases_and_decreases_on_big_weight() { @@ -60,7 +61,7 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { CheckedExtrinsic { signed: Some((charlie(), signed_extra(0, 0))), function: Call::System(frame_system::Call::fill_block(Perbill::from_percent(60))), - } + }, ], (time1 / SLOT_DURATION).into(), ); @@ -79,7 +80,7 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { CheckedExtrinsic { signed: Some((charlie(), signed_extra(1, 0))), function: Call::System(frame_system::Call::remark(vec![0; 1])), - } + }, ], (time2 / SLOT_DURATION).into(), ); @@ -97,7 +98,9 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { &block1.0, true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); // weight multiplier is increased for next block. t.execute_with(|| { @@ -114,7 +117,9 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { &block2.0, true, None, - ).0.unwrap(); + ) + .0 + .unwrap(); // weight multiplier is increased for next block. 
t.execute_with(|| { @@ -131,7 +136,8 @@ fn new_account_info(free_dollars: u128) -> Vec { providers: 0, sufficients: 0, data: (free_dollars * DOLLARS, 0 * DOLLARS, 0 * DOLLARS, 0 * DOLLARS), - }.encode() + } + .encode() } #[test] @@ -148,7 +154,7 @@ fn transaction_fee_is_correct() { t.insert(>::hashed_key_for(bob()), new_account_info(10)); t.insert( >::hashed_key().to_vec(), - (110 * DOLLARS).encode() + (110 * DOLLARS).encode(), ); t.insert(>::hashed_key_for(0), vec![0u8; 32]); @@ -164,7 +170,8 @@ fn transaction_fee_is_correct() { &vec![].and(&from_block_number(1u32)), true, None, - ).0; + ) + .0; assert!(r.is_ok()); let r = executor_call:: _>( @@ -173,7 +180,8 @@ fn transaction_fee_is_correct() { &vec![].and(&xt.clone()), true, None, - ).0; + ) + .0; assert!(r.is_ok()); t.execute_with(|| { @@ -228,15 +236,20 @@ fn block_weight_capacity_report() { loop { let num_transfers = block_number * factor; - let mut xts = (0..num_transfers).map(|i| CheckedExtrinsic { - signed: Some((charlie(), signed_extra(nonce + i as Index, 0))), - function: Call::Balances(pallet_balances::Call::transfer(bob().into(), 0)), - }).collect::>(); - - xts.insert(0, CheckedExtrinsic { - signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time * 1000)), - }); + let mut xts = (0..num_transfers) + .map(|i| CheckedExtrinsic { + signed: Some((charlie(), signed_extra(nonce + i as Index, 0))), + function: Call::Balances(pallet_balances::Call::transfer(bob().into(), 0)), + }) + .collect::>(); + + xts.insert( + 0, + CheckedExtrinsic { + signed: None, + function: Call::Timestamp(pallet_timestamp::Call::set(time * 1000)), + }, + ); // NOTE: this is super slow. Can probably be improved. 
let block = construct_block( @@ -262,7 +275,8 @@ fn block_weight_capacity_report() { &block.0, true, None, - ).0; + ) + .0; println!(" || Result = {:?}", r); assert!(r.is_ok()); @@ -307,7 +321,11 @@ fn block_length_capacity_report() { }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(nonce, 0))), - function: Call::System(frame_system::Call::remark(vec![0u8; (block_number * factor) as usize])), + function: Call::System(frame_system::Call::remark(vec![ + 0u8; + (block_number * factor) + as usize + ])), }, ], (time * 1000 / SLOT_DURATION).into(), @@ -327,7 +345,8 @@ fn block_length_capacity_report() { &block.0, true, None, - ).0; + ) + .0; println!(" || Result = {:?}", r); assert!(r.is_ok()); diff --git a/bin/node/executor/tests/submit_transaction.rs b/bin/node/executor/tests/submit_transaction.rs index 590bdac4db757..c83e48c8c933b 100644 --- a/bin/node/executor/tests/submit_transaction.rs +++ b/bin/node/executor/tests/submit_transaction.rs @@ -15,26 +15,13 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use std::sync::Arc; -use node_runtime::{ - Executive, Indices, Runtime, UncheckedExtrinsic, -}; -use sp_application_crypto::AppKey; -use sp_core::{ - offchain::{ - TransactionPoolExt, - testing::TestTransactionPoolExt, - }, -}; -use sp_keystore::{KeystoreExt, SyncCryptoStore, testing::KeyStore}; -use frame_system::{ - offchain::{ - Signer, - SubmitTransaction, - SendSignedTransaction, - } -}; use codec::Decode; +use frame_system::offchain::{SendSignedTransaction, Signer, SubmitTransaction}; +use node_runtime::{Executive, Indices, Runtime, UncheckedExtrinsic}; +use sp_application_crypto::AppKey; +use sp_core::offchain::{testing::TestTransactionPoolExt, TransactionPoolExt}; +use sp_keystore::{testing::KeyStore, KeystoreExt, SyncCryptoStore}; +use std::sync::Arc; pub mod common; use self::common::*; @@ -56,8 +43,10 @@ fn should_submit_unsigned_transaction() { }; let call = pallet_im_online::Call::heartbeat(heartbeat_data, signature); - SubmitTransaction::>::submit_unsigned_transaction(call.into()) - .unwrap(); + SubmitTransaction::>::submit_unsigned_transaction( + call.into(), + ) + .unwrap(); assert_eq!(state.read().transactions.len(), 1) }); @@ -75,23 +64,26 @@ fn should_submit_signed_transaction() { SyncCryptoStore::sr25519_generate_new( &keystore, sr25519::AuthorityId::ID, - Some(&format!("{}/hunter1", PHRASE)) - ).unwrap(); + Some(&format!("{}/hunter1", PHRASE)), + ) + .unwrap(); SyncCryptoStore::sr25519_generate_new( &keystore, sr25519::AuthorityId::ID, - Some(&format!("{}/hunter2", PHRASE)) - ).unwrap(); + Some(&format!("{}/hunter2", PHRASE)), + ) + .unwrap(); SyncCryptoStore::sr25519_generate_new( &keystore, sr25519::AuthorityId::ID, - Some(&format!("{}/hunter3", PHRASE)) - ).unwrap(); + Some(&format!("{}/hunter3", PHRASE)), + ) + .unwrap(); t.register_extension(KeystoreExt(Arc::new(keystore))); t.execute_with(|| { - let results = Signer::::all_accounts() - .send_signed_transaction(|_| { + let results = + Signer::::all_accounts().send_signed_transaction(|_| { 
pallet_balances::Call::transfer(Default::default(), Default::default()) }); @@ -112,18 +104,20 @@ fn should_submit_signed_twice_from_the_same_account() { SyncCryptoStore::sr25519_generate_new( &keystore, sr25519::AuthorityId::ID, - Some(&format!("{}/hunter1", PHRASE)) - ).unwrap(); + Some(&format!("{}/hunter1", PHRASE)), + ) + .unwrap(); SyncCryptoStore::sr25519_generate_new( &keystore, sr25519::AuthorityId::ID, - Some(&format!("{}/hunter2", PHRASE)) - ).unwrap(); + Some(&format!("{}/hunter2", PHRASE)), + ) + .unwrap(); t.register_extension(KeystoreExt(Arc::new(keystore))); t.execute_with(|| { - let result = Signer::::any_account() - .send_signed_transaction(|_| { + let result = + Signer::::any_account().send_signed_transaction(|_| { pallet_balances::Call::transfer(Default::default(), Default::default()) }); @@ -131,8 +125,8 @@ fn should_submit_signed_twice_from_the_same_account() { assert_eq!(state.read().transactions.len(), 1); // submit another one from the same account. The nonce should be incremented. - let result = Signer::::any_account() - .send_signed_transaction(|_| { + let result = + Signer::::any_account().send_signed_transaction(|_| { pallet_balances::Call::transfer(Default::default(), Default::default()) }); @@ -147,10 +141,7 @@ fn should_submit_signed_twice_from_the_same_account() { } let nonce1 = nonce(UncheckedExtrinsic::decode(&mut &*s.transactions[0]).unwrap()); let nonce2 = nonce(UncheckedExtrinsic::decode(&mut &*s.transactions[1]).unwrap()); - assert!( - nonce1 != nonce2, - "Transactions should have different nonces. Got: {:?}", nonce1 - ); + assert!(nonce1 != nonce2, "Transactions should have different nonces. 
Got: {:?}", nonce1); }); } @@ -161,14 +152,12 @@ fn should_submit_signed_twice_from_all_accounts() { t.register_extension(TransactionPoolExt::new(pool)); let keystore = KeyStore::new(); - keystore.sr25519_generate_new( - sr25519::AuthorityId::ID, - Some(&format!("{}/hunter1", PHRASE)) - ).unwrap(); - keystore.sr25519_generate_new( - sr25519::AuthorityId::ID, - Some(&format!("{}/hunter2", PHRASE)) - ).unwrap(); + keystore + .sr25519_generate_new(sr25519::AuthorityId::ID, Some(&format!("{}/hunter1", PHRASE))) + .unwrap(); + keystore + .sr25519_generate_new(sr25519::AuthorityId::ID, Some(&format!("{}/hunter2", PHRASE))) + .unwrap(); t.register_extension(KeystoreExt(Arc::new(keystore))); t.execute_with(|| { @@ -217,8 +206,10 @@ fn should_submit_signed_twice_from_all_accounts() { #[test] fn submitted_transaction_should_be_valid() { use codec::Encode; - use sp_runtime::transaction_validity::{TransactionSource, TransactionTag}; - use sp_runtime::traits::StaticLookup; + use sp_runtime::{ + traits::StaticLookup, + transaction_validity::{TransactionSource, TransactionTag}, + }; let mut t = new_test_ext(compact_code_unwrap(), false); let (pool, state) = TestTransactionPoolExt::new(); @@ -227,13 +218,15 @@ fn submitted_transaction_should_be_valid() { let keystore = KeyStore::new(); SyncCryptoStore::sr25519_generate_new( &keystore, - sr25519::AuthorityId::ID, Some(&format!("{}/hunter1", PHRASE)) - ).unwrap(); + sr25519::AuthorityId::ID, + Some(&format!("{}/hunter1", PHRASE)), + ) + .unwrap(); t.register_extension(KeystoreExt(Arc::new(keystore))); t.execute_with(|| { - let results = Signer::::all_accounts() - .send_signed_transaction(|_| { + let results = + Signer::::all_accounts().send_signed_transaction(|_| { pallet_balances::Call::transfer(Default::default(), Default::default()) }); let len = results.len(); @@ -252,7 +245,7 @@ fn submitted_transaction_should_be_valid() { let author = extrinsic.signature.clone().unwrap().0; let address = Indices::lookup(author).unwrap(); let 
data = pallet_balances::AccountData { free: 5_000_000_000_000, ..Default::default() }; - let account = frame_system::AccountInfo { data, .. Default::default() }; + let account = frame_system::AccountInfo { data, ..Default::default() }; >::insert(&address, account); // check validity @@ -260,7 +253,8 @@ fn submitted_transaction_should_be_valid() { source, extrinsic, frame_system::BlockHash::::get(0), - ).unwrap(); + ) + .unwrap(); // We ignore res.priority since this number can change based on updates to weights and such. assert_eq!(res.requires, Vec::::new()); diff --git a/bin/node/inspect/src/cli.rs b/bin/node/inspect/src/cli.rs index abdbedc296d02..c054fedaf57c4 100644 --- a/bin/node/inspect/src/cli.rs +++ b/bin/node/inspect/src/cli.rs @@ -18,8 +18,8 @@ //! Structs to easily compose inspect sub-command for CLI. -use std::fmt::Debug; use sc_cli::{ImportParams, SharedParams}; +use std::fmt::Debug; use structopt::StructOpt; /// The `inspect` command used to print decoded chain data. diff --git a/bin/node/inspect/src/command.rs b/bin/node/inspect/src/command.rs index 9c14a71375f5f..a2c63d684bf96 100644 --- a/bin/node/inspect/src/command.rs +++ b/bin/node/inspect/src/command.rs @@ -18,8 +18,10 @@ //! 
Command ran by the CLI -use crate::cli::{InspectCmd, InspectSubCmd}; -use crate::Inspector; +use crate::{ + cli::{InspectCmd, InspectSubCmd}, + Inspector, +}; use sc_cli::{CliConfiguration, ImportParams, Result, SharedParams}; use sc_service::{new_full_client, Configuration, NativeExecutionDispatch}; use sp_runtime::traits::Block; @@ -43,13 +45,13 @@ impl InspectCmd { let res = inspect.block(input).map_err(|e| format!("{}", e))?; println!("{}", res); Ok(()) - } + }, InspectSubCmd::Extrinsic { input } => { let input = input.parse()?; let res = inspect.extrinsic(input).map_err(|e| format!("{}", e))?; println!("{}", res); Ok(()) - } + }, } } } diff --git a/bin/node/inspect/src/lib.rs b/bin/node/inspect/src/lib.rs index 3abb9e9ff41e7..30e7250ea2c6c 100644 --- a/bin/node/inspect/src/lib.rs +++ b/bin/node/inspect/src/lib.rs @@ -27,33 +27,27 @@ pub mod cli; pub mod command; -use std::{ - fmt, - fmt::Debug, - marker::PhantomData, - str::FromStr, -}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use sc_client_api::BlockBackend; use sp_blockchain::HeaderBackend; use sp_core::hexdisplay::HexDisplay; use sp_runtime::{ generic::BlockId, - traits::{Block, HashFor, NumberFor, Hash} + traits::{Block, Hash, HashFor, NumberFor}, }; +use std::{fmt, fmt::Debug, marker::PhantomData, str::FromStr}; /// A helper type for a generic block input. -pub type BlockAddressFor = BlockAddress< - as Hash>::Output, - NumberFor ->; +pub type BlockAddressFor = + BlockAddress< as Hash>::Output, NumberFor>; /// A Pretty formatter implementation. pub trait PrettyPrinter { /// Nicely format block. fn fmt_block(&self, fmt: &mut fmt::Formatter, block: &TBlock) -> fmt::Result; /// Nicely format extrinsic. - fn fmt_extrinsic(&self, fmt: &mut fmt::Formatter, extrinsic: &TBlock::Extrinsic) -> fmt::Result; + fn fmt_extrinsic(&self, fmt: &mut fmt::Formatter, extrinsic: &TBlock::Extrinsic) + -> fmt::Result; } /// Default dummy debug printer. 
@@ -72,7 +66,11 @@ impl PrettyPrinter for DebugPrinter { Ok(()) } - fn fmt_extrinsic(&self, fmt: &mut fmt::Formatter, extrinsic: &TBlock::Extrinsic) -> fmt::Result { + fn fmt_extrinsic( + &self, + fmt: &mut fmt::Formatter, + extrinsic: &TBlock::Extrinsic, + ) -> fmt::Result { writeln!(fmt, " {:#?}", extrinsic)?; writeln!(fmt, " Bytes: {:?}", HexDisplay::from(&extrinsic.encode()))?; Ok(()) @@ -101,15 +99,14 @@ impl std::error::Error for Error { } /// A helper trait to access block headers and bodies. -pub trait ChainAccess: - HeaderBackend + - BlockBackend -{} +pub trait ChainAccess: HeaderBackend + BlockBackend {} -impl ChainAccess for T where +impl ChainAccess for T +where TBlock: Block, T: sp_blockchain::HeaderBackend + sc_client_api::BlockBackend, -{} +{ +} /// Blockchain inspector. pub struct Inspector = DebugPrinter> { @@ -120,22 +117,16 @@ pub struct Inspector = DebugPrint impl> Inspector { /// Create new instance of the inspector with default printer. - pub fn new( - chain: impl ChainAccess + 'static, - ) -> Self where TPrinter: Default { + pub fn new(chain: impl ChainAccess + 'static) -> Self + where + TPrinter: Default, + { Self::with_printer(chain, Default::default()) } /// Customize pretty-printing of the data. - pub fn with_printer( - chain: impl ChainAccess + 'static, - printer: TPrinter, - ) -> Self { - Inspector { - chain: Box::new(chain) as _, - printer, - _block: Default::default(), - } + pub fn with_printer(chain: impl ChainAccess + 'static, printer: TPrinter) -> Self { + Inspector { chain: Box::new(chain) as _, printer, _block: Default::default() } } /// Get a pretty-printed block. @@ -153,25 +144,27 @@ impl> Inspector fn get_block(&self, input: BlockAddressFor) -> Result { Ok(match input { - BlockAddress::Bytes(bytes) => { - TBlock::decode(&mut &*bytes)? 
- }, + BlockAddress::Bytes(bytes) => TBlock::decode(&mut &*bytes)?, BlockAddress::Number(number) => { let id = BlockId::number(number); let not_found = format!("Could not find block {:?}", id); - let body = self.chain.block_body(&id)? - .ok_or_else(|| Error::NotFound(not_found.clone()))?; - let header = self.chain.header(id)? + let body = self + .chain + .block_body(&id)? .ok_or_else(|| Error::NotFound(not_found.clone()))?; + let header = + self.chain.header(id)?.ok_or_else(|| Error::NotFound(not_found.clone()))?; TBlock::new(header, body) }, BlockAddress::Hash(hash) => { let id = BlockId::hash(hash); let not_found = format!("Could not find block {:?}", id); - let body = self.chain.block_body(&id)? - .ok_or_else(|| Error::NotFound(not_found.clone()))?; - let header = self.chain.header(id)? + let body = self + .chain + .block_body(&id)? .ok_or_else(|| Error::NotFound(not_found.clone()))?; + let header = + self.chain.header(id)?.ok_or_else(|| Error::NotFound(not_found.clone()))?; TBlock::new(header, body) }, }) @@ -192,16 +185,14 @@ impl> Inspector let ext = match input { ExtrinsicAddress::Block(block, index) => { let block = self.get_block(block)?; - block.extrinsics() - .get(index) - .cloned() - .ok_or_else(|| Error::NotFound(format!( - "Could not find extrinsic {} in block {:?}", index, block - )))? + block.extrinsics().get(index).cloned().ok_or_else(|| { + Error::NotFound(format!( + "Could not find extrinsic {} in block {:?}", + index, block + )) + })? }, - ExtrinsicAddress::Bytes(bytes) => { - TBlock::Extrinsic::decode(&mut &*bytes)? - } + ExtrinsicAddress::Bytes(bytes) => TBlock::Extrinsic::decode(&mut &*bytes)?, }; Ok(format!("{}", ExtrinsicPrinter(ext, &self.printer))) @@ -234,12 +225,12 @@ impl FromStr for BlockAddress { } // then assume it's bytes (hex-encoded) - sp_core::bytes::from_hex(s) - .map(Self::Bytes) - .map_err(|e| format!( + sp_core::bytes::from_hex(s).map(Self::Bytes).map_err(|e| { + format!( "Given string does not look like hash or number. 
It could not be parsed as bytes either: {}", e - )) + ) + }) } } @@ -263,11 +254,13 @@ impl FromStr for ExtrinsicAddres // split by a bunch of different characters let mut it = s.split(|c| c == '.' || c == ':' || c == ' '); - let block = it.next() + let block = it + .next() .expect("First element of split iterator is never empty; qed") .parse()?; - let index = it.next() + let index = it + .next() .ok_or_else(|| format!("Extrinsic index missing: example \"5:0\""))? .parse() .map_err(|e| format!("Invalid index format: {}", e))?; @@ -290,10 +283,10 @@ mod tests { let b2 = BlockAddress::from_str("0"); let b3 = BlockAddress::from_str("0x0012345f"); - - assert_eq!(b0, Ok(BlockAddress::Hash( - "3BfC20f0B9aFcAcE800D73D2191166FF16540258".parse().unwrap() - ))); + assert_eq!( + b0, + Ok(BlockAddress::Hash("3BfC20f0B9aFcAcE800D73D2191166FF16540258".parse().unwrap())) + ); assert_eq!(b1, Ok(BlockAddress::Number(1234))); assert_eq!(b2, Ok(BlockAddress::Number(0))); assert_eq!(b3, Ok(BlockAddress::Bytes(vec![0, 0x12, 0x34, 0x5f]))); @@ -310,20 +303,16 @@ mod tests { let b2 = ExtrinsicAddress::from_str("0 0"); let b3 = ExtrinsicAddress::from_str("0x0012345f"); - assert_eq!(e0, Err("Extrinsic index missing: example \"5:0\"".into())); - assert_eq!(b0, Ok(ExtrinsicAddress::Block( - BlockAddress::Hash("3BfC20f0B9aFcAcE800D73D2191166FF16540258".parse().unwrap()), - 5 - ))); - assert_eq!(b1, Ok(ExtrinsicAddress::Block( - BlockAddress::Number(1234), - 0 - ))); - assert_eq!(b2, Ok(ExtrinsicAddress::Block( - BlockAddress::Number(0), - 0 - ))); + assert_eq!( + b0, + Ok(ExtrinsicAddress::Block( + BlockAddress::Hash("3BfC20f0B9aFcAcE800D73D2191166FF16540258".parse().unwrap()), + 5 + )) + ); + assert_eq!(b1, Ok(ExtrinsicAddress::Block(BlockAddress::Number(1234), 0))); + assert_eq!(b2, Ok(ExtrinsicAddress::Block(BlockAddress::Number(0), 0))); assert_eq!(b3, Ok(ExtrinsicAddress::Bytes(vec![0, 0x12, 0x34, 0x5f]))); } } diff --git a/bin/node/primitives/src/lib.rs b/bin/node/primitives/src/lib.rs 
index 9470adc399f96..dade598c704d2 100644 --- a/bin/node/primitives/src/lib.rs +++ b/bin/node/primitives/src/lib.rs @@ -18,11 +18,12 @@ //! Low-level types used throughout the Substrate code. #![warn(missing_docs)] - #![cfg_attr(not(feature = "std"), no_std)] use sp_runtime::{ - generic, traits::{Verify, BlakeTwo256, IdentifyAccount}, OpaqueExtrinsic, MultiSignature + generic, + traits::{BlakeTwo256, IdentifyAccount, Verify}, + MultiSignature, OpaqueExtrinsic, }; /// An index to a block. diff --git a/bin/node/rpc-client/src/main.rs b/bin/node/rpc-client/src/main.rs index ddd8a50ad36e4..46e700a73911b 100644 --- a/bin/node/rpc-client/src/main.rs +++ b/bin/node/rpc-client/src/main.rs @@ -24,15 +24,9 @@ use futures::Future; use hyper::rt; +use jsonrpc_core_client::{transports::http, RpcError}; use node_primitives::Hash; -use sc_rpc::author::{ - AuthorClient, - hash::ExtrinsicOrHash, -}; -use jsonrpc_core_client::{ - transports::http, - RpcError, -}; +use sc_rpc::author::{hash::ExtrinsicOrHash, AuthorClient}; fn main() { sp_tracing::try_init_simple(); @@ -41,9 +35,7 @@ fn main() { let uri = "http://localhost:9933"; http::connect(uri) - .and_then(|client: AuthorClient| { - remove_all_extrinsics(client) - }) + .and_then(|client: AuthorClient| remove_all_extrinsics(client)) .map_err(|e| { println!("Error: {:?}", e); }) @@ -58,11 +50,14 @@ fn main() { /// /// As the result of running the code the entire content of the transaction pool is going /// to be removed and the extrinsics are going to be temporarily banned. 
-fn remove_all_extrinsics(client: AuthorClient) -> impl Future { - client.pending_extrinsics() +fn remove_all_extrinsics( + client: AuthorClient, +) -> impl Future { + client + .pending_extrinsics() .and_then(move |pending| { client.remove_extrinsic( - pending.into_iter().map(|tx| ExtrinsicOrHash::Extrinsic(tx.into())).collect() + pending.into_iter().map(|tx| ExtrinsicOrHash::Extrinsic(tx.into())).collect(), ) }) .map(|removed| { diff --git a/bin/node/rpc/src/lib.rs b/bin/node/rpc/src/lib.rs index ba17bf7d2c50d..1b326eda6c196 100644 --- a/bin/node/rpc/src/lib.rs +++ b/bin/node/rpc/src/lib.rs @@ -32,24 +32,24 @@ use std::sync::Arc; -use sp_keystore::SyncCryptoStorePtr; -use node_primitives::{Block, BlockNumber, AccountId, Index, Balance, Hash}; +use node_primitives::{AccountId, Balance, Block, BlockNumber, Hash, Index}; +use sc_client_api::AuxStore; use sc_consensus_babe::{Config, Epoch}; use sc_consensus_babe_rpc::BabeRpcHandler; use sc_consensus_epochs::SharedEpochChanges; use sc_finality_grandpa::{ - SharedVoterState, SharedAuthoritySet, FinalityProofProvider, GrandpaJustificationStream + FinalityProofProvider, GrandpaJustificationStream, SharedAuthoritySet, SharedVoterState, }; use sc_finality_grandpa_rpc::GrandpaRpcHandler; +use sc_rpc::SubscriptionTaskExecutor; pub use sc_rpc_api::DenyUnsafe; +use sc_transaction_pool_api::TransactionPool; use sp_api::ProvideRuntimeApi; use sp_block_builder::BlockBuilder; -use sp_blockchain::{Error as BlockChainError, HeaderMetadata, HeaderBackend}; +use sp_blockchain::{Error as BlockChainError, HeaderBackend, HeaderMetadata}; use sp_consensus::SelectChain; use sp_consensus_babe::BabeApi; -use sc_rpc::SubscriptionTaskExecutor; -use sc_transaction_pool_api::TransactionPool; -use sc_client_api::AuxStore; +use sp_keystore::SyncCryptoStorePtr; /// Light client extra dependencies. pub struct LightDeps { @@ -111,9 +111,15 @@ pub type IoHandler = jsonrpc_core::IoHandler; /// Instantiate all Full RPC extensions. 
pub fn create_full( deps: FullDeps, -) -> jsonrpc_core::IoHandler where - C: ProvideRuntimeApi + HeaderBackend + AuxStore + - HeaderMetadata + Sync + Send + 'static, +) -> jsonrpc_core::IoHandler +where + C: ProvideRuntimeApi + + HeaderBackend + + AuxStore + + HeaderMetadata + + Sync + + Send + + 'static, C::Api: substrate_frame_rpc_system::AccountNonceApi, C::Api: pallet_contracts_rpc::ContractsRuntimeApi, C::Api: pallet_mmr_rpc::MmrRuntimeApi::Hash>, @@ -121,31 +127,19 @@ pub fn create_full( C::Api: BabeApi, C::Api: BlockBuilder, P: TransactionPool + 'static, - SC: SelectChain +'static, + SC: SelectChain + 'static, B: sc_client_api::Backend + Send + Sync + 'static, B::State: sc_client_api::backend::StateBackend>, { - use substrate_frame_rpc_system::{FullSystem, SystemApi}; use pallet_contracts_rpc::{Contracts, ContractsApi}; - use pallet_mmr_rpc::{MmrApi, Mmr}; + use pallet_mmr_rpc::{Mmr, MmrApi}; use pallet_transaction_payment_rpc::{TransactionPayment, TransactionPaymentApi}; + use substrate_frame_rpc_system::{FullSystem, SystemApi}; let mut io = jsonrpc_core::IoHandler::default(); - let FullDeps { - client, - pool, - select_chain, - chain_spec, - deny_unsafe, - babe, - grandpa, - } = deps; + let FullDeps { client, pool, select_chain, chain_spec, deny_unsafe, babe, grandpa } = deps; - let BabeDeps { - keystore, - babe_config, - shared_epoch_changes, - } = babe; + let BabeDeps { keystore, babe_config, shared_epoch_changes } = babe; let GrandpaDeps { shared_voter_state, shared_authority_set, @@ -154,64 +148,45 @@ pub fn create_full( finality_provider, } = grandpa; - io.extend_with( - SystemApi::to_delegate(FullSystem::new(client.clone(), pool, deny_unsafe)) - ); + io.extend_with(SystemApi::to_delegate(FullSystem::new(client.clone(), pool, deny_unsafe))); // Making synchronous calls in light client freezes the browser currently, // more context: https://github.com/paritytech/substrate/pull/3480 // These RPCs should use an asynchronous caller instead. 
- io.extend_with( - ContractsApi::to_delegate(Contracts::new(client.clone())) - ); - io.extend_with( - MmrApi::to_delegate(Mmr::new(client.clone())) - ); - io.extend_with( - TransactionPaymentApi::to_delegate(TransactionPayment::new(client.clone())) - ); - io.extend_with( - sc_consensus_babe_rpc::BabeApi::to_delegate( - BabeRpcHandler::new( - client.clone(), - shared_epoch_changes.clone(), - keystore, - babe_config, - select_chain, - deny_unsafe, - ), - ) - ); - io.extend_with( - sc_finality_grandpa_rpc::GrandpaApi::to_delegate( - GrandpaRpcHandler::new( - shared_authority_set.clone(), - shared_voter_state, - justification_stream, - subscription_executor, - finality_provider, - ) - ) - ); - - io.extend_with( - sc_sync_state_rpc::SyncStateRpcApi::to_delegate( - sc_sync_state_rpc::SyncStateRpcHandler::new( - chain_spec, - client, - shared_authority_set, - shared_epoch_changes, - deny_unsafe, - ) - ) - ); + io.extend_with(ContractsApi::to_delegate(Contracts::new(client.clone()))); + io.extend_with(MmrApi::to_delegate(Mmr::new(client.clone()))); + io.extend_with(TransactionPaymentApi::to_delegate(TransactionPayment::new(client.clone()))); + io.extend_with(sc_consensus_babe_rpc::BabeApi::to_delegate(BabeRpcHandler::new( + client.clone(), + shared_epoch_changes.clone(), + keystore, + babe_config, + select_chain, + deny_unsafe, + ))); + io.extend_with(sc_finality_grandpa_rpc::GrandpaApi::to_delegate(GrandpaRpcHandler::new( + shared_authority_set.clone(), + shared_voter_state, + justification_stream, + subscription_executor, + finality_provider, + ))); + + io.extend_with(sc_sync_state_rpc::SyncStateRpcApi::to_delegate( + sc_sync_state_rpc::SyncStateRpcHandler::new( + chain_spec, + client, + shared_authority_set, + shared_epoch_changes, + deny_unsafe, + ), + )); io } /// Instantiate all Light RPC extensions. 
-pub fn create_light( - deps: LightDeps, -) -> jsonrpc_core::IoHandler where +pub fn create_light(deps: LightDeps) -> jsonrpc_core::IoHandler +where C: sp_blockchain::HeaderBackend, C: Send + Sync + 'static, F: sc_client_api::light::Fetcher + 'static, @@ -220,16 +195,14 @@ pub fn create_light( { use substrate_frame_rpc_system::{LightSystem, SystemApi}; - let LightDeps { + let LightDeps { client, pool, remote_blockchain, fetcher } = deps; + let mut io = jsonrpc_core::IoHandler::default(); + io.extend_with(SystemApi::::to_delegate(LightSystem::new( client, - pool, remote_blockchain, - fetcher - } = deps; - let mut io = jsonrpc_core::IoHandler::default(); - io.extend_with( - SystemApi::::to_delegate(LightSystem::new(client, remote_blockchain, fetcher, pool)) - ); + fetcher, + pool, + ))); io } diff --git a/bin/node/runtime/src/constants.rs b/bin/node/runtime/src/constants.rs index 2f6ad002a9283..7533025a70b00 100644 --- a/bin/node/runtime/src/constants.rs +++ b/bin/node/runtime/src/constants.rs @@ -22,7 +22,7 @@ pub mod currency { use node_primitives::Balance; pub const MILLICENTS: Balance = 1_000_000_000; - pub const CENTS: Balance = 1_000 * MILLICENTS; // assume this is worth about a cent. + pub const CENTS: Balance = 1_000 * MILLICENTS; // assume this is worth about a cent. pub const DOLLARS: Balance = 100 * CENTS; pub const fn deposit(items: u32, bytes: u32) -> Balance { @@ -32,7 +32,7 @@ pub mod currency { /// Time. pub mod time { - use node_primitives::{Moment, BlockNumber}; + use node_primitives::{BlockNumber, Moment}; /// Since BABE is probabilistic this is the average expected block time that /// we are targeting. Blocks will be produced at a minimum duration defined diff --git a/bin/node/runtime/src/impls.rs b/bin/node/runtime/src/impls.rs index d3d0541b6ec0e..e315a45e698ce 100644 --- a/bin/node/runtime/src/impls.rs +++ b/bin/node/runtime/src/impls.rs @@ -17,8 +17,8 @@ //! Some configurable implementations as associated type for the substrate runtime. 
-use frame_support::traits::{OnUnbalanced, Currency}; -use crate::{Balances, Authorship, NegativeImbalance}; +use crate::{Authorship, Balances, NegativeImbalance}; +use frame_support::traits::{Currency, OnUnbalanced}; pub struct Author; impl OnUnbalanced for Author { @@ -29,19 +29,24 @@ impl OnUnbalanced for Author { #[cfg(test)] mod multiplier_tests { - use sp_runtime::{assert_eq_error_rate, FixedPointNumber, traits::{Convert, One, Zero}}; use pallet_transaction_payment::{Multiplier, TargetedFeeAdjustment}; + use sp_runtime::{ + assert_eq_error_rate, + traits::{Convert, One, Zero}, + FixedPointNumber, + }; use crate::{ constants::{currency::*, time::*}, - TransactionPayment, Runtime, TargetBlockFullness, - AdjustmentVariable, System, MinimumMultiplier, - RuntimeBlockWeights as BlockWeights, + AdjustmentVariable, MinimumMultiplier, Runtime, RuntimeBlockWeights as BlockWeights, + System, TargetBlockFullness, TransactionPayment, }; - use frame_support::weights::{Weight, WeightToFeePolynomial, DispatchClass}; + use frame_support::weights::{DispatchClass, Weight, WeightToFeePolynomial}; fn max_normal() -> Weight { - BlockWeights::get().get(DispatchClass::Normal).max_total + BlockWeights::get() + .get(DispatchClass::Normal) + .max_total .unwrap_or_else(|| BlockWeights::get().max_block) } @@ -64,7 +69,7 @@ mod multiplier_tests { } // update based on reference impl. - fn truth_value_update(block_weight: Weight, previous: Multiplier) -> Multiplier { + fn truth_value_update(block_weight: Weight, previous: Multiplier) -> Multiplier { let accuracy = Multiplier::accuracy() as f64; let previous_float = previous.into_inner() as f64 / accuracy; // bump if it is zero. 
@@ -81,15 +86,20 @@ mod multiplier_tests { // Current saturation in terms of weight let s = block_weight; - let t1 = v * (s/m - ss/m); - let t2 = v.powi(2) * (s/m - ss/m).powi(2) / 2.0; + let t1 = v * (s / m - ss / m); + let t2 = v.powi(2) * (s / m - ss / m).powi(2) / 2.0; let next_float = previous_float * (1.0 + t1 + t2); Multiplier::from_float(next_float) } - fn run_with_system_weight(w: Weight, assertions: F) where F: Fn() -> () { - let mut t: sp_io::TestExternalities = - frame_system::GenesisConfig::default().build_storage::().unwrap().into(); + fn run_with_system_weight(w: Weight, assertions: F) + where + F: Fn() -> (), + { + let mut t: sp_io::TestExternalities = frame_system::GenesisConfig::default() + .build_storage::() + .unwrap() + .into(); t.execute_with(|| { System::set_block_consumed_resources(w, 0); assertions() @@ -157,7 +167,9 @@ mod multiplier_tests { loop { let next = runtime_multiplier_update(fm); fm = next; - if fm == min_multiplier() { break; } + if fm == min_multiplier() { + break + } iterations += 1; } assert!(iterations > 533_333); @@ -198,7 +210,9 @@ mod multiplier_tests { loop { let next = runtime_multiplier_update(fm); // if no change, panic. This should never happen in this case. - if fm == next { panic!("The fee should ever increase"); } + if fm == next { + panic!("The fee should ever increase"); + } fm = next; iterations += 1; let fee = @@ -225,7 +239,7 @@ mod multiplier_tests { let next = runtime_multiplier_update(fm); assert_eq_error_rate!( next, - truth_value_update(target() / 4 , fm), + truth_value_update(target() / 4, fm), Multiplier::from_inner(100), ); @@ -237,12 +251,11 @@ mod multiplier_tests { let next = runtime_multiplier_update(fm); assert_eq_error_rate!( next, - truth_value_update(target() / 2 , fm), + truth_value_update(target() / 2, fm), Multiplier::from_inner(100), ); // Light block. Multiplier is reduced a little. 
assert!(next < fm); - }); run_with_system_weight(target(), || { let next = runtime_multiplier_update(fm); @@ -259,7 +272,7 @@ mod multiplier_tests { let next = runtime_multiplier_update(fm); assert_eq_error_rate!( next, - truth_value_update(target() * 2 , fm), + truth_value_update(target() * 2, fm), Multiplier::from_inner(100), ); @@ -326,28 +339,24 @@ mod multiplier_tests { BlockWeights::get().max_block, Weight::max_value() / 2, Weight::max_value(), - ].into_iter().for_each(|i| { + ] + .into_iter() + .for_each(|i| { run_with_system_weight(i, || { let next = runtime_multiplier_update(Multiplier::one()); let truth = truth_value_update(i, Multiplier::one()); - assert_eq_error_rate!( - truth, - next, - Multiplier::from_inner(50_000_000) - ); + assert_eq_error_rate!(truth, next, Multiplier::from_inner(50_000_000)); }); }); // Some values that are all above the target and will cause an increase. let t = target(); - vec![t + 100, t * 2, t * 4] - .into_iter() - .for_each(|i| { - run_with_system_weight(i, || { - let fm = runtime_multiplier_update(max_fm); - // won't grow. The convert saturates everything. - assert_eq!(fm, max_fm); - }) - }); + vec![t + 100, t * 2, t * 4].into_iter().for_each(|i| { + run_with_system_weight(i, || { + let fm = runtime_multiplier_update(max_fm); + // won't grow. The convert saturates everything. + assert_eq!(fm, max_fm); + }) + }); } } diff --git a/bin/node/runtime/src/lib.rs b/bin/node/runtime/src/lib.rs index c27af62c55d74..cf636b6c31455 100644 --- a/bin/node/runtime/src/lib.rs +++ b/bin/node/runtime/src/lib.rs @@ -22,67 +22,67 @@ // `construct_runtime!` does a lot of recursion and requires us to increase the limit to 256. 
#![recursion_limit = "256"] - -use sp_std::prelude::*; +use codec::{Decode, Encode, MaxEncodedLen}; use frame_support::{ - construct_runtime, parameter_types, RuntimeDebug, + construct_runtime, parameter_types, + traits::{ + AllowAll, Currency, DenyAll, Imbalance, InstanceFilter, KeyOwnerProofSystem, + LockIdentifier, OnUnbalanced, U128CurrencyToVote, + }, weights::{ - Weight, IdentityFee, constants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight, WEIGHT_PER_SECOND}, - DispatchClass, - }, - traits::{ - Currency, Imbalance, KeyOwnerProofSystem, OnUnbalanced, LockIdentifier, - U128CurrencyToVote, AllowAll, DenyAll, + DispatchClass, IdentityFee, Weight, }, + PalletId, RuntimeDebug, }; use frame_system::{ - EnsureRoot, EnsureOneOf, - limits::{BlockWeights, BlockLength} + limits::{BlockLength, BlockWeights}, + EnsureOneOf, EnsureRoot, +}; +pub use node_primitives::{AccountId, Signature}; +use node_primitives::{AccountIndex, Balance, BlockNumber, Hash, Index, Moment}; +use pallet_contracts::weights::WeightInfo; +use pallet_election_provider_multi_phase::FallbackStrategy; +use pallet_grandpa::{ + fg_primitives, AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList, }; -use frame_support::{traits::InstanceFilter, PalletId}; -use codec::{Encode, Decode, MaxEncodedLen}; +use pallet_im_online::sr25519::AuthorityId as ImOnlineId; +use pallet_session::historical as pallet_session_historical; +pub use pallet_transaction_payment::{CurrencyAdapter, Multiplier, TargetedFeeAdjustment}; +use pallet_transaction_payment::{FeeDetails, RuntimeDispatchInfo}; +use sp_api::impl_runtime_apis; +use sp_authority_discovery::AuthorityId as AuthorityDiscoveryId; use sp_core::{ crypto::KeyTypeId, u32_trait::{_1, _2, _3, _4, _5}, OpaqueMetadata, }; -pub use node_primitives::{AccountId, Signature}; -use node_primitives::{AccountIndex, Balance, BlockNumber, Hash, Index, Moment}; -use sp_api::impl_runtime_apis; +use sp_inherents::{CheckInherentsResult, InherentData}; use 
sp_runtime::{ - Permill, Perbill, Perquintill, Percent, ApplyExtrinsicResult, impl_opaque_keys, generic, - create_runtime_str, FixedPointNumber, -}; -use sp_runtime::curve::PiecewiseLinear; -use sp_runtime::transaction_validity::{TransactionValidity, TransactionSource, TransactionPriority}; -use sp_runtime::traits::{ - self, BlakeTwo256, Block as BlockT, StaticLookup, SaturatedConversion, ConvertInto, OpaqueKeys, - NumberFor, + create_runtime_str, + curve::PiecewiseLinear, + generic, impl_opaque_keys, + traits::{ + self, BlakeTwo256, Block as BlockT, ConvertInto, NumberFor, OpaqueKeys, + SaturatedConversion, StaticLookup, + }, + transaction_validity::{TransactionPriority, TransactionSource, TransactionValidity}, + ApplyExtrinsicResult, FixedPointNumber, Perbill, Percent, Permill, Perquintill, }; -use sp_version::RuntimeVersion; +use sp_std::prelude::*; #[cfg(any(feature = "std", test))] use sp_version::NativeVersion; -use pallet_grandpa::{AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList}; -use pallet_grandpa::fg_primitives; -use pallet_im_online::sr25519::AuthorityId as ImOnlineId; -use sp_authority_discovery::AuthorityId as AuthorityDiscoveryId; -use pallet_transaction_payment::{FeeDetails, RuntimeDispatchInfo}; -pub use pallet_transaction_payment::{Multiplier, TargetedFeeAdjustment, CurrencyAdapter}; -use pallet_session::{historical as pallet_session_historical}; -use sp_inherents::{InherentData, CheckInherentsResult}; +use sp_version::RuntimeVersion; use static_assertions::const_assert; -use pallet_contracts::weights::WeightInfo; -use pallet_election_provider_multi_phase::FallbackStrategy; #[cfg(any(feature = "std", test))] -pub use sp_runtime::BuildStorage; +pub use frame_system::Call as SystemCall; #[cfg(any(feature = "std", test))] pub use pallet_balances::Call as BalancesCall; #[cfg(any(feature = "std", test))] -pub use frame_system::Call as SystemCall; -#[cfg(any(feature = "std", test))] pub use pallet_staking::StakerStatus; 
+#[cfg(any(feature = "std", test))] +pub use sp_runtime::BuildStorage; /// Implementations of some helper traits passed into runtime modules as associated types. pub mod impls; @@ -90,7 +90,7 @@ use impls::Author; /// Constant values used within the runtime. pub mod constants; -use constants::{time::*, currency::*}; +use constants::{currency::*, time::*}; use sp_runtime::generic::Era; // Make the WASM binary available. @@ -100,9 +100,11 @@ include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); /// Wasm binary unwrapped. If built with `SKIP_WASM_BUILD`, the function panics. #[cfg(feature = "std")] pub fn wasm_binary_unwrap() -> &'static [u8] { - WASM_BINARY.expect("Development wasm binary is not available. This means the client is \ + WASM_BINARY.expect( + "Development wasm binary is not available. This means the client is \ built with `SKIP_WASM_BUILD` flag and it is only usable for \ - production chains. Please rebuild with the flag disabled.") + production chains. Please rebuild with the flag disabled.", + ) } /// Runtime version. @@ -125,23 +127,20 @@ pub const VERSION: RuntimeVersion = RuntimeVersion { pub const BABE_GENESIS_EPOCH_CONFIG: sp_consensus_babe::BabeEpochConfiguration = sp_consensus_babe::BabeEpochConfiguration { c: PRIMARY_PROBABILITY, - allowed_slots: sp_consensus_babe::AllowedSlots::PrimaryAndSecondaryPlainSlots + allowed_slots: sp_consensus_babe::AllowedSlots::PrimaryAndSecondaryPlainSlots, }; /// Native version. 
#[cfg(any(feature = "std", test))] pub fn native_version() -> NativeVersion { - NativeVersion { - runtime_version: VERSION, - can_author_with: Default::default(), - } + NativeVersion { runtime_version: VERSION, can_author_with: Default::default() } } type NegativeImbalance = >::NegativeImbalance; pub struct DealWithFees; impl OnUnbalanced for DealWithFees { - fn on_unbalanceds(mut fees_then_tips: impl Iterator) { + fn on_unbalanceds(mut fees_then_tips: impl Iterator) { if let Some(fees) = fees_then_tips.next() { // for fees, 80% to treasury, 20% to author let mut split = fees.ration(80, 20); @@ -257,7 +256,16 @@ parameter_types! { /// The type used to represent the kinds of proxying allowed. #[derive( - Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, MaxEncodedLen, + Copy, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Encode, + Decode, + RuntimeDebug, + MaxEncodedLen, scale_info::TypeInfo, )] pub enum ProxyType { @@ -266,7 +274,11 @@ pub enum ProxyType { Governance, Staking, } -impl Default for ProxyType { fn default() -> Self { Self::Any } } +impl Default for ProxyType { + fn default() -> Self { + Self::Any + } +} impl InstanceFilter for ProxyType { fn filter(&self, c: &Call) -> bool { match self { @@ -274,19 +286,16 @@ impl InstanceFilter for ProxyType { ProxyType::NonTransfer => !matches!( c, Call::Balances(..) | - Call::Assets(..) | - Call::Uniques(..) | - Call::Vesting(pallet_vesting::Call::vested_transfer { .. }) | - Call::Indices(pallet_indices::Call::transfer { .. }) + Call::Assets(..) | Call::Uniques(..) | + Call::Vesting(pallet_vesting::Call::vested_transfer { .. }) | + Call::Indices(pallet_indices::Call::transfer { .. }) ), ProxyType::Governance => matches!( c, Call::Democracy(..) | - Call::Council(..) | - Call::Society(..) | - Call::TechnicalCommittee(..) | - Call::Elections(..) | - Call::Treasury(..) + Call::Council(..) | Call::Society(..) | + Call::TechnicalCommittee(..) | + Call::Elections(..) | Call::Treasury(..) 
), ProxyType::Staking => matches!(c, Call::Staking(..)), } @@ -503,15 +512,16 @@ impl pallet_staking::Config for Runtime { type SlashCancelOrigin = EnsureOneOf< AccountId, EnsureRoot, - pallet_collective::EnsureProportionAtLeast<_3, _4, AccountId, CouncilCollective> + pallet_collective::EnsureProportionAtLeast<_3, _4, AccountId, CouncilCollective>, >; type SessionInterface = Self; type EraPayout = pallet_staking::ConvertCurve; type NextNewSession = Session; type MaxNominatorRewardedPerValidator = MaxNominatorRewardedPerValidator; type ElectionProvider = ElectionProviderMultiPhase; - type GenesisElectionProvider = - onchain::OnChainSequentialPhragmen>; + type GenesisElectionProvider = onchain::OnChainSequentialPhragmen< + pallet_election_provider_multi_phase::OnChainConfig, + >; type WeightInfo = pallet_staking::weights::SubstrateWeight; } @@ -621,20 +631,26 @@ impl pallet_democracy::Config for Runtime { type VotingPeriod = VotingPeriod; type MinimumDeposit = MinimumDeposit; /// A straight majority of the council can decide what their next motion is. - type ExternalOrigin = pallet_collective::EnsureProportionAtLeast<_1, _2, AccountId, CouncilCollective>; + type ExternalOrigin = + pallet_collective::EnsureProportionAtLeast<_1, _2, AccountId, CouncilCollective>; /// A super-majority can have the next scheduled referendum be a straight majority-carries vote. - type ExternalMajorityOrigin = pallet_collective::EnsureProportionAtLeast<_3, _4, AccountId, CouncilCollective>; + type ExternalMajorityOrigin = + pallet_collective::EnsureProportionAtLeast<_3, _4, AccountId, CouncilCollective>; /// A unanimous council can have the next scheduled referendum be a straight default-carries /// (NTB) vote. 
- type ExternalDefaultOrigin = pallet_collective::EnsureProportionAtLeast<_1, _1, AccountId, CouncilCollective>; + type ExternalDefaultOrigin = + pallet_collective::EnsureProportionAtLeast<_1, _1, AccountId, CouncilCollective>; /// Two thirds of the technical committee can have an ExternalMajority/ExternalDefault vote /// be tabled immediately and with a shorter voting/enactment period. - type FastTrackOrigin = pallet_collective::EnsureProportionAtLeast<_2, _3, AccountId, TechnicalCollective>; - type InstantOrigin = pallet_collective::EnsureProportionAtLeast<_1, _1, AccountId, TechnicalCollective>; + type FastTrackOrigin = + pallet_collective::EnsureProportionAtLeast<_2, _3, AccountId, TechnicalCollective>; + type InstantOrigin = + pallet_collective::EnsureProportionAtLeast<_1, _1, AccountId, TechnicalCollective>; type InstantAllowed = InstantAllowed; type FastTrackVotingPeriod = FastTrackVotingPeriod; // To cancel a proposal which has been passed, 2/3 of the council must agree to it. - type CancellationOrigin = pallet_collective::EnsureProportionAtLeast<_2, _3, AccountId, CouncilCollective>; + type CancellationOrigin = + pallet_collective::EnsureProportionAtLeast<_2, _3, AccountId, CouncilCollective>; // To cancel a proposal before it has been passed, the technical committee must be unanimous or // Root must agree. 
type CancelProposalOrigin = EnsureOneOf< @@ -731,7 +747,7 @@ impl pallet_collective::Config for Runtime { type EnsureRootOrHalfCouncil = EnsureOneOf< AccountId, EnsureRoot, - pallet_collective::EnsureProportionMoreThan<_1, _2, AccountId, CouncilCollective> + pallet_collective::EnsureProportionMoreThan<_1, _2, AccountId, CouncilCollective>, >; impl pallet_membership::Config for Runtime { type Event = Event; @@ -771,12 +787,12 @@ impl pallet_treasury::Config for Runtime { type ApproveOrigin = EnsureOneOf< AccountId, EnsureRoot, - pallet_collective::EnsureProportionAtLeast<_3, _5, AccountId, CouncilCollective> + pallet_collective::EnsureProportionAtLeast<_3, _5, AccountId, CouncilCollective>, >; type RejectOrigin = EnsureOneOf< AccountId, EnsureRoot, - pallet_collective::EnsureProportionMoreThan<_1, _2, AccountId, CouncilCollective> + pallet_collective::EnsureProportionMoreThan<_1, _2, AccountId, CouncilCollective>, >; type Event = Event; type OnSlash = (); @@ -879,8 +895,8 @@ parameter_types! { } impl frame_system::offchain::CreateSignedTransaction for Runtime - where - Call: From, +where + Call: From, { fn create_transaction>( call: Call, @@ -890,10 +906,8 @@ impl frame_system::offchain::CreateSignedTransaction for R ) -> Option<(Call, ::SignaturePayload)> { let tip = 0; // take the biggest period possible. 
- let period = BlockHashCount::get() - .checked_next_power_of_two() - .map(|c| c / 2) - .unwrap_or(2) as u64; + let period = + BlockHashCount::get().checked_next_power_of_two().map(|c| c / 2).unwrap_or(2) as u64; let current_block = System::block_number() .saturated_into::() // The `System::block_number` is initialized with `n+1`, @@ -914,10 +928,7 @@ impl frame_system::offchain::CreateSignedTransaction for R log::warn!("Unable to create signed payload: {:?}", e); }) .ok()?; - let signature = raw_payload - .using_encoded(|payload| { - C::sign(payload, public) - })?; + let signature = raw_payload.using_encoded(|payload| C::sign(payload, public))?; let address = Indices::unlookup(account); let (call, extra, _) = raw_payload.deconstruct(); Some((call, (address, signature.into(), extra))) @@ -929,7 +940,8 @@ impl frame_system::offchain::SigningTypes for Runtime { type Signature = Signature; } -impl frame_system::offchain::SendTransactionTypes for Runtime where +impl frame_system::offchain::SendTransactionTypes for Runtime +where Call: From, { type Extrinsic = UncheckedExtrinsic; @@ -968,8 +980,11 @@ impl pallet_grandpa::Config for Runtime { GrandpaId, )>>::IdentificationTuple; - type HandleEquivocation = - pallet_grandpa::EquivocationHandler; + type HandleEquivocation = pallet_grandpa::EquivocationHandler< + Self::KeyOwnerIdentification, + Offences, + ReportLongevity, + >; type WeightInfo = (); } @@ -1039,7 +1054,8 @@ impl pallet_society::Config for Runtime { type MembershipChanged = (); type RotationPeriod = RotationPeriod; type MaxLockDuration = MaxLockDuration; - type FounderSetOrigin = pallet_collective::EnsureProportionMoreThan<_1, _2, AccountId, CouncilCollective>; + type FounderSetOrigin = + pallet_collective::EnsureProportionMoreThan<_1, _2, AccountId, CouncilCollective>; type SuspensionJudgementOrigin = pallet_society::EnsureFounder; type MaxCandidateIntake = MaxCandidateIntake; type ChallengePeriod = ChallengePeriod; @@ -1264,11 +1280,7 @@ mod mmr { use 
super::Runtime; pub use pallet_mmr::primitives::*; - pub type Leaf = < - ::LeafData - as - LeafDataProvider - >::LeafData; + pub type Leaf = <::LeafData as LeafDataProvider>::LeafData; pub type Hash = ::Hash; pub type Hashing = ::Hashing; } @@ -1616,9 +1628,11 @@ mod tests { #[test] fn validate_transaction_submitter_bounds() { - fn is_submit_signed_transaction() where + fn is_submit_signed_transaction() + where T: CreateSignedTransaction, - {} + { + } is_submit_signed_transaction::(); } diff --git a/bin/node/test-runner-example/src/lib.rs b/bin/node/test-runner-example/src/lib.rs index 513c8a7d8b5c8..f0b306db6b0c1 100644 --- a/bin/node/test-runner-example/src/lib.rs +++ b/bin/node/test-runner-example/src/lib.rs @@ -19,12 +19,12 @@ //! Basic example of end to end runtime tests. -use test_runner::{ChainInfo, SignatureVerificationOverride}; use grandpa::GrandpaBlockImport; -use sc_service::{TFullBackend, TFullClient}; use sc_consensus_babe::BabeBlockImport; use sc_consensus_manual_seal::consensus::babe::SlotTimestampProvider; +use sc_service::{TFullBackend, TFullClient}; use sp_runtime::generic::Era; +use test_runner::{ChainInfo, SignatureVerificationOverride}; type BlockImport = BabeBlockImport>; @@ -54,15 +54,20 @@ impl ChainInfo for NodeTemplateChainInfo { Self::SelectChain, >; type SignedExtras = node_runtime::SignedExtra; - type InherentDataProviders = (SlotTimestampProvider, sp_consensus_babe::inherents::InherentDataProvider); + type InherentDataProviders = + (SlotTimestampProvider, sp_consensus_babe::inherents::InherentDataProvider); - fn signed_extras(from: ::AccountId) -> Self::SignedExtras { + fn signed_extras( + from: ::AccountId, + ) -> Self::SignedExtras { ( frame_system::CheckSpecVersion::::new(), frame_system::CheckTxVersion::::new(), frame_system::CheckGenesis::::new(), frame_system::CheckMortality::::from(Era::Immortal), - frame_system::CheckNonce::::from(frame_system::Pallet::::account_nonce(from)), + frame_system::CheckNonce::::from( + 
frame_system::Pallet::::account_nonce(from), + ), frame_system::CheckWeight::::new(), pallet_transaction_payment::ChargeTransactionPayment::::from(0), ) @@ -72,32 +77,43 @@ impl ChainInfo for NodeTemplateChainInfo { #[cfg(test)] mod tests { use super::*; - use test_runner::{Node, client_parts, ConfigOrChainSpec, build_runtime, task_executor}; - use sp_keyring::sr25519::Keyring::Alice; use node_cli::chain_spec::development_config; + use sp_keyring::sr25519::Keyring::Alice; use sp_runtime::{traits::IdentifyAccount, MultiSigner}; + use test_runner::{build_runtime, client_parts, task_executor, ConfigOrChainSpec, Node}; #[test] fn test_runner() { let mut tokio_runtime = build_runtime().unwrap(); let task_executor = task_executor(tokio_runtime.handle().clone()); - let (rpc, task_manager, client, pool, command_sink, backend) = - client_parts::( - ConfigOrChainSpec::ChainSpec(Box::new(development_config()), task_executor) - ).unwrap(); - let node = Node::::new(rpc, task_manager, client, pool, command_sink, backend); + let (rpc, task_manager, client, pool, command_sink, backend) = client_parts::< + NodeTemplateChainInfo, + >( + ConfigOrChainSpec::ChainSpec(Box::new(development_config()), task_executor), + ) + .unwrap(); + let node = Node::::new( + rpc, + task_manager, + client, + pool, + command_sink, + backend, + ); tokio_runtime.block_on(async { // seals blocks node.seal_blocks(1).await; // submit extrinsics let alice = MultiSigner::from(Alice.public()).into_account(); - let _hash = node.submit_extrinsic(frame_system::Call::remark((b"hello world").to_vec()), alice) + let _hash = node + .submit_extrinsic(frame_system::Call::remark((b"hello world").to_vec()), alice) .await .unwrap(); // look ma, I can read state. - let _events = node.with_state(|| frame_system::Pallet::::events()); + let _events = + node.with_state(|| frame_system::Pallet::::events()); // get access to the underlying client. 
let _client = node.client(); }) diff --git a/bin/node/testing/src/bench.rs b/bin/node/testing/src/bench.rs index edb99c617771a..e855ecae843bc 100644 --- a/bin/node/testing/src/bench.rs +++ b/bin/node/testing/src/bench.rs @@ -22,44 +22,42 @@ //! can pregenerate seed database and `clone` it for every iteration of your benchmarks //! or tests to get consistent, smooth benchmark experience! -use std::{sync::Arc, path::{Path, PathBuf}, collections::BTreeMap}; +use std::{ + collections::BTreeMap, + path::{Path, PathBuf}, + sync::Arc, +}; +use crate::{ + client::{Backend, Client}, + keyring::*, +}; +use codec::{Decode, Encode}; +use futures::executor; use node_primitives::Block; -use crate::client::{Client, Backend}; -use crate::keyring::*; +use node_runtime::{ + constants::currency::DOLLARS, AccountId, BalancesCall, Call, CheckedExtrinsic, MinimumPeriod, + Signature, SystemCall, UncheckedExtrinsic, +}; +use sc_block_builder::BlockBuilderProvider; +use sc_client_api::{ + execution_extensions::{ExecutionExtensions, ExecutionStrategies}, + BlockBackend, ExecutionStrategy, +}; use sc_client_db::PruningMode; use sc_executor::{NativeExecutor, WasmExecutionMethod}; +use sp_api::ProvideRuntimeApi; +use sp_block_builder::BlockBuilder; use sp_consensus::{ - BlockOrigin, BlockImport, BlockImportParams, - ForkChoiceStrategy, ImportResult, ImportedAux + BlockImport, BlockImportParams, BlockOrigin, ForkChoiceStrategy, ImportResult, ImportedAux, }; +use sp_core::{blake2_256, ed25519, sr25519, traits::SpawnNamed, ExecutionContext, Pair, Public}; +use sp_inherents::InherentData; use sp_runtime::{ generic::BlockId, + traits::{Block as BlockT, IdentifyAccount, Verify, Zero}, OpaqueExtrinsic, - traits::{Block as BlockT, Verify, Zero, IdentifyAccount}, -}; -use codec::{Decode, Encode}; -use node_runtime::{ - Call, - CheckedExtrinsic, - constants::currency::DOLLARS, - UncheckedExtrinsic, - MinimumPeriod, - SystemCall, - BalancesCall, - AccountId, - Signature, -}; -use 
sp_core::{ExecutionContext, blake2_256, traits::SpawnNamed, Pair, Public, sr25519, ed25519}; -use sp_api::ProvideRuntimeApi; -use sp_block_builder::BlockBuilder; -use sp_inherents::InherentData; -use sc_client_api::{ - ExecutionStrategy, BlockBackend, - execution_extensions::{ExecutionExtensions, ExecutionStrategies}, }; -use sc_block_builder::BlockBuilderProvider; -use futures::executor; /// Keyring full of accounts for benching. /// @@ -92,19 +90,21 @@ impl BenchPair { /// /// Will panic if cache drop is impossbile. pub fn drop_system_cache() { - #[cfg(target_os = "windows")] { + #[cfg(target_os = "windows")] + { log::warn!( target: "bench-logistics", "Clearing system cache on windows is not supported. Benchmark might totally be wrong.", ); - return; + return } std::process::Command::new("sync") .output() .expect("Failed to execute system cache clear"); - #[cfg(target_os = "linux")] { + #[cfg(target_os = "linux")] + { log::trace!(target: "bench-logistics", "Clearing system cache..."); std::process::Command::new("echo") .args(&["3", ">", "/proc/sys/vm/drop_caches", "2>", "/dev/null"]) @@ -133,7 +133,8 @@ pub fn drop_system_cache() { log::trace!(target: "bench-logistics", "Clearing system cache done!"); } - #[cfg(target_os = "macos")] { + #[cfg(target_os = "macos")] + { log::trace!(target: "bench-logistics", "Clearing system cache..."); if let Err(err) = std::process::Command::new("purge").output() { log::error!("purge error {:?}: ", err); @@ -169,15 +170,10 @@ impl Clone for BenchDb { ); let seed_db_files = std::fs::read_dir(seed_dir) .expect("failed to list file in seed dir") - .map(|f_result| - f_result.expect("failed to read file in seed db") - .path() - ).collect::>(); - fs_extra::copy_items( - &seed_db_files, - dir.path(), - &fs_extra::dir::CopyOptions::new(), - ).expect("Copy of seed database is ok"); + .map(|f_result| f_result.expect("failed to read file in seed db").path()) + .collect::>(); + fs_extra::copy_items(&seed_db_files, dir.path(), 
&fs_extra::dir::CopyOptions::new()) + .expect("Copy of seed database is ok"); // We clear system cache after db clone but before any warmups. // This populates system cache with some data unrelated to actual @@ -204,10 +200,7 @@ pub enum BlockType { impl BlockType { /// Create block content description with specified number of transactions. pub fn to_content(self, size: Option) -> BlockContent { - BlockContent { - block_type: self, - size, - } + BlockContent { block_type: self, size } } } @@ -230,13 +223,8 @@ pub enum DatabaseType { impl DatabaseType { fn into_settings(self, path: PathBuf) -> sc_client_db::DatabaseSettingsSrc { match self { - Self::RocksDb => sc_client_db::DatabaseSettingsSrc::RocksDb { - path, - cache_size: 512, - }, - Self::ParityDb => sc_client_db::DatabaseSettingsSrc::ParityDb { - path, - } + Self::RocksDb => sc_client_db::DatabaseSettingsSrc::RocksDb { path, cache_size: 512 }, + Self::ParityDb => sc_client_db::DatabaseSettingsSrc::ParityDb { path }, } } } @@ -251,10 +239,7 @@ pub struct TaskExecutor { impl TaskExecutor { fn new() -> Self { - Self { - pool: executor::ThreadPool::new() - .expect("Failed to create task executor") - } + Self { pool: executor::ThreadPool::new().expect("Failed to create task executor") } } } @@ -279,21 +264,17 @@ pub struct BlockContentIterator<'a> { impl<'a> BlockContentIterator<'a> { fn new(content: BlockContent, keyring: &'a BenchKeyring, client: &Client) -> Self { - let runtime_version = client.runtime_version_at(&BlockId::number(0)) + let runtime_version = client + .runtime_version_at(&BlockId::number(0)) .expect("There should be runtime version at 0"); - let genesis_hash = client.block_hash(Zero::zero()) + let genesis_hash = client + .block_hash(Zero::zero()) .expect("Database error?") .expect("Genesis block always exists; qed") .into(); - BlockContentIterator { - iteration: 0, - content, - keyring, - runtime_version, - genesis_hash, - } + BlockContentIterator { iteration: 0, content, keyring, runtime_version, 
genesis_hash } } } @@ -302,41 +283,36 @@ impl<'a> Iterator for BlockContentIterator<'a> { fn next(&mut self) -> Option { if self.content.size.map(|size| size <= self.iteration).unwrap_or(false) { - return None; + return None } let sender = self.keyring.at(self.iteration); - let receiver = get_account_id_from_seed::( - &format!("random-user//{}", self.iteration) - ); + let receiver = get_account_id_from_seed::(&format!( + "random-user//{}", + self.iteration + )); let signed = self.keyring.sign( CheckedExtrinsic { - signed: Some((sender, signed_extra(0, node_runtime::ExistentialDeposit::get() + 1))), + signed: Some(( + sender, + signed_extra(0, node_runtime::ExistentialDeposit::get() + 1), + )), function: match self.content.block_type { - BlockType::RandomTransfersKeepAlive => { - Call::Balances( - BalancesCall::transfer_keep_alive( - sp_runtime::MultiAddress::Id(receiver), - node_runtime::ExistentialDeposit::get() + 1, - ) - ) - }, + BlockType::RandomTransfersKeepAlive => + Call::Balances(BalancesCall::transfer_keep_alive( + sp_runtime::MultiAddress::Id(receiver), + node_runtime::ExistentialDeposit::get() + 1, + )), BlockType::RandomTransfersReaping => { - Call::Balances( - BalancesCall::transfer( - sp_runtime::MultiAddress::Id(receiver), - // Transfer so that ending balance would be 1 less than existential deposit - // so that we kill the sender account. - 100*DOLLARS - (node_runtime::ExistentialDeposit::get() - 1), - ) - ) - }, - BlockType::Noop => { - Call::System( - SystemCall::remark(Vec::new()) - ) + Call::Balances(BalancesCall::transfer( + sp_runtime::MultiAddress::Id(receiver), + // Transfer so that ending balance would be 1 less than existential deposit + // so that we kill the sender account. 
+ 100 * DOLLARS - (node_runtime::ExistentialDeposit::get() - 1), + )) }, + BlockType::Noop => Call::System(SystemCall::remark(Vec::new())), }, }, self.runtime_version.spec_version, @@ -346,8 +322,7 @@ impl<'a> Iterator for BlockContentIterator<'a> { let encoded = Encode::encode(&signed); - let opaque = OpaqueExtrinsic::decode(&mut &encoded[..]) - .expect("Failed to decode opaque"); + let opaque = OpaqueExtrinsic::decode(&mut &encoded[..]).expect("Failed to decode opaque"); self.iteration += 1; @@ -373,12 +348,8 @@ impl BenchDb { "Created seed db at {}", dir.path().to_string_lossy(), ); - let (_client, _backend, _task_executor) = Self::bench_client( - database_type, - dir.path(), - Profile::Native, - &keyring, - ); + let (_client, _backend, _task_executor) = + Self::bench_client(database_type, dir.path(), Profile::Native, &keyring); let directory_guard = Guard(dir); BenchDb { keyring, directory_guard, database_type } @@ -408,7 +379,7 @@ impl BenchDb { keyring: &BenchKeyring, ) -> (Client, std::sync::Arc, TaskExecutor) { let db_config = sc_client_db::DatabaseSettings { - state_cache_size: 16*1024*1024, + state_cache_size: 16 * 1024 * 1024, state_cache_child_ratio: Some((0, 100)), state_pruning: PruningMode::ArchiveAll, source: database_type.into_settings(dir.into()), @@ -429,7 +400,8 @@ impl BenchDb { None, None, Default::default(), - ).expect("Should not fail"); + ) + .expect("Should not fail"); (client, backend, task_executor) } @@ -445,12 +417,14 @@ impl BenchDb { .put_data(sp_timestamp::INHERENT_IDENTIFIER, ×tamp) .expect("Put timestamp failed"); - client.runtime_api() + client + .runtime_api() .inherent_extrinsics_with_context( &BlockId::number(0), ExecutionContext::BlockConstruction, inherent_data, - ).expect("Get inherents failed") + ) + .expect("Get inherents failed") } /// Iterate over some block content with transaction signed using this database keyring. 
@@ -474,9 +448,7 @@ impl BenchDb { pub fn generate_block(&mut self, content: BlockContent) -> Block { let client = self.client(); - let mut block = client - .new_block(Default::default()) - .expect("Block creation failed"); + let mut block = client.new_block(Default::default()).expect("Block creation failed"); for extrinsic in self.generate_inherents(&client) { block.push(extrinsic).expect("Push inherent failed"); @@ -486,14 +458,14 @@ impl BenchDb { for opaque in self.block_content(content, &client) { match block.push(opaque) { Err(sp_blockchain::Error::ApplyExtrinsicFailed( - sp_blockchain::ApplyExtrinsicFailed::Validity(e) + sp_blockchain::ApplyExtrinsicFailed::Validity(e), )) if e.exhausted_resources() => { - break; + break }, Err(err) => panic!("Error pushing transaction: {:?}", err), Ok(_) => {}, } - }; + } let block = block.build().expect("Block build failed").block; @@ -514,12 +486,8 @@ impl BenchDb { /// Clone this database and create context for testing/benchmarking. pub fn create_context(&self, profile: Profile) -> BenchContext { let BenchDb { directory_guard, keyring, database_type } = self.clone(); - let (client, backend, task_executor) = Self::bench_client( - database_type, - directory_guard.path(), - profile, - &keyring - ); + let (client, backend, task_executor) = + Self::bench_client(database_type, directory_guard.path(), profile, &keyring); BenchContext { client: Arc::new(client), @@ -549,7 +517,8 @@ impl BenchKeyring { let seed = format!("//endowed-user/{}", n); let (account_id, pair) = match key_types { KeyTypes::Sr25519 => { - let pair = sr25519::Pair::from_string(&seed, None).expect("failed to generate pair"); + let pair = + sr25519::Pair::from_string(&seed, None).expect("failed to generate pair"); let account_id = AccountPublic::from(pair.public()).into_account(); (account_id, BenchPair::Sr25519(pair)) }, @@ -581,28 +550,34 @@ impl BenchKeyring { xt: CheckedExtrinsic, spec_version: u32, tx_version: u32, - genesis_hash: [u8; 32] + 
genesis_hash: [u8; 32], ) -> UncheckedExtrinsic { match xt.signed { Some((signed, extra)) => { - let payload = (xt.function, extra.clone(), spec_version, tx_version, genesis_hash, genesis_hash); + let payload = ( + xt.function, + extra.clone(), + spec_version, + tx_version, + genesis_hash, + genesis_hash, + ); let key = self.accounts.get(&signed).expect("Account id not found in keyring"); - let signature = payload.using_encoded(|b| { - if b.len() > 256 { - key.sign(&sp_io::hashing::blake2_256(b)) - } else { - key.sign(b) - } - }).into(); + let signature = payload + .using_encoded(|b| { + if b.len() > 256 { + key.sign(&sp_io::hashing::blake2_256(b)) + } else { + key.sign(b) + } + }) + .into(); UncheckedExtrinsic { signature: Some((sp_runtime::MultiAddress::Id(signed), signature, extra)), function: payload.0, } - } - None => UncheckedExtrinsic { - signature: None, - function: xt.function, }, + None => UncheckedExtrinsic { signature: None, function: xt.function }, } } @@ -641,7 +616,7 @@ impl Profile { block_construction: ExecutionStrategy::NativeElseWasm, offchain_worker: ExecutionStrategy::NativeElseWasm, other: ExecutionStrategy::NativeElseWasm, - } + }, } } } @@ -676,7 +651,7 @@ fn get_from_seed(seed: &str) -> ::Public fn get_account_id_from_seed(seed: &str) -> AccountId where - AccountPublic: From<::Public> + AccountPublic: From<::Public>, { AccountPublic::from(get_from_seed::(seed)).into_account() } @@ -684,24 +659,25 @@ where impl BenchContext { /// Import some block. 
pub fn import_block(&mut self, block: Block) { - let mut import_params = BlockImportParams::new(BlockOrigin::NetworkBroadcast, block.header.clone()); + let mut import_params = + BlockImportParams::new(BlockOrigin::NetworkBroadcast, block.header.clone()); import_params.body = Some(block.extrinsics().to_vec()); import_params.fork_choice = Some(ForkChoiceStrategy::LongestChain); assert_eq!(self.client.chain_info().best_number, 0); assert_eq!( - futures::executor::block_on(self.client.import_block(import_params, Default::default())) - .expect("Failed to import block"), - ImportResult::Imported( - ImportedAux { - header_only: false, - clear_justification_requests: false, - needs_justification: false, - bad_justification: false, - is_new_best: true, - } + futures::executor::block_on( + self.client.import_block(import_params, Default::default()) ) + .expect("Failed to import block"), + ImportResult::Imported(ImportedAux { + header_only: false, + clear_justification_requests: false, + needs_justification: false, + bad_justification: false, + is_new_best: true, + }) ); assert_eq!(self.client.chain_info().best_number, 1); diff --git a/bin/node/testing/src/client.rs b/bin/node/testing/src/client.rs index d53519950dc1e..9538cd47d88a6 100644 --- a/bin/node/testing/src/client.rs +++ b/bin/node/testing/src/client.rs @@ -18,8 +18,8 @@ //! Utilities to build a `TestClient` for `node-runtime`. -use sp_runtime::BuildStorage; use sc_service::client; +use sp_runtime::BuildStorage; /// Re-export test-client utilities. 
pub use substrate_test_client::*; @@ -61,13 +61,15 @@ pub trait TestClientBuilderExt: Sized { fn build(self) -> Client; } -impl TestClientBuilderExt for substrate_test_client::TestClientBuilder< - node_primitives::Block, - client::LocalCallExecutor, - Backend, - GenesisParameters, -> { - fn new() -> Self{ +impl TestClientBuilderExt + for substrate_test_client::TestClientBuilder< + node_primitives::Block, + client::LocalCallExecutor, + Backend, + GenesisParameters, + > +{ + fn new() -> Self { Self::default() } @@ -75,5 +77,3 @@ impl TestClientBuilderExt for substrate_test_client::TestClientBuilder< self.build_with_native_executor(None).0 } } - - diff --git a/bin/node/testing/src/genesis.rs b/bin/node/testing/src/genesis.rs index 3a6d51f1971ed..50c1e6f9d20be 100644 --- a/bin/node/testing/src/genesis.rs +++ b/bin/node/testing/src/genesis.rs @@ -19,14 +19,13 @@ //! Genesis Configuration. use crate::keyring::*; -use sp_keyring::{Ed25519Keyring, Sr25519Keyring}; use node_runtime::{ - GenesisConfig, BalancesConfig, SessionConfig, StakingConfig, SystemConfig, - GrandpaConfig, IndicesConfig, SocietyConfig, wasm_binary_unwrap, - AccountId, StakerStatus, BabeConfig, BABE_GENESIS_EPOCH_CONFIG, + constants::currency::*, wasm_binary_unwrap, AccountId, BabeConfig, BalancesConfig, + GenesisConfig, GrandpaConfig, IndicesConfig, SessionConfig, SocietyConfig, StakerStatus, + StakingConfig, SystemConfig, BABE_GENESIS_EPOCH_CONFIG, }; -use node_runtime::constants::currency::*; use sp_core::ChangesTrieConfiguration; +use sp_keyring::{Ed25519Keyring, Sr25519Keyring}; use sp_runtime::Perbill; /// Create genesis runtime configuration for tests. 
@@ -41,7 +40,6 @@ pub fn config_endowed( code: Option<&[u8]>, extra_endowed: Vec, ) -> GenesisConfig { - let mut endowed = vec![ (alice(), 111 * DOLLARS), (bob(), 100 * DOLLARS), @@ -51,59 +49,44 @@ pub fn config_endowed( (ferdie(), 100 * DOLLARS), ]; - endowed.extend( - extra_endowed.into_iter().map(|endowed| (endowed, 100*DOLLARS)) - ); + endowed.extend(extra_endowed.into_iter().map(|endowed| (endowed, 100 * DOLLARS))); GenesisConfig { system: SystemConfig { - changes_trie_config: if support_changes_trie { Some(ChangesTrieConfiguration { - digest_interval: 2, - digest_levels: 2, - }) } else { None }, + changes_trie_config: if support_changes_trie { + Some(ChangesTrieConfiguration { digest_interval: 2, digest_levels: 2 }) + } else { + None + }, code: code.map(|x| x.to_vec()).unwrap_or_else(|| wasm_binary_unwrap().to_vec()), }, - indices: IndicesConfig { - indices: vec![], - }, - balances: BalancesConfig { - balances: endowed, - }, + indices: IndicesConfig { indices: vec![] }, + balances: BalancesConfig { balances: endowed }, session: SessionConfig { keys: vec![ - (dave(), alice(), to_session_keys( - &Ed25519Keyring::Alice, - &Sr25519Keyring::Alice, - )), - (eve(), bob(), to_session_keys( - &Ed25519Keyring::Bob, - &Sr25519Keyring::Bob, - )), - (ferdie(), charlie(), to_session_keys( - &Ed25519Keyring::Charlie, - &Sr25519Keyring::Charlie, - )), - ] + (dave(), alice(), to_session_keys(&Ed25519Keyring::Alice, &Sr25519Keyring::Alice)), + (eve(), bob(), to_session_keys(&Ed25519Keyring::Bob, &Sr25519Keyring::Bob)), + ( + ferdie(), + charlie(), + to_session_keys(&Ed25519Keyring::Charlie, &Sr25519Keyring::Charlie), + ), + ], }, staking: StakingConfig { stakers: vec![ (dave(), alice(), 111 * DOLLARS, StakerStatus::Validator), (eve(), bob(), 100 * DOLLARS, StakerStatus::Validator), - (ferdie(), charlie(), 100 * DOLLARS, StakerStatus::Validator) + (ferdie(), charlie(), 100 * DOLLARS, StakerStatus::Validator), ], validator_count: 3, minimum_validator_count: 0, 
slash_reward_fraction: Perbill::from_percent(10), invulnerables: vec![alice(), bob(), charlie()], - .. Default::default() - }, - babe: BabeConfig { - authorities: vec![], - epoch_config: Some(BABE_GENESIS_EPOCH_CONFIG), - }, - grandpa: GrandpaConfig { - authorities: vec![], + ..Default::default() }, + babe: BabeConfig { authorities: vec![], epoch_config: Some(BABE_GENESIS_EPOCH_CONFIG) }, + grandpa: GrandpaConfig { authorities: vec![] }, im_online: Default::default(), authority_discovery: Default::default(), democracy: Default::default(), @@ -113,11 +96,7 @@ pub fn config_endowed( elections: Default::default(), sudo: Default::default(), treasury: Default::default(), - society: SocietyConfig { - members: vec![alice(), bob()], - pot: 0, - max_members: 999, - }, + society: SocietyConfig { members: vec![alice(), bob()], pot: 0, max_members: 999 }, vesting: Default::default(), gilt: Default::default(), transaction_storage: Default::default(), diff --git a/bin/node/testing/src/keyring.rs b/bin/node/testing/src/keyring.rs index da61040206ea4..4e2d88b4bba33 100644 --- a/bin/node/testing/src/keyring.rs +++ b/bin/node/testing/src/keyring.rs @@ -18,11 +18,11 @@ //! Test accounts. -use sp_keyring::{AccountKeyring, Sr25519Keyring, Ed25519Keyring}; +use codec::Encode; use node_primitives::{AccountId, Balance, Index}; -use node_runtime::{CheckedExtrinsic, UncheckedExtrinsic, SessionKeys, SignedExtra}; +use node_runtime::{CheckedExtrinsic, SessionKeys, SignedExtra, UncheckedExtrinsic}; +use sp_keyring::{AccountKeyring, Ed25519Keyring, Sr25519Keyring}; use sp_runtime::generic::Era; -use codec::Encode; /// Alice's account id. pub fn alice() -> AccountId { @@ -81,26 +81,31 @@ pub fn signed_extra(nonce: Index, extra_fee: Balance) -> SignedExtra { } /// Sign given `CheckedExtrinsic`. 
-pub fn sign(xt: CheckedExtrinsic, spec_version: u32, tx_version: u32, genesis_hash: [u8; 32]) -> UncheckedExtrinsic { +pub fn sign( + xt: CheckedExtrinsic, + spec_version: u32, + tx_version: u32, + genesis_hash: [u8; 32], +) -> UncheckedExtrinsic { match xt.signed { Some((signed, extra)) => { - let payload = (xt.function, extra.clone(), spec_version, tx_version, genesis_hash, genesis_hash); + let payload = + (xt.function, extra.clone(), spec_version, tx_version, genesis_hash, genesis_hash); let key = AccountKeyring::from_account_id(&signed).unwrap(); - let signature = payload.using_encoded(|b| { - if b.len() > 256 { - key.sign(&sp_io::hashing::blake2_256(b)) - } else { - key.sign(b) - } - }).into(); + let signature = payload + .using_encoded(|b| { + if b.len() > 256 { + key.sign(&sp_io::hashing::blake2_256(b)) + } else { + key.sign(b) + } + }) + .into(); UncheckedExtrinsic { signature: Some((sp_runtime::MultiAddress::Id(signed), signature, extra)), function: payload.0, } - } - None => UncheckedExtrinsic { - signature: None, - function: xt.function, }, + None => UncheckedExtrinsic { signature: None, function: xt.function }, } } diff --git a/bin/node/testing/src/lib.rs b/bin/node/testing/src/lib.rs index c5792bccee80d..a3392bcb29d5d 100644 --- a/bin/node/testing/src/lib.rs +++ b/bin/node/testing/src/lib.rs @@ -20,7 +20,7 @@ #![warn(missing_docs)] +pub mod bench; pub mod client; pub mod genesis; pub mod keyring; -pub mod bench; diff --git a/bin/utils/chain-spec-builder/src/main.rs b/bin/utils/chain-spec-builder/src/main.rs index a3f8eaa1f8547..60d46dcfeee53 100644 --- a/bin/utils/chain-spec-builder/src/main.rs +++ b/bin/utils/chain-spec-builder/src/main.rs @@ -16,19 +16,23 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use std::{fs, path::{Path, PathBuf}, sync::Arc}; +use std::{ + fs, + path::{Path, PathBuf}, + sync::Arc, +}; use ansi_term::Style; -use rand::{Rng, distributions::Alphanumeric, rngs::OsRng}; +use rand::{distributions::Alphanumeric, rngs::OsRng, Rng}; use structopt::StructOpt; -use sc_keystore::LocalKeystore; use node_cli::chain_spec::{self, AccountId}; +use sc_keystore::LocalKeystore; use sp_core::{ - sr25519, crypto::{Public, Ss58Codec}, + sr25519, }; -use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; /// A utility to easily create a testnet chain spec definition with a given set /// of authorities and endowed accounts and/or generate random accounts. @@ -86,10 +90,8 @@ impl ChainSpecBuilder { /// Returns the path where the chain spec should be saved. fn chain_spec_path(&self) -> &Path { match self { - ChainSpecBuilder::New { chain_spec_path, .. } => - chain_spec_path.as_path(), - ChainSpecBuilder::Generate { chain_spec_path, .. } => - chain_spec_path.as_path(), + ChainSpecBuilder::New { chain_spec_path, .. } => chain_spec_path.as_path(), + ChainSpecBuilder::Generate { chain_spec_path, .. 
} => chain_spec_path.as_path(), } } } @@ -125,11 +127,15 @@ fn generate_chain_spec( .map_err(|err| format!("Failed to parse account address: {:?}", err)) }; - let nominator_accounts = - nominator_accounts.into_iter().map(parse_account).collect::, String>>()?; + let nominator_accounts = nominator_accounts + .into_iter() + .map(parse_account) + .collect::, String>>()?; - let endowed_accounts = - endowed_accounts.into_iter().map(parse_account).collect::, String>>()?; + let endowed_accounts = endowed_accounts + .into_iter() + .map(parse_account) + .collect::, String>>()?; let sudo_account = parse_account(sudo_account)?; @@ -137,7 +143,14 @@ fn generate_chain_spec( "Custom", "custom", sc_chain_spec::ChainType::Live, - move || genesis_constructor(&authority_seeds, &nominator_accounts, &endowed_accounts, &sudo_account), + move || { + genesis_constructor( + &authority_seeds, + &nominator_accounts, + &endowed_accounts, + &sudo_account, + ) + }, vec![], None, None, @@ -148,42 +161,26 @@ fn generate_chain_spec( chain_spec.as_json(false).map_err(|err| err) } -fn generate_authority_keys_and_store( - seeds: &[String], - keystore_path: &Path, -) -> Result<(), String> { +fn generate_authority_keys_and_store(seeds: &[String], keystore_path: &Path) -> Result<(), String> { for (n, seed) in seeds.into_iter().enumerate() { - let keystore: SyncCryptoStorePtr = Arc::new(LocalKeystore::open( - keystore_path.join(format!("auth-{}", n)), - None, - ).map_err(|err| err.to_string())?); + let keystore: SyncCryptoStorePtr = Arc::new( + LocalKeystore::open(keystore_path.join(format!("auth-{}", n)), None) + .map_err(|err| err.to_string())?, + ); let (_, _, grandpa, babe, im_online, authority_discovery) = chain_spec::authority_keys_from_seed(seed); let insert_key = |key_type, public| { - SyncCryptoStore::insert_unknown( - &*keystore, - key_type, - &format!("//{}", seed), - public, - ).map_err(|_| format!("Failed to insert key: {}", grandpa)) + SyncCryptoStore::insert_unknown(&*keystore, key_type, 
&format!("//{}", seed), public) + .map_err(|_| format!("Failed to insert key: {}", grandpa)) }; - insert_key( - sp_core::crypto::key_types::BABE, - babe.as_slice(), - )?; + insert_key(sp_core::crypto::key_types::BABE, babe.as_slice())?; - insert_key( - sp_core::crypto::key_types::GRANDPA, - grandpa.as_slice(), - )?; + insert_key(sp_core::crypto::key_types::GRANDPA, grandpa.as_slice())?; - insert_key( - sp_core::crypto::key_types::IM_ONLINE, - im_online.as_slice(), - )?; + insert_key(sp_core::crypto::key_types::IM_ONLINE, im_online.as_slice())?; insert_key( sp_core::crypto::key_types::AUTHORITY_DISCOVERY, @@ -206,10 +203,7 @@ fn print_seeds( println!("{}", header.paint("Authority seeds")); for (n, seed) in authority_seeds.iter().enumerate() { - println!("{} //{}", - entry.paint(format!("auth-{}:", n)), - seed, - ); + println!("{} //{}", entry.paint(format!("auth-{}:", n)), seed,); } println!("{}", header.paint("Nominator seeds")); @@ -223,10 +217,7 @@ fn print_seeds( if !endowed_seeds.is_empty() { println!("{}", header.paint("Endowed seeds")); for (n, seed) in endowed_seeds.iter().enumerate() { - println!("{} //{}", - entry.paint(format!("endowed-{}:", n)), - seed, - ); + println!("{} //{}", entry.paint(format!("endowed-{}:", n)), seed,); } println!(); @@ -260,10 +251,7 @@ fn main() -> Result<(), String> { print_seeds(&authority_seeds, &nominator_seeds, &endowed_seeds, &sudo_seed); if let Some(keystore_path) = keystore_path { - generate_authority_keys_and_store( - &authority_seeds, - &keystore_path, - )?; + generate_authority_keys_and_store(&authority_seeds, &keystore_path)?; } let nominator_accounts = nominator_seeds @@ -284,7 +272,7 @@ fn main() -> Result<(), String> { chain_spec::get_account_id_from_seed::(&sudo_seed).to_ss58check(); (authority_seeds, nominator_accounts, endowed_accounts, sudo_account) - } + }, ChainSpecBuilder::New { authority_seeds, nominator_accounts, @@ -294,12 +282,8 @@ fn main() -> Result<(), String> { } => (authority_seeds, 
nominator_accounts, endowed_accounts, sudo_account), }; - let json = generate_chain_spec( - authority_seeds, - nominator_accounts, - endowed_accounts, - sudo_account, - )?; + let json = + generate_chain_spec(authority_seeds, nominator_accounts, endowed_accounts, sudo_account)?; fs::write(chain_spec_path, json).map_err(|err| err.to_string()) } diff --git a/bin/utils/subkey/src/lib.rs b/bin/utils/subkey/src/lib.rs index 5e9f04418a6b5..5052d1b104c2c 100644 --- a/bin/utils/subkey/src/lib.rs +++ b/bin/utils/subkey/src/lib.rs @@ -16,17 +16,17 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use structopt::StructOpt; use sc_cli::{ - Error, VanityCmd, SignCmd, VerifyCmd, GenerateNodeKeyCmd, GenerateCmd, InspectKeyCmd, - InspectNodeKeyCmd + Error, GenerateCmd, GenerateNodeKeyCmd, InspectKeyCmd, InspectNodeKeyCmd, SignCmd, VanityCmd, + VerifyCmd, }; +use structopt::StructOpt; #[derive(Debug, StructOpt)] #[structopt( name = "subkey", author = "Parity Team ", - about = "Utility for generating and restoring with Substrate keys", + about = "Utility for generating and restoring with Substrate keys" )] pub enum Subkey { /// Generate a random node libp2p key, save it to file or print it to stdout diff --git a/client/allocator/src/error.rs b/client/allocator/src/error.rs index e880e8d0ae75d..2b2cc127dcfb3 100644 --- a/client/allocator/src/error.rs +++ b/client/allocator/src/error.rs @@ -26,5 +26,5 @@ pub enum Error { AllocatorOutOfSpace, /// Some other error occurred. #[error("Other: {0}")] - Other(&'static str) + Other(&'static str), } diff --git a/client/allocator/src/freeing_bump.rs b/client/allocator/src/freeing_bump.rs index 7f83576aedfa6..3b0db13d8fde7 100644 --- a/client/allocator/src/freeing_bump.rs +++ b/client/allocator/src/freeing_bump.rs @@ -68,8 +68,12 @@ //! sizes. 
use crate::Error; -use std::{mem, convert::{TryFrom, TryInto}, ops::{Range, Index, IndexMut}}; use sp_wasm_interface::{Pointer, WordSize}; +use std::{ + convert::{TryFrom, TryInto}, + mem, + ops::{Index, IndexMut, Range}, +}; /// The minimal alignment guaranteed by this allocator. /// @@ -139,7 +143,7 @@ impl Order { fn from_size(size: u32) -> Result { let clamped_size = if size > MAX_POSSIBLE_ALLOCATION { log::warn!(target: LOG_TARGET, "going to fail due to allocating {:?}", size); - return Err(Error::RequestedAllocationTooLarge); + return Err(Error::RequestedAllocationTooLarge) } else if size < MIN_POSSIBLE_ALLOCATION { MIN_POSSIBLE_ALLOCATION } else { @@ -290,9 +294,7 @@ struct FreeLists { impl FreeLists { /// Creates the free empty lists. fn new() -> Self { - Self { - heads: [Link::Nil; N_ORDERS] - } + Self { heads: [Link::Nil; N_ORDERS] } } /// Replaces a given link for the specified order and returns the old one. @@ -397,15 +399,11 @@ impl FreeingBumpHeapAllocator { self.free_lists[order] = next_free; header_ptr - } + }, Link::Nil => { // Corresponding free list is empty. Allocate a new item. - Self::bump( - &mut self.bumper, - order.size() + HEADER_SIZE, - mem.size(), - )? - } + Self::bump(&mut self.bumper, order.size() + HEADER_SIZE, mem.size())? + }, }; // Write the order in the occupied header. @@ -440,7 +438,11 @@ impl FreeingBumpHeapAllocator { /// /// - `mem` - a slice representing the linear memory on which this allocator operates. /// - `ptr` - pointer to the allocated chunk - pub fn deallocate(&mut self, mem: &mut M, ptr: Pointer) -> Result<(), Error> { + pub fn deallocate( + &mut self, + mem: &mut M, + ptr: Pointer, + ) -> Result<(), Error> { if self.poisoned { return Err(error("the allocator has been poisoned")) } @@ -480,8 +482,13 @@ impl FreeingBumpHeapAllocator { /// the operation would exhaust the heap. 
fn bump(bumper: &mut u32, size: u32, heap_end: u32) -> Result { if *bumper + size > heap_end { - log::error!(target: LOG_TARGET, "running out of space with current bumper {}, mem size {}", bumper, heap_end); - return Err(Error::AllocatorOutOfSpace); + log::error!( + target: LOG_TARGET, + "running out of space with current bumper {}, mem size {}", + bumper, + heap_end + ); + return Err(Error::AllocatorOutOfSpace) } let res = *bumper; diff --git a/client/allocator/src/lib.rs b/client/allocator/src/lib.rs index a82c7542199d4..4493db3c7d146 100644 --- a/client/allocator/src/lib.rs +++ b/client/allocator/src/lib.rs @@ -25,5 +25,5 @@ mod error; mod freeing_bump; -pub use freeing_bump::FreeingBumpHeapAllocator; pub use error::Error; +pub use freeing_bump::FreeingBumpHeapAllocator; diff --git a/client/api/src/backend.rs b/client/api/src/backend.rs index b09995f887c4a..965e0151c3cba 100644 --- a/client/api/src/backend.rs +++ b/client/api/src/backend.rs @@ -18,30 +18,32 @@ //! Substrate Client data backend -use std::sync::Arc; -use std::collections::{HashMap, HashSet}; -use sp_core::ChangesTrieConfigurationRange; -use sp_core::offchain::OffchainStorage; -use sp_runtime::{generic::BlockId, Justification, Justifications, Storage}; -use sp_runtime::traits::{Block as BlockT, NumberFor, HashFor}; -use sp_state_machine::{ - ChangesTrieState, ChangesTrieStorage as StateChangesTrieStorage, ChangesTrieTransaction, - StorageCollection, ChildStorageCollection, OffchainChangesCollection, IndexOperation, -}; -use sp_storage::{StorageData, StorageKey, PrefixedStorageKey, ChildInfo}; use crate::{ - blockchain::{ - Backend as BlockchainBackend, well_known_cache_keys - }, + blockchain::{well_known_cache_keys, Backend as BlockchainBackend}, light::RemoteBlockchain, UsageInfo, }; +use parking_lot::RwLock; use sp_blockchain; use sp_consensus::BlockOrigin; -use parking_lot::RwLock; +use sp_core::{offchain::OffchainStorage, ChangesTrieConfigurationRange}; +use sp_runtime::{ + generic::BlockId, + 
traits::{Block as BlockT, HashFor, NumberFor}, + Justification, Justifications, Storage, +}; +use sp_state_machine::{ + ChangesTrieState, ChangesTrieStorage as StateChangesTrieStorage, ChangesTrieTransaction, + ChildStorageCollection, IndexOperation, OffchainChangesCollection, StorageCollection, +}; +use sp_storage::{ChildInfo, PrefixedStorageKey, StorageData, StorageKey}; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; -pub use sp_state_machine::Backend as StateBackend; pub use sp_consensus::ImportedState; +pub use sp_state_machine::Backend as StateBackend; use std::marker::PhantomData; /// Extracts the state backend type for the given backend. @@ -90,16 +92,17 @@ pub fn apply_aux<'a, 'b: 'a, 'c: 'a, B, Block, D, I>( insert: I, delete: D, ) -> sp_blockchain::Result<()> - where - Block: BlockT, - B: Backend, - I: IntoIterator, - D: IntoIterator, +where + Block: BlockT, + B: Backend, + I: IntoIterator, + D: IntoIterator, { operation.op.insert_aux( - insert.into_iter() + insert + .into_iter() .map(|(k, v)| (k.to_vec(), Some(v.to_vec()))) - .chain(delete.into_iter().map(|k| (k.to_vec(), None))) + .chain(delete.into_iter().map(|k| (k.to_vec(), None))), ) } @@ -165,7 +168,11 @@ pub trait BlockImportOperation { /// Set genesis state. If `commit` is `false` the state is saved in memory, but is not written /// to the database. - fn set_genesis_state(&mut self, storage: Storage, commit: bool) -> sp_blockchain::Result; + fn set_genesis_state( + &mut self, + storage: Storage, + commit: bool, + ) -> sp_blockchain::Result; /// Inject storage data into the database replacing any existing data. fn reset_storage(&mut self, storage: Storage) -> sp_blockchain::Result; @@ -182,7 +189,7 @@ pub trait BlockImportOperation { &mut self, _offchain_update: OffchainChangesCollection, ) -> sp_blockchain::Result<()> { - Ok(()) + Ok(()) } /// Inject changes trie data into the database. 
@@ -195,7 +202,8 @@ pub trait BlockImportOperation { /// /// Values are `None` if should be deleted. fn insert_aux(&mut self, ops: I) -> sp_blockchain::Result<()> - where I: IntoIterator, Option>)>; + where + I: IntoIterator, Option>)>; /// Mark a block as finalized. fn mark_finalized( @@ -209,16 +217,17 @@ pub trait BlockImportOperation { fn mark_head(&mut self, id: BlockId) -> sp_blockchain::Result<()>; /// Add a transaction index operation. - fn update_transaction_index(&mut self, index: Vec) -> sp_blockchain::Result<()>; + fn update_transaction_index(&mut self, index: Vec) + -> sp_blockchain::Result<()>; } /// Interface for performing operations on the backend. pub trait LockImportRun> { /// Lock the import lock, and run operations inside. fn lock_import_and_run(&self, f: F) -> Result - where - F: FnOnce(&mut ClientImportOperation) -> Result, - Err: From; + where + F: FnOnce(&mut ClientImportOperation) -> Result, + Err: From; } /// Finalize Facilities @@ -270,9 +279,13 @@ pub trait AuxStore { 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, delete: D) -> sp_blockchain::Result<()>; + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> sp_blockchain::Result<()>; /// Query auxiliary data from key-value store. fn get_aux(&self, key: &[u8]) -> sp_blockchain::Result>>; @@ -287,16 +300,10 @@ pub struct KeyIterator<'a, State, Block> { _phantom: PhantomData, } -impl <'a, State, Block> KeyIterator<'a, State, Block> { +impl<'a, State, Block> KeyIterator<'a, State, Block> { /// create a KeyIterator instance pub fn new(state: State, prefix: Option<&'a StorageKey>, current_key: Vec) -> Self { - Self { - state, - child_storage: None, - prefix, - current_key, - _phantom: PhantomData, - } + Self { state, child_storage: None, prefix, current_key, _phantom: PhantomData } } /// Create a `KeyIterator` instance for a child storage. 
@@ -306,17 +313,12 @@ impl <'a, State, Block> KeyIterator<'a, State, Block> { prefix: Option<&'a StorageKey>, current_key: Vec, ) -> Self { - Self { - state, - child_storage: Some(child_info), - prefix, - current_key, - _phantom: PhantomData, - } + Self { state, child_storage: Some(child_info), prefix, current_key, _phantom: PhantomData } } } -impl<'a, State, Block> Iterator for KeyIterator<'a, State, Block> where +impl<'a, State, Block> Iterator for KeyIterator<'a, State, Block> +where Block: BlockT, State: StateBackend>, { @@ -327,11 +329,13 @@ impl<'a, State, Block> Iterator for KeyIterator<'a, State, Block> where self.state.next_child_storage_key(child_info, &self.current_key) } else { self.state.next_storage_key(&self.current_key) - }.ok().flatten()?; + } + .ok() + .flatten()?; // this terminates the iterator the first time it fails. if let Some(prefix) = self.prefix { if !next_key.starts_with(&prefix.0[..]) { - return None; + return None } } self.current_key = next_key.clone(); @@ -342,19 +346,31 @@ impl<'a, State, Block> Iterator for KeyIterator<'a, State, Block> where /// Provides acess to storage primitives pub trait StorageProvider> { /// Given a `BlockId` and a key, return the value under the key in that block. - fn storage(&self, id: &BlockId, key: &StorageKey) -> sp_blockchain::Result>; + fn storage( + &self, + id: &BlockId, + key: &StorageKey, + ) -> sp_blockchain::Result>; /// Given a `BlockId` and a key prefix, return the matching storage keys in that block. - fn storage_keys(&self, id: &BlockId, key_prefix: &StorageKey) -> sp_blockchain::Result>; + fn storage_keys( + &self, + id: &BlockId, + key_prefix: &StorageKey, + ) -> sp_blockchain::Result>; /// Given a `BlockId` and a key, return the value under the hash in that block. 
- fn storage_hash(&self, id: &BlockId, key: &StorageKey) -> sp_blockchain::Result>; + fn storage_hash( + &self, + id: &BlockId, + key: &StorageKey, + ) -> sp_blockchain::Result>; /// Given a `BlockId` and a key prefix, return the matching child storage keys and values in that block. fn storage_pairs( &self, id: &BlockId, - key_prefix: &StorageKey + key_prefix: &StorageKey, ) -> sp_blockchain::Result>; /// Given a `BlockId` and a key prefix, return a `KeyIterator` iterates matching storage keys in that block. @@ -362,7 +378,7 @@ pub trait StorageProvider> { &self, id: &BlockId, prefix: Option<&'a StorageKey>, - start_key: Option<&StorageKey> + start_key: Option<&StorageKey>, ) -> sp_blockchain::Result>; /// Given a `BlockId`, a key and a child storage key, return the value under the key in that block. @@ -370,7 +386,7 @@ pub trait StorageProvider> { &self, id: &BlockId, child_info: &ChildInfo, - key: &StorageKey + key: &StorageKey, ) -> sp_blockchain::Result>; /// Given a `BlockId`, a key prefix, and a child storage key, return the matching child storage keys. @@ -378,7 +394,7 @@ pub trait StorageProvider> { &self, id: &BlockId, child_info: &ChildInfo, - key_prefix: &StorageKey + key_prefix: &StorageKey, ) -> sp_blockchain::Result>; /// Given a `BlockId` and a key `prefix` and a child storage key, @@ -388,7 +404,7 @@ pub trait StorageProvider> { id: &BlockId, child_info: ChildInfo, prefix: Option<&'a StorageKey>, - start_key: Option<&StorageKey> + start_key: Option<&StorageKey>, ) -> sp_blockchain::Result>; /// Given a `BlockId`, a key and a child storage key, return the hash under the key in that block. 
@@ -396,7 +412,7 @@ pub trait StorageProvider> { &self, id: &BlockId, child_info: &ChildInfo, - key: &StorageKey + key: &StorageKey, ) -> sp_blockchain::Result>; /// Get longest range within [first; last] that is possible to use in `key_changes` @@ -418,7 +434,7 @@ pub trait StorageProvider> { first: NumberFor, last: BlockId, storage_key: Option<&PrefixedStorageKey>, - key: &StorageKey + key: &StorageKey, ) -> sp_blockchain::Result, u32)>>; } @@ -511,20 +527,20 @@ pub trait Backend: AuxStore + Send + Sync { ) -> sp_blockchain::Result<(NumberFor, HashSet)>; /// Discard non-best, unfinalized leaf block. - fn remove_leaf_block( - &self, - hash: &Block::Hash, - ) -> sp_blockchain::Result<()>; + fn remove_leaf_block(&self, hash: &Block::Hash) -> sp_blockchain::Result<()>; /// Insert auxiliary data into key-value store. fn insert_aux< 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, delete: D) -> sp_blockchain::Result<()> - { + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> sp_blockchain::Result<()> { AuxStore::insert_aux(self, insert, delete) } /// Query auxiliary data from key-value store. @@ -548,9 +564,10 @@ pub trait PrunableStateChangesTrieStorage: /// Get reference to StateChangesTrieStorage. fn storage(&self) -> &dyn StateChangesTrieStorage, NumberFor>; /// Get configuration at given block. - fn configuration_at(&self, at: &BlockId) -> sp_blockchain::Result< - ChangesTrieConfigurationRange, Block::Hash> - >; + fn configuration_at( + &self, + at: &BlockId, + ) -> sp_blockchain::Result, Block::Hash>>; /// Get end block (inclusive) of oldest pruned max-level (or skewed) digest trie blocks range. /// It is guaranteed that we have no any changes tries before (and including) this block. /// It is guaranteed that all existing changes tries after this block are not yet pruned (if created). 
@@ -584,7 +601,8 @@ pub fn changes_tries_state_at_block<'a, Block: BlockT>( let config_range = storage.configuration_at(block)?; match config_range.config { - Some(config) => Ok(Some(ChangesTrieState::new(config, config_range.zero.0, storage.storage()))), + Some(config) => + Ok(Some(ChangesTrieState::new(config, config_range.zero.0, storage.storage()))), None => Ok(None), } } diff --git a/client/api/src/call_executor.rs b/client/api/src/call_executor.rs index 621cc292a71ac..2d19c9fe3504c 100644 --- a/client/api/src/call_executor.rs +++ b/client/api/src/call_executor.rs @@ -18,20 +18,19 @@ //! A method call executor interface. -use std::{panic::UnwindSafe, result, cell::RefCell}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; +use sc_executor::{NativeVersion, RuntimeVersion}; +use sp_core::NativeOrEncoded; +use sp_externalities::Extensions; use sp_runtime::{ - generic::BlockId, traits::{Block as BlockT, HashFor}, -}; -use sp_state_machine::{ - OverlayedChanges, ExecutionManager, ExecutionStrategy, StorageProof, + generic::BlockId, + traits::{Block as BlockT, HashFor}, }; -use sc_executor::{RuntimeVersion, NativeVersion}; -use sp_externalities::Extensions; -use sp_core::NativeOrEncoded; +use sp_state_machine::{ExecutionManager, ExecutionStrategy, OverlayedChanges, StorageProof}; +use std::{cell::RefCell, panic::UnwindSafe, result}; -use sp_api::{ProofRecorder, StorageTransactionCache}; use crate::execution_extensions::ExecutionExtensions; +use sp_api::{ProofRecorder, StorageTransactionCache}; /// Executor Provider pub trait ExecutorProvider { @@ -73,7 +72,7 @@ pub trait CallExecutor { fn contextual_call< EM: Fn( Result, Self::Error>, - Result, Self::Error> + Result, Self::Error>, ) -> Result, Self::Error>, R: Encode + Decode + PartialEq, NC: FnOnce() -> result::Result + UnwindSafe, @@ -83,14 +82,18 @@ pub trait CallExecutor { method: &str, call_data: &[u8], changes: &RefCell, - storage_transaction_cache: Option<&RefCell< - 
StorageTransactionCache>::State>, - >>, + storage_transaction_cache: Option< + &RefCell< + StorageTransactionCache>::State>, + >, + >, execution_manager: ExecutionManager, native_call: Option, proof_recorder: &Option>, extensions: Option, - ) -> sp_blockchain::Result> where ExecutionManager: Clone; + ) -> sp_blockchain::Result> + where + ExecutionManager: Clone; /// Extract RuntimeVersion of given block /// @@ -105,12 +108,13 @@ pub trait CallExecutor { mut state: S, overlay: &mut OverlayedChanges, method: &str, - call_data: &[u8] + call_data: &[u8], ) -> Result<(Vec, StorageProof), sp_blockchain::Error> { - let trie_state = state.as_trie_backend() - .ok_or_else(|| - sp_blockchain::Error::from_state(Box::new(sp_state_machine::ExecutionError::UnableToGenerateProof) as Box<_>) - )?; + let trie_state = state.as_trie_backend().ok_or_else(|| { + sp_blockchain::Error::from_state(Box::new( + sp_state_machine::ExecutionError::UnableToGenerateProof, + ) as Box<_>) + })?; self.prove_at_trie_state(trie_state, overlay, method, call_data) } @@ -122,7 +126,7 @@ pub trait CallExecutor { trie_state: &sp_state_machine::TrieBackend>, overlay: &mut OverlayedChanges, method: &str, - call_data: &[u8] + call_data: &[u8], ) -> Result<(Vec, StorageProof), sp_blockchain::Error>; /// Get runtime version if supported. diff --git a/client/api/src/cht.rs b/client/api/src/cht.rs index 96a5a272916e5..50b54a17f8c0a 100644 --- a/client/api/src/cht.rs +++ b/client/api/src/cht.rs @@ -25,15 +25,15 @@ //! root hash. A correct proof implies that the claimed block is identical to the one //! we discarded. 
-use hash_db; use codec::Encode; +use hash_db; use sp_trie; -use sp_core::{H256, convert_hash}; -use sp_runtime::traits::{Header as HeaderT, AtLeast32Bit, Zero, One}; +use sp_core::{convert_hash, H256}; +use sp_runtime::traits::{AtLeast32Bit, Header as HeaderT, One, Zero}; use sp_state_machine::{ - MemoryDB, TrieBackend, Backend as StateBackend, StorageProof, InMemoryBackend, - prove_read_on_trie_backend, read_proof_check, read_proof_check_on_proving_backend + prove_read_on_trie_backend, read_proof_check, read_proof_check_on_proving_backend, + Backend as StateBackend, InMemoryBackend, MemoryDB, StorageProof, TrieBackend, }; use sp_blockchain::{Error as ClientError, Result as ClientResult}; @@ -49,17 +49,17 @@ pub fn size>() -> N { /// Returns Some(cht_number) if CHT is need to be built when the block with given number is canonized. pub fn is_build_required(cht_size: N, block_num: N) -> Option - where - N: Clone + AtLeast32Bit, +where + N: Clone + AtLeast32Bit, { let block_cht_num = block_to_cht_number(cht_size.clone(), block_num.clone())?; let two = N::one() + N::one(); if block_cht_num < two { - return None; + return None } let cht_start = start_number(cht_size, block_cht_num.clone()); if cht_start != block_num { - return None; + return None } Some(block_cht_num - two) @@ -67,13 +67,13 @@ pub fn is_build_required(cht_size: N, block_num: N) -> Option /// Returns Some(max_cht_number) if CHT has ever been built given maximal canonical block number. 
pub fn max_cht_number(cht_size: N, max_canonical_block: N) -> Option - where - N: Clone + AtLeast32Bit, +where + N: Clone + AtLeast32Bit, { let max_cht_number = block_to_cht_number(cht_size, max_canonical_block)?; let two = N::one() + N::one(); if max_cht_number < two { - return None; + return None } Some(max_cht_number - two) } @@ -86,16 +86,16 @@ pub fn compute_root( cht_num: Header::Number, hashes: I, ) -> ClientResult - where - Header: HeaderT, - Hasher: hash_db::Hasher, - Hasher::Out: Ord, - I: IntoIterator>>, +where + Header: HeaderT, + Hasher: hash_db::Hasher, + Hasher::Out: Ord, + I: IntoIterator>>, { use sp_trie::TrieConfiguration; - Ok(sp_trie::trie_types::Layout::::trie_root( - build_pairs::(cht_size, cht_num, hashes)? - )) + Ok(sp_trie::trie_types::Layout::::trie_root(build_pairs::( + cht_size, cht_num, hashes, + )?)) } /// Build CHT-based header proof. @@ -103,26 +103,28 @@ pub fn build_proof( cht_size: Header::Number, cht_num: Header::Number, blocks: BlocksI, - hashes: HashesI + hashes: HashesI, ) -> ClientResult - where - Header: HeaderT, - Hasher: hash_db::Hasher, - Hasher::Out: Ord + codec::Codec, - BlocksI: IntoIterator, - HashesI: IntoIterator>>, +where + Header: HeaderT, + Hasher: hash_db::Hasher, + Hasher::Out: Ord + codec::Codec, + BlocksI: IntoIterator, + HashesI: IntoIterator>>, { let transaction = build_pairs::(cht_size, cht_num, hashes)? .into_iter() .map(|(k, v)| (k, Some(v))) .collect::>(); let mut storage = InMemoryBackend::::default().update(vec![(None, transaction)]); - let trie_storage = storage.as_trie_backend() + let trie_storage = storage + .as_trie_backend() .expect("InMemoryState::as_trie_backend always returns Some; qed"); prove_read_on_trie_backend( trie_storage, blocks.into_iter().map(|number| encode_cht_key(number)), - ).map_err(ClientError::from_state) + ) + .map_err(ClientError::from_state) } /// Check CHT-based header proof. 
@@ -132,25 +134,24 @@ pub fn check_proof( remote_hash: Header::Hash, remote_proof: StorageProof, ) -> ClientResult<()> - where - Header: HeaderT, - Hasher: hash_db::Hasher, - Hasher::Out: Ord + codec::Codec, +where + Header: HeaderT, + Hasher: hash_db::Hasher, + Hasher::Out: Ord + codec::Codec, { do_check_proof::( local_root, local_number, remote_hash, - move |local_root, local_cht_key| + move |local_root, local_cht_key| { read_proof_check::( local_root, remote_proof, ::std::iter::once(local_cht_key), ) - .map(|mut map| map - .remove(local_cht_key) - .expect("checked proof of local_cht_key; qed")) - .map_err(ClientError::from_state), + .map(|mut map| map.remove(local_cht_key).expect("checked proof of local_cht_key; qed")) + .map_err(ClientError::from_state) + }, ) } @@ -161,20 +162,19 @@ pub fn check_proof_on_proving_backend( remote_hash: Header::Hash, proving_backend: &TrieBackend, Hasher>, ) -> ClientResult<()> - where - Header: HeaderT, - Hasher: hash_db::Hasher, - Hasher::Out: Ord + codec::Codec, +where + Header: HeaderT, + Hasher: hash_db::Hasher, + Hasher::Out: Ord + codec::Codec, { do_check_proof::( local_root, local_number, remote_hash, - |_, local_cht_key| - read_proof_check_on_proving_backend::( - proving_backend, - local_cht_key, - ).map_err(ClientError::from_state), + |_, local_cht_key| { + read_proof_check_on_proving_backend::(proving_backend, local_cht_key) + .map_err(ClientError::from_state) + }, ) } @@ -185,22 +185,22 @@ fn do_check_proof( remote_hash: Header::Hash, checker: F, ) -> ClientResult<()> - where - Header: HeaderT, - Hasher: hash_db::Hasher, - Hasher::Out: Ord, - F: FnOnce(Hasher::Out, &[u8]) -> ClientResult>>, +where + Header: HeaderT, + Hasher: hash_db::Hasher, + Hasher::Out: Ord, + F: FnOnce(Hasher::Out, &[u8]) -> ClientResult>>, { let root: Hasher::Out = convert_hash(&local_root); let local_cht_key = encode_cht_key(local_number); let local_cht_value = checker(root, &local_cht_key)?; let local_cht_value = local_cht_value.ok_or_else(|| 
ClientError::InvalidCHTProof)?; - let local_hash = decode_cht_value(&local_cht_value).ok_or_else(|| ClientError::InvalidCHTProof)?; + let local_hash = + decode_cht_value(&local_cht_value).ok_or_else(|| ClientError::InvalidCHTProof)?; match &local_hash[..] == remote_hash.as_ref() { true => Ok(()), false => Err(ClientError::InvalidCHTProof.into()), } - } /// Group ordered blocks by CHT number and call functor with blocks of each group. @@ -210,29 +210,31 @@ pub fn for_each_cht_group( mut functor: F, mut functor_param: P, ) -> ClientResult<()> - where - Header: HeaderT, - I: IntoIterator, - F: FnMut(P, Header::Number, Vec) -> ClientResult

, +where + Header: HeaderT, + I: IntoIterator, + F: FnMut(P, Header::Number, Vec) -> ClientResult

, { let mut current_cht_num = None; let mut current_cht_blocks = Vec::new(); for block in blocks { - let new_cht_num = block_to_cht_number(cht_size, block).ok_or_else(|| ClientError::Backend(format!( - "Cannot compute CHT root for the block #{}", block)) - )?; + let new_cht_num = block_to_cht_number(cht_size, block).ok_or_else(|| { + ClientError::Backend(format!("Cannot compute CHT root for the block #{}", block)) + })?; let advance_to_next_cht = current_cht_num.is_some() && current_cht_num != Some(new_cht_num); if advance_to_next_cht { - let current_cht_num = current_cht_num.expect("advance_to_next_cht is true; - it is true only when current_cht_num is Some; qed"); - assert!(new_cht_num > current_cht_num, "for_each_cht_group only supports ordered iterators"); - - functor_param = functor( - functor_param, - current_cht_num, - std::mem::take(&mut current_cht_blocks), - )?; + let current_cht_num = current_cht_num.expect( + "advance_to_next_cht is true; + it is true only when current_cht_num is Some; qed", + ); + assert!( + new_cht_num > current_cht_num, + "for_each_cht_group only supports ordered iterators" + ); + + functor_param = + functor(functor_param, current_cht_num, std::mem::take(&mut current_cht_blocks))?; } current_cht_blocks.push(block); @@ -240,11 +242,7 @@ pub fn for_each_cht_group( } if let Some(current_cht_num) = current_cht_num { - functor( - functor_param, - current_cht_num, - std::mem::take(&mut current_cht_blocks), - )?; + functor(functor_param, current_cht_num, std::mem::take(&mut current_cht_blocks))?; } Ok(()) @@ -254,26 +252,22 @@ pub fn for_each_cht_group( fn build_pairs( cht_size: Header::Number, cht_num: Header::Number, - hashes: I + hashes: I, ) -> ClientResult, Vec)>> - where - Header: HeaderT, - I: IntoIterator>>, +where + Header: HeaderT, + I: IntoIterator>>, { let start_num = start_number(cht_size, cht_num); let mut pairs = Vec::new(); let mut hash_index = Header::Number::zero(); for hash in hashes.into_iter() { - let hash = 
hash?.ok_or_else(|| ClientError::from( - ClientError::MissingHashRequiredForCHT - ))?; - pairs.push(( - encode_cht_key(start_num + hash_index).to_vec(), - encode_cht_value(hash) - )); + let hash = + hash?.ok_or_else(|| ClientError::from(ClientError::MissingHashRequiredForCHT))?; + pairs.push((encode_cht_key(start_num + hash_index).to_vec(), encode_cht_value(hash))); hash_index += Header::Number::one(); if hash_index == cht_size { - break; + break } } @@ -325,7 +319,6 @@ pub fn decode_cht_value(value: &[u8]) -> Option { 32 => Some(H256::from_slice(&value[0..32])), _ => None, } - } #[cfg(test)] @@ -379,8 +372,12 @@ mod tests { #[test] fn build_pairs_fails_when_no_enough_blocks() { - assert!(build_pairs::(SIZE as _, 0, - ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize / 2)).is_err()); + assert!(build_pairs::( + SIZE as _, + 0, + ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize / 2) + ) + .is_err()); } #[test] @@ -391,9 +388,12 @@ mod tests { ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))) .take(SIZE as usize / 2) .chain(::std::iter::once(Ok(None))) - .chain(::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(2)))) - .take(SIZE as usize / 2 - 1)) - ).is_err()); + .chain( + ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(2)))) + .take(SIZE as usize / 2 - 1) + ) + ) + .is_err()); } #[test] @@ -401,9 +401,9 @@ mod tests { assert!(compute_root::( SIZE as _, 42, - ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))) - .take(SIZE as usize) - ).is_ok()); + ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize) + ) + .is_ok()); } #[test] @@ -413,9 +413,9 @@ mod tests { SIZE as _, 0, vec![(SIZE * 1000) as u64], - ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))) - .take(SIZE as usize) - ).is_err()); + ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize) + ) + .is_err()); } #[test] @@ -424,9 
+424,9 @@ mod tests { SIZE as _, 0, vec![(SIZE / 2) as u64], - ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))) - .take(SIZE as usize) - ).is_ok()); + ::std::iter::repeat_with(|| Ok(Some(H256::from_low_u64_be(1)))).take(SIZE as usize) + ) + .is_ok()); } #[test] @@ -447,19 +447,27 @@ mod tests { let _ = for_each_cht_group::( cht_size, vec![ - cht_size * 2 + 1, cht_size * 2 + 2, cht_size * 2 + 5, - cht_size * 4 + 1, cht_size * 4 + 7, - cht_size * 6 + 1 - ], |_, cht_num, blocks| { + cht_size * 2 + 1, + cht_size * 2 + 2, + cht_size * 2 + 5, + cht_size * 4 + 1, + cht_size * 4 + 7, + cht_size * 6 + 1, + ], + |_, cht_num, blocks| { match cht_num { - 2 => assert_eq!(blocks, vec![cht_size * 2 + 1, cht_size * 2 + 2, cht_size * 2 + 5]), + 2 => assert_eq!( + blocks, + vec![cht_size * 2 + 1, cht_size * 2 + 2, cht_size * 2 + 5] + ), 4 => assert_eq!(blocks, vec![cht_size * 4 + 1, cht_size * 4 + 7]), 6 => assert_eq!(blocks, vec![cht_size * 6 + 1]), _ => unreachable!(), } Ok(()) - }, () + }, + (), ); } } diff --git a/client/api/src/client.rs b/client/api/src/client.rs index 3f4dfc8f35be1..69c89f1aa5f6f 100644 --- a/client/api/src/client.rs +++ b/client/api/src/client.rs @@ -18,20 +18,19 @@ //! A set of APIs supported by the client along with their primitives. 
-use std::{fmt, collections::HashSet, sync::Arc, convert::TryFrom}; +use sp_consensus::BlockOrigin; use sp_core::storage::StorageKey; use sp_runtime::{ - traits::{Block as BlockT, NumberFor}, generic::{BlockId, SignedBlock}, + traits::{Block as BlockT, NumberFor}, Justifications, }; -use sp_consensus::BlockOrigin; +use std::{collections::HashSet, convert::TryFrom, fmt, sync::Arc}; -use crate::blockchain::Info; -use crate::notifications::StorageEventStream; -use sp_utils::mpsc::TracingUnboundedReceiver; -use sp_blockchain; +use crate::{blockchain::Info, notifications::StorageEventStream}; use sc_transaction_pool_api::ChainEvent; +use sp_blockchain; +use sp_utils::mpsc::TracingUnboundedReceiver; /// Type that implements `futures::Stream` of block import events. pub type ImportNotifications = TracingUnboundedReceiver>; @@ -82,7 +81,7 @@ pub trait BlockBackend { /// Get block body by ID. Returns `None` if the body is not stored. fn block_body( &self, - id: &BlockId + id: &BlockId, ) -> sp_blockchain::Result::Extrinsic>>>; /// Get all indexed transactions for a block, @@ -99,7 +98,8 @@ pub trait BlockBackend { fn block(&self, id: &BlockId) -> sp_blockchain::Result>>; /// Get block status. - fn block_status(&self, id: &BlockId) -> sp_blockchain::Result; + fn block_status(&self, id: &BlockId) + -> sp_blockchain::Result; /// Get block justifications for the block with the given id. fn justifications(&self, id: &BlockId) -> sp_blockchain::Result>; @@ -107,14 +107,11 @@ pub trait BlockBackend { /// Get block hash by number. fn block_hash(&self, number: NumberFor) -> sp_blockchain::Result>; - /// Get single indexed transaction by content hash. + /// Get single indexed transaction by content hash. /// /// Note that this will only fetch transactions /// that are indexed by the runtime with `storage_index_transaction`. 
- fn indexed_transaction( - &self, - hash: &Block::Hash, - ) -> sp_blockchain::Result>>; + fn indexed_transaction(&self, hash: &Block::Hash) -> sp_blockchain::Result>>; /// Check if transaction index exists. fn has_indexed_transaction(&self, hash: &Block::Hash) -> sp_blockchain::Result { @@ -125,8 +122,11 @@ pub trait BlockBackend { /// Provide a list of potential uncle headers for a given block. pub trait ProvideUncles { /// Gets the uncles of the block with `target_hash` going back `max_generation` ancestors. - fn uncles(&self, target_hash: Block::Hash, max_generation: NumberFor) - -> sp_blockchain::Result>; + fn uncles( + &self, + target_hash: Block::Hash, + max_generation: NumberFor, + ) -> sp_blockchain::Result>; } /// Client info @@ -284,10 +284,7 @@ impl TryFrom> for ChainEvent { fn try_from(n: BlockImportNotification) -> Result { if n.is_new_best { - Ok(Self::NewBestBlock { - hash: n.hash, - tree_route: n.tree_route, - }) + Ok(Self::NewBestBlock { hash: n.hash, tree_route: n.tree_route }) } else { Err(()) } @@ -296,8 +293,6 @@ impl TryFrom> for ChainEvent { impl From> for ChainEvent { fn from(n: FinalityNotification) -> Self { - Self::Finalized { - hash: n.hash, - } + Self::Finalized { hash: n.hash } } } diff --git a/client/api/src/execution_extensions.rs b/client/api/src/execution_extensions.rs index fbde16afc7952..ec44294b8a96c 100644 --- a/client/api/src/execution_extensions.rs +++ b/client/api/src/execution_extensions.rs @@ -22,22 +22,19 @@ //! strategy for the runtime calls and provide the right `Externalities` //! extensions to support APIs for particular execution context & capabilities. 
-use std::sync::{Weak, Arc}; use codec::Decode; +use parking_lot::RwLock; +use sc_transaction_pool_api::OffchainSubmitTransaction; use sp_core::{ + offchain::{self, OffchainDbExt, OffchainWorkerExt, TransactionPoolExt}, ExecutionContext, - offchain::{self, OffchainWorkerExt, TransactionPoolExt, OffchainDbExt}, }; +use sp_externalities::Extensions; use sp_keystore::{KeystoreExt, SyncCryptoStorePtr}; -use sp_runtime::{ - generic::BlockId, - traits, -}; -use sp_state_machine::{ExecutionManager, DefaultHandler}; +use sp_runtime::{generic::BlockId, traits}; pub use sp_state_machine::ExecutionStrategy; -use sp_externalities::Extensions; -use parking_lot::RwLock; -use sc_transaction_pool_api::OffchainSubmitTransaction; +use sp_state_machine::{DefaultHandler, ExecutionManager}; +use std::sync::{Arc, Weak}; /// Execution strategies settings. #[derive(Debug, Clone)] @@ -151,7 +148,8 @@ impl ExecutionExtensions { /// Register transaction pool extension. pub fn register_transaction_pool(&self, pool: &Arc) - where T: OffchainSubmitTransaction + 'static + where + T: OffchainSubmitTransaction + 'static, { *self.transaction_pool.write() = Some(Arc::downgrade(&pool) as _); } @@ -171,14 +169,10 @@ impl ExecutionExtensions { if capabilities.has(offchain::Capability::TransactionPool) { if let Some(pool) = self.transaction_pool.read().as_ref().and_then(|x| x.upgrade()) { - extensions.register( - TransactionPoolExt( - Box::new(TransactionPoolAdapter { - at: *at, - pool, - }) as _ - ), - ); + extensions + .register(TransactionPoolExt( + Box::new(TransactionPoolAdapter { at: *at, pool }) as _, + )); } } @@ -186,19 +180,18 @@ impl ExecutionExtensions { capabilities.has(offchain::Capability::OffchainDbWrite) { if let Some(offchain_db) = self.offchain_db.as_ref() { - extensions.register( - OffchainDbExt::new(offchain::LimitedExternalities::new( - capabilities, - offchain_db.create(), - )) - ); + extensions.register(OffchainDbExt::new(offchain::LimitedExternalities::new( + capabilities, + 
offchain_db.create(), + ))); } } if let ExecutionContext::OffchainCall(Some(ext)) = context { - extensions.register( - OffchainWorkerExt::new(offchain::LimitedExternalities::new(capabilities, ext.0)), - ); + extensions.register(OffchainWorkerExt::new(offchain::LimitedExternalities::new( + capabilities, + ext.0, + ))); } extensions @@ -212,21 +205,14 @@ impl ExecutionExtensions { &self, at: &BlockId, context: ExecutionContext, - ) -> ( - ExecutionManager>, - Extensions, - ) { + ) -> (ExecutionManager>, Extensions) { let manager = match context { - ExecutionContext::BlockConstruction => - self.strategies.block_construction.get_manager(), - ExecutionContext::Syncing => - self.strategies.syncing.get_manager(), - ExecutionContext::Importing => - self.strategies.importing.get_manager(), + ExecutionContext::BlockConstruction => self.strategies.block_construction.get_manager(), + ExecutionContext::Syncing => self.strategies.syncing.get_manager(), + ExecutionContext::Importing => self.strategies.importing.get_manager(), ExecutionContext::OffchainCall(Some((_, capabilities))) if capabilities.has_all() => self.strategies.offchain_worker.get_manager(), - ExecutionContext::OffchainCall(_) => - self.strategies.other.get_manager(), + ExecutionContext::OffchainCall(_) => self.strategies.other.get_manager(), }; (manager, self.extensions(at, context)) @@ -245,7 +231,7 @@ impl offchain::TransactionPool for TransactionPoolAdapter< Ok(xt) => xt, Err(e) => { log::warn!("Unable to decode extrinsic: {:?}: {}", data, e); - return Err(()); + return Err(()) }, }; diff --git a/client/api/src/in_mem.rs b/client/api/src/in_mem.rs index 505b69981694a..eea2c73acd1b8 100644 --- a/client/api/src/in_mem.rs +++ b/client/api/src/in_mem.rs @@ -18,30 +18,31 @@ //! 
In memory client backend -use std::collections::{HashMap, HashSet}; -use std::ptr; -use std::sync::Arc; use parking_lot::RwLock; +use sp_blockchain::{CachedHeaderMetadata, HeaderMetadata}; use sp_core::{ - storage::well_known_keys, offchain::storage::InMemOffchainStorage as OffchainStorage, + offchain::storage::InMemOffchainStorage as OffchainStorage, storage::well_known_keys, +}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, HashFor, Header as HeaderT, NumberFor, Zero}, + Justification, Justifications, Storage, }; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, Zero, NumberFor, HashFor}; -use sp_runtime::{Justification, Justifications, Storage}; use sp_state_machine::{ - ChangesTrieTransaction, InMemoryBackend, Backend as StateBackend, StorageCollection, - ChildStorageCollection, IndexOperation, + Backend as StateBackend, ChangesTrieTransaction, ChildStorageCollection, InMemoryBackend, + IndexOperation, StorageCollection, +}; +use std::{ + collections::{HashMap, HashSet}, + ptr, + sync::Arc, }; -use sp_blockchain::{CachedHeaderMetadata, HeaderMetadata}; use crate::{ backend::{self, NewBlockState, ProvideChtRoots}, - blockchain::{ - self, BlockStatus, HeaderBackend, well_known_cache_keys::Id as CacheKeyId - }, - UsageInfo, - light, + blockchain::{self, well_known_cache_keys::Id as CacheKeyId, BlockStatus, HeaderBackend}, leaves::LeafSet, + light, UsageInfo, }; struct PendingBlock { @@ -56,7 +57,11 @@ enum StoredBlock { } impl StoredBlock { - fn new(header: B::Header, body: Option>, just: Option) -> Self { + fn new( + header: B::Header, + body: Option>, + just: Option, + ) -> Self { match body { Some(body) => StoredBlock::Full(B::new(header, body), just), None => StoredBlock::Header(header, just), @@ -72,7 +77,7 @@ impl StoredBlock { fn justifications(&self) -> Option<&Justifications> { match *self { - StoredBlock::Header(_, ref j) | StoredBlock::Full(_, ref j) => j.as_ref() + 
StoredBlock::Header(_, ref j) | StoredBlock::Full(_, ref j) => j.as_ref(), } } @@ -89,7 +94,7 @@ impl StoredBlock { StoredBlock::Full(block, just) => { let (header, body) = block.deconstruct(); (header, Some(body), just) - } + }, } } } @@ -123,9 +128,7 @@ impl Default for Blockchain { impl Clone for Blockchain { fn clone(&self) -> Self { let storage = Arc::new(RwLock::new(self.storage.read().clone())); - Blockchain { - storage, - } + Blockchain { storage } } } @@ -140,23 +143,20 @@ impl Blockchain { /// Create new in-memory blockchain storage. pub fn new() -> Blockchain { - let storage = Arc::new(RwLock::new( - BlockchainStorage { - blocks: HashMap::new(), - hashes: HashMap::new(), - best_hash: Default::default(), - best_number: Zero::zero(), - finalized_hash: Default::default(), - finalized_number: Zero::zero(), - genesis_hash: Default::default(), - header_cht_roots: HashMap::new(), - changes_trie_cht_roots: HashMap::new(), - leaves: LeafSet::new(), - aux: HashMap::new(), - })); - Blockchain { - storage, - } + let storage = Arc::new(RwLock::new(BlockchainStorage { + blocks: HashMap::new(), + hashes: HashMap::new(), + best_hash: Default::default(), + best_number: Zero::zero(), + finalized_hash: Default::default(), + finalized_number: Zero::zero(), + genesis_hash: Default::default(), + header_cht_roots: HashMap::new(), + changes_trie_cht_roots: HashMap::new(), + leaves: LeafSet::new(), + aux: HashMap::new(), + })); + Blockchain { storage } } /// Insert a block header and associated data. 
@@ -175,8 +175,12 @@ impl Blockchain { { let mut storage = self.storage.write(); - storage.leaves.import(hash.clone(), number.clone(), header.parent_hash().clone()); - storage.blocks.insert(hash.clone(), StoredBlock::new(header, body, justifications)); + storage + .leaves + .import(hash.clone(), number.clone(), header.parent_hash().clone()); + storage + .blocks + .insert(hash.clone(), StoredBlock::new(header, body, justifications)); if let NewBlockState::Final = new_state { storage.finalized_hash = hash; @@ -200,7 +204,7 @@ impl Blockchain { pub fn equals_to(&self, other: &Self) -> bool { // Check ptr equality first to avoid double read locks. if ptr::eq(self, other) { - return true; + return true } self.canon_equals_to(other) && self.storage.read().blocks == other.storage.read().blocks } @@ -209,14 +213,14 @@ impl Blockchain { pub fn canon_equals_to(&self, other: &Self) -> bool { // Check ptr equality first to avoid double read locks. if ptr::eq(self, other) { - return true; + return true } let this = self.storage.read(); let other = other.storage.read(); - this.hashes == other.hashes - && this.best_hash == other.best_hash - && this.best_number == other.best_number - && this.genesis_hash == other.genesis_hash + this.hashes == other.hashes && + this.best_hash == other.best_hash && + this.best_number == other.best_number && + this.genesis_hash == other.genesis_hash } /// Insert header CHT root. @@ -226,7 +230,8 @@ impl Blockchain { /// Set an existing block as head. pub fn set_head(&self, id: BlockId) -> sp_blockchain::Result<()> { - let header = self.header(id)? + let header = self + .header(id)? 
.ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{}", id)))?; self.apply_head(&header) @@ -270,7 +275,11 @@ impl Blockchain { Ok(()) } - fn finalize_header(&self, id: BlockId, justification: Option) -> sp_blockchain::Result<()> { + fn finalize_header( + &self, + id: BlockId, + justification: Option, + ) -> sp_blockchain::Result<()> { let hash = match self.header(id)? { Some(h) => h.hash(), None => return Err(sp_blockchain::Error::UnknownBlock(format!("{}", id))), @@ -280,11 +289,13 @@ impl Blockchain { storage.finalized_hash = hash; if justification.is_some() { - let block = storage.blocks.get_mut(&hash) + let block = storage + .blocks + .get_mut(&hash) .expect("hash was fetched from a block in the db; qed"); let block_justifications = match block { - StoredBlock::Header(_, ref mut j) | StoredBlock::Full(_, ref mut j) => j + StoredBlock::Header(_, ref mut j) | StoredBlock::Full(_, ref mut j) => j, }; *block_justifications = justification.map(Justifications::from); @@ -293,9 +304,11 @@ impl Blockchain { Ok(()) } - fn append_justification(&self, id: BlockId, justification: Justification) - -> sp_blockchain::Result<()> - { + fn append_justification( + &self, + id: BlockId, + justification: Justification, + ) -> sp_blockchain::Result<()> { let hash = self.expect_block_hash_from_id(&id)?; let mut storage = self.storage.write(); @@ -305,14 +318,14 @@ impl Blockchain { .expect("hash was fetched from a block in the db; qed"); let block_justifications = match block { - StoredBlock::Header(_, ref mut j) | StoredBlock::Full(_, ref mut j) => j + StoredBlock::Header(_, ref mut j) | StoredBlock::Full(_, ref mut j) => j, }; if let Some(stored_justifications) = block_justifications { if !stored_justifications.append(justification) { return Err(sp_blockchain::Error::BadJustification( - "Duplicate consensus engine ID".into() - )); + "Duplicate consensus engine ID".into(), + )) } } else { *block_justifications = Some(Justifications::from(justification)); @@ -333,10 +346,13 
@@ impl Blockchain { } impl HeaderBackend for Blockchain { - fn header(&self, id: BlockId) -> sp_blockchain::Result::Header>> { - Ok(self.id(id).and_then(|hash| { - self.storage.read().blocks.get(&hash).map(|b| b.header().clone()) - })) + fn header( + &self, + id: BlockId, + ) -> sp_blockchain::Result::Header>> { + Ok(self + .id(id) + .and_then(|hash| self.storage.read().blocks.get(&hash).map(|b| b.header().clone()))) } fn info(&self) -> blockchain::Info { @@ -352,7 +368,7 @@ impl HeaderBackend for Blockchain { } else { None }, - number_leaves: storage.leaves.count() + number_leaves: storage.leaves.count(), } } @@ -367,7 +383,10 @@ impl HeaderBackend for Blockchain { Ok(self.storage.read().blocks.get(&hash).map(|b| *b.header().number())) } - fn hash(&self, number: <::Header as HeaderT>::Number) -> sp_blockchain::Result> { + fn hash( + &self, + number: <::Header as HeaderT>::Number, + ) -> sp_blockchain::Result> { Ok(self.id(BlockId::Number(number))) } } @@ -375,9 +394,15 @@ impl HeaderBackend for Blockchain { impl HeaderMetadata for Blockchain { type Error = sp_blockchain::Error; - fn header_metadata(&self, hash: Block::Hash) -> Result, Self::Error> { - self.header(BlockId::hash(hash))?.map(|header| CachedHeaderMetadata::from(&header)) - .ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("header not found: {}", hash))) + fn header_metadata( + &self, + hash: Block::Hash, + ) -> Result, Self::Error> { + self.header(BlockId::hash(hash))? 
+ .map(|header| CachedHeaderMetadata::from(&header)) + .ok_or_else(|| { + sp_blockchain::Error::UnknownBlock(format!("header not found: {}", hash)) + }) } fn insert_header_metadata(&self, _hash: Block::Hash, _metadata: CachedHeaderMetadata) { @@ -389,17 +414,27 @@ impl HeaderMetadata for Blockchain { } impl blockchain::Backend for Blockchain { - fn body(&self, id: BlockId) -> sp_blockchain::Result::Extrinsic>>> { + fn body( + &self, + id: BlockId, + ) -> sp_blockchain::Result::Extrinsic>>> { Ok(self.id(id).and_then(|hash| { - self.storage.read().blocks.get(&hash) + self.storage + .read() + .blocks + .get(&hash) .and_then(|b| b.extrinsics().map(|x| x.to_vec())) })) } fn justifications(&self, id: BlockId) -> sp_blockchain::Result> { - Ok(self.id(id).and_then(|hash| self.storage.read().blocks.get(&hash).and_then(|b| - b.justifications().map(|x| x.clone())) - )) + Ok(self.id(id).and_then(|hash| { + self.storage + .read() + .blocks + .get(&hash) + .and_then(|b| b.justifications().map(|x| x.clone())) + })) } fn last_finalized(&self) -> sp_blockchain::Result { @@ -418,16 +453,13 @@ impl blockchain::Backend for Blockchain { unimplemented!() } - fn indexed_transaction( - &self, - _hash: &Block::Hash, - ) -> sp_blockchain::Result>> { + fn indexed_transaction(&self, _hash: &Block::Hash) -> sp_blockchain::Result>> { unimplemented!("Not supported by the in-mem backend.") } fn block_indexed_body( &self, - _id: BlockId + _id: BlockId, ) -> sp_blockchain::Result>>> { unimplemented!("Not supported by the in-mem backend.") } @@ -444,9 +476,13 @@ impl backend::AuxStore for Blockchain { 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, delete: D) -> sp_blockchain::Result<()> { + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> sp_blockchain::Result<()> { let mut storage = self.storage.write(); for (k, v) in insert { storage.aux.insert(k.to_vec(), v.to_vec()); @@ -463,8 +499,8 @@ impl backend::AuxStore for Blockchain { } 
impl light::Storage for Blockchain - where - Block::Hash: From<[u8; 32]>, +where + Block::Hash: From<[u8; 32]>, { fn import_header( &self, @@ -507,8 +543,14 @@ impl ProvideChtRoots for Blockchain { _cht_size: NumberFor, block: NumberFor, ) -> sp_blockchain::Result> { - self.storage.read().header_cht_roots.get(&block).cloned() - .ok_or_else(|| sp_blockchain::Error::Backend(format!("Header CHT for block {} not exists", block))) + self.storage + .read() + .header_cht_roots + .get(&block) + .cloned() + .ok_or_else(|| { + sp_blockchain::Error::Backend(format!("Header CHT for block {} not exists", block)) + }) .map(Some) } @@ -517,8 +559,17 @@ impl ProvideChtRoots for Blockchain { _cht_size: NumberFor, block: NumberFor, ) -> sp_blockchain::Result> { - self.storage.read().changes_trie_cht_roots.get(&block).cloned() - .ok_or_else(|| sp_blockchain::Error::Backend(format!("Changes trie CHT for block {} not exists", block))) + self.storage + .read() + .changes_trie_cht_roots + .get(&block) + .cloned() + .ok_or_else(|| { + sp_blockchain::Error::Backend(format!( + "Changes trie CHT for block {} not exists", + block + )) + }) .map(Some) } } @@ -527,25 +578,30 @@ impl ProvideChtRoots for Blockchain { pub struct BlockImportOperation { pending_block: Option>, old_state: InMemoryBackend>, - new_state: Option<> as StateBackend>>::Transaction>, + new_state: + Option<> as StateBackend>>::Transaction>, aux: Vec<(Vec, Option>)>, finalized_blocks: Vec<(BlockId, Option)>, set_head: Option>, } -impl BlockImportOperation where +impl BlockImportOperation +where Block::Hash: Ord, { - fn apply_storage(&mut self, storage: Storage, commit: bool) -> sp_blockchain::Result { + fn apply_storage( + &mut self, + storage: Storage, + commit: bool, + ) -> sp_blockchain::Result { check_genesis_storage(&storage)?; - let child_delta = storage.children_default.iter() - .map(|(_storage_key, child_content)| - ( - &child_content.child_info, - child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))) 
- ) - ); + let child_delta = storage.children_default.iter().map(|(_storage_key, child_content)| { + ( + &child_content.child_info, + child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), + ) + }); let (root, transaction) = self.old_state.full_storage_root( storage.top.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), @@ -559,7 +615,8 @@ impl BlockImportOperation where } } -impl backend::BlockImportOperation for BlockImportOperation where +impl backend::BlockImportOperation for BlockImportOperation +where Block::Hash: Ord, { type State = InMemoryBackend>; @@ -577,10 +634,8 @@ impl backend::BlockImportOperation for BlockImportOperatio state: NewBlockState, ) -> sp_blockchain::Result<()> { assert!(self.pending_block.is_none(), "Only one block per operation is allowed"); - self.pending_block = Some(PendingBlock { - block: StoredBlock::new(header, body, justifications), - state, - }); + self.pending_block = + Some(PendingBlock { block: StoredBlock::new(header, body, justifications), state }); Ok(()) } @@ -601,7 +656,11 @@ impl backend::BlockImportOperation for BlockImportOperatio Ok(()) } - fn set_genesis_state(&mut self, storage: Storage, commit: bool) -> sp_blockchain::Result { + fn set_genesis_state( + &mut self, + storage: Storage, + commit: bool, + ) -> sp_blockchain::Result { self.apply_storage(storage, commit) } @@ -610,7 +669,8 @@ impl backend::BlockImportOperation for BlockImportOperatio } fn insert_aux(&mut self, ops: I) -> sp_blockchain::Result<()> - where I: IntoIterator, Option>)> + where + I: IntoIterator, Option>)>, { self.aux.append(&mut ops.into_iter().collect()); Ok(()) @@ -639,7 +699,10 @@ impl backend::BlockImportOperation for BlockImportOperatio Ok(()) } - fn update_transaction_index(&mut self, _index: Vec) -> sp_blockchain::Result<()> { + fn update_transaction_index( + &mut self, + _index: Vec, + ) -> sp_blockchain::Result<()> { Ok(()) } } @@ -648,13 +711,19 @@ impl backend::BlockImportOperation for BlockImportOperatio /// /// 
> **Warning**: Doesn't support all the features necessary for a proper database. Only use this /// > struct for testing purposes. Do **NOT** use in production. -pub struct Backend where Block::Hash: Ord { +pub struct Backend +where + Block::Hash: Ord, +{ states: RwLock>>>, blockchain: Blockchain, import_lock: RwLock<()>, } -impl Backend where Block::Hash: Ord { +impl Backend +where + Block::Hash: Ord, +{ /// Create a new instance of in-mem backend. pub fn new() -> Self { Backend { @@ -665,14 +734,21 @@ impl Backend where Block::Hash: Ord { } } -impl backend::AuxStore for Backend where Block::Hash: Ord { +impl backend::AuxStore for Backend +where + Block::Hash: Ord, +{ fn insert_aux< 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, delete: D) -> sp_blockchain::Result<()> { + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> sp_blockchain::Result<()> { self.blockchain.insert_aux(insert, delete) } @@ -681,7 +757,10 @@ impl backend::AuxStore for Backend where Block::Hash: Ord } } -impl backend::Backend for Backend where Block::Hash: Ord { +impl backend::Backend for Backend +where + Block::Hash: Ord, +{ type BlockImportOperation = BlockImportOperation; type Blockchain = Blockchain; type State = InMemoryBackend>; @@ -708,10 +787,7 @@ impl backend::Backend for Backend where Block::Hash Ok(()) } - fn commit_operation( - &self, - operation: Self::BlockImportOperation, - ) -> sp_blockchain::Result<()> { + fn commit_operation(&self, operation: Self::BlockImportOperation) -> sp_blockchain::Result<()> { if !operation.finalized_blocks.is_empty() { for (block, justification) in operation.finalized_blocks { self.blockchain.finalize_header(block, justification)?; @@ -780,12 +856,14 @@ impl backend::Backend for Backend where Block::Hash fn state_at(&self, block: BlockId) -> sp_blockchain::Result { match block { BlockId::Hash(h) if h == Default::default() => { - return Ok(Self::State::default()); + return 
Ok(Self::State::default()) }, _ => {}, } - self.blockchain.id(block).and_then(|id| self.states.read().get(&id).cloned()) + self.blockchain + .id(block) + .and_then(|id| self.states.read().get(&id).cloned()) .ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{}", block))) } @@ -797,10 +875,7 @@ impl backend::Backend for Backend where Block::Hash Ok((Zero::zero(), HashSet::new())) } - fn remove_leaf_block( - &self, - _hash: &Block::Hash, - ) -> sp_blockchain::Result<()> { + fn remove_leaf_block(&self, _hash: &Block::Hash) -> sp_blockchain::Result<()> { Ok(()) } @@ -811,9 +886,13 @@ impl backend::Backend for Backend where Block::Hash impl backend::LocalBackend for Backend where Block::Hash: Ord {} -impl backend::RemoteBackend for Backend where Block::Hash: Ord { +impl backend::RemoteBackend for Backend +where + Block::Hash: Ord, +{ fn is_local_state_available(&self, block: &BlockId) -> bool { - self.blockchain.expect_block_number_from_id(block) + self.blockchain + .expect_block_number_from_id(block) .map(|num| num.is_zero()) .unwrap_or(false) } @@ -826,12 +905,15 @@ impl backend::RemoteBackend for Backend where Block /// Check that genesis storage is valid. 
pub fn check_genesis_storage(storage: &Storage) -> sp_blockchain::Result<()> { if storage.top.iter().any(|(k, _)| well_known_keys::is_child_storage_key(k)) { - return Err(sp_blockchain::Error::InvalidState.into()); + return Err(sp_blockchain::Error::InvalidState.into()) } - if storage.children_default.keys() - .any(|child_key| !well_known_keys::is_child_storage_key(&child_key)) { - return Err(sp_blockchain::Error::InvalidState.into()); + if storage + .children_default + .keys() + .any(|child_key| !well_known_keys::is_child_storage_key(&child_key)) + { + return Err(sp_blockchain::Error::InvalidState.into()) } Ok(()) @@ -839,10 +921,10 @@ pub fn check_genesis_storage(storage: &Storage) -> sp_blockchain::Result<()> { #[cfg(test)] mod tests { - use crate::{NewBlockState, in_mem::Blockchain}; + use crate::{in_mem::Blockchain, NewBlockState}; use sp_api::{BlockId, HeaderT}; - use sp_runtime::{ConsensusEngineId, Justifications}; use sp_blockchain::Backend; + use sp_runtime::{ConsensusEngineId, Justifications}; use substrate_test_runtime::{Block, Header, H256}; pub const ID1: ConsensusEngineId = *b"TST1"; @@ -853,7 +935,13 @@ mod tests { 0 => Default::default(), _ => header(number - 1).hash(), }; - Header::new(number, H256::from_low_u64_be(0), H256::from_low_u64_be(0), parent_hash, Default::default()) + Header::new( + number, + H256::from_low_u64_be(0), + H256::from_low_u64_be(0), + parent_hash, + Default::default(), + ) } fn test_blockchain() -> Blockchain { @@ -862,10 +950,18 @@ mod tests { let just1 = Some(Justifications::from((ID1, vec![1]))); let just2 = None; let just3 = Some(Justifications::from((ID1, vec![3]))); - blockchain.insert(header(0).hash(), header(0), just0, None, NewBlockState::Final).unwrap(); - blockchain.insert(header(1).hash(), header(1), just1, None, NewBlockState::Final).unwrap(); - blockchain.insert(header(2).hash(), header(2), just2, None, NewBlockState::Best).unwrap(); - blockchain.insert(header(3).hash(), header(3), just3, None, 
NewBlockState::Final).unwrap(); + blockchain + .insert(header(0).hash(), header(0), just0, None, NewBlockState::Final) + .unwrap(); + blockchain + .insert(header(1).hash(), header(1), just1, None, NewBlockState::Final) + .unwrap(); + blockchain + .insert(header(2).hash(), header(2), just2, None, NewBlockState::Best) + .unwrap(); + blockchain + .insert(header(3).hash(), header(3), just3, None, NewBlockState::Final) + .unwrap(); blockchain } diff --git a/client/api/src/leaves.rs b/client/api/src/leaves.rs index 0474d5bb8fe17..db5a25b451c56 100644 --- a/client/api/src/leaves.rs +++ b/client/api/src/leaves.rs @@ -18,12 +18,11 @@ //! Helper for managing the set of available leaves in the chain for DB implementations. -use std::collections::BTreeMap; -use std::cmp::Reverse; +use codec::{Decode, Encode}; +use sp_blockchain::{Error, Result}; use sp_database::{Database, Transaction}; use sp_runtime::traits::AtLeast32Bit; -use codec::{Encode, Decode}; -use sp_blockchain::{Error, Result}; +use std::{cmp::Reverse, collections::BTreeMap}; type DbHash = sp_core::H256; @@ -57,7 +56,7 @@ impl FinalizationDisplaced { } /// Iterate over all displaced leaves. - pub fn leaves(&self) -> impl IntoIterator { + pub fn leaves(&self) -> impl IntoIterator { self.leaves.values().flatten() } } @@ -72,17 +71,14 @@ pub struct LeafSet { pending_removed: Vec, } -impl LeafSet where +impl LeafSet +where H: Clone + PartialEq + Decode + Encode, N: std::fmt::Debug + Clone + AtLeast32Bit + Decode + Encode, { /// Construct a new, blank leaf set. pub fn new() -> Self { - Self { - storage: BTreeMap::new(), - pending_added: Vec::new(), - pending_removed: Vec::new(), - } + Self { storage: BTreeMap::new(), pending_added: Vec::new(), pending_removed: Vec::new() } } /// Read the leaf list from the DB, using given prefix for keys. 
@@ -98,14 +94,10 @@ impl LeafSet where for (number, hashes) in vals.into_iter() { storage.insert(Reverse(number), hashes); } - } + }, None => {}, } - Ok(Self { - storage, - pending_added: Vec::new(), - pending_removed: Vec::new(), - }) + Ok(Self { storage, pending_added: Vec::new(), pending_removed: Vec::new() }) } /// update the leaf list on import. returns a displaced leaf if there was one. @@ -119,10 +111,7 @@ impl LeafSet where self.pending_removed.push(parent_hash.clone()); Some(ImportDisplaced { new_hash: hash.clone(), - displaced: LeafSetItem { - hash: parent_hash, - number: new_number, - }, + displaced: LeafSetItem { hash: parent_hash, number: new_number }, }) } else { None @@ -144,16 +133,15 @@ impl LeafSet where /// will be pruned soon afterwards anyway. pub fn finalize_height(&mut self, number: N) -> FinalizationDisplaced { let boundary = if number == N::zero() { - return FinalizationDisplaced { leaves: BTreeMap::new() }; + return FinalizationDisplaced { leaves: BTreeMap::new() } } else { number - N::one() }; let below_boundary = self.storage.split_off(&Reverse(boundary)); - self.pending_removed.extend(below_boundary.values().flat_map(|h| h.iter()).cloned()); - FinalizationDisplaced { - leaves: below_boundary, - } + self.pending_removed + .extend(below_boundary.values().flat_map(|h| h.iter()).cloned()); + FinalizationDisplaced { leaves: below_boundary } } /// Undo all pending operations. @@ -169,7 +157,9 @@ impl LeafSet where /// Revert to the given block height by dropping all leaves in the leaf set /// with a block number higher than the target. 
pub fn revert(&mut self, best_hash: H, best_number: N) { - let items = self.storage.iter() + let items = self + .storage + .iter() .flat_map(|(number, hashes)| hashes.iter().map(move |h| (h.clone(), number.clone()))) .collect::>(); @@ -185,7 +175,8 @@ impl LeafSet where } let best_number = Reverse(best_number); - let leaves_contains_best = self.storage + let leaves_contains_best = self + .storage .get(&best_number) .map_or(false, |hashes| hashes.contains(&best_hash)); @@ -209,7 +200,12 @@ impl LeafSet where } /// Write the leaf list to the database transaction. - pub fn prepare_transaction(&mut self, tx: &mut Transaction, column: u32, prefix: &[u8]) { + pub fn prepare_transaction( + &mut self, + tx: &mut Transaction, + column: u32, + prefix: &[u8], + ) { let leaves: Vec<_> = self.storage.iter().map(|(n, h)| (n.0.clone(), h.clone())).collect(); tx.set_from_vec(column, prefix, leaves.encode()); self.pending_added.clear(); @@ -218,7 +214,9 @@ impl LeafSet where /// Check if given block is a leaf. 
pub fn contains(&self, number: N, hash: H) -> bool { - self.storage.get(&Reverse(number)).map_or(false, |hashes| hashes.contains(&hash)) + self.storage + .get(&Reverse(number)) + .map_or(false, |hashes| hashes.contains(&hash)) } fn insert_leaf(&mut self, number: Reverse, hash: H) { @@ -230,14 +228,18 @@ impl LeafSet where let mut empty = false; let removed = self.storage.get_mut(number).map_or(false, |leaves| { let mut found = false; - leaves.retain(|h| if h == hash { - found = true; - false - } else { - true + leaves.retain(|h| { + if h == hash { + found = true; + false + } else { + true + } }); - if leaves.is_empty() { empty = true } + if leaves.is_empty() { + empty = true + } found }); @@ -255,7 +257,8 @@ pub struct Undo<'a, H: 'a, N: 'a> { inner: &'a mut LeafSet, } -impl<'a, H: 'a, N: 'a> Undo<'a, H, N> where +impl<'a, H: 'a, N: 'a> Undo<'a, H, N> +where H: Clone + PartialEq + Decode + Encode, N: std::fmt::Debug + Clone + AtLeast32Bit + Decode + Encode, { @@ -329,7 +332,7 @@ mod tests { fn two_leaves_same_height_can_be_included() { let mut set = LeafSet::new(); - set.import(1_1u32, 10u32,0u32); + set.import(1_1u32, 10u32, 0u32); set.import(1_2, 10, 0); assert!(set.storage.contains_key(&Reverse(10))); diff --git a/client/api/src/lib.rs b/client/api/src/lib.rs index 71cf499f79943..7904d43434110 100644 --- a/client/api/src/lib.rs +++ b/client/api/src/lib.rs @@ -21,27 +21,26 @@ pub mod backend; pub mod call_executor; -pub mod client; pub mod cht; +pub mod client; pub mod execution_extensions; pub mod in_mem; -pub mod light; pub mod leaves; +pub mod light; pub mod notifications; pub mod proof_provider; -pub use sp_blockchain as blockchain; pub use backend::*; -pub use notifications::*; pub use call_executor::*; pub use client::*; pub use light::*; pub use notifications::*; pub use proof_provider::*; +pub use sp_blockchain as blockchain; pub use sp_blockchain::HeaderBackend; -pub use sp_state_machine::{StorageProof, ExecutionStrategy}; -pub use 
sp_storage::{StorageData, StorageKey, PrefixedStorageKey, ChildInfo}; +pub use sp_state_machine::{ExecutionStrategy, StorageProof}; +pub use sp_storage::{ChildInfo, PrefixedStorageKey, StorageData, StorageKey}; /// Usage Information Provider interface /// @@ -52,7 +51,7 @@ pub trait UsageProvider { /// Utility methods for the client. pub mod utils { - use sp_blockchain::{HeaderBackend, HeaderMetadata, Error}; + use sp_blockchain::{Error, HeaderBackend, HeaderMetadata}; use sp_runtime::traits::Block as BlockT; use std::borrow::Borrow; @@ -66,19 +65,24 @@ pub mod utils { client: &'a T, current: Option<(Block::Hash, Block::Hash)>, ) -> impl Fn(&Block::Hash, &Block::Hash) -> Result + 'a - where T: HeaderBackend + HeaderMetadata, + where + T: HeaderBackend + HeaderMetadata, { move |base, hash| { - if base == hash { return Ok(false); } + if base == hash { + return Ok(false) + } let current = current.as_ref().map(|(c, p)| (c.borrow(), p.borrow())); let mut hash = hash; if let Some((current_hash, current_parent_hash)) = current { - if base == current_hash { return Ok(false); } + if base == current_hash { + return Ok(false) + } if hash == current_hash { if base == current_parent_hash { - return Ok(true); + return Ok(true) } else { hash = current_parent_hash; } diff --git a/client/api/src/light.rs b/client/api/src/light.rs index a068e2d4a3417..8638ddf741f30 100644 --- a/client/api/src/light.rs +++ b/client/api/src/light.rs @@ -18,23 +18,26 @@ //! 
Substrate light client interfaces -use std::sync::Arc; -use std::collections::{BTreeMap, HashMap}; -use std::future::Future; +use std::{ + collections::{BTreeMap, HashMap}, + future::Future, + sync::Arc, +}; -use sp_runtime::{ - traits::{ - Block as BlockT, Header as HeaderT, NumberFor, - }, - generic::BlockId +use crate::{ + backend::{AuxStore, NewBlockState}, + ProvideChtRoots, UsageInfo, }; -use sp_core::{ChangesTrieConfigurationRange, storage::PrefixedStorageKey}; -use sp_state_machine::StorageProof; use sp_blockchain::{ - HeaderMetadata, well_known_cache_keys, HeaderBackend, Cache as BlockchainCache, - Error as ClientError, Result as ClientResult, + well_known_cache_keys, Cache as BlockchainCache, Error as ClientError, HeaderBackend, + HeaderMetadata, Result as ClientResult, +}; +use sp_core::{storage::PrefixedStorageKey, ChangesTrieConfigurationRange}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT, NumberFor}, }; -use crate::{backend::{AuxStore, NewBlockState}, UsageInfo, ProvideChtRoots}; +use sp_state_machine::StorageProof; /// Remote call request. #[derive(Clone, Debug, PartialEq, Eq, Hash)] @@ -142,48 +145,48 @@ pub struct RemoteBodyRequest { /// is correct (see FetchedDataChecker) and return already checked data. pub trait Fetcher: Send + Sync { /// Remote header future. - type RemoteHeaderResult: Future> + Unpin + Send + 'static; + type RemoteHeaderResult: Future> + + Unpin + + Send + + 'static; /// Remote storage read future. - type RemoteReadResult: Future, Option>>, - ClientError, - >> + Unpin + Send + 'static; + type RemoteReadResult: Future, Option>>, ClientError>> + + Unpin + + Send + + 'static; /// Remote call result future. - type RemoteCallResult: Future, - ClientError, - >> + Unpin + Send + 'static; + type RemoteCallResult: Future, ClientError>> + Unpin + Send + 'static; /// Remote changes result future. 
- type RemoteChangesResult: Future, u32)>, - ClientError, - >> + Unpin + Send + 'static; + type RemoteChangesResult: Future, u32)>, ClientError>> + + Unpin + + Send + + 'static; /// Remote block body result future. - type RemoteBodyResult: Future, - ClientError, - >> + Unpin + Send + 'static; + type RemoteBodyResult: Future, ClientError>> + + Unpin + + Send + + 'static; /// Fetch remote header. - fn remote_header(&self, request: RemoteHeaderRequest) -> Self::RemoteHeaderResult; - /// Fetch remote storage value. - fn remote_read( + fn remote_header( &self, - request: RemoteReadRequest - ) -> Self::RemoteReadResult; + request: RemoteHeaderRequest, + ) -> Self::RemoteHeaderResult; + /// Fetch remote storage value. + fn remote_read(&self, request: RemoteReadRequest) -> Self::RemoteReadResult; /// Fetch remote storage child value. fn remote_read_child( &self, - request: RemoteReadChildRequest + request: RemoteReadChildRequest, ) -> Self::RemoteReadResult; /// Fetch remote call result. fn remote_call(&self, request: RemoteCallRequest) -> Self::RemoteCallResult; /// Fetch remote changes ((block number, extrinsic index)) where given key has been changed /// at a given blocks range. - fn remote_changes(&self, request: RemoteChangesRequest) -> Self::RemoteChangesResult; + fn remote_changes( + &self, + request: RemoteChangesRequest, + ) -> Self::RemoteChangesResult; /// Fetch remote block body fn remote_body(&self, request: RemoteBodyRequest) -> Self::RemoteBodyResult; } @@ -222,20 +225,22 @@ pub trait FetchChecker: Send + Sync { fn check_changes_proof( &self, request: &RemoteChangesRequest, - proof: ChangesProof + proof: ChangesProof, ) -> ClientResult, u32)>>; /// Check remote body proof. fn check_body_proof( &self, request: &RemoteBodyRequest, - body: Vec + body: Vec, ) -> ClientResult>; } - /// Light client blockchain storage. 
-pub trait Storage: AuxStore + HeaderBackend - + HeaderMetadata + ProvideChtRoots +pub trait Storage: + AuxStore + + HeaderBackend + + HeaderMetadata + + ProvideChtRoots { /// Store new header. Should refuse to revert any finalized blocks. /// @@ -280,10 +285,10 @@ pub enum LocalOrRemote { /// locally, or fetches required data from remote node. pub trait RemoteBlockchain: Send + Sync { /// Get block header. - fn header(&self, id: BlockId) -> ClientResult, - >>; + fn header( + &self, + id: BlockId, + ) -> ClientResult>>; } /// Returns future that resolves header either locally, or remotely. @@ -295,11 +300,8 @@ pub fn future_header>( use futures::future::{ready, Either, FutureExt}; match blockchain.header(id) { - Ok(LocalOrRemote::Remote(request)) => Either::Left( - fetcher - .remote_header(request) - .then(|header| ready(header.map(Some))) - ), + Ok(LocalOrRemote::Remote(request)) => + Either::Left(fetcher.remote_header(request).then(|header| ready(header.map(Some)))), Ok(LocalOrRemote::Unknown) => Either::Right(ready(Ok(None))), Ok(LocalOrRemote::Local(local_header)) => Either::Right(ready(Ok(Some(local_header)))), Err(err) => Either::Right(ready(Err(err))), @@ -308,11 +310,11 @@ pub fn future_header>( #[cfg(test)] pub mod tests { + use super::*; use futures::future::Ready; use parking_lot::Mutex; use sp_blockchain::Error as ClientError; - use sp_test_primitives::{Block, Header, Extrinsic}; - use super::*; + use sp_test_primitives::{Block, Extrinsic, Header}; #[derive(Debug, thiserror::Error)] #[error("Not implemented on test node")] @@ -322,12 +324,11 @@ pub mod tests { fn into(self) -> ClientError { ClientError::Application(Box::new(self)) } - } - + } + pub type OkCallFetcher = Mutex>; - fn not_implemented_in_tests() -> Ready> - { + fn not_implemented_in_tests() -> Ready> { futures::future::ready(Err(MockError.into())) } @@ -346,7 +347,10 @@ pub mod tests { not_implemented_in_tests() } - fn remote_read_child(&self, _request: RemoteReadChildRequest

) -> Self::RemoteReadResult { + fn remote_read_child( + &self, + _request: RemoteReadChildRequest
, + ) -> Self::RemoteReadResult { not_implemented_in_tests() } @@ -354,7 +358,10 @@ pub mod tests { futures::future::ready(Ok((*self.lock()).clone())) } - fn remote_changes(&self, _request: RemoteChangesRequest
) -> Self::RemoteChangesResult { + fn remote_changes( + &self, + _request: RemoteChangesRequest
, + ) -> Self::RemoteChangesResult { not_implemented_in_tests() } diff --git a/client/api/src/notifications.rs b/client/api/src/notifications.rs index b043a332d667d..c6bd39c468d98 100644 --- a/client/api/src/notifications.rs +++ b/client/api/src/notifications.rs @@ -19,15 +19,15 @@ //! Storage notifications use std::{ - collections::{HashSet, HashMap}, + collections::{HashMap, HashSet}, sync::Arc, }; -use fnv::{FnvHashSet, FnvHashMap}; -use sp_core::storage::{StorageKey, StorageData}; +use fnv::{FnvHashMap, FnvHashSet}; +use prometheus_endpoint::{register, CounterVec, Opts, Registry, U64}; +use sp_core::storage::{StorageData, StorageKey}; use sp_runtime::traits::Block as BlockT; -use sp_utils::mpsc::{TracingUnboundedSender, TracingUnboundedReceiver, tracing_unbounded}; -use prometheus_endpoint::{Registry, CounterVec, Opts, U64, register}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; /// Storage change set #[derive(Debug)] @@ -40,29 +40,34 @@ pub struct StorageChangeSet { impl StorageChangeSet { /// Convert the change set into iterator over storage items. 
- pub fn iter<'a>(&'a self) - -> impl Iterator, &'a StorageKey, Option<&'a StorageData>)> + 'a { - let top = self.changes + pub fn iter<'a>( + &'a self, + ) -> impl Iterator, &'a StorageKey, Option<&'a StorageData>)> + 'a + { + let top = self + .changes .iter() .filter(move |&(key, _)| match self.filter { Some(ref filter) => filter.contains(key), None => true, }) - .map(move |(k,v)| (None, k, v.as_ref())); - let children = self.child_changes + .map(move |(k, v)| (None, k, v.as_ref())); + let children = self + .child_changes .iter() - .filter_map(move |(sk, changes)| - self.child_filters.as_ref().and_then(|cf| - cf.get(sk).map(|filter| changes + .filter_map(move |(sk, changes)| { + self.child_filters.as_ref().and_then(|cf| { + cf.get(sk).map(|filter| { + changes .iter() .filter(move |&(key, _)| match filter { Some(ref filter) => filter.contains(key), None => true, }) - .map(move |(k,v)| (Some(sk), k, v.as_ref())) - ) - ) - ) + .map(move |(k, v)| (Some(sk), k, v.as_ref())) + }) + }) + }) .flatten(); top.chain(children) } @@ -82,15 +87,18 @@ pub struct StorageNotifications { next_id: SubscriberId, wildcard_listeners: FnvHashSet, listeners: HashMap>, - child_listeners: HashMap>, - FnvHashSet - )>, - sinks: FnvHashMap, - Option>, - Option>>>, - )>, + child_listeners: HashMap< + StorageKey, + (HashMap>, FnvHashSet), + >, + sinks: FnvHashMap< + SubscriberId, + ( + TracingUnboundedSender<(Block::Hash, StorageChangeSet)>, + Option>, + Option>>>, + ), + >, } impl Default for StorageNotifications { @@ -110,16 +118,17 @@ impl StorageNotifications { /// Initialize a new StorageNotifications /// optionally pass a prometheus registry to send subscriber metrics to pub fn new(prometheus_registry: Option) -> Self { - let metrics = prometheus_registry.and_then(|r| + let metrics = prometheus_registry.and_then(|r| { CounterVec::new( Opts::new( "storage_notification_subscribers", - "Number of subscribers in storage notification sytem" + "Number of subscribers in storage notification 
sytem", ), &["action"], //added | removed - ).and_then(|g| register(g, &r)) + ) + .and_then(|g| register(g, &r)) .ok() - ); + }); StorageNotifications { metrics, @@ -137,17 +146,16 @@ impl StorageNotifications { pub fn trigger( &mut self, hash: &Block::Hash, - changeset: impl Iterator, Option>)>, + changeset: impl Iterator, Option>)>, child_changeset: impl Iterator< - Item=(Vec, impl Iterator, Option>)>) + Item = (Vec, impl Iterator, Option>)>), >, ) { - let has_wildcard = !self.wildcard_listeners.is_empty(); // early exit if no listeners if !has_wildcard && self.listeners.is_empty() && self.child_listeners.is_empty() { - return; + return } let mut subscribers = self.wildcard_listeners.clone(); @@ -193,24 +201,29 @@ impl StorageNotifications { // Don't send empty notifications if changes.is_empty() && child_changes.is_empty() { - return; + return } let changes = Arc::new(changes); let child_changes = Arc::new(child_changes); // Trigger the events - let to_remove = self.sinks + let to_remove = self + .sinks .iter() .filter_map(|(subscriber, &(ref sink, ref filter, ref child_filters))| { let should_remove = { if subscribers.contains(subscriber) { - sink.unbounded_send((hash.clone(), StorageChangeSet { - changes: changes.clone(), - child_changes: child_changes.clone(), - filter: filter.clone(), - child_filters: child_filters.clone(), - })).is_err() + sink.unbounded_send(( + hash.clone(), + StorageChangeSet { + changes: changes.clone(), + child_changes: child_changes.clone(), + filter: filter.clone(), + child_filters: child_filters.clone(), + }, + )) + .is_err() } else { sink.is_closed() } @@ -221,7 +234,8 @@ impl StorageNotifications { } else { None } - }).collect::>(); + }) + .collect::>(); for sub_id in to_remove { self.remove_subscriber(sub_id); @@ -233,13 +247,12 @@ impl StorageNotifications { filters: &Option>, listeners: &mut HashMap>, wildcards: &mut FnvHashSet, - ){ + ) { match filters { None => { wildcards.remove(subscriber); }, - Some(filters) => { - + 
Some(filters) => for key in filters.iter() { let remove_key = match listeners.get_mut(key) { Some(ref mut set) => { @@ -252,8 +265,7 @@ impl StorageNotifications { if remove_key { listeners.remove(key); } - } - } + }, } } @@ -267,7 +279,6 @@ impl StorageNotifications { ); if let Some(child_filters) = child_filters.as_ref() { for (c_key, filters) in child_filters { - if let Some((listeners, wildcards)) = self.child_listeners.get_mut(&c_key) { Self::remove_subscriber_from( &subscriber, @@ -293,20 +304,24 @@ impl StorageNotifications { filter_keys: &Option>, listeners: &mut HashMap>, wildcards: &mut FnvHashSet, - ) -> Option> - { + ) -> Option> { match filter_keys { None => { wildcards.insert(current_id); None }, - Some(keys) => Some(keys.as_ref().iter().map(|key| { - listeners - .entry(key.clone()) - .or_insert_with(Default::default) - .insert(current_id); - key.clone() - }).collect()) + Some(keys) => Some( + keys.as_ref() + .iter() + .map(|key| { + listeners + .entry(key.clone()) + .or_insert_with(Default::default) + .insert(current_id); + key.clone() + }) + .collect(), + ), } } @@ -327,21 +342,20 @@ impl StorageNotifications { &mut self.wildcard_listeners, ); let child_keys = filter_child_keys.map(|filter_child_keys| { - filter_child_keys.iter().map(|(c_key, o_keys)| { - let (c_listeners, c_wildcards) = self.child_listeners - .entry(c_key.clone()) - .or_insert_with(Default::default); - - (c_key.clone(), Self::listen_from( - current_id, - o_keys, - &mut *c_listeners, - &mut *c_wildcards, - )) - }).collect() + filter_child_keys + .iter() + .map(|(c_key, o_keys)| { + let (c_listeners, c_wildcards) = + self.child_listeners.entry(c_key.clone()).or_insert_with(Default::default); + + ( + c_key.clone(), + Self::listen_from(current_id, o_keys, &mut *c_listeners, &mut *c_wildcards), + ) + }) + .collect() }); - // insert sink let (tx, rx) = tracing_unbounded("mpsc_storage_notification_items"); self.sinks.insert(current_id, (tx, keys, child_keys)); @@ -356,8 +370,8 @@ impl 
StorageNotifications { #[cfg(test)] mod tests { - use sp_runtime::testing::{H256 as Hash, Block as RawBlock, ExtrinsicWrapper}; use super::*; + use sp_runtime::testing::{Block as RawBlock, ExtrinsicWrapper, H256 as Hash}; use std::iter::{empty, Empty}; type TestChangeSet = ( @@ -369,10 +383,12 @@ mod tests { impl From for StorageChangeSet { fn from(changes: TestChangeSet) -> Self { // warning hardcoded child trie wildcard to test upon - let child_filters = Some([ - (StorageKey(vec![4]), None), - (StorageKey(vec![5]), None), - ].iter().cloned().collect()); + let child_filters = Some( + [(StorageKey(vec![4]), None), (StorageKey(vec![5]), None)] + .iter() + .cloned() + .collect(), + ); StorageChangeSet { changes: Arc::new(changes.0), child_changes: Arc::new(changes.1), @@ -396,34 +412,40 @@ mod tests { // given let mut notifications = StorageNotifications::::default(); let child_filter = [(StorageKey(vec![4]), None)]; - let mut recv = futures::executor::block_on_stream( - notifications.listen(None, Some(&child_filter[..])) - ); + let mut recv = + futures::executor::block_on_stream(notifications.listen(None, Some(&child_filter[..]))); // when - let changeset = vec![ - (vec![2], Some(vec![3])), - (vec![3], None), - ]; - let c_changeset_1 = vec![ - (vec![5], Some(vec![4])), - (vec![6], None), - ]; + let changeset = vec![(vec![2], Some(vec![3])), (vec![3], None)]; + let c_changeset_1 = vec![(vec![5], Some(vec![4])), (vec![6], None)]; let c_changeset = vec![(vec![4], c_changeset_1)]; notifications.trigger( &Hash::from_low_u64_be(1), changeset.into_iter(), - c_changeset.into_iter().map(|(a,b)| (a, b.into_iter())), + c_changeset.into_iter().map(|(a, b)| (a, b.into_iter())), ); // then - assert_eq!(recv.next().unwrap(), (Hash::from_low_u64_be(1), (vec![ - (StorageKey(vec![2]), Some(StorageData(vec![3]))), - (StorageKey(vec![3]), None), - ], vec![(StorageKey(vec![4]), vec![ - (StorageKey(vec![5]), Some(StorageData(vec![4]))), - (StorageKey(vec![6]), None), - ])]).into())); + 
assert_eq!( + recv.next().unwrap(), + ( + Hash::from_low_u64_be(1), + ( + vec![ + (StorageKey(vec![2]), Some(StorageData(vec![3]))), + (StorageKey(vec![3]), None), + ], + vec![( + StorageKey(vec![4]), + vec![ + (StorageKey(vec![5]), Some(StorageData(vec![4]))), + (StorageKey(vec![6]), None), + ] + )] + ) + .into() + ) + ); } #[test] @@ -432,44 +454,52 @@ mod tests { let mut notifications = StorageNotifications::::default(); let child_filter = [(StorageKey(vec![4]), Some(vec![StorageKey(vec![5])]))]; let mut recv1 = futures::executor::block_on_stream( - notifications.listen(Some(&[StorageKey(vec![1])]), None) + notifications.listen(Some(&[StorageKey(vec![1])]), None), ); let mut recv2 = futures::executor::block_on_stream( - notifications.listen(Some(&[StorageKey(vec![2])]), None) + notifications.listen(Some(&[StorageKey(vec![2])]), None), ); let mut recv3 = futures::executor::block_on_stream( - notifications.listen(Some(&[]), Some(&child_filter)) + notifications.listen(Some(&[]), Some(&child_filter)), ); // when - let changeset = vec![ - (vec![2], Some(vec![3])), - (vec![1], None), - ]; - let c_changeset_1 = vec![ - (vec![5], Some(vec![4])), - (vec![6], None), - ]; + let changeset = vec![(vec![2], Some(vec![3])), (vec![1], None)]; + let c_changeset_1 = vec![(vec![5], Some(vec![4])), (vec![6], None)]; let c_changeset = vec![(vec![4], c_changeset_1)]; notifications.trigger( &Hash::from_low_u64_be(1), changeset.into_iter(), - c_changeset.into_iter().map(|(a,b)| (a, b.into_iter())), + c_changeset.into_iter().map(|(a, b)| (a, b.into_iter())), ); // then - assert_eq!(recv1.next().unwrap(), (Hash::from_low_u64_be(1), (vec![ - (StorageKey(vec![1]), None), - ], vec![]).into())); - assert_eq!(recv2.next().unwrap(), (Hash::from_low_u64_be(1), (vec![ - (StorageKey(vec![2]), Some(StorageData(vec![3]))), - ], vec![]).into())); - assert_eq!(recv3.next().unwrap(), (Hash::from_low_u64_be(1), (vec![], - vec![ - (StorageKey(vec![4]), vec![(StorageKey(vec![5]), 
Some(StorageData(vec![4])))]), - ]).into())); - + assert_eq!( + recv1.next().unwrap(), + (Hash::from_low_u64_be(1), (vec![(StorageKey(vec![1]), None),], vec![]).into()) + ); + assert_eq!( + recv2.next().unwrap(), + ( + Hash::from_low_u64_be(1), + (vec![(StorageKey(vec![2]), Some(StorageData(vec![3]))),], vec![]).into() + ) + ); + assert_eq!( + recv3.next().unwrap(), + ( + Hash::from_low_u64_be(1), + ( + vec![], + vec![( + StorageKey(vec![4]), + vec![(StorageKey(vec![5]), Some(StorageData(vec![4])))] + ),] + ) + .into() + ) + ); } #[test] @@ -479,27 +509,21 @@ mod tests { { let child_filter = [(StorageKey(vec![4]), Some(vec![StorageKey(vec![5])]))]; let _recv1 = futures::executor::block_on_stream( - notifications.listen(Some(&[StorageKey(vec![1])]), None) + notifications.listen(Some(&[StorageKey(vec![1])]), None), ); let _recv2 = futures::executor::block_on_stream( - notifications.listen(Some(&[StorageKey(vec![2])]), None) - ); - let _recv3 = futures::executor::block_on_stream( - notifications.listen(None, None) - ); - let _recv4 = futures::executor::block_on_stream( - notifications.listen(None, Some(&child_filter)) + notifications.listen(Some(&[StorageKey(vec![2])]), None), ); + let _recv3 = futures::executor::block_on_stream(notifications.listen(None, None)); + let _recv4 = + futures::executor::block_on_stream(notifications.listen(None, Some(&child_filter))); assert_eq!(notifications.listeners.len(), 2); assert_eq!(notifications.wildcard_listeners.len(), 2); assert_eq!(notifications.child_listeners.len(), 1); } // when - let changeset = vec![ - (vec![2], Some(vec![3])), - (vec![1], None), - ]; + let changeset = vec![(vec![2], Some(vec![3])), (vec![1], None)]; let c_changeset = empty::<(_, Empty<_>)>(); notifications.trigger(&Hash::from_low_u64_be(1), changeset.into_iter(), c_changeset); diff --git a/client/api/src/proof_provider.rs b/client/api/src/proof_provider.rs index 0e9fd5318ba90..ad0989c743961 100644 --- a/client/api/src/proof_provider.rs +++ 
b/client/api/src/proof_provider.rs @@ -17,12 +17,9 @@ // along with this program. If not, see . //! Proof utilities -use sp_runtime::{ - generic::BlockId, - traits::{Block as BlockT}, -}; -use crate::{StorageProof, ChangesProof}; -use sp_storage::{ChildInfo, StorageKey, PrefixedStorageKey}; +use crate::{ChangesProof, StorageProof}; +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; +use sp_storage::{ChildInfo, PrefixedStorageKey, StorageKey}; /// Interface for providing block proving utilities. pub trait ProofProvider { @@ -30,7 +27,7 @@ pub trait ProofProvider { fn read_proof( &self, id: &BlockId, - keys: &mut dyn Iterator, + keys: &mut dyn Iterator, ) -> sp_blockchain::Result; /// Reads child storage value at a given block + storage_key + key, returning @@ -39,7 +36,7 @@ pub trait ProofProvider { &self, id: &BlockId, child_info: &ChildInfo, - keys: &mut dyn Iterator, + keys: &mut dyn Iterator, ) -> sp_blockchain::Result; /// Execute a call to a contract on top of state in a block of given hash @@ -53,7 +50,10 @@ pub trait ProofProvider { call_data: &[u8], ) -> sp_blockchain::Result<(Vec, StorageProof)>; /// Reads given header and generates CHT-based header proof. - fn header_proof(&self, id: &BlockId) -> sp_blockchain::Result<(Block::Header, StorageProof)>; + fn header_proof( + &self, + id: &BlockId, + ) -> sp_blockchain::Result<(Block::Header, StorageProof)>; /// Get proof for computation of (block, extrinsic) pairs where key has been changed at given blocks range. /// `min` is the hash of the first block, which changes trie root is known to the requester - when we're using diff --git a/client/authority-discovery/src/interval.rs b/client/authority-discovery/src/interval.rs index 0710487203d53..f4e7c43e60d21 100644 --- a/client/authority-discovery/src/interval.rs +++ b/client/authority-discovery/src/interval.rs @@ -16,13 +16,13 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use futures::stream::Stream; -use futures::future::FutureExt; -use futures::ready; +use futures::{future::FutureExt, ready, stream::Stream}; use futures_timer::Delay; -use std::pin::Pin; -use std::task::{Context, Poll}; -use std::time::Duration; +use std::{ + pin::Pin, + task::{Context, Poll}, + time::Duration, +}; /// Exponentially increasing interval /// @@ -37,11 +37,7 @@ impl ExpIncInterval { /// Create a new [`ExpIncInterval`]. pub fn new(start: Duration, max: Duration) -> Self { let delay = Delay::new(start); - Self { - max, - next: start * 2, - delay, - } + Self { max, next: start * 2, delay } } /// Fast forward the exponentially increasing interval to the configured maximum. diff --git a/client/authority-discovery/src/lib.rs b/client/authority-discovery/src/lib.rs index ab6338963da46..4929ce69917a0 100644 --- a/client/authority-discovery/src/lib.rs +++ b/client/authority-discovery/src/lib.rs @@ -26,18 +26,23 @@ //! //! See [`Worker`] and [`Service`] for more documentation. -pub use crate::{service::Service, worker::{NetworkProvider, Worker, Role}}; +pub use crate::{ + service::Service, + worker::{NetworkProvider, Role, Worker}, +}; use std::{sync::Arc, time::Duration}; -use futures::channel::{mpsc, oneshot}; -use futures::Stream; +use futures::{ + channel::{mpsc, oneshot}, + Stream, +}; use sc_client_api::blockchain::HeaderBackend; use sc_network::{DhtEvent, Multiaddr, PeerId}; +use sp_api::ProvideRuntimeApi; use sp_authority_discovery::{AuthorityDiscoveryApi, AuthorityId}; use sp_runtime::traits::Block as BlockT; -use sp_api::ProvideRuntimeApi; mod error; mod interval; @@ -141,15 +146,8 @@ where { let (to_worker, from_service) = mpsc::channel(0); - let worker = Worker::new( - from_service, - client, - network, - dht_event_rx, - role, - prometheus_registry, - config, - ); + let worker = + Worker::new(from_service, client, network, dht_event_rx, role, prometheus_registry, config); let service = Service::new(to_worker); (worker, service) @@ -160,5 +158,5 @@ 
pub(crate) enum ServicetoWorkerMsg { /// See [`Service::get_addresses_by_authority_id`]. GetAddressesByAuthorityId(AuthorityId, oneshot::Sender>>), /// See [`Service::get_authority_id_by_peer_id`]. - GetAuthorityIdByPeerId(PeerId, oneshot::Sender>) + GetAuthorityIdByPeerId(PeerId, oneshot::Sender>), } diff --git a/client/authority-discovery/src/service.rs b/client/authority-discovery/src/service.rs index a787ff8f51c21..2e5ae66e4dd4a 100644 --- a/client/authority-discovery/src/service.rs +++ b/client/authority-discovery/src/service.rs @@ -20,8 +20,10 @@ use std::fmt::Debug; use crate::ServicetoWorkerMsg; -use futures::channel::{mpsc, oneshot}; -use futures::SinkExt; +use futures::{ + channel::{mpsc, oneshot}, + SinkExt, +}; use sc_network::{Multiaddr, PeerId}; use sp_authority_discovery::AuthorityId; @@ -42,9 +44,7 @@ impl Debug for Service { /// [`crate::Worker`]'s local address cache for a given [`AuthorityId`]. impl Service { pub(crate) fn new(to_worker: mpsc::Sender) -> Self { - Self { - to_worker, - } + Self { to_worker } } /// Get the addresses for the given [`AuthorityId`] from the local address @@ -59,7 +59,10 @@ impl Service { /// enforced today, given that there are still authorities out there /// publishing the addresses of their sentry nodes on the DHT. In the future /// this guarantee can be provided. - pub async fn get_addresses_by_authority_id(&mut self, authority: AuthorityId) -> Option> { + pub async fn get_addresses_by_authority_id( + &mut self, + authority: AuthorityId, + ) -> Option> { let (tx, rx) = oneshot::channel(); self.to_worker diff --git a/client/authority-discovery/src/tests.rs b/client/authority-discovery/src/tests.rs index 78e978e07a1a0..ef2c2f24634b9 100644 --- a/client/authority-discovery/src/tests.rs +++ b/client/authority-discovery/src/tests.rs @@ -16,15 +16,24 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::{new_worker_and_service, worker::{tests::{TestApi, TestNetwork}, Role}}; +use crate::{ + new_worker_and_service, + worker::{ + tests::{TestApi, TestNetwork}, + Role, + }, +}; -use std::sync::Arc; use futures::{channel::mpsc::channel, executor::LocalPool, task::LocalSpawn}; -use libp2p::core::{multiaddr::{Multiaddr, Protocol}, PeerId}; +use libp2p::core::{ + multiaddr::{Multiaddr, Protocol}, + PeerId, +}; +use std::sync::Arc; use sp_authority_discovery::AuthorityId; use sp_core::crypto::key_types; -use sp_keystore::{CryptoStore, testing::KeyStore}; +use sp_keystore::{testing::KeyStore, CryptoStore}; #[test] fn get_addresses_and_authority_id() { @@ -44,13 +53,12 @@ fn get_addresses_and_authority_id() { }); let remote_peer_id = PeerId::random(); - let remote_addr = "/ip6/2001:db8:0:0:0:0:0:2/tcp/30333".parse::() + let remote_addr = "/ip6/2001:db8:0:0:0:0:0:2/tcp/30333" + .parse::() .unwrap() .with(Protocol::P2p(remote_peer_id.clone().into())); - let test_api = Arc::new(TestApi { - authorities: vec![], - }); + let test_api = Arc::new(TestApi { authorities: vec![] }); let (mut worker, mut service) = new_worker_and_service( test_api, diff --git a/client/authority-discovery/src/worker.rs b/client/authority-discovery/src/worker.rs index bb9207e4e7ea6..f01e6e46c267e 100644 --- a/client/authority-discovery/src/worker.rs +++ b/client/authority-discovery/src/worker.rs @@ -16,43 +16,49 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::{error::{Error, Result}, interval::ExpIncInterval, ServicetoWorkerMsg}; +use crate::{ + error::{Error, Result}, + interval::ExpIncInterval, + ServicetoWorkerMsg, +}; -use std::collections::{HashMap, HashSet}; -use std::convert::TryInto; -use std::marker::PhantomData; -use std::sync::Arc; -use std::time::Duration; +use std::{ + collections::{HashMap, HashSet}, + convert::TryInto, + marker::PhantomData, + sync::Arc, + time::Duration, +}; -use futures::channel::mpsc; -use futures::{future, FutureExt, Stream, StreamExt, stream::Fuse}; +use futures::{channel::mpsc, future, stream::Fuse, FutureExt, Stream, StreamExt}; use addr_cache::AddrCache; use async_trait::async_trait; use codec::Decode; use ip_network::IpNetwork; -use libp2p::{core::multiaddr, multihash::{Multihash, Hasher}}; +use libp2p::{ + core::multiaddr, + multihash::{Hasher, Multihash}, +}; use log::{debug, error, log_enabled}; -use prometheus_endpoint::{Counter, CounterVec, Gauge, Opts, U64, register}; +use prometheus_endpoint::{register, Counter, CounterVec, Gauge, Opts, U64}; use prost::Message; use rand::{seq::SliceRandom, thread_rng}; use sc_client_api::blockchain::HeaderBackend; -use sc_network::{ - DhtEvent, - ExHashT, - Multiaddr, - NetworkStateInfo, - PeerId, +use sc_network::{DhtEvent, ExHashT, Multiaddr, NetworkStateInfo, PeerId}; +use sp_api::ProvideRuntimeApi; +use sp_authority_discovery::{ + AuthorityDiscoveryApi, AuthorityId, AuthorityPair, AuthoritySignature, }; -use sp_authority_discovery::{AuthorityDiscoveryApi, AuthorityId, AuthoritySignature, AuthorityPair}; use sp_core::crypto::{key_types, CryptoTypePublicPair, Pair}; use sp_keystore::CryptoStore; -use sp_runtime::{traits::Block as BlockT, generic::BlockId}; -use sp_api::ProvideRuntimeApi; +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; mod addr_cache; /// Dht payload schemas generated from Protobuf definitions via Prost crate in build.rs. 
-mod schema { include!(concat!(env!("OUT_DIR"), "/authority_discovery.rs")); } +mod schema { + include!(concat!(env!("OUT_DIR"), "/authority_discovery.rs")); +} #[cfg(test)] pub mod tests; @@ -72,7 +78,6 @@ pub enum Role { Discover, } - /// An authority discovery [`Worker`] can publish the local node's addresses as well as discover /// those of other nodes via a Kademlia DHT. /// @@ -141,8 +146,7 @@ where Block: BlockT + Unpin + 'static, Network: NetworkProvider, Client: ProvideRuntimeApi + Send + Sync + 'static + HeaderBackend, - >::Api: - AuthorityDiscoveryApi, + >::Api: AuthorityDiscoveryApi, DhtEventStream: Stream + Unpin, { /// Construct a [`Worker`]. @@ -161,33 +165,24 @@ where // thus timely retries are not needed. For this reasoning use an exponentially increasing // interval for `publish_interval`, `query_interval` and `priority_group_set_interval` // instead of a constant interval. - let publish_interval = ExpIncInterval::new( - Duration::from_secs(2), - config.max_publish_interval, - ); - let query_interval = ExpIncInterval::new( - Duration::from_secs(2), - config.max_query_interval, - ); + let publish_interval = + ExpIncInterval::new(Duration::from_secs(2), config.max_publish_interval); + let query_interval = ExpIncInterval::new(Duration::from_secs(2), config.max_query_interval); // An `ExpIncInterval` is overkill here because the interval is constant, but consistency // is more simple. 
- let publish_if_changed_interval = ExpIncInterval::new( - config.keystore_refresh_interval, - config.keystore_refresh_interval - ); + let publish_if_changed_interval = + ExpIncInterval::new(config.keystore_refresh_interval, config.keystore_refresh_interval); let addr_cache = AddrCache::new(); let metrics = match prometheus_registry { - Some(registry) => { - match Metrics::register(®istry) { - Ok(metrics) => Some(metrics), - Err(e) => { - error!(target: LOG_TARGET, "Failed to register metrics: {:?}", e); - None - }, - } + Some(registry) => match Metrics::register(®istry) { + Ok(metrics) => Some(metrics), + Err(e) => { + error!(target: LOG_TARGET, "Failed to register metrics: {:?}", e); + None + }, }, None => None, }; @@ -262,23 +257,23 @@ where let _ = sender.send( self.addr_cache.get_addresses_by_authority_id(&authority).map(Clone::clone), ); - } + }, ServicetoWorkerMsg::GetAuthorityIdByPeerId(peer_id, sender) => { - let _ = sender.send( - self.addr_cache.get_authority_id_by_peer_id(&peer_id).map(Clone::clone), - ); - } + let _ = sender + .send(self.addr_cache.get_authority_id_by_peer_id(&peer_id).map(Clone::clone)); + }, } } fn addresses_to_publish(&self) -> impl Iterator { let peer_id: Multihash = self.network.local_peer_id().into(); let publish_non_global_ips = self.publish_non_global_ips; - self.network.external_addresses() + self.network + .external_addresses() .into_iter() .filter(move |a| { if publish_non_global_ips { - return true; + return true } a.iter().all(|p| match p { @@ -321,9 +316,9 @@ where if let Some(metrics) = &self.metrics { metrics.publish.inc(); - metrics.amount_addresses_last_published.set( - addresses.len().try_into().unwrap_or(std::u64::MAX), - ); + metrics + .amount_addresses_last_published + .set(addresses.len().try_into().unwrap_or(std::u64::MAX)); } let mut serialized_addresses = vec![]; @@ -332,30 +327,26 @@ where .map_err(Error::EncodingProto)?; let keys_vec = keys.iter().cloned().collect::>(); - let signatures = 
key_store.sign_with_all( - key_types::AUTHORITY_DISCOVERY, - keys_vec.clone(), - serialized_addresses.as_slice(), - ).await.map_err(|_| Error::Signing)?; + let signatures = key_store + .sign_with_all( + key_types::AUTHORITY_DISCOVERY, + keys_vec.clone(), + serialized_addresses.as_slice(), + ) + .await + .map_err(|_| Error::Signing)?; for (sign_result, key) in signatures.into_iter().zip(keys_vec.iter()) { let mut signed_addresses = vec![]; // Verify that all signatures exist for all provided keys. - let signature = sign_result.ok() - .flatten() - .ok_or_else(|| Error::MissingSignature(key.clone()))?; - schema::SignedAuthorityAddresses { - addresses: serialized_addresses.clone(), - signature, - } - .encode(&mut signed_addresses) + let signature = + sign_result.ok().flatten().ok_or_else(|| Error::MissingSignature(key.clone()))?; + schema::SignedAuthorityAddresses { addresses: serialized_addresses.clone(), signature } + .encode(&mut signed_addresses) .map_err(Error::EncodingProto)?; - self.network.put_value( - hash_authority_id(key.1.as_ref()), - signed_addresses, - ); + self.network.put_value(hash_authority_id(key.1.as_ref()), signed_addresses); } self.latest_published_keys = keys; @@ -367,11 +358,11 @@ where let id = BlockId::hash(self.client.info().best_hash); let local_keys = match &self.role { - Role::PublishAndDiscover(key_store) => { - key_store.sr25519_public_keys( - key_types::AUTHORITY_DISCOVERY - ).await.into_iter().collect::>() - }, + Role::PublishAndDiscover(key_store) => key_store + .sr25519_public_keys(key_types::AUTHORITY_DISCOVERY) + .await + .into_iter() + .collect::>(), Role::Discover => HashSet::new(), }; @@ -393,9 +384,9 @@ where self.in_flight_lookups.clear(); if let Some(metrics) = &self.metrics { - metrics.requests_pending.set( - self.pending_lookups.len().try_into().unwrap_or(std::u64::MAX), - ); + metrics + .requests_pending + .set(self.pending_lookups.len().try_into().unwrap_or(std::u64::MAX)); } Ok(()) @@ -408,15 +399,14 @@ where None => 
return, }; let hash = hash_authority_id(authority_id.as_ref()); - self.network - .get_value(&hash); + self.network.get_value(&hash); self.in_flight_lookups.insert(hash, authority_id); if let Some(metrics) = &self.metrics { metrics.requests.inc(); - metrics.requests_pending.set( - self.pending_lookups.len().try_into().unwrap_or(std::u64::MAX), - ); + metrics + .requests_pending + .set(self.pending_lookups.len().try_into().unwrap_or(std::u64::MAX)); } } } @@ -431,10 +421,7 @@ where if log_enabled!(log::Level::Debug) { let hashes: Vec<_> = v.iter().map(|(hash, _value)| hash.clone()).collect(); - debug!( - target: LOG_TARGET, - "Value for hash '{:?}' found on Dht.", hashes, - ); + debug!(target: LOG_TARGET, "Value for hash '{:?}' found on Dht.", hashes,); } if let Err(e) = self.handle_dht_value_found_event(v) { @@ -442,22 +429,16 @@ where metrics.handle_value_found_event_failure.inc(); } - debug!( - target: LOG_TARGET, - "Failed to handle Dht value found event: {:?}", e, - ); + debug!(target: LOG_TARGET, "Failed to handle Dht value found event: {:?}", e,); } - } + }, DhtEvent::ValueNotFound(hash) => { if let Some(metrics) = &self.metrics { metrics.dht_event_received.with_label_values(&["value_not_found"]).inc(); } if self.in_flight_lookups.remove(&hash).is_some() { - debug!( - target: LOG_TARGET, - "Value for hash '{:?}' not found on Dht.", hash - ) + debug!(target: LOG_TARGET, "Value for hash '{:?}' not found on Dht.", hash) } else { debug!( target: LOG_TARGET, @@ -475,21 +456,15 @@ where metrics.dht_event_received.with_label_values(&["value_put"]).inc(); } - debug!( - target: LOG_TARGET, - "Successfully put hash '{:?}' on Dht.", hash, - ) + debug!(target: LOG_TARGET, "Successfully put hash '{:?}' on Dht.", hash,) }, DhtEvent::ValuePutFailed(hash) => { if let Some(metrics) = &self.metrics { metrics.dht_event_received.with_label_values(&["value_put_failed"]).inc(); } - debug!( - target: LOG_TARGET, - "Failed to put hash '{:?}' on Dht.", hash - ) - } + debug!(target: 
LOG_TARGET, "Failed to put hash '{:?}' on Dht.", hash) + }, } } @@ -498,34 +473,36 @@ where values: Vec<(libp2p::kad::record::Key, Vec)>, ) -> Result<()> { // Ensure `values` is not empty and all its keys equal. - let remote_key = values.iter().fold(Ok(None), |acc, (key, _)| { - match acc { + let remote_key = values + .iter() + .fold(Ok(None), |acc, (key, _)| match acc { Ok(None) => Ok(Some(key.clone())), - Ok(Some(ref prev_key)) if prev_key != key => Err( - Error::ReceivingDhtValueFoundEventWithDifferentKeys - ), + Ok(Some(ref prev_key)) if prev_key != key => + Err(Error::ReceivingDhtValueFoundEventWithDifferentKeys), x @ Ok(_) => x, Err(e) => Err(e), - } - })?.ok_or(Error::ReceivingDhtValueFoundEventWithNoRecords)?; + })? + .ok_or(Error::ReceivingDhtValueFoundEventWithNoRecords)?; - let authority_id: AuthorityId = self.in_flight_lookups + let authority_id: AuthorityId = self + .in_flight_lookups .remove(&remote_key) .ok_or(Error::ReceivingUnexpectedRecord)?; let local_peer_id = self.network.local_peer_id(); - let remote_addresses: Vec = values.into_iter() + let remote_addresses: Vec = values + .into_iter() .map(|(_k, v)| { let schema::SignedAuthorityAddresses { signature, addresses } = schema::SignedAuthorityAddresses::decode(v.as_slice()) - .map_err(Error::DecodingProto)?; + .map_err(Error::DecodingProto)?; let signature = AuthoritySignature::decode(&mut &signature[..]) .map_err(Error::EncodingDecodingScale)?; if !AuthorityPair::verify(&signature, &addresses, &authority_id) { - return Err(Error::VerifyingDhtPayload); + return Err(Error::VerifyingDhtPayload) } let addresses = schema::AuthorityAddresses::decode(addresses.as_slice()) @@ -542,33 +519,35 @@ where .into_iter() .flatten() // Ignore [`Multiaddr`]s without [`PeerId`] and own addresses. - .filter(|addr| addr.iter().any(|protocol| { - // Parse to PeerId first as Multihashes of old and new PeerId - // representation don't equal. 
- // - // See https://github.com/libp2p/rust-libp2p/issues/555 for - // details. - if let multiaddr::Protocol::P2p(hash) = protocol { - let peer_id = match PeerId::from_multihash(hash) { - Ok(peer_id) => peer_id, - Err(_) => return false, // Discard address. - }; - - // Discard if equal to local peer id, keep if it differs. - return !(peer_id == local_peer_id); - } + .filter(|addr| { + addr.iter().any(|protocol| { + // Parse to PeerId first as Multihashes of old and new PeerId + // representation don't equal. + // + // See https://github.com/libp2p/rust-libp2p/issues/555 for + // details. + if let multiaddr::Protocol::P2p(hash) = protocol { + let peer_id = match PeerId::from_multihash(hash) { + Ok(peer_id) => peer_id, + Err(_) => return false, // Discard address. + }; + + // Discard if equal to local peer id, keep if it differs. + return !(peer_id == local_peer_id) + } - false // `protocol` is not a [`Protocol::P2p`], let's keep looking. - })) + false // `protocol` is not a [`Protocol::P2p`], let's keep looking. + }) + }) .take(MAX_ADDRESSES_PER_AUTHORITY) .collect(); if !remote_addresses.is_empty() { self.addr_cache.insert(authority_id, remote_addresses); if let Some(metrics) = &self.metrics { - metrics.known_authorities_count.set( - self.addr_cache.num_ids().try_into().unwrap_or(std::u64::MAX) - ); + metrics + .known_authorities_count + .set(self.addr_cache.num_ids().try_into().unwrap_or(std::u64::MAX)); } } Ok(()) @@ -591,14 +570,16 @@ where .collect::>(); let id = BlockId::hash(client.info().best_hash); - let authorities = client.runtime_api() + let authorities = client + .runtime_api() .authorities(&id) .map_err(|e| Error::CallingRuntime(e.into()))? 
.into_iter() .map(std::convert::Into::into) .collect::>(); - let intersection = local_pub_keys.intersection(&authorities) + let intersection = local_pub_keys + .intersection(&authorities) .cloned() .map(std::convert::Into::into) .collect(); @@ -655,7 +636,7 @@ impl Metrics { publish: register( Counter::new( "authority_discovery_times_published_total", - "Number of times authority discovery has published external addresses." + "Number of times authority discovery has published external addresses.", )?, registry, )?, @@ -663,7 +644,7 @@ impl Metrics { Gauge::new( "authority_discovery_amount_external_addresses_last_published", "Number of external addresses published when authority discovery last \ - published addresses." + published addresses.", )?, registry, )?, @@ -671,14 +652,14 @@ impl Metrics { Counter::new( "authority_discovery_authority_addresses_requested_total", "Number of times authority discovery has requested external addresses of a \ - single authority." + single authority.", )?, registry, )?, requests_pending: register( Gauge::new( "authority_discovery_authority_address_requests_pending", - "Number of pending authority address requests." + "Number of pending authority address requests.", )?, registry, )?, @@ -686,7 +667,7 @@ impl Metrics { CounterVec::new( Opts::new( "authority_discovery_dht_event_received", - "Number of dht events received by authority discovery." + "Number of dht events received by authority discovery.", ), &["name"], )?, @@ -695,14 +676,14 @@ impl Metrics { handle_value_found_event_failure: register( Counter::new( "authority_discovery_handle_value_found_event_failure", - "Number of times handling a dht value found event failed." + "Number of times handling a dht value found event failed.", )?, registry, )?, known_authorities_count: register( Gauge::new( "authority_discovery_known_authorities_count", - "Number of authorities known by authority discovery." 
+ "Number of authorities known by authority discovery.", )?, registry, )?, diff --git a/client/authority-discovery/src/worker/addr_cache.rs b/client/authority-discovery/src/worker/addr_cache.rs index c9b0711803ba9..3f9cee476d68c 100644 --- a/client/authority-discovery/src/worker/addr_cache.rs +++ b/client/authority-discovery/src/worker/addr_cache.rs @@ -19,8 +19,8 @@ use libp2p::core::multiaddr::{Multiaddr, Protocol}; use std::collections::HashMap; -use sp_authority_discovery::AuthorityId; use sc_network::PeerId; +use sp_authority_discovery::AuthorityId; /// Cache for [`AuthorityId`] -> [`Vec`] and [`PeerId`] -> [`AuthorityId`] mappings. pub(super) struct AddrCache { @@ -45,27 +45,34 @@ impl AddrCache { addresses.sort_unstable_by(|a, b| a.as_ref().cmp(b.as_ref())); // Insert into `self.peer_id_to_authority_id`. - let peer_ids = addresses.iter() + let peer_ids = addresses + .iter() .map(|a| peer_id_from_multiaddr(a)) .filter_map(|peer_id| peer_id); for peer_id in peer_ids.clone() { - let former_auth = match self.peer_id_to_authority_id.insert(peer_id, authority_id.clone()) { - Some(a) if a != authority_id => a, - _ => continue, - }; + let former_auth = + match self.peer_id_to_authority_id.insert(peer_id, authority_id.clone()) { + Some(a) if a != authority_id => a, + _ => continue, + }; // PeerId was associated to a different authority id before. // Remove corresponding authority from `self.authority_id_to_addresses`. let former_auth_addrs = match self.authority_id_to_addresses.get_mut(&former_auth) { Some(a) => a, - None => { debug_assert!(false); continue } + None => { + debug_assert!(false); + continue + }, }; former_auth_addrs.retain(|a| peer_id_from_multiaddr(a).map_or(true, |p| p != peer_id)); } // Insert into `self.authority_id_to_addresses`. 
- for former_addr in - self.authority_id_to_addresses.insert(authority_id.clone(), addresses.clone()).unwrap_or_default() + for former_addr in self + .authority_id_to_addresses + .insert(authority_id.clone(), addresses.clone()) + .unwrap_or_default() { // Must remove from `self.peer_id_to_authority_id` any PeerId formerly associated // to that authority but that can't be found in its new addresses. @@ -87,7 +94,10 @@ impl AddrCache { } /// Returns the addresses for the given [`AuthorityId`]. - pub fn get_addresses_by_authority_id(&self, authority_id: &AuthorityId) -> Option<&Vec> { + pub fn get_addresses_by_authority_id( + &self, + authority_id: &AuthorityId, + ) -> Option<&Vec> { self.authority_id_to_addresses.get(&authority_id) } @@ -100,7 +110,9 @@ impl AddrCache { /// [`AuthorityId`]s. pub fn retain_ids(&mut self, authority_ids: &Vec) { // The below logic could be replaced by `BtreeMap::drain_filter` once it stabilized. - let authority_ids_to_remove = self.authority_id_to_addresses.iter() + let authority_ids_to_remove = self + .authority_id_to_addresses + .iter() .filter(|(id, _addresses)| !authority_ids.contains(id)) .map(|entry| entry.0) .cloned() @@ -111,7 +123,8 @@ impl AddrCache { let addresses = self.authority_id_to_addresses.remove(&authority_id_to_remove); // Remove other entries from `self.peer_id_to_authority_id`. 
- let peer_ids = addresses.iter() + let peer_ids = addresses + .iter() .flatten() .map(|a| peer_id_from_multiaddr(a)) .filter_map(|peer_id| peer_id); @@ -125,10 +138,12 @@ impl AddrCache { } fn peer_id_from_multiaddr(addr: &Multiaddr) -> Option { - addr.iter().last().and_then(|protocol| if let Protocol::P2p(multihash) = protocol { - PeerId::from_multihash(multihash).ok() - } else { - None + addr.iter().last().and_then(|protocol| { + if let Protocol::P2p(multihash) = protocol { + PeerId::from_multihash(multihash).ok() + } else { + None + } }) } @@ -159,9 +174,11 @@ mod tests { fn arbitrary(g: &mut Gen) -> Self { let seed = (0..32).map(|_| u8::arbitrary(g)).collect::>(); let peer_id = PeerId::from_multihash( - Multihash::wrap(multihash::Code::Sha2_256.into(), &seed).unwrap() - ).unwrap(); - let multiaddr = "/ip6/2001:db8:0:0:0:0:0:2/tcp/30333".parse::() + Multihash::wrap(multihash::Code::Sha2_256.into(), &seed).unwrap(), + ) + .unwrap(); + let multiaddr = "/ip6/2001:db8:0:0:0:0:0:2/tcp/30333" + .parse::() .unwrap() .with(Protocol::P2p(peer_id.into())); @@ -176,12 +193,15 @@ mod tests { fn arbitrary(g: &mut Gen) -> Self { let seed = (0..32).map(|_| u8::arbitrary(g)).collect::>(); let peer_id = PeerId::from_multihash( - Multihash::wrap(multihash::Code::Sha2_256.into(), &seed).unwrap() - ).unwrap(); - let multiaddr1 = "/ip6/2001:db8:0:0:0:0:0:2/tcp/30333".parse::() + Multihash::wrap(multihash::Code::Sha2_256.into(), &seed).unwrap(), + ) + .unwrap(); + let multiaddr1 = "/ip6/2001:db8:0:0:0:0:0:2/tcp/30333" + .parse::() .unwrap() .with(Protocol::P2p(peer_id.clone().into())); - let multiaddr2 = "/ip6/2002:db8:0:0:0:0:0:2/tcp/30133".parse::() + let multiaddr2 = "/ip6/2002:db8:0:0:0:0:0:2/tcp/30133" + .parse::() .unwrap() .with(Protocol::P2p(peer_id.into())); TestMultiaddrsSamePeerCombo(multiaddr1, multiaddr2) @@ -219,11 +239,13 @@ mod tests { cache.retain_ids(&vec![first.0, second.0]); assert_eq!( - None, cache.get_addresses_by_authority_id(&third.0), + None, + 
cache.get_addresses_by_authority_id(&third.0), "Expect `get_addresses_by_authority_id` to not return `None` for third authority." ); assert_eq!( - None, cache.get_authority_id_by_peer_id(&peer_id_from_multiaddr(&third.1).unwrap()), + None, + cache.get_authority_id_by_peer_id(&peer_id_from_multiaddr(&third.1).unwrap()), "Expect `get_authority_id_by_peer_id` to return `None` for third authority." ); @@ -253,7 +275,10 @@ mod tests { let mut cache = AddrCache::new(); cache.insert(authority1.clone(), vec![multiaddr1.clone()]); - cache.insert(authority1.clone(), vec![multiaddr2.clone(), multiaddr3.clone(), multiaddr4.clone()]); + cache.insert( + authority1.clone(), + vec![multiaddr2.clone(), multiaddr3.clone(), multiaddr4.clone()], + ); assert_eq!( None, diff --git a/client/authority-discovery/src/worker/tests.rs b/client/authority-discovery/src/worker/tests.rs index 8be23e4840bde..b2f6ff544cb09 100644 --- a/client/authority-discovery/src/worker/tests.rs +++ b/client/authority-discovery/src/worker/tests.rs @@ -18,21 +18,26 @@ use crate::worker::schema; -use std::{sync::{Arc, Mutex}, task::Poll}; +use std::{ + sync::{Arc, Mutex}, + task::Poll, +}; use async_trait::async_trait; -use futures::channel::mpsc::{self, channel}; -use futures::executor::{block_on, LocalPool}; -use futures::future::FutureExt; -use futures::sink::SinkExt; -use futures::task::LocalSpawn; -use libp2p::{kad, core::multiaddr, PeerId}; +use futures::{ + channel::mpsc::{self, channel}, + executor::{block_on, LocalPool}, + future::FutureExt, + sink::SinkExt, + task::LocalSpawn, +}; +use libp2p::{core::multiaddr, kad, PeerId}; use prometheus_endpoint::prometheus::default_registry; -use sp_api::{ProvideRuntimeApi, ApiRef}; +use sp_api::{ApiRef, ProvideRuntimeApi}; use sp_core::crypto::Public; use sp_keystore::{testing::KeyStore, CryptoStore}; -use sp_runtime::traits::{Zero, Block as BlockT, NumberFor}; +use sp_runtime::traits::{Block as BlockT, NumberFor, Zero}; use 
substrate_test_runtime_client::runtime::Block; use super::*; @@ -46,9 +51,7 @@ impl ProvideRuntimeApi for TestApi { type Api = RuntimeApi; fn runtime_api<'a>(&'a self) -> ApiRef<'a, Self::Api> { - RuntimeApi { - authorities: self.authorities.clone(), - }.into() + RuntimeApi { authorities: self.authorities.clone() }.into() } } @@ -135,10 +138,7 @@ impl Default for TestNetwork { let (tx, rx) = mpsc::unbounded(); TestNetwork { peer_id: PeerId::random(), - external_addresses: vec![ - "/ip6/2001:db8::/tcp/30333" - .parse().unwrap(), - ], + external_addresses: vec!["/ip6/2001:db8::/tcp/30333".parse().unwrap()], put_value_call: Default::default(), get_value_call: Default::default(), event_sender: tx, @@ -151,11 +151,17 @@ impl Default for TestNetwork { impl NetworkProvider for TestNetwork { fn put_value(&self, key: kad::record::Key, value: Vec) { self.put_value_call.lock().unwrap().push((key.clone(), value.clone())); - self.event_sender.clone().unbounded_send(TestNetworkEvent::PutCalled(key, value)).unwrap(); + self.event_sender + .clone() + .unbounded_send(TestNetworkEvent::PutCalled(key, value)) + .unwrap(); } fn get_value(&self, key: &kad::record::Key) { self.get_value_call.lock().unwrap().push(key.clone()); - self.event_sender.clone().unbounded_send(TestNetworkEvent::GetCalled(key.clone())).unwrap(); + self.event_sender + .clone() + .unbounded_send(TestNetworkEvent::GetCalled(key.clone())) + .unwrap(); } } @@ -175,9 +181,8 @@ async fn build_dht_event( key_store: &KeyStore, ) -> (libp2p::kad::record::Key, Vec) { let mut serialized_addresses = vec![]; - schema::AuthorityAddresses { - addresses: addresses.into_iter().map(|a| a.to_vec()).collect() - }.encode(&mut serialized_addresses) + schema::AuthorityAddresses { addresses: addresses.into_iter().map(|a| a.to_vec()).collect() } + .encode(&mut serialized_addresses) .map_err(Error::EncodingProto) .unwrap(); @@ -192,11 +197,9 @@ async fn build_dht_event( .unwrap(); let mut signed_addresses = vec![]; - 
schema::SignedAuthorityAddresses { - addresses: serialized_addresses.clone(), - signature, - } - .encode(&mut signed_addresses).unwrap(); + schema::SignedAuthorityAddresses { addresses: serialized_addresses.clone(), signature } + .encode(&mut signed_addresses) + .unwrap(); let key = hash_authority_id(&public_key.to_raw_vec()); let value = signed_addresses; @@ -208,9 +211,7 @@ fn new_registers_metrics() { let (_dht_event_tx, dht_event_rx) = mpsc::channel(1000); let network: Arc = Arc::new(Default::default()); let key_store = KeyStore::new(); - let test_api = Arc::new(TestApi { - authorities: vec![], - }); + let test_api = Arc::new(TestApi { authorities: vec![] }); let registry = prometheus_endpoint::Registry::new(); @@ -275,65 +276,67 @@ fn publish_discover_cycle() { let key_store = KeyStore::new(); - let _ = pool.spawner().spawn_local_obj(async move { - let node_a_public = key_store - .sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None) - .await - .unwrap(); - let test_api = Arc::new(TestApi { - authorities: vec![node_a_public.into()], - }); - - let (_to_worker, from_service) = mpsc::channel(0); - let mut worker = Worker::new( - from_service, - test_api, - network.clone(), - Box::pin(dht_event_rx), - Role::PublishAndDiscover(key_store.into()), - None, - Default::default(), - ); - - worker.publish_ext_addresses(false).await.unwrap(); - - // Expect authority discovery to put a new record onto the dht. - assert_eq!(network.put_value_call.lock().unwrap().len(), 1); - - let dht_event = { - let (key, value) = network.put_value_call.lock().unwrap().pop().unwrap(); - sc_network::DhtEvent::ValueFound(vec![(key, value)]) - }; - - // Node B discovering node A's address. - - let (mut dht_event_tx, dht_event_rx) = channel(1000); - let test_api = Arc::new(TestApi { - // Make sure node B identifies node A as an authority. 
- authorities: vec![node_a_public.into()], - }); - let network: Arc = Arc::new(Default::default()); - let key_store = KeyStore::new(); - - let (_to_worker, from_service) = mpsc::channel(0); - let mut worker = Worker::new( - from_service, - test_api, - network.clone(), - Box::pin(dht_event_rx), - Role::PublishAndDiscover(key_store.into()), - None, - Default::default(), - ); - - dht_event_tx.try_send(dht_event.clone()).unwrap(); - - worker.refill_pending_lookups_queue().await.unwrap(); - worker.start_new_lookups(); - - // Make authority discovery handle the event. - worker.handle_dht_event(dht_event).await; - }.boxed_local().into()); + let _ = pool.spawner().spawn_local_obj( + async move { + let node_a_public = key_store + .sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None) + .await + .unwrap(); + let test_api = Arc::new(TestApi { authorities: vec![node_a_public.into()] }); + + let (_to_worker, from_service) = mpsc::channel(0); + let mut worker = Worker::new( + from_service, + test_api, + network.clone(), + Box::pin(dht_event_rx), + Role::PublishAndDiscover(key_store.into()), + None, + Default::default(), + ); + + worker.publish_ext_addresses(false).await.unwrap(); + + // Expect authority discovery to put a new record onto the dht. + assert_eq!(network.put_value_call.lock().unwrap().len(), 1); + + let dht_event = { + let (key, value) = network.put_value_call.lock().unwrap().pop().unwrap(); + sc_network::DhtEvent::ValueFound(vec![(key, value)]) + }; + + // Node B discovering node A's address. + + let (mut dht_event_tx, dht_event_rx) = channel(1000); + let test_api = Arc::new(TestApi { + // Make sure node B identifies node A as an authority. 
+ authorities: vec![node_a_public.into()], + }); + let network: Arc = Arc::new(Default::default()); + let key_store = KeyStore::new(); + + let (_to_worker, from_service) = mpsc::channel(0); + let mut worker = Worker::new( + from_service, + test_api, + network.clone(), + Box::pin(dht_event_rx), + Role::PublishAndDiscover(key_store.into()), + None, + Default::default(), + ); + + dht_event_tx.try_send(dht_event.clone()).unwrap(); + + worker.refill_pending_lookups_queue().await.unwrap(); + worker.start_new_lookups(); + + // Make authority discovery handle the event. + worker.handle_dht_event(dht_event).await; + } + .boxed_local() + .into(), + ); pool.run(); } @@ -345,9 +348,7 @@ fn terminate_when_event_stream_terminates() { let (dht_event_tx, dht_event_rx) = channel(1000); let network: Arc = Arc::new(Default::default()); let key_store = KeyStore::new(); - let test_api = Arc::new(TestApi { - authorities: vec![], - }); + let test_api = Arc::new(TestApi { authorities: vec![] }); let (to_worker, from_service) = mpsc::channel(0); let worker = Worker::new( @@ -358,7 +359,8 @@ fn terminate_when_event_stream_terminates() { Role::PublishAndDiscover(key_store.into()), None, Default::default(), - ).run(); + ) + .run(); futures::pin_mut!(worker); block_on(async { @@ -367,7 +369,8 @@ fn terminate_when_event_stream_terminates() { // Drop sender side of service channel. 
drop(to_worker); assert_eq!( - Poll::Pending, futures::poll!(&mut worker), + Poll::Pending, + futures::poll!(&mut worker), "Expect the authority discovery module not to terminate once the \ sender side of the service channel is closed.", ); @@ -377,7 +380,8 @@ fn terminate_when_event_stream_terminates() { drop(dht_event_tx); assert_eq!( - Poll::Ready(()), futures::poll!(&mut worker), + Poll::Ready(()), + futures::poll!(&mut worker), "Expect the authority discovery module to terminate once the \ sending side of the dht event channel is closed.", ); @@ -390,14 +394,13 @@ fn dont_stop_polling_dht_event_stream_after_bogus_event() { let peer_id = PeerId::random(); let address: Multiaddr = "/ip6/2001:db8:0:0:0:0:0:1/tcp/30333".parse().unwrap(); - address.with(multiaddr::Protocol::P2p( - peer_id.into(), - )) + address.with(multiaddr::Protocol::P2p(peer_id.into())) }; let remote_key_store = KeyStore::new(); - let remote_public_key: AuthorityId = block_on( - remote_key_store.sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None), - ).unwrap().into(); + let remote_public_key: AuthorityId = + block_on(remote_key_store.sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None)) + .unwrap() + .into(); let (mut dht_event_tx, dht_event_rx) = channel(1); let (network, mut network_events) = { @@ -407,9 +410,7 @@ fn dont_stop_polling_dht_event_stream_after_bogus_event() { }; let key_store = KeyStore::new(); - let test_api = Arc::new(TestApi { - authorities: vec![remote_public_key.clone()], - }); + let test_api = Arc::new(TestApi { authorities: vec![remote_public_key.clone()] }); let mut pool = LocalPool::new(); let (mut to_worker, from_service) = mpsc::channel(1); @@ -427,30 +428,35 @@ fn dont_stop_polling_dht_event_stream_after_bogus_event() { // // As this is a local pool, only one future at a time will have the CPU and // can make progress until the future returns `Pending`. 
- let _ = pool.spawner().spawn_local_obj(async move { - // Refilling `pending_lookups` only happens every X minutes. Fast - // forward by calling `refill_pending_lookups_queue` directly. - worker.refill_pending_lookups_queue().await.unwrap(); - worker.run().await - }.boxed_local().into()); + let _ = pool.spawner().spawn_local_obj( + async move { + // Refilling `pending_lookups` only happens every X minutes. Fast + // forward by calling `refill_pending_lookups_queue` directly. + worker.refill_pending_lookups_queue().await.unwrap(); + worker.run().await + } + .boxed_local() + .into(), + ); pool.run_until(async { // Assert worker to trigger a lookup for the one and only authority. - assert!(matches!( - network_events.next().await, - Some(TestNetworkEvent::GetCalled(_)) - )); + assert!(matches!(network_events.next().await, Some(TestNetworkEvent::GetCalled(_)))); // Send an event that should generate an error - dht_event_tx.send(DhtEvent::ValueFound(Default::default())).await + dht_event_tx + .send(DhtEvent::ValueFound(Default::default())) + .await .expect("Channel has capacity of 1."); // Make previously triggered lookup succeed. let dht_event = { let (key, value) = build_dht_event( vec![remote_multiaddr.clone()], - remote_public_key.clone(), &remote_key_store, - ).await; + remote_public_key.clone(), + &remote_key_store, + ) + .await; sc_network::DhtEvent::ValueFound(vec![(key, value)]) }; dht_event_tx.send(dht_event).await.expect("Channel has capacity of 1."); @@ -458,10 +464,10 @@ fn dont_stop_polling_dht_event_stream_after_bogus_event() { // Expect authority discovery to function normally, now knowing the // address for the remote node. 
let (sender, addresses) = futures::channel::oneshot::channel(); - to_worker.send(ServicetoWorkerMsg::GetAddressesByAuthorityId( - remote_public_key, - sender, - )).await.expect("Channel has capacity of 1."); + to_worker + .send(ServicetoWorkerMsg::GetAddressesByAuthorityId(remote_public_key, sender)) + .await + .expect("Channel has capacity of 1."); assert_eq!(Some(vec![remote_multiaddr]), addresses.await.unwrap()); }); } @@ -469,23 +475,19 @@ fn dont_stop_polling_dht_event_stream_after_bogus_event() { #[test] fn limit_number_of_addresses_added_to_cache_per_authority() { let remote_key_store = KeyStore::new(); - let remote_public = block_on(remote_key_store - .sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None)) - .unwrap(); + let remote_public = + block_on(remote_key_store.sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None)) + .unwrap(); - let addresses = (0..100).map(|_| { - let peer_id = PeerId::random(); - let address: Multiaddr = "/ip6/2001:db8:0:0:0:0:0:1/tcp/30333".parse().unwrap(); - address.with(multiaddr::Protocol::P2p( - peer_id.into(), - )) - }).collect(); + let addresses = (0..100) + .map(|_| { + let peer_id = PeerId::random(); + let address: Multiaddr = "/ip6/2001:db8:0:0:0:0:0:1/tcp/30333".parse().unwrap(); + address.with(multiaddr::Protocol::P2p(peer_id.into())) + }) + .collect(); - let dht_event = block_on(build_dht_event( - addresses, - remote_public.into(), - &remote_key_store, - )); + let dht_event = block_on(build_dht_event(addresses, remote_public.into(), &remote_key_store)); let (_dht_event_tx, dht_event_rx) = channel(1); @@ -506,16 +508,20 @@ fn limit_number_of_addresses_added_to_cache_per_authority() { worker.handle_dht_value_found_event(vec![dht_event]).unwrap(); assert_eq!( MAX_ADDRESSES_PER_AUTHORITY, - worker.addr_cache.get_addresses_by_authority_id(&remote_public.into()).unwrap().len(), + worker + .addr_cache + .get_addresses_by_authority_id(&remote_public.into()) + .unwrap() + .len(), ); } #[test] fn 
do_not_cache_addresses_without_peer_id() { let remote_key_store = KeyStore::new(); - let remote_public = block_on(remote_key_store - .sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None)) - .unwrap(); + let remote_public = + block_on(remote_key_store.sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None)) + .unwrap(); let multiaddr_with_peer_id = { let peer_id = PeerId::random(); @@ -524,21 +530,17 @@ fn do_not_cache_addresses_without_peer_id() { address.with(multiaddr::Protocol::P2p(peer_id.into())) }; - let multiaddr_without_peer_id: Multiaddr = "/ip6/2001:db8:0:0:0:0:0:1/tcp/30333".parse().unwrap(); + let multiaddr_without_peer_id: Multiaddr = + "/ip6/2001:db8:0:0:0:0:0:1/tcp/30333".parse().unwrap(); let dht_event = block_on(build_dht_event( - vec![ - multiaddr_with_peer_id.clone(), - multiaddr_without_peer_id, - ], + vec![multiaddr_with_peer_id.clone(), multiaddr_without_peer_id], remote_public.into(), &remote_key_store, )); let (_dht_event_tx, dht_event_rx) = channel(1); - let local_test_api = Arc::new(TestApi { - authorities: vec![remote_public.into()], - }); + let local_test_api = Arc::new(TestApi { authorities: vec![remote_public.into()] }); let local_network: Arc = Arc::new(Default::default()); let local_key_store = KeyStore::new(); @@ -578,9 +580,7 @@ fn addresses_to_publish_adds_p2p() { let (_to_worker, from_service) = mpsc::channel(0); let worker = Worker::new( from_service, - Arc::new(TestApi { - authorities: vec![], - }), + Arc::new(TestApi { authorities: vec![] }), network.clone(), Box::pin(dht_event_rx), Role::PublishAndDiscover(Arc::new(KeyStore::new())), @@ -605,17 +605,16 @@ fn addresses_to_publish_respects_existing_p2p_protocol() { let network: Arc = Arc::new(TestNetwork { external_addresses: vec![ "/ip6/2001:db8::/tcp/30333/p2p/QmcgpsyWgH8Y8ajJz1Cu72KnS5uo2Aa2LpzU7kinSupNKC" - .parse().unwrap(), + .parse() + .unwrap(), ], - .. 
Default::default() + ..Default::default() }); let (_to_worker, from_service) = mpsc::channel(0); let worker = Worker::new( from_service, - Arc::new(TestApi { - authorities: vec![], - }), + Arc::new(TestApi { authorities: vec![] }), network.clone(), Box::pin(dht_event_rx), Role::PublishAndDiscover(Arc::new(KeyStore::new())), @@ -624,7 +623,8 @@ fn addresses_to_publish_respects_existing_p2p_protocol() { ); assert_eq!( - network.external_addresses, worker.addresses_to_publish().collect::>(), + network.external_addresses, + worker.addresses_to_publish().collect::>(), "Expected Multiaddr from `TestNetwork` to not be altered.", ); } @@ -635,21 +635,21 @@ fn lookup_throttling() { let peer_id = PeerId::random(); let address: Multiaddr = "/ip6/2001:db8:0:0:0:0:0:1/tcp/30333".parse().unwrap(); - address.with(multiaddr::Protocol::P2p( - peer_id.into(), - )) + address.with(multiaddr::Protocol::P2p(peer_id.into())) }; let remote_key_store = KeyStore::new(); - let remote_public_keys: Vec = (0..20).map(|_| { - block_on(remote_key_store - .sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None)) - .unwrap().into() - }).collect(); - let remote_hash_to_key = remote_public_keys.iter() + let remote_public_keys: Vec = (0..20) + .map(|_| { + block_on(remote_key_store.sr25519_generate_new(key_types::AUTHORITY_DISCOVERY, None)) + .unwrap() + .into() + }) + .collect(); + let remote_hash_to_key = remote_public_keys + .iter() .map(|k| (hash_authority_id(k.as_ref()), k.clone())) .collect::>(); - let (mut dht_event_tx, dht_event_rx) = channel(1); let (_to_worker, from_service) = mpsc::channel(0); let mut network = TestNetwork::default(); @@ -668,56 +668,61 @@ fn lookup_throttling() { let mut pool = LocalPool::new(); let metrics = worker.metrics.clone().unwrap(); - let _ = pool.spawner().spawn_local_obj(async move { - // Refilling `pending_lookups` only happens every X minutes. Fast - // forward by calling `refill_pending_lookups_queue` directly. 
- worker.refill_pending_lookups_queue().await.unwrap(); - worker.run().await - }.boxed_local().into()); + let _ = pool.spawner().spawn_local_obj( + async move { + // Refilling `pending_lookups` only happens every X minutes. Fast + // forward by calling `refill_pending_lookups_queue` directly. + worker.refill_pending_lookups_queue().await.unwrap(); + worker.run().await + } + .boxed_local() + .into(), + ); - pool.run_until(async { - // Assert worker to trigger MAX_IN_FLIGHT_LOOKUPS lookups. - for _ in 0..MAX_IN_FLIGHT_LOOKUPS { + pool.run_until( + async { + // Assert worker to trigger MAX_IN_FLIGHT_LOOKUPS lookups. + for _ in 0..MAX_IN_FLIGHT_LOOKUPS { + assert!(matches!(receiver.next().await, Some(TestNetworkEvent::GetCalled(_)))); + } + assert_eq!( + metrics.requests_pending.get(), + (remote_public_keys.len() - MAX_IN_FLIGHT_LOOKUPS) as u64 + ); + assert_eq!(network.get_value_call.lock().unwrap().len(), MAX_IN_FLIGHT_LOOKUPS); + + // Make first lookup succeed. + let remote_hash = network.get_value_call.lock().unwrap().pop().unwrap(); + let remote_key: AuthorityId = remote_hash_to_key.get(&remote_hash).unwrap().clone(); + let dht_event = { + let (key, value) = + build_dht_event(vec![remote_multiaddr.clone()], remote_key, &remote_key_store) + .await; + sc_network::DhtEvent::ValueFound(vec![(key, value)]) + }; + dht_event_tx.send(dht_event).await.expect("Channel has capacity of 1."); + + // Assert worker to trigger another lookup. assert!(matches!(receiver.next().await, Some(TestNetworkEvent::GetCalled(_)))); + assert_eq!( + metrics.requests_pending.get(), + (remote_public_keys.len() - MAX_IN_FLIGHT_LOOKUPS - 1) as u64 + ); + assert_eq!(network.get_value_call.lock().unwrap().len(), MAX_IN_FLIGHT_LOOKUPS); + + // Make second one fail. 
+ let remote_hash = network.get_value_call.lock().unwrap().pop().unwrap(); + let dht_event = sc_network::DhtEvent::ValueNotFound(remote_hash); + dht_event_tx.send(dht_event).await.expect("Channel has capacity of 1."); + + // Assert worker to trigger another lookup. + assert!(matches!(receiver.next().await, Some(TestNetworkEvent::GetCalled(_)))); + assert_eq!( + metrics.requests_pending.get(), + (remote_public_keys.len() - MAX_IN_FLIGHT_LOOKUPS - 2) as u64 + ); + assert_eq!(network.get_value_call.lock().unwrap().len(), MAX_IN_FLIGHT_LOOKUPS); } - assert_eq!( - metrics.requests_pending.get(), - (remote_public_keys.len() - MAX_IN_FLIGHT_LOOKUPS) as u64 - ); - assert_eq!(network.get_value_call.lock().unwrap().len(), MAX_IN_FLIGHT_LOOKUPS); - - // Make first lookup succeed. - let remote_hash = network.get_value_call.lock().unwrap().pop().unwrap(); - let remote_key: AuthorityId = remote_hash_to_key.get(&remote_hash).unwrap().clone(); - let dht_event = { - let (key, value) = build_dht_event( - vec![remote_multiaddr.clone()], - remote_key, - &remote_key_store - ).await; - sc_network::DhtEvent::ValueFound(vec![(key, value)]) - }; - dht_event_tx.send(dht_event).await.expect("Channel has capacity of 1."); - - // Assert worker to trigger another lookup. - assert!(matches!(receiver.next().await, Some(TestNetworkEvent::GetCalled(_)))); - assert_eq!( - metrics.requests_pending.get(), - (remote_public_keys.len() - MAX_IN_FLIGHT_LOOKUPS - 1) as u64 - ); - assert_eq!(network.get_value_call.lock().unwrap().len(), MAX_IN_FLIGHT_LOOKUPS); - - // Make second one fail. - let remote_hash = network.get_value_call.lock().unwrap().pop().unwrap(); - let dht_event = sc_network::DhtEvent::ValueNotFound(remote_hash); - dht_event_tx.send(dht_event).await.expect("Channel has capacity of 1."); - - // Assert worker to trigger another lookup. 
- assert!(matches!(receiver.next().await, Some(TestNetworkEvent::GetCalled(_)))); - assert_eq!( - metrics.requests_pending.get(), - (remote_public_keys.len() - MAX_IN_FLIGHT_LOOKUPS - 2) as u64 - ); - assert_eq!(network.get_value_call.lock().unwrap().len(), MAX_IN_FLIGHT_LOOKUPS); - }.boxed_local()); + .boxed_local(), + ); } diff --git a/client/basic-authorship/src/basic_authorship.rs b/client/basic-authorship/src/basic_authorship.rs index 590f4275bf760..b606062948904 100644 --- a/client/basic-authorship/src/basic_authorship.rs +++ b/client/basic-authorship/src/basic_authorship.rs @@ -20,24 +20,30 @@ // FIXME #1021 move this into sp-consensus -use std::{pin::Pin, time, sync::Arc}; -use sc_client_api::backend; use codec::{Decode, Encode}; -use sp_consensus::{evaluation, Proposal, ProofRecording, DisableProofRecording, EnableProofRecording}; +use futures::{ + channel::oneshot, + future, + future::{Future, FutureExt}, + select, +}; +use log::{debug, error, info, trace, warn}; +use sc_block_builder::{BlockBuilderApi, BlockBuilderProvider}; +use sc_client_api::backend; +use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_INFO}; +use sc_transaction_pool_api::{InPoolTransaction, TransactionPool}; +use sp_api::{ApiExt, ProvideRuntimeApi}; +use sp_blockchain::{ApplyExtrinsicFailed::Validity, Error::ApplyExtrinsicFailed, HeaderBackend}; +use sp_consensus::{ + evaluation, DisableProofRecording, EnableProofRecording, ProofRecording, Proposal, +}; use sp_core::traits::SpawnNamed; use sp_inherents::InherentData; -use log::{error, info, debug, trace, warn}; use sp_runtime::{ generic::BlockId, - traits::{Block as BlockT, Hash as HashT, Header as HeaderT, DigestFor, BlakeTwo256}, + traits::{BlakeTwo256, Block as BlockT, DigestFor, Hash as HashT, Header as HeaderT}, }; -use sc_transaction_pool_api::{TransactionPool, InPoolTransaction}; -use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_INFO}; -use sc_block_builder::{BlockBuilderApi, BlockBuilderProvider}; -use 
sp_api::{ProvideRuntimeApi, ApiExt}; -use futures::{future, future::{Future, FutureExt}, channel::oneshot, select}; -use sp_blockchain::{HeaderBackend, ApplyExtrinsicFailed::Validity, Error::ApplyExtrinsicFailed}; -use std::marker::PhantomData; +use std::{marker::PhantomData, pin::Pin, sync::Arc, time}; use prometheus_endpoint::Registry as PrometheusRegistry; use sc_proposer_metrics::MetricsLink as PrometheusMetrics; @@ -141,14 +147,18 @@ impl ProposerFactory { } impl ProposerFactory - where - A: TransactionPool + 'static, - B: backend::Backend + Send + Sync + 'static, - Block: BlockT, - C: BlockBuilderProvider + HeaderBackend + ProvideRuntimeApi - + Send + Sync + 'static, - C::Api: ApiExt> - + BlockBuilderApi, +where + A: TransactionPool + 'static, + B: backend::Backend + Send + Sync + 'static, + Block: BlockT, + C: BlockBuilderProvider + + HeaderBackend + + ProvideRuntimeApi + + Send + + Sync + + 'static, + C::Api: + ApiExt> + BlockBuilderApi, { fn init_with_now( &mut self, @@ -180,26 +190,26 @@ impl ProposerFactory } } -impl sp_consensus::Environment for - ProposerFactory - where - A: TransactionPool + 'static, - B: backend::Backend + Send + Sync + 'static, - Block: BlockT, - C: BlockBuilderProvider + HeaderBackend + ProvideRuntimeApi - + Send + Sync + 'static, - C::Api: ApiExt> - + BlockBuilderApi, - PR: ProofRecording, +impl sp_consensus::Environment for ProposerFactory +where + A: TransactionPool + 'static, + B: backend::Backend + Send + Sync + 'static, + Block: BlockT, + C: BlockBuilderProvider + + HeaderBackend + + ProvideRuntimeApi + + Send + + Sync + + 'static, + C::Api: + ApiExt> + BlockBuilderApi, + PR: ProofRecording, { type CreateProposer = future::Ready>; type Proposer = Proposer; type Error = sp_blockchain::Error; - fn init( - &mut self, - parent_header: &::Header, - ) -> Self::CreateProposer { + fn init(&mut self, parent_header: &::Header) -> Self::CreateProposer { future::ready(Ok(self.init_with_now(parent_header, Box::new(time::Instant::now)))) } 
} @@ -220,22 +230,28 @@ pub struct Proposer { _phantom: PhantomData<(B, PR)>, } -impl sp_consensus::Proposer for - Proposer - where - A: TransactionPool + 'static, - B: backend::Backend + Send + Sync + 'static, - Block: BlockT, - C: BlockBuilderProvider + HeaderBackend + ProvideRuntimeApi - + Send + Sync + 'static, - C::Api: ApiExt> - + BlockBuilderApi, - PR: ProofRecording, +impl sp_consensus::Proposer for Proposer +where + A: TransactionPool + 'static, + B: backend::Backend + Send + Sync + 'static, + Block: BlockT, + C: BlockBuilderProvider + + HeaderBackend + + ProvideRuntimeApi + + Send + + Sync + + 'static, + C::Api: + ApiExt> + BlockBuilderApi, + PR: ProofRecording, { type Transaction = backend::TransactionFor; - type Proposal = Pin, Self::Error> - > + Send>>; + type Proposal = Pin< + Box< + dyn Future, Self::Error>> + + Send, + >, + >; type Error = sp_blockchain::Error; type ProofRecording = PR; type Proof = PR::Proof; @@ -250,36 +266,38 @@ impl sp_consensus::Proposer for let (tx, rx) = oneshot::channel(); let spawn_handle = self.spawn_handle.clone(); - spawn_handle.spawn_blocking("basic-authorship-proposer", Box::pin(async move { - // leave some time for evaluation and block finalization (33%) - let deadline = (self.now)() + max_duration - max_duration / 3; - let res = self.propose_with( - inherent_data, - inherent_digests, - deadline, - block_size_limit, - ).await; - if tx.send(res).is_err() { - trace!("Could not send block production result to proposer!"); - } - })); + spawn_handle.spawn_blocking( + "basic-authorship-proposer", + Box::pin(async move { + // leave some time for evaluation and block finalization (33%) + let deadline = (self.now)() + max_duration - max_duration / 3; + let res = self + .propose_with(inherent_data, inherent_digests, deadline, block_size_limit) + .await; + if tx.send(res).is_err() { + trace!("Could not send block production result to proposer!"); + } + }), + ); - async move { - rx.await? - }.boxed() + async move { rx.await? 
}.boxed() } } impl Proposer - where - A: TransactionPool, - B: backend::Backend + Send + Sync + 'static, - Block: BlockT, - C: BlockBuilderProvider + HeaderBackend + ProvideRuntimeApi - + Send + Sync + 'static, - C::Api: ApiExt> - + BlockBuilderApi, - PR: ProofRecording, +where + A: TransactionPool, + B: backend::Backend + Send + Sync + 'static, + Block: BlockT, + C: BlockBuilderProvider + + HeaderBackend + + ProvideRuntimeApi + + Send + + Sync + + 'static, + C::Api: + ApiExt> + BlockBuilderApi, + PR: ProofRecording, { async fn propose_with( self, @@ -287,30 +305,30 @@ impl Proposer inherent_digests: DigestFor, deadline: time::Instant, block_size_limit: Option, - ) -> Result, PR::Proof>, sp_blockchain::Error> { + ) -> Result, PR::Proof>, sp_blockchain::Error> + { /// If the block is full we will attempt to push at most /// this number of transactions before quitting for real. /// It allows us to increase block utilization. const MAX_SKIPPED_TRANSACTIONS: usize = 8; - let mut block_builder = self.client.new_block_at( - &self.parent_id, - inherent_digests, - PR::ENABLED, - )?; + let mut block_builder = + self.client.new_block_at(&self.parent_id, inherent_digests, PR::ENABLED)?; for inherent in block_builder.create_inherents(inherent_data)? { match block_builder.push(inherent) { Err(ApplyExtrinsicFailed(Validity(e))) if e.exhausted_resources() => warn!("⚠️ Dropping non-mandatory inherent from overweight block."), Err(ApplyExtrinsicFailed(Validity(e))) if e.was_mandatory() => { - error!("❌️ Mandatory inherent extrinsic returned error. Block cannot be produced."); + error!( + "❌️ Mandatory inherent extrinsic returned error. Block cannot be produced." + ); Err(ApplyExtrinsicFailed(Validity(e)))? - } + }, Err(e) => { warn!("❗️ Inherent extrinsic returned unexpected error: {}. 
Dropping.", e); - } - Ok(_) => {} + }, + Ok(_) => {}, } } @@ -320,9 +338,8 @@ impl Proposer let mut unqueue_invalid = Vec::new(); let mut t1 = self.transaction_pool.ready_at(self.parent_number).fuse(); - let mut t2 = futures_timer::Delay::new( - deadline.saturating_duration_since((self.now)()) / 8, - ).fuse(); + let mut t2 = + futures_timer::Delay::new(deadline.saturating_duration_since((self.now)()) / 8).fuse(); let pending_iterator = select! { res = t1 => res, @@ -349,15 +366,14 @@ impl Proposer "Consensus deadline reached when pushing block transactions, \ proceeding with proposing." ); - break; + break } let pending_tx_data = pending_tx.data().clone(); let pending_tx_hash = pending_tx.hash().clone(); - let block_size = block_builder.estimate_block_size( - self.include_proof_in_block_size_estimation, - ); + let block_size = + block_builder.estimate_block_size(self.include_proof_in_block_size_estimation); if block_size + pending_tx_data.encoded_size() > block_size_limit { if skipped < MAX_SKIPPED_TRANSACTIONS { skipped += 1; @@ -366,11 +382,11 @@ impl Proposer but will try {} more transactions before quitting.", MAX_SKIPPED_TRANSACTIONS - skipped, ); - continue; + continue } else { debug!("Reached block size limit, proceeding with proposing."); hit_block_size_limit = true; - break; + break } } @@ -379,9 +395,8 @@ impl Proposer Ok(()) => { transaction_pushed = true; debug!("[{:?}] Pushed to the block.", pending_tx_hash); - } - Err(ApplyExtrinsicFailed(Validity(e))) - if e.exhausted_resources() => { + }, + Err(ApplyExtrinsicFailed(Validity(e))) if e.exhausted_resources() => { if skipped < MAX_SKIPPED_TRANSACTIONS { skipped += 1; debug!( @@ -390,20 +405,20 @@ impl Proposer ); } else { debug!("Block is full, proceed with proposing."); - break; + break } - } + }, Err(e) if skipped > 0 => { trace!( "[{:?}] Ignoring invalid transaction when skipping: {}", pending_tx_hash, e ); - } + }, Err(e) => { debug!("[{:?}] Invalid transaction: {}", pending_tx_hash, e); 
unqueue_invalid.push(pending_tx_hash); - } + }, } } @@ -418,12 +433,10 @@ impl Proposer let (block, storage_changes, proof) = block_builder.build()?.into_inner(); - self.metrics.report( - |metrics| { - metrics.number_of_transactions.set(block.extrinsics().len() as u64); - metrics.block_constructed.observe(block_timer.elapsed().as_secs_f64()); - } - ); + self.metrics.report(|metrics| { + metrics.number_of_transactions.set(block.extrinsics().len() as u64); + metrics.block_constructed.observe(block_timer.elapsed().as_secs_f64()); + }); info!( "🎁 Prepared block for proposing at {} [hash: {:?}; parent_hash: {}; extrinsics ({}): [{}]]", @@ -449,16 +462,14 @@ impl Proposer error!("Failed to verify block encoding/decoding"); } - if let Err(err) = evaluation::evaluate_initial( - &block, - &self.parent_hash, - self.parent_number, - ) { + if let Err(err) = + evaluation::evaluate_initial(&block, &self.parent_hash, self.parent_number) + { error!("Failed to evaluate authored block: {:?}", err); } - let proof = PR::into_proof(proof) - .map_err(|e| sp_blockchain::Error::Application(Box::new(e)))?; + let proof = + PR::into_proof(proof).map_err(|e| sp_blockchain::Error::Application(Box::new(e)))?; Ok(Proposal { block, proof, storage_changes }) } } @@ -467,19 +478,20 @@ impl Proposer mod tests { use super::*; + use futures::executor::block_on; use parking_lot::Mutex; - use sp_consensus::{BlockOrigin, Proposer}; - use substrate_test_runtime_client::{ - prelude::*, TestClientBuilder, runtime::{Extrinsic, Transfer}, TestClientBuilderExt, - }; - use sc_transaction_pool_api::{ChainEvent, MaintainedTransactionPool, TransactionSource}; + use sc_client_api::Backend; use sc_transaction_pool::BasicPool; + use sc_transaction_pool_api::{ChainEvent, MaintainedTransactionPool, TransactionSource}; use sp_api::Core; use sp_blockchain::HeaderBackend; + use sp_consensus::{BlockOrigin, Environment, Proposer}; use sp_runtime::traits::NumberFor; - use sc_client_api::Backend; - use 
futures::executor::block_on; - use sp_consensus::Environment; + use substrate_test_runtime_client::{ + prelude::*, + runtime::{Extrinsic, Transfer}, + TestClientBuilder, TestClientBuilderExt, + }; const SOURCE: TransactionSource = TransactionSource::External; @@ -489,16 +501,15 @@ mod tests { nonce, from: AccountKeyring::Alice.into(), to: Default::default(), - }.into_signed_tx() + } + .into_signed_tx() } fn chain_event(header: B::Header) -> ChainEvent - where NumberFor: From + where + NumberFor: From, { - ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - } + ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None } } #[test] @@ -514,25 +525,20 @@ mod tests { client.clone(), ); - block_on( - txpool.submit_at(&BlockId::number(0), SOURCE, vec![extrinsic(0), extrinsic(1)]) - ).unwrap(); + block_on(txpool.submit_at(&BlockId::number(0), SOURCE, vec![extrinsic(0), extrinsic(1)])) + .unwrap(); block_on( txpool.maintain(chain_event( - client.header(&BlockId::Number(0u64)) + client + .header(&BlockId::Number(0u64)) .expect("header get error") - .expect("there should be header") - )) + .expect("there should be header"), + )), ); - let mut proposer_factory = ProposerFactory::new( - spawner.clone(), - client.clone(), - txpool.clone(), - None, - None, - ); + let mut proposer_factory = + ProposerFactory::new(spawner.clone(), client.clone(), txpool.clone(), None, None); let cell = Mutex::new((false, time::Instant::now())); let proposer = proposer_factory.init_with_now( @@ -541,20 +547,21 @@ mod tests { let mut value = cell.lock(); if !value.0 { value.0 = true; - return value.1; + return value.1 } let old = value.1; let new = old + time::Duration::from_secs(2); *value = (true, new); old - }) + }), ); // when let deadline = time::Duration::from_secs(3); - let block = block_on( - proposer.propose(Default::default(), Default::default(), deadline, None) - ).map(|r| r.block).unwrap(); + let block = + block_on(proposer.propose(Default::default(), 
Default::default(), deadline, None)) + .map(|r| r.block) + .unwrap(); // then // block should have some extrinsics although we have some more in the pool. @@ -574,13 +581,8 @@ mod tests { client.clone(), ); - let mut proposer_factory = ProposerFactory::new( - spawner.clone(), - client.clone(), - txpool.clone(), - None, - None, - ); + let mut proposer_factory = + ProposerFactory::new(spawner.clone(), client.clone(), txpool.clone(), None, None); let cell = Mutex::new((false, time::Instant::now())); let proposer = proposer_factory.init_with_now( @@ -589,18 +591,18 @@ mod tests { let mut value = cell.lock(); if !value.0 { value.0 = true; - return value.1; + return value.1 } let new = value.1 + time::Duration::from_secs(160); *value = (true, new); new - }) + }), ); let deadline = time::Duration::from_secs(1); - block_on( - proposer.propose(Default::default(), Default::default(), deadline, None) - ).map(|r| r.block).unwrap(); + block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) + .map(|r| r.block) + .unwrap(); } #[test] @@ -619,25 +621,19 @@ mod tests { let genesis_hash = client.info().best_hash; let block_id = BlockId::Hash(genesis_hash); - block_on( - txpool.submit_at(&BlockId::number(0), SOURCE, vec![extrinsic(0)]), - ).unwrap(); + block_on(txpool.submit_at(&BlockId::number(0), SOURCE, vec![extrinsic(0)])).unwrap(); block_on( txpool.maintain(chain_event( - client.header(&BlockId::Number(0u64)) + client + .header(&BlockId::Number(0u64)) .expect("header get error") .expect("there should be header"), - )) + )), ); - let mut proposer_factory = ProposerFactory::new( - spawner.clone(), - client.clone(), - txpool.clone(), - None, - None, - ); + let mut proposer_factory = + ProposerFactory::new(spawner.clone(), client.clone(), txpool.clone(), None, None); let proposer = proposer_factory.init_with_now( &client.header(&block_id).unwrap().unwrap(), @@ -645,9 +641,9 @@ mod tests { ); let deadline = time::Duration::from_secs(9); - let proposal = 
block_on( - proposer.propose(Default::default(), Default::default(), deadline, None), - ).unwrap(); + let proposal = + block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) + .unwrap(); assert_eq!(proposal.block.extrinsics().len(), 1); @@ -655,16 +651,13 @@ mod tests { api.execute_block(&block_id, proposal.block).unwrap(); let state = backend.state_at(block_id).unwrap(); - let changes_trie_state = backend::changes_tries_state_at_block( - &block_id, - backend.changes_trie_storage(), - ).unwrap(); + let changes_trie_state = + backend::changes_tries_state_at_block(&block_id, backend.changes_trie_storage()) + .unwrap(); - let storage_changes = api.into_storage_changes( - &state, - changes_trie_state.as_ref(), - genesis_hash, - ).unwrap(); + let storage_changes = api + .into_storage_changes(&state, changes_trie_state.as_ref(), genesis_hash) + .unwrap(); assert_eq!( proposal.storage_changes.transaction_storage_root, @@ -685,8 +678,10 @@ mod tests { client.clone(), ); - block_on( - txpool.submit_at(&BlockId::number(0), SOURCE, vec![ + block_on(txpool.submit_at( + &BlockId::number(0), + SOURCE, + vec![ extrinsic(0), extrinsic(1), Transfer { @@ -704,22 +699,16 @@ mod tests { }.into_resources_exhausting_tx(), extrinsic(5), extrinsic(6), - ]) - ).unwrap(); - - let mut proposer_factory = ProposerFactory::new( - spawner.clone(), - client.clone(), - txpool.clone(), - None, - None, - ); - let mut propose_block = | - client: &TestClient, - number, - expected_block_extrinsics, - expected_pool_transactions, - | { + ], + )) + .unwrap(); + + let mut proposer_factory = + ProposerFactory::new(spawner.clone(), client.clone(), txpool.clone(), None, None); + let mut propose_block = |client: &TestClient, + number, + expected_block_extrinsics, + expected_pool_transactions| { let proposer = proposer_factory.init_with_now( &client.header(&BlockId::number(number)).unwrap().unwrap(), Box::new(move || time::Instant::now()), @@ -727,9 +716,10 @@ mod tests { // when let 
deadline = time::Duration::from_secs(9); - let block = block_on( - proposer.propose(Default::default(), Default::default(), deadline, None) - ).map(|r| r.block).unwrap(); + let block = + block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) + .map(|r| r.block) + .unwrap(); // then // block should have some extrinsics although we have some more in the pool. @@ -741,10 +731,11 @@ mod tests { block_on( txpool.maintain(chain_event( - client.header(&BlockId::Number(0u64)) + client + .header(&BlockId::Number(0u64)) .expect("header get error") - .expect("there should be header") - )) + .expect("there should be header"), + )), ); // let's create one block and import it @@ -753,10 +744,11 @@ mod tests { block_on( txpool.maintain(chain_event( - client.header(&BlockId::Number(1)) + client + .header(&BlockId::Number(1)) .expect("header get error") - .expect("there should be header") - )) + .expect("there should be header"), + )), ); // now let's make sure that we can still make some progress @@ -775,7 +767,8 @@ mod tests { spawner.clone(), client.clone(), ); - let genesis_header = client.header(&BlockId::Number(0u64)) + let genesis_header = client + .header(&BlockId::Number(0u64)) .expect("header get error") .expect("there should be header"); @@ -784,40 +777,43 @@ mod tests { .map(|v| Extrinsic::IncludeData(vec![v as u8; 10])) .collect::>(); - let block_limit = genesis_header.encoded_size() - + extrinsics.iter().take(extrinsics_num - 1).map(Encode::encoded_size).sum::() - + Vec::::new().encoded_size(); + let block_limit = genesis_header.encoded_size() + + extrinsics + .iter() + .take(extrinsics_num - 1) + .map(Encode::encoded_size) + .sum::() + + Vec::::new().encoded_size(); - block_on( - txpool.submit_at(&BlockId::number(0), SOURCE, extrinsics) - ).unwrap(); + block_on(txpool.submit_at(&BlockId::number(0), SOURCE, extrinsics)).unwrap(); block_on(txpool.maintain(chain_event(genesis_header.clone()))); - let mut proposer_factory = 
ProposerFactory::new( - spawner.clone(), - client.clone(), - txpool.clone(), - None, - None, - ); + let mut proposer_factory = + ProposerFactory::new(spawner.clone(), client.clone(), txpool.clone(), None, None); let proposer = block_on(proposer_factory.init(&genesis_header)).unwrap(); // Give it enough time let deadline = time::Duration::from_secs(300); - let block = block_on( - proposer.propose(Default::default(), Default::default(), deadline, Some(block_limit)) - ).map(|r| r.block).unwrap(); + let block = block_on(proposer.propose( + Default::default(), + Default::default(), + deadline, + Some(block_limit), + )) + .map(|r| r.block) + .unwrap(); // Based on the block limit, one transaction shouldn't be included. assert_eq!(block.extrinsics().len(), extrinsics_num - 1); let proposer = block_on(proposer_factory.init(&genesis_header)).unwrap(); - let block = block_on( - proposer.propose(Default::default(), Default::default(), deadline, None, - )).map(|r| r.block).unwrap(); + let block = + block_on(proposer.propose(Default::default(), Default::default(), deadline, None)) + .map(|r| r.block) + .unwrap(); // Without a block limit we should include all of them assert_eq!(block.extrinsics().len(), extrinsics_num); @@ -833,9 +829,14 @@ mod tests { let proposer = block_on(proposer_factory.init(&genesis_header)).unwrap(); // Give it enough time - let block = block_on( - proposer.propose(Default::default(), Default::default(), deadline, Some(block_limit)) - ).map(|r| r.block).unwrap(); + let block = block_on(proposer.propose( + Default::default(), + Default::default(), + deadline, + Some(block_limit), + )) + .map(|r| r.block) + .unwrap(); // The block limit didn't changed, but we now include the proof in the estimation of the // block size and thus, one less transaction should fit into the limit. 
diff --git a/client/basic-authorship/src/lib.rs b/client/basic-authorship/src/lib.rs index 133b833cdddc8..4e7f981e6d156 100644 --- a/client/basic-authorship/src/lib.rs +++ b/client/basic-authorship/src/lib.rs @@ -73,4 +73,4 @@ mod basic_authorship; -pub use crate::basic_authorship::{ProposerFactory, Proposer, DEFAULT_BLOCK_SIZE_LIMIT}; +pub use crate::basic_authorship::{Proposer, ProposerFactory, DEFAULT_BLOCK_SIZE_LIMIT}; diff --git a/client/block-builder/src/lib.rs b/client/block-builder/src/lib.rs index 7d391f8fb85b3..e89421edfb168 100644 --- a/client/block-builder/src/lib.rs +++ b/client/block-builder/src/lib.rs @@ -28,14 +28,14 @@ use codec::Encode; -use sp_runtime::{ - generic::BlockId, - traits::{Header as HeaderT, Hash, Block as BlockT, HashFor, DigestFor, NumberFor, One}, +use sp_api::{ + ApiExt, ApiRef, Core, ProvideRuntimeApi, StorageChanges, StorageProof, TransactionOutcome, }; use sp_blockchain::{ApplyExtrinsicFailed, Error}; use sp_core::ExecutionContext; -use sp_api::{ - Core, ApiExt, ApiRef, ProvideRuntimeApi, StorageChanges, StorageProof, TransactionOutcome, +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, DigestFor, Hash, HashFor, Header as HeaderT, NumberFor, One}, }; pub use sp_block_builder::BlockBuilder as BlockBuilderApi; @@ -94,7 +94,9 @@ pub struct BuiltBlock, } -impl>> BuiltBlock { +impl>> + BuiltBlock +{ /// Convert into the inner values. pub fn into_inner(self) -> (Block, StorageChanges, Option) { (self.block, self.storage_changes, self.proof) @@ -103,11 +105,11 @@ impl>> BuiltBl /// Block builder provider pub trait BlockBuilderProvider - where - Block: BlockT, - B: backend::Backend, - Self: Sized, - RA: ProvideRuntimeApi, +where + Block: BlockT, + B: backend::Backend, + Self: Sized, + RA: ProvideRuntimeApi, { /// Create a new block, built on top of `parent`. 
/// @@ -143,7 +145,8 @@ impl<'a, Block, A, B> BlockBuilder<'a, Block, A, B> where Block: BlockT, A: ProvideRuntimeApi + 'a, - A::Api: BlockBuilderApi + ApiExt>, + A::Api: + BlockBuilderApi + ApiExt>, B: backend::Backend, { /// Create a new instance of builder based on the given `parent_hash` and `parent_number`. @@ -177,9 +180,7 @@ where let block_id = BlockId::Hash(parent_hash); - api.initialize_block_with_context( - &block_id, ExecutionContext::BlockConstruction, &header, - )?; + api.initialize_block_with_context(&block_id, ExecutionContext::BlockConstruction, &header)?; Ok(Self { parent_hash, @@ -207,12 +208,10 @@ where Ok(Ok(_)) => { extrinsics.push(xt); TransactionOutcome::Commit(Ok(())) - } - Ok(Err(tx_validity)) => { - TransactionOutcome::Rollback( - Err(ApplyExtrinsicFailed::Validity(tx_validity).into()), - ) }, + Ok(Err(tx_validity)) => TransactionOutcome::Rollback(Err( + ApplyExtrinsicFailed::Validity(tx_validity).into(), + )), Err(e) => TransactionOutcome::Rollback(Err(Error::from(e))), } }) @@ -224,9 +223,9 @@ where /// supplied by `self.api`, combined as [`BuiltBlock`]. /// The storage proof will be `Some(_)` when proof recording was enabled. 
pub fn build(mut self) -> Result>, Error> { - let header = self.api.finalize_block_with_context( - &self.block_id, ExecutionContext::BlockConstruction - )?; + let header = self + .api + .finalize_block_with_context(&self.block_id, ExecutionContext::BlockConstruction)?; debug_assert_eq!( header.extrinsics_root().clone(), @@ -244,11 +243,10 @@ where )?; let parent_hash = self.parent_hash; - let storage_changes = self.api.into_storage_changes( - &state, - changes_trie_state.as_ref(), - parent_hash, - ).map_err(|e| sp_blockchain::Error::StorageChanges(e))?; + let storage_changes = self + .api + .into_storage_changes(&state, changes_trie_state.as_ref(), parent_hash) + .map_err(|e| sp_blockchain::Error::StorageChanges(e))?; Ok(BuiltBlock { block: ::new(header, self.extrinsics), @@ -265,15 +263,17 @@ where inherent_data: sp_inherents::InherentData, ) -> Result, Error> { let block_id = self.block_id; - self.api.execute_in_transaction(move |api| { - // `create_inherents` should not change any state, to ensure this we always rollback - // the transaction. - TransactionOutcome::Rollback(api.inherent_extrinsics_with_context( - &block_id, - ExecutionContext::BlockConstruction, - inherent_data - )) - }).map_err(|e| Error::Application(Box::new(e))) + self.api + .execute_in_transaction(move |api| { + // `create_inherents` should not change any state, to ensure this we always rollback + // the transaction. + TransactionOutcome::Rollback(api.inherent_extrinsics_with_context( + &block_id, + ExecutionContext::BlockConstruction, + inherent_data, + )) + }) + .map_err(|e| Error::Application(Box::new(e))) } /// Estimate the size of the block in the current state. 
@@ -312,19 +312,22 @@ mod tests { RecordProof::Yes, Default::default(), &*backend, - ).unwrap().build().unwrap(); + ) + .unwrap() + .build() + .unwrap(); let proof = block.proof.expect("Proof is build on request"); let backend = sp_state_machine::create_proof_check_backend::( block.storage_changes.transaction_storage_root, proof, - ).unwrap(); + ) + .unwrap(); - assert!( - backend.storage(&sp_core::storage::well_known_keys::CODE) - .unwrap_err() - .contains("Database missing expected key"), - ); + assert!(backend + .storage(&sp_core::storage::well_known_keys::CODE) + .unwrap_err() + .contains("Database missing expected key"),); } } diff --git a/client/chain-spec/derive/src/impls.rs b/client/chain-spec/derive/src/impls.rs index 39984d4df1044..73634dcca42e5 100644 --- a/client/chain-spec/derive/src/impls.rs +++ b/client/chain-spec/derive/src/impls.rs @@ -17,9 +17,9 @@ // along with this program. If not, see . use proc_macro2::{Span, TokenStream}; -use quote::quote; -use syn::{DeriveInput, Ident, Error}; use proc_macro_crate::{crate_name, FoundCrate}; +use quote::quote; +use syn::{DeriveInput, Error, Ident}; const CRATE_NAME: &str = "sc-chain-spec"; const ATTRIBUTE_NAME: &str = "forks"; @@ -31,14 +31,18 @@ const ATTRIBUTE_NAME: &str = "forks"; pub fn extension_derive(ast: &DeriveInput) -> proc_macro::TokenStream { derive(ast, |crate_name, name, generics: &syn::Generics, field_names, field_types, fields| { let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - let forks = fields.named.iter().find_map(|f| { - if f.attrs.iter().any(|attr| attr.path.is_ident(ATTRIBUTE_NAME)) { - let typ = &f.ty; - Some(quote! { #typ }) - } else { - None - } - }).unwrap_or_else(|| quote! { #crate_name::NoExtension }); + let forks = fields + .named + .iter() + .find_map(|f| { + if f.attrs.iter().any(|attr| attr.path.is_ident(ATTRIBUTE_NAME)) { + let typ = &f.ty; + Some(quote! { #typ }) + } else { + None + } + }) + .unwrap_or_else(|| quote! 
{ #crate_name::NoExtension }); quote! { impl #impl_generics #crate_name::Extension for #name #ty_generics #where_clause { @@ -80,13 +84,12 @@ pub fn group_derive(ast: &DeriveInput) -> proc_macro::TokenStream { Ok(FoundCrate::Itself) => Ident::new("serde", Span::call_site()), Ok(FoundCrate::Name(name)) => Ident::new(&name, Span::call_site()), Err(e) => { - let err = Error::new( - Span::call_site(), - &format!("Could not find `serde` crate: {}", e), - ).to_compile_error(); + let err = + Error::new(Span::call_site(), &format!("Could not find `serde` crate: {}", e)) + .to_compile_error(); - return quote!( #err ).into(); - } + return quote!( #err ).into() + }, }; quote! { @@ -131,14 +134,20 @@ pub fn group_derive(ast: &DeriveInput) -> proc_macro::TokenStream { pub fn derive( ast: &DeriveInput, derive: impl Fn( - &Ident, &Ident, &syn::Generics, Vec<&Ident>, Vec<&syn::Type>, &syn::FieldsNamed, + &Ident, + &Ident, + &syn::Generics, + Vec<&Ident>, + Vec<&syn::Type>, + &syn::FieldsNamed, ) -> TokenStream, ) -> proc_macro::TokenStream { let err = || { let err = Error::new( Span::call_site(), - "ChainSpecGroup is only available for structs with named fields." - ).to_compile_error(); + "ChainSpecGroup is only available for structs with named fields.", + ) + .to_compile_error(); quote!( #err ).into() }; @@ -168,47 +177,35 @@ pub fn derive( derive(&crate_name, name, &ast.generics, field_names, field_types, fields).into() } -fn generate_fork_fields( - crate_name: &Ident, - names: &[&Ident], - types: &[&syn::Type], -) -> TokenStream { +fn generate_fork_fields(crate_name: &Ident, names: &[&Ident], types: &[&syn::Type]) -> TokenStream { let crate_name = std::iter::repeat(crate_name); quote! { #( pub #names: Option<<#types as #crate_name::Group>::Fork>, )* } } -fn generate_base_to_fork( - fork_name: &Ident, - names: &[&Ident], -) -> TokenStream { +fn generate_base_to_fork(fork_name: &Ident, names: &[&Ident]) -> TokenStream { let names2 = names.to_vec(); - quote!{ + quote! 
{ #fork_name { #( #names: Some(self.#names2.to_fork()), )* } } } -fn generate_combine_with( - names: &[&Ident], -) -> TokenStream { +fn generate_combine_with(names: &[&Ident]) -> TokenStream { let names2 = names.to_vec(); - quote!{ + quote! { #( self.#names.combine_with(other.#names2); )* } } -fn generate_fork_to_base( - fork: &Ident, - names: &[&Ident], -) -> TokenStream { +fn generate_fork_to_base(fork: &Ident, names: &[&Ident]) -> TokenStream { let names2 = names.to_vec(); - quote!{ + quote! { Some(#fork { #( #names: self.#names2?.to_base()?, )* }) diff --git a/client/chain-spec/src/chain_spec.rs b/client/chain-spec/src/chain_spec.rs index 59b55707e182b..681ab8ea640a2 100644 --- a/client/chain-spec/src/chain_spec.rs +++ b/client/chain-spec/src/chain_spec.rs @@ -19,15 +19,20 @@ //! Substrate chain configurations. #![warn(missing_docs)] -use std::{borrow::Cow, fs::File, path::PathBuf, sync::Arc, collections::HashMap}; -use serde::{Serialize, Deserialize}; -use sp_core::{storage::{StorageKey, StorageData, ChildInfo, Storage, StorageChild}, Bytes}; -use sp_runtime::BuildStorage; -use serde_json as json; -use crate::{RuntimeGenesis, ChainType, extension::GetExtension, Properties}; +use crate::{extension::GetExtension, ChainType, Properties, RuntimeGenesis}; use sc_network::config::MultiaddrWithPeerId; use sc_telemetry::TelemetryEndpoints; -use sp_runtime::traits::{Block as BlockT, NumberFor}; +use serde::{Deserialize, Serialize}; +use serde_json as json; +use sp_core::{ + storage::{ChildInfo, Storage, StorageChild, StorageData, StorageKey}, + Bytes, +}; +use sp_runtime::{ + traits::{Block as BlockT, NumberFor}, + BuildStorage, +}; +use std::{borrow::Cow, collections::HashMap, fs::File, path::PathBuf, sync::Arc}; enum GenesisSource { File(PathBuf), @@ -56,8 +61,8 @@ impl GenesisSource { match self { Self::File(path) => { - let file = File::open(path) - .map_err(|e| format!("Error opening spec file: {}", e))?; + let file = + File::open(path).map_err(|e| format!("Error 
opening spec file: {}", e))?; let genesis: GenesisContainer = json::from_reader(file) .map_err(|e| format!("Error parsing spec file: {}", e))?; Ok(genesis.genesis) @@ -69,22 +74,25 @@ impl GenesisSource { }, Self::Factory(f) => Ok(Genesis::Runtime(f())), Self::Storage(storage) => { - let top = storage.top + let top = storage + .top .iter() .map(|(k, v)| (StorageKey(k.clone()), StorageData(v.clone()))) .collect(); - let children_default = storage.children_default + let children_default = storage + .children_default .iter() - .map(|(k, child)| - ( - StorageKey(k.clone()), - child.data + .map(|(k, child)| { + ( + StorageKey(k.clone()), + child + .data .iter() .map(|(k, v)| (StorageKey(k.clone()), StorageData(v.clone()))) - .collect() - ) - ) + .collect(), + ) + }) .collect(); Ok(Genesis::Raw(RawGenesis { top, children_default })) @@ -99,24 +107,24 @@ impl BuildStorage for ChainSpec { Genesis::Runtime(gc) => gc.build_storage(), Genesis::Raw(RawGenesis { top: map, children_default: children_map }) => Ok(Storage { top: map.into_iter().map(|(k, v)| (k.0, v.0)).collect(), - children_default: children_map.into_iter().map(|(storage_key, child_content)| { - let child_info = ChildInfo::new_default(storage_key.0.as_slice()); - ( - storage_key.0, - StorageChild { - data: child_content.into_iter().map(|(k, v)| (k.0, v.0)).collect(), - child_info, - }, - ) - }).collect(), + children_default: children_map + .into_iter() + .map(|(storage_key, child_content)| { + let child_info = ChildInfo::new_default(storage_key.0.as_slice()); + ( + storage_key.0, + StorageChild { + data: child_content.into_iter().map(|(k, v)| (k.0, v.0)).collect(), + child_info, + }, + ) + }) + .collect(), }), } } - fn assimilate_storage( - &self, - _: &mut Storage, - ) -> Result<(), String> { + fn assimilate_storage(&self, _: &mut Storage) -> Result<(), String> { Err("`assimilate_storage` not implemented for `ChainSpec`.".into()) } } @@ -181,10 +189,7 @@ pub struct ChainSpec { impl Clone for ChainSpec { fn 
clone(&self) -> Self { - ChainSpec { - client_spec: self.client_spec.clone(), - genesis: self.genesis.clone(), - } + ChainSpec { client_spec: self.client_spec.clone(), genesis: self.genesis.clone() } } } @@ -258,10 +263,7 @@ impl ChainSpec { code_substitutes: HashMap::new(), }; - ChainSpec { - client_spec, - genesis: GenesisSource::Factory(Arc::new(constructor)), - } + ChainSpec { client_spec, genesis: GenesisSource::Factory(Arc::new(constructor)) } } /// Type of the chain. @@ -281,22 +283,15 @@ impl ChainSpec { let json = json.into(); let client_spec = json::from_slice(json.as_ref()) .map_err(|e| format!("Error parsing spec file: {}", e))?; - Ok(ChainSpec { - client_spec, - genesis: GenesisSource::Binary(json), - }) + Ok(ChainSpec { client_spec, genesis: GenesisSource::Binary(json) }) } /// Parse json file into a `ChainSpec` pub fn from_json_file(path: PathBuf) -> Result { - let file = File::open(&path) - .map_err(|e| format!("Error opening spec file: {}", e))?; - let client_spec = json::from_reader(file) - .map_err(|e| format!("Error parsing spec file: {}", e))?; - Ok(ChainSpec { - client_spec, - genesis: GenesisSource::File(path), - }) + let file = File::open(&path).map_err(|e| format!("Error opening spec file: {}", e))?; + let client_spec = + json::from_reader(file).map_err(|e| format!("Error parsing spec file: {}", e))?; + Ok(ChainSpec { client_spec, genesis: GenesisSource::File(path) }) } } @@ -312,33 +307,34 @@ impl ChainSpec { let genesis = match (raw, self.genesis.resolve()?) 
{ (true, Genesis::Runtime(g)) => { let storage = g.build_storage()?; - let top = storage.top.into_iter() - .map(|(k, v)| (StorageKey(k), StorageData(v))) - .collect(); - let children_default = storage.children_default.into_iter() - .map(|(sk, child)| ( - StorageKey(sk), - child.data.into_iter() - .map(|(k, v)| (StorageKey(k), StorageData(v))) - .collect(), - )) + let top = + storage.top.into_iter().map(|(k, v)| (StorageKey(k), StorageData(v))).collect(); + let children_default = storage + .children_default + .into_iter() + .map(|(sk, child)| { + ( + StorageKey(sk), + child + .data + .into_iter() + .map(|(k, v)| (StorageKey(k), StorageData(v))) + .collect(), + ) + }) .collect(); Genesis::Raw(RawGenesis { top, children_default }) }, (_, genesis) => genesis, }; - Ok(JsonContainer { - client_spec: self.client_spec.clone(), - genesis, - }) + Ok(JsonContainer { client_spec: self.client_spec.clone(), genesis }) } /// Dump to json string. pub fn as_json(&self, raw: bool) -> Result { let container = self.json_container(raw)?; - json::to_string_pretty(&container) - .map_err(|e| format!("Error generating spec json: {}", e)) + json::to_string_pretty(&container).map_err(|e| format!("Error generating spec json: {}", e)) } } @@ -404,7 +400,11 @@ where } fn code_substitutes(&self) -> std::collections::HashMap> { - self.client_spec.code_substitutes.iter().map(|(h, c)| (h.clone(), c.0.clone())).collect() + self.client_spec + .code_substitutes + .iter() + .map(|(h, c)| (h.clone(), c.0.clone())) + .collect() } } @@ -417,7 +417,8 @@ pub struct LightSyncState { /// The babe weight of the finalized block. pub babe_finalized_block_weight: sp_consensus_babe::BabeBlockWeight, /// The authority set for grandpa. 
- pub grandpa_authority_set: sc_finality_grandpa::AuthoritySet<::Hash, NumberFor>, + pub grandpa_authority_set: + sc_finality_grandpa::AuthoritySet<::Hash, NumberFor>, } impl LightSyncState { @@ -427,25 +428,25 @@ impl LightSyncState { SerializableLightSyncState { finalized_block_header: StorageData(self.finalized_block_header.encode()), - babe_epoch_changes: - StorageData(self.babe_epoch_changes.encode()), - babe_finalized_block_weight: - self.babe_finalized_block_weight, - grandpa_authority_set: - StorageData(self.grandpa_authority_set.encode()), + babe_epoch_changes: StorageData(self.babe_epoch_changes.encode()), + babe_finalized_block_weight: self.babe_finalized_block_weight, + grandpa_authority_set: StorageData(self.grandpa_authority_set.encode()), } } /// Convert from a `SerializableLightSyncState`. - pub fn from_serializable(serialized: &SerializableLightSyncState) -> Result { + pub fn from_serializable( + serialized: &SerializableLightSyncState, + ) -> Result { Ok(Self { - finalized_block_header: codec::Decode::decode(&mut &serialized.finalized_block_header.0[..])?, - babe_epoch_changes: - codec::Decode::decode(&mut &serialized.babe_epoch_changes.0[..])?, - babe_finalized_block_weight: - serialized.babe_finalized_block_weight, - grandpa_authority_set: - codec::Decode::decode(&mut &serialized.grandpa_authority_set.0[..])?, + finalized_block_header: codec::Decode::decode( + &mut &serialized.finalized_block_header.0[..], + )?, + babe_epoch_changes: codec::Decode::decode(&mut &serialized.babe_epoch_changes.0[..])?, + babe_finalized_block_weight: serialized.babe_finalized_block_weight, + grandpa_authority_set: codec::Decode::decode( + &mut &serialized.grandpa_authority_set.0[..], + )?, }) } } @@ -469,12 +470,9 @@ mod tests { struct Genesis(HashMap); impl BuildStorage for Genesis { - fn assimilate_storage( - &self, - storage: &mut Storage, - ) -> Result<(), String> { + fn assimilate_storage(&self, storage: &mut Storage) -> Result<(), String> { storage.top.extend( 
- self.0.iter().map(|(a, b)| (a.clone().into_bytes(), b.clone().into_bytes())) + self.0.iter().map(|(a, b)| (a.clone().into_bytes(), b.clone().into_bytes())), ); Ok(()) } @@ -485,11 +483,10 @@ mod tests { #[test] fn should_deserialize_example_chain_spec() { let spec1 = TestSpec::from_json_bytes(Cow::Owned( - include_bytes!("../res/chain_spec.json").to_vec() - )).unwrap(); - let spec2 = TestSpec::from_json_file( - PathBuf::from("./res/chain_spec.json") - ).unwrap(); + include_bytes!("../res/chain_spec.json").to_vec(), + )) + .unwrap(); + let spec2 = TestSpec::from_json_file(PathBuf::from("./res/chain_spec.json")).unwrap(); assert_eq!(spec1.as_json(false), spec2.as_json(false)); assert_eq!(spec2.chain_type(), ChainType::Live) @@ -506,8 +503,9 @@ mod tests { #[test] fn should_deserialize_chain_spec_with_extensions() { let spec = TestSpec2::from_json_bytes(Cow::Owned( - include_bytes!("../res/chain_spec2.json").to_vec() - )).unwrap(); + include_bytes!("../res/chain_spec2.json").to_vec(), + )) + .unwrap(); assert_eq!(spec.extensions().my_property, "Test Extension"); } diff --git a/client/chain-spec/src/extension.rs b/client/chain-spec/src/extension.rs index 2a6126e4ce2ca..665f51303b6a6 100644 --- a/client/chain-spec/src/extension.rs +++ b/client/chain-spec/src/extension.rs @@ -18,19 +18,21 @@ //! Chain Spec extensions helpers. -use std::fmt::Debug; -use std::any::{TypeId, Any}; +use std::{ + any::{Any, TypeId}, + fmt::Debug, +}; use std::collections::BTreeMap; -use serde::{Serialize, Deserialize, de::DeserializeOwned}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; /// A `ChainSpec` extension. /// /// This trait is implemented automatically by `ChainSpecGroup` macro. pub trait Group: Clone + Sized { /// An associated type containing fork definition. - type Fork: Fork; + type Fork: Fork; /// Convert to fork type. 
fn to_fork(self) -> Self::Fork; @@ -45,7 +47,7 @@ pub trait Group: Clone + Sized { /// a complete set of parameters pub trait Fork: Serialize + DeserializeOwned + Clone + Sized { /// A base `Group` type. - type Base: Group; + type Base: Group; /// Combine with another struct. /// @@ -128,7 +130,8 @@ pub trait Extension: Serialize + DeserializeOwned + Clone { fn get_any(&self, t: TypeId) -> &dyn Any; /// Get forkable extensions of specific type. - fn forks(&self) -> Option> where + fn forks(&self) -> Option> + where BlockNumber: Ord + Clone + 'static, T: Group + 'static, ::Extension: Extension, @@ -142,8 +145,12 @@ pub trait Extension: Serialize + DeserializeOwned + Clone { impl Extension for crate::NoExtension { type Forks = Self; - fn get(&self) -> Option<&T> { None } - fn get_any(&self, _t: TypeId) -> &dyn Any { self } + fn get(&self) -> Option<&T> { + None + } + fn get_any(&self, _t: TypeId) -> &dyn Any { + self + } } pub trait IsForks { @@ -166,14 +173,12 @@ pub struct Forks { impl Default for Forks { fn default() -> Self { - Self { - base: Default::default(), - forks: Default::default(), - } + Self { base: Default::default(), forks: Default::default() } } } -impl Forks where +impl Forks +where T::Fork: Debug, { /// Create new fork definition given the base and the forks. @@ -195,7 +200,8 @@ impl Forks where } } -impl IsForks for Forks where +impl IsForks for Forks +where B: Ord + 'static, T: Group + 'static, { @@ -203,29 +209,31 @@ impl IsForks for Forks where type Extension = T; } -impl Forks where +impl Forks +where T::Fork: Extension, { /// Get forks definition for a subset of this extension. /// /// Returns the `Forks` struct, but limited to a particular type /// within the extension. 
- pub fn for_type(&self) -> Option> where + pub fn for_type(&self) -> Option> + where X: Group + 'static, { let base = self.base.get::()?.clone(); - let forks = self.forks.iter().filter_map(|(k, v)| { - Some((k.clone(), v.get::>()?.clone()?)) - }).collect(); - - Some(Forks { - base, - forks, - }) + let forks = self + .forks + .iter() + .filter_map(|(k, v)| Some((k.clone(), v.get::>()?.clone()?))) + .collect(); + + Some(Forks { base, forks }) } } -impl Extension for Forks where +impl Extension for Forks +where B: Serialize + DeserializeOwned + Ord + Clone + 'static, E: Extension + Group + 'static, { @@ -245,7 +253,8 @@ impl Extension for Forks where } } - fn forks(&self) -> Option> where + fn forks(&self) -> Option> + where BlockNumber: Ord + Clone + 'static, T: Group + 'static, ::Extension: Extension, @@ -266,7 +275,7 @@ pub trait GetExtension { fn get_any(&self, t: TypeId) -> &dyn Any; } -impl GetExtension for E { +impl GetExtension for E { fn get_any(&self, t: TypeId) -> &dyn Any { Extension::get_any(self, t) } @@ -281,7 +290,7 @@ pub fn get_extension(e: &dyn GetExtension) -> Option<&T> { #[cfg(test)] mod tests { use super::*; - use sc_chain_spec_derive::{ChainSpecGroup, ChainSpecExtension}; + use sc_chain_spec_derive::{ChainSpecExtension, ChainSpecGroup}; // Make the proc macro work for tests and doc tests. 
use crate as sc_chain_spec; @@ -297,7 +306,9 @@ mod tests { pub test: u8, } - #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ChainSpecGroup, ChainSpecExtension)] + #[derive( + Debug, Clone, PartialEq, Serialize, Deserialize, ChainSpecGroup, ChainSpecExtension, + )] #[serde(deny_unknown_fields)] pub struct Extensions { pub ext1: Extension1, @@ -315,11 +326,12 @@ mod tests { #[test] fn forks_should_work_correctly() { - use super::Extension as _ ; + use super::Extension as _; // We first need to deserialize into a `Value` because of the following bug: // https://github.com/serde-rs/json/issues/505 - let ext_val: serde_json::Value = serde_json::from_str(r#" + let ext_val: serde_json::Value = serde_json::from_str( + r#" { "test": 11, "forkable": { @@ -342,40 +354,40 @@ mod tests { } } } - "#).unwrap(); + "#, + ) + .unwrap(); let ext: Ext2 = serde_json::from_value(ext_val).unwrap(); - assert_eq!(ext.get::(), Some(&Extension1 { - test: 11 - })); + assert_eq!(ext.get::(), Some(&Extension1 { test: 11 })); // get forks definition let forks = ext.get::>().unwrap(); - assert_eq!(forks.at_block(0), Extensions { - ext1: Extension1 { test: 15 }, - ext2: Extension2 { test: 123 }, - }); - assert_eq!(forks.at_block(1), Extensions { - ext1: Extension1 { test: 5 }, - ext2: Extension2 { test: 123 }, - }); - assert_eq!(forks.at_block(2), Extensions { - ext1: Extension1 { test: 5 }, - ext2: Extension2 { test: 5 }, - }); - assert_eq!(forks.at_block(4), Extensions { - ext1: Extension1 { test: 5 }, - ext2: Extension2 { test: 5 }, - }); - assert_eq!(forks.at_block(5), Extensions { - ext1: Extension1 { test: 5 }, - ext2: Extension2 { test: 1 }, - }); - assert_eq!(forks.at_block(10), Extensions { - ext1: Extension1 { test: 5 }, - ext2: Extension2 { test: 1 }, - }); + assert_eq!( + forks.at_block(0), + Extensions { ext1: Extension1 { test: 15 }, ext2: Extension2 { test: 123 } } + ); + assert_eq!( + forks.at_block(1), + Extensions { ext1: Extension1 { test: 5 }, ext2: Extension2 { 
test: 123 } } + ); + assert_eq!( + forks.at_block(2), + Extensions { ext1: Extension1 { test: 5 }, ext2: Extension2 { test: 5 } } + ); + assert_eq!( + forks.at_block(4), + Extensions { ext1: Extension1 { test: 5 }, ext2: Extension2 { test: 5 } } + ); + assert_eq!( + forks.at_block(5), + Extensions { ext1: Extension1 { test: 5 }, ext2: Extension2 { test: 1 } } + ); + assert_eq!( + forks.at_block(10), + Extensions { ext1: Extension1 { test: 5 }, ext2: Extension2 { test: 1 } } + ); assert!(forks.at_block(10).get::().is_some()); // filter forks for `Extension2` diff --git a/client/chain-spec/src/lib.rs b/client/chain-spec/src/lib.rs index 1bfa1808ee556..0011aa4532329 100644 --- a/client/chain-spec/src/lib.rs +++ b/client/chain-spec/src/lib.rs @@ -111,16 +111,16 @@ mod chain_spec; mod extension; pub use chain_spec::{ - ChainSpec as GenericChainSpec, NoExtension, LightSyncState, SerializableLightSyncState, + ChainSpec as GenericChainSpec, LightSyncState, NoExtension, SerializableLightSyncState, }; -pub use extension::{Group, Fork, Forks, Extension, GetExtension, get_extension}; +pub use extension::{get_extension, Extension, Fork, Forks, GetExtension, Group}; pub use sc_chain_spec_derive::{ChainSpecExtension, ChainSpecGroup}; -use serde::{Serialize, de::DeserializeOwned}; -use sp_runtime::BuildStorage; use sc_network::config::MultiaddrWithPeerId; use sc_telemetry::TelemetryEndpoints; +use serde::{de::DeserializeOwned, Serialize}; use sp_core::storage::Storage; +use sp_runtime::BuildStorage; /// The type of a chain. 
/// diff --git a/client/cli/src/arg_enums.rs b/client/cli/src/arg_enums.rs index d9a4210376298..83b1c57e071a4 100644 --- a/client/cli/src/arg_enums.rs +++ b/client/cli/src/arg_enums.rs @@ -74,9 +74,8 @@ impl WasmExecutionMethod { impl Into for WasmExecutionMethod { fn into(self) -> sc_service::config::WasmExecutionMethod { match self { - WasmExecutionMethod::Interpreted => { - sc_service::config::WasmExecutionMethod::Interpreted - } + WasmExecutionMethod::Interpreted => + sc_service::config::WasmExecutionMethod::Interpreted, #[cfg(feature = "wasmtime")] WasmExecutionMethod::Compiled => sc_service::config::WasmExecutionMethod::Compiled, #[cfg(not(feature = "wasmtime"))] @@ -250,14 +249,10 @@ impl Into for SyncMode { fn into(self) -> sc_network::config::SyncMode { match self { SyncMode::Full => sc_network::config::SyncMode::Full, - SyncMode::Fast => sc_network::config::SyncMode::Fast { - skip_proofs: false, - storage_chain_mode: false, - }, - SyncMode::FastUnsafe => sc_network::config::SyncMode::Fast { - skip_proofs: true, - storage_chain_mode: false, - }, + SyncMode::Fast => + sc_network::config::SyncMode::Fast { skip_proofs: false, storage_chain_mode: false }, + SyncMode::FastUnsafe => + sc_network::config::SyncMode::Fast { skip_proofs: true, storage_chain_mode: false }, } } } diff --git a/client/cli/src/commands/build_spec_cmd.rs b/client/cli/src/commands/build_spec_cmd.rs index 78ad3b64724d9..75fdf07643ee2 100644 --- a/client/cli/src/commands/build_spec_cmd.rs +++ b/client/cli/src/commands/build_spec_cmd.rs @@ -16,15 +16,19 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::error; -use crate::params::NodeKeyParams; -use crate::params::SharedParams; -use crate::CliConfiguration; +use crate::{ + error, + params::{NodeKeyParams, SharedParams}, + CliConfiguration, +}; use log::info; use sc_network::config::build_multiaddr; -use sc_service::{config::{MultiaddrWithPeerId, NetworkConfiguration}, ChainSpec}; -use structopt::StructOpt; +use sc_service::{ + config::{MultiaddrWithPeerId, NetworkConfiguration}, + ChainSpec, +}; use std::io::Write; +use structopt::StructOpt; /// The `build-spec` command used to build a specification. #[derive(Debug, StructOpt, Clone)] diff --git a/client/cli/src/commands/check_block_cmd.rs b/client/cli/src/commands/check_block_cmd.rs index a47245de0f78c..07a76319dca3f 100644 --- a/client/cli/src/commands/check_block_cmd.rs +++ b/client/cli/src/commands/check_block_cmd.rs @@ -17,7 +17,9 @@ // along with this program. If not, see . use crate::{ - CliConfiguration, error, params::{ImportParams, SharedParams, BlockNumberOrHash}, + error, + params::{BlockNumberOrHash, ImportParams, SharedParams}, + CliConfiguration, }; use sc_client_api::{BlockBackend, UsageProvider}; use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; @@ -48,11 +50,7 @@ pub struct CheckBlockCmd { impl CheckBlockCmd { /// Run the check-block command - pub async fn run( - &self, - client: Arc, - import_queue: IQ, - ) -> error::Result<()> + pub async fn run(&self, client: Arc, import_queue: IQ) -> error::Result<()> where B: BlockT + for<'de> serde::Deserialize<'de>, C: BlockBackend + UsageProvider + Send + Sync + 'static, diff --git a/client/cli/src/commands/export_blocks_cmd.rs b/client/cli/src/commands/export_blocks_cmd.rs index 4153c80a0545e..0ed8e3ff3591a 100644 --- a/client/cli/src/commands/export_blocks_cmd.rs +++ b/client/cli/src/commands/export_blocks_cmd.rs @@ -16,21 +16,16 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::error; -use crate::params::{GenericNumber, DatabaseParams, PruningParams, SharedParams}; -use crate::CliConfiguration; -use log::info; -use sc_service::{ - config::DatabaseConfig, chain_ops::export_blocks, +use crate::{ + error, + params::{DatabaseParams, GenericNumber, PruningParams, SharedParams}, + CliConfiguration, }; +use log::info; use sc_client_api::{BlockBackend, UsageProvider}; +use sc_service::{chain_ops::export_blocks, config::DatabaseConfig}; use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; -use std::fmt::Debug; -use std::fs; -use std::io; -use std::path::PathBuf; -use std::str::FromStr; -use std::sync::Arc; +use std::{fmt::Debug, fs, io, path::PathBuf, str::FromStr, sync::Arc}; use structopt::StructOpt; /// The `export-blocks` command used to export blocks. @@ -95,9 +90,7 @@ impl ExportBlocksCmd { None => Box::new(io::stdout()), }; - export_blocks(client, file, from.into(), to, binary) - .await - .map_err(Into::into) + export_blocks(client, file, from.into(), to, binary).await.map_err(Into::into) } } diff --git a/client/cli/src/commands/export_state_cmd.rs b/client/cli/src/commands/export_state_cmd.rs index e154c3a502217..36eabd2c24f5c 100644 --- a/client/cli/src/commands/export_state_cmd.rs +++ b/client/cli/src/commands/export_state_cmd.rs @@ -17,13 +17,15 @@ // along with this program. If not, see . use crate::{ - CliConfiguration, error, params::{PruningParams, SharedParams, BlockNumberOrHash}, + error, + params::{BlockNumberOrHash, PruningParams, SharedParams}, + CliConfiguration, }; use log::info; +use sc_client_api::{StorageProvider, UsageProvider}; use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; -use std::{fmt::Debug, str::FromStr, io::Write, sync::Arc}; +use std::{fmt::Debug, io::Write, str::FromStr, sync::Arc}; use structopt::StructOpt; -use sc_client_api::{StorageProvider, UsageProvider}; /// The `export-state` command used to export the state of a given block into /// a chain spec. 
diff --git a/client/cli/src/commands/generate.rs b/client/cli/src/commands/generate.rs index 42214d2f5e458..7032ebd72e0c7 100644 --- a/client/cli/src/commands/generate.rs +++ b/client/cli/src/commands/generate.rs @@ -16,12 +16,12 @@ // limitations under the License. //! Implementation of the `generate` subcommand -use bip39::{MnemonicType, Mnemonic, Language}; -use structopt::StructOpt; use crate::{ - utils::print_from_uri, KeystoreParams, Error, - with_crypto_scheme, NetworkSchemeFlag, OutputTypeFlag, CryptoSchemeFlag, + utils::print_from_uri, with_crypto_scheme, CryptoSchemeFlag, Error, KeystoreParams, + NetworkSchemeFlag, OutputTypeFlag, }; +use bip39::{Language, Mnemonic, MnemonicType}; +use structopt::StructOpt; /// The `generate` command #[derive(Debug, StructOpt, Clone)] @@ -52,12 +52,11 @@ impl GenerateCmd { /// Run the command pub fn run(&self) -> Result<(), Error> { let words = match self.words { - Some(words) => { - MnemonicType::for_word_count(words) - .map_err(|_| { - Error::Input("Invalid number of words given for phrase: must be 12/15/18/21/24".into()) - })? - }, + Some(words) => MnemonicType::for_word_count(words).map_err(|_| { + Error::Input( + "Invalid number of words given for phrase: must be 12/15/18/21/24".into(), + ) + })?, None => MnemonicType::Words12, }; let mnemonic = Mnemonic::new(words, Language::English); diff --git a/client/cli/src/commands/generate_node_key.rs b/client/cli/src/commands/generate_node_key.rs index ec22c6298adb6..74a4197f36621 100644 --- a/client/cli/src/commands/generate_node_key.rs +++ b/client/cli/src/commands/generate_node_key.rs @@ -18,9 +18,9 @@ //! 
Implementation of the `generate-node-key` subcommand use crate::Error; -use structopt::StructOpt; -use std::{path::PathBuf, fs}; use libp2p::identity::{ed25519 as libp2p_ed25519, PublicKey}; +use std::{fs, path::PathBuf}; +use structopt::StructOpt; /// The `generate-node-key` command #[derive(Debug, StructOpt)] @@ -59,15 +59,14 @@ impl GenerateNodeKeyCmd { #[cfg(test)] mod tests { use super::*; - use tempfile::Builder; use std::io::Read; + use tempfile::Builder; #[test] fn generate_node_key() { let mut file = Builder::new().prefix("keyfile").tempfile().unwrap(); let file_path = file.path().display().to_string(); - let generate = - GenerateNodeKeyCmd::from_iter(&["generate-node-key", "--file", &file_path]); + let generate = GenerateNodeKeyCmd::from_iter(&["generate-node-key", "--file", &file_path]); assert!(generate.run().is_ok()); let mut buf = String::new(); assert!(file.read_to_string(&mut buf).is_ok()); diff --git a/client/cli/src/commands/import_blocks_cmd.rs b/client/cli/src/commands/import_blocks_cmd.rs index 89f70d06813ce..9b211b88d5563 100644 --- a/client/cli/src/commands/import_blocks_cmd.rs +++ b/client/cli/src/commands/import_blocks_cmd.rs @@ -16,19 +16,22 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use crate::error; -use crate::params::ImportParams; -use crate::params::SharedParams; -use crate::CliConfiguration; +use crate::{ + error, + params::{ImportParams, SharedParams}, + CliConfiguration, +}; +use sc_client_api::UsageProvider; use sc_service::chain_ops::import_blocks; use sp_runtime::traits::Block as BlockT; -use std::fmt::Debug; -use std::fs; -use std::io::{self, Read, Seek}; -use std::path::PathBuf; -use std::sync::Arc; +use std::{ + fmt::Debug, + fs, + io::{self, Read, Seek}, + path::PathBuf, + sync::Arc, +}; use structopt::StructOpt; -use sc_client_api::UsageProvider; /// The `import-blocks` command used to import blocks. 
#[derive(Debug, StructOpt)] @@ -63,11 +66,7 @@ impl ReadPlusSeek for T {} impl ImportBlocksCmd { /// Run the import-blocks command - pub async fn run( - &self, - client: Arc, - import_queue: IQ, - ) -> error::Result<()> + pub async fn run(&self, client: Arc, import_queue: IQ) -> error::Result<()> where C: UsageProvider + Send + Sync + 'static, B: BlockT + for<'de> serde::Deserialize<'de>, @@ -79,7 +78,7 @@ impl ImportBlocksCmd { let mut buffer = Vec::new(); io::stdin().read_to_end(&mut buffer)?; Box::new(io::Cursor::new(buffer)) - } + }, }; import_blocks(client, import_queue, file, false, self.binary) diff --git a/client/cli/src/commands/insert_key.rs b/client/cli/src/commands/insert_key.rs index f166db85c1564..05055dc53c1e2 100644 --- a/client/cli/src/commands/insert_key.rs +++ b/client/cli/src/commands/insert_key.rs @@ -18,22 +18,18 @@ //! Implementation of the `insert` subcommand use crate::{ - Error, KeystoreParams, CryptoSchemeFlag, SharedParams, utils, with_crypto_scheme, - SubstrateCli, + utils, with_crypto_scheme, CryptoSchemeFlag, Error, KeystoreParams, SharedParams, SubstrateCli, }; -use std::{sync::Arc, convert::TryFrom}; -use structopt::StructOpt; -use sp_core::{crypto::KeyTypeId, crypto::SecretString}; -use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; use sc_keystore::LocalKeystore; -use sc_service::config::{KeystoreConfig, BasePath}; +use sc_service::config::{BasePath, KeystoreConfig}; +use sp_core::crypto::{KeyTypeId, SecretString}; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; +use std::{convert::TryFrom, sync::Arc}; +use structopt::StructOpt; /// The `insert` command #[derive(Debug, StructOpt, Clone)] -#[structopt( - name = "insert", - about = "Insert a key to the keystore of a node." -)] +#[structopt(name = "insert", about = "Insert a key to the keystore of a node.")] pub struct InsertKeyCmd { /// The secret key URI. /// If the value is a file, the file content is used as URI. 
@@ -62,7 +58,8 @@ impl InsertKeyCmd { /// Run the command pub fn run(&self, cli: &C) -> Result<(), Error> { let suri = utils::read_uri(self.suri.as_ref())?; - let base_path = self.shared_params + let base_path = self + .shared_params .base_path() .unwrap_or_else(|| BasePath::from_project("", "", &C::executable_name())); let chain_id = self.shared_params.chain_id(self.shared_params.is_dev()); @@ -78,10 +75,11 @@ impl InsertKeyCmd { let keystore: SyncCryptoStorePtr = Arc::new(LocalKeystore::open(path, password)?); (keystore, public) }, - _ => unreachable!("keystore_config always returns path and password; qed") + _ => unreachable!("keystore_config always returns path and password; qed"), }; - let key_type = KeyTypeId::try_from(self.key_type.as_str()).map_err(|_| Error::KeyTypeInvalid)?; + let key_type = + KeyTypeId::try_from(self.key_type.as_str()).map_err(|_| Error::KeyTypeInvalid)?; SyncCryptoStore::insert_unknown(&*keystore, key_type, &suri, &public[..]) .map_err(|_| Error::KeyStoreOperation)?; @@ -98,10 +96,10 @@ fn to_vec(uri: &str, pass: Option) -> Result std::result::Result, String> { - Ok( - Box::new( - GenericChainSpec::from_genesis( - "test", - "test_id", - ChainType::Development, - || unimplemented!("Not required in tests"), - Vec::new(), - None, - None, - None, - NoExtension::None, - ), - ), - ) + Ok(Box::new(GenericChainSpec::from_genesis( + "test", + "test_id", + ChainType::Development, + || unimplemented!("Not required in tests"), + Vec::new(), + None, + None, + None, + NoExtension::None, + ))) } } @@ -159,15 +153,20 @@ mod tests { let path_str = format!("{}", path.path().display()); let (key, uri, _) = Pair::generate_with_phrase(None); - let inspect = InsertKeyCmd::from_iter( - &["insert-key", "-d", &path_str, "--key-type", "test", "--suri", &uri], - ); + let inspect = InsertKeyCmd::from_iter(&[ + "insert-key", + "-d", + &path_str, + "--key-type", + "test", + "--suri", + &uri, + ]); assert!(inspect.run(&Cli).is_ok()); - let keystore = 
LocalKeystore::open( - path.path().join("chains").join("test_id").join("keystore"), - None, - ).unwrap(); + let keystore = + LocalKeystore::open(path.path().join("chains").join("test_id").join("keystore"), None) + .unwrap(); assert!(keystore.has_keys(&[(key.public().to_raw_vec(), KeyTypeId(*b"test"))])); } } diff --git a/client/cli/src/commands/inspect_key.rs b/client/cli/src/commands/inspect_key.rs index a60b6cd93a760..277c9015f4daf 100644 --- a/client/cli/src/commands/inspect_key.rs +++ b/client/cli/src/commands/inspect_key.rs @@ -18,8 +18,8 @@ //! Implementation of the `inspect` subcommand use crate::{ - utils::{self, print_from_uri, print_from_public}, KeystoreParams, - with_crypto_scheme, NetworkSchemeFlag, OutputTypeFlag, CryptoSchemeFlag, Error, + utils::{self, print_from_public, print_from_uri}, + with_crypto_scheme, CryptoSchemeFlag, Error, KeystoreParams, NetworkSchemeFlag, OutputTypeFlag, }; use structopt::StructOpt; /// The `inspect` command @@ -103,8 +103,7 @@ mod tests { "remember fiber forum demise paper uniform squirrel feel access exclude casual effort"; let seed = "0xad1fb77243b536b90cfe5f0d351ab1b1ac40e3890b41dc64f766ee56340cfca5"; - let inspect = - InspectKeyCmd::from_iter(&["inspect-key", words, "--password", "12345"]); + let inspect = InspectKeyCmd::from_iter(&["inspect-key", words, "--password", "12345"]); assert!(inspect.run().is_ok()); let inspect = InspectKeyCmd::from_iter(&["inspect-key", seed]); diff --git a/client/cli/src/commands/inspect_node_key.rs b/client/cli/src/commands/inspect_node_key.rs index 4db32aefb5fbb..92a71f8975052 100644 --- a/client/cli/src/commands/inspect_node_key.rs +++ b/client/cli/src/commands/inspect_node_key.rs @@ -18,9 +18,8 @@ //! 
Implementation of the `inspect-node-key` subcommand use crate::{Error, NetworkSchemeFlag}; -use std::fs; -use libp2p::identity::{PublicKey, ed25519}; -use std::path::PathBuf; +use libp2p::identity::{ed25519, PublicKey}; +use std::{fs, path::PathBuf}; use structopt::StructOpt; /// The `inspect-node-key` command @@ -42,10 +41,10 @@ pub struct InspectNodeKeyCmd { impl InspectNodeKeyCmd { /// runs the command pub fn run(&self) -> Result<(), Error> { - let mut file_content = hex::decode(fs::read(&self.file)?) - .map_err(|_| "failed to decode secret as hex")?; - let secret = ed25519::SecretKey::from_bytes(&mut file_content) - .map_err(|_| "Bad node key file")?; + let mut file_content = + hex::decode(fs::read(&self.file)?).map_err(|_| "failed to decode secret as hex")?; + let secret = + ed25519::SecretKey::from_bytes(&mut file_content).map_err(|_| "Bad node key file")?; let keypair = ed25519::Keypair::from(secret); let peer_id = PublicKey::Ed25519(keypair.public()).into_peer_id(); @@ -58,8 +57,7 @@ impl InspectNodeKeyCmd { #[cfg(test)] mod tests { - use super::*; - use super::super::GenerateNodeKeyCmd; + use super::{super::GenerateNodeKeyCmd, *}; #[test] fn inspect_node_key() { diff --git a/client/cli/src/commands/key.rs b/client/cli/src/commands/key.rs index 34602657da945..8e1103a8ca512 100644 --- a/client/cli/src/commands/key.rs +++ b/client/cli/src/commands/key.rs @@ -21,11 +21,8 @@ use crate::{Error, SubstrateCli}; use structopt::StructOpt; use super::{ - insert_key::InsertKeyCmd, - inspect_key::InspectKeyCmd, - generate::GenerateCmd, - inspect_node_key::InspectNodeKeyCmd, - generate_node_key::GenerateNodeKeyCmd, + generate::GenerateCmd, generate_node_key::GenerateNodeKeyCmd, insert_key::InsertKeyCmd, + inspect_key::InspectKeyCmd, inspect_node_key::InspectNodeKeyCmd, }; /// Key utilities for the cli. 
diff --git a/client/cli/src/commands/mod.rs b/client/cli/src/commands/mod.rs index 8c0d6acd6a511..9e7c5689b49c8 100644 --- a/client/cli/src/commands/mod.rs +++ b/client/cli/src/commands/mod.rs @@ -19,37 +19,26 @@ mod build_spec_cmd; mod check_block_cmd; mod export_blocks_cmd; mod export_state_cmd; -mod import_blocks_cmd; -mod purge_chain_cmd; -mod sign; -mod verify; -mod vanity; -mod revert_cmd; -mod run_cmd; -mod generate_node_key; mod generate; +mod generate_node_key; +mod import_blocks_cmd; mod insert_key; -mod inspect_node_key; mod inspect_key; +mod inspect_node_key; mod key; +mod purge_chain_cmd; +mod revert_cmd; +mod run_cmd; +mod sign; pub mod utils; +mod vanity; +mod verify; pub use self::{ - build_spec_cmd::BuildSpecCmd, - check_block_cmd::CheckBlockCmd, - export_blocks_cmd::ExportBlocksCmd, - export_state_cmd::ExportStateCmd, - import_blocks_cmd::ImportBlocksCmd, - purge_chain_cmd::PurgeChainCmd, - sign::SignCmd, - generate::GenerateCmd, - insert_key::InsertKeyCmd, - inspect_key::InspectKeyCmd, - generate_node_key::GenerateNodeKeyCmd, - inspect_node_key::InspectNodeKeyCmd, - key::KeySubcommand, - vanity::VanityCmd, - verify::VerifyCmd, - revert_cmd::RevertCmd, - run_cmd::RunCmd, + build_spec_cmd::BuildSpecCmd, check_block_cmd::CheckBlockCmd, + export_blocks_cmd::ExportBlocksCmd, export_state_cmd::ExportStateCmd, generate::GenerateCmd, + generate_node_key::GenerateNodeKeyCmd, import_blocks_cmd::ImportBlocksCmd, + insert_key::InsertKeyCmd, inspect_key::InspectKeyCmd, inspect_node_key::InspectNodeKeyCmd, + key::KeySubcommand, purge_chain_cmd::PurgeChainCmd, revert_cmd::RevertCmd, run_cmd::RunCmd, + sign::SignCmd, vanity::VanityCmd, verify::VerifyCmd, }; diff --git a/client/cli/src/commands/purge_chain_cmd.rs b/client/cli/src/commands/purge_chain_cmd.rs index c61e21a6a5ad0..590046aa779bc 100644 --- a/client/cli/src/commands/purge_chain_cmd.rs +++ b/client/cli/src/commands/purge_chain_cmd.rs @@ -16,13 +16,17 @@ // You should have received a copy of the GNU 
General Public License // along with this program. If not, see . -use crate::error; -use crate::params::{DatabaseParams, SharedParams}; -use crate::CliConfiguration; +use crate::{ + error, + params::{DatabaseParams, SharedParams}, + CliConfiguration, +}; use sc_service::DatabaseConfig; -use std::fmt::Debug; -use std::fs; -use std::io::{self, Write}; +use std::{ + fmt::Debug, + fs, + io::{self, Write}, +}; use structopt::StructOpt; /// The `purge-chain` command used to remove the whole chain. @@ -44,10 +48,9 @@ pub struct PurgeChainCmd { impl PurgeChainCmd { /// Run the purge command pub fn run(&self, database_config: DatabaseConfig) -> error::Result<()> { - let db_path = database_config.path() - .ok_or_else(|| - error::Error::Input("Cannot purge custom database implementation".into()) - )?; + let db_path = database_config.path().ok_or_else(|| { + error::Error::Input("Cannot purge custom database implementation".into()) + })?; if !self.yes { print!("Are you sure to remove {:?}? [y/N]: ", &db_path); @@ -61,7 +64,7 @@ impl PurgeChainCmd { Some('y') | Some('Y') => {}, _ => { println!("Aborted"); - return Ok(()); + return Ok(()) }, } } diff --git a/client/cli/src/commands/revert_cmd.rs b/client/cli/src/commands/revert_cmd.rs index 2745ce2c65241..9ad49a03aa5fd 100644 --- a/client/cli/src/commands/revert_cmd.rs +++ b/client/cli/src/commands/revert_cmd.rs @@ -16,16 +16,16 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::error; -use crate::params::{GenericNumber, PruningParams, SharedParams}; -use crate::CliConfiguration; +use crate::{ + error, + params::{GenericNumber, PruningParams, SharedParams}, + CliConfiguration, +}; +use sc_client_api::{Backend, UsageProvider}; use sc_service::chain_ops::revert_chain; use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; -use std::fmt::Debug; -use std::str::FromStr; -use std::sync::Arc; +use std::{fmt::Debug, str::FromStr, sync::Arc}; use structopt::StructOpt; -use sc_client_api::{Backend, UsageProvider}; /// The `revert` command used revert the chain to a previous state. #[derive(Debug, StructOpt)] @@ -45,11 +45,7 @@ pub struct RevertCmd { impl RevertCmd { /// Run the revert command - pub async fn run( - &self, - client: Arc, - backend: Arc, - ) -> error::Result<()> + pub async fn run(&self, client: Arc, backend: Arc) -> error::Result<()> where B: BlockT, BA: Backend, diff --git a/client/cli/src/commands/run_cmd.rs b/client/cli/src/commands/run_cmd.rs index 285ffc9fdca16..2b5a3632543bd 100644 --- a/client/cli/src/commands/run_cmd.rs +++ b/client/cli/src/commands/run_cmd.rs @@ -16,15 +16,15 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::arg_enums::RpcMethods; -use crate::error::{Error, Result}; -use crate::params::ImportParams; -use crate::params::KeystoreParams; -use crate::params::NetworkParams; -use crate::params::OffchainWorkerParams; -use crate::params::SharedParams; -use crate::params::TransactionPoolParams; -use crate::CliConfiguration; +use crate::{ + arg_enums::RpcMethods, + error::{Error, Result}, + params::{ + ImportParams, KeystoreParams, NetworkParams, OffchainWorkerParams, SharedParams, + TransactionPoolParams, + }, + CliConfiguration, +}; use regex::Regex; use sc_service::{ config::{BasePath, PrometheusConfig, TransactionPoolOptions}, @@ -308,7 +308,7 @@ impl CliConfiguration for RunCmd { Error::Input(format!( "Invalid node name '{}'. Reason: {}. If unsure, use none.", name, msg - )) + )) })?; Ok(name) @@ -363,18 +363,13 @@ impl CliConfiguration for RunCmd { Ok(if self.no_prometheus { None } else { - let interface = if self.prometheus_external { - Ipv4Addr::UNSPECIFIED - } else { - Ipv4Addr::LOCALHOST - }; - - Some(PrometheusConfig::new_with_default_registry( - SocketAddr::new( - interface.into(), - self.prometheus_port.unwrap_or(default_listen_port), - ) - )) + let interface = + if self.prometheus_external { Ipv4Addr::UNSPECIFIED } else { Ipv4Addr::LOCALHOST }; + + Some(PrometheusConfig::new_with_default_registry(SocketAddr::new( + interface.into(), + self.prometheus_port.unwrap_or(default_listen_port), + ))) }) } @@ -416,7 +411,7 @@ impl CliConfiguration for RunCmd { self.rpc_external, self.unsafe_rpc_external, self.rpc_methods, - self.validator + self.validator, )?; Ok(Some(SocketAddr::new(interface, self.rpc_port.unwrap_or(default_listen_port)))) @@ -466,19 +461,19 @@ impl CliConfiguration for RunCmd { pub fn is_node_name_valid(_name: &str) -> std::result::Result<(), &str> { let name = _name.to_string(); if name.chars().count() >= crate::NODE_NAME_MAX_LENGTH { - return Err("Node name too long"); + return Err("Node name too long") } let invalid_chars = r"[\\.@]"; let 
re = Regex::new(invalid_chars).unwrap(); if re.is_match(&name) { - return Err("Node name should not contain invalid chars such as '.' and '@'"); + return Err("Node name should not contain invalid chars such as '.' and '@'") } let invalid_patterns = r"(https?:\\/+)?(www)+"; let re = Regex::new(invalid_patterns).unwrap(); if re.is_match(&name) { - return Err("Node name should not contain urls"); + return Err("Node name should not contain urls") } Ok(()) @@ -497,7 +492,7 @@ fn rpc_interface( or `--rpc-methods=unsafe` if you understand the risks. See the options \ description for more information." .to_owned(), - )); + )) } if is_external || is_unsafe_external { @@ -537,11 +532,10 @@ fn parse_telemetry_endpoints(s: &str) -> std::result::Result<(String, u8), Telem None => Err(TelemetryParsingError::MissingVerbosity), Some(pos_) => { let url = s[..pos_].to_string(); - let verbosity = s[pos_ + 1..] - .parse() - .map_err(TelemetryParsingError::VerbosityParsingError)?; + let verbosity = + s[pos_ + 1..].parse().map_err(TelemetryParsingError::VerbosityParsingError)?; Ok((url, verbosity)) - } + }, } } @@ -574,17 +568,13 @@ fn parse_cors(s: &str) -> std::result::Result> match part { "all" | "*" => { is_all = true; - break; - } + break + }, other => origins.push(other.to_owned()), } } - Ok(if is_all { - Cors::All - } else { - Cors::List(origins) - }) + Ok(if is_all { Cors::All } else { Cors::List(origins) }) } #[cfg(test)] @@ -600,7 +590,8 @@ mod tests { fn tests_node_name_bad() { assert!(is_node_name_valid( "very very long names are really not very cool for the ui at all, really they're not" - ).is_err()); + ) + .is_err()); assert!(is_node_name_valid("Dots.not.Ok").is_err()); assert!(is_node_name_valid("http://visit.me").is_err()); assert!(is_node_name_valid("https://visit.me").is_err()); diff --git a/client/cli/src/commands/sign.rs b/client/cli/src/commands/sign.rs index 5d487861428fd..20aacd9bf0020 100644 --- a/client/cli/src/commands/sign.rs +++ 
b/client/cli/src/commands/sign.rs @@ -5,7 +5,7 @@ // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or +// the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // This program is distributed in the hope that it will be useful, @@ -18,15 +18,12 @@ //! Implementation of the `sign` subcommand use crate::{error, utils, with_crypto_scheme, CryptoSchemeFlag, KeystoreParams}; -use structopt::StructOpt; use sp_core::crypto::SecretString; +use structopt::StructOpt; /// The `sign` command #[derive(Debug, StructOpt, Clone)] -#[structopt( - name = "sign", - about = "Sign a message, with a given (secret) key" -)] +#[structopt(name = "sign", about = "Sign a message, with a given (secret) key")] pub struct SignCmd { /// The secret key URI. /// If the value is a file, the file content is used as URI. @@ -52,7 +49,6 @@ pub struct SignCmd { pub crypto_scheme: CryptoSchemeFlag, } - impl SignCmd { /// Run the command pub fn run(&self) -> error::Result<()> { @@ -60,17 +56,19 @@ impl SignCmd { let suri = utils::read_uri(self.suri.as_ref())?; let password = self.keystore_params.read_password()?; - let signature = with_crypto_scheme!( - self.crypto_scheme.scheme, - sign(&suri, password, message) - )?; + let signature = + with_crypto_scheme!(self.crypto_scheme.scheme, sign(&suri, password, message))?; println!("{}", signature); Ok(()) } } -fn sign(suri: &str, password: Option, message: Vec) -> error::Result { +fn sign( + suri: &str, + password: Option, + message: Vec, +) -> error::Result { let pair = utils::pair_from_suri::

(suri, password)?; Ok(format!("{}", hex::encode(pair.sign(&message)))) } @@ -91,7 +89,7 @@ mod test { "--message", &seed[2..], "--password", - "12345" + "12345", ]); assert!(sign.run().is_ok()); } diff --git a/client/cli/src/commands/utils.rs b/client/cli/src/commands/utils.rs index 69372e624095e..fa783f7a95a51 100644 --- a/client/cli/src/commands/utils.rs +++ b/client/cli/src/commands/utils.rs @@ -22,9 +22,8 @@ use crate::{ OutputType, }; use serde_json::json; -use sp_core::crypto::{ExposeSecret, SecretString, Zeroize}; use sp_core::{ - crypto::{Ss58AddressFormat, Ss58Codec}, + crypto::{ExposeSecret, SecretString, Ss58AddressFormat, Ss58Codec, Zeroize}, hexdisplay::HexDisplay, Pair, }; @@ -88,7 +87,7 @@ pub fn print_from_uri( "{}", serde_json::to_string_pretty(&json).expect("Json pretty print failed") ); - } + }, OutputType::Text => { println!( "Secret phrase `{}` is account:\n \ @@ -102,12 +101,9 @@ pub fn print_from_uri( format_public_key::(public_key.clone()), public_key.to_ss58check_with_version(network_override), format_account_id::(public_key), - pair.public() - .into() - .into_account() - .to_ss58check_with_version(network_override), + pair.public().into().into_account().to_ss58check_with_version(network_override), ); - } + }, } } else if let Ok((pair, seed)) = Pair::from_string_with_seed(uri, password.clone()) { let public_key = pair.public(); @@ -127,7 +123,7 @@ pub fn print_from_uri( "{}", serde_json::to_string_pretty(&json).expect("Json pretty print failed") ); - } + }, OutputType::Text => { println!( "Secret Key URI `{}` is account:\n \ @@ -137,20 +133,13 @@ pub fn print_from_uri( Account ID: {}\n \ SS58 Address: {}", uri, - if let Some(seed) = seed { - format_seed::(seed) - } else { - "n/a".into() - }, + if let Some(seed) = seed { format_seed::(seed) } else { "n/a".into() }, format_public_key::(public_key.clone()), public_key.to_ss58check_with_version(network_override), format_account_id::(public_key), - pair.public() - .into() - .into_account() - 
.to_ss58check_with_version(network_override), + pair.public().into().into_account().to_ss58check_with_version(network_override), ); - } + }, } } else if let Ok((public_key, network)) = Pair::Public::from_string_with_version(uri) { let network_override = network_override.unwrap_or(network); @@ -170,7 +159,7 @@ pub fn print_from_uri( "{}", serde_json::to_string_pretty(&json).expect("Json pretty print failed") ); - } + }, OutputType::Text => { println!( "Public Key URI `{}` is account:\n \ @@ -186,7 +175,7 @@ pub fn print_from_uri( format_account_id::(public_key.clone()), public_key.to_ss58check_with_version(network_override), ); - } + }, } } else { println!("Invalid phrase/URI given"); @@ -220,11 +209,8 @@ where "ss58Address": public_key.to_ss58check_with_version(network_override), }); - println!( - "{}", - serde_json::to_string_pretty(&json).expect("Json pretty print failed") - ); - } + println!("{}", serde_json::to_string_pretty(&json).expect("Json pretty print failed")); + }, OutputType::Text => { println!( "Network ID/version: {}\n \ @@ -238,7 +224,7 @@ where format_account_id::(public_key.clone()), public_key.to_ss58check_with_version(network_override), ); - } + }, } Ok(()) @@ -273,10 +259,7 @@ fn format_account_id(public_key: PublicFor

) -> String where PublicFor

: Into, { - format!( - "0x{}", - HexDisplay::from(&public_key.into().into_account().as_ref()) - ) + format!("0x{}", HexDisplay::from(&public_key.into().into_account().as_ref())) } /// helper method for decoding hex @@ -294,13 +277,13 @@ pub fn read_message(msg: Option<&String>, should_decode: bool) -> Result match msg { Some(m) => { message = decode_hex(m)?; - } + }, None => { std::io::stdin().lock().read_to_end(&mut message)?; if should_decode { message = decode_hex(&message)?; } - } + }, } Ok(message) } diff --git a/client/cli/src/commands/vanity.rs b/client/cli/src/commands/vanity.rs index ce1f079db8789..daeb81e86a1a1 100644 --- a/client/cli/src/commands/vanity.rs +++ b/client/cli/src/commands/vanity.rs @@ -5,7 +5,7 @@ // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or +// the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // This program is distributed in the hope that it will be useful, @@ -19,21 +19,17 @@ //! 
implementation of the `vanity` subcommand use crate::{ - error, utils, with_crypto_scheme, - CryptoSchemeFlag, NetworkSchemeFlag, OutputTypeFlag, + error, utils, with_crypto_scheme, CryptoSchemeFlag, NetworkSchemeFlag, OutputTypeFlag, }; -use sp_core::crypto::{Ss58Codec, Ss58AddressFormat}; -use structopt::StructOpt; use rand::{rngs::OsRng, RngCore}; +use sp_core::crypto::{Ss58AddressFormat, Ss58Codec}; use sp_runtime::traits::IdentifyAccount; +use structopt::StructOpt; use utils::print_from_uri; /// The `vanity` command #[derive(Debug, StructOpt, Clone)] -#[structopt( - name = "vanity", - about = "Generate a seed that provides a vanity address" -)] +#[structopt(name = "vanity", about = "Generate a seed that provides a vanity address")] pub struct VanityCmd { /// Desired pattern #[structopt(long, parse(try_from_str = assert_non_empty_string))] @@ -78,10 +74,10 @@ fn generate_key( desired: &str, network_override: Ss58AddressFormat, ) -> Result - where - Pair: sp_core::Pair, - Pair::Public: IdentifyAccount, - ::AccountId: Ss58Codec, +where + Pair: sp_core::Pair, + Pair::Public: IdentifyAccount, + ::AccountId: Ss58Codec, { println!("Generating key containing pattern '{}'", desired); @@ -104,7 +100,7 @@ fn generate_key( best = score; if best >= top { println!("best: {} == top: {}", best, top); - return Ok(utils::format_seed::(seed.clone())); + return Ok(utils::format_seed::(seed.clone())) } } done += 1; @@ -129,11 +125,11 @@ fn next_seed(seed: &mut [u8]) { match seed[i] { 255 => { seed[i] = 0; - } + }, _ => { seed[i] += 1; - break; - } + break + }, } } } @@ -145,7 +141,7 @@ fn calculate_score(_desired: &str, key: &str) -> usize { let snip_size = _desired.len() - truncate; let truncated = &_desired[0..snip_size]; if let Some(pos) = key.find(truncated) { - return (47 - pos) + (snip_size * 48); + return (47 - pos) + (snip_size * 48) } } 0 @@ -160,15 +156,13 @@ fn assert_non_empty_string(pattern: &str) -> Result { } } - #[cfg(test)] mod tests { use super::*; - use 
sp_core::{crypto::Ss58Codec, Pair}; - use sp_core::sr25519; + use sp_core::{crypto::Ss58Codec, sr25519, Pair}; + use structopt::StructOpt; #[cfg(feature = "bench")] use test::Bencher; - use structopt::StructOpt; #[test] fn vanity() { @@ -179,25 +173,21 @@ mod tests { #[test] fn test_generation_with_single_char() { let seed = generate_key::("ab", Default::default()).unwrap(); - assert!( - sr25519::Pair::from_seed_slice(&hex::decode(&seed[2..]).unwrap()) - .unwrap() - .public() - .to_ss58check() - .contains("ab") - ); + assert!(sr25519::Pair::from_seed_slice(&hex::decode(&seed[2..]).unwrap()) + .unwrap() + .public() + .to_ss58check() + .contains("ab")); } #[test] fn generate_key_respects_network_override() { let seed = generate_key::("ab", Ss58AddressFormat::PolkadotAccount).unwrap(); - assert!( - sr25519::Pair::from_seed_slice(&hex::decode(&seed[2..]).unwrap()) - .unwrap() - .public() - .to_ss58check_with_version(Ss58AddressFormat::PolkadotAccount) - .contains("ab") - ); + assert!(sr25519::Pair::from_seed_slice(&hex::decode(&seed[2..]).unwrap()) + .unwrap() + .public() + .to_ss58check_with_version(Ss58AddressFormat::PolkadotAccount) + .contains("ab")); } #[test] @@ -208,10 +198,7 @@ mod tests { #[test] fn test_score_100() { - let score = calculate_score( - "Polkadot", - "5PolkadotwHY5k9GpdTgpqs9xjuNvtv8EcwCFpEeyEf3KHim", - ); + let score = calculate_score("Polkadot", "5PolkadotwHY5k9GpdTgpqs9xjuNvtv8EcwCFpEeyEf3KHim"); assert_eq!(score, 430); } @@ -219,10 +206,7 @@ mod tests { fn test_score_50_2() { // 50% for the position + 50% for the size assert_eq!( - calculate_score( - "Polkadot", - "5PolkXXXXwHY5k9GpdTgpqs9xjuNvtv8EcwCFpEeyEf3KHim" - ), + calculate_score("Polkadot", "5PolkXXXXwHY5k9GpdTgpqs9xjuNvtv8EcwCFpEeyEf3KHim"), 238 ); } @@ -230,10 +214,7 @@ mod tests { #[test] fn test_score_0() { assert_eq!( - calculate_score( - "Polkadot", - "5GUWv4bLCchGUHJrzULXnh4JgXsMpTKRnjuXTY7Qo1Kh9uYK" - ), + calculate_score("Polkadot", 
"5GUWv4bLCchGUHJrzULXnh4JgXsMpTKRnjuXTY7Qo1Kh9uYK"), 0 ); } diff --git a/client/cli/src/commands/verify.rs b/client/cli/src/commands/verify.rs index c6ce3ef9d69c8..760793374242e 100644 --- a/client/cli/src/commands/verify.rs +++ b/client/cli/src/commands/verify.rs @@ -5,7 +5,7 @@ // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or +// the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // This program is distributed in the hope that it will be useful, @@ -19,7 +19,7 @@ //! implementation of the `verify` subcommand use crate::{error, utils, with_crypto_scheme, CryptoSchemeFlag}; -use sp_core::{Public, crypto::Ss58Codec}; +use sp_core::{crypto::Ss58Codec, Public}; use structopt::StructOpt; /// The `verify` command @@ -57,32 +57,23 @@ impl VerifyCmd { let message = utils::read_message(self.message.as_ref(), self.hex)?; let sig_data = utils::decode_hex(&self.sig)?; let uri = utils::read_uri(self.uri.as_ref())?; - let uri = if uri.starts_with("0x") { - &uri[2..] - } else { - &uri - }; - - with_crypto_scheme!( - self.crypto_scheme.scheme, - verify(sig_data, message, uri) - ) + let uri = if uri.starts_with("0x") { &uri[2..] 
} else { &uri }; + + with_crypto_scheme!(self.crypto_scheme.scheme, verify(sig_data, message, uri)) } } fn verify(sig_data: Vec, message: Vec, uri: &str) -> error::Result<()> - where - Pair: sp_core::Pair, - Pair::Signature: Default + AsMut<[u8]>, +where + Pair: sp_core::Pair, + Pair::Signature: Default + AsMut<[u8]>, { let mut signature = Pair::Signature::default(); if sig_data.len() != signature.as_ref().len() { - return Err( - error::Error::SignatureInvalidLength { - read: sig_data.len(), - expected: signature.as_ref().len(), - } - ); + return Err(error::Error::SignatureInvalidLength { + read: sig_data.len(), + expected: signature.as_ref().len(), + }) } signature.as_mut().copy_from_slice(&sig_data); diff --git a/client/cli/src/config.rs b/client/cli/src/config.rs index 8e435da253c04..d586156410507 100644 --- a/client/cli/src/config.rs +++ b/client/cli/src/config.rs @@ -18,24 +18,24 @@ //! Configuration trait for a CLI based on substrate -use crate::arg_enums::Database; -use crate::error::Result; use crate::{ - DatabaseParams, ImportParams, KeystoreParams, NetworkParams, NodeKeyParams, - OffchainWorkerParams, PruningParams, SharedParams, SubstrateCli, + arg_enums::Database, error::Result, DatabaseParams, ImportParams, KeystoreParams, + NetworkParams, NodeKeyParams, OffchainWorkerParams, PruningParams, SharedParams, SubstrateCli, }; use log::warn; use names::{Generator, Name}; use sc_client_api::execution_extensions::ExecutionStrategies; -use sc_service::config::{ - BasePath, Configuration, DatabaseConfig, ExtTransport, KeystoreConfig, NetworkConfiguration, - NodeKeyConfig, OffchainWorkerConfig, PrometheusConfig, PruningMode, Role, RpcMethods, - TaskExecutor, TelemetryEndpoints, TransactionPoolOptions, WasmExecutionMethod, +use sc_service::{ + config::{ + BasePath, Configuration, DatabaseConfig, ExtTransport, KeystoreConfig, + NetworkConfiguration, NodeKeyConfig, OffchainWorkerConfig, PrometheusConfig, PruningMode, + Role, RpcMethods, TaskExecutor, 
TelemetryEndpoints, TransactionPoolOptions, + WasmExecutionMethod, + }, + ChainSpec, KeepBlocks, TracingReceiver, TransactionStorageMode, }; -use sc_service::{ChainSpec, TracingReceiver, KeepBlocks, TransactionStorageMode}; use sc_tracing::logging::LoggerBuilder; -use std::net::SocketAddr; -use std::path::PathBuf; +use std::{net::SocketAddr, path::PathBuf}; /// The maximum number of characters for a node name. pub(crate) const NODE_NAME_MAX_LENGTH: usize = 64; @@ -178,12 +178,7 @@ pub trait CliConfiguration: Sized { default_listen_port, ) } else { - NetworkConfiguration::new( - node_name, - client_id, - node_key, - Some(net_config_dir), - ) + NetworkConfiguration::new(node_name, client_id, node_key, Some(net_config_dir)) }) } @@ -201,14 +196,13 @@ pub trait CliConfiguration: Sized { /// /// By default this is retrieved from `DatabaseParams` if it is available. Otherwise its `None`. fn database_cache_size(&self) -> Result> { - Ok(self.database_params() - .map(|x| x.database_cache_size()) - .unwrap_or_default()) + Ok(self.database_params().map(|x| x.database_cache_size()).unwrap_or_default()) } /// Get the database transaction storage scheme. fn database_transaction_storage(&self) -> Result { - Ok(self.database_params() + Ok(self + .database_params() .map(|x| x.transaction_storage()) .unwrap_or(TransactionStorageMode::BlockBody)) } @@ -228,13 +222,8 @@ pub trait CliConfiguration: Sized { database: Database, ) -> Result { Ok(match database { - Database::RocksDb => DatabaseConfig::RocksDb { - path: base_path.join("db"), - cache_size, - }, - Database::ParityDb => DatabaseConfig::ParityDb { - path: base_path.join("paritydb"), - }, + Database::RocksDb => DatabaseConfig::RocksDb { path: base_path.join("db"), cache_size }, + Database::ParityDb => DatabaseConfig::ParityDb { path: base_path.join("paritydb") }, }) } @@ -242,9 +231,7 @@ pub trait CliConfiguration: Sized { /// /// By default this is retrieved from `ImportParams` if it is available. Otherwise its `0`. 
fn state_cache_size(&self) -> Result { - Ok(self.import_params() - .map(|x| x.state_cache_size()) - .unwrap_or_default()) + Ok(self.import_params().map(|x| x.state_cache_size()).unwrap_or_default()) } /// Get the state cache child ratio (if any). @@ -293,18 +280,14 @@ pub trait CliConfiguration: Sized { /// By default this is retrieved from `ImportParams` if it is available. Otherwise its /// `WasmExecutionMethod::default()`. fn wasm_method(&self) -> Result { - Ok(self.import_params() - .map(|x| x.wasm_method()) - .unwrap_or_default()) + Ok(self.import_params().map(|x| x.wasm_method()).unwrap_or_default()) } /// Get the path where WASM overrides live. /// /// By default this is `None`. fn wasm_runtime_overrides(&self) -> Option { - self.import_params() - .map(|x| x.wasm_runtime_overrides()) - .unwrap_or_default() + self.import_params().map(|x| x.wasm_runtime_overrides()).unwrap_or_default() } /// Get the execution strategies. @@ -502,10 +485,7 @@ pub trait CliConfiguration: Sized { let (keystore_remote, keystore) = self.keystore_config(&config_dir)?; let telemetry_endpoints = self.telemetry_endpoints(&chain_spec)?; - let unsafe_pruning = self - .import_params() - .map(|p| p.unsafe_pruning) - .unwrap_or(false); + let unsafe_pruning = self.import_params().map(|p| p.unsafe_pruning).unwrap_or(false); Ok(Configuration { impl_name: C::impl_name(), @@ -628,7 +608,7 @@ pub fn generate_node_name() -> String { let count = node_name.chars().count(); if count < NODE_NAME_MAX_LENGTH { - return node_name; + return node_name } } } diff --git a/client/cli/src/lib.rs b/client/cli/src/lib.rs index e170d1a196ffc..0d5051bc113e2 100644 --- a/client/cli/src/lib.rs +++ b/client/cli/src/lib.rs @@ -159,7 +159,7 @@ pub trait SubstrateCli: Sized { let _ = std::io::stdout().write_all(e.message.as_bytes()); std::process::exit(0); } - } + }, }; ::from_clap(&matches) diff --git a/client/cli/src/params/database_params.rs b/client/cli/src/params/database_params.rs index 
d468f15555562..4d6cf5f1d3674 100644 --- a/client/cli/src/params/database_params.rs +++ b/client/cli/src/params/database_params.rs @@ -17,8 +17,8 @@ // along with this program. If not, see . use crate::arg_enums::Database; -use structopt::StructOpt; use sc_service::TransactionStorageMode; +use structopt::StructOpt; /// Parameters for block import. #[derive(Debug, StructOpt, Clone)] diff --git a/client/cli/src/params/import_params.rs b/client/cli/src/params/import_params.rs index a62ec98a97029..9248e210eb662 100644 --- a/client/cli/src/params/import_params.rs +++ b/client/cli/src/params/import_params.rs @@ -16,16 +16,17 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use crate::arg_enums::{ - ExecutionStrategy, WasmExecutionMethod, DEFAULT_EXECUTION_BLOCK_CONSTRUCTION, - DEFAULT_EXECUTION_IMPORT_BLOCK, DEFAULT_EXECUTION_IMPORT_BLOCK_VALIDATOR, - DEFAULT_EXECUTION_OFFCHAIN_WORKER, DEFAULT_EXECUTION_OTHER, DEFAULT_EXECUTION_SYNCING, +use crate::{ + arg_enums::{ + ExecutionStrategy, WasmExecutionMethod, DEFAULT_EXECUTION_BLOCK_CONSTRUCTION, + DEFAULT_EXECUTION_IMPORT_BLOCK, DEFAULT_EXECUTION_IMPORT_BLOCK_VALIDATOR, + DEFAULT_EXECUTION_OFFCHAIN_WORKER, DEFAULT_EXECUTION_OTHER, DEFAULT_EXECUTION_SYNCING, + }, + params::{DatabaseParams, PruningParams}, }; -use crate::params::DatabaseParams; -use crate::params::PruningParams; use sc_client_api::execution_extensions::ExecutionStrategies; -use structopt::StructOpt; use std::path::PathBuf; +use structopt::StructOpt; #[cfg(feature = "wasmtime")] const WASM_METHOD_DEFAULT: &str = "Compiled"; @@ -73,11 +74,7 @@ pub struct ImportParams { pub execution_strategies: ExecutionStrategiesParams, /// Specify the state cache size. 
- #[structopt( - long = "state-cache-size", - value_name = "Bytes", - default_value = "67108864" - )] + #[structopt(long = "state-cache-size", value_name = "Bytes", default_value = "67108864")] pub state_cache_size: usize, } @@ -102,11 +99,7 @@ impl ImportParams { pub fn execution_strategies(&self, is_dev: bool, is_validator: bool) -> ExecutionStrategies { let exec = &self.execution_strategies; let exec_all_or = |strat: Option, default: ExecutionStrategy| { - let default = if is_dev { - ExecutionStrategy::Native - } else { - default - }; + let default = if is_dev { ExecutionStrategy::Native } else { default }; exec.execution.unwrap_or_else(|| strat.unwrap_or(default)).into() }; @@ -120,10 +113,14 @@ impl ImportParams { ExecutionStrategies { syncing: exec_all_or(exec.execution_syncing, DEFAULT_EXECUTION_SYNCING), importing: exec_all_or(exec.execution_import_block, default_execution_import_block), - block_construction: - exec_all_or(exec.execution_block_construction, DEFAULT_EXECUTION_BLOCK_CONSTRUCTION), - offchain_worker: - exec_all_or(exec.execution_offchain_worker, DEFAULT_EXECUTION_OFFCHAIN_WORKER), + block_construction: exec_all_or( + exec.execution_block_construction, + DEFAULT_EXECUTION_BLOCK_CONSTRUCTION, + ), + offchain_worker: exec_all_or( + exec.execution_offchain_worker, + DEFAULT_EXECUTION_OFFCHAIN_WORKER, + ), other: exec_all_or(exec.execution_other, DEFAULT_EXECUTION_OTHER), } } diff --git a/client/cli/src/params/keystore_params.rs b/client/cli/src/params/keystore_params.rs index 2975c9bf5041f..4eb5e5dc6c2d2 100644 --- a/client/cli/src/params/keystore_params.rs +++ b/client/cli/src/params/keystore_params.rs @@ -16,12 +16,14 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::error::Result; +use crate::{error, error::Result}; use sc_service::config::KeystoreConfig; -use std::{fs, path::{PathBuf, Path}}; -use structopt::StructOpt; -use crate::error; use sp_core::crypto::SecretString; +use std::{ + fs, + path::{Path, PathBuf}, +}; +use structopt::StructOpt; /// default sub directory for the key store const DEFAULT_KEYSTORE_CONFIG_PATH: &'static str = "keystore"; @@ -81,8 +83,7 @@ impl KeystoreParams { #[cfg(target_os = "unknown")] None } else if let Some(ref file) = self.password_filename { - let password = fs::read_to_string(file) - .map_err(|e| format!("{}", e))?; + let password = fs::read_to_string(file).map_err(|e| format!("{}", e))?; Some(SecretString::new(password)) } else { self.password.clone() diff --git a/client/cli/src/params/mod.rs b/client/cli/src/params/mod.rs index 0769e5a87adcb..431e1750b2b8a 100644 --- a/client/cli/src/params/mod.rs +++ b/client/cli/src/params/mod.rs @@ -25,21 +25,20 @@ mod pruning_params; mod shared_params; mod transaction_pool_params; -use std::{fmt::Debug, str::FromStr, convert::TryFrom}; -use sp_runtime::{generic::BlockId, traits::{Block as BlockT, NumberFor}}; +use crate::arg_enums::{CryptoScheme, OutputType}; use sp_core::crypto::Ss58AddressFormat; -use crate::arg_enums::{OutputType, CryptoScheme}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, NumberFor}, +}; +use std::{convert::TryFrom, fmt::Debug, str::FromStr}; use structopt::StructOpt; -pub use crate::params::database_params::*; -pub use crate::params::import_params::*; -pub use crate::params::keystore_params::*; -pub use crate::params::network_params::*; -pub use crate::params::node_key_params::*; -pub use crate::params::offchain_worker_params::*; -pub use crate::params::pruning_params::*; -pub use crate::params::shared_params::*; -pub use crate::params::transaction_pool_params::*; +pub use crate::params::{ + database_params::*, import_params::*, keystore_params::*, network_params::*, + node_key_params::*, 
offchain_worker_params::*, pruning_params::*, shared_params::*, + transaction_pool_params::*, +}; /// Wrapper type of `String` that holds an unsigned integer of arbitrary size, formatted as a decimal. #[derive(Debug, Clone)] @@ -50,10 +49,7 @@ impl FromStr for GenericNumber { fn from_str(block_number: &str) -> Result { if let Some(pos) = block_number.chars().position(|d| !d.is_digit(10)) { - Err(format!( - "Expected block number, found illegal digit at position: {}", - pos, - )) + Err(format!("Expected block number, found illegal digit at position: {}", pos,)) } else { Ok(Self(block_number.to_owned())) } @@ -66,9 +62,9 @@ impl GenericNumber { /// See `https://doc.rust-lang.org/std/primitive.str.html#method.parse` for more elaborate /// documentation. pub fn parse(&self) -> Result - where - N: FromStr, - N::Err: std::fmt::Debug, + where + N: FromStr, + N::Err: std::fmt::Debug, { FromStr::from_str(&self.0).map_err(|e| format!("Failed to parse block number: {:?}", e)) } @@ -109,7 +105,7 @@ impl BlockNumberOrHash { if self.0.starts_with("0x") { Ok(BlockId::Hash( FromStr::from_str(&self.0[2..]) - .map_err(|e| format!("Failed to parse block hash: {:?}", e))? + .map_err(|e| format!("Failed to parse block hash: {:?}", e))?, )) } else { GenericNumber(self.0.clone()).parse().map(BlockId::Number) @@ -117,7 +113,6 @@ impl BlockNumberOrHash { } } - /// Optional flag for specifying crypto algorithm #[derive(Debug, StructOpt, Clone)] pub struct CryptoSchemeFlag { diff --git a/client/cli/src/params/network_params.rs b/client/cli/src/params/network_params.rs index 69f4c9d1ba74b..185a93f66b3d2 100644 --- a/client/cli/src/params/network_params.rs +++ b/client/cli/src/params/network_params.rs @@ -16,13 +16,17 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::params::node_key_params::NodeKeyParams; -use crate::arg_enums::SyncMode; +use crate::{arg_enums::SyncMode, params::node_key_params::NodeKeyParams}; use sc_network::{ - config::{NetworkConfiguration, NodeKeyConfig, NonReservedPeerMode, SetConfig, TransportConfig}, + config::{ + NetworkConfiguration, NodeKeyConfig, NonReservedPeerMode, SetConfig, TransportConfig, + }, multiaddr::Protocol, }; -use sc_service::{ChainSpec, ChainType, config::{Multiaddr, MultiaddrWithPeerId}}; +use sc_service::{ + config::{Multiaddr, MultiaddrWithPeerId}, + ChainSpec, ChainType, +}; use std::{borrow::Cow, path::PathBuf}; use structopt::StructOpt; @@ -97,11 +101,7 @@ pub struct NetworkParams { /// /// This allows downloading announced blocks from multiple peers. Decrease to save /// traffic and risk increased latency. - #[structopt( - long = "max-parallel-downloads", - value_name = "COUNT", - default_value = "5" - )] + #[structopt(long = "max-parallel-downloads", value_name = "COUNT", default_value = "5")] pub max_parallel_downloads: u32, #[allow(missing_docs)] @@ -184,15 +184,16 @@ impl NetworkParams { let chain_type = chain_spec.chain_type(); // Activate if the user explicitly requested local discovery, `--dev` is given or the // chain type is `Local`/`Development` - let allow_non_globals_in_dht = self.discover_local - || is_dev - || matches!(chain_type, ChainType::Local | ChainType::Development); + let allow_non_globals_in_dht = + self.discover_local || + is_dev || matches!(chain_type, ChainType::Local | ChainType::Development); let allow_private_ipv4 = match (self.allow_private_ipv4, self.no_private_ipv4) { (true, true) => unreachable!("`*_private_ipv4` flags are mutually exclusive; qed"), (true, false) => true, (false, true) => false, - (false, false) => is_dev || matches!(chain_type, ChainType::Local | ChainType::Development), + (false, false) => + is_dev || matches!(chain_type, ChainType::Local | ChainType::Development), }; NetworkConfiguration { diff --git 
a/client/cli/src/params/node_key_params.rs b/client/cli/src/params/node_key_params.rs index d5823341aa692..bc5606752a88f 100644 --- a/client/cli/src/params/node_key_params.rs +++ b/client/cli/src/params/node_key_params.rs @@ -16,13 +16,12 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use sc_network::{config::identity::ed25519, config::NodeKeyConfig}; +use sc_network::config::{identity::ed25519, NodeKeyConfig}; use sp_core::H256; use std::{path::PathBuf, str::FromStr}; use structopt::StructOpt; -use crate::arg_enums::NodeKeyType; -use crate::error; +use crate::{arg_enums::NodeKeyType, error}; /// The file name of the node's Ed25519 secret key inside the chain-specific /// network config directory, if neither `--node-key` nor `--node-key-file` @@ -103,12 +102,12 @@ impl NodeKeyParams { sc_network::config::Secret::File( self.node_key_file .clone() - .unwrap_or_else(|| net_config_dir.join(NODE_KEY_ED25519_FILE)) + .unwrap_or_else(|| net_config_dir.join(NODE_KEY_ED25519_FILE)), ) }; NodeKeyConfig::Ed25519(secret) - } + }, }) } } @@ -120,13 +119,11 @@ fn invalid_node_key(e: impl std::fmt::Display) -> error::Error { /// Parse a Ed25519 secret key from a hex string into a `sc_network::Secret`. fn parse_ed25519_secret(hex: &str) -> error::Result { - H256::from_str(&hex) - .map_err(invalid_node_key) - .and_then(|bytes| { - ed25519::SecretKey::from_bytes(bytes) - .map(sc_network::config::Secret::Input) - .map_err(invalid_node_key) - }) + H256::from_str(&hex).map_err(invalid_node_key).and_then(|bytes| { + ed25519::SecretKey::from_bytes(bytes) + .map(sc_network::config::Secret::Input) + .map_err(invalid_node_key) + }) } #[cfg(test)] @@ -151,9 +148,7 @@ mod tests { params.node_key(net_config_dir).and_then(|c| match c { NodeKeyConfig::Ed25519(sc_network::config::Secret::Input(ref ski)) if node_key_type == NodeKeyType::Ed25519 && &sk[..] 
== ski.as_ref() => - { - Ok(()) - } + Ok(()), _ => Err(error::Error::Input("Unexpected node key config".into())), }) }) @@ -171,14 +166,14 @@ mod tests { node_key_file: Some(file), }; - let node_key = params.node_key(&PathBuf::from("not-used")) + let node_key = params + .node_key(&PathBuf::from("not-used")) .expect("Creates node key config") .into_keypair() .expect("Creates node key pair"); match node_key { - Keypair::Ed25519(ref pair) - if pair.secret().as_ref() == key.as_ref() => {} + Keypair::Ed25519(ref pair) if pair.secret().as_ref() == key.as_ref() => {}, _ => panic!("Invalid key"), } } @@ -202,11 +197,7 @@ mod tests { { NodeKeyType::variants().iter().try_for_each(|t| { let node_key_type = NodeKeyType::from_str(t).unwrap(); - f(NodeKeyParams { - node_key_type, - node_key: None, - node_key_file: None, - }) + f(NodeKeyParams { node_key_type, node_key: None, node_key_file: None }) }) } @@ -214,17 +205,12 @@ mod tests { with_def_params(|params| { let dir = PathBuf::from(net_config_dir.clone()); let typ = params.node_key_type; - params - .node_key(net_config_dir) - .and_then(move |c| match c { - NodeKeyConfig::Ed25519(sc_network::config::Secret::File(ref f)) - if typ == NodeKeyType::Ed25519 - && f == &dir.join(NODE_KEY_ED25519_FILE) => - { - Ok(()) - } - _ => Err(error::Error::Input("Unexpected node key config".into())), - }) + params.node_key(net_config_dir).and_then(move |c| match c { + NodeKeyConfig::Ed25519(sc_network::config::Secret::File(ref f)) + if typ == NodeKeyType::Ed25519 && f == &dir.join(NODE_KEY_ED25519_FILE) => + Ok(()), + _ => Err(error::Error::Input("Unexpected node key config".into())), + }) }) } diff --git a/client/cli/src/params/offchain_worker_params.rs b/client/cli/src/params/offchain_worker_params.rs index a6d65e4027a25..685328ef17795 100644 --- a/client/cli/src/params/offchain_worker_params.rs +++ b/client/cli/src/params/offchain_worker_params.rs @@ -27,8 +27,7 @@ use sc_network::config::Role; use sc_service::config::OffchainWorkerConfig; 
use structopt::StructOpt; -use crate::error; -use crate::OffchainWorkerEnabled; +use crate::{error, OffchainWorkerEnabled}; /// Offchain worker related parameters. #[derive(Debug, StructOpt, Clone)] @@ -49,10 +48,7 @@ pub struct OffchainWorkerParams { /// /// Enables a runtime to write directly to a offchain workers /// DB during block import. - #[structopt( - long = "enable-offchain-indexing", - value_name = "ENABLE_OFFCHAIN_INDEXING" - )] + #[structopt(long = "enable-offchain-indexing", value_name = "ENABLE_OFFCHAIN_INDEXING")] pub indexing_enabled: bool, } @@ -67,9 +63,6 @@ impl OffchainWorkerParams { }; let indexing_enabled = self.indexing_enabled; - Ok(OffchainWorkerConfig { - enabled, - indexing_enabled, - }) + Ok(OffchainWorkerConfig { enabled, indexing_enabled }) } } diff --git a/client/cli/src/params/pruning_params.rs b/client/cli/src/params/pruning_params.rs index 32abaa9a755b4..28c7fa301cc60 100644 --- a/client/cli/src/params/pruning_params.rs +++ b/client/cli/src/params/pruning_params.rs @@ -17,7 +17,7 @@ // along with this program. If not, see . use crate::error; -use sc_service::{PruningMode, Role, KeepBlocks}; +use sc_service::{KeepBlocks, PruningMode, Role}; use structopt::StructOpt; /// Parameters to define the pruning mode @@ -54,13 +54,13 @@ impl PruningParams { "Validators should run with state pruning disabled (i.e. archive). \ You can ignore this check with `--unsafe-pruning`." .to_string(), - )); + )) } PruningMode::keep_blocks(s.parse().map_err(|_| { error::Error::Input("Invalid pruning mode specified".to_string()) })?) - } + }, }) } diff --git a/client/cli/src/params/shared_params.rs b/client/cli/src/params/shared_params.rs index c0317c280a9d0..5ded5846e34c3 100644 --- a/client/cli/src/params/shared_params.rs +++ b/client/cli/src/params/shared_params.rs @@ -16,10 +16,10 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
+use crate::arg_enums::TracingReceiver; use sc_service::config::BasePath; use std::path::PathBuf; use structopt::StructOpt; -use crate::arg_enums::TracingReceiver; /// Shared parameters used by all `CoreParams`. #[derive(Debug, StructOpt, Clone)] @@ -88,13 +88,12 @@ impl SharedParams { pub fn chain_id(&self, is_dev: bool) -> String { match self.chain { Some(ref chain) => chain.clone(), - None => { + None => if is_dev { "dev".into() } else { "".into() - } - } + }, } } diff --git a/client/cli/src/runner.rs b/client/cli/src/runner.rs index 947cdd5a21e50..f305f8cbbeaff 100644 --- a/client/cli/src/runner.rs +++ b/client/cli/src/runner.rs @@ -16,19 +16,13 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use crate::CliConfiguration; -use crate::Result; -use crate::SubstrateCli; +use crate::{error::Error as CliError, CliConfiguration, Result, SubstrateCli}; use chrono::prelude::*; -use futures::pin_mut; -use futures::select; -use futures::{future, future::FutureExt, Future}; +use futures::{future, future::FutureExt, pin_mut, select, Future}; use log::info; -use sc_service::{Configuration, TaskType, TaskManager}; +use sc_service::{Configuration, Error as ServiceError, TaskManager, TaskType}; use sp_utils::metrics::{TOKIO_THREADS_ALIVE, TOKIO_THREADS_TOTAL}; use std::marker::PhantomData; -use sc_service::Error as ServiceError; -use crate::error::Error as CliError; #[cfg(target_family = "unix")] async fn main(func: F) -> std::result::Result<(), E> @@ -119,27 +113,19 @@ pub struct Runner { impl Runner { /// Create a new runtime with the command provided in argument - pub fn new( - cli: &C, - command: &T, - ) -> Result> { + pub fn new(cli: &C, command: &T) -> Result> { let tokio_runtime = build_runtime()?; let runtime_handle = tokio_runtime.handle().clone(); - let task_executor = move |fut, task_type| { - match task_type { - TaskType::Async => runtime_handle.spawn(fut).map(drop), - TaskType::Blocking => - 
runtime_handle.spawn_blocking(move || futures::executor::block_on(fut)) - .map(drop), - } + let task_executor = move |fut, task_type| match task_type { + TaskType::Async => runtime_handle.spawn(fut).map(drop), + TaskType::Blocking => runtime_handle + .spawn_blocking(move || futures::executor::block_on(fut)) + .map(drop), }; Ok(Runner { - config: command.create_configuration( - cli, - task_executor.into(), - )?, + config: command.create_configuration(cli, task_executor.into())?, tokio_runtime, phantom: PhantomData, }) @@ -183,7 +169,7 @@ impl Runner { /// A helper function that runs a command with the configuration of this node. pub fn sync_run( self, - runner: impl FnOnce(Configuration) -> std::result::Result<(), E> + runner: impl FnOnce(Configuration) -> std::result::Result<(), E>, ) -> std::result::Result<(), E> where E: std::error::Error + Send + Sync + 'static + From, @@ -194,7 +180,8 @@ impl Runner { /// A helper function that runs a future with tokio and stops if the process receives /// the signal `SIGTERM` or `SIGINT`. 
pub fn async_run( - self, runner: impl FnOnce(Configuration) -> std::result::Result<(F, TaskManager), E>, + self, + runner: impl FnOnce(Configuration) -> std::result::Result<(F, TaskManager), E>, ) -> std::result::Result<(), E> where F: Future>, @@ -219,19 +206,17 @@ impl Runner { pub fn print_node_infos(config: &Configuration) { info!("{}", C::impl_name()); info!("✌️ version {}", C::impl_version()); - info!( - "❤️ by {}, {}-{}", - C::author(), - C::copyright_start_year(), - Local::today().year(), - ); + info!("❤️ by {}, {}-{}", C::author(), C::copyright_start_year(), Local::today().year(),); info!("📋 Chain specification: {}", config.chain_spec.name()); info!("🏷 Node name: {}", config.network.node_name); info!("👤 Role: {}", config.display_role()); - info!("💾 Database: {} at {}", - config.database, - config.database.path().map_or_else(|| "".to_owned(), |p| p.display().to_string()) + info!( + "💾 Database: {} at {}", + config.database, + config + .database + .path() + .map_or_else(|| "".to_owned(), |p| p.display().to_string()) ); info!("⛓ Native runtime: {}", C::native_runtime_version(&config.chain_spec)); } - diff --git a/client/consensus/aura/src/import_queue.rs b/client/consensus/aura/src/import_queue.rs index c3faa5382686e..a8036f28f1648 100644 --- a/client/consensus/aura/src/import_queue.rs +++ b/client/consensus/aura/src/import_queue.rs @@ -18,36 +18,37 @@ //! Module implementing the logic for verifying and importing AuRa blocks. 
-use crate::{AuthorityId, find_pre_digest, slot_author, aura_err, Error, authorities}; -use std::{ - sync::Arc, marker::PhantomData, hash::Hash, fmt::Debug, -}; +use crate::{aura_err, authorities, find_pre_digest, slot_author, AuthorityId, Error}; +use codec::{Codec, Decode, Encode}; use log::{debug, info, trace}; use prometheus_endpoint::Registry; -use codec::{Encode, Decode, Codec}; +use sc_client_api::{backend::AuxStore, BlockOf, UsageProvider}; +use sc_consensus_slots::{check_equivocation, CheckedHeader, InherentDataProviderExt}; +use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_DEBUG, CONSENSUS_TRACE}; +use sp_api::{ApiExt, ProvideRuntimeApi}; +use sp_block_builder::BlockBuilder as BlockBuilderApi; +use sp_blockchain::{ + well_known_cache_keys::{self, Id as CacheKeyId}, + HeaderBackend, ProvideCache, +}; use sp_consensus::{ - BlockImport, CanAuthorWith, ForkChoiceStrategy, BlockImportParams, - BlockOrigin, Error as ConsensusError, - import_queue::{ - Verifier, BasicQueue, DefaultImportQueue, BoxJustificationImport, - }, + import_queue::{BasicQueue, BoxJustificationImport, DefaultImportQueue, Verifier}, + BlockImport, BlockImportParams, BlockOrigin, CanAuthorWith, Error as ConsensusError, + ForkChoiceStrategy, }; -use sc_client_api::{BlockOf, UsageProvider, backend::AuxStore}; -use sp_blockchain::{well_known_cache_keys::{self, Id as CacheKeyId}, ProvideCache, HeaderBackend}; -use sp_block_builder::BlockBuilder as BlockBuilderApi; -use sp_runtime::{generic::{BlockId, OpaqueDigestItemId}, Justifications}; -use sp_runtime::traits::{Block as BlockT, Header, DigestItemFor}; -use sp_api::ProvideRuntimeApi; +use sp_consensus_aura::{ + digests::CompatibleDigestItem, inherents::AuraInherentData, AuraApi, ConsensusLog, + AURA_ENGINE_ID, +}; +use sp_consensus_slots::Slot; use sp_core::crypto::Pair; use sp_inherents::{CreateInherentDataProviders, InherentDataProvider as _}; -use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_TRACE, CONSENSUS_DEBUG}; -use 
sc_consensus_slots::{CheckedHeader, check_equivocation, InherentDataProviderExt}; -use sp_consensus_slots::Slot; -use sp_api::ApiExt; -use sp_consensus_aura::{ - digests::CompatibleDigestItem, AuraApi, inherents::AuraInherentData, - ConsensusLog, AURA_ENGINE_ID, +use sp_runtime::{ + generic::{BlockId, OpaqueDigestItemId}, + traits::{Block as BlockT, DigestItemFor, Header}, + Justifications, }; +use std::{fmt::Debug, hash::Hash, marker::PhantomData, sync::Arc}; /// check a header has been signed by the right key. If the slot is too far in the future, an error /// will be returned. If it's successful, returns the pre-header and the digest item @@ -61,7 +62,8 @@ fn check_header( hash: B::Hash, authorities: &[AuthorityId

], check_for_equivocation: CheckForEquivocation, -) -> Result)>, Error> where +) -> Result)>, Error> +where DigestItemFor: CompatibleDigestItem, P::Signature: Codec, C: sc_client_api::backend::AuxStore, @@ -69,9 +71,7 @@ fn check_header( { let seal = header.digest_mut().pop().ok_or_else(|| Error::HeaderUnsealed(hash))?; - let sig = seal.as_aura_seal().ok_or_else(|| { - aura_err(Error::HeaderBadSeal(hash)) - })?; + let sig = seal.as_aura_seal().ok_or_else(|| aura_err(Error::HeaderBadSeal(hash)))?; let slot = find_pre_digest::(&header)?; @@ -81,20 +81,17 @@ fn check_header( } else { // check the signature is valid under the expected authority and // chain state. - let expected_author = slot_author::

(slot, &authorities) - .ok_or_else(|| Error::SlotAuthorNotFound)?; + let expected_author = + slot_author::

(slot, &authorities).ok_or_else(|| Error::SlotAuthorNotFound)?; let pre_hash = header.hash(); if P::verify(&sig, pre_hash.as_ref(), expected_author) { if check_for_equivocation.check_for_equivocation() { - if let Some(equivocation_proof) = check_equivocation( - client, - slot_now, - slot, - &header, - expected_author, - ).map_err(Error::Client)? { + if let Some(equivocation_proof) = + check_equivocation(client, slot_now, slot, &header, expected_author) + .map_err(Error::Client)? + { info!( target: "aura", "Slot author is equivocating at slot {} with headers {:?} and {:?}", @@ -141,7 +138,8 @@ impl AuraVerifier { } } -impl AuraVerifier where +impl AuraVerifier +where P: Send + Sync + 'static, CAW: Send + Sync + 'static, CIDP: Send, @@ -152,8 +150,10 @@ impl AuraVerifier where block_id: BlockId, inherent_data: sp_inherents::InherentData, create_inherent_data_providers: CIDP::InherentDataProviders, - ) -> Result<(), Error> where - C: ProvideRuntimeApi, C::Api: BlockBuilderApi, + ) -> Result<(), Error> + where + C: ProvideRuntimeApi, + C::Api: BlockBuilderApi, CAW: CanAuthorWith, CIDP: CreateInherentDataProviders, { @@ -167,11 +167,11 @@ impl AuraVerifier where return Ok(()) } - let inherent_res = self.client.runtime_api().check_inherents( - &block_id, - block, - inherent_data, - ).map_err(|e| Error::Client(e.into()))?; + let inherent_res = self + .client + .runtime_api() + .check_inherents(&block_id, block, inherent_data) + .map_err(|e| Error::Client(e.into()))?; if !inherent_res.ok() { for (i, e) in inherent_res.into_errors() { @@ -187,13 +187,14 @@ impl AuraVerifier where } #[async_trait::async_trait] -impl Verifier for AuraVerifier where - C: ProvideRuntimeApi + - Send + - Sync + - sc_client_api::backend::AuxStore + - ProvideCache + - BlockOf, +impl Verifier for AuraVerifier +where + C: ProvideRuntimeApi + + Send + + Sync + + sc_client_api::backend::AuxStore + + ProvideCache + + BlockOf, C::Api: BlockBuilderApi + AuraApi> + ApiExt, DigestItemFor: 
CompatibleDigestItem, P: Pair + Send + Sync + 'static, @@ -215,15 +216,14 @@ impl Verifier for AuraVerifier w let authorities = authorities(self.client.as_ref(), &BlockId::Hash(parent_hash)) .map_err(|e| format!("Could not fetch authorities at {:?}: {:?}", parent_hash, e))?; - let create_inherent_data_providers = self.create_inherent_data_providers - .create_inherent_data_providers( - parent_hash, - (), - ) + let create_inherent_data_providers = self + .create_inherent_data_providers + .create_inherent_data_providers(parent_hash, ()) .await .map_err(|e| Error::::Client(sp_blockchain::Error::Application(e)))?; - let mut inherent_data = create_inherent_data_providers.create_inherent_data() + let mut inherent_data = create_inherent_data_providers + .create_inherent_data() .map_err(Error::::Inherent)?; let slot_now = create_inherent_data_providers.slot(); @@ -238,7 +238,8 @@ impl Verifier for AuraVerifier w hash, &authorities[..], self.check_for_equivocation, - ).map_err(|e| e.to_string())?; + ) + .map_err(|e| e.to_string())?; match checked_header { CheckedHeader::Checked(pre_header, (slot, seal)) => { // if the body is passed through, we need to use the runtime @@ -250,7 +251,8 @@ impl Verifier for AuraVerifier w inherent_data.aura_replace_inherent_data(slot); // skip the inherents verification if the runtime API is old. - if self.client + if self + .client .runtime_api() .has_api_with::, _>( &BlockId::Hash(parent_hash), @@ -263,7 +265,9 @@ impl Verifier for AuraVerifier w BlockId::Hash(parent_hash), inherent_data, create_inherent_data_providers, - ).await.map_err(|e| e.to_string())?; + ) + .await + .map_err(|e| e.to_string())?; } let (_, inner_body) = block.deconstruct(); @@ -279,16 +283,18 @@ impl Verifier for AuraVerifier w ); // Look for an authorities-change log. 
- let maybe_keys = pre_header.digest() + let maybe_keys = pre_header + .digest() .logs() .iter() - .filter_map(|l| l.try_to::>>( - OpaqueDigestItemId::Consensus(&AURA_ENGINE_ID) - )) + .filter_map(|l| { + l.try_to::>>(OpaqueDigestItemId::Consensus( + &AURA_ENGINE_ID, + )) + }) .find_map(|l| match l { - ConsensusLog::AuthoritiesChange(a) => Some( - vec![(well_known_cache_keys::AUTHORITIES, a.encode())] - ), + ConsensusLog::AuthoritiesChange(a) => + Some(vec![(well_known_cache_keys::AUTHORITIES, a.encode())]), _ => None, }); @@ -300,7 +306,7 @@ impl Verifier for AuraVerifier w import_block.post_hash = Some(hash); Ok((import_block, maybe_keys)) - } + }, CheckedHeader::Deferred(a, b) => { debug!(target: "aura", "Checking {:?} failed; {:?}, {:?}.", hash, a, b); telemetry!( @@ -312,7 +318,7 @@ impl Verifier for AuraVerifier w "b" => ?b, ); Err(format!("Header {:?} rejected: too far in the future", hash)) - } + }, } } } @@ -375,8 +381,9 @@ pub fn import_queue<'a, P, Block, I, C, S, CAW, CIDP>( can_author_with, check_for_equivocation, telemetry, - }: ImportQueueParams<'a, Block, I, C, S, CAW, CIDP> -) -> Result, sp_consensus::Error> where + }: ImportQueueParams<'a, Block, I, C, S, CAW, CIDP>, +) -> Result, sp_consensus::Error> +where Block: BlockT, C::Api: BlockBuilderApi + AuraApi> + ApiExt, C: 'static @@ -388,7 +395,7 @@ pub fn import_queue<'a, P, Block, I, C, S, CAW, CIDP>( + AuxStore + UsageProvider + HeaderBackend, - I: BlockImport> + I: BlockImport> + Send + Sync + 'static, @@ -401,23 +408,15 @@ pub fn import_queue<'a, P, Block, I, C, S, CAW, CIDP>( CIDP: CreateInherentDataProviders + Sync + Send + 'static, CIDP::InherentDataProviders: InherentDataProviderExt + Send + Sync, { - let verifier = build_verifier::( - BuildVerifierParams { - client, - create_inherent_data_providers, - can_author_with, - check_for_equivocation, - telemetry, - }, - ); + let verifier = build_verifier::(BuildVerifierParams { + client, + create_inherent_data_providers, + can_author_with, + 
check_for_equivocation, + telemetry, + }); - Ok(BasicQueue::new( - verifier, - Box::new(block_import), - justification_import, - spawner, - registry, - )) + Ok(BasicQueue::new(verifier, Box::new(block_import), justification_import, spawner, registry)) } /// Parameters of [`build_verifier`]. @@ -442,7 +441,7 @@ pub fn build_verifier( can_author_with, check_for_equivocation, telemetry, - }: BuildVerifierParams + }: BuildVerifierParams, ) -> AuraVerifier { AuraVerifier::<_, P, _, _>::new( client, diff --git a/client/consensus/aura/src/lib.rs b/client/consensus/aura/src/lib.rs index 72545eda077ba..341b0ed25cc46 100644 --- a/client/consensus/aura/src/lib.rs +++ b/client/consensus/aura/src/lib.rs @@ -31,50 +31,53 @@ //! NOTE: Aura itself is designed to be generic over the crypto used. #![forbid(missing_docs, unsafe_code)] use std::{ - sync::Arc, marker::PhantomData, hash::Hash, fmt::Debug, pin::Pin, convert::{TryFrom, TryInto}, + fmt::Debug, + hash::Hash, + marker::PhantomData, + pin::Pin, + sync::Arc, }; use futures::prelude::*; use log::{debug, trace}; -use codec::{Encode, Decode, Codec}; +use codec::{Codec, Decode, Encode}; -use sp_consensus::{ - BlockImport, Environment, Proposer, CanAuthorWith, ForkChoiceStrategy, BlockImportParams, - BlockOrigin, Error as ConsensusError, SelectChain, StateAction, -}; use sc_client_api::{backend::AuxStore, BlockOf, UsageProvider}; -use sp_blockchain::{Result as CResult, ProvideCache, HeaderBackend}; -use sp_core::crypto::Public; -use sp_application_crypto::{AppKey, AppPublic}; -use sp_runtime::{generic::BlockId, traits::NumberFor}; -use sp_runtime::traits::{Block as BlockT, Header, DigestItemFor, Zero, Member}; -use sp_api::ProvideRuntimeApi; -use sp_core::crypto::Pair; -use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; -use sp_inherents::CreateInherentDataProviders; -use sc_telemetry::TelemetryHandle; use sc_consensus_slots::{ - SlotInfo, BackoffAuthoringBlocksStrategy, InherentDataProviderExt, StorageChanges, + 
BackoffAuthoringBlocksStrategy, InherentDataProviderExt, SlotInfo, StorageChanges, +}; +use sc_telemetry::TelemetryHandle; +use sp_api::ProvideRuntimeApi; +use sp_application_crypto::{AppKey, AppPublic}; +use sp_blockchain::{HeaderBackend, ProvideCache, Result as CResult}; +use sp_consensus::{ + BlockImport, BlockImportParams, BlockOrigin, CanAuthorWith, Environment, + Error as ConsensusError, ForkChoiceStrategy, Proposer, SelectChain, StateAction, }; use sp_consensus_slots::Slot; +use sp_core::crypto::{Pair, Public}; +use sp_inherents::CreateInherentDataProviders; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, DigestItemFor, Header, Member, NumberFor, Zero}, +}; mod import_queue; -pub use sp_consensus_aura::{ - ConsensusLog, AuraApi, AURA_ENGINE_ID, digests::CompatibleDigestItem, - inherents::{ - InherentType as AuraInherent, - INHERENT_IDENTIFIER, InherentDataProvider, - }, -}; -pub use sp_consensus::SyncOracle; pub use import_queue::{ - ImportQueueParams, import_queue, CheckForEquivocation, - build_verifier, BuildVerifierParams, AuraVerifier, + build_verifier, import_queue, AuraVerifier, BuildVerifierParams, CheckForEquivocation, + ImportQueueParams, }; pub use sc_consensus_slots::SlotProportion; +pub use sp_consensus::SyncOracle; +pub use sp_consensus_aura::{ + digests::CompatibleDigestItem, + inherents::{InherentDataProvider, InherentType as AuraInherent, INHERENT_IDENTIFIER}, + AuraApi, ConsensusLog, AURA_ENGINE_ID, +}; type AuthorityId

=

::Public; @@ -82,7 +85,8 @@ type AuthorityId

=

::Public; pub type SlotDuration = sc_consensus_slots::SlotDuration; /// Get type of `SlotDuration` for Aura. -pub fn slot_duration(client: &C) -> CResult where +pub fn slot_duration(client: &C) -> CResult +where A: Codec, B: BlockT, C: AuxStore + ProvideRuntimeApi + UsageProvider, @@ -93,7 +97,9 @@ pub fn slot_duration(client: &C) -> CResult where /// Get slot author for given block along with authorities. fn slot_author(slot: Slot, authorities: &[AuthorityId

]) -> Option<&AuthorityId

> { - if authorities.is_empty() { return None } + if authorities.is_empty() { + return None + } let idx = *slot % (authorities.len() as u64); assert!( @@ -101,9 +107,10 @@ fn slot_author(slot: Slot, authorities: &[AuthorityId

]) -> Option<&A "It is impossible to have a vector with length beyond the address space; qed", ); - let current_author = authorities.get(idx as usize) - .expect("authorities not empty; index constrained to list length;\ - this is a valid index; qed"); + let current_author = authorities.get(idx as usize).expect( + "authorities not empty; index constrained to list length;\ + this is a valid index; qed", + ); Some(current_author) } @@ -325,9 +332,8 @@ where type BlockImport = I; type SyncOracle = SO; type JustificationSyncLink = L; - type CreateProposer = Pin> + Send + 'static - >>; + type CreateProposer = + Pin> + Send + 'static>>; type Proposer = E::Proposer; type Claim = P::Public; type EpochData = Vec>; @@ -376,22 +382,25 @@ where slot: Slot, _claim: &Self::Claim, ) -> Vec> { - vec![ - as CompatibleDigestItem>::aura_pre_digest(slot), - ] + vec![ as CompatibleDigestItem>::aura_pre_digest(slot)] } - fn block_import_params(&self) -> Box, - StorageChanges, B>, - Self::Claim, - Self::EpochData, - ) -> Result< - sp_consensus::BlockImportParams>, - sp_consensus::Error> + Send + 'static> - { + fn block_import_params( + &self, + ) -> Box< + dyn Fn( + B::Header, + &B::Hash, + Vec, + StorageChanges, B>, + Self::Claim, + Self::EpochData, + ) -> Result< + sp_consensus::BlockImportParams>, + sp_consensus::Error, + > + Send + + 'static, + > { let keystore = self.keystore.clone(); Box::new(move |header, header_hash, body, storage_changes, public, _epoch| { // sign the pre-sealed hash of the block and then @@ -402,28 +411,28 @@ where &*keystore, as AppKey>::ID, &public_type_pair, - header_hash.as_ref() - ).map_err(|e| sp_consensus::Error::CannotSign( - public.clone(), e.to_string(), - ))? 
- .ok_or_else(|| sp_consensus::Error::CannotSign( - public.clone(), "Could not find key in keystore.".into(), - ))?; - let signature = signature.clone().try_into() - .map_err(|_| sp_consensus::Error::InvalidSignature( - signature, public - ))?; - - let signature_digest_item = < - DigestItemFor as CompatibleDigestItem - >::aura_seal(signature); + header_hash.as_ref(), + ) + .map_err(|e| sp_consensus::Error::CannotSign(public.clone(), e.to_string()))? + .ok_or_else(|| { + sp_consensus::Error::CannotSign( + public.clone(), + "Could not find key in keystore.".into(), + ) + })?; + let signature = signature + .clone() + .try_into() + .map_err(|_| sp_consensus::Error::InvalidSignature(signature, public))?; + + let signature_digest_item = + as CompatibleDigestItem>::aura_seal(signature); let mut import_block = BlockImportParams::new(BlockOrigin::Own, header); import_block.post_digests.push(signature_digest_item); import_block.body = Some(body); - import_block.state_action = StateAction::ApplyChanges( - sp_consensus::StorageChanges::Changes(storage_changes) - ); + import_block.state_action = + StateAction::ApplyChanges(sp_consensus::StorageChanges::Changes(storage_changes)); import_block.fork_choice = Some(ForkChoiceStrategy::LongestChain); Ok(import_block) @@ -443,7 +452,7 @@ where self.client.info().finalized_number, slot, self.logging_target(), - ); + ) } } false @@ -458,9 +467,11 @@ where } fn proposer(&mut self, block: &B::Header) -> Self::CreateProposer { - Box::pin(self.env.init(block).map_err(|e| { - sp_consensus::Error::ClientImport(format!("{:?}", e)).into() - })) + Box::pin( + self.env + .init(block) + .map_err(|e| sp_consensus::Error::ClientImport(format!("{:?}", e)).into()), + ) } fn telemetry(&self) -> Option { @@ -515,7 +526,7 @@ impl std::convert::From> for String { fn find_pre_digest(header: &B::Header) -> Result> { if header.number().is_zero() { - return Ok(0.into()); + return Ok(0.into()) } let mut pre_digest: Option = None; @@ -530,13 +541,15 @@ fn 
find_pre_digest(header: &B::Header) -> Result(client: &C, at: &BlockId) -> Result, ConsensusError> where +fn authorities(client: &C, at: &BlockId) -> Result, ConsensusError> +where A: Codec + Debug, B: BlockT, C: ProvideRuntimeApi + BlockOf + ProvideCache, C::Api: AuraApi, { - client.runtime_api() + client + .runtime_api() .authorities(at) .ok() .ok_or_else(|| sp_consensus::Error::InvalidAuthoritiesSet.into()) @@ -545,26 +558,31 @@ fn authorities(client: &C, at: &BlockId) -> Result, Consensus #[cfg(test)] mod tests { use super::*; - use sp_consensus::{ - NoNetwork as DummyOracle, Proposal, AlwaysCanAuthor, DisableProofRecording, - import_queue::BoxJustificationImport, SlotData, - }; - use sc_network_test::{Block as TestBlock, *}; - use sp_runtime::traits::{Block as BlockT, DigestFor}; - use sc_network::config::ProtocolConfig; use parking_lot::Mutex; - use sp_keyring::sr25519::Keyring; - use sc_client_api::BlockchainEvents; - use sp_consensus_aura::sr25519::AuthorityPair; - use sc_consensus_slots::{SimpleSlotWorker, BackoffAuthoringOnFinalizedHeadLagging}; - use std::{task::Poll, time::{Instant, Duration}}; use sc_block_builder::BlockBuilderProvider; - use sp_runtime::traits::Header as _; - use substrate_test_runtime_client::{TestClient, runtime::{Header, H256}}; + use sc_client_api::BlockchainEvents; + use sc_consensus_slots::{BackoffAuthoringOnFinalizedHeadLagging, SimpleSlotWorker}; use sc_keystore::LocalKeystore; + use sc_network::config::ProtocolConfig; + use sc_network_test::{Block as TestBlock, *}; use sp_application_crypto::key_types::AURA; + use sp_consensus::{ + import_queue::BoxJustificationImport, AlwaysCanAuthor, DisableProofRecording, + NoNetwork as DummyOracle, Proposal, SlotData, + }; + use sp_consensus_aura::sr25519::AuthorityPair; use sp_inherents::InherentData; + use sp_keyring::sr25519::Keyring; + use sp_runtime::traits::{Block as BlockT, DigestFor, Header as _}; use sp_timestamp::InherentDataProvider as TimestampInherentDataProvider; + use 
std::{ + task::Poll, + time::{Duration, Instant}, + }; + use substrate_test_runtime_client::{ + runtime::{Header, H256}, + TestClient, + }; type Error = sp_blockchain::Error; @@ -576,19 +594,15 @@ mod tests { type CreateProposer = futures::future::Ready>; type Error = Error; - fn init(&mut self, parent_header: &::Header) - -> Self::CreateProposer - { + fn init(&mut self, parent_header: &::Header) -> Self::CreateProposer { futures::future::ready(Ok(DummyProposer(parent_header.number + 1, self.0.clone()))) } } impl Proposer for DummyProposer { type Error = Error; - type Transaction = sc_client_api::TransactionFor< - substrate_test_runtime_client::Backend, - TestBlock - >; + type Transaction = + sc_client_api::TransactionFor; type Proposal = future::Ready, Error>>; type ProofRecording = DisableProofRecording; type Proof = (); @@ -616,11 +630,13 @@ mod tests { PeersFullClient, AuthorityPair, AlwaysCanAuthor, - Box> + Box< + dyn CreateInherentDataProviders< + TestBlock, + (), + InherentDataProviders = (TimestampInherentDataProvider, InherentDataProvider), + >, + >, >; type AuraPeer = Peer<(), PeersClient>; @@ -635,14 +651,15 @@ mod tests { /// Create new test network with peers and given config. 
fn from_config(_config: &ProtocolConfig) -> Self { - AuraTestNet { - peers: Vec::new(), - } + AuraTestNet { peers: Vec::new() } } - fn make_verifier(&self, client: PeersClient, _cfg: &ProtocolConfig, _peer_data: &()) - -> Self::Verifier - { + fn make_verifier( + &self, + client: PeersClient, + _cfg: &ProtocolConfig, + _peer_data: &(), + ) -> Self::Verifier { match client { PeersClient::Full(client, _) => { let slot_duration = slot_duration(&*client).expect("slot duration available"); @@ -668,7 +685,10 @@ mod tests { } } - fn make_block_import(&self, client: PeersClient) -> ( + fn make_block_import( + &self, + client: PeersClient, + ) -> ( BlockImportAdapter, Option>, Self::PeerData, @@ -693,11 +713,7 @@ mod tests { sp_tracing::try_init_simple(); let net = AuraTestNet::new(3); - let peers = &[ - (0, Keyring::Alice), - (1, Keyring::Bob), - (2, Keyring::Charlie), - ]; + let peers = &[(0, Keyring::Alice), (1, Keyring::Bob), (2, Keyring::Charlie)]; let net = Arc::new(Mutex::new(net)); let mut import_notifications = Vec::new(); @@ -710,9 +726,9 @@ mod tests { let client = peer.client().as_full().expect("full clients are created").clone(); let select_chain = peer.select_chain().expect("full client has a select chain"); let keystore_path = tempfile::tempdir().expect("Creates keystore path"); - let keystore = Arc::new(LocalKeystore::open(keystore_path.path(), None) - .expect("Creates keystore.")); - + let keystore = Arc::new( + LocalKeystore::open(keystore_path.path(), None).expect("Creates keystore."), + ); SyncCryptoStore::sr25519_generate_new(&*keystore, AURA, Some(&key.to_seed())) .expect("Creates authority key"); @@ -720,38 +736,46 @@ mod tests { let environ = DummyFactory(client.clone()); import_notifications.push( - client.import_notification_stream() - .take_while(|n| future::ready(!(n.origin != BlockOrigin::Own && n.header.number() < &5))) - .for_each(move |_| future::ready(())) + client + .import_notification_stream() + .take_while(|n| { + future::ready(!(n.origin 
!= BlockOrigin::Own && n.header.number() < &5)) + }) + .for_each(move |_| future::ready(())), ); let slot_duration = slot_duration(&*client).expect("slot duration available"); - aura_futures.push(start_aura::(StartAuraParams { - slot_duration, - block_import: client.clone(), - select_chain, - client, - proposer_factory: environ, - sync_oracle: DummyOracle, - justification_sync_link: (), - create_inherent_data_providers: |_, _| async { - let timestamp = TimestampInherentDataProvider::from_system_time(); - let slot = InherentDataProvider::from_timestamp_and_duration( - *timestamp, - Duration::from_secs(6), - ); - - Ok((timestamp, slot)) - }, - force_authoring: false, - backoff_authoring_blocks: Some(BackoffAuthoringOnFinalizedHeadLagging::default()), - keystore, - can_author_with: sp_consensus::AlwaysCanAuthor, - block_proposal_slot_portion: SlotProportion::new(0.5), - max_block_proposal_slot_portion: None, - telemetry: None, - }).expect("Starts aura")); + aura_futures.push( + start_aura::(StartAuraParams { + slot_duration, + block_import: client.clone(), + select_chain, + client, + proposer_factory: environ, + sync_oracle: DummyOracle, + justification_sync_link: (), + create_inherent_data_providers: |_, _| async { + let timestamp = TimestampInherentDataProvider::from_system_time(); + let slot = InherentDataProvider::from_timestamp_and_duration( + *timestamp, + Duration::from_secs(6), + ); + + Ok((timestamp, slot)) + }, + force_authoring: false, + backoff_authoring_blocks: Some( + BackoffAuthoringOnFinalizedHeadLagging::default(), + ), + keystore, + can_author_with: sp_consensus::AlwaysCanAuthor, + block_proposal_slot_portion: SlotProportion::new(0.5), + max_block_proposal_slot_portion: None, + telemetry: None, + }) + .expect("Starts aura"), + ); } futures::executor::block_on(future::select( @@ -759,10 +783,7 @@ mod tests { net.lock().poll(cx); Poll::<()>::Pending }), - future::select( - future::join_all(aura_futures), - future::join_all(import_notifications) - ) + 
future::select(future::join_all(aura_futures), future::join_all(import_notifications)), )); } @@ -771,11 +792,14 @@ mod tests { let client = substrate_test_runtime_client::new(); assert_eq!(client.chain_info().best_number, 0); - assert_eq!(authorities(&client, &BlockId::Number(0)).unwrap(), vec![ - Keyring::Alice.public().into(), - Keyring::Bob.public().into(), - Keyring::Charlie.public().into() - ]); + assert_eq!( + authorities(&client, &BlockId::Number(0)).unwrap(), + vec![ + Keyring::Alice.public().into(), + Keyring::Bob.public().into(), + Keyring::Charlie.public().into() + ] + ); } #[test] @@ -785,12 +809,11 @@ mod tests { let mut authorities = vec![ Keyring::Alice.public().into(), Keyring::Bob.public().into(), - Keyring::Charlie.public().into() + Keyring::Charlie.public().into(), ]; let keystore_path = tempfile::tempdir().expect("Creates keystore path"); - let keystore = LocalKeystore::open(keystore_path.path(), None) - .expect("Creates keystore."); + let keystore = LocalKeystore::open(keystore_path.path(), None).expect("Creates keystore."); let public = SyncCryptoStore::sr25519_generate_new(&keystore, AuthorityPair::ID, None) .expect("Key should be created"); authorities.push(public.into()); @@ -822,7 +845,7 @@ mod tests { H256::from_low_u64_be(0), H256::from_low_u64_be(0), Default::default(), - Default::default() + Default::default(), ); assert!(worker.claim_slot(&head, 0.into(), &authorities).is_none()); assert!(worker.claim_slot(&head, 1.into(), &authorities).is_none()); @@ -839,12 +862,13 @@ mod tests { let net = AuraTestNet::new(4); let keystore_path = tempfile::tempdir().expect("Creates keystore path"); - let keystore = LocalKeystore::open(keystore_path.path(), None) - .expect("Creates keystore."); + let keystore = LocalKeystore::open(keystore_path.path(), None).expect("Creates keystore."); SyncCryptoStore::sr25519_generate_new( &keystore, - AuthorityPair::ID, Some(&Keyring::Alice.to_seed()), - ).expect("Key should be created"); + AuthorityPair::ID, + 
Some(&Keyring::Alice.to_seed()), + ) + .expect("Key should be created"); let net = Arc::new(Mutex::new(net)); @@ -870,17 +894,16 @@ mod tests { let head = client.header(&BlockId::Number(0)).unwrap().unwrap(); - let res = futures::executor::block_on(worker.on_slot( - SlotInfo { - slot: 0.into(), - timestamp: 0.into(), - ends_at: Instant::now() + Duration::from_secs(100), - inherent_data: InherentData::new(), - duration: Duration::from_millis(1000), - chain_head: head, - block_size_limit: None, - } - )).unwrap(); + let res = futures::executor::block_on(worker.on_slot(SlotInfo { + slot: 0.into(), + timestamp: 0.into(), + ends_at: Instant::now() + Duration::from_secs(100), + inherent_data: InherentData::new(), + duration: Duration::from_millis(1000), + chain_head: head, + block_size_limit: None, + })) + .unwrap(); // The returned block should be imported and we should be able to get its header by now. assert!(client.header(&BlockId::Hash(res.block.hash())).unwrap().is_some()); diff --git a/client/consensus/babe/rpc/src/lib.rs b/client/consensus/babe/rpc/src/lib.rs index e16c24acaca36..e85a430655371 100644 --- a/client/consensus/babe/rpc/src/lib.rs +++ b/client/consensus/babe/rpc/src/lib.rs @@ -18,30 +18,21 @@ //! RPC api for babe. 
-use sc_consensus_babe::{Epoch, authorship, Config}; use futures::{FutureExt as _, TryFutureExt as _}; -use jsonrpc_core::{ - Error as RpcError, - futures::future as rpc_future, -}; +use jsonrpc_core::{futures::future as rpc_future, Error as RpcError}; use jsonrpc_derive::rpc; +use sc_consensus_babe::{authorship, Config, Epoch}; use sc_consensus_epochs::{descendent_query, Epoch as EpochT, SharedEpochChanges}; -use sp_consensus_babe::{ - AuthorityId, - BabeApi as BabeRuntimeApi, - digests::PreDigest, -}; +use sc_rpc_api::DenyUnsafe; use serde::{Deserialize, Serialize}; -use sp_core::{ - crypto::Public, -}; +use sp_api::{BlockId, ProvideRuntimeApi}; use sp_application_crypto::AppKey; -use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; -use sc_rpc_api::DenyUnsafe; -use sp_api::{ProvideRuntimeApi, BlockId}; +use sp_blockchain::{Error as BlockChainError, HeaderBackend, HeaderMetadata}; +use sp_consensus::{Error as ConsensusError, SelectChain}; +use sp_consensus_babe::{digests::PreDigest, AuthorityId, BabeApi as BabeRuntimeApi}; +use sp_core::crypto::Public; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; use sp_runtime::traits::{Block as BlockT, Header as _}; -use sp_consensus::{SelectChain, Error as ConsensusError}; -use sp_blockchain::{HeaderBackend, HeaderMetadata, Error as BlockChainError}; use std::{collections::HashMap, sync::Arc}; type FutureResult = Box + Send>; @@ -81,14 +72,7 @@ impl BabeRpcHandler { select_chain: SC, deny_unsafe: DenyUnsafe, ) -> Self { - Self { - client, - shared_epoch_changes, - keystore, - babe_config, - select_chain, - deny_unsafe, - } + Self { client, shared_epoch_changes, keystore, babe_config, select_chain, deny_unsafe } } } @@ -104,16 +88,10 @@ where { fn epoch_authorship(&self) -> FutureResult> { if let Err(err) = self.deny_unsafe.check_if_safe() { - return Box::new(rpc_future::err(err.into())); + return Box::new(rpc_future::err(err.into())) } - let ( - babe_config, - keystore, - shared_epoch, - client, - select_chain, 
- ) = ( + let (babe_config, keystore, shared_epoch, client, select_chain) = ( self.babe_config.clone(), self.keystore.clone(), self.shared_epoch_changes.clone(), @@ -126,14 +104,9 @@ where .runtime_api() .current_epoch_start(&BlockId::Hash(header.hash())) .map_err(|err| Error::StringError(format!("{:?}", err)))?; - let epoch = epoch_data( - &shared_epoch, - &client, - &babe_config, - *epoch_start, - &select_chain, - ) - .await?; + let epoch = + epoch_data(&shared_epoch, &client, &babe_config, *epoch_start, &select_chain) + .await?; let (epoch_start, epoch_end) = (epoch.start_slot(), epoch.end_slot()); let mut claims: HashMap = HashMap::new(); @@ -163,10 +136,10 @@ where match claim { PreDigest::Primary { .. } => { claims.entry(key).or_default().primary.push(slot); - } + }, PreDigest::SecondaryPlain { .. } => { claims.entry(key).or_default().secondary.push(slot); - } + }, PreDigest::SecondaryVRF { .. } => { claims.entry(key).or_default().secondary_vrf.push(slot.into()); }, @@ -199,7 +172,7 @@ pub enum Error { /// Consensus error Consensus(ConsensusError), /// Errors that can be formatted as a String - StringError(String) + StringError(String), } impl From for jsonrpc_core::Error { @@ -226,13 +199,15 @@ where SC: SelectChain, { let parent = select_chain.best_chain().await?; - epoch_changes.shared_data().epoch_data_for_child_of( - descendent_query(&**client), - &parent.hash(), - parent.number().clone(), - slot.into(), - |slot| Epoch::genesis(&babe_config, slot), - ) + epoch_changes + .shared_data() + .epoch_data_for_child_of( + descendent_query(&**client), + &parent.hash(), + parent.number().clone(), + slot.into(), + |slot| Epoch::genesis(&babe_config, slot), + ) .map_err(|e| Error::Consensus(ConsensusError::ChainLookup(format!("{:?}", e))))? 
.ok_or(Error::Consensus(ConsensusError::InvalidAuthoritiesSet)) } @@ -240,31 +215,27 @@ where #[cfg(test)] mod tests { use super::*; + use sc_keystore::LocalKeystore; + use sp_application_crypto::AppPair; + use sp_core::crypto::key_types::BABE; + use sp_keyring::Sr25519Keyring; + use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; use substrate_test_runtime_client::{ - runtime::Block, - Backend, - DefaultTestClientBuilderExt, - TestClient, + runtime::Block, Backend, DefaultTestClientBuilderExt, TestClient, TestClientBuilder, TestClientBuilderExt, - TestClientBuilder, }; - use sp_application_crypto::AppPair; - use sp_keyring::Sr25519Keyring; - use sp_core::{crypto::key_types::BABE}; - use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; - use sc_keystore::LocalKeystore; - use std::sync::Arc; - use sc_consensus_babe::{Config, block_import, AuthorityPair}; use jsonrpc_core::IoHandler; + use sc_consensus_babe::{block_import, AuthorityPair, Config}; + use std::sync::Arc; /// creates keystore backed by a temp file fn create_temp_keystore( authority: Sr25519Keyring, ) -> (SyncCryptoStorePtr, tempfile::TempDir) { let keystore_path = tempfile::tempdir().expect("Creates keystore path"); - let keystore = Arc::new(LocalKeystore::open(keystore_path.path(), None) - .expect("Creates keystore")); + let keystore = + Arc::new(LocalKeystore::open(keystore_path.path(), None).expect("Creates keystore")); SyncCryptoStore::sr25519_generate_new(&*keystore, BABE, Some(&authority.to_seed())) .expect("Creates authority key"); @@ -272,17 +243,14 @@ mod tests { } fn test_babe_rpc_handler( - deny_unsafe: DenyUnsafe + deny_unsafe: DenyUnsafe, ) -> BabeRpcHandler> { let builder = TestClientBuilder::new(); let (client, longest_chain) = builder.build_with_longest_chain(); let client = Arc::new(client); let config = Config::get_or_compute(&*client).expect("config available"); - let (_, link) = block_import( - config.clone(), - client.clone(), - client.clone(), - ).expect("can initialize 
block-import"); + let (_, link) = block_import(config.clone(), client.clone(), client.clone()) + .expect("can initialize block-import"); let epoch_changes = link.epoch_changes().clone(); let keystore = create_temp_keystore::(Sr25519Keyring::Alice).0; diff --git a/client/consensus/babe/src/authorship.rs b/client/consensus/babe/src/authorship.rs index 2a90ca3b94c00..609f96c83c194 100644 --- a/client/consensus/babe/src/authorship.rs +++ b/client/consensus/babe/src/authorship.rs @@ -18,23 +18,17 @@ //! BABE authority selection and slot claiming. +use super::Epoch; +use codec::Encode; +use schnorrkel::{keys::PublicKey, vrf::VRFInOut}; use sp_application_crypto::AppKey; use sp_consensus_babe::{ - BABE_VRF_PREFIX, AuthorityId, BabeAuthorityWeight, make_transcript, make_transcript_data, - Slot, -}; -use sp_consensus_babe::digests::{ - PreDigest, PrimaryPreDigest, SecondaryPlainPreDigest, SecondaryVRFPreDigest, + digests::{PreDigest, PrimaryPreDigest, SecondaryPlainPreDigest, SecondaryVRFPreDigest}, + make_transcript, make_transcript_data, AuthorityId, BabeAuthorityWeight, Slot, BABE_VRF_PREFIX, }; use sp_consensus_vrf::schnorrkel::{VRFOutput, VRFProof}; -use sp_core::{U256, blake2_256, crypto::Public}; -use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; -use codec::Encode; -use schnorrkel::{ - keys::PublicKey, - vrf::VRFInOut, -}; -use super::Epoch; +use sp_core::{blake2_256, crypto::Public, U256}; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; /// Calculates the primary selection threshold for a given authority, taking /// into account `c` (`1 - c` represents the probability of a slot being empty). 
@@ -49,8 +43,7 @@ pub(super) fn calculate_primary_threshold( let c = c.0 as f64 / c.1 as f64; - let theta = - authorities[authority_index].1 as f64 / + let theta = authorities[authority_index].1 as f64 / authorities.iter().map(|(_, weight)| weight).sum::() as f64; assert!(theta > 0.0, "authority with weight 0."); @@ -74,14 +67,14 @@ pub(super) fn calculate_primary_threshold( "returns None when the given value is negative; \ p is defined as `1 - n` where n is defined in (0, 1]; \ p must be a value in [0, 1); \ - qed." + qed.", ); let denom = p.denom().to_biguint().expect( "returns None when the given value is negative; \ p is defined as `1 - n` where n is defined in (0, 1]; \ p must be a value in [0, 1); \ - qed." + qed.", ); ((BigUint::one() << 128) * numer / denom).to_u128().expect( @@ -108,7 +101,7 @@ pub(super) fn secondary_slot_author( randomness: [u8; 32], ) -> Option<&AuthorityId> { if authorities.is_empty() { - return None; + return None } let rand = U256::from((randomness, slot).using_encoded(blake2_256)); @@ -116,9 +109,10 @@ pub(super) fn secondary_slot_author( let authorities_len = U256::from(authorities.len()); let idx = rand % authorities_len; - let expected_author = authorities.get(idx.as_u32() as usize) - .expect("authorities not empty; index constrained to list length; \ - this is a valid index; qed"); + let expected_author = authorities.get(idx.as_u32() as usize).expect( + "authorities not empty; index constrained to list length; \ + this is a valid index; qed", + ); Some(&expected_author.0) } @@ -136,23 +130,15 @@ fn claim_secondary_slot( let Epoch { authorities, randomness, epoch_index, .. 
} = epoch; if authorities.is_empty() { - return None; + return None } - let expected_author = secondary_slot_author( - slot, - authorities, - *randomness, - )?; + let expected_author = secondary_slot_author(slot, authorities, *randomness)?; for (authority_id, authority_index) in keys { if authority_id == expected_author { let pre_digest = if author_secondary_vrf { - let transcript_data = make_transcript_data( - randomness, - slot, - *epoch_index, - ); + let transcript_data = make_transcript_data(randomness, slot, *epoch_index); let result = SyncCryptoStore::sr25519_vrf_sign( &**keystore, AuthorityId::ID, @@ -169,7 +155,10 @@ fn claim_secondary_slot( } else { None } - } else if SyncCryptoStore::has_keys(&**keystore, &[(authority_id.to_raw_vec(), AuthorityId::ID)]) { + } else if SyncCryptoStore::has_keys( + &**keystore, + &[(authority_id.to_raw_vec(), AuthorityId::ID)], + ) { Some(PreDigest::SecondaryPlain(SecondaryPlainPreDigest { slot, authority_index: *authority_index as u32, @@ -179,7 +168,7 @@ fn claim_secondary_slot( }; if let Some(pre_digest) = pre_digest { - return Some((pre_digest, authority_id.clone())); + return Some((pre_digest, authority_id.clone())) } } } @@ -196,7 +185,9 @@ pub fn claim_slot( epoch: &Epoch, keystore: &SyncCryptoStorePtr, ) -> Option<(PreDigest, AuthorityId)> { - let authorities = epoch.authorities.iter() + let authorities = epoch + .authorities + .iter() .enumerate() .map(|(index, a)| (a.0.clone(), index)) .collect::>(); @@ -211,22 +202,21 @@ pub fn claim_slot_using_keys( keystore: &SyncCryptoStorePtr, keys: &[(AuthorityId, usize)], ) -> Option<(PreDigest, AuthorityId)> { - claim_primary_slot(slot, epoch, epoch.config.c, keystore, &keys) - .or_else(|| { - if epoch.config.allowed_slots.is_secondary_plain_slots_allowed() || - epoch.config.allowed_slots.is_secondary_vrf_slots_allowed() - { - claim_secondary_slot( - slot, - &epoch, - keys, - &keystore, - epoch.config.allowed_slots.is_secondary_vrf_slots_allowed(), - ) - } else { - None - } 
- }) + claim_primary_slot(slot, epoch, epoch.config.c, keystore, &keys).or_else(|| { + if epoch.config.allowed_slots.is_secondary_plain_slots_allowed() || + epoch.config.allowed_slots.is_secondary_vrf_slots_allowed() + { + claim_secondary_slot( + slot, + &epoch, + keys, + &keystore, + epoch.config.allowed_slots.is_secondary_vrf_slots_allowed(), + ) + } else { + None + } + }) } /// Claim a primary slot if it is our turn. Returns `None` if it is not our turn. @@ -243,16 +233,8 @@ fn claim_primary_slot( let Epoch { authorities, randomness, epoch_index, .. } = epoch; for (authority_id, authority_index) in keys { - let transcript = make_transcript( - randomness, - slot, - *epoch_index - ); - let transcript_data = make_transcript_data( - randomness, - slot, - *epoch_index - ); + let transcript = make_transcript(randomness, slot, *epoch_index); + let transcript_data = make_transcript_data(randomness, slot, *epoch_index); // Compute the threshold we will use. // // We already checked that authorities contains `key.public()`, so it can't @@ -279,7 +261,7 @@ fn claim_primary_slot( authority_index: *authority_index as u32, }); - return Some((pre_digest, authority_id.clone())); + return Some((pre_digest, authority_id.clone())) } } } @@ -290,10 +272,10 @@ fn claim_primary_slot( #[cfg(test)] mod tests { use super::*; - use std::sync::Arc; - use sp_core::{sr25519::Pair, crypto::Pair as _}; - use sp_consensus_babe::{AuthorityId, BabeEpochConfiguration, AllowedSlots}; use sc_keystore::LocalKeystore; + use sp_consensus_babe::{AllowedSlots, AuthorityId, BabeEpochConfiguration}; + use sp_core::{crypto::Pair as _, sr25519::Pair}; + use std::sync::Arc; #[test] fn claim_secondary_plain_slot_works() { @@ -302,7 +284,8 @@ mod tests { &*keystore, AuthorityId::ID, Some(sp_core::crypto::DEV_PHRASE), - ).unwrap(); + ) + .unwrap(); let authorities = vec![ (AuthorityId::from(Pair::generate().0.public()), 5), diff --git a/client/consensus/babe/src/aux_schema.rs 
b/client/consensus/babe/src/aux_schema.rs index 69c1a1930bbb5..4be7dff3eedcd 100644 --- a/client/consensus/babe/src/aux_schema.rs +++ b/client/consensus/babe/src/aux_schema.rs @@ -18,15 +18,15 @@ //! Schema for BABE epoch changes in the aux-db. -use log::info; use codec::{Decode, Encode}; +use log::info; +use crate::{migration::EpochV0, Epoch}; use sc_client_api::backend::AuxStore; -use sp_blockchain::{Result as ClientResult, Error as ClientError}; -use sp_runtime::traits::Block as BlockT; +use sc_consensus_epochs::{migration::EpochChangesForV0, EpochChangesFor, SharedEpochChanges}; +use sp_blockchain::{Error as ClientError, Result as ClientResult}; use sp_consensus_babe::{BabeBlockWeight, BabeGenesisConfiguration}; -use sc_consensus_epochs::{EpochChangesFor, SharedEpochChanges, migration::EpochChangesForV0}; -use crate::{Epoch, migration::EpochV0}; +use sp_runtime::traits::Block as BlockT; const BABE_EPOCH_CHANGES_VERSION: &[u8] = b"babe_epoch_changes_version"; const BABE_EPOCH_CHANGES_KEY: &[u8] = b"babe_epoch_changes"; @@ -38,16 +38,16 @@ pub fn block_weight_key(block_hash: H) -> Vec { } fn load_decode(backend: &B, key: &[u8]) -> ClientResult> - where - B: AuxStore, - T: Decode, +where + B: AuxStore, + T: Decode, { let corrupt = |e: codec::Error| { ClientError::Backend(format!("BABE DB is corrupted. Decode error: {}", e)) }; match backend.get_aux(key)? 
{ None => Ok(None), - Some(t) => T::decode(&mut &t[..]).map(Some).map_err(corrupt) + Some(t) => T::decode(&mut &t[..]).map(Some).map_err(corrupt), } } @@ -59,32 +59,26 @@ pub fn load_epoch_changes( let version = load_decode::<_, u32>(backend, BABE_EPOCH_CHANGES_VERSION)?; let maybe_epoch_changes = match version { - None => load_decode::<_, EpochChangesForV0>( - backend, - BABE_EPOCH_CHANGES_KEY, - )?.map(|v0| v0.migrate().map(|_, _, epoch| epoch.migrate(config))), - Some(1) => load_decode::<_, EpochChangesFor>( - backend, - BABE_EPOCH_CHANGES_KEY, - )?.map(|v1| v1.map(|_, _, epoch| epoch.migrate(config))), - Some(BABE_EPOCH_CHANGES_CURRENT_VERSION) => load_decode::<_, EpochChangesFor>( - backend, - BABE_EPOCH_CHANGES_KEY, - )?, - Some(other) => { - return Err(ClientError::Backend( - format!("Unsupported BABE DB version: {:?}", other) - )) - }, + None => + load_decode::<_, EpochChangesForV0>(backend, BABE_EPOCH_CHANGES_KEY)? + .map(|v0| v0.migrate().map(|_, _, epoch| epoch.migrate(config))), + Some(1) => + load_decode::<_, EpochChangesFor>(backend, BABE_EPOCH_CHANGES_KEY)? + .map(|v1| v1.map(|_, _, epoch| epoch.migrate(config))), + Some(BABE_EPOCH_CHANGES_CURRENT_VERSION) => + load_decode::<_, EpochChangesFor>(backend, BABE_EPOCH_CHANGES_KEY)?, + Some(other) => + return Err(ClientError::Backend(format!("Unsupported BABE DB version: {:?}", other))), }; - let epoch_changes = SharedEpochChanges::::new(maybe_epoch_changes.unwrap_or_else(|| { - info!( - target: "babe", - "👶 Creating empty BABE epoch changes on what appears to be first startup.", - ); - EpochChangesFor::::default() - })); + let epoch_changes = + SharedEpochChanges::::new(maybe_epoch_changes.unwrap_or_else(|| { + info!( + target: "babe", + "👶 Creating empty BABE epoch changes on what appears to be first startup.", + ); + EpochChangesFor::::default() + })); // rebalance the tree after deserialization. this isn't strictly necessary // since the tree is now rebalanced on every update operation. 
but since the @@ -99,15 +93,16 @@ pub fn load_epoch_changes( pub(crate) fn write_epoch_changes( epoch_changes: &EpochChangesFor, write_aux: F, -) -> R where +) -> R +where F: FnOnce(&[(&'static [u8], &[u8])]) -> R, { BABE_EPOCH_CHANGES_CURRENT_VERSION.using_encoded(|version| { let encoded_epoch_changes = epoch_changes.encode(); - write_aux( - &[(BABE_EPOCH_CHANGES_KEY, encoded_epoch_changes.as_slice()), - (BABE_EPOCH_CHANGES_VERSION, version)], - ) + write_aux(&[ + (BABE_EPOCH_CHANGES_KEY, encoded_epoch_changes.as_slice()), + (BABE_EPOCH_CHANGES_VERSION, version), + ]) }) } @@ -116,15 +111,12 @@ pub(crate) fn write_block_weight( block_hash: H, block_weight: BabeBlockWeight, write_aux: F, -) -> R where +) -> R +where F: FnOnce(&[(Vec, &[u8])]) -> R, { let key = block_weight_key(block_hash); - block_weight.using_encoded(|s| - write_aux( - &[(key, s)], - ) - ) + block_weight.using_encoded(|s| write_aux(&[(key, s)])) } /// Load the cumulative chain-weight associated with a block. @@ -140,13 +132,13 @@ mod test { use super::*; use crate::migration::EpochV0; use fork_tree::ForkTree; - use substrate_test_runtime_client; + use sc_consensus_epochs::{EpochHeader, PersistedEpoch, PersistedEpochHeader}; + use sc_network_test::Block as TestBlock; + use sp_consensus::Error as ConsensusError; + use sp_consensus_babe::{AllowedSlots, BabeGenesisConfiguration}; use sp_core::H256; use sp_runtime::traits::NumberFor; - use sp_consensus_babe::{AllowedSlots, BabeGenesisConfiguration}; - use sc_consensus_epochs::{PersistedEpoch, PersistedEpochHeader, EpochHeader}; - use sp_consensus::Error as ConsensusError; - use sc_network_test::Block as TestBlock; + use substrate_test_runtime_client; #[test] fn load_decode_from_v0_epoch_changes() { @@ -159,26 +151,30 @@ mod test { }; let client = substrate_test_runtime_client::new(); let mut v0_tree = ForkTree::, _>::new(); - v0_tree.import::<_, ConsensusError>( - Default::default(), - Default::default(), - PersistedEpoch::Regular(epoch), - &|_, _| 
Ok(false), // Test is single item only so this can be set to false. - ).unwrap(); - - client.insert_aux( - &[(BABE_EPOCH_CHANGES_KEY, - &EpochChangesForV0::::from_raw(v0_tree).encode()[..])], - &[], - ).unwrap(); - - assert_eq!( - load_decode::<_, u32>(&client, BABE_EPOCH_CHANGES_VERSION).unwrap(), - None, - ); + v0_tree + .import::<_, ConsensusError>( + Default::default(), + Default::default(), + PersistedEpoch::Regular(epoch), + &|_, _| Ok(false), // Test is single item only so this can be set to false. + ) + .unwrap(); + + client + .insert_aux( + &[( + BABE_EPOCH_CHANGES_KEY, + &EpochChangesForV0::::from_raw(v0_tree).encode()[..], + )], + &[], + ) + .unwrap(); + + assert_eq!(load_decode::<_, u32>(&client, BABE_EPOCH_CHANGES_VERSION).unwrap(), None,); let epoch_changes = load_epoch_changes::( - &client, &BabeGenesisConfiguration { + &client, + &BabeGenesisConfiguration { slot_duration: 10, epoch_length: 4, c: (3, 10), @@ -186,10 +182,12 @@ mod test { randomness: Default::default(), allowed_slots: AllowedSlots::PrimaryAndSecondaryPlainSlots, }, - ).unwrap(); + ) + .unwrap(); assert!( - epoch_changes.shared_data() + epoch_changes + .shared_data() .tree() .iter() .map(|(_, _, epoch)| epoch.clone()) @@ -200,16 +198,10 @@ mod test { })], ); // PersistedEpochHeader does not implement Debug, so we use assert! directly. 
- write_epoch_changes::( - &epoch_changes.shared_data(), - |values| { - client.insert_aux(values, &[]).unwrap(); - }, - ); + write_epoch_changes::(&epoch_changes.shared_data(), |values| { + client.insert_aux(values, &[]).unwrap(); + }); - assert_eq!( - load_decode::<_, u32>(&client, BABE_EPOCH_CHANGES_VERSION).unwrap(), - Some(2), - ); + assert_eq!(load_decode::<_, u32>(&client, BABE_EPOCH_CHANGES_VERSION).unwrap(), Some(2),); } } diff --git a/client/consensus/babe/src/lib.rs b/client/consensus/babe/src/lib.rs index 61b58bf1b5999..315bd4e9921aa 100644 --- a/client/consensus/babe/src/lib.rs +++ b/client/consensus/babe/src/lib.rs @@ -71,9 +71,13 @@ use std::{ }; use codec::{Decode, Encode}; -use futures::channel::mpsc::{channel, Receiver, Sender}; -use futures::channel::oneshot; -use futures::prelude::*; +use futures::{ + channel::{ + mpsc::{channel, Receiver, Sender}, + oneshot, + }, + prelude::*, +}; use log::{debug, info, log, trace, warn}; use parking_lot::Mutex; use prometheus_endpoint::Registry; @@ -89,18 +93,16 @@ use sc_consensus_slots::{ SlotInfo, StorageChanges, }; use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_DEBUG, CONSENSUS_TRACE}; -use sp_api::ApiExt; -use sp_api::{NumberFor, ProvideRuntimeApi}; +use sp_api::{ApiExt, NumberFor, ProvideRuntimeApi}; use sp_application_crypto::AppKey; use sp_block_builder::BlockBuilder as BlockBuilderApi; use sp_blockchain::{ Error as ClientError, HeaderBackend, HeaderMetadata, ProvideCache, Result as ClientResult, }; -use sp_consensus::{import_queue::BoxJustificationImport, CanAuthorWith, ImportResult}; use sp_consensus::{ - import_queue::{BasicQueue, CacheKeyId, DefaultImportQueue, Verifier}, - BlockCheckParams, BlockImport, BlockImportParams, BlockOrigin, Environment, - Error as ConsensusError, ForkChoiceStrategy, Proposer, SelectChain, SlotData, + import_queue::{BasicQueue, BoxJustificationImport, CacheKeyId, DefaultImportQueue, Verifier}, + BlockCheckParams, BlockImport, BlockImportParams, BlockOrigin, 
CanAuthorWith, Environment, + Error as ConsensusError, ForkChoiceStrategy, ImportResult, Proposer, SelectChain, SlotData, StateAction, }; use sp_consensus_babe::inherents::BabeInherentData; @@ -159,7 +161,7 @@ impl EpochT for Epoch { fn increment( &self, - (descriptor, config): (NextEpochDescriptor, BabeEpochConfiguration) + (descriptor, config): (NextEpochDescriptor, BabeEpochConfiguration), ) -> Epoch { Epoch { epoch_index: self.epoch_index + 1, @@ -183,10 +185,7 @@ impl EpochT for Epoch { impl Epoch { /// Create the genesis epoch (epoch #0). This is defined to start at the slot of /// the first block, so that has to be provided. - pub fn genesis( - genesis_config: &BabeGenesisConfiguration, - slot: Slot, - ) -> Epoch { + pub fn genesis(genesis_config: &BabeGenesisConfiguration, slot: Slot) -> Epoch { Epoch { epoch_index: 0, start_slot: slot, @@ -253,7 +252,11 @@ pub enum Error { #[display(fmt = "No secondary author expected.")] NoSecondaryAuthorExpected, /// VRF verification of block by author failed - #[display(fmt = "VRF verification of block by author {:?} failed: threshold {} exceeded", _0, _1)] + #[display( + fmt = "VRF verification of block by author {:?} failed: threshold {} exceeded", + _0, + _1 + )] VRFVerificationOfBlockFailed(AuthorityId, u128), /// VRF verification failed #[display(fmt = "VRF verification failed: {:?}", _0)] @@ -320,35 +323,36 @@ pub struct Config(sc_consensus_slots::SlotDuration); impl Config { /// Either fetch the slot duration from disk or compute it from the genesis /// state. 
- pub fn get_or_compute(client: &C) -> ClientResult where - C: AuxStore + ProvideRuntimeApi + UsageProvider, C::Api: BabeApi, + pub fn get_or_compute(client: &C) -> ClientResult + where + C: AuxStore + ProvideRuntimeApi + UsageProvider, + C::Api: BabeApi, { trace!(target: "babe", "Getting slot duration"); match sc_consensus_slots::SlotDuration::get_or_compute(client, |a, b| { - let has_api_v1 = a.has_api_with::, _>( - &b, |v| v == 1, - )?; - let has_api_v2 = a.has_api_with::, _>( - &b, |v| v == 2, - )?; + let has_api_v1 = a.has_api_with::, _>(&b, |v| v == 1)?; + let has_api_v2 = a.has_api_with::, _>(&b, |v| v == 2)?; if has_api_v1 { - #[allow(deprecated)] { + #[allow(deprecated)] + { Ok(a.configuration_before_version_2(b)?.into()) } } else if has_api_v2 { a.configuration(b).map_err(Into::into) } else { Err(sp_blockchain::Error::VersionInvalid( - "Unsupported or invalid BabeApi version".to_string() + "Unsupported or invalid BabeApi version".to_string(), )) } - }).map(Self) { + }) + .map(Self) + { Ok(s) => Ok(s), Err(s) => { warn!(target: "babe", "Failed to get slot duration"); Err(s) - } + }, } } @@ -502,7 +506,8 @@ where let (worker_tx, worker_rx) = channel(HANDLE_BUFFER_SIZE); - let answer_requests = answer_requests(worker_rx, config.0, client, babe_link.epoch_changes.clone()); + let answer_requests = + answer_requests(worker_rx, config.0, client, babe_link.epoch_changes.clone()); Ok(BabeWorker { inner: Box::pin(future::join(inner, answer_requests).map(|_| ())), slot_notification_sinks, @@ -515,28 +520,37 @@ async fn answer_requests( genesis_config: sc_consensus_slots::SlotDuration, client: Arc, epoch_changes: SharedEpochChanges, -) - where C: ProvideRuntimeApi + ProvideCache + ProvideUncles + BlockchainEvents - + HeaderBackend + HeaderMetadata + Send + Sync + 'static, +) where + C: ProvideRuntimeApi + + ProvideCache + + ProvideUncles + + BlockchainEvents + + HeaderBackend + + HeaderMetadata + + Send + + Sync + + 'static, { while let Some(request) = 
request_rx.next().await { match request { BabeRequest::EpochForChild(parent_hash, parent_number, slot_number, response) => { let lookup = || { let epoch_changes = epoch_changes.shared_data(); - let epoch_descriptor = epoch_changes.epoch_descriptor_for_child_of( - descendent_query(&*client), - &parent_hash, - parent_number, - slot_number, - ) + let epoch_descriptor = epoch_changes + .epoch_descriptor_for_child_of( + descendent_query(&*client), + &parent_hash, + parent_number, + slot_number, + ) .map_err(|e| Error::::ForkTree(Box::new(e)))? .ok_or_else(|| Error::::FetchEpoch(parent_hash))?; - let viable_epoch = epoch_changes.viable_epoch( - &epoch_descriptor, - |slot| Epoch::genesis(&genesis_config, slot) - ).ok_or_else(|| Error::::FetchEpoch(parent_hash))?; + let viable_epoch = epoch_changes + .viable_epoch(&epoch_descriptor, |slot| { + Epoch::genesis(&genesis_config, slot) + }) + .ok_or_else(|| Error::::FetchEpoch(parent_hash))?; Ok(sp_consensus_babe::Epoch { epoch_index: viable_epoch.as_ref().epoch_index, @@ -549,7 +563,7 @@ async fn answer_requests( }; let _ = response.send(lookup()); - } + }, } } } @@ -584,7 +598,7 @@ impl BabeWorkerHandle { /// Worker for Babe which implements `Future`. This must be polled. #[must_use] pub struct BabeWorker { - inner: Pin + Send + 'static>>, + inner: Pin + Send + 'static>>, slot_notification_sinks: SlotNotificationSinks, handle: BabeWorkerHandle, } @@ -593,7 +607,7 @@ impl BabeWorker { /// Return an event stream of notifications for when new slot happens, and the corresponding /// epoch descriptor. 
pub fn slot_notification_stream( - &self + &self, ) -> Receiver<(Slot, ViableEpochDescriptor, Epoch>)> { const CHANNEL_BUFFER_SIZE: usize = 1024; @@ -613,7 +627,7 @@ impl futures::Future for BabeWorker { fn poll( mut self: Pin<&mut Self>, - cx: &mut futures::task::Context + cx: &mut futures::task::Context, ) -> futures::task::Poll { self.inner.as_mut().poll(cx) } @@ -621,7 +635,7 @@ impl futures::Future for BabeWorker { /// Slot notification sinks. type SlotNotificationSinks = Arc< - Mutex::Hash, NumberFor, Epoch>)>>> + Mutex::Hash, NumberFor, Epoch>)>>>, >; struct BabeSlotWorker { @@ -662,9 +676,8 @@ where type Claim = (PreDigest, AuthorityId); type SyncOracle = SO; type JustificationSyncLink = L; - type CreateProposer = Pin> + Send + 'static - >>; + type CreateProposer = + Pin> + Send + 'static>>; type Proposer = E::Proposer; type BlockImport = I; @@ -681,12 +694,14 @@ where parent: &B::Header, slot: Slot, ) -> Result { - self.epoch_changes.shared_data().epoch_descriptor_for_child_of( - descendent_query(&*self.client), - &parent.hash(), - parent.number().clone(), - slot, - ) + self.epoch_changes + .shared_data() + .epoch_descriptor_for_child_of( + descendent_query(&*self.client), + &parent.hash(), + parent.number().clone(), + slot, + ) .map_err(|e| ConsensusError::ChainLookup(format!("{:?}", e)))? .ok_or(sp_consensus::Error::InvalidAuthoritiesSet) } @@ -707,10 +722,10 @@ where debug!(target: "babe", "Attempting to claim slot {}", slot); let s = authorship::claim_slot( slot, - self.epoch_changes.shared_data().viable_epoch( - &epoch_descriptor, - |slot| Epoch::genesis(&self.config, slot) - )?.as_ref(), + self.epoch_changes + .shared_data() + .viable_epoch(&epoch_descriptor, |slot| Epoch::genesis(&self.config, slot))? 
+ .as_ref(), &self.keystore, ); @@ -727,20 +742,18 @@ where slot: Slot, epoch_descriptor: &ViableEpochDescriptor, Epoch>, ) { - self.slot_notification_sinks.lock() - .retain_mut(|sink| { - match sink.try_send((slot, epoch_descriptor.clone())) { - Ok(()) => true, - Err(e) => { - if e.is_full() { - warn!(target: "babe", "Trying to notify a slot but the channel is full"); - true - } else { - false - } + self.slot_notification_sinks.lock().retain_mut(|sink| { + match sink.try_send((slot, epoch_descriptor.clone())) { + Ok(()) => true, + Err(e) => + if e.is_full() { + warn!(target: "babe", "Trying to notify a slot but the channel is full"); + true + } else { + false }, - } - }); + } + }); } fn pre_digest_data( @@ -748,59 +761,64 @@ where _slot: Slot, claim: &Self::Claim, ) -> Vec> { - vec![ - as CompatibleDigestItem>::babe_pre_digest(claim.0.clone()), - ] + vec![ as CompatibleDigestItem>::babe_pre_digest(claim.0.clone())] } - fn block_import_params(&self) -> Box, - StorageChanges, - Self::Claim, - Self::EpochData, - ) -> Result< - sp_consensus::BlockImportParams, - sp_consensus::Error> + Send + 'static> - { + fn block_import_params( + &self, + ) -> Box< + dyn Fn( + B::Header, + &B::Hash, + Vec, + StorageChanges, + Self::Claim, + Self::EpochData, + ) -> Result, sp_consensus::Error> + + Send + + 'static, + > { let keystore = self.keystore.clone(); - Box::new(move |header, header_hash, body, storage_changes, (_, public), epoch_descriptor| { - // sign the pre-sealed hash of the block and then - // add it to a digest item. - let public_type_pair = public.clone().into(); - let public = public.to_raw_vec(); - let signature = SyncCryptoStore::sign_with( - &*keystore, - ::ID, - &public_type_pair, - header_hash.as_ref() - ) - .map_err(|e| sp_consensus::Error::CannotSign( - public.clone(), e.to_string(), - ))? 
- .ok_or_else(|| sp_consensus::Error::CannotSign( - public.clone(), "Could not find key in keystore.".into(), - ))?; - let signature: AuthoritySignature = signature.clone().try_into() - .map_err(|_| sp_consensus::Error::InvalidSignature( - signature, public - ))?; - let digest_item = as CompatibleDigestItem>::babe_seal(signature.into()); - - let mut import_block = BlockImportParams::new(BlockOrigin::Own, header); - import_block.post_digests.push(digest_item); - import_block.body = Some(body); - import_block.state_action = StateAction::ApplyChanges( - sp_consensus::StorageChanges::Changes(storage_changes) - ); - import_block.intermediates.insert( - Cow::from(INTERMEDIATE_KEY), - Box::new(BabeIntermediate:: { epoch_descriptor }) as Box<_>, - ); + Box::new( + move |header, header_hash, body, storage_changes, (_, public), epoch_descriptor| { + // sign the pre-sealed hash of the block and then + // add it to a digest item. + let public_type_pair = public.clone().into(); + let public = public.to_raw_vec(); + let signature = SyncCryptoStore::sign_with( + &*keystore, + ::ID, + &public_type_pair, + header_hash.as_ref(), + ) + .map_err(|e| sp_consensus::Error::CannotSign(public.clone(), e.to_string()))? 
+ .ok_or_else(|| { + sp_consensus::Error::CannotSign( + public.clone(), + "Could not find key in keystore.".into(), + ) + })?; + let signature: AuthoritySignature = signature + .clone() + .try_into() + .map_err(|_| sp_consensus::Error::InvalidSignature(signature, public))?; + let digest_item = + as CompatibleDigestItem>::babe_seal(signature.into()); + + let mut import_block = BlockImportParams::new(BlockOrigin::Own, header); + import_block.post_digests.push(digest_item); + import_block.body = Some(body); + import_block.state_action = StateAction::ApplyChanges( + sp_consensus::StorageChanges::Changes(storage_changes), + ); + import_block.intermediates.insert( + Cow::from(INTERMEDIATE_KEY), + Box::new(BabeIntermediate:: { epoch_descriptor }) as Box<_>, + ); - Ok(import_block) - }) + Ok(import_block) + }, + ) } fn force_authoring(&self) -> bool { @@ -809,8 +827,8 @@ where fn should_backoff(&self, slot: Slot, chain_head: &B::Header) -> bool { if let Some(ref strategy) = self.backoff_authoring_blocks { - if let Ok(chain_head_slot) = find_pre_digest::(chain_head) - .map(|digest| digest.slot()) + if let Ok(chain_head_slot) = + find_pre_digest::(chain_head).map(|digest| digest.slot()) { return strategy.should_backoff( *chain_head.number(), @@ -818,7 +836,7 @@ where self.client.info().finalized_number, slot, self.logging_target(), - ); + ) } } false @@ -833,9 +851,11 @@ where } fn proposer(&mut self, block: &B::Header) -> Self::CreateProposer { - Box::pin(self.env.init(block).map_err(|e| { - sp_consensus::Error::ClientImport(format!("{:?}", e)) - })) + Box::pin( + self.env + .init(block) + .map_err(|e| sp_consensus::Error::ClientImport(format!("{:?}", e))), + ) } fn telemetry(&self) -> Option { @@ -865,7 +885,7 @@ pub fn find_pre_digest(header: &B::Header) -> Result = None; @@ -881,16 +901,19 @@ pub fn find_pre_digest(header: &B::Header) -> Result(header: &B::Header) - -> Result, Error> - where DigestItemFor: CompatibleDigestItem, +fn find_next_epoch_digest( + header: 
&B::Header, +) -> Result, Error> +where + DigestItemFor: CompatibleDigestItem, { let mut epoch_digest: Option<_> = None; for log in header.digest().logs() { trace!(target: "babe", "Checking log {:?}, looking for epoch change digest.", log); let log = log.try_to::(OpaqueDigestItemId::Consensus(&BABE_ENGINE_ID)); match (log, epoch_digest.is_some()) { - (Some(ConsensusLog::NextEpochData(_)), true) => return Err(babe_err(Error::MultipleEpochChangeDigests)), + (Some(ConsensusLog::NextEpochData(_)), true) => + return Err(babe_err(Error::MultipleEpochChangeDigests)), (Some(ConsensusLog::NextEpochData(epoch)), false) => epoch_digest = Some(epoch), _ => trace!(target: "babe", "Ignoring digest not meant for us"), } @@ -900,16 +923,19 @@ fn find_next_epoch_digest(header: &B::Header) } /// Extract the BABE config change digest from the given header, if it exists. -fn find_next_config_digest(header: &B::Header) - -> Result, Error> - where DigestItemFor: CompatibleDigestItem, +fn find_next_config_digest( + header: &B::Header, +) -> Result, Error> +where + DigestItemFor: CompatibleDigestItem, { let mut config_digest: Option<_> = None; for log in header.digest().logs() { trace!(target: "babe", "Checking log {:?}, looking for epoch change digest.", log); let log = log.try_to::(OpaqueDigestItemId::Consensus(&BABE_ENGINE_ID)); match (log, config_digest.is_some()) { - (Some(ConsensusLog::NextConfigData(_)), true) => return Err(babe_err(Error::MultipleConfigChangeDigests)), + (Some(ConsensusLog::NextConfigData(_)), true) => + return Err(babe_err(Error::MultipleConfigChangeDigests)), (Some(ConsensusLog::NextConfigData(config)), false) => config_digest = Some(config), _ => trace!(target: "babe", "Ignoring digest not meant for us"), } @@ -974,11 +1000,11 @@ where return Ok(()) } - let inherent_res = self.client.runtime_api().check_inherents( - &block_id, - block, - inherent_data, - ).map_err(Error::RuntimeApi)?; + let inherent_res = self + .client + .runtime_api() + 
.check_inherents(&block_id, block, inherent_data) + .map_err(Error::RuntimeApi)?; if !inherent_res.ok() { for (i, e) in inherent_res.into_errors() { @@ -1003,7 +1029,7 @@ where // don't report any equivocations during initial sync // as they are most likely stale. if *origin == BlockOrigin::NetworkInitialSync { - return Ok(()); + return Ok(()) } // check if authorship of this header is an equivocation and return a proof if so. @@ -1053,8 +1079,8 @@ where Some(proof) => proof, None => { debug!(target: "babe", "Equivocation offender is not part of the authority set."); - return Ok(()); - } + return Ok(()) + }, }, }; @@ -1074,13 +1100,8 @@ where } } -type BlockVerificationResult = Result< - ( - BlockImportParams, - Option)>>, - ), - String, ->; +type BlockVerificationResult = + Result<(BlockImportParams, Option)>>), String>; #[async_trait::async_trait] impl Verifier @@ -1129,24 +1150,26 @@ where let slot_now = create_inherent_data_providers.slot(); - let parent_header_metadata = self.client.header_metadata(parent_hash) + let parent_header_metadata = self + .client + .header_metadata(parent_hash) .map_err(Error::::FetchParentHeader)?; let pre_digest = find_pre_digest::(&header)?; let (check_header, epoch_descriptor) = { let epoch_changes = self.epoch_changes.shared_data(); - let epoch_descriptor = epoch_changes.epoch_descriptor_for_child_of( - descendent_query(&*self.client), - &parent_hash, - parent_header_metadata.number, - pre_digest.slot(), - ) - .map_err(|e| Error::::ForkTree(Box::new(e)))? - .ok_or_else(|| Error::::FetchEpoch(parent_hash))?; - let viable_epoch = epoch_changes.viable_epoch( - &epoch_descriptor, - |slot| Epoch::genesis(&self.config, slot) - ).ok_or_else(|| Error::::FetchEpoch(parent_hash))?; + let epoch_descriptor = epoch_changes + .epoch_descriptor_for_child_of( + descendent_query(&*self.client), + &parent_hash, + parent_header_metadata.number, + pre_digest.slot(), + ) + .map_err(|e| Error::::ForkTree(Box::new(e)))? 
+ .ok_or_else(|| Error::::FetchEpoch(parent_hash))?; + let viable_epoch = epoch_changes + .viable_epoch(&epoch_descriptor, |slot| Epoch::genesis(&self.config, slot)) + .ok_or_else(|| Error::::FetchEpoch(parent_hash))?; // We add one to the current slot to allow for some small drift. // FIXME #1019 in the future, alter this queue to allow deferring of headers @@ -1162,20 +1185,25 @@ where match check_header { CheckedHeader::Checked(pre_header, verified_info) => { - let babe_pre_digest = verified_info.pre_digest.as_babe_pre_digest() + let babe_pre_digest = verified_info + .pre_digest + .as_babe_pre_digest() .expect("check_header always returns a pre-digest digest item; qed"); let slot = babe_pre_digest.slot(); // the header is valid but let's check if there was something else already // proposed at the same slot by the given author. if there was, we will // report the equivocation to the runtime. - if let Err(err) = self.check_and_report_equivocation( - slot_now, - slot, - &header, - &verified_info.author, - &origin, - ).await { + if let Err(err) = self + .check_and_report_equivocation( + slot_now, + slot, + &header, + &verified_info.author, + &origin, + ) + .await + { warn!(target: "babe", "Error checking/reporting BABE equivocation: {:?}", err); } @@ -1183,7 +1211,8 @@ where // to check that the internally-set timestamp in the inherents // actually matches the slot set in the seal. 
if let Some(inner_body) = body.take() { - let mut inherent_data = create_inherent_data_providers.create_inherent_data() + let mut inherent_data = create_inherent_data_providers + .create_inherent_data() .map_err(Error::::CreateInherents)?; inherent_data.babe_replace_inherent_data(slot); let block = Block::new(pre_header.clone(), inner_body); @@ -1193,7 +1222,8 @@ where BlockId::Hash(parent_hash), inherent_data, create_inherent_data_providers, - ).await?; + ) + .await?; let (_, inner_body) = block.deconstruct(); body = Some(inner_body); @@ -1218,7 +1248,7 @@ where import_block.post_hash = Some(hash); Ok((import_block, Default::default())) - } + }, CheckedHeader::Deferred(a, b) => { debug!(target: "babe", "Checking {:?} failed; {:?}, {:?}.", hash, a, b); telemetry!( @@ -1228,7 +1258,7 @@ where "hash" => ?hash, "a" => ?a, "b" => ?b ); Err(Error::::TooFarInFuture(hash).into()) - } + }, } } } @@ -1266,22 +1296,23 @@ impl BabeBlockImport { block_import: I, config: Config, ) -> Self { - BabeBlockImport { - client, - inner: block_import, - epoch_changes, - config, - } + BabeBlockImport { client, inner: block_import, epoch_changes, config } } } #[async_trait::async_trait] -impl BlockImport for BabeBlockImport where +impl BlockImport for BabeBlockImport +where Block: BlockT, Inner: BlockImport> + Send + Sync, Inner::Error: Into, - Client: HeaderBackend + HeaderMetadata - + AuxStore + ProvideRuntimeApi + ProvideCache + Send + Sync, + Client: HeaderBackend + + HeaderMetadata + + AuxStore + + ProvideRuntimeApi + + ProvideCache + + Send + + Sync, Client::Api: BabeApi + ApiExt, { type Error = ConsensusError; @@ -1308,30 +1339,33 @@ impl BlockImport for BabeBlockImport return Err(ConsensusError::ClientImport(e.to_string())), } - let pre_digest = find_pre_digest::(&block.header) - .expect("valid babe headers must contain a predigest; \ - header has been already verified; qed"); + let pre_digest = find_pre_digest::(&block.header).expect( + "valid babe headers must contain a 
predigest; \ + header has been already verified; qed", + ); let slot = pre_digest.slot(); let parent_hash = *block.header.parent_hash(); - let parent_header = self.client.header(BlockId::Hash(parent_hash)) + let parent_header = self + .client + .header(BlockId::Hash(parent_hash)) .map_err(|e| ConsensusError::ChainLookup(e.to_string()))? - .ok_or_else(|| ConsensusError::ChainLookup(babe_err( - Error::::ParentUnavailable(parent_hash, hash) - ).into()))?; - - let parent_slot = find_pre_digest::(&parent_header) - .map(|d| d.slot()) - .expect("parent is non-genesis; valid BABE headers contain a pre-digest; \ - header has already been verified; qed"); + .ok_or_else(|| { + ConsensusError::ChainLookup( + babe_err(Error::::ParentUnavailable(parent_hash, hash)).into(), + ) + })?; + + let parent_slot = find_pre_digest::(&parent_header).map(|d| d.slot()).expect( + "parent is non-genesis; valid BABE headers contain a pre-digest; \ + header has already been verified; qed", + ); // make sure that slot number is strictly increasing if slot <= parent_slot { - return Err( - ConsensusError::ClientImport(babe_err( - Error::::SlotMustIncrease(parent_slot, slot) - ).into()) - ); + return Err(ConsensusError::ClientImport( + babe_err(Error::::SlotMustIncrease(parent_slot, slot)).into(), + )) } // if there's a pending epoch we'll save the previous epoch changes here @@ -1354,14 +1388,16 @@ impl BlockImport for BabeBlockImport::ParentBlockNoAssociatedWeight(hash)).into() - ))? + .ok_or_else(|| { + ConsensusError::ClientImport( + babe_err(Error::::ParentBlockNoAssociatedWeight(hash)) + .into(), + ) + })? 
}; - let intermediate = block.take_intermediate::>( - INTERMEDIATE_KEY - )?; + let intermediate = + block.take_intermediate::>(INTERMEDIATE_KEY)?; let epoch_descriptor = intermediate.epoch_descriptor; let first_in_epoch = parent_slot < epoch_descriptor.start_slot(); @@ -1379,27 +1415,18 @@ impl BlockImport for BabeBlockImport {}, (false, false, false) => {}, - (false, false, true) => { - return Err( - ConsensusError::ClientImport( - babe_err(Error::::UnexpectedConfigChange).into(), - ) - ) - }, - (true, false, _) => { - return Err( - ConsensusError::ClientImport( - babe_err(Error::::ExpectedEpochChange(hash, slot)).into(), - ) - ) - }, - (false, true, _) => { - return Err( - ConsensusError::ClientImport( - babe_err(Error::::UnexpectedEpochChange).into(), - ) - ) - }, + (false, false, true) => + return Err(ConsensusError::ClientImport( + babe_err(Error::::UnexpectedConfigChange).into(), + )), + (true, false, _) => + return Err(ConsensusError::ClientImport( + babe_err(Error::::ExpectedEpochChange(hash, slot)).into(), + )), + (false, true, _) => + return Err(ConsensusError::ClientImport( + babe_err(Error::::UnexpectedEpochChange).into(), + )), } let info = self.client.info(); @@ -1407,16 +1434,15 @@ impl BlockImport for BabeBlockImport::FetchEpoch(parent_hash).into()) - })?; + let viable_epoch = epoch_changes + .viable_epoch(&epoch_descriptor, |slot| Epoch::genesis(&self.config, slot)) + .ok_or_else(|| { + ConsensusError::ClientImport(Error::::FetchEpoch(parent_hash).into()) + })?; - let epoch_config = next_config_digest.map(Into::into).unwrap_or_else( - || viable_epoch.as_ref().config.clone() - ); + let epoch_config = next_config_digest + .map(Into::into) + .unwrap_or_else(|| viable_epoch.as_ref().config.clone()); // restrict info logging during initial sync to avoid spam let log_level = if block.origin == BlockOrigin::NetworkInitialSync { @@ -1450,43 +1476,40 @@ impl BlockImport for BabeBlockImport( - &*epoch_changes, - |insert| block.auxiliary.extend( - 
insert.iter().map(|(k, v)| (k.to_vec(), Some(v.to_vec()))) - ) - ); + crate::aux_schema::write_epoch_changes::(&*epoch_changes, |insert| { + block + .auxiliary + .extend(insert.iter().map(|(k, v)| (k.to_vec(), Some(v.to_vec())))) + }); } - aux_schema::write_block_weight( - hash, - total_weight, - |values| block.auxiliary.extend( - values.iter().map(|(k, v)| (k.to_vec(), Some(v.to_vec()))) - ), - ); + aux_schema::write_block_weight(hash, total_weight, |values| { + block + .auxiliary + .extend(values.iter().map(|(k, v)| (k.to_vec(), Some(v.to_vec())))) + }); // The fork choice rule is that we pick the heaviest chain (i.e. // more primary blocks), if there's a tie we go with the longest @@ -1501,9 +1524,11 @@ impl BlockImport for BabeBlockImport last_best_weight { @@ -1544,30 +1569,38 @@ impl BlockImport for BabeBlockImport( client: Arc, epoch_changes: &mut EpochChangesFor, -) -> Result<(), ConsensusError> where +) -> Result<(), ConsensusError> +where Block: BlockT, Client: HeaderBackend + HeaderMetadata, { let info = client.info(); let finalized_slot = { - let finalized_header = client.header(BlockId::Hash(info.finalized_hash)) + let finalized_header = client + .header(BlockId::Hash(info.finalized_hash)) .map_err(|e| ConsensusError::ClientImport(format!("{:?}", e)))? 
- .expect("best finalized hash was given by client; \ - finalized headers must exist in db; qed"); + .expect( + "best finalized hash was given by client; \ + finalized headers must exist in db; qed", + ); find_pre_digest::(&finalized_header) - .expect("finalized header must be valid; \ - valid blocks have a pre-digest; qed") + .expect( + "finalized header must be valid; \ + valid blocks have a pre-digest; qed", + ) .slot() }; - epoch_changes.prune_finalized( - descendent_query(&*client), - &info.finalized_hash, - info.finalized_number, - finalized_slot, - ).map_err(|e| ConsensusError::ClientImport(format!("{:?}", e)))?; + epoch_changes + .prune_finalized( + descendent_query(&*client), + &info.finalized_hash, + info.finalized_number, + finalized_slot, + ) + .map_err(|e| ConsensusError::ClientImport(format!("{:?}", e)))?; Ok(()) } @@ -1586,25 +1619,14 @@ where Client: AuxStore + HeaderBackend + HeaderMetadata, { let epoch_changes = aux_schema::load_epoch_changes::(&*client, &config)?; - let link = BabeLink { - epoch_changes: epoch_changes.clone(), - config: config.clone(), - }; + let link = BabeLink { epoch_changes: epoch_changes.clone(), config: config.clone() }; // NOTE: this isn't entirely necessary, but since we didn't use to prune the // epoch tree it is useful as a migration, so that nodes prune long trees on // startup rather than waiting until importing the next epoch change block. 
- prune_finalized( - client.clone(), - &mut epoch_changes.shared_data(), - )?; + prune_finalized(client.clone(), &mut epoch_changes.shared_data())?; - let import = BabeBlockImport::new( - client, - epoch_changes, - wrapped_block_import, - config, - ); + let import = BabeBlockImport::new(client, epoch_changes, wrapped_block_import, config); Ok((import, link)) } @@ -1629,12 +1651,23 @@ pub fn import_queue( registry: Option<&Registry>, can_author_with: CAW, telemetry: Option, -) -> ClientResult> where - Inner: BlockImport> - + Send + Sync + 'static, - Client: ProvideRuntimeApi + ProvideCache + HeaderBackend - + HeaderMetadata + AuxStore - + Send + Sync + 'static, +) -> ClientResult> +where + Inner: BlockImport< + Block, + Error = ConsensusError, + Transaction = sp_api::TransactionFor, + > + Send + + Sync + + 'static, + Client: ProvideRuntimeApi + + ProvideCache + + HeaderBackend + + HeaderMetadata + + AuxStore + + Send + + Sync + + 'static, Client::Api: BlockBuilderApi + BabeApi + ApiExt, SelectChain: sp_consensus::SelectChain + 'static, CAW: CanAuthorWith + Send + Sync + 'static, @@ -1651,11 +1684,5 @@ pub fn import_queue( client, }; - Ok(BasicQueue::new( - verifier, - Box::new(block_import), - justification_import, - spawner, - registry, - )) + Ok(BasicQueue::new(verifier, Box::new(block_import), justification_import, spawner, registry)) } diff --git a/client/consensus/babe/src/migration.rs b/client/consensus/babe/src/migration.rs index fec73667da48d..a248c9da24db8 100644 --- a/client/consensus/babe/src/migration.rs +++ b/client/consensus/babe/src/migration.rs @@ -16,12 +16,12 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use codec::{Encode, Decode}; -use sc_consensus_epochs::Epoch as EpochT; use crate::{ - Epoch, AuthorityId, BabeAuthorityWeight, BabeGenesisConfiguration, - BabeEpochConfiguration, VRF_OUTPUT_LENGTH, NextEpochDescriptor, + AuthorityId, BabeAuthorityWeight, BabeEpochConfiguration, BabeGenesisConfiguration, Epoch, + NextEpochDescriptor, VRF_OUTPUT_LENGTH, }; +use codec::{Decode, Encode}; +use sc_consensus_epochs::Epoch as EpochT; use sp_consensus_slots::Slot; /// BABE epoch information, version 0. @@ -43,10 +43,7 @@ impl EpochT for EpochV0 { type NextEpochDescriptor = NextEpochDescriptor; type Slot = Slot; - fn increment( - &self, - descriptor: NextEpochDescriptor - ) -> EpochV0 { + fn increment(&self, descriptor: NextEpochDescriptor) -> EpochV0 { EpochV0 { epoch_index: self.epoch_index + 1, start_slot: self.start_slot + self.duration, @@ -74,10 +71,7 @@ impl EpochV0 { duration: self.duration, authorities: self.authorities, randomness: self.randomness, - config: BabeEpochConfiguration { - c: config.c, - allowed_slots: config.allowed_slots, - }, + config: BabeEpochConfiguration { c: config.c, allowed_slots: config.allowed_slots }, } } } diff --git a/client/consensus/babe/src/tests.rs b/client/consensus/babe/src/tests.rs index 3392ffade98ee..18c016bbf1035 100644 --- a/client/consensus/babe/src/tests.rs +++ b/client/consensus/babe/src/tests.rs @@ -23,35 +23,33 @@ #![allow(deprecated)] use super::*; use authorship::claim_slot; -use sp_core::crypto::Pair; -use sp_keystore::{ - SyncCryptoStore, - vrf::make_transcript as transcript_from_data, -}; -use sp_consensus_babe::{ - AuthorityPair, Slot, AllowedSlots, make_transcript, make_transcript_data, - inherents::InherentDataProvider, -}; -use sc_consensus_slots::BackoffAuthoringOnFinalizedHeadLagging; +use futures::executor::block_on; +use log::debug; +use rand::RngCore; +use rand_chacha::{rand_core::SeedableRng, ChaChaRng}; use sc_block_builder::{BlockBuilder, BlockBuilderProvider}; +use 
sc_client_api::{backend::TransactionFor, BlockchainEvents}; +use sc_consensus_slots::BackoffAuthoringOnFinalizedHeadLagging; +use sc_keystore::LocalKeystore; +use sc_network::config::ProtocolConfig; +use sc_network_test::{Block as TestBlock, *}; +use sp_application_crypto::key_types::BABE; use sp_consensus::{ - NoNetwork as DummyOracle, Proposal, DisableProofRecording, AlwaysCanAuthor, import_queue::{BoxBlockImport, BoxJustificationImport}, + AlwaysCanAuthor, DisableProofRecording, NoNetwork as DummyOracle, Proposal, }; -use sc_network_test::{Block as TestBlock, *}; -use sc_network::config::ProtocolConfig; -use sp_runtime::{generic::DigestItem, traits::{Block as BlockT, DigestFor}}; -use sc_client_api::{BlockchainEvents, backend::TransactionFor}; -use log::debug; -use std::{time::Duration, cell::RefCell, task::Poll}; -use rand::RngCore; -use rand_chacha::{ - rand_core::SeedableRng, ChaChaRng, +use sp_consensus_babe::{ + inherents::InherentDataProvider, make_transcript, make_transcript_data, AllowedSlots, + AuthorityPair, Slot, +}; +use sp_core::crypto::Pair; +use sp_keystore::{vrf::make_transcript as transcript_from_data, SyncCryptoStore}; +use sp_runtime::{ + generic::DigestItem, + traits::{Block as BlockT, DigestFor}, }; -use sc_keystore::LocalKeystore; -use sp_application_crypto::key_types::BABE; -use futures::executor::block_on; use sp_timestamp::InherentDataProvider as TimestampInherentDataProvider; +use std::{cell::RefCell, task::Poll, time::Duration}; type Item = DigestItem; @@ -95,10 +93,7 @@ impl Environment for DummyFactory { type Proposer = DummyProposer; type Error = Error; - fn init(&mut self, parent_header: &::Header) - -> Self::CreateProposer - { - + fn init(&mut self, parent_header: &::Header) -> Self::CreateProposer { let parent_slot = crate::find_pre_digest::(parent_header) .expect("parent header has a pre-digest") .slot(); @@ -113,23 +108,24 @@ impl Environment for DummyFactory { } impl DummyProposer { - fn propose_with(&mut self, pre_digests: 
DigestFor) - -> future::Ready< - Result< - Proposal< - TestBlock, - sc_client_api::TransactionFor, - () - >, - Error - > - > - { - let block_builder = self.factory.client.new_block_at( - &BlockId::Hash(self.parent_hash), - pre_digests, - false, - ).unwrap(); + fn propose_with( + &mut self, + pre_digests: DigestFor, + ) -> future::Ready< + Result< + Proposal< + TestBlock, + sc_client_api::TransactionFor, + (), + >, + Error, + >, + > { + let block_builder = self + .factory + .client + .new_block_at(&BlockId::Hash(self.parent_hash), pre_digests, false) + .unwrap(); let mut block = match block_builder.build().map_err(|e| e.into()) { Ok(b) => b.block, @@ -143,13 +139,14 @@ impl DummyProposer { // figure out if we should add a consensus digest, since the test runtime // doesn't. let epoch_changes = self.factory.epoch_changes.shared_data(); - let epoch = epoch_changes.epoch_data_for_child_of( - descendent_query(&*self.factory.client), - &self.parent_hash, - self.parent_number, - this_slot, - |slot| Epoch::genesis(&self.factory.config, slot), - ) + let epoch = epoch_changes + .epoch_data_for_child_of( + descendent_query(&*self.factory.client), + &self.parent_hash, + self.parent_number, + this_slot, + |slot| Epoch::genesis(&self.factory.config, slot), + ) .expect("client has data to find epoch") .expect("can compute epoch for baked block"); @@ -162,7 +159,8 @@ impl DummyProposer { let digest_data = ConsensusLog::NextEpochData(NextEpochDescriptor { authorities: epoch.authorities.clone(), randomness: epoch.randomness.clone(), - }).encode(); + }) + .encode(); let digest = DigestItem::Consensus(BABE_ENGINE_ID, digest_data); block.header.digest_mut().push(digest) } @@ -176,7 +174,8 @@ impl DummyProposer { impl Proposer for DummyProposer { type Error = Error; - type Transaction = sc_client_api::TransactionFor; + type Transaction = + sc_client_api::TransactionFor; type Proposal = future::Ready, Error>>; type ProofRecording = DisableProofRecording; type Proof = (); @@ -201,9 +200,9 
@@ pub struct PanickingBlockImport(B); #[async_trait::async_trait] impl> BlockImport for PanickingBlockImport - where - B::Transaction: Send, - B: Send, +where + B::Transaction: Send, + B: Send, { type Error = B::Error; type Transaction = B::Transaction; @@ -233,10 +232,8 @@ pub struct BabeTestNet { type TestHeader = ::Header; type TestExtrinsic = ::Extrinsic; -type TestSelectChain = substrate_test_runtime_client::LongestChain< - substrate_test_runtime_client::Backend, - TestBlock, ->; +type TestSelectChain = + substrate_test_runtime_client::LongestChain; pub struct TestVerifier { inner: BabeVerifier< @@ -244,11 +241,13 @@ pub struct TestVerifier { PeersFullClient, TestSelectChain, AlwaysCanAuthor, - Box> + Box< + dyn CreateInherentDataProviders< + TestBlock, + (), + InherentDataProviders = (TimestampInherentDataProvider, InherentDataProvider), + >, + >, >, mutator: Mutator, } @@ -274,7 +273,12 @@ impl Verifier for TestVerifier { pub struct PeerData { link: BabeLink, block_import: Mutex< - Option>> + Option< + BoxBlockImport< + TestBlock, + TransactionFor, + >, + >, >, } @@ -286,32 +290,27 @@ impl TestNetFactory for BabeTestNet { /// Create new test network with peers and given config. 
fn from_config(_config: &ProtocolConfig) -> Self { debug!(target: "babe", "Creating test network from config"); - BabeTestNet { - peers: Vec::new(), - } + BabeTestNet { peers: Vec::new() } } - fn make_block_import(&self, client: PeersClient) - -> ( - BlockImportAdapter, - Option>, - Option, - ) - { + fn make_block_import( + &self, + client: PeersClient, + ) -> ( + BlockImportAdapter, + Option>, + Option, + ) { let client = client.as_full().expect("only full clients are tested"); let config = Config::get_or_compute(&*client).expect("config available"); - let (block_import, link) = crate::block_import( - config, - client.clone(), - client.clone(), - ).expect("can initialize block-import"); + let (block_import, link) = crate::block_import(config, client.clone(), client.clone()) + .expect("can initialize block-import"); let block_import = PanickingBlockImport(block_import); - let data_block_import = Mutex::new( - Some(Box::new(block_import.clone()) as BoxBlockImport<_, _>) - ); + let data_block_import = + Mutex::new(Some(Box::new(block_import.clone()) as BoxBlockImport<_, _>)); ( BlockImportAdapter::new(block_import), None, @@ -324,16 +323,16 @@ impl TestNetFactory for BabeTestNet { client: PeersClient, _cfg: &ProtocolConfig, maybe_link: &Option, - ) - -> Self::Verifier - { + ) -> Self::Verifier { use substrate_test_runtime_client::DefaultTestClientBuilderExt; let client = client.as_full().expect("only full clients are used in test"); trace!(target: "babe", "Creating a verifier"); // ensure block import and verifier are linked correctly. 
- let data = maybe_link.as_ref().expect("babe link always provided to verifier instantiation"); + let data = maybe_link + .as_ref() + .expect("babe link always provided to verifier instantiation"); let (_, longest_chain) = TestClientBuilder::new().build_with_longest_chain(); @@ -369,10 +368,7 @@ impl TestNetFactory for BabeTestNet { &self.peers } - fn mut_peers)>( - &mut self, - closure: F, - ) { + fn mut_peers)>(&mut self, closure: F) { closure(&mut self.peers); } } @@ -382,9 +378,7 @@ impl TestNetFactory for BabeTestNet { fn rejects_empty_block() { sp_tracing::try_init_simple(); let mut net = BabeTestNet::new(3); - let block_builder = |builder: BlockBuilder<_, _, _>| { - builder.build().unwrap().block - }; + let block_builder = |builder: BlockBuilder<_, _, _>| builder.build().unwrap().block; net.mut_peers(|peer| { peer[0].generate_blocks(1, BlockOrigin::NetworkInitialSync, block_builder); }) @@ -397,11 +391,7 @@ fn run_one_test(mutator: impl Fn(&mut TestHeader, Stage) + Send + Sync + 'static MUTATOR.with(|m| *m.borrow_mut() = mutator.clone()); let net = BabeTestNet::new(3); - let peers = &[ - (0, "//Alice"), - (1, "//Bob"), - (2, "//Charlie"), - ]; + let peers = &[(0, "//Alice"), (1, "//Bob"), (2, "//Charlie")]; let net = Arc::new(Mutex::new(net)); let mut import_notifications = Vec::new(); @@ -415,9 +405,10 @@ fn run_one_test(mutator: impl Fn(&mut TestHeader, Stage) + Send + Sync + 'static let select_chain = peer.select_chain().expect("Full client has select_chain"); let keystore_path = tempfile::tempdir().expect("Creates keystore path"); - let keystore: SyncCryptoStorePtr = Arc::new(LocalKeystore::open(keystore_path.path(), None) - .expect("Creates keystore")); - SyncCryptoStore::sr25519_generate_new(&*keystore, BABE, Some(seed)).expect("Generates authority key"); + let keystore: SyncCryptoStorePtr = + Arc::new(LocalKeystore::open(keystore_path.path(), None).expect("Creates keystore")); + SyncCryptoStore::sr25519_generate_new(&*keystore, BABE, Some(seed)) + 
.expect("Generates authority key"); keystore_paths.push(keystore_path); let mut got_own = false; @@ -435,47 +426,54 @@ fn run_one_test(mutator: impl Fn(&mut TestHeader, Stage) + Send + Sync + 'static import_notifications.push( // run each future until we get one of our own blocks with number higher than 5 // that was produced locally. - client.import_notification_stream() - .take_while(move |n| future::ready(n.header.number() < &5 || { - if n.origin == BlockOrigin::Own { - got_own = true; - } else { - got_other = true; - } - - // continue until we have at least one block of our own - // and one of another peer. - !(got_own && got_other) - })) - .for_each(|_| future::ready(()) ) + client + .import_notification_stream() + .take_while(move |n| { + future::ready( + n.header.number() < &5 || { + if n.origin == BlockOrigin::Own { + got_own = true; + } else { + got_other = true; + } + + // continue until we have at least one block of our own + // and one of another peer. + !(got_own && got_other) + }, + ) + }) + .for_each(|_| future::ready(())), ); + babe_futures.push( + start_babe(BabeParams { + block_import: data.block_import.lock().take().expect("import set up during init"), + select_chain, + client, + env: environ, + sync_oracle: DummyOracle, + create_inherent_data_providers: Box::new(|_, _| async { + let timestamp = TimestampInherentDataProvider::from_system_time(); + let slot = InherentDataProvider::from_timestamp_and_duration( + *timestamp, + Duration::from_secs(6), + ); - babe_futures.push(start_babe(BabeParams { - block_import: data.block_import.lock().take().expect("import set up during init"), - select_chain, - client, - env: environ, - sync_oracle: DummyOracle, - create_inherent_data_providers: Box::new(|_, _| async { - let timestamp = TimestampInherentDataProvider::from_system_time(); - let slot = InherentDataProvider::from_timestamp_and_duration( - *timestamp, - Duration::from_secs(6), - ); - - Ok((timestamp, slot)) - }), - force_authoring: false, - 
backoff_authoring_blocks: Some(BackoffAuthoringOnFinalizedHeadLagging::default()), - babe_link: data.link.clone(), - keystore, - can_author_with: sp_consensus::AlwaysCanAuthor, - justification_sync_link: (), - block_proposal_slot_portion: SlotProportion::new(0.5), - max_block_proposal_slot_portion: None, - telemetry: None, - }).expect("Starts babe")); + Ok((timestamp, slot)) + }), + force_authoring: false, + backoff_authoring_blocks: Some(BackoffAuthoringOnFinalizedHeadLagging::default()), + babe_link: data.link.clone(), + keystore, + can_author_with: sp_consensus::AlwaysCanAuthor, + justification_sync_link: (), + block_proposal_slot_portion: SlotProportion::new(0.5), + max_block_proposal_slot_portion: None, + telemetry: None, + }) + .expect("Starts babe"), + ); } block_on(future::select( futures::future::poll_fn(move |cx| { @@ -489,7 +487,7 @@ fn run_one_test(mutator: impl Fn(&mut TestHeader, Stage) + Send + Sync + 'static Poll::<()>::Pending }), - future::select(future::join_all(import_notifications), future::join_all(babe_futures)) + future::select(future::join_all(import_notifications), future::join_all(babe_futures)), )); } @@ -503,7 +501,8 @@ fn authoring_blocks() { fn rejects_missing_inherent_digest() { run_one_test(|header: &mut TestHeader, stage| { let v = std::mem::take(&mut header.digest_mut().logs); - header.digest_mut().logs = v.into_iter() + header.digest_mut().logs = v + .into_iter() .filter(|v| stage == Stage::PostSeal || v.as_babe_pre_digest().is_none()) .collect() }) @@ -514,7 +513,8 @@ fn rejects_missing_inherent_digest() { fn rejects_missing_seals() { run_one_test(|header: &mut TestHeader, stage| { let v = std::mem::take(&mut header.digest_mut().logs); - header.digest_mut().logs = v.into_iter() + header.digest_mut().logs = v + .into_iter() .filter(|v| stage == Stage::PreSeal || v.as_babe_seal().is_none()) .collect() }) @@ -525,7 +525,8 @@ fn rejects_missing_seals() { fn rejects_missing_consensus_digests() { run_one_test(|header: &mut TestHeader, 
stage| { let v = std::mem::take(&mut header.digest_mut().logs); - header.digest_mut().logs = v.into_iter() + header.digest_mut().logs = v + .into_iter() .filter(|v| stage == Stage::PostSeal || v.as_next_epoch_descriptor().is_none()) .collect() }); @@ -560,8 +561,8 @@ fn sig_is_not_pre_digest() { fn can_author_block() { sp_tracing::try_init_simple(); let keystore_path = tempfile::tempdir().expect("Creates keystore path"); - let keystore: SyncCryptoStorePtr = Arc::new(LocalKeystore::open(keystore_path.path(), None) - .expect("Creates keystore")); + let keystore: SyncCryptoStorePtr = + Arc::new(LocalKeystore::open(keystore_path.path(), None).expect("Creates keystore")); let public = SyncCryptoStore::sr25519_generate_new(&*keystore, BABE, Some("//Alice")) .expect("Generates authority pair"); @@ -601,8 +602,8 @@ fn can_author_block() { None => i += 1, Some(s) => { debug!(target: "babe", "Authored block {:?}", s.0); - break; - } + break + }, } } } @@ -622,26 +623,27 @@ fn propose_and_import_block( }); let pre_digest = sp_runtime::generic::Digest { - logs: vec![ - Item::babe_pre_digest( - PreDigest::SecondaryPlain(SecondaryPlainPreDigest { - authority_index: 0, - slot, - }), - ), - ], + logs: vec![Item::babe_pre_digest(PreDigest::SecondaryPlain(SecondaryPlainPreDigest { + authority_index: 0, + slot, + }))], }; let parent_hash = parent.hash(); let mut block = futures::executor::block_on(proposer.propose_with(pre_digest)).unwrap().block; - let epoch_descriptor = proposer_factory.epoch_changes.shared_data().epoch_descriptor_for_child_of( - descendent_query(&*proposer_factory.client), - &parent_hash, - *parent.number(), - slot, - ).unwrap().unwrap(); + let epoch_descriptor = proposer_factory + .epoch_changes + .shared_data() + .epoch_descriptor_for_child_of( + descendent_query(&*proposer_factory.client), + &parent_hash, + *parent.number(), + slot, + ) + .unwrap() + .unwrap(); let seal = { // sign the pre-sealed hash of the block and then @@ -706,13 +708,12 @@ fn 
importing_block_one_sets_genesis_epoch() { let genesis_epoch = Epoch::genesis(&data.link.config, 999.into()); let epoch_changes = data.link.epoch_changes.shared_data(); - let epoch_for_second_block = epoch_changes.epoch_data_for_child_of( - descendent_query(&*client), - &block_hash, - 1, - 1000.into(), - |slot| Epoch::genesis(&data.link.config, slot), - ).unwrap().unwrap(); + let epoch_for_second_block = epoch_changes + .epoch_data_for_child_of(descendent_query(&*client), &block_hash, 1, 1000.into(), |slot| { + Epoch::genesis(&data.link.config, slot) + }) + .unwrap() + .unwrap(); assert_eq!(epoch_for_second_block, genesis_epoch); } @@ -779,16 +780,10 @@ fn importing_epoch_change_block_prunes_tree() { let fork_3 = propose_and_import_blocks(BlockId::Hash(canon_hashes[18]), 10); // We should be tracking a total of 9 epochs in the fork tree - assert_eq!( - epoch_changes.shared_data().tree().iter().count(), - 9, - ); + assert_eq!(epoch_changes.shared_data().tree().iter().count(), 9,); // And only one root - assert_eq!( - epoch_changes.shared_data().tree().roots().count(), - 1, - ); + assert_eq!(epoch_changes.shared_data().tree().roots().count(), 1,); // We finalize block #13 from the canon chain, so on the next epoch // change the tree should be pruned, to not contain F (#7). 
@@ -796,32 +791,47 @@ fn importing_epoch_change_block_prunes_tree() { propose_and_import_blocks(BlockId::Hash(client.chain_info().best_hash), 7); // at this point no hashes from the first fork must exist on the tree - assert!( - !epoch_changes.shared_data().tree().iter().map(|(h, _, _)| h).any(|h| fork_1.contains(h)), - ); + assert!(!epoch_changes + .shared_data() + .tree() + .iter() + .map(|(h, _, _)| h) + .any(|h| fork_1.contains(h)),); // but the epoch changes from the other forks must still exist - assert!( - epoch_changes.shared_data().tree().iter().map(|(h, _, _)| h).any(|h| fork_2.contains(h)) - ); - - assert!( - epoch_changes.shared_data().tree().iter().map(|(h, _, _)| h).any(|h| fork_3.contains(h)), - ); + assert!(epoch_changes + .shared_data() + .tree() + .iter() + .map(|(h, _, _)| h) + .any(|h| fork_2.contains(h))); + + assert!(epoch_changes + .shared_data() + .tree() + .iter() + .map(|(h, _, _)| h) + .any(|h| fork_3.contains(h)),); // finalizing block #25 from the canon chain should prune out the second fork client.finalize_block(BlockId::Hash(canon_hashes[24]), None, false).unwrap(); propose_and_import_blocks(BlockId::Hash(client.chain_info().best_hash), 8); // at this point no hashes from the second fork must exist on the tree - assert!( - !epoch_changes.shared_data().tree().iter().map(|(h, _, _)| h).any(|h| fork_2.contains(h)), - ); + assert!(!epoch_changes + .shared_data() + .tree() + .iter() + .map(|(h, _, _)| h) + .any(|h| fork_2.contains(h)),); // while epoch changes from the last fork should still exist - assert!( - epoch_changes.shared_data().tree().iter().map(|(h, _, _)| h).any(|h| fork_3.contains(h)), - ); + assert!(epoch_changes + .shared_data() + .tree() + .iter() + .map(|(h, _, _)| h) + .any(|h| fork_3.contains(h)),); } #[test] @@ -856,20 +866,15 @@ fn verify_slots_are_strictly_increasing() { // we should fail to import this block since the slot number didn't increase. // we will panic due to the `PanickingBlockImport` defined above. 
- propose_and_import_block( - &b1, - Some(999.into()), - &mut proposer_factory, - &mut block_import, - ); + propose_and_import_block(&b1, Some(999.into()), &mut proposer_factory, &mut block_import); } #[test] fn babe_transcript_generation_match() { sp_tracing::try_init_simple(); let keystore_path = tempfile::tempdir().expect("Creates keystore path"); - let keystore: SyncCryptoStorePtr = Arc::new(LocalKeystore::open(keystore_path.path(), None) - .expect("Creates keystore")); + let keystore: SyncCryptoStorePtr = + Arc::new(LocalKeystore::open(keystore_path.path(), None).expect("Creates keystore")); let public = SyncCryptoStore::sr25519_generate_new(&*keystore, BABE, Some("//Alice")) .expect("Generates authority pair"); @@ -890,9 +895,7 @@ fn babe_transcript_generation_match() { let test = |t: merlin::Transcript| -> [u8; 16] { let mut b = [0u8; 16]; - t.build_rng() - .finalize(&mut ChaChaRng::from_seed([0u8;32])) - .fill_bytes(&mut b); + t.build_rng().finalize(&mut ChaChaRng::from_seed([0u8; 32])).fill_bytes(&mut b); b }; debug_assert!(test(orig_transcript) == test(transcript_from_data(new_transcript))); diff --git a/client/consensus/babe/src/verification.rs b/client/consensus/babe/src/verification.rs index 469286f5110d7..d8883d17ba5a5 100644 --- a/client/consensus/babe/src/verification.rs +++ b/client/consensus/babe/src/verification.rs @@ -17,18 +17,22 @@ // along with this program. If not, see . //! Verification for BABE headers. 
-use sp_runtime::{traits::Header, traits::DigestItemFor}; -use sp_core::{Pair, Public}; -use sp_consensus_babe::{make_transcript, AuthoritySignature, AuthorityPair, AuthorityId}; -use sp_consensus_babe::digests::{ - PreDigest, PrimaryPreDigest, SecondaryPlainPreDigest, SecondaryVRFPreDigest, - CompatibleDigestItem +use super::{ + authorship::{calculate_primary_threshold, check_primary_threshold, secondary_slot_author}, + babe_err, find_pre_digest, BlockT, Epoch, Error, }; +use log::{debug, trace}; use sc_consensus_slots::CheckedHeader; +use sp_consensus_babe::{ + digests::{ + CompatibleDigestItem, PreDigest, PrimaryPreDigest, SecondaryPlainPreDigest, + SecondaryVRFPreDigest, + }, + make_transcript, AuthorityId, AuthorityPair, AuthoritySignature, +}; use sp_consensus_slots::Slot; -use log::{debug, trace}; -use super::{find_pre_digest, babe_err, Epoch, BlockT, Error}; -use super::authorship::{calculate_primary_threshold, check_primary_threshold, secondary_slot_author}; +use sp_core::{Pair, Public}; +use sp_runtime::traits::{DigestItemFor, Header}; /// BABE verification parameters pub(super) struct VerificationParams<'a, B: 'a + BlockT> { @@ -57,26 +61,24 @@ pub(super) struct VerificationParams<'a, B: 'a + BlockT> { /// with each having different validation logic. 
pub(super) fn check_header( params: VerificationParams, -) -> Result>, Error> where +) -> Result>, Error> +where DigestItemFor: CompatibleDigestItem, { - let VerificationParams { - mut header, - pre_digest, - slot_now, - epoch, - } = params; + let VerificationParams { mut header, pre_digest, slot_now, epoch } = params; let authorities = &epoch.authorities; let pre_digest = pre_digest.map(Ok).unwrap_or_else(|| find_pre_digest::(&header))?; trace!(target: "babe", "Checking header"); - let seal = header.digest_mut().pop() + let seal = header + .digest_mut() + .pop() .ok_or_else(|| babe_err(Error::HeaderUnsealed(header.hash())))?; - let sig = seal.as_babe_seal().ok_or_else(|| { - babe_err(Error::HeaderBadSeal(header.hash())) - })?; + let sig = seal + .as_babe_seal() + .ok_or_else(|| babe_err(Error::HeaderBadSeal(header.hash())))?; // the pre-hash of the header doesn't include the seal // and that's what we sign @@ -84,7 +86,7 @@ pub(super) fn check_header( if pre_digest.slot() > slot_now { header.digest_mut().push(seal); - return Ok(CheckedHeader::Deferred(header, pre_digest.slot())); + return Ok(CheckedHeader::Deferred(header, pre_digest.slot())) } let author = match authorities.get(pre_digest.authority_index() as usize) { @@ -100,45 +102,33 @@ pub(super) fn check_header( primary.slot, ); - check_primary_header::( - pre_hash, - primary, - sig, - &epoch, - epoch.config.c, - )?; + check_primary_header::(pre_hash, primary, sig, &epoch, epoch.config.c)?; }, - PreDigest::SecondaryPlain(secondary) if epoch.config.allowed_slots.is_secondary_plain_slots_allowed() => { + PreDigest::SecondaryPlain(secondary) + if epoch.config.allowed_slots.is_secondary_plain_slots_allowed() => + { debug!(target: "babe", "Verifying secondary plain block #{} at slot: {}", header.number(), secondary.slot, ); - check_secondary_plain_header::( - pre_hash, - secondary, - sig, - &epoch, - )?; - }, - PreDigest::SecondaryVRF(secondary) if epoch.config.allowed_slots.is_secondary_vrf_slots_allowed() => { 
+ check_secondary_plain_header::(pre_hash, secondary, sig, &epoch)?; + } + PreDigest::SecondaryVRF(secondary) + if epoch.config.allowed_slots.is_secondary_vrf_slots_allowed() => + { debug!(target: "babe", "Verifying secondary VRF block #{} at slot: {}", header.number(), secondary.slot, ); - check_secondary_vrf_header::( - pre_hash, - secondary, - sig, - &epoch, - )?; - }, - _ => { - return Err(babe_err(Error::SecondarySlotAssignmentsDisabled)); + check_secondary_vrf_header::(pre_hash, secondary, sig, &epoch)?; } + _ => { + return Err(babe_err(Error::SecondarySlotAssignmentsDisabled)) + }, } let info = VerifiedHeaderInfo { @@ -170,27 +160,20 @@ fn check_primary_header( if AuthorityPair::verify(&signature, pre_hash, &author) { let (inout, _) = { - let transcript = make_transcript( - &epoch.randomness, - pre_digest.slot, - epoch.epoch_index, - ); + let transcript = make_transcript(&epoch.randomness, pre_digest.slot, epoch.epoch_index); - schnorrkel::PublicKey::from_bytes(author.as_slice()).and_then(|p| { - p.vrf_verify(transcript, &pre_digest.vrf_output, &pre_digest.vrf_proof) - }).map_err(|s| { - babe_err(Error::VRFVerificationFailed(s)) - })? + schnorrkel::PublicKey::from_bytes(author.as_slice()) + .and_then(|p| { + p.vrf_verify(transcript, &pre_digest.vrf_output, &pre_digest.vrf_proof) + }) + .map_err(|s| babe_err(Error::VRFVerificationFailed(s)))? }; - let threshold = calculate_primary_threshold( - c, - &epoch.authorities, - pre_digest.authority_index as usize, - ); + let threshold = + calculate_primary_threshold(c, &epoch.authorities, pre_digest.authority_index as usize); if !check_primary_threshold(&inout, threshold) { - return Err(babe_err(Error::VRFVerificationOfBlockFailed(author.clone(), threshold))); + return Err(babe_err(Error::VRFVerificationOfBlockFailed(author.clone(), threshold))) } Ok(()) @@ -211,16 +194,14 @@ fn check_secondary_plain_header( ) -> Result<(), Error> { // check the signature is valid under the expected authority and // chain state. 
- let expected_author = secondary_slot_author( - pre_digest.slot, - &epoch.authorities, - epoch.randomness, - ).ok_or_else(|| Error::NoSecondaryAuthorExpected)?; + let expected_author = + secondary_slot_author(pre_digest.slot, &epoch.authorities, epoch.randomness) + .ok_or_else(|| Error::NoSecondaryAuthorExpected)?; let author = &epoch.authorities[pre_digest.authority_index as usize].0; if expected_author != author { - return Err(Error::InvalidAuthor(expected_author.clone(), author.clone())); + return Err(Error::InvalidAuthor(expected_author.clone(), author.clone())) } if AuthorityPair::verify(&signature, pre_hash.as_ref(), author) { @@ -239,30 +220,22 @@ fn check_secondary_vrf_header( ) -> Result<(), Error> { // check the signature is valid under the expected authority and // chain state. - let expected_author = secondary_slot_author( - pre_digest.slot, - &epoch.authorities, - epoch.randomness, - ).ok_or_else(|| Error::NoSecondaryAuthorExpected)?; + let expected_author = + secondary_slot_author(pre_digest.slot, &epoch.authorities, epoch.randomness) + .ok_or_else(|| Error::NoSecondaryAuthorExpected)?; let author = &epoch.authorities[pre_digest.authority_index as usize].0; if expected_author != author { - return Err(Error::InvalidAuthor(expected_author.clone(), author.clone())); + return Err(Error::InvalidAuthor(expected_author.clone(), author.clone())) } if AuthorityPair::verify(&signature, pre_hash.as_ref(), author) { - let transcript = make_transcript( - &epoch.randomness, - pre_digest.slot, - epoch.epoch_index, - ); - - schnorrkel::PublicKey::from_bytes(author.as_slice()).and_then(|p| { - p.vrf_verify(transcript, &pre_digest.vrf_output, &pre_digest.vrf_proof) - }).map_err(|s| { - babe_err(Error::VRFVerificationFailed(s)) - })?; + let transcript = make_transcript(&epoch.randomness, pre_digest.slot, epoch.epoch_index); + + schnorrkel::PublicKey::from_bytes(author.as_slice()) + .and_then(|p| p.vrf_verify(transcript, &pre_digest.vrf_output, &pre_digest.vrf_proof)) + 
.map_err(|s| babe_err(Error::VRFVerificationFailed(s)))?; Ok(()) } else { diff --git a/client/consensus/common/src/longest_chain.rs b/client/consensus/common/src/longest_chain.rs index e1fbb600fa44f..b1f7f94f9eb28 100644 --- a/client/consensus/common/src/longest_chain.rs +++ b/client/consensus/common/src/longest_chain.rs @@ -18,30 +18,26 @@ //! Longest chain implementation -use std::sync::Arc; -use std::marker::PhantomData; use sc_client_api::backend; -use sp_consensus::{SelectChain, Error as ConsensusError}; use sp_blockchain::{Backend, HeaderBackend}; +use sp_consensus::{Error as ConsensusError, SelectChain}; use sp_runtime::{ - traits::{NumberFor, Block as BlockT}, generic::BlockId, + traits::{Block as BlockT, NumberFor}, }; +use std::{marker::PhantomData, sync::Arc}; /// Implement Longest Chain Select implementation /// where 'longest' is defined as the highest number of blocks pub struct LongestChain { backend: Arc, - _phantom: PhantomData + _phantom: PhantomData, } impl Clone for LongestChain { fn clone(&self) -> Self { let backend = self.backend.clone(); - LongestChain { - backend, - _phantom: Default::default() - } + LongestChain { backend, _phantom: Default::default() } } } @@ -52,21 +48,22 @@ where { /// Instantiate a new LongestChain for Backend B pub fn new(backend: Arc) -> Self { - LongestChain { - backend, - _phantom: Default::default(), - } + LongestChain { backend, _phantom: Default::default() } } fn best_block_header(&self) -> sp_blockchain::Result<::Header> { let info = self.backend.blockchain().info(); let import_lock = self.backend.get_import_lock(); - let best_hash = self.backend + let best_hash = self + .backend .blockchain() .best_containing(info.best_hash, None, import_lock)? .unwrap_or(info.best_hash); - Ok(self.backend.blockchain().header(BlockId::Hash(best_hash))? + Ok(self + .backend + .blockchain() + .header(BlockId::Hash(best_hash))? 
.expect("given block hash was fetched from block in db; qed")) } diff --git a/client/consensus/common/src/shared_data.rs b/client/consensus/common/src/shared_data.rs index 8132a42a4b929..a97e8e317f23d 100644 --- a/client/consensus/common/src/shared_data.rs +++ b/client/consensus/common/src/shared_data.rs @@ -18,8 +18,8 @@ //! Provides a generic wrapper around shared data. See [`SharedData`] for more information. +use parking_lot::{Condvar, MappedMutexGuard, Mutex, MutexGuard}; use std::sync::Arc; -use parking_lot::{Mutex, MappedMutexGuard, Condvar, MutexGuard}; /// Created by [`SharedDataLocked::release_mutex`]. /// @@ -75,8 +75,7 @@ impl<'a, T> SharedDataLocked<'a, T> { /// Release the mutex, but keep the shared data locked. pub fn release_mutex(mut self) -> SharedDataLockedUpgradable { SharedDataLockedUpgradable { - shared_data: self.shared_data.take() - .expect("`shared_data` is only taken on drop; qed"), + shared_data: self.shared_data.take().expect("`shared_data` is only taken on drop; qed"), } } } @@ -174,10 +173,7 @@ pub struct SharedData { impl Clone for SharedData { fn clone(&self) -> Self { - Self { - inner: self.inner.clone(), - cond_var: self.cond_var.clone(), - } + Self { inner: self.inner.clone(), cond_var: self.cond_var.clone() } } } @@ -228,10 +224,7 @@ impl SharedData { debug_assert!(!guard.locked); guard.locked = true; - SharedDataLocked { - inner: guard, - shared_data: Some(self.clone()), - } + SharedDataLocked { inner: guard, shared_data: Some(self.clone()) } } } diff --git a/client/consensus/epochs/src/lib.rs b/client/consensus/epochs/src/lib.rs index 98a3e83530510..9411bea1adc5b 100644 --- a/client/consensus/epochs/src/lib.rs +++ b/client/consensus/epochs/src/lib.rs @@ -20,12 +20,16 @@ pub mod migration; -use std::{ops::Add, collections::BTreeMap, borrow::{Borrow, BorrowMut}}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use fork_tree::ForkTree; use sc_client_api::utils::is_descendent_of; -use sp_blockchain::{HeaderMetadata, 
HeaderBackend, Error as ClientError}; +use sp_blockchain::{Error as ClientError, HeaderBackend, HeaderMetadata}; use sp_runtime::traits::{Block as BlockT, NumberFor, One, Zero}; +use std::{ + borrow::{Borrow, BorrowMut}, + collections::BTreeMap, + ops::Add, +}; /// A builder for `is_descendent_of` functions. pub trait IsDescendentOfBuilder { @@ -41,8 +45,7 @@ pub trait IsDescendentOfBuilder { /// details aren't yet stored, but its parent is. /// /// The format of `current` when `Some` is `(current, current_parent)`. - fn build_is_descendent_of(&self, current: Option<(Hash, Hash)>) - -> Self::IsDescendentOf; + fn build_is_descendent_of(&self, current: Option<(Hash, Hash)>) -> Self::IsDescendentOf; } /// Produce a descendent query object given the client. @@ -55,16 +58,18 @@ pub fn descendent_query(client: &H) -> HeaderBackendDescendentBuilder< pub struct HeaderBackendDescendentBuilder(H, std::marker::PhantomData); impl<'a, H, Block> IsDescendentOfBuilder - for HeaderBackendDescendentBuilder<&'a H, Block> where - H: HeaderBackend + HeaderMetadata, + for HeaderBackendDescendentBuilder<&'a H, Block> +where + H: HeaderBackend + HeaderMetadata, Block: BlockT, { type Error = ClientError; type IsDescendentOf = Box Result + 'a>; - fn build_is_descendent_of(&self, current: Option<(Block::Hash, Block::Hash)>) - -> Self::IsDescendentOf - { + fn build_is_descendent_of( + &self, + current: Option<(Block::Hash, Block::Hash)>, + ) -> Self::IsDescendentOf { Box::new(is_descendent_of(self.0, current)) } } @@ -90,10 +95,7 @@ pub trait Epoch { impl<'a, E: Epoch> From<&'a E> for EpochHeader { fn from(epoch: &'a E) -> EpochHeader { - Self { - start_slot: epoch.start_slot(), - end_slot: epoch.end_slot(), - } + Self { start_slot: epoch.start_slot(), end_slot: epoch.end_slot() } } } @@ -109,10 +111,7 @@ pub struct EpochHeader { impl Clone for EpochHeader { fn clone(&self) -> Self { - Self { - start_slot: self.start_slot, - end_slot: self.end_slot, - } + Self { start_slot: self.start_slot, 
end_slot: self.end_slot } } } @@ -149,7 +148,8 @@ pub enum ViableEpoch { Signaled(ERef), } -impl AsRef for ViableEpoch where +impl AsRef for ViableEpoch +where ERef: Borrow, { fn as_ref(&self) -> &E { @@ -160,7 +160,8 @@ impl AsRef for ViableEpoch where } } -impl AsMut for ViableEpoch where +impl AsMut for ViableEpoch +where ERef: BorrowMut, { fn as_mut(&mut self) -> &mut E { @@ -171,7 +172,8 @@ impl AsMut for ViableEpoch where } } -impl ViableEpoch where +impl ViableEpoch +where E: Epoch + Clone, ERef: Borrow, { @@ -187,18 +189,14 @@ impl ViableEpoch where /// Get cloned value for the viable epoch. pub fn into_cloned(self) -> ViableEpoch { match self { - ViableEpoch::UnimportedGenesis(e) => - ViableEpoch::UnimportedGenesis(e), + ViableEpoch::UnimportedGenesis(e) => ViableEpoch::UnimportedGenesis(e), ViableEpoch::Signaled(e) => ViableEpoch::Signaled(e.borrow().clone()), } } /// Increment the epoch, yielding an `IncrementedEpoch` to be imported /// into the fork-tree. - pub fn increment( - &self, - next_descriptor: E::NextEpochDescriptor - ) -> IncrementedEpoch { + pub fn increment(&self, next_descriptor: E::NextEpochDescriptor) -> IncrementedEpoch { let next = self.as_ref().increment(next_descriptor); let to_persist = match *self { ViableEpoch::UnimportedGenesis(ref epoch_0) => @@ -216,7 +214,7 @@ pub enum ViableEpochDescriptor { /// The epoch is an unimported genesis, with given start slot number. UnimportedGenesis(E::Slot), /// The epoch is signaled and has been imported, with given identifier and header. 
- Signaled(EpochIdentifier, EpochHeader) + Signaled(EpochIdentifier, EpochHeader), } impl ViableEpochDescriptor { @@ -243,8 +241,7 @@ impl<'a, E: Epoch> From<&'a PersistedEpoch> for PersistedEpochHeader { match epoch { PersistedEpoch::Genesis(ref epoch_0, ref epoch_1) => PersistedEpochHeader::Genesis(epoch_0.into(), epoch_1.into()), - PersistedEpoch::Regular(ref epoch_n) => - PersistedEpochHeader::Regular(epoch_n.into()), + PersistedEpoch::Regular(ref epoch_n) => PersistedEpochHeader::Regular(epoch_n.into()), } } } @@ -312,7 +309,8 @@ fn fake_head_hash + AsMut<[u8]> + Clone>(parent_hash: &H) -> H { h } -impl Default for EpochChanges where +impl Default for EpochChanges +where Hash: PartialEq + Ord, Number: Ord, { @@ -321,9 +319,10 @@ impl Default for EpochChanges where } } -impl EpochChanges where +impl EpochChanges +where Hash: PartialEq + Ord + AsRef<[u8]> + AsMut<[u8]> + Copy, - Number: Ord + One + Zero + Add + Copy, + Number: Ord + One + Zero + Add + Copy, { /// Create a new epoch change. pub fn new() -> Self { @@ -337,51 +336,38 @@ impl EpochChanges where } /// Map the epoch changes from one storing data to a different one. 
- pub fn map(self, mut f: F) -> EpochChanges where - B: Epoch, + pub fn map(self, mut f: F) -> EpochChanges + where + B: Epoch, F: FnMut(&Hash, &Number, E) -> B, { EpochChanges { - inner: self.inner.map(&mut |_, _, header| { - match header { - PersistedEpochHeader::Genesis(epoch_0, epoch_1) => { - PersistedEpochHeader::Genesis( - EpochHeader { - start_slot: epoch_0.start_slot, - end_slot: epoch_0.end_slot, - }, - EpochHeader { - start_slot: epoch_1.start_slot, - end_slot: epoch_1.end_slot, - }, - ) - }, - PersistedEpochHeader::Regular(epoch_n) => { - PersistedEpochHeader::Regular( - EpochHeader { - start_slot: epoch_n.start_slot, - end_slot: epoch_n.end_slot, - }, - ) - }, - } + inner: self.inner.map(&mut |_, _, header| match header { + PersistedEpochHeader::Genesis(epoch_0, epoch_1) => PersistedEpochHeader::Genesis( + EpochHeader { start_slot: epoch_0.start_slot, end_slot: epoch_0.end_slot }, + EpochHeader { start_slot: epoch_1.start_slot, end_slot: epoch_1.end_slot }, + ), + PersistedEpochHeader::Regular(epoch_n) => + PersistedEpochHeader::Regular(EpochHeader { + start_slot: epoch_n.start_slot, + end_slot: epoch_n.end_slot, + }), }), - epochs: self.epochs.into_iter().map(|((hash, number), epoch)| { - let bepoch = match epoch { - PersistedEpoch::Genesis(epoch_0, epoch_1) => { - PersistedEpoch::Genesis( + epochs: self + .epochs + .into_iter() + .map(|((hash, number), epoch)| { + let bepoch = match epoch { + PersistedEpoch::Genesis(epoch_0, epoch_1) => PersistedEpoch::Genesis( f(&hash, &number, epoch_0), f(&hash, &number, epoch_1), - ) - }, - PersistedEpoch::Regular(epoch_n) => { - PersistedEpoch::Regular( - f(&hash, &number, epoch_n) - ) - }, - }; - ((hash, number), bepoch) - }).collect(), + ), + PersistedEpoch::Regular(epoch_n) => + PersistedEpoch::Regular(f(&hash, &number, epoch_n)), + }; + ((hash, number), bepoch) + }) + .collect(), } } @@ -395,25 +381,17 @@ impl EpochChanges where number: Number, slot: E::Slot, ) -> Result<(), fork_tree::Error> { - let 
is_descendent_of = descendent_of_builder - .build_is_descendent_of(None); + let is_descendent_of = descendent_of_builder.build_is_descendent_of(None); let predicate = |epoch: &PersistedEpochHeader| match *epoch { - PersistedEpochHeader::Genesis(_, ref epoch_1) => - slot >= epoch_1.end_slot, - PersistedEpochHeader::Regular(ref epoch_n) => - slot >= epoch_n.end_slot, + PersistedEpochHeader::Genesis(_, ref epoch_1) => slot >= epoch_1.end_slot, + PersistedEpochHeader::Regular(ref epoch_n) => slot >= epoch_n.end_slot, }; // prune any epochs which could not be _live_ as of the children of the // finalized block, i.e. re-root the fork tree to the oldest ancestor of // (hash, number) where epoch.end_slot() >= finalized_slot - let removed = self.inner.prune( - hash, - &number, - &is_descendent_of, - &predicate, - )?; + let removed = self.inner.prune(hash, &number, &is_descendent_of, &predicate)?; for (hash, number, _) in removed { self.epochs.remove(&(hash, number)); @@ -424,18 +402,18 @@ impl EpochChanges where /// Get a reference to an epoch with given identifier. 
pub fn epoch(&self, id: &EpochIdentifier) -> Option<&E> { - self.epochs.get(&(id.hash, id.number)) - .and_then(|v| { - match v { - PersistedEpoch::Genesis(ref epoch_0, _) - if id.position == EpochIdentifierPosition::Genesis0 => Some(epoch_0), - PersistedEpoch::Genesis(_, ref epoch_1) - if id.position == EpochIdentifierPosition::Genesis1 => Some(epoch_1), - PersistedEpoch::Regular(ref epoch_n) - if id.position == EpochIdentifierPosition::Regular => Some(epoch_n), - _ => None, - } - }) + self.epochs.get(&(id.hash, id.number)).and_then(|v| match v { + PersistedEpoch::Genesis(ref epoch_0, _) + if id.position == EpochIdentifierPosition::Genesis0 => + Some(epoch_0), + PersistedEpoch::Genesis(_, ref epoch_1) + if id.position == EpochIdentifierPosition::Genesis1 => + Some(epoch_1), + PersistedEpoch::Regular(ref epoch_n) + if id.position == EpochIdentifierPosition::Regular => + Some(epoch_n), + _ => None, + }) } /// Get a reference to a viable epoch with given descriptor. @@ -443,33 +421,32 @@ impl EpochChanges where &self, descriptor: &ViableEpochDescriptor, make_genesis: G, - ) -> Option> where - G: FnOnce(E::Slot) -> E + ) -> Option> + where + G: FnOnce(E::Slot) -> E, { match descriptor { - ViableEpochDescriptor::UnimportedGenesis(slot) => { - Some(ViableEpoch::UnimportedGenesis(make_genesis(*slot))) - }, - ViableEpochDescriptor::Signaled(identifier, _) => { - self.epoch(&identifier).map(ViableEpoch::Signaled) - }, + ViableEpochDescriptor::UnimportedGenesis(slot) => + Some(ViableEpoch::UnimportedGenesis(make_genesis(*slot))), + ViableEpochDescriptor::Signaled(identifier, _) => + self.epoch(&identifier).map(ViableEpoch::Signaled), } } /// Get a mutable reference to an epoch with given identifier. 
pub fn epoch_mut(&mut self, id: &EpochIdentifier) -> Option<&mut E> { - self.epochs.get_mut(&(id.hash, id.number)) - .and_then(|v| { - match v { - PersistedEpoch::Genesis(ref mut epoch_0, _) - if id.position == EpochIdentifierPosition::Genesis0 => Some(epoch_0), - PersistedEpoch::Genesis(_, ref mut epoch_1) - if id.position == EpochIdentifierPosition::Genesis1 => Some(epoch_1), - PersistedEpoch::Regular(ref mut epoch_n) - if id.position == EpochIdentifierPosition::Regular => Some(epoch_n), - _ => None, - } - }) + self.epochs.get_mut(&(id.hash, id.number)).and_then(|v| match v { + PersistedEpoch::Genesis(ref mut epoch_0, _) + if id.position == EpochIdentifierPosition::Genesis0 => + Some(epoch_0), + PersistedEpoch::Genesis(_, ref mut epoch_1) + if id.position == EpochIdentifierPosition::Genesis1 => + Some(epoch_1), + PersistedEpoch::Regular(ref mut epoch_n) + if id.position == EpochIdentifierPosition::Regular => + Some(epoch_n), + _ => None, + }) } /// Get a mutable reference to a viable epoch with given descriptor. 
@@ -477,16 +454,15 @@ impl EpochChanges where &mut self, descriptor: &ViableEpochDescriptor, make_genesis: G, - ) -> Option> where - G: FnOnce(E::Slot) -> E + ) -> Option> + where + G: FnOnce(E::Slot) -> E, { match descriptor { - ViableEpochDescriptor::UnimportedGenesis(slot) => { - Some(ViableEpoch::UnimportedGenesis(make_genesis(*slot))) - }, - ViableEpochDescriptor::Signaled(identifier, _) => { - self.epoch_mut(&identifier).map(ViableEpoch::Signaled) - }, + ViableEpochDescriptor::UnimportedGenesis(slot) => + Some(ViableEpoch::UnimportedGenesis(make_genesis(*slot))), + ViableEpochDescriptor::Signaled(identifier, _) => + self.epoch_mut(&identifier).map(ViableEpoch::Signaled), } } @@ -497,18 +473,15 @@ impl EpochChanges where pub fn epoch_data( &self, descriptor: &ViableEpochDescriptor, - make_genesis: G - ) -> Option where + make_genesis: G, + ) -> Option + where G: FnOnce(E::Slot) -> E, E: Clone, { match descriptor { - ViableEpochDescriptor::UnimportedGenesis(slot) => { - Some(make_genesis(*slot)) - }, - ViableEpochDescriptor::Signaled(identifier, _) => { - self.epoch(&identifier).cloned() - }, + ViableEpochDescriptor::UnimportedGenesis(slot) => Some(make_genesis(*slot)), + ViableEpochDescriptor::Signaled(identifier, _) => self.epoch(&identifier).cloned(), } } @@ -524,7 +497,8 @@ impl EpochChanges where parent_number: Number, slot: E::Slot, make_genesis: G, - ) -> Result, fork_tree::Error> where + ) -> Result, fork_tree::Error> + where G: FnOnce(E::Slot) -> E, E: Clone, { @@ -532,7 +506,7 @@ impl EpochChanges where descendent_of_builder, parent_hash, parent_number, - slot + slot, )?; Ok(descriptor.and_then(|des| self.epoch_data(&des, make_genesis))) @@ -555,8 +529,8 @@ impl EpochChanges where // "descends" from our parent-hash. 
let fake_head_hash = fake_head_hash(parent_hash); - let is_descendent_of = descendent_of_builder - .build_is_descendent_of(Some((fake_head_hash, *parent_hash))); + let is_descendent_of = + descendent_of_builder.build_is_descendent_of(Some((fake_head_hash, *parent_hash))); if parent_number == Zero::zero() { // need to insert the genesis epoch. @@ -569,37 +543,41 @@ impl EpochChanges where // at epoch_1 -- all we're doing here is figuring out which node // we need. let predicate = |epoch: &PersistedEpochHeader| match *epoch { - PersistedEpochHeader::Genesis(ref epoch_0, _) => - epoch_0.start_slot <= slot, - PersistedEpochHeader::Regular(ref epoch_n) => - epoch_n.start_slot <= slot, + PersistedEpochHeader::Genesis(ref epoch_0, _) => epoch_0.start_slot <= slot, + PersistedEpochHeader::Regular(ref epoch_n) => epoch_n.start_slot <= slot, }; - self.inner.find_node_where( - &fake_head_hash, - &(parent_number + One::one()), - &is_descendent_of, - &predicate, - ) + self.inner + .find_node_where( + &fake_head_hash, + &(parent_number + One::one()), + &is_descendent_of, + &predicate, + ) .map(|n| { - n.map(|node| (match node.data { - // Ok, we found our node. - // and here we figure out which of the internal epochs - // of a genesis node to use based on their start slot. - PersistedEpochHeader::Genesis(ref epoch_0, ref epoch_1) => - if epoch_1.start_slot <= slot { - (EpochIdentifierPosition::Genesis1, epoch_1.clone()) - } else { - (EpochIdentifierPosition::Genesis0, epoch_0.clone()) + n.map(|node| { + ( + match node.data { + // Ok, we found our node. + // and here we figure out which of the internal epochs + // of a genesis node to use based on their start slot. 
+ PersistedEpochHeader::Genesis(ref epoch_0, ref epoch_1) => + if epoch_1.start_slot <= slot { + (EpochIdentifierPosition::Genesis1, epoch_1.clone()) + } else { + (EpochIdentifierPosition::Genesis0, epoch_0.clone()) + }, + PersistedEpochHeader::Regular(ref epoch_n) => + (EpochIdentifierPosition::Regular, epoch_n.clone()), }, - PersistedEpochHeader::Regular(ref epoch_n) => - (EpochIdentifierPosition::Regular, epoch_n.clone()), - }, node)).map(|((position, header), node)| { - ViableEpochDescriptor::Signaled(EpochIdentifier { - position, - hash: node.hash, - number: node.number - }, header) + node, + ) + }) + .map(|((position, header), node)| { + ViableEpochDescriptor::Signaled( + EpochIdentifier { position, hash: node.hash, number: node.number }, + header, + ) }) }) } @@ -617,16 +595,11 @@ impl EpochChanges where parent_hash: Hash, epoch: IncrementedEpoch, ) -> Result<(), fork_tree::Error> { - let is_descendent_of = descendent_of_builder - .build_is_descendent_of(Some((hash, parent_hash))); + let is_descendent_of = + descendent_of_builder.build_is_descendent_of(Some((hash, parent_hash))); let header = PersistedEpochHeader::::from(&epoch.0); - let res = self.inner.import( - hash, - number, - header, - &is_descendent_of, - ); + let res = self.inner.import(hash, number, header, &is_descendent_of); match res { Ok(_) | Err(fork_tree::Error::Duplicate) => { @@ -653,8 +626,7 @@ pub type SharedEpochChanges = #[cfg(test)] mod tests { - use super::*; - use super::Epoch as EpochT; + use super::{Epoch as EpochT, *}; #[derive(Debug, PartialEq)] pub struct TestError; @@ -667,15 +639,14 @@ mod tests { impl std::error::Error for TestError {} - impl<'a, F: 'a , H: 'a + PartialEq + std::fmt::Debug> IsDescendentOfBuilder for &'a F - where F: Fn(&H, &H) -> Result + impl<'a, F: 'a, H: 'a + PartialEq + std::fmt::Debug> IsDescendentOfBuilder for &'a F + where + F: Fn(&H, &H) -> Result, { type Error = TestError; type IsDescendentOf = Box Result + 'a>; - fn build_is_descendent_of(&self, 
current: Option<(H, H)>) - -> Self::IsDescendentOf - { + fn build_is_descendent_of(&self, current: Option<(H, H)>) -> Self::IsDescendentOf { let f = *self; Box::new(move |base, head| { let mut head = head; @@ -683,7 +654,7 @@ mod tests { if let Some((ref c_head, ref c_parent)) = current { if head == c_head { if base == c_parent { - return Ok(true); + return Ok(true) } else { head = c_parent; } @@ -709,10 +680,7 @@ mod tests { type Slot = Slot; fn increment(&self, _: ()) -> Self { - Epoch { - start_slot: self.start_slot + self.duration, - duration: self.duration, - } + Epoch { start_slot: self.start_slot + self.duration, duration: self.duration } } fn end_slot(&self) -> Slot { @@ -741,12 +709,10 @@ mod tests { }; let epoch_changes = EpochChanges::<_, _, Epoch>::new(); - let genesis_epoch = epoch_changes.epoch_descriptor_for_child_of( - &is_descendent_of, - b"0", - 0, - 10101, - ).unwrap().unwrap(); + let genesis_epoch = epoch_changes + .epoch_descriptor_for_child_of(&is_descendent_of, b"0", 0, 10101) + .unwrap() + .unwrap(); match genesis_epoch { ViableEpochDescriptor::UnimportedGenesis(slot) => { @@ -755,12 +721,10 @@ mod tests { _ => panic!("should be unimported genesis"), }; - let genesis_epoch_2 = epoch_changes.epoch_descriptor_for_child_of( - &is_descendent_of, - b"0", - 0, - 10102, - ).unwrap().unwrap(); + let genesis_epoch_2 = epoch_changes + .epoch_descriptor_for_child_of(&is_descendent_of, b"0", 0, 10102) + .unwrap() + .unwrap(); match genesis_epoch_2 { ViableEpochDescriptor::UnimportedGenesis(slot) => { @@ -786,34 +750,23 @@ mod tests { } }; - let make_genesis = |slot| Epoch { - start_slot: slot, - duration: 100, - }; + let make_genesis = |slot| Epoch { start_slot: slot, duration: 100 }; let mut epoch_changes = EpochChanges::<_, _, Epoch>::new(); - let genesis_epoch = epoch_changes.epoch_descriptor_for_child_of( - &is_descendent_of, - b"0", - 0, - 100, - ).unwrap().unwrap(); + let genesis_epoch = epoch_changes + 
.epoch_descriptor_for_child_of(&is_descendent_of, b"0", 0, 100) + .unwrap() + .unwrap(); assert_eq!(genesis_epoch, ViableEpochDescriptor::UnimportedGenesis(100)); - let import_epoch_1 = epoch_changes - .viable_epoch(&genesis_epoch, &make_genesis) - .unwrap() - .increment(()); + let import_epoch_1 = + epoch_changes.viable_epoch(&genesis_epoch, &make_genesis).unwrap().increment(()); let epoch_1 = import_epoch_1.as_ref().clone(); - epoch_changes.import( - &is_descendent_of, - *b"A", - 1, - *b"0", - import_epoch_1, - ).unwrap(); + epoch_changes + .import(&is_descendent_of, *b"A", 1, *b"0", import_epoch_1) + .unwrap(); let genesis_epoch = epoch_changes.epoch_data(&genesis_epoch, &make_genesis).unwrap(); assert!(is_descendent_of(b"0", b"A").unwrap()); @@ -823,13 +776,10 @@ mod tests { { // x is still within the genesis epoch. - let x = epoch_changes.epoch_data_for_child_of( - &is_descendent_of, - b"A", - 1, - end_slot - 1, - &make_genesis, - ).unwrap().unwrap(); + let x = epoch_changes + .epoch_data_for_child_of(&is_descendent_of, b"A", 1, end_slot - 1, &make_genesis) + .unwrap() + .unwrap(); assert_eq!(x, genesis_epoch); } @@ -837,13 +787,10 @@ mod tests { { // x is now at the next epoch, because the block is now at the // start slot of epoch 1. - let x = epoch_changes.epoch_data_for_child_of( - &is_descendent_of, - b"A", - 1, - end_slot, - &make_genesis, - ).unwrap().unwrap(); + let x = epoch_changes + .epoch_data_for_child_of(&is_descendent_of, b"A", 1, end_slot, &make_genesis) + .unwrap() + .unwrap(); assert_eq!(x, epoch_1); } @@ -851,13 +798,16 @@ mod tests { { // x is now at the next epoch, because the block is now after // start slot of epoch 1. 
- let x = epoch_changes.epoch_data_for_child_of( - &is_descendent_of, - b"A", - 1, - epoch_1.end_slot() - 1, - &make_genesis, - ).unwrap().unwrap(); + let x = epoch_changes + .epoch_data_for_child_of( + &is_descendent_of, + b"A", + 1, + epoch_1.end_slot() - 1, + &make_genesis, + ) + .unwrap() + .unwrap(); assert_eq!(x, epoch_1); } @@ -880,90 +830,65 @@ mod tests { let duration = 100; - let make_genesis = |slot| Epoch { - start_slot: slot, - duration, - }; + let make_genesis = |slot| Epoch { start_slot: slot, duration }; let mut epoch_changes = EpochChanges::new(); let next_descriptor = (); // insert genesis epoch for A { - let genesis_epoch_a_descriptor = epoch_changes.epoch_descriptor_for_child_of( - &is_descendent_of, - b"0", - 0, - 100, - ).unwrap().unwrap(); + let genesis_epoch_a_descriptor = epoch_changes + .epoch_descriptor_for_child_of(&is_descendent_of, b"0", 0, 100) + .unwrap() + .unwrap(); let incremented_epoch = epoch_changes .viable_epoch(&genesis_epoch_a_descriptor, &make_genesis) .unwrap() .increment(next_descriptor.clone()); - epoch_changes.import( - &is_descendent_of, - *b"A", - 1, - *b"0", - incremented_epoch, - ).unwrap(); + epoch_changes + .import(&is_descendent_of, *b"A", 1, *b"0", incremented_epoch) + .unwrap(); } // insert genesis epoch for X { - let genesis_epoch_x_descriptor = epoch_changes.epoch_descriptor_for_child_of( - &is_descendent_of, - b"0", - 0, - 1000, - ).unwrap().unwrap(); + let genesis_epoch_x_descriptor = epoch_changes + .epoch_descriptor_for_child_of(&is_descendent_of, b"0", 0, 1000) + .unwrap() + .unwrap(); let incremented_epoch = epoch_changes .viable_epoch(&genesis_epoch_x_descriptor, &make_genesis) .unwrap() .increment(next_descriptor.clone()); - epoch_changes.import( - &is_descendent_of, - *b"X", - 1, - *b"0", - incremented_epoch, - ).unwrap(); + epoch_changes + .import(&is_descendent_of, *b"X", 1, *b"0", incremented_epoch) + .unwrap(); } // now check that the genesis epochs for our respective block 1s // respect the 
chain structure. { - let epoch_for_a_child = epoch_changes.epoch_data_for_child_of( - &is_descendent_of, - b"A", - 1, - 101, - &make_genesis, - ).unwrap().unwrap(); + let epoch_for_a_child = epoch_changes + .epoch_data_for_child_of(&is_descendent_of, b"A", 1, 101, &make_genesis) + .unwrap() + .unwrap(); assert_eq!(epoch_for_a_child, make_genesis(100)); - let epoch_for_x_child = epoch_changes.epoch_data_for_child_of( - &is_descendent_of, - b"X", - 1, - 1001, - &make_genesis, - ).unwrap().unwrap(); + let epoch_for_x_child = epoch_changes + .epoch_data_for_child_of(&is_descendent_of, b"X", 1, 1001, &make_genesis) + .unwrap() + .unwrap(); assert_eq!(epoch_for_x_child, make_genesis(1000)); - let epoch_for_x_child_before_genesis = epoch_changes.epoch_data_for_child_of( - &is_descendent_of, - b"X", - 1, - 101, - &make_genesis, - ).unwrap(); + let epoch_for_x_child_before_genesis = epoch_changes + .epoch_data_for_child_of(&is_descendent_of, b"X", 1, 101, &make_genesis) + .unwrap(); // even though there is a genesis epoch at that slot, it's not in // this chain. diff --git a/client/consensus/epochs/src/migration.rs b/client/consensus/epochs/src/migration.rs index 6e7baba8053af..49e08240df8c3 100644 --- a/client/consensus/epochs/src/migration.rs +++ b/client/consensus/epochs/src/migration.rs @@ -18,11 +18,11 @@ //! Migration types for epoch changes. -use std::collections::BTreeMap; -use codec::{Encode, Decode}; +use crate::{Epoch, EpochChanges, PersistedEpoch, PersistedEpochHeader}; +use codec::{Decode, Encode}; use fork_tree::ForkTree; use sp_runtime::traits::{Block as BlockT, NumberFor}; -use crate::{Epoch, EpochChanges, PersistedEpoch, PersistedEpochHeader}; +use std::collections::BTreeMap; /// Legacy definition of epoch changes. #[derive(Clone, Encode, Decode)] @@ -31,9 +31,11 @@ pub struct EpochChangesV0 { } /// Type alias for legacy definition of epoch changes. 
-pub type EpochChangesForV0 = EpochChangesV0<::Hash, NumberFor, Epoch>; +pub type EpochChangesForV0 = + EpochChangesV0<::Hash, NumberFor, Epoch>; -impl EpochChangesV0 where +impl EpochChangesV0 +where Hash: PartialEq + Ord + Copy, Number: Ord + Copy, { diff --git a/client/consensus/manual-seal/src/consensus.rs b/client/consensus/manual-seal/src/consensus.rs index 0cfd99cab5c99..1f7ee413b71d0 100644 --- a/client/consensus/manual-seal/src/consensus.rs +++ b/client/consensus/manual-seal/src/consensus.rs @@ -19,26 +19,30 @@ //! Extensions for manual seal to produce blocks valid for any runtime. use super::Error; -use sp_runtime::traits::{Block as BlockT, DigestFor}; -use sp_inherents::InherentData; use sp_consensus::BlockImportParams; +use sp_inherents::InherentData; +use sp_runtime::traits::{Block as BlockT, DigestFor}; pub mod babe; -/// Consensus data provider, manual seal uses this trait object for authoring blocks valid +/// Consensus data provider, manual seal uses this trait object for authoring blocks valid /// for any runtime. pub trait ConsensusDataProvider: Send + Sync { /// Block import transaction type type Transaction; /// Attempt to create a consensus digest. - fn create_digest(&self, parent: &B::Header, inherents: &InherentData) -> Result, Error>; + fn create_digest( + &self, + parent: &B::Header, + inherents: &InherentData, + ) -> Result, Error>; /// set up the neccessary import params. 
fn append_block_import( &self, parent: &B::Header, params: &mut BlockImportParams, - inherents: &InherentData + inherents: &InherentData, ) -> Result<(), Error>; } diff --git a/client/consensus/manual-seal/src/consensus/babe.rs b/client/consensus/manual-seal/src/consensus/babe.rs index fb2d47b48fed1..3773c7c3cf121 100644 --- a/client/consensus/manual-seal/src/consensus/babe.rs +++ b/client/consensus/manual-seal/src/consensus/babe.rs @@ -21,30 +21,40 @@ use super::ConsensusDataProvider; use crate::Error; use codec::Encode; -use std::{borrow::Cow, sync::{Arc, atomic}, time::SystemTime}; use sc_client_api::{AuxStore, UsageProvider}; use sc_consensus_babe::{ - Config, Epoch, authorship, CompatibleDigestItem, BabeIntermediate, INTERMEDIATE_KEY, - find_pre_digest, + authorship, find_pre_digest, BabeIntermediate, CompatibleDigestItem, Config, Epoch, + INTERMEDIATE_KEY, +}; +use sc_consensus_epochs::{ + descendent_query, EpochHeader, SharedEpochChanges, ViableEpochDescriptor, }; -use sc_consensus_epochs::{SharedEpochChanges, descendent_query, ViableEpochDescriptor, EpochHeader}; use sp_keystore::SyncCryptoStorePtr; +use std::{ + borrow::Cow, + sync::{atomic, Arc}, + time::SystemTime, +}; use sp_api::{ProvideRuntimeApi, TransactionFor}; use sp_blockchain::{HeaderBackend, HeaderMetadata}; -use sp_consensus::{BlockImportParams, BlockOrigin, ForkChoiceStrategy}; -use sp_consensus_slots::Slot; +use sp_consensus::{ + import_queue::{CacheKeyId, Verifier}, + BlockImportParams, BlockOrigin, ForkChoiceStrategy, +}; use sp_consensus_babe::{ - BabeApi, inherents::BabeInherentData, ConsensusLog, BABE_ENGINE_ID, AuthorityId, - digests::{PreDigest, SecondaryPlainPreDigest, NextEpochDescriptor}, BabeAuthorityWeight, + digests::{NextEpochDescriptor, PreDigest, SecondaryPlainPreDigest}, + inherents::BabeInherentData, + AuthorityId, BabeApi, BabeAuthorityWeight, ConsensusLog, BABE_ENGINE_ID, }; +use sp_consensus_slots::Slot; use sp_inherents::{InherentData, InherentDataProvider, 
InherentIdentifier}; use sp_runtime::{ - traits::{DigestItemFor, DigestFor, Block as BlockT, Zero, Header}, - generic::{Digest, BlockId}, Justifications, + generic::{BlockId, Digest}, + traits::{Block as BlockT, DigestFor, DigestItemFor, Header, Zero}, + Justifications, }; -use sp_timestamp::{InherentType, INHERENT_IDENTIFIER, TimestampInherentData}; -use sp_consensus::import_queue::{Verifier, CacheKeyId}; +use sp_timestamp::{InherentType, TimestampInherentData, INHERENT_IDENTIFIER}; /// Provides BABE-compatible predigests and BlockImportParams. /// Intended for use with BABE runtimes. @@ -77,19 +87,16 @@ pub struct BabeVerifier { impl BabeVerifier { /// create a nrew verifier pub fn new(epoch_changes: SharedEpochChanges, client: Arc) -> BabeVerifier { - BabeVerifier { - epoch_changes, - client, - } + BabeVerifier { epoch_changes, client } } } /// The verifier for the manual seal engine; instantly finalizes. #[async_trait::async_trait] impl Verifier for BabeVerifier - where - B: BlockT, - C: HeaderBackend + HeaderMetadata +where + B: BlockT, + C: HeaderBackend + HeaderMetadata, { async fn verify( &mut self, @@ -107,7 +114,9 @@ impl Verifier for BabeVerifier let pre_digest = find_pre_digest::(&header)?; let parent_hash = header.parent_hash(); - let parent = self.client.header(BlockId::Hash(*parent_hash)) + let parent = self + .client + .header(BlockId::Hash(*parent_hash)) .ok() .flatten() .ok_or_else(|| format!("header for block {} not found", parent_hash))?; @@ -134,14 +143,14 @@ impl Verifier for BabeVerifier } impl BabeConsensusDataProvider - where - B: BlockT, - C: AuxStore - + HeaderBackend - + ProvideRuntimeApi - + HeaderMetadata - + UsageProvider, - C::Api: BabeApi, +where + B: BlockT, + C: AuxStore + + HeaderBackend + + ProvideRuntimeApi + + HeaderMetadata + + UsageProvider, + C::Api: BabeApi, { pub fn new( client: Arc, @@ -155,13 +164,7 @@ impl BabeConsensusDataProvider let config = Config::get_or_compute(&*client)?; - Ok(Self { - config, - client, - 
keystore, - epoch_changes, - authorities, - }) + Ok(Self { config, client, keystore, epoch_changes, authorities }) } fn epoch(&self, parent: &B::Header, slot: Slot) -> Result { @@ -177,10 +180,7 @@ impl BabeConsensusDataProvider .ok_or_else(|| sp_consensus::Error::InvalidAuthoritiesSet)?; let epoch = epoch_changes - .viable_epoch( - &epoch_descriptor, - |slot| Epoch::genesis(&self.config, slot), - ) + .viable_epoch(&epoch_descriptor, |slot| Epoch::genesis(&self.config, slot)) .ok_or_else(|| { log::info!(target: "babe", "create_digest: no viable_epoch :("); sp_consensus::Error::InvalidAuthoritiesSet @@ -191,38 +191,37 @@ impl BabeConsensusDataProvider } impl ConsensusDataProvider for BabeConsensusDataProvider - where - B: BlockT, - C: AuxStore - + HeaderBackend - + HeaderMetadata - + UsageProvider - + ProvideRuntimeApi, - C::Api: BabeApi, +where + B: BlockT, + C: AuxStore + + HeaderBackend + + HeaderMetadata + + UsageProvider + + ProvideRuntimeApi, + C::Api: BabeApi, { type Transaction = TransactionFor; - fn create_digest(&self, parent: &B::Header, inherents: &InherentData) -> Result, Error> { - let slot = inherents.babe_inherent_data()? + fn create_digest( + &self, + parent: &B::Header, + inherents: &InherentData, + ) -> Result, Error> { + let slot = inherents + .babe_inherent_data()? .ok_or_else(|| Error::StringError("No babe inherent data".into()))?; let epoch = self.epoch(parent, slot)?; // this is a dev node environment, we should always be able to claim a slot. - let logs = if let Some((predigest, _)) = authorship::claim_slot( - slot, - &epoch, - &self.keystore, - ) { - vec![ - as CompatibleDigestItem>::babe_pre_digest(predigest), - ] + let logs = if let Some((predigest, _)) = + authorship::claim_slot(slot, &epoch, &self.keystore) + { + vec![ as CompatibleDigestItem>::babe_pre_digest(predigest)] } else { // well we couldn't claim a slot because this is an existing chain and we're not in the authorities. 
// we need to tell BabeBlockImport that the epoch has changed, and we put ourselves in the authorities. - let predigest = PreDigest::SecondaryPlain(SecondaryPlainPreDigest { - slot, - authority_index: 0_u32, - }); + let predigest = + PreDigest::SecondaryPlain(SecondaryPlainPreDigest { slot, authority_index: 0_u32 }); let mut epoch_changes = self.epoch_changes.shared_data(); let epoch_descriptor = epoch_changes @@ -232,12 +231,15 @@ impl ConsensusDataProvider for BabeConsensusDataProvider parent.number().clone(), slot, ) - .map_err(|e| Error::StringError(format!("failed to fetch epoch_descriptor: {}", e)))? + .map_err(|e| { + Error::StringError(format!("failed to fetch epoch_descriptor: {}", e)) + })? .ok_or_else(|| sp_consensus::Error::InvalidAuthoritiesSet)?; match epoch_descriptor { ViableEpochDescriptor::Signaled(identifier, _epoch_header) => { - let epoch_mut = epoch_changes.epoch_mut(&identifier) + let epoch_mut = epoch_changes + .epoch_mut(&identifier) .ok_or_else(|| sp_consensus::Error::InvalidAuthoritiesSet)?; // mutate the current epoch @@ -251,15 +253,13 @@ impl ConsensusDataProvider for BabeConsensusDataProvider vec![ DigestItemFor::::PreRuntime(BABE_ENGINE_ID, predigest.encode()), - DigestItemFor::::Consensus(BABE_ENGINE_ID, next_epoch.encode()) + DigestItemFor::::Consensus(BABE_ENGINE_ID, next_epoch.encode()), ] }, ViableEpochDescriptor::UnimportedGenesis(_) => { // since this is the genesis, secondary predigest works for now. - vec![ - DigestItemFor::::PreRuntime(BABE_ENGINE_ID, predigest.encode()), - ] - } + vec![DigestItemFor::::PreRuntime(BABE_ENGINE_ID, predigest.encode())] + }, } }; @@ -270,9 +270,10 @@ impl ConsensusDataProvider for BabeConsensusDataProvider &self, parent: &B::Header, params: &mut BlockImportParams, - inherents: &InherentData + inherents: &InherentData, ) -> Result<(), Error> { - let slot = inherents.babe_inherent_data()? + let slot = inherents + .babe_inherent_data()? 
.ok_or_else(|| Error::StringError("No babe inherent data".into()))?; let epoch_changes = self.epoch_changes.shared_data(); let mut epoch_descriptor = epoch_changes @@ -289,27 +290,27 @@ impl ConsensusDataProvider for BabeConsensusDataProvider // a quick check to see if we're in the authorities let epoch = self.epoch(parent, slot)?; let (authority, _) = self.authorities.first().expect("authorities is non-emptyp; qed"); - let has_authority = epoch.authorities.iter() - .find(|(id, _)| *id == *authority) - .is_some(); + let has_authority = epoch.authorities.iter().find(|(id, _)| *id == *authority).is_some(); if !has_authority { log::info!(target: "manual-seal", "authority not found"); - let timestamp = inherents.timestamp_inherent_data()? + let timestamp = inherents + .timestamp_inherent_data()? .ok_or_else(|| Error::StringError("No timestamp inherent data".into()))?; let slot = *timestamp / self.config.slot_duration; // manually hard code epoch descriptor epoch_descriptor = match epoch_descriptor { - ViableEpochDescriptor::Signaled(identifier, _header) => { + ViableEpochDescriptor::Signaled(identifier, _header) => ViableEpochDescriptor::Signaled( identifier, EpochHeader { start_slot: slot.into(), end_slot: (slot * self.config.epoch_length).into(), }, - ) - }, - _ => unreachable!("we're not in the authorities, so this isn't the genesis epoch; qed") + ), + _ => unreachable!( + "we're not in the authorities, so this isn't the genesis epoch; qed" + ), }; } @@ -326,16 +327,16 @@ impl ConsensusDataProvider for BabeConsensusDataProvider /// Mocks the timestamp inherent to always produce the timestamp for the next babe slot. pub struct SlotTimestampProvider { time: atomic::AtomicU64, - slot_duration: u64 + slot_duration: u64, } impl SlotTimestampProvider { /// Create a new mocked time stamp provider. 
pub fn new(client: Arc) -> Result - where - B: BlockT, - C: AuxStore + HeaderBackend + ProvideRuntimeApi + UsageProvider, - C::Api: BabeApi, + where + B: BlockT, + C: AuxStore + HeaderBackend + ProvideRuntimeApi + UsageProvider, + C::Api: BabeApi, { let slot_duration = Config::get_or_compute(&*client)?.slot_duration; let info = client.info(); @@ -355,10 +356,7 @@ impl SlotTimestampProvider { .as_millis() as u64 }; - Ok(Self { - time: atomic::AtomicU64::new(time), - slot_duration, - }) + Ok(Self { time: atomic::AtomicU64::new(time), slot_duration }) } /// Get the current slot number @@ -369,12 +367,13 @@ impl SlotTimestampProvider { #[async_trait::async_trait] impl InherentDataProvider for SlotTimestampProvider { - fn provide_inherent_data(&self, inherent_data: &mut InherentData) -> Result<(), sp_inherents::Error> { + fn provide_inherent_data( + &self, + inherent_data: &mut InherentData, + ) -> Result<(), sp_inherents::Error> { // we update the time here. - let duration: InherentType = self.time.fetch_add( - self.slot_duration, - atomic::Ordering::SeqCst, - ).into(); + let duration: InherentType = + self.time.fetch_add(self.slot_duration, atomic::Ordering::SeqCst).into(); inherent_data.put_data(INHERENT_IDENTIFIER, &duration)?; Ok(()) } diff --git a/client/consensus/manual-seal/src/error.rs b/client/consensus/manual-seal/src/error.rs index 77140c835a3ee..9945a6e52b411 100644 --- a/client/consensus/manual-seal/src/error.rs +++ b/client/consensus/manual-seal/src/error.rs @@ -19,10 +19,10 @@ //! A manual sealing engine: the engine listens for rpc calls to seal blocks and create forks. //! This is suitable for a testing environment. 
-use sp_consensus::{Error as ConsensusError, ImportResult}; +use futures::channel::{mpsc::SendError, oneshot}; use sp_blockchain::Error as BlockchainError; +use sp_consensus::{Error as ConsensusError, ImportResult}; use sp_inherents::Error as InherentsError; -use futures::channel::{oneshot, mpsc::SendError}; /// Error code for rpc mod codes { @@ -70,7 +70,7 @@ pub enum Error { #[display(fmt = "Consensus process is terminating")] SendError(SendError), /// Some other error. - #[display(fmt="Other error: {}", _0)] + #[display(fmt = "Other error: {}", _0)] Other(Box), } @@ -85,7 +85,7 @@ impl Error { InherentError(_) => codes::INHERENTS_ERROR, BlockchainError(_) => codes::BLOCKCHAIN_ERROR, SendError(_) | Canceled(_) => codes::SERVER_SHUTTING_DOWN, - _ => codes::UNKNOWN_ERROR + _ => codes::UNKNOWN_ERROR, } } } @@ -95,7 +95,7 @@ impl std::convert::From for jsonrpc_core::Error { jsonrpc_core::Error { code: jsonrpc_core::ErrorCode::ServerError(error.to_code()), message: format!("{}", error), - data: None + data: None, } } } diff --git a/client/consensus/manual-seal/src/finalize_block.rs b/client/consensus/manual-seal/src/finalize_block.rs index 76ae6eeeae5ac..a5ddf1d162f7a 100644 --- a/client/consensus/manual-seal/src/finalize_block.rs +++ b/client/consensus/manual-seal/src/finalize_block.rs @@ -19,14 +19,9 @@ //! Block finalization utilities use crate::rpc; -use sp_runtime::{ - Justification, - traits::Block as BlockT, - generic::BlockId, -}; -use std::sync::Arc; use sc_client_api::backend::{Backend as ClientBackend, Finalizer}; -use std::marker::PhantomData; +use sp_runtime::{generic::BlockId, traits::Block as BlockT, Justification}; +use std::{marker::PhantomData, sync::Arc}; /// params for block finalization. pub struct FinalizeBlockParams { @@ -42,30 +37,23 @@ pub struct FinalizeBlockParams { pub _phantom: PhantomData, } - /// finalizes a block in the backend with the given params. 
pub async fn finalize_block(params: FinalizeBlockParams) - where - B: BlockT, - F: Finalizer, - CB: ClientBackend, +where + B: BlockT, + F: Finalizer, + CB: ClientBackend, { - let FinalizeBlockParams { - hash, - mut sender, - justification, - finalizer, - .. - } = params; + let FinalizeBlockParams { hash, mut sender, justification, finalizer, .. } = params; match finalizer.finalize_block(BlockId::Hash(hash), justification, true) { Err(e) => { log::warn!("Failed to finalize block {:?}", e); rpc::send_result(&mut sender, Err(e.into())) - } + }, Ok(()) => { log::info!("✅ Successfully finalized block: {}", hash); rpc::send_result(&mut sender, Ok(())) - } + }, } } diff --git a/client/consensus/manual-seal/src/lib.rs b/client/consensus/manual-seal/src/lib.rs index 5d93f6724ee9f..1aacd22aa7bb8 100644 --- a/client/consensus/manual-seal/src/lib.rs +++ b/client/consensus/manual-seal/src/lib.rs @@ -20,17 +20,17 @@ //! This is suitable for a testing environment. use futures::prelude::*; +use prometheus_endpoint::Registry; +use sc_client_api::backend::{Backend as ClientBackend, Finalizer}; +use sp_blockchain::HeaderBackend; use sp_consensus::{ - Environment, Proposer, SelectChain, BlockImport, - ForkChoiceStrategy, BlockImportParams, BlockOrigin, - import_queue::{Verifier, BasicQueue, CacheKeyId, BoxBlockImport}, + import_queue::{BasicQueue, BoxBlockImport, CacheKeyId, Verifier}, + BlockImport, BlockImportParams, BlockOrigin, Environment, ForkChoiceStrategy, Proposer, + SelectChain, }; -use sp_blockchain::HeaderBackend; use sp_inherents::CreateInherentDataProviders; -use sp_runtime::{traits::Block as BlockT, Justifications, ConsensusEngineId}; -use sc_client_api::backend::{Backend as ClientBackend, Finalizer}; -use std::{sync::Arc, marker::PhantomData}; -use prometheus_endpoint::Registry; +use sp_runtime::{traits::Block as BlockT, ConsensusEngineId, Justifications}; +use std::{marker::PhantomData, sync::Arc}; mod error; mod finalize_block; @@ -40,14 +40,14 @@ pub mod consensus; 
pub mod rpc; pub use self::{ - error::Error, consensus::ConsensusDataProvider, + error::Error, finalize_block::{finalize_block, FinalizeBlockParams}, - seal_block::{SealBlockParams, seal_block, MAX_PROPOSAL_DURATION}, - rpc::{EngineCommand, CreatedBlock}, + rpc::{CreatedBlock, EngineCommand}, + seal_block::{seal_block, SealBlockParams, MAX_PROPOSAL_DURATION}, }; -use sp_api::{ProvideRuntimeApi, TransactionFor}; use sc_transaction_pool_api::TransactionPool; +use sp_api::{ProvideRuntimeApi, TransactionFor}; /// The `ConsensusEngineId` of Manual Seal. pub const MANUAL_SEAL_ENGINE_ID: ConsensusEngineId = [b'm', b'a', b'n', b'l']; @@ -80,17 +80,11 @@ pub fn import_queue( spawner: &impl sp_core::traits::SpawnEssentialNamed, registry: Option<&Registry>, ) -> BasicQueue - where - Block: BlockT, - Transaction: Send + Sync + 'static, +where + Block: BlockT, + Transaction: Send + Sync + 'static, { - BasicQueue::new( - ManualSealVerifier, - block_import, - None, - spawner, - registry, - ) + BasicQueue::new(ManualSealVerifier, block_import, None, spawner, registry) } /// Params required to start the instant sealing authorship task. @@ -115,7 +109,8 @@ pub struct ManualSealParams, TP, SC, C pub select_chain: SC, /// Digest provider for inclusion in blocks. - pub consensus_data_provider: Option>>>, + pub consensus_data_provider: + Option>>>, /// Something that can create the inherent data providers. pub create_inherent_data_providers: CIDP, @@ -139,7 +134,8 @@ pub struct InstantSealParams, TP, SC, pub select_chain: SC, /// Digest provider for inclusion in blocks. - pub consensus_data_provider: Option>>>, + pub consensus_data_provider: + Option>>>, /// Something that can create the inherent data providers. 
pub create_inherent_data_providers: CIDP, @@ -156,58 +152,52 @@ pub async fn run_manual_seal( select_chain, consensus_data_provider, create_inherent_data_providers, - }: ManualSealParams -) - where - B: BlockT + 'static, - BI: BlockImport> - + Send + Sync + 'static, - C: HeaderBackend + Finalizer + ProvideRuntimeApi + 'static, - CB: ClientBackend + 'static, - E: Environment + 'static, - E::Proposer: Proposer>, - CS: Stream::Hash>> + Unpin + 'static, - SC: SelectChain + 'static, - TransactionFor: 'static, - TP: TransactionPool, - CIDP: CreateInherentDataProviders, + }: ManualSealParams, +) where + B: BlockT + 'static, + BI: BlockImport> + + Send + + Sync + + 'static, + C: HeaderBackend + Finalizer + ProvideRuntimeApi + 'static, + CB: ClientBackend + 'static, + E: Environment + 'static, + E::Proposer: Proposer>, + CS: Stream::Hash>> + Unpin + 'static, + SC: SelectChain + 'static, + TransactionFor: 'static, + TP: TransactionPool, + CIDP: CreateInherentDataProviders, { while let Some(command) = commands_stream.next().await { match command { - EngineCommand::SealNewBlock { - create_empty, - finalize, - parent_hash, - sender, - } => { - seal_block( - SealBlockParams { - sender, - parent_hash, - finalize, - create_empty, - env: &mut env, - select_chain: &select_chain, - block_import: &mut block_import, - consensus_data_provider: consensus_data_provider.as_ref().map(|p| &**p), - pool: pool.clone(), - client: client.clone(), - create_inherent_data_providers: &create_inherent_data_providers, - } - ).await; - } + EngineCommand::SealNewBlock { create_empty, finalize, parent_hash, sender } => { + seal_block(SealBlockParams { + sender, + parent_hash, + finalize, + create_empty, + env: &mut env, + select_chain: &select_chain, + block_import: &mut block_import, + consensus_data_provider: consensus_data_provider.as_ref().map(|p| &**p), + pool: pool.clone(), + client: client.clone(), + create_inherent_data_providers: &create_inherent_data_providers, + }) + .await; + }, 
EngineCommand::FinalizeBlock { hash, sender, justification } => { let justification = justification.map(|j| (MANUAL_SEAL_ENGINE_ID, j)); - finalize_block( - FinalizeBlockParams { - hash, - sender, - justification, - finalizer: client.clone(), - _phantom: PhantomData, - } - ).await - } + finalize_block(FinalizeBlockParams { + hash, + sender, + justification, + finalizer: client.clone(), + _phantom: PhantomData, + }) + .await + }, } } } @@ -224,63 +214,57 @@ pub async fn run_instant_seal( select_chain, consensus_data_provider, create_inherent_data_providers, - }: InstantSealParams -) - where - B: BlockT + 'static, - BI: BlockImport> - + Send + Sync + 'static, - C: HeaderBackend + Finalizer + ProvideRuntimeApi + 'static, - CB: ClientBackend + 'static, - E: Environment + 'static, - E::Proposer: Proposer>, - SC: SelectChain + 'static, - TransactionFor: 'static, - TP: TransactionPool, - CIDP: CreateInherentDataProviders, + }: InstantSealParams, +) where + B: BlockT + 'static, + BI: BlockImport> + + Send + + Sync + + 'static, + C: HeaderBackend + Finalizer + ProvideRuntimeApi + 'static, + CB: ClientBackend + 'static, + E: Environment + 'static, + E::Proposer: Proposer>, + SC: SelectChain + 'static, + TransactionFor: 'static, + TP: TransactionPool, + CIDP: CreateInherentDataProviders, { // instant-seal creates blocks as soon as transactions are imported // into the transaction pool. 
- let commands_stream = pool.import_notification_stream() - .map(|_| { - EngineCommand::SealNewBlock { - create_empty: false, - finalize: false, - parent_hash: None, - sender: None, - } - }); - - run_manual_seal( - ManualSealParams { - block_import, - env, - client, - pool, - commands_stream, - select_chain, - consensus_data_provider, - create_inherent_data_providers, - } - ).await + let commands_stream = pool.import_notification_stream().map(|_| EngineCommand::SealNewBlock { + create_empty: false, + finalize: false, + parent_hash: None, + sender: None, + }); + + run_manual_seal(ManualSealParams { + block_import, + env, + client, + pool, + commands_stream, + select_chain, + consensus_data_provider, + create_inherent_data_providers, + }) + .await } #[cfg(test)] mod tests { use super::*; - use substrate_test_runtime_client::{ - DefaultTestClientBuilderExt, - TestClientBuilderExt, - AccountKeyring::*, - TestClientBuilder, - }; - use sc_transaction_pool::{BasicPool, RevalidationType, Options}; - use substrate_test_runtime_transaction_pool::{TestApi, uxt}; - use sc_transaction_pool_api::{TransactionPool, MaintainedTransactionPool, TransactionSource}; - use sp_runtime::generic::BlockId; - use sp_consensus::ImportedAux; use sc_basic_authorship::ProposerFactory; use sc_client_api::BlockBackend; + use sc_transaction_pool::{BasicPool, Options, RevalidationType}; + use sc_transaction_pool_api::{MaintainedTransactionPool, TransactionPool, TransactionSource}; + use sp_consensus::ImportedAux; + use sp_runtime::generic::BlockId; + use substrate_test_runtime_client::{ + AccountKeyring::*, DefaultTestClientBuilderExt, TestClientBuilder, TestClientBuilderExt, + }; + use substrate_test_runtime_transaction_pool::{uxt, TestApi}; fn api() -> Arc { Arc::new(TestApi::empty()) @@ -303,40 +287,32 @@ mod tests { spawner.clone(), 0, )); - let env = ProposerFactory::new( - spawner.clone(), - client.clone(), - pool.clone(), - None, - None, - ); + let env = ProposerFactory::new(spawner.clone(), 
client.clone(), pool.clone(), None, None); // this test checks that blocks are created as soon as transactions are imported into the pool. let (sender, receiver) = futures::channel::oneshot::channel(); let mut sender = Arc::new(Some(sender)); - let commands_stream = pool.pool().validated_pool().import_notification_stream() - .map(move |_| { + let commands_stream = + pool.pool().validated_pool().import_notification_stream().map(move |_| { // we're only going to submit one tx so this fn will only be called once. - let mut_sender = Arc::get_mut(&mut sender).unwrap(); + let mut_sender = Arc::get_mut(&mut sender).unwrap(); let sender = std::mem::take(mut_sender); EngineCommand::SealNewBlock { create_empty: false, finalize: true, parent_hash: None, - sender + sender, } }); - let future = run_manual_seal( - ManualSealParams { - block_import: client.clone(), - env, - client: client.clone(), - pool: pool.clone(), - commands_stream, - select_chain, - create_inherent_data_providers: |_, _| async { Ok(()) }, - consensus_data_provider: None, - } - ); + let future = run_manual_seal(ManualSealParams { + block_import: client.clone(), + env, + client: client.clone(), + pool: pool.clone(), + commands_stream, + select_chain, + create_inherent_data_providers: |_, _| async { Ok(()) }, + consensus_data_provider: None, + }); std::thread::spawn(|| { let mut rt = tokio::runtime::Runtime::new().unwrap(); // spawn the background authorship task @@ -380,27 +356,19 @@ mod tests { spawner.clone(), 0, )); - let env = ProposerFactory::new( - spawner.clone(), - client.clone(), - pool.clone(), - None, - None, - ); + let env = ProposerFactory::new(spawner.clone(), client.clone(), pool.clone(), None, None); // this test checks that blocks are created as soon as an engine command is sent over the stream. 
let (mut sink, commands_stream) = futures::channel::mpsc::channel(1024); - let future = run_manual_seal( - ManualSealParams { - block_import: client.clone(), - env, - client: client.clone(), - pool: pool.clone(), - commands_stream, - select_chain, - consensus_data_provider: None, - create_inherent_data_providers: |_, _| async { Ok(()) }, - } - ); + let future = run_manual_seal(ManualSealParams { + block_import: client.clone(), + env, + client: client.clone(), + pool: pool.clone(), + commands_stream, + select_chain, + consensus_data_provider: None, + create_inherent_data_providers: |_, _| async { Ok(()) }, + }); std::thread::spawn(|| { let mut rt = tokio::runtime::Runtime::new().unwrap(); // spawn the background authorship task @@ -416,7 +384,9 @@ mod tests { sender: Some(tx), create_empty: false, finalize: false, - }).await.unwrap(); + }) + .await + .unwrap(); let created_block = rx.await.unwrap().unwrap(); // assert that the background task returns ok @@ -439,8 +409,10 @@ mod tests { sink.send(EngineCommand::FinalizeBlock { sender: Some(tx), hash: header.hash(), - justification: None - }).await.unwrap(); + justification: None, + }) + .await + .unwrap(); // assert that the background task returns ok assert_eq!(rx.await.unwrap().unwrap(), ()); } @@ -461,27 +433,19 @@ mod tests { spawner.clone(), 0, )); - let env = ProposerFactory::new( - spawner.clone(), - client.clone(), - pool.clone(), - None, - None, - ); + let env = ProposerFactory::new(spawner.clone(), client.clone(), pool.clone(), None, None); // this test checks that blocks are created as soon as an engine command is sent over the stream. 
let (mut sink, commands_stream) = futures::channel::mpsc::channel(1024); - let future = run_manual_seal( - ManualSealParams { - block_import: client.clone(), - env, - client: client.clone(), - pool: pool.clone(), - commands_stream, - select_chain, - consensus_data_provider: None, - create_inherent_data_providers: |_, _| async { Ok(()) }, - } - ); + let future = run_manual_seal(ManualSealParams { + block_import: client.clone(), + env, + client: client.clone(), + pool: pool.clone(), + commands_stream, + select_chain, + consensus_data_provider: None, + create_inherent_data_providers: |_, _| async { Ok(()) }, + }); std::thread::spawn(|| { let mut rt = tokio::runtime::Runtime::new().unwrap(); // spawn the background authorship task @@ -498,7 +462,9 @@ mod tests { sender: Some(tx), create_empty: false, finalize: false, - }).await.unwrap(); + }) + .await + .unwrap(); let created_block = rx.await.unwrap().unwrap(); pool_api.increment_nonce(Alice.into()); @@ -524,31 +490,35 @@ mod tests { pool.maintain(sc_transaction_pool_api::ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None, - }).await; + }) + .await; let (tx1, rx1) = futures::channel::oneshot::channel(); - assert!(sink.send(EngineCommand::SealNewBlock { - parent_hash: Some(created_block.hash), - sender: Some(tx1), - create_empty: false, - finalize: false, - }).await.is_ok()); - assert_matches::assert_matches!( - rx1.await.expect("should be no error receiving"), - Ok(_) - ); + assert!(sink + .send(EngineCommand::SealNewBlock { + parent_hash: Some(created_block.hash), + sender: Some(tx1), + create_empty: false, + finalize: false, + }) + .await + .is_ok()); + assert_matches::assert_matches!(rx1.await.expect("should be no error receiving"), Ok(_)); let block = client.block(&BlockId::Number(2)).unwrap().unwrap().block; pool_api.add_block(block, true); pool_api.increment_nonce(Alice.into()); assert!(pool.submit_one(&BlockId::Number(1), SOURCE, uxt(Bob, 0)).await.is_ok()); let (tx2, rx2) = 
futures::channel::oneshot::channel(); - assert!(sink.send(EngineCommand::SealNewBlock { - parent_hash: Some(created_block.hash), - sender: Some(tx2), - create_empty: false, - finalize: false, - }).await.is_ok()); + assert!(sink + .send(EngineCommand::SealNewBlock { + parent_hash: Some(created_block.hash), + sender: Some(tx2), + create_empty: false, + finalize: false, + }) + .await + .is_ok()); let imported = rx2.await.unwrap().unwrap(); // assert that fork block is in the db assert!(client.header(&BlockId::Hash(imported.hash)).unwrap().is_some()) diff --git a/client/consensus/manual-seal/src/rpc.rs b/client/consensus/manual-seal/src/rpc.rs index eb056f22fed8b..0f686bc26e7df 100644 --- a/client/consensus/manual-seal/src/rpc.rs +++ b/client/consensus/manual-seal/src/rpc.rs @@ -18,18 +18,16 @@ //! RPC interface for the `ManualSeal` Engine. -use sp_consensus::ImportedAux; -use jsonrpc_core::Error; -use jsonrpc_derive::rpc; +pub use self::gen_client::Client as ManualSealClient; use futures::{ channel::{mpsc, oneshot}, - TryFutureExt, - FutureExt, - SinkExt + FutureExt, SinkExt, TryFutureExt, }; +use jsonrpc_core::Error; +use jsonrpc_derive::rpc; use serde::{Deserialize, Serialize}; +use sp_consensus::ImportedAux; use sp_runtime::EncodedJustification; -pub use self::gen_client::Client as ManualSealClient; /// Future's type for jsonrpc type FutureResult = Box + Send>; @@ -63,7 +61,7 @@ pub enum EngineCommand { sender: Sender<()>, /// finalization justification justification: Option, - } + }, } /// RPC trait that provides methods for interacting with the manual-seal authorship task over rpc. 
@@ -75,7 +73,7 @@ pub trait ManualSealApi { &self, create_empty: bool, finalize: bool, - parent_hash: Option + parent_hash: Option, ) -> FutureResult>; /// Instructs the manual-seal authorship task to finalize a block @@ -83,7 +81,7 @@ pub trait ManualSealApi { fn finalize_block( &self, hash: Hash, - justification: Option + justification: Option, ) -> FutureResult; } @@ -98,7 +96,7 @@ pub struct CreatedBlock { /// hash of the created block. pub hash: Hash, /// some extra details about the import operation - pub aux: ImportedAux + pub aux: ImportedAux, } impl ManualSeal { @@ -113,7 +111,7 @@ impl ManualSealApi for ManualSeal { &self, create_empty: bool, finalize: bool, - parent_hash: Option + parent_hash: Option, ) -> FutureResult> { let mut sink = self.import_block_channel.clone(); let future = async move { @@ -126,18 +124,22 @@ impl ManualSealApi for ManualSeal { }; sink.send(command).await?; receiver.await? - }.boxed(); + } + .boxed(); Box::new(future.map_err(Error::from).compat()) } - fn finalize_block(&self, hash: Hash, justification: Option) -> FutureResult { + fn finalize_block( + &self, + hash: Hash, + justification: Option, + ) -> FutureResult { let mut sink = self.import_block_channel.clone(); let future = async move { let (sender, receiver) = oneshot::channel(); - sink.send( - EngineCommand::FinalizeBlock { hash, sender: Some(sender), justification } - ).await?; + sink.send(EngineCommand::FinalizeBlock { hash, sender: Some(sender), justification }) + .await?; receiver.await?.map(|_| true) }; @@ -150,7 +152,7 @@ impl ManualSealApi for ManualSeal { /// to the rpc pub fn send_result( sender: &mut Sender, - result: std::result::Result + result: std::result::Result, ) { if let Some(sender) = sender.take() { if let Err(err) = sender.send(result) { @@ -160,7 +162,7 @@ pub fn send_result( // instant seal doesn't report errors over rpc, simply log them. 
match result { Ok(r) => log::info!("Instant Seal success: {:?}", r), - Err(e) => log::error!("Instant Seal encountered an error: {}", e) + Err(e) => log::error!("Instant Seal encountered an error: {}", e), } } } diff --git a/client/consensus/manual-seal/src/seal_block.rs b/client/consensus/manual-seal/src/seal_block.rs index 450a7bff4cd40..be97e0ccc360c 100644 --- a/client/consensus/manual-seal/src/seal_block.rs +++ b/client/consensus/manual-seal/src/seal_block.rs @@ -18,23 +18,21 @@ //! Block sealing utilities -use crate::{Error, rpc, CreatedBlock, ConsensusDataProvider}; -use std::sync::Arc; -use sp_runtime::{ - traits::{Block as BlockT, Header as HeaderT}, - generic::BlockId, -}; +use crate::{rpc, ConsensusDataProvider, CreatedBlock, Error}; use futures::prelude::*; +use sc_transaction_pool_api::TransactionPool; +use sp_api::{ProvideRuntimeApi, TransactionFor}; +use sp_blockchain::HeaderBackend; use sp_consensus::{ - self, BlockImport, Environment, Proposer, ForkChoiceStrategy, - BlockImportParams, BlockOrigin, ImportResult, SelectChain, StateAction, + self, BlockImport, BlockImportParams, BlockOrigin, Environment, ForkChoiceStrategy, + ImportResult, Proposer, SelectChain, StateAction, }; -use sp_blockchain::HeaderBackend; -use std::collections::HashMap; -use std::time::Duration; use sp_inherents::{CreateInherentDataProviders, InherentDataProvider}; -use sp_api::{ProvideRuntimeApi, TransactionFor}; -use sc_transaction_pool_api::TransactionPool; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT}, +}; +use std::{collections::HashMap, sync::Arc, time::Duration}; /// max duration for creating a proposal in secs pub const MAX_PROPOSAL_DURATION: u64 = 10; @@ -59,7 +57,8 @@ pub struct SealBlockParams<'a, B: BlockT, BI, SC, C: ProvideRuntimeApi, E, TP /// SelectChain object pub select_chain: &'a SC, /// Digest provider for inclusion in blocks. 
- pub consensus_data_provider: Option<&'a dyn ConsensusDataProvider>>, + pub consensus_data_provider: + Option<&'a dyn ConsensusDataProvider>>, /// block import object pub block_import: &'a mut BI, /// Something that can create the inherent data providers. @@ -97,7 +96,7 @@ pub async fn seal_block( { let future = async { if pool.status().ready == 0 && !create_empty { - return Err(Error::EmptyTransactionPool); + return Err(Error::EmptyTransactionPool) } // get the header to build this new block on. @@ -129,12 +128,15 @@ pub async fn seal_block( Default::default() }; - let proposal = proposer.propose( - inherent_data.clone(), - digest, - Duration::from_secs(MAX_PROPOSAL_DURATION), - None, - ).map_err(|err| Error::StringError(format!("{:?}", err))).await?; + let proposal = proposer + .propose( + inherent_data.clone(), + digest, + Duration::from_secs(MAX_PROPOSAL_DURATION), + None, + ) + .map_err(|err| Error::StringError(format!("{:?}", err))) + .await?; if proposal.block.extrinsics().len() == inherents_len && !create_empty { return Err(Error::EmptyTransactionPool) @@ -145,18 +147,17 @@ pub async fn seal_block( params.body = Some(body); params.finalized = finalize; params.fork_choice = Some(ForkChoiceStrategy::LongestChain); - params.state_action = StateAction::ApplyChanges( - sp_consensus::StorageChanges::Changes(proposal.storage_changes) - ); + params.state_action = StateAction::ApplyChanges(sp_consensus::StorageChanges::Changes( + proposal.storage_changes, + )); if let Some(digest_provider) = digest_provider { digest_provider.append_block_import(&parent, &mut params, &inherent_data)?; } match block_import.import_block(params, HashMap::new()).await? 
{ - ImportResult::Imported(aux) => { - Ok(CreatedBlock { hash: ::Header::hash(&header), aux }) - }, + ImportResult::Imported(aux) => + Ok(CreatedBlock { hash: ::Header::hash(&header), aux }), other => Err(other.into()), } }; diff --git a/client/consensus/pow/src/lib.rs b/client/consensus/pow/src/lib.rs index e71726564ebe5..bec364e1fe443 100644 --- a/client/consensus/pow/src/lib.rs +++ b/client/consensus/pow/src/lib.rs @@ -41,34 +41,36 @@ mod worker; -pub use crate::worker::{MiningWorker, MiningMetadata, MiningBuild}; +pub use crate::worker::{MiningBuild, MiningMetadata, MiningWorker}; -use std::{ - sync::Arc, borrow::Cow, collections::HashMap, marker::PhantomData, - cmp::Ordering, time::Duration, -}; +use codec::{Decode, Encode}; use futures::{Future, StreamExt}; +use log::*; use parking_lot::Mutex; -use sc_client_api::{BlockOf, backend::AuxStore, BlockchainEvents}; -use sp_blockchain::{HeaderBackend, ProvideCache, well_known_cache_keys::Id as CacheKeyId}; -use sp_block_builder::BlockBuilder as BlockBuilderApi; -use sp_runtime::{Justifications, RuntimeString}; -use sp_runtime::generic::{BlockId, Digest, DigestItem}; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; +use prometheus_endpoint::Registry; +use sc_client_api::{ + self, + {backend::AuxStore, BlockOf, BlockchainEvents}, +}; use sp_api::ProvideRuntimeApi; +use sp_block_builder::BlockBuilder as BlockBuilderApi; +use sp_blockchain::{well_known_cache_keys::Id as CacheKeyId, HeaderBackend, ProvideCache}; +use sp_consensus::{ + import_queue::{BasicQueue, BoxBlockImport, BoxJustificationImport, Verifier}, + BlockCheckParams, BlockImport, BlockImportParams, BlockOrigin, CanAuthorWith, Environment, + Error as ConsensusError, ForkChoiceStrategy, ImportResult, Proposer, SelectChain, SyncOracle, +}; use sp_consensus_pow::{Seal, TotalDifficulty, POW_ENGINE_ID}; use sp_inherents::{CreateInherentDataProviders, InherentDataProvider}; -use sp_consensus::{ - BlockImportParams, BlockOrigin, ForkChoiceStrategy, 
SyncOracle, Environment, Proposer, - SelectChain, Error as ConsensusError, CanAuthorWith, BlockImport, BlockCheckParams, ImportResult, +use sp_runtime::{ + generic::{BlockId, Digest, DigestItem}, + traits::{Block as BlockT, Header as HeaderT}, + Justifications, RuntimeString, }; -use sp_consensus::import_queue::{ - BoxBlockImport, BasicQueue, Verifier, BoxJustificationImport, +use std::{ + borrow::Cow, cmp::Ordering, collections::HashMap, marker::PhantomData, sync::Arc, + time::Duration, }; -use codec::{Encode, Decode}; -use prometheus_endpoint::Registry; -use sc_client_api; -use log::*; use crate::worker::UntilImportedOrTimeout; @@ -102,7 +104,7 @@ pub enum Error { CheckInherents(sp_inherents::Error), #[display( fmt = "Checking inherents unknown error for identifier: {:?}", - "String::from_utf8_lossy(_0)", + "String::from_utf8_lossy(_0)" )] CheckInherentsUnknownError(sp_inherents::InherentIdentifier), #[display(fmt = "Multiple pre-runtime digests")] @@ -153,7 +155,8 @@ pub struct PowAux { pub total_difficulty: Difficulty, } -impl PowAux where +impl PowAux +where Difficulty: Decode + Default, { /// Read the auxiliary from client. @@ -193,11 +196,7 @@ pub trait PowAlgorithm { /// breaking algorithms will help to protect against selfish mining. /// /// Returns if the new seal should be considered best block. - fn break_tie( - &self, - _own_seal: &Seal, - _new_seal: &Seal, - ) -> bool { + fn break_tie(&self, _own_seal: &Seal, _new_seal: &Seal) -> bool { false } /// Verify that the difficulty is valid against given seal. 
@@ -238,7 +237,8 @@ impl Clone } } -impl PowBlockImport where +impl PowBlockImport +where B: BlockT, I: BlockImport> + Send + Sync, I::Error: Into, @@ -289,14 +289,15 @@ impl PowBlockImport(block.post_digests.last(), block.header.hash())?; - let intermediate = block.take_intermediate::>( - INTERMEDIATE_KEY - )?; + let intermediate = + block.take_intermediate::>(INTERMEDIATE_KEY)?; let difficulty = match intermediate.difficulty { Some(difficulty) => difficulty, @@ -401,14 +401,12 @@ where Ordering::Less => false, Ordering::Greater => true, Ordering::Equal => { - let best_inner_seal = fetch_seal::( - best_header.digest().logs.last(), - best_hash, - )?; + let best_inner_seal = + fetch_seal::(best_header.digest().logs.last(), best_hash)?; self.algorithm.break_tie(&best_inner_seal, &inner_seal) }, - } + }, )); } @@ -423,35 +421,33 @@ pub struct PowVerifier { } impl PowVerifier { - pub fn new( - algorithm: Algorithm, - ) -> Self { + pub fn new(algorithm: Algorithm) -> Self { Self { algorithm, _marker: PhantomData } } fn check_header( &self, mut header: B::Header, - ) -> Result<(B::Header, DigestItem), Error> where + ) -> Result<(B::Header, DigestItem), Error> + where Algorithm: PowAlgorithm, { let hash = header.hash(); let (seal, inner_seal) = match header.digest_mut().pop() { - Some(DigestItem::Seal(id, seal)) => { + Some(DigestItem::Seal(id, seal)) => if id == POW_ENGINE_ID { (DigestItem::Seal(id, seal.clone()), seal) } else { return Err(Error::WrongEngine(id)) - } - }, + }, _ => return Err(Error::HeaderUnsealed(hash)), }; let pre_hash = header.hash(); if !self.algorithm.preliminary_verify(&pre_hash, &inner_seal)?.unwrap_or(true) { - return Err(Error::FailedPreliminaryVerify); + return Err(Error::FailedPreliminaryVerify) } Ok((header, seal)) @@ -459,7 +455,8 @@ impl PowVerifier { } #[async_trait::async_trait] -impl Verifier for PowVerifier where +impl Verifier for PowVerifier +where Algorithm: PowAlgorithm + Send + Sync, Algorithm::Difficulty: 'static + Send, { @@ 
-473,18 +470,15 @@ impl Verifier for PowVerifier where let hash = header.hash(); let (checked_header, seal) = self.check_header(header)?; - let intermediate = PowIntermediate:: { - difficulty: None, - }; + let intermediate = PowIntermediate:: { difficulty: None }; let mut import_block = BlockImportParams::new(origin, checked_header); import_block.post_digests.push(seal); import_block.body = body; import_block.justifications = justifications; - import_block.intermediates.insert( - Cow::from(INTERMEDIATE_KEY), - Box::new(intermediate) as Box<_> - ); + import_block + .intermediates + .insert(Cow::from(INTERMEDIATE_KEY), Box::new(intermediate) as Box<_>); import_block.post_hash = Some(hash); Ok((import_block, None)) @@ -501,10 +495,8 @@ pub fn import_queue( algorithm: Algorithm, spawner: &impl sp_core::traits::SpawnEssentialNamed, registry: Option<&Registry>, -) -> Result< - PowImportQueue, - sp_consensus::Error -> where +) -> Result, sp_consensus::Error> +where B: BlockT, Transaction: Send + Sync + 'static, Algorithm: PowAlgorithm + Clone + Send + Sync + 'static, @@ -512,13 +504,7 @@ pub fn import_queue( { let verifier = PowVerifier::new(algorithm); - Ok(BasicQueue::new( - verifier, - block_import, - justification_import, - spawner, - registry, - )) + Ok(BasicQueue::new(verifier, block_import, justification_import, spawner, registry)) } /// Start the mining worker for PoW. 
This function provides the necessary helper functions that can @@ -573,13 +559,13 @@ where let task = async move { loop { if timer.next().await.is_none() { - break; + break } if sync_oracle.is_major_syncing() { debug!(target: "pow", "Skipping proposal due to sync."); worker.lock().on_major_syncing(); - return; + return } let best_header = match select_chain.best_chain().await { @@ -591,8 +577,8 @@ where Select best chain error: {:?}", err ); - return; - } + return + }, }; let best_hash = best_header.hash(); @@ -603,11 +589,11 @@ where Probably a node update is required!", err, ); - return; + return } if worker.lock().best_hash() == Some(best_hash) { - return; + return } // The worker is locked for the duration of the whole proposing period. Within this period, @@ -622,23 +608,25 @@ where Fetch difficulty failed: {:?}", err, ); - return; + return }, }; - let inherent_data_providers = - match create_inherent_data_providers.create_inherent_data_providers(best_hash, ()).await { - Ok(x) => x, - Err(err) => { - warn!( - target: "pow", - "Unable to propose new block for authoring. \ - Creating inherent data providers failed: {:?}", - err, - ); - return; - }, - }; + let inherent_data_providers = match create_inherent_data_providers + .create_inherent_data_providers(best_hash, ()) + .await + { + Ok(x) => x, + Err(err) => { + warn!( + target: "pow", + "Unable to propose new block for authoring. 
\ + Creating inherent data providers failed: {:?}", + err, + ); + return + }, + }; let inherent_data = match inherent_data_providers.create_inherent_data() { Ok(r) => r, @@ -649,7 +637,7 @@ where Creating inherent data failed: {:?}", e, ); - return; + return }, }; @@ -673,12 +661,10 @@ where }, }; - let proposal = match proposer.propose( - inherent_data, - inherent_digest, - build_time.clone(), - None, - ).await { + let proposal = match proposer + .propose(inherent_data, inherent_digest, build_time.clone(), None) + .await + { Ok(x) => x, Err(err) => { warn!( @@ -714,9 +700,8 @@ fn find_pre_digest(header: &B::Header) -> Result>, Err for log in header.digest().logs() { trace!(target: "pow", "Checking log {:?}, looking for pre runtime digest", log); match (log, pre_digest.is_some()) { - (DigestItem::PreRuntime(POW_ENGINE_ID, _), true) => { - return Err(Error::MultiplePreRuntimeDigests) - }, + (DigestItem::PreRuntime(POW_ENGINE_ID, _), true) => + return Err(Error::MultiplePreRuntimeDigests), (DigestItem::PreRuntime(POW_ENGINE_ID, v), false) => { pre_digest = Some(v.clone()); }, @@ -733,13 +718,12 @@ fn fetch_seal( hash: B::Hash, ) -> Result, Error> { match digest { - Some(DigestItem::Seal(id, seal)) => { + Some(DigestItem::Seal(id, seal)) => if id == &POW_ENGINE_ID { Ok(seal.clone()) } else { return Err(Error::::WrongEngine(*id).into()) - } - }, + }, _ => return Err(Error::::HeaderUnsealed(hash).into()), } } diff --git a/client/consensus/pow/src/worker.rs b/client/consensus/pow/src/worker.rs index 74fbcce81341d..572ed364c8f82 100644 --- a/client/consensus/pow/src/worker.rs +++ b/client/consensus/pow/src/worker.rs @@ -16,20 +16,25 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use std::{pin::Pin, time::Duration, collections::HashMap, borrow::Cow}; +use futures::{ + prelude::*, + task::{Context, Poll}, +}; +use futures_timer::Delay; +use log::*; use sc_client_api::ImportNotifications; -use sp_consensus::{Proposal, BlockOrigin, BlockImportParams, StorageChanges, - StateAction, import_queue::BoxBlockImport}; +use sp_consensus::{ + import_queue::BoxBlockImport, BlockImportParams, BlockOrigin, Proposal, StateAction, + StorageChanges, +}; use sp_runtime::{ generic::BlockId, traits::{Block as BlockT, Header as HeaderT}, DigestItem, }; -use futures::{prelude::*, task::{Context, Poll}}; -use futures_timer::Delay; -use log::*; +use std::{borrow::Cow, collections::HashMap, pin::Pin, time::Duration}; -use crate::{INTERMEDIATE_KEY, POW_ENGINE_ID, Seal, PowAlgorithm, PowIntermediate}; +use crate::{PowAlgorithm, PowIntermediate, Seal, INTERMEDIATE_KEY, POW_ENGINE_ID}; /// Mining metadata. This is the information needed to start an actual mining loop. #[derive(Clone, Eq, PartialEq)] @@ -49,7 +54,7 @@ pub struct MiningBuild< Block: BlockT, Algorithm: PowAlgorithm, C: sp_api::ProvideRuntimeApi, - Proof + Proof, > { /// Mining metadata. 
pub metadata: MiningMetadata, @@ -90,10 +95,7 @@ where self.build = None; } - pub(crate) fn on_build( - &mut self, - build: MiningBuild, - ) { + pub(crate) fn on_build(&mut self, build: MiningBuild) { self.build = Some(build); } @@ -137,23 +139,25 @@ where let mut import_block = BlockImportParams::new(BlockOrigin::Own, header); import_block.post_digests.push(seal); import_block.body = Some(body); - import_block.state_action = StateAction::ApplyChanges( - StorageChanges::Changes(build.proposal.storage_changes) - ); + import_block.state_action = + StateAction::ApplyChanges(StorageChanges::Changes(build.proposal.storage_changes)); let intermediate = PowIntermediate:: { difficulty: Some(build.metadata.difficulty), }; - import_block.intermediates.insert( - Cow::from(INTERMEDIATE_KEY), - Box::new(intermediate) as Box<_>, - ); + import_block + .intermediates + .insert(Cow::from(INTERMEDIATE_KEY), Box::new(intermediate) as Box<_>); let header = import_block.post_header(); match self.block_import.import_block(import_block, HashMap::default()).await { Ok(res) => { - res.handle_justification(&header.hash(), *header.number(), &mut self.justification_sync_link); + res.handle_justification( + &header.hash(), + *header.number(), + &mut self.justification_sync_link, + ); info!( target: "pow", @@ -190,15 +194,8 @@ pub struct UntilImportedOrTimeout { impl UntilImportedOrTimeout { /// Create a new stream using the given import notification and timeout duration. 
- pub fn new( - import_notifications: ImportNotifications, - timeout: Duration, - ) -> Self { - Self { - import_notifications, - timeout, - inner_delay: None, - } + pub fn new(import_notifications: ImportNotifications, timeout: Duration) -> Self { + Self { import_notifications, timeout, inner_delay: None } } } diff --git a/client/consensus/slots/src/aux_schema.rs b/client/consensus/slots/src/aux_schema.rs index db94ec48855e4..af92a3a0d60f9 100644 --- a/client/consensus/slots/src/aux_schema.rs +++ b/client/consensus/slots/src/aux_schema.rs @@ -18,9 +18,9 @@ //! Schema for slots in the aux-db. -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use sc_client_api::backend::AuxStore; -use sp_blockchain::{Result as ClientResult, Error as ClientError}; +use sp_blockchain::{Error as ClientError, Result as ClientResult}; use sp_consensus_slots::{EquivocationProof, Slot}; use sp_runtime::traits::Header; @@ -33,17 +33,17 @@ pub const MAX_SLOT_CAPACITY: u64 = 1000; pub const PRUNING_BOUND: u64 = 2 * MAX_SLOT_CAPACITY; fn load_decode(backend: &C, key: &[u8]) -> ClientResult> - where - C: AuxStore, - T: Decode, +where + C: AuxStore, + T: Decode, { match backend.get_aux(key)? { None => Ok(None), Some(t) => T::decode(&mut &t[..]) - .map_err( - |e| ClientError::Backend(format!("Slots DB is corrupted. Decode error: {}", e)), - ) - .map(Some) + .map_err(|e| { + ClientError::Backend(format!("Slots DB is corrupted. Decode error: {}", e)) + }) + .map(Some), } } @@ -57,14 +57,14 @@ pub fn check_equivocation( header: &H, signer: &P, ) -> ClientResult>> - where - H: Header, - C: AuxStore, - P: Clone + Encode + Decode + PartialEq, +where + H: Header, + C: AuxStore, + P: Clone + Encode + Decode + PartialEq, { // We don't check equivocations for old headers out of our capacity. if slot_now.saturating_sub(*slot) > Slot::from(MAX_SLOT_CAPACITY) { - return Ok(None); + return Ok(None) } // Key for this slot. 
@@ -72,17 +72,16 @@ pub fn check_equivocation( slot.using_encoded(|s| curr_slot_key.extend(s)); // Get headers of this slot. - let mut headers_with_sig = load_decode::<_, Vec<(H, P)>>(backend, &curr_slot_key[..])? - .unwrap_or_else(Vec::new); + let mut headers_with_sig = + load_decode::<_, Vec<(H, P)>>(backend, &curr_slot_key[..])?.unwrap_or_else(Vec::new); // Get first slot saved. let slot_header_start = SLOT_HEADER_START.to_vec(); - let first_saved_slot = load_decode::<_, Slot>(backend, &slot_header_start[..])? - .unwrap_or(slot); + let first_saved_slot = load_decode::<_, Slot>(backend, &slot_header_start[..])?.unwrap_or(slot); if slot_now < first_saved_slot { // The code below assumes that slots will be visited sequentially. - return Ok(None); + return Ok(None) } for (prev_header, prev_signer) in headers_with_sig.iter() { @@ -96,7 +95,7 @@ pub fn check_equivocation( offender: signer.clone(), first_header: prev_header.clone(), second_header: header.clone(), - })); + })) } else { // We don't need to continue in case of duplicated header, // since it's already saved and a possible equivocation @@ -135,12 +134,11 @@ pub fn check_equivocation( #[cfg(test)] mod test { - use sp_core::{sr25519, Pair}; - use sp_core::hash::H256; - use sp_runtime::testing::{Header as HeaderTest, Digest as DigestTest}; + use sp_core::{hash::H256, sr25519, Pair}; + use sp_runtime::testing::{Digest as DigestTest, Header as HeaderTest}; use substrate_test_runtime_client; - use super::{MAX_SLOT_CAPACITY, PRUNING_BOUND, check_equivocation}; + use super::{check_equivocation, MAX_SLOT_CAPACITY, PRUNING_BOUND}; fn create_header(number: u64) -> HeaderTest { // so that different headers for the same number get different hashes @@ -151,7 +149,7 @@ mod test { number, state_root: Default::default(), extrinsics_root: Default::default(), - digest: DigestTest { logs: vec![], }, + digest: DigestTest { logs: vec![] }, }; header @@ -171,79 +169,55 @@ mod test { let header6 = create_header(3); // @ slot 4 // 
It's ok to sign same headers. - assert!( - check_equivocation( - &client, - 2.into(), - 2.into(), - &header1, - &public, - ).unwrap().is_none(), - ); - - assert!( - check_equivocation( - &client, - 3.into(), - 2.into(), - &header1, - &public, - ).unwrap().is_none(), - ); + assert!(check_equivocation(&client, 2.into(), 2.into(), &header1, &public,) + .unwrap() + .is_none(),); + + assert!(check_equivocation(&client, 3.into(), 2.into(), &header1, &public,) + .unwrap() + .is_none(),); // But not two different headers at the same slot. - assert!( - check_equivocation( - &client, - 4.into(), - 2.into(), - &header2, - &public, - ).unwrap().is_some(), - ); + assert!(check_equivocation(&client, 4.into(), 2.into(), &header2, &public,) + .unwrap() + .is_some(),); // Different slot is ok. - assert!( - check_equivocation( - &client, - 5.into(), - 4.into(), - &header3, - &public, - ).unwrap().is_none(), - ); + assert!(check_equivocation(&client, 5.into(), 4.into(), &header3, &public,) + .unwrap() + .is_none(),); // Here we trigger pruning and save header 4. - assert!( - check_equivocation( - &client, - (PRUNING_BOUND + 2).into(), - (MAX_SLOT_CAPACITY + 4).into(), - &header4, - &public, - ).unwrap().is_none(), - ); + assert!(check_equivocation( + &client, + (PRUNING_BOUND + 2).into(), + (MAX_SLOT_CAPACITY + 4).into(), + &header4, + &public, + ) + .unwrap() + .is_none(),); // This fails because header 5 is an equivocation of header 4. - assert!( - check_equivocation( - &client, - (PRUNING_BOUND + 3).into(), - (MAX_SLOT_CAPACITY + 4).into(), - &header5, - &public, - ).unwrap().is_some(), - ); + assert!(check_equivocation( + &client, + (PRUNING_BOUND + 3).into(), + (MAX_SLOT_CAPACITY + 4).into(), + &header5, + &public, + ) + .unwrap() + .is_some(),); // This is ok because we pruned the corresponding header. Shows that we are pruning. 
- assert!( - check_equivocation( - &client, - (PRUNING_BOUND + 4).into(), - 4.into(), - &header6, - &public, - ).unwrap().is_none(), - ); + assert!(check_equivocation( + &client, + (PRUNING_BOUND + 4).into(), + 4.into(), + &header6, + &public, + ) + .unwrap() + .is_none(),); } } diff --git a/client/consensus/slots/src/lib.rs b/client/consensus/slots/src/lib.rs index c410f173e90ab..0e19ca94b4aec 100644 --- a/client/consensus/slots/src/lib.rs +++ b/client/consensus/slots/src/lib.rs @@ -25,19 +25,19 @@ #![forbid(unsafe_code)] #![warn(missing_docs)] -mod slots; mod aux_schema; +mod slots; +pub use aux_schema::{check_equivocation, MAX_SLOT_CAPACITY, PRUNING_BOUND}; pub use slots::SlotInfo; use slots::Slots; -pub use aux_schema::{check_equivocation, MAX_SLOT_CAPACITY, PRUNING_BOUND}; -use std::{fmt::Debug, ops::Deref, time::Duration}; use codec::{Decode, Encode}; use futures::{future::Either, Future, TryFutureExt}; use futures_timer::Delay; use log::{debug, error, info, warn}; -use sp_api::{ProvideRuntimeApi, ApiRef}; +use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_DEBUG, CONSENSUS_INFO, CONSENSUS_WARN}; +use sp_api::{ApiRef, ProvideRuntimeApi}; use sp_arithmetic::traits::BaseArithmetic; use sp_consensus::{ BlockImport, CanAuthorWith, JustificationSyncLink, Proposer, SelectChain, SlotData, SyncOracle, @@ -46,10 +46,10 @@ use sp_consensus_slots::Slot; use sp_inherents::CreateInherentDataProviders; use sp_runtime::{ generic::BlockId, - traits::{Block as BlockT, Header as HeaderT, HashFor, NumberFor} + traits::{Block as BlockT, HashFor, Header as HeaderT, NumberFor}, }; -use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_DEBUG, CONSENSUS_WARN, CONSENSUS_INFO}; use sp_timestamp::Timestamp; +use std::{fmt::Debug, ops::Deref, time::Duration}; /// The changes that need to applied to the storage to create the state for a block. 
/// @@ -76,10 +76,7 @@ pub trait SlotWorker { /// /// Returns a future that resolves to a [`SlotResult`] iff a block was successfully built in /// the slot. Otherwise `None` is returned. - async fn on_slot( - &mut self, - slot_info: SlotInfo, - ) -> Option>; + async fn on_slot(&mut self, slot_info: SlotInfo) -> Option>; } /// A skeleton implementation for `SlotWorker` which tries to claim a slot at @@ -89,7 +86,8 @@ pub trait SlotWorker { pub trait SimpleSlotWorker { /// A handle to a `BlockImport`. type BlockImport: BlockImport>::Transaction> - + Send + 'static; + + Send + + 'static; /// A handle to a `SyncOracle`. type SyncOracle: SyncOracle; @@ -100,7 +98,9 @@ pub trait SimpleSlotWorker { /// The type of future resolving to the proposer. type CreateProposer: Future> - + Send + Unpin + 'static; + + Send + + Unpin + + 'static; /// The type of proposer to use to build blocks. type Proposer: Proposer + Send; @@ -139,12 +139,7 @@ pub trait SimpleSlotWorker { /// Notifies the given slot. Similar to `claim_slot`, but will be called no matter whether we /// need to author blocks or not. - fn notify_slot( - &self, - _header: &B::Header, - _slot: Slot, - _epoch_data: &Self::EpochData, - ) {} + fn notify_slot(&self, _header: &B::Header, _slot: Slot, _epoch_data: &Self::EpochData) {} /// Return the pre digest data to include in a block authored with the given claim. fn pre_digest_data( @@ -154,18 +149,24 @@ pub trait SimpleSlotWorker { ) -> Vec>; /// Returns a function which produces a `BlockImportParams`. 
- fn block_import_params(&self) -> Box< + fn block_import_params( + &self, + ) -> Box< dyn Fn( - B::Header, - &B::Hash, - Vec, - StorageChanges<>::Transaction, B>, - Self::Claim, - Self::EpochData, - ) -> Result< - sp_consensus::BlockImportParams>::Transaction>, - sp_consensus::Error - > + Send + 'static + B::Header, + &B::Hash, + Vec, + StorageChanges<>::Transaction, B>, + Self::Claim, + Self::EpochData, + ) -> Result< + sp_consensus::BlockImportParams< + B, + >::Transaction, + >, + sp_consensus::Error, + > + Send + + 'static, >; /// Whether to force authoring if offline. @@ -194,10 +195,7 @@ pub trait SimpleSlotWorker { fn telemetry(&self) -> Option; /// Remaining duration for proposing. - fn proposing_remaining_duration( - &self, - slot_info: &SlotInfo, - ) -> Duration; + fn proposing_remaining_duration(&self, slot_info: &SlotInfo) -> Duration; /// Implements [`SlotWorker::on_slot`]. async fn on_slot( @@ -213,8 +211,7 @@ pub trait SimpleSlotWorker { let proposing_remaining = if proposing_remaining_duration == Duration::default() { debug!( target: logging_target, - "Skipping proposal slot {} since there's no time left to propose", - slot, + "Skipping proposal slot {} since there's no time left to propose", slot, ); return None @@ -240,8 +237,8 @@ pub trait SimpleSlotWorker { "err" => ?err, ); - return None; - } + return None + }, }; self.notify_slot(&slot_info.chain_head, slot, &epoch_data); @@ -260,13 +257,13 @@ pub trait SimpleSlotWorker { "authorities_len" => authorities_len, ); - return None; + return None } let claim = self.claim_slot(&slot_info.chain_head, slot, &epoch_data)?; if self.should_backoff(slot, &slot_info.chain_head) { - return None; + return None } debug!( @@ -289,9 +286,7 @@ pub trait SimpleSlotWorker { Err(err) => { warn!( target: logging_target, - "Unable to author block in slot {:?}: {:?}", - slot, - err, + "Unable to author block in slot {:?}: {:?}", slot, err, ); telemetry!( @@ -303,7 +298,7 @@ pub trait SimpleSlotWorker { ); return None - 
} + }, }; let logs = self.pre_digest_data(slot, &claim); @@ -311,34 +306,29 @@ pub trait SimpleSlotWorker { // deadline our production to 98% of the total time left for proposing. As we deadline // the proposing below to the same total time left, the 2% margin should be enough for // the result to be returned. - let proposing = proposer.propose( - slot_info.inherent_data, - sp_runtime::generic::Digest { - logs, - }, - proposing_remaining_duration.mul_f32(0.98), - None, - ).map_err(|e| sp_consensus::Error::ClientImport(format!("{:?}", e))); + let proposing = proposer + .propose( + slot_info.inherent_data, + sp_runtime::generic::Digest { logs }, + proposing_remaining_duration.mul_f32(0.98), + None, + ) + .map_err(|e| sp_consensus::Error::ClientImport(format!("{:?}", e))); let proposal = match futures::future::select(proposing, proposing_remaining).await { Either::Left((Ok(p), _)) => p, Either::Left((Err(err), _)) => { - warn!( - target: logging_target, - "Proposing failed: {:?}", - err, - ); + warn!(target: logging_target, "Proposing failed: {:?}", err,); return None }, Either::Right(_) => { info!( target: logging_target, - "⌛️ Discarding proposal for slot {}; block production took too long", - slot, + "⌛️ Discarding proposal for slot {}; block production took too long", slot, ); // If the node was compiled with debug, tell the user to use release optimizations. 
- #[cfg(build_type="debug")] + #[cfg(build_type = "debug")] info!( target: logging_target, "👉 Recompile your node in `--release` mode to mitigate this problem.", @@ -373,14 +363,10 @@ pub trait SimpleSlotWorker { ) { Ok(bi) => bi, Err(err) => { - warn!( - target: logging_target, - "Failed to create block import params: {:?}", - err, - ); + warn!(target: logging_target, "Failed to create block import params: {:?}", err,); return None - } + }, }; info!( @@ -401,17 +387,14 @@ pub trait SimpleSlotWorker { ); let header = block_import_params.post_header(); - match block_import - .import_block(block_import_params, Default::default()) - .await - { + match block_import.import_block(block_import_params, Default::default()).await { Ok(res) => { res.handle_justification( &header.hash(), *header.number(), self.justification_sync_link(), ); - } + }, Err(err) => { warn!( target: logging_target, @@ -425,18 +408,17 @@ pub trait SimpleSlotWorker { "hash" => ?parent_hash, "err" => ?err, ); - } + }, } - Some(SlotResult { - block: B::new(header, body), - storage_proof, - }) + Some(SlotResult { block: B::new(header, body), storage_proof }) } } #[async_trait::async_trait] -impl + Send> SlotWorker>::Proof> for T { +impl + Send> SlotWorker>::Proof> + for T +{ async fn on_slot( &mut self, slot_info: SlotInfo, @@ -496,8 +478,7 @@ pub async fn start_slot_worker( mut sync_oracle: SO, create_inherent_data_providers: CIDP, can_author_with: CAW, -) -where +) where B: BlockT, C: SelectChain, W: SlotWorker, @@ -509,28 +490,25 @@ where { let SlotDuration(slot_duration) = slot_duration; - let mut slots = Slots::new( - slot_duration.slot_duration(), - create_inherent_data_providers, - client, - ); + let mut slots = + Slots::new(slot_duration.slot_duration(), create_inherent_data_providers, client); loop { let slot_info = match slots.next_slot().await { Ok(r) => r, Err(e) => { warn!(target: "slots", "Error while polling for next slot: {:?}", e); - return; - } + return + }, }; if 
sync_oracle.is_major_syncing() { debug!(target: "slots", "Skipping proposal slot due to sync."); - continue; + continue } - if let Err(err) = can_author_with - .can_author_with(&BlockId::Hash(slot_info.chain_head.hash())) + if let Err(err) = + can_author_with.can_author_with(&BlockId::Hash(slot_info.chain_head.hash())) { warn!( target: "slots", @@ -559,7 +537,10 @@ pub enum CheckedHeader { #[derive(Debug, thiserror::Error)] #[allow(missing_docs)] -pub enum Error where T: Debug { +pub enum Error +where + T: Debug, +{ #[error("Slot duration is invalid: {0:?}")] SlotDurationInvalid(SlotDuration), } @@ -591,25 +572,23 @@ impl SlotDuration { /// /// `slot_key` is marked as `'static`, as it should really be a /// compile-time constant. - pub fn get_or_compute(client: &C, cb: CB) -> sp_blockchain::Result where + pub fn get_or_compute(client: &C, cb: CB) -> sp_blockchain::Result + where C: sc_client_api::backend::AuxStore + sc_client_api::UsageProvider, C: ProvideRuntimeApi, CB: FnOnce(ApiRef, &BlockId) -> sp_blockchain::Result, T: SlotData + Encode + Decode + Debug, { let slot_duration = match client.get_aux(T::SLOT_KEY)? 
{ - Some(v) => ::decode(&mut &v[..]) - .map(SlotDuration) - .map_err(|_| { - sp_blockchain::Error::Backend({ - error!(target: "slots", "slot duration kept in invalid format"); - "slot duration kept in invalid format".to_string() - }) - }), + Some(v) => ::decode(&mut &v[..]).map(SlotDuration).map_err(|_| { + sp_blockchain::Error::Backend({ + error!(target: "slots", "slot duration kept in invalid format"); + "slot duration kept in invalid format".to_string() + }) + }), None => { let best_hash = client.usage_info().chain.best_hash; - let slot_duration = - cb(client.runtime_api(), &BlockId::hash(best_hash))?; + let slot_duration = cb(client.runtime_api(), &BlockId::hash(best_hash))?; info!( "⏱ Loaded block-time = {:?} from block {:?}", @@ -621,11 +600,13 @@ impl SlotDuration { .using_encoded(|s| client.insert_aux(&[(T::SLOT_KEY, &s[..])], &[]))?; Ok(SlotDuration(slot_duration)) - } + }, }?; if slot_duration.slot_duration() == Default::default() { - return Err(sp_blockchain::Error::Application(Box::new(Error::SlotDurationInvalid(slot_duration)))) + return Err(sp_blockchain::Error::Application(Box::new(Error::SlotDurationInvalid( + slot_duration, + )))) } Ok(slot_duration) @@ -687,9 +668,7 @@ pub fn proposing_remaining_duration( ) -> Duration { use sp_runtime::traits::Zero; - let proposing_duration = slot_info - .duration - .mul_f32(block_proposal_slot_portion.get()); + let proposing_duration = slot_info.duration.mul_f32(block_proposal_slot_portion.get()); let slot_remaining = slot_info .ends_at @@ -700,7 +679,7 @@ pub fn proposing_remaining_duration( // If parent is genesis block, we don't require any lenience factor. 
if slot_info.chain_head.number().is_zero() { - return proposing_duration; + return proposing_duration } let parent_slot = match parent_slot { @@ -723,9 +702,7 @@ pub fn proposing_remaining_duration( if let Some(ref max_block_proposal_slot_portion) = max_block_proposal_slot_portion { std::cmp::min( lenient_proposing_duration, - slot_info - .duration - .mul_f32(max_block_proposal_slot_portion.get()), + slot_info.duration.mul_f32(max_block_proposal_slot_portion.get()), ) } else { lenient_proposing_duration @@ -853,7 +830,7 @@ impl Default for BackoffAuthoringOnFinalizedHeadLagging { impl BackoffAuthoringBlocksStrategy for BackoffAuthoringOnFinalizedHeadLagging where - N: BaseArithmetic + Copy + N: BaseArithmetic + Copy, { fn should_backoff( &self, @@ -865,12 +842,12 @@ where ) -> bool { // This should not happen, but we want to keep the previous behaviour if it does. if slot_now <= chain_head_slot { - return false; + return false } let unfinalized_block_length = chain_head_number - finalized_number; - let interval = unfinalized_block_length.saturating_sub(self.unfinalized_slack) - / self.authoring_bias; + let interval = + unfinalized_block_length.saturating_sub(self.unfinalized_slack) / self.authoring_bias; let interval = interval.min(self.max_interval); // We're doing arithmetic between block and slot numbers. 
@@ -906,9 +883,9 @@ impl BackoffAuthoringBlocksStrategy for () { #[cfg(test)] mod test { use super::*; + use sp_api::NumberFor; use std::time::{Duration, Instant}; use substrate_test_runtime_client::runtime::{Block, Header}; - use sp_api::NumberFor; const SLOT_DURATION: Duration = Duration::from_millis(6000); @@ -945,10 +922,7 @@ mod test { } // but we cap it to a maximum of 20 slots - assert_eq!( - super::slot_lenience_linear(1u64.into(), &slot(23)), - Some(SLOT_DURATION * 20), - ); + assert_eq!(super::slot_lenience_linear(1u64.into(), &slot(23)), Some(SLOT_DURATION * 20),); } #[test] @@ -1041,7 +1015,15 @@ mod test { let slot_now = 2; let should_backoff: Vec = (slot_now..1000) - .map(|s| strategy.should_backoff(head_number, head_slot.into(), finalized_number, s.into(), "slots")) + .map(|s| { + strategy.should_backoff( + head_number, + head_slot.into(), + finalized_number, + s.into(), + "slots", + ) + }) .collect(); // Should always be false, since the head isn't advancing @@ -1105,7 +1087,15 @@ mod test { let max_interval = strategy.max_interval; let should_backoff: Vec = (slot_now..200) - .map(|s| strategy.should_backoff(head_number, head_slot.into(), finalized_number, s.into(), "slots")) + .map(|s| { + strategy.should_backoff( + head_number, + head_slot.into(), + finalized_number, + s.into(), + "slots", + ) + }) .collect(); // Should backoff (true) until we are `max_interval` number of slots ahead of the chain @@ -1123,11 +1113,7 @@ mod test { }; let finalized_number = 2; - let mut head_state = HeadState { - head_number: 4, - head_slot: 10, - slot_now: 11, - }; + let mut head_state = HeadState { head_number: 4, head_slot: 10, slot_now: 11 }; let should_backoff = |head_state: &HeadState| -> bool { >>::should_backoff( @@ -1155,32 +1141,27 @@ mod test { // Gradually start to backoff more and more frequently let expected = [ false, false, false, false, false, // no effect - true, false, - true, false, // 1:1 - true, true, false, - true, true, false, // 2:1 - true, 
true, true, false, - true, true, true, false, // 3:1 - true, true, true, true, false, - true, true, true, true, false, // 4:1 - true, true, true, true, true, false, - true, true, true, true, true, false, // 5:1 - true, true, true, true, true, true, false, - true, true, true, true, true, true, false, // 6:1 - true, true, true, true, true, true, true, false, - true, true, true, true, true, true, true, false, // 7:1 - true, true, true, true, true, true, true, true, false, - true, true, true, true, true, true, true, true, false, // 8:1 - true, true, true, true, true, true, true, true, true, false, - true, true, true, true, true, true, true, true, true, false, // 9:1 - true, true, true, true, true, true, true, true, true, true, false, - true, true, true, true, true, true, true, true, true, true, false, // 10:1 - true, true, true, true, true, true, true, true, true, true, true, false, - true, true, true, true, true, true, true, true, true, true, true, false, // 11:1 - true, true, true, true, true, true, true, true, true, true, true, true, false, - true, true, true, true, true, true, true, true, true, true, true, true, false, // 12:1 + true, false, true, false, // 1:1 + true, true, false, true, true, false, // 2:1 + true, true, true, false, true, true, true, false, // 3:1 + true, true, true, true, false, true, true, true, true, false, // 4:1 + true, true, true, true, true, false, true, true, true, true, true, false, // 5:1 + true, true, true, true, true, true, false, true, true, true, true, true, true, + false, // 6:1 + true, true, true, true, true, true, true, false, true, true, true, true, true, true, + true, false, // 7:1 + true, true, true, true, true, true, true, true, false, true, true, true, true, true, + true, true, true, false, // 8:1 + true, true, true, true, true, true, true, true, true, false, true, true, true, true, + true, true, true, true, true, false, // 9:1 + true, true, true, true, true, true, true, true, true, true, false, true, true, true, + true, 
true, true, true, true, true, true, false, // 10:1 + true, true, true, true, true, true, true, true, true, true, true, false, true, true, + true, true, true, true, true, true, true, true, true, false, // 11:1 + true, true, true, true, true, true, true, true, true, true, true, true, false, true, + true, true, true, true, true, true, true, true, true, true, true, false, // 12:1 true, true, true, true, - ]; + ]; assert_eq!(backoff.as_slice(), &expected[..]); } @@ -1195,11 +1176,7 @@ mod test { let finalized_number = 2; let starting_slot = 11; - let mut head_state = HeadState { - head_number: 4, - head_slot: 10, - slot_now: starting_slot, - }; + let mut head_state = HeadState { head_number: 4, head_slot: 10, slot_now: starting_slot }; let should_backoff = |head_state: &HeadState| -> bool { >>::should_backoff( @@ -1240,30 +1217,22 @@ mod test { assert_eq!(last_slot - last_two_claimed.next().unwrap(), 92); assert_eq!(last_slot - last_two_claimed.next().unwrap(), 92 + expected_distance); - let intervals: Vec<_> = slots_claimed - .windows(2) - .map(|x| x[1] - x[0]) - .collect(); + let intervals: Vec<_> = slots_claimed.windows(2).map(|x| x[1] - x[0]).collect(); // The key thing is that the distance between claimed slots is capped to `max_interval + 1` // assert_eq!(max_observed_interval, Some(&expected_distance)); assert_eq!(intervals.iter().max(), Some(&expected_distance)); // But lets assert all distances, which we expect to grow linearly until `max_interval + 1` - let expected_intervals: Vec<_> = (0..497) - .map(|i| (i/2).max(1).min(expected_distance) ) - .collect(); + let expected_intervals: Vec<_> = + (0..497).map(|i| (i / 2).max(1).min(expected_distance)).collect(); assert_eq!(intervals, expected_intervals); } fn run_until_max_interval(param: BackoffAuthoringOnFinalizedHeadLagging) -> (u64, u64) { let finalized_number = 0; - let mut head_state = HeadState { - head_number: 0, - head_slot: 0, - slot_now: 1, - }; + let mut head_state = HeadState { head_number: 0, 
head_slot: 0, slot_now: 1 }; let should_backoff = |head_state: &HeadState| -> bool { >>::should_backoff( @@ -1277,8 +1246,8 @@ mod test { }; // Number of blocks until we reach the max interval - let block_for_max_interval - = param.max_interval * param.authoring_bias + param.unfinalized_slack; + let block_for_max_interval = + param.max_interval * param.authoring_bias + param.unfinalized_slack; while head_state.head_number < block_for_max_interval { if should_backoff(&head_state) { @@ -1302,7 +1271,7 @@ mod test { // or // (start_slot + C) + M * X*(X+1)/2 fn expected_time_to_reach_max_interval( - param: &BackoffAuthoringOnFinalizedHeadLagging + param: &BackoffAuthoringOnFinalizedHeadLagging, ) -> (u64, u64) { let c = param.unfinalized_slack; let m = param.authoring_bias; diff --git a/client/consensus/slots/src/slots.rs b/client/consensus/slots/src/slots.rs index 1e6dadcdf5cf5..d994aff1fc612 100644 --- a/client/consensus/slots/src/slots.rs +++ b/client/consensus/slots/src/slots.rs @@ -20,23 +20,21 @@ //! //! This is used instead of `futures_timer::Interval` because it was unreliable. -use super::{Slot, InherentDataProviderExt}; +use super::{InherentDataProviderExt, Slot}; use sp_consensus::{Error, SelectChain}; -use sp_inherents::{InherentData, CreateInherentDataProviders, InherentDataProvider}; +use sp_inherents::{CreateInherentDataProviders, InherentData, InherentDataProvider}; use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; -use std::time::{Duration, Instant}; use futures_timer::Delay; +use std::time::{Duration, Instant}; /// Returns current duration since unix epoch. pub fn duration_now() -> Duration { use std::time::SystemTime; let now = SystemTime::now(); - now.duration_since(SystemTime::UNIX_EPOCH).unwrap_or_else(|e| panic!( - "Current time {:?} is before unix epoch. Something is wrong: {:?}", - now, - e, - )) + now.duration_since(SystemTime::UNIX_EPOCH).unwrap_or_else(|e| { + panic!("Current time {:?} is before unix epoch. 
Something is wrong: {:?}", now, e,) + }) } /// Returns the duration until the next slot from now. @@ -104,11 +102,7 @@ pub(crate) struct Slots { impl Slots { /// Create a new `Slots` stream. - pub fn new( - slot_duration: Duration, - create_inherent_data_providers: IDP, - client: C, - ) -> Self { + pub fn new(slot_duration: Duration, create_inherent_data_providers: IDP, client: C) -> Self { Slots { last_slot: 0.into(), slot_duration, @@ -135,7 +129,7 @@ where // schedule wait. let wait_dur = time_until_next_slot(self.slot_duration); Some(Delay::new(wait_dur)) - } + }, Some(d) => Some(d), }; @@ -161,11 +155,12 @@ where ); // Let's try at the next slot.. self.inner_delay.take(); - continue; - } + continue + }, }; - let inherent_data_providers = self.create_inherent_data_providers + let inherent_data_providers = self + .create_inherent_data_providers .create_inherent_data_providers(chain_head.hash(), ()) .await?; diff --git a/client/consensus/uncles/src/lib.rs b/client/consensus/uncles/src/lib.rs index cfae0528a627d..368a994cfe520 100644 --- a/client/consensus/uncles/src/lib.rs +++ b/client/consensus/uncles/src/lib.rs @@ -19,7 +19,7 @@ //! Uncles functionality for Substrate. use sc_client_api::ProvideUncles; -use sp_runtime::{traits::Block as BlockT, generic::BlockId}; +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; #[derive(Debug, thiserror::Error)] pub enum Error { @@ -34,7 +34,8 @@ const MAX_UNCLE_GENERATIONS: u32 = 8; pub fn create_uncles_inherent_data_provider( client: &C, parent: B::Hash, -) -> Result, sc_client_api::blockchain::Error> where +) -> Result, sc_client_api::blockchain::Error> +where B: BlockT, C: ProvideUncles, { diff --git a/client/db/src/bench.rs b/client/db/src/bench.rs index 4b34182a1c3bd..c21119bd1176f 100644 --- a/client/db/src/bench.rs +++ b/client/db/src/bench.rs @@ -18,27 +18,31 @@ //! 
State backend that's useful for benchmarking -use std::sync::Arc; -use std::cell::{Cell, RefCell}; -use std::collections::HashMap; +use std::{ + cell::{Cell, RefCell}, + collections::HashMap, + sync::Arc, +}; -use hash_db::{Prefix, Hasher}; -use sp_trie::{MemoryDB, prefixed_key}; +use crate::storage_cache::{new_shared_cache, CachingState, SharedCache}; +use hash_db::{Hasher, Prefix}; +use kvdb::{DBTransaction, KeyValueDB}; use sp_core::{ + hexdisplay::HexDisplay, storage::{ChildInfo, TrackedStorageKey}, - hexdisplay::HexDisplay }; -use sp_runtime::traits::{Block as BlockT, HashFor}; -use sp_runtime::Storage; +use sp_runtime::{ + traits::{Block as BlockT, HashFor}, + Storage, +}; use sp_state_machine::{ - DBValue, backend::Backend as StateBackend, StorageCollection, ChildStorageCollection, ProofRecorder, + backend::Backend as StateBackend, ChildStorageCollection, DBValue, ProofRecorder, + StorageCollection, }; -use kvdb::{KeyValueDB, DBTransaction}; -use crate::storage_cache::{CachingState, SharedCache, new_shared_cache}; +use sp_trie::{prefixed_key, MemoryDB}; -type DbState = sp_state_machine::TrieBackend< - Arc>>, HashFor ->; +type DbState = + sp_state_machine::TrieBackend>>, HashFor>; type State = CachingState, B>; @@ -53,14 +57,17 @@ impl sp_state_machine::Storage> for StorageDb>(key, prefix); if let Some(recorder) = &self.proof_recorder { if let Some(v) = recorder.get(&key) { - return Ok(v.clone()); + return Ok(v.clone()) } - let backend_value = self.db.get(0, &prefixed_key) + let backend_value = self + .db + .get(0, &prefixed_key) .map_err(|e| format!("Database backend error: {:?}", e))?; recorder.record(key.clone(), backend_value.clone()); Ok(backend_value) } else { - self.db.get(0, &prefixed_key) + self.db + .get(0, &prefixed_key) .map_err(|e| format!("Database backend error: {:?}", e)) } } @@ -91,7 +98,11 @@ pub struct BenchmarkingState { impl BenchmarkingState { /// Create a new instance that creates a database in a temporary dir. 
- pub fn new(genesis: Storage, _cache_size_mb: Option, record_proof: bool) -> Result { + pub fn new( + genesis: Storage, + _cache_size_mb: Option, + record_proof: bool, + ) -> Result { let mut root = B::Hash::default(); let mut mdb = MemoryDB::>::default(); sp_state_machine::TrieDBMut::>::new(&mut mdb, &mut root); @@ -114,14 +125,17 @@ impl BenchmarkingState { state.add_whitelist_to_tracker(); state.reopen()?; - let child_delta = genesis.children_default.iter().map(|(_storage_key, child_content)| ( - &child_content.child_info, - child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), - )); - let (root, transaction): (B::Hash, _) = state.state.borrow_mut().as_mut().unwrap().full_storage_root( - genesis.top.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), - child_delta, - ); + let child_delta = genesis.children_default.iter().map(|(_storage_key, child_content)| { + ( + &child_content.child_info, + child_content.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), + ) + }); + let (root, transaction): (B::Hash, _) = + state.state.borrow_mut().as_mut().unwrap().full_storage_root( + genesis.top.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))), + child_delta, + ); state.genesis = transaction.clone().drain(); state.genesis_root = root.clone(); state.commit(root, transaction, Vec::new(), Vec::new())?; @@ -143,12 +157,12 @@ impl BenchmarkingState { let storage_db = Arc::new(StorageDb:: { db, proof_recorder: self.proof_recorder.clone(), - _block: Default::default() + _block: Default::default(), }); *self.state.borrow_mut() = Some(State::new( DbState::::new(storage_db, self.root.get()), self.shared_cache.clone(), - None + None, )); Ok(()) } @@ -178,7 +192,7 @@ impl BenchmarkingState { let key_tracker = if let Some(childtrie) = childtrie { child_key_tracker.entry(childtrie.to_vec()).or_insert_with(|| HashMap::new()) - } else { + } else { &mut main_key_tracker }; @@ -193,7 +207,7 @@ impl BenchmarkingState { let should_log = !tracker.has_been_read(); 
tracker.add_read(); should_log - } + }, }; if should_log { @@ -215,7 +229,7 @@ impl BenchmarkingState { let key_tracker = if let Some(childtrie) = childtrie { child_key_tracker.entry(childtrie.to_vec()).or_insert_with(|| HashMap::new()) - } else { + } else { &mut main_key_tracker }; @@ -231,7 +245,7 @@ impl BenchmarkingState { let should_log = !tracker.has_been_written(); tracker.add_write(); should_log - } + }, }; if should_log { @@ -269,7 +283,7 @@ fn state_err() -> String { } impl StateBackend> for BenchmarkingState { - type Error = as StateBackend>>::Error; + type Error = as StateBackend>>::Error; type Transaction = as StateBackend>>::Transaction; type TrieBackendStorage = as StateBackend>>::TrieBackendStorage; @@ -289,7 +303,11 @@ impl StateBackend> for BenchmarkingState { key: &[u8], ) -> Result>, Self::Error> { self.add_read_key(Some(child_info.storage_key()), key); - self.state.borrow().as_ref().ok_or_else(state_err)?.child_storage(child_info, key) + self.state + .borrow() + .as_ref() + .ok_or_else(state_err)? + .child_storage(child_info, key) } fn exists_storage(&self, key: &[u8]) -> Result { @@ -303,7 +321,11 @@ impl StateBackend> for BenchmarkingState { key: &[u8], ) -> Result { self.add_read_key(Some(child_info.storage_key()), key); - self.state.borrow().as_ref().ok_or_else(state_err)?.exists_child_storage(child_info, key) + self.state + .borrow() + .as_ref() + .ok_or_else(state_err)? + .exists_child_storage(child_info, key) } fn next_storage_key(&self, key: &[u8]) -> Result>, Self::Error> { @@ -317,7 +339,11 @@ impl StateBackend> for BenchmarkingState { key: &[u8], ) -> Result>, Self::Error> { self.add_read_key(Some(child_info.storage_key()), key); - self.state.borrow().as_ref().ok_or_else(state_err)?.next_child_storage_key(child_info, key) + self.state + .borrow() + .as_ref() + .ok_or_else(state_err)? 
+ .next_child_storage_key(child_info, key) } fn for_keys_with_prefix(&self, prefix: &[u8], f: F) { @@ -340,8 +366,13 @@ impl StateBackend> for BenchmarkingState { f: F, allow_missing: bool, ) -> Result { - self.state.borrow().as_ref().ok_or_else(state_err)? - .apply_to_key_values_while(child_info, prefix, start_at, f, allow_missing) + self.state.borrow().as_ref().ok_or_else(state_err)?.apply_to_key_values_while( + child_info, + prefix, + start_at, + f, + allow_missing, + ) } fn apply_to_keys_while bool>( @@ -368,17 +399,29 @@ impl StateBackend> for BenchmarkingState { fn storage_root<'a>( &self, - delta: impl Iterator)>, - ) -> (B::Hash, Self::Transaction) where B::Hash: Ord { - self.state.borrow().as_ref().map_or(Default::default(), |s| s.storage_root(delta)) + delta: impl Iterator)>, + ) -> (B::Hash, Self::Transaction) + where + B::Hash: Ord, + { + self.state + .borrow() + .as_ref() + .map_or(Default::default(), |s| s.storage_root(delta)) } fn child_storage_root<'a>( &self, child_info: &ChildInfo, - delta: impl Iterator)>, - ) -> (B::Hash, bool, Self::Transaction) where B::Hash: Ord { - self.state.borrow().as_ref().map_or(Default::default(), |s| s.child_storage_root(child_info, delta)) + delta: impl Iterator)>, + ) -> (B::Hash, bool, Self::Transaction) + where + B::Hash: Ord, + { + self.state + .borrow() + .as_ref() + .map_or(Default::default(), |s| s.child_storage_root(child_info, delta)) } fn pairs(&self) -> Vec<(Vec, Vec)> { @@ -389,17 +432,16 @@ impl StateBackend> for BenchmarkingState { self.state.borrow().as_ref().map_or(Default::default(), |s| s.keys(prefix)) } - fn child_keys( - &self, - child_info: &ChildInfo, - prefix: &[u8], - ) -> Vec> { - self.state.borrow().as_ref().map_or(Default::default(), |s| s.child_keys(child_info, prefix)) + fn child_keys(&self, child_info: &ChildInfo, prefix: &[u8]) -> Vec> { + self.state + .borrow() + .as_ref() + .map_or(Default::default(), |s| s.child_keys(child_info, prefix)) } - fn as_trie_backend(&mut self) - -> 
Option<&sp_state_machine::TrieBackend>> - { + fn as_trie_backend( + &mut self, + ) -> Option<&sp_state_machine::TrieBackend>> { None } @@ -425,7 +467,8 @@ impl StateBackend> for BenchmarkingState { let mut record = self.record.take(); record.extend(keys); self.record.set(record); - db.write(db_transaction).map_err(|_| String::from("Error committing transaction"))?; + db.write(db_transaction) + .map_err(|_| String::from("Error committing transaction"))?; self.root.set(storage_root); self.db.set(Some(db)); @@ -455,7 +498,8 @@ impl StateBackend> for BenchmarkingState { None => db_transaction.delete(0, &key), } } - db.write(db_transaction).map_err(|_| String::from("Error committing transaction"))?; + db.write(db_transaction) + .map_err(|_| String::from("Error committing transaction"))?; self.db.set(Some(db)); } @@ -519,24 +563,20 @@ impl StateBackend> for BenchmarkingState { let reads = tracker.reads.min(1); let writes = tracker.writes.min(1); if let Some(prefix_tracker) = prefix_key_tracker.get_mut(&prefix) { - prefix_tracker.0 += reads; - prefix_tracker.1 += writes; + prefix_tracker.0 += reads; + prefix_tracker.1 += writes; } else { - prefix_key_tracker.insert( - prefix, - ( - reads, - writes, - tracker.whitelisted, - ), - ); + prefix_key_tracker.insert(prefix, (reads, writes, tracker.whitelisted)); } } }); - prefix_key_tracker.iter().map(|(key, tracker)| -> (Vec, u32, u32, bool) { + prefix_key_tracker + .iter() + .map(|(key, tracker)| -> (Vec, u32, u32, bool) { (key.to_vec(), tracker.0, tracker.1, tracker.2) - }).collect::>() + }) + .collect::>() } fn register_overlay_stats(&self, stats: &sp_state_machine::StateMachineStats) { @@ -544,7 +584,10 @@ impl StateBackend> for BenchmarkingState { } fn usage_info(&self) -> sp_state_machine::UsageInfo { - self.state.borrow().as_ref().map_or(sp_state_machine::UsageInfo::empty(), |s| s.usage_info()) + self.state + .borrow() + .as_ref() + .map_or(sp_state_machine::UsageInfo::empty(), |s| s.usage_info()) } fn proof_size(&self) 
-> Option { @@ -585,8 +628,8 @@ mod test { #[test] fn read_to_main_and_child_tries() { - let bench_state = BenchmarkingState::::new(Default::default(), None, false) - .unwrap(); + let bench_state = + BenchmarkingState::::new(Default::default(), None, false).unwrap(); for _ in 0..2 { let child1 = sp_core::storage::ChildInfo::new_default(b"child1"); @@ -600,16 +643,14 @@ mod test { bench_state.child_storage(&child1, b"bar").unwrap(); bench_state.child_storage(&child2, b"bar").unwrap(); - bench_state.commit( - Default::default(), - Default::default(), - vec![ - ("foo".as_bytes().to_vec(), None) - ], - vec![ - ("child1".as_bytes().to_vec(), vec![("foo".as_bytes().to_vec(), None)]) - ] - ).unwrap(); + bench_state + .commit( + Default::default(), + Default::default(), + vec![("foo".as_bytes().to_vec(), None)], + vec![("child1".as_bytes().to_vec(), vec![("foo".as_bytes().to_vec(), None)])], + ) + .unwrap(); let rw_tracker = bench_state.read_write_count(); assert_eq!(rw_tracker.0, 6); diff --git a/client/db/src/cache/list_cache.rs b/client/db/src/cache/list_cache.rs index 341105b16a5b3..9499ae2a89f45 100644 --- a/client/db/src/cache/list_cache.rs +++ b/client/db/src/cache/list_cache.rs @@ -41,18 +41,18 @@ //! Finalized entry E1 is pruned when block B is finalized so that: //! 
EntryAt(B.number - prune_depth).points_to(E1) -use std::collections::{BTreeSet, BTreeMap}; +use std::collections::{BTreeMap, BTreeSet}; use log::warn; use sp_blockchain::{Error as ClientError, Result as ClientResult}; -use sp_runtime::traits::{ - Block as BlockT, NumberFor, Zero, Bounded, CheckedSub -}; +use sp_runtime::traits::{Block as BlockT, Bounded, CheckedSub, NumberFor, Zero}; -use crate::cache::{CacheItemT, ComplexBlockId, EntryType}; -use crate::cache::list_entry::{Entry, StorageEntry}; -use crate::cache::list_storage::{Storage, StorageTransaction, Metadata}; +use crate::cache::{ + list_entry::{Entry, StorageEntry}, + list_storage::{Metadata, Storage, StorageTransaction}, + CacheItemT, ComplexBlockId, EntryType, +}; /// Pruning strategy. #[derive(Debug, Clone, Copy)] @@ -132,8 +132,8 @@ impl> ListCache pruning_strategy: PruningStrategy>, best_finalized_block: ComplexBlockId, ) -> ClientResult { - let (best_finalized_entry, unfinalized) = storage.read_meta() - .and_then(|meta| read_forks(&storage, meta))?; + let (best_finalized_entry, unfinalized) = + storage.read_meta().and_then(|meta| read_forks(&storage, meta))?; Ok(ListCache { storage, @@ -167,7 +167,7 @@ impl> ListCache // BUT since we're not guaranteeing to provide correct values for forks // behind the finalized block, check if the block is finalized first if !chain::is_finalized_block(&self.storage, &at, Bounded::max_value())? { - return Err(ClientError::NotInFinalizedChain); + return Err(ClientError::NotInFinalizedChain) } self.best_finalized_entry.as_ref() @@ -184,18 +184,21 @@ impl> ListCache match self.find_unfinalized_fork(&at)? { Some(fork) => Some(&fork.head), None => match self.best_finalized_entry.as_ref() { - Some(best_finalized_entry) if chain::is_connected_to_block( - &self.storage, - &at, - &best_finalized_entry.valid_from, - )? => Some(best_finalized_entry), + Some(best_finalized_entry) + if chain::is_connected_to_block( + &self.storage, + &at, + &best_finalized_entry.valid_from, + )? 
=> + Some(best_finalized_entry), _ => None, }, } }; match head { - Some(head) => head.search_best_before(&self.storage, at.number) + Some(head) => head + .search_best_before(&self.storage, at.number) .map(|e| e.map(|e| (e.0.valid_from, e.1, e.0.value))), None => Ok(None), } @@ -213,7 +216,8 @@ impl> ListCache entry_type: EntryType, operations: &mut CommitOperations, ) -> ClientResult<()> { - Ok(operations.append(self.do_on_block_insert(tx, parent, block, value, entry_type, operations)?)) + Ok(operations + .append(self.do_on_block_insert(tx, parent, block, value, entry_type, operations)?)) } /// When previously inserted block is finalized. @@ -242,25 +246,25 @@ impl> ListCache for op in ops.operations { match op { CommitOperation::AppendNewBlock(index, best_block) => { - let mut fork = self.unfinalized.get_mut(index) - .expect("ListCache is a crate-private type; + let mut fork = self.unfinalized.get_mut(index).expect( + "ListCache is a crate-private type; internal clients of ListCache are committing transaction while cache is locked; - CommitOperation holds valid references while cache is locked; qed"); + CommitOperation holds valid references while cache is locked; qed", + ); fork.best_block = Some(best_block); }, CommitOperation::AppendNewEntry(index, entry) => { - let mut fork = self.unfinalized.get_mut(index) - .expect("ListCache is a crate-private type; + let mut fork = self.unfinalized.get_mut(index).expect( + "ListCache is a crate-private type; internal clients of ListCache are committing transaction while cache is locked; - CommitOperation holds valid references while cache is locked; qed"); + CommitOperation holds valid references while cache is locked; qed", + ); fork.best_block = Some(entry.valid_from.clone()); fork.head = entry; }, CommitOperation::AddNewFork(entry) => { - self.unfinalized.push(Fork { - best_block: Some(entry.valid_from.clone()), - head: entry, - }); + self.unfinalized + .push(Fork { best_block: Some(entry.valid_from.clone()), head: 
entry }); }, CommitOperation::BlockFinalized(block, finalizing_entry, forks) => { self.best_finalized_block = block; @@ -275,7 +279,9 @@ impl> ListCache for (fork_index, updated_fork) in forks.into_iter().rev() { match updated_fork { Some(updated_fork) => self.unfinalized[fork_index] = updated_fork, - None => { self.unfinalized.remove(fork_index); }, + None => { + self.unfinalized.remove(fork_index); + }, } } }, @@ -296,17 +302,17 @@ impl> ListCache let prev_operation = operations.operations.last(); debug_assert!( entry_type != EntryType::Final || - self.best_finalized_block.hash == parent.hash || - match prev_operation { - Some(&CommitOperation::BlockFinalized(ref best_finalized_block, _, _)) - => best_finalized_block.hash == parent.hash, - _ => false, - } + self.best_finalized_block.hash == parent.hash || + match prev_operation { + Some(&CommitOperation::BlockFinalized(ref best_finalized_block, _, _)) => + best_finalized_block.hash == parent.hash, + _ => false, + } ); // we do not store any values behind finalized if block.number != Zero::zero() && self.best_finalized_block.number >= block.number { - return Ok(None); + return Ok(None) } // if the block is not final, it is possibly appended to/forking from existing unfinalized fork @@ -316,14 +322,14 @@ impl> ListCache // when value hasn't changed and block isn't final, there's nothing we need to do if value.is_none() { - return Ok(None); + return Ok(None) } // first: try to find fork that is known to has the best block we're appending to for (index, fork) in self.unfinalized.iter().enumerate() { if fork.try_append(&parent) { fork_and_action = Some((index, ForkAppendResult::Append)); - break; + break } } @@ -331,11 +337,14 @@ impl> ListCache // - we're appending to the fork for the first time after restart; // - we're forking existing unfinalized fork from the middle; if fork_and_action.is_none() { - let best_finalized_entry_block = self.best_finalized_entry.as_ref().map(|f| f.valid_from.number); + let 
best_finalized_entry_block = + self.best_finalized_entry.as_ref().map(|f| f.valid_from.number); for (index, fork) in self.unfinalized.iter().enumerate() { - if let Some(action) = fork.try_append_or_fork(&self.storage, &parent, best_finalized_entry_block)? { + if let Some(action) = + fork.try_append_or_fork(&self.storage, &parent, best_finalized_entry_block)? + { fork_and_action = Some((index, action)); - break; + break } } } @@ -350,9 +359,14 @@ impl> ListCache }; tx.insert_storage_entry(&block, &new_storage_entry); - let operation = CommitOperation::AppendNewEntry(index, new_storage_entry.into_entry(block)); - tx.update_meta(self.best_finalized_entry.as_ref(), &self.unfinalized, &operation); - return Ok(Some(operation)); + let operation = + CommitOperation::AppendNewEntry(index, new_storage_entry.into_entry(block)); + tx.update_meta( + self.best_finalized_entry.as_ref(), + &self.unfinalized, + &operation, + ); + return Ok(Some(operation)) }, // fork from the middle of unfinalized fork Some((_, ForkAppendResult::Fork(prev_valid_from))) => { @@ -363,9 +377,14 @@ impl> ListCache }; tx.insert_storage_entry(&block, &new_storage_entry); - let operation = CommitOperation::AddNewFork(new_storage_entry.into_entry(block)); - tx.update_meta(self.best_finalized_entry.as_ref(), &self.unfinalized, &operation); - return Ok(Some(operation)); + let operation = + CommitOperation::AddNewFork(new_storage_entry.into_entry(block)); + tx.update_meta( + self.best_finalized_entry.as_ref(), + &self.unfinalized, + &operation, + ); + return Ok(Some(operation)) }, None => (), } @@ -389,12 +408,17 @@ impl> ListCache return Ok(match new_storage_entry { Some(new_storage_entry) => { tx.insert_storage_entry(&block, &new_storage_entry); - let operation = CommitOperation::AddNewFork(new_storage_entry.into_entry(block)); - tx.update_meta(self.best_finalized_entry.as_ref(), &self.unfinalized, &operation); + let operation = + CommitOperation::AddNewFork(new_storage_entry.into_entry(block)); + 
tx.update_meta( + self.best_finalized_entry.as_ref(), + &self.unfinalized, + &operation, + ); Some(operation) }, None => None, - }); + }) } // cleanup database from abandoned unfinalized forks and obsolete finalized entries @@ -404,7 +428,11 @@ impl> ListCache match new_storage_entry { Some(new_storage_entry) => { tx.insert_storage_entry(&block, &new_storage_entry); - let operation = CommitOperation::BlockFinalized(block.clone(), Some(new_storage_entry.into_entry(block)), abandoned_forks); + let operation = CommitOperation::BlockFinalized( + block.clone(), + Some(new_storage_entry.into_entry(block)), + abandoned_forks, + ); tx.update_meta(self.best_finalized_entry.as_ref(), &self.unfinalized, &operation); Ok(Some(operation)) }, @@ -423,16 +451,16 @@ impl> ListCache let prev_operation = operations.operations.last(); debug_assert!( self.best_finalized_block.hash == parent.hash || - match prev_operation { - Some(&CommitOperation::BlockFinalized(ref best_finalized_block, _, _)) - => best_finalized_block.hash == parent.hash, - _ => false, - } + match prev_operation { + Some(&CommitOperation::BlockFinalized(ref best_finalized_block, _, _)) => + best_finalized_block.hash == parent.hash, + _ => false, + } ); // there could be at most one entry that is finalizing - let finalizing_entry = self.storage.read_entry(&block)? 
- .map(|entry| entry.into_entry(block.clone())); + let finalizing_entry = + self.storage.read_entry(&block)?.map(|entry| entry.into_entry(block.clone())); // cleanup database from abandoned unfinalized forks and obsolete finalized entries let abandoned_forks = self.destroy_abandoned_forks(tx, &block, prev_operation); @@ -457,12 +485,13 @@ impl> ListCache for (index, fork) in self.unfinalized.iter().enumerate() { // we only need to truncate fork if its head is ancestor of truncated block if fork.head.valid_from.number < reverted_block.number { - continue; + continue } // we only need to truncate fork if its head is connected to truncated block - if !chain::is_connected_to_block(&self.storage, reverted_block, &fork.head.valid_from)? { - continue; + if !chain::is_connected_to_block(&self.storage, reverted_block, &fork.head.valid_from)? + { + continue } let updated_fork = fork.truncate( @@ -485,7 +514,7 @@ impl> ListCache fn prune_finalized_entries>( &self, tx: &mut Tx, - block: &ComplexBlockId + block: &ComplexBlockId, ) { let prune_depth = match self.pruning_strategy { PruningStrategy::ByDepth(prune_depth) => prune_depth, @@ -515,18 +544,13 @@ impl> ListCache }; // truncate ancient entry - tx.insert_storage_entry(&ancient_block, &StorageEntry { - prev_valid_from: None, - value: current_entry.value, - }); + tx.insert_storage_entry( + &ancient_block, + &StorageEntry { prev_valid_from: None, value: current_entry.value }, + ); // destroy 'fork' ending with previous entry - destroy_fork( - first_entry_to_truncate, - &self.storage, - tx, - None, - ) + destroy_fork(first_entry_to_truncate, &self.storage, tx, None) }; if let Err(error) = do_pruning() { @@ -543,16 +567,17 @@ impl> ListCache ) -> BTreeSet { // if some block has been finalized already => take it into account let prev_abandoned_forks = match prev_operation { - Some(&CommitOperation::BlockFinalized(_, _, ref abandoned_forks)) => Some(abandoned_forks), + Some(&CommitOperation::BlockFinalized(_, _, ref 
abandoned_forks)) => + Some(abandoned_forks), _ => None, }; let mut destroyed = prev_abandoned_forks.cloned().unwrap_or_else(|| BTreeSet::new()); - let live_unfinalized = self.unfinalized.iter() - .enumerate() - .filter(|(idx, _)| prev_abandoned_forks + let live_unfinalized = self.unfinalized.iter().enumerate().filter(|(idx, _)| { + prev_abandoned_forks .map(|prev_abandoned_forks| !prev_abandoned_forks.contains(idx)) - .unwrap_or(true)); + .unwrap_or(true) + }); for (index, fork) in live_unfinalized { if fork.head.valid_from.number == block.number { destroyed.insert(index); @@ -574,7 +599,7 @@ impl> ListCache ) -> ClientResult>> { for unfinalized in &self.unfinalized { if unfinalized.matches(&self.storage, block)? { - return Ok(Some(&unfinalized)); + return Ok(Some(&unfinalized)) } } @@ -597,7 +622,8 @@ impl Fork { let range = self.head.search_best_range_before(storage, block.number)?; match range { None => Ok(false), - Some((begin, end)) => chain::is_connected_to_range(storage, block, (&begin, end.as_ref())), + Some((begin, end)) => + chain::is_connected_to_range(storage, block, (&begin, end.as_ref())), } } @@ -628,19 +654,19 @@ impl Fork { // check if the parent is connected to the beginning of the range if !chain::is_connected_to_block(storage, parent, &begin)? { - return Ok(None); + return Ok(None) } // the block is connected to the begin-entry. 
If begin is the head entry // => we need to append new block to the fork if begin == self.head.valid_from { - return Ok(Some(ForkAppendResult::Append)); + return Ok(Some(ForkAppendResult::Append)) } // the parent block belongs to this fork AND it is located after last finalized entry // => we need to make a new fork if best_finalized_entry_block.map(|f| begin.number > f).unwrap_or(true) { - return Ok(Some(ForkAppendResult::Fork(begin))); + return Ok(Some(ForkAppendResult::Fork(begin))) } Ok(None) @@ -653,12 +679,7 @@ impl Fork { tx: &mut Tx, best_finalized_block: Option>, ) -> ClientResult<()> { - destroy_fork( - self.head.valid_from.clone(), - storage, - tx, - best_finalized_block, - ) + destroy_fork(self.head.valid_from.clone(), storage, tx, best_finalized_block) } /// Truncate fork by deleting all entries that are descendants of given block. @@ -674,18 +695,15 @@ impl Fork { // read pointer to previous entry let entry = storage.require_entry(¤t)?; - // truncation stops when we have reached the ancestor of truncated block + // truncation stops when we have reached the ancestor of truncated block if current.number < reverting_block { // if we have reached finalized block => destroy fork if chain::is_finalized_block(storage, ¤t, best_finalized_block)? { - return Ok(None); + return Ok(None) } // else fork needs to be updated - return Ok(Some(Fork { - best_block: None, - head: entry.into_entry(current), - })); + return Ok(Some(Fork { best_block: None, head: entry.into_entry(current) })) } tx.remove_storage_entry(¤t); @@ -707,7 +725,9 @@ impl Default for CommitOperations { // This should never be allowed for non-test code to avoid revealing its internals. 
#[cfg(test)] -impl From>> for CommitOperations { +impl From>> + for CommitOperations +{ fn from(operations: Vec>) -> Self { CommitOperations { operations } } @@ -725,30 +745,36 @@ impl CommitOperations { Some(last_operation) => last_operation, None => { self.operations.push(new_operation); - return; + return }, }; // we are able (and obliged to) to merge two consequent block finalization operations match last_operation { - CommitOperation::BlockFinalized(old_finalized_block, old_finalized_entry, old_abandoned_forks) => { - match new_operation { - CommitOperation::BlockFinalized(new_finalized_block, new_finalized_entry, new_abandoned_forks) => { - self.operations.push(CommitOperation::BlockFinalized( - new_finalized_block, - new_finalized_entry, - new_abandoned_forks, - )); - }, - _ => { - self.operations.push(CommitOperation::BlockFinalized( - old_finalized_block, - old_finalized_entry, - old_abandoned_forks, - )); - self.operations.push(new_operation); - }, - } + CommitOperation::BlockFinalized( + old_finalized_block, + old_finalized_entry, + old_abandoned_forks, + ) => match new_operation { + CommitOperation::BlockFinalized( + new_finalized_block, + new_finalized_entry, + new_abandoned_forks, + ) => { + self.operations.push(CommitOperation::BlockFinalized( + new_finalized_block, + new_finalized_entry, + new_abandoned_forks, + )); + }, + _ => { + self.operations.push(CommitOperation::BlockFinalized( + old_finalized_block, + old_finalized_entry, + old_abandoned_forks, + )); + self.operations.push(new_operation); + }, }, _ => { self.operations.push(last_operation); @@ -759,7 +785,12 @@ impl CommitOperations { } /// Destroy fork by deleting all unfinalized entries. 
-pub fn destroy_fork, Tx: StorageTransaction>( +pub fn destroy_fork< + Block: BlockT, + T: CacheItemT, + S: Storage, + Tx: StorageTransaction, +>( head_valid_from: ComplexBlockId, storage: &S, tx: &mut Tx, @@ -770,7 +801,7 @@ pub fn destroy_fork, Tx: Stor // optionally: deletion stops when we found entry at finalized block if let Some(best_finalized_block) = best_finalized_block { if chain::is_finalized_block(storage, ¤t, best_finalized_block)? { - return Ok(()); + return Ok(()) } } @@ -788,8 +819,8 @@ pub fn destroy_fork, Tx: Stor /// Blockchain related functions. mod chain { - use sp_runtime::traits::Header as HeaderT; use super::*; + use sp_runtime::traits::Header as HeaderT; /// Is the block1 connected both ends of the range. pub fn is_connected_to_range>( @@ -798,8 +829,8 @@ mod chain { range: (&ComplexBlockId, Option<&ComplexBlockId>), ) -> ClientResult { let (begin, end) = range; - Ok(is_connected_to_block(storage, block, begin)? - && match end { + Ok(is_connected_to_block(storage, block, begin)? && + match end { Some(end) => is_connected_to_block(storage, block, end)?, None => true, }) @@ -812,10 +843,12 @@ mod chain { block2: &ComplexBlockId, ) -> ClientResult { let (begin, end) = if *block1 > *block2 { (block2, block1) } else { (block1, block2) }; - let mut current = storage.read_header(&end.hash)? + let mut current = storage + .read_header(&end.hash)? .ok_or_else(|| ClientError::UnknownBlock(format!("{}", end.hash)))?; while *current.number() > begin.number { - current = storage.read_header(current.parent_hash())? + current = storage + .read_header(current.parent_hash())? 
.ok_or_else(|| ClientError::UnknownBlock(format!("{}", current.parent_hash())))?; } @@ -829,11 +862,10 @@ mod chain { best_finalized_block: NumberFor, ) -> ClientResult { if block.number > best_finalized_block { - return Ok(false); + return Ok(false) } - storage.read_id(block.number) - .map(|hash| hash.as_ref() == Some(&block.hash)) + storage.read_id(block.number).map(|hash| hash.as_ref() == Some(&block.hash)) } } @@ -843,17 +875,19 @@ fn read_forks>( meta: Metadata, ) -> ClientResult<(Option>, Vec>)> { let finalized = match meta.finalized { - Some(finalized) => Some(storage.require_entry(&finalized)? - .into_entry(finalized)), + Some(finalized) => Some(storage.require_entry(&finalized)?.into_entry(finalized)), None => None, }; - let unfinalized = meta.unfinalized.into_iter() - .map(|unfinalized| storage.require_entry(&unfinalized) - .map(|storage_entry| Fork { + let unfinalized = meta + .unfinalized + .into_iter() + .map(|unfinalized| { + storage.require_entry(&unfinalized).map(|storage_entry| Fork { best_block: None, head: storage_entry.into_entry(unfinalized), - })) + }) + }) .collect::>()?; Ok((finalized, unfinalized)) @@ -861,10 +895,10 @@ fn read_forks>( #[cfg(test)] mod tests { - use substrate_test_runtime_client::runtime::H256; - use sp_runtime::testing::{Header, Block as RawBlock, ExtrinsicWrapper}; - use crate::cache::list_storage::tests::{DummyStorage, FaultyStorage, DummyTransaction}; use super::*; + use crate::cache::list_storage::tests::{DummyStorage, DummyTransaction, FaultyStorage}; + use sp_runtime::testing::{Block as RawBlock, ExtrinsicWrapper, Header}; + use substrate_test_runtime_client::runtime::H256; type Block = RawBlock>; @@ -882,7 +916,11 @@ mod tests { fn test_header(number: u64) -> Header { Header { - parent_hash: if number == 0 { Default::default() } else { test_header(number - 1).hash() }, + parent_hash: if number == 0 { + Default::default() + } else { + test_header(number - 1).hash() + }, number, state_root: Default::default(), 
extrinsics_root: Default::default(), @@ -909,28 +947,54 @@ mod tests { // when block is earlier than best finalized block AND it is not finalized // --- 50 --- // ----------> [100] - assert!(ListCache::<_, u64, _>::new(DummyStorage::new(), PruningStrategy::ByDepth(1024), test_id(100)) - .unwrap().value_at_block(&test_id(50)).is_err()); + assert!(ListCache::<_, u64, _>::new( + DummyStorage::new(), + PruningStrategy::ByDepth(1024), + test_id(100) + ) + .unwrap() + .value_at_block(&test_id(50)) + .is_err()); // when block is earlier than best finalized block AND it is finalized AND value is some // [30] ---- 50 ---> [100] - assert_eq!(ListCache::new( - DummyStorage::new() - .with_meta(Some(test_id(100)), Vec::new()) - .with_id(50, H256::from_low_u64_be(50)) - .with_entry(test_id(100), StorageEntry { prev_valid_from: Some(test_id(30)), value: 100 }) - .with_entry(test_id(30), StorageEntry { prev_valid_from: None, value: 30 }), - PruningStrategy::ByDepth(1024), test_id(100) - ).unwrap().value_at_block(&test_id(50)).unwrap(), Some((test_id(30), Some(test_id(100)), 30))); + assert_eq!( + ListCache::new( + DummyStorage::new() + .with_meta(Some(test_id(100)), Vec::new()) + .with_id(50, H256::from_low_u64_be(50)) + .with_entry( + test_id(100), + StorageEntry { prev_valid_from: Some(test_id(30)), value: 100 } + ) + .with_entry(test_id(30), StorageEntry { prev_valid_from: None, value: 30 }), + PruningStrategy::ByDepth(1024), + test_id(100) + ) + .unwrap() + .value_at_block(&test_id(50)) + .unwrap(), + Some((test_id(30), Some(test_id(100)), 30)) + ); // when block is the best finalized block AND value is some // ---> [100] - assert_eq!(ListCache::new( - DummyStorage::new() - .with_meta(Some(test_id(100)), Vec::new()) - .with_id(100, H256::from_low_u64_be(100)) - .with_entry(test_id(100), StorageEntry { prev_valid_from: Some(test_id(30)), value: 100 }) - .with_entry(test_id(30), StorageEntry { prev_valid_from: None, value: 30 }), - PruningStrategy::ByDepth(1024), test_id(100) - 
).unwrap().value_at_block(&test_id(100)).unwrap(), Some((test_id(100), None, 100))); + assert_eq!( + ListCache::new( + DummyStorage::new() + .with_meta(Some(test_id(100)), Vec::new()) + .with_id(100, H256::from_low_u64_be(100)) + .with_entry( + test_id(100), + StorageEntry { prev_valid_from: Some(test_id(30)), value: 100 } + ) + .with_entry(test_id(30), StorageEntry { prev_valid_from: None, value: 30 }), + PruningStrategy::ByDepth(1024), + test_id(100) + ) + .unwrap() + .value_at_block(&test_id(100)) + .unwrap(), + Some((test_id(100), None, 100)) + ); // when block is parallel to the best finalized block // ---- 100 // ---> [100] @@ -938,81 +1002,138 @@ mod tests { DummyStorage::new() .with_meta(Some(test_id(100)), Vec::new()) .with_id(50, H256::from_low_u64_be(50)) - .with_entry(test_id(100), StorageEntry { prev_valid_from: Some(test_id(30)), value: 100 }) + .with_entry( + test_id(100), + StorageEntry { prev_valid_from: Some(test_id(30)), value: 100 } + ) .with_entry(test_id(30), StorageEntry { prev_valid_from: None, value: 30 }), - PruningStrategy::ByDepth(1024), test_id(100) - ).unwrap().value_at_block(&ComplexBlockId::new(H256::from_low_u64_be(2), 100)).is_err()); + PruningStrategy::ByDepth(1024), + test_id(100) + ) + .unwrap() + .value_at_block(&ComplexBlockId::new(H256::from_low_u64_be(2), 100)) + .is_err()); // when block is later than last finalized block AND there are no forks AND finalized value is Some // ---> [100] --- 200 - assert_eq!(ListCache::new( - DummyStorage::new() - .with_meta(Some(test_id(100)), Vec::new()) - .with_id(50, H256::from_low_u64_be(50)) - .with_entry(test_id(100), StorageEntry { prev_valid_from: Some(test_id(30)), value: 100 }), - PruningStrategy::ByDepth(1024), test_id(100) - ).unwrap().value_at_block(&test_id(200)).unwrap(), Some((test_id(100), None, 100))); + assert_eq!( + ListCache::new( + DummyStorage::new() + .with_meta(Some(test_id(100)), Vec::new()) + .with_id(50, H256::from_low_u64_be(50)) + .with_entry( + test_id(100), + 
StorageEntry { prev_valid_from: Some(test_id(30)), value: 100 } + ), + PruningStrategy::ByDepth(1024), + test_id(100) + ) + .unwrap() + .value_at_block(&test_id(200)) + .unwrap(), + Some((test_id(100), None, 100)) + ); // when block is later than last finalized block AND there are no matching forks // AND block is connected to finalized block AND finalized value is Some // --- 3 // ---> [2] /---------> [4] - assert_eq!(ListCache::new( - DummyStorage::new() - .with_meta(Some(correct_id(2)), vec![correct_id(4)]) - .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) - .with_entry(correct_id(4), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 }) - .with_header(test_header(2)) - .with_header(test_header(3)) - .with_header(test_header(4)) - .with_header(fork_header(0, 2, 3)), - PruningStrategy::ByDepth(1024), test_id(2) - ).unwrap().value_at_block(&fork_id(0, 2, 3)).unwrap(), Some((correct_id(2), None, 2))); + assert_eq!( + ListCache::new( + DummyStorage::new() + .with_meta(Some(correct_id(2)), vec![correct_id(4)]) + .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) + .with_entry( + correct_id(4), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 } + ) + .with_header(test_header(2)) + .with_header(test_header(3)) + .with_header(test_header(4)) + .with_header(fork_header(0, 2, 3)), + PruningStrategy::ByDepth(1024), + test_id(2) + ) + .unwrap() + .value_at_block(&fork_id(0, 2, 3)) + .unwrap(), + Some((correct_id(2), None, 2)) + ); // when block is later than last finalized block AND there are no matching forks // AND block is not connected to finalized block // --- 2 --- 3 // 1 /---> [2] ---------> [4] - assert_eq!(ListCache::new( - DummyStorage::new() - .with_meta(Some(correct_id(2)), vec![correct_id(4)]) - .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) - .with_entry(correct_id(4), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 }) - 
.with_header(test_header(1)) - .with_header(test_header(2)) - .with_header(test_header(3)) - .with_header(test_header(4)) - .with_header(fork_header(0, 1, 3)) - .with_header(fork_header(0, 1, 2)), - PruningStrategy::ByDepth(1024), test_id(2) - ).unwrap().value_at_block(&fork_id(0, 1, 3)).unwrap(), None); + assert_eq!( + ListCache::new( + DummyStorage::new() + .with_meta(Some(correct_id(2)), vec![correct_id(4)]) + .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) + .with_entry( + correct_id(4), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 } + ) + .with_header(test_header(1)) + .with_header(test_header(2)) + .with_header(test_header(3)) + .with_header(test_header(4)) + .with_header(fork_header(0, 1, 3)) + .with_header(fork_header(0, 1, 2)), + PruningStrategy::ByDepth(1024), + test_id(2) + ) + .unwrap() + .value_at_block(&fork_id(0, 1, 3)) + .unwrap(), + None + ); // when block is later than last finalized block AND it appends to unfinalized fork from the end // AND unfinalized value is Some // ---> [2] ---> [4] ---> 5 - assert_eq!(ListCache::new( - DummyStorage::new() - .with_meta(Some(correct_id(2)), vec![correct_id(4)]) - .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) - .with_entry(correct_id(4), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 }) - .with_header(test_header(4)) - .with_header(test_header(5)), - PruningStrategy::ByDepth(1024), test_id(2) - ).unwrap().value_at_block(&correct_id(5)).unwrap(), Some((correct_id(4), None, 4))); + assert_eq!( + ListCache::new( + DummyStorage::new() + .with_meta(Some(correct_id(2)), vec![correct_id(4)]) + .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) + .with_entry( + correct_id(4), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 } + ) + .with_header(test_header(4)) + .with_header(test_header(5)), + PruningStrategy::ByDepth(1024), + test_id(2) + ) + .unwrap() + 
.value_at_block(&correct_id(5)) + .unwrap(), + Some((correct_id(4), None, 4)) + ); // when block is later than last finalized block AND it does not fits unfinalized fork // AND it is connected to the finalized block AND finalized value is Some // ---> [2] ----------> [4] // \--- 3 - assert_eq!(ListCache::new( - DummyStorage::new() - .with_meta(Some(correct_id(2)), vec![correct_id(4)]) - .with_entry(correct_id(4), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 }) - .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) - .with_header(test_header(2)) - .with_header(test_header(3)) - .with_header(test_header(4)) - .with_header(fork_header(0, 2, 3)), - PruningStrategy::ByDepth(1024), test_id(2) - ).unwrap().value_at_block(&fork_id(0, 2, 3)).unwrap(), Some((correct_id(2), None, 2))); + assert_eq!( + ListCache::new( + DummyStorage::new() + .with_meta(Some(correct_id(2)), vec![correct_id(4)]) + .with_entry( + correct_id(4), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 } + ) + .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) + .with_header(test_header(2)) + .with_header(test_header(3)) + .with_header(test_header(4)) + .with_header(fork_header(0, 2, 3)), + PruningStrategy::ByDepth(1024), + test_id(2) + ) + .unwrap() + .value_at_block(&fork_id(0, 2, 3)) + .unwrap(), + Some((correct_id(2), None, 2)) + ); } #[test] @@ -1022,7 +1143,8 @@ mod tests { // when trying to insert block < finalized number let mut ops = Default::default(); - assert!(ListCache::new(DummyStorage::new(), PruningStrategy::ByDepth(1024), test_id(100)).unwrap() + assert!(ListCache::new(DummyStorage::new(), PruningStrategy::ByDepth(1024), test_id(100)) + .unwrap() .do_on_block_insert( &mut DummyTransaction::new(), test_id(49), @@ -1030,9 +1152,12 @@ mod tests { Some(50), nfin, &mut ops, - ).unwrap().is_none()); + ) + .unwrap() + .is_none()); // when trying to insert block @ finalized number - 
assert!(ListCache::new(DummyStorage::new(), PruningStrategy::ByDepth(1024), test_id(100)).unwrap() + assert!(ListCache::new(DummyStorage::new(), PruningStrategy::ByDepth(1024), test_id(100)) + .unwrap() .do_on_block_insert( &mut DummyTransaction::new(), test_id(99), @@ -1040,7 +1165,9 @@ mod tests { Some(100), nfin, &Default::default(), - ).unwrap().is_none()); + ) + .unwrap() + .is_none()); // when trying to insert non-final block AND it appends to the best block of unfinalized fork // AND new value is the same as in the fork' best block @@ -1048,12 +1175,23 @@ mod tests { DummyStorage::new() .with_meta(None, vec![test_id(4)]) .with_entry(test_id(4), StorageEntry { prev_valid_from: None, value: 4 }), - PruningStrategy::ByDepth(1024), test_id(2) - ).unwrap(); + PruningStrategy::ByDepth(1024), + test_id(2), + ) + .unwrap(); cache.unfinalized[0].best_block = Some(test_id(4)); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert(&mut tx, test_id(4), test_id(5), Some(4), nfin, &Default::default()).unwrap(), + cache + .do_on_block_insert( + &mut tx, + test_id(4), + test_id(5), + Some(4), + nfin, + &Default::default() + ) + .unwrap(), Some(CommitOperation::AppendNewBlock(0, test_id(5))), ); assert!(tx.inserted_entries().is_empty()); @@ -1063,12 +1201,24 @@ mod tests { // AND new value is the same as in the fork' best block let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert(&mut tx, test_id(4), test_id(5), Some(5), nfin, &Default::default()).unwrap(), + cache + .do_on_block_insert( + &mut tx, + test_id(4), + test_id(5), + Some(5), + nfin, + &Default::default() + ) + .unwrap(), Some(CommitOperation::AppendNewEntry(0, Entry { valid_from: test_id(5), value: 5 })), ); assert_eq!(*tx.inserted_entries(), vec![test_id(5).hash].into_iter().collect()); assert!(tx.removed_entries().is_empty()); - assert_eq!(*tx.updated_meta(), Some(Metadata { finalized: None, unfinalized: vec![test_id(5)] })); + assert_eq!( + *tx.updated_meta(), + 
Some(Metadata { finalized: None, unfinalized: vec![test_id(5)] }) + ); // when trying to insert non-final block AND it is the first block that appends to the best block of unfinalized fork // AND new value is the same as in the fork' best block @@ -1077,18 +1227,22 @@ mod tests { .with_meta(None, vec![correct_id(4)]) .with_entry(correct_id(4), StorageEntry { prev_valid_from: None, value: 4 }) .with_header(test_header(4)), - PruningStrategy::ByDepth(1024), test_id(2) - ).unwrap(); + PruningStrategy::ByDepth(1024), + test_id(2), + ) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert( - &mut tx, - correct_id(4), - correct_id(5), - Some(4), - nfin, - &Default::default(), - ).unwrap(), + cache + .do_on_block_insert( + &mut tx, + correct_id(4), + correct_id(5), + Some(4), + nfin, + &Default::default(), + ) + .unwrap(), Some(CommitOperation::AppendNewBlock(0, correct_id(5))), ); assert!(tx.inserted_entries().is_empty()); @@ -1098,40 +1252,64 @@ mod tests { // AND new value is the same as in the fork' best block let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert( - &mut tx, - correct_id(4), - correct_id(5), - Some(5), - nfin, - &Default::default(), - ).unwrap(), + cache + .do_on_block_insert( + &mut tx, + correct_id(4), + correct_id(5), + Some(5), + nfin, + &Default::default(), + ) + .unwrap(), Some(CommitOperation::AppendNewEntry(0, Entry { valid_from: correct_id(5), value: 5 })), ); assert_eq!(*tx.inserted_entries(), vec![correct_id(5).hash].into_iter().collect()); assert!(tx.removed_entries().is_empty()); - assert_eq!(*tx.updated_meta(), Some(Metadata { finalized: None, unfinalized: vec![correct_id(5)] })); + assert_eq!( + *tx.updated_meta(), + Some(Metadata { finalized: None, unfinalized: vec![correct_id(5)] }) + ); // when trying to insert non-final block AND it forks unfinalized fork let cache = ListCache::new( DummyStorage::new() .with_meta(Some(correct_id(2)), vec![correct_id(4)]) - 
.with_entry(correct_id(4), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 }) + .with_entry( + correct_id(4), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 4 }, + ) .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) .with_header(test_header(2)) .with_header(test_header(3)) .with_header(test_header(4)), - PruningStrategy::ByDepth(1024), correct_id(2) - ).unwrap(); + PruningStrategy::ByDepth(1024), + correct_id(2), + ) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert(&mut tx, correct_id(3), fork_id(0, 3, 4), Some(14), nfin, &Default::default()) + cache + .do_on_block_insert( + &mut tx, + correct_id(3), + fork_id(0, 3, 4), + Some(14), + nfin, + &Default::default() + ) .unwrap(), Some(CommitOperation::AddNewFork(Entry { valid_from: fork_id(0, 3, 4), value: 14 })), ); assert_eq!(*tx.inserted_entries(), vec![fork_id(0, 3, 4).hash].into_iter().collect()); assert!(tx.removed_entries().is_empty()); - assert_eq!(*tx.updated_meta(), Some(Metadata { finalized: Some(correct_id(2)), unfinalized: vec![correct_id(4), fork_id(0, 3, 4)] })); + assert_eq!( + *tx.updated_meta(), + Some(Metadata { + finalized: Some(correct_id(2)), + unfinalized: vec![correct_id(4), fork_id(0, 3, 4)] + }) + ); // when trying to insert non-final block AND there are no unfinalized forks // AND value is the same as last finalized @@ -1139,11 +1317,21 @@ mod tests { DummyStorage::new() .with_meta(Some(correct_id(2)), vec![]) .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }), - PruningStrategy::ByDepth(1024), correct_id(2) - ).unwrap(); + PruningStrategy::ByDepth(1024), + correct_id(2), + ) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert(&mut tx, correct_id(2), correct_id(3), Some(2), nfin, &Default::default()) + cache + .do_on_block_insert( + &mut tx, + correct_id(2), + correct_id(3), + Some(2), + nfin, + &Default::default() + ) .unwrap(), 
None, ); @@ -1156,23 +1344,46 @@ mod tests { DummyStorage::new() .with_meta(Some(correct_id(2)), vec![]) .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }), - PruningStrategy::ByDepth(1024), correct_id(2) - ).unwrap(); + PruningStrategy::ByDepth(1024), + correct_id(2), + ) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert(&mut tx, correct_id(2), correct_id(3), Some(3), nfin, &Default::default()) + cache + .do_on_block_insert( + &mut tx, + correct_id(2), + correct_id(3), + Some(3), + nfin, + &Default::default() + ) .unwrap(), Some(CommitOperation::AddNewFork(Entry { valid_from: correct_id(3), value: 3 })), ); assert_eq!(*tx.inserted_entries(), vec![correct_id(3).hash].into_iter().collect()); assert!(tx.removed_entries().is_empty()); - assert_eq!(*tx.updated_meta(), Some(Metadata { finalized: Some(correct_id(2)), unfinalized: vec![correct_id(3)] })); + assert_eq!( + *tx.updated_meta(), + Some(Metadata { finalized: Some(correct_id(2)), unfinalized: vec![correct_id(3)] }) + ); // when inserting finalized entry AND there are no previous finalized entries - let cache = ListCache::new(DummyStorage::new(), PruningStrategy::ByDepth(1024), correct_id(2)).unwrap(); + let cache = + ListCache::new(DummyStorage::new(), PruningStrategy::ByDepth(1024), correct_id(2)) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert(&mut tx, correct_id(2), correct_id(3), Some(3), fin, &Default::default()) + cache + .do_on_block_insert( + &mut tx, + correct_id(2), + correct_id(3), + Some(3), + fin, + &Default::default() + ) .unwrap(), Some(CommitOperation::BlockFinalized( correct_id(3), @@ -1182,17 +1393,31 @@ mod tests { ); assert_eq!(*tx.inserted_entries(), vec![correct_id(3).hash].into_iter().collect()); assert!(tx.removed_entries().is_empty()); - assert_eq!(*tx.updated_meta(), Some(Metadata { finalized: Some(correct_id(3)), unfinalized: vec![] })); + assert_eq!( + *tx.updated_meta(), + 
Some(Metadata { finalized: Some(correct_id(3)), unfinalized: vec![] }) + ); // when inserting finalized entry AND value is the same as in previous finalized let cache = ListCache::new( DummyStorage::new() .with_meta(Some(correct_id(2)), vec![]) .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }), - PruningStrategy::ByDepth(1024), correct_id(2) - ).unwrap(); + PruningStrategy::ByDepth(1024), + correct_id(2), + ) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert(&mut tx, correct_id(2), correct_id(3), Some(2), fin, &Default::default()).unwrap(), + cache + .do_on_block_insert( + &mut tx, + correct_id(2), + correct_id(3), + Some(2), + fin, + &Default::default() + ) + .unwrap(), Some(CommitOperation::BlockFinalized(correct_id(3), None, Default::default())), ); assert!(tx.inserted_entries().is_empty()); @@ -1201,7 +1426,16 @@ mod tests { // when inserting finalized entry AND value differs from previous finalized let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert(&mut tx, correct_id(2), correct_id(3), Some(3), fin, &Default::default()).unwrap(), + cache + .do_on_block_insert( + &mut tx, + correct_id(2), + correct_id(3), + Some(3), + fin, + &Default::default() + ) + .unwrap(), Some(CommitOperation::BlockFinalized( correct_id(3), Some(Entry { valid_from: correct_id(3), value: 3 }), @@ -1210,7 +1444,10 @@ mod tests { ); assert_eq!(*tx.inserted_entries(), vec![correct_id(3).hash].into_iter().collect()); assert!(tx.removed_entries().is_empty()); - assert_eq!(*tx.updated_meta(), Some(Metadata { finalized: Some(correct_id(3)), unfinalized: vec![] })); + assert_eq!( + *tx.updated_meta(), + Some(Metadata { finalized: Some(correct_id(3)), unfinalized: vec![] }) + ); // inserting finalized entry removes abandoned fork EVEN if new entry is not inserted let cache = ListCache::new( @@ -1218,12 +1455,27 @@ mod tests { .with_meta(Some(correct_id(2)), vec![fork_id(0, 1, 3)]) 
.with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) .with_entry(fork_id(0, 1, 3), StorageEntry { prev_valid_from: None, value: 13 }), - PruningStrategy::ByDepth(1024), correct_id(2) - ).unwrap(); + PruningStrategy::ByDepth(1024), + correct_id(2), + ) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_insert(&mut tx, correct_id(2), correct_id(3), Some(2), fin, &Default::default()).unwrap(), - Some(CommitOperation::BlockFinalized(correct_id(3), None, vec![0].into_iter().collect())), + cache + .do_on_block_insert( + &mut tx, + correct_id(2), + correct_id(3), + Some(2), + fin, + &Default::default() + ) + .unwrap(), + Some(CommitOperation::BlockFinalized( + correct_id(3), + None, + vec![0].into_iter().collect() + )), ); } @@ -1234,12 +1486,19 @@ mod tests { DummyStorage::new() .with_meta(Some(correct_id(2)), vec![correct_id(5)]) .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 }), - PruningStrategy::ByDepth(1024), correct_id(2) - ).unwrap(); + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 }, + ), + PruningStrategy::ByDepth(1024), + correct_id(2), + ) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_finalize(&mut tx, correct_id(2), correct_id(3), &Default::default()).unwrap(), + cache + .do_on_block_finalize(&mut tx, correct_id(2), correct_id(3), &Default::default()) + .unwrap(), Some(CommitOperation::BlockFinalized(correct_id(3), None, Default::default())), ); assert!(tx.inserted_entries().is_empty()); @@ -1253,12 +1512,19 @@ mod tests { DummyStorage::new() .with_meta(Some(correct_id(2)), vec![correct_id(5)]) .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 }), - PruningStrategy::ByDepth(1024), 
correct_id(4) - ).unwrap(); + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 }, + ), + PruningStrategy::ByDepth(1024), + correct_id(4), + ) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_finalize(&mut tx, correct_id(4), correct_id(5), &Default::default()).unwrap(), + cache + .do_on_block_finalize(&mut tx, correct_id(4), correct_id(5), &Default::default()) + .unwrap(), Some(CommitOperation::BlockFinalized( correct_id(5), Some(Entry { valid_from: correct_id(5), value: 5 }), @@ -1267,19 +1533,30 @@ mod tests { ); assert!(tx.inserted_entries().is_empty()); assert!(tx.removed_entries().is_empty()); - assert_eq!(*tx.updated_meta(), Some(Metadata { finalized: Some(correct_id(5)), unfinalized: vec![] })); + assert_eq!( + *tx.updated_meta(), + Some(Metadata { finalized: Some(correct_id(5)), unfinalized: vec![] }) + ); // finalization removes abandoned forks let cache = ListCache::new( DummyStorage::new() .with_meta(Some(correct_id(2)), vec![fork_id(0, 1, 3)]) .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) .with_entry(fork_id(0, 1, 3), StorageEntry { prev_valid_from: None, value: 13 }), - PruningStrategy::ByDepth(1024), correct_id(2) - ).unwrap(); + PruningStrategy::ByDepth(1024), + correct_id(2), + ) + .unwrap(); let mut tx = DummyTransaction::new(); assert_eq!( - cache.do_on_block_finalize(&mut tx, correct_id(2), correct_id(3), &Default::default()).unwrap(), - Some(CommitOperation::BlockFinalized(correct_id(3), None, vec![0].into_iter().collect())), + cache + .do_on_block_finalize(&mut tx, correct_id(2), correct_id(3), &Default::default()) + .unwrap(), + Some(CommitOperation::BlockFinalized( + correct_id(3), + None, + vec![0].into_iter().collect() + )), ); } @@ -1289,34 +1566,50 @@ mod tests { DummyStorage::new() .with_meta(Some(correct_id(2)), vec![correct_id(5), correct_id(6)]) .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) - 
.with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 }) - .with_entry(correct_id(6), StorageEntry { prev_valid_from: Some(correct_id(5)), value: 6 }), - PruningStrategy::ByDepth(1024), correct_id(2) - ).unwrap(); + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 }, + ) + .with_entry( + correct_id(6), + StorageEntry { prev_valid_from: Some(correct_id(5)), value: 6 }, + ), + PruningStrategy::ByDepth(1024), + correct_id(2), + ) + .unwrap(); // when new block is appended to unfinalized fork cache.on_transaction_commit(vec![CommitOperation::AppendNewBlock(0, correct_id(6))].into()); assert_eq!(cache.unfinalized[0].best_block, Some(correct_id(6))); // when new entry is appended to unfinalized fork - cache.on_transaction_commit(vec![ - CommitOperation::AppendNewEntry(0, Entry { valid_from: correct_id(7), value: 7 }), - ].into()); + cache.on_transaction_commit( + vec![CommitOperation::AppendNewEntry(0, Entry { valid_from: correct_id(7), value: 7 })] + .into(), + ); assert_eq!(cache.unfinalized[0].best_block, Some(correct_id(7))); assert_eq!(cache.unfinalized[0].head, Entry { valid_from: correct_id(7), value: 7 }); // when new fork is added - cache.on_transaction_commit(vec![ - CommitOperation::AddNewFork(Entry { valid_from: correct_id(10), value: 10 }), - ].into()); + cache.on_transaction_commit( + vec![CommitOperation::AddNewFork(Entry { valid_from: correct_id(10), value: 10 })] + .into(), + ); assert_eq!(cache.unfinalized[2].best_block, Some(correct_id(10))); assert_eq!(cache.unfinalized[2].head, Entry { valid_from: correct_id(10), value: 10 }); // when block is finalized + entry is finalized + unfinalized forks are deleted - cache.on_transaction_commit(vec![CommitOperation::BlockFinalized( - correct_id(20), - Some(Entry { valid_from: correct_id(20), value: 20 }), - vec![0, 1, 2].into_iter().collect(), - )].into()); + cache.on_transaction_commit( + vec![CommitOperation::BlockFinalized( + 
correct_id(20), + Some(Entry { valid_from: correct_id(20), value: 20 }), + vec![0, 1, 2].into_iter().collect(), + )] + .into(), + ); assert_eq!(cache.best_finalized_block, correct_id(20)); - assert_eq!(cache.best_finalized_entry, Some(Entry { valid_from: correct_id(20), value: 20 })); + assert_eq!( + cache.best_finalized_entry, + Some(Entry { valid_from: correct_id(20), value: 20 }) + ); assert!(cache.unfinalized.is_empty()); } @@ -1324,45 +1617,88 @@ mod tests { fn list_find_unfinalized_fork_works() { // ----------> [3] // --- [2] ---------> 4 ---> [5] - assert_eq!(ListCache::new( - DummyStorage::new() - .with_meta(None, vec![fork_id(0, 1, 3), correct_id(5)]) - .with_entry(fork_id(0, 1, 3), StorageEntry { prev_valid_from: Some(correct_id(1)), value: 13 }) - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 }) - .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) - .with_header(test_header(2)) - .with_header(test_header(3)) - .with_header(test_header(4)) - .with_header(test_header(5)), - PruningStrategy::ByDepth(1024), correct_id(0) - ).unwrap().find_unfinalized_fork((&correct_id(4)).into()).unwrap().unwrap().head.valid_from, correct_id(5)); + assert_eq!( + ListCache::new( + DummyStorage::new() + .with_meta(None, vec![fork_id(0, 1, 3), correct_id(5)]) + .with_entry( + fork_id(0, 1, 3), + StorageEntry { prev_valid_from: Some(correct_id(1)), value: 13 } + ) + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 } + ) + .with_entry(correct_id(2), StorageEntry { prev_valid_from: None, value: 2 }) + .with_header(test_header(2)) + .with_header(test_header(3)) + .with_header(test_header(4)) + .with_header(test_header(5)), + PruningStrategy::ByDepth(1024), + correct_id(0) + ) + .unwrap() + .find_unfinalized_fork((&correct_id(4)).into()) + .unwrap() + .unwrap() + .head + .valid_from, + correct_id(5) + ); // --- [2] ---------------> [5] // ----------> [3] ---> 4 - 
assert_eq!(ListCache::new( - DummyStorage::new() - .with_meta(None, vec![correct_id(5), fork_id(0, 1, 3)]) - .with_entry(fork_id(0, 1, 3), StorageEntry { prev_valid_from: Some(correct_id(1)), value: 13 }) - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 }) - .with_entry(correct_id(2), StorageEntry { prev_valid_from: Some(correct_id(1)), value: 2 }) - .with_header(test_header(2)) - .with_header(test_header(3)) - .with_header(test_header(4)) - .with_header(test_header(5)) - .with_header(fork_header(0, 1, 2)) - .with_header(fork_header(0, 1, 3)) - .with_header(fork_header(0, 1, 4)), - PruningStrategy::ByDepth(1024), correct_id(0) - ).unwrap() - .find_unfinalized_fork((&fork_id(0, 1, 4)).into()).unwrap().unwrap().head.valid_from, fork_id(0, 1, 3)); + assert_eq!( + ListCache::new( + DummyStorage::new() + .with_meta(None, vec![correct_id(5), fork_id(0, 1, 3)]) + .with_entry( + fork_id(0, 1, 3), + StorageEntry { prev_valid_from: Some(correct_id(1)), value: 13 } + ) + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 } + ) + .with_entry( + correct_id(2), + StorageEntry { prev_valid_from: Some(correct_id(1)), value: 2 } + ) + .with_header(test_header(2)) + .with_header(test_header(3)) + .with_header(test_header(4)) + .with_header(test_header(5)) + .with_header(fork_header(0, 1, 2)) + .with_header(fork_header(0, 1, 3)) + .with_header(fork_header(0, 1, 4)), + PruningStrategy::ByDepth(1024), + correct_id(0) + ) + .unwrap() + .find_unfinalized_fork((&fork_id(0, 1, 4)).into()) + .unwrap() + .unwrap() + .head + .valid_from, + fork_id(0, 1, 3) + ); // --- [2] ---------------> [5] // ----------> [3] // -----------------> 4 assert!(ListCache::new( DummyStorage::new() .with_meta(None, vec![correct_id(5), fork_id(0, 1, 3)]) - .with_entry(fork_id(0, 1, 3), StorageEntry { prev_valid_from: Some(correct_id(1)), value: 13 }) - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(2)), 
value: 5 }) - .with_entry(correct_id(2), StorageEntry { prev_valid_from: Some(correct_id(1)), value: 2 }) + .with_entry( + fork_id(0, 1, 3), + StorageEntry { prev_valid_from: Some(correct_id(1)), value: 13 } + ) + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(2)), value: 5 } + ) + .with_entry( + correct_id(2), + StorageEntry { prev_valid_from: Some(correct_id(1)), value: 2 } + ) .with_header(test_header(2)) .with_header(test_header(3)) .with_header(test_header(4)) @@ -1372,89 +1708,167 @@ mod tests { .with_header(fork_header(1, 1, 2)) .with_header(fork_header(1, 1, 3)) .with_header(fork_header(1, 1, 4)), - PruningStrategy::ByDepth(1024), correct_id(0) - ).unwrap().find_unfinalized_fork((&fork_id(1, 1, 4)).into()).unwrap().is_none()); + PruningStrategy::ByDepth(1024), + correct_id(0) + ) + .unwrap() + .find_unfinalized_fork((&fork_id(1, 1, 4)).into()) + .unwrap() + .is_none()); } #[test] fn fork_matches_works() { // when block is not within list range let storage = DummyStorage::new() - .with_entry(test_id(100), StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }) + .with_entry( + test_id(100), + StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }, + ) .with_entry(test_id(50), StorageEntry { prev_valid_from: None, value: 50 }); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } - .matches(&storage, (&test_id(20)).into()).unwrap(), false); + assert_eq!( + Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } + .matches(&storage, (&test_id(20)).into()) + .unwrap(), + false + ); // when block is not connected to the begin block let storage = DummyStorage::new() - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }) + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }, + ) .with_entry(correct_id(3), StorageEntry { prev_valid_from: None, value: 
200 }) .with_header(test_header(5)) .with_header(test_header(4)) .with_header(test_header(3)) .with_header(fork_header(0, 2, 4)) .with_header(fork_header(0, 2, 3)); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: correct_id(5), value: 100 } } - .matches(&storage, (&fork_id(0, 2, 4)).into()).unwrap(), false); + assert_eq!( + Fork::<_, u64> { + best_block: None, + head: Entry { valid_from: correct_id(5), value: 100 } + } + .matches(&storage, (&fork_id(0, 2, 4)).into()) + .unwrap(), + false + ); // when block is not connected to the end block let storage = DummyStorage::new() - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }) + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }, + ) .with_entry(correct_id(3), StorageEntry { prev_valid_from: None, value: 200 }) .with_header(test_header(5)) .with_header(test_header(4)) .with_header(test_header(3)) .with_header(fork_header(0, 3, 4)); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: correct_id(5), value: 100 } } - .matches(&storage, (&fork_id(0, 3, 4)).into()).unwrap(), false); + assert_eq!( + Fork::<_, u64> { + best_block: None, + head: Entry { valid_from: correct_id(5), value: 100 } + } + .matches(&storage, (&fork_id(0, 3, 4)).into()) + .unwrap(), + false + ); // when block is connected to the begin block AND end is open let storage = DummyStorage::new() .with_entry(correct_id(5), StorageEntry { prev_valid_from: None, value: 100 }) .with_header(test_header(5)) .with_header(test_header(6)); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: correct_id(5), value: 100 } } - .matches(&storage, (&correct_id(6)).into()).unwrap(), true); + assert_eq!( + Fork::<_, u64> { + best_block: None, + head: Entry { valid_from: correct_id(5), value: 100 } + } + .matches(&storage, (&correct_id(6)).into()) + .unwrap(), + true + ); // when block is connected to the begin block 
AND to the end block let storage = DummyStorage::new() - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }) + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }, + ) .with_entry(correct_id(3), StorageEntry { prev_valid_from: None, value: 200 }) .with_header(test_header(5)) .with_header(test_header(4)) .with_header(test_header(3)); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: correct_id(5), value: 100 } } - .matches(&storage, (&correct_id(4)).into()).unwrap(), true); + assert_eq!( + Fork::<_, u64> { + best_block: None, + head: Entry { valid_from: correct_id(5), value: 100 } + } + .matches(&storage, (&correct_id(4)).into()) + .unwrap(), + true + ); } #[test] fn fork_try_append_works() { // when best block is unknown - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } - .try_append(&test_id(100)), false); + assert_eq!( + Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } + .try_append(&test_id(100)), + false + ); // when best block is known but different - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } - .try_append(&test_id(101)), false); + assert_eq!( + Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } + .try_append(&test_id(101)), + false + ); // when best block is known and the same - assert_eq!(Fork::<_, u64> { best_block: Some(test_id(100)), head: Entry { valid_from: test_id(100), value: 0 } } - .try_append(&test_id(100)), true); + assert_eq!( + Fork::<_, u64> { + best_block: Some(test_id(100)), + head: Entry { valid_from: test_id(100), value: 0 } + } + .try_append(&test_id(100)), + true + ); } #[test] fn fork_try_append_or_fork_works() { // when there's no entry before parent let storage = DummyStorage::new() - .with_entry(test_id(100), StorageEntry { prev_valid_from: 
Some(test_id(50)), value: 100 }) + .with_entry( + test_id(100), + StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }, + ) .with_entry(test_id(50), StorageEntry { prev_valid_from: None, value: 50 }); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } - .try_append_or_fork(&storage, &test_id(30), None).unwrap(), None); + assert_eq!( + Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } + .try_append_or_fork(&storage, &test_id(30), None) + .unwrap(), + None + ); // when parent does not belong to the fork let storage = DummyStorage::new() - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }) + .with_entry( + correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }, + ) .with_entry(correct_id(3), StorageEntry { prev_valid_from: None, value: 200 }) .with_header(test_header(5)) .with_header(test_header(4)) .with_header(test_header(3)) .with_header(fork_header(0, 2, 4)) .with_header(fork_header(0, 2, 3)); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: correct_id(5), value: 100 } } - .try_append_or_fork(&storage, &fork_id(0, 2, 4), None).unwrap(), None); + assert_eq!( + Fork::<_, u64> { + best_block: None, + head: Entry { valid_from: correct_id(5), value: 100 } + } + .try_append_or_fork(&storage, &fork_id(0, 2, 4), None) + .unwrap(), + None + ); // when the entry before parent is the head entry let storage = DummyStorage::new() .with_entry( @@ -1463,30 +1877,57 @@ mod tests { ) .with_header(test_header(6)) .with_header(test_header(5)); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: correct_id(5), value: 100 } } - .try_append_or_fork(&storage, &correct_id(6), None).unwrap(), Some(ForkAppendResult::Append)); + assert_eq!( + Fork::<_, u64> { + best_block: None, + head: Entry { valid_from: correct_id(5), value: 100 } + } + .try_append_or_fork(&storage, 
&correct_id(6), None) + .unwrap(), + Some(ForkAppendResult::Append) + ); // when the parent located after last finalized entry let storage = DummyStorage::new() - .with_entry(correct_id(6), StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }) + .with_entry( + correct_id(6), + StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }, + ) .with_entry(correct_id(3), StorageEntry { prev_valid_from: None, value: 200 }) .with_header(test_header(6)) .with_header(test_header(5)) .with_header(test_header(4)) .with_header(test_header(3)) .with_header(fork_header(0, 4, 5)); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: correct_id(6), value: 100 } } - .try_append_or_fork(&storage, &fork_id(0, 4, 5), None).unwrap(), Some(ForkAppendResult::Fork(ComplexBlockId::new(test_header(3).hash(), 3)))); + assert_eq!( + Fork::<_, u64> { + best_block: None, + head: Entry { valid_from: correct_id(6), value: 100 } + } + .try_append_or_fork(&storage, &fork_id(0, 4, 5), None) + .unwrap(), + Some(ForkAppendResult::Fork(ComplexBlockId::new(test_header(3).hash(), 3))) + ); // when the parent located before last finalized entry let storage = DummyStorage::new() - .with_entry(correct_id(6), StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }) + .with_entry( + correct_id(6), + StorageEntry { prev_valid_from: Some(correct_id(3)), value: 100 }, + ) .with_entry(correct_id(3), StorageEntry { prev_valid_from: None, value: 200 }) .with_header(test_header(6)) .with_header(test_header(5)) .with_header(test_header(4)) .with_header(test_header(3)) .with_header(fork_header(0, 4, 5)); - assert_eq!(Fork::<_, u64> { best_block: None, head: Entry { valid_from: correct_id(6), value: 100 } } - .try_append_or_fork(&storage, &fork_id(0, 4, 5), Some(3)).unwrap(), None); + assert_eq!( + Fork::<_, u64> { + best_block: None, + head: Entry { valid_from: correct_id(6), value: 100 } + } + .try_append_or_fork(&storage, &fork_id(0, 4, 5), Some(3)) + .unwrap(), + 
None + ); } #[test] @@ -1495,12 +1936,16 @@ mod tests { let storage = DummyStorage::new().with_id(100, H256::from_low_u64_be(100)); let mut tx = DummyTransaction::new(); Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } - .destroy(&storage, &mut tx, Some(200)).unwrap(); + .destroy(&storage, &mut tx, Some(200)) + .unwrap(); assert!(tx.removed_entries().is_empty()); // when we reach finalized entry with iterations let storage = DummyStorage::new() .with_id(10, H256::from_low_u64_be(10)) - .with_entry(test_id(100), StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }) + .with_entry( + test_id(100), + StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }, + ) .with_entry(test_id(50), StorageEntry { prev_valid_from: Some(test_id(20)), value: 50 }) .with_entry(test_id(20), StorageEntry { prev_valid_from: Some(test_id(10)), value: 20 }) .with_entry(test_id(10), StorageEntry { prev_valid_from: Some(test_id(5)), value: 10 }) @@ -1508,120 +1953,192 @@ mod tests { .with_entry(test_id(3), StorageEntry { prev_valid_from: None, value: 0 }); let mut tx = DummyTransaction::new(); Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } - .destroy(&storage, &mut tx, Some(200)).unwrap(); - assert_eq!(*tx.removed_entries(), - vec![test_id(100).hash, test_id(50).hash, test_id(20).hash].into_iter().collect()); + .destroy(&storage, &mut tx, Some(200)) + .unwrap(); + assert_eq!( + *tx.removed_entries(), + vec![test_id(100).hash, test_id(50).hash, test_id(20).hash] + .into_iter() + .collect() + ); // when we reach beginning of fork before finalized block let storage = DummyStorage::new() .with_id(10, H256::from_low_u64_be(10)) - .with_entry(test_id(100), StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }) + .with_entry( + test_id(100), + StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }, + ) .with_entry(test_id(50), StorageEntry { prev_valid_from: None, value: 50 }); let 
mut tx = DummyTransaction::new(); Fork::<_, u64> { best_block: None, head: Entry { valid_from: test_id(100), value: 0 } } - .destroy(&storage, &mut tx, Some(200)).unwrap(); - assert_eq!(*tx.removed_entries(), - vec![test_id(100).hash, test_id(50).hash].into_iter().collect()); + .destroy(&storage, &mut tx, Some(200)) + .unwrap(); + assert_eq!( + *tx.removed_entries(), + vec![test_id(100).hash, test_id(50).hash].into_iter().collect() + ); } #[test] fn is_connected_to_block_fails() { // when storage returns error - assert!( - chain::is_connected_to_block::<_, u64, _>( - &FaultyStorage, - (&test_id(1)).into(), - &test_id(100), - ).is_err(), - ); + assert!(chain::is_connected_to_block::<_, u64, _>( + &FaultyStorage, + (&test_id(1)).into(), + &test_id(100), + ) + .is_err(),); // when there's no header in the storage - assert!( - chain::is_connected_to_block::<_, u64, _>( - &DummyStorage::new(), - (&test_id(1)).into(), - &test_id(100), - ).is_err(), - ); + assert!(chain::is_connected_to_block::<_, u64, _>( + &DummyStorage::new(), + (&test_id(1)).into(), + &test_id(100), + ) + .is_err(),); } #[test] fn is_connected_to_block_works() { // when without iterations we end up with different block - assert_eq!(chain::is_connected_to_block::<_, u64, _>(&DummyStorage::new() - .with_header(test_header(1)), - (&test_id(1)).into(), &correct_id(1)).unwrap(), false); + assert_eq!( + chain::is_connected_to_block::<_, u64, _>( + &DummyStorage::new().with_header(test_header(1)), + (&test_id(1)).into(), + &correct_id(1) + ) + .unwrap(), + false + ); // when with ASC iterations we end up with different block - assert_eq!(chain::is_connected_to_block::<_, u64, _>(&DummyStorage::new() - .with_header(test_header(0)) - .with_header(test_header(1)) - .with_header(test_header(2)), - (&test_id(0)).into(), &correct_id(2)).unwrap(), false); + assert_eq!( + chain::is_connected_to_block::<_, u64, _>( + &DummyStorage::new() + .with_header(test_header(0)) + .with_header(test_header(1)) + 
.with_header(test_header(2)), + (&test_id(0)).into(), + &correct_id(2) + ) + .unwrap(), + false + ); // when with DESC iterations we end up with different block - assert_eq!(chain::is_connected_to_block::<_, u64, _>(&DummyStorage::new() - .with_header(test_header(0)) - .with_header(test_header(1)) - .with_header(test_header(2)), - (&correct_id(2)).into(), &test_id(0)).unwrap(), false); + assert_eq!( + chain::is_connected_to_block::<_, u64, _>( + &DummyStorage::new() + .with_header(test_header(0)) + .with_header(test_header(1)) + .with_header(test_header(2)), + (&correct_id(2)).into(), + &test_id(0) + ) + .unwrap(), + false + ); // when without iterations we end up with the same block - assert_eq!(chain::is_connected_to_block::<_, u64, _>(&DummyStorage::new() - .with_header(test_header(1)), - (&correct_id(1)).into(), &correct_id(1)).unwrap(), true); + assert_eq!( + chain::is_connected_to_block::<_, u64, _>( + &DummyStorage::new().with_header(test_header(1)), + (&correct_id(1)).into(), + &correct_id(1) + ) + .unwrap(), + true + ); // when with ASC iterations we end up with the same block - assert_eq!(chain::is_connected_to_block::<_, u64, _>(&DummyStorage::new() - .with_header(test_header(0)) - .with_header(test_header(1)) - .with_header(test_header(2)), - (&correct_id(0)).into(), &correct_id(2)).unwrap(), true); + assert_eq!( + chain::is_connected_to_block::<_, u64, _>( + &DummyStorage::new() + .with_header(test_header(0)) + .with_header(test_header(1)) + .with_header(test_header(2)), + (&correct_id(0)).into(), + &correct_id(2) + ) + .unwrap(), + true + ); // when with DESC iterations we end up with the same block - assert_eq!(chain::is_connected_to_block::<_, u64, _>(&DummyStorage::new() - .with_header(test_header(0)) - .with_header(test_header(1)) - .with_header(test_header(2)), - (&correct_id(2)).into(), &correct_id(0)).unwrap(), true); + assert_eq!( + chain::is_connected_to_block::<_, u64, _>( + &DummyStorage::new() + .with_header(test_header(0)) + 
.with_header(test_header(1)) + .with_header(test_header(2)), + (&correct_id(2)).into(), + &correct_id(0) + ) + .unwrap(), + true + ); } #[test] fn is_finalized_block_fails() { // when storage returns error assert!(chain::is_finalized_block::<_, u64, _>(&FaultyStorage, &test_id(1), 100).is_err()); - } #[test] fn is_finalized_block_works() { // when number of block is larger than last finalized block - assert_eq!(chain::is_finalized_block::<_, u64, _>(&DummyStorage::new(), &test_id(100), 1).unwrap(), false); + assert_eq!( + chain::is_finalized_block::<_, u64, _>(&DummyStorage::new(), &test_id(100), 1).unwrap(), + false + ); // when there's no hash for this block number in the database - assert_eq!(chain::is_finalized_block::<_, u64, _>(&DummyStorage::new(), &test_id(1), 100).unwrap(), false); + assert_eq!( + chain::is_finalized_block::<_, u64, _>(&DummyStorage::new(), &test_id(1), 100).unwrap(), + false + ); // when there's different hash for this block number in the database - assert_eq!(chain::is_finalized_block::<_, u64, _>(&DummyStorage::new() - .with_id(1, H256::from_low_u64_be(2)), &test_id(1), 100).unwrap(), false); + assert_eq!( + chain::is_finalized_block::<_, u64, _>( + &DummyStorage::new().with_id(1, H256::from_low_u64_be(2)), + &test_id(1), + 100 + ) + .unwrap(), + false + ); // when there's the same hash for this block number in the database - assert_eq!(chain::is_finalized_block::<_, u64, _>(&DummyStorage::new() - .with_id(1, H256::from_low_u64_be(1)), &test_id(1), 100).unwrap(), true); + assert_eq!( + chain::is_finalized_block::<_, u64, _>( + &DummyStorage::new().with_id(1, H256::from_low_u64_be(1)), + &test_id(1), + 100 + ) + .unwrap(), + true + ); } #[test] fn read_forks_fails() { // when storage returns error during finalized entry read - assert!(read_forks::(&FaultyStorage, Metadata { - finalized: Some(test_id(1)), - unfinalized: vec![], - }).is_err()); + assert!(read_forks::( + &FaultyStorage, + Metadata { finalized: Some(test_id(1)), unfinalized: 
vec![] } + ) + .is_err()); // when storage returns error during unfinalized entry read - assert!(read_forks::(&FaultyStorage, Metadata { - finalized: None, - unfinalized: vec![test_id(1)], - }).is_err()); + assert!(read_forks::( + &FaultyStorage, + Metadata { finalized: None, unfinalized: vec![test_id(1)] } + ) + .is_err()); // when finalized entry is not found - assert!(read_forks::(&DummyStorage::new(), Metadata { - finalized: Some(test_id(1)), - unfinalized: vec![], - }).is_err()); + assert!(read_forks::( + &DummyStorage::new(), + Metadata { finalized: Some(test_id(1)), unfinalized: vec![] } + ) + .is_err()); // when unfinalized entry is not found - assert!(read_forks::(&DummyStorage::new(), Metadata { - finalized: None, - unfinalized: vec![test_id(1)], - }).is_err()); + assert!(read_forks::( + &DummyStorage::new(), + Metadata { finalized: None, unfinalized: vec![test_id(1)] } + ) + .is_err()); } #[test] @@ -1638,23 +2155,40 @@ mod tests { ], ); - assert_eq!(expected, read_forks(&storage, Metadata { - finalized: Some(test_id(10)), - unfinalized: vec![test_id(20), test_id(30)], - }).unwrap()); + assert_eq!( + expected, + read_forks( + &storage, + Metadata { + finalized: Some(test_id(10)), + unfinalized: vec![test_id(20), test_id(30)], + } + ) + .unwrap() + ); } #[test] fn ancient_entries_are_pruned_when_pruning_enabled() { fn do_test(strategy: PruningStrategy) { - let cache = ListCache::new(DummyStorage::new() - .with_id(10, H256::from_low_u64_be(10)) - .with_id(20, H256::from_low_u64_be(20)) - .with_id(30, H256::from_low_u64_be(30)) - .with_entry(test_id(10), StorageEntry { prev_valid_from: None, value: 10 }) - .with_entry(test_id(20), StorageEntry { prev_valid_from: Some(test_id(10)), value: 20 }) - .with_entry(test_id(30), StorageEntry { prev_valid_from: Some(test_id(20)), value: 30 }), - strategy, test_id(9)).unwrap(); + let cache = ListCache::new( + DummyStorage::new() + .with_id(10, H256::from_low_u64_be(10)) + .with_id(20, H256::from_low_u64_be(20)) + 
.with_id(30, H256::from_low_u64_be(30)) + .with_entry(test_id(10), StorageEntry { prev_valid_from: None, value: 10 }) + .with_entry( + test_id(20), + StorageEntry { prev_valid_from: Some(test_id(10)), value: 20 }, + ) + .with_entry( + test_id(30), + StorageEntry { prev_valid_from: Some(test_id(20)), value: 30 }, + ), + strategy, + test_id(9), + ) + .unwrap(); let mut tx = DummyTransaction::new(); // when finalizing entry #10: no entries pruned @@ -1678,7 +2212,10 @@ mod tests { }, PruningStrategy::ByDepth(_) => { assert_eq!(*tx.removed_entries(), vec![test_id(10).hash].into_iter().collect()); - assert_eq!(*tx.inserted_entries(), vec![test_id(20).hash].into_iter().collect()); + assert_eq!( + *tx.inserted_entries(), + vec![test_id(20).hash].into_iter().collect() + ); }, } } @@ -1696,15 +2233,36 @@ mod tests { // -> (3') -> 4' -> 5' let mut cache = ListCache::new( DummyStorage::new() - .with_meta(Some(correct_id(1)), vec![correct_id(5), fork_id(1, 2, 5), fork_id(2, 4, 5)]) + .with_meta( + Some(correct_id(1)), + vec![correct_id(5), fork_id(1, 2, 5), fork_id(2, 4, 5)], + ) .with_id(1, correct_id(1).hash) .with_entry(correct_id(1), StorageEntry { prev_valid_from: None, value: 1 }) - .with_entry(correct_id(3), StorageEntry { prev_valid_from: Some(correct_id(1)), value: 3 }) - .with_entry(correct_id(4), StorageEntry { prev_valid_from: Some(correct_id(3)), value: 4 }) - .with_entry(correct_id(5), StorageEntry { prev_valid_from: Some(correct_id(4)), value: 5 }) - .with_entry(fork_id(1, 2, 4), StorageEntry { prev_valid_from: Some(correct_id(1)), value: 14 }) - .with_entry(fork_id(1, 2, 5), StorageEntry { prev_valid_from: Some(fork_id(1, 2, 4)), value: 15 }) - .with_entry(fork_id(2, 4, 5), StorageEntry { prev_valid_from: Some(correct_id(4)), value: 25 }) + .with_entry( + correct_id(3), + StorageEntry { prev_valid_from: Some(correct_id(1)), value: 3 }, + ) + .with_entry( + correct_id(4), + StorageEntry { prev_valid_from: Some(correct_id(3)), value: 4 }, + ) + .with_entry( + 
correct_id(5), + StorageEntry { prev_valid_from: Some(correct_id(4)), value: 5 }, + ) + .with_entry( + fork_id(1, 2, 4), + StorageEntry { prev_valid_from: Some(correct_id(1)), value: 14 }, + ) + .with_entry( + fork_id(1, 2, 5), + StorageEntry { prev_valid_from: Some(fork_id(1, 2, 4)), value: 15 }, + ) + .with_entry( + fork_id(2, 4, 5), + StorageEntry { prev_valid_from: Some(correct_id(4)), value: 25 }, + ) .with_header(test_header(1)) .with_header(test_header(2)) .with_header(test_header(3)) @@ -1714,29 +2272,40 @@ mod tests { .with_header(fork_header(1, 2, 4)) .with_header(fork_header(1, 2, 5)) .with_header(fork_header(2, 4, 5)), - PruningStrategy::ByDepth(1024), correct_id(1) - ).unwrap(); + PruningStrategy::ByDepth(1024), + correct_id(1), + ) + .unwrap(); // when 5 is reverted: entry 5 is truncated let op = cache.do_on_block_revert(&mut DummyTransaction::new(), &correct_id(5)).unwrap(); - assert_eq!(op, CommitOperation::BlockReverted(vec![ - (0, Some(Fork { best_block: None, head: Entry { valid_from: correct_id(4), value: 4 } })), - ].into_iter().collect())); + assert_eq!( + op, + CommitOperation::BlockReverted( + vec![( + 0, + Some(Fork { + best_block: None, + head: Entry { valid_from: correct_id(4), value: 4 } + }) + ),] + .into_iter() + .collect() + ) + ); cache.on_transaction_commit(vec![op].into()); // when 3 is reverted: entries 4+5' are truncated let op = cache.do_on_block_revert(&mut DummyTransaction::new(), &correct_id(3)).unwrap(); - assert_eq!(op, CommitOperation::BlockReverted(vec![ - (0, None), - (2, None), - ].into_iter().collect())); + assert_eq!( + op, + CommitOperation::BlockReverted(vec![(0, None), (2, None),].into_iter().collect()) + ); cache.on_transaction_commit(vec![op].into()); // when 2 is reverted: entries 4'+5' are truncated let op = cache.do_on_block_revert(&mut DummyTransaction::new(), &correct_id(2)).unwrap(); - assert_eq!(op, CommitOperation::BlockReverted(vec![ - (0, None), - ].into_iter().collect())); + assert_eq!(op, 
CommitOperation::BlockReverted(vec![(0, None),].into_iter().collect())); cache.on_transaction_commit(vec![op].into()); } diff --git a/client/db/src/cache/list_entry.rs b/client/db/src/cache/list_entry.rs index 94d4eb9f49b27..7cee7a5146260 100644 --- a/client/db/src/cache/list_entry.rs +++ b/client/db/src/cache/list_entry.rs @@ -18,12 +18,11 @@ //! List-cache storage entries. +use codec::{Decode, Encode}; use sp_blockchain::Result as ClientResult; use sp_runtime::traits::{Block as BlockT, NumberFor}; -use codec::{Encode, Decode}; -use crate::cache::{CacheItemT, ComplexBlockId}; -use crate::cache::list_storage::{Storage}; +use crate::cache::{list_storage::Storage, CacheItemT, ComplexBlockId}; /// Single list-based cache entry. #[derive(Debug)] @@ -52,10 +51,8 @@ impl Entry { match value { Some(value) => match self.value == value { true => None, - false => Some(StorageEntry { - prev_valid_from: Some(self.valid_from.clone()), - value, - }), + false => + Some(StorageEntry { prev_valid_from: Some(self.valid_from.clone()), value }), }, None => None, } @@ -67,7 +64,8 @@ impl Entry { storage: &S, block: NumberFor, ) -> ClientResult, Option>)>> { - Ok(self.search_best_before(storage, block)? + Ok(self + .search_best_before(storage, block)? .map(|(entry, next)| (entry.valid_from, next))) } @@ -86,14 +84,14 @@ impl Entry { let mut current = self.valid_from.clone(); if block >= self.valid_from.number { let value = self.value.clone(); - return Ok(Some((Entry { valid_from: current, value }, next))); + return Ok(Some((Entry { valid_from: current, value }, next))) } // else - travel back in time loop { let entry = storage.require_entry(¤t)?; if block >= current.number { - return Ok(Some((Entry { valid_from: current, value: entry.value }, next))); + return Ok(Some((Entry { valid_from: current, value: entry.value }, next))) } next = Some(current); @@ -108,18 +106,15 @@ impl Entry { impl StorageEntry { /// Converts storage entry into an entry, valid from given block. 
pub fn into_entry(self, valid_from: ComplexBlockId) -> Entry { - Entry { - valid_from, - value: self.value, - } + Entry { valid_from, value: self.value } } } #[cfg(test)] mod tests { - use crate::cache::list_storage::tests::{DummyStorage, FaultyStorage}; - use substrate_test_runtime_client::runtime::{H256, Block}; use super::*; + use crate::cache::list_storage::tests::{DummyStorage, FaultyStorage}; + use substrate_test_runtime_client::runtime::{Block, H256}; fn test_id(number: u64) -> ComplexBlockId { ComplexBlockId::new(H256::from_low_u64_be(number), number) @@ -132,36 +127,61 @@ mod tests { // when trying to update with the same Some value assert_eq!(Entry { valid_from: test_id(1), value: 1 }.try_update(Some(1)), None); // when trying to update with different Some value - assert_eq!(Entry { valid_from: test_id(1), value: 1 }.try_update(Some(2)), - Some(StorageEntry { prev_valid_from: Some(test_id(1)), value: 2 })); + assert_eq!( + Entry { valid_from: test_id(1), value: 1 }.try_update(Some(2)), + Some(StorageEntry { prev_valid_from: Some(test_id(1)), value: 2 }) + ); } #[test] fn entry_search_best_before_fails() { // when storage returns error assert!(Entry::<_, u64> { valid_from: test_id(100), value: 42 } - .search_best_before(&FaultyStorage, 50).is_err()); + .search_best_before(&FaultyStorage, 50) + .is_err()); } #[test] fn entry_search_best_before_works() { // when block is better than our best block - assert_eq!(Entry::<_, u64> { valid_from: test_id(100), value: 100 } - .search_best_before(&DummyStorage::new(), 150).unwrap(), - Some((Entry::<_, u64> { valid_from: test_id(100), value: 100 }, None))); + assert_eq!( + Entry::<_, u64> { valid_from: test_id(100), value: 100 } + .search_best_before(&DummyStorage::new(), 150) + .unwrap(), + Some((Entry::<_, u64> { valid_from: test_id(100), value: 100 }, None)) + ); // when block is found between two entries - assert_eq!(Entry::<_, u64> { valid_from: test_id(100), value: 100 } - 
.search_best_before(&DummyStorage::new() - .with_entry(test_id(100), StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }) - .with_entry(test_id(50), StorageEntry { prev_valid_from: Some(test_id(30)), value: 50 }), - 75).unwrap(), - Some((Entry::<_, u64> { valid_from: test_id(50), value: 50 }, Some(test_id(100))))); + assert_eq!( + Entry::<_, u64> { valid_from: test_id(100), value: 100 } + .search_best_before( + &DummyStorage::new() + .with_entry( + test_id(100), + StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 } + ) + .with_entry( + test_id(50), + StorageEntry { prev_valid_from: Some(test_id(30)), value: 50 } + ), + 75 + ) + .unwrap(), + Some((Entry::<_, u64> { valid_from: test_id(50), value: 50 }, Some(test_id(100)))) + ); // when block is not found - assert_eq!(Entry::<_, u64> { valid_from: test_id(100), value: 100 } - .search_best_before(&DummyStorage::new() - .with_entry(test_id(100), StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 }) - .with_entry(test_id(50), StorageEntry { prev_valid_from: None, value: 50 }), - 30).unwrap(), - None); + assert_eq!( + Entry::<_, u64> { valid_from: test_id(100), value: 100 } + .search_best_before( + &DummyStorage::new() + .with_entry( + test_id(100), + StorageEntry { prev_valid_from: Some(test_id(50)), value: 100 } + ) + .with_entry(test_id(50), StorageEntry { prev_valid_from: None, value: 50 }), + 30 + ) + .unwrap(), + None + ); } } diff --git a/client/db/src/cache/list_storage.rs b/client/db/src/cache/list_storage.rs index e4b3677b4ab31..bb47b8dab5a7f 100644 --- a/client/db/src/cache/list_storage.rs +++ b/client/db/src/cache/list_storage.rs @@ -20,17 +20,23 @@ use std::sync::Arc; +use crate::utils::{self, meta_keys}; +use codec::{Decode, Encode}; use sp_blockchain::{Error as ClientError, Result as ClientResult}; -use codec::{Encode, Decode}; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor}; use sp_database::{Database, 
Transaction}; -use crate::utils::{self, meta_keys}; - -use crate::cache::{CacheItemT, ComplexBlockId}; -use crate::cache::list_cache::{CommitOperation, Fork}; -use crate::cache::list_entry::{Entry, StorageEntry}; -use crate::DbHash; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT, NumberFor}, +}; + +use crate::{ + cache::{ + list_cache::{CommitOperation, Fork}, + list_entry::{Entry, StorageEntry}, + CacheItemT, ComplexBlockId, + }, + DbHash, +}; /// Single list-cache metadata. #[derive(Debug)] @@ -54,14 +60,21 @@ pub trait Storage { fn read_meta(&self) -> ClientResult>; /// Reads cache entry from the storage. - fn read_entry(&self, at: &ComplexBlockId) -> ClientResult>>; + fn read_entry( + &self, + at: &ComplexBlockId, + ) -> ClientResult>>; /// Reads referenced (and thus existing) cache entry from the storage. fn require_entry(&self, at: &ComplexBlockId) -> ClientResult> { - self.read_entry(at) - .and_then(|entry| entry - .ok_or_else(|| ClientError::from( - ClientError::Backend(format!("Referenced cache entry at {:?} is not found", at))))) + self.read_entry(at).and_then(|entry| { + entry.ok_or_else(|| { + ClientError::from(ClientError::Backend(format!( + "Referenced cache entry at {:?} is not found", + at + ))) + }) + }) } } @@ -111,10 +124,14 @@ impl DbStorage { } /// Get reference to the database. - pub fn db(&self) -> &Arc> { &self.db } + pub fn db(&self) -> &Arc> { + &self.db + } /// Get reference to the database columns. - pub fn columns(&self) -> &DbColumns { &self.columns } + pub fn columns(&self) -> &DbColumns { + &self.columns + } /// Encode block id for storing as a key in cache column. 
/// We append prefix to the actual encoding to allow several caches @@ -128,25 +145,35 @@ impl DbStorage { impl Storage for DbStorage { fn read_id(&self, at: NumberFor) -> ClientResult> { - utils::read_header::(&*self.db, self.columns.key_lookup, self.columns.header, BlockId::Number(at)) - .map(|maybe_header| maybe_header.map(|header| header.hash())) + utils::read_header::( + &*self.db, + self.columns.key_lookup, + self.columns.header, + BlockId::Number(at), + ) + .map(|maybe_header| maybe_header.map(|header| header.hash())) } fn read_header(&self, at: &Block::Hash) -> ClientResult> { - utils::read_header::(&*self.db, self.columns.key_lookup, self.columns.header, BlockId::Hash(*at)) + utils::read_header::( + &*self.db, + self.columns.key_lookup, + self.columns.header, + BlockId::Hash(*at), + ) } fn read_meta(&self) -> ClientResult> { match self.db.get(self.columns.meta, &self.meta_key) { Some(meta) => meta::decode(&*meta), - None => Ok(Metadata { - finalized: None, - unfinalized: Vec::new(), - }) + None => Ok(Metadata { finalized: None, unfinalized: Vec::new() }), } } - fn read_entry(&self, at: &ComplexBlockId) -> ClientResult>> { + fn read_entry( + &self, + at: &ComplexBlockId, + ) -> ClientResult>> { match self.db.get(self.columns.cache, &self.encode_block_id(at)) { Some(entry) => StorageEntry::::decode(&mut &entry[..]) .map_err(|_| ClientError::Backend("Failed to decode cache entry".into())) @@ -171,7 +198,11 @@ impl<'a> DbStorageTransaction<'a> { impl<'a, Block: BlockT, T: CacheItemT> StorageTransaction for DbStorageTransaction<'a> { fn insert_storage_entry(&mut self, at: &ComplexBlockId, entry: &StorageEntry) { - self.tx.set_from_vec(self.storage.columns.cache, &self.storage.encode_block_id(at), entry.encode()); + self.tx.set_from_vec( + self.storage.columns.cache, + &self.storage.encode_block_id(at), + entry.encode(), + ); } fn remove_storage_entry(&mut self, at: &ComplexBlockId) { @@ -187,7 +218,8 @@ impl<'a, Block: BlockT, T: CacheItemT> StorageTransaction 
for DbStorag self.tx.set_from_vec( self.storage.columns.meta, &self.storage.meta_key, - meta::encode(best_finalized_entry, unfinalized, operation)); + meta::encode(best_finalized_entry, unfinalized, operation), + ); } } @@ -206,10 +238,11 @@ mod meta { pub fn encode( best_finalized_entry: Option<&Entry>, unfinalized: &[Fork], - op: &CommitOperation + op: &CommitOperation, ) -> Vec { let mut finalized = best_finalized_entry.as_ref().map(|entry| &entry.valid_from); - let mut unfinalized = unfinalized.iter().map(|fork| &fork.head().valid_from).collect::>(); + let mut unfinalized = + unfinalized.iter().map(|fork| &fork.head().valid_from).collect::>(); match op { CommitOperation::AppendNewBlock(_, _) => (), @@ -230,8 +263,11 @@ mod meta { CommitOperation::BlockReverted(ref forks) => { for (fork_index, updated_fork) in forks.iter().rev() { match updated_fork { - Some(updated_fork) => unfinalized[*fork_index] = &updated_fork.head().valid_from, - None => { unfinalized.remove(*fork_index); }, + Some(updated_fork) => + unfinalized[*fork_index] = &updated_fork.head().valid_from, + None => { + unfinalized.remove(*fork_index); + }, } } }, @@ -243,10 +279,12 @@ mod meta { /// Decode meta information. 
pub fn decode(encoded: &[u8]) -> ClientResult> { let input = &mut &*encoded; - let finalized: Option> = Decode::decode(input) - .map_err(|_| ClientError::from(ClientError::Backend("Error decoding cache meta".into())))?; - let unfinalized: Vec> = Decode::decode(input) - .map_err(|_| ClientError::from(ClientError::Backend("Error decoding cache meta".into())))?; + let finalized: Option> = Decode::decode(input).map_err(|_| { + ClientError::from(ClientError::Backend("Error decoding cache meta".into())) + })?; + let unfinalized: Vec> = Decode::decode(input).map_err(|_| { + ClientError::from(ClientError::Backend("Error decoding cache meta".into())) + })?; Ok(Metadata { finalized, unfinalized }) } @@ -254,8 +292,8 @@ mod meta { #[cfg(test)] pub mod tests { - use std::collections::{HashMap, HashSet}; use super::*; + use std::collections::{HashMap, HashSet}; pub struct FaultyStorage; @@ -272,7 +310,10 @@ pub mod tests { Err(ClientError::Backend("TestError".into())) } - fn read_entry(&self, _at: &ComplexBlockId) -> ClientResult>> { + fn read_entry( + &self, + _at: &ComplexBlockId, + ) -> ClientResult>> { Err(ClientError::Backend("TestError".into())) } } @@ -287,17 +328,18 @@ pub mod tests { impl DummyStorage { pub fn new() -> Self { DummyStorage { - meta: Metadata { - finalized: None, - unfinalized: Vec::new(), - }, + meta: Metadata { finalized: None, unfinalized: Vec::new() }, ids: HashMap::new(), headers: HashMap::new(), entries: HashMap::new(), } } - pub fn with_meta(mut self, finalized: Option>, unfinalized: Vec>) -> Self { + pub fn with_meta( + mut self, + finalized: Option>, + unfinalized: Vec>, + ) -> Self { self.meta.finalized = finalized; self.meta.unfinalized = unfinalized; self @@ -313,7 +355,11 @@ pub mod tests { self } - pub fn with_entry(mut self, at: ComplexBlockId, entry: StorageEntry) -> Self { + pub fn with_entry( + mut self, + at: ComplexBlockId, + entry: StorageEntry, + ) -> Self { self.entries.insert(at.hash, entry); self } @@ -332,7 +378,10 @@ pub mod 
tests { Ok(self.meta.clone()) } - fn read_entry(&self, at: &ComplexBlockId) -> ClientResult>> { + fn read_entry( + &self, + at: &ComplexBlockId, + ) -> ClientResult>> { Ok(self.entries.get(&at.hash).cloned()) } } @@ -366,7 +415,11 @@ pub mod tests { } impl StorageTransaction for DummyTransaction { - fn insert_storage_entry(&mut self, at: &ComplexBlockId, _entry: &StorageEntry) { + fn insert_storage_entry( + &mut self, + at: &ComplexBlockId, + _entry: &StorageEntry, + ) { self.inserted_entries.insert(at.hash); } @@ -380,7 +433,9 @@ pub mod tests { unfinalized: &[Fork], operation: &CommitOperation, ) { - self.updated_meta = Some(meta::decode(&meta::encode(best_finalized_entry, unfinalized, operation)).unwrap()); + self.updated_meta = Some( + meta::decode(&meta::encode(best_finalized_entry, unfinalized, operation)).unwrap(), + ); } } } diff --git a/client/db/src/cache/mod.rs b/client/db/src/cache/mod.rs index 005d25b90f933..5502896aced2c 100644 --- a/client/db/src/cache/mod.rs +++ b/client/db/src/cache/mod.rs @@ -18,17 +18,27 @@ //! DB-backed cache of blockchain data. 
-use std::{sync::Arc, collections::{HashMap, hash_map::Entry}}; use parking_lot::RwLock; - -use sc_client_api::blockchain::{well_known_cache_keys::{self, Id as CacheKeyId}, Cache as BlockchainCache}; -use sp_blockchain::{Result as ClientResult, HeaderMetadataCache}; +use std::{ + collections::{hash_map::Entry, HashMap}, + sync::Arc, +}; + +use crate::{ + utils::{self, COLUMN_META}, + DbHash, +}; +use codec::{Decode, Encode}; +use sc_client_api::blockchain::{ + well_known_cache_keys::{self, Id as CacheKeyId}, + Cache as BlockchainCache, +}; +use sp_blockchain::{HeaderMetadataCache, Result as ClientResult}; use sp_database::{Database, Transaction}; -use codec::{Encode, Decode}; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor, Zero}; -use crate::utils::{self, COLUMN_META}; -use crate::DbHash; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT, NumberFor, Zero}, +}; use self::list_cache::{ListCache, PruningStrategy}; @@ -118,7 +128,10 @@ impl DbCache { } /// Begin cache transaction. 
- pub fn transaction<'a>(&'a mut self, tx: &'a mut Transaction) -> DbCacheTransaction<'a, Block> { + pub fn transaction<'a>( + &'a mut self, + tx: &'a mut Transaction, + ) -> DbCacheTransaction<'a, Block> { DbCacheTransaction { cache: self, tx, @@ -164,7 +177,7 @@ impl DbCache { self.key_lookup_column, self.header_column, self.cache_column, - &self.best_finalized_block + &self.best_finalized_block, ) } } @@ -184,19 +197,16 @@ fn get_cache_helper<'a, Block: BlockT>( Entry::Occupied(entry) => Ok(entry.into_mut()), Entry::Vacant(entry) => { let cache = ListCache::new( - self::list_storage::DbStorage::new(name.to_vec(), db.clone(), - self::list_storage::DbColumns { - meta: COLUMN_META, - key_lookup, - header, - cache, - }, + self::list_storage::DbStorage::new( + name.to_vec(), + db.clone(), + self::list_storage::DbColumns { meta: COLUMN_META, key_lookup, header, cache }, ), cache_pruning_strategy(name), best_finalized_block.clone(), )?; Ok(entry.insert(cache)) - } + }, } } @@ -210,10 +220,7 @@ pub struct DbCacheTransactionOps { impl DbCacheTransactionOps { /// Empty transaction ops. 
pub fn empty() -> DbCacheTransactionOps { - DbCacheTransactionOps { - cache_at_ops: HashMap::new(), - best_finalized_block: None, - } + DbCacheTransactionOps { cache_at_ops: HashMap::new(), best_finalized_block: None } } } @@ -244,19 +251,21 @@ impl<'a, Block: BlockT> DbCacheTransaction<'a, Block> { ) -> ClientResult { // prepare list of caches that are not update // (we might still need to do some cache maintenance in this case) - let missed_caches = self.cache.cache_at.keys() + let missed_caches = self + .cache + .cache_at + .keys() .filter(|cache| !data_at.contains_key(*cache)) .cloned() .collect::>(); - let mut insert_op = |name: CacheKeyId, value: Option>| -> Result<(), sp_blockchain::Error> { + let mut insert_op = |name: CacheKeyId, + value: Option>| + -> Result<(), sp_blockchain::Error> { let cache = self.cache.get_cache(name)?; let cache_ops = self.cache_at_ops.entry(name).or_default(); cache.on_block_insert( - &mut self::list_storage::DbStorageTransaction::new( - cache.storage(), - &mut self.tx, - ), + &mut self::list_storage::DbStorageTransaction::new(cache.storage(), &mut self.tx), parent.clone(), block.clone(), value, @@ -271,8 +280,7 @@ impl<'a, Block: BlockT> DbCacheTransaction<'a, Block> { missed_caches.into_iter().try_for_each(|name| insert_op(name, None))?; match entry_type { - EntryType::Final | EntryType::Genesis => - self.best_finalized_block = Some(block), + EntryType::Final | EntryType::Genesis => self.best_finalized_block = Some(block), EntryType::NonFinal => (), } @@ -288,10 +296,7 @@ impl<'a, Block: BlockT> DbCacheTransaction<'a, Block> { for (name, cache) in self.cache.cache_at.iter() { let cache_ops = self.cache_at_ops.entry(*name).or_default(); cache.on_block_finalize( - &mut self::list_storage::DbStorageTransaction::new( - cache.storage(), - &mut self.tx - ), + &mut self::list_storage::DbStorageTransaction::new(cache.storage(), &mut self.tx), parent.clone(), block.clone(), cache_ops, @@ -304,17 +309,11 @@ impl<'a, Block: BlockT> 
DbCacheTransaction<'a, Block> { } /// When block is reverted. - pub fn on_block_revert( - mut self, - reverted_block: &ComplexBlockId, - ) -> ClientResult { + pub fn on_block_revert(mut self, reverted_block: &ComplexBlockId) -> ClientResult { for (name, cache) in self.cache.cache_at.iter() { let cache_ops = self.cache_at_ops.entry(*name).or_default(); cache.on_block_revert( - &mut self::list_storage::DbStorageTransaction::new( - cache.storage(), - &mut self.tx - ), + &mut self::list_storage::DbStorageTransaction::new(cache.storage(), &mut self.tx), reverted_block, cache_ops, )?; @@ -352,7 +351,9 @@ impl BlockchainCache for DbCacheSync { &self, key: &CacheKeyId, at: &BlockId, - ) -> ClientResult, Block::Hash), Option<(NumberFor, Block::Hash)>, Vec)>> { + ) -> ClientResult< + Option<((NumberFor, Block::Hash), Option<(NumberFor, Block::Hash)>, Vec)>, + > { let mut cache = self.0.write(); let header_metadata_cache = cache.header_metadata_cache.clone(); let cache = cache.get_cache(*key)?; @@ -360,36 +361,39 @@ impl BlockchainCache for DbCacheSync { let db = storage.db(); let columns = storage.columns(); let at = match *at { - BlockId::Hash(hash) => { - match header_metadata_cache.header_metadata(hash) { - Some(metadata) => ComplexBlockId::new(hash, metadata.number), - None => { - let header = utils::require_header::( - &**db, - columns.key_lookup, - columns.header, - BlockId::Hash(hash.clone()))?; - ComplexBlockId::new(hash, *header.number()) - } - } + BlockId::Hash(hash) => match header_metadata_cache.header_metadata(hash) { + Some(metadata) => ComplexBlockId::new(hash, metadata.number), + None => { + let header = utils::require_header::( + &**db, + columns.key_lookup, + columns.header, + BlockId::Hash(hash.clone()), + )?; + ComplexBlockId::new(hash, *header.number()) + }, }, BlockId::Number(number) => { let hash = utils::require_header::( &**db, columns.key_lookup, columns.header, - BlockId::Number(number.clone()))?.hash(); + BlockId::Number(number.clone()), + )? 
+ .hash(); ComplexBlockId::new(hash, number) }, }; - cache.value_at_block(&at) - .map(|block_and_value| block_and_value.map(|(begin_block, end_block, value)| + cache.value_at_block(&at).map(|block_and_value| { + block_and_value.map(|(begin_block, end_block, value)| { ( (begin_block.number, begin_block.hash), end_block.map(|end_block| (end_block.number, end_block.hash)), value, - ))) + ) + }) + }) } } diff --git a/client/db/src/changes_tries_storage.rs b/client/db/src/changes_tries_storage.rs index 3863099a09f96..6b948a2d2c5c3 100644 --- a/client/db/src/changes_tries_storage.rs +++ b/client/db/src/changes_tries_storage.rs @@ -18,33 +18,43 @@ //! DB-backed changes tries storage. -use std::collections::{HashMap, HashSet}; -use std::sync::Arc; -use hash_db::Prefix; +use crate::{ + cache::{ + ComplexBlockId, DbCache, DbCacheSync, DbCacheTransactionOps, EntryType as CacheEntryType, + }, + utils::{self, meta_keys, Meta}, + Database, DbHash, +}; use codec::{Decode, Encode}; +use hash_db::Prefix; use parking_lot::RwLock; -use sp_blockchain::{Error as ClientError, Result as ClientResult}; -use sp_trie::MemoryDB; use sc_client_api::backend::PrunableStateChangesTrieStorage; -use sp_blockchain::{well_known_cache_keys, Cache as BlockchainCache, HeaderMetadataCache}; -use sp_core::{ChangesTrieConfiguration, ChangesTrieConfigurationRange, convert_hash}; -use sp_core::storage::PrefixedStorageKey; +use sp_blockchain::{ + well_known_cache_keys, Cache as BlockchainCache, Error as ClientError, HeaderMetadataCache, + Result as ClientResult, +}; +use sp_core::{ + convert_hash, storage::PrefixedStorageKey, ChangesTrieConfiguration, + ChangesTrieConfigurationRange, +}; use sp_database::Transaction; -use sp_runtime::traits::{ - Block as BlockT, Header as HeaderT, HashFor, NumberFor, One, Zero, CheckedSub, +use sp_runtime::{ + generic::{BlockId, ChangesTrieSignal, DigestItem}, + traits::{Block as BlockT, CheckedSub, HashFor, Header as HeaderT, NumberFor, One, Zero}, }; -use 
sp_runtime::generic::{BlockId, DigestItem, ChangesTrieSignal}; use sp_state_machine::{ChangesTrieBuildCache, ChangesTrieCacheAction}; -use crate::{Database, DbHash}; -use crate::utils::{self, Meta, meta_keys}; -use crate::cache::{ - DbCacheSync, DbCache, DbCacheTransactionOps, - ComplexBlockId, EntryType as CacheEntryType, +use sp_trie::MemoryDB; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, }; /// Extract new changes trie configuration (if available) from the header. -pub fn extract_new_configuration(header: &Header) -> Option<&Option> { - header.digest() +pub fn extract_new_configuration( + header: &Header, +) -> Option<&Option> { + header + .digest() .log(DigestItem::as_changes_trie_signal) .and_then(ChangesTrieSignal::as_new_configuration) } @@ -68,10 +78,7 @@ impl DbChangesTrieStorageTransaction { impl From> for DbChangesTrieStorageTransaction { fn from(cache_ops: DbCacheTransactionOps) -> Self { - DbChangesTrieStorageTransaction { - cache_ops, - new_config: None, - } + DbChangesTrieStorageTransaction { cache_ops, new_config: None } } } @@ -173,21 +180,25 @@ impl DbChangesTrieStorage { let new_configuration = match new_configuration { Some(new_configuration) => new_configuration, None if !finalized => return Ok(DbCacheTransactionOps::empty().into()), - None => return self.finalize( - tx, - parent_block.hash, - block.hash, - block.number, - Some(new_header), - cache_tx, - ), + None => + return self.finalize( + tx, + parent_block.hash, + block.hash, + block.number, + Some(new_header), + cache_tx, + ), }; // update configuration cache let mut cache_at = HashMap::new(); cache_at.insert(well_known_cache_keys::CHANGES_TRIE_CONFIG, new_configuration.encode()); Ok(DbChangesTrieStorageTransaction::from(match cache_tx { - Some(cache_tx) => self.cache.0.write() + Some(cache_tx) => self + .cache + .0 + .write() .transaction_with_ops(tx, cache_tx.cache_ops) .on_block_insert( parent_block, @@ -196,7 +207,10 @@ impl DbChangesTrieStorage { if finalized { 
CacheEntryType::Final } else { CacheEntryType::NonFinal }, )? .into_ops(), - None => self.cache.0.write() + None => self + .cache + .0 + .write() .transaction(tx) .on_block_insert( parent_block, @@ -205,7 +219,8 @@ impl DbChangesTrieStorage { if finalized { CacheEntryType::Final } else { CacheEntryType::NonFinal }, )? .into_ops(), - }).with_new_config(Some(new_configuration))) + }) + .with_new_config(Some(new_configuration))) } /// Called when block is finalized. @@ -226,7 +241,7 @@ impl DbChangesTrieStorage { if cache_tx.is_some() { if let Some(new_header) = new_header { if new_header.hash() == block_hash { - return Ok(cache_tx.expect("guarded by cache_tx.is_some(); qed")); + return Ok(cache_tx.expect("guarded by cache_tx.is_some(); qed")) } } } @@ -237,22 +252,21 @@ impl DbChangesTrieStorage { let parent_block = ComplexBlockId::new(parent_block_hash, parent_block_num); Ok(match cache_tx { Some(cache_tx) => DbChangesTrieStorageTransaction::from( - self.cache.0.write() + self.cache + .0 + .write() .transaction_with_ops(tx, cache_tx.cache_ops) - .on_block_finalize( - parent_block, - block, - )? - .into_ops() - ).with_new_config(cache_tx.new_config), + .on_block_finalize(parent_block, block)? + .into_ops(), + ) + .with_new_config(cache_tx.new_config), None => DbChangesTrieStorageTransaction::from( - self.cache.0.write() + self.cache + .0 + .write() .transaction(tx) - .on_block_finalize( - parent_block, - block, - )? - .into_ops() + .on_block_finalize(parent_block, block)? + .into_ops(), ), }) } @@ -263,23 +277,24 @@ impl DbChangesTrieStorage { tx: &mut Transaction, block: &ComplexBlockId, ) -> ClientResult> { - Ok(self.cache.0.write().transaction(tx) - .on_block_revert(block)? - .into_ops() - .into()) + Ok(self.cache.0.write().transaction(tx).on_block_revert(block)?.into_ops().into()) } /// When transaction has been committed. 
pub fn post_commit(&self, tx: Option>) { if let Some(tx) = tx { - self.cache.0.write().commit(tx.cache_ops) - .expect("only fails if cache with given name isn't loaded yet;\ - cache is already loaded because there is tx; qed"); + self.cache.0.write().commit(tx.cache_ops).expect( + "only fails if cache with given name isn't loaded yet;\ + cache is already loaded because there is tx; qed", + ); } } /// Commit changes into changes trie build cache. - pub fn commit_build_cache(&self, cache_update: ChangesTrieCacheAction>) { + pub fn commit_build_cache( + &self, + cache_update: ChangesTrieCacheAction>, + ) { self.build_cache.write().perform(cache_update); } @@ -307,7 +322,7 @@ impl DbChangesTrieStorage { // 2) or we are (or were) in period where changes tries are disabled if let Some((begin, end)) = tries_meta.oldest_digest_range { if block_num <= end || block_num - end <= min_blocks_to_keep.into() { - break; + break } tries_meta.oldest_pruned_digest_range_end = end; @@ -333,7 +348,8 @@ impl DbChangesTrieStorage { self.key_lookup_column, self.header_column, BlockId::Number(next_digest_range_start), - )?.hash(), + )? + .hash(), }; let config_for_new_block = new_header @@ -341,21 +357,18 @@ impl DbChangesTrieStorage { .unwrap_or(false); let next_config = match cache_tx { Some(cache_tx) if config_for_new_block && cache_tx.new_config.is_some() => { - let config = cache_tx - .new_config - .clone() - .expect("guarded by is_some(); qed"); + let config = cache_tx.new_config.clone().expect("guarded by is_some(); qed"); ChangesTrieConfigurationRange { zero: (block_num, block_hash), end: None, config, } }, - _ if config_for_new_block => { - self.configuration_at(&BlockId::Hash(*new_header.expect( - "config_for_new_block is only true when new_header is passed; qed" - ).parent_hash()))? 
- }, + _ if config_for_new_block => self.configuration_at(&BlockId::Hash( + *new_header + .expect("config_for_new_block is only true when new_header is passed; qed") + .parent_hash(), + ))?, _ => self.configuration_at(&BlockId::Hash(next_digest_range_start_hash))?, }; if let Some(config) = next_config.config { @@ -370,11 +383,11 @@ impl DbChangesTrieStorage { } tries_meta.oldest_digest_range = Some(oldest_digest_range); - continue; + continue } tries_meta.oldest_digest_range = None; - break; + break } write_tries_meta(tx, self.meta_column, &*tries_meta); @@ -383,17 +396,23 @@ impl DbChangesTrieStorage { } impl PrunableStateChangesTrieStorage for DbChangesTrieStorage { - fn storage(&self) -> &dyn sp_state_machine::ChangesTrieStorage, NumberFor> { + fn storage( + &self, + ) -> &dyn sp_state_machine::ChangesTrieStorage, NumberFor> { self } - fn configuration_at(&self, at: &BlockId) -> ClientResult< - ChangesTrieConfigurationRange, Block::Hash> - > { + fn configuration_at( + &self, + at: &BlockId, + ) -> ClientResult, Block::Hash>> { self.cache .get_at(&well_known_cache_keys::CHANGES_TRIE_CONFIG, at)? 
- .and_then(|(zero, end, encoded)| Decode::decode(&mut &encoded[..]).ok() - .map(|config| ChangesTrieConfigurationRange { zero, end, config })) + .and_then(|(zero, end, encoded)| { + Decode::decode(&mut &encoded[..]) + .ok() + .map(|config| ChangesTrieConfigurationRange { zero, end, config }) + }) .ok_or_else(|| ClientError::ErrorReadingChangesTriesConfig) } @@ -409,14 +428,21 @@ impl sp_state_machine::ChangesTrieRootsStorage, Nu &self, hash: Block::Hash, ) -> Result>, String> { - utils::read_header::(&*self.db, self.key_lookup_column, self.header_column, BlockId::Hash(hash)) - .map_err(|e| e.to_string()) - .and_then(|maybe_header| maybe_header.map(|header| - sp_state_machine::ChangesTrieAnchorBlockId { + utils::read_header::( + &*self.db, + self.key_lookup_column, + self.header_column, + BlockId::Hash(hash), + ) + .map_err(|e| e.to_string()) + .and_then(|maybe_header| { + maybe_header + .map(|header| sp_state_machine::ChangesTrieAnchorBlockId { hash, number: *header.number(), - } - ).ok_or_else(|| format!("Unknown header: {}", hash))) + }) + .ok_or_else(|| format!("Unknown header: {}", hash)) + }) } fn root( @@ -426,7 +452,10 @@ impl sp_state_machine::ChangesTrieRootsStorage, Nu ) -> Result, String> { // check API requirement: we can't get NEXT block(s) based on anchor if block > anchor.number { - return Err(format!("Can't get changes trie root at {} using anchor at {}", block, anchor.number)); + return Err(format!( + "Can't get changes trie root at {} using anchor at {}", + block, anchor.number + )) } // we need to get hash of the block to resolve changes trie root @@ -438,8 +467,12 @@ impl sp_state_machine::ChangesTrieRootsStorage, Nu let mut current_num = anchor.number; let mut current_hash: Block::Hash = convert_hash(&anchor.hash); let maybe_anchor_header: Block::Header = utils::require_header::( - &*self.db, self.key_lookup_column, self.header_column, BlockId::Number(current_num) - ).map_err(|e| e.to_string())?; + &*self.db, + self.key_lookup_column, + 
self.header_column, + BlockId::Number(current_num), + ) + .map_err(|e| e.to_string())?; if maybe_anchor_header.hash() == current_hash { // if anchor is canonicalized, then the block is also canonicalized BlockId::Number(block) @@ -449,8 +482,12 @@ impl sp_state_machine::ChangesTrieRootsStorage, Nu // back from the anchor to the block with given number while current_num != block { let current_header: Block::Header = utils::require_header::( - &*self.db, self.key_lookup_column, self.header_column, BlockId::Hash(current_hash) - ).map_err(|e| e.to_string())?; + &*self.db, + self.key_lookup_column, + self.header_column, + BlockId::Hash(current_hash), + ) + .map_err(|e| e.to_string())?; current_hash = *current_header.parent_hash(); current_num = current_num - One::one(); @@ -460,18 +497,16 @@ impl sp_state_machine::ChangesTrieRootsStorage, Nu } }; - Ok( - utils::require_header::( - &*self.db, - self.key_lookup_column, - self.header_column, - block_id, - ) - .map_err(|e| e.to_string())? - .digest() - .log(DigestItem::as_changes_trie_root) - .cloned() + Ok(utils::require_header::( + &*self.db, + self.key_lookup_column, + self.header_column, + block_id, ) + .map_err(|e| e.to_string())? 
+ .digest() + .log(DigestItem::as_changes_trie_root) + .cloned()) } } @@ -480,7 +515,9 @@ impl sp_state_machine::ChangesTrieStorage, NumberFor &dyn sp_state_machine::ChangesTrieRootsStorage, NumberFor> { + fn as_roots_storage( + &self, + ) -> &dyn sp_state_machine::ChangesTrieRootsStorage, NumberFor> { self } @@ -503,8 +540,9 @@ fn read_tries_meta( meta_column: u32, ) -> ClientResult> { match db.get(meta_column, meta_keys::CHANGES_TRIES_META) { - Some(h) => Decode::decode(&mut &h[..]) - .map_err(|err| ClientError::Backend(format!("Error decoding changes tries metadata: {}", err))), + Some(h) => Decode::decode(&mut &h[..]).map_err(|err| { + ClientError::Backend(format!("Error decoding changes tries metadata: {}", err)) + }), None => Ok(ChangesTriesMeta { oldest_digest_range: None, oldest_pruned_digest_range_end: Zero::zero(), @@ -523,18 +561,23 @@ fn write_tries_meta( #[cfg(test)] mod tests { + use super::*; + use crate::{ + tests::{insert_header, prepare_changes, Block}, + Backend, + }; use hash_db::EMPTY_PREFIX; use sc_client_api::backend::{ - Backend as ClientBackend, NewBlockState, BlockImportOperation, PrunableStateChangesTrieStorage, + Backend as ClientBackend, BlockImportOperation, NewBlockState, + PrunableStateChangesTrieStorage, }; use sp_blockchain::HeaderBackend as BlockchainHeaderBackend; use sp_core::H256; - use sp_runtime::testing::{Digest, Header}; - use sp_runtime::traits::{Hash, BlakeTwo256}; + use sp_runtime::{ + testing::{Digest, Header}, + traits::{BlakeTwo256, Hash}, + }; use sp_state_machine::{ChangesTrieRootsStorage, ChangesTrieStorage}; - use crate::Backend; - use crate::tests::{Block, insert_header, prepare_changes}; - use super::*; fn changes(number: u64) -> Option, Vec)>> { Some(vec![(number.to_le_bytes().to_vec(), number.to_le_bytes().to_vec())]) @@ -554,7 +597,9 @@ mod tests { digest.push(DigestItem::ChangesTrieRoot(root)); changes_trie_update = update; } - 
digest.push(DigestItem::ChangesTrieSignal(ChangesTrieSignal::NewConfiguration(new_configuration))); + digest.push(DigestItem::ChangesTrieSignal(ChangesTrieSignal::NewConfiguration( + new_configuration, + ))); let header = Header { number, @@ -573,7 +618,8 @@ mod tests { let mut op = backend.begin_operation().unwrap(); backend.begin_state_operation(&mut op, block_id).unwrap(); op.set_block_data(header, None, None, None, NewBlockState::Best).unwrap(); - op.update_changes_trie((changes_trie_update, ChangesTrieCacheAction::Clear)).unwrap(); + op.update_changes_trie((changes_trie_update, ChangesTrieCacheAction::Clear)) + .unwrap(); backend.commit_operation(op).unwrap(); header_hash @@ -584,11 +630,13 @@ mod tests { let backend = Backend::::new_test(1000, 100); backend.changes_tries_storage.meta.write().finalized_number = 1000; - let check_changes = |backend: &Backend, block: u64, changes: Vec<(Vec, Vec)>| { + let check_changes = |backend: &Backend, + block: u64, + changes: Vec<(Vec, Vec)>| { let (changes_root, mut changes_trie_update) = prepare_changes(changes); let anchor = sp_state_machine::ChangesTrieAnchorBlockId { hash: backend.blockchain().header(BlockId::Number(block)).unwrap().unwrap().hash(), - number: block + number: block, }; assert_eq!(backend.changes_tries_storage.root(&anchor, block), Ok(Some(changes_root))); @@ -605,7 +653,13 @@ mod tests { ]; let changes2 = vec![(b"key_at_2".to_vec(), b"val_at_2".to_vec())]; - let block0 = insert_header(&backend, 0, Default::default(), Some(changes0.clone()), Default::default()); + let block0 = insert_header( + &backend, + 0, + Default::default(), + Some(changes0.clone()), + Default::default(), + ); let block1 = insert_header(&backend, 1, block0, Some(changes1.clone()), Default::default()); let _ = insert_header(&backend, 2, block1, Some(changes2.clone()), Default::default()); @@ -622,19 +676,29 @@ mod tests { let changes0 = vec![(b"k0".to_vec(), b"v0".to_vec())]; let changes1 = vec![(b"k1".to_vec(), b"v1".to_vec())]; 
let changes2 = vec![(b"k2".to_vec(), b"v2".to_vec())]; - let block0 = insert_header(&backend, 0, Default::default(), Some(changes0.clone()), Default::default()); + let block0 = insert_header( + &backend, + 0, + Default::default(), + Some(changes0.clone()), + Default::default(), + ); let block1 = insert_header(&backend, 1, block0, Some(changes1.clone()), Default::default()); let block2 = insert_header(&backend, 2, block1, Some(changes2.clone()), Default::default()); let changes2_1_0 = vec![(b"k3".to_vec(), b"v3".to_vec())]; let changes2_1_1 = vec![(b"k4".to_vec(), b"v4".to_vec())]; - let block2_1_0 = insert_header(&backend, 3, block2, Some(changes2_1_0.clone()), Default::default()); - let block2_1_1 = insert_header(&backend, 4, block2_1_0, Some(changes2_1_1.clone()), Default::default()); + let block2_1_0 = + insert_header(&backend, 3, block2, Some(changes2_1_0.clone()), Default::default()); + let block2_1_1 = + insert_header(&backend, 4, block2_1_0, Some(changes2_1_1.clone()), Default::default()); let changes2_2_0 = vec![(b"k5".to_vec(), b"v5".to_vec())]; let changes2_2_1 = vec![(b"k6".to_vec(), b"v6".to_vec())]; - let block2_2_0 = insert_header(&backend, 3, block2, Some(changes2_2_0.clone()), Default::default()); - let block2_2_1 = insert_header(&backend, 4, block2_2_0, Some(changes2_2_1.clone()), Default::default()); + let block2_2_0 = + insert_header(&backend, 3, block2, Some(changes2_2_0.clone()), Default::default()); + let block2_2_1 = + insert_header(&backend, 4, block2_2_0, Some(changes2_2_1.clone()), Default::default()); // finalize block1 backend.changes_tries_storage.meta.write().finalized_number = 1; @@ -680,7 +744,12 @@ mod tests { if number == 0 { Default::default() } else { - backend.blockchain().header(BlockId::Number(number - 1)).unwrap().unwrap().hash() + backend + .blockchain() + .header(BlockId::Number(number - 1)) + .unwrap() + .unwrap() + .hash() } }; @@ -698,12 +767,14 @@ mod tests { let trie_root = backend .blockchain() 
.header(BlockId::Number(number)) - .unwrap().unwrap() + .unwrap() + .unwrap() .digest() .log(DigestItem::as_changes_trie_root) .cloned(); match trie_root { - Some(trie_root) => backend.changes_tries_storage.get(&trie_root, EMPTY_PREFIX).unwrap().is_none(), + Some(trie_root) => + backend.changes_tries_storage.get(&trie_root, EMPTY_PREFIX).unwrap().is_none(), None => true, } }; @@ -711,14 +782,10 @@ mod tests { let finalize_block = |number| { let header = backend.blockchain().header(BlockId::Number(number)).unwrap().unwrap(); let mut tx = Transaction::new(); - let cache_ops = backend.changes_tries_storage.finalize( - &mut tx, - *header.parent_hash(), - header.hash(), - number, - None, - None, - ).unwrap(); + let cache_ops = backend + .changes_tries_storage + .finalize(&mut tx, *header.parent_hash(), header.hash(), number, None, None) + .unwrap(); backend.storage.db.commit(tx).unwrap(); backend.changes_tries_storage.post_commit(Some(cache_ops)); }; @@ -737,11 +804,23 @@ mod tests { (0..6).for_each(|number| insert_regular_header(false, number)); insert_header_with_configuration_change(&backend, 6, parent_hash(6), None, config_at_6); (7..17).for_each(|number| insert_regular_header(true, number)); - insert_header_with_configuration_change(&backend, 17, parent_hash(17), changes(17), config_at_17); + insert_header_with_configuration_change( + &backend, + 17, + parent_hash(17), + changes(17), + config_at_17, + ); (18..21).for_each(|number| insert_regular_header(false, number)); insert_header_with_configuration_change(&backend, 21, parent_hash(21), None, config_at_21); (22..32).for_each(|number| insert_regular_header(true, number)); - insert_header_with_configuration_change(&backend, 32, parent_hash(32), changes(32), config_at_32); + insert_header_with_configuration_change( + &backend, + 32, + parent_hash(32), + changes(32), + config_at_32, + ); (33..50).for_each(|number| insert_regular_header(true, number)); // when only genesis is finalized, nothing is pruned @@ -826,29 
+905,24 @@ mod tests { let backend = Backend::::new_test(1000, 100); // configurations at blocks - let config_at_1 = Some(ChangesTrieConfiguration { - digest_interval: 4, - digest_levels: 2, - }); - let config_at_3 = Some(ChangesTrieConfiguration { - digest_interval: 8, - digest_levels: 1, - }); + let config_at_1 = Some(ChangesTrieConfiguration { digest_interval: 4, digest_levels: 2 }); + let config_at_3 = Some(ChangesTrieConfiguration { digest_interval: 8, digest_levels: 1 }); let config_at_5 = None; - let config_at_7 = Some(ChangesTrieConfiguration { - digest_interval: 8, - digest_levels: 1, - }); + let config_at_7 = Some(ChangesTrieConfiguration { digest_interval: 8, digest_levels: 1 }); // insert some blocks let block0 = insert_header(&backend, 0, Default::default(), None, Default::default()); - let block1 = insert_header_with_configuration_change(&backend, 1, block0, None, config_at_1.clone()); + let block1 = + insert_header_with_configuration_change(&backend, 1, block0, None, config_at_1.clone()); let block2 = insert_header(&backend, 2, block1, None, Default::default()); - let block3 = insert_header_with_configuration_change(&backend, 3, block2, None, config_at_3.clone()); + let block3 = + insert_header_with_configuration_change(&backend, 3, block2, None, config_at_3.clone()); let block4 = insert_header(&backend, 4, block3, None, Default::default()); - let block5 = insert_header_with_configuration_change(&backend, 5, block4, None, config_at_5.clone()); + let block5 = + insert_header_with_configuration_change(&backend, 5, block4, None, config_at_5.clone()); let block6 = insert_header(&backend, 6, block5, None, Default::default()); - let block7 = insert_header_with_configuration_change(&backend, 7, block6, None, config_at_7.clone()); + let block7 = + insert_header_with_configuration_change(&backend, 7, block6, None, config_at_7.clone()); // test configuration cache let storage = &backend.changes_tries_storage; @@ -887,17 +961,48 @@ mod tests { let mut backend = 
Backend::::new_test(10, 10); backend.changes_tries_storage.min_blocks_to_keep = Some(8); - let configs = (0..=7).map(|i| Some(ChangesTrieConfiguration::new(2, i))).collect::>(); + let configs = + (0..=7).map(|i| Some(ChangesTrieConfiguration::new(2, i))).collect::>(); // insert unfinalized headers - let block0 = insert_header_with_configuration_change(&backend, 0, Default::default(), None, configs[0].clone()); - let block1 = insert_header_with_configuration_change(&backend, 1, block0, changes(1), configs[1].clone()); - let block2 = insert_header_with_configuration_change(&backend, 2, block1, changes(2), configs[2].clone()); + let block0 = insert_header_with_configuration_change( + &backend, + 0, + Default::default(), + None, + configs[0].clone(), + ); + let block1 = insert_header_with_configuration_change( + &backend, + 1, + block0, + changes(1), + configs[1].clone(), + ); + let block2 = insert_header_with_configuration_change( + &backend, + 2, + block1, + changes(2), + configs[2].clone(), + ); let side_config2_1 = Some(ChangesTrieConfiguration::new(3, 2)); let side_config2_2 = Some(ChangesTrieConfiguration::new(3, 3)); - let block2_1 = insert_header_with_configuration_change(&backend, 2, block1, changes(8), side_config2_1.clone()); - let _ = insert_header_with_configuration_change(&backend, 3, block2_1, changes(9), side_config2_2.clone()); + let block2_1 = insert_header_with_configuration_change( + &backend, + 2, + block1, + changes(8), + side_config2_1.clone(), + ); + let _ = insert_header_with_configuration_change( + &backend, + 3, + block2_1, + changes(9), + side_config2_2.clone(), + ); // insert finalized header => 4 headers are finalized at once let header3 = Header { @@ -905,9 +1010,9 @@ mod tests { parent_hash: block2, state_root: Default::default(), digest: Digest { - logs: vec![ - DigestItem::ChangesTrieSignal(ChangesTrieSignal::NewConfiguration(configs[3].clone())), - ], + logs: vec![DigestItem::ChangesTrieSignal(ChangesTrieSignal::NewConfiguration( + 
configs[3].clone(), + ))], }, extrinsics_root: Default::default(), }; @@ -920,9 +1025,27 @@ mod tests { backend.commit_operation(op).unwrap(); // insert more unfinalized headers - let block4 = insert_header_with_configuration_change(&backend, 4, block3, changes(4), configs[4].clone()); - let block5 = insert_header_with_configuration_change(&backend, 5, block4, changes(5), configs[5].clone()); - let block6 = insert_header_with_configuration_change(&backend, 6, block5, changes(6), configs[6].clone()); + let block4 = insert_header_with_configuration_change( + &backend, + 4, + block3, + changes(4), + configs[4].clone(), + ); + let block5 = insert_header_with_configuration_change( + &backend, + 5, + block4, + changes(5), + configs[5].clone(), + ); + let block6 = insert_header_with_configuration_change( + &backend, + 6, + block5, + changes(6), + configs[6].clone(), + ); // insert finalized header => 4 headers are finalized at once let header7 = Header { @@ -930,9 +1053,9 @@ mod tests { parent_hash: block6, state_root: Default::default(), digest: Digest { - logs: vec![ - DigestItem::ChangesTrieSignal(ChangesTrieSignal::NewConfiguration(configs[7].clone())), - ], + logs: vec![DigestItem::ChangesTrieSignal(ChangesTrieSignal::NewConfiguration( + configs[7].clone(), + ))], }, extrinsics_root: Default::default(), }; @@ -950,23 +1073,33 @@ mod tests { let backend = Backend::::new_test(10, 10); let config0 = Some(ChangesTrieConfiguration::new(2, 5)); - let block0 = insert_header_with_configuration_change(&backend, 0, Default::default(), None, config0); + let block0 = + insert_header_with_configuration_change(&backend, 0, Default::default(), None, config0); let config1 = Some(ChangesTrieConfiguration::new(2, 6)); - let block1 = insert_header_with_configuration_change(&backend, 1, block0, changes(0), config1); + let block1 = + insert_header_with_configuration_change(&backend, 1, block0, changes(0), config1); let just1 = Some((*b"TEST", vec![42])); 
backend.finalize_block(BlockId::Number(1), just1).unwrap(); let config2 = Some(ChangesTrieConfiguration::new(2, 7)); - let block2 = insert_header_with_configuration_change(&backend, 2, block1, changes(1), config2); + let block2 = + insert_header_with_configuration_change(&backend, 2, block1, changes(1), config2); let config2_1 = Some(ChangesTrieConfiguration::new(2, 8)); - let _ = insert_header_with_configuration_change(&backend, 3, block2, changes(10), config2_1); + let _ = + insert_header_with_configuration_change(&backend, 3, block2, changes(10), config2_1); let config2_2 = Some(ChangesTrieConfiguration::new(2, 9)); - let block2_2 = insert_header_with_configuration_change(&backend, 3, block2, changes(20), config2_2); + let block2_2 = + insert_header_with_configuration_change(&backend, 3, block2, changes(20), config2_2); let config2_3 = Some(ChangesTrieConfiguration::new(2, 10)); - let _ = insert_header_with_configuration_change(&backend, 4, block2_2, changes(30), config2_3); + let _ = + insert_header_with_configuration_change(&backend, 4, block2_2, changes(30), config2_3); // before truncate there are 2 unfinalized forks - block2_1+block2_3 assert_eq!( - backend.changes_tries_storage.cache.0.write() + backend + .changes_tries_storage + .cache + .0 + .write() .get_cache(well_known_cache_keys::CHANGES_TRIE_CONFIG) .unwrap() .unfinalized() @@ -979,7 +1112,11 @@ mod tests { // after truncating block2_3 - there are 2 unfinalized forks - block2_1+block2_2 backend.revert(1, false).unwrap(); assert_eq!( - backend.changes_tries_storage.cache.0.write() + backend + .changes_tries_storage + .cache + .0 + .write() .get_cache(well_known_cache_keys::CHANGES_TRIE_CONFIG) .unwrap() .unfinalized() @@ -993,7 +1130,11 @@ mod tests { // the 1st one points to the block #3 because it isn't truncated backend.revert(1, false).unwrap(); assert_eq!( - backend.changes_tries_storage.cache.0.write() + backend + .changes_tries_storage + .cache + .0 + .write() 
.get_cache(well_known_cache_keys::CHANGES_TRIE_CONFIG) .unwrap() .unfinalized() @@ -1005,15 +1146,17 @@ mod tests { // after truncating block2 - there are no unfinalized forks backend.revert(1, false).unwrap(); - assert!( - backend.changes_tries_storage.cache.0.write() - .get_cache(well_known_cache_keys::CHANGES_TRIE_CONFIG) - .unwrap() - .unfinalized() - .iter() - .map(|fork| fork.head().valid_from.number) - .collect::>() - .is_empty(), - ); + assert!(backend + .changes_tries_storage + .cache + .0 + .write() + .get_cache(well_known_cache_keys::CHANGES_TRIE_CONFIG) + .unwrap() + .unfinalized() + .iter() + .map(|fork| fork.head().valid_from.number) + .collect::>() + .is_empty(),); } } diff --git a/client/db/src/children.rs b/client/db/src/children.rs index 62352e6d0614a..c11e4204997d1 100644 --- a/client/db/src/children.rs +++ b/client/db/src/children.rs @@ -18,17 +18,22 @@ //! Functionality for reading and storing children hashes from db. -use codec::{Encode, Decode}; +use crate::DbHash; +use codec::{Decode, Encode}; use sp_blockchain; -use std::hash::Hash; use sp_database::{Database, Transaction}; -use crate::DbHash; +use std::hash::Hash; /// Returns the hashes of the children blocks of the block with `parent_hash`. pub fn read_children< K: Eq + Hash + Clone + Encode + Decode, V: Eq + Hash + Clone + Encode + Decode, ->(db: &dyn Database, column: u32, prefix: &[u8], parent_hash: K) -> sp_blockchain::Result> { +>( + db: &dyn Database, + column: u32, + prefix: &[u8], + parent_hash: K, +) -> sp_blockchain::Result> { let mut buf = prefix.to_vec(); parent_hash.using_encoded(|s| buf.extend(s)); @@ -65,9 +70,7 @@ pub fn write_children< } /// Prepare transaction to remove the children of `parent_hash`. 
-pub fn remove_children< - K: Eq + Hash + Clone + Encode + Decode, ->( +pub fn remove_children( tx: &mut Transaction, column: u32, prefix: &[u8], @@ -78,7 +81,6 @@ pub fn remove_children< tx.remove(column, &key); } - #[cfg(test)] mod tests { use super::*; diff --git a/client/db/src/lib.rs b/client/db/src/lib.rs index 505c7b9d49ea6..3369b5fad055c 100644 --- a/client/db/src/lib.rs +++ b/client/db/src/lib.rs @@ -34,63 +34,72 @@ pub mod offchain; #[cfg(any(feature = "with-kvdb-rocksdb", test))] pub mod bench; -mod children; mod cache; mod changes_tries_storage; +mod children; +#[cfg(feature = "with-parity-db")] +mod parity_db; +mod stats; mod storage_cache; #[cfg(any(feature = "with-kvdb-rocksdb", test))] mod upgrade; mod utils; -mod stats; -#[cfg(feature = "with-parity-db")] -mod parity_db; -use std::sync::Arc; -use std::path::{Path, PathBuf}; -use std::io; -use std::collections::{HashMap, HashSet}; -use parking_lot::{Mutex, RwLock}; use linked_hash_map::LinkedHashMap; -use log::{trace, debug, warn}; +use log::{debug, trace, warn}; +use parking_lot::{Mutex, RwLock}; +use std::{ + collections::{HashMap, HashSet}, + io, + path::{Path, PathBuf}, + sync::Arc, +}; +use crate::{ + changes_tries_storage::{DbChangesTrieStorage, DbChangesTrieStorageTransaction}, + stats::StateUsageStats, + storage_cache::{new_shared_cache, CachingState, SharedCache, SyncingCachingState}, + utils::{meta_keys, read_db, read_meta, DatabaseType, Meta}, +}; +use codec::{Decode, Encode}; +use hash_db::Prefix; use sc_client_api::{ - UsageInfo, MemoryInfo, IoInfo, MemorySize, - backend::{NewBlockState, PrunableStateChangesTrieStorage, ProvideChtRoots}, - leaves::{LeafSet, FinalizationDisplaced}, cht, + backend::{NewBlockState, ProvideChtRoots, PrunableStateChangesTrieStorage}, + cht, + leaves::{FinalizationDisplaced, LeafSet}, utils::is_descendent_of, + IoInfo, MemoryInfo, MemorySize, UsageInfo, }; +use sc_state_db::StateDb; +use sp_arithmetic::traits::Saturating; use sp_blockchain::{ - Result as 
ClientResult, Error as ClientError, - well_known_cache_keys, Backend as _, HeaderBackend, + well_known_cache_keys, Backend as _, CachedHeaderMetadata, Error as ClientError, HeaderBackend, + HeaderMetadata, HeaderMetadataCache, Result as ClientResult, +}; +use sp_core::{ + offchain::OffchainOverlayedChange, + storage::{well_known_keys, ChildInfo}, + ChangesTrieConfiguration, }; -use codec::{Decode, Encode}; -use hash_db::Prefix; -use sp_trie::{MemoryDB, PrefixedMemoryDB, prefixed_key}; use sp_database::Transaction; -use sp_core::ChangesTrieConfiguration; -use sp_core::offchain::OffchainOverlayedChange; -use sp_core::storage::{well_known_keys, ChildInfo}; -use sp_arithmetic::traits::Saturating; -use sp_runtime::{generic::{DigestItem, BlockId}, Justification, Justifications, Storage}; -use sp_runtime::traits::{ - Block as BlockT, Header as HeaderT, NumberFor, Zero, One, SaturatedConversion, HashFor, - Hash, +use sp_runtime::{ + generic::{BlockId, DigestItem}, + traits::{ + Block as BlockT, Hash, HashFor, Header as HeaderT, NumberFor, One, SaturatedConversion, + Zero, + }, + Justification, Justifications, Storage, }; use sp_state_machine::{ - DBValue, ChangesTrieTransaction, ChangesTrieCacheAction, UsageInfo as StateUsageInfo, - StorageCollection, ChildStorageCollection, OffchainChangesCollection, - backend::Backend as StateBackend, StateMachineStats, IndexOperation, + backend::Backend as StateBackend, ChangesTrieCacheAction, ChangesTrieTransaction, + ChildStorageCollection, DBValue, IndexOperation, OffchainChangesCollection, StateMachineStats, + StorageCollection, UsageInfo as StateUsageInfo, }; -use crate::utils::{DatabaseType, Meta, meta_keys, read_db, read_meta}; -use crate::changes_tries_storage::{DbChangesTrieStorage, DbChangesTrieStorageTransaction}; -use sc_state_db::StateDb; -use sp_blockchain::{CachedHeaderMetadata, HeaderMetadata, HeaderMetadataCache}; -use crate::storage_cache::{CachingState, SyncingCachingState, SharedCache, new_shared_cache}; -use 
crate::stats::StateUsageStats; +use sp_trie::{prefixed_key, MemoryDB, PrefixedMemoryDB}; // Re-export the Database trait so that one can pass an implementation of it. -pub use sp_database::Database; pub use sc_state_db::PruningMode; +pub use sp_database::Database; #[cfg(any(feature = "with-kvdb-rocksdb", test))] pub use bench::BenchmarkingState; @@ -102,9 +111,8 @@ const CACHE_HEADERS: usize = 8; const DEFAULT_CHILD_RATIO: (usize, usize) = (1, 10); /// DB-backed patricia trie state, transaction type is an overlay of changes to commit. -pub type DbState = sp_state_machine::TrieBackend< - Arc>>, HashFor ->; +pub type DbState = + sp_state_machine::TrieBackend>>, HashFor>; const DB_HASH_LEN: usize = 32; /// Hash type that this backend uses for the database. @@ -131,11 +139,7 @@ pub struct RefTrackingState { impl RefTrackingState { fn new(state: DbState, storage: Arc>, parent_hash: Option) -> Self { - RefTrackingState { - state, - parent_hash, - storage, - } + RefTrackingState { state, parent_hash, storage } } } @@ -154,7 +158,7 @@ impl std::fmt::Debug for RefTrackingState { } impl StateBackend> for RefTrackingState { - type Error = as StateBackend>>::Error; + type Error = as StateBackend>>::Error; type Transaction = as StateBackend>>::Transaction; type TrieBackendStorage = as StateBackend>>::TrieBackendStorage; @@ -214,7 +218,8 @@ impl StateBackend> for RefTrackingState { f: F, allow_missing: bool, ) -> Result { - self.state.apply_to_key_values_while(child_info, prefix, start_at, f, allow_missing) + self.state + .apply_to_key_values_while(child_info, prefix, start_at, f, allow_missing) } fn apply_to_keys_while bool>( @@ -237,16 +242,22 @@ impl StateBackend> for RefTrackingState { fn storage_root<'a>( &self, - delta: impl Iterator)>, - ) -> (B::Hash, Self::Transaction) where B::Hash: Ord { + delta: impl Iterator)>, + ) -> (B::Hash, Self::Transaction) + where + B::Hash: Ord, + { self.state.storage_root(delta) } fn child_storage_root<'a>( &self, child_info: &ChildInfo, - 
delta: impl Iterator)>, - ) -> (B::Hash, bool, Self::Transaction) where B::Hash: Ord { + delta: impl Iterator)>, + ) -> (B::Hash, bool, Self::Transaction) + where + B::Hash: Ord, + { self.state.child_storage_root(child_info, delta) } @@ -258,17 +269,13 @@ impl StateBackend> for RefTrackingState { self.state.keys(prefix) } - fn child_keys( - &self, - child_info: &ChildInfo, - prefix: &[u8], - ) -> Vec> { + fn child_keys(&self, child_info: &ChildInfo, prefix: &[u8]) -> Vec> { self.state.child_keys(child_info, prefix) } - fn as_trie_backend(&mut self) - -> Option<&sp_state_machine::TrieBackend>> - { + fn as_trie_backend( + &mut self, + ) -> Option<&sp_state_machine::TrieBackend>> { self.state.as_trie_backend() } @@ -432,7 +439,7 @@ pub struct BlockchainDb { impl BlockchainDb { fn new( db: Arc>, - transaction_storage: TransactionStorageMode + transaction_storage: TransactionStorageMode, ) -> ClientResult { let meta = read_meta::(&*db, columns::HEADER)?; let leaves = LeafSet::read_from_db(&*db, columns::META, meta_keys::LEAF_PREFIX)?; @@ -446,10 +453,7 @@ impl BlockchainDb { }) } - fn update_meta( - &self, - update: MetaUpdate, - ) { + fn update_meta(&self, update: MetaUpdate) { let MetaUpdate { hash, number, is_best, is_finalized, with_state } = update; let mut meta = self.meta.write(); if number.is_zero() { @@ -473,10 +477,9 @@ impl BlockchainDb { // Get block changes trie root, if available. 
fn changes_trie_root(&self, block: BlockId) -> ClientResult> { - self.header(block) - .map(|header| header.and_then(|header| - header.digest().log(DigestItem::as_changes_trie_root) - .cloned())) + self.header(block).map(|header| { + header.and_then(|header| header.digest().log(DigestItem::as_changes_trie_root).cloned()) + }) } } @@ -486,15 +489,15 @@ impl sc_client_api::blockchain::HeaderBackend for Blockcha BlockId::Hash(h) => { let mut cache = self.header_cache.lock(); if let Some(result) = cache.get_refresh(h) { - return Ok(result.clone()); + return Ok(result.clone()) } - let header = utils::read_header(&*self.db, columns::KEY_LOOKUP, columns::HEADER, id)?; + let header = + utils::read_header(&*self.db, columns::KEY_LOOKUP, columns::HEADER, id)?; cache_header(&mut cache, h.clone(), header.clone()); Ok(header) - } - BlockId::Number(_) => { - utils::read_header(&*self.db, columns::KEY_LOOKUP, columns::HEADER, id) - } + }, + BlockId::Number(_) => + utils::read_header(&*self.db, columns::KEY_LOOKUP, columns::HEADER, id), } } @@ -527,10 +530,11 @@ impl sc_client_api::blockchain::HeaderBackend for Blockcha } fn hash(&self, number: NumberFor) -> ClientResult> { - self.header(BlockId::Number(number)).and_then(|maybe_header| match maybe_header { - Some(header) => Ok(Some(header.hash().clone())), - None => Ok(None), - }) + self.header(BlockId::Number(number)) + .and_then(|maybe_header| match maybe_header { + Some(header) => Ok(Some(header.hash().clone())), + None => Ok(None), + }) } } @@ -543,40 +547,51 @@ impl sc_client_api::blockchain::Backend for BlockchainDb match Decode::decode(&mut &body[..]) { Ok(body) => Ok(Some(body)), - Err(err) => return Err(sp_blockchain::Error::Backend( - format!("Error decoding body: {}", err) - )), + Err(err) => + return Err(sp_blockchain::Error::Backend(format!( + "Error decoding body: {}", + err + ))), }, TransactionStorageMode::StorageChain => { match Vec::::decode(&mut &body[..]) { Ok(index) => { - let extrinsics: ClientResult> = 
index.into_iter().map( - | ExtrinsicHeader { indexed_hash, data } | { + let extrinsics: ClientResult> = index + .into_iter() + .map(|ExtrinsicHeader { indexed_hash, data }| { let decode_result = if indexed_hash != Default::default() { match self.db.get(columns::TRANSACTION, indexed_hash.as_ref()) { Some(t) => { - let mut input = utils::join_input(data.as_ref(), t.as_ref()); + let mut input = + utils::join_input(data.as_ref(), t.as_ref()); Block::Extrinsic::decode(&mut input) }, - None => return Err(sp_blockchain::Error::Backend( - format!("Missing indexed transaction {:?}", indexed_hash)) - ) + None => + return Err(sp_blockchain::Error::Backend(format!( + "Missing indexed transaction {:?}", + indexed_hash + ))), } } else { Block::Extrinsic::decode(&mut data.as_ref()) }; - decode_result.map_err(|err| sp_blockchain::Error::Backend( - format!("Error decoding extrinsic: {}", err)) - ) - } - ).collect(); + decode_result.map_err(|err| { + sp_blockchain::Error::Backend(format!( + "Error decoding extrinsic: {}", + err + )) + }) + }) + .collect(); Ok(Some(extrinsics?)) - } - Err(err) => return Err(sp_blockchain::Error::Backend( - format!("Error decoding body list: {}", err) - )), + }, + Err(err) => + return Err(sp_blockchain::Error::Backend(format!( + "Error decoding body list: {}", + err + ))), } - } + }, } } @@ -584,10 +599,12 @@ impl sc_client_api::blockchain::Backend for BlockchainDb match Decode::decode(&mut &justifications[..]) { Ok(justifications) => Ok(Some(justifications)), - Err(err) => return Err(sp_blockchain::Error::Backend( - format!("Error decoding justifications: {}", err) - )), - } + Err(err) => + return Err(sp_blockchain::Error::Backend(format!( + "Error decoding justifications: {}", + err + ))), + }, None => Ok(None), } } @@ -631,19 +648,23 @@ impl sc_client_api::blockchain::Backend for BlockchainDb transactions.push(t), - None => return Err(sp_blockchain::Error::Backend( - format!("Missing indexed transaction {:?}", indexed_hash)) - ) + None => + return 
Err(sp_blockchain::Error::Backend(format!( + "Missing indexed transaction {:?}", + indexed_hash + ))), } } } Ok(Some(transactions)) - } - Err(err) => return Err(sp_blockchain::Error::Backend( - format!("Error decoding body list: {}", err) - )), + }, + Err(err) => + return Err(sp_blockchain::Error::Backend(format!( + "Error decoding body list: {}", + err + ))), } - } + }, } } } @@ -657,17 +678,25 @@ impl sc_client_api::blockchain::ProvideCache for Blockchai impl HeaderMetadata for BlockchainDb { type Error = sp_blockchain::Error; - fn header_metadata(&self, hash: Block::Hash) -> Result, Self::Error> { - self.header_metadata_cache.header_metadata(hash).map_or_else(|| { - self.header(BlockId::hash(hash))?.map(|header| { - let header_metadata = CachedHeaderMetadata::from(&header); - self.header_metadata_cache.insert_header_metadata( - header_metadata.hash, - header_metadata.clone(), - ); - header_metadata - }).ok_or_else(|| ClientError::UnknownBlock(format!("header not found in db: {}", hash))) - }, Ok) + fn header_metadata( + &self, + hash: Block::Hash, + ) -> Result, Self::Error> { + self.header_metadata_cache.header_metadata(hash).map_or_else( + || { + self.header(BlockId::hash(hash))? 
+ .map(|header| { + let header_metadata = CachedHeaderMetadata::from(&header); + self.header_metadata_cache + .insert_header_metadata(header_metadata.hash, header_metadata.clone()); + header_metadata + }) + .ok_or_else(|| { + ClientError::UnknownBlock(format!("header not found in db: {}", hash)) + }) + }, + Ok, + ) } fn insert_header_metadata(&self, hash: Block::Hash, metadata: CachedHeaderMetadata) { @@ -701,8 +730,11 @@ impl ProvideChtRoots for BlockchainDb { }); cht::compute_root::, _>( - cht::size(), cht_number, cht_range.map(|num| self.hash(num)) - ).map(Some) + cht::size(), + cht_number, + cht_range.map(|num| self.hash(num)), + ) + .map(Some) } fn changes_trie_cht_root( @@ -728,7 +760,8 @@ impl ProvideChtRoots for BlockchainDb { cht::size(), cht_number, cht_range.map(|num| self.changes_trie_root(BlockId::Number(num))), - ).map(Some) + ) + .map(Some) } } @@ -759,8 +792,7 @@ impl BlockImportOperation { match value_operation { OffchainOverlayedChange::SetValue(val) => transaction.set_from_vec(columns::OFFCHAIN, &key, val), - OffchainOverlayedChange::Remove => - transaction.remove(columns::OFFCHAIN, &key), + OffchainOverlayedChange::Remove => transaction.remove(columns::OFFCHAIN, &key), } } @@ -778,18 +810,17 @@ impl BlockImportOperation { } } - fn apply_new_state( - &mut self, - storage: Storage, - ) -> ClientResult { + fn apply_new_state(&mut self, storage: Storage) -> ClientResult { if storage.top.keys().any(|k| well_known_keys::is_child_storage_key(&k)) { - return Err(sp_blockchain::Error::InvalidState.into()); + return Err(sp_blockchain::Error::InvalidState.into()) } - let child_delta = storage.children_default.iter().map(|(_storage_key, child_content)|( + let child_delta = storage.children_default.iter().map(|(_storage_key, child_content)| { + ( &child_content.child_info, child_content.data.iter().map(|(k, v)| (&k[..], Some(&v[..]))), - )); + ) + }); let mut changes_trie_config = None; let (root, transaction) = self.old_state.full_storage_root( @@ -799,7 
+830,7 @@ impl BlockImportOperation { } (&k[..], Some(&v[..])) }), - child_delta + child_delta, ); let changes_trie_config = match changes_trie_config { @@ -812,10 +843,11 @@ impl BlockImportOperation { self.changes_trie_config_update = Some(changes_trie_config); Ok(root) } - } -impl sc_client_api::backend::BlockImportOperation for BlockImportOperation { +impl sc_client_api::backend::BlockImportOperation + for BlockImportOperation +{ type State = SyncingCachingState, Block>; fn state(&self) -> ClientResult> { @@ -831,16 +863,13 @@ impl sc_client_api::backend::BlockImportOperation for Bloc leaf_state: NewBlockState, ) -> ClientResult<()> { assert!(self.pending_block.is_none(), "Only one block per operation is allowed"); - if let Some(changes_trie_config_update) = changes_tries_storage::extract_new_configuration(&header) { + if let Some(changes_trie_config_update) = + changes_tries_storage::extract_new_configuration(&header) + { self.changes_trie_config_update = Some(changes_trie_config_update.clone()); } - self.pending_block = Some(PendingBlock { - header, - body, - indexed_body, - justifications, - leaf_state, - }); + self.pending_block = + Some(PendingBlock { header, body, indexed_body, justifications, leaf_state }); Ok(()) } @@ -853,20 +882,13 @@ impl sc_client_api::backend::BlockImportOperation for Bloc Ok(()) } - fn reset_storage( - &mut self, - storage: Storage, - ) -> ClientResult { + fn reset_storage(&mut self, storage: Storage) -> ClientResult { let root = self.apply_new_state(storage)?; self.commit_state = true; Ok(root) } - fn set_genesis_state( - &mut self, - storage: Storage, - commit: bool, - ) -> ClientResult { + fn set_genesis_state(&mut self, storage: Storage, commit: bool) -> ClientResult { let root = self.apply_new_state(storage)?; self.commit_state = commit; Ok(root) @@ -882,7 +904,8 @@ impl sc_client_api::backend::BlockImportOperation for Bloc } fn insert_aux(&mut self, ops: I) -> ClientResult<()> - where I: IntoIterator, Option>)> + where + I: 
IntoIterator, Option>)>, { self.aux_ops.append(&mut ops.into_iter().collect()); Ok(()) @@ -961,10 +984,7 @@ struct DbGenesisStorage { impl DbGenesisStorage { pub fn new(root: Block::Hash, storage: PrefixedMemoryDB>) -> Self { - DbGenesisStorage { - root, - storage, - } + DbGenesisStorage { root, storage } } } @@ -1012,13 +1032,13 @@ pub(crate) struct FrozenForDuration { impl FrozenForDuration { fn new(duration: std::time::Duration) -> Self { - Self { - duration, - value: Frozen { at: std::time::Instant::now(), value: None }.into(), - } + Self { duration, value: Frozen { at: std::time::Instant::now(), value: None }.into() } } - fn take_or_else(&self, f: F) -> T where F: FnOnce() -> T { + fn take_or_else(&self, f: F) -> T + where + F: FnOnce() -> T, + { let mut lock = self.value.lock(); if lock.at.elapsed() > self.duration || lock.value.is_none() { let new_value = f(); @@ -1104,7 +1124,8 @@ impl Backend { config.state_pruning.clone(), !config.source.supports_ref_counting(), &StateMetaDb(&*db), - ).map_err(map_e)?; + ) + .map_err(map_e)?; let storage_db = StorageDb { db: db.clone(), state_db, @@ -1120,11 +1141,7 @@ impl Backend { columns::HEADER, columns::CACHE, meta, - if is_archive_pruning { - None - } else { - Some(MIN_BLOCKS_TO_KEEP_CHANGES_TRIES_FOR) - }, + if is_archive_pruning { None } else { Some(MIN_BLOCKS_TO_KEEP_CHANGES_TRIES_FOR) }, )?; let backend = Backend { @@ -1148,10 +1165,13 @@ impl Backend { // Older DB versions have no last state key. Check if the state is available and set it. 
let info = backend.blockchain.info(); - if info.finalized_state.is_none() - && info.finalized_hash != Default::default() - && sc_client_api::Backend::have_state_at(&backend, &info.finalized_hash, info.finalized_number) - { + if info.finalized_state.is_none() && + info.finalized_hash != Default::default() && + sc_client_api::Backend::have_state_at( + &backend, + &info.finalized_hash, + info.finalized_number, + ) { backend.blockchain.update_meta(MetaUpdate { hash: info.finalized_hash, number: info.finalized_number, @@ -1183,11 +1203,7 @@ impl Backend { // cannot find tree route with empty DB. if meta.best_hash != Default::default() { - let tree_route = sp_blockchain::tree_route( - &self.blockchain, - meta.best_hash, - route_to, - )?; + let tree_route = sp_blockchain::tree_route(&self.blockchain, meta.best_hash, route_to)?; // uncanonicalize: check safety violations and ensure the numbers no longer // point to these block hashes in the key mapping. @@ -1198,15 +1214,11 @@ impl Backend { (&r.number, &r.hash) ); - return Err(::sp_blockchain::Error::NotInFinalizedChain.into()); + return Err(::sp_blockchain::Error::NotInFinalizedChain.into()) } retracted.push(r.hash.clone()); - utils::remove_number_to_key_mapping( - transaction, - columns::KEY_LOOKUP, - r.number - )?; + utils::remove_number_to_key_mapping(transaction, columns::KEY_LOOKUP, r.number)?; } // canonicalize: set the number lookup to map to this block's hash. 
@@ -1216,7 +1228,7 @@ impl Backend { transaction, columns::KEY_LOOKUP, e.number, - e.hash + e.hash, )?; } } @@ -1238,11 +1250,15 @@ impl Backend { header: &Block::Header, last_finalized: Option, ) -> ClientResult<()> { - let last_finalized = last_finalized.unwrap_or_else(|| self.blockchain.meta.read().finalized_hash); + let last_finalized = + last_finalized.unwrap_or_else(|| self.blockchain.meta.read().finalized_hash); if *header.parent_hash() != last_finalized { - return Err(::sp_blockchain::Error::NonSequentialFinalization( - format!("Last finalized {:?} not parent of {:?}", last_finalized, header.hash()), - ).into()); + return Err(::sp_blockchain::Error::NonSequentialFinalization(format!( + "Last finalized {:?} not parent of {:?}", + last_finalized, + header.hash() + )) + .into()) } Ok(()) } @@ -1279,13 +1295,7 @@ impl Backend { Justifications::from(justification).encode(), ); } - Ok(MetaUpdate { - hash: *hash, - number, - is_best: false, - is_finalized: true, - with_state, - }) + Ok(MetaUpdate { hash: *hash, number, is_best: false, is_finalized: true, with_state }) } // performs forced canonicalization with a delay after importing a non-finalized block. @@ -1294,9 +1304,7 @@ impl Backend { transaction: &mut Transaction, hash: Block::Hash, number: NumberFor, - ) - -> ClientResult<()> - { + ) -> ClientResult<()> { let number_u64 = number.saturated_into::(); if number_u64 > self.canonicalization_delay { let new_canonical = number_u64 - self.canonicalization_delay; @@ -1310,29 +1318,28 @@ impl Backend { sc_client_api::blockchain::HeaderBackend::hash( &self.blockchain, new_canonical.saturated_into(), - )?.ok_or_else(|| sp_blockchain::Error::Backend(format!( - "Can't canonicalize missing block number #{} when importing {:?} (#{})", - new_canonical, - hash, - number, - )))? + )? + .ok_or_else(|| { + sp_blockchain::Error::Backend(format!( + "Can't canonicalize missing block number #{} when importing {:?} (#{})", + new_canonical, hash, number, + )) + })? 
}; if !sc_client_api::Backend::have_state_at(self, &hash, new_canonical.saturated_into()) { return Ok(()) } trace!(target: "db", "Canonicalize block #{} ({:?})", new_canonical, hash); - let commit = self.storage.state_db.canonicalize_block(&hash) - .map_err(|e: sc_state_db::Error| sp_blockchain::Error::from_state_db(e))?; + let commit = self.storage.state_db.canonicalize_block(&hash).map_err( + |e: sc_state_db::Error| sp_blockchain::Error::from_state_db(e), + )?; apply_state_commit(transaction, commit); } Ok(()) } - fn try_commit_operation( - &self, - mut operation: BlockImportOperation, - ) -> ClientResult<()> { + fn try_commit_operation(&self, mut operation: BlockImportOperation) -> ClientResult<()> { let mut transaction = Transaction::new(); let mut finalization_displaced_leaves = None; @@ -1362,12 +1369,12 @@ impl Backend { } let imported = if let Some(pending_block) = operation.pending_block { - let hash = pending_block.header.hash(); let parent_hash = *pending_block.header.parent_hash(); let number = pending_block.header.number().clone(); - let existing_header = number <= best_num && self.blockchain.header(BlockId::hash(hash))?.is_some(); + let existing_header = + number <= best_num && self.blockchain.header(BlockId::hash(hash))?.is_some(); // blocks are keyed by number + hash. 
let lookup_key = utils::number_and_hash_to_lookup_key(number, hash)?; @@ -1378,12 +1385,7 @@ impl Backend { (Default::default(), Default::default()) }; - utils::insert_hash_to_key_mapping( - &mut transaction, - columns::KEY_LOOKUP, - number, - hash, - )?; + utils::insert_hash_to_key_mapping(&mut transaction, columns::KEY_LOOKUP, number, hash)?; transaction.set_from_vec(columns::HEADER, &lookup_key, pending_block.header.encode()); if let Some(body) = pending_block.body { @@ -1392,7 +1394,8 @@ impl Backend { transaction.set_from_vec(columns::BODY, &lookup_key, body.encode()); }, TransactionStorageMode::StorageChain => { - let body = apply_index_ops::(&mut transaction, body, operation.index_ops); + let body = + apply_index_ops::(&mut transaction, body, operation.index_ops); transaction.set_from_vec(columns::BODY, &lookup_key, body); }, } @@ -1408,11 +1411,19 @@ impl Backend { } } if let Some(justifications) = pending_block.justifications { - transaction.set_from_vec(columns::JUSTIFICATIONS, &lookup_key, justifications.encode()); + transaction.set_from_vec( + columns::JUSTIFICATIONS, + &lookup_key, + justifications.encode(), + ); } if number.is_zero() { - transaction.set_from_vec(columns::META, meta_keys::FINALIZED_BLOCK, lookup_key.clone()); + transaction.set_from_vec( + columns::META, + meta_keys::FINALIZED_BLOCK, + lookup_key.clone(), + ); transaction.set(columns::META, meta_keys::GENESIS_HASH, hash.as_ref()); // for tests, because config is set from within the reset_storage @@ -1427,13 +1438,14 @@ impl Backend { // to bootstrap consensus. It is queried for an initial list of authorities, etc. 
*self.genesis_state.write() = Some(Arc::new(DbGenesisStorage::new( pending_block.header.state_root().clone(), - operation.db_updates.clone() + operation.db_updates.clone(), ))); } } let finalized = if operation.commit_state { - let mut changeset: sc_state_db::ChangeSet> = sc_state_db::ChangeSet::default(); + let mut changeset: sc_state_db::ChangeSet> = + sc_state_db::ChangeSet::default(); let mut ops: u64 = 0; let mut bytes: u64 = 0; let mut removal: u64 = 0; @@ -1441,7 +1453,7 @@ impl Backend { for (mut key, (val, rc)) in operation.db_updates.drain() { if !self.storage.prefix_keys { // Strip prefix - key.drain(0 .. key.len() - DB_HASH_LEN); + key.drain(0..key.len() - DB_HASH_LEN); }; if rc > 0 { ops += 1; @@ -1450,7 +1462,7 @@ impl Backend { changeset.inserted.push((key, val.to_vec())); } else { changeset.inserted.push((key.clone(), val.to_vec())); - for _ in 0 .. rc - 1 { + for _ in 0..rc - 1 { changeset.inserted.push((key.clone(), Default::default())); } } @@ -1460,7 +1472,7 @@ impl Backend { if rc == -1 { changeset.deleted.push(key); } else { - for _ in 0 .. 
-rc { + for _ in 0..-rc { changeset.deleted.push(key.clone()); } } @@ -1471,27 +1483,32 @@ impl Backend { let mut ops: u64 = 0; let mut bytes: u64 = 0; - for (key, value) in operation.storage_updates.iter() - .chain(operation.child_storage_updates.iter().flat_map(|(_, s)| s.iter())) { - ops += 1; - bytes += key.len() as u64; - if let Some(v) = value.as_ref() { - bytes += v.len() as u64; - } + for (key, value) in operation + .storage_updates + .iter() + .chain(operation.child_storage_updates.iter().flat_map(|(_, s)| s.iter())) + { + ops += 1; + bytes += key.len() as u64; + if let Some(v) = value.as_ref() { + bytes += v.len() as u64; + } } self.state_usage.tally_writes(ops, bytes); let number_u64 = number.saturated_into::(); - let commit = self.storage.state_db.insert_block( - &hash, - number_u64, - &pending_block.header.parent_hash(), - changeset, - ).map_err(|e: sc_state_db::Error| sp_blockchain::Error::from_state_db(e))?; + let commit = self + .storage + .state_db + .insert_block(&hash, number_u64, &pending_block.header.parent_hash(), changeset) + .map_err(|e: sc_state_db::Error| { + sp_blockchain::Error::from_state_db(e) + })?; apply_state_commit(&mut transaction, commit); if number <= last_finalized_num { // Canonicalize in the db when re-importing existing blocks with state. - let commit = self.storage.state_db.canonicalize_block(&hash) - .map_err(|e: sc_state_db::Error| sp_blockchain::Error::from_state_db(e))?; + let commit = self.storage.state_db.canonicalize_block(&hash).map_err( + |e: sc_state_db::Error| sp_blockchain::Error::from_state_db(e), + )?; apply_state_commit(&mut transaction, commit); meta_updates.push(MetaUpdate { hash, @@ -1502,7 +1519,6 @@ impl Backend { }); } - // Check if need to finalize. Genesis is always finalized instantly. let finalized = number_u64 == 0 || pending_block.leaf_state.is_final(); finalized @@ -1555,11 +1571,14 @@ impl Backend { self.force_delayed_canonicalize(&mut transaction, hash, *header.number())? 
} - let displaced_leaf = { let mut leaves = self.blockchain.leaves.write(); let displaced_leaf = leaves.import(hash, number, parent_hash); - leaves.prepare_transaction(&mut transaction, columns::META, meta_keys::LEAF_PREFIX); + leaves.prepare_transaction( + &mut transaction, + columns::META, + meta_keys::LEAF_PREFIX, + ); displaced_leaf }; @@ -1589,7 +1608,16 @@ impl Backend { with_state: operation.commit_state, }); - Some((pending_block.header, number, hash, enacted, retracted, displaced_leaf, is_best, cache)) + Some(( + pending_block.header, + number, + hash, + enacted, + retracted, + displaced_leaf, + is_best, + cache, + )) } else { None } @@ -1598,14 +1626,16 @@ impl Backend { }; let cache_update = if let Some(set_head) = operation.set_head { - if let Some(header) = sc_client_api::blockchain::HeaderBackend::header(&self.blockchain, set_head)? { + if let Some(header) = + sc_client_api::blockchain::HeaderBackend::header(&self.blockchain, set_head)? + { let number = header.number(); let hash = header.hash(); let (enacted, retracted) = self.set_head_with_transaction( &mut transaction, hash.clone(), - (number.clone(), hash.clone()) + (number.clone(), hash.clone()), )?; meta_updates.push(MetaUpdate { hash, @@ -1616,7 +1646,10 @@ impl Backend { }); Some((enacted, retracted)) } else { - return Err(sp_blockchain::Error::UnknownBlock(format!("Cannot set head {:?}", set_head))) + return Err(sp_blockchain::Error::UnknownBlock(format!( + "Cannot set head {:?}", + set_head + ))) } } else { None @@ -1636,13 +1669,11 @@ impl Backend { _displaced_leaf, is_best, mut cache, - )) = imported { + )) = imported + { trace!(target: "db", "DB Commit done {:?}", hash); let header_metadata = CachedHeaderMetadata::from(&header); - self.blockchain.insert_header_metadata( - header_metadata.hash, - header_metadata, - ); + self.blockchain.insert_header_metadata(header_metadata.hash, header_metadata); cache_header(&mut self.blockchain.header_cache.lock(), hash, Some(header)); cache.sync_cache( 
&enacted, @@ -1693,10 +1724,15 @@ impl Backend { transaction.set_from_vec(columns::META, meta_keys::FINALIZED_BLOCK, lookup_key); if sc_client_api::Backend::have_state_at(self, &f_hash, f_num) && - self.storage.state_db.best_canonical().map(|c| f_num.saturated_into::() > c).unwrap_or(true) + self.storage + .state_db + .best_canonical() + .map(|c| f_num.saturated_into::() > c) + .unwrap_or(true) { - let commit = self.storage.state_db.canonicalize_block(&f_hash) - .map_err(|e: sc_state_db::Error| sp_blockchain::Error::from_state_db(e))?; + let commit = self.storage.state_db.canonicalize_block(&f_hash).map_err( + |e: sc_state_db::Error| sp_blockchain::Error::from_state_db(e), + )?; apply_state_commit(transaction, commit); } @@ -1779,23 +1815,21 @@ impl Backend { TransactionStorageMode::BlockBody => {}, TransactionStorageMode::StorageChain => { match Vec::::decode(&mut &body[..]) { - Ok(body) => { + Ok(body) => for ExtrinsicHeader { indexed_hash, .. } in body { if indexed_hash != Default::default() { - transaction.release( - columns::TRANSACTION, - indexed_hash, - ); + transaction.release(columns::TRANSACTION, indexed_hash); } - } - } - Err(err) => return Err(sp_blockchain::Error::Backend( - format!("Error decoding body list: {}", err) - )), + }, + Err(err) => + return Err(sp_blockchain::Error::Backend(format!( + "Error decoding body list: {}", + err + ))), } - } + }, } - } + }, None => return Ok(()), } Ok(()) @@ -1805,22 +1839,20 @@ impl Backend { let root = EmptyStorage::::new().0; // Empty trie let db_state = DbState::::new(self.storage.clone(), root); let state = RefTrackingState::new(db_state, self.storage.clone(), None); - let caching_state = CachingState::new( - state, - self.shared_cache.clone(), - None, - ); + let caching_state = CachingState::new(state, self.shared_cache.clone(), None); Ok(SyncingCachingState::new( - caching_state, - self.state_usage.clone(), - self.blockchain.meta.clone(), - self.import_lock.clone(), + caching_state, + 
self.state_usage.clone(), + self.blockchain.meta.clone(), + self.import_lock.clone(), )) } } - -fn apply_state_commit(transaction: &mut Transaction, commit: sc_state_db::CommitSet>) { +fn apply_state_commit( + transaction: &mut Transaction, + commit: sc_state_db::CommitSet>, +) { for (key, val) in commit.data.inserted.into_iter() { transaction.set_from_vec(columns::STATE, &key[..], val); } @@ -1847,10 +1879,10 @@ fn apply_index_ops( match op { IndexOperation::Insert { extrinsic, hash, size } => { index_map.insert(extrinsic, (hash, size)); - } + }, IndexOperation::Renew { extrinsic, hash } => { renewed_map.insert(extrinsic, DbHash::from_slice(hash.as_ref())); - } + }, } } for (index, extrinsic) in body.into_iter().enumerate() { @@ -1858,10 +1890,7 @@ fn apply_index_ops( let extrinsic_header = if let Some(hash) = renewed_map.get(&(index as u32)) { // Bump ref counter transaction.reference(columns::TRANSACTION, DbHash::from_slice(hash.as_ref())); - ExtrinsicHeader { - indexed_hash: hash.clone(), - data: extrinsic, - } + ExtrinsicHeader { indexed_hash: hash.clone(), data: extrinsic } } else { match index_map.get(&(index as u32)) { Some((hash, size)) if *size as usize <= extrinsic.len() => { @@ -1876,12 +1905,7 @@ fn apply_index_ops( data: extrinsic[..offset].to_vec(), } }, - _ => { - ExtrinsicHeader { - indexed_hash: Default::default(), - data: extrinsic, - } - } + _ => ExtrinsicHeader { indexed_hash: Default::default(), data: extrinsic }, } }; extrinsic_headers.push(extrinsic_header); @@ -1895,28 +1919,28 @@ fn apply_index_ops( extrinsic_headers.encode() } -fn apply_indexed_body( - transaction: &mut Transaction, - body: Vec>, -) { +fn apply_indexed_body(transaction: &mut Transaction, body: Vec>) { for extrinsic in body { let hash = sp_runtime::traits::BlakeTwo256::hash(&extrinsic); - transaction.store( - columns::TRANSACTION, - DbHash::from_slice(hash.as_ref()), - extrinsic, - ); + transaction.store(columns::TRANSACTION, DbHash::from_slice(hash.as_ref()), extrinsic); 
} } -impl sc_client_api::backend::AuxStore for Backend where Block: BlockT { +impl sc_client_api::backend::AuxStore for Backend +where + Block: BlockT, +{ fn insert_aux< 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, delete: D) -> ClientResult<()> { + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> ClientResult<()> { let mut transaction = Transaction::new(); for (k, v) in insert { transaction.set(columns::AUX, k, v); @@ -1977,10 +2001,7 @@ impl sc_client_api::backend::Backend for Backend { Ok(()) } - fn commit_operation( - &self, - operation: Self::BlockImportOperation, - ) -> ClientResult<()> { + fn commit_operation(&self, operation: Self::BlockImportOperation) -> ClientResult<()> { let usage = operation.old_state.usage_info(); self.state_usage.merge_sm(usage); @@ -1992,7 +2013,7 @@ impl sc_client_api::backend::Backend for Backend { e @ Err(_) => { self.storage.state_db.revert_pending(); e - } + }, } } @@ -2037,23 +2058,22 @@ impl sc_client_api::backend::Backend for Backend { let last_finalized = self.blockchain.last_finalized()?; // We can do a quick check first, before doing a proper but more expensive check - if number > self.blockchain.info().finalized_number - || (hash != last_finalized && !is_descendent_of(&hash, &last_finalized)?) + if number > self.blockchain.info().finalized_number || + (hash != last_finalized && !is_descendent_of(&hash, &last_finalized)?) { - return Err(ClientError::NotInFinalizedChain); + return Err(ClientError::NotInFinalizedChain) } - let justifications = - if let Some(mut stored_justifications) = self.blockchain.justifications(block)? { - if !stored_justifications.append(justification) { - return Err(ClientError::BadJustification( - "Duplicate consensus engine ID".into() - )); - } - stored_justifications - } else { - Justifications::from(justification) - }; + let justifications = if let Some(mut stored_justifications) = + self.blockchain.justifications(block)? 
+ { + if !stored_justifications.append(justification) { + return Err(ClientError::BadJustification("Duplicate consensus engine ID".into())) + } + stored_justifications + } else { + Justifications::from(justification) + }; transaction.set_from_vec( columns::JUSTIFICATIONS, @@ -2075,25 +2095,20 @@ impl sc_client_api::backend::Backend for Backend { } fn usage_info(&self) -> Option { - let (io_stats, state_stats) = self.io_stats.take_or_else(|| + let (io_stats, state_stats) = self.io_stats.take_or_else(|| { ( // TODO: implement DB stats and cache size retrieval kvdb::IoStats::empty(), self.state_usage.take(), ) - ); + }); let database_cache = MemorySize::from_bytes(0); - let state_cache = MemorySize::from_bytes( - (*&self.shared_cache).read().used_storage_cache_size(), - ); + let state_cache = + MemorySize::from_bytes((*&self.shared_cache).read().used_storage_cache_size()); let state_db = self.storage.state_db.memory_info(); Some(UsageInfo { - memory: MemoryInfo { - state_cache, - database_cache, - state_db, - }, + memory: MemoryInfo { state_cache, database_cache, state_db }, io: IoInfo { transactions: io_stats.transactions, bytes_read: io_stats.bytes_read, @@ -2123,29 +2138,31 @@ impl sc_client_api::backend::Backend for Backend { let finalized = self.blockchain.info().finalized_number; let revertible = best_number - finalized; - let n = if !revert_finalized && revertible < n { - revertible - } else { - n - }; + let n = if !revert_finalized && revertible < n { revertible } else { n }; let mut revert_blocks = || -> ClientResult> { - for c in 0 .. n.saturated_into::() { + for c in 0..n.saturated_into::() { if best_number.is_zero() { return Ok(c.saturated_into::>()) } let mut transaction = Transaction::new(); let removed_number = best_number; - let removed = self.blockchain.header(BlockId::Number(best_number))?.ok_or_else( - || sp_blockchain::Error::UnknownBlock( - format!("Error reverting to {}. 
Block hash not found.", best_number)))?; + let removed = + self.blockchain.header(BlockId::Number(best_number))?.ok_or_else(|| { + sp_blockchain::Error::UnknownBlock(format!( + "Error reverting to {}. Block hash not found.", + best_number + )) + })?; let removed_hash = removed.hash(); let prev_number = best_number.saturating_sub(One::one()); - let prev_hash = self.blockchain.hash(prev_number)?.ok_or_else( - || sp_blockchain::Error::UnknownBlock( - format!("Error reverting to {}. Block hash not found.", best_number)) - )?; + let prev_hash = self.blockchain.hash(prev_number)?.ok_or_else(|| { + sp_blockchain::Error::UnknownBlock(format!( + "Error reverting to {}. Block hash not found.", + best_number + )) + })?; if !self.have_state_at(&prev_hash, prev_number) { return Ok(c.saturated_into::>()) @@ -2160,41 +2177,49 @@ impl sc_client_api::backend::Backend for Backend { let update_finalized = best_number < finalized; - let key = utils::number_and_hash_to_lookup_key(best_number.clone(), &best_hash)?; + let key = + utils::number_and_hash_to_lookup_key(best_number.clone(), &best_hash)?; let changes_trie_cache_ops = self.changes_tries_storage.revert( &mut transaction, - &cache::ComplexBlockId::new( - removed.hash(), - removed_number, - ), + &cache::ComplexBlockId::new(removed.hash(), removed_number), )?; if update_finalized { transaction.set_from_vec( columns::META, meta_keys::FINALIZED_BLOCK, - key.clone() + key.clone(), ); reverted_finalized.insert(removed_hash); if let Some((hash, _)) = self.blockchain.info().finalized_state { if hash == best_hash { - if !best_number.is_zero() - && self.have_state_at(&prev_hash, best_number - One::one()) + if !best_number.is_zero() && + self.have_state_at(&prev_hash, best_number - One::one()) { let lookup_key = utils::number_and_hash_to_lookup_key( best_number - One::one(), - prev_hash + prev_hash, )?; - transaction.set_from_vec(columns::META, meta_keys::FINALIZED_STATE, lookup_key); + transaction.set_from_vec( + columns::META, + 
meta_keys::FINALIZED_STATE, + lookup_key, + ); } else { - transaction.remove(columns::META, meta_keys::FINALIZED_STATE); + transaction + .remove(columns::META, meta_keys::FINALIZED_STATE); } } } } transaction.set_from_vec(columns::META, meta_keys::BEST_BLOCK, key); transaction.remove(columns::KEY_LOOKUP, removed.hash().as_ref()); - children::remove_children(&mut transaction, columns::META, meta_keys::CHILDREN_PREFIX, best_hash); + children::remove_children( + &mut transaction, + columns::META, + meta_keys::CHILDREN_PREFIX, + best_hash, + ); self.storage.db.commit(transaction)?; self.changes_tries_storage.post_commit(Some(changes_trie_cache_ops)); self.blockchain.update_meta(MetaUpdate { @@ -2202,10 +2227,10 @@ impl sc_client_api::backend::Backend for Backend { number: best_number, is_best: true, is_finalized: update_finalized, - with_state: false + with_state: false, }); - } - None => return Ok(c.saturated_into::>()) + }, + None => return Ok(c.saturated_into::>()), } } @@ -2230,36 +2255,27 @@ impl sc_client_api::backend::Backend for Backend { Ok((reverted, reverted_finalized)) } - fn remove_leaf_block( - &self, - hash: &Block::Hash, - ) -> ClientResult<()> { + fn remove_leaf_block(&self, hash: &Block::Hash) -> ClientResult<()> { let best_hash = self.blockchain.info().best_hash; if best_hash == *hash { - return Err( - sp_blockchain::Error::Backend( - format!("Can't remove best block {:?}", hash) - ) - ) + return Err(sp_blockchain::Error::Backend(format!("Can't remove best block {:?}", hash))) } let hdr = self.blockchain.header_metadata(hash.clone())?; if !self.have_state_at(&hash, hdr.number) { - return Err( - sp_blockchain::Error::UnknownBlock( - format!("State already discarded for {:?}", hash) - ) - ) + return Err(sp_blockchain::Error::UnknownBlock(format!( + "State already discarded for {:?}", + hash + ))) } let mut leaves = self.blockchain.leaves.write(); if !leaves.contains(hdr.number, *hash) { - return Err( - sp_blockchain::Error::Backend( - format!("Can't 
remove non-leaf block {:?}", hash) - ) - ) + return Err(sp_blockchain::Error::Backend(format!( + "Can't remove non-leaf block {:?}", + hash + ))) } let mut transaction = Transaction::new(); @@ -2267,13 +2283,9 @@ impl sc_client_api::backend::Backend for Backend { apply_state_commit(&mut transaction, commit); } transaction.remove(columns::KEY_LOOKUP, hash.as_ref()); - let changes_trie_cache_ops = self.changes_tries_storage.revert( - &mut transaction, - &cache::ComplexBlockId::new( - *hash, - hdr.number, - ), - )?; + let changes_trie_cache_ops = self + .changes_tries_storage + .revert(&mut transaction, &cache::ComplexBlockId::new(*hash, hdr.number))?; self.changes_tries_storage.post_commit(Some(changes_trie_cache_ops)); leaves.revert(hash.clone(), hdr.number); @@ -2300,11 +2312,7 @@ impl sc_client_api::backend::Backend for Backend { let root = genesis_state.root.clone(); let db_state = DbState::::new(genesis_state.clone(), root); let state = RefTrackingState::new(db_state, self.storage.clone(), None); - let caching_state = CachingState::new( - state, - self.shared_cache.clone(), - None, - ); + let caching_state = CachingState::new(state, self.shared_cache.clone(), None); let mut state = SyncingCachingState::new( caching_state, self.state_usage.clone(), @@ -2318,33 +2326,26 @@ impl sc_client_api::backend::Backend for Backend { let hash = match block { BlockId::Hash(h) => h, - BlockId::Number(n) => self.blockchain.hash(n)?.ok_or_else(|| + BlockId::Number(n) => self.blockchain.hash(n)?.ok_or_else(|| { sp_blockchain::Error::UnknownBlock(format!("Unknown block number {}", n)) - )?, + })?, }; match self.blockchain.header_metadata(hash) { Ok(ref hdr) => { if !self.have_state_at(&hash, hdr.number) { - return Err( - sp_blockchain::Error::UnknownBlock( - format!("State already discarded for {:?}", block) - ) - ) + return Err(sp_blockchain::Error::UnknownBlock(format!( + "State already discarded for {:?}", + block + ))) } if let Ok(()) = self.storage.state_db.pin(&hash) { let 
root = hdr.state_root; let db_state = DbState::::new(self.storage.clone(), root); - let state = RefTrackingState::new( - db_state, - self.storage.clone(), - Some(hash.clone()), - ); - let caching_state = CachingState::new( - state, - self.shared_cache.clone(), - Some(hash), - ); + let state = + RefTrackingState::new(db_state, self.storage.clone(), Some(hash.clone())); + let caching_state = + CachingState::new(state, self.shared_cache.clone(), Some(hash)); Ok(SyncingCachingState::new( caching_state, self.state_usage.clone(), @@ -2352,11 +2353,10 @@ impl sc_client_api::backend::Backend for Backend { self.import_lock.clone(), )) } else { - Err( - sp_blockchain::Error::UnknownBlock( - format!("State already discarded for {:?}", block) - ) - ) + Err(sp_blockchain::Error::UnknownBlock(format!( + "State already discarded for {:?}", + block + ))) } }, Err(e) => Err(e), @@ -2366,13 +2366,13 @@ impl sc_client_api::backend::Backend for Backend { fn have_state_at(&self, hash: &Block::Hash, number: NumberFor) -> bool { if self.is_archive { match self.blockchain.header_metadata(hash.clone()) { - Ok(header) => { - sp_state_machine::Storage::get( - self.storage.as_ref(), - &header.state_root, - (&[], None), - ).unwrap_or(None).is_some() - }, + Ok(header) => sp_state_machine::Storage::get( + self.storage.as_ref(), + &header.state_root, + (&[], None), + ) + .unwrap_or(None) + .is_some(), _ => false, } } else { @@ -2389,18 +2389,22 @@ impl sc_client_api::backend::LocalBackend for Backend::default(); { - let mut trie = TrieDBMut::::new( - &mut changes_trie_update, - &mut changes_root - ); + let mut trie = + TrieDBMut::::new(&mut changes_trie_update, &mut changes_root); for (key, value) in changes { trie.insert(&key, &value).unwrap(); } @@ -2471,7 +2473,8 @@ pub(crate) mod tests { if let Some(index) = transaction_index { op.update_transaction_index(index).unwrap(); } - op.update_changes_trie((changes_trie_update, ChangesTrieCacheAction::Clear)).unwrap(); + 
op.update_changes_trie((changes_trie_update, ChangesTrieCacheAction::Clear)) + .unwrap(); backend.commit_operation(op).unwrap(); header_hash @@ -2505,13 +2508,8 @@ pub(crate) mod tests { extrinsics_root: Default::default(), }; - op.set_block_data( - header, - Some(vec![]), - None, - None, - NewBlockState::Best, - ).unwrap(); + op.set_block_data(header, Some(vec![]), None, None, NewBlockState::Best) + .unwrap(); db.commit_operation(op).unwrap(); } @@ -2520,14 +2518,18 @@ pub(crate) mod tests { db.storage.db.clone() }; - let backend = Backend::::new(DatabaseSettings { - state_cache_size: 16777216, - state_cache_child_ratio: Some((50, 100)), - state_pruning: PruningMode::keep_blocks(1), - source: DatabaseSettingsSrc::Custom(backing), - keep_blocks: KeepBlocks::All, - transaction_storage: TransactionStorageMode::BlockBody, - }, 0).unwrap(); + let backend = Backend::::new( + DatabaseSettings { + state_cache_size: 16777216, + state_cache_child_ratio: Some((50, 100)), + state_pruning: PruningMode::keep_blocks(1), + source: DatabaseSettingsSrc::Custom(backing), + keep_blocks: KeepBlocks::All, + transaction_storage: TransactionStorageMode::BlockBody, + }, + 0, + ) + .unwrap(); assert_eq!(backend.blockchain().info().best_number, 9); for i in 0..10 { assert!(backend.blockchain().hash(i).unwrap().is_some()) @@ -2547,28 +2549,22 @@ pub(crate) mod tests { extrinsics_root: Default::default(), }; - let storage = vec![ - (vec![1, 3, 5], vec![2, 4, 6]), - (vec![1, 2, 3], vec![9, 9, 9]), - ]; + let storage = vec![(vec![1, 3, 5], vec![2, 4, 6]), (vec![1, 2, 3], vec![9, 9, 9])]; - header.state_root = op.old_state.storage_root(storage - .iter() - .map(|(x, y)| (&x[..], Some(&y[..]))) - ).0.into(); + header.state_root = op + .old_state + .storage_root(storage.iter().map(|(x, y)| (&x[..], Some(&y[..])))) + .0 + .into(); let hash = header.hash(); op.reset_storage(Storage { top: storage.into_iter().collect(), children_default: Default::default(), - }).unwrap(); - op.set_block_data( - 
header.clone(), - Some(vec![]), - None, - None, - NewBlockState::Best, - ).unwrap(); + }) + .unwrap(); + op.set_block_data(header.clone(), Some(vec![]), None, None, NewBlockState::Best) + .unwrap(); db.commit_operation(op).unwrap(); @@ -2592,26 +2588,17 @@ pub(crate) mod tests { extrinsics_root: Default::default(), }; - let storage = vec![ - (vec![1, 3, 5], None), - (vec![5, 5, 5], Some(vec![4, 5, 6])), - ]; + let storage = vec![(vec![1, 3, 5], None), (vec![5, 5, 5], Some(vec![4, 5, 6]))]; - let (root, overlay) = op.old_state.storage_root( - storage.iter() - .map(|(k, v)| (&k[..], v.as_ref().map(|v| &v[..]))) - ); + let (root, overlay) = op + .old_state + .storage_root(storage.iter().map(|(k, v)| (&k[..], v.as_ref().map(|v| &v[..])))); op.update_db_storage(overlay).unwrap(); header.state_root = root.into(); op.update_storage(storage, Vec::new()).unwrap(); - op.set_block_data( - header, - Some(vec![]), - None, - None, - NewBlockState::Best, - ).unwrap(); + op.set_block_data(header, Some(vec![]), None, None, NewBlockState::Best) + .unwrap(); db.commit_operation(op).unwrap(); @@ -2631,7 +2618,9 @@ pub(crate) mod tests { let hash = { let mut op = backend.begin_operation().unwrap(); - backend.begin_state_operation(&mut op, BlockId::Hash(Default::default())).unwrap(); + backend + .begin_state_operation(&mut op, BlockId::Hash(Default::default())) + .unwrap(); let mut header = Header { number: 0, parent_hash: Default::default(), @@ -2646,22 +2635,22 @@ pub(crate) mod tests { op.reset_storage(Storage { top: Default::default(), children_default: Default::default(), - }).unwrap(); + }) + .unwrap(); key = op.db_updates.insert(EMPTY_PREFIX, b"hello"); - op.set_block_data( - header, - Some(vec![]), - None, - None, - NewBlockState::Best, - ).unwrap(); + op.set_block_data(header, Some(vec![]), None, None, NewBlockState::Best) + .unwrap(); backend.commit_operation(op).unwrap(); - assert_eq!(backend.storage.db.get( - columns::STATE, - &sp_trie::prefixed_key::(&key, EMPTY_PREFIX) - 
).unwrap(), &b"hello"[..]); + assert_eq!( + backend + .storage + .db + .get(columns::STATE, &sp_trie::prefixed_key::(&key, EMPTY_PREFIX)) + .unwrap(), + &b"hello"[..] + ); hash }; @@ -2678,28 +2667,27 @@ pub(crate) mod tests { let storage: Vec<(_, _)> = vec![]; - header.state_root = op.old_state.storage_root(storage - .iter() - .cloned() - .map(|(x, y)| (x, Some(y))) - ).0.into(); + header.state_root = op + .old_state + .storage_root(storage.iter().cloned().map(|(x, y)| (x, Some(y)))) + .0 + .into(); let hash = header.hash(); op.db_updates.insert(EMPTY_PREFIX, b"hello"); op.db_updates.remove(&key, EMPTY_PREFIX); - op.set_block_data( - header, - Some(vec![]), - None, - None, - NewBlockState::Best, - ).unwrap(); + op.set_block_data(header, Some(vec![]), None, None, NewBlockState::Best) + .unwrap(); backend.commit_operation(op).unwrap(); - assert_eq!(backend.storage.db.get( - columns::STATE, - &sp_trie::prefixed_key::(&key, EMPTY_PREFIX) - ).unwrap(), &b"hello"[..]); + assert_eq!( + backend + .storage + .db + .get(columns::STATE, &sp_trie::prefixed_key::(&key, EMPTY_PREFIX)) + .unwrap(), + &b"hello"[..] 
+ ); hash }; @@ -2716,28 +2704,24 @@ pub(crate) mod tests { let storage: Vec<(_, _)> = vec![]; - header.state_root = op.old_state.storage_root(storage - .iter() - .cloned() - .map(|(x, y)| (x, Some(y))) - ).0.into(); + header.state_root = op + .old_state + .storage_root(storage.iter().cloned().map(|(x, y)| (x, Some(y)))) + .0 + .into(); let hash = header.hash(); op.db_updates.remove(&key, EMPTY_PREFIX); - op.set_block_data( - header, - Some(vec![]), - None, - None, - NewBlockState::Best, - ).unwrap(); + op.set_block_data(header, Some(vec![]), None, None, NewBlockState::Best) + .unwrap(); backend.commit_operation(op).unwrap(); - assert!(backend.storage.db.get( - columns::STATE, - &sp_trie::prefixed_key::(&key, EMPTY_PREFIX) - ).is_some()); + assert!(backend + .storage + .db + .get(columns::STATE, &sp_trie::prefixed_key::(&key, EMPTY_PREFIX)) + .is_some()); hash }; @@ -2754,34 +2738,31 @@ pub(crate) mod tests { let storage: Vec<(_, _)> = vec![]; - header.state_root = op.old_state.storage_root(storage - .iter() - .cloned() - .map(|(x, y)| (x, Some(y))) - ).0.into(); + header.state_root = op + .old_state + .storage_root(storage.iter().cloned().map(|(x, y)| (x, Some(y)))) + .0 + .into(); - op.set_block_data( - header, - Some(vec![]), - None, - None, - NewBlockState::Best, - ).unwrap(); + op.set_block_data(header, Some(vec![]), None, None, NewBlockState::Best) + .unwrap(); backend.commit_operation(op).unwrap(); - assert!(backend.storage.db.get( - columns::STATE, - &sp_trie::prefixed_key::(&key, EMPTY_PREFIX) - ).is_none()); + assert!(backend + .storage + .db + .get(columns::STATE, &sp_trie::prefixed_key::(&key, EMPTY_PREFIX)) + .is_none()); } backend.finalize_block(BlockId::Number(1), None).unwrap(); backend.finalize_block(BlockId::Number(2), None).unwrap(); backend.finalize_block(BlockId::Number(3), None).unwrap(); - assert!(backend.storage.db.get( - columns::STATE, - &sp_trie::prefixed_key::(&key, EMPTY_PREFIX) - ).is_none()); + assert!(backend + .storage + .db + 
.get(columns::STATE, &sp_trie::prefixed_key::(&key, EMPTY_PREFIX)) + .is_none()); } #[test] @@ -2803,8 +2784,14 @@ pub(crate) mod tests { let tree_route = tree_route(blockchain, a3, b2).unwrap(); assert_eq!(tree_route.common_block().hash, block0); - assert_eq!(tree_route.retracted().iter().map(|r| r.hash).collect::>(), vec![a3, a2, a1]); - assert_eq!(tree_route.enacted().iter().map(|r| r.hash).collect::>(), vec![b1, b2]); + assert_eq!( + tree_route.retracted().iter().map(|r| r.hash).collect::>(), + vec![a3, a2, a1] + ); + assert_eq!( + tree_route.enacted().iter().map(|r| r.hash).collect::>(), + vec![b1, b2] + ); } { @@ -2812,14 +2799,20 @@ pub(crate) mod tests { assert_eq!(tree_route.common_block().hash, a1); assert!(tree_route.retracted().is_empty()); - assert_eq!(tree_route.enacted().iter().map(|r| r.hash).collect::>(), vec![a2, a3]); + assert_eq!( + tree_route.enacted().iter().map(|r| r.hash).collect::>(), + vec![a2, a3] + ); } { let tree_route = tree_route(blockchain, a3, a1).unwrap(); assert_eq!(tree_route.common_block().hash, a1); - assert_eq!(tree_route.retracted().iter().map(|r| r.hash).collect::>(), vec![a3, a2]); + assert_eq!( + tree_route.retracted().iter().map(|r| r.hash).collect::>(), + vec![a3, a2] + ); assert!(tree_route.enacted().is_empty()); } @@ -2845,7 +2838,10 @@ pub(crate) mod tests { assert_eq!(tree_route.common_block().hash, block0); assert!(tree_route.retracted().is_empty()); - assert_eq!(tree_route.enacted().iter().map(|r| r.hash).collect::>(), vec![block1]); + assert_eq!( + tree_route.enacted().iter().map(|r| r.hash).collect::>(), + vec![block1] + ); } } @@ -2943,20 +2939,25 @@ pub(crate) mod tests { #[test] fn test_leaves_with_complex_block_tree() { - let backend: Arc> = Arc::new(Backend::new_test(20, 20)); + let backend: Arc> = + Arc::new(Backend::new_test(20, 20)); substrate_test_runtime_client::trait_tests::test_leaves_for_backend(backend); } #[test] fn test_children_with_complex_block_tree() { - let backend: Arc> = 
Arc::new(Backend::new_test(20, 20)); + let backend: Arc> = + Arc::new(Backend::new_test(20, 20)); substrate_test_runtime_client::trait_tests::test_children_for_backend(backend); } #[test] fn test_blockchain_query_by_number_gets_canonical() { - let backend: Arc> = Arc::new(Backend::new_test(20, 20)); - substrate_test_runtime_client::trait_tests::test_blockchain_query_by_number_gets_canonical(backend); + let backend: Arc> = + Arc::new(Backend::new_test(20, 20)); + substrate_test_runtime_client::trait_tests::test_blockchain_query_by_number_gets_canonical( + backend, + ); } #[test] @@ -2974,7 +2975,10 @@ pub(crate) mod tests { let block2_b = insert_header(&backend, 2, block1_b, None, Default::default()); let block2_c = insert_header(&backend, 2, block1_b, None, [1; 32].into()); - assert_eq!(backend.blockchain().leaves().unwrap(), vec![block2_a, block2_b, block2_c, block1_c]); + assert_eq!( + backend.blockchain().leaves().unwrap(), + vec![block2_a, block2_b, block2_c, block1_c] + ); backend.finalize_block(BlockId::hash(block1_a), None).unwrap(); backend.finalize_block(BlockId::hash(block2_a), None).unwrap(); @@ -2985,7 +2989,8 @@ pub(crate) mod tests { #[test] fn test_aux() { - let backend: Backend = Backend::new_test(0, 0); + let backend: Backend = + Backend::new_test(0, 0); assert!(backend.get_aux(b"test").unwrap().is_none()); backend.insert_aux(&[(&b"test"[..], &b"hello"[..])], &[]).unwrap(); assert_eq!(b"hello", &backend.get_aux(b"test").unwrap().unwrap()[..]); @@ -2995,7 +3000,7 @@ pub(crate) mod tests { #[test] fn test_finalize_block_with_justification() { - use sc_client_api::blockchain::{Backend as BlockChainBackend}; + use sc_client_api::blockchain::Backend as BlockChainBackend; let backend = Backend::::new_test(10, 10); @@ -3013,7 +3018,7 @@ pub(crate) mod tests { #[test] fn test_append_justification_to_finalized_block() { - use sc_client_api::blockchain::{Backend as BlockChainBackend}; + use sc_client_api::blockchain::Backend as BlockChainBackend; let backend 
= Backend::::new_test(10, 10); @@ -3021,10 +3026,7 @@ pub(crate) mod tests { let _ = insert_header(&backend, 1, block0, None, Default::default()); let just0 = (CONS0_ENGINE_ID, vec![1, 2, 3]); - backend.finalize_block( - BlockId::Number(1), - Some(just0.clone().into()), - ).unwrap(); + backend.finalize_block(BlockId::Number(1), Some(just0.clone().into())).unwrap(); let just1 = (CONS1_ENGINE_ID, vec![4, 5]); backend.append_justification(BlockId::Number(1), just1.clone()).unwrap(); @@ -3077,7 +3079,9 @@ pub(crate) mod tests { let hash0 = { let mut op = backend.begin_operation().unwrap(); - backend.begin_state_operation(&mut op, BlockId::Hash(Default::default())).unwrap(); + backend + .begin_state_operation(&mut op, BlockId::Hash(Default::default())) + .unwrap(); let mut header = Header { number: 0, parent_hash: Default::default(), @@ -3088,30 +3092,28 @@ pub(crate) mod tests { let storage = vec![(b"test".to_vec(), b"test".to_vec())]; - header.state_root = op.old_state.storage_root(storage - .iter() - .map(|(x, y)| (&x[..], Some(&y[..]))) - ).0.into(); + header.state_root = op + .old_state + .storage_root(storage.iter().map(|(x, y)| (&x[..], Some(&y[..])))) + .0 + .into(); let hash = header.hash(); op.reset_storage(Storage { top: storage.into_iter().collect(), children_default: Default::default(), - }).unwrap(); - op.set_block_data( - header.clone(), - Some(vec![]), - None, - None, - NewBlockState::Best, - ).unwrap(); + }) + .unwrap(); + op.set_block_data(header.clone(), Some(vec![]), None, None, NewBlockState::Best) + .unwrap(); backend.commit_operation(op).unwrap(); hash }; - let block0_hash = backend.state_at(BlockId::Hash(hash0)) + let block0_hash = backend + .state_at(BlockId::Hash(hash0)) .unwrap() .storage_hash(&b"test"[..]) .unwrap(); @@ -3129,22 +3131,16 @@ pub(crate) mod tests { let storage = vec![(b"test".to_vec(), Some(b"test2".to_vec()))]; - let (root, overlay) = op.old_state.storage_root( - storage.iter() - .map(|(k, v)| (&k[..], v.as_ref().map(|v| 
&v[..]))) - ); + let (root, overlay) = op + .old_state + .storage_root(storage.iter().map(|(k, v)| (&k[..], v.as_ref().map(|v| &v[..])))); op.update_db_storage(overlay).unwrap(); header.state_root = root.into(); let hash = header.hash(); op.update_storage(storage, Vec::new()).unwrap(); - op.set_block_data( - header, - Some(vec![]), - None, - None, - NewBlockState::Normal, - ).unwrap(); + op.set_block_data(header, Some(vec![]), None, None, NewBlockState::Normal) + .unwrap(); backend.commit_operation(op).unwrap(); @@ -3159,7 +3155,8 @@ pub(crate) mod tests { backend.commit_operation(op).unwrap(); } - let block1_hash = backend.state_at(BlockId::Hash(hash1)) + let block1_hash = backend + .state_at(BlockId::Hash(hash1)) .unwrap() .storage_hash(&b"test"[..]) .unwrap(); @@ -3189,7 +3186,8 @@ pub(crate) mod tests { let backend = Backend::::new_test(10, 10); // insert 1 + SIZE + SIZE + 1 blocks so that CHT#0 is created - let mut prev_hash = insert_header(&backend, 0, Default::default(), None, Default::default()); + let mut prev_hash = + insert_header(&backend, 0, Default::default(), None, Default::default()); let cht_size: u64 = cht::size(); for i in 1..1 + cht_size + cht_size + 1 { prev_hash = insert_header(&backend, i, prev_hash, None, Default::default()); @@ -3197,12 +3195,18 @@ pub(crate) mod tests { let blockchain = backend.blockchain(); - let cht_root_1 = blockchain.header_cht_root(cht_size, cht::start_number(cht_size, 0)) - .unwrap().unwrap(); - let cht_root_2 = blockchain.header_cht_root(cht_size, cht::start_number(cht_size, 0) + cht_size / 2) - .unwrap().unwrap(); - let cht_root_3 = blockchain.header_cht_root(cht_size, cht::end_number(cht_size, 0)) - .unwrap().unwrap(); + let cht_root_1 = blockchain + .header_cht_root(cht_size, cht::start_number(cht_size, 0)) + .unwrap() + .unwrap(); + let cht_root_2 = blockchain + .header_cht_root(cht_size, cht::start_number(cht_size, 0) + cht_size / 2) + .unwrap() + .unwrap(); + let cht_root_3 = blockchain + 
.header_cht_root(cht_size, cht::end_number(cht_size, 0)) + .unwrap() + .unwrap(); assert_eq!(cht_root_1, cht_root_2); assert_eq!(cht_root_2, cht_root_3); } @@ -3213,8 +3217,16 @@ pub(crate) mod tests { let backend = Backend::::new_test_with_tx_storage(2, 0, *storage); let mut blocks = Vec::new(); let mut prev_hash = Default::default(); - for i in 0 .. 5 { - let hash = insert_block(&backend, i, prev_hash, None, Default::default(), vec![i.into()], None); + for i in 0..5 { + let hash = insert_block( + &backend, + i, + prev_hash, + None, + Default::default(), + vec![i.into()], + None, + ); blocks.push(hash); prev_hash = hash; } @@ -3222,7 +3234,7 @@ pub(crate) mod tests { { let mut op = backend.begin_operation().unwrap(); backend.begin_state_operation(&mut op, BlockId::Hash(blocks[4])).unwrap(); - for i in 1 .. 5 { + for i in 1..5 { op.mark_finalized(BlockId::Hash(blocks[i]), None).unwrap(); } backend.commit_operation(op).unwrap(); @@ -3238,15 +3250,20 @@ pub(crate) mod tests { #[test] fn prune_blocks_on_finalize_with_fork() { - let backend = Backend::::new_test_with_tx_storage( - 2, - 10, - TransactionStorageMode::StorageChain - ); + let backend = + Backend::::new_test_with_tx_storage(2, 10, TransactionStorageMode::StorageChain); let mut blocks = Vec::new(); let mut prev_hash = Default::default(); - for i in 0 .. 
5 { - let hash = insert_block(&backend, i, prev_hash, None, Default::default(), vec![i.into()], None); + for i in 0..5 { + let hash = insert_block( + &backend, + i, + prev_hash, + None, + Default::default(), + vec![i.into()], + None, + ); blocks.push(hash); prev_hash = hash; } @@ -3259,15 +3276,23 @@ pub(crate) mod tests { None, sp_core::H256::random(), vec![2.into()], - None + None, + ); + insert_block( + &backend, + 3, + fork_hash_root, + None, + H256::random(), + vec![3.into(), 11.into()], + None, ); - insert_block(&backend, 3, fork_hash_root, None, H256::random(), vec![3.into(), 11.into()], None); let mut op = backend.begin_operation().unwrap(); backend.begin_state_operation(&mut op, BlockId::Hash(blocks[4])).unwrap(); op.mark_head(BlockId::Hash(blocks[4])).unwrap(); backend.commit_operation(op).unwrap(); - for i in 1 .. 5 { + for i in 1..5 { let mut op = backend.begin_operation().unwrap(); backend.begin_state_operation(&mut op, BlockId::Hash(blocks[4])).unwrap(); op.mark_finalized(BlockId::Hash(blocks[i]), None).unwrap(); @@ -3284,16 +3309,13 @@ pub(crate) mod tests { #[test] fn renew_transaction_storage() { - let backend = Backend::::new_test_with_tx_storage( - 2, - 10, - TransactionStorageMode::StorageChain - ); + let backend = + Backend::::new_test_with_tx_storage(2, 10, TransactionStorageMode::StorageChain); let mut blocks = Vec::new(); let mut prev_hash = Default::default(); let x1 = ExtrinsicWrapper::from(0u64).encode(); - let x1_hash = as sp_core::Hasher>::hash(&x1[1..]); - for i in 0 .. 
10 { + let x1_hash = as sp_core::Hasher>::hash(&x1[1..]); + for i in 0..10 { let mut index = Vec::new(); if i == 0 { index.push(IndexOperation::Insert { @@ -3303,10 +3325,7 @@ pub(crate) mod tests { }); } else if i < 5 { // keep renewing 1st - index.push(IndexOperation::Renew { - extrinsic: 0, - hash: x1_hash.as_ref().to_vec(), - }); + index.push(IndexOperation::Renew { extrinsic: 0, hash: x1_hash.as_ref().to_vec() }); } // else stop renewing let hash = insert_block( &backend, @@ -3315,13 +3334,13 @@ pub(crate) mod tests { None, Default::default(), vec![i.into()], - Some(index) + Some(index), ); blocks.push(hash); prev_hash = hash; } - for i in 1 .. 10 { + for i in 1..10 { let mut op = backend.begin_operation().unwrap(); backend.begin_state_operation(&mut op, BlockId::Hash(blocks[4])).unwrap(); op.mark_finalized(BlockId::Hash(blocks[i]), None).unwrap(); @@ -3337,15 +3356,20 @@ pub(crate) mod tests { #[test] fn remove_leaf_block_works() { - let backend = Backend::::new_test_with_tx_storage( - 2, - 10, - TransactionStorageMode::StorageChain - ); + let backend = + Backend::::new_test_with_tx_storage(2, 10, TransactionStorageMode::StorageChain); let mut blocks = Vec::new(); let mut prev_hash = Default::default(); - for i in 0 .. 2 { - let hash = insert_block(&backend, i, prev_hash, None, Default::default(), vec![i.into()], None); + for i in 0..2 { + let hash = insert_block( + &backend, + i, + prev_hash, + None, + Default::default(), + vec![i.into()], + None, + ); blocks.push(hash); prev_hash = hash; } @@ -3358,7 +3382,7 @@ pub(crate) mod tests { None, sp_core::H256::random(), vec![42.into()], - None + None, ); assert!(backend.remove_leaf_block(&best_hash).is_err()); assert!(backend.have_state_at(&prev_hash, 1)); diff --git a/client/db/src/light.rs b/client/db/src/light.rs index 4e61a9c2ee03d..ded5e598fc683 100644 --- a/client/db/src/light.rs +++ b/client/db/src/light.rs @@ -18,31 +18,31 @@ //! RocksDB-based light client blockchain storage. 
-use std::{sync::Arc, collections::HashMap}; -use std::convert::TryInto; use parking_lot::RwLock; +use std::{collections::HashMap, convert::TryInto, sync::Arc}; +use crate::{ + cache::{ComplexBlockId, DbCache, DbCacheSync, EntryType as CacheEntryType}, + utils::{self, block_id_to_lookup_key, meta_keys, read_db, read_meta, DatabaseType, Meta}, + DatabaseSettings, DbHash, FrozenForDuration, +}; +use codec::{Decode, Encode}; +use log::{debug, trace, warn}; use sc_client_api::{ - cht, backend::{AuxStore, NewBlockState, ProvideChtRoots}, UsageInfo, - blockchain::{ - BlockStatus, Cache as BlockchainCache, Info as BlockchainInfo, - }, - Storage, + backend::{AuxStore, NewBlockState, ProvideChtRoots}, + blockchain::{BlockStatus, Cache as BlockchainCache, Info as BlockchainInfo}, + cht, Storage, UsageInfo, }; use sp_blockchain::{ - CachedHeaderMetadata, HeaderMetadata, HeaderMetadataCache, - Error as ClientError, Result as ClientResult, - HeaderBackend as BlockchainHeaderBackend, - well_known_cache_keys, + well_known_cache_keys, CachedHeaderMetadata, Error as ClientError, + HeaderBackend as BlockchainHeaderBackend, HeaderMetadata, HeaderMetadataCache, + Result as ClientResult, }; use sp_database::{Database, Transaction}; -use codec::{Decode, Encode}; -use sp_runtime::generic::{DigestItem, BlockId}; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, Zero, One, NumberFor, HashFor}; -use crate::cache::{DbCacheSync, DbCache, ComplexBlockId, EntryType as CacheEntryType}; -use crate::utils::{self, meta_keys, DatabaseType, Meta, read_db, block_id_to_lookup_key, read_meta}; -use crate::{DatabaseSettings, FrozenForDuration, DbHash}; -use log::{trace, warn, debug}; +use sp_runtime::{ + generic::{BlockId, DigestItem}, + traits::{Block as BlockT, HashFor, Header as HeaderT, NumberFor, One, Zero}, +}; pub(crate) mod columns { pub const META: u32 = crate::utils::COLUMN_META; @@ -139,8 +139,8 @@ impl LightStorage { } impl BlockchainHeaderBackend for LightStorage - where - Block: 
BlockT, +where + Block: BlockT, { fn header(&self, id: BlockId) -> ClientResult> { utils::read_header(&*self.db, columns::KEY_LOOKUP, columns::HEADER, id) @@ -165,12 +165,8 @@ impl BlockchainHeaderBackend for LightStorage fn status(&self, id: BlockId) -> ClientResult { let exists = match id { - BlockId::Hash(_) => read_db( - &*self.db, - columns::KEY_LOOKUP, - columns::HEADER, - id - )?.is_some(), + BlockId::Hash(_) => + read_db(&*self.db, columns::KEY_LOOKUP, columns::HEADER, id)?.is_some(), BlockId::Number(n) => n <= self.meta.read().best_number, }; match exists { @@ -180,7 +176,9 @@ impl BlockchainHeaderBackend for LightStorage } fn number(&self, hash: Block::Hash) -> ClientResult>> { - if let Some(lookup_key) = block_id_to_lookup_key::(&*self.db, columns::KEY_LOOKUP, BlockId::Hash(hash))? { + if let Some(lookup_key) = + block_id_to_lookup_key::(&*self.db, columns::KEY_LOOKUP, BlockId::Hash(hash))? + { let number = utils::lookup_key_to_number(&lookup_key)?; Ok(Some(number)) } else { @@ -196,17 +194,25 @@ impl BlockchainHeaderBackend for LightStorage impl HeaderMetadata for LightStorage { type Error = ClientError; - fn header_metadata(&self, hash: Block::Hash) -> Result, Self::Error> { - self.header_metadata_cache.header_metadata(hash).map_or_else(|| { - self.header(BlockId::hash(hash))?.map(|header| { - let header_metadata = CachedHeaderMetadata::from(&header); - self.header_metadata_cache.insert_header_metadata( - header_metadata.hash, - header_metadata.clone(), - ); - header_metadata - }).ok_or_else(|| ClientError::UnknownBlock(format!("header not found in db: {}", hash))) - }, Ok) + fn header_metadata( + &self, + hash: Block::Hash, + ) -> Result, Self::Error> { + self.header_metadata_cache.header_metadata(hash).map_or_else( + || { + self.header(BlockId::hash(hash))? 
+ .map(|header| { + let header_metadata = CachedHeaderMetadata::from(&header); + self.header_metadata_cache + .insert_header_metadata(header_metadata.hash, header_metadata.clone()); + header_metadata + }) + .ok_or_else(|| { + ClientError::UnknownBlock(format!("header not found in db: {}", hash)) + }) + }, + Ok, + ) } fn insert_header_metadata(&self, hash: Block::Hash, metadata: CachedHeaderMetadata) { @@ -221,10 +227,9 @@ impl HeaderMetadata for LightStorage { impl LightStorage { // Get block changes trie root, if available. fn changes_trie_root(&self, block: BlockId) -> ClientResult> { - self.header(block) - .map(|header| header.and_then(|header| - header.digest().log(DigestItem::as_changes_trie_root) - .cloned())) + self.header(block).map(|header| { + header.and_then(|header| header.digest().log(DigestItem::as_changes_trie_root).cloned()) + }) } /// Handle setting head within a transaction. `route_to` should be the last @@ -251,14 +256,16 @@ impl LightStorage { for retracted in tree_route.retracted() { if retracted.hash == meta.finalized_hash { // TODO: can we recover here? 
- warn!("Safety failure: reverting finalized block {:?}", - (&retracted.number, &retracted.hash)); + warn!( + "Safety failure: reverting finalized block {:?}", + (&retracted.number, &retracted.hash) + ); } utils::remove_number_to_key_mapping( transaction, columns::KEY_LOOKUP, - retracted.number + retracted.number, )?; } @@ -267,7 +274,7 @@ impl LightStorage { transaction, columns::KEY_LOOKUP, enacted.number, - enacted.hash + enacted.hash, )?; } } @@ -292,10 +299,11 @@ impl LightStorage { ) -> ClientResult<()> { let meta = self.meta.read(); if &meta.finalized_hash != header.parent_hash() { - return Err(::sp_blockchain::Error::NonSequentialFinalization( - format!("Last finalized {:?} not parent of {:?}", - meta.finalized_hash, hash), - ).into()) + return Err(::sp_blockchain::Error::NonSequentialFinalization(format!( + "Last finalized {:?} not parent of {:?}", + meta.finalized_hash, hash + )) + .into()) } let lookup_key = utils::number_and_hash_to_lookup_key(header.number().clone(), hash)?; @@ -313,12 +321,14 @@ impl LightStorage { }); let new_header_cht_root = cht::compute_root::, _>( - cht::size(), new_cht_number, cht_range.map(|num| self.hash(num)) + cht::size(), + new_cht_number, + cht_range.map(|num| self.hash(num)), )?; transaction.set( columns::CHT, &cht_key(HEADER_CHT_PREFIX, new_cht_start)?, - new_header_cht_root.as_ref() + new_header_cht_root.as_ref(), ); // if the header includes changes trie root, let's build a changes tries roots CHT @@ -329,14 +339,16 @@ impl LightStorage { current_num = current_num + One::one(); Some(old_current_num) }); - let new_changes_trie_cht_root = cht::compute_root::, _>( - cht::size(), new_cht_number, cht_range - .map(|num| self.changes_trie_root(BlockId::Number(num))) - )?; + let new_changes_trie_cht_root = + cht::compute_root::, _>( + cht::size(), + new_cht_number, + cht_range.map(|num| self.changes_trie_root(BlockId::Number(num))), + )?; transaction.set( columns::CHT, &cht_key(CHANGES_TRIE_CHT_PREFIX, new_cht_start)?, - 
new_changes_trie_cht_root.as_ref() + new_changes_trie_cht_root.as_ref(), ); } @@ -354,7 +366,7 @@ impl LightStorage { transaction, columns::KEY_LOOKUP, prune_block, - hash + hash, )?; transaction.remove(columns::HEADER, &lookup_key); } @@ -370,7 +382,7 @@ impl LightStorage { &self, cht_type: u8, cht_size: NumberFor, - block: NumberFor + block: NumberFor, ) -> ClientResult> { let no_cht_for_block = || ClientError::Backend(format!("Missing CHT for block {}", block)); @@ -383,7 +395,8 @@ impl LightStorage { } let cht_start = cht::start_number(cht_size, cht_number); - self.db.get(columns::CHT, &cht_key(cht_type, cht_start)?) + self.db + .get(columns::CHT, &cht_key(cht_type, cht_start)?) .ok_or_else(no_cht_for_block) .and_then(|hash| Block::Hash::decode(&mut &*hash).map_err(|_| no_cht_for_block())) .map(Some) @@ -391,15 +404,20 @@ impl LightStorage { } impl AuxStore for LightStorage - where Block: BlockT, +where + Block: BlockT, { fn insert_aux< 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, delete: D) -> ClientResult<()> { + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> ClientResult<()> { let mut transaction = Transaction::new(); for (k, v) in insert { transaction.set(columns::AUX, k, v); @@ -418,7 +436,8 @@ impl AuxStore for LightStorage } impl Storage for LightStorage - where Block: BlockT, +where + Block: BlockT, { fn import_header( &self, @@ -447,19 +466,12 @@ impl Storage for LightStorage self.set_head_with_transaction(&mut transaction, parent_hash, (number, hash))?; } - utils::insert_hash_to_key_mapping( - &mut transaction, - columns::KEY_LOOKUP, - number, - hash, - )?; + utils::insert_hash_to_key_mapping(&mut transaction, columns::KEY_LOOKUP, number, hash)?; transaction.set_from_vec(columns::HEADER, &lookup_key, header.encode()); let header_metadata = CachedHeaderMetadata::from(&header); - self.header_metadata_cache.insert_header_metadata( - header.hash().clone(), - header_metadata, - ); + 
self.header_metadata_cache + .insert_header_metadata(header.hash().clone(), header_metadata); let is_genesis = number.is_zero(); if is_genesis { @@ -474,25 +486,28 @@ impl Storage for LightStorage }; if finalized { - self.note_finalized( - &mut transaction, - &header, - hash, - )?; + self.note_finalized(&mut transaction, &header, hash)?; } // update changes trie configuration cache if !cache_at.contains_key(&well_known_cache_keys::CHANGES_TRIE_CONFIG) { - if let Some(new_configuration) = crate::changes_tries_storage::extract_new_configuration(&header) { - cache_at.insert(well_known_cache_keys::CHANGES_TRIE_CONFIG, new_configuration.encode()); + if let Some(new_configuration) = + crate::changes_tries_storage::extract_new_configuration(&header) + { + cache_at + .insert(well_known_cache_keys::CHANGES_TRIE_CONFIG, new_configuration.encode()); } } { let mut cache = self.cache.0.write(); - let cache_ops = cache.transaction(&mut transaction) + let cache_ops = cache + .transaction(&mut transaction) .on_block_insert( - ComplexBlockId::new(*header.parent_hash(), if number.is_zero() { Zero::zero() } else { number - One::one() }), + ComplexBlockId::new( + *header.parent_hash(), + if number.is_zero() { Zero::zero() } else { number - One::one() }, + ), ComplexBlockId::new(hash, number), cache_at, if finalized { CacheEntryType::Final } else { CacheEntryType::NonFinal }, @@ -502,9 +517,10 @@ impl Storage for LightStorage debug!("Light DB Commit {:?} ({})", hash, number); self.db.commit(transaction)?; - cache.commit(cache_ops) - .expect("only fails if cache with given name isn't loaded yet;\ - cache is already loaded because there are cache_ops; qed"); + cache.commit(cache_ops).expect( + "only fails if cache with given name isn't loaded yet;\ + cache is already loaded because there are cache_ops; qed", + ); } self.update_meta(hash, number, leaf_state.is_best(), finalized); @@ -518,7 +534,11 @@ impl Storage for LightStorage let number = header.number(); let mut transaction = 
Transaction::new(); - self.set_head_with_transaction(&mut transaction, hash.clone(), (number.clone(), hash.clone()))?; + self.set_head_with_transaction( + &mut transaction, + hash.clone(), + (number.clone(), hash.clone()), + )?; self.db.commit(transaction)?; self.update_meta(hash, header.number().clone(), true, false); @@ -536,17 +556,22 @@ impl Storage for LightStorage self.note_finalized(&mut transaction, &header, hash.clone())?; { let mut cache = self.cache.0.write(); - let cache_ops = cache.transaction(&mut transaction) + let cache_ops = cache + .transaction(&mut transaction) .on_block_finalize( - ComplexBlockId::new(*header.parent_hash(), if number.is_zero() { Zero::zero() } else { number - One::one() }), - ComplexBlockId::new(hash, number) + ComplexBlockId::new( + *header.parent_hash(), + if number.is_zero() { Zero::zero() } else { number - One::one() }, + ), + ComplexBlockId::new(hash, number), )? .into_ops(); self.db.commit(transaction)?; - cache.commit(cache_ops) - .expect("only fails if cache with given name isn't loaded yet;\ - cache is already loaded because there are cache_ops; qed"); + cache.commit(cache_ops).expect( + "only fails if cache with given name isn't loaded yet;\ + cache is already loaded because there are cache_ops; qed", + ); } self.update_meta(hash, header.number().clone(), false, true); @@ -566,7 +591,7 @@ impl Storage for LightStorage #[cfg(not(target_os = "unknown"))] fn usage_info(&self) -> Option { - use sc_client_api::{MemoryInfo, IoInfo, MemorySize}; + use sc_client_api::{IoInfo, MemoryInfo, MemorySize}; // TODO: reimplement IO stats let database_cache = MemorySize::from_bytes(0); @@ -591,7 +616,7 @@ impl Storage for LightStorage state_reads_cache: 0, state_writes_cache: 0, state_writes_nodes: 0, - } + }, }) } @@ -602,7 +627,8 @@ impl Storage for LightStorage } impl ProvideChtRoots for LightStorage - where Block: BlockT, +where + Block: BlockT, { fn header_cht_root( &self, @@ -630,12 +656,14 @@ fn cht_key>(cht_type: u8, block: N) 
-> ClientResult<[u8; 5]> { #[cfg(test)] pub(crate) mod tests { + use super::*; use sc_client_api::cht; - use sp_core::ChangesTrieConfiguration; - use sp_runtime::generic::{DigestItem, ChangesTrieSignal}; - use sp_runtime::testing::{H256 as Hash, Header, Block as RawBlock, ExtrinsicWrapper}; use sp_blockchain::{lowest_common_ancestor, tree_route}; - use super::*; + use sp_core::ChangesTrieConfiguration; + use sp_runtime::{ + generic::{ChangesTrieSignal, DigestItem}, + testing::{Block as RawBlock, ExtrinsicWrapper, Header, H256 as Hash}, + }; type Block = RawBlock>; type AuthorityId = sp_core::ed25519::Public; @@ -652,7 +680,10 @@ pub(crate) mod tests { fn header_with_changes_trie(parent: &Hash, number: u64) -> Header { let mut header = default_header(parent, number); - header.digest.logs.push(DigestItem::ChangesTrieRoot([(number % 256) as u8; 32].into())); + header + .digest + .logs + .push(DigestItem::ChangesTrieRoot([(number % 256) as u8; 32].into())); header } @@ -698,7 +729,8 @@ pub(crate) mod tests { #[test] fn returns_known_header() { let db = LightStorage::new_test(); - let known_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); + let known_hash = + insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); let header_by_hash = db.header(BlockId::Hash(known_hash)).unwrap().unwrap(); let header_by_number = db.header(BlockId::Number(0)).unwrap().unwrap(); assert_eq!(header_by_hash, header_by_number); @@ -714,7 +746,8 @@ pub(crate) mod tests { #[test] fn returns_info() { let db = LightStorage::new_test(); - let genesis_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); + let genesis_hash = + insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); let info = db.info(); assert_eq!(info.best_hash, genesis_hash); assert_eq!(info.best_number, 0); @@ -729,17 +762,22 @@ pub(crate) mod tests { #[test] fn returns_block_status() { let db = 
LightStorage::new_test(); - let genesis_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); + let genesis_hash = + insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); assert_eq!(db.status(BlockId::Hash(genesis_hash)).unwrap(), BlockStatus::InChain); assert_eq!(db.status(BlockId::Number(0)).unwrap(), BlockStatus::InChain); - assert_eq!(db.status(BlockId::Hash(Hash::from_low_u64_be(1))).unwrap(), BlockStatus::Unknown); + assert_eq!( + db.status(BlockId::Hash(Hash::from_low_u64_be(1))).unwrap(), + BlockStatus::Unknown + ); assert_eq!(db.status(BlockId::Number(1)).unwrap(), BlockStatus::Unknown); } #[test] fn returns_block_hash() { let db = LightStorage::new_test(); - let genesis_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); + let genesis_hash = + insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); assert_eq!(db.hash(0).unwrap(), Some(genesis_hash)); assert_eq!(db.hash(1).unwrap(), None); } @@ -749,7 +787,8 @@ pub(crate) mod tests { let raw_db = Arc::new(sp_database::MemDb::default()); let db = LightStorage::from_kvdb(raw_db.clone()).unwrap(); - let genesis_hash = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); + let genesis_hash = + insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); assert_eq!(raw_db.count(columns::HEADER), 1); assert_eq!(raw_db.count(columns::KEY_LOOKUP), 2); @@ -760,43 +799,41 @@ pub(crate) mod tests { #[test] fn finalized_ancient_headers_are_replaced_with_cht() { - fn insert_headers Header>(header_producer: F) -> - (Arc, LightStorage) - { + fn insert_headers Header>( + header_producer: F, + ) -> (Arc, LightStorage) { let raw_db = Arc::new(sp_database::MemDb::default()); let db = LightStorage::from_kvdb(raw_db.clone()).unwrap(); let cht_size: u64 = cht::size(); let ucht_size: usize = cht_size as _; // insert genesis block header (never pruned) - let mut prev_hash = 
insert_final_block(&db, HashMap::new(), || header_producer(&Default::default(), 0)); + let mut prev_hash = + insert_final_block(&db, HashMap::new(), || header_producer(&Default::default(), 0)); // insert SIZE blocks && ensure that nothing is pruned for number in 0..cht::size() { - prev_hash = insert_block(&db, HashMap::new(), || header_producer(&prev_hash, 1 + number)); + prev_hash = + insert_block(&db, HashMap::new(), || header_producer(&prev_hash, 1 + number)); } assert_eq!(raw_db.count(columns::HEADER), 1 + ucht_size); assert_eq!(raw_db.count(columns::CHT), 0); // insert next SIZE blocks && ensure that nothing is pruned for number in 0..(cht_size as _) { - prev_hash = insert_block( - &db, - HashMap::new(), - || header_producer(&prev_hash, 1 + cht_size + number), - ); + prev_hash = insert_block(&db, HashMap::new(), || { + header_producer(&prev_hash, 1 + cht_size + number) + }); } assert_eq!(raw_db.count(columns::HEADER), 1 + ucht_size + ucht_size); assert_eq!(raw_db.count(columns::CHT), 0); // insert block #{2 * cht::size() + 1} && check that new CHT is created + headers of this CHT are pruned // nothing is yet finalized, so nothing is pruned. 
- prev_hash = insert_block( - &db, - HashMap::new(), - || header_producer(&prev_hash, 1 + cht_size + cht_size), - ); + prev_hash = insert_block(&db, HashMap::new(), || { + header_producer(&prev_hash, 1 + cht_size + cht_size) + }); assert_eq!(raw_db.count(columns::HEADER), 2 + ucht_size + ucht_size); assert_eq!(raw_db.count(columns::CHT), 0); @@ -839,7 +876,10 @@ pub(crate) mod tests { #[test] fn get_cht_fails_for_non_existent_cht() { let cht_size: u64 = cht::size(); - assert!(LightStorage::::new_test().header_cht_root(cht_size, cht_size / 2).unwrap().is_none()); + assert!(LightStorage::::new_test() + .header_cht_root(cht_size, cht_size / 2) + .unwrap() + .is_none()); } #[test] @@ -847,26 +887,41 @@ pub(crate) mod tests { let db = LightStorage::new_test(); // insert 1 + SIZE + SIZE + 1 blocks so that CHT#0 is created - let mut prev_hash = insert_final_block(&db, HashMap::new(), || header_with_changes_trie(&Default::default(), 0)); + let mut prev_hash = insert_final_block(&db, HashMap::new(), || { + header_with_changes_trie(&Default::default(), 0) + }); let cht_size: u64 = cht::size(); let ucht_size: usize = cht_size as _; for i in 1..1 + ucht_size + ucht_size + 1 { - prev_hash = insert_block(&db, HashMap::new(), || header_with_changes_trie(&prev_hash, i as u64)); + prev_hash = insert_block(&db, HashMap::new(), || { + header_with_changes_trie(&prev_hash, i as u64) + }); db.finalize_header(BlockId::Hash(prev_hash)).unwrap(); } - let cht_root_1 = db.header_cht_root(cht_size, cht::start_number(cht_size, 0)).unwrap().unwrap(); - let cht_root_2 = db.header_cht_root(cht_size, cht::start_number(cht_size, 0) + cht_size / 2).unwrap().unwrap(); - let cht_root_3 = db.header_cht_root(cht_size, cht::end_number(cht_size, 0)).unwrap().unwrap(); + let cht_root_1 = + db.header_cht_root(cht_size, cht::start_number(cht_size, 0)).unwrap().unwrap(); + let cht_root_2 = db + .header_cht_root(cht_size, cht::start_number(cht_size, 0) + cht_size / 2) + .unwrap() + .unwrap(); + let cht_root_3 
= + db.header_cht_root(cht_size, cht::end_number(cht_size, 0)).unwrap().unwrap(); assert_eq!(cht_root_1, cht_root_2); assert_eq!(cht_root_2, cht_root_3); - let cht_root_1 = db.changes_trie_cht_root(cht_size, cht::start_number(cht_size, 0)).unwrap().unwrap(); - let cht_root_2 = db.changes_trie_cht_root( - cht_size, - cht::start_number(cht_size, 0) + cht_size / 2, - ).unwrap().unwrap(); - let cht_root_3 = db.changes_trie_cht_root(cht_size, cht::end_number(cht_size, 0)).unwrap().unwrap(); + let cht_root_1 = db + .changes_trie_cht_root(cht_size, cht::start_number(cht_size, 0)) + .unwrap() + .unwrap(); + let cht_root_2 = db + .changes_trie_cht_root(cht_size, cht::start_number(cht_size, 0) + cht_size / 2) + .unwrap() + .unwrap(); + let cht_root_3 = db + .changes_trie_cht_root(cht_size, cht::end_number(cht_size, 0)) + .unwrap() + .unwrap(); assert_eq!(cht_root_1, cht_root_2); assert_eq!(cht_root_2, cht_root_3); } @@ -882,15 +937,23 @@ pub(crate) mod tests { let a3 = insert_block(&db, HashMap::new(), || default_header(&a2, 3)); // fork from genesis: 2 prong. 
- let b1 = insert_block(&db, HashMap::new(), || header_with_extrinsics_root(&block0, 1, Hash::from([1; 32]))); + let b1 = insert_block(&db, HashMap::new(), || { + header_with_extrinsics_root(&block0, 1, Hash::from([1; 32])) + }); let b2 = insert_block(&db, HashMap::new(), || default_header(&b1, 2)); { let tree_route = tree_route(&db, a3, b2).unwrap(); assert_eq!(tree_route.common_block().hash, block0); - assert_eq!(tree_route.retracted().iter().map(|r| r.hash).collect::>(), vec![a3, a2, a1]); - assert_eq!(tree_route.enacted().iter().map(|r| r.hash).collect::>(), vec![b1, b2]); + assert_eq!( + tree_route.retracted().iter().map(|r| r.hash).collect::>(), + vec![a3, a2, a1] + ); + assert_eq!( + tree_route.enacted().iter().map(|r| r.hash).collect::>(), + vec![b1, b2] + ); } { @@ -898,14 +961,20 @@ pub(crate) mod tests { assert_eq!(tree_route.common_block().hash, a1); assert!(tree_route.retracted().is_empty()); - assert_eq!(tree_route.enacted().iter().map(|r| r.hash).collect::>(), vec![a2, a3]); + assert_eq!( + tree_route.enacted().iter().map(|r| r.hash).collect::>(), + vec![a2, a3] + ); } { let tree_route = tree_route(&db, a3, a1).unwrap(); assert_eq!(tree_route.common_block().hash, a1); - assert_eq!(tree_route.retracted().iter().map(|r| r.hash).collect::>(), vec![a3, a2]); + assert_eq!( + tree_route.retracted().iter().map(|r| r.hash).collect::>(), + vec![a3, a2] + ); assert!(tree_route.enacted().is_empty()); } @@ -929,7 +998,9 @@ pub(crate) mod tests { let a3 = insert_block(&db, HashMap::new(), || default_header(&a2, 3)); // fork from genesis: 2 prong. 
- let b1 = insert_block(&db, HashMap::new(), || header_with_extrinsics_root(&block0, 1, Hash::from([1; 32]))); + let b1 = insert_block(&db, HashMap::new(), || { + header_with_extrinsics_root(&block0, 1, Hash::from([1; 32])) + }); let b2 = insert_block(&db, HashMap::new(), || default_header(&b1, 2)); { @@ -979,7 +1050,11 @@ pub(crate) mod tests { fn authorities_are_cached() { let db = LightStorage::new_test(); - fn run_checks(db: &LightStorage, max: u64, checks: &[(u64, Option>)]) { + fn run_checks( + db: &LightStorage, + max: u64, + checks: &[(u64, Option>)], + ) { for (at, expected) in checks.iter().take_while(|(at, _)| *at <= max) { let actual = authorities(db.cache(), BlockId::Number(*at)); assert_eq!(*expected, actual); @@ -990,14 +1065,21 @@ pub(crate) mod tests { HashMap::new() } - fn make_authorities(authorities: Vec) -> HashMap> { + fn make_authorities( + authorities: Vec, + ) -> HashMap> { let mut map = HashMap::new(); map.insert(well_known_cache_keys::AUTHORITIES, authorities.encode()); map } - fn authorities(cache: &dyn BlockchainCache, at: BlockId) -> Option> { - cache.get_at(&well_known_cache_keys::AUTHORITIES, &at).unwrap_or(None) + fn authorities( + cache: &dyn BlockchainCache, + at: BlockId, + ) -> Option> { + cache + .get_at(&well_known_cache_keys::AUTHORITIES, &at) + .unwrap_or(None) .and_then(|(_, _, val)| Decode::decode(&mut &val[..]).ok()) } @@ -1021,17 +1103,27 @@ pub(crate) mod tests { (6, Some(vec![auth1(), auth2()])), ]; - let hash0 = insert_final_block(&db, same_authorities(), || default_header(&Default::default(), 0)); + let hash0 = insert_final_block(&db, same_authorities(), || { + default_header(&Default::default(), 0) + }); run_checks(&db, 0, &checks); let hash1 = insert_final_block(&db, same_authorities(), || default_header(&hash0, 1)); run_checks(&db, 1, &checks); - let hash2 = insert_final_block(&db, make_authorities(vec![auth1()]), || default_header(&hash1, 2)); + let hash2 = insert_final_block(&db, make_authorities(vec![auth1()]), 
|| { + default_header(&hash1, 2) + }); run_checks(&db, 2, &checks); - let hash3 = insert_final_block(&db, make_authorities(vec![auth1()]), || default_header(&hash2, 3)); + let hash3 = insert_final_block(&db, make_authorities(vec![auth1()]), || { + default_header(&hash2, 3) + }); run_checks(&db, 3, &checks); - let hash4 = insert_final_block(&db, make_authorities(vec![auth1(), auth2()]), || default_header(&hash3, 4)); + let hash4 = insert_final_block(&db, make_authorities(vec![auth1(), auth2()]), || { + default_header(&hash3, 4) + }); run_checks(&db, 4, &checks); - let hash5 = insert_final_block(&db, make_authorities(vec![auth1(), auth2()]), || default_header(&hash4, 5)); + let hash5 = insert_final_block(&db, make_authorities(vec![auth1(), auth2()]), || { + default_header(&hash4, 5) + }); run_checks(&db, 5, &checks); let hash6 = insert_final_block(&db, same_authorities(), || default_header(&hash5, 6)); run_checks(&db, 6, &checks); @@ -1043,9 +1135,14 @@ pub(crate) mod tests { // some older non-best blocks are inserted // ... 
-> B2(1) -> B2_1(1) -> B2_2(2) // => the cache ignores all writes before best finalized block - let hash2_1 = insert_non_best_block(&db, make_authorities(vec![auth1()]), || default_header(&hash2, 3)); + let hash2_1 = insert_non_best_block(&db, make_authorities(vec![auth1()]), || { + default_header(&hash2, 3) + }); assert_eq!(None, authorities(db.cache(), BlockId::Hash(hash2_1))); - let hash2_2 = insert_non_best_block(&db, make_authorities(vec![auth1(), auth2()]), || default_header(&hash2_1, 4)); + let hash2_2 = + insert_non_best_block(&db, make_authorities(vec![auth1(), auth2()]), || { + default_header(&hash2_1, 4) + }); assert_eq!(None, authorities(db.cache(), BlockId::Hash(hash2_2))); } @@ -1056,51 +1153,41 @@ pub(crate) mod tests { // \> B6_1_1(5) // \> B6_1_2(6) -> B6_1_3(7) - let hash7 = insert_block(&db, make_authorities(vec![auth3()]), || default_header(&hash6, 7)); - assert_eq!( - authorities(db.cache(), BlockId::Hash(hash6)), - Some(vec![auth1(), auth2()]), - ); + let hash7 = + insert_block(&db, make_authorities(vec![auth3()]), || default_header(&hash6, 7)); + assert_eq!(authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]),); assert_eq!(authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); - let hash8 = insert_block(&db, make_authorities(vec![auth3()]), || default_header(&hash7, 8)); - assert_eq!( - authorities(db.cache(), BlockId::Hash(hash6)), - Some(vec![auth1(), auth2()]), - ); + let hash8 = + insert_block(&db, make_authorities(vec![auth3()]), || default_header(&hash7, 8)); + assert_eq!(authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]),); assert_eq!(authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash8)), Some(vec![auth3()])); - let hash6_1 = insert_block(&db, make_authorities(vec![auth4()]), || default_header(&hash6, 7)); - assert_eq!( - authorities(db.cache(), BlockId::Hash(hash6)), - Some(vec![auth1(), auth2()]), - ); 
+ let hash6_1 = + insert_block(&db, make_authorities(vec![auth4()]), || default_header(&hash6, 7)); + assert_eq!(authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]),); assert_eq!(authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash8)), Some(vec![auth3()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); - let hash6_1_1 = insert_non_best_block(&db, make_authorities(vec![auth5()]), || default_header(&hash6_1, 8)); - assert_eq!( - authorities(db.cache(), BlockId::Hash(hash6)), - Some(vec![auth1(), auth2()]), - ); + let hash6_1_1 = insert_non_best_block(&db, make_authorities(vec![auth5()]), || { + default_header(&hash6_1, 8) + }); + assert_eq!(authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]),); assert_eq!(authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash8)), Some(vec![auth3()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_1_1)), Some(vec![auth5()])); - let hash6_1_2 = insert_non_best_block(&db, make_authorities(vec![auth6()]), || default_header(&hash6_1, 8)); - assert_eq!( - authorities(db.cache(), BlockId::Hash(hash6)), - Some(vec![auth1(), auth2()]), - ); + let hash6_1_2 = insert_non_best_block(&db, make_authorities(vec![auth6()]), || { + default_header(&hash6_1, 8) + }); + assert_eq!(authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]),); assert_eq!(authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash8)), Some(vec![auth3()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_1_1)), Some(vec![auth5()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_1_2)), 
Some(vec![auth6()])); - let hash6_2 = insert_block(&db, make_authorities(vec![auth4()]), || default_header(&hash6_1, 8)); - assert_eq!( - authorities(db.cache(), BlockId::Hash(hash6)), - Some(vec![auth1(), auth2()]), - ); + let hash6_2 = + insert_block(&db, make_authorities(vec![auth4()]), || default_header(&hash6_1, 8)); + assert_eq!(authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]),); assert_eq!(authorities(db.cache(), BlockId::Hash(hash7)), Some(vec![auth3()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash8)), Some(vec![auth3()])); assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); @@ -1114,10 +1201,7 @@ pub(crate) mod tests { { // finalize block hash6_1 db.finalize_header(BlockId::Hash(hash6_1)).unwrap(); - assert_eq!( - authorities(db.cache(), BlockId::Hash(hash6)), - Some(vec![auth1(), auth2()]), - ); + assert_eq!(authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]),); assert_eq!(authorities(db.cache(), BlockId::Hash(hash7)), None); assert_eq!(authorities(db.cache(), BlockId::Hash(hash8)), None); assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); @@ -1126,10 +1210,7 @@ pub(crate) mod tests { assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_2)), Some(vec![auth4()])); // finalize block hash6_2 db.finalize_header(BlockId::Hash(hash6_2)).unwrap(); - assert_eq!( - authorities(db.cache(), BlockId::Hash(hash6)), - Some(vec![auth1(), auth2()]), - ); + assert_eq!(authorities(db.cache(), BlockId::Hash(hash6)), Some(vec![auth1(), auth2()]),); assert_eq!(authorities(db.cache(), BlockId::Hash(hash7)), None); assert_eq!(authorities(db.cache(), BlockId::Hash(hash8)), None); assert_eq!(authorities(db.cache(), BlockId::Hash(hash6_1)), Some(vec![auth4()])); @@ -1142,7 +1223,8 @@ pub(crate) mod tests { #[test] fn database_is_reopened() { let db = LightStorage::new_test(); - let hash0 = insert_final_block(&db, HashMap::new(), || 
default_header(&Default::default(), 0)); + let hash0 = + insert_final_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); assert_eq!(db.info().best_hash, hash0); assert_eq!(db.header(BlockId::Hash(hash0)).unwrap().unwrap().hash(), hash0); @@ -1157,7 +1239,8 @@ pub(crate) mod tests { let db = LightStorage::::new_test(); // insert aux1 + aux2 using direct store access - db.insert_aux(&[(&[1][..], &[101][..]), (&[2][..], &[102][..])], ::std::iter::empty()).unwrap(); + db.insert_aux(&[(&[1][..], &[101][..]), (&[2][..], &[102][..])], ::std::iter::empty()) + .unwrap(); // check aux values assert_eq!(db.get_aux(&[1]).unwrap(), Some(vec![101])); @@ -1165,10 +1248,13 @@ pub(crate) mod tests { assert_eq!(db.get_aux(&[3]).unwrap(), None); // delete aux1 + insert aux3 using import operation - db.import_header(default_header(&Default::default(), 0), HashMap::new(), NewBlockState::Best, vec![ - (vec![3], Some(vec![103])), - (vec![1], None), - ]).unwrap(); + db.import_header( + default_header(&Default::default(), 0), + HashMap::new(), + NewBlockState::Best, + vec![(vec![3], Some(vec![103])), (vec![1], None)], + ) + .unwrap(); // check aux values assert_eq!(db.get_aux(&[1]).unwrap(), None); @@ -1208,7 +1294,8 @@ pub(crate) mod tests { }; // restart && check that after restart value is read from the cache - let db = LightStorage::::from_kvdb(storage as Arc<_>).expect("failed to create test-db"); + let db = + LightStorage::::from_kvdb(storage as Arc<_>).expect("failed to create test-db"); assert_eq!( db.cache().get_at(b"test", &BlockId::Number(0)).unwrap(), Some(((0, genesis_hash.unwrap()), None, vec![42])), @@ -1224,7 +1311,9 @@ pub(crate) mod tests { // insert block#0 && block#1 (no value for cache is provided) let hash0 = insert_block(&db, HashMap::new(), || default_header(&Default::default(), 0)); assert_eq!( - db.cache().get_at(&well_known_cache_keys::CHANGES_TRIE_CONFIG, &BlockId::Number(0)).unwrap() + db.cache() + 
.get_at(&well_known_cache_keys::CHANGES_TRIE_CONFIG, &BlockId::Number(0)) + .unwrap() .map(|(_, _, v)| ChangesTrieConfiguration::decode(&mut &v[..]).unwrap()), None, ); @@ -1232,13 +1321,15 @@ pub(crate) mod tests { // insert configuration at block#1 (starts from block#2) insert_block(&db, HashMap::new(), || { let mut header = default_header(&hash0, 1); - header.digest_mut().push( - DigestItem::ChangesTrieSignal(ChangesTrieSignal::NewConfiguration(new_config.clone())) - ); + header.digest_mut().push(DigestItem::ChangesTrieSignal( + ChangesTrieSignal::NewConfiguration(new_config.clone()), + )); header }); assert_eq!( - db.cache().get_at(&well_known_cache_keys::CHANGES_TRIE_CONFIG, &BlockId::Number(1)).unwrap() + db.cache() + .get_at(&well_known_cache_keys::CHANGES_TRIE_CONFIG, &BlockId::Number(1)) + .unwrap() .map(|(_, _, v)| Option::::decode(&mut &v[..]).unwrap()), Some(new_config), ); diff --git a/client/db/src/offchain.rs b/client/db/src/offchain.rs index df45c4946e622..c31273ff07c63 100644 --- a/client/db/src/offchain.rs +++ b/client/db/src/offchain.rs @@ -21,8 +21,8 @@ use std::{collections::HashMap, sync::Arc}; use crate::{columns, Database, DbHash, Transaction}; -use parking_lot::Mutex; use log::error; +use parking_lot::Mutex; /// Offchain local storage #[derive(Clone)] @@ -33,8 +33,7 @@ pub struct LocalStorage { impl std::fmt::Debug for LocalStorage { fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - fmt.debug_struct("LocalStorage") - .finish() + fmt.debug_struct("LocalStorage").finish() } } @@ -49,10 +48,7 @@ impl LocalStorage { /// Create offchain local storage with given `KeyValueDB` backend. pub fn new(db: Arc>) -> Self { - Self { - db, - locks: Default::default(), - } + Self { db, locks: Default::default() } } } @@ -118,11 +114,7 @@ impl sp_core::offchain::OffchainStorage for LocalStorage { /// Concatenate the prefix and key to create an offchain key in the db. 
pub(crate) fn concatenate_prefix_and_key(prefix: &[u8], key: &[u8]) -> Vec { - prefix - .iter() - .chain(key.into_iter()) - .cloned() - .collect() + prefix.iter().chain(key.into_iter()).cloned().collect() } #[cfg(test)] @@ -155,5 +147,4 @@ mod tests { assert_eq!(storage.get(prefix, key), Some(b"asd".to_vec())); assert!(storage.locks.lock().is_empty(), "Locks map should be empty!"); } - } diff --git a/client/db/src/parity_db.rs b/client/db/src/parity_db.rs index ed39c1e9f669f..07f58baf01541 100644 --- a/client/db/src/parity_db.rs +++ b/client/db/src/parity_db.rs @@ -15,27 +15,29 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use crate::{ + columns, + utils::{DatabaseType, NUM_COLUMNS}, +}; /// A `Database` adapter for parity-db. - -use sp_database::{Database, Change, ColumnId, Transaction, error::DatabaseError}; -use crate::utils::{DatabaseType, NUM_COLUMNS}; -use crate::columns; +use sp_database::{error::DatabaseError, Change, ColumnId, Database, Transaction}; struct DbAdapter(parity_db::Db); fn handle_err(result: parity_db::Result) -> T { match result { Ok(r) => r, - Err(e) => { + Err(e) => { panic!("Critical database error: {:?}", e); - } + }, } } /// Wrap parity-db database into a trait object that implements `sp_database::Database` -pub fn open>(path: &std::path::Path, db_type: DatabaseType) - -> parity_db::Result>> -{ +pub fn open>( + path: &std::path::Path, + db_type: DatabaseType, +) -> parity_db::Result>> { let mut config = parity_db::Options::with_columns(path, NUM_COLUMNS as u8); config.sync = true; // Flush each commit if db_type == DatabaseType::Full { @@ -50,13 +52,11 @@ pub fn open>(path: &std::path::Path, db_type: DatabaseTyp impl> Database for DbAdapter { fn commit(&self, transaction: Transaction) -> Result<(), DatabaseError> { - handle_err(self.0.commit(transaction.0.into_iter().map(|change| - match change { - Change::Set(col, key, value) => (col as u8, key, Some(value)), - 
Change::Remove(col, key) => (col as u8, key, None), - _ => unimplemented!(), - })) - ); + handle_err(self.0.commit(transaction.0.into_iter().map(|change| match change { + Change::Set(col, key, value) => (col as u8, key, Some(value)), + Change::Remove(col, key) => (col as u8, key, None), + _ => unimplemented!(), + }))); Ok(()) } diff --git a/client/db/src/stats.rs b/client/db/src/stats.rs index 3fd93db931d02..9223142ef5aba 100644 --- a/client/db/src/stats.rs +++ b/client/db/src/stats.rs @@ -65,7 +65,10 @@ impl StateUsageStats { /// Tally one key read. pub fn tally_key_read(&self, key: &[u8], val: Option<&Vec>, cache: bool) { - self.tally_read(key.len() as u64 + val.as_ref().map(|x| x.len() as u64).unwrap_or(0), cache); + self.tally_read( + key.len() as u64 + val.as_ref().map(|x| x.len() as u64).unwrap_or(0), + cache, + ); } /// Tally one child key read. @@ -103,9 +106,11 @@ impl StateUsageStats { self.reads.fetch_add(info.reads.ops, AtomicOrdering::Relaxed); self.bytes_read.fetch_add(info.reads.bytes, AtomicOrdering::Relaxed); self.writes_nodes.fetch_add(info.nodes_writes.ops, AtomicOrdering::Relaxed); - self.bytes_written_nodes.fetch_add(info.nodes_writes.bytes, AtomicOrdering::Relaxed); + self.bytes_written_nodes + .fetch_add(info.nodes_writes.bytes, AtomicOrdering::Relaxed); self.removed_nodes.fetch_add(info.removed_nodes.ops, AtomicOrdering::Relaxed); - self.bytes_removed_nodes.fetch_add(info.removed_nodes.bytes, AtomicOrdering::Relaxed); + self.bytes_removed_nodes + .fetch_add(info.removed_nodes.bytes, AtomicOrdering::Relaxed); self.reads_cache.fetch_add(info.cache_reads.ops, AtomicOrdering::Relaxed); self.bytes_read_cache.fetch_add(info.cache_reads.bytes, AtomicOrdering::Relaxed); } diff --git a/client/db/src/storage_cache.rs b/client/db/src/storage_cache.rs index e4b5951465461..4238b4deec989 100644 --- a/client/db/src/storage_cache.rs +++ b/client/db/src/storage_cache.rs @@ -20,21 +20,22 @@ //! 
Tracks changes over the span of a few recent blocks and handles forks //! by tracking/removing cache entries for conflicting changes. -use std::collections::{VecDeque, HashSet, HashMap}; -use std::sync::Arc; -use std::hash::Hash as StdHash; -use parking_lot::{RwLock, RwLockUpgradableReadGuard}; -use linked_hash_map::{LinkedHashMap, Entry}; +use crate::{stats::StateUsageStats, utils::Meta}; use hash_db::Hasher; -use sp_runtime::traits::{Block as BlockT, Header, HashFor, NumberFor}; -use sp_core::hexdisplay::HexDisplay; -use sp_core::storage::ChildInfo; +use linked_hash_map::{Entry, LinkedHashMap}; +use log::trace; +use parking_lot::{RwLock, RwLockUpgradableReadGuard}; +use sp_core::{hexdisplay::HexDisplay, storage::ChildInfo}; +use sp_runtime::traits::{Block as BlockT, HashFor, Header, NumberFor}; use sp_state_machine::{ - backend::Backend as StateBackend, TrieBackend, StorageKey, StorageValue, - StorageCollection, ChildStorageCollection, + backend::Backend as StateBackend, ChildStorageCollection, StorageCollection, StorageKey, + StorageValue, TrieBackend, +}; +use std::{ + collections::{HashMap, HashSet, VecDeque}, + hash::Hash as StdHash, + sync::Arc, }; -use log::trace; -use crate::{utils::Meta, stats::StateUsageStats}; const STATE_CACHE_BLOCKS: usize = 12; @@ -75,7 +76,7 @@ impl EstimateSize for Vec { impl EstimateSize for Option> { fn estimate_size(&self) -> usize { - self.as_ref().map(|v|v.capacity()).unwrap_or(0) + self.as_ref().map(|v| v.capacity()).unwrap_or(0) } } @@ -84,7 +85,7 @@ struct OptionHOut>(Option); impl> EstimateSize for OptionHOut { fn estimate_size(&self) -> usize { // capacity would be better - self.0.as_ref().map(|v|v.as_ref().len()).unwrap_or(0) + self.0.as_ref().map(|v| v.as_ref().len()).unwrap_or(0) } } @@ -125,20 +126,22 @@ impl LRUMap { }; while *storage_used_size > limit { - if let Some((k,v)) = lmap.pop_front() { + if let Some((k, v)) = lmap.pop_front() { *storage_used_size -= k.estimate_size(); *storage_used_size -= 
v.estimate_size(); } else { // can happen fairly often as we get value from multiple lru // and only remove from a single lru - break; + break } } } - fn get(&mut self, k: &Q) -> Option<&mut V> - where K: std::borrow::Borrow, - Q: StdHash + Eq { + fn get(&mut self, k: &Q) -> Option<&mut V> + where + K: std::borrow::Borrow, + Q: StdHash + Eq, + { self.0.get_refresh(k) } @@ -149,15 +152,13 @@ impl LRUMap { self.0.clear(); self.1 = 0; } - } impl Cache { /// Returns the used memory size of the storage cache in bytes. pub fn used_storage_cache_size(&self) -> usize { - self.lru_storage.used_size() - + self.lru_child_storage.used_size() - // ignore small hashes storage and self.lru_hashes.used_size() + self.lru_storage.used_size() + self.lru_child_storage.used_size() + // ignore small hashes storage and self.lru_hashes.used_size() } /// Synchronize the shared cache with the best block state. @@ -233,20 +234,16 @@ pub fn new_shared_cache( child_ratio: (usize, usize), ) -> SharedCache { let top = child_ratio.1.saturating_sub(child_ratio.0); - Arc::new( - RwLock::new( - Cache { - lru_storage: LRUMap( - LinkedHashMap::new(), 0, shared_cache_size * top / child_ratio.1 - ), - lru_hashes: LRUMap(LinkedHashMap::new(), 0, FIX_LRU_HASH_SIZE), - lru_child_storage: LRUMap( - LinkedHashMap::new(), 0, shared_cache_size * child_ratio.0 / child_ratio.1 - ), - modifications: VecDeque::new(), - } - ) - ) + Arc::new(RwLock::new(Cache { + lru_storage: LRUMap(LinkedHashMap::new(), 0, shared_cache_size * top / child_ratio.1), + lru_hashes: LRUMap(LinkedHashMap::new(), 0, FIX_LRU_HASH_SIZE), + lru_child_storage: LRUMap( + LinkedHashMap::new(), + 0, + shared_cache_size * child_ratio.0 / child_ratio.1, + ), + modifications: VecDeque::new(), + })) } #[derive(Debug)] @@ -393,16 +390,15 @@ impl CacheChanges { } } - if let ( - Some(ref number), Some(ref hash), Some(ref parent)) - = (commit_number, commit_hash, self.parent_hash) + if let (Some(ref number), Some(ref hash), Some(ref parent)) = + 
(commit_number, commit_hash, self.parent_hash) { if cache.modifications.len() == STATE_CACHE_BLOCKS { cache.modifications.pop_back(); } let mut modifications = HashSet::new(); let mut child_modifications = HashSet::new(); - child_changes.into_iter().for_each(|(sk, changes)| + child_changes.into_iter().for_each(|(sk, changes)| { for (k, v) in changes.into_iter() { let k = (sk.clone(), k); if is_best { @@ -410,7 +406,7 @@ impl CacheChanges { } child_modifications.insert(k); } - ); + }); for (k, v) in changes.into_iter() { if is_best { cache.lru_hashes.remove(&k); @@ -428,7 +424,9 @@ impl CacheChanges { is_canon: is_best, parent: parent.clone(), }; - let insert_at = cache.modifications.iter() + let insert_at = cache + .modifications + .iter() .enumerate() .find(|(_, m)| m.number < *number) .map(|(i, _)| i); @@ -471,13 +469,16 @@ impl>, B: BlockT> CachingState { key: Option<&[u8]>, child_key: Option<&ChildStorageKey>, parent_hash: &Option, - modifications: &VecDeque> + modifications: &VecDeque>, ) -> bool { let mut parent = match *parent_hash { None => { - trace!("Cache lookup skipped for {:?}: no parent hash", key.as_ref().map(HexDisplay::from)); - return false; - } + trace!( + "Cache lookup skipped for {:?}: no parent hash", + key.as_ref().map(HexDisplay::from) + ); + return false + }, Some(ref parent) => parent, }; // Ignore all storage entries modified in later blocks. 
@@ -488,20 +489,23 @@ impl>, B: BlockT> CachingState { for m in modifications { if &m.hash == parent { if m.is_canon { - return true; + return true } parent = &m.parent; } if let Some(key) = key { if m.storage.contains(key) { - trace!("Cache lookup skipped for {:?}: modified in a later block", HexDisplay::from(&key)); - return false; + trace!( + "Cache lookup skipped for {:?}: modified in a later block", + HexDisplay::from(&key) + ); + return false } } if let Some(child_key) = child_key { if m.child_storage.contains(child_key) { trace!("Cache lookup skipped for {:?}: modified in a later block", child_key); - return false; + return false } } } @@ -540,7 +544,9 @@ impl>, B: BlockT> StateBackend> for Cachin } trace!("Cache miss: {:?}", HexDisplay::from(&key)); let value = self.state.storage(key)?; - RwLockUpgradableReadGuard::upgrade(local_cache).storage.insert(key.to_vec(), value.clone()); + RwLockUpgradableReadGuard::upgrade(local_cache) + .storage + .insert(key.to_vec(), value.clone()); self.usage.tally_key_read(key, value.as_ref(), false); Ok(value) } @@ -563,7 +569,9 @@ impl>, B: BlockT> StateBackend> for Cachin } trace!("Cache hash miss: {:?}", HexDisplay::from(&key)); let hash = self.state.storage_hash(key)?; - RwLockUpgradableReadGuard::upgrade(local_cache).hashes.insert(key.to_vec(), hash); + RwLockUpgradableReadGuard::upgrade(local_cache) + .hashes + .insert(key.to_vec(), hash); Ok(hash) } @@ -576,9 +584,7 @@ impl>, B: BlockT> StateBackend> for Cachin let local_cache = self.cache.local_cache.upgradable_read(); if let Some(entry) = local_cache.child_storage.get(&key).cloned() { trace!("Found in local cache: {:?}", key); - return Ok( - self.usage.tally_child_key_read(&key, entry, true) - ) + return Ok(self.usage.tally_child_key_read(&key, entry, true)) } { let cache = self.cache.shared_cache.upgradable_read(); @@ -586,9 +592,7 @@ impl>, B: BlockT> StateBackend> for Cachin let mut cache = RwLockUpgradableReadGuard::upgrade(cache); if let Some(entry) = 
cache.lru_child_storage.get(&key).map(|a| a.clone()) { trace!("Found in shared cache: {:?}", key); - return Ok( - self.usage.tally_child_key_read(&key, entry, true) - ) + return Ok(self.usage.tally_child_key_read(&key, entry, true)) } } } @@ -596,9 +600,11 @@ impl>, B: BlockT> StateBackend> for Cachin let value = self.state.child_storage(child_info, &key.1[..])?; // just pass it through the usage counter - let value = self.usage.tally_child_key_read(&key, value, false); + let value = self.usage.tally_child_key_read(&key, value, false); - RwLockUpgradableReadGuard::upgrade(local_cache).child_storage.insert(key, value.clone()); + RwLockUpgradableReadGuard::upgrade(local_cache) + .child_storage + .insert(key, value.clone()); Ok(value) } @@ -622,7 +628,8 @@ impl>, B: BlockT> StateBackend> for Cachin f: F, allow_missing: bool, ) -> Result { - self.state.apply_to_key_values_while(child_info, prefix, start_at, f, allow_missing) + self.state + .apply_to_key_values_while(child_info, prefix, start_at, f, allow_missing) } fn apply_to_keys_while bool>( @@ -665,16 +672,22 @@ impl>, B: BlockT> StateBackend> for Cachin fn storage_root<'a>( &self, - delta: impl Iterator)>, - ) -> (B::Hash, Self::Transaction) where B::Hash: Ord { + delta: impl Iterator)>, + ) -> (B::Hash, Self::Transaction) + where + B::Hash: Ord, + { self.state.storage_root(delta) } fn child_storage_root<'a>( &self, child_info: &ChildInfo, - delta: impl Iterator)>, - ) -> (B::Hash, bool, Self::Transaction) where B::Hash: Ord { + delta: impl Iterator)>, + ) -> (B::Hash, bool, Self::Transaction) + where + B::Hash: Ord, + { self.state.child_storage_root(child_info, delta) } @@ -686,11 +699,7 @@ impl>, B: BlockT> StateBackend> for Cachin self.state.keys(prefix) } - fn child_keys( - &self, - child_info: &ChildInfo, - prefix: &[u8], - ) -> Vec> { + fn child_keys(&self, child_info: &ChildInfo, prefix: &[u8]) -> Vec> { self.state.child_keys(child_info, prefix) } @@ -739,13 +748,7 @@ impl SyncingCachingState { meta: Arc, 
B::Hash>>>, lock: Arc>, ) -> Self { - Self { - caching_state: Some(caching_state), - state_usage, - meta, - lock, - disable_syncing: false, - } + Self { caching_state: Some(caching_state), state_usage, meta, lock, disable_syncing: false } } /// Returns the reference to the internal [`CachingState`]. @@ -775,7 +778,9 @@ impl std::fmt::Debug for SyncingCachingState { } } -impl>, B: BlockT> StateBackend> for SyncingCachingState { +impl>, B: BlockT> StateBackend> + for SyncingCachingState +{ type Error = S::Error; type Transaction = S::Transaction; type TrieBackendStorage = S::TrieBackendStorage; @@ -816,7 +821,13 @@ impl>, B: BlockT> StateBackend> for Syncin f: F, allow_missing: bool, ) -> Result { - self.caching_state().apply_to_key_values_while(child_info, prefix, start_at, f, allow_missing) + self.caching_state().apply_to_key_values_while( + child_info, + prefix, + start_at, + f, + allow_missing, + ) } fn apply_to_keys_while bool>( @@ -859,16 +870,22 @@ impl>, B: BlockT> StateBackend> for Syncin fn storage_root<'a>( &self, - delta: impl Iterator)>, - ) -> (B::Hash, Self::Transaction) where B::Hash: Ord { + delta: impl Iterator)>, + ) -> (B::Hash, Self::Transaction) + where + B::Hash: Ord, + { self.caching_state().storage_root(delta) } fn child_storage_root<'a>( &self, child_info: &ChildInfo, - delta: impl Iterator)>, - ) -> (B::Hash, bool, Self::Transaction) where B::Hash: Ord { + delta: impl Iterator)>, + ) -> (B::Hash, bool, Self::Transaction) + where + B::Hash: Ord, + { self.caching_state().child_storage_root(child_info, delta) } @@ -880,11 +897,7 @@ impl>, B: BlockT> StateBackend> for Syncin self.caching_state().keys(prefix) } - fn child_keys( - &self, - child_info: &ChildInfo, - prefix: &[u8], - ) -> Vec> { + fn child_keys(&self, child_info: &ChildInfo, prefix: &[u8]) -> Vec> { self.caching_state().child_keys(child_info, prefix) } @@ -907,7 +920,7 @@ impl>, B: BlockT> StateBackend> for Syncin impl Drop for SyncingCachingState { fn drop(&mut self) { if 
self.disable_syncing { - return; + return } if let Some(mut caching_state) = self.caching_state.take() { @@ -926,8 +939,8 @@ impl Drop for SyncingCachingState { mod tests { use super::*; use sp_runtime::{ + testing::{Block as RawBlock, ExtrinsicWrapper, H256}, traits::BlakeTwo256, - testing::{H256, Block as RawBlock, ExtrinsicWrapper}, }; use sp_state_machine::InMemoryBackend; @@ -965,18 +978,12 @@ mod tests { true, ); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h0), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h0)); s.cache.sync_cache(&[], &[], vec![], vec![], Some(h1a), Some(1), true); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h0), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h0)); s.cache.sync_cache( &[], &[], @@ -987,11 +994,8 @@ mod tests { false, ); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1b), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1b)); s.cache.sync_cache( &[], &[], @@ -1002,11 +1006,8 @@ mod tests { false, ); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1a), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1a)); s.cache.sync_cache( &[], &[], @@ -1017,48 +1018,30 @@ mod tests { true, ); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h2a), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h2a)); s.cache.sync_cache(&[], &[], vec![], vec![], Some(h3a), Some(3), true); - let s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h3a), - ); + let s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h3a)); assert_eq!(s.storage(&key).unwrap().unwrap(), vec![5]); - let s 
= CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1a), - ); + let s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1a)); assert!(s.storage(&key).unwrap().is_none()); - let s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h2b), - ); + let s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h2b)); assert!(s.storage(&key).unwrap().is_none()); - let s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1b), - ); + let s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1b)); assert!(s.storage(&key).unwrap().is_none()); // reorg to 3b // blocks [ 3b(c) 3a 2a 2b(c) 1b 1a 0 ] - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h2b), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h2b)); s.cache.sync_cache( &[h1b, h2b, h3b], &[h1a, h2a, h3a], @@ -1068,11 +1051,8 @@ mod tests { Some(3), true, ); - let s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h3a), - ); + let s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h3a)); assert!(s.storage(&key).unwrap().is_none()); } @@ -1087,7 +1067,7 @@ mod tests { let h2b = H256::random(); let h3b = H256::random(); - let shared = new_shared_cache::(256*1024, (0,1)); + let shared = new_shared_cache::(256 * 1024, (0, 1)); let mut s = CachingState::new( InMemoryBackend::::default(), @@ -1104,18 +1084,12 @@ mod tests { true, ); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1)); s.cache.sync_cache(&[], &[], vec![], vec![], Some(h2a), Some(2), true); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), 
shared.clone(), Some(h1)); s.cache.sync_cache( &[], &[], @@ -1126,11 +1100,8 @@ mod tests { false, ); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h2b), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h2b)); s.cache.sync_cache( &[], &[], @@ -1141,11 +1112,8 @@ mod tests { false, ); - let s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h2a), - ); + let s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h2a)); assert_eq!(s.storage(&key).unwrap().unwrap(), vec![2]); } @@ -1159,7 +1127,7 @@ mod tests { let h3a = H256::random(); let h3b = H256::random(); - let shared = new_shared_cache::(256*1024, (0,1)); + let shared = new_shared_cache::(256 * 1024, (0, 1)); let mut s = CachingState::new( InMemoryBackend::::default(), @@ -1168,18 +1136,12 @@ mod tests { ); s.cache.sync_cache(&[], &[], vec![], vec![], Some(h1), Some(1), true); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1)); s.cache.sync_cache(&[], &[], vec![], vec![], Some(h2a), Some(2), true); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h2a), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h2a)); s.cache.sync_cache( &[], &[], @@ -1190,18 +1152,12 @@ mod tests { true, ); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1)); s.cache.sync_cache(&[], &[], vec![], vec![], Some(h2b), Some(2), false); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h2b), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h2b)); s.cache.sync_cache( &[], &[], @@ 
-1212,11 +1168,8 @@ mod tests { false, ); - let s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h3a), - ); + let s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h3a)); assert_eq!(s.storage(&key).unwrap().unwrap(), vec![2]); } @@ -1227,15 +1180,11 @@ mod tests { let h1a = H256::random(); let h1b = H256::random(); - let shared = new_shared_cache::(256*1024, (0,1)); + let shared = new_shared_cache::(256 * 1024, (0, 1)); let mut backend = InMemoryBackend::::default(); backend.insert(std::iter::once((None, vec![(key.clone(), Some(vec![1]))]))); - let mut s = CachingState::new( - backend.clone(), - shared.clone(), - Some(root_parent), - ); + let mut s = CachingState::new(backend.clone(), shared.clone(), Some(root_parent)); s.cache.sync_cache( &[], &[], @@ -1246,29 +1195,23 @@ mod tests { true, ); - let mut s = CachingState::new( - backend.clone(), - shared.clone(), - Some(root_parent), - ); + let mut s = CachingState::new(backend.clone(), shared.clone(), Some(root_parent)); s.cache.sync_cache(&[], &[h1a], vec![], vec![], Some(h1b), Some(1), true); - let s = CachingState::new( - backend.clone(), - shared.clone(), - Some(h1b), - ); + let s = CachingState::new(backend.clone(), shared.clone(), Some(h1b)); assert_eq!(s.storage_hash(&key).unwrap().unwrap(), BlakeTwo256::hash(&vec![1])); } #[test] fn should_track_used_size_correctly() { let root_parent = H256::random(); - let shared = new_shared_cache::(109, ((109-36), 109)); + let shared = new_shared_cache::(109, ((109 - 36), 109)); let h0 = H256::random(); let mut s = CachingState::new( - InMemoryBackend::::default(), shared.clone(), Some(root_parent.clone()), + InMemoryBackend::::default(), + shared.clone(), + Some(root_parent.clone()), ); let key = H256::random()[..].to_vec(); @@ -1302,7 +1245,7 @@ mod tests { #[test] fn should_remove_lru_items_based_on_tracking_used_size() { let root_parent = H256::random(); - let shared = new_shared_cache::(36*3, (2,3)); + let 
shared = new_shared_cache::(36 * 3, (2, 3)); let h0 = H256::random(); let mut s = CachingState::new( @@ -1364,11 +1307,8 @@ mod tests { true, ); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h0), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h0)); s.cache.sync_cache( &[], &[], @@ -1379,11 +1319,8 @@ mod tests { true, ); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1)); assert_eq!(s.storage(&key).unwrap(), Some(vec![3])); // Restart (or unknown block?), clear caches. @@ -1402,11 +1339,8 @@ mod tests { // New value is propagated. s.cache.sync_cache(&[], &[], vec![], vec![], None, None, true); - let s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1), - ); + let s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1)); assert_eq!(s.storage(&key).unwrap(), None); } @@ -1419,7 +1353,7 @@ mod tests { let h1 = H256::random(); let h2 = H256::random(); - let shared = new_shared_cache::(256*1024, (0,1)); + let shared = new_shared_cache::(256 * 1024, (0, 1)); let mut s = CachingState::new( InMemoryBackend::::default(), @@ -1437,11 +1371,8 @@ mod tests { ); assert_eq!(shared.write().lru_storage.get(&key).unwrap(), &Some(vec![1])); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1)); // commit as non-best s.cache.sync_cache( @@ -1456,36 +1387,25 @@ mod tests { assert_eq!(shared.write().lru_storage.get(&key).unwrap(), &Some(vec![1])); - let mut s = CachingState::new( - InMemoryBackend::::default(), - shared.clone(), - Some(h1), - ); + let mut s = + CachingState::new(InMemoryBackend::::default(), shared.clone(), Some(h1)); // commit again as best with no changes - 
s.cache.sync_cache( - &[], - &[], - vec![], - vec![], - Some(h2), - Some(2), - true, - ); + s.cache.sync_cache(&[], &[], vec![], vec![], Some(h2), Some(2), true); assert_eq!(s.storage(&key).unwrap(), None); } } #[cfg(test)] mod qc { - use std::collections::{HashMap, hash_map::Entry}; + use std::collections::{hash_map::Entry, HashMap}; - use quickcheck::{quickcheck, TestResult, Arbitrary}; + use quickcheck::{quickcheck, Arbitrary, TestResult}; use super::*; use sp_runtime::{ + testing::{Block as RawBlock, ExtrinsicWrapper, H256}, traits::BlakeTwo256, - testing::{H256, Block as RawBlock, ExtrinsicWrapper}, }; use sp_state_machine::InMemoryBackend; @@ -1507,28 +1427,24 @@ mod qc { fn new_next(&self, hash: H256, changes: KeySet) -> Self { let mut state = self.state.clone(); - for (k, v) in self.state.iter() { state.insert(k.clone(), v.clone()); } - for (k, v) in changes.clone().into_iter() { state.insert(k, v); } - - Self { - hash, - parent: self.hash, - changes, - state, + for (k, v) in self.state.iter() { + state.insert(k.clone(), v.clone()); } + for (k, v) in changes.clone().into_iter() { + state.insert(k, v); + } + + Self { hash, parent: self.hash, changes, state } } fn new(hash: H256, parent: H256, changes: KeySet) -> Self { let mut state = KeyMap::new(); - for (k, v) in changes.clone().into_iter() { state.insert(k, v); } - - Self { - hash, - parent, - state, - changes, + for (k, v) in changes.clone().into_iter() { + state.insert(k, v); } + + Self { hash, parent, state, changes } } fn purge(&mut self, other_changes: &KeySet) { @@ -1552,30 +1468,26 @@ mod qc { let buf = (0..32).map(|_| u8::arbitrary(gen)).collect::>(); match path { - 0..=175 => { - Action::Next { - hash: H256::from_slice(&buf[..]), - changes: { - let mut set = Vec::new(); - for _ in 0..::arbitrary(gen)/(64*256*256*256) { - set.push((vec![u8::arbitrary(gen)], Some(vec![u8::arbitrary(gen)]))); - } - set + 0..=175 => Action::Next { + hash: H256::from_slice(&buf[..]), + changes: { + let mut set = 
Vec::new(); + for _ in 0..::arbitrary(gen) / (64 * 256 * 256 * 256) { + set.push((vec![u8::arbitrary(gen)], Some(vec![u8::arbitrary(gen)]))); } - } + set + }, }, - 176..=220 => { - Action::Fork { - hash: H256::from_slice(&buf[..]), - depth: ((u8::arbitrary(gen)) / 32) as usize, - changes: { - let mut set = Vec::new(); - for _ in 0..::arbitrary(gen)/(64*256*256*256) { - set.push((vec![u8::arbitrary(gen)], Some(vec![u8::arbitrary(gen)]))); - } - set + 176..=220 => Action::Fork { + hash: H256::from_slice(&buf[..]), + depth: ((u8::arbitrary(gen)) / 32) as usize, + changes: { + let mut set = Vec::new(); + for _ in 0..::arbitrary(gen) / (64 * 256 * 256 * 256) { + set.push((vec![u8::arbitrary(gen)], Some(vec![u8::arbitrary(gen)]))); } - } + set + }, }, 221..=240 => { Action::ReorgWithImport { @@ -1586,7 +1498,7 @@ mod qc { _ => { Action::FinalizationReorg { fork_depth: ((u8::arbitrary(gen)) / 32) as usize, // 0-7 - depth: ((u8::arbitrary(gen)) / 64) as usize, // 0-3 + depth: ((u8::arbitrary(gen)) / 64) as usize, // 0-3 } }, } @@ -1601,13 +1513,9 @@ mod qc { impl Mutator { fn new_empty() -> Self { - let shared = new_shared_cache::(256*1024, (0,1)); + let shared = new_shared_cache::(256 * 1024, (0, 1)); - Self { - shared, - canon: vec![], - forks: HashMap::new(), - } + Self { shared, canon: vec![], forks: HashMap::new() } } fn head_state(&self, hash: H256) -> CachingState, Block> { @@ -1626,11 +1534,12 @@ mod qc { &mut self, action: Action, ) -> CachingState, Block> { - self.mutate(action).expect("Expected to provide only valid actions to the mutate_static") + self.mutate(action) + .expect("Expected to provide only valid actions to the mutate_static") } fn canon_len(&self) -> usize { - return self.canon.len(); + return self.canon.len() } fn head_storage_ref(&self) -> &KeyMap { @@ -1648,10 +1557,10 @@ mod qc { let state = match action { Action::Fork { depth, hash, changes } => { let pos = self.canon.len() as isize - depth as isize; - if pos < 0 || self.canon.len() == 0 || 
pos >= (self.canon.len()-1) as isize + if pos < 0 || self.canon.len() == 0 || pos >= (self.canon.len() - 1) as isize // no fork on top also, thus len-1 { - return Err(()); + return Err(()) } let pos = pos as usize; @@ -1661,7 +1570,8 @@ mod qc { let (total_h, parent) = match self.forks.entry(fork_at) { Entry::Occupied(occupied) => { let chain = occupied.into_mut(); - let parent = chain.last().expect("No empty forks are ever created").clone(); + let parent = + chain.last().expect("No empty forks are ever created").clone(); let mut node = parent.new_next(hash, changes.clone()); for earlier in chain.iter() { @@ -1677,7 +1587,7 @@ mod qc { vacant.insert(vec![canon_parent.new_next(hash, changes.clone())]); (pos + 1, fork_at) - } + }, }; let mut state = CachingState::new( @@ -1704,9 +1614,7 @@ mod qc { let parent_hash = H256::from(&[0u8; 32]); (Node::new(hash, parent_hash, changes.clone()), parent_hash) }, - Some(parent) => { - (parent.new_next(hash, changes.clone()), parent.hash) - } + Some(parent) => (parent.new_next(hash, changes.clone()), parent.hash), }; // delete cache entries for earlier @@ -1741,22 +1649,26 @@ mod qc { }, Action::ReorgWithImport { depth, hash } => { let pos = self.canon.len() as isize - depth as isize; - if pos < 0 || pos+1 >= self.canon.len() as isize { return Err(()); } + if pos < 0 || pos + 1 >= self.canon.len() as isize { + return Err(()) + } let fork_at = self.canon[pos as usize].hash; let pos = pos as usize; match self.forks.get_mut(&fork_at) { Some(chain) => { - let mut new_fork = self.canon.drain(pos+1..).collect::>(); + let mut new_fork = self.canon.drain(pos + 1..).collect::>(); - let retracted: Vec = new_fork.iter().map(|node| node.hash).collect(); + let retracted: Vec = + new_fork.iter().map(|node| node.hash).collect(); let enacted: Vec = chain.iter().map(|node| node.hash).collect(); std::mem::swap(chain, &mut new_fork); - let mut node = new_fork.last().map( - |node| node.new_next(hash, vec![]) - ).expect("No empty fork ever 
created!"); + let mut node = new_fork + .last() + .map(|node| node.new_next(hash, vec![])) + .expect("No empty fork ever created!"); for invalidators in chain.iter().chain(new_fork.iter()) { node.purge(&invalidators.changes); @@ -1784,44 +1696,54 @@ mod qc { ); state - } + }, None => { - return Err(()); // no reorg without a fork atm! + return Err(()) // no reorg without a fork atm! }, } }, Action::FinalizationReorg { fork_depth, depth } => { let pos = self.canon.len() as isize - fork_depth as isize; - if pos < 0 || pos+1 >= self.canon.len() as isize { return Err(()); } + if pos < 0 || pos + 1 >= self.canon.len() as isize { + return Err(()) + } let fork_at = self.canon[pos as usize].hash; let pos = pos as usize; match self.forks.get_mut(&fork_at) { Some(fork_chain) => { - let sync_pos = fork_chain.len() as isize - fork_chain.len() as isize - depth as isize; - if sync_pos < 0 || sync_pos >= fork_chain.len() as isize { return Err (()); } + let sync_pos = fork_chain.len() as isize - + fork_chain.len() as isize - depth as isize; + if sync_pos < 0 || sync_pos >= fork_chain.len() as isize { + return Err(()) + } let sync_pos = sync_pos as usize; - let mut new_fork = self.canon.drain(pos+1..).collect::>(); + let mut new_fork = self.canon.drain(pos + 1..).collect::>(); - let retracted: Vec = new_fork.iter().map(|node| node.hash).collect(); - let enacted: Vec = fork_chain.iter().take(sync_pos+1).map(|node| node.hash).collect(); + let retracted: Vec = + new_fork.iter().map(|node| node.hash).collect(); + let enacted: Vec = fork_chain + .iter() + .take(sync_pos + 1) + .map(|node| node.hash) + .collect(); std::mem::swap(fork_chain, &mut new_fork); self.shared.write().sync(&retracted, &enacted); self.head_state( - self.canon.last() - .expect("wasn't forking to emptiness so there should be one!") - .hash + self.canon + .last() + .expect("wasn't forking to emptiness so there should be one!") + .hash, ) }, None => { - return Err(()); // no reorg to nothing pls! 
- } + return Err(()) // no reorg to nothing pls! + }, } - }, }; @@ -1841,14 +1763,27 @@ mod qc { let h3b = H256::random(); let mut mutator = Mutator::new_empty(); - mutator.mutate_static(Action::Next { hash: h0, changes: vec![(key.clone(), Some(vec![2]))] }); + mutator + .mutate_static(Action::Next { hash: h0, changes: vec![(key.clone(), Some(vec![2]))] }); mutator.mutate_static(Action::Next { hash: h1a, changes: vec![] }); - mutator.mutate_static(Action::Fork { depth: 2, hash: h1b, changes: vec![(key.clone(), Some(vec![3]))] }); - mutator.mutate_static(Action::Fork { depth: 2, hash: h2b, changes: vec![(key.clone(), Some(vec![4]))] }); - mutator.mutate_static(Action::Next { hash: h2a, changes: vec![(key.clone(), Some(vec![5]))] }); + mutator.mutate_static(Action::Fork { + depth: 2, + hash: h1b, + changes: vec![(key.clone(), Some(vec![3]))], + }); + mutator.mutate_static(Action::Fork { + depth: 2, + hash: h2b, + changes: vec![(key.clone(), Some(vec![4]))], + }); + mutator + .mutate_static(Action::Next { hash: h2a, changes: vec![(key.clone(), Some(vec![5]))] }); mutator.mutate_static(Action::Next { hash: h3a, changes: vec![] }); - assert_eq!(mutator.head_state(h3a).storage(&key).unwrap().expect("there should be a value"), vec![5]); + assert_eq!( + mutator.head_state(h3a).storage(&key).unwrap().expect("there should be a value"), + vec![5] + ); assert!(mutator.head_state(h1a).storage(&key).unwrap().is_none()); assert!(mutator.head_state(h2b).storage(&key).unwrap().is_none()); assert!(mutator.head_state(h1b).storage(&key).unwrap().is_none()); @@ -1862,18 +1797,17 @@ mod qc { for key in Mutator::key_permutations() { match (head_state.storage(&key).unwrap(), mutator.head_storage_ref().get(&key)) { - (Some(x), Some(y)) => { + (Some(x), Some(y)) => if Some(&x) != y.as_ref() { eprintln!("{:?} != {:?}", x, y); - return false; - } - }, + return false + }, (None, Some(_y)) => { // TODO: cache miss is not tracked atm }, (Some(x), None) => { eprintln!("{:?} != ", x); - return 
false; + return false }, _ => continue, } @@ -1886,18 +1820,17 @@ mod qc { let head_state = mutator.head_state(node.hash); for key in Mutator::key_permutations() { match (head_state.storage(&key).unwrap(), node.state.get(&key)) { - (Some(x), Some(y)) => { + (Some(x), Some(y)) => if Some(&x) != y.as_ref() { eprintln!("at [{}]: {:?} != {:?}", node.hash, x, y); - return false; - } - }, + return false + }, (None, Some(_y)) => { // cache miss is not tracked atm }, (Some(x), None) => { eprintln!("at [{}]: {:?} != ", node.hash, x); - return false; + return false }, _ => continue, } @@ -1918,16 +1851,27 @@ mod qc { let mut mutator = Mutator::new_empty(); mutator.mutate_static(Action::Next { hash: h0, changes: vec![] }); mutator.mutate_static(Action::Next { hash: h1, changes: vec![] }); - mutator.mutate_static(Action::Next { hash: h2, changes: vec![(key.clone(), Some(vec![2]))] }); - mutator.mutate_static(Action::Fork { depth: 2, hash: h1b, changes: vec![(key.clone(), Some(vec![3]))] }); + mutator + .mutate_static(Action::Next { hash: h2, changes: vec![(key.clone(), Some(vec![2]))] }); + mutator.mutate_static(Action::Fork { + depth: 2, + hash: h1b, + changes: vec![(key.clone(), Some(vec![3]))], + }); mutator.mutate_static(Action::ReorgWithImport { depth: 2, hash: h2b }); assert!(is_head_match(&mutator)) } - fn key(k: u8) -> Vec { vec![k] } - fn val(v: u8) -> Option> { Some(vec![v]) } - fn keyval(k: u8, v: u8) -> KeySet { vec![(key(k), val(v))] } + fn key(k: u8) -> Vec { + vec![k] + } + fn val(v: u8) -> Option> { + Some(vec![v]) + } + fn keyval(k: u8, v: u8) -> KeySet { + vec![(key(k), val(v))] + } #[test] fn reorg2() { @@ -1941,7 +1885,7 @@ mod qc { let mut mutator = Mutator::new_empty(); mutator.mutate_static(Action::Next { hash: h0, changes: keyval(1, 1) }); mutator.mutate_static(Action::Next { hash: h1a, changes: keyval(1, 1) }); - mutator.mutate_static(Action::Fork { depth: 2, hash: h1b, changes: keyval(2, 2 ) }); + mutator.mutate_static(Action::Fork { depth: 2, hash: 
h1b, changes: keyval(2, 2) }); mutator.mutate_static(Action::Next { hash: h2a, changes: keyval(3, 3) }); mutator.mutate_static(Action::Next { hash: h3a, changes: keyval(4, 4) }); diff --git a/client/db/src/upgrade.rs b/client/db/src/upgrade.rs index ea91b8253e1d8..fe0abaed1b07a 100644 --- a/client/db/src/upgrade.rs +++ b/client/db/src/upgrade.rs @@ -18,14 +18,16 @@ //! Database upgrade logic. -use std::fs; -use std::io::{Read, Write, ErrorKind}; -use std::path::{Path, PathBuf}; +use std::{ + fs, + io::{ErrorKind, Read, Write}, + path::{Path, PathBuf}, +}; -use sp_runtime::traits::Block as BlockT; use crate::{columns, utils::DatabaseType}; -use kvdb_rocksdb::{Database, DatabaseConfig}; use codec::{Decode, Encode}; +use kvdb_rocksdb::{Database, DatabaseConfig}; +use sp_runtime::traits::Block as BlockT; /// Version file name. const VERSION_FILE_NAME: &'static str = "db_version"; @@ -38,19 +40,28 @@ const V1_NUM_COLUMNS: u32 = 11; const V2_NUM_COLUMNS: u32 = 12; /// Upgrade database to current version. -pub fn upgrade_db(db_path: &Path, db_type: DatabaseType) -> sp_blockchain::Result<()> { +pub fn upgrade_db( + db_path: &Path, + db_type: DatabaseType, +) -> sp_blockchain::Result<()> { let is_empty = db_path.read_dir().map_or(true, |mut d| d.next().is_none()); if !is_empty { let db_version = current_version(db_path)?; match db_version { - 0 => Err(sp_blockchain::Error::Backend(format!("Unsupported database version: {}", db_version)))?, + 0 => Err(sp_blockchain::Error::Backend(format!( + "Unsupported database version: {}", + db_version + )))?, 1 => { migrate_1_to_2::(db_path, db_type)?; migrate_2_to_3::(db_path, db_type)? 
}, 2 => migrate_2_to_3::(db_path, db_type)?, CURRENT_VERSION => (), - _ => Err(sp_blockchain::Error::Backend(format!("Future database version: {}", db_version)))?, + _ => Err(sp_blockchain::Error::Backend(format!( + "Future database version: {}", + db_version + )))?, } } @@ -60,8 +71,12 @@ pub fn upgrade_db(db_path: &Path, db_type: DatabaseType) -> sp_bl /// Migration from version1 to version2: /// 1) the number of columns has changed from 11 to 12; /// 2) transactions column is added; -fn migrate_1_to_2(db_path: &Path, _db_type: DatabaseType) -> sp_blockchain::Result<()> { - let db_path = db_path.to_str() +fn migrate_1_to_2( + db_path: &Path, + _db_type: DatabaseType, +) -> sp_blockchain::Result<()> { + let db_path = db_path + .to_str() .ok_or_else(|| sp_blockchain::Error::Backend("Invalid database path".into()))?; let db_cfg = DatabaseConfig::with_columns(V1_NUM_COLUMNS); let db = Database::open(&db_cfg, db_path).map_err(db_err)?; @@ -70,8 +85,12 @@ fn migrate_1_to_2(db_path: &Path, _db_type: DatabaseType) -> sp_b /// Migration from version2 to version3: /// - The format of the stored Justification changed to support multiple Justifications. 
-fn migrate_2_to_3(db_path: &Path, _db_type: DatabaseType) -> sp_blockchain::Result<()> { - let db_path = db_path.to_str() +fn migrate_2_to_3( + db_path: &Path, + _db_type: DatabaseType, +) -> sp_blockchain::Result<()> { + let db_path = db_path + .to_str() .ok_or_else(|| sp_blockchain::Error::Backend("Invalid database path".into()))?; let db_cfg = DatabaseConfig::with_columns(V2_NUM_COLUMNS); let db = Database::open(&db_cfg, db_path).map_err(db_err)?; @@ -137,10 +156,11 @@ fn version_file_path(path: &Path) -> PathBuf { #[cfg(test)] mod tests { - use sc_state_db::PruningMode; - use crate::{DatabaseSettings, DatabaseSettingsSrc, KeepBlocks, TransactionStorageMode}; - use crate::tests::Block; use super::*; + use crate::{ + tests::Block, DatabaseSettings, DatabaseSettingsSrc, KeepBlocks, TransactionStorageMode, + }; + use sc_state_db::PruningMode; fn create_db(db_path: &Path, version: Option) { if let Some(version) = version { @@ -151,14 +171,18 @@ mod tests { } fn open_database(db_path: &Path) -> sp_blockchain::Result<()> { - crate::utils::open_database::(&DatabaseSettings { - state_cache_size: 0, - state_cache_child_ratio: None, - state_pruning: PruningMode::ArchiveAll, - source: DatabaseSettingsSrc::RocksDb { path: db_path.to_owned(), cache_size: 128 }, - keep_blocks: KeepBlocks::All, - transaction_storage: TransactionStorageMode::BlockBody, - }, DatabaseType::Full).map(|_| ()) + crate::utils::open_database::( + &DatabaseSettings { + state_cache_size: 0, + state_cache_child_ratio: None, + state_pruning: PruningMode::ArchiveAll, + source: DatabaseSettingsSrc::RocksDb { path: db_path.to_owned(), cache_size: 128 }, + keep_blocks: KeepBlocks::All, + transaction_storage: TransactionStorageMode::BlockBody, + }, + DatabaseType::Full, + ) + .map(|_| ()) } #[test] diff --git a/client/db/src/utils.rs b/client/db/src/utils.rs index bd6dc9841aa63..808de326e99f7 100644 --- a/client/db/src/utils.rs +++ b/client/db/src/utils.rs @@ -19,24 +19,27 @@ //! 
Db-based backend utility structures and functions, used by both //! full and light storages. -use std::sync::Arc; -use std::convert::TryInto; +use std::{convert::TryInto, sync::Arc}; use log::debug; +use crate::{Database, DatabaseSettings, DatabaseSettingsSrc, DbHash}; use codec::Decode; -use sp_trie::DBValue; use sp_database::Transaction; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{ - Block as BlockT, Header as HeaderT, Zero, - UniqueSaturatedFrom, UniqueSaturatedInto, +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT, UniqueSaturatedFrom, UniqueSaturatedInto, Zero}, }; -use crate::{DatabaseSettings, DatabaseSettingsSrc, Database, DbHash}; +use sp_trie::DBValue; /// Number of columns in the db. Must be the same for both full && light dbs. /// Otherwise RocksDb will fail to open database && check its type. -#[cfg(any(feature = "with-kvdb-rocksdb", feature = "with-parity-db", feature = "test-helpers", test))] +#[cfg(any( + feature = "with-kvdb-rocksdb", + feature = "with-parity-db", + feature = "test-helpers", + test +))] pub const NUM_COLUMNS: u32 = 12; /// Meta column. The set of keys in the column is shared by full && light storages. pub const COLUMN_META: u32 = 0; @@ -98,24 +101,17 @@ pub enum DatabaseType { /// In the current database schema, this kind of key is only used for /// lookups into an index, NOT for storing header data or others. pub fn number_index_key>(n: N) -> sp_blockchain::Result { - let n = n.try_into().map_err(|_| + let n = n.try_into().map_err(|_| { sp_blockchain::Error::Backend("Block number cannot be converted to u32".into()) - )?; - - Ok([ - (n >> 24) as u8, - ((n >> 16) & 0xff) as u8, - ((n >> 8) & 0xff) as u8, - (n & 0xff) as u8 - ]) + })?; + + Ok([(n >> 24) as u8, ((n >> 16) & 0xff) as u8, ((n >> 8) & 0xff) as u8, (n & 0xff) as u8]) } /// Convert number and hash into long lookup key for blocks that are /// not in the canonical chain. 
-pub fn number_and_hash_to_lookup_key( - number: N, - hash: H, -) -> sp_blockchain::Result> where +pub fn number_and_hash_to_lookup_key(number: N, hash: H) -> sp_blockchain::Result> +where N: TryInto, H: AsRef<[u8]>, { @@ -126,16 +122,15 @@ pub fn number_and_hash_to_lookup_key( /// Convert block lookup key into block number. /// all block lookup keys start with the block number. -pub fn lookup_key_to_number(key: &[u8]) -> sp_blockchain::Result where - N: From +pub fn lookup_key_to_number(key: &[u8]) -> sp_blockchain::Result +where + N: From, { if key.len() < 4 { - return Err(sp_blockchain::Error::Backend("Invalid block key".into())); + return Err(sp_blockchain::Error::Backend("Invalid block key".into())) } - Ok((key[0] as u32) << 24 - | (key[1] as u32) << 16 - | (key[2] as u32) << 8 - | (key[3] as u32)).map(Into::into) + Ok((key[0] as u32) << 24 | (key[1] as u32) << 16 | (key[2] as u32) << 8 | (key[3] as u32)) + .map(Into::into) } /// Delete number to hash mapping in DB transaction. @@ -197,17 +192,15 @@ pub fn insert_hash_to_key_mapping, H: AsRef<[u8]> + Clone>( pub fn block_id_to_lookup_key( db: &dyn Database, key_lookup_col: u32, - id: BlockId -) -> Result>, sp_blockchain::Error> where + id: BlockId, +) -> Result>, sp_blockchain::Error> +where Block: BlockT, ::sp_runtime::traits::NumberFor: UniqueSaturatedFrom + UniqueSaturatedInto, { Ok(match id { - BlockId::Number(n) => db.get( - key_lookup_col, - number_index_key(n)?.as_ref(), - ), - BlockId::Hash(h) => db.get(key_lookup_col, h.as_ref()) + BlockId::Number(n) => db.get(key_lookup_col, number_index_key(n)?.as_ref()), + BlockId::Hash(h) => db.get(key_lookup_col, h.as_ref()), }) } @@ -218,9 +211,10 @@ pub fn open_database( ) -> sp_blockchain::Result>> { #[allow(unused)] fn db_open_error(feat: &'static str) -> sp_blockchain::Error { - sp_blockchain::Error::Backend( - format!("`{}` feature not enabled, database can not be opened", feat), - ) + sp_blockchain::Error::Backend(format!( + "`{}` feature not enabled, 
database can not be opened", + feat + )) } let db: Arc> = match &config.source { @@ -231,14 +225,16 @@ pub fn open_database( // and now open database assuming that it has the latest version let mut db_config = kvdb_rocksdb::DatabaseConfig::with_columns(NUM_COLUMNS); - let path = path.to_str() + let path = path + .to_str() .ok_or_else(|| sp_blockchain::Error::Backend("Invalid database path".into()))?; let mut memory_budget = std::collections::HashMap::new(); match db_type { DatabaseType::Full => { let state_col_budget = (*cache_size as f64 * 0.9) as usize; - let other_col_budget = (cache_size - state_col_budget) / (NUM_COLUMNS as usize - 1); + let other_col_budget = + (cache_size - state_col_budget) / (NUM_COLUMNS as usize - 1); for i in 0..NUM_COLUMNS { if i == crate::columns::STATE { @@ -267,7 +263,7 @@ pub fn open_database( path, col_budget, ); - } + }, } db_config.memory_budget = memory_budget; @@ -277,17 +273,13 @@ pub fn open_database( }, #[cfg(not(any(feature = "with-kvdb-rocksdb", test)))] DatabaseSettingsSrc::RocksDb { .. } => { - return Err(db_open_error("with-kvdb-rocksdb")); + return Err(db_open_error("with-kvdb-rocksdb")) }, #[cfg(feature = "with-parity-db")] - DatabaseSettingsSrc::ParityDb { path } => { - crate::parity_db::open(&path, db_type) - .map_err(|e| sp_blockchain::Error::Backend(format!("{}", e)))? - }, + DatabaseSettingsSrc::ParityDb { path } => crate::parity_db::open(&path, db_type) + .map_err(|e| sp_blockchain::Error::Backend(format!("{}", e)))?, #[cfg(not(feature = "with-parity-db"))] - DatabaseSettingsSrc::ParityDb { .. } => { - return Err(db_open_error("with-parity-db")) - }, + DatabaseSettingsSrc::ParityDb { .. } => return Err(db_open_error("with-parity-db")), DatabaseSettingsSrc::Custom(db) => db.clone(), }; @@ -297,14 +289,19 @@ pub fn open_database( } /// Check database type. 
-pub fn check_database_type(db: &dyn Database, db_type: DatabaseType) -> sp_blockchain::Result<()> { +pub fn check_database_type( + db: &dyn Database, + db_type: DatabaseType, +) -> sp_blockchain::Result<()> { match db.get(COLUMN_META, meta_keys::TYPE) { - Some(stored_type) => { + Some(stored_type) => if db_type.as_str().as_bytes() != &*stored_type { - return Err(sp_blockchain::Error::Backend( - format!("Unexpected database type. Expected: {}", db_type.as_str())).into()); - } - }, + return Err(sp_blockchain::Error::Backend(format!( + "Unexpected database type. Expected: {}", + db_type.as_str() + )) + .into()) + }, None => { let mut transaction = Transaction::new(); transaction.set(COLUMN_META, meta_keys::TYPE, db_type.as_str().as_bytes()); @@ -320,10 +317,10 @@ pub fn read_db( db: &dyn Database, col_index: u32, col: u32, - id: BlockId + id: BlockId, ) -> sp_blockchain::Result> - where - Block: BlockT, +where + Block: BlockT, { block_id_to_lookup_key(db, col_index, id).and_then(|key| match key { Some(key) => Ok(db.get(col, key.as_ref())), @@ -358,10 +355,8 @@ pub fn read_header( match read_db(db, col_index, col, id)? { Some(header) => match Block::Header::decode(&mut &header[..]) { Ok(header) => Ok(Some(header)), - Err(_) => return Err( - sp_blockchain::Error::Backend("Error decoding header".into()) - ), - } + Err(_) => return Err(sp_blockchain::Error::Backend("Error decoding header".into())), + }, None => Ok(None), } } @@ -373,34 +368,35 @@ pub fn require_header( col: u32, id: BlockId, ) -> sp_blockchain::Result { - read_header(db, col_index, col, id) - .and_then(|header| header.ok_or_else(|| - sp_blockchain::Error::UnknownBlock(format!("Require header: {}", id)) - )) + read_header(db, col_index, col, id).and_then(|header| { + header.ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("Require header: {}", id))) + }) } /// Read meta from the database. 
-pub fn read_meta(db: &dyn Database, col_header: u32) -> Result< - Meta<<::Header as HeaderT>::Number, Block::Hash>, - sp_blockchain::Error, -> - where - Block: BlockT, +pub fn read_meta( + db: &dyn Database, + col_header: u32, +) -> Result::Header as HeaderT>::Number, Block::Hash>, sp_blockchain::Error> +where + Block: BlockT, { let genesis_hash: Block::Hash = match read_genesis_hash(db)? { Some(genesis_hash) => genesis_hash, - None => return Ok(Meta { - best_hash: Default::default(), - best_number: Zero::zero(), - finalized_hash: Default::default(), - finalized_number: Zero::zero(), - genesis_hash: Default::default(), - finalized_state: None, - }), + None => + return Ok(Meta { + best_hash: Default::default(), + best_number: Zero::zero(), + finalized_hash: Default::default(), + finalized_number: Zero::zero(), + genesis_hash: Default::default(), + finalized_state: None, + }), }; let load_meta_block = |desc, key| -> Result<_, sp_blockchain::Error> { - if let Some(Some(header)) = db.get(COLUMN_META, key) + if let Some(Some(header)) = db + .get(COLUMN_META, key) .and_then(|id| db.get(col_header, &id).map(|b| Block::Header::decode(&mut &b[..]).ok())) { let hash = header.hash(); @@ -419,7 +415,8 @@ pub fn read_meta(db: &dyn Database, col_header: u32) -> Result< let (best_hash, best_number) = load_meta_block("best", meta_keys::BEST_BLOCK)?; let (finalized_hash, finalized_number) = load_meta_block("final", meta_keys::FINALIZED_BLOCK)?; - let (finalized_state_hash, finalized_state_number) = load_meta_block("final_state", meta_keys::FINALIZED_STATE)?; + let (finalized_state_hash, finalized_state_number) = + load_meta_block("final_state", meta_keys::FINALIZED_STATE)?; let finalized_state = if finalized_state_hash != Default::default() { Some((finalized_state_hash, finalized_state_number)) } else { @@ -437,13 +434,14 @@ pub fn read_meta(db: &dyn Database, col_header: u32) -> Result< } /// Read genesis hash from database. 
-pub fn read_genesis_hash(db: &dyn Database) -> sp_blockchain::Result> { +pub fn read_genesis_hash( + db: &dyn Database, +) -> sp_blockchain::Result> { match db.get(COLUMN_META, meta_keys::GENESIS_HASH) { Some(h) => match Decode::decode(&mut &h[..]) { Ok(h) => Ok(Some(h)), - Err(err) => Err(sp_blockchain::Error::Backend( - format!("Error decoding genesis hash: {}", err) - )), + Err(err) => + Err(sp_blockchain::Error::Backend(format!("Error decoding genesis hash: {}", err))), }, None => Ok(None), } @@ -461,7 +459,7 @@ impl DatabaseType { pub(crate) struct JoinInput<'a, 'b>(&'a [u8], &'b [u8]); -pub(crate) fn join_input<'a, 'b>(i1: &'a[u8], i2: &'b [u8]) -> JoinInput<'a, 'b> { +pub(crate) fn join_input<'a, 'b>(i1: &'a [u8], i2: &'b [u8]) -> JoinInput<'a, 'b> { JoinInput(i1, i2) } @@ -486,8 +484,8 @@ impl<'a, 'b> codec::Input for JoinInput<'a, 'b> { #[cfg(test)] mod tests { use super::*; - use sp_runtime::testing::{Block as RawBlock, ExtrinsicWrapper}; use codec::Input; + use sp_runtime::testing::{Block as RawBlock, ExtrinsicWrapper}; type Block = RawBlock>; #[test] diff --git a/client/executor/common/src/lib.rs b/client/executor/common/src/lib.rs index 25e06314aba39..ef73ecd90e285 100644 --- a/client/executor/common/src/lib.rs +++ b/client/executor/common/src/lib.rs @@ -22,6 +22,6 @@ #![deny(unused_crate_dependencies)] pub mod error; +pub mod runtime_blob; pub mod sandbox; pub mod wasm_runtime; -pub mod runtime_blob; diff --git a/client/executor/common/src/runtime_blob/data_segments_snapshot.rs b/client/executor/common/src/runtime_blob/data_segments_snapshot.rs index 269ad0858325b..5c3fedbdc963e 100644 --- a/client/executor/common/src/runtime_blob/data_segments_snapshot.rs +++ b/client/executor/common/src/runtime_blob/data_segments_snapshot.rs @@ -16,10 +16,10 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::error::{self, Error}; use super::RuntimeBlob; -use std::mem; +use crate::error::{self, Error}; use pwasm_utils::parity_wasm::elements::Instruction; +use std::mem; /// This is a snapshot of data segments specialzied for a particular instantiation. /// @@ -49,7 +49,7 @@ impl DataSegmentsSnapshot { // [op, End] if init_expr.len() != 2 { - return Err(Error::InitializerHasTooManyExpressions); + return Err(Error::InitializerHasTooManyExpressions) } let offset = match &init_expr[0] { Instruction::I32Const(v) => *v as u32, @@ -60,8 +60,8 @@ impl DataSegmentsSnapshot { // At the moment of writing the Substrate Runtime Interface does not provide // any globals. There is nothing that prevents us from supporting this // if/when we gain those. - return Err(Error::ImportedGlobalsUnsupported); - } + return Err(Error::ImportedGlobalsUnsupported) + }, insn => return Err(Error::InvalidInitializerExpression(format!("{:?}", insn))), }; diff --git a/client/executor/common/src/runtime_blob/globals_snapshot.rs b/client/executor/common/src/runtime_blob/globals_snapshot.rs index acdefef2e64ef..7e9c49429d356 100644 --- a/client/executor/common/src/runtime_blob/globals_snapshot.rs +++ b/client/executor/common/src/runtime_blob/globals_snapshot.rs @@ -57,10 +57,8 @@ pub struct ExposedMutableGlobalsSet(Vec); impl ExposedMutableGlobalsSet { /// Collect the set from the given runtime blob. See the struct documentation for details. 
pub fn collect(runtime_blob: &RuntimeBlob) -> Self { - let global_names = runtime_blob - .exported_internal_global_names() - .map(ToOwned::to_owned) - .collect(); + let global_names = + runtime_blob.exported_internal_global_names().map(ToOwned::to_owned).collect(); Self(global_names) } } diff --git a/client/executor/common/src/runtime_blob/mod.rs b/client/executor/common/src/runtime_blob/mod.rs index 372df7bd97eb7..43d6e5e7a0dfb 100644 --- a/client/executor/common/src/runtime_blob/mod.rs +++ b/client/executor/common/src/runtime_blob/mod.rs @@ -53,5 +53,5 @@ mod globals_snapshot; mod runtime_blob; pub use data_segments_snapshot::DataSegmentsSnapshot; -pub use globals_snapshot::{GlobalsSnapshot, ExposedMutableGlobalsSet, InstanceGlobals}; +pub use globals_snapshot::{ExposedMutableGlobalsSet, GlobalsSnapshot, InstanceGlobals}; pub use runtime_blob::RuntimeBlob; diff --git a/client/executor/common/src/runtime_blob/runtime_blob.rs b/client/executor/common/src/runtime_blob/runtime_blob.rs index e7fc15bb13e19..b7f71193449cb 100644 --- a/client/executor/common/src/runtime_blob/runtime_blob.rs +++ b/client/executor/common/src/runtime_blob/runtime_blob.rs @@ -16,13 +16,11 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use crate::error::WasmError; use pwasm_utils::{ - parity_wasm::elements::{ - DataSegment, Module, deserialize_buffer, serialize, Internal, - }, export_mutable_globals, + parity_wasm::elements::{deserialize_buffer, serialize, DataSegment, Internal, Module}, }; -use crate::error::WasmError; /// A bunch of information collected from a WebAssembly module. #[derive(Clone)] @@ -53,11 +51,7 @@ impl RuntimeBlob { /// Extract the data segments from the given wasm code. 
pub(super) fn data_segments(&self) -> Vec { - self.raw_module - .data_section() - .map(|ds| ds.entries()) - .unwrap_or(&[]) - .to_vec() + self.raw_module.data_section().map(|ds| ds.entries()).unwrap_or(&[]).to_vec() } /// The number of globals defined in locally in this module. @@ -70,10 +64,7 @@ impl RuntimeBlob { /// The number of imports of globals. pub fn imported_globals_count(&self) -> u32 { - self.raw_module - .import_section() - .map(|is| is.globals() as u32) - .unwrap_or(0) + self.raw_module.import_section().map(|is| is.globals() as u32).unwrap_or(0) } /// Perform an instrumentation that makes sure that the mutable globals are exported. @@ -95,35 +86,29 @@ impl RuntimeBlob { |e| WasmError::Other(format!("cannot inject the stack limiter: {:?}", e)), )?; - Ok(Self { - raw_module: injected_module, - }) + Ok(Self { raw_module: injected_module }) } /// Perform an instrumentation that makes sure that a specific function `entry_point` is exported pub fn entry_point_exists(&self, entry_point: &str) -> bool { - self.raw_module.export_section().map(|e| { - e.entries() - .iter() - .any(|e| matches!(e.internal(), Internal::Function(_)) && e.field() == entry_point) - }).unwrap_or_default() + self.raw_module + .export_section() + .map(|e| { + e.entries().iter().any(|e| { + matches!(e.internal(), Internal::Function(_)) && e.field() == entry_point + }) + }) + .unwrap_or_default() } /// Returns an iterator of all globals which were exported by [`expose_mutable_globals`]. 
pub(super) fn exported_internal_global_names<'module>( &'module self, ) -> impl Iterator { - let exports = self - .raw_module - .export_section() - .map(|es| es.entries()) - .unwrap_or(&[]); + let exports = self.raw_module.export_section().map(|es| es.entries()).unwrap_or(&[]); exports.iter().filter_map(|export| match export.internal() { - Internal::Global(_) - if export.field().starts_with("exported_internal_global") => - { - Some(export.field()) - } + Internal::Global(_) if export.field().starts_with("exported_internal_global") => + Some(export.field()), _ => None, }) } @@ -135,12 +120,11 @@ impl RuntimeBlob { .custom_sections() .find(|cs| cs.name() == section_name) .map(|cs| cs.payload()) - } + } /// Consumes this runtime blob and serializes it. pub fn serialize(self) -> Vec { - serialize(self.raw_module) - .expect("serializing into a vec should succeed; qed") + serialize(self.raw_module).expect("serializing into a vec should succeed; qed") } /// Destructure this structure into the underlying parity-wasm Module. diff --git a/client/executor/common/src/sandbox.rs b/client/executor/common/src/sandbox.rs index b7838aab7f348..63f9cc4f258e8 100644 --- a/client/executor/common/src/sandbox.rs +++ b/client/executor/common/src/sandbox.rs @@ -21,15 +21,15 @@ //! Sandboxing is baked by wasmi at the moment. In future, however, we would like to add/switch to //! a compiled execution engine. 
-use crate::error::{Result, Error}; -use std::{collections::HashMap, rc::Rc}; +use crate::error::{Error, Result}; use codec::{Decode, Encode}; use sp_core::sandbox as sandbox_primitives; +use sp_wasm_interface::{FunctionContext, Pointer, WordSize}; +use std::{collections::HashMap, rc::Rc}; use wasmi::{ - Externals, ImportResolver, MemoryInstance, MemoryRef, Module, ModuleInstance, - ModuleRef, RuntimeArgs, RuntimeValue, Trap, TrapKind, memory_units::Pages, + memory_units::Pages, Externals, ImportResolver, MemoryInstance, MemoryRef, Module, + ModuleInstance, ModuleRef, RuntimeArgs, RuntimeValue, Trap, TrapKind, }; -use sp_wasm_interface::{FunctionContext, Pointer, WordSize}; /// Index of a function inside the supervisor. /// @@ -83,15 +83,9 @@ impl ImportResolver for Imports { field_name: &str, signature: &::wasmi::Signature, ) -> std::result::Result { - let key = ( - module_name.as_bytes().to_owned(), - field_name.as_bytes().to_owned(), - ); + let key = (module_name.as_bytes().to_owned(), field_name.as_bytes().to_owned()); let idx = *self.func_map.get(&key).ok_or_else(|| { - wasmi::Error::Instantiation(format!( - "Export {}:{} not found", - module_name, field_name - )) + wasmi::Error::Instantiation(format!("Export {}:{} not found", module_name, field_name)) })?; Ok(wasmi::FuncInstance::alloc_host(signature.clone(), idx.0)) } @@ -102,11 +96,9 @@ impl ImportResolver for Imports { field_name: &str, _memory_type: &::wasmi::MemoryDescriptor, ) -> std::result::Result { - let key = ( - module_name.as_bytes().to_vec(), - field_name.as_bytes().to_vec(), - ); - let mem = self.memories_map + let key = (module_name.as_bytes().to_vec(), field_name.as_bytes().to_vec()); + let mem = self + .memories_map .get(&key) .ok_or_else(|| { wasmi::Error::Instantiation(format!( @@ -124,10 +116,7 @@ impl ImportResolver for Imports { field_name: &str, _global_type: &::wasmi::GlobalDescriptor, ) -> std::result::Result { - Err(wasmi::Error::Instantiation(format!( - "Export {}:{} not found", - 
module_name, field_name - ))) + Err(wasmi::Error::Instantiation(format!("Export {}:{} not found", module_name, field_name))) } fn resolve_table( @@ -136,10 +125,7 @@ impl ImportResolver for Imports { field_name: &str, _table_type: &::wasmi::TableDescriptor, ) -> std::result::Result { - Err(wasmi::Error::Instantiation(format!( - "Export {}:{} not found", - module_name, field_name - ))) + Err(wasmi::Error::Instantiation(format!("Export {}:{} not found", module_name, field_name))) } } @@ -187,7 +173,9 @@ fn trap(msg: &'static str) -> Trap { TrapKind::Host(Box::new(Error::Other(msg.into()))).into() } -fn deserialize_result(mut serialized_result: &[u8]) -> std::result::Result, Trap> { +fn deserialize_result( + mut serialized_result: &[u8], +) -> std::result::Result, Trap> { use self::sandbox_primitives::HostError; use sp_wasm_interface::ReturnValue; let result_val = std::result::Result::::decode(&mut serialized_result) @@ -222,7 +210,8 @@ impl<'a, FE: SandboxCapabilities + 'a> Externals for GuestExternals<'a, FE> { ); // Serialize arguments into a byte vector. 
- let invoke_args_data: Vec = args.as_ref() + let invoke_args_data: Vec = args + .as_ref() .iter() .cloned() .map(sp_wasm_interface::Value::from) @@ -240,10 +229,7 @@ impl<'a, FE: SandboxCapabilities + 'a> Externals for GuestExternals<'a, FE> { .map_err(|_| trap("Can't allocate memory in supervisor for the arguments"))?; let deallocate = |this: &mut GuestExternals, ptr, fail_msg| { - this - .supervisor_externals - .deallocate_memory(ptr) - .map_err(|_| trap(fail_msg)) + this.supervisor_externals.deallocate_memory(ptr).map_err(|_| trap(fail_msg)) }; if self @@ -251,8 +237,12 @@ impl<'a, FE: SandboxCapabilities + 'a> Externals for GuestExternals<'a, FE> { .write_memory(invoke_args_ptr, &invoke_args_data) .is_err() { - deallocate(self, invoke_args_ptr, "Failed dealloction after failed write of invoke arguments")?; - return Err(trap("Can't write invoke args into memory")); + deallocate( + self, + invoke_args_ptr, + "Failed dealloction after failed write of invoke arguments", + )?; + return Err(trap("Can't write invoke args into memory")) } let result = self.supervisor_externals.invoke( @@ -263,7 +253,11 @@ impl<'a, FE: SandboxCapabilities + 'a> Externals for GuestExternals<'a, FE> { func_idx, ); - deallocate(self, invoke_args_ptr, "Can't deallocate memory for dispatch thunk's invoke arguments")?; + deallocate( + self, + invoke_args_ptr, + "Can't deallocate memory for dispatch thunk's invoke arguments", + )?; let result = result?; // dispatch_thunk returns pointer to serialized arguments. 
@@ -276,13 +270,18 @@ impl<'a, FE: SandboxCapabilities + 'a> Externals for GuestExternals<'a, FE> { (Pointer::new(ptr), len) }; - let serialized_result_val = self.supervisor_externals + let serialized_result_val = self + .supervisor_externals .read_memory(serialized_result_val_ptr, serialized_result_val_len) .map_err(|_| trap("Can't read the serialized result from dispatch thunk")); - deallocate(self, serialized_result_val_ptr, "Can't deallocate memory for dispatch thunk's result") - .and_then(|_| serialized_result_val) - .and_then(|serialized_result_val| deserialize_result(&serialized_result_val)) + deallocate( + self, + serialized_result_val_ptr, + "Can't deallocate memory for dispatch thunk's result", + ) + .and_then(|_| serialized_result_val) + .and_then(|serialized_result_val| deserialize_result(&serialized_result_val)) } } @@ -296,11 +295,7 @@ where FE: SandboxCapabilities, F: FnOnce(&mut GuestExternals) -> R, { - let mut guest_externals = GuestExternals { - supervisor_externals, - sandbox_instance, - state, - }; + let mut guest_externals = GuestExternals { supervisor_externals, sandbox_instance, state }; f(&mut guest_externals) } @@ -332,32 +327,23 @@ impl SandboxInstance { /// /// The `state` parameter can be used to provide custom data for /// these syscall implementations. - pub fn invoke>( + pub fn invoke>( &self, export_name: &str, args: &[RuntimeValue], supervisor_externals: &mut FE, state: u32, ) -> std::result::Result, wasmi::Error> { - with_guest_externals( - supervisor_externals, - self, - state, - |guest_externals| { - self.instance - .invoke_export(export_name, args, guest_externals) - }, - ) + with_guest_externals(supervisor_externals, self, state, |guest_externals| { + self.instance.invoke_export(export_name, args, guest_externals) + }) } /// Get the value from a global with the given `name`. /// /// Returns `Some(_)` if the global could be found. 
pub fn get_global_val(&self, name: &str) -> Option { - let global = self.instance - .export_by_name(name)? - .as_global()? - .get(); + let global = self.instance.export_by_name(name)?.as_global()?.get(); Some(global.into()) } @@ -398,7 +384,7 @@ fn decode_environment_definition( let externals_idx = guest_to_supervisor_mapping.define(SupervisorFuncIndex(func_idx as usize)); func_map.insert((module, field), externals_idx); - } + }, sandbox_primitives::ExternEntity::Memory(memory_idx) => { let memory_ref = memories .get(memory_idx as usize) @@ -406,17 +392,11 @@ fn decode_environment_definition( .ok_or_else(|| InstantiationError::EnvironmentDefinitionCorrupted)? .ok_or_else(|| InstantiationError::EnvironmentDefinitionCorrupted)?; memories_map.insert((module, field), memory_ref); - } + }, } } - Ok(( - Imports { - func_map, - memories_map, - }, - guest_to_supervisor_mapping, - )) + Ok((Imports { func_map, memories_map }, guest_to_supervisor_mapping)) } /// An environment in which the guest module is instantiated. @@ -435,10 +415,7 @@ impl GuestEnvironment { ) -> std::result::Result { let (imports, guest_to_supervisor_mapping) = decode_environment_definition(raw_env_def, &store.memories)?; - Ok(Self { - imports, - guest_to_supervisor_mapping, - }) + Ok(Self { imports, guest_to_supervisor_mapping }) } } @@ -493,16 +470,11 @@ pub fn instantiate<'a, FE: SandboxCapabilities>( guest_to_supervisor_mapping: host_env.guest_to_supervisor_mapping, }); - with_guest_externals( - supervisor_externals, - &sandbox_instance, - state, - |guest_externals| { - instance - .run_start(guest_externals) - .map_err(|_| InstantiationError::StartTrapped) - }, - )?; + with_guest_externals(supervisor_externals, &sandbox_instance, state, |guest_externals| { + instance + .run_start(guest_externals) + .map_err(|_| InstantiationError::StartTrapped) + })?; Ok(UnregisteredInstance { sandbox_instance }) } @@ -519,10 +491,7 @@ pub struct Store { impl Store { /// Create a new empty sandbox store. 
pub fn new() -> Self { - Store { - instances: Vec::new(), - memories: Vec::new(), - } + Store { instances: Vec::new(), memories: Vec::new() } } /// Create a new memory instance and return it's index. @@ -537,11 +506,7 @@ impl Store { specified_limit => Some(Pages(specified_limit as usize)), }; - let mem = - MemoryInstance::alloc( - Pages(initial as usize), - maximum, - )?; + let mem = MemoryInstance::alloc(Pages(initial as usize), maximum)?; let mem_idx = self.memories.len(); self.memories.push(Some(mem)); @@ -589,7 +554,7 @@ impl Store { Some(memory) => { *memory = None; Ok(()) - } + }, } } @@ -606,7 +571,7 @@ impl Store { Some(instance) => { *instance = None; Ok(()) - } + }, } } diff --git a/client/executor/runtime-test/src/lib.rs b/client/executor/runtime-test/src/lib.rs index c37766832b461..11771b183e3c4 100644 --- a/client/executor/runtime-test/src/lib.rs +++ b/client/executor/runtime-test/src/lib.rs @@ -7,22 +7,28 @@ include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); /// Wasm binary unwrapped. If built with `SKIP_WASM_BUILD`, the function panics. #[cfg(feature = "std")] pub fn wasm_binary_unwrap() -> &'static [u8] { - WASM_BINARY.expect("Development wasm binary is not available. Testing is only \ - supported with the flag disabled.") + WASM_BINARY.expect( + "Development wasm binary is not available. 
Testing is only \ + supported with the flag disabled.", + ) } #[cfg(not(feature = "std"))] -use sp_std::{vec::Vec, vec}; +use sp_std::{vec, vec::Vec}; +#[cfg(not(feature = "std"))] +use sp_core::{ed25519, sr25519}; #[cfg(not(feature = "std"))] use sp_io::{ - storage, hashing::{blake2_128, blake2_256, sha2_256, twox_128, twox_256}, - crypto::{ed25519_verify, sr25519_verify}, wasm_tracing, + crypto::{ed25519_verify, sr25519_verify}, + hashing::{blake2_128, blake2_256, sha2_256, twox_128, twox_256}, + storage, wasm_tracing, }; #[cfg(not(feature = "std"))] -use sp_runtime::{print, traits::{BlakeTwo256, Hash}}; -#[cfg(not(feature = "std"))] -use sp_core::{ed25519, sr25519}; +use sp_runtime::{ + print, + traits::{BlakeTwo256, Hash}, +}; #[cfg(not(feature = "std"))] use sp_sandbox::Value; @@ -48,347 +54,347 @@ static mut MUTABLE_STATIC: u64 = 32; static mut MUTABLE_STATIC_BSS: u64 = 0; sp_core::wasm_export_functions! { - fn test_calling_missing_external() { - unsafe { missing_external() } - } - - fn test_calling_yet_another_missing_external() { - unsafe { yet_another_missing_external() } - } - - fn test_data_in(input: Vec) -> Vec { - print("set_storage"); - storage::set(b"input", &input); - - print("storage"); - let foo = storage::get(b"foo").unwrap(); - - print("set_storage"); - storage::set(b"baz", &foo); - - print("finished!"); - b"all ok!".to_vec() - } - - fn test_clear_prefix(input: Vec) -> Vec { - storage::clear_prefix(&input, None); - b"all ok!".to_vec() - } - - fn test_empty_return() {} - - fn test_dirty_plenty_memory(heap_base: u32, heap_pages: u32) { - // This piece of code will dirty multiple pages of memory. The number of pages is given by - // the `heap_pages`. It's unit is a wasm page (64KiB). The first page to be cleared - // is a wasm page that that follows the one that holds the `heap_base` address. - // - // This function dirties the **host** pages. I.e. we dirty 4KiB at a time and it will take - // 16 writes to process a single wasm page. 
- - let mut heap_ptr = heap_base as usize; - - // Find the next wasm page boundary. - let heap_ptr = round_up_to(heap_ptr, 65536); - - // Make it an actual pointer - let heap_ptr = heap_ptr as *mut u8; - - // Traverse the host pages and make each one dirty - let host_pages = heap_pages as usize * 16; - for i in 0..host_pages { - unsafe { - // technically this is an UB, but there is no way Rust can find this out. - heap_ptr.add(i * 4096).write(0); - } - } - - fn round_up_to(n: usize, divisor: usize) -> usize { - (n + divisor - 1) / divisor - } - } - - fn test_exhaust_heap() -> Vec { Vec::with_capacity(16777216) } - - fn test_fp_f32add(a: [u8; 4], b: [u8; 4]) -> [u8; 4] { - let a = f32::from_le_bytes(a); - let b = f32::from_le_bytes(b); - f32::to_le_bytes(a + b) - } - - fn test_panic() { panic!("test panic") } - - fn test_conditional_panic(input: Vec) -> Vec { - if input.len() > 0 { - panic!("test panic") - } - - input - } - - fn test_blake2_256(input: Vec) -> Vec { - blake2_256(&input).to_vec() - } - - fn test_blake2_128(input: Vec) -> Vec { - blake2_128(&input).to_vec() - } - - fn test_sha2_256(input: Vec) -> Vec { - sha2_256(&input).to_vec() - } - - fn test_twox_256(input: Vec) -> Vec { - twox_256(&input).to_vec() - } - - fn test_twox_128(input: Vec) -> Vec { - twox_128(&input).to_vec() - } - - fn test_ed25519_verify(input: Vec) -> bool { - let mut pubkey = [0; 32]; - let mut sig = [0; 64]; - - pubkey.copy_from_slice(&input[0..32]); - sig.copy_from_slice(&input[32..96]); - - let msg = b"all ok!"; - ed25519_verify(&ed25519::Signature(sig), &msg[..], &ed25519::Public(pubkey)) - } - - fn test_sr25519_verify(input: Vec) -> bool { - let mut pubkey = [0; 32]; - let mut sig = [0; 64]; - - pubkey.copy_from_slice(&input[0..32]); - sig.copy_from_slice(&input[32..96]); - - let msg = b"all ok!"; - sr25519_verify(&sr25519::Signature(sig), &msg[..], &sr25519::Public(pubkey)) - } - - fn test_ordered_trie_root() -> Vec { - BlakeTwo256::ordered_trie_root( - vec![ - 
b"zero"[..].into(), - b"one"[..].into(), - b"two"[..].into(), - ], - ).as_ref().to_vec() - } - - fn test_sandbox(code: Vec) -> bool { - execute_sandboxed(&code, &[]).is_ok() - } + fn test_calling_missing_external() { + unsafe { missing_external() } + } - fn test_sandbox_args(code: Vec) -> bool { - execute_sandboxed( - &code, - &[ - Value::I32(0x12345678), - Value::I64(0x1234567887654321), - ], - ).is_ok() - } - - fn test_sandbox_return_val(code: Vec) -> bool { - let ok = match execute_sandboxed( - &code, - &[ - Value::I32(0x1336), - ] - ) { - Ok(sp_sandbox::ReturnValue::Value(Value::I32(0x1337))) => true, - _ => false, - }; - - ok - } - - fn test_sandbox_instantiate(code: Vec) -> u8 { - let env_builder = sp_sandbox::EnvironmentDefinitionBuilder::new(); - let code = match sp_sandbox::Instance::new(&code, &env_builder, &mut ()) { - Ok(_) => 0, - Err(sp_sandbox::Error::Module) => 1, - Err(sp_sandbox::Error::Execution) => 2, - Err(sp_sandbox::Error::OutOfBounds) => 3, - }; - - code - } - - fn test_sandbox_get_global_val(code: Vec) -> i64 { - let env_builder = sp_sandbox::EnvironmentDefinitionBuilder::new(); - let instance = if let Ok(i) = sp_sandbox::Instance::new(&code, &env_builder, &mut ()) { - i - } else { - return 20; - }; - - match instance.get_global_val("test_global") { - Some(sp_sandbox::Value::I64(val)) => val, - None => 30, - val => 40, - } - } - - fn test_offchain_index_set() { - sp_io::offchain_index::set(b"k", b"v"); - } - - fn test_offchain_local_storage() -> bool { - let kind = sp_core::offchain::StorageKind::PERSISTENT; - assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), None); - sp_io::offchain::local_storage_set(kind, b"test", b"asd"); - assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), Some(b"asd".to_vec())); - - let res = sp_io::offchain::local_storage_compare_and_set( - kind, - b"test", - Some(b"asd".to_vec()), - b"", - ); - assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), Some(b"".to_vec())); - res - } - - fn 
test_offchain_local_storage_with_none() { - let kind = sp_core::offchain::StorageKind::PERSISTENT; - assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), None); - - let res = sp_io::offchain::local_storage_compare_and_set(kind, b"test", None, b"value"); - assert_eq!(res, true); - assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), Some(b"value".to_vec())); - } + fn test_calling_yet_another_missing_external() { + unsafe { yet_another_missing_external() } + } - fn test_offchain_http() -> bool { - use sp_core::offchain::HttpRequestStatus; - let run = || -> Option<()> { - let id = sp_io::offchain::http_request_start( - "POST", - "http://localhost:12345", - &[], - ).ok()?; - sp_io::offchain::http_request_add_header(id, "X-Auth", "test").ok()?; - sp_io::offchain::http_request_write_body(id, &[1, 2, 3, 4], None).ok()?; - sp_io::offchain::http_request_write_body(id, &[], None).ok()?; - let status = sp_io::offchain::http_response_wait(&[id], None); - assert!(status == vec![HttpRequestStatus::Finished(200)], "Expected Finished(200) status."); - let headers = sp_io::offchain::http_response_headers(id); - assert_eq!(headers, vec![(b"X-Auth".to_vec(), b"hello".to_vec())]); - let mut buffer = vec![0; 64]; - let read = sp_io::offchain::http_response_read_body(id, &mut buffer, None).ok()?; - assert_eq!(read, 3); - assert_eq!(&buffer[0..read as usize], &[1, 2, 3]); - let read = sp_io::offchain::http_response_read_body(id, &mut buffer, None).ok()?; - assert_eq!(read, 0); - - Some(()) - }; + fn test_data_in(input: Vec) -> Vec { + print("set_storage"); + storage::set(b"input", &input); - run().is_some() - } + print("storage"); + let foo = storage::get(b"foo").unwrap(); - fn test_enter_span() -> u64 { - wasm_tracing::enter_span(Default::default()) - } + print("set_storage"); + storage::set(b"baz", &foo); - fn test_exit_span(span_id: u64) { - wasm_tracing::exit(span_id) - } + print("finished!"); + b"all ok!".to_vec() + } - fn test_nested_spans() { - 
sp_io::init_tracing(); - let span_id = wasm_tracing::enter_span(Default::default()); - { - sp_io::init_tracing(); - let span_id = wasm_tracing::enter_span(Default::default()); - wasm_tracing::exit(span_id); - } - wasm_tracing::exit(span_id); - } - - fn returns_mutable_static() -> u64 { - unsafe { - MUTABLE_STATIC += 1; - MUTABLE_STATIC - } - } - - fn returns_mutable_static_bss() -> u64 { - unsafe { - MUTABLE_STATIC_BSS += 1; - MUTABLE_STATIC_BSS - } - } - - fn allocates_huge_stack_array(trap: bool) -> Vec { - // Allocate a stack frame that is approx. 75% of the stack (assuming it is 1MB). - // This will just decrease (stacks in wasm32-u-u grow downwards) the stack - // pointer. This won't trap on the current compilers. - let mut data = [0u8; 1024 * 768]; - - // Then make sure we actually write something to it. - // - // If: - // 1. the stack area is placed at the beginning of the linear memory space, and - // 2. the stack pointer points to out-of-bounds area, and - // 3. a write is performed around the current stack pointer. - // - // then a trap should happen. - // - for (i, v) in data.iter_mut().enumerate() { - *v = i as u8; // deliberate truncation - } - - if trap { - // There is a small chance of this to be pulled up in theory. In practice - // the probability of that is rather low. - panic!() - } - - data.to_vec() - } - - // Check that the heap at `heap_base + offset` don't contains the test message. - // After the check succeeds the test message is written into the heap. - // - // It is expected that the given pointer is not allocated. 
- fn check_and_set_in_heap(heap_base: u32, offset: u32) { - let test_message = b"Hello invalid heap memory"; - let ptr = unsafe { (heap_base + offset) as *mut u8 }; - - let message_slice = unsafe { sp_std::slice::from_raw_parts_mut(ptr, test_message.len()) }; - - assert_ne!(test_message, message_slice); - message_slice.copy_from_slice(test_message); - } - - fn test_spawn() { - let data = vec![1u8, 2u8]; - let data_new = sp_tasks::spawn(tasks::incrementer, data).join(); - - assert_eq!(data_new, vec![2u8, 3u8]); - } - - fn test_nested_spawn() { - let data = vec![7u8, 13u8]; - let data_new = sp_tasks::spawn(tasks::parallel_incrementer, data).join(); - - assert_eq!(data_new, vec![10u8, 16u8]); - } - - fn test_panic_in_spawned() { - sp_tasks::spawn(tasks::panicker, vec![]).join(); - } - } + fn test_clear_prefix(input: Vec) -> Vec { + storage::clear_prefix(&input, None); + b"all ok!".to_vec() + } + + fn test_empty_return() {} + + fn test_dirty_plenty_memory(heap_base: u32, heap_pages: u32) { + // This piece of code will dirty multiple pages of memory. The number of pages is given by + // the `heap_pages`. It's unit is a wasm page (64KiB). The first page to be cleared + // is a wasm page that that follows the one that holds the `heap_base` address. + // + // This function dirties the **host** pages. I.e. we dirty 4KiB at a time and it will take + // 16 writes to process a single wasm page. + + let mut heap_ptr = heap_base as usize; + + // Find the next wasm page boundary. + let heap_ptr = round_up_to(heap_ptr, 65536); + + // Make it an actual pointer + let heap_ptr = heap_ptr as *mut u8; + + // Traverse the host pages and make each one dirty + let host_pages = heap_pages as usize * 16; + for i in 0..host_pages { + unsafe { + // technically this is an UB, but there is no way Rust can find this out. 
+ heap_ptr.add(i * 4096).write(0); + } + } + + fn round_up_to(n: usize, divisor: usize) -> usize { + (n + divisor - 1) / divisor + } + } + + fn test_exhaust_heap() -> Vec { Vec::with_capacity(16777216) } + + fn test_fp_f32add(a: [u8; 4], b: [u8; 4]) -> [u8; 4] { + let a = f32::from_le_bytes(a); + let b = f32::from_le_bytes(b); + f32::to_le_bytes(a + b) + } + + fn test_panic() { panic!("test panic") } + + fn test_conditional_panic(input: Vec) -> Vec { + if input.len() > 0 { + panic!("test panic") + } + + input + } + + fn test_blake2_256(input: Vec) -> Vec { + blake2_256(&input).to_vec() + } + + fn test_blake2_128(input: Vec) -> Vec { + blake2_128(&input).to_vec() + } + + fn test_sha2_256(input: Vec) -> Vec { + sha2_256(&input).to_vec() + } + + fn test_twox_256(input: Vec) -> Vec { + twox_256(&input).to_vec() + } + + fn test_twox_128(input: Vec) -> Vec { + twox_128(&input).to_vec() + } + + fn test_ed25519_verify(input: Vec) -> bool { + let mut pubkey = [0; 32]; + let mut sig = [0; 64]; + + pubkey.copy_from_slice(&input[0..32]); + sig.copy_from_slice(&input[32..96]); + + let msg = b"all ok!"; + ed25519_verify(&ed25519::Signature(sig), &msg[..], &ed25519::Public(pubkey)) + } + + fn test_sr25519_verify(input: Vec) -> bool { + let mut pubkey = [0; 32]; + let mut sig = [0; 64]; + + pubkey.copy_from_slice(&input[0..32]); + sig.copy_from_slice(&input[32..96]); + + let msg = b"all ok!"; + sr25519_verify(&sr25519::Signature(sig), &msg[..], &sr25519::Public(pubkey)) + } + + fn test_ordered_trie_root() -> Vec { + BlakeTwo256::ordered_trie_root( + vec![ + b"zero"[..].into(), + b"one"[..].into(), + b"two"[..].into(), + ], + ).as_ref().to_vec() + } + + fn test_sandbox(code: Vec) -> bool { + execute_sandboxed(&code, &[]).is_ok() + } + + fn test_sandbox_args(code: Vec) -> bool { + execute_sandboxed( + &code, + &[ + Value::I32(0x12345678), + Value::I64(0x1234567887654321), + ], + ).is_ok() + } + + fn test_sandbox_return_val(code: Vec) -> bool { + let ok = match execute_sandboxed( + 
&code, + &[ + Value::I32(0x1336), + ] + ) { + Ok(sp_sandbox::ReturnValue::Value(Value::I32(0x1337))) => true, + _ => false, + }; + + ok + } + + fn test_sandbox_instantiate(code: Vec) -> u8 { + let env_builder = sp_sandbox::EnvironmentDefinitionBuilder::new(); + let code = match sp_sandbox::Instance::new(&code, &env_builder, &mut ()) { + Ok(_) => 0, + Err(sp_sandbox::Error::Module) => 1, + Err(sp_sandbox::Error::Execution) => 2, + Err(sp_sandbox::Error::OutOfBounds) => 3, + }; + + code + } + + fn test_sandbox_get_global_val(code: Vec) -> i64 { + let env_builder = sp_sandbox::EnvironmentDefinitionBuilder::new(); + let instance = if let Ok(i) = sp_sandbox::Instance::new(&code, &env_builder, &mut ()) { + i + } else { + return 20; + }; + + match instance.get_global_val("test_global") { + Some(sp_sandbox::Value::I64(val)) => val, + None => 30, + val => 40, + } + } + + fn test_offchain_index_set() { + sp_io::offchain_index::set(b"k", b"v"); + } + + fn test_offchain_local_storage() -> bool { + let kind = sp_core::offchain::StorageKind::PERSISTENT; + assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), None); + sp_io::offchain::local_storage_set(kind, b"test", b"asd"); + assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), Some(b"asd".to_vec())); + + let res = sp_io::offchain::local_storage_compare_and_set( + kind, + b"test", + Some(b"asd".to_vec()), + b"", + ); + assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), Some(b"".to_vec())); + res + } + + fn test_offchain_local_storage_with_none() { + let kind = sp_core::offchain::StorageKind::PERSISTENT; + assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), None); + + let res = sp_io::offchain::local_storage_compare_and_set(kind, b"test", None, b"value"); + assert_eq!(res, true); + assert_eq!(sp_io::offchain::local_storage_get(kind, b"test"), Some(b"value".to_vec())); + } + + fn test_offchain_http() -> bool { + use sp_core::offchain::HttpRequestStatus; + let run = || -> Option<()> { + let id 
= sp_io::offchain::http_request_start( + "POST", + "http://localhost:12345", + &[], + ).ok()?; + sp_io::offchain::http_request_add_header(id, "X-Auth", "test").ok()?; + sp_io::offchain::http_request_write_body(id, &[1, 2, 3, 4], None).ok()?; + sp_io::offchain::http_request_write_body(id, &[], None).ok()?; + let status = sp_io::offchain::http_response_wait(&[id], None); + assert!(status == vec![HttpRequestStatus::Finished(200)], "Expected Finished(200) status."); + let headers = sp_io::offchain::http_response_headers(id); + assert_eq!(headers, vec![(b"X-Auth".to_vec(), b"hello".to_vec())]); + let mut buffer = vec![0; 64]; + let read = sp_io::offchain::http_response_read_body(id, &mut buffer, None).ok()?; + assert_eq!(read, 3); + assert_eq!(&buffer[0..read as usize], &[1, 2, 3]); + let read = sp_io::offchain::http_response_read_body(id, &mut buffer, None).ok()?; + assert_eq!(read, 0); + + Some(()) + }; + + run().is_some() + } + + fn test_enter_span() -> u64 { + wasm_tracing::enter_span(Default::default()) + } + + fn test_exit_span(span_id: u64) { + wasm_tracing::exit(span_id) + } + + fn test_nested_spans() { + sp_io::init_tracing(); + let span_id = wasm_tracing::enter_span(Default::default()); + { + sp_io::init_tracing(); + let span_id = wasm_tracing::enter_span(Default::default()); + wasm_tracing::exit(span_id); + } + wasm_tracing::exit(span_id); + } + + fn returns_mutable_static() -> u64 { + unsafe { + MUTABLE_STATIC += 1; + MUTABLE_STATIC + } + } + + fn returns_mutable_static_bss() -> u64 { + unsafe { + MUTABLE_STATIC_BSS += 1; + MUTABLE_STATIC_BSS + } + } + + fn allocates_huge_stack_array(trap: bool) -> Vec { + // Allocate a stack frame that is approx. 75% of the stack (assuming it is 1MB). + // This will just decrease (stacks in wasm32-u-u grow downwards) the stack + // pointer. This won't trap on the current compilers. + let mut data = [0u8; 1024 * 768]; + + // Then make sure we actually write something to it. + // + // If: + // 1. 
the stack area is placed at the beginning of the linear memory space, and + // 2. the stack pointer points to out-of-bounds area, and + // 3. a write is performed around the current stack pointer. + // + // then a trap should happen. + // + for (i, v) in data.iter_mut().enumerate() { + *v = i as u8; // deliberate truncation + } + + if trap { + // There is a small chance of this to be pulled up in theory. In practice + // the probability of that is rather low. + panic!() + } + + data.to_vec() + } + + // Check that the heap at `heap_base + offset` don't contains the test message. + // After the check succeeds the test message is written into the heap. + // + // It is expected that the given pointer is not allocated. + fn check_and_set_in_heap(heap_base: u32, offset: u32) { + let test_message = b"Hello invalid heap memory"; + let ptr = unsafe { (heap_base + offset) as *mut u8 }; + + let message_slice = unsafe { sp_std::slice::from_raw_parts_mut(ptr, test_message.len()) }; + + assert_ne!(test_message, message_slice); + message_slice.copy_from_slice(test_message); + } + + fn test_spawn() { + let data = vec![1u8, 2u8]; + let data_new = sp_tasks::spawn(tasks::incrementer, data).join(); + + assert_eq!(data_new, vec![2u8, 3u8]); + } + + fn test_nested_spawn() { + let data = vec![7u8, 13u8]; + let data_new = sp_tasks::spawn(tasks::parallel_incrementer, data).join(); + + assert_eq!(data_new, vec![10u8, 16u8]); + } + + fn test_panic_in_spawned() { + sp_tasks::spawn(tasks::panicker, vec![]).join(); + } +} - #[cfg(not(feature = "std"))] - mod tasks { +#[cfg(not(feature = "std"))] +mod tasks { use sp_std::prelude::*; pub fn incrementer(data: Vec) -> Vec { - data.into_iter().map(|v| v + 1).collect() + data.into_iter().map(|v| v + 1).collect() } pub fn panicker(_: Vec) -> Vec { @@ -396,11 +402,11 @@ sp_core::wasm_export_functions! 
{ } pub fn parallel_incrementer(data: Vec) -> Vec { - let first = data.into_iter().map(|v| v + 2).collect::>(); - let second = sp_tasks::spawn(incrementer, first).join(); - second + let first = data.into_iter().map(|v| v + 2).collect::>(); + let second = sp_tasks::spawn(incrementer, first).join(); + second } - } +} #[cfg(not(feature = "std"))] fn execute_sandboxed( @@ -416,7 +422,7 @@ fn execute_sandboxed( args: &[Value], ) -> Result { if args.len() != 1 { - return Err(sp_sandbox::HostError); + return Err(sp_sandbox::HostError) } let condition = args[0].as_i32().ok_or_else(|| sp_sandbox::HostError)?; if condition != 0 { @@ -430,7 +436,7 @@ fn execute_sandboxed( args: &[Value], ) -> Result { if args.len() != 1 { - return Err(sp_sandbox::HostError); + return Err(sp_sandbox::HostError) } let inc_by = args[0].as_i32().ok_or_else(|| sp_sandbox::HostError)?; e.counter += inc_by as u32; @@ -445,7 +451,8 @@ fn execute_sandboxed( env_builder.add_host_func("env", "inc_counter", env_inc_counter); let memory = match sp_sandbox::Memory::new(1, Some(16)) { Ok(m) => m, - Err(_) => unreachable!(" + Err(_) => unreachable!( + " Memory::new() can return Err only if parameters are borked; \ We passing params here explicitly and they're correct; \ Memory::new() can't return a Error qed" diff --git a/client/executor/src/integration_tests/linux.rs b/client/executor/src/integration_tests/linux.rs index 057cc1332717b..7e0696973dc77 100644 --- a/client/executor/src/integration_tests/linux.rs +++ b/client/executor/src/integration_tests/linux.rs @@ -23,8 +23,8 @@ // borthersome. 
#![cfg(feature = "wasmtime")] -use crate::WasmExecutionMethod; use super::mk_test_runtime; +use crate::WasmExecutionMethod; use codec::Encode as _; mod smaps; @@ -54,17 +54,11 @@ fn memory_consumption_compiled() { } instance - .call_export( - "test_dirty_plenty_memory", - &(heap_base as u32, 1u32).encode(), - ) + .call_export("test_dirty_plenty_memory", &(heap_base as u32, 1u32).encode()) .unwrap(); let probe_1 = probe_rss(&*instance); instance - .call_export( - "test_dirty_plenty_memory", - &(heap_base as u32, 1024u32).encode(), - ) + .call_export("test_dirty_plenty_memory", &(heap_base as u32, 1024u32).encode()) .unwrap(); let probe_2 = probe_rss(&*instance); diff --git a/client/executor/src/integration_tests/linux/smaps.rs b/client/executor/src/integration_tests/linux/smaps.rs index 8088a5a3ea952..b23a188b93a26 100644 --- a/client/executor/src/integration_tests/linux/smaps.rs +++ b/client/executor/src/integration_tests/linux/smaps.rs @@ -19,8 +19,7 @@ //! A tool for extracting information about the memory consumption of the current process from //! the procfs. 
-use std::ops::Range; -use std::collections::BTreeMap; +use std::{collections::BTreeMap, ops::Range}; /// An interface to the /proc/self/smaps /// @@ -69,7 +68,8 @@ impl Smaps { } fn get_map(&self, addr: usize) -> &BTreeMap { - &self.0 + &self + .0 .iter() .find(|(range, _)| addr >= range.start && addr < range.end) .unwrap() diff --git a/client/executor/src/integration_tests/mod.rs b/client/executor/src/integration_tests/mod.rs index 0762306309df4..dabead4799dc8 100644 --- a/client/executor/src/integration_tests/mod.rs +++ b/client/executor/src/integration_tests/mod.rs @@ -20,20 +20,22 @@ mod linux; mod sandbox; -use std::sync::Arc; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use hex_literal::hex; +use sc_executor_common::{runtime_blob::RuntimeBlob, wasm_runtime::WasmModule}; +use sc_runtime_test::wasm_binary_unwrap; use sp_core::{ - blake2_128, blake2_256, ed25519, sr25519, map, Pair, - offchain::{OffchainWorkerExt, OffchainDbExt, testing}, + blake2_128, blake2_256, ed25519, map, + offchain::{testing, OffchainDbExt, OffchainWorkerExt}, + sr25519, traits::Externalities, + Pair, }; -use sc_runtime_test::wasm_binary_unwrap; +use sp_runtime::traits::BlakeTwo256; use sp_state_machine::TestExternalities as CoreTestExternalities; -use sp_trie::{TrieConfiguration, trie_types::Layout}; +use sp_trie::{trie_types::Layout, TrieConfiguration}; use sp_wasm_interface::HostFunctions as _; -use sp_runtime::traits::BlakeTwo256; -use sc_executor_common::{wasm_runtime::WasmModule, runtime_blob::RuntimeBlob}; +use std::sync::Arc; use tracing_subscriber::layer::SubscriberExt; use crate::WasmExecutionMethod; @@ -96,12 +98,7 @@ fn returning_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let output = call_in_wasm( - "test_empty_return", - &[], - wasm_method, - &mut ext, - ).unwrap(); + let output = call_in_wasm("test_empty_return", &[], wasm_method, &mut ext).unwrap(); assert_eq!(output, vec![0u8; 0]); 
} @@ -164,28 +161,13 @@ fn panicking_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let output = call_in_wasm( - "test_panic", - &[], - wasm_method, - &mut ext, - ); + let output = call_in_wasm("test_panic", &[], wasm_method, &mut ext); assert!(output.is_err()); - let output = call_in_wasm( - "test_conditional_panic", - &[0], - wasm_method, - &mut ext, - ); + let output = call_in_wasm("test_conditional_panic", &[0], wasm_method, &mut ext); assert_eq!(Decode::decode(&mut &output.unwrap()[..]), Ok(Vec::::new())); - let output = call_in_wasm( - "test_conditional_panic", - &vec![2].encode(), - wasm_method, - &mut ext, - ); + let output = call_in_wasm("test_conditional_panic", &vec![2].encode(), wasm_method, &mut ext); assert!(output.is_err()); } @@ -197,12 +179,9 @@ fn storage_should_work(wasm_method: WasmExecutionMethod) { let mut ext = ext.ext(); ext.set_storage(b"foo".to_vec(), b"bar".to_vec()); - let output = call_in_wasm( - "test_data_in", - &b"Hello world".to_vec().encode(), - wasm_method, - &mut ext, - ).unwrap(); + let output = + call_in_wasm("test_data_in", &b"Hello world".to_vec().encode(), wasm_method, &mut ext) + .unwrap(); assert_eq!(output, b"all ok!".to_vec().encode()); } @@ -230,12 +209,9 @@ fn clear_prefix_should_work(wasm_method: WasmExecutionMethod) { ext.set_storage(b"bbb".to_vec(), b"5".to_vec()); // This will clear all entries which prefix is "ab". 
- let output = call_in_wasm( - "test_clear_prefix", - &b"ab".to_vec().encode(), - wasm_method, - &mut ext, - ).unwrap(); + let output = + call_in_wasm("test_clear_prefix", &b"ab".to_vec().encode(), wasm_method, &mut ext) + .unwrap(); assert_eq!(output, b"all ok!".to_vec().encode()); } @@ -256,21 +232,12 @@ fn blake2_256_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); assert_eq!( - call_in_wasm( - "test_blake2_256", - &[0], - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_blake2_256", &[0], wasm_method, &mut ext,).unwrap(), blake2_256(&b""[..]).to_vec().encode(), ); assert_eq!( - call_in_wasm( - "test_blake2_256", - &b"Hello world!".to_vec().encode(), - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_blake2_256", &b"Hello world!".to_vec().encode(), wasm_method, &mut ext,) + .unwrap(), blake2_256(&b"Hello world!"[..]).to_vec().encode(), ); } @@ -280,21 +247,12 @@ fn blake2_128_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); assert_eq!( - call_in_wasm( - "test_blake2_128", - &[0], - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_blake2_128", &[0], wasm_method, &mut ext,).unwrap(), blake2_128(&b""[..]).to_vec().encode(), ); assert_eq!( - call_in_wasm( - "test_blake2_128", - &b"Hello world!".to_vec().encode(), - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_blake2_128", &b"Hello world!".to_vec().encode(), wasm_method, &mut ext,) + .unwrap(), blake2_128(&b"Hello world!"[..]).to_vec().encode(), ); } @@ -304,25 +262,14 @@ fn sha2_256_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); assert_eq!( - call_in_wasm( - "test_sha2_256", - &[0], - wasm_method, - &mut ext, - ) - .unwrap(), + call_in_wasm("test_sha2_256", &[0], wasm_method, &mut ext,).unwrap(), 
hex!("e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855") .to_vec() .encode(), ); assert_eq!( - call_in_wasm( - "test_sha2_256", - &b"Hello world!".to_vec().encode(), - wasm_method, - &mut ext, - ) - .unwrap(), + call_in_wasm("test_sha2_256", &b"Hello world!".to_vec().encode(), wasm_method, &mut ext,) + .unwrap(), hex!("c0535e4be2b79ffd93291305436bf889314e4a3faec05ecffcbb7df31ad9e51a") .to_vec() .encode(), @@ -334,26 +281,17 @@ fn twox_256_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); assert_eq!( - call_in_wasm( - "test_twox_256", - &[0], - wasm_method, - &mut ext, - ).unwrap(), - hex!( - "99e9d85137db46ef4bbea33613baafd56f963c64b1f3685a4eb4abd67ff6203a" - ).to_vec().encode(), + call_in_wasm("test_twox_256", &[0], wasm_method, &mut ext,).unwrap(), + hex!("99e9d85137db46ef4bbea33613baafd56f963c64b1f3685a4eb4abd67ff6203a") + .to_vec() + .encode(), ); assert_eq!( - call_in_wasm( - "test_twox_256", - &b"Hello world!".to_vec().encode(), - wasm_method, - &mut ext, - ).unwrap(), - hex!( - "b27dfd7f223f177f2a13647b533599af0c07f68bda23d96d059da2b451a35a74" - ).to_vec().encode(), + call_in_wasm("test_twox_256", &b"Hello world!".to_vec().encode(), wasm_method, &mut ext,) + .unwrap(), + hex!("b27dfd7f223f177f2a13647b533599af0c07f68bda23d96d059da2b451a35a74") + .to_vec() + .encode(), ); } @@ -362,21 +300,12 @@ fn twox_128_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); assert_eq!( - call_in_wasm( - "test_twox_128", - &[0], - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_twox_128", &[0], wasm_method, &mut ext,).unwrap(), hex!("99e9d85137db46ef4bbea33613baafd5").to_vec().encode(), ); assert_eq!( - call_in_wasm( - "test_twox_128", - &b"Hello world!".to_vec().encode(), - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_twox_128", &b"Hello world!".to_vec().encode(), wasm_method, &mut ext,) + .unwrap(), 
hex!("b27dfd7f223f177f2a13647b533599af").to_vec().encode(), ); } @@ -392,12 +321,7 @@ fn ed25519_verify_should_work(wasm_method: WasmExecutionMethod) { calldata.extend_from_slice(sig.as_ref()); assert_eq!( - call_in_wasm( - "test_ed25519_verify", - &calldata.encode(), - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_ed25519_verify", &calldata.encode(), wasm_method, &mut ext,).unwrap(), true.encode(), ); @@ -407,12 +331,7 @@ fn ed25519_verify_should_work(wasm_method: WasmExecutionMethod) { calldata.extend_from_slice(other_sig.as_ref()); assert_eq!( - call_in_wasm( - "test_ed25519_verify", - &calldata.encode(), - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_ed25519_verify", &calldata.encode(), wasm_method, &mut ext,).unwrap(), false.encode(), ); } @@ -428,12 +347,7 @@ fn sr25519_verify_should_work(wasm_method: WasmExecutionMethod) { calldata.extend_from_slice(sig.as_ref()); assert_eq!( - call_in_wasm( - "test_sr25519_verify", - &calldata.encode(), - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_sr25519_verify", &calldata.encode(), wasm_method, &mut ext,).unwrap(), true.encode(), ); @@ -443,12 +357,7 @@ fn sr25519_verify_should_work(wasm_method: WasmExecutionMethod) { calldata.extend_from_slice(other_sig.as_ref()); assert_eq!( - call_in_wasm( - "test_sr25519_verify", - &calldata.encode(), - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_sr25519_verify", &calldata.encode(), wasm_method, &mut ext,).unwrap(), false.encode(), ); } @@ -458,12 +367,7 @@ fn ordered_trie_root_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let trie_input = vec![b"zero".to_vec(), b"one".to_vec(), b"two".to_vec()]; assert_eq!( - call_in_wasm( - "test_ordered_trie_root", - &[0], - wasm_method, - &mut ext.ext(), - ).unwrap(), + call_in_wasm("test_ordered_trie_root", &[0], wasm_method, &mut ext.ext(),).unwrap(), Layout::::ordered_trie_root(trie_input.iter()).as_bytes().encode(), ); } @@ -473,17 
+377,14 @@ fn offchain_index(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let (offchain, _state) = testing::TestOffchainExt::new(); ext.register_extension(OffchainWorkerExt::new(offchain)); - call_in_wasm( - "test_offchain_index_set", - &[0], - wasm_method, - &mut ext.ext(), - ).unwrap(); + call_in_wasm("test_offchain_index_set", &[0], wasm_method, &mut ext.ext()).unwrap(); use sp_core::offchain::OffchainOverlayedChange; - let data = ext.overlayed_changes().clone().offchain_drain_committed().find(|(k, _v)| { - k == &(sp_core::offchain::STORAGE_PREFIX.to_vec(), b"k".to_vec()) - }); + let data = ext + .overlayed_changes() + .clone() + .offchain_drain_committed() + .find(|(k, _v)| k == &(sp_core::offchain::STORAGE_PREFIX.to_vec(), b"k".to_vec())); assert_eq!(data.map(|data| data.1), Some(OffchainOverlayedChange::SetValue(b"v".to_vec()))); } @@ -494,12 +395,7 @@ fn offchain_local_storage_should_work(wasm_method: WasmExecutionMethod) { ext.register_extension(OffchainDbExt::new(offchain.clone())); ext.register_extension(OffchainWorkerExt::new(offchain)); assert_eq!( - call_in_wasm( - "test_offchain_local_storage", - &[0], - wasm_method, - &mut ext.ext(), - ).unwrap(), + call_in_wasm("test_offchain_local_storage", &[0], wasm_method, &mut ext.ext(),).unwrap(), true.encode(), ); assert_eq!(state.read().persistent_storage.get(b"test"), Some(vec![])); @@ -511,24 +407,18 @@ fn offchain_http_should_work(wasm_method: WasmExecutionMethod) { let (offchain, state) = testing::TestOffchainExt::new(); ext.register_extension(OffchainWorkerExt::new(offchain)); state.write().expect_request(testing::PendingRequest { - method: "POST".into(), - uri: "http://localhost:12345".into(), - body: vec![1, 2, 3, 4], - headers: vec![("X-Auth".to_owned(), "test".to_owned())], - sent: true, - response: Some(vec![1, 2, 3]), - response_headers: vec![("X-Auth".to_owned(), "hello".to_owned())], - ..Default::default() - }, - ); + method: "POST".into(), + uri: 
"http://localhost:12345".into(), + body: vec![1, 2, 3, 4], + headers: vec![("X-Auth".to_owned(), "test".to_owned())], + sent: true, + response: Some(vec![1, 2, 3]), + response_headers: vec![("X-Auth".to_owned(), "hello".to_owned())], + ..Default::default() + }); assert_eq!( - call_in_wasm( - "test_offchain_http", - &[0], - wasm_method, - &mut ext.ext(), - ).unwrap(), + call_in_wasm("test_offchain_http", &[0], wasm_method, &mut ext.ext(),).unwrap(), true.encode(), ); } @@ -539,7 +429,7 @@ fn should_trap_when_heap_exhausted(wasm_method: WasmExecutionMethod) { let executor = crate::WasmExecutor::new( wasm_method, - Some(17), // `17` is the initial number of pages compiled into the binary. + Some(17), // `17` is the initial number of pages compiled into the binary. HostFunctions::host_functions(), 8, None, @@ -593,17 +483,13 @@ fn returns_mutable_static_bss(wasm_method: WasmExecutionMethod) { let runtime = mk_test_runtime(wasm_method, 1024); let instance = runtime.new_instance().unwrap(); - let res = instance - .call_export("returns_mutable_static_bss", &[0]) - .unwrap(); + let res = instance.call_export("returns_mutable_static_bss", &[0]).unwrap(); assert_eq!(1, u64::decode(&mut &res[..]).unwrap()); // We expect that every invocation will need to return the initial // value plus one. If the value increases more than that then it is // a sign that the wasm runtime preserves the memory content. 
- let res = instance - .call_export("returns_mutable_static_bss", &[0]) - .unwrap(); + let res = instance.call_export("returns_mutable_static_bss", &[0]).unwrap(); assert_eq!(1, u64::decode(&mut &res[..]).unwrap()); } @@ -638,7 +524,8 @@ fn heap_is_reset_between_calls(wasm_method: WasmExecutionMethod) { let runtime = mk_test_runtime(wasm_method, 1024); let instance = runtime.new_instance().unwrap(); - let heap_base = instance.get_global_const("__heap_base") + let heap_base = instance + .get_global_const("__heap_base") .expect("`__heap_base` is valid") .expect("`__heap_base` exists") .as_i32() @@ -689,8 +576,8 @@ fn parallel_execution(wasm_method: WasmExecutionMethod) { test_wasm_execution!(wasm_tracing_should_work); fn wasm_tracing_should_work(wasm_method: WasmExecutionMethod) { - use std::sync::Mutex; use sc_tracing::{SpanDatum, TraceEvent}; + use std::sync::Mutex; struct TestTraceHandler(Arc>>); @@ -706,36 +593,23 @@ fn wasm_tracing_should_work(wasm_method: WasmExecutionMethod) { let handler = TestTraceHandler(traces.clone()); // Create subscriber with wasm_tracing disabled - let test_subscriber = tracing_subscriber::fmt().finish().with( - sc_tracing::ProfilingLayer::new_with_handler( - Box::new(handler), "default" - ) - ); + let test_subscriber = tracing_subscriber::fmt() + .finish() + .with(sc_tracing::ProfilingLayer::new_with_handler(Box::new(handler), "default")); let _guard = tracing::subscriber::set_default(test_subscriber); let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let span_id = call_in_wasm( - "test_enter_span", - Default::default(), - wasm_method, - &mut ext, - ).unwrap(); + let span_id = + call_in_wasm("test_enter_span", Default::default(), wasm_method, &mut ext).unwrap(); let span_id = u64::decode(&mut &span_id[..]).unwrap(); - assert!( - span_id > 0 - ); + assert!(span_id > 0); - call_in_wasm( - "test_exit_span", - &span_id.encode(), - wasm_method, - &mut ext, - ).unwrap(); + call_in_wasm("test_exit_span", 
&span_id.encode(), wasm_method, &mut ext).unwrap(); // Check there is only the single trace let len = traces.lock().unwrap().len(); @@ -747,12 +621,7 @@ fn wasm_tracing_should_work(wasm_method: WasmExecutionMethod) { assert_eq!(span_datum.name, ""); assert_eq!(values.bool_values.get("wasm").unwrap(), &true); - call_in_wasm( - "test_nested_spans", - Default::default(), - wasm_method, - &mut ext, - ).unwrap(); + call_in_wasm("test_nested_spans", Default::default(), wasm_method, &mut ext).unwrap(); let len = traces.lock().unwrap().len(); assert_eq!(len, 2); } @@ -762,12 +631,7 @@ fn spawning_runtime_instance_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - call_in_wasm( - "test_spawn", - &[], - wasm_method, - &mut ext, - ).unwrap(); + call_in_wasm("test_spawn", &[], wasm_method, &mut ext).unwrap(); } test_wasm_execution!(spawning_runtime_instance_nested_should_work); @@ -775,12 +639,7 @@ fn spawning_runtime_instance_nested_should_work(wasm_method: WasmExecutionMethod let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - call_in_wasm( - "test_nested_spawn", - &[], - wasm_method, - &mut ext, - ).unwrap(); + call_in_wasm("test_nested_spawn", &[], wasm_method, &mut ext).unwrap(); } test_wasm_execution!(panic_in_spawned_instance_panics_on_joining_its_result); @@ -788,12 +647,8 @@ fn panic_in_spawned_instance_panics_on_joining_its_result(wasm_method: WasmExecu let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let error_result = call_in_wasm( - "test_panic_in_spawned", - &[], - wasm_method, - &mut ext, - ).unwrap_err(); + let error_result = + call_in_wasm("test_panic_in_spawned", &[], wasm_method, &mut ext).unwrap_err(); assert!(format!("{}", error_result).contains("Spawned task")); } diff --git a/client/executor/src/integration_tests/sandbox.rs b/client/executor/src/integration_tests/sandbox.rs index 7ce9c94a2db8a..ee3b295ae8a85 100644 --- 
a/client/executor/src/integration_tests/sandbox.rs +++ b/client/executor/src/integration_tests/sandbox.rs @@ -16,9 +16,8 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use super::{TestExternalities, call_in_wasm}; -use crate::WasmExecutionMethod; -use crate::test_wasm_execution; +use super::{call_in_wasm, TestExternalities}; +use crate::{test_wasm_execution, WasmExecutionMethod}; use codec::Encode; @@ -27,7 +26,8 @@ fn sandbox_should_work(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let code = wat::parse_str(r#" + let code = wat::parse_str( + r#" (module (import "env" "assert" (func $assert (param i32))) (import "env" "inc_counter" (func $inc_counter (param i32) (result i32))) @@ -46,17 +46,12 @@ fn sandbox_should_work(wasm_method: WasmExecutionMethod) { call $assert ) ) - "#).unwrap().encode(); + "#, + ) + .unwrap() + .encode(); - assert_eq!( - call_in_wasm( - "test_sandbox", - &code, - wasm_method, - &mut ext, - ).unwrap(), - true.encode(), - ); + assert_eq!(call_in_wasm("test_sandbox", &code, wasm_method, &mut ext,).unwrap(), true.encode(),); } test_wasm_execution!(sandbox_trap); @@ -64,7 +59,8 @@ fn sandbox_trap(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let code = wat::parse_str(r#" + let code = wat::parse_str( + r#" (module (import "env" "assert" (func $assert (param i32))) (func (export "call") @@ -72,17 +68,11 @@ fn sandbox_trap(wasm_method: WasmExecutionMethod) { call $assert ) ) - "#).unwrap(); + "#, + ) + .unwrap(); - assert_eq!( - call_in_wasm( - "test_sandbox", - &code, - wasm_method, - &mut ext, - ).unwrap(), - vec![0], - ); + assert_eq!(call_in_wasm("test_sandbox", &code, wasm_method, &mut ext,).unwrap(), vec![0],); } test_wasm_execution!(start_called); @@ -90,7 +80,8 @@ fn start_called(wasm_method: WasmExecutionMethod) { let mut ext = 
TestExternalities::default(); let mut ext = ext.ext(); - let code = wat::parse_str(r#" + let code = wat::parse_str( + r#" (module (import "env" "assert" (func $assert (param i32))) (import "env" "inc_counter" (func $inc_counter (param i32) (result i32))) @@ -115,17 +106,12 @@ fn start_called(wasm_method: WasmExecutionMethod) { call $assert ) ) - "#).unwrap().encode(); + "#, + ) + .unwrap() + .encode(); - assert_eq!( - call_in_wasm( - "test_sandbox", - &code, - wasm_method, - &mut ext, - ).unwrap(), - true.encode(), - ); + assert_eq!(call_in_wasm("test_sandbox", &code, wasm_method, &mut ext,).unwrap(), true.encode(),); } test_wasm_execution!(invoke_args); @@ -133,7 +119,8 @@ fn invoke_args(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let code = wat::parse_str(r#" + let code = wat::parse_str( + r#" (module (import "env" "assert" (func $assert (param i32))) @@ -154,15 +141,13 @@ fn invoke_args(wasm_method: WasmExecutionMethod) { ) ) ) - "#).unwrap().encode(); + "#, + ) + .unwrap() + .encode(); assert_eq!( - call_in_wasm( - "test_sandbox_args", - &code, - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_sandbox_args", &code, wasm_method, &mut ext,).unwrap(), true.encode(), ); } @@ -172,7 +157,8 @@ fn return_val(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let code = wat::parse_str(r#" + let code = wat::parse_str( + r#" (module (func (export "call") (param $x i32) (result i32) (i32.add @@ -181,15 +167,13 @@ fn return_val(wasm_method: WasmExecutionMethod) { ) ) ) - "#).unwrap().encode(); + "#, + ) + .unwrap() + .encode(); assert_eq!( - call_in_wasm( - "test_sandbox_return_val", - &code, - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_sandbox_return_val", &code, wasm_method, &mut ext,).unwrap(), true.encode(), ); } @@ -199,22 +183,21 @@ fn unlinkable_module(wasm_method: WasmExecutionMethod) { let mut ext = 
TestExternalities::default(); let mut ext = ext.ext(); - let code = wat::parse_str(r#" + let code = wat::parse_str( + r#" (module (import "env" "non-existent" (func)) (func (export "call") ) ) - "#).unwrap().encode(); + "#, + ) + .unwrap() + .encode(); assert_eq!( - call_in_wasm( - "test_sandbox_instantiate", - &code, - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_sandbox_instantiate", &code, wasm_method, &mut ext,).unwrap(), 1u8.encode(), ); } @@ -228,12 +211,7 @@ fn corrupted_module(wasm_method: WasmExecutionMethod) { let code = vec![0u8, 0, 0, 0, 1, 0, 0, 0].encode(); assert_eq!( - call_in_wasm( - "test_sandbox_instantiate", - &code, - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_sandbox_instantiate", &code, wasm_method, &mut ext,).unwrap(), 1u8.encode(), ); } @@ -243,7 +221,8 @@ fn start_fn_ok(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let code = wat::parse_str(r#" + let code = wat::parse_str( + r#" (module (func (export "call") ) @@ -253,15 +232,13 @@ fn start_fn_ok(wasm_method: WasmExecutionMethod) { (start $start) ) - "#).unwrap().encode(); + "#, + ) + .unwrap() + .encode(); assert_eq!( - call_in_wasm( - "test_sandbox_instantiate", - &code, - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_sandbox_instantiate", &code, wasm_method, &mut ext,).unwrap(), 0u8.encode(), ); } @@ -271,7 +248,8 @@ fn start_fn_traps(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let code = wat::parse_str(r#" + let code = wat::parse_str( + r#" (module (func (export "call") ) @@ -282,15 +260,13 @@ fn start_fn_traps(wasm_method: WasmExecutionMethod) { (start $start) ) - "#).unwrap().encode(); + "#, + ) + .unwrap() + .encode(); assert_eq!( - call_in_wasm( - "test_sandbox_instantiate", - &code, - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_sandbox_instantiate", &code, wasm_method, &mut ext,).unwrap(), 
2u8.encode(), ); } @@ -300,19 +276,18 @@ fn get_global_val_works(wasm_method: WasmExecutionMethod) { let mut ext = TestExternalities::default(); let mut ext = ext.ext(); - let code = wat::parse_str(r#" + let code = wat::parse_str( + r#" (module (global (export "test_global") i64 (i64.const 500)) ) - "#).unwrap().encode(); + "#, + ) + .unwrap() + .encode(); assert_eq!( - call_in_wasm( - "test_sandbox_get_global_val", - &code, - wasm_method, - &mut ext, - ).unwrap(), + call_in_wasm("test_sandbox_get_global_val", &code, wasm_method, &mut ext,).unwrap(), 500i64.encode(), ); } diff --git a/client/executor/src/lib.rs b/client/executor/src/lib.rs index c0cbf9c94dafd..f4b972a86f27a 100644 --- a/client/executor/src/lib.rs +++ b/client/executor/src/lib.rs @@ -29,26 +29,25 @@ //! wasm engine used, instance cache. #![warn(missing_docs)] -#![recursion_limit="128"] +#![recursion_limit = "128"] #[macro_use] mod native_executor; -mod wasm_runtime; #[cfg(test)] mod integration_tests; +mod wasm_runtime; -pub use wasmi; +pub use codec::Codec; pub use native_executor::{ - with_externalities_safe, NativeExecutor, WasmExecutor, NativeExecutionDispatch, + with_externalities_safe, NativeExecutionDispatch, NativeExecutor, WasmExecutor, }; -pub use sp_version::{RuntimeVersion, NativeVersion}; -pub use codec::Codec; #[doc(hidden)] -pub use sp_core::traits::{Externalities}; +pub use sp_core::traits::Externalities; +pub use sp_version::{NativeVersion, RuntimeVersion}; #[doc(hidden)] pub use sp_wasm_interface; -pub use wasm_runtime::WasmExecutionMethod; -pub use wasm_runtime::read_embedded_version; +pub use wasm_runtime::{read_embedded_version, WasmExecutionMethod}; +pub use wasmi; pub use sc_executor_common::{error, sandbox}; @@ -68,10 +67,10 @@ pub trait RuntimeInfo { #[cfg(test)] mod tests { use super::*; + use sc_executor_common::runtime_blob::RuntimeBlob; use sc_runtime_test::wasm_binary_unwrap; use sp_io::TestExternalities; use sp_wasm_interface::HostFunctions; - use 
sc_executor_common::runtime_blob::RuntimeBlob; #[test] fn call_in_interpreted_wasm_works() { diff --git a/client/executor/src/native_executor.rs b/client/executor/src/native_executor.rs index 6fc34b6f1a322..3bb3b77009da4 100644 --- a/client/executor/src/native_executor.rs +++ b/client/executor/src/native_executor.rs @@ -17,32 +17,36 @@ // along with this program. If not, see . use crate::{ - RuntimeInfo, error::{Error, Result}, + error::{Error, Result}, wasm_runtime::{RuntimeCache, WasmExecutionMethod}, + RuntimeInfo, }; use std::{ collections::HashMap, - panic::{UnwindSafe, AssertUnwindSafe}, - result, - sync::{Arc, atomic::{AtomicU64, Ordering}, mpsc}, + panic::{AssertUnwindSafe, UnwindSafe}, path::PathBuf, + result, + sync::{ + atomic::{AtomicU64, Ordering}, + mpsc, Arc, + }, }; -use sp_version::{NativeVersion, RuntimeVersion}; use codec::{Decode, Encode}; -use sp_core::{ - NativeOrEncoded, - traits::{CodeExecutor, Externalities, RuntimeCode, RuntimeSpawnExt, RuntimeSpawn}, -}; use log::trace; -use sp_wasm_interface::{HostFunctions, Function}; use sc_executor_common::{ - wasm_runtime::{WasmInstance, WasmModule, InvokeMethod}, runtime_blob::RuntimeBlob, + wasm_runtime::{InvokeMethod, WasmInstance, WasmModule}, +}; +use sp_core::{ + traits::{CodeExecutor, Externalities, RuntimeCode, RuntimeSpawn, RuntimeSpawnExt}, + NativeOrEncoded, }; use sp_externalities::ExternalitiesExt as _; use sp_tasks::new_async_externalities; +use sp_version::{NativeVersion, RuntimeVersion}; +use sp_wasm_interface::{Function, HostFunctions}; /// Default num of pages for the heap const DEFAULT_HEAP_PAGES: u64 = 2048; @@ -51,25 +55,23 @@ const DEFAULT_HEAP_PAGES: u64 = 2048; /// /// If the inner closure panics, it will be caught and return an error. 
pub fn with_externalities_safe(ext: &mut dyn Externalities, f: F) -> Result - where F: UnwindSafe + FnOnce() -> U +where + F: UnwindSafe + FnOnce() -> U, { - sp_externalities::set_and_run_with_externalities( - ext, - move || { - // Substrate uses custom panic hook that terminates process on panic. Disable - // termination for the native call. - let _guard = sp_panic_handler::AbortGuard::force_unwind(); - std::panic::catch_unwind(f).map_err(|e| { - if let Some(err) = e.downcast_ref::() { - Error::RuntimePanicked(err.clone()) - } else if let Some(err) = e.downcast_ref::<&'static str>() { - Error::RuntimePanicked(err.to_string()) - } else { - Error::RuntimePanicked("Unknown panic".into()) - } - }) - }, - ) + sp_externalities::set_and_run_with_externalities(ext, move || { + // Substrate uses custom panic hook that terminates process on panic. Disable + // termination for the native call. + let _guard = sp_panic_handler::AbortGuard::force_unwind(); + std::panic::catch_unwind(f).map_err(|e| { + if let Some(err) = e.downcast_ref::() { + Error::RuntimePanicked(err.clone()) + } else if let Some(err) = e.downcast_ref::<&'static str>() { + Error::RuntimePanicked(err.to_string()) + } else { + Error::RuntimePanicked("Unknown panic".into()) + } + }) + }) } /// Delegate for dispatching a CodeExecutor call. @@ -163,7 +165,8 @@ impl WasmExecutor { allow_missing_host_functions: bool, f: F, ) -> Result - where F: FnOnce( + where + F: FnOnce( AssertUnwindSafe<&Arc>, AssertUnwindSafe<&dyn WasmInstance>, Option<&RuntimeVersion>, @@ -182,7 +185,7 @@ impl WasmExecutor { let instance = AssertUnwindSafe(instance); let ext = AssertUnwindSafe(ext); f(module, instance, version, ext) - } + }, )? { Ok(r) => r, Err(e) => Err(e), @@ -245,7 +248,7 @@ impl sp_core::traits::ReadRuntimeVersion for WasmExecutor { .map_err(|e| format!("Failed to read the static section: {:?}", e)) .map(|v| v.map(|v| v.encode()))? 
{ - return Ok(version); + return Ok(version) } // If the blob didn't have embedded runtime version section, we fallback to the legacy @@ -296,13 +299,13 @@ impl NativeExecutor { .into_iter() // filter out any host function overrides provided. .filter(|host_fn| { - extended.iter() + extended + .iter() .find(|ext_host_fn| host_fn.name() == ext_host_fn.name()) .is_none() }) .collect::>(); - // Add the custom host functions provided by the user. host_functions.extend(extended); let wasm_executor = WasmExecutor::new( @@ -331,13 +334,10 @@ impl RuntimeInfo for NativeExecutor { ext: &mut dyn Externalities, runtime_code: &RuntimeCode, ) -> Result { - self.wasm.with_instance( - runtime_code, - ext, - false, - |_module, _instance, version, _ext| - Ok(version.cloned().ok_or_else(|| Error::ApiError("Unknown version".into()))), - ) + self.wasm + .with_instance(runtime_code, ext, false, |_module, _instance, version, _ext| { + Ok(version.cloned().ok_or_else(|| Error::ApiError("Unknown version".into()))) + }) } } @@ -358,70 +358,67 @@ impl RuntimeSpawn for RuntimeInstanceSpawn { let module = self.module.clone(); let scheduler = self.scheduler.clone(); - self.scheduler.spawn("executor-extra-runtime-instance", Box::pin(async move { - let module = AssertUnwindSafe(module); - - let async_ext = match new_async_externalities(scheduler.clone()) { - Ok(val) => val, - Err(e) => { - log::error!( - target: "executor", - "Failed to setup externalities for async context: {}", - e, - ); - - // This will drop sender and receiver end will panic - return; - } - }; - - let mut async_ext = match async_ext.with_runtime_spawn( - Box::new(RuntimeInstanceSpawn::new(module.clone(), scheduler)) - ) { - Ok(val) => val, - Err(e) => { - log::error!( - target: "executor", - "Failed to setup runtime extension for async externalities: {}", - e, - ); - - // This will drop sender and receiver end will panic - return; - } - }; + self.scheduler.spawn( + "executor-extra-runtime-instance", + Box::pin(async move { + 
let module = AssertUnwindSafe(module); - let result = with_externalities_safe( - &mut async_ext, - move || { + let async_ext = match new_async_externalities(scheduler.clone()) { + Ok(val) => val, + Err(e) => { + log::error!( + target: "executor", + "Failed to setup externalities for async context: {}", + e, + ); + // This will drop sender and receiver end will panic + return + }, + }; + + let mut async_ext = match async_ext.with_runtime_spawn(Box::new( + RuntimeInstanceSpawn::new(module.clone(), scheduler), + )) { + Ok(val) => val, + Err(e) => { + log::error!( + target: "executor", + "Failed to setup runtime extension for async externalities: {}", + e, + ); + + // This will drop sender and receiver end will panic + return + }, + }; + + let result = with_externalities_safe(&mut async_ext, move || { // FIXME: Should be refactored to shared "instance factory". // Instantiating wasm here every time is suboptimal at the moment, shared // pool of instances should be used. // // https://github.com/paritytech/substrate/issues/7354 - let instance = module.new_instance() - .expect("Failed to create new instance from module"); + let instance = + module.new_instance().expect("Failed to create new instance from module"); - instance.call( - InvokeMethod::TableWithWrapper { dispatcher_ref, func }, - &data[..], - ).expect("Failed to invoke instance.") - } - ); - - match result { - Ok(output) => { - let _ = sender.send(output); - }, - Err(error) => { - // If execution is panicked, the `join` in the original runtime code will panic as well, - // since the sender is dropped without sending anything. 
- log::error!("Call error in spawned task: {:?}", error); - }, - } - })); + instance + .call(InvokeMethod::TableWithWrapper { dispatcher_ref, func }, &data[..]) + .expect("Failed to invoke instance.") + }); + match result { + Ok(output) => { + let _ = sender.send(output); + }, + Err(error) => { + // If execution is panicked, the `join` in the original runtime code will panic as well, + // since the sender is dropped without sending anything. + log::error!("Call error in spawned task: {:?}", error); + }, + } + }), + ); new_handle } @@ -438,12 +435,7 @@ impl RuntimeInstanceSpawn { module: Arc, scheduler: Box, ) -> Self { - Self { - module, - scheduler, - counter: 0.into(), - tasks: HashMap::new().into(), - } + Self { module, scheduler, counter: 0.into(), tasks: HashMap::new().into() } } fn with_externalities_and_module( @@ -495,17 +487,13 @@ impl CodeExecutor for NativeExecutor { ext, false, |module, instance, onchain_version, mut ext| { - let onchain_version = onchain_version.ok_or_else( - || Error::ApiError("Unknown version".into()) - )?; + let onchain_version = + onchain_version.ok_or_else(|| Error::ApiError("Unknown version".into()))?; - let can_call_with = onchain_version.can_call_with(&self.native_version.runtime_version); + let can_call_with = + onchain_version.can_call_with(&self.native_version.runtime_version); - match ( - use_native, - can_call_with, - native_call, - ) { + match (use_native, can_call_with, native_call) { (_, false, _) | (false, _, _) => { if !can_call_with { trace!( @@ -516,13 +504,10 @@ impl CodeExecutor for NativeExecutor { ); } - with_externalities_safe( - &mut **ext, - move || { - preregister_builtin_ext(module.clone()); - instance.call_export(method, data).map(NativeOrEncoded::Encoded) - } - ) + with_externalities_safe(&mut **ext, move || { + preregister_builtin_ext(module.clone()); + instance.call_export(method, data).map(NativeOrEncoded::Encoded) + }) }, (true, true, Some(call)) => { trace!( @@ -535,13 +520,10 @@ impl CodeExecutor 
for NativeExecutor { used_native = true; let res = with_externalities_safe(&mut **ext, move || (call)()) - .and_then(|r| r - .map(NativeOrEncoded::Native) - .map_err(Error::ApiError) - ); + .and_then(|r| r.map(NativeOrEncoded::Native).map_err(Error::ApiError)); Ok(res) - } + }, _ => { trace!( target: "executor", @@ -552,9 +534,9 @@ impl CodeExecutor for NativeExecutor { used_native = true; Ok(D::dispatch(&mut **ext, method, data).map(NativeOrEncoded::Encoded)) - } + }, } - } + }, ); (result, used_native) } @@ -675,16 +657,9 @@ mod tests { #[test] fn native_executor_registers_custom_interface() { - let executor = NativeExecutor::::new( - WasmExecutionMethod::Interpreted, - None, - 8, - ); + let executor = NativeExecutor::::new(WasmExecutionMethod::Interpreted, None, 8); my_interface::HostFunctions::host_functions().iter().for_each(|function| { - assert_eq!( - executor.wasm.host_functions.iter().filter(|f| f == &function).count(), - 2, - ); + assert_eq!(executor.wasm.host_functions.iter().filter(|f| f == &function).count(), 2,); }); my_interface::say_hello_world("hey"); diff --git a/client/executor/src/wasm_runtime.rs b/client/executor/src/wasm_runtime.rs index 4e6febbf15b67..8674e7239255b 100644 --- a/client/executor/src/wasm_runtime.rs +++ b/client/executor/src/wasm_runtime.rs @@ -21,17 +21,19 @@ //! The primary means of accessing the runtimes is through a cache which saves the reusable //! components of the runtime that are expensive to initialize. 
-use std::sync::Arc; use crate::error::{Error, WasmError}; -use parking_lot::Mutex; use codec::Decode; -use sp_core::traits::{Externalities, RuntimeCode, FetchRuntimeCode}; -use sp_version::RuntimeVersion; -use std::panic::AssertUnwindSafe; -use std::path::{Path, PathBuf}; +use parking_lot::Mutex; use sc_executor_common::{ - wasm_runtime::{WasmModule, WasmInstance}, runtime_blob::RuntimeBlob, + wasm_runtime::{WasmInstance, WasmModule}, +}; +use sp_core::traits::{Externalities, FetchRuntimeCode, RuntimeCode}; +use sp_version::RuntimeVersion; +use std::{ + panic::AssertUnwindSafe, + path::{Path, PathBuf}, + sync::Arc, }; use sp_wasm_interface::Function; @@ -70,27 +72,26 @@ struct VersionedRuntime { impl VersionedRuntime { /// Run the given closure `f` with an instance of this runtime. - fn with_instance<'c, R, F>( - &self, - ext: &mut dyn Externalities, - f: F, - ) -> Result - where F: FnOnce( + fn with_instance<'c, R, F>(&self, ext: &mut dyn Externalities, f: F) -> Result + where + F: FnOnce( &Arc, &dyn WasmInstance, Option<&RuntimeVersion>, - &mut dyn Externalities) - -> Result, + &mut dyn Externalities, + ) -> Result, { // Find a free instance - let instance = self.instances + let instance = self + .instances .iter() .enumerate() .find_map(|(index, i)| i.try_lock().map(|i| (index, i))); match instance { Some((index, mut locked)) => { - let (instance, new_inst) = locked.take() + let (instance, new_inst) = locked + .take() .map(|r| Ok((r, false))) .unwrap_or_else(|| self.module.new_instance().map(|i| (i, true)))?; @@ -131,7 +132,7 @@ impl VersionedRuntime { let instance = self.module.new_instance()?; f(&self.module, &*instance, self.version.as_ref(), ext) - } + }, } } } @@ -168,11 +169,7 @@ impl RuntimeCache { /// `cache_path` allows to specify an optional directory where the executor can store files /// for caching. 
pub fn new(max_runtime_instances: usize, cache_path: Option) -> RuntimeCache { - RuntimeCache { - runtimes: Default::default(), - max_runtime_instances, - cache_path, - } + RuntimeCache { runtimes: Default::default(), max_runtime_instances, cache_path } } /// Prepares a WASM module instance and executes given function for it. @@ -213,29 +210,31 @@ impl RuntimeCache { allow_missing_func_imports: bool, f: F, ) -> Result, Error> - where F: FnOnce( + where + F: FnOnce( &Arc, &dyn WasmInstance, Option<&RuntimeVersion>, - &mut dyn Externalities) - -> Result, + &mut dyn Externalities, + ) -> Result, { let code_hash = &runtime_code.hash; let heap_pages = runtime_code.heap_pages.unwrap_or(default_heap_pages); let mut runtimes = self.runtimes.lock(); // this must be released prior to calling f - let pos = runtimes.iter().position(|r| r.as_ref().map_or( - false, - |r| r.wasm_method == wasm_method && - r.code_hash == *code_hash && - r.heap_pages == heap_pages - )); + let pos = runtimes.iter().position(|r| { + r.as_ref().map_or(false, |r| { + r.wasm_method == wasm_method && + r.code_hash == *code_hash && + r.heap_pages == heap_pages + }) + }); let runtime = match pos { Some(n) => runtimes[n] .clone() .expect("`position` only returns `Some` for entries that are `Some`"), - None => { + None => { let code = runtime_code.fetch_runtime_code().ok_or(WasmError::CodeNotFound)?; #[cfg(not(target_os = "unknown"))] @@ -262,30 +261,29 @@ impl RuntimeCache { result.version, time.elapsed().as_millis(), ); - } + }, Err(ref err) => { log::warn!(target: "wasm-runtime", "Cannot create a runtime: {:?}", err); - } + }, } Arc::new(result?) - } + }, }; // Rearrange runtimes by last recently used. match pos { Some(0) => {}, - Some(n) => { - for i in (1 .. n + 1).rev() { + Some(n) => + for i in (1..n + 1).rev() { runtimes.swap(i, i - 1); - } - } + }, None => { - runtimes[MAX_RUNTIMES-1] = Some(runtime.clone()); - for i in (1 .. 
MAX_RUNTIMES).rev() { + runtimes[MAX_RUNTIMES - 1] = Some(runtime.clone()); + for i in (1..MAX_RUNTIMES).rev() { runtimes.swap(i, i - 1); } - } + }, } drop(runtimes); @@ -317,49 +315,48 @@ pub fn create_wasm_runtime_with_code( allow_missing_func_imports, ) .map(|runtime| -> Arc { Arc::new(runtime) }) - } + }, #[cfg(feature = "wasmtime")] - WasmExecutionMethod::Compiled => { - sc_executor_wasmtime::create_runtime( - blob, - sc_executor_wasmtime::Config { - heap_pages: heap_pages as u32, - allow_missing_func_imports, - cache_path: cache_path.map(ToOwned::to_owned), - semantics: sc_executor_wasmtime::Semantics { - fast_instance_reuse: true, - deterministic_stack_limit: None, - canonicalize_nans: false, - }, + WasmExecutionMethod::Compiled => sc_executor_wasmtime::create_runtime( + blob, + sc_executor_wasmtime::Config { + heap_pages: heap_pages as u32, + allow_missing_func_imports, + cache_path: cache_path.map(ToOwned::to_owned), + semantics: sc_executor_wasmtime::Semantics { + fast_instance_reuse: true, + deterministic_stack_limit: None, + canonicalize_nans: false, }, - host_functions, - ).map(|runtime| -> Arc { Arc::new(runtime) }) - }, + }, + host_functions, + ) + .map(|runtime| -> Arc { Arc::new(runtime) }), } } fn decode_version(mut version: &[u8]) -> Result { let v: RuntimeVersion = sp_api::OldRuntimeVersion::decode(&mut &version[..]) - .map_err(|_| - WasmError::Instantiation( - "failed to decode \"Core_version\" result using old runtime version".into(), - ) - )?.into(); + .map_err(|_| { + WasmError::Instantiation( + "failed to decode \"Core_version\" result using old runtime version".into(), + ) + })? 
+ .into(); let core_api_id = sp_core::hashing::blake2_64(b"Core"); if v.has_api_with(&core_api_id, |v| v >= 3) { - sp_api::RuntimeVersion::decode(&mut version) - .map_err(|_| - WasmError::Instantiation("failed to decode \"Core_version\" result".into()) - ) + sp_api::RuntimeVersion::decode(&mut version).map_err(|_| { + WasmError::Instantiation("failed to decode \"Core_version\" result".into()) + }) } else { Ok(v) } } fn decode_runtime_apis(apis: &[u8]) -> Result, WasmError> { - use std::convert::TryFrom; use sp_api::RUNTIME_API_INFO_SIZE; + use std::convert::TryFrom; apis.chunks(RUNTIME_API_INFO_SIZE) .map(|chunk| { @@ -367,9 +364,7 @@ fn decode_runtime_apis(apis: &[u8]) -> Result, WasmError> { // completely divide by `RUNTIME_API_INFO_SIZE`. <[u8; RUNTIME_API_INFO_SIZE]>::try_from(chunk) .map(sp_api::deserialize_runtime_api_info) - .map_err(|_| { - WasmError::Other("a clipped runtime api info declaration".to_owned()) - }) + .map_err(|_| WasmError::Other("a clipped runtime api info declaration".to_owned())) }) .collect::, WasmError>>() } @@ -379,9 +374,7 @@ fn decode_runtime_apis(apis: &[u8]) -> Result, WasmError> { /// /// If there are no such sections, it returns `None`. If there is an error during decoding those /// sections, `Err` will be returned. -pub fn read_embedded_version( - blob: &RuntimeBlob, -) -> Result, WasmError> { +pub fn read_embedded_version(blob: &RuntimeBlob) -> Result, WasmError> { if let Some(mut version_section) = blob.custom_section_contents("runtime_version") { // We do not use `decode_version` here because the runtime_version section is not supposed // to ever contain a legacy version. Apart from that `decode_version` relies on presence @@ -389,9 +382,7 @@ pub fn read_embedded_version( // the structure found in the `runtime_version` always contain an empty `apis` field. Therefore // the version read will be mistakenly treated as an legacy one. 
let mut decoded_version = sp_api::RuntimeVersion::decode(&mut version_section) - .map_err(|_| - WasmError::Instantiation("failed to decode version section".into()) - )?; + .map_err(|_| WasmError::Instantiation("failed to decode version section".into()))?; // Don't stop on this and check if there is a special section that encodes all runtime APIs. if let Some(apis_section) = blob.custom_section_contents("runtime_apis") { @@ -443,10 +434,10 @@ fn create_versioned_wasm_runtime( // The following unwind safety assertion is OK because if the method call panics, the // runtime will be dropped. let runtime = AssertUnwindSafe(runtime.as_ref()); - crate::native_executor::with_externalities_safe( - &mut **ext, - move || runtime.new_instance()?.call("Core_version".into(), &[]) - ).map_err(|_| WasmError::Instantiation("panic in call to get runtime version".into()))? + crate::native_executor::with_externalities_safe(&mut **ext, move || { + runtime.new_instance()?.call("Core_version".into(), &[]) + }) + .map_err(|_| WasmError::Instantiation("panic in call to get runtime version".into()))? 
}; if let Ok(version_buf) = version_result { @@ -457,23 +448,16 @@ fn create_versioned_wasm_runtime( let mut instances = Vec::with_capacity(max_instances); instances.resize_with(max_instances, || Mutex::new(None)); - Ok(VersionedRuntime { - code_hash, - module: runtime, - version, - heap_pages, - wasm_method, - instances, - }) + Ok(VersionedRuntime { code_hash, module: runtime, version, heap_pages, wasm_method, instances }) } #[cfg(test)] mod tests { use super::*; - use sp_wasm_interface::HostFunctions; + use codec::Encode; use sp_api::{Core, RuntimeApiInfo}; + use sp_wasm_interface::HostFunctions; use substrate_test_runtime::Block; - use codec::Encode; #[test] fn host_functions_are_equal() { @@ -533,7 +517,8 @@ mod tests { let wasm = sp_maybe_compressed_blob::decompress( substrate_test_runtime::wasm_binary_unwrap(), sp_maybe_compressed_blob::CODE_BLOB_BOMB_LIMIT, - ).expect("Decompressing works"); + ) + .expect("Decompressing works"); let runtime_version = RuntimeVersion { spec_name: "test_replace".into(), @@ -545,10 +530,8 @@ mod tests { transaction_version: 100, }; - let embedded = sp_version::embed::embed_runtime_version( - &wasm, - runtime_version.clone(), - ).expect("Embedding works"); + let embedded = sp_version::embed::embed_runtime_version(&wasm, runtime_version.clone()) + .expect("Embedding works"); let blob = RuntimeBlob::new(&embedded).expect("Embedded blob is valid"); let read_version = read_embedded_version(&blob) diff --git a/client/executor/wasmi/src/lib.rs b/client/executor/wasmi/src/lib.rs index 1bafa39494098..d11d867e9a1bf 100644 --- a/client/executor/wasmi/src/lib.rs +++ b/client/executor/wasmi/src/lib.rs @@ -18,25 +18,26 @@ //! This crate provides an implementation of `WasmModule` that is baked by wasmi. 
-use std::{str, cell::RefCell, sync::Arc}; -use wasmi::{ - Module, ModuleInstance, MemoryInstance, MemoryRef, TableRef, ImportsBuilder, ModuleRef, - FuncInstance, memory_units::Pages, - RuntimeValue::{I32, I64, self}, +use codec::{Decode, Encode}; +use log::{debug, error, trace}; +use sc_executor_common::{ + error::{Error, WasmError}, + runtime_blob::{DataSegmentsSnapshot, RuntimeBlob}, + sandbox, + wasm_runtime::{InvokeMethod, WasmInstance, WasmModule}, }; -use codec::{Encode, Decode}; use sp_core::sandbox as sandbox_primitives; -use log::{error, trace, debug}; +use sp_runtime_interface::unpack_ptr_and_len; use sp_wasm_interface::{ - FunctionContext, Pointer, WordSize, Sandbox, MemoryId, Result as WResult, Function, + Function, FunctionContext, MemoryId, Pointer, Result as WResult, Sandbox, WordSize, }; -use sp_runtime_interface::unpack_ptr_and_len; -use sc_executor_common::wasm_runtime::{WasmModule, WasmInstance, InvokeMethod}; -use sc_executor_common::{ - error::{Error, WasmError}, - sandbox, +use std::{cell::RefCell, str, sync::Arc}; +use wasmi::{ + memory_units::Pages, + FuncInstance, ImportsBuilder, MemoryInstance, MemoryRef, Module, ModuleInstance, ModuleRef, + RuntimeValue::{self, I32, I64}, + TableRef, }; -use sc_executor_common::runtime_blob::{RuntimeBlob, DataSegmentsSnapshot}; struct FunctionExecutor<'a> { sandbox_store: sandbox::Store, @@ -109,16 +110,14 @@ impl<'a> FunctionContext for FunctionExecutor<'a> { fn allocate_memory(&mut self, size: WordSize) -> WResult> { let heap = &mut self.heap; - self.memory.with_direct_access_mut(|mem| { - heap.allocate(mem, size).map_err(|e| e.to_string()) - }) + self.memory + .with_direct_access_mut(|mem| heap.allocate(mem, size).map_err(|e| e.to_string())) } fn deallocate_memory(&mut self, ptr: Pointer) -> WResult<()> { let heap = &mut self.heap; - self.memory.with_direct_access_mut(|mem| { - heap.deallocate(mem, ptr).map_err(|e| e.to_string()) - }) + self.memory + .with_direct_access_mut(|mem| heap.deallocate(mem, 
ptr).map_err(|e| e.to_string())) } fn sandbox(&mut self) -> &mut dyn Sandbox { @@ -173,11 +172,7 @@ impl<'a> Sandbox for FunctionExecutor<'a> { self.sandbox_store.memory_teardown(memory_id).map_err(|e| e.to_string()) } - fn memory_new( - &mut self, - initial: u32, - maximum: u32, - ) -> WResult { + fn memory_new(&mut self, initial: u32, maximum: u32) -> WResult { self.sandbox_store.new_memory(initial, maximum).map_err(|e| e.to_string()) } @@ -213,7 +208,7 @@ impl<'a> Sandbox for FunctionExecutor<'a> { self.write_memory(return_val, val).map_err(|_| "Return value buffer is OOB")?; Ok(sandbox_primitives::ERR_OK) }) - } + }, Err(_) => Ok(sandbox_primitives::ERR_EXECUTION), } } @@ -231,9 +226,12 @@ impl<'a> Sandbox for FunctionExecutor<'a> { ) -> WResult { // Extract a dispatch thunk from instance's table by the specified index. let dispatch_thunk = { - let table = self.table.as_ref() + let table = self + .table + .as_ref() .ok_or_else(|| "Runtime doesn't have a table; sandbox is unavailable")?; - table.get(dispatch_thunk_id) + table + .get(dispatch_thunk_id) .map_err(|_| "dispatch_thunk_idx is out of the table bounds")? .ok_or_else(|| "dispatch_thunk_idx points on an empty table entry")? 
}; @@ -248,8 +246,7 @@ impl<'a> Sandbox for FunctionExecutor<'a> { .map(|i| i.register(&mut self.sandbox_store)) { Ok(instance_idx) => instance_idx, - Err(sandbox::InstantiationError::StartTrapped) => - sandbox_primitives::ERR_EXECUTION, + Err(sandbox::InstantiationError::StartTrapped) => sandbox_primitives::ERR_EXECUTION, Err(_) => sandbox_primitives::ERR_MODULE, }; @@ -288,7 +285,7 @@ struct Resolver<'a> { impl<'a> Resolver<'a> { fn new( - host_functions: &'a[&'static dyn Function], + host_functions: &'a [&'static dyn Function], allow_missing_func_imports: bool, heap_pages: usize, ) -> Resolver<'a> { @@ -303,25 +300,23 @@ impl<'a> Resolver<'a> { } impl<'a> wasmi::ModuleImportResolver for Resolver<'a> { - fn resolve_func(&self, name: &str, signature: &wasmi::Signature) - -> std::result::Result - { + fn resolve_func( + &self, + name: &str, + signature: &wasmi::Signature, + ) -> std::result::Result { let signature = sp_wasm_interface::Signature::from(signature); for (function_index, function) in self.host_functions.iter().enumerate() { if name == function.name() { if signature == function.signature() { - return Ok( - wasmi::FuncInstance::alloc_host(signature.into(), function_index), - ) + return Ok(wasmi::FuncInstance::alloc_host(signature.into(), function_index)) } else { - return Err(wasmi::Error::Instantiation( - format!( - "Invalid signature for function `{}` expected `{:?}`, got `{:?}`", - function.name(), - signature, - function.signature(), - ), - )) + return Err(wasmi::Error::Instantiation(format!( + "Invalid signature for function `{}` expected `{:?}`, got `{:?}`", + function.name(), + signature, + function.signature(), + ))) } } } @@ -333,9 +328,7 @@ impl<'a> wasmi::ModuleImportResolver for Resolver<'a> { Ok(wasmi::FuncInstance::alloc_host(signature.into(), id)) } else { - Err(wasmi::Error::Instantiation( - format!("Export {} not found", name), - )) + Err(wasmi::Error::Instantiation(format!("Export {} not found", name))) } } @@ -346,15 +339,14 @@ impl<'a> 
wasmi::ModuleImportResolver for Resolver<'a> { ) -> Result { if field_name == "memory" { match &mut *self.import_memory.borrow_mut() { - Some(_) => Err(wasmi::Error::Instantiation( - "Memory can not be imported twice!".into(), - )), + Some(_) => + Err(wasmi::Error::Instantiation("Memory can not be imported twice!".into())), memory_ref @ None => { if memory_type - .maximum() - .map(|m| m.saturating_sub(memory_type.initial())) - .map(|m| self.heap_pages > m as usize) - .unwrap_or(false) + .maximum() + .map(|m| m.saturating_sub(memory_type.initial())) + .map(|m| self.heap_pages > m as usize) + .unwrap_or(false) { Err(wasmi::Error::Instantiation(format!( "Heap pages ({}) is greater than imported memory maximum ({}).", @@ -372,35 +364,40 @@ impl<'a> wasmi::ModuleImportResolver for Resolver<'a> { *memory_ref = Some(memory.clone()); Ok(memory) } - } + }, } } else { - Err(wasmi::Error::Instantiation( - format!("Unknown memory reference with name: {}", field_name), - )) + Err(wasmi::Error::Instantiation(format!( + "Unknown memory reference with name: {}", + field_name + ))) } } } impl<'a> wasmi::Externals for FunctionExecutor<'a> { - fn invoke_index(&mut self, index: usize, args: wasmi::RuntimeArgs) - -> Result, wasmi::Trap> - { + fn invoke_index( + &mut self, + index: usize, + args: wasmi::RuntimeArgs, + ) -> Result, wasmi::Trap> { let mut args = args.as_ref().iter().copied().map(Into::into); if let Some(function) = self.host_functions.get(index) { - function.execute(self, &mut args) + function + .execute(self, &mut args) .map_err(|msg| Error::FunctionExecution(function.name().to_string(), msg)) .map_err(wasmi::Trap::from) .map(|v| v.map(Into::into)) - } else if self.allow_missing_func_imports - && index >= self.host_functions.len() - && index < self.host_functions.len() + self.missing_functions.len() + } else if self.allow_missing_func_imports && + index >= self.host_functions.len() && + index < self.host_functions.len() + self.missing_functions.len() { 
Err(Error::from(format!( "Function `{}` is only a stub. Calling a stub is not allowed.", self.missing_functions[index - self.host_functions.len()], - )).into()) + )) + .into()) } else { Err(Error::from(format!("Could not find host function with index: {}", index)).into()) } @@ -462,25 +459,26 @@ fn call_in_wasm_module( function_executor.write_memory(offset, data)?; let result = match method { - InvokeMethod::Export(method) => { - module_instance.invoke_export( - method, - &[I32(u32::from(offset) as i32), I32(data.len() as i32)], - &mut function_executor, - ) - }, + InvokeMethod::Export(method) => module_instance.invoke_export( + method, + &[I32(u32::from(offset) as i32), I32(data.len() as i32)], + &mut function_executor, + ), InvokeMethod::Table(func_ref) => { - let func = table.ok_or(Error::NoTable)? + let func = table + .ok_or(Error::NoTable)? .get(func_ref)? .ok_or(Error::NoTableEntryWithIndex(func_ref))?; FuncInstance::invoke( &func, &[I32(u32::from(offset) as i32), I32(data.len() as i32)], &mut function_executor, - ).map_err(Into::into) + ) + .map_err(Into::into) }, InvokeMethod::TableWithWrapper { dispatcher_ref, func } => { - let dispatcher = table.ok_or(Error::NoTable)? + let dispatcher = table + .ok_or(Error::NoTable)? .get(dispatcher_ref)? .ok_or(Error::NoTableEntryWithIndex(dispatcher_ref))?; @@ -488,7 +486,8 @@ fn call_in_wasm_module( &dispatcher, &[I32(func as _), I32(u32::from(offset) as i32), I32(data.len() as i32)], &mut function_executor, - ).map_err(Into::into) + ) + .map_err(Into::into) }, }; @@ -518,15 +517,12 @@ fn instantiate_module( ) -> Result<(ModuleRef, Vec, MemoryRef), Error> { let resolver = Resolver::new(host_functions, allow_missing_func_imports, heap_pages); // start module instantiation. Don't run 'start' function yet. 
- let intermediate_instance = ModuleInstance::new( - module, - &ImportsBuilder::new().with_resolver("env", &resolver), - )?; + let intermediate_instance = + ModuleInstance::new(module, &ImportsBuilder::new().with_resolver("env", &resolver))?; // Verify that the module has the heap base global variable. let _ = get_heap_base(intermediate_instance.not_started_instance())?; - // Get the memory reference. Runtimes should import memory, but to be backwards // compatible we also support exported memory. let memory = match resolver.import_memory.into_inner() { @@ -541,7 +537,7 @@ fn instantiate_module( memory.grow(Pages(heap_pages)).map_err(|_| Error::Runtime)?; memory - } + }, }; if intermediate_instance.has_start() { @@ -592,9 +588,7 @@ impl GlobalValsSnapshot { // the instance should be the same as used for preserving and // we iterate the same way it as we do it for preserving values that means that the // types should be the same and all the values are mutable. So no error is expected/ - global_ref - .set(*global_val) - .map_err(|_| WasmError::ApplySnapshotFailed)?; + global_ref.set(*global_val).map_err(|_| WasmError::ApplySnapshotFailed)?; } Ok(()) } @@ -624,7 +618,8 @@ impl WasmModule for WasmiRuntime { &self.module, &self.host_functions, self.allow_missing_func_imports, - ).map_err(|e| WasmError::Instantiation(e.to_string()))?; + ) + .map_err(|e| WasmError::Instantiation(e.to_string()))?; Ok(Box::new(WasmiInstance { instance, @@ -646,11 +641,11 @@ pub fn create_runtime( host_functions: Vec<&'static dyn Function>, allow_missing_func_imports: bool, ) -> Result { - let data_segments_snapshot = DataSegmentsSnapshot::take(&blob) - .map_err(|e| WasmError::Other(e.to_string()))?; + let data_segments_snapshot = + DataSegmentsSnapshot::take(&blob).map_err(|e| WasmError::Other(e.to_string()))?; - let module = Module::from_parity_wasm_module(blob.into_inner()) - .map_err(|_| WasmError::InvalidModule)?; + let module = + 
Module::from_parity_wasm_module(blob.into_inner()).map_err(|_| WasmError::InvalidModule)?; let global_vals_snapshot = { let (instance, _, _) = instantiate_module( @@ -734,7 +729,7 @@ impl WasmInstance for WasmiInstance { .as_global() .ok_or_else(|| format!("`{}` is not a global", name))? .get() - .into() + .into(), )), None => Ok(None), } diff --git a/client/executor/wasmtime/src/host.rs b/client/executor/wasmtime/src/host.rs index 3f5ac0560a6d7..ee0e82928db24 100644 --- a/client/executor/wasmtime/src/host.rs +++ b/client/executor/wasmtime/src/host.rs @@ -19,16 +19,17 @@ //! This module defines `HostState` and `HostContext` structs which provide logic and state //! required for execution of host. -use crate::instance_wrapper::InstanceWrapper; -use crate::util; -use std::{cell::RefCell, rc::Rc}; +use crate::{instance_wrapper::InstanceWrapper, util}; +use codec::{Decode, Encode}; use log::trace; -use codec::{Encode, Decode}; use sc_allocator::FreeingBumpHeapAllocator; -use sc_executor_common::error::Result; -use sc_executor_common::sandbox::{self, SandboxCapabilities, SupervisorFuncIndex}; +use sc_executor_common::{ + error::Result, + sandbox::{self, SandboxCapabilities, SupervisorFuncIndex}, +}; use sp_core::sandbox as sandbox_primitives; use sp_wasm_interface::{FunctionContext, MemoryId, Pointer, Sandbox, WordSize}; +use std::{cell::RefCell, rc::Rc}; use wasmtime::{Func, Val}; /// Wrapper type for pointer to a Wasm table entry. 
@@ -108,7 +109,7 @@ impl<'a> SandboxCapabilities for HostContext<'a> { "Supervisor function returned {} results, expected 1", ret_vals.len() ) - .into()); + .into()) } else { &ret_vals[0] }; @@ -116,9 +117,9 @@ impl<'a> SandboxCapabilities for HostContext<'a> { if let Some(ret_val) = ret_val.i64() { Ok(ret_val) } else { - return Err("Supervisor function returned unexpected result!".into()); + return Err("Supervisor function returned unexpected result!".into()) } - } + }, Err(err) => Err(err.to_string().into()), } } @@ -130,15 +131,11 @@ impl<'a> sp_wasm_interface::FunctionContext for HostContext<'a> { address: Pointer, dest: &mut [u8], ) -> sp_wasm_interface::Result<()> { - self.instance - .read_memory_into(address, dest) - .map_err(|e| e.to_string()) + self.instance.read_memory_into(address, dest).map_err(|e| e.to_string()) } fn write_memory(&mut self, address: Pointer, data: &[u8]) -> sp_wasm_interface::Result<()> { - self.instance - .write_memory_from(address, data) - .map_err(|e| e.to_string()) + self.instance.write_memory_from(address, data).map_err(|e| e.to_string()) } fn allocate_memory(&mut self, size: WordSize) -> sp_wasm_interface::Result> { @@ -166,11 +163,8 @@ impl<'a> Sandbox for HostContext<'a> { buf_ptr: Pointer, buf_len: WordSize, ) -> sp_wasm_interface::Result { - let sandboxed_memory = self - .sandbox_store - .borrow() - .memory(memory_id) - .map_err(|e| e.to_string())?; + let sandboxed_memory = + self.sandbox_store.borrow().memory(memory_id).map_err(|e| e.to_string())?; sandboxed_memory.with_direct_access(|sandboxed_memory| { let len = buf_len as usize; let src_range = match util::checked_range(offset as usize, len, sandboxed_memory.len()) @@ -200,11 +194,8 @@ impl<'a> Sandbox for HostContext<'a> { val_ptr: Pointer, val_len: WordSize, ) -> sp_wasm_interface::Result { - let sandboxed_memory = self - .sandbox_store - .borrow() - .memory(memory_id) - .map_err(|e| e.to_string())?; + let sandboxed_memory = + 
self.sandbox_store.borrow().memory(memory_id).map_err(|e| e.to_string())?; sandboxed_memory.with_direct_access_mut(|sandboxed_memory| { let len = val_len as usize; let supervisor_mem_size = self.instance.memory_size() as usize; @@ -259,11 +250,8 @@ impl<'a> Sandbox for HostContext<'a> { .map(Into::into) .collect::>(); - let instance = self - .sandbox_store - .borrow() - .instance(instance_id) - .map_err(|e| e.to_string())?; + let instance = + self.sandbox_store.borrow().instance(instance_id).map_err(|e| e.to_string())?; let result = instance.invoke(export_name, &args, self, state); match result { @@ -278,7 +266,7 @@ impl<'a> Sandbox for HostContext<'a> { .map_err(|_| "can't write return value")?; Ok(sandbox_primitives::ERR_OK) }) - } + }, Err(_) => Ok(sandbox_primitives::ERR_EXECUTION), } } diff --git a/client/executor/wasmtime/src/imports.rs b/client/executor/wasmtime/src/imports.rs index f66e3042fba52..688f30c89c049 100644 --- a/client/executor/wasmtime/src/imports.rs +++ b/client/executor/wasmtime/src/imports.rs @@ -21,8 +21,8 @@ use sc_executor_common::error::WasmError; use sp_wasm_interface::{Function, ValueType}; use std::any::Any; use wasmtime::{ - Extern, ExternType, Func, FuncType, ImportType, Limits, Memory, MemoryType, Module, - Trap, Val, Store, + Extern, ExternType, Func, FuncType, ImportType, Limits, Memory, MemoryType, Module, Store, + Trap, Val, }; pub struct Imports { @@ -51,36 +51,29 @@ pub fn resolve_imports( "host doesn't provide any imports from non-env module: {}:{}", import_ty.module(), name, - ))); + ))) } let resolved = match name { "memory" => { memory_import_index = Some(externs.len()); resolve_memory_import(store, &import_ty, heap_pages)? 
- } - _ => resolve_func_import( - store, - &import_ty, - host_functions, - allow_missing_func_imports, - )?, + }, + _ => + resolve_func_import(store, &import_ty, host_functions, allow_missing_func_imports)?, }; externs.push(resolved); } - Ok(Imports { - memory_import_index, - externs, - }) + Ok(Imports { memory_import_index, externs }) } /// When the module linking proposal is supported the import's name can be `None`. /// Because we are not using this proposal we could safely unwrap the name. /// However, we opt for an error in order to avoid panics at all costs. fn import_name<'a, 'b: 'a>(import: &'a ImportType<'b>) -> Result<&'a str, WasmError> { - let name = import.name().ok_or_else(|| + let name = import.name().ok_or_else(|| { WasmError::Other("The module linking proposal is not supported.".to_owned()) - )?; + })?; Ok(name) } @@ -91,21 +84,17 @@ fn resolve_memory_import( ) -> Result { let requested_memory_ty = match import_ty.ty() { ExternType::Memory(memory_ty) => memory_ty, - _ => { + _ => return Err(WasmError::Other(format!( "this import must be of memory type: {}:{}", import_ty.module(), import_name(&import_ty)?, - ))) - } + ))), }; // Increment the min (a.k.a initial) number of pages by `heap_pages` and check if it exceeds the // maximum specified by the import. 
- let initial = requested_memory_ty - .limits() - .min() - .saturating_add(heap_pages); + let initial = requested_memory_ty.limits().min().saturating_add(heap_pages); if let Some(max) = requested_memory_ty.limits().max() { if initial > max { return Err(WasmError::Other(format!( @@ -113,7 +102,7 @@ fn resolve_memory_import( by the runtime wasm module {}", initial, max, - ))); + ))) } } @@ -142,32 +131,29 @@ fn resolve_func_import( "host doesn't provide any non function imports besides 'memory': {}:{}", import_ty.module(), name, - ))); - } + ))) + }, }; - let host_func = match host_functions - .iter() - .find(|host_func| host_func.name() == name) - { + let host_func = match host_functions.iter().find(|host_func| host_func.name() == name) { Some(host_func) => host_func, None if allow_missing_func_imports => { - return Ok(MissingHostFuncHandler::new(import_ty)?.into_extern(store, &func_ty)); - } + return Ok(MissingHostFuncHandler::new(import_ty)?.into_extern(store, &func_ty)) + }, None => { return Err(WasmError::Other(format!( "host doesn't provide such function: {}:{}", import_ty.module(), name, - ))); - } + ))) + }, }; if &func_ty != &wasmtime_func_sig(*host_func) { return Err(WasmError::Other(format!( "signature mismatch for: {}:{}", import_ty.module(), name, - ))); + ))) } Ok(HostFuncHandler::new(*host_func).into_extern(store)) @@ -218,7 +204,7 @@ fn call_static( ); wasmtime_results[0] = util::into_wasmtime_val(ret_val); Ok(()) - } + }, Ok(None) => { debug_assert!( wasmtime_results.len() == 0, @@ -226,26 +212,22 @@ fn call_static( correspond to the number of results returned by the host function", ); Ok(()) - } + }, Err(msg) => Err(Trap::new(msg)), } } impl HostFuncHandler { fn new(host_func: &'static dyn Function) -> Self { - Self { - host_func, - } + Self { host_func } } fn into_extern(self, store: &Store) -> Extern { let host_func = self.host_func; let func_ty = wasmtime_func_sig(self.host_func); - let func = Func::new(store, func_ty, - move |_, params, result| 
{ - call_static(host_func, params, result) - } - ); + let func = Func::new(store, func_ty, move |_, params, result| { + call_static(host_func, params, result) + }); Extern::Func(func) } } @@ -266,28 +248,17 @@ impl MissingHostFuncHandler { fn into_extern(self, store: &Store, func_ty: &FuncType) -> Extern { let Self { module, name } = self; - let func = Func::new(store, func_ty.clone(), - move |_, _, _| Err(Trap::new(format!( - "call to a missing function {}:{}", - module, name - ))) - ); + let func = Func::new(store, func_ty.clone(), move |_, _, _| { + Err(Trap::new(format!("call to a missing function {}:{}", module, name))) + }); Extern::Func(func) } } fn wasmtime_func_sig(func: &dyn Function) -> wasmtime::FuncType { let signature = func.signature(); - let params = signature - .args - .iter() - .cloned() - .map(into_wasmtime_val_type); - let results = signature - .return_value - .iter() - .cloned() - .map(into_wasmtime_val_type); + let params = signature.args.iter().cloned().map(into_wasmtime_val_type); + let results = signature.return_value.iter().cloned().map(into_wasmtime_val_type); wasmtime::FuncType::new(params, results) } diff --git a/client/executor/wasmtime/src/instance_wrapper.rs b/client/executor/wasmtime/src/instance_wrapper.rs index 816099aee8049..80cf2b60f4924 100644 --- a/client/executor/wasmtime/src/instance_wrapper.rs +++ b/client/executor/wasmtime/src/instance_wrapper.rs @@ -19,26 +19,23 @@ //! Defines data and logic needed for interaction with an WebAssembly instance of a substrate //! runtime module. 
-use crate::util; -use crate::imports::Imports; +use crate::{imports::Imports, util}; -use std::{slice, marker}; use sc_executor_common::{ error::{Error, Result}, runtime_blob, wasm_runtime::InvokeMethod, }; -use sp_wasm_interface::{Pointer, WordSize, Value}; -use wasmtime::{Instance, Module, Memory, Table, Val, Func, Extern, Global, Store}; +use sp_wasm_interface::{Pointer, Value, WordSize}; +use std::{marker, slice}; +use wasmtime::{Extern, Func, Global, Instance, Memory, Module, Store, Table, Val}; /// Invoked entrypoint format. pub enum EntryPointType { /// Direct call. /// /// Call is made by providing only payload reference and length. - Direct { - entrypoint: wasmtime::TypedFunc<(u32, u32), u64>, - }, + Direct { entrypoint: wasmtime::TypedFunc<(u32, u32), u64> }, /// Indirect call. /// /// Call is made by providing payload reference and length, and extra argument @@ -66,17 +63,10 @@ impl EntryPoint { } match self.call_type { - EntryPointType::Direct { ref entrypoint } => { - entrypoint.call((data_ptr, data_len)).map_err(handle_trap) - } - EntryPointType::Wrapped { - func, - ref dispatcher, - } => { - dispatcher - .call((func, data_ptr, data_len)) - .map_err(handle_trap) - } + EntryPointType::Direct { ref entrypoint } => + entrypoint.call((data_ptr, data_len)).map_err(handle_trap), + EntryPointType::Wrapped { func, ref dispatcher } => + dispatcher.call((func, data_ptr, data_len)).map_err(handle_trap), } } @@ -85,9 +75,7 @@ impl EntryPoint { .typed::<(u32, u32), u64>() .map_err(|_| "Invalid signature for direct entry point")? .clone(); - Ok(Self { - call_type: EntryPointType::Direct { entrypoint }, - }) + Ok(Self { call_type: EntryPointType::Direct { entrypoint } }) } pub fn wrapped( @@ -98,9 +86,7 @@ impl EntryPoint { .typed::<(u32, u32, u32), u64>() .map_err(|_| "Invalid signature for wrapped entry point")? 
.clone(); - Ok(Self { - call_type: EntryPointType::Wrapped { func, dispatcher }, - }) + Ok(Self { call_type: EntryPointType::Wrapped { func, dispatcher } }) } } @@ -127,7 +113,6 @@ fn extern_memory(extern_: &Extern) -> Option<&Memory> { } } - fn extern_global(extern_: &Extern) -> Option<&Global> { match extern_ { Extern::Global(glob) => Some(glob), @@ -156,15 +141,13 @@ impl InstanceWrapper { .map_err(|e| Error::from(format!("cannot instantiate: {}", e)))?; let memory = match imports.memory_import_index { - Some(memory_idx) => { - extern_memory(&imports.externs[memory_idx]) - .expect("only memory can be at the `memory_idx`; qed") - .clone() - } + Some(memory_idx) => extern_memory(&imports.externs[memory_idx]) + .expect("only memory can be at the `memory_idx`; qed") + .clone(), None => { let memory = get_linear_memory(&instance)?; if !memory.grow(heap_pages).is_ok() { - return Err("failed top increase the linear memory size".into()); + return Err("failed top increase the linear memory size".into()) } memory }, @@ -186,42 +169,38 @@ impl InstanceWrapper { Ok(match method { InvokeMethod::Export(method) => { // Resolve the requested method and verify that it has a proper signature. - let export = self - .instance - .get_export(method) - .ok_or_else(|| Error::from(format!("Exported method {} is not found", method)))?; + let export = self.instance.get_export(method).ok_or_else(|| { + Error::from(format!("Exported method {} is not found", method)) + })?; let func = extern_func(&export) .ok_or_else(|| Error::from(format!("Export {} is not a function", method)))? .clone(); - EntryPoint::direct(func) - .map_err(|_| - Error::from(format!( - "Exported function '{}' has invalid signature.", - method, - )) - )? + EntryPoint::direct(func).map_err(|_| { + Error::from(format!("Exported function '{}' has invalid signature.", method,)) + })? 
}, InvokeMethod::Table(func_ref) => { - let table = self.instance.get_table("__indirect_function_table").ok_or(Error::NoTable)?; - let val = table.get(func_ref) - .ok_or(Error::NoTableEntryWithIndex(func_ref))?; + let table = + self.instance.get_table("__indirect_function_table").ok_or(Error::NoTable)?; + let val = table.get(func_ref).ok_or(Error::NoTableEntryWithIndex(func_ref))?; let func = val .funcref() .ok_or(Error::TableElementIsNotAFunction(func_ref))? .ok_or(Error::FunctionRefIsNull(func_ref))? .clone(); - EntryPoint::direct(func) - .map_err(|_| - Error::from(format!( - "Function @{} in exported table has invalid signature for direct call.", - func_ref, - )) - )? - }, + EntryPoint::direct(func).map_err(|_| { + Error::from(format!( + "Function @{} in exported table has invalid signature for direct call.", + func_ref, + )) + })? + }, InvokeMethod::TableWithWrapper { dispatcher_ref, func } => { - let table = self.instance.get_table("__indirect_function_table").ok_or(Error::NoTable)?; - let val = table.get(dispatcher_ref) + let table = + self.instance.get_table("__indirect_function_table").ok_or(Error::NoTable)?; + let val = table + .get(dispatcher_ref) .ok_or(Error::NoTableEntryWithIndex(dispatcher_ref))?; let dispatcher = val .funcref() @@ -229,13 +208,12 @@ impl InstanceWrapper { .ok_or(Error::FunctionRefIsNull(dispatcher_ref))? .clone(); - EntryPoint::wrapped(dispatcher, func) - .map_err(|_| - Error::from(format!( - "Function @{} in exported table has invalid signature for wrapped call.", - dispatcher_ref, - )) - )? + EntryPoint::wrapped(dispatcher, func).map_err(|_| { + Error::from(format!( + "Function @{} in exported table has invalid signature for wrapped call.", + dispatcher_ref, + )) + })? }, }) } @@ -426,7 +404,7 @@ impl InstanceWrapper { /// relied upon. Thus this function acts as a hint. pub fn decommit(&self) { if self.memory.data_size() == 0 { - return; + return } cfg_if::cfg_if! 
{ diff --git a/client/executor/wasmtime/src/lib.rs b/client/executor/wasmtime/src/lib.rs index 74b1150f06aea..da1a610dd8d93 100644 --- a/client/executor/wasmtime/src/lib.rs +++ b/client/executor/wasmtime/src/lib.rs @@ -28,6 +28,6 @@ mod util; mod tests; pub use runtime::{ - create_runtime, create_runtime_from_artifact, prepare_runtime_artifact, Config, Semantics, - DeterministicStackLimit, + create_runtime, create_runtime_from_artifact, prepare_runtime_artifact, Config, + DeterministicStackLimit, Semantics, }; diff --git a/client/executor/wasmtime/src/runtime.rs b/client/executor/wasmtime/src/runtime.rs index 0a3c0488a247d..b69eac6266bb1 100644 --- a/client/executor/wasmtime/src/runtime.rs +++ b/client/executor/wasmtime/src/runtime.rs @@ -18,22 +18,26 @@ //! Defines the compiled Wasm runtime that uses Wasmtime internally. -use crate::host::HostState; -use crate::imports::{Imports, resolve_imports}; -use crate::instance_wrapper::{InstanceWrapper, EntryPoint}; -use crate::state_holder; - -use std::{path::PathBuf, rc::Rc}; -use std::sync::Arc; -use std::path::Path; +use crate::{ + host::HostState, + imports::{resolve_imports, Imports}, + instance_wrapper::{EntryPoint, InstanceWrapper}, + state_holder, +}; + +use sc_allocator::FreeingBumpHeapAllocator; use sc_executor_common::{ error::{Result, WasmError}, runtime_blob::{DataSegmentsSnapshot, ExposedMutableGlobalsSet, GlobalsSnapshot, RuntimeBlob}, - wasm_runtime::{WasmModule, WasmInstance, InvokeMethod}, + wasm_runtime::{InvokeMethod, WasmInstance, WasmModule}, }; -use sc_allocator::FreeingBumpHeapAllocator; use sp_runtime_interface::unpack_ptr_and_len; -use sp_wasm_interface::{Function, Pointer, WordSize, Value}; +use sp_wasm_interface::{Function, Pointer, Value, WordSize}; +use std::{ + path::{Path, PathBuf}, + rc::Rc, + sync::Arc, +}; use wasmtime::{Engine, Store}; enum Strategy { @@ -102,7 +106,8 @@ impl WasmModule for WasmtimeRuntime { // the mutable globals were collected. 
Here, it is easy to see that there is only a single // runtime blob and thus it's the same that was used for both creating the instance and // collecting the mutable globals. - let globals_snapshot = GlobalsSnapshot::take(&snapshot_data.mutable_globals, &instance_wrapper); + let globals_snapshot = + GlobalsSnapshot::take(&snapshot_data.mutable_globals, &instance_wrapper); Strategy::FastInstanceReuse { instance_wrapper: Rc::new(instance_wrapper), @@ -150,14 +155,15 @@ impl WasmInstance for WasmtimeInstance { globals_snapshot.apply(&**instance_wrapper); let allocator = FreeingBumpHeapAllocator::new(*heap_base); - let result = perform_call(data, Rc::clone(&instance_wrapper), entrypoint, allocator); + let result = + perform_call(data, Rc::clone(&instance_wrapper), entrypoint, allocator); // Signal to the OS that we are done with the linear memory and that it can be // reclaimed. instance_wrapper.decommit(); result - } + }, Strategy::RecreateInstance(instance_creator) => { let instance_wrapper = instance_creator.instantiate()?; let heap_base = instance_wrapper.extract_heap_base()?; @@ -165,18 +171,16 @@ impl WasmInstance for WasmtimeInstance { let allocator = FreeingBumpHeapAllocator::new(heap_base); perform_call(data, Rc::new(instance_wrapper), entrypoint, allocator) - } + }, } } fn get_global_const(&self, name: &str) -> Result> { match &self.strategy { - Strategy::FastInstanceReuse { - instance_wrapper, .. - } => instance_wrapper.get_global_val(name), - Strategy::RecreateInstance(instance_creator) => { - instance_creator.instantiate()?.get_global_val(name) - } + Strategy::FastInstanceReuse { instance_wrapper, .. } => + instance_wrapper.get_global_val(name), + Strategy::RecreateInstance(instance_creator) => + instance_creator.instantiate()?.get_global_val(name), } } @@ -186,10 +190,9 @@ impl WasmInstance for WasmtimeInstance { // We do not keep the wasm instance around, therefore there is no linear memory // associated with it. 
None - } - Strategy::FastInstanceReuse { - instance_wrapper, .. - } => Some(instance_wrapper.base_ptr()), + }, + Strategy::FastInstanceReuse { instance_wrapper, .. } => + Some(instance_wrapper.base_ptr()), } } } @@ -237,9 +240,8 @@ fn common_config(semantics: &Semantics) -> std::result::Result, ) -> std::result::Result { - do_create_runtime( - CodeSupplyMode::Artifact { compiled_artifact }, - config, - host_functions, - ) + do_create_runtime(CodeSupplyMode::Artifact { compiled_artifact }, config, host_functions) } /// # Safety @@ -456,16 +454,13 @@ unsafe fn do_create_runtime( let module = wasmtime::Module::new(&engine, &blob.serialize()) .map_err(|e| WasmError::Other(format!("cannot create module: {}", e)))?; - (module, Some(InstanceSnapshotData { - data_segments_snapshot, - mutable_globals, - })) + (module, Some(InstanceSnapshotData { data_segments_snapshot, mutable_globals })) } else { let module = wasmtime::Module::new(&engine, &blob.serialize()) .map_err(|e| WasmError::Other(format!("cannot create module: {}", e)))?; (module, None) } - } + }, CodeSupplyMode::Artifact { compiled_artifact } => { // SAFETY: The unsafity of `deserialize` is covered by this function. The // responsibilities to maintain the invariants are passed to the caller. @@ -473,16 +468,10 @@ unsafe fn do_create_runtime( .map_err(|e| WasmError::Other(format!("cannot deserialize module: {}", e)))?; (module, None) - } + }, }; - Ok(WasmtimeRuntime { - module: Arc::new(module), - snapshot_data, - config, - host_functions, - engine, - }) + Ok(WasmtimeRuntime { module: Arc::new(module), snapshot_data, config, host_functions, engine }) } fn instrument( diff --git a/client/executor/wasmtime/src/tests.rs b/client/executor/wasmtime/src/tests.rs index 4066a44194a13..7933578b80499 100644 --- a/client/executor/wasmtime/src/tests.rs +++ b/client/executor/wasmtime/src/tests.rs @@ -16,12 +16,9 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use sc_executor_common::{ - runtime_blob::RuntimeBlob, - wasm_runtime::WasmModule, -}; +use codec::{Decode as _, Encode as _}; +use sc_executor_common::{runtime_blob::RuntimeBlob, wasm_runtime::WasmModule}; use sc_runtime_test::wasm_binary_unwrap; -use codec::{Encode as _, Decode as _}; use std::sync::Arc; type HostFunctions = sp_io::SubstrateHostFunctions; @@ -68,7 +65,7 @@ impl RuntimeBuilder { Some(wat) => { wasm = wat::parse_str(wat).unwrap(); &wasm - } + }, }; RuntimeBlob::uncompress_if_needed(&wasm) @@ -83,21 +80,20 @@ impl RuntimeBuilder { cache_path: None, semantics: crate::Semantics { fast_instance_reuse: self.fast_instance_reuse, - deterministic_stack_limit: - match self.deterministic_stack { - true => Some(crate::DeterministicStackLimit { - logical_max: 65536, - native_stack_max: 256 * 1024 * 1024, - }), - false => None, - }, + deterministic_stack_limit: match self.deterministic_stack { + true => Some(crate::DeterministicStackLimit { + logical_max: 65536, + native_stack_max: 256 * 1024 * 1024, + }), + false => None, + }, canonicalize_nans: self.canonicalize_nans, }, }, { use sp_wasm_interface::HostFunctions as _; HostFunctions::host_functions() - } + }, ) .expect("cannot create runtime"); @@ -113,9 +109,7 @@ fn test_nan_canonicalization() { builder.build() }; - let instance = runtime - .new_instance() - .expect("failed to instantiate a runtime"); + let instance = runtime.new_instance().expect("failed to instantiate a runtime"); /// A NaN with canonical payload bits. 
const CANONICAL_NAN_BITS: u32 = 0x7fc00000; @@ -142,10 +136,7 @@ fn test_nan_canonicalization() { let params = (u32::to_le_bytes(ARBITRARY_NAN_BITS), u32::to_le_bytes(1)).encode(); let res = { - let raw_result = instance.call_export( - "test_fp_f32add", - ¶ms, - ).unwrap(); + let raw_result = instance.call_export("test_fp_f32add", ¶ms).unwrap(); u32::from_le_bytes(<[u8; 4]>::decode(&mut &raw_result[..]).unwrap()) }; assert_eq!(res, CANONICAL_NAN_BITS); @@ -161,9 +152,7 @@ fn test_stack_depth_reaching() { builder.deterministic_stack(true); builder.build() }; - let instance = runtime - .new_instance() - .expect("failed to instantiate a runtime"); + let instance = runtime.new_instance().expect("failed to instantiate a runtime"); let err = instance.call_export("test-many-locals", &[]).unwrap_err(); diff --git a/client/finality-grandpa-warp-sync/src/lib.rs b/client/finality-grandpa-warp-sync/src/lib.rs index c0ef93e625fd8..c74c4d15f9f45 100644 --- a/client/finality-grandpa-warp-sync/src/lib.rs +++ b/client/finality-grandpa-warp-sync/src/lib.rs @@ -17,17 +17,20 @@ //! Helper for handling (i.e. answering) grandpa warp sync requests from a remote peer. 
use codec::{Decode, Encode}; -use sc_network::config::{IncomingRequest, OutgoingResponse, ProtocolId, RequestResponseConfig}; -use sc_client_api::Backend; -use sp_runtime::traits::NumberFor; -use futures::channel::{mpsc, oneshot}; -use futures::stream::StreamExt; +use futures::{ + channel::{mpsc, oneshot}, + stream::StreamExt, +}; use log::debug; -use sp_runtime::traits::Block as BlockT; -use std::time::Duration; -use std::sync::Arc; -use sc_service::{SpawnTaskHandle, config::{Configuration, Role}}; +use sc_client_api::Backend; use sc_finality_grandpa::SharedAuthoritySet; +use sc_network::config::{IncomingRequest, OutgoingResponse, ProtocolId, RequestResponseConfig}; +use sc_service::{ + config::{Configuration, Role}, + SpawnTaskHandle, +}; +use sp_runtime::traits::{Block as BlockT, NumberFor}; +use std::{sync::Arc, time::Duration}; mod proof; @@ -50,11 +53,8 @@ where generate_request_response_config(protocol_id.clone()) } else { // Allow both outgoing and incoming requests. - let (handler, request_response_config) = GrandpaWarpSyncRequestHandler::new( - protocol_id.clone(), - backend.clone(), - authority_set, - ); + let (handler, request_response_config) = + GrandpaWarpSyncRequestHandler::new(protocol_id.clone(), backend.clone(), authority_set); spawn_handle.spawn("grandpa-warp-sync", handler.run()); request_response_config } @@ -108,12 +108,7 @@ impl> GrandpaWarpSyncRequestHandler> GrandpaWarpSyncRequestHandler, pending_response: oneshot::Sender, ) -> Result<(), HandleRequestError> - where NumberFor: sc_finality_grandpa::BlockNumberOps, + where + NumberFor: sc_finality_grandpa::BlockNumberOps, { let request = Request::::decode(&mut &payload[..])?; @@ -133,26 +129,29 @@ impl> GrandpaWarpSyncRequestHandler: sc_finality_grandpa::BlockNumberOps, + where + NumberFor: sc_finality_grandpa::BlockNumberOps, { while let Some(request) = self.request_receiver.next().await { let IncomingRequest { peer, payload, pending_response } = request; match self.handle_request(payload, 
pending_response) { - Ok(()) => debug!(target: LOG_TARGET, "Handled grandpa warp sync request from {}.", peer), + Ok(()) => + debug!(target: LOG_TARGET, "Handled grandpa warp sync request from {}.", peer), Err(e) => debug!( target: LOG_TARGET, - "Failed to handle grandpa warp sync request from {}: {}", - peer, e, + "Failed to handle grandpa warp sync request from {}: {}", peer, e, ), } } diff --git a/client/finality-grandpa-warp-sync/src/proof.rs b/client/finality-grandpa-warp-sync/src/proof.rs index 87a6220267827..d2484a800e63b 100644 --- a/client/finality-grandpa-warp-sync/src/proof.rs +++ b/client/finality-grandpa-warp-sync/src/proof.rs @@ -72,7 +72,7 @@ impl WarpSyncProof { if begin_number > blockchain.info().finalized_number { return Err(HandleRequestError::InvalidRequest( "Start block is not finalized".to_string(), - )); + )) } let canon_hash = blockchain.hash(begin_number)?.expect( @@ -84,15 +84,15 @@ impl WarpSyncProof { if canon_hash != begin { return Err(HandleRequestError::InvalidRequest( "Start block is not in the finalized chain".to_string(), - )); + )) } let mut proofs = Vec::new(); let mut proofs_encoded_len = 0; let mut proof_limit_reached = false; - let set_changes = set_changes.iter_from(begin_number) - .ok_or(HandleRequestError::MissingData)?; + let set_changes = + set_changes.iter_from(begin_number).ok_or(HandleRequestError::MissingData)?; for (_, last_block) in set_changes { let header = blockchain.header(BlockId::Number(*last_block))?.expect( @@ -105,7 +105,7 @@ impl WarpSyncProof { // if it doesn't contain a signal for standard change then the set must have changed // through a forced changed, in which case we stop collecting proofs as the chain of // trust in authority handoffs was broken. 
- break; + break } let justification = blockchain @@ -119,10 +119,7 @@ impl WarpSyncProof { let justification = GrandpaJustification::::decode(&mut &justification[..])?; - let proof = WarpSyncFragment { - header: header.clone(), - justification, - }; + let proof = WarpSyncFragment { header: header.clone(), justification }; let proof_size = proof.encoded_size(); // Check for the limit. We remove some bytes from the maximum size, because we're only @@ -130,7 +127,7 @@ impl WarpSyncProof { // room for rest of the data (the size of the `Vec` and the boolean). if proofs_encoded_len + proof_size >= MAX_WARP_SYNC_PROOF_SIZE - 50 { proof_limit_reached = true; - break; + break } proofs_encoded_len += proof_size; @@ -158,19 +155,13 @@ impl WarpSyncProof { let header = blockchain.header(BlockId::Hash(latest_justification.target().1))? .expect("header hash corresponds to a justification in db; must exist in db as well; qed."); - proofs.push(WarpSyncFragment { - header, - justification: latest_justification, - }) + proofs.push(WarpSyncFragment { header, justification: latest_justification }) } true }; - let final_outcome = WarpSyncProof { - proofs, - is_finished, - }; + let final_outcome = WarpSyncProof { proofs, is_finished }; debug_assert!(final_outcome.encoded_size() <= MAX_WARP_SYNC_PROOF_SIZE); Ok(final_outcome) } @@ -196,8 +187,8 @@ impl WarpSyncProof { if proof.justification.target().1 != proof.header.hash() { return Err(HandleRequestError::InvalidProof( - "mismatch between header and justification".to_owned() - )); + "mismatch between header and justification".to_owned(), + )) } if let Some(scheduled_change) = find_scheduled_change::(&proof.header) { @@ -208,7 +199,7 @@ impl WarpSyncProof { // set change. 
return Err(HandleRequestError::InvalidProof( "Header is missing authority set change digest".to_string(), - )); + )) } } @@ -249,12 +240,7 @@ mod tests { let mut authority_set_changes = Vec::new(); for n in 1..=100 { - let mut block = client - .new_block(Default::default()) - .unwrap() - .build() - .unwrap() - .block; + let mut block = client.new_block(Default::default()).unwrap().build().unwrap().block; let mut new_authorities = None; @@ -277,10 +263,7 @@ mod tests { let digest = sp_runtime::generic::DigestItem::Consensus( sp_finality_grandpa::GRANDPA_ENGINE_ID, sp_finality_grandpa::ConsensusLog::ScheduledChange( - sp_finality_grandpa::ScheduledChange { - delay: 0u64, - next_authorities, - }, + sp_finality_grandpa::ScheduledChange { delay: 0u64, next_authorities }, ) .encode(), ); @@ -300,10 +283,7 @@ mod tests { let mut precommits = Vec::new(); for keyring in ¤t_authorities { - let precommit = finality_grandpa::Precommit { - target_hash, - target_number, - }; + let precommit = finality_grandpa::Precommit { target_hash, target_number }; let msg = finality_grandpa::Message::Precommit(precommit.clone()); let encoded = sp_finality_grandpa::localized_payload(42, current_set_id, &msg); @@ -318,18 +298,14 @@ mod tests { precommits.push(precommit); } - let commit = finality_grandpa::Commit { - target_hash, - target_number, - precommits, - }; + let commit = finality_grandpa::Commit { target_hash, target_number, precommits }; let justification = GrandpaJustification::from_commit(&client, 42, commit).unwrap(); client .finalize_block( BlockId::Hash(target_hash), - Some((GRANDPA_ENGINE_ID, justification.encode())) + Some((GRANDPA_ENGINE_ID, justification.encode())), ) .unwrap(); diff --git a/client/finality-grandpa/rpc/src/finality.rs b/client/finality-grandpa/rpc/src/finality.rs index cfd8f68e5ce60..62e3502fc7180 100644 --- a/client/finality-grandpa/rpc/src/finality.rs +++ b/client/finality-grandpa/rpc/src/finality.rs @@ -16,7 +16,7 @@ // You should have received a copy of 
the GNU General Public License // along with this program. If not, see . -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; use sc_finality_grandpa::FinalityProofProvider; use sp_runtime::traits::{Block as BlockT, NumberFor}; @@ -44,7 +44,6 @@ where &self, block: NumberFor, ) -> Result, sc_finality_grandpa::FinalityProofError> { - self.prove_finality(block) - .map(|x| x.map(|y| EncodedFinalityProof(y.into()))) + self.prove_finality(block).map(|x| x.map(|y| EncodedFinalityProof(y.into()))) } } diff --git a/client/finality-grandpa/rpc/src/lib.rs b/client/finality-grandpa/rpc/src/lib.rs index 2e7354e5fda68..42d8630d10f83 100644 --- a/client/finality-grandpa/rpc/src/lib.rs +++ b/client/finality-grandpa/rpc/src/lib.rs @@ -19,17 +19,16 @@ //! RPC API for GRANDPA. #![warn(missing_docs)] -use std::sync::Arc; -use futures::{FutureExt, TryFutureExt, TryStreamExt, StreamExt}; -use log::warn; -use jsonrpc_derive::rpc; -use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId, manager::SubscriptionManager}; +use futures::{FutureExt, StreamExt, TryFutureExt, TryStreamExt}; use jsonrpc_core::futures::{ + future::{Executor as Executor01, Future as Future01}, sink::Sink as Sink01, stream::Stream as Stream01, - future::Future as Future01, - future::Executor as Executor01, }; +use jsonrpc_derive::rpc; +use jsonrpc_pubsub::{manager::SubscriptionManager, typed::Subscriber, SubscriptionId}; +use log::warn; +use std::sync::Arc; mod error; mod finality; @@ -40,8 +39,8 @@ use sc_finality_grandpa::GrandpaJustificationStream; use sp_runtime::traits::{Block as BlockT, NumberFor}; use finality::{EncodedFinalityProof, RpcFinalityProofProvider}; -use report::{ReportAuthoritySet, ReportVoterState, ReportedRoundStates}; use notification::JustificationNotification; +use report::{ReportAuthoritySet, ReportVoterState, ReportedRoundStates}; type FutureResult = Box + Send>; @@ -67,7 +66,7 @@ pub trait GrandpaApi { fn subscribe_justifications( &self, metadata: Self::Metadata, - 
subscriber: Subscriber + subscriber: Subscriber, ); /// Unsubscribe from receiving notifications about recently finalized blocks. @@ -79,16 +78,13 @@ pub trait GrandpaApi { fn unsubscribe_justifications( &self, metadata: Option, - id: SubscriptionId + id: SubscriptionId, ) -> jsonrpc_core::Result; /// Prove finality for the given block number by returning the Justification for the last block /// in the set and all the intermediary headers to link them together. #[rpc(name = "grandpa_proveFinality")] - fn prove_finality( - &self, - block: Number, - ) -> FutureResult>; + fn prove_finality(&self, block: Number) -> FutureResult>; } /// Implements the GrandpaApi RPC trait for interacting with GRANDPA. @@ -115,13 +111,7 @@ impl E: Executor01 + Send>> + Send + Sync + 'static, { let manager = SubscriptionManager::new(Arc::new(executor)); - Self { - authority_set, - voter_state, - justification_stream, - manager, - finality_proof_provider, - } + Self { authority_set, voter_state, justification_stream, manager, finality_proof_provider } } } @@ -145,10 +135,12 @@ where fn subscribe_justifications( &self, _metadata: Self::Metadata, - subscriber: Subscriber + subscriber: Subscriber, ) { - let stream = self.justification_stream.subscribe() - .map(|x| Ok::<_,()>(JustificationNotification::from(x))) + let stream = self + .justification_stream + .subscribe() + .map(|x| Ok::<_, ()>(JustificationNotification::from(x))) .map_err(|e| warn!("Notification stream error: {:?}", e)) .compat(); @@ -163,7 +155,7 @@ where fn unsubscribe_justifications( &self, _metadata: Option, - id: SubscriptionId + id: SubscriptionId, ) -> jsonrpc_core::Result { Ok(self.manager.cancel(id)) } @@ -181,7 +173,7 @@ where error::Error::ProveFinalityFailed(e) }) .map_err(jsonrpc_core::Error::from) - .compat() + .compat(), ) } } @@ -189,14 +181,13 @@ where #[cfg(test)] mod tests { use super::*; + use jsonrpc_core::{types::Params, Notification, Output}; use std::{collections::HashSet, convert::TryInto, sync::Arc}; - 
use jsonrpc_core::{Notification, Output, types::Params}; - use parity_scale_codec::{Encode, Decode}; + use parity_scale_codec::{Decode, Encode}; use sc_block_builder::{BlockBuilder, RecordProof}; use sc_finality_grandpa::{ - report, AuthorityId, GrandpaJustificationSender, GrandpaJustification, - FinalityProof, + report, AuthorityId, FinalityProof, GrandpaJustification, GrandpaJustificationSender, }; use sp_blockchain::HeaderBackend; use sp_core::crypto::Public; @@ -204,9 +195,7 @@ mod tests { use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; use substrate_test_runtime_client::{ runtime::{Block, Header, H256}, - DefaultTestClientBuilderExt, - TestClientBuilderExt, - TestClientBuilder, + DefaultTestClientBuilderExt, TestClientBuilder, TestClientBuilderExt, }; struct TestAuthoritySet; @@ -253,14 +242,14 @@ mod tests { impl RpcFinalityProofProvider for TestFinalityProofProvider { fn rpc_prove_finality( &self, - _block: NumberFor + _block: NumberFor, ) -> Result, sc_finality_grandpa::FinalityProofError> { Ok(Some(EncodedFinalityProof( self.finality_proof .as_ref() .expect("Don't call rpc_prove_finality without setting the FinalityProof") .encode() - .into() + .into(), ))) } } @@ -290,17 +279,14 @@ mod tests { let background_rounds = vec![(1, past_round_state)].into_iter().collect(); - Some(report::VoterState { - background_rounds, - best_round: (2, best_round_state), - }) + Some(report::VoterState { background_rounds, best_round: (2, best_round_state) }) } } - fn setup_io_handler(voter_state: VoterState) -> ( - jsonrpc_core::MetaIoHandler, - GrandpaJustificationSender, - ) where + fn setup_io_handler( + voter_state: VoterState, + ) -> (jsonrpc_core::MetaIoHandler, GrandpaJustificationSender) + where VoterState: ReportVoterState + Send + Sync + 'static, { setup_io_handler_with_finality_proofs(voter_state, None) @@ -309,10 +295,8 @@ mod tests { fn setup_io_handler_with_finality_proofs( voter_state: VoterState, finality_proof: Option>, - ) -> ( - 
jsonrpc_core::MetaIoHandler, - GrandpaJustificationSender, - ) where + ) -> (jsonrpc_core::MetaIoHandler, GrandpaJustificationSender) + where VoterState: ReportVoterState + Send + Sync + 'static, { let (justification_sender, justification_stream) = GrandpaJustificationStream::channel(); @@ -345,7 +329,7 @@ mod tests { #[test] fn working_rpc_handler() { - let (io, _) = setup_io_handler(TestVoterState); + let (io, _) = setup_io_handler(TestVoterState); let request = r#"{"jsonrpc":"2.0","method":"grandpa_roundState","params":[],"id":1}"#; let response = "{\"jsonrpc\":\"2.0\",\"result\":{\ @@ -378,7 +362,8 @@ mod tests { let (meta, _) = setup_session(); // Subscribe - let sub_request = r#"{"jsonrpc":"2.0","method":"grandpa_subscribeJustifications","params":[],"id":1}"#; + let sub_request = + r#"{"jsonrpc":"2.0","method":"grandpa_subscribeJustifications","params":[],"id":1}"#; let resp = io.handle_request_sync(sub_request, meta.clone()); let resp: Output = serde_json::from_str(&resp.unwrap()).unwrap(); @@ -410,7 +395,8 @@ mod tests { let (meta, _) = setup_session(); // Subscribe - let sub_request = r#"{"jsonrpc":"2.0","method":"grandpa_subscribeJustifications","params":[],"id":1}"#; + let sub_request = + r#"{"jsonrpc":"2.0","method":"grandpa_subscribeJustifications","params":[],"id":1}"#; let resp = io.handle_request_sync(sub_request, meta.clone()); let resp: Output = serde_json::from_str(&resp.unwrap()).unwrap(); assert!(matches!(resp, Output::Success(_))); @@ -440,7 +426,10 @@ mod tests { RecordProof::No, Default::default(), &*backend, - ).unwrap().build().unwrap(); + ) + .unwrap() + .build() + .unwrap(); let block = built_block.block; let block_hash = block.hash(); @@ -501,8 +490,7 @@ mod tests { _ => panic!(), }; - let recv_sub_id: String = - serde_json::from_value(json_map["subscription"].take()).unwrap(); + let recv_sub_id: String = serde_json::from_value(json_map["subscription"].take()).unwrap(); let recv_justification: sp_core::Bytes = 
serde_json::from_value(json_map["result"].take()).unwrap(); let recv_justification: GrandpaJustification = @@ -520,10 +508,8 @@ mod tests { justification: create_justification().encode(), unknown_headers: vec![header(2)], }; - let (io, _) = setup_io_handler_with_finality_proofs( - TestVoterState, - Some(finality_proof.clone()), - ); + let (io, _) = + setup_io_handler_with_finality_proofs(TestVoterState, Some(finality_proof.clone())); let request = "{\"jsonrpc\":\"2.0\",\"method\":\"grandpa_proveFinality\",\"params\":[42],\"id\":1}"; diff --git a/client/finality-grandpa/rpc/src/notification.rs b/client/finality-grandpa/rpc/src/notification.rs index 4c9141be3631a..68944e903e0fb 100644 --- a/client/finality-grandpa/rpc/src/notification.rs +++ b/client/finality-grandpa/rpc/src/notification.rs @@ -16,10 +16,10 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use serde::{Serialize, Deserialize}; use parity_scale_codec::Encode; -use sp_runtime::traits::Block as BlockT; use sc_finality_grandpa::GrandpaJustification; +use serde::{Deserialize, Serialize}; +use sp_runtime::traits::Block as BlockT; /// An encoded justification proving that the given header has been finalized #[derive(Clone, Serialize, Deserialize)] diff --git a/client/finality-grandpa/rpc/src/report.rs b/client/finality-grandpa/rpc/src/report.rs index 0482d90f58f0a..fef8f22659953 100644 --- a/client/finality-grandpa/rpc/src/report.rs +++ b/client/finality-grandpa/rpc/src/report.rs @@ -44,11 +44,8 @@ where H: Clone + Debug + Eq, { fn get(&self) -> (u64, HashSet) { - let current_voters: HashSet = self - .current_authorities() - .iter() - .map(|p| p.0.clone()) - .collect(); + let current_voters: HashSet = + self.current_authorities().iter().map(|p| p.0.clone()).collect(); (self.set_id(), current_voters) } @@ -152,10 +149,6 @@ impl ReportedRoundStates { .map(|(round, round_state)| RoundState::from(*round, round_state, ¤t_voters)) .collect::, 
Error>>()?; - Ok(Self { - set_id, - best, - background, - }) + Ok(Self { set_id, best, background }) } } diff --git a/client/finality-grandpa/src/authorities.rs b/client/finality-grandpa/src/authorities.rs index a04be72f9d31e..60a347acc35bc 100644 --- a/client/finality-grandpa/src/authorities.rs +++ b/client/finality-grandpa/src/authorities.rs @@ -18,18 +18,16 @@ //! Utilities for dealing with authorities, authority sets, and handoffs. -use std::cmp::Ord; -use std::fmt::Debug; -use std::ops::Add; +use std::{cmp::Ord, fmt::Debug, ops::Add}; -use fork_tree::ForkTree; -use parking_lot::MappedMutexGuard; use finality_grandpa::voter_set::VoterSet; -use parity_scale_codec::{Encode, Decode}; +use fork_tree::ForkTree; use log::debug; +use parity_scale_codec::{Decode, Encode}; +use parking_lot::MappedMutexGuard; +use sc_consensus::shared_data::{SharedData, SharedDataLocked}; use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_INFO}; use sp_finality_grandpa::{AuthorityId, AuthorityList}; -use sc_consensus::shared_data::{SharedData, SharedDataLocked}; use crate::SetId; @@ -77,9 +75,7 @@ pub struct SharedAuthoritySet { impl Clone for SharedAuthoritySet { fn clone(&self) -> Self { - SharedAuthoritySet { - inner: self.inner.clone(), - } + SharedAuthoritySet { inner: self.inner.clone() } } } @@ -92,16 +88,15 @@ impl SharedAuthoritySet { /// Returns access to the [`AuthoritySet`] and locks it. /// /// For more information see [`SharedDataLocked`]. - pub(crate) fn inner_locked( - &self, - ) -> SharedDataLocked> { + pub(crate) fn inner_locked(&self) -> SharedDataLocked> { self.inner.shared_data_locked() } } impl SharedAuthoritySet -where N: Add + Ord + Clone + Debug, - H: Clone + Debug +where + N: Add + Ord + Clone + Debug, + H: Clone + Debug, { /// Get the earliest limit-block number that's higher or equal to the given /// min number, if any. 
@@ -136,9 +131,7 @@ where N: Add + Ord + Clone + Debug, impl From> for SharedAuthoritySet { fn from(set: AuthoritySet) -> Self { - SharedAuthoritySet { - inner: SharedData::new(set), - } + SharedAuthoritySet { inner: SharedData::new(set) } } } @@ -191,7 +184,7 @@ where /// Get a genesis set with given authorities. pub(crate) fn genesis(initial: AuthorityList) -> Option { if Self::invalid_authority_list(&initial) { - return None; + return None } Some(AuthoritySet { @@ -212,7 +205,7 @@ where authority_set_changes: AuthoritySetChanges, ) -> Option { if Self::invalid_authority_list(&authorities) { - return None; + return None } Some(AuthoritySet { @@ -255,7 +248,7 @@ where for change in &self.pending_forced_changes { if is_descendent_of(&change.canon_hash, best_hash)? { forced = Some((change.canon_hash.clone(), change.canon_height.clone())); - break; + break } } @@ -263,16 +256,13 @@ where for (_, _, change) in self.pending_standard_changes.roots() { if is_descendent_of(&change.canon_hash, best_hash)? { standard = Some((change.canon_hash.clone(), change.canon_height.clone())); - break; + break } } let earliest = match (forced, standard) { - (Some(forced), Some(standard)) => Some(if forced.1 < standard.1 { - forced - } else { - standard - }), + (Some(forced), Some(standard)) => + Some(if forced.1 < standard.1 { forced } else { standard }), (Some(forced), None) => Some(forced), (None, Some(standard)) => Some(standard), (None, None) => None, @@ -300,12 +290,7 @@ where pending.delay, ); - self.pending_standard_changes.import( - hash, - number, - pending, - is_descendent_of, - )?; + self.pending_standard_changes.import(hash, number, pending, is_descendent_of)?; debug!( target: "afg", @@ -329,21 +314,21 @@ where { for change in &self.pending_forced_changes { if change.canon_hash == pending.canon_hash { - return Err(Error::DuplicateAuthoritySetChange); + return Err(Error::DuplicateAuthoritySetChange) } if is_descendent_of(&change.canon_hash, &pending.canon_hash)? 
{ - return Err(Error::MultiplePendingForcedAuthoritySetChanges); + return Err(Error::MultiplePendingForcedAuthoritySetChanges) } } // ordered first by effective number and then by signal-block number. let key = (pending.effective_number(), pending.canon_height.clone()); - let idx = self.pending_forced_changes - .binary_search_by_key(&key, |change| ( - change.effective_number(), - change.canon_height.clone(), - )) + let idx = self + .pending_forced_changes + .binary_search_by_key(&key, |change| { + (change.effective_number(), change.canon_height.clone()) + }) .unwrap_or_else(|i| i); debug!( @@ -376,24 +361,22 @@ where E: std::error::Error, { if Self::invalid_authority_list(&pending.next_authorities) { - return Err(Error::InvalidAuthoritySet); + return Err(Error::InvalidAuthoritySet) } match pending.delay_kind { - DelayKind::Best { .. } => { - self.add_forced_change(pending, is_descendent_of) - }, - DelayKind::Finalized => { - self.add_standard_change(pending, is_descendent_of) - }, + DelayKind::Best { .. } => self.add_forced_change(pending, is_descendent_of), + DelayKind::Finalized => self.add_standard_change(pending, is_descendent_of), } } /// Inspect pending changes. Standard pending changes are iterated first, /// and the changes in the tree are traversed in pre-order, afterwards all /// forced changes are iterated. - pub(crate) fn pending_changes(&self) -> impl Iterator> { - self.pending_standard_changes.iter().map(|(_, _, c)| c) + pub(crate) fn pending_changes(&self) -> impl Iterator> { + self.pending_standard_changes + .iter() + .map(|(_, _, c)| c) .chain(self.pending_forced_changes.iter()) } @@ -404,7 +387,8 @@ where /// Only standard changes are taken into account for the current /// limit, since any existing forced change should preclude the voter from voting. 
pub(crate) fn current_limit(&self, min: N) -> Option { - self.pending_standard_changes.roots() + self.pending_standard_changes + .roots() .filter(|&(_, _, c)| c.effective_number() >= min) .min_by_key(|&(_, _, c)| c.effective_number()) .map(|(_, _, c)| c.effective_number()) @@ -450,9 +434,7 @@ where // the block that signaled the change. if change.canon_hash == best_hash || is_descendent_of(&change.canon_hash, &best_hash)? { let median_last_finalized = match change.delay_kind { - DelayKind::Best { - ref median_last_finalized, - } => median_last_finalized.clone(), + DelayKind::Best { ref median_last_finalized } => median_last_finalized.clone(), _ => unreachable!( "pending_forced_changes only contains forced changes; forced changes have delay kind Best; qed." ), @@ -460,8 +442,8 @@ where // check if there's any pending standard change that we depend on for (_, _, standard_change) in self.pending_standard_changes.roots() { - if standard_change.effective_number() <= median_last_finalized - && is_descendent_of(&standard_change.canon_hash, &change.canon_hash)? + if standard_change.effective_number() <= median_last_finalized && + is_descendent_of(&standard_change.canon_hash, &change.canon_hash)? 
{ log::info!(target: "afg", "Not applying authority set change forced at block #{:?}, due to pending standard change at block #{:?}", @@ -469,11 +451,9 @@ where standard_change.effective_number(), ); - return Err( - Error::ForcedAuthoritySetChangeDependencyUnsatisfied( - standard_change.effective_number() - ) - ); + return Err(Error::ForcedAuthoritySetChangeDependencyUnsatisfied( + standard_change.effective_number(), + )) } } @@ -505,7 +485,7 @@ where }, )); - break; + break } } @@ -536,24 +516,19 @@ where F: Fn(&H, &H) -> Result, E: std::error::Error, { - let mut status = Status { - changed: false, - new_set_block: None, - }; + let mut status = Status { changed: false, new_set_block: None }; match self.pending_standard_changes.finalize_with_descendent_if( &finalized_hash, finalized_number.clone(), is_descendent_of, - |change| change.effective_number() <= finalized_number + |change| change.effective_number() <= finalized_number, )? { fork_tree::FinalizationResult::Changed(change) => { status.changed = true; - let pending_forced_changes = std::mem::replace( - &mut self.pending_forced_changes, - Vec::new(), - ); + let pending_forced_changes = + std::mem::replace(&mut self.pending_forced_changes, Vec::new()); // we will keep all forced changes for any later blocks and that are a // descendent of the finalized block (i.e. they are part of this branch). 
@@ -566,7 +541,8 @@ where } if let Some(change) = change { - afg_log!(initial_sync, + afg_log!( + initial_sync, "👴 Applying authority set change scheduled at block #{:?}", change.canon_height, ); @@ -583,10 +559,7 @@ where self.current_authorities = change.next_authorities; self.set_id += 1; - status.new_set_block = Some(( - finalized_hash, - finalized_number, - )); + status.new_set_block = Some((finalized_hash, finalized_number)); } }, fork_tree::FinalizationResult::Unchanged => {}, @@ -615,12 +588,14 @@ where F: Fn(&H, &H) -> Result, E: std::error::Error, { - self.pending_standard_changes.finalizes_any_with_descendent_if( - &finalized_hash, - finalized_number.clone(), - is_descendent_of, - |change| change.effective_number() == finalized_number - ).map_err(Error::ForkTree) + self.pending_standard_changes + .finalizes_any_with_descendent_if( + &finalized_hash, + finalized_number.clone(), + is_descendent_of, + |change| change.effective_number() == finalized_number, + ) + .map_err(Error::ForkTree) } } @@ -654,7 +629,9 @@ pub struct PendingChange { } impl Decode for PendingChange { - fn decode(value: &mut I) -> Result { + fn decode( + value: &mut I, + ) -> Result { let next_authorities = Decode::decode(value)?; let delay = Decode::decode(value)?; let canon_height = Decode::decode(value)?; @@ -662,17 +639,11 @@ impl Decode for PendingChange { let delay_kind = DelayKind::decode(value).unwrap_or(DelayKind::Finalized); - Ok(PendingChange { - next_authorities, - delay, - canon_height, - canon_hash, - delay_kind, - }) + Ok(PendingChange { next_authorities, delay, canon_height, canon_hash, delay_kind }) } } -impl + Clone> PendingChange { +impl + Clone> PendingChange { /// Returns the effective number this change will be applied at. 
pub fn effective_number(&self) -> N { self.canon_height.clone() + self.delay.clone() @@ -715,15 +686,17 @@ impl AuthoritySetChanges { } pub(crate) fn get_set_id(&self, block_number: N) -> AuthoritySetChangeId { - if self.0 + if self + .0 .last() .map(|last_auth_change| last_auth_change.1 < block_number) .unwrap_or(false) { - return AuthoritySetChangeId::Latest; + return AuthoritySetChangeId::Latest } - let idx = self.0 + let idx = self + .0 .binary_search_by_key(&block_number, |(_, n)| n.clone()) .unwrap_or_else(|b| b); @@ -732,7 +705,7 @@ impl AuthoritySetChanges { // if this is the first index but not the first set id then we are missing data. if idx == 0 && set_id != 0 { - return AuthoritySetChangeId::Unknown; + return AuthoritySetChangeId::Unknown } AuthoritySetChangeId::Set(set_id, block_number) @@ -745,7 +718,9 @@ impl AuthoritySetChanges { /// number (excluded). The iterator yields a tuple representing the set id and the block number /// of the last block in that set. pub fn iter_from(&self, block_number: N) -> Option> { - let idx = self.0.binary_search_by_key(&block_number, |(_, n)| n.clone()) + let idx = self + .0 + .binary_search_by_key(&block_number, |(_, n)| n.clone()) // if there was a change at the given block number then we should start on the next // index since we want to exclude the current block number .map(|n| n + 1) @@ -756,7 +731,7 @@ impl AuthoritySetChanges { // if this is the first index but not the first set id then we are missing data. 
if idx == 0 && set_id != 0 { - return None; + return None } } @@ -769,14 +744,13 @@ mod tests { use super::*; use sp_core::crypto::Public; - fn static_is_descendent_of(value: bool) - -> impl Fn(&A, &A) -> Result - { + fn static_is_descendent_of(value: bool) -> impl Fn(&A, &A) -> Result { move |_, _| Ok(value) } fn is_descendent_of(f: F) -> impl Fn(&A, &A) -> Result - where F: Fn(&A, &A) -> bool + where + F: Fn(&A, &A) -> bool, { move |base, hash| Ok(f(base, hash)) } @@ -793,14 +767,12 @@ mod tests { authority_set_changes: AuthoritySetChanges::empty(), }; - let change = |height| { - PendingChange { - next_authorities: current_authorities.clone(), - delay: 0, - canon_height: height, - canon_hash: height.to_string(), - delay_kind: DelayKind::Finalized, - } + let change = |height| PendingChange { + next_authorities: current_authorities.clone(), + delay: 0, + canon_height: height, + canon_hash: height.to_string(), + delay_kind: DelayKind::Finalized, }; let is_descendent_of = static_is_descendent_of(false); @@ -808,25 +780,13 @@ mod tests { authorities.add_pending_change(change(1), &is_descendent_of).unwrap(); authorities.add_pending_change(change(2), &is_descendent_of).unwrap(); - assert_eq!( - authorities.current_limit(0), - Some(1), - ); + assert_eq!(authorities.current_limit(0), Some(1),); - assert_eq!( - authorities.current_limit(1), - Some(1), - ); + assert_eq!(authorities.current_limit(1), Some(1),); - assert_eq!( - authorities.current_limit(2), - Some(2), - ); + assert_eq!(authorities.current_limit(2), Some(2),); - assert_eq!( - authorities.current_limit(3), - None, - ); + assert_eq!(authorities.current_limit(3), None,); } #[test] @@ -865,13 +825,22 @@ mod tests { delay_kind: DelayKind::Finalized, }; - authorities.add_pending_change(change_a.clone(), &static_is_descendent_of(false)).unwrap(); - authorities.add_pending_change(change_b.clone(), &static_is_descendent_of(false)).unwrap(); - authorities.add_pending_change(change_c.clone(), &is_descendent_of(|base, 
hash| match (*base, *hash) { - ("hash_a", "hash_c") => true, - ("hash_b", "hash_c") => false, - _ => unreachable!(), - })).unwrap(); + authorities + .add_pending_change(change_a.clone(), &static_is_descendent_of(false)) + .unwrap(); + authorities + .add_pending_change(change_b.clone(), &static_is_descendent_of(false)) + .unwrap(); + authorities + .add_pending_change( + change_c.clone(), + &is_descendent_of(|base, hash| match (*base, *hash) { + ("hash_a", "hash_c") => true, + ("hash_b", "hash_c") => false, + _ => unreachable!(), + }), + ) + .unwrap(); // forced changes are iterated last let change_d = PendingChange { @@ -890,8 +859,12 @@ mod tests { delay_kind: DelayKind::Best { median_last_finalized: 0 }, }; - authorities.add_pending_change(change_d.clone(), &static_is_descendent_of(false)).unwrap(); - authorities.add_pending_change(change_e.clone(), &static_is_descendent_of(false)).unwrap(); + authorities + .add_pending_change(change_d.clone(), &static_is_descendent_of(false)) + .unwrap(); + authorities + .add_pending_change(change_e.clone(), &static_is_descendent_of(false)) + .unwrap(); // ordered by subtree depth assert_eq!( @@ -930,46 +903,48 @@ mod tests { delay_kind: DelayKind::Finalized, }; - authorities.add_pending_change(change_a.clone(), &static_is_descendent_of(true)).unwrap(); - authorities.add_pending_change(change_b.clone(), &static_is_descendent_of(true)).unwrap(); + authorities + .add_pending_change(change_a.clone(), &static_is_descendent_of(true)) + .unwrap(); + authorities + .add_pending_change(change_b.clone(), &static_is_descendent_of(true)) + .unwrap(); - assert_eq!( - authorities.pending_changes().collect::>(), - vec![&change_a, &change_b], - ); + assert_eq!(authorities.pending_changes().collect::>(), vec![&change_a, &change_b],); // finalizing "hash_c" won't enact the change signaled at "hash_a" but it will prune out "hash_b" - let status = authorities.apply_standard_changes( - "hash_c", - 11, - &is_descendent_of(|base, hash| match (*base, 
*hash) { - ("hash_a", "hash_c") => true, - ("hash_b", "hash_c") => false, - _ => unreachable!(), - }), - false, - None, - ).unwrap(); + let status = authorities + .apply_standard_changes( + "hash_c", + 11, + &is_descendent_of(|base, hash| match (*base, *hash) { + ("hash_a", "hash_c") => true, + ("hash_b", "hash_c") => false, + _ => unreachable!(), + }), + false, + None, + ) + .unwrap(); assert!(status.changed); assert_eq!(status.new_set_block, None); - assert_eq!( - authorities.pending_changes().collect::>(), - vec![&change_a], - ); + assert_eq!(authorities.pending_changes().collect::>(), vec![&change_a],); assert_eq!(authorities.authority_set_changes, AuthoritySetChanges::empty()); // finalizing "hash_d" will enact the change signaled at "hash_a" - let status = authorities.apply_standard_changes( - "hash_d", - 15, - &is_descendent_of(|base, hash| match (*base, *hash) { - ("hash_a", "hash_d") => true, - _ => unreachable!(), - }), - false, - None, - ).unwrap(); + let status = authorities + .apply_standard_changes( + "hash_d", + 15, + &is_descendent_of(|base, hash| match (*base, *hash) { + ("hash_a", "hash_d") => true, + _ => unreachable!(), + }), + false, + None, + ) + .unwrap(); assert!(status.changed); assert_eq!(status.new_set_block, Some(("hash_d", 15))); @@ -1010,8 +985,12 @@ mod tests { delay_kind: DelayKind::Finalized, }; - authorities.add_pending_change(change_a.clone(), &static_is_descendent_of(true)).unwrap(); - authorities.add_pending_change(change_c.clone(), &static_is_descendent_of(true)).unwrap(); + authorities + .add_pending_change(change_a.clone(), &static_is_descendent_of(true)) + .unwrap(); + authorities + .add_pending_change(change_c.clone(), &static_is_descendent_of(true)) + .unwrap(); let is_descendent_of = is_descendent_of(|base, hash| match (*base, *hash) { ("hash_a", "hash_b") => true, @@ -1032,13 +1011,9 @@ mod tests { )); assert_eq!(authorities.authority_set_changes, AuthoritySetChanges::empty()); - let status = 
authorities.apply_standard_changes( - "hash_b", - 15, - &is_descendent_of, - false, - None, - ).unwrap(); + let status = authorities + .apply_standard_changes("hash_b", 15, &is_descendent_of, false, None) + .unwrap(); assert!(status.changed); assert_eq!(status.new_set_block, Some(("hash_b", 15))); @@ -1048,13 +1023,9 @@ mod tests { assert_eq!(authorities.authority_set_changes, AuthoritySetChanges(vec![(0, 15)])); // after finalizing `change_a` it should be possible to finalize `change_c` - let status = authorities.apply_standard_changes( - "hash_d", - 40, - &is_descendent_of, - false, - None, - ).unwrap(); + let status = authorities + .apply_standard_changes("hash_d", 40, &is_descendent_of, false, None) + .unwrap(); assert!(status.changed); assert_eq!(status.new_set_block, Some(("hash_d", 40))); @@ -1092,8 +1063,12 @@ mod tests { delay_kind: DelayKind::Finalized, }; - authorities.add_pending_change(change_a.clone(), &static_is_descendent_of(false)).unwrap(); - authorities.add_pending_change(change_b.clone(), &static_is_descendent_of(true)).unwrap(); + authorities + .add_pending_change(change_a.clone(), &static_is_descendent_of(false)) + .unwrap(); + authorities + .add_pending_change(change_b.clone(), &static_is_descendent_of(true)) + .unwrap(); let is_descendent_of = is_descendent_of(|base, hash| match (*base, *hash) { ("hash_a", "hash_d") => true, @@ -1160,8 +1135,12 @@ mod tests { delay_kind: DelayKind::Best { median_last_finalized: 0 }, }; - authorities.add_pending_change(change_a, &static_is_descendent_of(false)).unwrap(); - authorities.add_pending_change(change_b.clone(), &static_is_descendent_of(false)).unwrap(); + authorities + .add_pending_change(change_a, &static_is_descendent_of(false)) + .unwrap(); + authorities + .add_pending_change(change_b.clone(), &static_is_descendent_of(false)) + .unwrap(); // no duplicates are allowed assert!(matches!( @@ -1172,7 +1151,9 @@ mod tests { // there's an effective change triggered at block 15 but not a standard one. 
// so this should do nothing. assert_eq!( - authorities.enacts_standard_change("hash_c", 15, &static_is_descendent_of(true)).unwrap(), + authorities + .enacts_standard_change("hash_c", 15, &static_is_descendent_of(true)) + .unwrap(), None, ); @@ -1194,20 +1175,16 @@ mod tests { // let's try and apply the forced changes. // too early and there's no forced changes to apply. - assert!( - authorities - .apply_forced_changes("hash_a10", 10, &static_is_descendent_of(true), false, None) - .unwrap() - .is_none() - ); + assert!(authorities + .apply_forced_changes("hash_a10", 10, &static_is_descendent_of(true), false, None) + .unwrap() + .is_none()); // too late. - assert!( - authorities - .apply_forced_changes("hash_a16", 16, &is_descendent_of_a, false, None) - .unwrap() - .is_none() - ); + assert!(authorities + .apply_forced_changes("hash_a16", 16, &is_descendent_of_a, false, None) + .unwrap() + .is_none()); // on time -- chooses the right change for this fork. assert_eq!( @@ -1247,9 +1224,7 @@ mod tests { delay: 0, canon_height: 5, canon_hash: "hash_a", - delay_kind: DelayKind::Best { - median_last_finalized: 0, - }, + delay_kind: DelayKind::Best { median_last_finalized: 0 }, }; // and import it @@ -1258,12 +1233,10 @@ mod tests { .unwrap(); // it should be enacted at the same block that signaled it - assert!( - authorities - .apply_forced_changes("hash_a", 5, &static_is_descendent_of(false), false, None) - .unwrap() - .is_some() - ); + assert!(authorities + .apply_forced_changes("hash_a", 5, &static_is_descendent_of(false), false, None) + .unwrap() + .is_some()); } #[test] @@ -1306,9 +1279,15 @@ mod tests { }; // add some pending standard changes all on the same fork - authorities.add_pending_change(change_a, &static_is_descendent_of(true)).unwrap(); - authorities.add_pending_change(change_b, &static_is_descendent_of(true)).unwrap(); - authorities.add_pending_change(change_c, &static_is_descendent_of(true)).unwrap(); + authorities + .add_pending_change(change_a, 
&static_is_descendent_of(true)) + .unwrap(); + authorities + .add_pending_change(change_b, &static_is_descendent_of(true)) + .unwrap(); + authorities + .add_pending_change(change_c, &static_is_descendent_of(true)) + .unwrap(); // effective at #45 let change_d = PendingChange { @@ -1316,18 +1295,24 @@ mod tests { delay: 5, canon_height: 40, canon_hash: "hash_d", - delay_kind: DelayKind::Best { - median_last_finalized: 31, - }, + delay_kind: DelayKind::Best { median_last_finalized: 31 }, }; // now add a forced change on the same fork - authorities.add_pending_change(change_d, &static_is_descendent_of(true)).unwrap(); + authorities + .add_pending_change(change_d, &static_is_descendent_of(true)) + .unwrap(); // the forced change cannot be applied since the pending changes it depends on // have not been applied yet. assert!(matches!( - authorities.apply_forced_changes("hash_d45", 45, &static_is_descendent_of(true), false, None), + authorities.apply_forced_changes( + "hash_d45", + 45, + &static_is_descendent_of(true), + false, + None + ), Err(Error::ForcedAuthoritySetChangeDependencyUnsatisfied(15)) )); assert_eq!(authorities.authority_set_changes, AuthoritySetChanges::empty()); @@ -1340,7 +1325,13 @@ mod tests { // but the forced change still depends on the next standard change assert!(matches!( - authorities.apply_forced_changes("hash_d", 45, &static_is_descendent_of(true), false, None), + authorities.apply_forced_changes( + "hash_d", + 45, + &static_is_descendent_of(true), + false, + None + ), Err(Error::ForcedAuthoritySetChangeDependencyUnsatisfied(20)) )); assert_eq!(authorities.authority_set_changes, AuthoritySetChanges(vec![(0, 15)])); @@ -1425,29 +1416,19 @@ mod tests { }); // add the three pending changes - authorities - .add_pending_change(change_b, &is_descendent_of) - .unwrap(); - authorities - .add_pending_change(change_a0, &is_descendent_of) - .unwrap(); - authorities - .add_pending_change(change_a1, &is_descendent_of) - .unwrap(); + 
authorities.add_pending_change(change_b, &is_descendent_of).unwrap(); + authorities.add_pending_change(change_a0, &is_descendent_of).unwrap(); + authorities.add_pending_change(change_a1, &is_descendent_of).unwrap(); // the earliest change at block `best_a` should be the change at A0 (#5) assert_eq!( - authorities - .next_change(&"best_a", &is_descendent_of) - .unwrap(), + authorities.next_change(&"best_a", &is_descendent_of).unwrap(), Some(("hash_a0", 5)), ); // the earliest change at block `best_b` should be the change at B (#4) assert_eq!( - authorities - .next_change(&"best_b", &is_descendent_of) - .unwrap(), + authorities.next_change(&"best_b", &is_descendent_of).unwrap(), Some(("hash_b", 4)), ); @@ -1458,19 +1439,12 @@ mod tests { // the next change is now at A1 (#10) assert_eq!( - authorities - .next_change(&"best_a", &is_descendent_of) - .unwrap(), + authorities.next_change(&"best_a", &is_descendent_of).unwrap(), Some(("hash_a1", 10)), ); // there's no longer any pending change at `best_b` fork - assert_eq!( - authorities - .next_change(&"best_b", &is_descendent_of) - .unwrap(), - None, - ); + assert_eq!(authorities.next_change(&"best_b", &is_descendent_of).unwrap(), None,); // we a forced change at A10 (#8) let change_a10 = PendingChange { @@ -1478,9 +1452,7 @@ mod tests { delay: 0, canon_height: 8, canon_hash: "hash_a10", - delay_kind: DelayKind::Best { - median_last_finalized: 0, - }, + delay_kind: DelayKind::Best { median_last_finalized: 0 }, }; authorities @@ -1489,9 +1461,7 @@ mod tests { // it should take precedence over the change at A1 (#10) assert_eq!( - authorities - .next_change(&"best_a", &is_descendent_of) - .unwrap(), + authorities.next_change(&"best_a", &is_descendent_of).unwrap(), Some(("hash_a10", 8)), ); } @@ -1511,16 +1481,11 @@ mod tests { None, ); - let invalid_authorities_weight = vec![ - (AuthorityId::from_slice(&[1; 32]), 5), - (AuthorityId::from_slice(&[2; 32]), 0), - ]; + let invalid_authorities_weight = + 
vec![(AuthorityId::from_slice(&[1; 32]), 5), (AuthorityId::from_slice(&[2; 32]), 0)]; // authority weight of zero is invalid - assert_eq!( - AuthoritySet::<(), ()>::genesis(invalid_authorities_weight.clone()), - None - ); + assert_eq!(AuthoritySet::<(), ()>::genesis(invalid_authorities_weight.clone()), None); assert_eq!( AuthoritySet::<(), ()>::new( invalid_authorities_weight.clone(), @@ -1557,9 +1522,7 @@ mod tests { delay: 10, canon_height: 5, canon_hash: (), - delay_kind: DelayKind::Best { - median_last_finalized: 0, - }, + delay_kind: DelayKind::Best { median_last_finalized: 0 }, }; // pending change contains an an authority set @@ -1617,17 +1580,13 @@ mod tests { canon_height, canon_hash, delay_kind: if forced { - DelayKind::Best { - median_last_finalized: 0, - } + DelayKind::Best { median_last_finalized: 0 } } else { DelayKind::Finalized }, }; - authorities - .add_pending_change(change, &is_descendent_of) - .unwrap(); + authorities.add_pending_change(change, &is_descendent_of).unwrap(); }; add_pending_change(5, "A", false); @@ -1669,14 +1628,7 @@ mod tests { .unwrap(); assert_eq!(authorities.pending_forced_changes.len(), 1); - assert_eq!( - authorities - .pending_forced_changes - .first() - .unwrap() - .canon_hash, - "D" - ); + assert_eq!(authorities.pending_forced_changes.first().unwrap().canon_hash, "D"); } #[test] @@ -1714,10 +1666,7 @@ mod tests { authority_set_changes.append(2, 81); // we are missing the data for the first set, therefore we should return `None` - assert_eq!( - None, - authority_set_changes.iter_from(40).map(|it| it.collect::>()), - ); + assert_eq!(None, authority_set_changes.iter_from(40).map(|it| it.collect::>()),); // after adding the data for the first set the same query should work let mut authority_set_changes = AuthoritySetChanges::empty(); @@ -1736,14 +1685,8 @@ mod tests { authority_set_changes.iter_from(41).map(|it| it.cloned().collect::>()), ); - assert_eq!( - 0, - authority_set_changes.iter_from(121).unwrap().count(), - ); + 
assert_eq!(0, authority_set_changes.iter_from(121).unwrap().count(),); - assert_eq!( - 0, - authority_set_changes.iter_from(200).unwrap().count(), - ); + assert_eq!(0, authority_set_changes.iter_from(200).unwrap().count(),); } } diff --git a/client/finality-grandpa/src/aux_schema.rs b/client/finality-grandpa/src/aux_schema.rs index 296f7c13c5244..179e8876e66d8 100644 --- a/client/finality-grandpa/src/aux_schema.rs +++ b/client/finality-grandpa/src/aux_schema.rs @@ -30,13 +30,16 @@ use sp_blockchain::{Error as ClientError, Result as ClientResult}; use sp_finality_grandpa::{AuthorityList, RoundNumber, SetId}; use sp_runtime::traits::{Block as BlockT, NumberFor}; -use crate::authorities::{ - AuthoritySet, AuthoritySetChanges, DelayKind, PendingChange, SharedAuthoritySet, +use crate::{ + authorities::{ + AuthoritySet, AuthoritySetChanges, DelayKind, PendingChange, SharedAuthoritySet, + }, + environment::{ + CompletedRound, CompletedRounds, CurrentRounds, HasVoted, SharedVoterSetState, + VoterSetState, + }, + GrandpaJustification, NewAuthoritySet, }; -use crate::environment::{ - CompletedRound, CompletedRounds, CurrentRounds, HasVoted, SharedVoterSetState, VoterSetState, -}; -use crate::{GrandpaJustification, NewAuthoritySet}; const VERSION_KEY: &[u8] = b"grandpa_schema_version"; const SET_STATE_KEY: &[u8] = b"grandpa_completed_round"; @@ -141,13 +144,13 @@ struct V2AuthoritySet { pub(crate) fn load_decode( backend: &B, - key: &[u8] + key: &[u8], ) -> ClientResult> { match backend.get_aux(key)? 
{ None => Ok(None), Some(t) => T::decode(&mut &t[..]) .map_err(|e| ClientError::Backend(format!("GRANDPA DB is corrupted: {}", e))) - .map(Some) + .map(Some), } } @@ -160,24 +163,16 @@ pub(crate) struct PersistentData { fn migrate_from_version0( backend: &B, genesis_round: &G, -) -> ClientResult< - Option<( - AuthoritySet>, - VoterSetState, - )>, -> +) -> ClientResult>, VoterSetState)>> where B: AuxStore, G: Fn() -> RoundState>, { - CURRENT_VERSION.using_encoded(|s| - backend.insert_aux(&[(VERSION_KEY, s)], &[]) - )?; + CURRENT_VERSION.using_encoded(|s| backend.insert_aux(&[(VERSION_KEY, s)], &[]))?; - if let Some(old_set) = load_decode::<_, V0AuthoritySet>>( - backend, - AUTHORITY_SET_KEY, - )? { + if let Some(old_set) = + load_decode::<_, V0AuthoritySet>>(backend, AUTHORITY_SET_KEY)? + { let new_set: AuthoritySet> = old_set.into(); backend.insert_aux(&[(AUTHORITY_SET_KEY, new_set.encode().as_slice())], &[])?; @@ -193,7 +188,7 @@ where let set_id = new_set.set_id; let base = last_round_state.prevote_ghost.expect( - "state is for completed round; completed rounds must have a prevote ghost; qed." + "state is for completed round; completed rounds must have a prevote ghost; qed.", ); let mut current_rounds = CurrentRounds::new(); @@ -215,7 +210,7 @@ where backend.insert_aux(&[(SET_STATE_KEY, set_state.encode().as_slice())], &[])?; - return Ok(Some((new_set, set_state))); + return Ok(Some((new_set, set_state))) } Ok(None) @@ -224,36 +219,25 @@ where fn migrate_from_version1( backend: &B, genesis_round: &G, -) -> ClientResult< - Option<( - AuthoritySet>, - VoterSetState, - )>, -> +) -> ClientResult>, VoterSetState)>> where B: AuxStore, G: Fn() -> RoundState>, { - CURRENT_VERSION.using_encoded(|s| - backend.insert_aux(&[(VERSION_KEY, s)], &[]) - )?; + CURRENT_VERSION.using_encoded(|s| backend.insert_aux(&[(VERSION_KEY, s)], &[]))?; - if let Some(set) = load_decode::<_, AuthoritySet>>( - backend, - AUTHORITY_SET_KEY, - )? 
{ + if let Some(set) = + load_decode::<_, AuthoritySet>>(backend, AUTHORITY_SET_KEY)? + { let set_id = set.set_id; - let completed_rounds = |number, state, base| CompletedRounds::new( - CompletedRound { - number, - state, - votes: Vec::new(), - base, - }, - set_id, - &set, - ); + let completed_rounds = |number, state, base| { + CompletedRounds::new( + CompletedRound { number, state, votes: Vec::new(), base }, + set_id, + &set, + ) + }; let set_state = match load_decode::<_, V1VoterSetState>>( backend, @@ -284,17 +268,13 @@ where let base = set_state.prevote_ghost .expect("state is for completed round; completed rounds must have a prevote ghost; qed."); - VoterSetState::live( - set_id, - &set, - base, - ) + VoterSetState::live(set_id, &set, base) }, }; backend.insert_aux(&[(SET_STATE_KEY, set_state.encode().as_slice())], &[])?; - return Ok(Some((set, set_state))); + return Ok(Some((set, set_state))) } Ok(None) @@ -303,46 +283,31 @@ where fn migrate_from_version2( backend: &B, genesis_round: &G, -) -> ClientResult< - Option<( - AuthoritySet>, - VoterSetState, - )>, -> +) -> ClientResult>, VoterSetState)>> where B: AuxStore, G: Fn() -> RoundState>, { - CURRENT_VERSION.using_encoded(|s| - backend.insert_aux(&[(VERSION_KEY, s)], &[]) - )?; + CURRENT_VERSION.using_encoded(|s| backend.insert_aux(&[(VERSION_KEY, s)], &[]))?; - if let Some(old_set) = load_decode::<_, V2AuthoritySet>>( - backend, - AUTHORITY_SET_KEY, - )? { + if let Some(old_set) = + load_decode::<_, V2AuthoritySet>>(backend, AUTHORITY_SET_KEY)? + { let new_set: AuthoritySet> = old_set.into(); backend.insert_aux(&[(AUTHORITY_SET_KEY, new_set.encode().as_slice())], &[])?; - let set_state = match load_decode::<_, VoterSetState>( - backend, - SET_STATE_KEY, - )? { + let set_state = match load_decode::<_, VoterSetState>(backend, SET_STATE_KEY)? 
{ Some(state) => state, None => { let state = genesis_round(); let base = state.prevote_ghost .expect("state is for completed round; completed rounds must have a prevote ghost; qed."); - VoterSetState::live( - new_set.set_id, - &new_set, - base, - ) - } + VoterSetState::live(new_set.set_id, &new_set, base) + }, }; - return Ok(Some((new_set, set_state))); + return Ok(Some((new_set, set_state))) } Ok(None) @@ -371,7 +336,7 @@ where return Ok(PersistentData { authority_set: new_set.into(), set_state: set_state.into(), - }); + }) } }, Some(1) => { @@ -381,7 +346,7 @@ where return Ok(PersistentData { authority_set: new_set.into(), set_state: set_state.into(), - }); + }) } }, Some(2) => { @@ -391,41 +356,31 @@ where return Ok(PersistentData { authority_set: new_set.into(), set_state: set_state.into(), - }); + }) } - } + }, Some(3) => { if let Some(set) = load_decode::<_, AuthoritySet>>( backend, AUTHORITY_SET_KEY, )? { - let set_state = match load_decode::<_, VoterSetState>( - backend, - SET_STATE_KEY, - )? { - Some(state) => state, - None => { - let state = make_genesis_round(); - let base = state.prevote_ghost + let set_state = + match load_decode::<_, VoterSetState>(backend, SET_STATE_KEY)? { + Some(state) => state, + None => { + let state = make_genesis_round(); + let base = state.prevote_ghost .expect("state is for completed round; completed rounds must have a prevote ghost; qed."); - VoterSetState::live( - set.set_id, - &set, - base, - ) - } - }; + VoterSetState::live(set.set_id, &set, base) + }, + }; - return Ok(PersistentData { - authority_set: set.into(), - set_state: set_state.into(), - }); + return Ok(PersistentData { authority_set: set.into(), set_state: set_state.into() }) } - } - Some(other) => return Err(ClientError::Backend( - format!("Unsupported GRANDPA DB version: {:?}", other) - )), + }, + Some(other) => + return Err(ClientError::Backend(format!("Unsupported GRANDPA DB version: {:?}", other))), } // genesis. 
@@ -436,14 +391,11 @@ where let genesis_set = AuthoritySet::genesis(genesis_authorities) .expect("genesis authorities is non-empty; all weights are non-zero; qed."); let state = make_genesis_round(); - let base = state.prevote_ghost + let base = state + .prevote_ghost .expect("state is for completed round; completed rounds must have a prevote ghost; qed."); - let genesis_state = VoterSetState::live( - 0, - &genesis_set, - base, - ); + let genesis_state = VoterSetState::live(0, &genesis_set, base); backend.insert_aux( &[ @@ -453,10 +405,7 @@ where &[], )?; - Ok(PersistentData { - authority_set: genesis_set.into(), - set_state: genesis_state.into(), - }) + Ok(PersistentData { authority_set: genesis_set.into(), set_state: genesis_state.into() }) } /// Update the authority set on disk after a change. @@ -486,10 +435,7 @@ where ); let encoded = set_state.encode(); - write_aux(&[ - (AUTHORITY_SET_KEY, &encoded_set[..]), - (SET_STATE_KEY, &encoded[..]), - ]) + write_aux(&[(AUTHORITY_SET_KEY, &encoded_set[..]), (SET_STATE_KEY, &encoded[..])]) } else { write_aux(&[(AUTHORITY_SET_KEY, &encoded_set[..])]) } @@ -527,10 +473,7 @@ pub(crate) fn write_voter_set_state( backend: &B, state: &VoterSetState, ) -> ClientResult<()> { - backend.insert_aux( - &[(SET_STATE_KEY, state.encode().as_slice())], - &[] - ) + backend.insert_aux(&[(SET_STATE_KEY, state.encode().as_slice())], &[]) } /// Write concluded round. 
@@ -554,10 +497,10 @@ pub(crate) fn load_authorities( #[cfg(test)] mod test { - use sp_finality_grandpa::AuthorityId; + use super::*; use sp_core::H256; + use sp_finality_grandpa::AuthorityId; use substrate_test_runtime_client; - use super::*; #[test] fn load_decode_from_v0_migrates_data_format() { @@ -582,19 +525,18 @@ mod test { let voter_set_state = (round_number, round_state.clone()); - client.insert_aux( - &[ - (AUTHORITY_SET_KEY, authority_set.encode().as_slice()), - (SET_STATE_KEY, voter_set_state.encode().as_slice()), - ], - &[], - ).unwrap(); + client + .insert_aux( + &[ + (AUTHORITY_SET_KEY, authority_set.encode().as_slice()), + (SET_STATE_KEY, voter_set_state.encode().as_slice()), + ], + &[], + ) + .unwrap(); } - assert_eq!( - load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), - None, - ); + assert_eq!(load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), None,); // should perform the migration load_persistent::( @@ -602,23 +544,19 @@ mod test { H256::random(), 0, || unreachable!(), - ).unwrap(); + ) + .unwrap(); - assert_eq!( - load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), - Some(3), - ); + assert_eq!(load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), Some(3),); - let PersistentData { - authority_set, - set_state, - .. - } = load_persistent::( - &client, - H256::random(), - 0, - || unreachable!(), - ).unwrap(); + let PersistentData { authority_set, set_state, .. 
} = + load_persistent::( + &client, + H256::random(), + 0, + || unreachable!(), + ) + .unwrap(); assert_eq!( *authority_set.inner(), @@ -628,7 +566,8 @@ mod test { ForkTree::new(), Vec::new(), AuthoritySetChanges::empty(), - ).unwrap(), + ) + .unwrap(), ); let mut current_rounds = CurrentRounds::new(); @@ -673,24 +612,24 @@ mod test { ForkTree::new(), Vec::new(), AuthoritySetChanges::empty(), - ).unwrap(); + ) + .unwrap(); let voter_set_state = V1VoterSetState::Live(round_number, round_state.clone()); - client.insert_aux( - &[ - (AUTHORITY_SET_KEY, authority_set.encode().as_slice()), - (SET_STATE_KEY, voter_set_state.encode().as_slice()), - (VERSION_KEY, 1u32.encode().as_slice()), - ], - &[], - ).unwrap(); + client + .insert_aux( + &[ + (AUTHORITY_SET_KEY, authority_set.encode().as_slice()), + (SET_STATE_KEY, voter_set_state.encode().as_slice()), + (VERSION_KEY, 1u32.encode().as_slice()), + ], + &[], + ) + .unwrap(); } - assert_eq!( - load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), - Some(1), - ); + assert_eq!(load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), Some(1),); // should perform the migration load_persistent::( @@ -698,23 +637,19 @@ mod test { H256::random(), 0, || unreachable!(), - ).unwrap(); + ) + .unwrap(); - assert_eq!( - load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), - Some(3), - ); + assert_eq!(load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), Some(3),); - let PersistentData { - authority_set, - set_state, - .. - } = load_persistent::( - &client, - H256::random(), - 0, - || unreachable!(), - ).unwrap(); + let PersistentData { authority_set, set_state, .. 
} = + load_persistent::( + &client, + H256::random(), + 0, + || unreachable!(), + ) + .unwrap(); assert_eq!( *authority_set.inner(), @@ -724,7 +659,8 @@ mod test { ForkTree::new(), Vec::new(), AuthoritySetChanges::empty(), - ).unwrap(), + ) + .unwrap(), ); let mut current_rounds = CurrentRounds::new(); @@ -768,23 +704,22 @@ mod test { VoterSetState::live( set_id, &authority_set.clone().into(), // Note the conversion! - genesis_state + genesis_state, ); - client.insert_aux( - &[ - (AUTHORITY_SET_KEY, authority_set.encode().as_slice()), - (SET_STATE_KEY, voter_set_state.encode().as_slice()), - (VERSION_KEY, 2u32.encode().as_slice()), - ], - &[], - ).unwrap(); + client + .insert_aux( + &[ + (AUTHORITY_SET_KEY, authority_set.encode().as_slice()), + (SET_STATE_KEY, voter_set_state.encode().as_slice()), + (VERSION_KEY, 2u32.encode().as_slice()), + ], + &[], + ) + .unwrap(); } - assert_eq!( - load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), - Some(2), - ); + assert_eq!(load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), Some(2),); // should perform the migration load_persistent::( @@ -792,22 +727,17 @@ mod test { H256::random(), 0, || unreachable!(), - ).unwrap(); + ) + .unwrap(); - assert_eq!( - load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), - Some(3), - ); + assert_eq!(load_decode::<_, u32>(&client, VERSION_KEY).unwrap(), Some(3),); - let PersistentData { - authority_set, - .. - } = load_persistent::( - &client, - H256::random(), - 0, - || unreachable!(), - ).unwrap(); + let PersistentData { authority_set, .. 
} = load_persistent::< + substrate_test_runtime_client::runtime::Block, + _, + _, + >(&client, H256::random(), 0, || unreachable!()) + .unwrap(); assert_eq!( *authority_set.inner(), @@ -817,7 +747,8 @@ mod test { ForkTree::new(), Vec::new(), AuthoritySetChanges::empty(), - ).unwrap(), + ) + .unwrap(), ); } @@ -843,7 +774,8 @@ mod test { assert_eq!( load_decode::<_, CompletedRound::>( &client, &key - ).unwrap(), + ) + .unwrap(), Some(completed_round), ); } diff --git a/client/finality-grandpa/src/communication/gossip.rs b/client/finality-grandpa/src/communication/gossip.rs index 1b3b5ea7c5d24..cf712f9e8965c 100644 --- a/client/finality-grandpa/src/communication/gossip.rs +++ b/client/finality-grandpa/src/communication/gossip.rs @@ -84,23 +84,25 @@ //! //! We only send polite messages to peers, -use sp_runtime::traits::{NumberFor, Block as BlockT, Zero}; -use sc_network_gossip::{MessageIntent, ValidatorContext}; +use parity_scale_codec::{Decode, Encode}; use sc_network::{ObservedRole, PeerId, ReputationChange}; -use parity_scale_codec::{Encode, Decode}; +use sc_network_gossip::{MessageIntent, ValidatorContext}; use sp_finality_grandpa::AuthorityId; +use sp_runtime::traits::{Block as BlockT, NumberFor, Zero}; +use log::{debug, trace}; +use prometheus_endpoint::{register, CounterVec, Opts, PrometheusError, Registry, U64}; +use rand::seq::SliceRandom; use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_DEBUG}; -use log::{trace, debug}; use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; -use prometheus_endpoint::{CounterVec, Opts, PrometheusError, register, Registry, U64}; -use rand::seq::SliceRandom; +use super::{benefit, cost, Round, SetId}; use crate::{environment, CatchUp, CompactCommit, SignedMessage}; -use super::{cost, benefit, Round, SetId}; -use std::collections::{HashMap, VecDeque, HashSet}; -use std::time::Duration; +use std::{ + collections::{HashMap, HashSet, VecDeque}, + time::Duration, +}; use 
wasm_timer::Instant; const REBROADCAST_AFTER: Duration = Duration::from_secs(60 * 5); @@ -146,18 +148,14 @@ enum Consider { /// A view of protocol state. #[derive(Debug)] struct View { - round: Round, // the current round we are at. - set_id: SetId, // the current voter set id. + round: Round, // the current round we are at. + set_id: SetId, // the current voter set id. last_commit: Option, // commit-finalized block height, if any. } impl Default for View { fn default() -> Self { - View { - round: Round(1), - set_id: SetId(0), - last_commit: None, - } + View { round: Round(1), set_id: SetId(0), last_commit: None } } } @@ -165,12 +163,20 @@ impl View { /// Consider a round and set ID combination under a current view. fn consider_vote(&self, round: Round, set_id: SetId) -> Consider { // only from current set - if set_id < self.set_id { return Consider::RejectPast } - if set_id > self.set_id { return Consider::RejectFuture } + if set_id < self.set_id { + return Consider::RejectPast + } + if set_id > self.set_id { + return Consider::RejectFuture + } // only r-1 ... r+1 - if round.0 > self.round.0.saturating_add(1) { return Consider::RejectFuture } - if round.0 < self.round.0.saturating_sub(1) { return Consider::RejectPast } + if round.0 > self.round.0.saturating_add(1) { + return Consider::RejectFuture + } + if round.0 < self.round.0.saturating_sub(1) { + return Consider::RejectPast + } Consider::Accept } @@ -179,18 +185,23 @@ impl View { /// because we gate on finalization of a further block than a previous commit. fn consider_global(&self, set_id: SetId, number: N) -> Consider { // only from current set - if set_id < self.set_id { return Consider::RejectPast } - if set_id > self.set_id { return Consider::RejectFuture } + if set_id < self.set_id { + return Consider::RejectPast + } + if set_id > self.set_id { + return Consider::RejectFuture + } // only commits which claim to prove a higher block number than // the one we're aware of. 
match self.last_commit { None => Consider::Accept, - Some(ref num) => if num < &number { - Consider::Accept - } else { - Consider::RejectPast - } + Some(ref num) => + if num < &number { + Consider::Accept + } else { + Consider::RejectPast + }, } } } @@ -208,22 +219,13 @@ struct LocalView { impl LocalView { /// Creates a new `LocalView` at the given set id and round. fn new(set_id: SetId, round: Round) -> LocalView { - LocalView { - set_id, - round, - last_commit: None, - round_start: Instant::now(), - } + LocalView { set_id, round, last_commit: None, round_start: Instant::now() } } /// Converts the local view to a `View` discarding round and set id /// information about the last commit. fn as_view(&self) -> View<&N> { - View { - round: self.round, - set_id: self.set_id, - last_commit: self.last_commit_height(), - } + View { round: self.round, set_id: self.set_id, last_commit: self.last_commit_height() } } /// Update the set ID. implies a reset to round 1. @@ -231,7 +233,7 @@ impl LocalView { if set_id != self.set_id { self.set_id = set_id; self.round = Round(1); - self.round_start = Instant::now(); + self.round_start = Instant::now(); } } @@ -259,7 +261,7 @@ const KEEP_RECENT_ROUNDS: usize = 3; struct KeepTopics { current_set: SetId, rounds: VecDeque<(Round, SetId)>, - reverse_map: HashMap, SetId)> + reverse_map: HashMap, SetId)>, } impl KeepTopics { @@ -293,10 +295,7 @@ impl KeepTopics { map.insert(super::global_topic::(self.current_set.0), (None, self.current_set)); for &(round, set) in &self.rounds { - map.insert( - super::round_topic::(round.0, set.0), - (Some(round), set) - ); + map.insert(super::round_topic::(round.0, set.0), (Some(round), set)); } self.reverse_map = map; @@ -310,10 +309,8 @@ impl KeepTopics { // topics to send to a neighbor based on their view. 
fn neighbor_topics(view: &View>) -> Vec { let s = view.set_id; - let mut topics = vec![ - super::global_topic::(s.0), - super::round_topic::(view.round.0, s.0), - ]; + let mut topics = + vec![super::global_topic::(s.0), super::round_topic::(view.round.0, s.0)]; if view.round.0 != 0 { let r = Round(view.round.0 - 1); @@ -423,15 +420,9 @@ pub(super) enum Misbehavior { // could not decode neighbor message. bytes-length of the packet. UndecodablePacket(i32), // Bad catch up message (invalid signatures). - BadCatchUpMessage { - signatures_checked: i32, - }, + BadCatchUpMessage { signatures_checked: i32 }, // Bad commit message - BadCommitMessage { - signatures_checked: i32, - blocks_loaded: i32, - equivocations_caught: i32, - }, + BadCommitMessage { signatures_checked: i32, blocks_loaded: i32, equivocations_caught: i32 }, // A message received that's from the future relative to our view. // always misbehavior. FutureMessage, @@ -462,7 +453,10 @@ impl Misbehavior { let benefit = equivocations_caught.saturating_mul(benefit::PER_EQUIVOCATION); - ReputationChange::new((benefit as i32).saturating_add(cost as i32), "Grandpa: Bad commit") + ReputationChange::new( + (benefit as i32).saturating_add(cost as i32), + "Grandpa: Bad commit", + ) }, FutureMessage => cost::FUTURE_MESSAGE, OutOfScopeMessage => cost::OUT_OF_SCOPE_MESSAGE, @@ -478,10 +472,7 @@ struct PeerInfo { impl PeerInfo { fn new(roles: ObservedRole) -> Self { - PeerInfo { - view: View::default(), - roles, - } + PeerInfo { view: View::default(), roles } } } @@ -515,14 +506,14 @@ impl Peers { match role { ObservedRole::Authority if self.first_stage_peers.len() < LUCKY_PEERS => { self.first_stage_peers.insert(who.clone()); - } + }, ObservedRole::Authority if self.second_stage_peers.len() < LUCKY_PEERS => { self.second_stage_peers.insert(who.clone()); - } + }, ObservedRole::Light if self.lucky_light_peers.len() < LUCKY_PEERS => { self.lucky_light_peers.insert(who.clone()); - } - _ => {} + }, + _ => {}, } 
self.inner.insert(who, PeerInfo::new(role)); @@ -548,12 +539,12 @@ impl Peers { Some(p) => p, }; - let invalid_change = peer.view.set_id > update.set_id - || peer.view.round > update.round && peer.view.set_id == update.set_id - || peer.view.last_commit.as_ref() > Some(&update.commit_finalized_height); + let invalid_change = peer.view.set_id > update.set_id || + peer.view.round > update.round && peer.view.set_id == update.set_id || + peer.view.last_commit.as_ref() > Some(&update.commit_finalized_height); if invalid_change { - return Err(Misbehavior::InvalidViewChange); + return Err(Misbehavior::InvalidViewChange) } peer.view = View { @@ -578,7 +569,7 @@ impl Peers { // same height, because there is still a misbehavior condition based on // sending commits that are <= the best we are aware of. if peer.view.last_commit.as_ref() > Some(&new_height) { - return Err(Misbehavior::InvalidViewChange); + return Err(Misbehavior::InvalidViewChange) } peer.view.last_commit = Some(new_height); @@ -630,7 +621,7 @@ impl Peers { } else if n_authorities_added < one_and_a_half_lucky { second_stage_peers.insert(peer_id.clone()); } else { - break; + break } n_authorities_added += 1; @@ -641,7 +632,7 @@ impl Peers { let n_second_stage_peers = LUCKY_PEERS.max((shuffled_peers.len() as f32).sqrt() as usize); for (peer_id, info) in &shuffled_peers { if info.roles.is_light() { - continue; + continue } if first_stage_peers.len() < LUCKY_PEERS { @@ -652,20 +643,14 @@ impl Peers { second_stage_peers.insert(peer_id.clone()); } } else { - break; + break } } // pick `LUCKY_PEERS` random light peers let lucky_light_peers = shuffled_peers .into_iter() - .filter_map(|(peer_id, info)| { - if info.roles.is_light() { - Some(peer_id) - } else { - None - } - }) + .filter_map(|(peer_id, info)| if info.roles.is_light() { Some(peer_id) } else { None }) .take(LUCKY_PEERS) .collect(); @@ -691,15 +676,9 @@ enum PendingCatchUp { /// No pending catch up requests. 
None, /// Pending catch up request which has not been answered yet. - Requesting { - who: PeerId, - request: CatchUpRequestMessage, - instant: Instant, - }, + Requesting { who: PeerId, request: CatchUpRequestMessage, instant: Instant }, /// Pending catch up request that was answered and is being processed. - Processing { - instant: Instant, - }, + Processing { instant: Instant }, } /// Configuration for the round catch-up mechanism. @@ -730,10 +709,7 @@ impl CatchUpConfig { fn request_allowed(&self, peer: &PeerInfo) -> bool { match self { CatchUpConfig::Disabled => false, - CatchUpConfig::Enabled { - only_from_authorities, - .. - } => match peer.roles { + CatchUpConfig::Enabled { only_from_authorities, .. } => match peer.roles { ObservedRole::Authority => true, ObservedRole::Light => false, ObservedRole::Full => !only_from_authorities, @@ -795,11 +771,12 @@ impl Inner { { let local_view = match self.local_view { None => return None, - Some(ref mut v) => if v.round == round { - return None - } else { - v - }, + Some(ref mut v) => + if v.round == round { + return None + } else { + v + }, }; let set_id = local_view.set_id; @@ -820,27 +797,24 @@ impl Inner { fn note_set(&mut self, set_id: SetId, authorities: Vec) -> MaybeMessage { { let local_view = match self.local_view { - ref mut x @ None => x.get_or_insert(LocalView::new( - set_id, - Round(1), - )), - Some(ref mut v) => if v.set_id == set_id { - let diff_authorities = - self.authorities.iter().collect::>() != - authorities.iter().collect(); - - if diff_authorities { - debug!(target: "afg", - "Gossip validator noted set {:?} twice with different authorities. 
\ - Was the authority set hard forked?", - set_id, - ); - self.authorities = authorities; - } - return None; - } else { - v - }, + ref mut x @ None => x.get_or_insert(LocalView::new(set_id, Round(1))), + Some(ref mut v) => + if v.set_id == set_id { + let diff_authorities = self.authorities.iter().collect::>() != + authorities.iter().collect(); + + if diff_authorities { + debug!(target: "afg", + "Gossip validator noted set {:?} twice with different authorities. \ + Was the authority set hard forked?", + set_id, + ); + self.authorities = authorities; + } + return None + } else { + v + }, }; local_view.update_set(set_id); @@ -860,11 +834,12 @@ impl Inner { { match self.local_view { None => return None, - Some(ref mut v) => if v.last_commit_height() < Some(&finalized) { - v.last_commit = Some((finalized, round, set_id)); - } else { - return None - }, + Some(ref mut v) => + if v.last_commit_height() < Some(&finalized) { + v.last_commit = Some((finalized, round, set_id)); + } else { + return None + }, }; } @@ -872,30 +847,40 @@ impl Inner { } fn consider_vote(&self, round: Round, set_id: SetId) -> Consider { - self.local_view.as_ref() + self.local_view + .as_ref() .map(LocalView::as_view) .map(|v| v.consider_vote(round, set_id)) .unwrap_or(Consider::RejectOutOfScope) } fn consider_global(&self, set_id: SetId, number: NumberFor) -> Consider { - self.local_view.as_ref() + self.local_view + .as_ref() .map(LocalView::as_view) .map(|v| v.consider_global(set_id, &number)) .unwrap_or(Consider::RejectOutOfScope) } - fn cost_past_rejection(&self, _who: &PeerId, _round: Round, _set_id: SetId) -> ReputationChange { + fn cost_past_rejection( + &self, + _who: &PeerId, + _round: Round, + _set_id: SetId, + ) -> ReputationChange { // hardcoded for now. 
cost::PAST_REJECTION } - fn validate_round_message(&self, who: &PeerId, full: &VoteMessage) - -> Action - { + fn validate_round_message( + &self, + who: &PeerId, + full: &VoteMessage, + ) -> Action { match self.consider_vote(full.round, full.set_id) { Consider::RejectFuture => return Action::Discard(Misbehavior::FutureMessage.cost()), - Consider::RejectOutOfScope => return Action::Discard(Misbehavior::OutOfScopeMessage.cost()), + Consider::RejectOutOfScope => + return Action::Discard(Misbehavior::OutOfScopeMessage.cost()), Consider::RejectPast => return Action::Discard(self.cost_past_rejection(who, full.round, full.set_id)), Consider::Accept => {}, @@ -910,7 +895,7 @@ impl Inner { "afg.bad_msg_signature"; "signature" => ?full.message.id, ); - return Action::Discard(cost::UNKNOWN_VOTER); + return Action::Discard(cost::UNKNOWN_VOTER) } if !sp_finality_grandpa::check_message_signature( @@ -927,30 +912,34 @@ impl Inner { "afg.bad_msg_signature"; "signature" => ?full.message.id, ); - return Action::Discard(cost::BAD_SIGNATURE); + return Action::Discard(cost::BAD_SIGNATURE) } let topic = super::round_topic::(full.round.0, full.set_id.0); Action::Keep(topic, benefit::ROUND_MESSAGE) } - fn validate_commit_message(&mut self, who: &PeerId, full: &FullCommitMessage) - -> Action - { - + fn validate_commit_message( + &mut self, + who: &PeerId, + full: &FullCommitMessage, + ) -> Action { if let Err(misbehavior) = self.peers.update_commit_height(who, full.message.target_number) { - return Action::Discard(misbehavior.cost()); + return Action::Discard(misbehavior.cost()) } match self.consider_global(full.set_id, full.message.target_number) { Consider::RejectFuture => return Action::Discard(Misbehavior::FutureMessage.cost()), Consider::RejectPast => return Action::Discard(self.cost_past_rejection(who, full.round, full.set_id)), - Consider::RejectOutOfScope => return Action::Discard(Misbehavior::OutOfScopeMessage.cost()), + Consider::RejectOutOfScope => + return 
Action::Discard(Misbehavior::OutOfScopeMessage.cost()), Consider::Accept => {}, } - if full.message.precommits.len() != full.message.auth_data.len() || full.message.precommits.is_empty() { + if full.message.precommits.len() != full.message.auth_data.len() || + full.message.precommits.is_empty() + { debug!(target: "afg", "Malformed compact commit"); telemetry!( self.config.telemetry; @@ -960,7 +949,7 @@ impl Inner { "auth_data_len" => ?full.message.auth_data.len(), "precommits_is_empty" => ?full.message.precommits.is_empty(), ); - return Action::Discard(cost::MALFORMED_COMMIT); + return Action::Discard(cost::MALFORMED_COMMIT) } // always discard commits initially and rebroadcast after doing full @@ -969,33 +958,33 @@ impl Inner { Action::ProcessAndDiscard(topic, benefit::BASIC_VALIDATED_COMMIT) } - fn validate_catch_up_message(&mut self, who: &PeerId, full: &FullCatchUpMessage) - -> Action - { + fn validate_catch_up_message( + &mut self, + who: &PeerId, + full: &FullCatchUpMessage, + ) -> Action { match &self.pending_catch_up { PendingCatchUp::Requesting { who: peer, request, instant } => { if peer != who { - return Action::Discard(Misbehavior::OutOfScopeMessage.cost()); + return Action::Discard(Misbehavior::OutOfScopeMessage.cost()) } if request.set_id != full.set_id { - return Action::Discard(cost::MALFORMED_CATCH_UP); + return Action::Discard(cost::MALFORMED_CATCH_UP) } if request.round.0 > full.message.round_number { - return Action::Discard(cost::MALFORMED_CATCH_UP); + return Action::Discard(cost::MALFORMED_CATCH_UP) } if full.message.prevotes.is_empty() || full.message.precommits.is_empty() { - return Action::Discard(cost::MALFORMED_CATCH_UP); + return Action::Discard(cost::MALFORMED_CATCH_UP) } // move request to pending processing state, we won't push out // any catch up requests until we import this one (either with a // success or failure). 
- self.pending_catch_up = PendingCatchUp::Processing { - instant: *instant, - }; + self.pending_catch_up = PendingCatchUp::Processing { instant: *instant }; // always discard catch up messages, they're point-to-point let topic = super::global_topic::(full.set_id.0); @@ -1036,15 +1025,14 @@ impl Inner { if request.set_id.0.saturating_add(1) == local_view.set_id.0 && local_view.round.0.saturating_sub(CATCH_UP_THRESHOLD) == 0 { - return (None, Action::Discard(cost::HONEST_OUT_OF_SCOPE_CATCH_UP)); + return (None, Action::Discard(cost::HONEST_OUT_OF_SCOPE_CATCH_UP)) } - return (None, Action::Discard(Misbehavior::OutOfScopeMessage.cost())); + return (None, Action::Discard(Misbehavior::OutOfScopeMessage.cost())) } match self.peers.peer(who) { - None => - return (None, Action::Discard(Misbehavior::OutOfScopeMessage.cost())), + None => return (None, Action::Discard(Misbehavior::OutOfScopeMessage.cost())), Some(peer) if peer.view.round >= request.round => return (None, Action::Discard(Misbehavior::OutOfScopeMessage.cost())), _ => {}, @@ -1052,7 +1040,7 @@ impl Inner { let last_completed_round = set_state.read().last_completed_round(); if last_completed_round.number < request.round.0 { - return (None, Action::Discard(Misbehavior::OutOfScopeMessage.cost())); + return (None, Action::Discard(Misbehavior::OutOfScopeMessage.cost())) } trace!(target: "afg", "Replying to catch-up request for round {} from {} with round {}", @@ -1123,10 +1111,8 @@ impl Inner { { // send catch up request if allowed let round = peer.view.round.0 - 1; // peer.view.round is > 0 - let request = CatchUpRequestMessage { - set_id: peer.view.set_id, - round: Round(round), - }; + let request = + CatchUpRequestMessage { set_id: peer.view.set_id, round: Round(round) }; let (catch_up_allowed, catch_up_report) = self.note_catch_up_request(who, &request); @@ -1146,16 +1132,17 @@ impl Inner { (catch_up, report) } - fn import_neighbor_message(&mut self, who: &PeerId, update: NeighborPacket>) - -> (Vec, Action, 
Option>, Option) - { + fn import_neighbor_message( + &mut self, + who: &PeerId, + update: NeighborPacket>, + ) -> (Vec, Action, Option>, Option) { let update_res = self.peers.update_peer_state(who, update); let (cost_benefit, topics) = match update_res { Ok(view) => (benefit::NEIGHBOR_MESSAGE, view.map(|view| neighbor_topics::(view))), - Err(misbehavior) => - (misbehavior.cost(), None), + Err(misbehavior) => (misbehavior.cost(), None), }; let (catch_up, report) = match update_res { @@ -1207,14 +1194,14 @@ impl Inner { let report = match &self.pending_catch_up { PendingCatchUp::Requesting { who: peer, instant, .. } => if instant.elapsed() <= CATCH_UP_REQUEST_TIMEOUT { - return (false, None); + return (false, None) } else { // report peer for timeout Some((peer.clone(), cost::CATCH_UP_REQUEST_TIMEOUT)) }, PendingCatchUp::Processing { instant, .. } => if instant.elapsed() < CATCH_UP_PROCESS_TIMEOUT { - return (false, None); + return (false, None) } else { None }, @@ -1246,19 +1233,16 @@ impl Inner { }; if self.config.local_role.is_light() { - return false; + return false } if round_elapsed < round_duration.mul_f32(PROPAGATION_SOME) { self.peers.first_stage_peers.contains(who) } else if round_elapsed < round_duration.mul_f32(PROPAGATION_ALL) { - self.peers.first_stage_peers.contains(who) - || self.peers.second_stage_peers.contains(who) + self.peers.first_stage_peers.contains(who) || + self.peers.second_stage_peers.contains(who) } else { - self.peers - .peer(who) - .map(|info| !info.roles.is_light()) - .unwrap_or(false) + self.peers.peer(who).map(|info| !info.roles.is_light()).unwrap_or(false) } } @@ -1283,13 +1267,13 @@ impl Inner { }; if self.config.local_role.is_light() { - return false; + return false } if round_elapsed < round_duration.mul_f32(PROPAGATION_ALL) { - self.peers.first_stage_peers.contains(who) - || self.peers.second_stage_peers.contains(who) - || self.peers.lucky_light_peers.contains(who) + self.peers.first_stage_peers.contains(who) || + 
self.peers.second_stage_peers.contains(who) || + self.peers.lucky_light_peers.contains(who) } else { true } @@ -1302,15 +1286,17 @@ pub(crate) struct Metrics { } impl Metrics { - pub(crate) fn register(registry: &prometheus_endpoint::Registry) -> Result { + pub(crate) fn register( + registry: &prometheus_endpoint::Registry, + ) -> Result { Ok(Self { messages_validated: register( CounterVec::new( Opts::new( "finality_grandpa_communication_gossip_validator_messages", - "Number of messages validated by the finality grandpa gossip validator." + "Number of messages validated by the finality grandpa gossip validator.", ), - &["message", "action"] + &["message", "action"], )?, registry, )?, @@ -1336,7 +1322,7 @@ impl GossipValidator { set_state: environment::SharedVoterSetState, prometheus_registry: Option<&Registry>, telemetry: Option, - ) -> (GossipValidator, TracingUnboundedReceiver) { + ) -> (GossipValidator, TracingUnboundedReceiver) { let metrics = match prometheus_registry.map(Metrics::register) { Some(Ok(metrics)) => Some(metrics), Some(Err(e)) => { @@ -1360,7 +1346,8 @@ impl GossipValidator { /// Note a round in the current set has started. pub(super) fn note_round(&self, round: Round, send_neighbor: F) - where F: FnOnce(Vec, NeighborPacket>) + where + F: FnOnce(Vec, NeighborPacket>), { let maybe_msg = self.inner.write().note_round(round); if let Some((to, msg)) = maybe_msg { @@ -1371,7 +1358,8 @@ impl GossipValidator { /// Note that a voter set with given ID has started. Updates the current set to given /// value and initializes the round to 0. 
pub(super) fn note_set(&self, set_id: SetId, authorities: Vec, send_neighbor: F) - where F: FnOnce(Vec, NeighborPacket>) + where + F: FnOnce(Vec, NeighborPacket>), { let maybe_msg = self.inner.write().note_set(set_id, authorities); if let Some((to, msg)) = maybe_msg { @@ -1386,14 +1374,10 @@ impl GossipValidator { set_id: SetId, finalized: NumberFor, send_neighbor: F, - ) - where F: FnOnce(Vec, NeighborPacket>) + ) where + F: FnOnce(Vec, NeighborPacket>), { - let maybe_msg = self.inner.write().note_commit_finalized( - round, - set_id, - finalized, - ); + let maybe_msg = self.inner.write().note_commit_finalized(round, set_id, finalized); if let Some((to, msg)) = maybe_msg { send_neighbor(to, msg); @@ -1401,7 +1385,7 @@ impl GossipValidator { } /// Note that we've processed a catch up message. - pub(super) fn note_catch_up_message_processed(&self) { + pub(super) fn note_catch_up_message_processed(&self) { self.inner.write().note_catch_up_message_processed(); } @@ -1409,9 +1393,11 @@ impl GossipValidator { let _ = self.report_sender.unbounded_send(PeerReport { who, cost_benefit }); } - pub(super) fn do_validate(&self, who: &PeerId, mut data: &[u8]) - -> (Action, Vec, Option>) - { + pub(super) fn do_validate( + &self, + who: &PeerId, + mut data: &[u8], + ) -> (Action, Vec, Option>) { let mut broadcast_topics = Vec::new(); let mut peer_reply = None; @@ -1430,10 +1416,10 @@ impl GossipValidator { }, Ok(GossipMessage::Neighbor(update)) => { message_name = Some("neighbor"); - let (topics, action, catch_up, report) = self.inner.write().import_neighbor_message( - who, - update.into_neighbor_packet(), - ); + let (topics, action, catch_up, report) = self + .inner + .write() + .import_neighbor_message(who, update.into_neighbor_packet()); if let Some((peer, cost_benefit)) = report { self.report(peer, cost_benefit); @@ -1442,22 +1428,19 @@ impl GossipValidator { broadcast_topics = topics; peer_reply = catch_up; action - } + }, Ok(GossipMessage::CatchUp(ref message)) => { 
message_name = Some("catch_up"); self.inner.write().validate_catch_up_message(who, message) }, Ok(GossipMessage::CatchUpRequest(request)) => { message_name = Some("catch_up_request"); - let (reply, action) = self.inner.write().handle_catch_up_request( - who, - request, - &self.set_state, - ); + let (reply, action) = + self.inner.write().handle_catch_up_request(who, request, &self.set_state); peer_reply = reply; action - } + }, Err(e) => { message_name = None; debug!(target: "afg", "Error decoding message: {}", e); @@ -1470,7 +1453,7 @@ impl GossipValidator { let len = std::cmp::min(i32::MAX as usize, data.len()) as i32; Action::Discard(Misbehavior::UndecodablePacket(len).cost()) - } + }, } }; @@ -1494,17 +1477,20 @@ impl GossipValidator { } impl sc_network_gossip::Validator for GossipValidator { - fn new_peer(&self, context: &mut dyn ValidatorContext, who: &PeerId, roles: ObservedRole) { + fn new_peer( + &self, + context: &mut dyn ValidatorContext, + who: &PeerId, + roles: ObservedRole, + ) { let packet = { let mut inner = self.inner.write(); inner.peers.new_peer(who.clone(), roles); - inner.local_view.as_ref().map(|v| { - NeighborPacket { - round: v.round, - set_id: v.set_id, - commit_finalized_height: *v.last_commit_height().unwrap_or(&Zero::zero()), - } + inner.local_view.as_ref().map(|v| NeighborPacket { + round: v.round, + set_id: v.set_id, + commit_finalized_height: *v.last_commit_height().unwrap_or(&Zero::zero()), }) }; @@ -1540,15 +1526,15 @@ impl sc_network_gossip::Validator for GossipValidator { self.report(who.clone(), cb); sc_network_gossip::ValidationResult::ProcessAndDiscard(topic) - } + }, Action::Discard(cb) => { self.report(who.clone(), cb); sc_network_gossip::ValidationResult::Discard - } + }, } } @@ -1573,7 +1559,7 @@ impl sc_network_gossip::Validator for GossipValidator sc_network_gossip::Validator for GossipValidator sc_network_gossip::Validator for GossipValidator false, Ok(GossipMessage::CatchUpRequest(_)) => false, 
Ok(GossipMessage::CatchUp(_)) => false, @@ -1652,11 +1639,10 @@ impl sc_network_gossip::Validator for GossipValidator true, Ok(GossipMessage::Commit(full)) => match local_view.last_commit { Some((number, round, set_id)) => - // we expire any commit message that doesn't target the same block - // as our best commit or isn't from the same round and set id + // we expire any commit message that doesn't target the same block + // as our best commit or isn't from the same round and set id !(full.message.target_number == number && - full.round == round && - full.set_id == set_id), + full.round == round && full.set_id == set_id), None => true, }, Ok(_) => true, @@ -1673,8 +1659,7 @@ pub(super) struct PeerReport { #[cfg(test)] mod tests { - use super::*; - use super::environment::SharedVoterSetState; + use super::{environment::SharedVoterSetState, *}; use sc_network::config::Role; use sc_network_gossip::Validator as GossipValidatorT; use sc_network_test::Block; @@ -1695,19 +1680,14 @@ mod tests { // dummy voter set state fn voter_set_state() -> SharedVoterSetState { - use crate::authorities::AuthoritySet; - use crate::environment::VoterSetState; + use crate::{authorities::AuthoritySet, environment::VoterSetState}; let base = (H256::zero(), 0); let voters = vec![(AuthorityId::from_slice(&[1; 32]), 1)]; let voters = AuthoritySet::genesis(voters).unwrap(); - let set_state = VoterSetState::live( - 0, - &voters, - base, - ); + let set_state = VoterSetState::live(0, &voters, base); set_state.into() } @@ -1752,11 +1732,8 @@ mod tests { let mut peers = Peers::default(); let id = PeerId::random(); - let update = NeighborPacket { - round: Round(5), - set_id: SetId(10), - commit_finalized_height: 50, - }; + let update = + NeighborPacket { round: Round(5), set_id: SetId(10), commit_finalized_height: 50 }; let res = peers.update_peer_state(&id, update.clone()); assert!(res.unwrap().is_none()); @@ -1771,29 +1748,17 @@ mod tests { #[test] fn update_peer_state() { - let update1 = 
NeighborPacket { - round: Round(5), - set_id: SetId(10), - commit_finalized_height: 50u32, - }; + let update1 = + NeighborPacket { round: Round(5), set_id: SetId(10), commit_finalized_height: 50u32 }; - let update2 = NeighborPacket { - round: Round(6), - set_id: SetId(10), - commit_finalized_height: 60, - }; + let update2 = + NeighborPacket { round: Round(6), set_id: SetId(10), commit_finalized_height: 60 }; - let update3 = NeighborPacket { - round: Round(2), - set_id: SetId(11), - commit_finalized_height: 61, - }; + let update3 = + NeighborPacket { round: Round(2), set_id: SetId(11), commit_finalized_height: 61 }; - let update4 = NeighborPacket { - round: Round(3), - set_id: SetId(11), - commit_finalized_height: 80, - }; + let update4 = + NeighborPacket { round: Round(3), set_id: SetId(11), commit_finalized_height: 80 }; let mut peers = Peers::default(); let id = PeerId::random(); @@ -1820,11 +1785,13 @@ mod tests { let id = PeerId::random(); peers.new_peer(id.clone(), ObservedRole::Authority); - peers.update_peer_state(&id, NeighborPacket { - round: Round(10), - set_id: SetId(10), - commit_finalized_height: 10, - }).unwrap().unwrap(); + peers + .update_peer_state( + &id, + NeighborPacket { round: Round(10), set_id: SetId(10), commit_finalized_height: 10 }, + ) + .unwrap() + .unwrap(); let mut check_update = move |update: NeighborPacket<_>| { let err = peers.update_peer_state(&id, update.clone()).unwrap_err(); @@ -1853,12 +1820,7 @@ mod tests { #[test] fn messages_not_expired_immediately() { - let (val, _) = GossipValidator::::new( - config(), - voter_set_state(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config(), voter_set_state(), None, None); let set_id = 1; @@ -1890,12 +1852,7 @@ mod tests { fn message_from_unknown_authority_discarded() { assert!(cost::UNKNOWN_VOTER != cost::BAD_SIGNATURE); - let (val, _) = GossipValidator::::new( - config(), - voter_set_state(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config(), 
voter_set_state(), None, None); let set_id = 1; let auth = AuthorityId::from_slice(&[1u8; 32]); let peer = PeerId::random(); @@ -1904,31 +1861,37 @@ mod tests { val.note_round(Round(1), |_, _| {}); let inner = val.inner.read(); - let unknown_voter = inner.validate_round_message(&peer, &VoteMessage { - round: Round(1), - set_id: SetId(set_id), - message: SignedMessage:: { - message: finality_grandpa::Message::Prevote(finality_grandpa::Prevote { - target_hash: Default::default(), - target_number: 10, - }), - signature: Default::default(), - id: AuthorityId::from_slice(&[2u8; 32]), - } - }); + let unknown_voter = inner.validate_round_message( + &peer, + &VoteMessage { + round: Round(1), + set_id: SetId(set_id), + message: SignedMessage:: { + message: finality_grandpa::Message::Prevote(finality_grandpa::Prevote { + target_hash: Default::default(), + target_number: 10, + }), + signature: Default::default(), + id: AuthorityId::from_slice(&[2u8; 32]), + }, + }, + ); - let bad_sig = inner.validate_round_message(&peer, &VoteMessage { - round: Round(1), - set_id: SetId(set_id), - message: SignedMessage:: { - message: finality_grandpa::Message::Prevote(finality_grandpa::Prevote { - target_hash: Default::default(), - target_number: 10, - }), - signature: Default::default(), - id: auth.clone(), - } - }); + let bad_sig = inner.validate_round_message( + &peer, + &VoteMessage { + round: Round(1), + set_id: SetId(set_id), + message: SignedMessage:: { + message: finality_grandpa::Message::Prevote(finality_grandpa::Prevote { + target_hash: Default::default(), + target_number: 10, + }), + signature: Default::default(), + id: auth.clone(), + }, + }, + ); assert_eq!(unknown_voter, Action::Discard(cost::UNKNOWN_VOTER)); assert_eq!(bad_sig, Action::Discard(cost::BAD_SIGNATURE)); @@ -1936,12 +1899,7 @@ mod tests { #[test] fn unsolicited_catch_up_messages_discarded() { - let (val, _) = GossipValidator::::new( - config(), - voter_set_state(), - None, - None, - ); + let (val, _) = 
GossipValidator::::new(config(), voter_set_state(), None, None); let set_id = 1; let auth = AuthorityId::from_slice(&[1u8; 32]); @@ -1952,16 +1910,19 @@ mod tests { let validate_catch_up = || { let mut inner = val.inner.write(); - inner.validate_catch_up_message(&peer, &FullCatchUpMessage { - set_id: SetId(set_id), - message: finality_grandpa::CatchUp { - round_number: 10, - prevotes: Default::default(), - precommits: Default::default(), - base_hash: Default::default(), - base_number: Default::default(), - } - }) + inner.validate_catch_up_message( + &peer, + &FullCatchUpMessage { + set_id: SetId(set_id), + message: finality_grandpa::CatchUp { + round_number: 10, + prevotes: Default::default(), + precommits: Default::default(), + base_hash: Default::default(), + base_number: Default::default(), + }, + }, + ) }; // the catch up is discarded because we have no pending request @@ -1969,10 +1930,7 @@ mod tests { let noted = val.inner.write().note_catch_up_request( &peer, - &CatchUpRequestMessage { - set_id: SetId(set_id), - round: Round(10), - } + &CatchUpRequestMessage { set_id: SetId(set_id), round: Round(10) }, ); assert!(noted.0); @@ -1998,20 +1956,13 @@ mod tests { let mut current_rounds = environment::CurrentRounds::new(); current_rounds.insert(3, environment::HasVoted::No); - let set_state = environment::VoterSetState::::Live { - completed_rounds, - current_rounds, - }; + let set_state = + environment::VoterSetState::::Live { completed_rounds, current_rounds }; set_state.into() }; - let (val, _) = GossipValidator::::new( - config(), - set_state.clone(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config(), set_state.clone(), None, None); let set_id = 1; let auth = AuthorityId::from_slice(&[1u8; 32]); @@ -2027,10 +1978,7 @@ mod tests { let res = inner.handle_catch_up_request( &peer, - CatchUpRequestMessage { - set_id: SetId(set_id), - round: Round(10), - }, + CatchUpRequestMessage { set_id: SetId(set_id), round: Round(10) }, &set_state, ); @@ 
-2040,10 +1988,7 @@ mod tests { let res = inner.handle_catch_up_request( &peer, - CatchUpRequestMessage { - set_id: SetId(set_id), - round: Round(2), - }, + CatchUpRequestMessage { set_id: SetId(set_id), round: Round(2) }, &set_state, ); @@ -2062,12 +2007,7 @@ mod tests { #[test] fn detects_honest_out_of_scope_catch_requests() { let set_state = voter_set_state(); - let (val, _) = GossipValidator::::new( - config(), - set_state.clone(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config(), set_state.clone(), None, None); // the validator starts at set id 2 val.note_set(SetId(2), Vec::new(), |_, _| {}); @@ -2081,10 +2021,7 @@ mod tests { let mut inner = val.inner.write(); inner.handle_catch_up_request( &peer, - CatchUpRequestMessage { - set_id: SetId(set_id), - round: Round(round), - }, + CatchUpRequestMessage { set_id: SetId(set_id), round: Round(round) }, &set_state, ) }; @@ -2104,51 +2041,28 @@ mod tests { // the validator is at set id 2 and round 0. requests for set id 1 // should not be answered but they should be considered an honest // mistake - assert_res( - send_request(1, 1), - true, - ); + assert_res(send_request(1, 1), true); - assert_res( - send_request(1, 10), - true, - ); + assert_res(send_request(1, 10), true); // requests for set id 0 should be considered out of scope - assert_res( - send_request(0, 1), - false, - ); + assert_res(send_request(0, 1), false); - assert_res( - send_request(0, 10), - false, - ); + assert_res(send_request(0, 10), false); // after the validator progresses further than CATCH_UP_THRESHOLD in set // id 2, any request for set id 1 should no longer be considered an // honest mistake. 
val.note_round(Round(3), |_, _| {}); - assert_res( - send_request(1, 1), - false, - ); + assert_res(send_request(1, 1), false); - assert_res( - send_request(1, 2), - false, - ); + assert_res(send_request(1, 2), false); } #[test] fn issues_catch_up_request_on_neighbor_packet_import() { - let (val, _) = GossipValidator::::new( - config(), - voter_set_state(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config(), voter_set_state(), None, None); // the validator starts at set id 1. val.note_set(SetId(1), Vec::new(), |_, _| {}); @@ -2218,12 +2132,7 @@ mod tests { c }; - let (val, _) = GossipValidator::::new( - config, - voter_set_state(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config, voter_set_state(), None, None); // the validator starts at set id 1. val.note_set(SetId(1), Vec::new(), |_, _| {}); @@ -2238,11 +2147,7 @@ mod tests { // we should get `None`. let (_, _, catch_up_request, _) = val.inner.write().import_neighbor_message( &peer, - NeighborPacket { - round: Round(42), - set_id: SetId(1), - commit_finalized_height: 50, - }, + NeighborPacket { round: Round(42), set_id: SetId(1), commit_finalized_height: 50 }, ); match catch_up_request { @@ -2253,12 +2158,7 @@ mod tests { #[test] fn doesnt_send_catch_up_requests_to_non_authorities_when_observer_enabled() { - let (val, _) = GossipValidator::::new( - config(), - voter_set_state(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config(), voter_set_state(), None, None); // the validator starts at set id 1. 
val.note_set(SetId(1), Vec::new(), |_, _| {}); @@ -2268,17 +2168,16 @@ mod tests { let peer_authority = PeerId::random(); let peer_full = PeerId::random(); - val.inner.write().peers.new_peer(peer_authority.clone(), ObservedRole::Authority); + val.inner + .write() + .peers + .new_peer(peer_authority.clone(), ObservedRole::Authority); val.inner.write().peers.new_peer(peer_full.clone(), ObservedRole::Full); let import_neighbor_message = |peer| { let (_, _, catch_up_request, _) = val.inner.write().import_neighbor_message( &peer, - NeighborPacket { - round: Round(42), - set_id: SetId(1), - commit_finalized_height: 50, - }, + NeighborPacket { round: Round(42), set_id: SetId(1), commit_finalized_height: 50 }, ); catch_up_request @@ -2314,12 +2213,7 @@ mod tests { c }; - let (val, _) = GossipValidator::::new( - config, - voter_set_state(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config, voter_set_state(), None, None); // the validator starts at set id 1. val.note_set(SetId(1), Vec::new(), |_, _| {}); @@ -2331,11 +2225,7 @@ mod tests { let (_, _, catch_up_request, _) = val.inner.write().import_neighbor_message( &peer_full, - NeighborPacket { - round: Round(42), - set_id: SetId(1), - commit_finalized_height: 50, - }, + NeighborPacket { round: Round(42), set_id: SetId(1), commit_finalized_height: 50 }, ); // importing a neighbor message from a peer in the same set in a later @@ -2354,12 +2244,7 @@ mod tests { #[test] fn doesnt_expire_next_round_messages() { // NOTE: this is a regression test - let (val, _) = GossipValidator::::new( - config(), - voter_set_state(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config(), voter_set_state(), None, None); // the validator starts at set id 1. 
val.note_set(SetId(1), Vec::new(), |_, _| {}); @@ -2373,12 +2258,7 @@ mod tests { // we accept messages from rounds 9, 10 and 11 // therefore neither of those should be considered expired for round in &[9, 10, 11] { - assert!( - !is_expired( - crate::communication::round_topic::(*round, 1), - &[], - ) - ) + assert!(!is_expired(crate::communication::round_topic::(*round, 1), &[],)) } } @@ -2388,12 +2268,7 @@ mod tests { config.gossip_duration = Duration::from_secs(300); // Set to high value to prevent test race let round_duration = config.gossip_duration * ROUND_DURATION; - let (val, _) = GossipValidator::::new( - config, - voter_set_state(), - None, - None, - ); + let (val, _) = GossipValidator::::new(config, voter_set_state(), None, None); // the validator start at set id 0 val.note_set(SetId(0), Vec::new(), |_, _| {}); @@ -2411,10 +2286,7 @@ mod tests { .peers .new_peer(authorities[i].clone(), ObservedRole::Authority); - val.inner - .write() - .peers - .new_peer(full_nodes[i].clone(), ObservedRole::Full); + val.inner.write().peers.new_peer(full_nodes[i].clone(), ObservedRole::Full); } let test = |rounds_elapsed, peers| { @@ -2458,11 +2330,7 @@ mod tests { sum / n } - let all_peers = authorities - .iter() - .chain(full_nodes.iter()) - .cloned() - .collect(); + let all_peers = authorities.iter().chain(full_nodes.iter()).cloned().collect(); // on the first attempt we will only gossip to 4 peers, either // authorities or full nodes, but we'll guarantee that half of those @@ -2497,10 +2365,7 @@ mod tests { // add a new light client as peer let light_peer = PeerId::random(); - val.inner - .write() - .peers - .new_peer(light_peer.clone(), ObservedRole::Light); + val.inner.write().peers.new_peer(light_peer.clone(), ObservedRole::Light); assert!(!val.message_allowed()( &light_peer, @@ -2529,11 +2394,7 @@ mod tests { .peers .update_peer_state( &light_peer, - NeighborPacket { - round: Round(1), - set_id: SetId(0), - commit_finalized_height: 1, - }, + NeighborPacket { round: 
Round(1), set_id: SetId(0), commit_finalized_height: 1 }, ) .unwrap(); @@ -2576,30 +2437,20 @@ mod tests { // add a new peer at set id 1 let peer1 = PeerId::random(); - val.inner - .write() - .peers - .new_peer(peer1.clone(), ObservedRole::Authority); + val.inner.write().peers.new_peer(peer1.clone(), ObservedRole::Authority); val.inner .write() .peers .update_peer_state( &peer1, - NeighborPacket { - round: Round(1), - set_id: SetId(1), - commit_finalized_height: 1, - }, + NeighborPacket { round: Round(1), set_id: SetId(1), commit_finalized_height: 1 }, ) .unwrap(); // peer2 will default to set id 0 let peer2 = PeerId::random(); - val.inner - .write() - .peers - .new_peer(peer2.clone(), ObservedRole::Authority); + val.inner.write().peers.new_peer(peer2.clone(), ObservedRole::Authority); // create a commit for round 1 of set id 1 // targeting a block at height 2 @@ -2677,22 +2528,15 @@ mod tests { // a commit message for round 1 that finalizes the same height as we // have observed previously should not be expired - assert!(!message_expired( - crate::communication::global_topic::(1), - &commit(1, 1, 2), - )); + assert!( + !message_expired(crate::communication::global_topic::(1), &commit(1, 1, 2),) + ); // it should be expired if it is for a lower block - assert!(message_expired( - crate::communication::global_topic::(1), - &commit(1, 1, 1), - )); + assert!(message_expired(crate::communication::global_topic::(1), &commit(1, 1, 1),)); // or the same block height but from the previous round - assert!(message_expired( - crate::communication::global_topic::(1), - &commit(0, 1, 2), - )); + assert!(message_expired(crate::communication::global_topic::(1), &commit(0, 1, 2),)); } #[test] diff --git a/client/finality-grandpa/src/communication/mod.rs b/client/finality-grandpa/src/communication/mod.rs index 45bc72223e4b5..d0f6700f8aaf7 100644 --- a/client/finality-grandpa/src/communication/mod.rs +++ b/client/finality-grandpa/src/communication/mod.rs @@ -29,37 +29,36 @@ //! 
In the future, there will be a fallback for allowing sending the same message //! under certain conditions that are used to un-stick the protocol. -use futures::{prelude::*, channel::mpsc}; +use futures::{channel::mpsc, prelude::*}; use log::{debug, trace}; use parking_lot::Mutex; use prometheus_endpoint::Registry; -use std::{pin::Pin, sync::Arc, task::{Context, Poll}}; +use std::{ + pin::Pin, + sync::Arc, + task::{Context, Poll}, +}; -use sp_keystore::SyncCryptoStorePtr; -use finality_grandpa::Message::{Prevote, Precommit, PrimaryPropose}; -use finality_grandpa::{voter, voter_set::VoterSet}; +use finality_grandpa::{ + voter, + voter_set::VoterSet, + Message::{Precommit, Prevote, PrimaryPropose}, +}; +use parity_scale_codec::{Decode, Encode}; use sc_network::{NetworkService, ReputationChange}; use sc_network_gossip::{GossipEngine, Network as GossipNetwork}; -use parity_scale_codec::{Encode, Decode}; -use sp_runtime::traits::{Block as BlockT, Hash as HashT, Header as HeaderT, NumberFor}; use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_DEBUG, CONSENSUS_INFO}; +use sp_keystore::SyncCryptoStorePtr; +use sp_runtime::traits::{Block as BlockT, Hash as HashT, Header as HeaderT, NumberFor}; use crate::{ - CatchUp, Commit, CommunicationIn, CommunicationOutH, - CompactCommit, Error, Message, SignedMessage, + environment::HasVoted, CatchUp, Commit, CommunicationIn, CommunicationOutH, CompactCommit, + Error, Message, SignedMessage, }; -use crate::environment::HasVoted; use gossip::{ - FullCatchUpMessage, - FullCommitMessage, - GossipMessage, - GossipValidator, - PeerReport, - VoteMessage, -}; -use sp_finality_grandpa::{ - AuthorityId, AuthoritySignature, SetId as SetIdNumber, RoundNumber, + FullCatchUpMessage, FullCommitMessage, GossipMessage, GossipValidator, PeerReport, VoteMessage, }; +use sp_finality_grandpa::{AuthorityId, AuthoritySignature, RoundNumber, SetId as SetIdNumber}; use sp_utils::mpsc::TracingUnboundedReceiver; pub mod gossip; @@ -89,11 +88,13 @@ mod 
cost { pub(super) const INVALID_CATCH_UP: Rep = Rep::new(-5000, "Grandpa: Invalid catch-up"); pub(super) const INVALID_COMMIT: Rep = Rep::new(-5000, "Grandpa: Invalid commit"); pub(super) const OUT_OF_SCOPE_MESSAGE: Rep = Rep::new(-500, "Grandpa: Out-of-scope message"); - pub(super) const CATCH_UP_REQUEST_TIMEOUT: Rep = Rep::new(-200, "Grandpa: Catch-up request timeout"); + pub(super) const CATCH_UP_REQUEST_TIMEOUT: Rep = + Rep::new(-200, "Grandpa: Catch-up request timeout"); // cost of answering a catch up request pub(super) const CATCH_UP_REPLY: Rep = Rep::new(-200, "Grandpa: Catch-up reply"); - pub(super) const HONEST_OUT_OF_SCOPE_CATCH_UP: Rep = Rep::new(-200, "Grandpa: Out-of-scope catch-up"); + pub(super) const HONEST_OUT_OF_SCOPE_CATCH_UP: Rep = + Rep::new(-200, "Grandpa: Out-of-scope catch-up"); } // benefit scalars for reporting peers. @@ -144,14 +145,25 @@ pub trait Network: GossipNetwork + Clone + Send + 'static /// If the given vector of peers is empty then the underlying implementation /// should make a best effort to fetch the block from any peers it is /// connected to (NOTE: this assumption will change in the future #3629). 
- fn set_sync_fork_request(&self, peers: Vec, hash: Block::Hash, number: NumberFor); + fn set_sync_fork_request( + &self, + peers: Vec, + hash: Block::Hash, + number: NumberFor, + ); } -impl Network for Arc> where +impl Network for Arc> +where B: BlockT, H: sc_network::ExHashT, { - fn set_sync_fork_request(&self, peers: Vec, hash: B::Hash, number: NumberFor) { + fn set_sync_fork_request( + &self, + peers: Vec, + hash: B::Hash, + number: NumberFor, + ) { NetworkService::set_sync_fork_request(self, peers, hash, number) } } @@ -210,12 +222,8 @@ impl> NetworkBridge { prometheus_registry: Option<&Registry>, telemetry: Option, ) -> Self { - let (validator, report_stream) = GossipValidator::new( - config, - set_state.clone(), - prometheus_registry, - telemetry.clone(), - ); + let (validator, report_stream) = + GossipValidator::new(config, set_state.clone(), prometheus_registry, telemetry.clone()); let validator = Arc::new(validator); let gossip_engine = Arc::new(Mutex::new(GossipEngine::new( @@ -239,18 +247,13 @@ impl> NetworkBridge { validator.note_round(Round(round.number), |_, _| {}); for signed in round.votes.iter() { - let message = gossip::GossipMessage::Vote( - gossip::VoteMessage:: { - message: signed.clone(), - round: Round(round.number), - set_id: SetId(set_id), - } - ); + let message = gossip::GossipMessage::Vote(gossip::VoteMessage:: { + message: signed.clone(), + round: Round(round.number), + set_id: SetId(set_id), + }); - gossip_engine.lock().register_gossip_message( - topic, - message.encode(), - ); + gossip_engine.lock().register_gossip_message(topic, message.encode()); } trace!(target: "afg", @@ -263,7 +266,8 @@ impl> NetworkBridge { } } - let (neighbor_packet_worker, neighbor_packet_sender) = periodic::NeighborPacketWorker::new(); + let (neighbor_packet_worker, neighbor_packet_sender) = + periodic::NeighborPacketWorker::new(); NetworkBridge { service, @@ -277,12 +281,7 @@ impl> NetworkBridge { } /// Note the beginning of a new round to the 
`GossipValidator`. - pub(crate) fn note_round( - &self, - round: Round, - set_id: SetId, - voters: &VoterSet, - ) { + pub(crate) fn note_round(&self, round: Round, set_id: SetId, voters: &VoterSet) { // is a no-op if currently in that set. self.validator.note_set( set_id, @@ -290,10 +289,8 @@ impl> NetworkBridge { |to, neighbor| self.neighbor_sender.send(to, neighbor), ); - self.validator.note_round( - round, - |to, neighbor| self.neighbor_sender.send(to, neighbor), - ); + self.validator + .note_round(round, |to, neighbor| self.neighbor_sender.send(to, neighbor)); } /// Get a stream of signature-checked round messages from the network as well as a sink for round messages to the @@ -305,15 +302,8 @@ impl> NetworkBridge { set_id: SetId, voters: Arc>, has_voted: HasVoted, - ) -> ( - impl Stream> + Unpin, - OutgoingMessages, - ) { - self.note_round( - round, - set_id, - &*voters, - ); + ) -> (impl Stream> + Unpin, OutgoingMessages) { + self.note_round(round, set_id, &*voters); let keystore = keystore.and_then(|ks| { let id = ks.local_id(); @@ -326,20 +316,20 @@ impl> NetworkBridge { let topic = round_topic::(round.0, set_id.0); let telemetry = self.telemetry.clone(); - let incoming = self.gossip_engine.lock().messages_for(topic) - .filter_map(move |notification| { + let incoming = + self.gossip_engine.lock().messages_for(topic).filter_map(move |notification| { let decoded = GossipMessage::::decode(&mut ¬ification.message[..]); match decoded { Err(ref e) => { debug!(target: "afg", "Skipping malformed message {:?}: {}", notification, e); future::ready(None) - } + }, Ok(GossipMessage::Vote(msg)) => { // check signature. 
if !voters.contains(&msg.message.id) { debug!(target: "afg", "Skipping message from unknown voter {}", msg.message.id); - return future::ready(None); + return future::ready(None) } if voters.len().get() <= TELEMETRY_VOTERS_LIMIT { @@ -378,11 +368,11 @@ impl> NetworkBridge { } future::ready(Some(msg.message)) - } + }, _ => { debug!(target: "afg", "Skipping unknown message type"); future::ready(None) - } + }, } }); @@ -458,7 +448,7 @@ impl> NetworkBridge { &self, peers: Vec, hash: B::Hash, - number: NumberFor + number: NumberFor, ) { Network::set_sync_fork_request(&self.service, peers, hash, number) } @@ -473,9 +463,10 @@ impl> Future for NetworkBridge { Poll::Ready(Some((to, packet))) => { self.gossip_engine.lock().send_message(to, packet.encode()); }, - Poll::Ready(None) => return Poll::Ready( - Err(Error::Network("Neighbor packet worker stream closed.".into())) - ), + Poll::Ready(None) => + return Poll::Ready(Err(Error::Network( + "Neighbor packet worker stream closed.".into(), + ))), Poll::Pending => break, } } @@ -485,17 +476,17 @@ impl> Future for NetworkBridge { Poll::Ready(Some(PeerReport { who, cost_benefit })) => { self.gossip_engine.lock().report(who, cost_benefit); }, - Poll::Ready(None) => return Poll::Ready( - Err(Error::Network("Gossip validator report stream closed.".into())) - ), + Poll::Ready(None) => + return Poll::Ready(Err(Error::Network( + "Gossip validator report stream closed.".into(), + ))), Poll::Pending => break, } } match self.gossip_engine.lock().poll_unpin(cx) { - Poll::Ready(()) => return Poll::Ready( - Err(Error::Network("Gossip engine future finished.".into())) - ), + Poll::Ready(()) => + return Poll::Ready(Err(Error::Network("Gossip engine future finished.".into()))), Poll::Pending => {}, } @@ -513,18 +504,14 @@ fn incoming_global( ) -> impl Stream> { let process_commit = { let telemetry = telemetry.clone(); - move | - msg: FullCommitMessage, - mut notification: sc_network_gossip::TopicNotification, - gossip_engine: &Arc>>, - 
gossip_validator: &Arc>, - voters: &VoterSet, - | { + move |msg: FullCommitMessage, + mut notification: sc_network_gossip::TopicNotification, + gossip_engine: &Arc>>, + gossip_validator: &Arc>, + voters: &VoterSet| { if voters.len().get() <= TELEMETRY_VOTERS_LIMIT { let precommits_signed_by: Vec = - msg.message.auth_data.iter().map(move |(_, a)| { - format!("{}", a) - }).collect(); + msg.message.auth_data.iter().map(move |(_, a)| format!("{}", a)).collect(); telemetry!( telemetry; @@ -547,7 +534,7 @@ fn incoming_global( gossip_engine.lock().report(who, cost); } - return None; + return None } let round = msg.round; @@ -570,13 +557,13 @@ fn incoming_global( ); gossip_engine.lock().gossip_message(topic, notification.message.clone(), false); - } + }, voter::CommitProcessingOutcome::Bad(_) => { // report peer and do not gossip. if let Some(who) = notification.sender.take() { gossip_engine.lock().report(who, cost::INVALID_COMMIT); } - } + }, }; let cb = voter::Callback::Work(Box::new(cb)); @@ -585,27 +572,21 @@ fn incoming_global( } }; - let process_catch_up = move | - msg: FullCatchUpMessage, - mut notification: sc_network_gossip::TopicNotification, - gossip_engine: &Arc>>, - gossip_validator: &Arc>, - voters: &VoterSet, - | { + let process_catch_up = move |msg: FullCatchUpMessage, + mut notification: sc_network_gossip::TopicNotification, + gossip_engine: &Arc>>, + gossip_validator: &Arc>, + voters: &VoterSet| { let gossip_validator = gossip_validator.clone(); let gossip_engine = gossip_engine.clone(); - if let Err(cost) = check_catch_up::( - &msg.message, - voters, - msg.set_id, - telemetry.clone(), - ) { + if let Err(cost) = check_catch_up::(&msg.message, voters, msg.set_id, telemetry.clone()) + { if let Some(who) = notification.sender { gossip_engine.lock().report(who, cost); } - return None; + return None } let cb = move |outcome| { @@ -624,7 +605,10 @@ fn incoming_global( Some(voter::CommunicationIn::CatchUp(msg.message, cb)) }; - 
gossip_engine.clone().lock().messages_for(topic) + gossip_engine + .clone() + .lock() + .messages_for(topic) .filter_map(|notification| { // this could be optimized by decoding piecewise. let decoded = GossipMessage::::decode(&mut ¬ification.message[..]); @@ -642,7 +626,7 @@ fn incoming_global( _ => { debug!(target: "afg", "Skipping unknown message type"); None - } + }, }) }) } @@ -688,15 +672,15 @@ pub(crate) struct OutgoingMessages { impl Unpin for OutgoingMessages {} -impl Sink> for OutgoingMessages -{ +impl Sink> for OutgoingMessages { type Error = Error; fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll> { - Sink::poll_ready(Pin::new(&mut self.sender), cx) - .map(|elem| { elem.map_err(|e| { + Sink::poll_ready(Pin::new(&mut self.sender), cx).map(|elem| { + elem.map_err(|e| { Error::Network(format!("Failed to poll_ready channel sender: {:?}", e)) - })}) + }) + }) } fn start_send(mut self: Pin<&mut Self>, mut msg: Message) -> Result<(), Self::Error> { @@ -725,11 +709,13 @@ impl Sink> for OutgoingMessages keystore.local_id().clone(), self.round, self.set_id, - ).ok_or_else( - || Error::Signing(format!( - "Failed to sign GRANDPA vote for round {} targetting {:?}", self.round, target_hash + ) + .ok_or_else(|| { + Error::Signing(format!( + "Failed to sign GRANDPA vote for round {} targetting {:?}", + self.round, target_hash )) - )?; + })?; let message = GossipMessage::Vote(VoteMessage:: { message: signed.clone(), @@ -762,7 +748,7 @@ impl Sink> for OutgoingMessages // forward the message to the inner sender. 
return self.sender.start_send(signed).map_err(|e| { Error::Network(format!("Failed to start_send on channel sender: {:?}", e)) - }); + }) }; Ok(()) @@ -773,10 +759,11 @@ impl Sink> for OutgoingMessages } fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll> { - Sink::poll_close(Pin::new(&mut self.sender), cx) - .map(|elem| { elem.map_err(|e| { + Sink::poll_close(Pin::new(&mut self.sender), cx).map(|elem| { + elem.map_err(|e| { Error::Network(format!("Failed to poll_close channel sender: {:?}", e)) - })}) + }) + }) } } @@ -799,23 +786,22 @@ fn check_compact_commit( if let Some(weight) = voters.get(id).map(|info| info.weight()) { total_weight += weight.get(); if total_weight > full_threshold { - return Err(cost::MALFORMED_COMMIT); + return Err(cost::MALFORMED_COMMIT) } } else { debug!(target: "afg", "Skipping commit containing unknown voter {}", id); - return Err(cost::MALFORMED_COMMIT); + return Err(cost::MALFORMED_COMMIT) } } if total_weight < voters.threshold().get() { - return Err(cost::MALFORMED_COMMIT); + return Err(cost::MALFORMED_COMMIT) } // check signatures on all contained precommits. let mut buf = Vec::new(); - for (i, (precommit, &(ref sig, ref id))) in msg.precommits.iter() - .zip(&msg.auth_data) - .enumerate() + for (i, (precommit, &(ref sig, ref id))) in + msg.precommits.iter().zip(&msg.auth_data).enumerate() { use crate::communication::gossip::Misbehavior; use finality_grandpa::Message as GrandpaMessage; @@ -839,9 +825,10 @@ fn check_compact_commit( signatures_checked: i as i32, blocks_loaded: 0, equivocations_caught: 0, - }.cost(); + } + .cost(); - return Err(cost); + return Err(cost) } } @@ -863,7 +850,7 @@ fn check_catch_up( // check total weight is not out of range for a set of votes. 
fn check_weight<'a>( voters: &'a VoterSet, - votes: impl Iterator, + votes: impl Iterator, full_threshold: u64, ) -> Result<(), ReputationChange> { let mut total_weight = 0; @@ -872,32 +859,24 @@ fn check_catch_up( if let Some(weight) = voters.get(&id).map(|info| info.weight()) { total_weight += weight.get(); if total_weight > full_threshold { - return Err(cost::MALFORMED_CATCH_UP); + return Err(cost::MALFORMED_CATCH_UP) } } else { debug!(target: "afg", "Skipping catch up message containing unknown voter {}", id); - return Err(cost::MALFORMED_CATCH_UP); + return Err(cost::MALFORMED_CATCH_UP) } } if total_weight < voters.threshold().get() { - return Err(cost::MALFORMED_CATCH_UP); + return Err(cost::MALFORMED_CATCH_UP) } Ok(()) } - check_weight( - voters, - msg.prevotes.iter().map(|vote| &vote.id), - full_threshold, - )?; + check_weight(voters, msg.prevotes.iter().map(|vote| &vote.id), full_threshold)?; - check_weight( - voters, - msg.precommits.iter().map(|vote| &vote.id), - full_threshold, - )?; + check_weight(voters, msg.precommits.iter().map(|vote| &vote.id), full_threshold)?; fn check_signatures<'a, B, I>( messages: I, @@ -906,9 +885,10 @@ fn check_catch_up( mut signatures_checked: usize, buf: &mut Vec, telemetry: Option, - ) -> Result where + ) -> Result + where B: BlockT, - I: Iterator, &'a AuthorityId, &'a AuthoritySignature)>, + I: Iterator, &'a AuthorityId, &'a AuthoritySignature)>, { use crate::communication::gossip::Misbehavior; @@ -916,12 +896,7 @@ fn check_catch_up( signatures_checked += 1; if !sp_finality_grandpa::check_message_signature_with_buffer( - &msg, - id, - sig, - round, - set_id, - buf, + &msg, id, sig, round, set_id, buf, ) { debug!(target: "afg", "Bad catch up message signature {}", id); telemetry!( @@ -933,9 +908,10 @@ fn check_catch_up( let cost = Misbehavior::BadCatchUpMessage { signatures_checked: signatures_checked as i32, - }.cost(); + } + .cost(); - return Err(cost); + return Err(cost) } } @@ -959,7 +935,11 @@ fn check_catch_up( // 
check signatures on all contained precommits. let _ = check_signatures::( msg.precommits.iter().map(|vote| { - (finality_grandpa::Message::Precommit(vote.precommit.clone()), &vote.id, &vote.signature) + ( + finality_grandpa::Message::Precommit(vote.precommit.clone()), + &vote.id, + &vote.signature, + ) }), msg.round_number, set_id.0, @@ -1009,9 +989,12 @@ impl Sink<(RoundNumber, Commit)> for CommitsOut { Poll::Ready(Ok(())) } - fn start_send(self: Pin<&mut Self>, input: (RoundNumber, Commit)) -> Result<(), Self::Error> { + fn start_send( + self: Pin<&mut Self>, + input: (RoundNumber, Commit), + ) -> Result<(), Self::Error> { if !self.is_voter { - return Ok(()); + return Ok(()) } let (round, commit) = input; @@ -1024,7 +1007,9 @@ impl Sink<(RoundNumber, Commit)> for CommitsOut { "target_number" => ?commit.target_number, "target_hash" => ?commit.target_hash, ); - let (precommits, auth_data) = commit.precommits.into_iter() + let (precommits, auth_data) = commit + .precommits + .into_iter() .map(|signed| (signed.precommit, (signed.signature, signed.id))) .unzip(); @@ -1032,7 +1017,7 @@ impl Sink<(RoundNumber, Commit)> for CommitsOut { target_hash: commit.target_hash, target_number: commit.target_number, precommits, - auth_data + auth_data, }; let message = GossipMessage::Commit(FullCommitMessage:: { diff --git a/client/finality-grandpa/src/communication/periodic.rs b/client/finality-grandpa/src/communication/periodic.rs index 377882ed5dd2d..a3c7b9380b25f 100644 --- a/client/finality-grandpa/src/communication/periodic.rs +++ b/client/finality-grandpa/src/communication/periodic.rs @@ -18,15 +18,19 @@ //! Periodic rebroadcast of neighbor packets. 
+use futures::{future::FutureExt as _, prelude::*, ready, stream::Stream}; use futures_timer::Delay; -use futures::{future::{FutureExt as _}, prelude::*, ready, stream::Stream}; use log::debug; -use std::{pin::Pin, task::{Context, Poll}, time::Duration}; use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use std::{ + pin::Pin, + task::{Context, Poll}, + time::Duration, +}; +use super::gossip::{GossipMessage, NeighborPacket}; use sc_network::PeerId; -use sp_runtime::traits::{NumberFor, Block as BlockT}; -use super::gossip::{NeighborPacket, GossipMessage}; +use sp_runtime::traits::{Block as BlockT, NumberFor}; // How often to rebroadcast, in cases where no new packets are created. const REBROADCAST_AFTER: Duration = Duration::from_secs(2 * 60); @@ -34,7 +38,7 @@ const REBROADCAST_AFTER: Duration = Duration::from_secs(2 * 60); /// A sender used to send neighbor packets to a background job. #[derive(Clone)] pub(super) struct NeighborPacketSender( - TracingUnboundedSender<(Vec, NeighborPacket>)> + TracingUnboundedSender<(Vec, NeighborPacket>)>, ); impl NeighborPacketSender { @@ -63,24 +67,20 @@ pub(super) struct NeighborPacketWorker { impl Unpin for NeighborPacketWorker {} impl NeighborPacketWorker { - pub(super) fn new() -> (Self, NeighborPacketSender){ - let (tx, rx) = tracing_unbounded::<(Vec, NeighborPacket>)> - ("mpsc_grandpa_neighbor_packet_worker"); + pub(super) fn new() -> (Self, NeighborPacketSender) { + let (tx, rx) = tracing_unbounded::<(Vec, NeighborPacket>)>( + "mpsc_grandpa_neighbor_packet_worker", + ); let delay = Delay::new(REBROADCAST_AFTER); - (NeighborPacketWorker { - last: None, - delay, - rx, - }, NeighborPacketSender(tx)) + (NeighborPacketWorker { last: None, delay, rx }, NeighborPacketSender(tx)) } } -impl Stream for NeighborPacketWorker { +impl Stream for NeighborPacketWorker { type Item = (Vec, GossipMessage); - fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll> - { + fn poll_next(mut self: 
Pin<&mut Self>, cx: &mut Context) -> Poll> { let this = &mut *self; match this.rx.poll_next_unpin(cx) { Poll::Ready(None) => return Poll::Ready(None), @@ -88,8 +88,8 @@ impl Stream for NeighborPacketWorker { this.delay.reset(REBROADCAST_AFTER); this.last = Some((to.clone(), packet.clone())); - return Poll::Ready(Some((to, GossipMessage::::from(packet)))); - } + return Poll::Ready(Some((to, GossipMessage::::from(packet)))) + }, // Don't return yet, maybe the timer fired. Poll::Pending => {}, }; @@ -104,10 +104,10 @@ impl Stream for NeighborPacketWorker { // // Note: In case poll_unpin is called after the resetted delay fires again, this // will drop one tick. Deemed as very unlikely and also not critical. - while let Poll::Ready(()) = this.delay.poll_unpin(cx) {}; + while let Poll::Ready(()) = this.delay.poll_unpin(cx) {} if let Some((ref to, ref packet)) = this.last { - return Poll::Ready(Some((to.clone(), GossipMessage::::from(packet.clone())))); + return Poll::Ready(Some((to.clone(), GossipMessage::::from(packet.clone())))) } Poll::Pending diff --git a/client/finality-grandpa/src/communication/tests.rs b/client/finality-grandpa/src/communication/tests.rs index ec8c97dfe3e8a..868186bbf0fdd 100644 --- a/client/finality-grandpa/src/communication/tests.rs +++ b/client/finality-grandpa/src/communication/tests.rs @@ -18,21 +18,26 @@ //! Tests for the communication portion of the GRANDPA crate. 
-use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use super::{ + gossip::{self, GossipValidator}, + Round, SetId, VoterSet, +}; +use crate::{communication::GRANDPA_PROTOCOL_NAME, environment::SharedVoterSetState}; use futures::prelude::*; +use parity_scale_codec::Encode; use sc_network::{config::Role, Event as NetworkEvent, ObservedRole, PeerId}; -use sc_network_test::{Block, Hash}; use sc_network_gossip::Validator; -use std::sync::Arc; +use sc_network_test::{Block, Hash}; +use sp_finality_grandpa::AuthorityList; use sp_keyring::Ed25519Keyring; -use parity_scale_codec::Encode; use sp_runtime::traits::NumberFor; -use std::{borrow::Cow, pin::Pin, task::{Context, Poll}}; -use crate::communication::GRANDPA_PROTOCOL_NAME; -use crate::environment::SharedVoterSetState; -use sp_finality_grandpa::AuthorityList; -use super::gossip::{self, GossipValidator}; -use super::{VoterSet, Round, SetId}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use std::{ + borrow::Cow, + pin::Pin, + sync::Arc, + task::{Context, Poll}, +}; #[derive(Debug)] pub(crate) enum Event { @@ -79,13 +84,14 @@ impl super::Network for TestNetwork { _peers: Vec, _hash: Hash, _number: NumberFor, - ) {} + ) { + } } impl sc_network_gossip::ValidatorContext for TestNetwork { - fn broadcast_topic(&mut self, _: Hash, _: bool) { } + fn broadcast_topic(&mut self, _: Hash, _: bool) {} - fn broadcast_message(&mut self, _: Hash, _: Vec, _: bool) { } + fn broadcast_message(&mut self, _: Hash, _: Vec, _: bool) {} fn send_message(&mut self, who: &sc_network::PeerId, data: Vec) { >::write_notification( @@ -96,7 +102,7 @@ impl sc_network_gossip::ValidatorContext for TestNetwork { ); } - fn send_topic(&mut self, _: &sc_network::PeerId, _: Hash, _: bool) { } + fn send_topic(&mut self, _: &sc_network::PeerId, _: Hash, _: bool) {} } pub(crate) struct Tester { @@ -107,15 +113,17 @@ pub(crate) struct Tester { impl Tester { fn 
filter_network_events(self, mut pred: F) -> impl Future - where F: FnMut(Event) -> bool + where + F: FnMut(Event) -> bool, { let mut s = Some(self); futures::future::poll_fn(move |cx| loop { match Stream::poll_next(Pin::new(&mut s.as_mut().unwrap().events), cx) { Poll::Ready(None) => panic!("concluded early"), - Poll::Ready(Some(item)) => if pred(item) { - return Poll::Ready(s.take().unwrap()) - }, + Poll::Ready(Some(item)) => + if pred(item) { + return Poll::Ready(s.take().unwrap()) + }, Poll::Pending => return Poll::Pending, } }) @@ -145,8 +153,7 @@ fn config() -> crate::Config { // dummy voter set state fn voter_set_state() -> SharedVoterSetState { - use crate::authorities::AuthoritySet; - use crate::environment::VoterSetState; + use crate::{authorities::AuthoritySet, environment::VoterSetState}; use finality_grandpa::round::State as RoundState; use sp_core::{crypto::Public, H256}; use sp_finality_grandpa::AuthorityId; @@ -157,20 +164,13 @@ fn voter_set_state() -> SharedVoterSetState { let voters = vec![(AuthorityId::from_slice(&[1; 32]), 1)]; let voters = AuthoritySet::genesis(voters).unwrap(); - let set_state = VoterSetState::live( - 0, - &voters, - base, - ); + let set_state = VoterSetState::live(0, &voters, base); set_state.into() } // needs to run in a tokio runtime. 
-pub(crate) fn make_test_network() -> ( - impl Future, - TestNetwork, -) { +pub(crate) fn make_test_network() -> (impl Future, TestNetwork) { let (tx, rx) = tracing_unbounded("test"); let net = TestNetwork { sender: tx }; @@ -185,13 +185,7 @@ pub(crate) fn make_test_network() -> ( } } - let bridge = super::NetworkBridge::new( - net.clone(), - config(), - voter_set_state(), - None, - None, - ); + let bridge = super::NetworkBridge::new(net.clone(), config(), voter_set_state(), None, None); ( futures::future::ready(Tester { @@ -204,19 +198,16 @@ pub(crate) fn make_test_network() -> ( } fn make_ids(keys: &[Ed25519Keyring]) -> AuthorityList { - keys.iter() - .map(|key| key.clone().public().into()) - .map(|id| (id, 1)) - .collect() + keys.iter().map(|key| key.clone().public().into()).map(|id| (id, 1)).collect() } struct NoopContext; impl sc_network_gossip::ValidatorContext for NoopContext { - fn broadcast_topic(&mut self, _: Hash, _: bool) { } - fn broadcast_message(&mut self, _: Hash, _: Vec, _: bool) { } - fn send_message(&mut self, _: &sc_network::PeerId, _: Vec) { } - fn send_topic(&mut self, _: &sc_network::PeerId, _: Hash, _: bool) { } + fn broadcast_topic(&mut self, _: Hash, _: bool) {} + fn broadcast_message(&mut self, _: Hash, _: Vec, _: bool) {} + fn send_message(&mut self, _: &sc_network::PeerId, _: Vec) {} + fn send_topic(&mut self, _: &sc_network::PeerId, _: Hash, _: bool) {} } #[test] @@ -232,9 +223,12 @@ fn good_commit_leads_to_relay() { let target_hash: Hash = [1; 32].into(); let target_number = 500; - let precommit = finality_grandpa::Precommit { target_hash: target_hash.clone(), target_number }; + let precommit = + finality_grandpa::Precommit { target_hash: target_hash.clone(), target_number }; let payload = sp_finality_grandpa::localized_payload( - round, set_id, &finality_grandpa::Message::Precommit(precommit.clone()) + round, + set_id, + &finality_grandpa::Message::Precommit(precommit.clone()), ); let mut precommits = Vec::new(); @@ -247,24 +241,21 
@@ fn good_commit_leads_to_relay() { auth_data.push((signature, public[i].0.clone())) } - finality_grandpa::CompactCommit { - target_hash, - target_number, - precommits, - auth_data, - } + finality_grandpa::CompactCommit { target_hash, target_number, precommits, auth_data } }; let encoded_commit = gossip::GossipMessage::::Commit(gossip::FullCommitMessage { round: Round(round), set_id: SetId(set_id), message: commit, - }).encode(); + }) + .encode(); let id = sc_network::PeerId::random(); let global_topic = super::global_topic::(set_id); - let test = make_test_network().0 + let test = make_test_network() + .0 .then(move |tester| { // register a peer. tester.gossip_validator.new_peer(&mut NoopContext, &id, ObservedRole::Full); @@ -272,7 +263,8 @@ fn good_commit_leads_to_relay() { }) .then(move |(tester, id)| { // start round, dispatch commit, and wait for broadcast. - let (commits_in, _) = tester.net_handle.global_communication(SetId(1), voter_set, false); + let (commits_in, _) = + tester.net_handle.global_communication(SetId(1), voter_set, false); { let (action, ..) 
= tester.gossip_validator.do_validate(&id, &encoded_commit[..]); @@ -301,7 +293,10 @@ fn good_commit_leads_to_relay() { let _ = sender.unbounded_send(NetworkEvent::NotificationsReceived { remote: sender_id.clone(), - messages: vec![(GRANDPA_PROTOCOL_NAME.into(), commit_to_send.clone().into())], + messages: vec![( + GRANDPA_PROTOCOL_NAME.into(), + commit_to_send.clone().into(), + )], }); // Add a random peer which will be the recipient of this message @@ -316,13 +311,11 @@ fn good_commit_leads_to_relay() { // Announce its local set has being on the current set id through a neighbor // packet, otherwise it won't be eligible to receive the commit let _ = { - let update = gossip::VersionedNeighborPacket::V1( - gossip::NeighborPacket { - round: Round(round), - set_id: SetId(set_id), - commit_finalized_height: 1, - } - ); + let update = gossip::VersionedNeighborPacket::V1(gossip::NeighborPacket { + round: Round(round), + set_id: SetId(set_id), + commit_finalized_height: 1, + }); let msg = gossip::GossipMessage::::Neighbor(update); @@ -333,31 +326,27 @@ fn good_commit_leads_to_relay() { }; true - } + }, _ => false, }); // when the commit comes in, we'll tell the callback it was good. - let handle_commit = commits_in.into_future() - .map(|(item, _)| { - match item.unwrap() { - finality_grandpa::voter::CommunicationIn::Commit(_, _, mut callback) => { - callback.run(finality_grandpa::voter::CommitProcessingOutcome::good()); - }, - _ => panic!("commit expected"), - } - }); + let handle_commit = commits_in.into_future().map(|(item, _)| match item.unwrap() { + finality_grandpa::voter::CommunicationIn::Commit(_, _, mut callback) => { + callback.run(finality_grandpa::voter::CommitProcessingOutcome::good()); + }, + _ => panic!("commit expected"), + }); // once the message is sent and commit is "handled" we should have // a repropagation event coming from the network. 
- let fut = future::join(send_message, handle_commit).then(move |(tester, ())| { - tester.filter_network_events(move |event| match event { - Event::WriteNotification(_, data) => { - data == encoded_commit - } - _ => false, + let fut = future::join(send_message, handle_commit) + .then(move |(tester, ())| { + tester.filter_network_events(move |event| match event { + Event::WriteNotification(_, data) => data == encoded_commit, + _ => false, + }) }) - }) .map(|_| ()); // Poll both the future sending and handling the commit, as well as the underlying @@ -382,9 +371,12 @@ fn bad_commit_leads_to_report() { let target_hash: Hash = [1; 32].into(); let target_number = 500; - let precommit = finality_grandpa::Precommit { target_hash: target_hash.clone(), target_number }; + let precommit = + finality_grandpa::Precommit { target_hash: target_hash.clone(), target_number }; let payload = sp_finality_grandpa::localized_payload( - round, set_id, &finality_grandpa::Message::Precommit(precommit.clone()) + round, + set_id, + &finality_grandpa::Message::Precommit(precommit.clone()), ); let mut precommits = Vec::new(); @@ -397,24 +389,21 @@ fn bad_commit_leads_to_report() { auth_data.push((signature, public[i].0.clone())) } - finality_grandpa::CompactCommit { - target_hash, - target_number, - precommits, - auth_data, - } + finality_grandpa::CompactCommit { target_hash, target_number, precommits, auth_data } }; let encoded_commit = gossip::GossipMessage::::Commit(gossip::FullCommitMessage { round: Round(round), set_id: SetId(set_id), message: commit, - }).encode(); + }) + .encode(); let id = sc_network::PeerId::random(); let global_topic = super::global_topic::(set_id); - let test = make_test_network().0 + let test = make_test_network() + .0 .map(move |tester| { // register a peer. 
tester.gossip_validator.new_peer(&mut NoopContext, &id, ObservedRole::Full); @@ -422,7 +411,8 @@ fn bad_commit_leads_to_report() { }) .then(move |(tester, id)| { // start round, dispatch commit, and wait for broadcast. - let (commits_in, _) = tester.net_handle.global_communication(SetId(1), voter_set, false); + let (commits_in, _) = + tester.net_handle.global_communication(SetId(1), voter_set, false); { let (action, ..) = tester.gossip_validator.do_validate(&id, &encoded_commit[..]); @@ -449,35 +439,35 @@ fn bad_commit_leads_to_report() { }); let _ = sender.unbounded_send(NetworkEvent::NotificationsReceived { remote: sender_id.clone(), - messages: vec![(GRANDPA_PROTOCOL_NAME.into(), commit_to_send.clone().into())], + messages: vec![( + GRANDPA_PROTOCOL_NAME.into(), + commit_to_send.clone().into(), + )], }); true - } + }, _ => false, }); // when the commit comes in, we'll tell the callback it was bad. - let handle_commit = commits_in.into_future() - .map(|(item, _)| { - match item.unwrap() { - finality_grandpa::voter::CommunicationIn::Commit(_, _, mut callback) => { - callback.run(finality_grandpa::voter::CommitProcessingOutcome::bad()); - }, - _ => panic!("commit expected"), - } - }); + let handle_commit = commits_in.into_future().map(|(item, _)| match item.unwrap() { + finality_grandpa::voter::CommunicationIn::Commit(_, _, mut callback) => { + callback.run(finality_grandpa::voter::CommitProcessingOutcome::bad()); + }, + _ => panic!("commit expected"), + }); // once the message is sent and commit is "handled" we should have // a report event coming from the network. 
- let fut = future::join(send_message, handle_commit).then(move |(tester, ())| { - tester.filter_network_events(move |event| match event { - Event::Report(who, cost_benefit) => { - who == id && cost_benefit == super::cost::INVALID_COMMIT - } - _ => false, + let fut = future::join(send_message, handle_commit) + .then(move |(tester, ())| { + tester.filter_network_events(move |event| match event { + Event::Report(who, cost_benefit) => + who == id && cost_benefit == super::cost::INVALID_COMMIT, + _ => false, + }) }) - }) .map(|_| ()); // Poll both the future sending and handling the commit, as well as the underlying @@ -508,7 +498,8 @@ fn peer_with_higher_view_leads_to_catch_up_request() { set_id: SetId(0), round: Round(10), commit_finalized_height: 50, - }).encode(), + }) + .encode(), ); // neighbor packets are always discard @@ -518,27 +509,23 @@ fn peer_with_higher_view_leads_to_catch_up_request() { } // a catch up request should be sent to the peer for round - 1 - tester.filter_network_events(move |event| match event { - Event::WriteNotification(peer, message) => { - assert_eq!( - peer, - id, - ); - - assert_eq!( - message, - gossip::GossipMessage::::CatchUpRequest( - gossip::CatchUpRequestMessage { - set_id: SetId(0), - round: Round(9), - } - ).encode(), - ); + tester + .filter_network_events(move |event| match event { + Event::WriteNotification(peer, message) => { + assert_eq!(peer, id,); + + assert_eq!( + message, + gossip::GossipMessage::::CatchUpRequest( + gossip::CatchUpRequestMessage { set_id: SetId(0), round: Round(9) } + ) + .encode(), + ); - true - }, - _ => false, - }) + true + }, + _ => false, + }) .map(|_| ()) }); diff --git a/client/finality-grandpa/src/environment.rs b/client/finality-grandpa/src/environment.rs index 964e199f90968..c39453b1c8bea 100644 --- a/client/finality-grandpa/src/environment.rs +++ b/client/finality-grandpa/src/environment.rs @@ -16,12 +16,14 @@ // You should have received a copy of the GNU General Public License // along with 
this program. If not, see . -use std::collections::{BTreeMap, HashMap}; -use std::iter::FromIterator; -use std::marker::PhantomData; -use std::pin::Pin; -use std::sync::Arc; -use std::time::Duration; +use std::{ + collections::{BTreeMap, HashMap}, + iter::FromIterator, + marker::PhantomData, + pin::Pin, + sync::Arc, + time::Duration, +}; use finality_grandpa::{ round::State as RoundState, voter, voter_set::VoterSet, BlockNumberOps, Error as GrandpaError, @@ -44,8 +46,10 @@ use sp_finality_grandpa::{ AuthorityId, AuthoritySignature, Equivocation, EquivocationProof, GrandpaApi, RoundNumber, SetId, GRANDPA_ENGINE_ID, }; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor, Zero}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT, NumberFor, Zero}, +}; use crate::{ authorities::{AuthoritySet, SharedAuthoritySet}, @@ -105,13 +109,11 @@ impl Encode for CompletedRounds { impl parity_scale_codec::EncodeLike for CompletedRounds {} impl Decode for CompletedRounds { - fn decode(value: &mut I) -> Result { + fn decode( + value: &mut I, + ) -> Result { <(Vec>, SetId, Vec)>::decode(value) - .map(|(rounds, set_id, voters)| CompletedRounds { - rounds, - set_id, - voters, - }) + .map(|(rounds, set_id, voters)| CompletedRounds { rounds, set_id, voters }) } } @@ -121,9 +123,7 @@ impl CompletedRounds { genesis: CompletedRound, set_id: SetId, voters: &AuthoritySet>, - ) - -> CompletedRounds - { + ) -> CompletedRounds { let mut rounds = Vec::with_capacity(NUM_LAST_COMPLETED_ROUNDS); rounds.push(genesis); @@ -137,13 +137,14 @@ impl CompletedRounds { } /// Iterate over all completed rounds. - pub fn iter(&self) -> impl Iterator> { + pub fn iter(&self) -> impl Iterator> { self.rounds.iter().rev() } /// Returns the last (latest) completed round. 
pub fn last(&self) -> &CompletedRound { - self.rounds.first() + self.rounds + .first() .expect("inner is never empty; always contains at least genesis; qed") } @@ -152,10 +153,11 @@ impl CompletedRounds { pub fn push(&mut self, completed_round: CompletedRound) { use std::cmp::Reverse; - match self.rounds.binary_search_by_key( - &Reverse(completed_round.number), - |completed_round| Reverse(completed_round.number), - ) { + match self + .rounds + .binary_search_by_key(&Reverse(completed_round.number), |completed_round| { + Reverse(completed_round.number) + }) { Ok(idx) => self.rounds[idx] = completed_round, Err(idx) => self.rounds.insert(idx, completed_round), }; @@ -215,37 +217,31 @@ impl VoterSetState { let mut current_rounds = CurrentRounds::new(); current_rounds.insert(1, HasVoted::No); - VoterSetState::Live { - completed_rounds, - current_rounds, - } + VoterSetState::Live { completed_rounds, current_rounds } } /// Returns the last completed rounds. pub(crate) fn completed_rounds(&self) -> CompletedRounds { match self { - VoterSetState::Live { completed_rounds, .. } => - completed_rounds.clone(), - VoterSetState::Paused { completed_rounds } => - completed_rounds.clone(), + VoterSetState::Live { completed_rounds, .. } => completed_rounds.clone(), + VoterSetState::Paused { completed_rounds } => completed_rounds.clone(), } } /// Returns the last completed round. pub(crate) fn last_completed_round(&self) -> CompletedRound { match self { - VoterSetState::Live { completed_rounds, .. } => - completed_rounds.last().clone(), - VoterSetState::Paused { completed_rounds } => - completed_rounds.last().clone(), + VoterSetState::Live { completed_rounds, .. } => completed_rounds.last().clone(), + VoterSetState::Paused { completed_rounds } => completed_rounds.last().clone(), } } /// Returns the voter set state validating that it includes the given round /// in current rounds and that the voter isn't paused. 
- pub fn with_current_round(&self, round: RoundNumber) - -> Result<(&CompletedRounds, &CurrentRounds), Error> - { + pub fn with_current_round( + &self, + round: RoundNumber, + ) -> Result<(&CompletedRounds, &CurrentRounds), Error> { if let VoterSetState::Live { completed_rounds, current_rounds } = self { if current_rounds.contains_key(&round) { Ok((completed_rounds, current_rounds)) @@ -284,10 +280,9 @@ impl HasVoted { /// Returns the proposal we should vote with (if any.) pub fn propose(&self) -> Option<&PrimaryPropose> { match self { - HasVoted::Yes(_, Vote::Propose(propose)) => - Some(propose), - HasVoted::Yes(_, Vote::Prevote(propose, _)) | HasVoted::Yes(_, Vote::Precommit(propose, _, _)) => - propose.as_ref(), + HasVoted::Yes(_, Vote::Propose(propose)) => Some(propose), + HasVoted::Yes(_, Vote::Prevote(propose, _)) | + HasVoted::Yes(_, Vote::Precommit(propose, _, _)) => propose.as_ref(), _ => None, } } @@ -295,8 +290,8 @@ impl HasVoted { /// Returns the prevote we should vote with (if any.) pub fn prevote(&self) -> Option<&Prevote> { match self { - HasVoted::Yes(_, Vote::Prevote(_, prevote)) | HasVoted::Yes(_, Vote::Precommit(_, prevote, _)) => - Some(prevote), + HasVoted::Yes(_, Vote::Prevote(_, prevote)) | + HasVoted::Yes(_, Vote::Precommit(_, prevote, _)) => Some(prevote), _ => None, } } @@ -304,8 +299,7 @@ impl HasVoted { /// Returns the precommit we should vote with (if any.) pub fn precommit(&self) -> Option<&Precommit> { match self { - HasVoted::Yes(_, Vote::Precommit(_, _, precommit)) => - Some(precommit), + HasVoted::Yes(_, Vote::Precommit(_, _, precommit)) => Some(precommit), _ => None, } } @@ -376,21 +370,21 @@ impl SharedVoterSetState { /// Return vote status information for the current round. pub(crate) fn has_voted(&self, round: RoundNumber) -> HasVoted { match &*self.inner.read() { - VoterSetState::Live { current_rounds, .. 
} => { - current_rounds.get(&round).and_then(|has_voted| match has_voted { - HasVoted::Yes(id, vote) => - Some(HasVoted::Yes(id.clone(), vote.clone())), + VoterSetState::Live { current_rounds, .. } => current_rounds + .get(&round) + .and_then(|has_voted| match has_voted { + HasVoted::Yes(id, vote) => Some(HasVoted::Yes(id.clone(), vote.clone())), _ => None, }) - .unwrap_or(HasVoted::No) - }, + .unwrap_or(HasVoted::No), _ => HasVoted::No, } } // NOTE: not exposed outside of this module intentionally. fn with(&self, f: F) -> R - where F: FnOnce(&mut VoterSetState) -> R + where + F: FnOnce(&mut VoterSetState) -> R, { f(&mut *self.inner.write()) } @@ -452,8 +446,9 @@ impl, SC, VR> Environment(&self, f: F) -> Result<(), Error> where - F: FnOnce(&VoterSetState) -> Result>, Error> + pub(crate) fn update_voter_set_state(&self, f: F) -> Result<(), Error> + where + F: FnOnce(&VoterSetState) -> Result>, Error>, { self.voter_set_state.with(|voter_set_state| { if let Some(set_state) = f(&voter_set_state)? { @@ -461,7 +456,9 @@ impl, SC, VR> Environment { - return Err(Error::Safety( - "Authority set change signalled at genesis.".to_string(), - )) - } + Some((_, n)) if n.is_zero() => + return Err(Error::Safety("Authority set change signalled at genesis.".to_string())), // the next set starts at `n` so the current one lasts until `n - 1`. if // `n` is later than the best block, then the current set is still live // at best block. @@ -538,14 +532,15 @@ where // its parent block is the last block in the current set *header.parent_hash() - } + }, // there is no pending change, the latest block for the current set is // the best block. 
None => best_block_hash, }; // generate key ownership proof at that block - let key_owner_proof = match self.client + let key_owner_proof = match self + .client .runtime_api() .generate_key_ownership_proof( &BlockId::Hash(current_set_latest_hash), @@ -557,15 +552,12 @@ where Some(proof) => proof, None => { debug!(target: "afg", "Equivocation offender is not part of the authority set."); - return Ok(()); - } + return Ok(()) + }, }; // submit equivocation report at **best** block - let equivocation_proof = EquivocationProof::new( - authority_set.set_id, - equivocation, - ); + let equivocation_proof = EquivocationProof::new(authority_set.set_id, equivocation); self.client .runtime_api() @@ -608,7 +600,9 @@ pub(crate) fn ancestry( where Client: HeaderMetadata, { - if base == block { return Err(GrandpaError::NotDescendent) } + if base == block { + return Err(GrandpaError::NotDescendent) + } let tree_route_res = sp_blockchain::tree_route(&**client, block, base); @@ -618,22 +612,17 @@ where debug!(target: "afg", "Encountered error computing ancestry between block {:?} and base {:?}: {:?}", block, base, e); - return Err(GrandpaError::NotDescendent); - } + return Err(GrandpaError::NotDescendent) + }, }; if tree_route.common_block().hash != base { - return Err(GrandpaError::NotDescendent); + return Err(GrandpaError::NotDescendent) } // skip one because our ancestry is meant to start from the parent of `block`, // and `tree_route` includes it. - Ok(tree_route - .retracted() - .iter() - .skip(1) - .map(|e| e.hash) - .collect()) + Ok(tree_route.retracted().iter().skip(1).map(|e| e.hash).collect()) } impl voter::Environment> @@ -699,7 +688,7 @@ where // before activating the new set. the `authority_set` is updated immediately thus // we restrict the voter based on that. 
if set_id != authority_set.set_id() { - return Ok(None); + return Ok(None) } best_chain_containing(block, client, authority_set, select_chain, voting_rule) @@ -718,13 +707,12 @@ where let local_id = local_authority_id(&self.voters, self.config.keystore.as_ref()); let has_voted = match self.voter_set_state.has_voted(round) { - HasVoted::Yes(id, vote) => { + HasVoted::Yes(id, vote) => if local_id.as_ref().map(|k| k == &id).unwrap_or(false) { HasVoted::Yes(id, vote) } else { HasVoted::No - } - }, + }, HasVoted::No => HasVoted::No, }; @@ -756,14 +744,17 @@ where // schedule incoming messages from the network to be held until // corresponding blocks are imported. - let incoming = Box::pin(UntilVoteTargetImported::new( - self.client.import_notification_stream(), - self.network.clone(), - self.client.clone(), - incoming, - "round", - None, - ).map_err(Into::into)); + let incoming = Box::pin( + UntilVoteTargetImported::new( + self.client.import_notification_stream(), + self.network.clone(), + self.client.clone(), + incoming, + "round", + None, + ) + .map_err(Into::into), + ); // schedule network message cleanup when sink drops. 
let outgoing = Box::pin(outgoing.sink_err_into()); @@ -789,18 +780,20 @@ where self.update_voter_set_state(|voter_set_state| { let (completed_rounds, current_rounds) = voter_set_state.with_current_round(round)?; - let current_round = current_rounds.get(&round) + let current_round = current_rounds + .get(&round) .expect("checked in with_current_round that key exists; qed."); if !current_round.can_propose() { // we've already proposed in this round (in a previous run), // ignore the given vote and don't update the voter set // state - return Ok(None); + return Ok(None) } let mut current_rounds = current_rounds.clone(); - let current_round = current_rounds.get_mut(&round) + let current_round = current_rounds + .get_mut(&round) .expect("checked previously that key exists; qed."); *current_round = HasVoted::Yes(local_id, Vote::Propose(propose)); @@ -849,7 +842,7 @@ where // we've already prevoted in this round (in a previous run), // ignore the given vote and don't update the voter set // state - return Ok(None); + return Ok(None) } // report to telemetry and prometheus @@ -858,7 +851,8 @@ where let propose = current_round.propose(); let mut current_rounds = current_rounds.clone(); - let current_round = current_rounds.get_mut(&round) + let current_round = current_rounds + .get_mut(&round) .expect("checked previously that key exists; qed."); *current_round = HasVoted::Yes(local_id, Vote::Prevote(propose.cloned(), prevote)); @@ -911,7 +905,7 @@ where // we've already precommitted in this round (in a previous run), // ignore the given vote and don't update the voter set // state - return Ok(None); + return Ok(None) } // report to telemetry and prometheus @@ -922,12 +916,13 @@ where HasVoted::Yes(_, Vote::Prevote(_, prevote)) => prevote, _ => { let msg = "Voter precommitting before prevoting."; - return Err(Error::Safety(msg.to_string())); - } + return Err(Error::Safety(msg.to_string())) + }, }; let mut current_rounds = current_rounds.clone(); - let current_round = 
current_rounds.get_mut(&round) + let current_round = current_rounds + .get_mut(&round) .expect("checked previously that key exists; qed."); *current_round = HasVoted::Yes( @@ -973,7 +968,7 @@ where (completed_rounds, current_rounds) } else { let msg = "Voter acting while in paused state."; - return Err(Error::Safety(msg.to_string())); + return Err(Error::Safety(msg.to_string())) }; let mut completed_rounds = completed_rounds.clone(); @@ -998,10 +993,7 @@ where current_rounds.insert(round + 1, HasVoted::No); } - let set_state = VoterSetState::::Live { - completed_rounds, - current_rounds, - }; + let set_state = VoterSetState::::Live { completed_rounds, current_rounds }; crate::aux_schema::write_voter_set_state(&*self.client, &set_state)?; @@ -1038,21 +1030,21 @@ where (completed_rounds, current_rounds) } else { let msg = "Voter acting while in paused state."; - return Err(Error::Safety(msg.to_string())); + return Err(Error::Safety(msg.to_string())) }; let mut completed_rounds = completed_rounds.clone(); - if let Some(already_completed) = completed_rounds.rounds - .iter_mut().find(|r| r.number == round) + if let Some(already_completed) = + completed_rounds.rounds.iter_mut().find(|r| r.number == round) { let n_existing_votes = already_completed.votes.len(); // the interface of Environment guarantees that the previous `historical_votes` // from `completable` is a prefix of what is passed to `concluded`. - already_completed.votes.extend( - historical_votes.seen().iter().skip(n_existing_votes).cloned() - ); + already_completed + .votes + .extend(historical_votes.seen().iter().skip(n_existing_votes).cloned()); already_completed.state = state; crate::aux_schema::write_concluded_round(&*self.client, &already_completed)?; } @@ -1161,8 +1153,8 @@ where block, ); - return Ok(None); - } + return Ok(None) + }, }; // we refuse to vote beyond the current limit number where transitions are scheduled to occur. 
@@ -1195,7 +1187,7 @@ where } if *target_header.number() == target_number { - break; + break } target_header = client @@ -1230,15 +1222,15 @@ where restricted_number < target_header.number() }) .or_else(|| Some((target_header.hash(), *target_header.number()))) - } + }, Ok(None) => { debug!(target: "afg", "Encountered error finding best chain containing {:?}: couldn't find target block", block); None - } + }, Err(e) => { debug!(target: "afg", "Encountered error finding best chain containing {:?}: {:?}", block, e); None - } + }, }; Ok(result) @@ -1281,20 +1273,22 @@ where status.finalized_number, ); - return Ok(()); + return Ok(()) } // FIXME #1483: clone only when changed let old_authority_set = authority_set.clone(); let update_res: Result<_, Error> = client.lock_import_and_run(|import_op| { - let status = authority_set.apply_standard_changes( - hash, - number, - &is_descendent_of::(&*client, None), - initial_sync, - None, - ).map_err(|e| Error::Safety(e.to_string()))?; + let status = authority_set + .apply_standard_changes( + hash, + number, + &is_descendent_of::(&*client, None), + initial_sync, + None, + ) + .map_err(|e| Error::Safety(e.to_string()))?; // send a justification notification if a sender exists and in case of error log it. 
fn notify_justification( @@ -1327,17 +1321,15 @@ where if !justification_required { if let Some(justification_period) = justification_period { let last_finalized_number = client.info().finalized_number; - justification_required = - (!last_finalized_number.is_zero() || number - last_finalized_number == justification_period) && - (last_finalized_number / justification_period != number / justification_period); + justification_required = (!last_finalized_number.is_zero() || + number - last_finalized_number == justification_period) && + (last_finalized_number / justification_period != + number / justification_period); } } - let justification = GrandpaJustification::from_commit( - &client, - round_number, - commit, - )?; + let justification = + GrandpaJustification::from_commit(&client, round_number, commit)?; (justification_required, justification) }, @@ -1369,25 +1361,22 @@ where "number" => ?number, "hash" => ?hash, ); - crate::aux_schema::update_best_justification( - &justification, - |insert| apply_aux(import_op, insert, &[]), - )?; + crate::aux_schema::update_best_justification(&justification, |insert| { + apply_aux(import_op, insert, &[]) + })?; let new_authorities = if let Some((canon_hash, canon_number)) = status.new_set_block { // the authority set has changed. let (new_id, set_ref) = authority_set.current(); if set_ref.len() > 16 { - afg_log!(initial_sync, + afg_log!( + initial_sync, "👴 Applying GRANDPA set change to new set with {} authorities", set_ref.len(), ); } else { - afg_log!(initial_sync, - "👴 Applying GRANDPA set change to new set {:?}", - set_ref, - ); + afg_log!(initial_sync, "👴 Applying GRANDPA set change to new set {:?}", set_ref,); } telemetry!( @@ -1419,7 +1408,7 @@ where warn!(target: "afg", "Failed to write updated authority set to disk. 
Bailing."); warn!(target: "afg", "Node is in a potentially inconsistent state."); - return Err(e.into()); + return Err(e.into()) } } @@ -1433,6 +1422,6 @@ where *authority_set = old_authority_set; Err(CommandOrError::Error(e)) - } + }, } } diff --git a/client/finality-grandpa/src/finality_proof.rs b/client/finality-grandpa/src/finality_proof.rs index ec33d48774ae5..56533704af807 100644 --- a/client/finality-grandpa/src/finality_proof.rs +++ b/client/finality-grandpa/src/finality_proof.rs @@ -39,19 +39,20 @@ use log::{trace, warn}; use std::sync::Arc; -use parity_scale_codec::{Encode, Decode}; +use parity_scale_codec::{Decode, Encode}; +use sc_client_api::backend::Backend; use sp_blockchain::{Backend as BlockchainBackend, HeaderBackend}; use sp_finality_grandpa::GRANDPA_ENGINE_ID; use sp_runtime::{ generic::BlockId, - traits::{NumberFor, Block as BlockT, Header as HeaderT, One}, + traits::{Block as BlockT, Header as HeaderT, NumberFor, One}, }; -use sc_client_api::backend::Backend; use crate::{ - SharedAuthoritySet, best_justification, authorities::{AuthoritySetChangeId, AuthoritySetChanges}, + best_justification, justification::GrandpaJustification, + SharedAuthoritySet, }; const MAX_UNKNOWN_HEADERS: usize = 100_000; @@ -76,10 +77,7 @@ where backend: Arc, shared_authority_set: Option>>, ) -> Self { - FinalityProofProvider { - backend, - shared_authority_set, - } + FinalityProofProvider { backend, shared_authority_set } } /// Create new finality proof provider for the service using: @@ -113,14 +111,10 @@ where { changes } else { - return Ok(None); + return Ok(None) }; - prove_finality( - &*self.backend, - authority_set_changes, - block, - ) + prove_finality(&*self.backend, authority_set_changes, block) } } @@ -166,11 +160,10 @@ where if info.finalized_number < block { let err = format!( "Requested finality proof for descendant of #{} while we only have finalized #{}.", - block, - info.finalized_number, + block, info.finalized_number, ); trace!(target: "afg", "{}", 
&err); - return Err(FinalityProofError::BlockNotYetFinalized); + return Err(FinalityProofError::BlockNotYetFinalized) } let (justification, just_block) = match authority_set_changes.get_set_id(block) { @@ -185,9 +178,9 @@ where "No justification found for the latest finalized block. \ Returning empty proof.", ); - return Ok(None); + return Ok(None) } - } + }, AuthoritySetChangeId::Set(_, last_block_for_set) => { let last_block_for_set_id = BlockId::Number(last_block_for_set); let justification = if let Some(grandpa_justification) = backend @@ -203,10 +196,10 @@ where Returning empty proof.", block, ); - return Ok(None); + return Ok(None) }; (justification, last_block_for_set) - } + }, AuthoritySetChangeId::Unknown => { warn!( target: "afg", @@ -214,8 +207,8 @@ where You need to resync to populate AuthoritySetChanges properly.", block, ); - return Err(FinalityProofError::BlockNotInAuthoritySetChanges); - } + return Err(FinalityProofError::BlockNotInAuthoritySetChanges) + }, }; // Collect all headers from the requested block until the last block of the set @@ -224,7 +217,7 @@ where let mut current = block + One::one(); loop { if current > just_block || headers.len() >= MAX_UNKNOWN_HEADERS { - break; + break } headers.push(backend.blockchain().expect_header(BlockId::Number(current))?); current += One::one(); @@ -245,9 +238,7 @@ where #[cfg(test)] pub(crate) mod tests { use super::*; - use crate::{ - authorities::AuthoritySetChanges, BlockNumberOps, ClientError, SetId, - }; + use crate::{authorities::AuthoritySetChanges, BlockNumberOps, ClientError, SetId}; use futures::executor::block_on; use sc_block_builder::BlockBuilderProvider; use sc_client_api::{apply_aux, LockImportRun}; @@ -276,8 +267,9 @@ pub(crate) mod tests { let proof = super::FinalityProof::::decode(&mut &remote_proof[..]) .map_err(|_| ClientError::BadJustification("failed to decode finality proof".into()))?; - let justification: GrandpaJustification = Decode::decode(&mut &proof.justification[..]) - 
.map_err(|_| ClientError::JustificationDecode)?; + let justification: GrandpaJustification = + Decode::decode(&mut &proof.justification[..]) + .map_err(|_| ClientError::JustificationDecode)?; justification.verify(current_set_id, ¤t_authorities)?; Ok(proof) @@ -321,13 +313,13 @@ pub(crate) mod tests { } fn store_best_justification(client: &TestClient, just: &GrandpaJustification) { - client.lock_import_and_run(|import_op| { - crate::aux_schema::update_best_justification( - just, - |insert| apply_aux(import_op, insert, &[]), - ) - }) - .unwrap(); + client + .lock_import_and_run(|import_op| { + crate::aux_schema::update_best_justification(just, |insert| { + apply_aux(import_op, insert, &[]) + }) + }) + .unwrap(); } #[test] @@ -336,11 +328,7 @@ pub(crate) mod tests { let authority_set_changes = AuthoritySetChanges::empty(); // The last finalized block is 4, so we cannot provide further justifications. - let proof_of_5 = prove_finality( - &*backend, - authority_set_changes, - 5, - ); + let proof_of_5 = prove_finality(&*backend, authority_set_changes, 5); assert!(matches!(proof_of_5, Err(FinalityProofError::BlockNotYetFinalized))); } @@ -353,12 +341,7 @@ pub(crate) mod tests { // Block 4 is finalized without justification // => we can't prove finality of 3 - let proof_of_3 = prove_finality( - &*backend, - authority_set_changes, - 3, - ) - .unwrap(); + let proof_of_3 = prove_finality(&*backend, authority_set_changes, 3).unwrap(); assert_eq!(proof_of_3, None); } @@ -406,14 +389,15 @@ pub(crate) mod tests { 1, vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)], finality_proof.encode(), - ).unwrap_err(); + ) + .unwrap_err(); } fn create_commit( block: Block, round: u64, set_id: SetId, - auth: &[Ed25519Keyring] + auth: &[Ed25519Keyring], ) -> finality_grandpa::Commit where Id: From, @@ -481,11 +465,7 @@ pub(crate) mod tests { let mut authority_set_changes = AuthoritySetChanges::empty(); authority_set_changes.append(1, 8); - let proof_of_6 = prove_finality( - &*backend, - 
authority_set_changes, - 6, - ); + let proof_of_6 = prove_finality(&*backend, authority_set_changes, 6); assert!(matches!(proof_of_6, Err(FinalityProofError::BlockNotInAuthoritySetChanges))); } @@ -499,11 +479,9 @@ pub(crate) mod tests { let commit = create_commit(block8.clone(), round, 1, &[Ed25519Keyring::Alice]); let grandpa_just8 = GrandpaJustification::from_commit(&client, round, commit).unwrap(); - client.finalize_block( - BlockId::Number(8), - Some((ID, grandpa_just8.encode().clone())) - ) - .unwrap(); + client + .finalize_block(BlockId::Number(8), Some((ID, grandpa_just8.encode().clone()))) + .unwrap(); // Authority set change at block 8, so the justification stored there will be used in the // FinalityProof for block 6 @@ -512,13 +490,7 @@ pub(crate) mod tests { authority_set_changes.append(1, 8); let proof_of_6: FinalityProof = Decode::decode( - &mut &prove_finality( - &*backend, - authority_set_changes.clone(), - 6, - ) - .unwrap() - .unwrap()[..], + &mut &prove_finality(&*backend, authority_set_changes.clone(), 6).unwrap().unwrap()[..], ) .unwrap(); assert_eq!( @@ -540,10 +512,7 @@ pub(crate) mod tests { let mut authority_set_changes = AuthoritySetChanges::empty(); authority_set_changes.append(0, 5); - assert!(matches!( - prove_finality(&*backend, authority_set_changes, 6), - Ok(None), - )); + assert!(matches!(prove_finality(&*backend, authority_set_changes, 6), Ok(None),)); } #[test] @@ -563,13 +532,7 @@ pub(crate) mod tests { authority_set_changes.append(0, 5); let proof_of_6: FinalityProof = Decode::decode( - &mut &prove_finality( - &*backend, - authority_set_changes, - 6, - ) - .unwrap() - .unwrap()[..], + &mut &prove_finality(&*backend, authority_set_changes, 6).unwrap().unwrap()[..], ) .unwrap(); assert_eq!( diff --git a/client/finality-grandpa/src/import.rs b/client/finality-grandpa/src/import.rs index ebb26a28c3485..e6b3e8bdd0ed6 100644 --- a/client/finality-grandpa/src/import.rs +++ b/client/finality-grandpa/src/import.rs @@ -31,9 +31,11 @@ use 
sp_consensus::{ ImportResult, JustificationImport, SelectChain, }; use sp_finality_grandpa::{ConsensusLog, ScheduledChange, SetId, GRANDPA_ENGINE_ID}; -use sp_runtime::generic::{BlockId, OpaqueDigestItemId}; -use sp_runtime::traits::{Block as BlockT, DigestFor, Header as HeaderT, NumberFor, Zero}; -use sp_runtime::Justification; +use sp_runtime::{ + generic::{BlockId, OpaqueDigestItemId}, + traits::{Block as BlockT, DigestFor, Header as HeaderT, NumberFor, Zero}, + Justification, +}; use sp_utils::mpsc::TracingUnboundedSender; use crate::{ @@ -98,12 +100,8 @@ where let chain_info = self.inner.info(); // request justifications for all pending changes for which change blocks have already been imported - let pending_changes: Vec<_> = self - .authority_set - .inner() - .pending_changes() - .cloned() - .collect(); + let pending_changes: Vec<_> = + self.authority_set.inner().pending_changes().cloned().collect(); for pending_change in pending_changes { if pending_change.delay_kind == DelayKind::Finalized && @@ -241,7 +239,7 @@ where ) -> Option>> { // check for forced authority set hard forks if let Some(change) = self.authority_set_hard_forks.get(&hash) { - return Some(change.clone()); + return Some(change.clone()) } // check for forced change. @@ -252,7 +250,7 @@ where canon_height: *header.number(), canon_hash: hash, delay_kind: DelayKind::Best { median_last_finalized }, - }); + }) } // check normal scheduled change. 
@@ -295,10 +293,9 @@ where fn consume( mut self, ) -> Option<(AuthoritySet, SharedDataLocked<'a, AuthoritySet>)> { - self.old.take().map(|old| ( - old, - self.guard.take().expect("only taken on deconstruction; qed"), - )) + self.old + .take() + .map(|old| (old, self.guard.take().expect("only taken on deconstruction; qed"))) } } @@ -311,20 +308,14 @@ where } let number = *(block.header.number()); - let maybe_change = self.check_new_change( - &block.header, - hash, - ); + let maybe_change = self.check_new_change(&block.header, hash); // returns a function for checking whether a block is a descendent of another // consistent with querying client directly after importing the block. let parent_hash = *block.header.parent_hash(); let is_descendent_of = is_descendent_of(&*self.inner, Some((hash, parent_hash))); - let mut guard = InnerGuard { - guard: Some(self.authority_set.inner_locked()), - old: None, - }; + let mut guard = InnerGuard { guard: Some(self.authority_set.inner_locked()), old: None }; // whether to pause the old authority set -- happens after import // of a forced change block. 
@@ -339,10 +330,10 @@ where do_pause = true; } - guard.as_mut().add_pending_change( - change, - &is_descendent_of, - ).map_err(|e| ConsensusError::ClientImport(e.to_string()))?; + guard + .as_mut() + .add_pending_change(change, &is_descendent_of) + .map_err(|e| ConsensusError::ClientImport(e.to_string()))?; } let applied_changes = { @@ -389,7 +380,9 @@ where AppliedChanges::Forced(new_authorities) } else { - let did_standard = guard.as_mut().enacts_standard_change(hash, number, &is_descendent_of) + let did_standard = guard + .as_mut() + .enacts_standard_change(hash, number, &is_descendent_of) .map_err(|e| ConsensusError::ClientImport(e.to_string())) .map_err(ConsensusError::from)?; @@ -413,19 +406,17 @@ where crate::aux_schema::update_authority_set::( authorities, authorities_change, - |insert| block.auxiliary.extend( - insert.iter().map(|(k, v)| (k.to_vec(), Some(v.to_vec()))) - ) + |insert| { + block + .auxiliary + .extend(insert.iter().map(|(k, v)| (k.to_vec(), Some(v.to_vec())))) + }, ); } let just_in_case = just_in_case.map(|(o, i)| (o, i.release_mutex())); - Ok(PendingSetChanges { - just_in_case, - applied_changes, - do_pause, - }) + Ok(PendingSetChanges { just_in_case, applied_changes, do_pause }) } } @@ -459,7 +450,7 @@ where // Strip justifications when re-importing an existing block. let _justifications = block.justifications.take(); return (&*self.inner).import_block(block, new_cache).await - } + }, Ok(BlockStatus::Unknown) => {}, Err(e) => return Err(ConsensusError::ClientImport(e.to_string())), } @@ -483,7 +474,7 @@ where r, ); pending_changes.revert(); - return Ok(r); + return Ok(r) }, Err(e) => { debug!( @@ -492,7 +483,7 @@ where e, ); pending_changes.revert(); - return Err(ConsensusError::ClientImport(e.to_string())); + return Err(ConsensusError::ClientImport(e.to_string())) }, } }; @@ -501,9 +492,9 @@ where // Send the pause signal after import but BEFORE sending a `ChangeAuthorities` message. 
if do_pause { - let _ = self.send_voter_commands.unbounded_send( - VoterCommand::Pause("Forced change scheduled after inactivity".to_string()) - ); + let _ = self.send_voter_commands.unbounded_send(VoterCommand::Pause( + "Forced change scheduled after inactivity".to_string(), + )); } let needs_justification = applied_changes.needs_justification(); @@ -521,7 +512,8 @@ where // they should import the block and discard the justification, and they will // then request a justification from sync if it's necessary (which they should // then be able to successfully validate). - let _ = self.send_voter_commands.unbounded_send(VoterCommand::ChangeAuthorities(new)); + let _ = + self.send_voter_commands.unbounded_send(VoterCommand::ChangeAuthorities(new)); // we must clear all pending justifications requests, presumably they won't be // finalized hence why this forced changes was triggered @@ -537,8 +529,8 @@ where _ => {}, } - let grandpa_justification = justifications - .and_then(|just| just.into_justification(GRANDPA_ENGINE_ID)); + let grandpa_justification = + justifications.and_then(|just| just.into_justification(GRANDPA_ENGINE_ID)); match grandpa_justification { Some(justification) => { @@ -559,7 +551,7 @@ where } }); }, - None => { + None => if needs_justification { debug!( target: "afg", @@ -568,8 +560,7 @@ where ); imported_aux.needs_justification = true; - } - } + }, } Ok(ImportResult::Imported(imported_aux)) @@ -616,14 +607,9 @@ impl GrandpaBlockImport { - afg_log!(initial_sync, + afg_log!( + initial_sync, "👴 Imported justification for block #{} that triggers \ command {}, signaling voter.", number, @@ -713,10 +700,13 @@ where Error::Signing(error) => ConsensusError::ClientImport(error), Error::Timer(error) => ConsensusError::ClientImport(error.to_string()), Error::RuntimeApi(error) => ConsensusError::ClientImport(error.to_string()), - }); + }) }, Ok(_) => { - assert!(!enacts_change, "returns Ok when no authority set change should be enacted; qed;"); + assert!( + 
!enacts_change, + "returns Ok when no authority set change should be enacted; qed;" + ); }, } diff --git a/client/finality-grandpa/src/justification.rs b/client/finality-grandpa/src/justification.rs index 7805161f06c62..7b5492f979363 100644 --- a/client/finality-grandpa/src/justification.rs +++ b/client/finality-grandpa/src/justification.rs @@ -16,8 +16,10 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::collections::{HashMap, HashSet}; -use std::sync::Arc; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; use finality_grandpa::{voter_set::VoterSet, Error as GrandpaError}; use parity_scale_codec::{Decode, Encode}; @@ -52,7 +54,8 @@ impl GrandpaJustification { client: &Arc, round: u64, commit: Commit, - ) -> Result, Error> where + ) -> Result, Error> + where C: HeaderBackend, { let mut votes_ancestries_hashes = HashSet::new(); @@ -66,12 +69,14 @@ impl GrandpaJustification { for signed in commit.precommits.iter() { let mut current_hash = signed.precommit.target_hash; loop { - if current_hash == commit.target_hash { break; } + if current_hash == commit.target_hash { + break + } match client.header(BlockId::Hash(current_hash))? 
{ Some(current_header) => { if *current_header.number() <= commit.target_number { - return error(); + return error() } let parent_hash = *current_header.parent_hash(); @@ -95,20 +100,20 @@ impl GrandpaJustification { finalized_target: (Block::Hash, NumberFor), set_id: u64, voters: &VoterSet, - ) -> Result, ClientError> where + ) -> Result, ClientError> + where NumberFor: finality_grandpa::BlockNumberOps, { - let justification = GrandpaJustification::::decode(&mut &*encoded) .map_err(|_| ClientError::JustificationDecode)?; - if (justification.commit.target_hash, justification.commit.target_number) != finalized_target { + if (justification.commit.target_hash, justification.commit.target_number) != + finalized_target + { let msg = "invalid commit target in grandpa justification".to_string(); Err(ClientError::BadJustification(msg)) } else { - justification - .verify_with_voter_set(set_id, voters) - .map(|_| justification) + justification.verify_with_voter_set(set_id, voters).map(|_| justification) } } @@ -117,9 +122,8 @@ impl GrandpaJustification { where NumberFor: finality_grandpa::BlockNumberOps, { - let voters = VoterSet::new(authorities.iter().cloned()).ok_or(ClientError::Consensus( - sp_consensus::Error::InvalidAuthoritiesSet, - ))?; + let voters = VoterSet::new(authorities.iter().cloned()) + .ok_or(ClientError::Consensus(sp_consensus::Error::InvalidAuthoritiesSet))?; self.verify_with_voter_set(set_id, &voters) } @@ -137,16 +141,12 @@ impl GrandpaJustification { let ancestry_chain = AncestryChain::::new(&self.votes_ancestries); - match finality_grandpa::validate_commit( - &self.commit, - voters, - &ancestry_chain, - ) { + match finality_grandpa::validate_commit(&self.commit, voters, &ancestry_chain) { Ok(ref result) if result.ghost().is_some() => {}, _ => { let msg = "invalid commit in grandpa justification".to_string(); - return Err(ClientError::BadJustification(msg)); - } + return Err(ClientError::BadJustification(msg)) + }, } let mut buf = Vec::new(); @@ 
-161,11 +161,12 @@ impl GrandpaJustification { &mut buf, ) { return Err(ClientError::BadJustification( - "invalid signature for precommit in grandpa justification".to_string())); + "invalid signature for precommit in grandpa justification".to_string(), + )) } if self.commit.target_hash == signed.precommit.target_hash { - continue; + continue } match ancestry_chain.ancestry(self.commit.target_hash, signed.precommit.target_hash) { @@ -178,19 +179,20 @@ impl GrandpaJustification { }, _ => { return Err(ClientError::BadJustification( - "invalid precommit ancestry proof in grandpa justification".to_string())); + "invalid precommit ancestry proof in grandpa justification".to_string(), + )) }, } } - let ancestry_hashes = self.votes_ancestries - .iter() - .map(|h: &Block::Header| h.hash()) - .collect(); + let ancestry_hashes = + self.votes_ancestries.iter().map(|h: &Block::Header| h.hash()).collect(); if visited_hashes != ancestry_hashes { return Err(ClientError::BadJustification( - "invalid precommit ancestries in grandpa justification with unused headers".to_string())); + "invalid precommit ancestries in grandpa justification with unused headers" + .to_string(), + )) } Ok(()) @@ -211,24 +213,28 @@ struct AncestryChain { impl AncestryChain { fn new(ancestry: &[Block::Header]) -> AncestryChain { - let ancestry: HashMap<_, _> = ancestry - .iter() - .cloned() - .map(|h: Block::Header| (h.hash(), h)) - .collect(); + let ancestry: HashMap<_, _> = + ancestry.iter().cloned().map(|h: Block::Header| (h.hash(), h)).collect(); AncestryChain { ancestry } } } -impl finality_grandpa::Chain> for AncestryChain where - NumberFor: finality_grandpa::BlockNumberOps +impl finality_grandpa::Chain> for AncestryChain +where + NumberFor: finality_grandpa::BlockNumberOps, { - fn ancestry(&self, base: Block::Hash, block: Block::Hash) -> Result, GrandpaError> { + fn ancestry( + &self, + base: Block::Hash, + block: Block::Hash, + ) -> Result, GrandpaError> { let mut route = Vec::new(); let mut 
current_hash = block; loop { - if current_hash == base { break; } + if current_hash == base { + break + } match self.ancestry.get(¤t_hash) { Some(current_header) => { current_hash = *current_header.parent_hash(); diff --git a/client/finality-grandpa/src/lib.rs b/client/finality-grandpa/src/lib.rs index 6c3f0f6af37a8..58e7ba1493e8a 100644 --- a/client/finality-grandpa/src/lib.rs +++ b/client/finality-grandpa/src/lib.rs @@ -56,41 +56,39 @@ #![warn(missing_docs)] -use futures::{ - prelude::*, - StreamExt, -}; +use futures::{prelude::*, StreamExt}; use log::{debug, error, info}; +use parity_scale_codec::{Decode, Encode}; +use parking_lot::RwLock; +use prometheus_endpoint::{PrometheusError, Registry}; use sc_client_api::{ backend::{AuxStore, Backend}, - LockImportRun, BlockchainEvents, CallExecutor, - ExecutionStrategy, Finalizer, TransactionFor, ExecutorProvider, + BlockchainEvents, CallExecutor, ExecutionStrategy, ExecutorProvider, Finalizer, LockImportRun, + TransactionFor, }; -use parity_scale_codec::{Decode, Encode}; -use prometheus_endpoint::{PrometheusError, Registry}; +use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_DEBUG, CONSENSUS_INFO}; use sp_api::ProvideRuntimeApi; -use sp_blockchain::{HeaderBackend, Error as ClientError, HeaderMetadata}; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{NumberFor, Block as BlockT, DigestFor, Zero}; -use sp_consensus::{SelectChain, BlockImport}; -use sp_core::{ - crypto::Public, -}; -use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; use sp_application_crypto::AppKey; +use sp_blockchain::{Error as ClientError, HeaderBackend, HeaderMetadata}; +use sp_consensus::{BlockImport, SelectChain}; +use sp_core::crypto::Public; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, DigestFor, NumberFor, Zero}, +}; use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver}; -use sc_telemetry::{telemetry, TelemetryHandle, CONSENSUS_INFO, 
CONSENSUS_DEBUG}; -use parking_lot::RwLock; -use finality_grandpa::Error as GrandpaError; -use finality_grandpa::{voter, voter_set::VoterSet}; pub use finality_grandpa::BlockNumberOps; - -use std::{fmt, io}; -use std::sync::Arc; -use std::time::Duration; -use std::pin::Pin; -use std::task::{Poll, Context}; +use finality_grandpa::{voter, voter_set::VoterSet, Error as GrandpaError}; + +use std::{ + fmt, io, + pin::Pin, + sync::Arc, + task::{Context, Poll}, + time::Duration, +}; // utility logging macro that takes as first argument a conditional to // decide whether to log under debug or info level (useful to restrict @@ -123,6 +121,7 @@ mod voting_rule; pub use authorities::{AuthoritySet, AuthoritySetChanges, SharedAuthoritySet}; pub use aux_schema::best_justification; +pub use finality_grandpa::voter::report; pub use finality_proof::{FinalityProof, FinalityProofError, FinalityProofProvider}; pub use import::{find_forced_change, find_scheduled_change, GrandpaBlockImport}; pub use justification::GrandpaJustification; @@ -132,13 +131,12 @@ pub use voting_rule::{ BeforeBestBlockBy, ThreeQuartersOfTheUnfinalizedChain, VotingRule, VotingRuleResult, VotingRulesBuilder, }; -pub use finality_grandpa::voter::report; use aux_schema::PersistentData; use communication::{Network as NetworkT, NetworkBridge}; use environment::{Environment, VoterSetState}; -use until_imported::UntilGlobalMessageBlocksImported; use sp_finality_grandpa::{AuthorityList, AuthoritySignature, SetId}; +use until_imported::UntilGlobalMessageBlocksImported; // Re-export these two because it's just so damn convenient. pub use sp_finality_grandpa::{AuthorityId, AuthorityPair, GrandpaApi, ScheduledChange}; @@ -159,7 +157,8 @@ pub type SignedMessage = finality_grandpa::SignedMessage< >; /// A primary propose message for this chain's block type. 
-pub type PrimaryPropose = finality_grandpa::PrimaryPropose<::Hash, NumberFor>; +pub type PrimaryPropose = + finality_grandpa::PrimaryPropose<::Hash, NumberFor>; /// A prevote message for this chain's block type. pub type Prevote = finality_grandpa::Prevote<::Hash, NumberFor>; /// A precommit message for this chain's block type. @@ -198,22 +197,14 @@ type CommunicationIn = finality_grandpa::voter::CommunicationIn< /// Global communication input stream for commits and catch up messages, with /// the hash type not being derived from the block, useful for forcing the hash /// to some type (e.g. `H256`) when the compiler can't do the inference. -type CommunicationInH = finality_grandpa::voter::CommunicationIn< - H, - NumberFor, - AuthoritySignature, - AuthorityId, ->; +type CommunicationInH = + finality_grandpa::voter::CommunicationIn, AuthoritySignature, AuthorityId>; /// Global communication sink for commits with the hash type not being derived /// from the block, useful for forcing the hash to some type (e.g. `H256`) when /// the compiler can't do the inference. -type CommunicationOutH = finality_grandpa::voter::CommunicationOut< - H, - NumberFor, - AuthoritySignature, - AuthorityId, ->; +type CommunicationOutH = + finality_grandpa::voter::CommunicationOut, AuthoritySignature, AuthorityId>; /// Shared voter state for querying. pub struct SharedVoterState { @@ -223,18 +214,14 @@ pub struct SharedVoterState { impl SharedVoterState { /// Create a new empty `SharedVoterState` instance. 
pub fn empty() -> Self { - Self { - inner: Arc::new(RwLock::new(None)), - } + Self { inner: Arc::new(RwLock::new(None)) } } fn reset( &self, voter_state: Box + Sync + Send>, ) -> Option<()> { - let mut shared_voter_state = self - .inner - .try_write_for(Duration::from_secs(1))?; + let mut shared_voter_state = self.inner.try_write_for(Duration::from_secs(1))?; *shared_voter_state = Some(voter_state); Some(()) @@ -323,7 +310,8 @@ pub(crate) trait BlockStatus { fn block_number(&self, hash: Block::Hash) -> Result>, Error>; } -impl BlockStatus for Arc where +impl BlockStatus for Arc +where Client: HeaderBackend, NumberFor: BlockNumberOps, { @@ -337,24 +325,36 @@ impl BlockStatus for Arc where /// Ideally this would be a trait alias, we're not there yet. /// tracking issue pub trait ClientForGrandpa: - LockImportRun + Finalizer + AuxStore - + HeaderMetadata + HeaderBackend - + BlockchainEvents + ProvideRuntimeApi + ExecutorProvider + LockImportRun + + Finalizer + + AuxStore + + HeaderMetadata + + HeaderBackend + + BlockchainEvents + + ProvideRuntimeApi + + ExecutorProvider + BlockImport, Error = sp_consensus::Error> - where - BE: Backend, - Block: BlockT, -{} +where + BE: Backend, + Block: BlockT, +{ +} impl ClientForGrandpa for T - where - BE: Backend, - Block: BlockT, - T: LockImportRun + Finalizer + AuxStore - + HeaderMetadata + HeaderBackend - + BlockchainEvents + ProvideRuntimeApi + ExecutorProvider - + BlockImport, Error = sp_consensus::Error>, -{} +where + BE: Backend, + Block: BlockT, + T: LockImportRun + + Finalizer + + AuxStore + + HeaderMetadata + + HeaderBackend + + BlockchainEvents + + ProvideRuntimeApi + + ExecutorProvider + + BlockImport, Error = sp_consensus::Error>, +{ +} /// Something that one can ask to do a block sync request. 
pub(crate) trait BlockSyncRequester { @@ -364,14 +364,25 @@ pub(crate) trait BlockSyncRequester { /// If the given vector of peers is empty then the underlying implementation /// should make a best effort to fetch the block from any peers it is /// connected to (NOTE: this assumption will change in the future #3629). - fn set_sync_fork_request(&self, peers: Vec, hash: Block::Hash, number: NumberFor); + fn set_sync_fork_request( + &self, + peers: Vec, + hash: Block::Hash, + number: NumberFor, + ); } -impl BlockSyncRequester for NetworkBridge where +impl BlockSyncRequester for NetworkBridge +where Block: BlockT, Network: NetworkT, { - fn set_sync_fork_request(&self, peers: Vec, hash: Block::Hash, number: NumberFor) { + fn set_sync_fork_request( + &self, + peers: Vec, + hash: Block::Hash, + number: NumberFor, + ) { NetworkBridge::set_sync_fork_request(self, peers, hash, number) } } @@ -391,7 +402,7 @@ pub(crate) enum VoterCommand { /// Pause the voter for given reason. Pause(String), /// New authorities. 
- ChangeAuthorities(NewAuthoritySet) + ChangeAuthorities(NewAuthoritySet), } impl fmt::Display for VoterCommand { @@ -436,7 +447,7 @@ impl From> for CommandOrError { } } -impl ::std::error::Error for CommandOrError { } +impl ::std::error::Error for CommandOrError {} impl fmt::Display for CommandOrError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -476,8 +487,10 @@ pub trait GenesisAuthoritySetProvider { fn get(&self) -> Result; } -impl GenesisAuthoritySetProvider for Arc> - where E: CallExecutor, +impl GenesisAuthoritySetProvider + for Arc> +where + E: CallExecutor, { fn get(&self) -> Result { // This implementation uses the Grandpa runtime API instead of reading directly from the @@ -492,10 +505,12 @@ impl GenesisAuthoritySetProvider for Arc( genesis_authorities_provider: &dyn GenesisAuthoritySetProvider, select_chain: SC, telemetry: Option, -) -> Result< - ( - GrandpaBlockImport, - LinkHalf, - ), - ClientError, -> +) -> Result<(GrandpaBlockImport, LinkHalf), ClientError> where SC: SelectChain, BE: Backend + 'static, @@ -539,13 +548,7 @@ pub fn block_import_with_authority_set_hard_forks select_chain: SC, authority_set_hard_forks: Vec<(SetId, (Block::Hash, NumberFor), AuthorityList)>, telemetry: Option, -) -> Result< - ( - GrandpaBlockImport, - LinkHalf, - ), - ClientError, -> +) -> Result<(GrandpaBlockImport, LinkHalf), ClientError> where SC: SelectChain, BE: Backend + 'static, @@ -554,11 +557,8 @@ where let chain_info = client.info(); let genesis_hash = chain_info.genesis_hash; - let persistent_data = aux_schema::load_persistent( - &*client, - genesis_hash, - >::zero(), - { + let persistent_data = + aux_schema::load_persistent(&*client, genesis_hash, >::zero(), { let telemetry = telemetry.clone(); move || { let authorities = genesis_authorities_provider.get()?; @@ -570,13 +570,11 @@ where ); Ok(authorities) } - }, - )?; + })?; let (voter_commands_tx, voter_commands_rx) = tracing_unbounded("mpsc_grandpa_voter_command"); - let (justification_sender, 
justification_stream) = - GrandpaJustificationStream::channel(); + let (justification_sender, justification_stream) = GrandpaJustificationStream::channel(); // create pending change objects with 0 delay and enacted on finality // (i.e. standard changes) for each authority set hard fork. @@ -646,11 +644,8 @@ where let is_voter = local_authority_id(voters, keystore).is_some(); // verification stream - let (global_in, global_out) = network.global_communication( - communication::SetId(set_id), - voters.clone(), - is_voter, - ); + let (global_in, global_out) = + network.global_communication(communication::SetId(set_id), voters.clone(), is_voter); // block commit and catch up messages until relevant blocks are imported. let global_in = UntilGlobalMessageBlocksImported::new( @@ -758,23 +753,18 @@ where ); let conf = config.clone(); - let telemetry_task = if let Some(telemetry_on_connect) = telemetry - .as_ref() - .map(|x| x.on_connect_stream()) - { - let authorities = persistent_data.authority_set.clone(); - let telemetry = telemetry.clone(); - let events = telemetry_on_connect - .for_each(move |_| { + let telemetry_task = + if let Some(telemetry_on_connect) = telemetry.as_ref().map(|x| x.on_connect_stream()) { + let authorities = persistent_data.authority_set.clone(); + let telemetry = telemetry.clone(); + let events = telemetry_on_connect.for_each(move |_| { let current_authorities = authorities.current_authorities(); let set_id = authorities.set_id(); let authority_id = local_authority_id(¤t_authorities, conf.keystore.as_ref()) .unwrap_or_default(); - let authorities = current_authorities - .iter() - .map(|(id, _)| id.to_string()) - .collect::>(); + let authorities = + current_authorities.iter().map(|(id, _)| id.to_string()).collect::>(); let authorities = serde_json::to_string(&authorities).expect( "authorities is always at least an empty vector; \ @@ -792,10 +782,10 @@ where future::ready(()) }); - future::Either::Left(events) - } else { - 
future::Either::Right(future::pending()) - }; + future::Either::Left(events) + } else { + future::Either::Right(future::pending()) + }; let voter_work = VoterWork::new( client, @@ -819,8 +809,7 @@ where }); // Make sure that `telemetry_task` doesn't accidentally finish and kill grandpa. - let telemetry_task = telemetry_task - .then(|_| future::pending::<()>()); + let telemetry_task = telemetry_task.then(|_| future::pending::<()>()); Ok(future::select(voter_work, telemetry_task).map(drop)) } @@ -842,7 +831,9 @@ impl Metrics { /// Future that powers the voter. #[must_use] struct VoterWork, SC, VR> { - voter: Pin>>> + Send>>, + voter: Pin< + Box>>> + Send>, + >, shared_voter_state: SharedVoterState, env: Arc>, voter_commands_rx: TracingUnboundedReceiver>>, @@ -881,7 +872,7 @@ where Some(Err(e)) => { debug!(target: "afg", "Failed to register metrics: {:?}", e); None - } + }, None => None, }; @@ -937,12 +928,7 @@ where let chain_info = self.env.client.info(); - let authorities = self - .env - .voters - .iter() - .map(|(id, _)| id.to_string()) - .collect::>(); + let authorities = self.env.voters.iter().map(|(id, _)| id.to_string()).collect::>(); let authorities = serde_json::to_string(&authorities).expect( "authorities is always at least an empty vector; elements are always of type string; qed.", @@ -961,10 +947,7 @@ where match &*self.env.voter_set_state.read() { VoterSetState::Live { completed_rounds, .. } => { - let last_finalized = ( - chain_info.finalized_hash, - chain_info.finalized_number, - ); + let last_finalized = (chain_info.finalized_hash, chain_info.finalized_number); let global_comms = global_communication( self.env.set_id, @@ -997,20 +980,18 @@ where self.voter = Box::pin(voter); }, - VoterSetState::Paused { .. } => - self.voter = Box::pin(future::pending()), + VoterSetState::Paused { .. 
} => self.voter = Box::pin(future::pending()), }; } fn handle_voter_command( &mut self, - command: VoterCommand> + command: VoterCommand>, ) -> Result<(), Error> { match command { VoterCommand::ChangeAuthorities(new) => { - let voters: Vec = new.authorities.iter().map(move |(a, _)| { - format!("{}", a) - }).collect(); + let voters: Vec = + new.authorities.iter().map(move |(a, _)| format!("{}", a)).collect(); telemetry!( self.telemetry; CONSENSUS_INFO; @@ -1034,14 +1015,12 @@ where Ok(Some(set_state)) })?; - let voters = Arc::new(VoterSet::new(new.authorities.into_iter()) - .expect( - "new authorities come from pending change; \ + let voters = Arc::new(VoterSet::new(new.authorities.into_iter()).expect( + "new authorities come from pending change; \ pending change comes from `AuthoritySet`; \ `AuthoritySet` validates authorities is non-empty and weights are non-zero; \ - qed." - ) - ); + qed.", + )); self.env = Arc::new(Environment { voters, @@ -1061,7 +1040,7 @@ where self.rebuild_voter(); Ok(()) - } + }, VoterCommand::Pause(reason) => { info!(target: "afg", "Pausing old validator set: {}", reason); @@ -1076,7 +1055,7 @@ where self.rebuild_voter(); Ok(()) - } + }, } } } @@ -1096,37 +1075,35 @@ where fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll { match Future::poll(Pin::new(&mut self.voter), cx) { - Poll::Pending => {} + Poll::Pending => {}, Poll::Ready(Ok(())) => { // voters don't conclude naturally - return Poll::Ready( - Err(Error::Safety("finality-grandpa inner voter has concluded.".into())) - ) - } + return Poll::Ready(Err(Error::Safety( + "finality-grandpa inner voter has concluded.".into(), + ))) + }, Poll::Ready(Err(CommandOrError::Error(e))) => { // return inner observer error return Poll::Ready(Err(e)) - } + }, Poll::Ready(Err(CommandOrError::VoterCommand(command))) => { // some command issued internally self.handle_voter_command(command)?; cx.waker().wake_by_ref(); - } + }, } match Stream::poll_next(Pin::new(&mut self.voter_commands_rx), cx) 
{ - Poll::Pending => {} + Poll::Pending => {}, Poll::Ready(None) => { // the `voter_commands_rx` stream should never conclude since it's never closed. - return Poll::Ready( - Err(Error::Safety("`voter_commands_rx` was closed.".into())) - ) - } + return Poll::Ready(Err(Error::Safety("`voter_commands_rx` was closed.".into()))) + }, Poll::Ready(Some(command)) => { // some command issued externally self.handle_voter_command(command)?; cx.waker().wake_by_ref(); - } + }, } Future::poll(Pin::new(&mut self.network), cx) @@ -1142,10 +1119,10 @@ fn local_authority_id( ) -> Option { keystore.and_then(|keystore| { voters - .iter() - .find(|(p, _)| { - SyncCryptoStore::has_keys(&**keystore, &[(p.to_raw_vec(), AuthorityId::ID)]) - }) - .map(|(p, _)| p.clone()) + .iter() + .find(|(p, _)| { + SyncCryptoStore::has_keys(&**keystore, &[(p.to_raw_vec(), AuthorityId::ID)]) + }) + .map(|(p, _)| p.clone()) }) } diff --git a/client/finality-grandpa/src/notification.rs b/client/finality-grandpa/src/notification.rs index b545f0d8a637e..f0b0b1669dc95 100644 --- a/client/finality-grandpa/src/notification.rs +++ b/client/finality-grandpa/src/notification.rs @@ -16,14 +16,13 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::sync::Arc; use parking_lot::Mutex; +use std::sync::Arc; use sp_runtime::traits::Block as BlockT; use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; -use crate::justification::GrandpaJustification; -use crate::Error; +use crate::{justification::GrandpaJustification, Error}; // Stream of justifications returned when subscribing. type JustificationStream = TracingUnboundedReceiver>; @@ -41,16 +40,14 @@ type SharedJustificationSenders = Arc { - subscribers: SharedJustificationSenders + subscribers: SharedJustificationSenders, } impl GrandpaJustificationSender { /// The `subscribers` should be shared with a corresponding /// `GrandpaJustificationStream`. 
fn new(subscribers: SharedJustificationSenders) -> Self { - Self { - subscribers, - } + Self { subscribers } } /// Send out a notification to all subscribers that a new justification @@ -83,7 +80,7 @@ impl GrandpaJustificationSender { /// so it can be used to add more subscriptions. #[derive(Clone)] pub struct GrandpaJustificationStream { - subscribers: SharedJustificationSenders + subscribers: SharedJustificationSenders, } impl GrandpaJustificationStream { @@ -100,9 +97,7 @@ impl GrandpaJustificationStream { /// The `subscribers` should be shared with a corresponding /// `GrandpaJustificationSender`. fn new(subscribers: SharedJustificationSenders) -> Self { - Self { - subscribers, - } + Self { subscribers } } /// Subscribe to a channel through which justifications are sent diff --git a/client/finality-grandpa/src/observer.rs b/client/finality-grandpa/src/observer.rs index 23c4f873a10b7..cbea6c138c90f 100644 --- a/client/finality-grandpa/src/observer.rs +++ b/client/finality-grandpa/src/observer.rs @@ -16,10 +16,12 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::marker::{PhantomData, Unpin}; -use std::pin::Pin; -use std::sync::Arc; -use std::task::{Context, Poll}; +use std::{ + marker::{PhantomData, Unpin}, + pin::Pin, + sync::Arc, + task::{Context, Poll}, +}; use finality_grandpa::{voter, voter_set::VoterSet, BlockNumberOps, Error as GrandpaError}; use futures::prelude::*; @@ -95,14 +97,14 @@ where }, voter::CommunicationIn::CatchUp(..) 
=> { // ignore catch up messages - return future::ok(last_finalized_number); + return future::ok(last_finalized_number) }, }; // if the commit we've received targets a block lower or equal to the last // finalized, ignore it and continue with the current state if commit.target_number <= last_finalized_number { - return future::ok(last_finalized_number); + return future::ok(last_finalized_number) } let validation_result = match finality_grandpa::validate_commit( @@ -201,11 +203,9 @@ where telemetry.clone(), ); - let observer_work = observer_work - .map_ok(|_| ()) - .map_err(|e| { - warn!("GRANDPA Observer failed: {:?}", e); - }); + let observer_work = observer_work.map_ok(|_| ()).map_err(|e| { + warn!("GRANDPA Observer failed: {:?}", e); + }); Ok(observer_work.map(drop)) } @@ -213,7 +213,8 @@ where /// Future that powers the observer. #[must_use] struct ObserverWork> { - observer: Pin>>> + Send>>, + observer: + Pin>>> + Send>>, client: Arc, network: NetworkBridge, persistent_data: PersistentData, @@ -285,11 +286,13 @@ where let network = self.network.clone(); let voters = voters.clone(); - move |round| network.note_round( - crate::communication::Round(round), - crate::communication::SetId(set_id), - &*voters, - ) + move |round| { + network.note_round( + crate::communication::Round(round), + crate::communication::SetId(set_id), + &*voters, + ) + } }; // create observer for the current set @@ -337,7 +340,8 @@ where set_state }, - }.into(); + } + .into(); self.rebuild_observer(); Ok(()) @@ -356,33 +360,33 @@ where fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll { match Future::poll(Pin::new(&mut self.observer), cx) { - Poll::Pending => {} + Poll::Pending => {}, Poll::Ready(Ok(())) => { // observer commit stream doesn't conclude naturally; this could reasonably be an error. 
return Poll::Ready(Ok(())) - } + }, Poll::Ready(Err(CommandOrError::Error(e))) => { // return inner observer error return Poll::Ready(Err(e)) - } + }, Poll::Ready(Err(CommandOrError::VoterCommand(command))) => { // some command issued internally self.handle_voter_command(command)?; cx.waker().wake_by_ref(); - } + }, } match Stream::poll_next(Pin::new(&mut self.voter_commands_rx), cx) { - Poll::Pending => {} + Poll::Pending => {}, Poll::Ready(None) => { // the `voter_commands_rx` stream should never conclude since it's never closed. return Poll::Ready(Ok(())) - } + }, Poll::Ready(Some(command)) => { // some command issued externally self.handle_voter_command(command)?; cx.waker().wake_by_ref(); - } + }, } Future::poll(Pin::new(&mut self.network), cx) @@ -393,12 +397,15 @@ where mod tests { use super::*; + use crate::{ + aux_schema, + communication::tests::{make_test_network, Event}, + }; use assert_matches::assert_matches; - use sp_utils::mpsc::tracing_unbounded; - use crate::{aux_schema, communication::tests::{Event, make_test_network}}; - use substrate_test_runtime_client::{TestClientBuilder, TestClientBuilderExt}; use sc_network::PeerId; use sp_blockchain::HeaderBackend as _; + use sp_utils::mpsc::tracing_unbounded; + use substrate_test_runtime_client::{TestClientBuilder, TestClientBuilderExt}; use futures::executor; @@ -426,12 +433,9 @@ mod tests { let voters = vec![(sp_keyring::Ed25519Keyring::Alice.public().into(), 1)]; - let persistent_data = aux_schema::load_persistent( - &*backend, - client.info().genesis_hash, - 0, - || Ok(voters), - ).unwrap(); + let persistent_data = + aux_schema::load_persistent(&*backend, client.info().genesis_hash, 0, || Ok(voters)) + .unwrap(); let (_tx, voter_command_rx) = tracing_unbounded(""); diff --git a/client/finality-grandpa/src/tests.rs b/client/finality-grandpa/src/tests.rs index 725beec6a94b2..6243b1752c7c1 100644 --- a/client/finality-grandpa/src/tests.rs +++ b/client/finality-grandpa/src/tests.rs @@ -21,31 +21,37 @@ use 
super::*; use assert_matches::assert_matches; use environment::HasVoted; +use futures::executor::block_on; +use futures_timer::Delay; +use parking_lot::{Mutex, RwLock}; +use sc_network::config::{ProtocolConfig, Role}; use sc_network_test::{ - Block, BlockImportAdapter, Hash, PassThroughVerifier, Peer, PeersClient, PeersFullClient, - TestClient, TestNetFactory, FullPeerConfig, + Block, BlockImportAdapter, FullPeerConfig, Hash, PassThroughVerifier, Peer, PeersClient, + PeersFullClient, TestClient, TestNetFactory, }; -use sc_network::config::{ProtocolConfig, Role}; -use parking_lot::{RwLock, Mutex}; -use futures_timer::Delay; -use futures::executor::block_on; -use tokio::runtime::{Runtime, Handle}; -use sp_keyring::Ed25519Keyring; -use sp_blockchain::Result; use sp_api::{ApiRef, ProvideRuntimeApi}; -use substrate_test_runtime_client::runtime::BlockNumber; +use sp_blockchain::Result; use sp_consensus::{ - BlockOrigin, ForkChoiceStrategy, ImportedAux, BlockImportParams, ImportResult, BlockImport, - import_queue::BoxJustificationImport, + import_queue::BoxJustificationImport, BlockImport, BlockImportParams, BlockOrigin, + ForkChoiceStrategy, ImportResult, ImportedAux, }; -use std::{collections::{HashMap, HashSet}, pin::Pin}; -use sp_runtime::{Justifications, traits::{Block as BlockT, Header as HeaderT}}; -use sp_runtime::generic::{BlockId, DigestItem}; use sp_core::H256; -use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; use sp_finality_grandpa::{ - GRANDPA_ENGINE_ID, AuthorityList, EquivocationProof, GrandpaApi, OpaqueKeyOwnershipProof, + AuthorityList, EquivocationProof, GrandpaApi, OpaqueKeyOwnershipProof, GRANDPA_ENGINE_ID, +}; +use sp_keyring::Ed25519Keyring; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; +use sp_runtime::{ + generic::{BlockId, DigestItem}, + traits::{Block as BlockT, Header as HeaderT}, + Justifications, +}; +use std::{ + collections::{HashMap, HashSet}, + pin::Pin, }; +use substrate_test_runtime_client::runtime::BlockNumber; +use 
tokio::runtime::{Handle, Runtime}; use authorities::AuthoritySet; use sc_block_builder::BlockBuilderProvider; @@ -61,7 +67,7 @@ type GrandpaBlockImport = crate::GrandpaBlockImport< substrate_test_runtime_client::Backend, Block, PeersFullClient, - LongestChain + LongestChain, >; struct GrandpaTestNet { @@ -71,10 +77,8 @@ struct GrandpaTestNet { impl GrandpaTestNet { fn new(test_config: TestApi, n_authority: usize, n_full: usize) -> Self { - let mut net = GrandpaTestNet { - peers: Vec::with_capacity(n_authority + n_full), - test_config, - }; + let mut net = + GrandpaTestNet { peers: Vec::with_capacity(n_authority + n_full), test_config }; for _ in 0..n_authority { net.add_authority_peer(); @@ -105,10 +109,7 @@ impl TestNetFactory for GrandpaTestNet { /// Create new test network with peers and given config. fn from_config(_config: &ProtocolConfig) -> Self { - GrandpaTestNet { - peers: Vec::new(), - test_config: Default::default(), - } + GrandpaTestNet { peers: Vec::new(), test_config: Default::default() } } fn default_config() -> ProtocolConfig { @@ -133,13 +134,10 @@ impl TestNetFactory for GrandpaTestNet { PassThroughVerifier::new(false) // use non-instant finality. 
} - fn make_block_import(&self, client: PeersClient) - -> ( - BlockImportAdapter, - Option>, - PeerData, - ) - { + fn make_block_import( + &self, + client: PeersClient, + ) -> (BlockImportAdapter, Option>, PeerData) { match client { PeersClient::Full(ref client, ref backend) => { let (import, link) = block_import( @@ -147,7 +145,8 @@ impl TestNetFactory for GrandpaTestNet { &self.test_config, LongestChain::new(backend.clone()), None, - ).expect("Could not create block import for fresh peer."); + ) + .expect("Could not create block import for fresh peer."); let justification_import = Box::new(import.clone()); ( BlockImportAdapter::new(import), @@ -181,9 +180,7 @@ pub(crate) struct TestApi { impl TestApi { pub fn new(genesis_authorities: AuthorityList) -> Self { - TestApi { - genesis_authorities, - } + TestApi { genesis_authorities } } } @@ -235,21 +232,24 @@ fn make_ids(keys: &[Ed25519Keyring]) -> AuthorityList { fn create_keystore(authority: Ed25519Keyring) -> (SyncCryptoStorePtr, tempfile::TempDir) { let keystore_path = tempfile::tempdir().expect("Creates keystore path"); - let keystore = Arc::new(LocalKeystore::open(keystore_path.path(), None) - .expect("Creates keystore")); + let keystore = + Arc::new(LocalKeystore::open(keystore_path.path(), None).expect("Creates keystore")); SyncCryptoStore::ed25519_generate_new(&*keystore, GRANDPA, Some(&authority.to_seed())) .expect("Creates authority key"); (keystore, keystore_path) } -fn block_until_complete(future: impl Future + Unpin, net: &Arc>, runtime: &mut Runtime) { +fn block_until_complete( + future: impl Future + Unpin, + net: &Arc>, + runtime: &mut Runtime, +) { let drive_to_completion = futures::future::poll_fn(|cx| { - net.lock().poll(cx); Poll::<()>::Pending + net.lock().poll(cx); + Poll::<()>::Pending }); - runtime.block_on( - future::select(future, drive_to_completion) - ); + runtime.block_on(future::select(future, drive_to_completion)); } // Spawns grandpa voters. Returns a future to spawn on the runtime. 
@@ -264,11 +264,9 @@ fn initialize_grandpa( let (net_service, link) = { // temporary needed for some reason - let link = net.peers[peer_id].data.lock().take().expect("link initialized at startup; qed"); - ( - net.peers[peer_id].network_service().clone(), - link, - ) + let link = + net.peers[peer_id].data.lock().take().expect("link initialized at startup; qed"); + (net.peers[peer_id].network_service().clone(), link) }; let grandpa_params = GrandpaParams { @@ -288,9 +286,10 @@ fn initialize_grandpa( shared_voter_state: SharedVoterState::empty(), telemetry: None, }; - let voter = run_grandpa_voter(grandpa_params).expect("all in order with client and network"); + let voter = + run_grandpa_voter(grandpa_params).expect("all in order with client and network"); - fn assert_send(_: &T) { } + fn assert_send(_: &T) {} assert_send(&voter); voters.push(voter); @@ -307,8 +306,9 @@ fn run_to_completion_with( net: Arc>, peers: &[Ed25519Keyring], with: F, -) -> u64 where - F: FnOnce(Handle) -> Option>>> +) -> u64 +where + F: FnOnce(Handle) -> Option>>>, { let mut wait_for = Vec::new(); @@ -322,20 +322,19 @@ fn run_to_completion_with( let highest_finalized = highest_finalized.clone(); let client = net.lock().peers[peer_id].client().clone(); - wait_for.push( - Box::pin( - client.finality_notification_stream() - .take_while(move |n| { - let mut highest_finalized = highest_finalized.write(); - if *n.header.number() > *highest_finalized { - *highest_finalized = *n.header.number(); - } - future::ready(n.header.number() < &blocks) - }) - .collect::>() - .map(|_| ()) - ) - ); + wait_for.push(Box::pin( + client + .finality_notification_stream() + .take_while(move |n| { + let mut highest_finalized = highest_finalized.write(); + if *n.header.number() > *highest_finalized { + *highest_finalized = *n.header.number(); + } + future::ready(n.header.number() < &blocks) + }) + .collect::>() + .map(|_| ()), + )); } // wait for all finalized on each. 
@@ -350,7 +349,7 @@ fn run_to_completion( runtime: &mut Runtime, blocks: u64, net: Arc>, - peers: &[Ed25519Keyring] + peers: &[Ed25519Keyring], ) -> u64 { run_to_completion_with(runtime, blocks, net, peers, |_| None) } @@ -386,8 +385,7 @@ fn finalize_3_voters_no_observers() { net.block_until_sync(); for i in 0..3 { - assert_eq!(net.peer(i).client().info().best_number, 20, - "Peer #{} failed to sync", i); + assert_eq!(net.peer(i).client().info().best_number, 20, "Peer #{} failed to sync", i); } let net = Arc::new(Mutex::new(net)); @@ -395,7 +393,12 @@ fn finalize_3_voters_no_observers() { // normally there's no justification for finalized blocks assert!( - net.lock().peer(0).client().justifications(&BlockId::Number(20)).unwrap().is_none(), + net.lock() + .peer(0) + .client() + .justifications(&BlockId::Number(20)) + .unwrap() + .is_none(), "Extra justification for block#1", ); } @@ -425,7 +428,7 @@ fn finalize_3_voters_1_full_observer() { observer_enabled: true, telemetry: None, }, - link: link, + link, network: net_service, voting_rule: (), prometheus_registry: None, @@ -444,9 +447,10 @@ fn finalize_3_voters_1_full_observer() { for peer_id in 0..4 { let client = net.lock().peers[peer_id].client().clone(); finality_notifications.push( - client.finality_notification_stream() + client + .finality_notification_stream() .take_while(|n| future::ready(n.header.number() < &20)) - .for_each(move |_| future::ready(())) + .for_each(move |_| future::ready(())), ); } @@ -458,9 +462,8 @@ fn finalize_3_voters_1_full_observer() { // all peers should have stored the justification for the best finalized block #20 for peer_id in 0..4 { let client = net.lock().peers[peer_id].client().as_full().unwrap(); - let justification = crate::aux_schema::best_justification::<_, Block>(&*client) - .unwrap() - .unwrap(); + let justification = + crate::aux_schema::best_justification::<_, Block>(&*client).unwrap().unwrap(); assert_eq!(justification.commit.target_number, 20); } @@ -469,27 +472,16 @@ 
fn finalize_3_voters_1_full_observer() { #[test] fn transition_3_voters_twice_1_full_observer() { sp_tracing::try_init_simple(); - let peers_a = &[ - Ed25519Keyring::Alice, - Ed25519Keyring::Bob, - Ed25519Keyring::Charlie, - ]; + let peers_a = &[Ed25519Keyring::Alice, Ed25519Keyring::Bob, Ed25519Keyring::Charlie]; - let peers_b = &[ - Ed25519Keyring::Dave, - Ed25519Keyring::Eve, - Ed25519Keyring::Ferdie, - ]; + let peers_b = &[Ed25519Keyring::Dave, Ed25519Keyring::Eve, Ed25519Keyring::Ferdie]; - let peers_c = &[ - Ed25519Keyring::Alice, - Ed25519Keyring::Eve, - Ed25519Keyring::Two, - ]; + let peers_c = &[Ed25519Keyring::Alice, Ed25519Keyring::Eve, Ed25519Keyring::Two]; let observer = &[Ed25519Keyring::One]; - let all_peers = peers_a.iter() + let all_peers = peers_a + .iter() .chain(peers_b) .chain(peers_c) .chain(observer) @@ -511,11 +503,9 @@ fn transition_3_voters_twice_1_full_observer() { let (net_service, link) = { let net = net.lock(); - let link = net.peers[peer_id].data.lock().take().expect("link initialized at startup; qed"); - ( - net.peers[peer_id].network_service().clone(), - link, - ) + let link = + net.peers[peer_id].data.lock().take().expect("link initialized at startup; qed"); + (net.peers[peer_id].network_service().clone(), link) }; let grandpa_params = GrandpaParams { @@ -536,7 +526,8 @@ fn transition_3_voters_twice_1_full_observer() { telemetry: None, }; - voters.push(run_grandpa_voter(grandpa_params).expect("all in order with client and network")); + voters + .push(run_grandpa_voter(grandpa_params).expect("all in order with client and network")); } net.lock().peer(0).push_blocks(1, false); @@ -544,10 +535,10 @@ fn transition_3_voters_twice_1_full_observer() { for (i, peer) in net.lock().peers().iter().enumerate() { let full_client = peer.client().as_full().expect("only full clients are used in test"); - assert_eq!(full_client.chain_info().best_number, 1, - "Peer #{} failed to sync", i); + assert_eq!(full_client.chain_info().best_number, 1, "Peer 
#{} failed to sync", i); - let set: AuthoritySet = crate::aux_schema::load_authorities(&*full_client).unwrap(); + let set: AuthoritySet = + crate::aux_schema::load_authorities(&*full_client).unwrap(); assert_eq!(set.current(), (0, make_ids(peers_a).as_slice())); assert_eq!(set.pending_changes().count(), 0); @@ -559,7 +550,8 @@ fn transition_3_voters_twice_1_full_observer() { let peers_c = peers_c.clone(); // wait for blocks to be finalized before generating new ones - let block_production = client.finality_notification_stream() + let block_production = client + .finality_notification_stream() .take_while(|n| future::ready(n.header.number() < &30)) .for_each(move |n| { match n.header.number() { @@ -571,10 +563,10 @@ fn transition_3_voters_twice_1_full_observer() { // generate transition at block 15, applied at 20. net.lock().peer(0).generate_blocks(1, BlockOrigin::File, |builder| { let mut block = builder.build().unwrap().block; - add_scheduled_change(&mut block, ScheduledChange { - next_authorities: make_ids(peers_b), - delay: 4, - }); + add_scheduled_change( + &mut block, + ScheduledChange { next_authorities: make_ids(peers_b), delay: 4 }, + ); block }); @@ -585,10 +577,10 @@ fn transition_3_voters_twice_1_full_observer() { // add more until we have 30. 
net.lock().peer(0).generate_blocks(1, BlockOrigin::File, |builder| { let mut block = builder.build().unwrap().block; - add_scheduled_change(&mut block, ScheduledChange { - next_authorities: make_ids(&peers_c), - delay: 0, - }); + add_scheduled_change( + &mut block, + ScheduledChange { next_authorities: make_ids(&peers_c), delay: 0 }, + ); block }); @@ -612,16 +604,18 @@ fn transition_3_voters_twice_1_full_observer() { for (peer_id, _) in all_peers.into_iter().enumerate() { let client = net.lock().peers[peer_id].client().clone(); finality_notifications.push( - client.finality_notification_stream() + client + .finality_notification_stream() .take_while(|n| future::ready(n.header.number() < &30)) .for_each(move |_| future::ready(())) .map(move |()| { let full_client = client.as_full().expect("only full clients are used in test"); - let set: AuthoritySet = crate::aux_schema::load_authorities(&*full_client).unwrap(); + let set: AuthoritySet = + crate::aux_schema::load_authorities(&*full_client).unwrap(); assert_eq!(set.current(), (2, make_ids(peers_c).as_slice())); assert_eq!(set.pending_changes().count(), 0); - }) + }), ); } @@ -648,7 +642,13 @@ fn justification_is_generated_periodically() { // when block#32 (justification_period) is finalized, justification // is required => generated for i in 0..3 { - assert!(net.lock().peer(i).client().justifications(&BlockId::Number(32)).unwrap().is_some()); + assert!(net + .lock() + .peer(i) + .client() + .justifications(&BlockId::Number(32)) + .unwrap() + .is_some()); } } @@ -670,10 +670,10 @@ fn sync_justifications_on_change_blocks() { // at block 21 we do add a transition which is instant net.peer(0).generate_blocks(1, BlockOrigin::File, |builder| { let mut block = builder.build().unwrap().block; - add_scheduled_change(&mut block, ScheduledChange { - next_authorities: make_ids(peers_b), - delay: 0, - }); + add_scheduled_change( + &mut block, + ScheduledChange { next_authorities: make_ids(peers_b), delay: 0 }, + ); block }); @@ 
-682,8 +682,7 @@ fn sync_justifications_on_change_blocks() { net.block_until_sync(); for i in 0..4 { - assert_eq!(net.peer(i).client().info().best_number, 25, - "Peer #{} failed to sync", i); + assert_eq!(net.peer(i).client().info().best_number, 25, "Peer #{} failed to sync", i); } let net = Arc::new(Mutex::new(net)); @@ -693,12 +692,25 @@ fn sync_justifications_on_change_blocks() { // the first 3 peers are grandpa voters and therefore have already finalized // block 21 and stored a justification for i in 0..3 { - assert!(net.lock().peer(i).client().justifications(&BlockId::Number(21)).unwrap().is_some()); + assert!(net + .lock() + .peer(i) + .client() + .justifications(&BlockId::Number(21)) + .unwrap() + .is_some()); } // the last peer should get the justification by syncing from other peers futures::executor::block_on(futures::future::poll_fn(move |cx| { - if net.lock().peer(3).client().justifications(&BlockId::Number(21)).unwrap().is_none() { + if net + .lock() + .peer(3) + .client() + .justifications(&BlockId::Number(21)) + .unwrap() + .is_none() + { net.lock().poll(cx); Poll::Pending } else { @@ -717,8 +729,12 @@ fn finalizes_multiple_pending_changes_in_order() { let peers_c = &[Ed25519Keyring::Dave, Ed25519Keyring::Alice, Ed25519Keyring::Bob]; let all_peers = &[ - Ed25519Keyring::Alice, Ed25519Keyring::Bob, Ed25519Keyring::Charlie, - Ed25519Keyring::Dave, Ed25519Keyring::Eve, Ed25519Keyring::Ferdie, + Ed25519Keyring::Alice, + Ed25519Keyring::Bob, + Ed25519Keyring::Charlie, + Ed25519Keyring::Dave, + Ed25519Keyring::Eve, + Ed25519Keyring::Ferdie, ]; let genesis_voters = make_ids(peers_a); @@ -735,10 +751,10 @@ fn finalizes_multiple_pending_changes_in_order() { // at block 21 we do add a transition which is instant net.peer(0).generate_blocks(1, BlockOrigin::File, |builder| { let mut block = builder.build().unwrap().block; - add_scheduled_change(&mut block, ScheduledChange { - next_authorities: make_ids(peers_b), - delay: 0, - }); + add_scheduled_change( + &mut 
block, + ScheduledChange { next_authorities: make_ids(peers_b), delay: 0 }, + ); block }); @@ -748,10 +764,10 @@ fn finalizes_multiple_pending_changes_in_order() { // at block 26 we add another which is enacted at block 30 net.peer(0).generate_blocks(1, BlockOrigin::File, |builder| { let mut block = builder.build().unwrap().block; - add_scheduled_change(&mut block, ScheduledChange { - next_authorities: make_ids(peers_c), - delay: 4, - }); + add_scheduled_change( + &mut block, + ScheduledChange { next_authorities: make_ids(peers_c), delay: 4 }, + ); block }); @@ -762,8 +778,7 @@ fn finalizes_multiple_pending_changes_in_order() { // all peers imported both change blocks for i in 0..6 { - assert_eq!(net.peer(i).client().info().best_number, 30, - "Peer #{} failed to sync", i); + assert_eq!(net.peer(i).client().info().best_number, 30, "Peer #{} failed to sync", i); } let net = Arc::new(Mutex::new(net)); @@ -794,16 +809,17 @@ fn force_change_to_new_set() { let mut block = builder.build().unwrap().block; // add a forced transition at block 12. - add_forced_change(&mut block, 0, ScheduledChange { - next_authorities: voters.clone(), - delay: 10, - }); + add_forced_change( + &mut block, + 0, + ScheduledChange { next_authorities: voters.clone(), delay: 10 }, + ); // add a normal transition too to ensure that forced changes take priority. 
- add_scheduled_change(&mut block, ScheduledChange { - next_authorities: make_ids(genesis_authorities), - delay: 5, - }); + add_scheduled_change( + &mut block, + ScheduledChange { next_authorities: make_ids(genesis_authorities), delay: 5 }, + ); block }); @@ -812,11 +828,11 @@ fn force_change_to_new_set() { net.lock().block_until_sync(); for (i, peer) in net.lock().peers().iter().enumerate() { - assert_eq!(peer.client().info().best_number, 26, - "Peer #{} failed to sync", i); + assert_eq!(peer.client().info().best_number, 26, "Peer #{} failed to sync", i); let full_client = peer.client().as_full().expect("only full clients are used in test"); - let set: AuthoritySet = crate::aux_schema::load_authorities(&*full_client).unwrap(); + let set: AuthoritySet = + crate::aux_schema::load_authorities(&*full_client).unwrap(); assert_eq!(set.current(), (1, voters.as_slice())); assert_eq!(set.pending_changes().count(), 0); @@ -841,12 +857,14 @@ fn allows_reimporting_change_blocks() { let (mut block_import, ..) = net.make_block_import(client.clone()); let full_client = client.as_full().unwrap(); - let builder = full_client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); + let builder = full_client + .new_block_at(&BlockId::Number(0), Default::default(), false) + .unwrap(); let mut block = builder.build().unwrap().block; - add_scheduled_change(&mut block, ScheduledChange { - next_authorities: make_ids(peers_b), - delay: 0, - }); + add_scheduled_change( + &mut block, + ScheduledChange { next_authorities: make_ids(peers_b), delay: 0 }, + ); let block = || { let block = block.clone(); @@ -886,13 +904,15 @@ fn test_bad_justification() { let (mut block_import, ..) 
= net.make_block_import(client.clone()); let full_client = client.as_full().expect("only full clients are used in test"); - let builder = full_client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); + let builder = full_client + .new_block_at(&BlockId::Number(0), Default::default(), false) + .unwrap(); let mut block = builder.build().unwrap().block; - add_scheduled_change(&mut block, ScheduledChange { - next_authorities: make_ids(peers_b), - delay: 0, - }); + add_scheduled_change( + &mut block, + ScheduledChange { next_authorities: make_ids(peers_b), delay: 0 }, + ); let block = || { let block = block.clone(); @@ -923,8 +943,8 @@ fn test_bad_justification() { #[test] fn voter_persists_its_votes() { - use std::sync::atomic::{AtomicUsize, Ordering}; use futures::future; + use std::sync::atomic::{AtomicUsize, Ordering}; sp_tracing::try_init_simple(); let mut runtime = Runtime::new().unwrap(); @@ -959,8 +979,7 @@ fn voter_persists_its_votes() { let set_state = { let bob_client = net.peer(1).client().clone(); - let (_, _, link) = net - .make_block_import(bob_client); + let (_, _, link) = net.make_block_import(bob_client); let LinkHalf { persistent_data, .. } = link.lock().take().unwrap(); let PersistentData { set_state, .. } = persistent_data; set_state @@ -983,10 +1002,7 @@ fn voter_persists_its_votes() { let (net_service, link) = { // temporary needed for some reason let link = net.peers[0].data.lock().take().expect("link initialized at startup; qed"); - ( - net.peers[0].network_service().clone(), - link, - ) + (net.peers[0].network_service().clone(), link) }; let grandpa_params = GrandpaParams { @@ -1026,8 +1042,7 @@ fn voter_persists_its_votes() { // read the persisted state after aborting alice_voter1. 
let alice_client = net.peer(0).client().clone(); - let (_block_import, _, link) = net - .make_block_import(alice_client); + let (_block_import, _, link) = net.make_block_import(alice_client); let link = link.lock().take().unwrap(); let grandpa_params = GrandpaParams { @@ -1064,8 +1079,7 @@ fn voter_persists_its_votes() { net.peer(0).push_blocks(20, false); net.block_until_sync(); - assert_eq!(net.peer(0).client().info().best_number, 20, - "Peer #{} failed to sync", 0); + assert_eq!(net.peer(0).client().info().best_number, 20, "Peer #{} failed to sync", 0); let net = Arc::new(Mutex::new(net)); @@ -1113,12 +1127,13 @@ fn voter_persists_its_votes() { // we push 20 more blocks to alice's chain net.lock().peer(0).push_blocks(20, false); - let interval = futures::stream::unfold(Delay::new(Duration::from_millis(200)), |delay| - Box::pin(async move { - delay.await; - Some(((), Delay::new(Duration::from_millis(200)))) - }) - ); + let interval = + futures::stream::unfold(Delay::new(Duration::from_millis(200)), |delay| { + Box::pin(async move { + delay.await; + Some(((), Delay::new(Duration::from_millis(200)))) + }) + }); interval .take_while(move |_| { @@ -1135,17 +1150,19 @@ fn voter_persists_its_votes() { runtime_handle.spawn(alice_voter2(peers, net.clone())); // and we push our own prevote for block 30 - let prevote = finality_grandpa::Prevote { - target_number: 30, - target_hash: block_30_hash, - }; + let prevote = + finality_grandpa::Prevote { target_number: 30, target_hash: block_30_hash }; // One should either be calling `Sink::send` or `Sink::start_send` followed // by `Sink::poll_complete` to make sure items are being flushed. Given that // we send in a loop including a delay until items are received, this can be // ignored for the sake of reduced complexity. 
- Pin::new(&mut *round_tx.lock()).start_send(finality_grandpa::Message::Prevote(prevote)).unwrap(); - } else if state.compare_exchange(1, 2, Ordering::SeqCst, Ordering::SeqCst).unwrap() == 1 { + Pin::new(&mut *round_tx.lock()) + .start_send(finality_grandpa::Message::Prevote(prevote)) + .unwrap(); + } else if state.compare_exchange(1, 2, Ordering::SeqCst, Ordering::SeqCst).unwrap() == + 1 + { // the next message we receive should be our own prevote let prevote = match signed.message { finality_grandpa::Message::Prevote(prevote) => prevote, @@ -1155,11 +1172,12 @@ fn voter_persists_its_votes() { // targeting block 30 assert!(prevote.target_number == 30); - // after alice restarts it should send its previous prevote - // therefore we won't ever receive it again since it will be a - // known message on the gossip layer - - } else if state.compare_exchange(2, 3, Ordering::SeqCst, Ordering::SeqCst).unwrap() == 2 { + // after alice restarts it should send its previous prevote + // therefore we won't ever receive it again since it will be a + // known message on the gossip layer + } else if state.compare_exchange(2, 3, Ordering::SeqCst, Ordering::SeqCst).unwrap() == + 2 + { // we then receive a precommit from alice for block 15 // even though we casted a prevote for block 30 let precommit = match signed.message { @@ -1202,13 +1220,13 @@ fn finalize_3_voters_1_light_observer() { }, net.peers[3].data.lock().take().expect("link initialized at startup; qed"), net.peers[3].network_service().clone(), - ).unwrap(); + ) + .unwrap(); net.peer(0).push_blocks(20, false); net.block_until_sync(); for i in 0..4 { - assert_eq!(net.peer(i).client().info().best_number, 20, - "Peer #{} failed to sync", i); + assert_eq!(net.peer(i).client().info().best_number, 20, "Peer #{} failed to sync", i); } let net = Arc::new(Mutex::new(net)); @@ -1231,7 +1249,11 @@ fn voter_catches_up_to_latest_round_when_behind() { let net = Arc::new(Mutex::new(net)); let mut finality_notifications = Vec::new(); - 
let voter = |keystore, peer_id, link, net: Arc>| -> Pin + Send>> { + let voter = |keystore, + peer_id, + link, + net: Arc>| + -> Pin + Send>> { let grandpa_params = GrandpaParams { config: Config { gossip_duration: TEST_GOSSIP_DURATION, @@ -1259,17 +1281,16 @@ fn voter_catches_up_to_latest_round_when_behind() { for (peer_id, key) in peers.iter().enumerate() { let (client, link) = { let net = net.lock(); - let link = net.peers[peer_id].data.lock().take().expect("link initialized at startup; qed"); - ( - net.peers[peer_id].client().clone(), - link, - ) + let link = + net.peers[peer_id].data.lock().take().expect("link initialized at startup; qed"); + (net.peers[peer_id].client().clone(), link) }; finality_notifications.push( - client.finality_notification_stream() + client + .finality_notification_stream() .take_while(|n| future::ready(n.header.number() < &50)) - .for_each(move |_| future::ready(())) + .for_each(move |_| future::ready(())), ); let (keystore, keystore_path) = create_keystore(*key); @@ -1324,11 +1345,10 @@ fn voter_catches_up_to_latest_round_when_behind() { }; let drive_to_completion = futures::future::poll_fn(|cx| { - net.lock().poll(cx); Poll::<()>::Pending + net.lock().poll(cx); + Poll::<()>::Pending }); - runtime.block_on( - future::select(test, drive_to_completion) - ); + runtime.block_on(future::select(test, drive_to_completion)); } type TestEnvironment = Environment< @@ -1350,11 +1370,7 @@ where N: NetworkT, VR: VotingRule, { - let PersistentData { - ref authority_set, - ref set_state, - .. - } = link.persistent_data; + let PersistentData { ref authority_set, ref set_state, .. 
} = link.persistent_data; let config = Config { gossip_duration: TEST_GOSSIP_DURATION, @@ -1366,13 +1382,8 @@ where telemetry: None, }; - let network = NetworkBridge::new( - network_service.clone(), - config.clone(), - set_state.clone(), - None, - None, - ); + let network = + NetworkBridge::new(network_service.clone(), config.clone(), set_state.clone(), None, None); Environment { authority_set: authority_set.clone(), @@ -1428,25 +1439,28 @@ fn grandpa_environment_respects_voting_rules() { // the unrestricted environment should just return the best block assert_eq!( - block_on(unrestricted_env.best_chain_containing( - peer.client().info().finalized_hash - )).unwrap().unwrap().1, + block_on(unrestricted_env.best_chain_containing(peer.client().info().finalized_hash)) + .unwrap() + .unwrap() + .1, 21, ); // both the other environments should return block 16, which is 3/4 of the // way in the unfinalized chain assert_eq!( - block_on(three_quarters_env.best_chain_containing( - peer.client().info().finalized_hash - )).unwrap().unwrap().1, + block_on(three_quarters_env.best_chain_containing(peer.client().info().finalized_hash)) + .unwrap() + .unwrap() + .1, 16, ); assert_eq!( - block_on(default_env.best_chain_containing( - peer.client().info().finalized_hash - )).unwrap().unwrap().1, + block_on(default_env.best_chain_containing(peer.client().info().finalized_hash)) + .unwrap() + .unwrap() + .1, 16, ); @@ -1455,18 +1469,20 @@ fn grandpa_environment_respects_voting_rules() { // the 3/4 environment should propose block 21 for voting assert_eq!( - block_on(three_quarters_env.best_chain_containing( - peer.client().info().finalized_hash - )).unwrap().unwrap().1, + block_on(three_quarters_env.best_chain_containing(peer.client().info().finalized_hash)) + .unwrap() + .unwrap() + .1, 21, ); // while the default environment will always still make sure we don't vote // on the best block (2 behind) assert_eq!( - block_on(default_env.best_chain_containing( - 
peer.client().info().finalized_hash - )).unwrap().unwrap().1, + block_on(default_env.best_chain_containing(peer.client().info().finalized_hash)) + .unwrap() + .unwrap() + .1, 19, ); @@ -1477,9 +1493,10 @@ fn grandpa_environment_respects_voting_rules() { // best block, there's a hard rule that we can't cast any votes lower than // the given base (#21). assert_eq!( - block_on(default_env.best_chain_containing( - peer.client().info().finalized_hash - )).unwrap().unwrap().1, + block_on(default_env.best_chain_containing(peer.client().info().finalized_hash)) + .unwrap() + .unwrap() + .1, 21, ); } @@ -1518,9 +1535,7 @@ fn grandpa_environment_never_overwrites_round_voter_state() { assert_eq!(get_current_round(2), None); // after completing round 1 we should start tracking round 2 - environment - .completed(1, round_state(), base(), &historical_votes()) - .unwrap(); + environment.completed(1, round_state(), base(), &historical_votes()).unwrap(); assert_eq!(get_current_round(2).unwrap(), HasVoted::No); @@ -1530,10 +1545,8 @@ fn grandpa_environment_never_overwrites_round_voter_state() { let info = peer.client().info(); - let prevote = finality_grandpa::Prevote { - target_hash: info.best_hash, - target_number: info.best_number, - }; + let prevote = + finality_grandpa::Prevote { target_hash: info.best_hash, target_number: info.best_number }; // we prevote for round 2 which should lead to us updating the voter state environment.prevoted(2, prevote.clone()).unwrap(); @@ -1545,9 +1558,7 @@ fn grandpa_environment_never_overwrites_round_voter_state() { // if we report round 1 as completed again we should not overwrite the // voter state for round 2 - environment - .completed(1, round_state(), base(), &historical_votes()) - .unwrap(); + environment.completed(1, round_state(), base(), &historical_votes()).unwrap(); assert_matches!(get_current_round(2).unwrap(), HasVoted::Yes(_, _)); } @@ -1566,7 +1577,9 @@ fn imports_justification_for_regular_blocks_on_import() { let (mut 
block_import, ..) = net.make_block_import(client.clone()); let full_client = client.as_full().expect("only full clients are used in test"); - let builder = full_client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); + let builder = full_client + .new_block_at(&BlockId::Number(0), Default::default(), false) + .unwrap(); let block = builder.build().unwrap().block; let block_hash = block.hash(); @@ -1597,11 +1610,7 @@ fn imports_justification_for_regular_blocks_on_import() { precommits: vec![precommit], }; - GrandpaJustification::from_commit( - &full_client, - round, - commit, - ).unwrap() + GrandpaJustification::from_commit(&full_client, round, commit).unwrap() }; // we import the block with justification attached @@ -1622,9 +1631,7 @@ fn imports_justification_for_regular_blocks_on_import() { ); // the justification should be imported and available from the client - assert!( - client.justifications(&BlockId::Hash(block_hash)).unwrap().is_some(), - ); + assert!(client.justifications(&BlockId::Hash(block_hash)).unwrap().is_some(),); } #[test] @@ -1644,10 +1651,7 @@ fn grandpa_environment_doesnt_send_equivocation_reports_for_itself() { }; let signed_prevote = { - let prevote = finality_grandpa::Prevote { - target_hash: H256::random(), - target_number: 1, - }; + let prevote = finality_grandpa::Prevote { target_hash: H256::random(), target_number: 1 }; let signed = alice.sign(&[]).into(); (prevote, signed) @@ -1667,10 +1671,7 @@ fn grandpa_environment_doesnt_send_equivocation_reports_for_itself() { // reporting the equivocation should fail since the offender is a local // authority (i.e. 
we have keys in our keystore for the given id) let equivocation_proof = sp_finality_grandpa::Equivocation::Prevote(equivocation.clone()); - assert!(matches!( - environment.report_equivocation(equivocation_proof), - Err(Error::Safety(_)) - )); + assert!(matches!(environment.report_equivocation(equivocation_proof), Err(Error::Safety(_)))); // if we set the equivocation offender to another id for which we don't have // keys it should work diff --git a/client/finality-grandpa/src/until_imported.rs b/client/finality-grandpa/src/until_imported.rs index 7cfd9e6074c47..d6e045bffc4f0 100644 --- a/client/finality-grandpa/src/until_imported.rs +++ b/client/finality-grandpa/src/until_imported.rs @@ -23,32 +23,31 @@ //! This is used for votes and commit messages currently. use super::{ - BlockStatus as BlockStatusT, - BlockSyncRequester as BlockSyncRequesterT, - CommunicationIn, - Error, + BlockStatus as BlockStatusT, BlockSyncRequester as BlockSyncRequesterT, CommunicationIn, Error, SignedMessage, }; -use log::{debug, warn}; -use sp_utils::mpsc::TracingUnboundedReceiver; -use futures::prelude::*; -use futures::stream::{Fuse, StreamExt}; -use futures_timer::Delay; use finality_grandpa::voter; -use parking_lot::Mutex; -use prometheus_endpoint::{ - Gauge, U64, PrometheusError, register, Registry, +use futures::{ + prelude::*, + stream::{Fuse, StreamExt}, }; +use futures_timer::Delay; +use log::{debug, warn}; +use parking_lot::Mutex; +use prometheus_endpoint::{register, Gauge, PrometheusError, Registry, U64}; use sc_client_api::{BlockImportNotification, ImportNotifications}; use sp_finality_grandpa::AuthorityId; use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor}; +use sp_utils::mpsc::TracingUnboundedReceiver; -use std::collections::{HashMap, VecDeque}; -use std::pin::Pin; -use std::sync::Arc; -use std::task::{Context, Poll}; -use std::time::Duration; +use std::{ + collections::{HashMap, VecDeque}, + pin::Pin, + sync::Arc, + task::{Context, Poll}, + 
time::Duration, +}; use wasm_timer::Instant; const LOG_PENDING_INTERVAL: Duration = Duration::from_secs(15); @@ -101,10 +100,13 @@ pub(crate) struct Metrics { impl Metrics { pub(crate) fn register(registry: &Registry) -> Result { Ok(Self { - global_waiting_messages: register(Gauge::new( - "finality_grandpa_until_imported_waiting_messages_number", - "Number of finality grandpa messages waiting within the until imported queue.", - )?, registry)?, + global_waiting_messages: register( + Gauge::new( + "finality_grandpa_until_imported_waiting_messages_number", + "Number of finality grandpa messages waiting within the until imported queue.", + )?, + registry, + )?, local_waiting_messages: 0, }) } @@ -120,7 +122,6 @@ impl Metrics { } } - impl Clone for Metrics { fn clone(&self) -> Self { Metrics { @@ -136,8 +137,7 @@ impl Drop for Metrics { fn drop(&mut self) { // Reduce the global counter by the amount of messages that were still left in the dropped // queue. - self.global_waiting_messages - .sub(self.local_waiting_messages) + self.global_waiting_messages.sub(self.local_waiting_messages) } } @@ -200,11 +200,12 @@ where // used in the event of missed import notifications const CHECK_PENDING_INTERVAL: Duration = Duration::from_secs(5); - let check_pending = futures::stream::unfold(Delay::new(CHECK_PENDING_INTERVAL), |delay| + let check_pending = futures::stream::unfold(Delay::new(CHECK_PENDING_INTERVAL), |delay| { Box::pin(async move { delay.await; Some((Ok(()), Delay::new(CHECK_PENDING_INTERVAL))) - })); + }) + }); UntilImported { import_notifications: import_notifications.fuse(), @@ -220,7 +221,9 @@ where } } -impl Stream for UntilImported where +impl Stream + for UntilImported +where Block: BlockT, BStatus: BlockStatusT, BSyncRequester: BlockSyncRequesterT, @@ -257,7 +260,7 @@ impl Stream for UntilImported break, } } @@ -269,12 +272,12 @@ impl Stream for UntilImported break, } } @@ -286,7 +289,9 @@ impl Stream for UntilImported Stream for UntilImported BlockUntilImported 
for SignedMessage { if let Some(number) = status_check.block_number(target_hash)? { if number != target_number { warn_authority_wrong_target(target_hash, msg.id); - return Ok(DiscardWaitOrReady::Discard); + return Ok(DiscardWaitOrReady::Discard) } else { - return Ok(DiscardWaitOrReady::Ready(msg)); + return Ok(DiscardWaitOrReady::Ready(msg)) } } @@ -386,13 +391,8 @@ impl BlockUntilImported for SignedMessage { /// Helper type definition for the stream which waits until vote targets for /// signed messages are imported. -pub(crate) type UntilVoteTargetImported = UntilImported< - Block, - BlockStatus, - BlockSyncRequester, - I, - SignedMessage, ->; +pub(crate) type UntilVoteTargetImported = + UntilImported>; /// This blocks a global message import, i.e. a commit or catch up messages, /// until all blocks referenced in its votes are known. @@ -445,19 +445,18 @@ impl BlockUntilImported for BlockGlobalMessage { if let Some(number) = status_check.block_number(target_hash)? { entry.insert(KnownOrUnknown::Known(number)); number - } else { entry.insert(KnownOrUnknown::Unknown(perceived_number)); perceived_number } - } + }, }; if canon_number != perceived_number { // invalid global message: messages targeting wrong number // or at least different from other vote in same global // message. - return Ok(false); + return Ok(false) } Ok(true) @@ -466,23 +465,24 @@ impl BlockUntilImported for BlockGlobalMessage { match input { voter::CommunicationIn::Commit(_, ref commit, ..) => { // add known hashes from all precommits. - let precommit_targets = commit.precommits - .iter() - .map(|c| (c.target_number, c.target_hash)); + let precommit_targets = + commit.precommits.iter().map(|c| (c.target_number, c.target_hash)); for (target_number, target_hash) in precommit_targets { if !query_known(target_hash, target_number)? { - return Ok(DiscardWaitOrReady::Discard); + return Ok(DiscardWaitOrReady::Discard) } } }, voter::CommunicationIn::CatchUp(ref catch_up, ..) 
=> { // add known hashes from all prevotes and precommits. - let prevote_targets = catch_up.prevotes + let prevote_targets = catch_up + .prevotes .iter() .map(|s| (s.prevote.target_number, s.prevote.target_hash)); - let precommit_targets = catch_up.precommits + let precommit_targets = catch_up + .precommits .iter() .map(|s| (s.precommit.target_number, s.precommit.target_hash)); @@ -490,29 +490,39 @@ impl BlockUntilImported for BlockGlobalMessage { for (target_number, target_hash) in targets { if !query_known(target_hash, target_number)? { - return Ok(DiscardWaitOrReady::Discard); + return Ok(DiscardWaitOrReady::Discard) } } }, }; } - let unknown_hashes = checked_hashes.into_iter().filter_map(|(hash, num)| match num { - KnownOrUnknown::Unknown(number) => Some((hash, number)), - KnownOrUnknown::Known(_) => None, - }).collect::>(); + let unknown_hashes = checked_hashes + .into_iter() + .filter_map(|(hash, num)| match num { + KnownOrUnknown::Unknown(number) => Some((hash, number)), + KnownOrUnknown::Known(_) => None, + }) + .collect::>(); if unknown_hashes.is_empty() { // none of the hashes in the global message were unknown. // we can just return the message directly. - return Ok(DiscardWaitOrReady::Ready(input)); + return Ok(DiscardWaitOrReady::Ready(input)) } let locked_global = Arc::new(Mutex::new(Some(input))); - let items_to_await = unknown_hashes.into_iter().map(|(hash, target_number)| { - (hash, target_number, BlockGlobalMessage { inner: locked_global.clone(), target_number }) - }).collect(); + let items_to_await = unknown_hashes + .into_iter() + .map(|(hash, target_number)| { + ( + hash, + target_number, + BlockGlobalMessage { inner: locked_global.clone(), target_number }, + ) + }) + .collect(); // schedule waits for all unknown messages. // when the last one of these has `wait_completed` called on it, @@ -525,7 +535,7 @@ impl BlockUntilImported for BlockGlobalMessage { // Delete the inner message so it won't ever be forwarded. 
Future calls to // `wait_completed` on the same `inner` will ignore it. *self.inner.lock() = None; - return None; + return None } match Arc::try_unwrap(self.inner) { @@ -542,25 +552,20 @@ impl BlockUntilImported for BlockGlobalMessage { /// A stream which gates off incoming global messages, i.e. commit and catch up /// messages, until all referenced block hashes have been imported. -pub(crate) type UntilGlobalMessageBlocksImported = UntilImported< - Block, - BlockStatus, - BlockSyncRequester, - I, - BlockGlobalMessage, ->; +pub(crate) type UntilGlobalMessageBlocksImported = + UntilImported>; #[cfg(test)] mod tests { use super::*; use crate::{CatchUp, CompactCommit}; - use substrate_test_runtime_client::runtime::{Block, Hash, Header}; - use sp_consensus::BlockOrigin; - use sc_client_api::BlockImportNotification; + use finality_grandpa::Precommit; use futures::future::Either; use futures_timer::Delay; + use sc_client_api::BlockImportNotification; + use sp_consensus::BlockOrigin; use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedSender}; - use finality_grandpa::Precommit; + use substrate_test_runtime_client::runtime::{Block, Hash, Header}; #[derive(Clone)] struct TestChainState { @@ -571,10 +576,8 @@ mod tests { impl TestChainState { fn new() -> (Self, ImportNotifications) { let (tx, rx) = tracing_unbounded("test"); - let state = TestChainState { - sender: tx, - known_blocks: Arc::new(Mutex::new(HashMap::new())), - }; + let state = + TestChainState { sender: tx, known_blocks: Arc::new(Mutex::new(HashMap::new())) }; (state, rx) } @@ -588,13 +591,15 @@ mod tests { let number = header.number().clone(); self.known_blocks.lock().insert(hash, number); - self.sender.unbounded_send(BlockImportNotification { - hash, - origin: BlockOrigin::File, - header, - is_new_best: false, - tree_route: None, - }).unwrap(); + self.sender + .unbounded_send(BlockImportNotification { + hash, + origin: BlockOrigin::File, + header, + is_new_best: false, + tree_route: None, + }) + 
.unwrap(); } } @@ -615,14 +620,17 @@ mod tests { impl Default for TestBlockSyncRequester { fn default() -> Self { - TestBlockSyncRequester { - requests: Arc::new(Mutex::new(Vec::new())), - } + TestBlockSyncRequester { requests: Arc::new(Mutex::new(Vec::new())) } } } impl BlockSyncRequesterT for TestBlockSyncRequester { - fn set_sync_fork_request(&self, _peers: Vec, hash: Hash, number: NumberFor) { + fn set_sync_fork_request( + &self, + _peers: Vec, + hash: Hash, + number: NumberFor, + ) { self.requests.lock().push((hash, number)); } } @@ -639,7 +647,7 @@ mod tests { // unwrap the commit from `CommunicationIn` returning its fields in a tuple, // panics if the given message isn't a commit - fn unapply_commit(msg: CommunicationIn) -> (u64, CompactCommit::) { + fn unapply_commit(msg: CommunicationIn) -> (u64, CompactCommit) { match msg { voter::CommunicationIn::Commit(round, commit, ..) => (round, commit), _ => panic!("expected commit"), @@ -658,7 +666,8 @@ mod tests { fn message_all_dependencies_satisfied( msg: CommunicationIn, enact_dependencies: F, - ) -> CommunicationIn where + ) -> CommunicationIn + where F: FnOnce(&TestChainState), { let (chain_state, import_notifications) = TestChainState::new(); @@ -688,7 +697,8 @@ mod tests { fn blocking_message_on_dependencies( msg: CommunicationIn, enact_dependencies: F, - ) -> CommunicationIn where + ) -> CommunicationIn + where F: FnOnce(&TestChainState), { let (chain_state, import_notifications) = TestChainState::new(); @@ -710,16 +720,17 @@ mod tests { // NOTE: needs to be cloned otherwise it is moved to the stream and // dropped too early. let inner_chain_state = chain_state.clone(); - let work = future::select(until_imported.into_future(), Delay::new(Duration::from_millis(100))) - .then(move |res| match res { - Either::Left(_) => panic!("timeout should have fired first"), - Either::Right((_, until_imported)) => { - // timeout fired. push in the headers. 
- enact_dependencies(&inner_chain_state); - - until_imported - } - }); + let work = + future::select(until_imported.into_future(), Delay::new(Duration::from_millis(100))) + .then(move |res| match res { + Either::Left(_) => panic!("timeout should have fired first"), + Either::Right((_, until_imported)) => { + // timeout fired. push in the headers. + enact_dependencies(&inner_chain_state); + + until_imported + }, + }); futures::executor::block_on(work).0.unwrap().unwrap() } @@ -734,37 +745,22 @@ mod tests { target_hash: h1.hash(), target_number: 5, precommits: vec![ - Precommit { - target_hash: h2.hash(), - target_number: 6, - }, - Precommit { - target_hash: h3.hash(), - target_number: 7, - }, + Precommit { target_hash: h2.hash(), target_number: 6 }, + Precommit { target_hash: h3.hash(), target_number: 7 }, ], auth_data: Vec::new(), // not used }; - let unknown_commit = || voter::CommunicationIn::Commit( - 0, - unknown_commit.clone(), - voter::Callback::Blank, - ); + let unknown_commit = + || voter::CommunicationIn::Commit(0, unknown_commit.clone(), voter::Callback::Blank); - let res = blocking_message_on_dependencies( - unknown_commit(), - |chain_state| { - chain_state.import_header(h1); - chain_state.import_header(h2); - chain_state.import_header(h3); - }, - ); + let res = blocking_message_on_dependencies(unknown_commit(), |chain_state| { + chain_state.import_header(h1); + chain_state.import_header(h2); + chain_state.import_header(h3); + }); - assert_eq!( - unapply_commit(res), - unapply_commit(unknown_commit()), - ); + assert_eq!(unapply_commit(res), unapply_commit(unknown_commit()),); } #[test] @@ -777,37 +773,22 @@ mod tests { target_hash: h1.hash(), target_number: 5, precommits: vec![ - Precommit { - target_hash: h2.hash(), - target_number: 6, - }, - Precommit { - target_hash: h3.hash(), - target_number: 7, - }, + Precommit { target_hash: h2.hash(), target_number: 6 }, + Precommit { target_hash: h3.hash(), target_number: 7 }, ], auth_data: Vec::new(), // not 
used }; - let known_commit = || voter::CommunicationIn::Commit( - 0, - known_commit.clone(), - voter::Callback::Blank, - ); + let known_commit = + || voter::CommunicationIn::Commit(0, known_commit.clone(), voter::Callback::Blank); - let res = message_all_dependencies_satisfied( - known_commit(), - |chain_state| { - chain_state.import_header(h1); - chain_state.import_header(h2); - chain_state.import_header(h3); - }, - ); + let res = message_all_dependencies_satisfied(known_commit(), |chain_state| { + chain_state.import_header(h1); + chain_state.import_header(h2); + chain_state.import_header(h3); + }); - assert_eq!( - unapply_commit(res), - unapply_commit(known_commit()), - ); + assert_eq!(unapply_commit(res), unapply_commit(known_commit()),); } #[test] @@ -816,37 +797,27 @@ mod tests { let h2 = make_header(6); let h3 = make_header(7); - let signed_prevote = |header: &Header| { - finality_grandpa::SignedPrevote { - id: Default::default(), - signature: Default::default(), - prevote: finality_grandpa::Prevote { - target_hash: header.hash(), - target_number: *header.number(), - }, - } + let signed_prevote = |header: &Header| finality_grandpa::SignedPrevote { + id: Default::default(), + signature: Default::default(), + prevote: finality_grandpa::Prevote { + target_hash: header.hash(), + target_number: *header.number(), + }, }; - let signed_precommit = |header: &Header| { - finality_grandpa::SignedPrecommit { - id: Default::default(), - signature: Default::default(), - precommit: finality_grandpa::Precommit { - target_hash: header.hash(), - target_number: *header.number(), - }, - } + let signed_precommit = |header: &Header| finality_grandpa::SignedPrecommit { + id: Default::default(), + signature: Default::default(), + precommit: finality_grandpa::Precommit { + target_hash: header.hash(), + target_number: *header.number(), + }, }; - let prevotes = vec![ - signed_prevote(&h1), - signed_prevote(&h3), - ]; + let prevotes = vec![signed_prevote(&h1), signed_prevote(&h3)]; - 
let precommits = vec![ - signed_precommit(&h1), - signed_precommit(&h2), - ]; + let precommits = vec![signed_precommit(&h1), signed_precommit(&h2)]; let unknown_catch_up = finality_grandpa::CatchUp { round_number: 1, @@ -856,24 +827,16 @@ mod tests { base_number: *h1.number(), }; - let unknown_catch_up = || voter::CommunicationIn::CatchUp( - unknown_catch_up.clone(), - voter::Callback::Blank, - ); + let unknown_catch_up = + || voter::CommunicationIn::CatchUp(unknown_catch_up.clone(), voter::Callback::Blank); - let res = blocking_message_on_dependencies( - unknown_catch_up(), - |chain_state| { - chain_state.import_header(h1); - chain_state.import_header(h2); - chain_state.import_header(h3); - }, - ); + let res = blocking_message_on_dependencies(unknown_catch_up(), |chain_state| { + chain_state.import_header(h1); + chain_state.import_header(h2); + chain_state.import_header(h3); + }); - assert_eq!( - unapply_catch_up(res), - unapply_catch_up(unknown_catch_up()), - ); + assert_eq!(unapply_catch_up(res), unapply_catch_up(unknown_catch_up()),); } #[test] @@ -882,37 +845,27 @@ mod tests { let h2 = make_header(6); let h3 = make_header(7); - let signed_prevote = |header: &Header| { - finality_grandpa::SignedPrevote { - id: Default::default(), - signature: Default::default(), - prevote: finality_grandpa::Prevote { - target_hash: header.hash(), - target_number: *header.number(), - }, - } + let signed_prevote = |header: &Header| finality_grandpa::SignedPrevote { + id: Default::default(), + signature: Default::default(), + prevote: finality_grandpa::Prevote { + target_hash: header.hash(), + target_number: *header.number(), + }, }; - let signed_precommit = |header: &Header| { - finality_grandpa::SignedPrecommit { - id: Default::default(), - signature: Default::default(), - precommit: finality_grandpa::Precommit { - target_hash: header.hash(), - target_number: *header.number(), - }, - } + let signed_precommit = |header: &Header| finality_grandpa::SignedPrecommit { + id: 
Default::default(), + signature: Default::default(), + precommit: finality_grandpa::Precommit { + target_hash: header.hash(), + target_number: *header.number(), + }, }; - let prevotes = vec![ - signed_prevote(&h1), - signed_prevote(&h3), - ]; + let prevotes = vec![signed_prevote(&h1), signed_prevote(&h3)]; - let precommits = vec![ - signed_precommit(&h1), - signed_precommit(&h2), - ]; + let precommits = vec![signed_precommit(&h1), signed_precommit(&h2)]; let unknown_catch_up = finality_grandpa::CatchUp { round_number: 1, @@ -922,24 +875,16 @@ mod tests { base_number: *h1.number(), }; - let unknown_catch_up = || voter::CommunicationIn::CatchUp( - unknown_catch_up.clone(), - voter::Callback::Blank, - ); + let unknown_catch_up = + || voter::CommunicationIn::CatchUp(unknown_catch_up.clone(), voter::Callback::Blank); - let res = message_all_dependencies_satisfied( - unknown_catch_up(), - |chain_state| { - chain_state.import_header(h1); - chain_state.import_header(h2); - chain_state.import_header(h3); - }, - ); + let res = message_all_dependencies_satisfied(unknown_catch_up(), |chain_state| { + chain_state.import_header(h1); + chain_state.import_header(h2); + chain_state.import_header(h3); + }); - assert_eq!( - unapply_catch_up(res), - unapply_catch_up(unknown_catch_up()), - ); + assert_eq!(unapply_catch_up(res), unapply_catch_up(unknown_catch_up()),); } #[test] @@ -970,23 +915,14 @@ mod tests { target_hash: h1.hash(), target_number: 5, precommits: vec![ - Precommit { - target_hash: h2.hash(), - target_number: 6, - }, - Precommit { - target_hash: h3.hash(), - target_number: 7, - }, + Precommit { target_hash: h2.hash(), target_number: 6 }, + Precommit { target_hash: h3.hash(), target_number: 7 }, ], auth_data: Vec::new(), // not used }; - let unknown_commit = || voter::CommunicationIn::Commit( - 0, - unknown_commit.clone(), - voter::Callback::Blank, - ); + let unknown_commit = + || voter::CommunicationIn::Commit(0, unknown_commit.clone(), voter::Callback::Blank); // we 
send the commit message and spawn the until_imported stream global_tx.unbounded_send(unknown_commit()).unwrap(); @@ -1002,7 +938,7 @@ mod tests { if block_sync_requests.contains(&(h2.hash(), *h2.number())) && block_sync_requests.contains(&(h3.hash(), *h3.number())) { - return Poll::Ready(()); + return Poll::Ready(()) } // NOTE: nothing in this function is future-aware (i.e nothing gets registered to wake @@ -1016,10 +952,12 @@ mod tests { // the `until_imported` stream doesn't request the blocks immediately, // but it should request them after a small timeout let timeout = Delay::new(Duration::from_secs(60)); - let test = future::select(assert, timeout).map(|res| match res { - Either::Left(_) => {}, - Either::Right(_) => panic!("timed out waiting for block sync request"), - }).map(drop); + let test = future::select(assert, timeout) + .map(|res| match res { + Either::Left(_) => {}, + Either::Right(_) => panic!("timed out waiting for block sync request"), + }) + .map(drop); futures::executor::block_on(test); } @@ -1035,10 +973,8 @@ mod tests { base_number: *header.number(), }; - let catch_up = voter::CommunicationIn::CatchUp( - unknown_catch_up.clone(), - voter::Callback::Blank, - ); + let catch_up = + voter::CommunicationIn::CatchUp(unknown_catch_up.clone(), voter::Callback::Blank); Arc::new(Mutex::new(Some(catch_up))) } @@ -1047,15 +983,10 @@ mod tests { fn block_global_message_wait_completed_return_when_all_awaited() { let msg_inner = test_catch_up(); - let waiting_block_1 = BlockGlobalMessage:: { - inner: msg_inner.clone(), - target_number: 1, - }; + let waiting_block_1 = + BlockGlobalMessage:: { inner: msg_inner.clone(), target_number: 1 }; - let waiting_block_2 = BlockGlobalMessage:: { - inner: msg_inner, - target_number: 2, - }; + let waiting_block_2 = BlockGlobalMessage:: { inner: msg_inner, target_number: 2 }; // waiting_block_2 is still waiting for block 2, thus this should return `None`. 
assert!(waiting_block_1.wait_completed(1).is_none()); @@ -1069,15 +1000,10 @@ mod tests { fn block_global_message_wait_completed_return_none_on_block_number_missmatch() { let msg_inner = test_catch_up(); - let waiting_block_1 = BlockGlobalMessage:: { - inner: msg_inner.clone(), - target_number: 1, - }; + let waiting_block_1 = + BlockGlobalMessage:: { inner: msg_inner.clone(), target_number: 1 }; - let waiting_block_2 = BlockGlobalMessage:: { - inner: msg_inner, - target_number: 2, - }; + let waiting_block_2 = BlockGlobalMessage:: { inner: msg_inner, target_number: 2 }; // Calling wait_completed with wrong block number should yield None. assert!(waiting_block_1.wait_completed(1234).is_none()); diff --git a/client/finality-grandpa/src/voting_rule.rs b/client/finality-grandpa/src/voting_rule.rs index a5515c1be23ed..b974afe0d352e 100644 --- a/client/finality-grandpa/src/voting_rule.rs +++ b/client/finality-grandpa/src/voting_rule.rs @@ -22,15 +22,15 @@ //! restrictions that are taken into account by the GRANDPA environment when //! selecting a finality target to vote on. -use std::future::Future; -use std::sync::Arc; -use std::pin::Pin; +use std::{future::Future, pin::Pin, sync::Arc}; use dyn_clone::DynClone; use sc_client_api::blockchain::HeaderBackend; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{Block as BlockT, Header, NumberFor, One, Zero}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header, NumberFor, One, Zero}, +}; /// A future returned by a `VotingRule` to restrict a given vote, if any restriction is necessary. pub type VotingRuleResult = @@ -63,7 +63,8 @@ where ) -> VotingRuleResult; } -impl VotingRule for () where +impl VotingRule for () +where Block: BlockT, B: HeaderBackend, { @@ -83,7 +84,8 @@ impl VotingRule for () where /// behind the best block. 
#[derive(Clone)] pub struct BeforeBestBlockBy(N); -impl VotingRule for BeforeBestBlockBy> where +impl VotingRule for BeforeBestBlockBy> +where Block: BlockT, B: HeaderBackend, { @@ -97,7 +99,7 @@ impl VotingRule for BeforeBestBlockBy> wher use sp_arithmetic::traits::Saturating; if current_target.number().is_zero() { - return Box::pin(async { None }); + return Box::pin(async { None }) } // find the target number restricted by this rule @@ -105,17 +107,13 @@ impl VotingRule for BeforeBestBlockBy> wher // our current target is already lower than this rule would restrict if target_number >= *current_target.number() { - return Box::pin(async { None }); + return Box::pin(async { None }) } let current_target = current_target.clone(); // find the block at the given target height - Box::pin(std::future::ready(find_target( - &*backend, - target_number.clone(), - ¤t_target, - ))) + Box::pin(std::future::ready(find_target(&*backend, target_number.clone(), ¤t_target))) } } @@ -125,7 +123,8 @@ impl VotingRule for BeforeBestBlockBy> wher #[derive(Clone)] pub struct ThreeQuartersOfTheUnfinalizedChain; -impl VotingRule for ThreeQuartersOfTheUnfinalizedChain where +impl VotingRule for ThreeQuartersOfTheUnfinalizedChain +where Block: BlockT, B: HeaderBackend, { @@ -150,15 +149,11 @@ impl VotingRule for ThreeQuartersOfTheUnfinalizedChain where // our current target is already lower than this rule would restrict if target_number >= *current_target.number() { - return Box::pin(async { None }); + return Box::pin(async { None }) } // find the block at the given target height - Box::pin(std::future::ready(find_target( - &*backend, - target_number, - current_target, - ))) + Box::pin(std::future::ready(find_target(&*backend, target_number, current_target))) } } @@ -167,7 +162,8 @@ fn find_target( backend: &B, target_number: NumberFor, current_header: &Block::Header, -) -> Option<(Block::Hash, NumberFor)> where +) -> Option<(Block::Hash, NumberFor)> +where Block: BlockT, B: HeaderBackend, { 
@@ -184,11 +180,13 @@ fn find_target( } if *target_header.number() == target_number { - return Some((target_hash, target_number)); + return Some((target_hash, target_number)) } target_hash = *target_header.parent_hash(); - target_header = backend.header(BlockId::Hash(target_hash)).ok()? + target_header = backend + .header(BlockId::Hash(target_hash)) + .ok()? .expect("Header known to exist due to the existence of one of its descendents; qed"); } } @@ -199,13 +197,12 @@ struct VotingRules { impl Clone for VotingRules { fn clone(&self) -> Self { - VotingRules { - rules: self.rules.clone(), - } + VotingRules { rules: self.rules.clone() } } } -impl VotingRule for VotingRules where +impl VotingRule for VotingRules +where Block: BlockT, B: HeaderBackend + 'static, { @@ -230,8 +227,8 @@ impl VotingRule for VotingRules where .await .filter(|(_, restricted_number)| { // NOTE: we can only restrict votes within the interval [base, target) - restricted_number >= base.number() - && restricted_number < restricted_target.number() + restricted_number >= base.number() && + restricted_number < restricted_target.number() }) .and_then(|(hash, _)| backend.header(BlockId::Hash(hash)).ok()) .and_then(std::convert::identity) @@ -257,7 +254,8 @@ pub struct VotingRulesBuilder { rules: Vec>>, } -impl Default for VotingRulesBuilder where +impl Default for VotingRulesBuilder +where Block: BlockT, B: HeaderBackend + 'static, { @@ -268,19 +266,19 @@ impl Default for VotingRulesBuilder where } } -impl VotingRulesBuilder where +impl VotingRulesBuilder +where Block: BlockT, B: HeaderBackend + 'static, { /// Return a new voting rule builder using the given backend. pub fn new() -> Self { - VotingRulesBuilder { - rules: Vec::new(), - } + VotingRulesBuilder { rules: Vec::new() } } /// Add a new voting rule to the builder. 
- pub fn add(mut self, rule: R) -> Self where + pub fn add(mut self, rule: R) -> Self + where R: VotingRule + 'static, { self.rules.push(Box::new(rule)); @@ -288,8 +286,9 @@ impl VotingRulesBuilder where } /// Add all given voting rules to the builder. - pub fn add_all(mut self, rules: I) -> Self where - I: IntoIterator>>, + pub fn add_all(mut self, rules: I) -> Self + where + I: IntoIterator>>, { self.rules.extend(rules); self @@ -298,13 +297,12 @@ impl VotingRulesBuilder where /// Return a new `VotingRule` that applies all of the previously added /// voting rules in-order. pub fn build(self) -> impl VotingRule + Clone { - VotingRules { - rules: Arc::new(self.rules), - } + VotingRules { rules: Arc::new(self.rules) } } } -impl VotingRule for Box> where +impl VotingRule for Box> +where Block: BlockT, B: HeaderBackend, Self: Clone, @@ -358,33 +356,19 @@ mod tests { fn multiple_voting_rules_cannot_restrict_past_base() { // setup an aggregate voting rule composed of two voting rules // where each subtracts 50 blocks from the current target - let rule = VotingRulesBuilder::new() - .add(Subtract(50)) - .add(Subtract(50)) - .build(); + let rule = VotingRulesBuilder::new().add(Subtract(50)).add(Subtract(50)).build(); let mut client = Arc::new(TestClientBuilder::new().build()); for _ in 0..200 { - let block = client - .new_block(Default::default()) - .unwrap() - .build() - .unwrap() - .block; + let block = client.new_block(Default::default()).unwrap().build().unwrap().block; futures::executor::block_on(client.import(BlockOrigin::Own, block)).unwrap(); } - let genesis = client - .header(&BlockId::Number(0u32.into())) - .unwrap() - .unwrap(); + let genesis = client.header(&BlockId::Number(0u32.into())).unwrap().unwrap(); - let best = client - .header(&BlockId::Hash(client.info().best_hash)) - .unwrap() - .unwrap(); + let best = client.header(&BlockId::Hash(client.info().best_hash)).unwrap().unwrap(); let (_, number) = 
futures::executor::block_on(rule.restrict_vote(client.clone(), &genesis, &best, &best)) @@ -394,10 +378,7 @@ mod tests { // which means that we should be voting for block #100 assert_eq!(number, 100); - let block110 = client - .header(&BlockId::Number(110u32.into())) - .unwrap() - .unwrap(); + let block110 = client.header(&BlockId::Number(110u32.into())).unwrap().unwrap(); let (_, number) = futures::executor::block_on(rule.restrict_vote( client.clone(), diff --git a/client/informant/src/display.rs b/client/informant/src/display.rs index 0b7f8bcfaf16b..441675d2986f3 100644 --- a/client/informant/src/display.rs +++ b/client/informant/src/display.rs @@ -84,34 +84,32 @@ impl InformantDisplay { let diff_bytes_inbound = total_bytes_inbound - self.last_total_bytes_inbound; let diff_bytes_outbound = total_bytes_outbound - self.last_total_bytes_outbound; - let (avg_bytes_per_sec_inbound, avg_bytes_per_sec_outbound) = - if elapsed > 0 { - self.last_total_bytes_inbound = total_bytes_inbound; - self.last_total_bytes_outbound = total_bytes_outbound; - (diff_bytes_inbound / elapsed, diff_bytes_outbound / elapsed) - } else { - (diff_bytes_inbound, diff_bytes_outbound) - }; - - let (level, status, target) = match ( - net_status.sync_state, - net_status.best_seen_block, - net_status.state_sync - ) { - (_, _, Some(state)) => ( - "⚙️ ", - "Downloading state".into(), - format!(", {}%, ({:.2}) Mib", state.percentage, (state.size as f32) / (1024f32 * 1024f32)), - ), - (SyncState::Idle, _, _) => ("💤", "Idle".into(), "".into()), - (SyncState::Downloading, None, _) => ("⚙️ ", format!("Preparing{}", speed), "".into()), - (SyncState::Downloading, Some(n), None) => ( - "⚙️ ", - format!("Syncing{}", speed), - format!(", target=#{}", n), - ), + let (avg_bytes_per_sec_inbound, avg_bytes_per_sec_outbound) = if elapsed > 0 { + self.last_total_bytes_inbound = total_bytes_inbound; + self.last_total_bytes_outbound = total_bytes_outbound; + (diff_bytes_inbound / elapsed, diff_bytes_outbound / elapsed) 
+ } else { + (diff_bytes_inbound, diff_bytes_outbound) }; + let (level, status, target) = + match (net_status.sync_state, net_status.best_seen_block, net_status.state_sync) { + (_, _, Some(state)) => ( + "⚙️ ", + "Downloading state".into(), + format!( + ", {}%, ({:.2}) Mib", + state.percentage, + (state.size as f32) / (1024f32 * 1024f32) + ), + ), + (SyncState::Idle, _, _) => ("💤", "Idle".into(), "".into()), + (SyncState::Downloading, None, _) => + ("⚙️ ", format!("Preparing{}", speed), "".into()), + (SyncState::Downloading, Some(n), None) => + ("⚙️ ", format!("Syncing{}", speed), format!(", target=#{}", n)), + }; + if self.format.enable_color { info!( target: "substrate", @@ -151,7 +149,7 @@ impl InformantDisplay { fn speed( best_number: NumberFor, last_number: Option>, - last_update: Instant + last_update: Instant, ) -> String { // Number of milliseconds elapsed since last time. let elapsed_ms = { @@ -164,25 +162,28 @@ fn speed( // Number of blocks that have been imported since last time. let diff = match last_number { None => return String::new(), - Some(n) => best_number.saturating_sub(n) + Some(n) => best_number.saturating_sub(n), }; if let Ok(diff) = TryInto::::try_into(diff) { // If the number of blocks can be converted to a regular integer, then it's easy: just // do the math and turn it into a `f64`. - let speed = diff.saturating_mul(10_000).checked_div(u128::from(elapsed_ms)) - .map_or(0.0, |s| s as f64) / 10.0; + let speed = diff + .saturating_mul(10_000) + .checked_div(u128::from(elapsed_ms)) + .map_or(0.0, |s| s as f64) / + 10.0; format!(" {:4.1} bps", speed) - } else { // If the number of blocks can't be converted to a regular integer, then we need a more // algebraic approach and we stay within the realm of integers. 
let one_thousand = NumberFor::::from(1_000u32); - let elapsed = NumberFor::::from( - >::try_from(elapsed_ms).unwrap_or(u32::MAX) - ); + let elapsed = + NumberFor::::from(>::try_from(elapsed_ms).unwrap_or(u32::MAX)); - let speed = diff.saturating_mul(one_thousand).checked_div(&elapsed) + let speed = diff + .saturating_mul(one_thousand) + .checked_div(&elapsed) .unwrap_or_else(Zero::zero); format!(" {} bps", speed) } diff --git a/client/informant/src/lib.rs b/client/informant/src/lib.rs index a05ab368e3ed7..6a91f583cd3df 100644 --- a/client/informant/src/lib.rs +++ b/client/informant/src/lib.rs @@ -25,10 +25,10 @@ use log::{info, trace, warn}; use parity_util_mem::MallocSizeOf; use sc_client_api::{BlockchainEvents, UsageProvider}; use sc_network::NetworkService; +use sc_transaction_pool_api::TransactionPool; use sp_blockchain::HeaderMetadata; use sp_runtime::traits::{Block as BlockT, Header}; -use sc_transaction_pool_api::TransactionPool; -use std::{fmt::Display, sync::Arc, time::Duration, collections::VecDeque}; +use std::{collections::VecDeque, fmt::Display, sync::Arc, time::Duration}; mod display; @@ -48,9 +48,7 @@ pub struct OutputFormat { impl Default for OutputFormat { fn default() -> Self { - Self { - enable_color: true, - } + Self { enable_color: true } } } @@ -74,8 +72,7 @@ pub async fn build( network: Arc::Hash>>, pool: Arc, format: OutputFormat, -) -where +) where C: UsageProvider + HeaderMetadata + BlockchainEvents, >::Error: Display, { @@ -131,19 +128,19 @@ where client.import_notification_stream().for_each(move |n| { // detect and log reorganizations. 
if let Some((ref last_num, ref last_hash)) = last_best { - if n.header.parent_hash() != last_hash && n.is_new_best { - let maybe_ancestor = sp_blockchain::lowest_common_ancestor( - &*client, - last_hash.clone(), - n.hash, - ); + if n.header.parent_hash() != last_hash && n.is_new_best { + let maybe_ancestor = + sp_blockchain::lowest_common_ancestor(&*client, last_hash.clone(), n.hash); match maybe_ancestor { Ok(ref ancestor) if ancestor.hash != *last_hash => info!( "♻️ Reorg on #{},{} to #{},{}, common ancestor #{},{}", - Colour::Red.bold().paint(format!("{}", last_num)), last_hash, - Colour::Green.bold().paint(format!("{}", n.header.number())), n.hash, - Colour::White.bold().paint(format!("{}", ancestor.number)), ancestor.hash, + Colour::Red.bold().paint(format!("{}", last_num)), + last_hash, + Colour::Green.bold().paint(format!("{}", n.header.number())), + n.hash, + Colour::White.bold().paint(format!("{}", ancestor.number)), + ancestor.hash, ), Ok(_) => {}, Err(e) => warn!("Error computing tree route: {}", e), @@ -155,7 +152,6 @@ where last_best = Some((n.header.number().clone(), n.hash.clone())); } - // If we already printed a message for a given block recently, // we should not print it again. if !last_blocks.contains(&n.hash) { diff --git a/client/keystore/src/lib.rs b/client/keystore/src/lib.rs index 38ab640d2e303..5e29f691997e6 100644 --- a/client/keystore/src/lib.rs +++ b/client/keystore/src/lib.rs @@ -19,9 +19,9 @@ //! Keystore (and session key management) for ed25519 based chains like Polkadot. #![warn(missing_docs)] -use std::io; use sp_core::crypto::KeyTypeId; use sp_keystore::Error as TraitError; +use std::io; /// Local keystore implementation mod local; @@ -35,19 +35,19 @@ pub enum Error { /// JSON error. Json(serde_json::Error), /// Invalid password. 
- #[display(fmt="Invalid password")] + #[display(fmt = "Invalid password")] InvalidPassword, /// Invalid BIP39 phrase - #[display(fmt="Invalid recovery phrase (BIP39) data")] + #[display(fmt = "Invalid recovery phrase (BIP39) data")] InvalidPhrase, /// Invalid seed - #[display(fmt="Invalid seed")] + #[display(fmt = "Invalid seed")] InvalidSeed, /// Public key type is not supported - #[display(fmt="Key crypto type is not supported")] + #[display(fmt = "Key crypto type is not supported")] KeyNotSupported(KeyTypeId), /// Keystore unavailable - #[display(fmt="Keystore unavailable")] + #[display(fmt = "Keystore unavailable")] Unavailable, } @@ -58,9 +58,8 @@ impl From for TraitError { fn from(error: Error) -> Self { match error { Error::KeyNotSupported(id) => TraitError::KeyNotSupported(id), - Error::InvalidSeed | Error::InvalidPhrase | Error::InvalidPassword => { - TraitError::ValidationError(error.to_string()) - }, + Error::InvalidSeed | Error::InvalidPhrase | Error::InvalidPassword => + TraitError::ValidationError(error.to_string()), Error::Unavailable => TraitError::Unavailable, Error::Io(e) => TraitError::Other(e.to_string()), Error::Json(e) => TraitError::Other(e.to_string()), @@ -77,4 +76,3 @@ impl std::error::Error for Error { } } } - diff --git a/client/keystore/src/local.rs b/client/keystore/src/local.rs index 2377ea127756e..53f4785fb691b 100644 --- a/client/keystore/src/local.rs +++ b/client/keystore/src/local.rs @@ -17,30 +17,27 @@ // //! 
Local keystore implementation -use std::{ - collections::{HashMap, HashSet}, - fs::{self, File}, - io::Write, - path::PathBuf, - sync::Arc, -}; use async_trait::async_trait; use parking_lot::RwLock; +use sp_application_crypto::{ecdsa, ed25519, sr25519, AppKey, AppPair, IsWrappedBy}; use sp_core::{ - crypto::{CryptoTypePublicPair, KeyTypeId, Pair as PairT, ExposeSecret, SecretString, Public}, - sr25519::{Public as Sr25519Public, Pair as Sr25519Pair}, + crypto::{CryptoTypePublicPair, ExposeSecret, KeyTypeId, Pair as PairT, Public, SecretString}, + sr25519::{Pair as Sr25519Pair, Public as Sr25519Public}, Encode, }; use sp_keystore::{ - CryptoStore, - SyncCryptoStorePtr, - Error as TraitError, - SyncCryptoStore, - vrf::{VRFTranscriptData, VRFSignature, make_transcript}, + vrf::{make_transcript, VRFSignature, VRFTranscriptData}, + CryptoStore, Error as TraitError, SyncCryptoStore, SyncCryptoStorePtr, +}; +use std::{ + collections::{HashMap, HashSet}, + fs::{self, File}, + io::Write, + path::PathBuf, + sync::Arc, }; -use sp_application_crypto::{ed25519, sr25519, ecdsa, AppPair, AppKey, IsWrappedBy}; -use crate::{Result, Error}; +use crate::{Error, Result}; /// A local based keystore that is either memory-based or filesystem-based. pub struct LocalKeystore(RwLock); @@ -62,14 +59,20 @@ impl LocalKeystore { /// /// Returns `Ok(None)` if the key doesn't exist, `Ok(Some(_))` if the key exists and /// `Err(_)` when something failed. 
- pub fn key_pair(&self, public: &::Public) -> Result> { + pub fn key_pair( + &self, + public: &::Public, + ) -> Result> { self.0.read().key_pair::(public) } } #[async_trait] impl CryptoStore for LocalKeystore { - async fn keys(&self, id: KeyTypeId) -> std::result::Result, TraitError> { + async fn keys( + &self, + id: KeyTypeId, + ) -> std::result::Result, TraitError> { SyncCryptoStore::keys(self, id) } @@ -109,7 +112,12 @@ impl CryptoStore for LocalKeystore { SyncCryptoStore::ecdsa_generate_new(self, id, seed) } - async fn insert_unknown(&self, id: KeyTypeId, suri: &str, public: &[u8]) -> std::result::Result<(), ()> { + async fn insert_unknown( + &self, + id: KeyTypeId, + suri: &str, + public: &[u8], + ) -> std::result::Result<(), ()> { SyncCryptoStore::insert_unknown(self, id, suri, public) } @@ -154,28 +162,22 @@ impl CryptoStore for LocalKeystore { } impl SyncCryptoStore for LocalKeystore { - fn keys( - &self, - id: KeyTypeId - ) -> std::result::Result, TraitError> { + fn keys(&self, id: KeyTypeId) -> std::result::Result, TraitError> { let raw_keys = self.0.read().raw_public_keys(id)?; - Ok(raw_keys.into_iter() - .fold(Vec::new(), |mut v, k| { - v.push(CryptoTypePublicPair(sr25519::CRYPTO_ID, k.clone())); - v.push(CryptoTypePublicPair(ed25519::CRYPTO_ID, k.clone())); - v.push(CryptoTypePublicPair(ecdsa::CRYPTO_ID, k)); - v - })) + Ok(raw_keys.into_iter().fold(Vec::new(), |mut v, k| { + v.push(CryptoTypePublicPair(sr25519::CRYPTO_ID, k.clone())); + v.push(CryptoTypePublicPair(ed25519::CRYPTO_ID, k.clone())); + v.push(CryptoTypePublicPair(ecdsa::CRYPTO_ID, k)); + v + })) } fn supported_keys( &self, id: KeyTypeId, - keys: Vec + keys: Vec, ) -> std::result::Result, TraitError> { - let all_keys = SyncCryptoStore::keys(self, id)? 
- .into_iter() - .collect::>(); + let all_keys = SyncCryptoStore::keys(self, id)?.into_iter().collect::>(); Ok(keys.into_iter().filter(|key| all_keys.contains(key)).collect::>()) } @@ -188,36 +190,40 @@ impl SyncCryptoStore for LocalKeystore { match key.0 { ed25519::CRYPTO_ID => { let pub_key = ed25519::Public::from_slice(key.1.as_slice()); - let key_pair = self.0.read() + let key_pair = self + .0 + .read() .key_pair_by_type::(&pub_key, id) .map_err(|e| TraitError::from(e))?; key_pair.map(|k| k.sign(msg).encode()).map(Ok).transpose() - } + }, sr25519::CRYPTO_ID => { let pub_key = sr25519::Public::from_slice(key.1.as_slice()); - let key_pair = self.0.read() + let key_pair = self + .0 + .read() .key_pair_by_type::(&pub_key, id) .map_err(|e| TraitError::from(e))?; key_pair.map(|k| k.sign(msg).encode()).map(Ok).transpose() }, ecdsa::CRYPTO_ID => { let pub_key = ecdsa::Public::from_slice(key.1.as_slice()); - let key_pair = self.0.read() + let key_pair = self + .0 + .read() .key_pair_by_type::(&pub_key, id) .map_err(|e| TraitError::from(e))?; key_pair.map(|k| k.sign(msg).encode()).map(Ok).transpose() - } - _ => Err(TraitError::KeyNotSupported(id)) + }, + _ => Err(TraitError::KeyNotSupported(id)), } } fn sr25519_public_keys(&self, key_type: KeyTypeId) -> Vec { - self.0.read().raw_public_keys(key_type) - .map(|v| { - v.into_iter() - .map(|k| sr25519::Public::from_slice(k.as_slice())) - .collect() - }) + self.0 + .read() + .raw_public_keys(key_type) + .map(|v| v.into_iter().map(|k| sr25519::Public::from_slice(k.as_slice())).collect()) .unwrap_or_default() } @@ -227,20 +233,20 @@ impl SyncCryptoStore for LocalKeystore { seed: Option<&str>, ) -> std::result::Result { let pair = match seed { - Some(seed) => self.0.write().insert_ephemeral_from_seed_by_type::(seed, id), + Some(seed) => + self.0.write().insert_ephemeral_from_seed_by_type::(seed, id), None => self.0.write().generate_by_type::(id), - }.map_err(|e| -> TraitError { e.into() })?; + } + .map_err(|e| -> TraitError { 
e.into() })?; Ok(pair.public()) } fn ed25519_public_keys(&self, key_type: KeyTypeId) -> Vec { - self.0.read().raw_public_keys(key_type) - .map(|v| { - v.into_iter() - .map(|k| ed25519::Public::from_slice(k.as_slice())) - .collect() - }) + self.0 + .read() + .raw_public_keys(key_type) + .map(|v| v.into_iter().map(|k| ed25519::Public::from_slice(k.as_slice())).collect()) .unwrap_or_default() } @@ -250,20 +256,20 @@ impl SyncCryptoStore for LocalKeystore { seed: Option<&str>, ) -> std::result::Result { let pair = match seed { - Some(seed) => self.0.write().insert_ephemeral_from_seed_by_type::(seed, id), + Some(seed) => + self.0.write().insert_ephemeral_from_seed_by_type::(seed, id), None => self.0.write().generate_by_type::(id), - }.map_err(|e| -> TraitError { e.into() })?; + } + .map_err(|e| -> TraitError { e.into() })?; Ok(pair.public()) } fn ecdsa_public_keys(&self, key_type: KeyTypeId) -> Vec { - self.0.read().raw_public_keys(key_type) - .map(|v| { - v.into_iter() - .map(|k| ecdsa::Public::from_slice(k.as_slice())) - .collect() - }) + self.0 + .read() + .raw_public_keys(key_type) + .map(|v| v.into_iter().map(|k| ecdsa::Public::from_slice(k.as_slice())).collect()) .unwrap_or_default() } @@ -273,21 +279,27 @@ impl SyncCryptoStore for LocalKeystore { seed: Option<&str>, ) -> std::result::Result { let pair = match seed { - Some(seed) => self.0.write().insert_ephemeral_from_seed_by_type::(seed, id), + Some(seed) => + self.0.write().insert_ephemeral_from_seed_by_type::(seed, id), None => self.0.write().generate_by_type::(id), - }.map_err(|e| -> TraitError { e.into() })?; + } + .map_err(|e| -> TraitError { e.into() })?; Ok(pair.public()) } - fn insert_unknown(&self, key_type: KeyTypeId, suri: &str, public: &[u8]) - -> std::result::Result<(), ()> - { + fn insert_unknown( + &self, + key_type: KeyTypeId, + suri: &str, + public: &[u8], + ) -> std::result::Result<(), ()> { self.0.write().insert_unknown(key_type, suri, public).map_err(|_| ()) } fn has_keys(&self, public_keys: 
&[(Vec, KeyTypeId)]) -> bool { - public_keys.iter() + public_keys + .iter() .all(|(p, t)| self.0.read().key_phrase_by_type(&p, *t).ok().flatten().is_some()) } @@ -302,10 +314,7 @@ impl SyncCryptoStore for LocalKeystore { if let Some(pair) = pair { let (inout, proof, _) = pair.as_ref().vrf_sign(transcript); - Ok(Some(VRFSignature { - output: inout.to_output(), - proof, - })) + Ok(Some(VRFSignature { output: inout.to_output(), proof })) } else { Ok(None) } @@ -317,9 +326,8 @@ impl SyncCryptoStore for LocalKeystore { public: &ecdsa::Public, msg: &[u8; 32], ) -> std::result::Result, TraitError> { - let pair = self.0.read() - .key_pair_by_type::(public, id)?; - + let pair = self.0.read().key_pair_by_type::(public, id)?; + pair.map(|k| k.sign_prehashed(msg)).map(Ok).transpose() } } @@ -362,26 +370,16 @@ impl KeystoreInner { /// Get the password for this store. fn password(&self) -> Option<&str> { - self.password.as_ref() - .map(|p| p.expose_secret()) - .map(|p| p.as_str()) + self.password.as_ref().map(|p| p.expose_secret()).map(|p| p.as_str()) } /// Create a new in-memory store. fn new_in_memory() -> Self { - Self { - path: None, - additional: HashMap::new(), - password: None - } + Self { path: None, additional: HashMap::new(), password: None } } /// Get the key phrase for the given public key and key type from the in-memory store. 
- fn get_additional_pair( - &self, - public: &[u8], - key_type: KeyTypeId, - ) -> Option<&String> { + fn get_additional_pair(&self, public: &[u8], key_type: KeyTypeId) -> Option<&String> { let key = (key_type, public.to_vec()); self.additional.get(&key) } @@ -444,7 +442,7 @@ impl KeystoreInner { let path = if let Some(path) = self.key_file_path(public, key_type) { path } else { - return Ok(None); + return Ok(None) }; if path.exists() { @@ -468,10 +466,7 @@ impl KeystoreInner { return Ok(None) }; - let pair = Pair::from_string( - &phrase, - self.password(), - ).map_err(|_| Error::InvalidPhrase)?; + let pair = Pair::from_string(&phrase, self.password()).map_err(|_| Error::InvalidPhrase)?; if &pair.public() == public { Ok(Some(pair)) @@ -493,7 +488,9 @@ impl KeystoreInner { /// Returns a list of raw public keys filtered by `KeyTypeId` fn raw_public_keys(&self, id: KeyTypeId) -> Result>> { - let mut public_keys: Vec> = self.additional.keys() + let mut public_keys: Vec> = self + .additional + .keys() .into_iter() .filter_map(|k| if k.0 == id { Some(k.1.clone()) } else { None }) .collect(); @@ -508,11 +505,11 @@ impl KeystoreInner { match hex::decode(name) { Ok(ref hex) if hex.len() > 4 => { if &hex[0..4] != &id.0 { - continue; + continue } let public = hex[4..].to_vec(); public_keys.push(public); - } + }, _ => continue, } } @@ -526,42 +523,34 @@ impl KeystoreInner { /// /// Returns `Ok(None)` if the key doesn't exist, `Ok(Some(_))` if the key exists or `Err(_)` when /// something failed. 
- pub fn key_pair(&self, public: &::Public) -> Result> { + pub fn key_pair( + &self, + public: &::Public, + ) -> Result> { self.key_pair_by_type::(IsWrappedBy::from_ref(public), Pair::ID) .map(|v| v.map(Into::into)) } } - #[cfg(test)] mod tests { use super::*; - use tempfile::TempDir; - use sp_core::{ - Pair, - crypto::Ss58Codec, - testing::SR25519, - }; use sp_application_crypto::{ed25519, sr25519, AppPublic}; - use std::{ - fs, - str::FromStr, - }; + use sp_core::{crypto::Ss58Codec, testing::SR25519, Pair}; + use std::{fs, str::FromStr}; + use tempfile::TempDir; const TEST_KEY_TYPE: KeyTypeId = KeyTypeId(*b"test"); impl KeystoreInner { fn insert_ephemeral_from_seed(&mut self, seed: &str) -> Result { - self.insert_ephemeral_from_seed_by_type::(seed, Pair::ID).map(Into::into) + self.insert_ephemeral_from_seed_by_type::(seed, Pair::ID) + .map(Into::into) } fn public_keys(&self) -> Result> { self.raw_public_keys(Public::ID) - .map(|v| { - v.into_iter() - .map(|k| Public::from_slice(k.as_slice())) - .collect() - }) + .map(|v| v.into_iter().map(|k| Public::from_slice(k.as_slice())).collect()) } fn generate(&mut self) -> Result { @@ -592,23 +581,23 @@ mod tests { let key: ed25519::AppPair = store.0.write().generate().unwrap(); let key2 = ed25519::Pair::generate().0; - assert!( - !SyncCryptoStore::has_keys(&store, &[(key2.public().to_vec(), ed25519::AppPublic::ID)]) - ); + assert!(!SyncCryptoStore::has_keys( + &store, + &[(key2.public().to_vec(), ed25519::AppPublic::ID)] + )); - assert!( - !SyncCryptoStore::has_keys( - &store, - &[ - (key2.public().to_vec(), ed25519::AppPublic::ID), - (key.public().to_raw_vec(), ed25519::AppPublic::ID), - ], - ) - ); + assert!(!SyncCryptoStore::has_keys( + &store, + &[ + (key2.public().to_vec(), ed25519::AppPublic::ID), + (key.public().to_raw_vec(), ed25519::AppPublic::ID), + ], + )); - assert!( - SyncCryptoStore::has_keys(&store, &[(key.public().to_raw_vec(), ed25519::AppPublic::ID)]) - ); + assert!(SyncCryptoStore::has_keys( + &store, 
+ &[(key.public().to_raw_vec(), ed25519::AppPublic::ID)] + )); } #[test] @@ -616,9 +605,11 @@ mod tests { let temp_dir = TempDir::new().unwrap(); let mut store = KeystoreInner::open(temp_dir.path(), None).unwrap(); - let pair: ed25519::AppPair = store.insert_ephemeral_from_seed( - "0x3d97c819d68f9bafa7d6e79cb991eebcd77d966c5334c0b94d9e1fa7ad0869dc" - ).unwrap(); + let pair: ed25519::AppPair = store + .insert_ephemeral_from_seed( + "0x3d97c819d68f9bafa7d6e79cb991eebcd77d966c5334c0b94d9e1fa7ad0869dc", + ) + .unwrap(); assert_eq!( "5DKUrgFqCPV8iAXx9sjy1nyBygQCeiUYRFWurZGhnrn3HJCA", pair.public().to_ss58check() @@ -637,7 +628,8 @@ mod tests { let mut store = KeystoreInner::open( temp_dir.path(), Some(FromStr::from_str(password.as_str()).unwrap()), - ).unwrap(); + ) + .unwrap(); let pair: ed25519::AppPair = store.generate().unwrap(); assert_eq!( @@ -652,7 +644,8 @@ mod tests { let store = KeystoreInner::open( temp_dir.path(), Some(FromStr::from_str(password.as_str()).unwrap()), - ).unwrap(); + ) + .unwrap(); assert_eq!( pair.public(), store.key_pair::(&pair.public()).unwrap().unwrap().public(), @@ -667,9 +660,15 @@ mod tests { let mut keys = Vec::new(); for i in 0..10 { keys.push(store.generate::().unwrap().public()); - keys.push(store.insert_ephemeral_from_seed::( - &format!("0x3d97c819d68f9bafa7d6e79cb991eebcd7{}d966c5334c0b94d9e1fa7ad0869dc", i), - ).unwrap().public()); + keys.push( + store + .insert_ephemeral_from_seed::(&format!( + "0x3d97c819d68f9bafa7d6e79cb991eebcd7{}d966c5334c0b94d9e1fa7ad0869dc", + i + )) + .unwrap() + .public(), + ); } // Generate a key of a different type @@ -690,16 +689,14 @@ mod tests { let secret_uri = "//Alice"; let key_pair = sr25519::AppPair::from_string(secret_uri, None).expect("Generates key pair"); - store.insert_unknown( - SR25519, - secret_uri, - key_pair.public().as_ref(), - ).expect("Inserts unknown key"); + store + .insert_unknown(SR25519, secret_uri, key_pair.public().as_ref()) + .expect("Inserts unknown key"); - let 
store_key_pair = store.key_pair_by_type::( - &key_pair.public(), - SR25519, - ).expect("Gets key pair from keystore").unwrap(); + let store_key_pair = store + .key_pair_by_type::(&key_pair.public(), SR25519) + .expect("Gets key pair from keystore") + .unwrap(); assert_eq!(key_pair.public(), store_key_pair.public()); } @@ -712,16 +709,15 @@ mod tests { let file_name = temp_dir.path().join(hex::encode(&SR25519.0[..2])); fs::write(file_name, "test").expect("Invalid file is written"); - assert!( - SyncCryptoStore::sr25519_public_keys(&store, SR25519).is_empty(), - ); + assert!(SyncCryptoStore::sr25519_public_keys(&store, SR25519).is_empty(),); } #[test] fn generate_with_seed_is_not_stored() { let temp_dir = TempDir::new().unwrap(); let store = LocalKeystore::open(temp_dir.path(), None).unwrap(); - let _alice_tmp_key = SyncCryptoStore::sr25519_generate_new(&store, TEST_KEY_TYPE, Some("//Alice")).unwrap(); + let _alice_tmp_key = + SyncCryptoStore::sr25519_generate_new(&store, TEST_KEY_TYPE, Some("//Alice")).unwrap(); assert_eq!(SyncCryptoStore::sr25519_public_keys(&store, TEST_KEY_TYPE).len(), 1); diff --git a/client/light/src/backend.rs b/client/light/src/backend.rs index 425720c1d7770..87d7dba3ddfb1 100644 --- a/client/light/src/backend.rs +++ b/client/light/src/backend.rs @@ -19,38 +19,44 @@ //! Light client backend. Only stores headers and justifications of blocks. //! Everything else is requested from full nodes on demand. 
-use std::collections::{HashMap, HashSet}; -use std::sync::Arc; use parking_lot::RwLock; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; use codec::{Decode, Encode}; -use sp_core::ChangesTrieConfiguration; -use sp_core::storage::{well_known_keys, ChildInfo}; -use sp_core::offchain::storage::InMemOffchainStorage; -use sp_state_machine::{ - Backend as StateBackend, TrieBackend, InMemoryBackend, ChangesTrieTransaction, - StorageCollection, ChildStorageCollection, IndexOperation, -}; -use sp_runtime::{generic::BlockId, Justification, Justifications, Storage}; -use sp_runtime::traits::{Block as BlockT, NumberFor, Zero, Header, HashFor}; -use sp_blockchain::{Error as ClientError, Result as ClientResult}; +use super::blockchain::Blockchain; +use hash_db::Hasher; use sc_client_api::{ backend::{ - AuxStore, Backend as ClientBackend, BlockImportOperation, RemoteBackend, NewBlockState, - PrunableStateChangesTrieStorage, - }, - blockchain::{ - HeaderBackend as BlockchainHeaderBackend, well_known_cache_keys, + AuxStore, Backend as ClientBackend, BlockImportOperation, NewBlockState, + PrunableStateChangesTrieStorage, RemoteBackend, }, - light::Storage as BlockchainStorage, + blockchain::{well_known_cache_keys, HeaderBackend as BlockchainHeaderBackend}, in_mem::check_genesis_storage, + light::Storage as BlockchainStorage, UsageInfo, }; -use super::blockchain::Blockchain; -use hash_db::Hasher; +use sp_blockchain::{Error as ClientError, Result as ClientResult}; +use sp_core::{ + offchain::storage::InMemOffchainStorage, + storage::{well_known_keys, ChildInfo}, + ChangesTrieConfiguration, +}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, HashFor, Header, NumberFor, Zero}, + Justification, Justifications, Storage, +}; +use sp_state_machine::{ + Backend as StateBackend, ChangesTrieTransaction, ChildStorageCollection, InMemoryBackend, + IndexOperation, StorageCollection, TrieBackend, +}; -const IN_MEMORY_EXPECT_PROOF: &str = "InMemory state backend 
has Void error type and always succeeds; qed"; +const IN_MEMORY_EXPECT_PROOF: &str = + "InMemory state backend has Void error type and always succeeds; qed"; /// Light client backend. pub struct Backend { @@ -84,11 +90,7 @@ pub enum GenesisOrUnavailableState { impl Backend { /// Create new light backend. pub fn new(blockchain: Arc>) -> Self { - Self { - blockchain, - genesis_state: RwLock::new(None), - import_lock: Default::default(), - } + Self { blockchain, genesis_state: RwLock::new(None), import_lock: Default::default() } } /// Get shared blockchain reference. @@ -102,9 +104,13 @@ impl AuxStore for Backend { 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, delete: D) -> ClientResult<()> { + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> ClientResult<()> { self.blockchain.storage().insert_aux(insert, delete) } @@ -114,10 +120,10 @@ impl AuxStore for Backend { } impl ClientBackend for Backend> - where - Block: BlockT, - S: BlockchainStorage, - Block::Hash: Ord, +where + Block: BlockT, + S: BlockchainStorage, + Block::Hash: Ord, { type BlockImportOperation = ImportOperation; type Blockchain = Blockchain; @@ -141,15 +147,12 @@ impl ClientBackend for Backend> fn begin_state_operation( &self, _operation: &mut Self::BlockImportOperation, - _block: BlockId + _block: BlockId, ) -> ClientResult<()> { Ok(()) } - fn commit_operation( - &self, - mut operation: Self::BlockImportOperation, - ) -> ClientResult<()> { + fn commit_operation(&self, mut operation: Self::BlockImportOperation) -> ClientResult<()> { if !operation.finalized_blocks.is_empty() { for block in operation.finalized_blocks { self.blockchain.storage().finalize_header(block)?; @@ -159,7 +162,9 @@ impl ClientBackend for Backend> if let Some(header) = operation.header { let is_genesis_import = header.number().is_zero(); if let Some(new_config) = operation.changes_trie_config_update { - 
operation.cache.insert(well_known_cache_keys::CHANGES_TRIE_CONFIG, new_config.encode()); + operation + .cache + .insert(well_known_cache_keys::CHANGES_TRIE_CONFIG, new_config.encode()); } self.blockchain.storage().import_header( header, @@ -175,11 +180,12 @@ impl ClientBackend for Backend> } else { for (key, maybe_val) in operation.aux_ops { match maybe_val { - Some(val) => self.blockchain.storage().insert_aux( - &[(&key[..], &val[..])], - std::iter::empty(), - )?, - None => self.blockchain.storage().insert_aux(std::iter::empty(), &[&key[..]])?, + Some(val) => self + .blockchain + .storage() + .insert_aux(&[(&key[..], &val[..])], std::iter::empty())?, + None => + self.blockchain.storage().insert_aux(std::iter::empty(), &[&key[..]])?, } } } @@ -229,7 +235,7 @@ impl ClientBackend for Backend> // special case for genesis block if block_number.is_zero() { if let Some(genesis_state) = self.genesis_state.read().clone() { - return Ok(GenesisOrUnavailableState::Genesis(genesis_state)); + return Ok(GenesisOrUnavailableState::Genesis(genesis_state)) } } @@ -246,10 +252,7 @@ impl ClientBackend for Backend> Err(ClientError::NotAvailableOnLightClient) } - fn remove_leaf_block( - &self, - _hash: &Block::Hash, - ) -> ClientResult<()> { + fn remove_leaf_block(&self, _hash: &Block::Hash) -> ClientResult<()> { Err(ClientError::NotAvailableOnLightClient) } @@ -265,8 +268,9 @@ where Block::Hash: Ord, { fn is_local_state_available(&self, block: &BlockId) -> bool { - self.genesis_state.read().is_some() - && self.blockchain.expect_block_number_from_id(block) + self.genesis_state.read().is_some() && + self.blockchain + .expect_block_number_from_id(block) .map(|num| num.is_zero()) .unwrap_or(false) } @@ -277,10 +281,10 @@ where } impl BlockImportOperation for ImportOperation - where - Block: BlockT, - S: BlockchainStorage, - Block::Hash: Ord, +where + Block: BlockT, + S: BlockchainStorage, + Block::Hash: Ord, { type State = GenesisOrUnavailableState>; @@ -326,10 +330,14 @@ impl 
BlockImportOperation for ImportOperation check_genesis_storage(&input)?; // changes trie configuration - let changes_trie_config = input.top.iter() + let changes_trie_config = input + .top + .iter() .find(|(k, _)| &k[..] == well_known_keys::CHANGES_TRIE_CONFIG) - .map(|(_, v)| Decode::decode(&mut &v[..]) - .expect("changes trie configuration is encoded properly at genesis")); + .map(|(_, v)| { + Decode::decode(&mut &v[..]) + .expect("changes trie configuration is encoded properly at genesis") + }); self.changes_trie_config_update = Some(changes_trie_config); // this is only called when genesis block is imported => shouldn't be performance bottleneck @@ -337,7 +345,8 @@ impl BlockImportOperation for ImportOperation storage.insert(None, input.top); // create a list of children keys to re-compute roots for - let child_delta = input.children_default + let child_delta = input + .children_default .iter() .map(|(_storage_key, storage_child)| (&storage_child.child_info, std::iter::empty())); @@ -360,7 +369,8 @@ impl BlockImportOperation for ImportOperation } fn insert_aux(&mut self, ops: I) -> ClientResult<()> - where I: IntoIterator, Option>)> + where + I: IntoIterator, Option>)>, { self.aux_ops.append(&mut ops.into_iter().collect()); Ok(()) @@ -389,7 +399,10 @@ impl BlockImportOperation for ImportOperation Ok(()) } - fn update_transaction_index(&mut self, _index: Vec) -> sp_blockchain::Result<()> { + fn update_transaction_index( + &mut self, + _index: Vec, + ) -> sp_blockchain::Result<()> { // noop for the light client Ok(()) } @@ -405,8 +418,8 @@ impl std::fmt::Debug for GenesisOrUnavailableState { } impl StateBackend for GenesisOrUnavailableState - where - H::Out: Ord + codec::Codec, +where + H::Out: Ord + codec::Codec, { type Error = ClientError; type Transaction = as StateBackend>::Transaction; @@ -420,11 +433,7 @@ impl StateBackend for GenesisOrUnavailableState } } - fn child_storage( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> ClientResult>> { + fn 
child_storage(&self, child_info: &ChildInfo, key: &[u8]) -> ClientResult>> { match *self { GenesisOrUnavailableState::Genesis(ref state) => Ok(state.child_storage(child_info, key).expect(IN_MEMORY_EXPECT_PROOF)), @@ -446,24 +455,24 @@ impl StateBackend for GenesisOrUnavailableState key: &[u8], ) -> Result>, Self::Error> { match *self { - GenesisOrUnavailableState::Genesis(ref state) => Ok( - state.next_child_storage_key(child_info, key) - .expect(IN_MEMORY_EXPECT_PROOF) - ), + GenesisOrUnavailableState::Genesis(ref state) => + Ok(state.next_child_storage_key(child_info, key).expect(IN_MEMORY_EXPECT_PROOF)), GenesisOrUnavailableState::Unavailable => Err(ClientError::NotAvailableOnLightClient), } } fn for_keys_with_prefix(&self, prefix: &[u8], action: A) { match *self { - GenesisOrUnavailableState::Genesis(ref state) => state.for_keys_with_prefix(prefix, action), + GenesisOrUnavailableState::Genesis(ref state) => + state.for_keys_with_prefix(prefix, action), GenesisOrUnavailableState::Unavailable => (), } } fn for_key_values_with_prefix(&self, prefix: &[u8], action: A) { match *self { - GenesisOrUnavailableState::Genesis(ref state) => state.for_key_values_with_prefix(prefix, action), + GenesisOrUnavailableState::Genesis(ref state) => + state.for_key_values_with_prefix(prefix, action), GenesisOrUnavailableState::Unavailable => (), } } @@ -477,9 +486,9 @@ impl StateBackend for GenesisOrUnavailableState allow_missing: bool, ) -> ClientResult { match *self { - GenesisOrUnavailableState::Genesis(ref state) => - Ok(state.apply_to_key_values_while(child_info, prefix, start_at, action, allow_missing) - .expect(IN_MEMORY_EXPECT_PROOF)), + GenesisOrUnavailableState::Genesis(ref state) => Ok(state + .apply_to_key_values_while(child_info, prefix, start_at, action, allow_missing) + .expect(IN_MEMORY_EXPECT_PROOF)), GenesisOrUnavailableState::Unavailable => Err(ClientError::NotAvailableOnLightClient), } } @@ -512,11 +521,13 @@ impl StateBackend for GenesisOrUnavailableState fn 
storage_root<'a>( &self, - delta: impl Iterator)>, - ) -> (H::Out, Self::Transaction) where H::Out: Ord { + delta: impl Iterator)>, + ) -> (H::Out, Self::Transaction) + where + H::Out: Ord, + { match *self { - GenesisOrUnavailableState::Genesis(ref state) => - state.storage_root(delta), + GenesisOrUnavailableState::Genesis(ref state) => state.storage_root(delta), GenesisOrUnavailableState::Unavailable => Default::default(), } } @@ -524,15 +535,17 @@ impl StateBackend for GenesisOrUnavailableState fn child_storage_root<'a>( &self, child_info: &ChildInfo, - delta: impl Iterator)>, - ) -> (H::Out, bool, Self::Transaction) where H::Out: Ord { + delta: impl Iterator)>, + ) -> (H::Out, bool, Self::Transaction) + where + H::Out: Ord, + { match *self { GenesisOrUnavailableState::Genesis(ref state) => { let (root, is_equal, _) = state.child_storage_root(child_info, delta); (root, is_equal, Default::default()) }, - GenesisOrUnavailableState::Unavailable => - (H::Out::default(), true, Default::default()), + GenesisOrUnavailableState::Unavailable => (H::Out::default(), true, Default::default()), } } @@ -550,7 +563,7 @@ impl StateBackend for GenesisOrUnavailableState } } - fn register_overlay_stats(&self, _stats: &sp_state_machine::StateMachineStats) { } + fn register_overlay_stats(&self, _stats: &sp_state_machine::StateMachineStats) {} fn usage_info(&self) -> sp_state_machine::UsageInfo { sp_state_machine::UsageInfo::empty() diff --git a/client/light/src/blockchain.rs b/client/light/src/blockchain.rs index 242839833a541..e88c724193697 100644 --- a/client/light/src/blockchain.rs +++ b/client/light/src/blockchain.rs @@ -21,27 +21,25 @@ use std::sync::Arc; -use sp_runtime::{Justifications, generic::BlockId}; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor, Zero}; - -use sp_blockchain::{ - HeaderMetadata, CachedHeaderMetadata, Error as ClientError, Result as ClientResult, +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT, 
NumberFor, Zero}, + Justifications, }; + +use crate::fetcher::RemoteHeaderRequest; pub use sc_client_api::{ - backend::{ - AuxStore, NewBlockState, ProvideChtRoots, - }, + backend::{AuxStore, NewBlockState, ProvideChtRoots}, blockchain::{ - Backend as BlockchainBackend, BlockStatus, Cache as BlockchainCache, + well_known_cache_keys, Backend as BlockchainBackend, BlockStatus, Cache as BlockchainCache, HeaderBackend as BlockchainHeaderBackend, Info as BlockchainInfo, ProvideCache, - well_known_cache_keys, - }, - light::{ - RemoteBlockchain, LocalOrRemote, Storage }, cht, + light::{LocalOrRemote, RemoteBlockchain, Storage}, +}; +use sp_blockchain::{ + CachedHeaderMetadata, Error as ClientError, HeaderMetadata, Result as ClientResult, }; -use crate::fetcher::RemoteHeaderRequest; /// Light client blockchain. pub struct Blockchain { @@ -51,9 +49,7 @@ pub struct Blockchain { impl Blockchain { /// Create new light blockchain backed with given storage. pub fn new(storage: S) -> Self { - Self { - storage, - } + Self { storage } } /// Get storage reference. @@ -62,7 +58,11 @@ impl Blockchain { } } -impl BlockchainHeaderBackend for Blockchain where Block: BlockT, S: Storage { +impl BlockchainHeaderBackend for Blockchain +where + Block: BlockT, + S: Storage, +{ fn header(&self, id: BlockId) -> ClientResult> { match RemoteBlockchain::header(self, id)? 
{ LocalOrRemote::Local(header) => Ok(Some(header)), @@ -83,15 +83,25 @@ impl BlockchainHeaderBackend for Blockchain where Block: Blo self.storage.number(hash) } - fn hash(&self, number: <::Header as HeaderT>::Number) -> ClientResult> { + fn hash( + &self, + number: <::Header as HeaderT>::Number, + ) -> ClientResult> { self.storage.hash(number) } } -impl HeaderMetadata for Blockchain where Block: BlockT, S: Storage { +impl HeaderMetadata for Blockchain +where + Block: BlockT, + S: Storage, +{ type Error = ClientError; - fn header_metadata(&self, hash: Block::Hash) -> Result, Self::Error> { + fn header_metadata( + &self, + hash: Block::Hash, + ) -> Result, Self::Error> { self.storage.header_metadata(hash) } @@ -104,7 +114,11 @@ impl HeaderMetadata for Blockchain where Block: BlockT, S: S } } -impl BlockchainBackend for Blockchain where Block: BlockT, S: Storage { +impl BlockchainBackend for Blockchain +where + Block: BlockT, + S: Storage, +{ fn body(&self, _id: BlockId) -> ClientResult>> { Err(ClientError::NotAvailableOnLightClient) } @@ -129,16 +143,13 @@ impl BlockchainBackend for Blockchain where Block: BlockT, S Err(ClientError::NotAvailableOnLightClient) } - fn indexed_transaction( - &self, - _hash: &Block::Hash, - ) -> ClientResult>> { + fn indexed_transaction(&self, _hash: &Block::Hash) -> ClientResult>> { Err(ClientError::NotAvailableOnLightClient) } fn block_indexed_body( &self, - _id: BlockId + _id: BlockId, ) -> sp_blockchain::Result>>> { Err(ClientError::NotAvailableOnLightClient) } @@ -151,16 +162,16 @@ impl, Block: BlockT> ProvideCache for Blockchain { } impl RemoteBlockchain for Blockchain - where - S: Storage, +where + S: Storage, { - fn header(&self, id: BlockId) -> ClientResult, - >> { + fn header( + &self, + id: BlockId, + ) -> ClientResult>> { // first, try to read header from local storage if let Some(local_header) = self.storage.header(id)? 
{ - return Ok(LocalOrRemote::Local(local_header)); + return Ok(LocalOrRemote::Local(local_header)) } // we need to know block number to check if it's a part of CHT @@ -173,8 +184,9 @@ impl RemoteBlockchain for Blockchain }; // if the header is genesis (never pruned), non-canonical, or from future => return - if number.is_zero() || self.storage.status(BlockId::Number(number))? == BlockStatus::Unknown { - return Ok(LocalOrRemote::Unknown); + if number.is_zero() || self.storage.status(BlockId::Number(number))? == BlockStatus::Unknown + { + return Ok(LocalOrRemote::Unknown) } Ok(LocalOrRemote::Remote(RemoteHeaderRequest { diff --git a/client/light/src/call_executor.rs b/client/light/src/call_executor.rs index c9ca3bab37bef..f666d8363127f 100644 --- a/client/light/src/call_executor.rs +++ b/client/light/src/call_executor.rs @@ -18,34 +18,33 @@ //! Methods that light client could use to execute runtime calls. -use std::{ - sync::Arc, panic::UnwindSafe, result, cell::RefCell, -}; +use std::{cell::RefCell, panic::UnwindSafe, result, sync::Arc}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; +use hash_db::Hasher; use sp_core::{ - convert_hash, NativeOrEncoded, traits::{CodeExecutor, SpawnNamed}, + convert_hash, + traits::{CodeExecutor, SpawnNamed}, + NativeOrEncoded, }; +use sp_externalities::Extensions; use sp_runtime::{ - generic::BlockId, traits::{Block as BlockT, Header as HeaderT, HashFor}, + generic::BlockId, + traits::{Block as BlockT, HashFor, Header as HeaderT}, }; -use sp_externalities::Extensions; use sp_state_machine::{ - self, Backend as StateBackend, OverlayedChanges, ExecutionStrategy, create_proof_check_backend, - execution_proof_check_on_trie_backend, ExecutionManager, StorageProof, + self, create_proof_check_backend, execution_proof_check_on_trie_backend, + Backend as StateBackend, ExecutionManager, ExecutionStrategy, OverlayedChanges, StorageProof, }; -use hash_db::Hasher; use sp_api::{ProofRecorder, StorageTransactionCache}; use 
sp_blockchain::{Error as ClientError, Result as ClientResult}; use sc_client_api::{ - backend::RemoteBackend, - light::RemoteCallRequest, - call_executor::CallExecutor, + backend::RemoteBackend, call_executor::CallExecutor, light::RemoteCallRequest, }; -use sc_executor::{RuntimeVersion, NativeVersion}; +use sc_executor::{NativeVersion, RuntimeVersion}; /// Call executor that is able to execute calls only on genesis state. /// @@ -64,19 +63,15 @@ impl GenesisCallExecutor { impl Clone for GenesisCallExecutor { fn clone(&self) -> Self { - GenesisCallExecutor { - backend: self.backend.clone(), - local: self.local.clone(), - } + GenesisCallExecutor { backend: self.backend.clone(), local: self.local.clone() } } } -impl CallExecutor for - GenesisCallExecutor - where - Block: BlockT, - B: RemoteBackend, - Local: CallExecutor, +impl CallExecutor for GenesisCallExecutor +where + Block: BlockT, + B: RemoteBackend, + Local: CallExecutor, { type Error = ClientError; @@ -99,7 +94,7 @@ impl CallExecutor for fn contextual_call< EM: Fn( Result, Self::Error>, - Result, Self::Error> + Result, Self::Error>, ) -> Result, Self::Error>, R: Encode + Decode + PartialEq, NC: FnOnce() -> result::Result + UnwindSafe, @@ -114,7 +109,10 @@ impl CallExecutor for native_call: Option, recorder: &Option>, extensions: Option, - ) -> ClientResult> where ExecutionManager: Clone { + ) -> ClientResult> + where + ExecutionManager: Clone, + { // there's no actual way/need to specify native/wasm execution strategy on light node // => we can safely ignore passed values @@ -125,7 +123,7 @@ impl CallExecutor for Result, Local::Error>, ) -> Result, Local::Error>, _, - NC + NC, >( &self.local, at, @@ -137,7 +135,8 @@ impl CallExecutor for native_call, recorder, extensions, - ).map_err(|e| ClientError::Execution(Box::new(e.to_string()))), + ) + .map_err(|e| ClientError::Execution(Box::new(e.to_string()))), false => Err(ClientError::NotAvailableOnLightClient), } } @@ -174,24 +173,19 @@ pub fn prove_execution( 
method: &str, call_data: &[u8], ) -> ClientResult<(Vec, StorageProof)> - where - Block: BlockT, - S: StateBackend>, - E: CallExecutor, +where + Block: BlockT, + S: StateBackend>, + E: CallExecutor, { - let trie_state = state.as_trie_backend() - .ok_or_else(|| - Box::new(sp_state_machine::ExecutionError::UnableToGenerateProof) as - Box - )?; + let trie_state = state.as_trie_backend().ok_or_else(|| { + Box::new(sp_state_machine::ExecutionError::UnableToGenerateProof) + as Box + })?; // execute method + record execution proof - let (result, exec_proof) = executor.prove_at_trie_state( - &trie_state, - &mut Default::default(), - method, - call_data, - )?; + let (result, exec_proof) = + executor.prove_at_trie_state(&trie_state, &mut Default::default(), method, call_data)?; Ok((result, exec_proof)) } @@ -205,11 +199,11 @@ pub fn check_execution_proof( request: &RemoteCallRequest

, remote_proof: StorageProof, ) -> ClientResult> - where - Header: HeaderT, - E: CodeExecutor + Clone + 'static, - H: Hasher, - H::Out: Ord + codec::Codec + 'static, +where + Header: HeaderT, + E: CodeExecutor + Clone + 'static, + H: Hasher, + H::Out: Ord + codec::Codec + 'static, { let local_state_root = request.header.state_root(); let root: H::Out = convert_hash(&local_state_root); @@ -220,7 +214,8 @@ pub fn check_execution_proof( // TODO: Remove when solved: https://github.com/paritytech/substrate/issues/5047 let backend_runtime_code = sp_state_machine::backend::BackendRuntimeCode::new(&trie_backend); - let runtime_code = backend_runtime_code.runtime_code() + let runtime_code = backend_runtime_code + .runtime_code() .map_err(|_e| ClientError::RuntimeCodeMissing)?; // execute method diff --git a/client/light/src/fetcher.rs b/client/light/src/fetcher.rs index e39cfe07fbf5e..fcdc7ad7ba596 100644 --- a/client/light/src/fetcher.rs +++ b/client/light/src/fetcher.rs @@ -18,34 +18,39 @@ //! Light client data fetcher. Fetches requested data from remote full nodes. 
-use std::sync::Arc; -use std::collections::{BTreeMap, HashMap}; -use std::marker::PhantomData; +use std::{ + collections::{BTreeMap, HashMap}, + marker::PhantomData, + sync::Arc, +}; -use hash_db::{HashDB, Hasher, EMPTY_PREFIX}; use codec::{Decode, Encode}; -use sp_core::{convert_hash, traits::{CodeExecutor, SpawnNamed}, storage::{ChildInfo, ChildType}}; +use hash_db::{HashDB, Hasher, EMPTY_PREFIX}; +use sp_blockchain::{Error as ClientError, Result as ClientResult}; +use sp_core::{ + convert_hash, + storage::{ChildInfo, ChildType}, + traits::{CodeExecutor, SpawnNamed}, +}; use sp_runtime::traits::{ - Block as BlockT, Header as HeaderT, Hash, HashFor, NumberFor, - AtLeast32Bit, CheckedConversion, + AtLeast32Bit, Block as BlockT, CheckedConversion, Hash, HashFor, Header as HeaderT, NumberFor, }; +pub use sp_state_machine::StorageProof; use sp_state_machine::{ - ChangesTrieRootsStorage, ChangesTrieAnchorBlockId, ChangesTrieConfigurationRange, - InMemoryChangesTrieStorage, TrieBackend, read_proof_check, key_changes_proof_check_with_db, - read_child_proof_check, + key_changes_proof_check_with_db, read_child_proof_check, read_proof_check, + ChangesTrieAnchorBlockId, ChangesTrieConfigurationRange, ChangesTrieRootsStorage, + InMemoryChangesTrieStorage, TrieBackend, }; -pub use sp_state_machine::StorageProof; -use sp_blockchain::{Error as ClientError, Result as ClientResult}; +use crate::{blockchain::Blockchain, call_executor::check_execution_proof}; pub use sc_client_api::{ + cht, light::{ - RemoteCallRequest, RemoteHeaderRequest, RemoteReadRequest, RemoteReadChildRequest, - RemoteChangesRequest, ChangesProof, RemoteBodyRequest, Fetcher, FetchChecker, + ChangesProof, FetchChecker, Fetcher, RemoteBodyRequest, RemoteCallRequest, + RemoteChangesRequest, RemoteHeaderRequest, RemoteReadChildRequest, RemoteReadRequest, Storage as BlockchainStorage, }, - cht, }; -use crate::{blockchain::Blockchain, call_executor::check_execution_proof}; /// Remote data checker. 
pub struct LightDataChecker> { @@ -62,9 +67,7 @@ impl> LightDataChecker { executor: E, spawn_handle: Box, ) -> Self { - Self { - blockchain, executor, spawn_handle, _hasher: PhantomData - } + Self { blockchain, executor, spawn_handle, _hasher: PhantomData } } /// Check remote changes query proof assuming that CHT-s are of given size. @@ -74,26 +77,39 @@ impl> LightDataChecker { remote_proof: ChangesProof, cht_size: NumberFor, ) -> ClientResult, u32)>> - where - H: Hasher, - H::Out: Ord + codec::Codec, + where + H: Hasher, + H::Out: Ord + codec::Codec, { // since we need roots of all changes tries for the range begin..max // => remote node can't use max block greater that one that we have passed - if remote_proof.max_block > request.max_block.0 || remote_proof.max_block < request.last_block.0 { + if remote_proof.max_block > request.max_block.0 || + remote_proof.max_block < request.last_block.0 + { return Err(ClientError::ChangesTrieAccessFailed(format!( "Invalid max_block used by the remote node: {}. 
Local: {}..{}..{}", - remote_proof.max_block, request.first_block.0, request.last_block.0, request.max_block.0, - )).into()); + remote_proof.max_block, + request.first_block.0, + request.last_block.0, + request.max_block.0, + )) + .into()) } // check if remote node has responded with extra changes trie roots proofs // all changes tries roots must be in range [request.first_block.0; request.tries_roots.0) - let is_extra_first_root = remote_proof.roots.keys().next() - .map(|first_root| *first_root < request.first_block.0 - || *first_root >= request.tries_roots.0) + let is_extra_first_root = remote_proof + .roots + .keys() + .next() + .map(|first_root| { + *first_root < request.first_block.0 || *first_root >= request.tries_roots.0 + }) .unwrap_or(false); - let is_extra_last_root = remote_proof.roots.keys().next_back() + let is_extra_last_root = remote_proof + .roots + .keys() + .next_back() .map(|last_root| *last_root >= request.tries_roots.0) .unwrap_or(false); if is_extra_first_root || is_extra_last_root { @@ -112,11 +128,7 @@ impl> LightDataChecker { let remote_roots_proof = remote_proof.roots_proof; let remote_proof = remote_proof.proof; if !remote_roots.is_empty() { - self.check_changes_tries_proof( - cht_size, - &remote_roots, - remote_roots_proof, - )?; + self.check_changes_tries_proof(cht_size, &remote_roots, remote_roots_proof)?; } // and now check the key changes proof + get the changes @@ -125,7 +137,10 @@ impl> LightDataChecker { for config_range in &request.changes_trie_configs { let result_range = key_changes_proof_check_with_db::( ChangesTrieConfigurationRange { - config: config_range.config.as_ref().ok_or(ClientError::ChangesTriesNotSupported)?, + config: config_range + .config + .as_ref() + .ok_or(ClientError::ChangesTriesNotSupported)?, zero: config_range.zero.0, end: config_range.end.map(|(n, _)| n), }, @@ -141,7 +156,8 @@ impl> LightDataChecker { }, remote_max_block, request.storage_key.as_ref(), - &request.key) + &request.key, + ) .map_err(|err| 
ClientError::ChangesTrieAccessFailed(err))?; result.extend(result_range); } @@ -156,9 +172,9 @@ impl> LightDataChecker { remote_roots: &BTreeMap, B::Hash>, remote_roots_proof: StorageProof, ) -> ClientResult<()> - where - H: Hasher, - H::Out: Ord + codec::Codec, + where + H: Hasher, + H::Out: Ord + codec::Codec, { // all the checks are sharing the same storage let storage = remote_roots_proof.into_memory_db(); @@ -166,52 +182,62 @@ impl> LightDataChecker { // remote_roots.keys() are sorted => we can use this to group changes tries roots // that are belongs to the same CHT let blocks = remote_roots.keys().cloned(); - cht::for_each_cht_group::(cht_size, blocks, |mut storage, _, cht_blocks| { - // get local changes trie CHT root for given CHT - // it should be there, because it is never pruned AND request has been composed - // when required header has been pruned (=> replaced with CHT) - let first_block = cht_blocks.first().cloned() - .expect("for_each_cht_group never calls callback with empty groups"); - let local_cht_root = self.blockchain.storage().changes_trie_cht_root(cht_size, first_block)? 
- .ok_or(ClientError::InvalidCHTProof)?; - - // check changes trie root for every block within CHT range - for block in cht_blocks { - // check if the proofs storage contains the root - // normally this happens in when the proving backend is created, but since - // we share the storage for multiple checks, do it here - let mut cht_root = H::Out::default(); - cht_root.as_mut().copy_from_slice(local_cht_root.as_ref()); - if !storage.contains(&cht_root, EMPTY_PREFIX) { - return Err(ClientError::InvalidCHTProof.into()); + cht::for_each_cht_group::( + cht_size, + blocks, + |mut storage, _, cht_blocks| { + // get local changes trie CHT root for given CHT + // it should be there, because it is never pruned AND request has been composed + // when required header has been pruned (=> replaced with CHT) + let first_block = cht_blocks + .first() + .cloned() + .expect("for_each_cht_group never calls callback with empty groups"); + let local_cht_root = self + .blockchain + .storage() + .changes_trie_cht_root(cht_size, first_block)? 
+ .ok_or(ClientError::InvalidCHTProof)?; + + // check changes trie root for every block within CHT range + for block in cht_blocks { + // check if the proofs storage contains the root + // normally this happens in when the proving backend is created, but since + // we share the storage for multiple checks, do it here + let mut cht_root = H::Out::default(); + cht_root.as_mut().copy_from_slice(local_cht_root.as_ref()); + if !storage.contains(&cht_root, EMPTY_PREFIX) { + return Err(ClientError::InvalidCHTProof.into()) + } + + // check proof for single changes trie root + let proving_backend = TrieBackend::new(storage, cht_root); + let remote_changes_trie_root = remote_roots[&block]; + cht::check_proof_on_proving_backend::( + local_cht_root, + block, + remote_changes_trie_root, + &proving_backend, + )?; + + // and return the storage to use in following checks + storage = proving_backend.into_storage(); } - // check proof for single changes trie root - let proving_backend = TrieBackend::new(storage, cht_root); - let remote_changes_trie_root = remote_roots[&block]; - cht::check_proof_on_proving_backend::( - local_cht_root, - block, - remote_changes_trie_root, - &proving_backend, - )?; - - // and return the storage to use in following checks - storage = proving_backend.into_storage(); - } - - Ok(storage) - }, storage) + Ok(storage) + }, + storage, + ) } } impl FetchChecker for LightDataChecker - where - Block: BlockT, - E: CodeExecutor + Clone + 'static, - H: Hasher, - H::Out: Ord + codec::Codec + 'static, - S: BlockchainStorage, +where + Block: BlockT, + E: CodeExecutor + Clone + 'static, + H: Hasher, + H::Out: Ord + codec::Codec + 'static, + S: BlockchainStorage, { fn check_header_proof( &self, @@ -219,15 +245,16 @@ impl FetchChecker for LightDataChecker remote_header: Option, remote_proof: StorageProof, ) -> ClientResult { - let remote_header = remote_header.ok_or_else(|| - ClientError::from(ClientError::InvalidCHTProof))?; + let remote_header = + 
remote_header.ok_or_else(|| ClientError::from(ClientError::InvalidCHTProof))?; let remote_header_hash = remote_header.hash(); cht::check_proof::( request.cht_root, request.block, remote_header_hash, remote_proof, - ).map(|_| remote_header) + ) + .map(|_| remote_header) } fn check_read_proof( @@ -239,7 +266,8 @@ impl FetchChecker for LightDataChecker convert_hash(request.header.state_root()), remote_proof, request.keys.iter(), - ).map_err(|e| ClientError::from(e)) + ) + .map_err(|e| ClientError::from(e)) } fn check_read_child_proof( @@ -256,7 +284,8 @@ impl FetchChecker for LightDataChecker remote_proof, &child_info, request.keys.iter(), - ).map_err(|e| ClientError::from(e)) + ) + .map_err(|e| ClientError::from(e)) } fn check_execution_proof( @@ -275,7 +304,7 @@ impl FetchChecker for LightDataChecker fn check_changes_proof( &self, request: &RemoteChangesRequest, - remote_proof: ChangesProof + remote_proof: ChangesProof, ) -> ClientResult, u32)>> { self.check_changes_proof_with_cht_size(request, remote_proof, cht::size()) } @@ -283,12 +312,11 @@ impl FetchChecker for LightDataChecker fn check_body_proof( &self, request: &RemoteBodyRequest, - body: Vec + body: Vec, ) -> ClientResult> { // TODO: #2621 - let extrinsics_root = HashFor::::ordered_trie_root( - body.iter().map(Encode::encode).collect(), - ); + let extrinsics_root = + HashFor::::ordered_trie_root(body.iter().map(Encode::encode).collect()); if *request.header.extrinsics_root() == extrinsics_root { Ok(body) } else { @@ -297,7 +325,6 @@ impl FetchChecker for LightDataChecker expected: extrinsics_root.to_string(), }) } - } } @@ -308,10 +335,18 @@ struct RootsStorage<'a, Number: AtLeast32Bit, Hash: 'a> { } impl<'a, H, Number, Hash> ChangesTrieRootsStorage for RootsStorage<'a, Number, Hash> - where - H: Hasher, - Number: std::fmt::Display + std::hash::Hash + Clone + AtLeast32Bit + Encode + Decode + Send + Sync + 'static, - Hash: 'a + Send + Sync + Clone + AsRef<[u8]>, +where + H: Hasher, + Number: 
std::fmt::Display + + std::hash::Hash + + Clone + + AtLeast32Bit + + Encode + + Decode + + Send + + Sync + + 'static, + Hash: 'a + Send + Sync + Clone + AsRef<[u8]>, { fn build_anchor( &self, @@ -329,7 +364,8 @@ impl<'a, H, Number, Hash> ChangesTrieRootsStorage for RootsStorage<'a let root = if block < self.roots.0 { self.prev_roots.get(&Number::unique_saturated_from(block)).cloned() } else { - let index: Option = block.checked_sub(&self.roots.0).and_then(|index| index.checked_into()); + let index: Option = + block.checked_sub(&self.roots.0).and_then(|index| index.checked_into()); index.and_then(|index| self.roots.1.get(index as usize).cloned()) }; diff --git a/client/light/src/lib.rs b/client/light/src/lib.rs index e647b8743cc0f..ed48c05258d0f 100644 --- a/client/light/src/lib.rs +++ b/client/light/src/lib.rs @@ -18,16 +18,19 @@ //! Light client components. +use sp_core::traits::{CodeExecutor, SpawnNamed}; use sp_runtime::traits::{Block as BlockT, HashFor}; use std::sync::Arc; -use sp_core::traits::{CodeExecutor, SpawnNamed}; pub mod backend; pub mod blockchain; pub mod call_executor; pub mod fetcher; -pub use {backend::*, blockchain::*, call_executor::*, fetcher::*}; +pub use backend::*; +pub use blockchain::*; +pub use call_executor::*; +pub use fetcher::*; /// Create an instance of fetch data checker. pub fn new_fetch_checker>( @@ -35,8 +38,8 @@ pub fn new_fetch_checker>( executor: E, spawn_handle: Box, ) -> LightDataChecker, B, S> - where - E: CodeExecutor, +where + E: CodeExecutor, { LightDataChecker::new(blockchain, executor, spawn_handle) } @@ -48,9 +51,9 @@ pub fn new_light_blockchain>(storage: S) -> A /// Create an instance of light client backend. 
pub fn new_light_backend(blockchain: Arc>) -> Arc>> - where - B: BlockT, - S: BlockchainStorage, +where + B: BlockT, + S: BlockchainStorage, { Arc::new(Backend::new(blockchain)) } diff --git a/client/network-gossip/src/bridge.rs b/client/network-gossip/src/bridge.rs index fd9aac96c0102..a807693f761a9 100644 --- a/client/network-gossip/src/bridge.rs +++ b/client/network-gossip/src/bridge.rs @@ -16,13 +16,17 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use crate::{Network, Validator}; -use crate::state_machine::{ConsensusGossip, TopicNotification, PERIODIC_MAINTENANCE_INTERVAL}; +use crate::{ + state_machine::{ConsensusGossip, TopicNotification, PERIODIC_MAINTENANCE_INTERVAL}, + Network, Validator, +}; use sc_network::{Event, ReputationChange}; -use futures::prelude::*; -use futures::channel::mpsc::{channel, Sender, Receiver}; +use futures::{ + channel::mpsc::{channel, Receiver, Sender}, + prelude::*, +}; use libp2p::PeerId; use log::trace; use prometheus_endpoint::Registry; @@ -74,7 +78,10 @@ impl GossipEngine { protocol: impl Into>, validator: Arc>, metrics_registry: Option<&Registry>, - ) -> Self where B: 'static { + ) -> Self + where + B: 'static, + { let protocol = protocol.into(); let network_event_stream = network.event_stream(); @@ -99,11 +106,7 @@ impl GossipEngine { /// the message's topic. No validation is performed on the message, if the /// message is already expired it should be dropped on the next garbage /// collection. - pub fn register_gossip_message( - &mut self, - topic: B::Hash, - message: Vec, - ) { + pub fn register_gossip_message(&mut self, topic: B::Hash, message: Vec) { self.state_machine.register_message(topic, message); } @@ -113,9 +116,7 @@ impl GossipEngine { } /// Get data of valid, incoming messages for a topic (but might have expired meanwhile). 
- pub fn messages_for(&mut self, topic: B::Hash) - -> Receiver - { + pub fn messages_for(&mut self, topic: B::Hash) -> Receiver { let past_messages = self.state_machine.messages_for(topic).collect::>(); // The channel length is not critical for correctness. By the implementation of `channel` // each sender is guaranteed a single buffer slot, making it a non-rendezvous channel and @@ -124,7 +125,7 @@ impl GossipEngine { // contains a single message. let (mut tx, rx) = channel(usize::max(past_messages.len(), 10)); - for notification in past_messages{ + for notification in past_messages { tx.try_send(notification) .expect("receiver known to be live, and buffer size known to suffice; qed"); } @@ -135,22 +136,12 @@ impl GossipEngine { } /// Send all messages with given topic to a peer. - pub fn send_topic( - &mut self, - who: &PeerId, - topic: B::Hash, - force: bool - ) { + pub fn send_topic(&mut self, who: &PeerId, topic: B::Hash, force: bool) { self.state_machine.send_topic(&mut *self.network, who, topic, force) } /// Multicast a message to all peers. - pub fn gossip_message( - &mut self, - topic: B::Hash, - message: Vec, - force: bool, - ) { + pub fn gossip_message(&mut self, topic: B::Hash, message: Vec, force: bool) { self.state_machine.multicast(&mut *self.network, topic, message, force) } @@ -184,30 +175,33 @@ impl Future for GossipEngine { Poll::Ready(Some(event)) => match event { Event::SyncConnected { remote } => { this.network.add_set_reserved(remote, this.protocol.clone()); - } + }, Event::SyncDisconnected { remote } => { this.network.remove_set_reserved(remote, this.protocol.clone()); - } + }, Event::NotificationStreamOpened { remote, protocol, role, .. 
} => { if protocol != this.protocol { - continue; + continue } this.state_machine.new_peer(&mut *this.network, remote, role); - } + }, Event::NotificationStreamClosed { remote, protocol } => { if protocol != this.protocol { - continue; + continue } this.state_machine.peer_disconnected(&mut *this.network, remote); }, Event::NotificationsReceived { remote, messages } => { - let messages = messages.into_iter().filter_map(|(engine, data)| { - if engine == this.protocol { - Some(data.to_vec()) - } else { - None - } - }).collect(); + let messages = messages + .into_iter() + .filter_map(|(engine, data)| { + if engine == this.protocol { + Some(data.to_vec()) + } else { + None + } + }) + .collect(); let to_forward = this.state_machine.on_incoming( &mut *this.network, @@ -217,26 +211,26 @@ impl Future for GossipEngine { this.forwarding_state = ForwardingState::Busy(to_forward.into()); }, - Event::Dht(_) => {} - } + Event::Dht(_) => {}, + }, // The network event stream closed. Do the same for [`GossipValidator`]. Poll::Ready(None) => return Poll::Ready(()), Poll::Pending => break, } - } + }, ForwardingState::Busy(to_forward) => { let (topic, notification) = match to_forward.pop_front() { Some(n) => n, None => { this.forwarding_state = ForwardingState::Idle; - continue; - } + continue + }, }; let sinks = match this.message_sinks.get_mut(&topic) { Some(sinks) => sinks, None => { - continue; + continue }, }; @@ -249,8 +243,8 @@ impl Future for GossipEngine { Poll::Pending => { // Push back onto queue for later. 
to_forward.push_front((topic, notification)); - break 'outer; - } + break 'outer + }, } } @@ -259,7 +253,7 @@ impl Future for GossipEngine { if sinks.is_empty() { this.message_sinks.remove(&topic); - continue; + continue } trace!( @@ -271,18 +265,16 @@ impl Future for GossipEngine { for sink in sinks { match sink.start_send(notification.clone()) { Ok(()) => {}, - Err(e) if e.is_full() => unreachable!( - "Previously ensured that all sinks are ready; qed.", - ), + Err(e) if e.is_full() => + unreachable!("Previously ensured that all sinks are ready; qed.",), // Receiver got dropped. Will be removed in next iteration (See (1)). Err(_) => {}, } } - } + }, } } - while let Poll::Ready(()) = this.periodic_maintenance_interval.poll_unpin(cx) { this.periodic_maintenance_interval.reset(PERIODIC_MAINTENANCE_INTERVAL); this.state_machine.tick(&mut *this.network); @@ -299,17 +291,23 @@ impl Future for GossipEngine { #[cfg(test)] mod tests { - use async_std::task::spawn; + use super::*; use crate::{ValidationResult, ValidatorContext}; - use futures::{channel::mpsc::{unbounded, UnboundedSender}, executor::{block_on, block_on_stream}, future::poll_fn}; + use async_std::task::spawn; + use futures::{ + channel::mpsc::{unbounded, UnboundedSender}, + executor::{block_on, block_on_stream}, + future::poll_fn, + }; use quickcheck::{Arbitrary, Gen, QuickCheck}; use sc_network::ObservedRole; - use sp_runtime::{testing::H256, traits::{Block as BlockT}}; - use std::borrow::Cow; - use std::convert::TryInto; - use std::sync::{Arc, Mutex}; + use sp_runtime::{testing::H256, traits::Block as BlockT}; + use std::{ + borrow::Cow, + convert::TryInto, + sync::{Arc, Mutex}, + }; use substrate_test_runtime_client::runtime::Block; - use super::*; #[derive(Clone, Default)] struct TestNetwork { @@ -329,18 +327,15 @@ mod tests { Box::pin(rx) } - fn report_peer(&self, _: PeerId, _: ReputationChange) { - } + fn report_peer(&self, _: PeerId, _: ReputationChange) {} fn disconnect_peer(&self, _: PeerId, _: 
Cow<'static, str>) { unimplemented!(); } - fn add_set_reserved(&self, _: PeerId, _: Cow<'static, str>) { - } + fn add_set_reserved(&self, _: PeerId, _: Cow<'static, str>) {} - fn remove_set_reserved(&self, _: PeerId, _: Cow<'static, str>) { - } + fn remove_set_reserved(&self, _: PeerId, _: Cow<'static, str>) {} fn write_notification(&self, _: PeerId, _: Cow<'static, str>, _: Vec) { unimplemented!(); @@ -405,32 +400,32 @@ mod tests { None, ); - let mut event_sender = network.inner.lock() - .unwrap() - .event_senders - .pop() - .unwrap(); + let mut event_sender = network.inner.lock().unwrap().event_senders.pop().unwrap(); // Register the remote peer. - event_sender.start_send( - Event::NotificationStreamOpened { + event_sender + .start_send(Event::NotificationStreamOpened { remote: remote_peer.clone(), protocol: protocol.clone(), negotiated_fallback: None, role: ObservedRole::Authority, - } - ).expect("Event stream is unbounded; qed."); + }) + .expect("Event stream is unbounded; qed."); let messages = vec![vec![1], vec![2]]; - let events = messages.iter().cloned().map(|m| { - Event::NotificationsReceived { + let events = messages + .iter() + .cloned() + .map(|m| Event::NotificationsReceived { remote: remote_peer.clone(), - messages: vec![(protocol.clone(), m.into())] - } - }).collect::>(); + messages: vec![(protocol.clone(), m.into())], + }) + .collect::>(); // Send first event before subscribing. - event_sender.start_send(events[0].clone()).expect("Event stream is unbounded; qed."); + event_sender + .start_send(events[0].clone()) + .expect("Event stream is unbounded; qed."); let mut subscribers = vec![]; for _ in 0..2 { @@ -438,13 +433,14 @@ mod tests { } // Send second event after subscribing. 
- event_sender.start_send(events[1].clone()).expect("Event stream is unbounded; qed."); + event_sender + .start_send(events[1].clone()) + .expect("Event stream is unbounded; qed."); spawn(gossip_engine); - let mut subscribers = subscribers.into_iter() - .map(|s| block_on_stream(s)) - .collect::>(); + let mut subscribers = + subscribers.into_iter().map(|s| block_on_stream(s)).collect::>(); // Expect each subscriber to receive both events. for message in messages { @@ -463,7 +459,7 @@ mod tests { #[test] fn forwarding_to_different_size_and_topic_channels() { #[derive(Clone, Debug)] - struct ChannelLengthAndTopic{ + struct ChannelLengthAndTopic { length: usize, topic: H256, } @@ -486,7 +482,7 @@ mod tests { topic: H256, } - impl Arbitrary for Message{ + impl Arbitrary for Message { fn arbitrary(g: &mut Gen) -> Self { let possible_topics = (0..10).collect::>(); Self { @@ -517,13 +513,16 @@ mod tests { let remote_peer = PeerId::random(); let network = TestNetwork::default(); - let num_channels_per_topic = channels.iter() - .fold(HashMap::new(), |mut acc, ChannelLengthAndTopic { topic, .. }| { + let num_channels_per_topic = channels.iter().fold( + HashMap::new(), + |mut acc, ChannelLengthAndTopic { topic, .. }| { acc.entry(topic).and_modify(|e| *e += 1).or_insert(1); acc - }); + }, + ); - let expected_msgs_per_topic_all_chan = notifications.iter() + let expected_msgs_per_topic_all_chan = notifications + .iter() .fold(HashMap::new(), |mut acc, messages| { for message in messages { acc.entry(message.topic).and_modify(|e| *e += 1).or_insert(1); @@ -545,12 +544,12 @@ mod tests { ); // Create channels. 
- let (txs, mut rxs) = channels.iter() - .map(|ChannelLengthAndTopic { length, topic }| { - (topic.clone(), channel(*length)) - }) + let (txs, mut rxs) = channels + .iter() + .map(|ChannelLengthAndTopic { length, topic }| (topic.clone(), channel(*length))) .fold((vec![], vec![]), |mut acc, (topic, (tx, rx))| { - acc.0.push((topic, tx)); acc.1.push((topic, rx)); + acc.0.push((topic, tx)); + acc.1.push((topic, rx)); acc }); @@ -560,30 +559,27 @@ mod tests { Some(entry) => entry.push(tx), None => { gossip_engine.message_sinks.insert(topic, vec![tx]); - } + }, } } - - let mut event_sender = network.inner.lock() - .unwrap() - .event_senders - .pop() - .unwrap(); + let mut event_sender = network.inner.lock().unwrap().event_senders.pop().unwrap(); // Register the remote peer. - event_sender.start_send( - Event::NotificationStreamOpened { + event_sender + .start_send(Event::NotificationStreamOpened { remote: remote_peer.clone(), protocol: protocol.clone(), negotiated_fallback: None, role: ObservedRole::Authority, - } - ).expect("Event stream is unbounded; qed."); + }) + .expect("Event stream is unbounded; qed."); // Send messages into the network event stream. for (i_notification, messages) in notifications.iter().enumerate() { - let messages = messages.into_iter().enumerate() + let messages = messages + .into_iter() + .enumerate() .map(|(i_message, Message { topic })| { // Embed the topic in the first 256 bytes of the message to be extracted by // the [`TestValidator`] later on. 
@@ -595,12 +591,15 @@ mod tests { message.push(i_message.try_into().unwrap()); (protocol.clone(), message.into()) - }).collect(); - - event_sender.start_send(Event::NotificationsReceived { - remote: remote_peer.clone(), - messages, - }).expect("Event stream is unbounded; qed."); + }) + .collect(); + + event_sender + .start_send(Event::NotificationsReceived { + remote: remote_peer.clone(), + messages, + }) + .expect("Event stream is unbounded; qed."); } let mut received_msgs_per_topic_all_chan = HashMap::::new(); @@ -621,19 +620,19 @@ mod tests { match rx.poll_next_unpin(cx) { Poll::Ready(Some(_)) => { progress = true; - received_msgs_per_topic_all_chan.entry(*topic) + received_msgs_per_topic_all_chan + .entry(*topic) .and_modify(|e| *e += 1) .or_insert(1); }, - Poll::Ready(None) => unreachable!( - "Sender side of channel is never dropped", - ), + Poll::Ready(None) => + unreachable!("Sender side of channel is never dropped",), Poll::Pending => {}, } } if !progress { - break; + break } } Poll::Ready(()) @@ -655,10 +654,10 @@ mod tests { } // Past regressions. - prop(vec![], vec![vec![Message{ topic: H256::default()}]]); + prop(vec![], vec![vec![Message { topic: H256::default() }]]); prop( - vec![ChannelLengthAndTopic {length: 71, topic: H256::default()}], - vec![vec![Message{ topic: H256::default()}]], + vec![ChannelLengthAndTopic { length: 71, topic: H256::default() }], + vec![vec![Message { topic: H256::default() }]], ); QuickCheck::new().quickcheck(prop as fn(_, _)) diff --git a/client/network-gossip/src/lib.rs b/client/network-gossip/src/lib.rs index f8b6e8f0c2fdc..45fc19d6ef8ac 100644 --- a/client/network-gossip/src/lib.rs +++ b/client/network-gossip/src/lib.rs @@ -61,13 +61,15 @@ //! These status packets will typically contain light pieces of information //! used to inform peers of a current view of protocol state. 
-pub use self::bridge::GossipEngine; -pub use self::state_machine::TopicNotification; -pub use self::validator::{DiscardAll, MessageIntent, Validator, ValidatorContext, ValidationResult}; +pub use self::{ + bridge::GossipEngine, + state_machine::TopicNotification, + validator::{DiscardAll, MessageIntent, ValidationResult, Validator, ValidatorContext}, +}; use futures::prelude::*; use sc_network::{multiaddr, Event, ExHashT, NetworkService, PeerId, ReputationChange}; -use sp_runtime::{traits::Block as BlockT}; +use sp_runtime::traits::Block as BlockT; use std::{borrow::Cow, iter, pin::Pin, sync::Arc}; mod bridge; @@ -111,18 +113,23 @@ impl Network for Arc> { } fn add_set_reserved(&self, who: PeerId, protocol: Cow<'static, str>) { - let addr = iter::once(multiaddr::Protocol::P2p(who.into())) - .collect::(); - let result = NetworkService::add_peers_to_reserved_set(self, protocol, iter::once(addr).collect()); + let addr = + iter::once(multiaddr::Protocol::P2p(who.into())).collect::(); + let result = + NetworkService::add_peers_to_reserved_set(self, protocol, iter::once(addr).collect()); if let Err(err) = result { log::error!(target: "gossip", "add_set_reserved failed: {}", err); } } fn remove_set_reserved(&self, who: PeerId, protocol: Cow<'static, str>) { - let addr = iter::once(multiaddr::Protocol::P2p(who.into())) - .collect::(); - let result = NetworkService::remove_peers_from_reserved_set(self, protocol, iter::once(addr).collect()); + let addr = + iter::once(multiaddr::Protocol::P2p(who.into())).collect::(); + let result = NetworkService::remove_peers_from_reserved_set( + self, + protocol, + iter::once(addr).collect(), + ); if let Err(err) = result { log::error!(target: "gossip", "remove_set_reserved failed: {}", err); } diff --git a/client/network-gossip/src/state_machine.rs b/client/network-gossip/src/state_machine.rs index ea1a336585981..5cda52b9db493 100644 --- a/client/network-gossip/src/state_machine.rs +++ b/client/network-gossip/src/state_machine.rs @@ 
-16,18 +16,20 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use crate::{Network, MessageIntent, Validator, ValidatorContext, ValidationResult}; +use crate::{MessageIntent, Network, ValidationResult, Validator, ValidatorContext}; -use std::borrow::Cow; -use std::collections::{HashMap, HashSet}; -use std::sync::Arc; -use std::iter; -use std::time; -use lru::LruCache; use libp2p::PeerId; +use lru::LruCache; use prometheus_endpoint::{register, Counter, PrometheusError, Registry, U64}; -use sp_runtime::traits::{Block as BlockT, Hash, HashFor}; use sc_network::ObservedRole; +use sp_runtime::traits::{Block as BlockT, Hash, HashFor}; +use std::{ + borrow::Cow, + collections::{HashMap, HashSet}, + iter, + sync::Arc, + time, +}; use wasm_timer::Instant; // FIXME: Add additional spam/DoS attack protection: https://github.com/paritytech/substrate/issues/1115 @@ -87,17 +89,13 @@ impl<'g, 'p, B: BlockT> ValidatorContext for NetworkContext<'g, 'p, B> { /// Broadcast a message to all peers that have not received it previously. fn broadcast_message(&mut self, topic: B::Hash, message: Vec, force: bool) { - self.gossip.multicast( - self.network, - topic, - message, - force, - ); + self.gossip.multicast(self.network, topic, message, force); } /// Send addressed message to a peer. fn send_message(&mut self, who: &PeerId, message: Vec) { - self.network.write_notification(who.clone(), self.gossip.protocol.clone(), message); + self.network + .write_notification(who.clone(), self.gossip.protocol.clone(), message); } /// Send all messages with given topic to a peer. 
@@ -114,8 +112,9 @@ fn propagate<'a, B: BlockT, I>( peers: &mut HashMap>, validator: &Arc>, ) - // (msg_hash, topic, message) - where I: Clone + IntoIterator)>, +// (msg_hash, topic, message) +where + I: Clone + IntoIterator)>, { let mut message_allowed = validator.message_allowed(); @@ -124,7 +123,7 @@ fn propagate<'a, B: BlockT, I>( let intent = match intent { MessageIntent::Broadcast { .. } => if peer.known_messages.contains(&message_hash) { - continue; + continue } else { MessageIntent::Broadcast }, @@ -140,7 +139,7 @@ fn propagate<'a, B: BlockT, I>( }; if !message_allowed(id, intent, &topic, &message) { - continue; + continue } peer.known_messages.insert(message_hash.clone()); @@ -180,7 +179,7 @@ impl ConsensusGossip { Some(Err(e)) => { tracing::debug!(target: "gossip", "Failed to register metrics: {:?}", e); None - } + }, None => None, }; @@ -204,9 +203,7 @@ impl ConsensusGossip { ?role, "Registering peer", ); - self.peers.insert(who.clone(), PeerConsensus { - known_messages: HashSet::new(), - }); + self.peers.insert(who.clone(), PeerConsensus { known_messages: HashSet::new() }); let validator = self.validator.clone(); let mut context = NetworkContext { gossip: self, network }; @@ -221,12 +218,7 @@ impl ConsensusGossip { sender: Option, ) { if self.known_messages.put(message_hash.clone(), ()).is_none() { - self.messages.push(MessageEntry { - message_hash, - topic, - message, - sender, - }); + self.messages.push(MessageEntry { message_hash, topic, message, sender }); if let Some(ref metrics) = self.metrics { metrics.registered_messages.inc(); @@ -239,11 +231,7 @@ impl ConsensusGossip { /// the message's topic. No validation is performed on the message, if the /// message is already expired it should be dropped on the next garbage /// collection. 
- pub fn register_message( - &mut self, - topic: B::Hash, - message: Vec, - ) { + pub fn register_message(&mut self, topic: B::Hash, message: Vec) { let message_hash = HashFor::::hash(&message[..]); self.register_message_hashed(message_hash, topic, message, None); } @@ -267,7 +255,9 @@ impl ConsensusGossip { /// Rebroadcast all messages to all peers. fn rebroadcast(&mut self, network: &mut dyn Network) { - let messages = self.messages.iter() + let messages = self + .messages + .iter() .map(|entry| (&entry.message_hash, &entry.topic, &entry.message)); propagate( network, @@ -275,20 +265,28 @@ impl ConsensusGossip { messages, MessageIntent::PeriodicRebroadcast, &mut self.peers, - &self.validator + &self.validator, ); } /// Broadcast all messages with given topic. pub fn broadcast_topic(&mut self, network: &mut dyn Network, topic: B::Hash, force: bool) { - let messages = self.messages.iter() - .filter_map(|entry| - if entry.topic == topic { - Some((&entry.message_hash, &entry.topic, &entry.message)) - } else { None } - ); + let messages = self.messages.iter().filter_map(|entry| { + if entry.topic == topic { + Some((&entry.message_hash, &entry.topic, &entry.message)) + } else { + None + } + }); let intent = if force { MessageIntent::ForcedBroadcast } else { MessageIntent::Broadcast }; - propagate(network, self.protocol.clone(), messages, intent, &mut self.peers, &self.validator); + propagate( + network, + self.protocol.clone(), + messages, + intent, + &mut self.peers, + &self.validator, + ); } /// Prune old or no longer relevant consensus messages. 
Provide a predicate @@ -298,8 +296,7 @@ impl ConsensusGossip { let before = self.messages.len(); let mut message_expired = self.validator.message_expired(); - self.messages - .retain(|entry| !message_expired(entry.topic, &entry.message)); + self.messages.retain(|entry| !message_expired(entry.topic, &entry.message)); let expired_messages = before - self.messages.len(); @@ -323,10 +320,13 @@ impl ConsensusGossip { /// Get valid messages received in the past for a topic (might have expired meanwhile). pub fn messages_for(&mut self, topic: B::Hash) -> impl Iterator + '_ { - self.messages.iter().filter(move |e| e.topic == topic).map(|entry| TopicNotification { - message: entry.message.clone(), - sender: entry.sender.clone(), - }) + self.messages + .iter() + .filter(move |e| e.topic == topic) + .map(|entry| TopicNotification { + message: entry.message.clone(), + sender: entry.sender.clone(), + }) } /// Register incoming messages and return the ones that are new and valid (according to a gossip @@ -360,7 +360,7 @@ impl ConsensusGossip { "Ignored already known message", ); network.report_peer(who.clone(), rep::DUPLICATE_GOSSIP); - continue; + continue } // validate the message @@ -380,7 +380,7 @@ impl ConsensusGossip { protocol = %self.protocol, "Discard message from peer", ); - continue; + continue }, }; @@ -393,24 +393,19 @@ impl ConsensusGossip { protocol = %self.protocol, "Got message from unregistered peer", ); - continue; - } + continue + }, }; network.report_peer(who.clone(), rep::GOSSIP_SUCCESS); peer.known_messages.insert(message_hash); - to_forward.push((topic, TopicNotification { - message: message.clone(), - sender: Some(who.clone()) - })); + to_forward.push(( + topic, + TopicNotification { message: message.clone(), sender: Some(who.clone()) }, + )); if keep { - self.register_message_hashed( - message_hash, - topic, - message, - Some(who.clone()), - ); + self.register_message_hashed(message_hash, topic, message, Some(who.clone())); } } @@ -423,24 +418,21 @@ 
impl ConsensusGossip { network: &mut dyn Network, who: &PeerId, topic: B::Hash, - force: bool + force: bool, ) { let mut message_allowed = self.validator.message_allowed(); if let Some(ref mut peer) = self.peers.get_mut(who) { for entry in self.messages.iter().filter(|m| m.topic == topic) { - let intent = if force { - MessageIntent::ForcedBroadcast - } else { - MessageIntent::Broadcast - }; + let intent = + if force { MessageIntent::ForcedBroadcast } else { MessageIntent::Broadcast }; if !force && peer.known_messages.contains(&entry.message_hash) { - continue; + continue } if !message_allowed(who, intent, &entry.topic, &entry.message) { - continue; + continue } peer.known_messages.insert(entry.message_hash.clone()); @@ -452,7 +444,11 @@ impl ConsensusGossip { ?entry.message, "Sending topic message", ); - network.write_notification(who.clone(), self.protocol.clone(), entry.message.clone()); + network.write_notification( + who.clone(), + self.protocol.clone(), + entry.message.clone(), + ); } } } @@ -474,18 +470,13 @@ impl ConsensusGossip { iter::once((&message_hash, &topic, &message)), intent, &mut self.peers, - &self.validator + &self.validator, ); } /// Send addressed message to a peer. The message is not kept or multicast /// later on. 
- pub fn send_message( - &mut self, - network: &mut dyn Network, - who: &PeerId, - message: Vec, - ) { + pub fn send_message(&mut self, network: &mut dyn Network, who: &PeerId, message: Vec) { let peer = match self.peers.get_mut(who) { None => return, Some(peer) => peer, @@ -534,11 +525,15 @@ impl Metrics { #[cfg(test)] mod tests { + use super::*; use futures::prelude::*; use sc_network::{Event, ReputationChange}; - use sp_runtime::testing::{H256, Block as RawBlock, ExtrinsicWrapper}; - use std::{borrow::Cow, pin::Pin, sync::{Arc, Mutex}}; - use super::*; + use sp_runtime::testing::{Block as RawBlock, ExtrinsicWrapper, H256}; + use std::{ + borrow::Cow, + pin::Pin, + sync::{Arc, Mutex}, + }; type Block = RawBlock>; @@ -552,7 +547,7 @@ mod tests { sender: None, }); } - } + }; } struct AllowAll; @@ -568,7 +563,7 @@ mod tests { } struct DiscardAll; - impl Validator for DiscardAll{ + impl Validator for DiscardAll { fn validate( &self, _context: &mut dyn ValidatorContext, @@ -602,11 +597,9 @@ mod tests { unimplemented!(); } - fn add_set_reserved(&self, _: PeerId, _: Cow<'static, str>) { - } + fn add_set_reserved(&self, _: PeerId, _: Cow<'static, str>) {} - fn remove_set_reserved(&self, _: PeerId, _: Cow<'static, str>) { - } + fn remove_set_reserved(&self, _: PeerId, _: Cow<'static, str>) {} fn write_notification(&self, _: PeerId, _: Cow<'static, str>, _: Vec) { unimplemented!(); @@ -677,7 +670,7 @@ mod tests { assert_eq!( consensus.messages_for(topic).next(), - Some(TopicNotification { message: message, sender: None }), + Some(TopicNotification { message, sender: None }), ); } @@ -712,15 +705,12 @@ mod tests { #[test] fn on_incoming_ignores_discarded_messages() { let to_forward = ConsensusGossip::::new(Arc::new(DiscardAll), "/foo".into(), None) - .on_incoming( - &mut NoOpNetwork::default(), - PeerId::random(), - vec![vec![1, 2, 3]], - ); + .on_incoming(&mut NoOpNetwork::default(), PeerId::random(), vec![vec![1, 2, 3]]); assert!( to_forward.is_empty(), - "Expected 
`on_incoming` to ignore discarded message but got {:?}", to_forward, + "Expected `on_incoming` to ignore discarded message but got {:?}", + to_forward, ); } diff --git a/client/network-gossip/src/validator.rs b/client/network-gossip/src/validator.rs index 4b5440c1a06f3..9a2652d03f642 100644 --- a/client/network-gossip/src/validator.rs +++ b/client/network-gossip/src/validator.rs @@ -26,15 +26,14 @@ pub trait Validator: Send + Sync { } /// New connection is dropped. - fn peer_disconnected(&self, _context: &mut dyn ValidatorContext, _who: &PeerId) { - } + fn peer_disconnected(&self, _context: &mut dyn ValidatorContext, _who: &PeerId) {} /// Validate consensus message. fn validate( &self, context: &mut dyn ValidatorContext, sender: &PeerId, - data: &[u8] + data: &[u8], ) -> ValidationResult; /// Produce a closure for validating messages on a given topic. @@ -43,7 +42,9 @@ pub trait Validator: Send + Sync { } /// Produce a closure for filtering egress messages. - fn message_allowed<'a>(&'a self) -> Box bool + 'a> { + fn message_allowed<'a>( + &'a self, + ) -> Box bool + 'a> { Box::new(move |_who, _intent, _topic, _data| true) } } @@ -99,7 +100,9 @@ impl Validator for DiscardAll { Box::new(move |_topic, _data| true) } - fn message_allowed<'a>(&'a self) -> Box bool + 'a> { + fn message_allowed<'a>( + &'a self, + ) -> Box bool + 'a> { Box::new(move |_who, _intent, _topic, _data| false) } } diff --git a/client/network/build.rs b/client/network/build.rs index 0eea622e87574..6e5b83d4e58ae 100644 --- a/client/network/build.rs +++ b/client/network/build.rs @@ -1,8 +1,5 @@ -const PROTOS: &[&str] = &[ - "src/schema/api.v1.proto", - "src/schema/light.v1.proto", - "src/schema/bitswap.v1.2.0.proto", -]; +const PROTOS: &[&str] = + &["src/schema/api.v1.proto", "src/schema/light.v1.proto", "src/schema/bitswap.v1.2.0.proto"]; fn main() { prost_build::compile_protos(PROTOS, &["src/schema"]).unwrap(); diff --git a/client/network/src/behaviour.rs b/client/network/src/behaviour.rs index 
576c49d1da366..37dfc0cf99c24 100644 --- a/client/network/src/behaviour.rs +++ b/client/network/src/behaviour.rs @@ -17,27 +17,33 @@ // along with this program. If not, see . use crate::{ - config::ProtocolId, bitswap::Bitswap, + config::ProtocolId, discovery::{DiscoveryBehaviour, DiscoveryConfig, DiscoveryOut}, + light_client_requests, peer_info, protocol::{message::Roles, CustomMessageOutcome, NotificationsSink, Protocol}, - peer_info, request_responses, light_client_requests, - ObservedRole, DhtEvent, + request_responses, DhtEvent, ObservedRole, }; use bytes::Bytes; use futures::{channel::oneshot, stream::StreamExt}; -use libp2p::NetworkBehaviour; -use libp2p::core::{Multiaddr, PeerId, PublicKey}; -use libp2p::identify::IdentifyInfo; -use libp2p::kad::record; -use libp2p::swarm::{ - NetworkBehaviourAction, NetworkBehaviourEventProcess, PollParameters, toggle::Toggle +use libp2p::{ + core::{Multiaddr, PeerId, PublicKey}, + identify::IdentifyInfo, + kad::record, + swarm::{toggle::Toggle, NetworkBehaviourAction, NetworkBehaviourEventProcess, PollParameters}, + NetworkBehaviour, }; use log::debug; use prost::Message; -use sp_consensus::{BlockOrigin, import_queue::{IncomingBlock, Origin}}; -use sp_runtime::{traits::{Block as BlockT, NumberFor}, Justifications}; +use sp_consensus::{ + import_queue::{IncomingBlock, Origin}, + BlockOrigin, +}; +use sp_runtime::{ + traits::{Block as BlockT, NumberFor}, + Justifications, +}; use std::{ borrow::Cow, collections::{HashSet, VecDeque}, @@ -47,8 +53,7 @@ use std::{ }; pub use crate::request_responses::{ - ResponseFailure, InboundFailure, RequestFailure, OutboundFailure, RequestId, - IfDisconnected + IfDisconnected, InboundFailure, OutboundFailure, RequestFailure, RequestId, ResponseFailure, }; /// General behaviour of the network. Combines all protocols together. 
@@ -210,8 +215,9 @@ impl Behaviour { peer_info: peer_info::PeerInfoBehaviour::new(user_agent, local_public_key), discovery: disco_config.finish(), bitswap: bitswap.into(), - request_responses: - request_responses::RequestResponsesBehaviour::new(request_response_protocols.into_iter())?, + request_responses: request_responses::RequestResponsesBehaviour::new( + request_response_protocols.into_iter(), + )?, light_client_request_sender, events: VecDeque::new(), block_request_protocol_name, @@ -233,7 +239,9 @@ impl Behaviour { /// /// Identifies Kademlia instances by their [`ProtocolId`] and kbuckets by the base 2 logarithm /// of their lower bound. - pub fn num_entries_per_kbucket(&mut self) -> impl ExactSizeIterator)> { + pub fn num_entries_per_kbucket( + &mut self, + ) -> impl ExactSizeIterator)> { self.discovery.num_entries_per_kbucket() } @@ -243,7 +251,9 @@ impl Behaviour { } /// Returns the total size in bytes of all the records in the Kademlia record stores. - pub fn kademlia_records_total_size(&mut self) -> impl ExactSizeIterator { + pub fn kademlia_records_total_size( + &mut self, + ) -> impl ExactSizeIterator { self.discovery.kademlia_records_total_size() } @@ -265,7 +275,8 @@ impl Behaviour { pending_response: oneshot::Sender, RequestFailure>>, connect: IfDisconnected, ) { - self.request_responses.send_request(target, protocol, request, pending_response, connect) + self.request_responses + .send_request(target, protocol, request, pending_response, connect) } /// Returns a shared reference to the user protocol. 
@@ -307,21 +318,20 @@ fn reported_roles_to_observed_role(roles: Roles) -> ObservedRole { } } -impl NetworkBehaviourEventProcess for -Behaviour { +impl NetworkBehaviourEventProcess for Behaviour { fn inject_event(&mut self, event: void::Void) { void::unreachable(event) } } -impl NetworkBehaviourEventProcess> for -Behaviour { +impl NetworkBehaviourEventProcess> for Behaviour { fn inject_event(&mut self, event: CustomMessageOutcome) { match event { CustomMessageOutcome::BlockImport(origin, blocks) => self.events.push_back(BehaviourOut::BlockImport(origin, blocks)), - CustomMessageOutcome::JustificationImport(origin, hash, nb, justification) => - self.events.push_back(BehaviourOut::JustificationImport(origin, hash, nb, justification)), + CustomMessageOutcome::JustificationImport(origin, hash, nb, justification) => self + .events + .push_back(BehaviourOut::JustificationImport(origin, hash, nb, justification)), CustomMessageOutcome::BlockRequest { target, request, pending_response } => { let mut buf = Vec::with_capacity(request.encoded_len()); if let Err(err) = request.encode(&mut buf) { @@ -334,7 +344,11 @@ Behaviour { } self.request_responses.send_request( - &target, &self.block_request_protocol_name, buf, pending_response, IfDisconnected::ImmediateError, + &target, + &self.block_request_protocol_name, + buf, + pending_response, + IfDisconnected::ImmediateError, ); }, CustomMessageOutcome::StateRequest { target, request, pending_response } => { @@ -349,11 +363,19 @@ Behaviour { } self.request_responses.send_request( - &target, &self.state_request_protocol_name, buf, pending_response, IfDisconnected::ImmediateError, + &target, + &self.state_request_protocol_name, + buf, + pending_response, + IfDisconnected::ImmediateError, ); }, CustomMessageOutcome::NotificationStreamOpened { - remote, protocol, negotiated_fallback, roles, notifications_sink + remote, + protocol, + negotiated_fallback, + roles, + notifications_sink, } => { 
self.events.push_back(BehaviourOut::NotificationStreamOpened { remote, @@ -363,32 +385,33 @@ Behaviour { notifications_sink: notifications_sink.clone(), }); }, - CustomMessageOutcome::NotificationStreamReplaced { remote, protocol, notifications_sink } => - self.events.push_back(BehaviourOut::NotificationStreamReplaced { - remote, - protocol, - notifications_sink, - }), - CustomMessageOutcome::NotificationStreamClosed { remote, protocol } => - self.events.push_back(BehaviourOut::NotificationStreamClosed { - remote, - protocol, - }), + CustomMessageOutcome::NotificationStreamReplaced { + remote, + protocol, + notifications_sink, + } => self.events.push_back(BehaviourOut::NotificationStreamReplaced { + remote, + protocol, + notifications_sink, + }), + CustomMessageOutcome::NotificationStreamClosed { remote, protocol } => self + .events + .push_back(BehaviourOut::NotificationStreamClosed { remote, protocol }), CustomMessageOutcome::NotificationsReceived { remote, messages } => { self.events.push_back(BehaviourOut::NotificationsReceived { remote, messages }); }, CustomMessageOutcome::PeerNewBest(peer_id, number) => { self.light_client_request_sender.update_best_block(&peer_id, number); - } + }, CustomMessageOutcome::SyncConnected(peer_id) => { self.light_client_request_sender.inject_connected(peer_id); self.events.push_back(BehaviourOut::SyncConnected(peer_id)) - } + }, CustomMessageOutcome::SyncDisconnected(peer_id) => { self.light_client_request_sender.inject_disconnected(peer_id); self.events.push_back(BehaviourOut::SyncDisconnected(peer_id)) - } - CustomMessageOutcome::None => {} + }, + CustomMessageOutcome::None => {}, } } } @@ -397,38 +420,29 @@ impl NetworkBehaviourEventProcess for Behav fn inject_event(&mut self, event: request_responses::Event) { match event { request_responses::Event::InboundRequest { peer, protocol, result } => { - self.events.push_back(BehaviourOut::InboundRequest { + self.events.push_back(BehaviourOut::InboundRequest { peer, protocol, 
result }); + }, + request_responses::Event::RequestFinished { peer, protocol, duration, result } => { + self.events.push_back(BehaviourOut::RequestFinished { peer, protocol, + duration, result, }); - } - request_responses::Event::RequestFinished { peer, protocol, duration, result } => { - self.events.push_back(BehaviourOut::RequestFinished { - peer, protocol, duration, result, - }); }, - request_responses::Event::ReputationChanges { peer, changes } => { + request_responses::Event::ReputationChanges { peer, changes } => for change in changes { self.substrate.report_peer(peer, change); - } - } + }, } } } -impl NetworkBehaviourEventProcess - for Behaviour { +impl NetworkBehaviourEventProcess for Behaviour { fn inject_event(&mut self, event: peer_info::PeerInfoEvent) { let peer_info::PeerInfoEvent::Identified { peer_id, - info: IdentifyInfo { - protocol_version, - agent_version, - mut listen_addrs, - protocols, - .. - }, + info: IdentifyInfo { protocol_version, agent_version, mut listen_addrs, protocols, .. }, } = event; if listen_addrs.len() > 30 { @@ -447,8 +461,7 @@ impl NetworkBehaviourEventProcess } } -impl NetworkBehaviourEventProcess - for Behaviour { +impl NetworkBehaviourEventProcess for Behaviour { fn inject_event(&mut self, out: DiscoveryOut) { match out { DiscoveryOut::UnroutablePeer(_peer_id) => { @@ -456,27 +469,28 @@ impl NetworkBehaviourEventProcess // to Kademlia is handled by the `Identify` protocol, part of the // `PeerInfoBehaviour`. See the `NetworkBehaviourEventProcess` // implementation for `PeerInfoEvent`. 
- } + }, DiscoveryOut::Discovered(peer_id) => { self.substrate.add_default_set_discovered_nodes(iter::once(peer_id)); - } + }, DiscoveryOut::ValueFound(results, duration) => { - self.events.push_back(BehaviourOut::Dht(DhtEvent::ValueFound(results), duration)); - } + self.events + .push_back(BehaviourOut::Dht(DhtEvent::ValueFound(results), duration)); + }, DiscoveryOut::ValueNotFound(key, duration) => { self.events.push_back(BehaviourOut::Dht(DhtEvent::ValueNotFound(key), duration)); - } + }, DiscoveryOut::ValuePut(key, duration) => { self.events.push_back(BehaviourOut::Dht(DhtEvent::ValuePut(key), duration)); - } + }, DiscoveryOut::ValuePutFailed(key, duration) => { - self.events.push_back(BehaviourOut::Dht(DhtEvent::ValuePutFailed(key), duration)); - } - DiscoveryOut::RandomKademliaStarted(protocols) => { + self.events + .push_back(BehaviourOut::Dht(DhtEvent::ValuePutFailed(key), duration)); + }, + DiscoveryOut::RandomKademliaStarted(protocols) => for protocol in protocols { self.events.push_back(BehaviourOut::RandomKademliaStarted(protocol)); - } - } + }, } } } @@ -488,22 +502,16 @@ impl Behaviour { _: &mut impl PollParameters, ) -> Poll>> { use light_client_requests::sender::OutEvent; - while let Poll::Ready(Some(event)) = - self.light_client_request_sender.poll_next_unpin(cx) - { + while let Poll::Ready(Some(event)) = self.light_client_request_sender.poll_next_unpin(cx) { match event { - OutEvent::SendRequest { - target, - request, - pending_response, - protocol_name, - } => self.request_responses.send_request( - &target, - &protocol_name, - request, - pending_response, - IfDisconnected::ImmediateError, - ), + OutEvent::SendRequest { target, request, pending_response, protocol_name } => + self.request_responses.send_request( + &target, + &protocol_name, + request, + pending_response, + IfDisconnected::ImmediateError, + ), } } diff --git a/client/network/src/bitswap.rs b/client/network/src/bitswap.rs index aea2b8420cb2c..3a10367c64a4b 100644 --- 
a/client/network/src/bitswap.rs +++ b/client/network/src/bitswap.rs @@ -20,31 +20,39 @@ //! Only supports bitswap 1.2.0. //! CID is expected to reference 256-bit Blake2b transaction hash. -use std::collections::VecDeque; -use std::io; -use std::sync::Arc; -use std::task::{Context, Poll}; +use crate::{ + chain::Client, + schema::bitswap::{ + message::{wantlist::WantType, Block as MessageBlock, BlockPresence, BlockPresenceType}, + Message as BitswapMessage, + }, +}; use cid::Version; use core::pin::Pin; -use futures::Future; -use futures::io::{AsyncRead, AsyncWrite}; -use libp2p::core::{ - connection::ConnectionId, Multiaddr, PeerId, - upgrade, InboundUpgrade, OutboundUpgrade, UpgradeInfo, +use futures::{ + io::{AsyncRead, AsyncWrite}, + Future, }; -use libp2p::swarm::{ - NetworkBehaviour, NetworkBehaviourAction, NotifyHandler, PollParameters, - ProtocolsHandler, IntoProtocolsHandler, OneShotHandler, +use libp2p::{ + core::{ + connection::ConnectionId, upgrade, InboundUpgrade, Multiaddr, OutboundUpgrade, PeerId, + UpgradeInfo, + }, + swarm::{ + IntoProtocolsHandler, NetworkBehaviour, NetworkBehaviourAction, NotifyHandler, + OneShotHandler, PollParameters, ProtocolsHandler, + }, }; -use log::{error, debug, trace}; +use log::{debug, error, trace}; use prost::Message; -use sp_runtime::traits::{Block as BlockT}; -use unsigned_varint::{encode as varint_encode}; -use crate::chain::Client; -use crate::schema::bitswap::{ - Message as BitswapMessage, - message::{wantlist::WantType, Block as MessageBlock, BlockPresenceType, BlockPresence}, +use sp_runtime::traits::Block as BlockT; +use std::{ + collections::VecDeque, + io, + sync::Arc, + task::{Context, Poll}, }; +use unsigned_varint::encode as varint_encode; const LOG_TARGET: &str = "bitswap"; @@ -182,10 +190,7 @@ pub struct Bitswap { impl Bitswap { /// Create a new instance of the bitswap protocol handler. 
pub fn new(client: Arc>) -> Self { - Bitswap { - client, - ready_blocks: Default::default(), - } + Bitswap { client, ready_blocks: Default::default() } } } @@ -201,11 +206,9 @@ impl NetworkBehaviour for Bitswap { Vec::new() } - fn inject_connected(&mut self, _peer: &PeerId) { - } + fn inject_connected(&mut self, _peer: &PeerId) {} - fn inject_disconnected(&mut self, _peer: &PeerId) { - } + fn inject_disconnected(&mut self, _peer: &PeerId) {} fn inject_event(&mut self, peer: PeerId, _connection: ConnectionId, message: HandlerEvent) { let request = match message { @@ -215,7 +218,7 @@ impl NetworkBehaviour for Bitswap { trace!(target: LOG_TARGET, "Received request: {:?} from {}", request, peer); if self.ready_blocks.len() > MAX_RESPONSE_QUEUE { debug!(target: LOG_TARGET, "Ignored request: queue is full"); - return; + return } let mut response = BitswapMessage { wantlist: None, @@ -227,29 +230,25 @@ impl NetworkBehaviour for Bitswap { let wantlist = match request.wantlist { Some(wantlist) => wantlist, None => { - debug!( - target: LOG_TARGET, - "Unexpected bitswap message from {}", - peer, - ); - return; - } + debug!(target: LOG_TARGET, "Unexpected bitswap message from {}", peer,); + return + }, }; if wantlist.entries.len() > MAX_WANTED_BLOCKS { trace!(target: LOG_TARGET, "Ignored request: too many entries"); - return; + return } for entry in wantlist.entries { let cid = match cid::Cid::read_bytes(entry.block.as_slice()) { Ok(cid) => cid, Err(e) => { trace!(target: LOG_TARGET, "Bad CID {:?}: {:?}", entry.block, e); - continue; - } + continue + }, }; - if cid.version() != cid::Version::V1 - || cid.hash().code() != u64::from(cid::multihash::Code::Blake2b256) - || cid.hash().size() != 32 + if cid.version() != cid::Version::V1 || + cid.hash().code() != u64::from(cid::multihash::Code::Blake2b256) || + cid.hash().size() != 32 { debug!(target: LOG_TARGET, "Ignoring unsupported CID {}: {}", peer, cid); continue @@ -261,7 +260,7 @@ impl NetworkBehaviour for Bitswap { Err(e) => 
{ error!(target: LOG_TARGET, "Error retrieving transaction {}: {}", hash, e); None - } + }, }; match transaction { Some(transaction) => { @@ -273,10 +272,9 @@ impl NetworkBehaviour for Bitswap { mh_type: cid.hash().code(), mh_len: cid.hash().size(), }; - response.payload.push(MessageBlock { - prefix: prefix.to_bytes(), - data: transaction, - }); + response + .payload + .push(MessageBlock { prefix: prefix.to_bytes(), data: transaction }); } else { response.block_presences.push(BlockPresence { r#type: BlockPresenceType::Have as i32, @@ -292,7 +290,7 @@ impl NetworkBehaviour for Bitswap { cid: cid.to_bytes(), }); } - } + }, } } trace!(target: LOG_TARGET, "Response: {:?}", response); @@ -304,7 +302,7 @@ impl NetworkBehaviour for Bitswap { <::Handler as ProtocolsHandler>::InEvent, Self::OutEvent, >, - > { + >{ if let Some((peer_id, message)) = self.ready_blocks.pop_front() { return Poll::Ready(NetworkBehaviourAction::NotifyHandler { peer_id: peer_id.clone(), diff --git a/client/network/src/block_request_handler.rs b/client/network/src/block_request_handler.rs index ce65e5eca3457..d530349f0a023 100644 --- a/client/network/src/block_request_handler.rs +++ b/client/network/src/block_request_handler.rs @@ -17,25 +17,32 @@ //! Helper for handling (i.e. answering) block requests from a remote peer via the //! [`crate::request_responses::RequestResponsesBehaviour`]. 
-use codec::{Encode, Decode}; -use crate::chain::Client; -use crate::config::ProtocolId; -use crate::protocol::{message::BlockAttributes}; -use crate::request_responses::{IncomingRequest, OutgoingResponse, ProtocolConfig}; -use crate::schema::v1::block_request::FromBlock; -use crate::schema::v1::{BlockResponse, Direction}; -use crate::{PeerId, ReputationChange}; -use futures::channel::{mpsc, oneshot}; -use futures::stream::StreamExt; +use crate::{ + chain::Client, + config::ProtocolId, + protocol::message::BlockAttributes, + request_responses::{IncomingRequest, OutgoingResponse, ProtocolConfig}, + schema::v1::{block_request::FromBlock, BlockResponse, Direction}, + PeerId, ReputationChange, +}; +use codec::{Decode, Encode}; +use futures::{ + channel::{mpsc, oneshot}, + stream::StreamExt, +}; use log::debug; use lru::LruCache; use prost::Message; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{Block as BlockT, Header, One, Zero}; -use std::cmp::min; -use std::sync::Arc; -use std::time::Duration; -use std::hash::{Hasher, Hash}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header, One, Zero}, +}; +use std::{ + cmp::min, + hash::{Hash, Hasher}, + sync::Arc, + time::Duration, +}; const LOG_TARGET: &str = "sync"; const MAX_BLOCKS_IN_RESPONSE: usize = 128; @@ -139,9 +146,7 @@ impl BlockRequestHandler { Ok(()) => debug!(target: LOG_TARGET, "Handled block request from {}.", peer), Err(e) => debug!( target: LOG_TARGET, - "Failed to handle block request from {}: {}", - peer, - e, + "Failed to handle block request from {}: {}", peer, e, ), } } @@ -159,11 +164,11 @@ impl BlockRequestHandler { FromBlock::Hash(ref h) => { let h = Decode::decode(&mut h.as_ref())?; BlockId::::Hash(h) - } + }, FromBlock::Number(ref n) => { let n = Decode::decode(&mut n.as_ref())?; BlockId::::Number(n) - } + }, }; let max_blocks = if request.max_blocks == 0 { @@ -172,8 +177,8 @@ impl BlockRequestHandler { min(request.max_blocks as usize, MAX_BLOCKS_IN_RESPONSE) }; - 
let direction = Direction::from_i32(request.direction) - .ok_or(HandleRequestError::ParseDirection)?; + let direction = + Direction::from_i32(request.direction).ok_or(HandleRequestError::ParseDirection)?; let attributes = BlockAttributes::from_be_u32(request.fields)?; @@ -201,7 +206,7 @@ impl BlockRequestHandler { }, None => { self.seen_requests.put(key.clone(), SeenRequestsValue::First); - } + }, } debug!( @@ -247,11 +252,13 @@ impl BlockRequestHandler { Err(()) }; - pending_response.send(OutgoingResponse { - result, - reputation_changes: reputation_change.into_iter().collect(), - sent_feedback: None, - }).map_err(|_| HandleRequestError::SendResponse) + pending_response + .send(OutgoingResponse { + result, + reputation_changes: reputation_change.into_iter().collect(), + sent_feedback: None, + }) + .map_err(|_| HandleRequestError::SendResponse) } fn get_block_response( @@ -298,10 +305,8 @@ impl BlockRequestHandler { let justification = justifications.and_then(|just| just.into_justification(*b"FRNK")); - let is_empty_justification = justification - .as_ref() - .map(|j| j.is_empty()) - .unwrap_or(false); + let is_empty_justification = + justification.as_ref().map(|j| j.is_empty()).unwrap_or(false); let justification = justification.unwrap_or_default(); @@ -310,25 +315,27 @@ impl BlockRequestHandler { let body = if get_body { match self.client.block_body(&BlockId::Hash(hash))? { - Some(mut extrinsics) => extrinsics.iter_mut() - .map(|extrinsic| extrinsic.encode()) - .collect(), + Some(mut extrinsics) => + extrinsics.iter_mut().map(|extrinsic| extrinsic.encode()).collect(), None => { log::trace!(target: LOG_TARGET, "Missing data for block request."); - break; - } + break + }, } } else { Vec::new() }; - let indexed_body = if get_indexed_body { + let indexed_body = if get_indexed_body { match self.client.block_indexed_body(&BlockId::Hash(hash))? 
{ Some(transactions) => transactions, None => { - log::trace!(target: LOG_TARGET, "Missing indexed block data for block request."); - break; - } + log::trace!( + target: LOG_TARGET, + "Missing indexed block data for block request." + ); + break + }, } } else { Vec::new() @@ -336,11 +343,7 @@ impl BlockRequestHandler { let block_data = crate::schema::v1::BlockData { hash: hash.encode(), - header: if get_header { - header.encode() - } else { - Vec::new() - }, + header: if get_header { header.encode() } else { Vec::new() }, body, receipt: Vec::new(), message_queue: Vec::new(), @@ -358,15 +361,13 @@ impl BlockRequestHandler { } match direction { - Direction::Ascending => { - block_id = BlockId::Number(number + One::one()) - } + Direction::Ascending => block_id = BlockId::Number(number + One::one()), Direction::Descending => { if number.is_zero() { break } block_id = BlockId::Hash(parent_hash) - } + }, } } diff --git a/client/network/src/chain.rs b/client/network/src/chain.rs index 32d4cc9ff024f..599e9d796c118 100644 --- a/client/network/src/chain.rs +++ b/client/network/src/chain.rs @@ -18,18 +18,30 @@ //! Blockchain access trait -use sp_blockchain::{Error, HeaderBackend, HeaderMetadata}; use sc_client_api::{BlockBackend, ProofProvider}; +pub use sc_client_api::{ImportedState, StorageData, StorageKey}; +use sp_blockchain::{Error, HeaderBackend, HeaderMetadata}; use sp_runtime::traits::{Block as BlockT, BlockIdTo}; -pub use sc_client_api::{StorageKey, StorageData, ImportedState}; /// Local client abstraction for the network. 
-pub trait Client: HeaderBackend + ProofProvider + BlockIdTo - + BlockBackend + HeaderMetadata + Send + Sync -{} - -impl Client for T - where - T: HeaderBackend + ProofProvider + BlockIdTo - + BlockBackend + HeaderMetadata + Send + Sync -{} +pub trait Client: + HeaderBackend + + ProofProvider + + BlockIdTo + + BlockBackend + + HeaderMetadata + + Send + + Sync +{ +} + +impl Client for T where + T: HeaderBackend + + ProofProvider + + BlockIdTo + + BlockBackend + + HeaderMetadata + + Send + + Sync +{ +} diff --git a/client/network/src/config.rs b/client/network/src/config.rs index 8cc467a7fb9fd..e41315bd787be 100644 --- a/client/network/src/config.rs +++ b/client/network/src/config.rs @@ -21,14 +21,14 @@ //! The [`Params`] struct is the struct that must be passed in order to initialize the networking. //! See the documentation of [`Params`]. -pub use crate::chain::Client; -pub use crate::on_demand_layer::{AlwaysBadChecker, OnDemand}; -pub use crate::request_responses::{ - IncomingRequest, - OutgoingResponse, - ProtocolConfig as RequestResponseConfig, +pub use crate::{ + chain::Client, + on_demand_layer::{AlwaysBadChecker, OnDemand}, + request_responses::{ + IncomingRequest, OutgoingResponse, ProtocolConfig as RequestResponseConfig, + }, }; -pub use libp2p::{identity, core::PublicKey, wasm_ext::ExtTransport, build_multiaddr}; +pub use libp2p::{build_multiaddr, core::PublicKey, identity, wasm_ext::ExtTransport}; // Note: this re-export shouldn't be part of the public API of the crate and will be removed in // the future. 
@@ -46,15 +46,19 @@ use libp2p::{ use prometheus_endpoint::Registry; use sp_consensus::{block_validation::BlockAnnounceValidator, import_queue::ImportQueue}; use sp_runtime::traits::Block as BlockT; -use std::{borrow::Cow, convert::TryFrom, future::Future, pin::Pin, str::FromStr}; use std::{ + borrow::Cow, collections::HashMap, + convert::TryFrom, error::Error, fs, + future::Future, io::{self, Write}, net::Ipv4Addr, path::{Path, PathBuf}, + pin::Pin, str, + str::FromStr, sync::Arc, }; use zeroize::Zeroize; @@ -181,7 +185,7 @@ pub enum TransactionImport { } /// Future resolving to transaction import result. -pub type TransactionImportFuture = Pin + Send>>; +pub type TransactionImportFuture = Pin + Send>>; /// Transaction pool interface pub trait TransactionPool: Send + Sync { @@ -192,10 +196,7 @@ pub trait TransactionPool: Send + Sync { /// Import a transaction into the pool. /// /// This will return future. - fn import( - &self, - transaction: B::Extrinsic, - ) -> TransactionImportFuture; + fn import(&self, transaction: B::Extrinsic) -> TransactionImportFuture; /// Notify the pool about transactions broadcast. fn on_broadcasted(&self, propagations: HashMap>); /// Get transaction by hash. @@ -219,16 +220,15 @@ impl TransactionPool for EmptyTransaction Default::default() } - fn import( - &self, - _transaction: B::Extrinsic - ) -> TransactionImportFuture { + fn import(&self, _transaction: B::Extrinsic) -> TransactionImportFuture { Box::pin(future::ready(TransactionImport::KnownGood)) } fn on_broadcasted(&self, _: HashMap>) {} - fn transaction(&self, _h: &H) -> Option { None } + fn transaction(&self, _h: &H) -> Option { + None + } } /// Name of a protocol, transmitted on the wire. Should be unique for each chain. Always UTF-8. @@ -274,10 +274,10 @@ pub fn parse_str_addr(addr_str: &str) -> Result<(PeerId, Multiaddr), ParseErr> { } /// Splits a Multiaddress into a Multiaddress and PeerId. 
-pub fn parse_addr(mut addr: Multiaddr)-> Result<(PeerId, Multiaddr), ParseErr> { +pub fn parse_addr(mut addr: Multiaddr) -> Result<(PeerId, Multiaddr), ParseErr> { let who = match addr.pop() { - Some(multiaddr::Protocol::P2p(key)) => PeerId::from_multihash(key) - .map_err(|_| ParseErr::InvalidPeerId)?, + Some(multiaddr::Protocol::P2p(key)) => + PeerId::from_multihash(key).map_err(|_| ParseErr::InvalidPeerId)?, _ => return Err(ParseErr::PeerIdMissing), }; @@ -325,10 +325,7 @@ impl FromStr for MultiaddrWithPeerId { fn from_str(s: &str) -> Result { let (peer_id, multiaddr) = parse_str_addr(s)?; - Ok(MultiaddrWithPeerId { - peer_id, - multiaddr, - }) + Ok(MultiaddrWithPeerId { peer_id, multiaddr }) } } @@ -504,18 +501,13 @@ impl NetworkConfiguration { /// Create new default configuration for localhost-only connection with random port (useful for testing) pub fn new_local() -> NetworkConfiguration { - let mut config = NetworkConfiguration::new( - "test-node", - "test-client", - Default::default(), - None, - ); - - config.listen_addresses = vec![ - iter::once(multiaddr::Protocol::Ip4(Ipv4Addr::new(127, 0, 0, 1))) + let mut config = + NetworkConfiguration::new("test-node", "test-client", Default::default(), None); + + config.listen_addresses = + vec![iter::once(multiaddr::Protocol::Ip4(Ipv4Addr::new(127, 0, 0, 1))) .chain(iter::once(multiaddr::Protocol::Tcp(0))) - .collect() - ]; + .collect()]; config.allow_non_globals_in_dht = true; config @@ -523,18 +515,13 @@ impl NetworkConfiguration { /// Create new default configuration for localhost-only connection with random port (useful for testing) pub fn new_memory() -> NetworkConfiguration { - let mut config = NetworkConfiguration::new( - "test-node", - "test-client", - Default::default(), - None, - ); - - config.listen_addresses = vec![ - iter::once(multiaddr::Protocol::Ip4(Ipv4Addr::new(127, 0, 0, 1))) + let mut config = + NetworkConfiguration::new("test-node", "test-client", Default::default(), None); + + 
config.listen_addresses = + vec![iter::once(multiaddr::Protocol::Ip4(Ipv4Addr::new(127, 0, 0, 1))) .chain(iter::once(multiaddr::Protocol::Tcp(0))) - .collect() - ]; + .collect()]; config.allow_non_globals_in_dht = true; config @@ -674,7 +661,7 @@ impl NonReservedPeerMode { #[derive(Clone, Debug)] pub enum NodeKeyConfig { /// A Ed25519 secret key configuration. - Ed25519(Secret) + Ed25519(Secret), } impl Default for NodeKeyConfig { @@ -698,7 +685,7 @@ pub enum Secret { /// * `ed25519::SecretKey`: An unencoded 32 bytes Ed25519 secret key. File(PathBuf), /// Always generate a new secret key `K`. - New + New, } impl fmt::Debug for Secret { @@ -725,35 +712,27 @@ impl NodeKeyConfig { pub fn into_keypair(self) -> io::Result { use NodeKeyConfig::*; match self { - Ed25519(Secret::New) => - Ok(Keypair::generate_ed25519()), - - Ed25519(Secret::Input(k)) => - Ok(Keypair::Ed25519(k.into())), - - Ed25519(Secret::File(f)) => - get_secret( - f, - |mut b| { - match String::from_utf8(b.to_vec()) - .ok() - .and_then(|s|{ - if s.len() == 64 { - hex::decode(&s).ok() - } else { - None - }} - ) - { - Some(s) => ed25519::SecretKey::from_bytes(s), - _ => ed25519::SecretKey::from_bytes(&mut b), - } - }, - ed25519::SecretKey::generate, - |b| b.as_ref().to_vec() - ) - .map(ed25519::Keypair::from) - .map(Keypair::Ed25519), + Ed25519(Secret::New) => Ok(Keypair::generate_ed25519()), + + Ed25519(Secret::Input(k)) => Ok(Keypair::Ed25519(k.into())), + + Ed25519(Secret::File(f)) => get_secret( + f, + |mut b| match String::from_utf8(b.to_vec()).ok().and_then(|s| { + if s.len() == 64 { + hex::decode(&s).ok() + } else { + None + } + }) { + Some(s) => ed25519::SecretKey::from_bytes(s), + _ => ed25519::SecretKey::from_bytes(&mut b), + }, + ed25519::SecretKey::generate, + |b| b.as_ref().to_vec(), + ) + .map(ed25519::Keypair::from) + .map(Keypair::Ed25519), } } } @@ -770,9 +749,9 @@ where W: Fn(&K) -> Vec, { std::fs::read(&file) - .and_then(|mut sk_bytes| - parse(&mut sk_bytes) - .map_err(|e| 
io::Error::new(io::ErrorKind::InvalidData, e))) + .and_then(|mut sk_bytes| { + parse(&mut sk_bytes).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) + }) .or_else(|e| { if e.kind() == io::ErrorKind::NotFound { file.as_ref().parent().map_or(Ok(()), fs::create_dir_all)?; @@ -790,7 +769,7 @@ where /// Write secret bytes to a file. fn write_secret_file

(path: P, sk_bytes: &[u8]) -> io::Result<()> where - P: AsRef + P: AsRef, { let mut file = open_secret_file(&path)?; file.write_all(sk_bytes) @@ -800,26 +779,19 @@ where #[cfg(unix)] fn open_secret_file

(path: P) -> io::Result where - P: AsRef + P: AsRef, { use std::os::unix::fs::OpenOptionsExt; - fs::OpenOptions::new() - .write(true) - .create_new(true) - .mode(0o600) - .open(path) + fs::OpenOptions::new().write(true).create_new(true).mode(0o600).open(path) } /// Opens a file containing a secret key in write mode. #[cfg(not(unix))] fn open_secret_file

(path: P) -> Result where - P: AsRef + P: AsRef, { - fs::OpenOptions::new() - .write(true) - .create_new(true) - .open(path) + fs::OpenOptions::new().write(true).create_new(true).open(path) } #[cfg(test)] @@ -835,7 +807,7 @@ mod tests { match kp { Keypair::Ed25519(p) => p.secret().as_ref().iter().cloned().collect(), Keypair::Secp256k1(p) => p.secret().to_bytes().to_vec(), - _ => panic!("Unexpected keypair.") + _ => panic!("Unexpected keypair."), } } diff --git a/client/network/src/discovery.rs b/client/network/src/discovery.rs index 0f2a501bcdeff..ac50c6d8cc5eb 100644 --- a/client/network/src/discovery.rs +++ b/client/network/src/discovery.rs @@ -47,26 +47,42 @@ //! of a node's address, you must call `add_self_reported_address`. //! -use crate::config::ProtocolId; -use crate::utils::LruHashSet; +use crate::{config::ProtocolId, utils::LruHashSet}; use futures::prelude::*; use futures_timer::Delay; use ip_network::IpNetwork; -use libp2p::core::{connection::{ConnectionId, ListenerId}, ConnectedPoint, Multiaddr, PeerId, PublicKey}; -use libp2p::swarm::{NetworkBehaviour, NetworkBehaviourAction, PollParameters, ProtocolsHandler, IntoProtocolsHandler}; -use libp2p::swarm::protocols_handler::multi::IntoMultiHandler; -use libp2p::kad::{Kademlia, KademliaBucketInserts, KademliaConfig, KademliaEvent, QueryResult, Quorum, Record}; -use libp2p::kad::GetClosestPeersError; -use libp2p::kad::handler::KademliaHandlerProto; -use libp2p::kad::QueryId; -use libp2p::kad::record::{self, store::{MemoryStore, RecordStore}}; #[cfg(not(target_os = "unknown"))] use libp2p::mdns::{Mdns, MdnsConfig, MdnsEvent}; -use libp2p::multiaddr::Protocol; +use libp2p::{ + core::{ + connection::{ConnectionId, ListenerId}, + ConnectedPoint, Multiaddr, PeerId, PublicKey, + }, + kad::{ + handler::KademliaHandlerProto, + record::{ + self, + store::{MemoryStore, RecordStore}, + }, + GetClosestPeersError, Kademlia, KademliaBucketInserts, KademliaConfig, KademliaEvent, + QueryId, QueryResult, Quorum, Record, + 
}, + multiaddr::Protocol, + swarm::{ + protocols_handler::multi::IntoMultiHandler, IntoProtocolsHandler, NetworkBehaviour, + NetworkBehaviourAction, PollParameters, ProtocolsHandler, + }, +}; use log::{debug, info, trace, warn}; -use std::{cmp, collections::{HashMap, HashSet, VecDeque}, io, num::NonZeroUsize, time::Duration}; -use std::task::{Context, Poll}; use sp_core::hexdisplay::HexDisplay; +use std::{ + cmp, + collections::{HashMap, HashSet, VecDeque}, + io, + num::NonZeroUsize, + task::{Context, Poll}, + time::Duration, +}; /// Maximum number of known external addresses that we will cache. /// This only affects whether we will log whenever we (re-)discover @@ -101,7 +117,7 @@ impl DiscoveryConfig { discovery_only_if_under_num: std::u64::MAX, enable_mdns: false, kademlia_disjoint_query_paths: false, - protocol_ids: HashSet::new() + protocol_ids: HashSet::new(), } } @@ -114,7 +130,7 @@ impl DiscoveryConfig { /// Set custom nodes which never expire, e.g. bootstrap or reserved nodes. pub fn with_user_defined(&mut self, user_defined: I) -> &mut Self where - I: IntoIterator + I: IntoIterator, { self.user_defined.extend(user_defined); self @@ -152,7 +168,7 @@ impl DiscoveryConfig { pub fn add_protocol(&mut self, id: ProtocolId) -> &mut Self { if self.protocol_ids.contains(&id) { warn!(target: "sub-libp2p", "Discovery already registered for protocol {:?}", id); - return self; + return self } self.protocol_ids.insert(id); @@ -181,7 +197,8 @@ impl DiscoveryConfig { protocol_ids, } = self; - let kademlias = protocol_ids.into_iter() + let kademlias = protocol_ids + .into_iter() .map(|protocol_id| { let proto_name = protocol_name_from_protocol_id(&protocol_id); @@ -227,7 +244,7 @@ impl DiscoveryConfig { allow_non_globals_in_dht, known_external_addresses: LruHashSet::new( NonZeroUsize::new(MAX_KNOWN_EXTERNAL_ADDRESSES) - .expect("value is a constant; constant is non-zero; qed.") + .expect("value is a constant; constant is non-zero; qed."), ), } } @@ -305,7 +322,7 @@ impl 
DiscoveryBehaviour { &mut self, peer_id: &PeerId, supported_protocols: impl Iterator>, - addr: Multiaddr + addr: Multiaddr, ) { if !self.allow_non_globals_in_dht && !self.can_add_to_dht(&addr) { log::trace!(target: "sub-libp2p", "Ignoring self-reported non-global address {} from {}.", addr, peer_id); @@ -353,7 +370,8 @@ impl DiscoveryBehaviour { for k in self.kademlias.values_mut() { if let Err(e) = k.put_record(Record::new(key.clone(), value.clone()), Quorum::All) { warn!(target: "sub-libp2p", "Libp2p => Failed to put record: {:?}", e); - self.pending_events.push_back(DiscoveryOut::ValuePutFailed(key.clone(), Duration::from_secs(0))); + self.pending_events + .push_back(DiscoveryOut::ValuePutFailed(key.clone(), Duration::from_secs(0))); } } } @@ -362,14 +380,16 @@ impl DiscoveryBehaviour { /// /// Identifies Kademlia instances by their [`ProtocolId`] and kbuckets by the base 2 logarithm /// of their lower bound. - pub fn num_entries_per_kbucket(&mut self) -> impl ExactSizeIterator)> { - self.kademlias.iter_mut() - .map(|(id, kad)| { - let buckets = kad.kbuckets() - .map(|bucket| (bucket.range().0.ilog2().unwrap_or(0), bucket.iter().count())) - .collect(); - (id, buckets) - }) + pub fn num_entries_per_kbucket( + &mut self, + ) -> impl ExactSizeIterator)> { + self.kademlias.iter_mut().map(|(id, kad)| { + let buckets = kad + .kbuckets() + .map(|bucket| (bucket.range().0.ilog2().unwrap_or(0), bucket.iter().count())) + .collect(); + (id, buckets) + }) } /// Returns the number of records in the Kademlia record stores. @@ -382,7 +402,9 @@ impl DiscoveryBehaviour { } /// Returns the total size in bytes of all the records in the Kademlia record stores. - pub fn kademlia_records_total_size(&mut self) -> impl ExactSizeIterator { + pub fn kademlia_records_total_size( + &mut self, + ) -> impl ExactSizeIterator { // Note that this code is ok only because we use a `MemoryStore`. 
If the records were // for example stored on disk, this would load every single one of them every single time. self.kademlias.iter_mut().map(|(id, kad)| { @@ -402,9 +424,9 @@ impl DiscoveryBehaviour { let ip = match addr.iter().next() { Some(Protocol::Ip4(ip)) => IpNetwork::from(ip), Some(Protocol::Ip6(ip)) => IpNetwork::from(ip), - Some(Protocol::Dns(_)) | Some(Protocol::Dns4(_)) | Some(Protocol::Dns6(_)) - => return true, - _ => return false + Some(Protocol::Dns(_)) | Some(Protocol::Dns4(_)) | Some(Protocol::Dns6(_)) => + return true, + _ => return false, }; ip.is_global() } @@ -459,19 +481,24 @@ impl NetworkBehaviour for DiscoveryBehaviour { type OutEvent = DiscoveryOut; fn new_handler(&mut self) -> Self::ProtocolsHandler { - let iter = self.kademlias.iter_mut() + let iter = self + .kademlias + .iter_mut() .map(|(p, k)| (p.clone(), NetworkBehaviour::new_handler(k))); - IntoMultiHandler::try_from_iter(iter) - .expect("There can be at most one handler per `ProtocolId` and \ + IntoMultiHandler::try_from_iter(iter).expect( + "There can be at most one handler per `ProtocolId` and \ protocol names contain the `ProtocolId` so no two protocol \ names in `self.kademlias` can be equal which is the only error \ `try_from_iter` can return, therefore this call is guaranteed \ - to succeed; qed") + to succeed; qed", + ) } fn addresses_of_peer(&mut self, peer_id: &PeerId) -> Vec { - let mut list = self.user_defined.iter() + let mut list = self + .user_defined + .iter() .filter_map(|(p, a)| if p == peer_id { Some(a.clone()) } else { None }) .collect::>(); @@ -488,7 +515,7 @@ impl NetworkBehaviour for DiscoveryBehaviour { list_to_filter.retain(|addr| { if let Some(Protocol::Ip4(addr)) = addr.iter().next() { if addr.is_private() { - return false; + return false } } @@ -504,7 +531,12 @@ impl NetworkBehaviour for DiscoveryBehaviour { list } - fn inject_connection_established(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn 
inject_connection_established( + &mut self, + peer_id: &PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { self.num_connections += 1; for k in self.kademlias.values_mut() { NetworkBehaviour::inject_connection_established(k, peer_id, conn, endpoint) @@ -517,7 +549,12 @@ impl NetworkBehaviour for DiscoveryBehaviour { } } - fn inject_connection_closed(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn inject_connection_closed( + &mut self, + peer_id: &PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { self.num_connections -= 1; for k in self.kademlias.values_mut() { NetworkBehaviour::inject_connection_closed(k, peer_id, conn, endpoint) @@ -534,7 +571,7 @@ impl NetworkBehaviour for DiscoveryBehaviour { &mut self, peer_id: Option<&PeerId>, addr: &Multiaddr, - error: &dyn std::error::Error + error: &dyn std::error::Error, ) { for k in self.kademlias.values_mut() { NetworkBehaviour::inject_addr_reach_failure(k, peer_id, addr, error) @@ -556,8 +593,7 @@ impl NetworkBehaviour for DiscoveryBehaviour { } fn inject_new_external_addr(&mut self, addr: &Multiaddr) { - let new_addr = addr.clone() - .with(Protocol::P2p(self.local_peer_id.clone().into())); + let new_addr = addr.clone().with(Protocol::P2p(self.local_peer_id.clone().into())); // NOTE: we might re-discover the same address multiple times // in which case we just want to refrain from logging. @@ -627,10 +663,10 @@ impl NetworkBehaviour for DiscoveryBehaviour { <::Handler as ProtocolsHandler>::InEvent, Self::OutEvent, >, - > { + >{ // Immediately process the content of `discovered`. if let Some(ev) = self.pending_events.pop_front() { - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)); + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)) } // Poll the stream that fires when we need to start a random Kademlia query. 
@@ -657,12 +693,14 @@ impl NetworkBehaviour for DiscoveryBehaviour { // Schedule the next random query with exponentially increasing delay, // capped at 60 seconds. *next_kad_random_query = Delay::new(self.duration_to_next_kad); - self.duration_to_next_kad = cmp::min(self.duration_to_next_kad * 2, - Duration::from_secs(60)); + self.duration_to_next_kad = + cmp::min(self.duration_to_next_kad * 2, Duration::from_secs(60)); if actually_started { - let ev = DiscoveryOut::RandomKademliaStarted(self.kademlias.keys().cloned().collect()); - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)); + let ev = DiscoveryOut::RandomKademliaStarted( + self.kademlias.keys().cloned().collect(), + ); + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)) } } } @@ -674,86 +712,112 @@ impl NetworkBehaviour for DiscoveryBehaviour { NetworkBehaviourAction::GenerateEvent(ev) => match ev { KademliaEvent::RoutingUpdated { peer, .. } => { let ev = DiscoveryOut::Discovered(peer); - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)); - } + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)) + }, KademliaEvent::UnroutablePeer { peer, .. } => { let ev = DiscoveryOut::UnroutablePeer(peer); - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)); - } + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)) + }, KademliaEvent::RoutablePeer { peer, .. } => { let ev = DiscoveryOut::Discovered(peer); - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)); - } + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)) + }, KademliaEvent::PendingRoutablePeer { .. } => { // We are not interested in this event at the moment. - } - KademliaEvent::QueryResult { result: QueryResult::GetClosestPeers(res), .. } => { - match res { - Err(GetClosestPeersError::Timeout { key, peers }) => { - debug!(target: "sub-libp2p", + }, + KademliaEvent::QueryResult { + result: QueryResult::GetClosestPeers(res), + .. 
+ } => match res { + Err(GetClosestPeersError::Timeout { key, peers }) => { + debug!(target: "sub-libp2p", "Libp2p => Query for {:?} timed out with {} results", HexDisplay::from(&key), peers.len()); - }, - Ok(ok) => { - trace!(target: "sub-libp2p", + }, + Ok(ok) => { + trace!(target: "sub-libp2p", "Libp2p => Query for {:?} yielded {:?} results", HexDisplay::from(&ok.key), ok.peers.len()); - if ok.peers.is_empty() && self.num_connections != 0 { - debug!(target: "sub-libp2p", "Libp2p => Random Kademlia query has yielded empty \ + if ok.peers.is_empty() && self.num_connections != 0 { + debug!(target: "sub-libp2p", "Libp2p => Random Kademlia query has yielded empty \ results"); - } } - } - } - KademliaEvent::QueryResult { result: QueryResult::GetRecord(res), stats, .. } => { + }, + }, + KademliaEvent::QueryResult { + result: QueryResult::GetRecord(res), + stats, + .. + } => { let ev = match res { Ok(ok) => { - let results = ok.records + let results = ok + .records .into_iter() .map(|r| (r.record.key, r.record.value)) .collect(); - DiscoveryOut::ValueFound(results, stats.duration().unwrap_or_else(Default::default)) - } + DiscoveryOut::ValueFound( + results, + stats.duration().unwrap_or_else(Default::default), + ) + }, Err(e @ libp2p::kad::GetRecordError::NotFound { .. 
}) => { trace!(target: "sub-libp2p", "Libp2p => Failed to get record: {:?}", e); - DiscoveryOut::ValueNotFound(e.into_key(), stats.duration().unwrap_or_else(Default::default)) - } + DiscoveryOut::ValueNotFound( + e.into_key(), + stats.duration().unwrap_or_else(Default::default), + ) + }, Err(e) => { debug!(target: "sub-libp2p", "Libp2p => Failed to get record: {:?}", e); - DiscoveryOut::ValueNotFound(e.into_key(), stats.duration().unwrap_or_else(Default::default)) - } + DiscoveryOut::ValueNotFound( + e.into_key(), + stats.duration().unwrap_or_else(Default::default), + ) + }, }; - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)); - } - KademliaEvent::QueryResult { result: QueryResult::PutRecord(res), stats, .. } => { + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)) + }, + KademliaEvent::QueryResult { + result: QueryResult::PutRecord(res), + stats, + .. + } => { let ev = match res { - Ok(ok) => DiscoveryOut::ValuePut(ok.key, stats.duration().unwrap_or_else(Default::default)), + Ok(ok) => DiscoveryOut::ValuePut( + ok.key, + stats.duration().unwrap_or_else(Default::default), + ), Err(e) => { debug!(target: "sub-libp2p", "Libp2p => Failed to put record: {:?}", e); - DiscoveryOut::ValuePutFailed(e.into_key(), stats.duration().unwrap_or_else(Default::default)) - } + DiscoveryOut::ValuePutFailed( + e.into_key(), + stats.duration().unwrap_or_else(Default::default), + ) + }, }; - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)); - } - KademliaEvent::QueryResult { result: QueryResult::RepublishRecord(res), .. } => { - match res { - Ok(ok) => debug!(target: "sub-libp2p", + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)) + }, + KademliaEvent::QueryResult { + result: QueryResult::RepublishRecord(res), + .. 
+ } => match res { + Ok(ok) => debug!(target: "sub-libp2p", "Libp2p => Record republished: {:?}", ok.key), - Err(e) => debug!(target: "sub-libp2p", + Err(e) => debug!(target: "sub-libp2p", "Libp2p => Republishing of record {:?} failed with: {:?}", - e.key(), e) - } - } + e.key(), e), + }, // We never start any other type of query. e => { warn!(target: "sub-libp2p", "Libp2p => Unhandled Kademlia event: {:?}", e) - } - } + }, + }, NetworkBehaviourAction::DialAddress { address } => return Poll::Ready(NetworkBehaviourAction::DialAddress { address }), NetworkBehaviourAction::DialPeer { peer_id, condition } => @@ -762,10 +826,13 @@ impl NetworkBehaviour for DiscoveryBehaviour { return Poll::Ready(NetworkBehaviourAction::NotifyHandler { peer_id, handler, - event: (pid.clone(), event) + event: (pid.clone(), event), }), NetworkBehaviourAction::ReportObservedAddr { address, score } => - return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { address, score }), + return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { + address, + score, + }), } } } @@ -774,29 +841,30 @@ impl NetworkBehaviour for DiscoveryBehaviour { #[cfg(not(target_os = "unknown"))] while let Poll::Ready(ev) = self.mdns.poll(cx, params) { match ev { - NetworkBehaviourAction::GenerateEvent(event) => { - match event { - MdnsEvent::Discovered(list) => { - if self.num_connections >= self.discovery_only_if_under_num { - continue; - } + NetworkBehaviourAction::GenerateEvent(event) => match event { + MdnsEvent::Discovered(list) => { + if self.num_connections >= self.discovery_only_if_under_num { + continue + } - self.pending_events.extend(list.map(|(peer_id, _)| DiscoveryOut::Discovered(peer_id))); - if let Some(ev) = self.pending_events.pop_front() { - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)); - } - }, - MdnsEvent::Expired(_) => {} - } + self.pending_events + .extend(list.map(|(peer_id, _)| DiscoveryOut::Discovered(peer_id))); + if let Some(ev) = 
self.pending_events.pop_front() { + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(ev)) + } + }, + MdnsEvent::Expired(_) => {}, }, NetworkBehaviourAction::DialAddress { address } => return Poll::Ready(NetworkBehaviourAction::DialAddress { address }), NetworkBehaviourAction::DialPeer { peer_id, condition } => return Poll::Ready(NetworkBehaviourAction::DialPeer { peer_id, condition }), - NetworkBehaviourAction::NotifyHandler { event, .. } => - match event {}, // `event` is an enum with no variant + NetworkBehaviourAction::NotifyHandler { event, .. } => match event {}, // `event` is an enum with no variant NetworkBehaviourAction::ReportObservedAddr { address, score } => - return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { address, score }), + return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { + address, + score, + }), } } @@ -839,15 +907,14 @@ impl MdnsWrapper { ) -> Poll> { loop { match self { - MdnsWrapper::Instantiating(fut) => { + MdnsWrapper::Instantiating(fut) => *self = match futures::ready!(fut.as_mut().poll(cx)) { Ok(mdns) => MdnsWrapper::Ready(mdns), Err(err) => { warn!(target: "sub-libp2p", "Failed to initialize mDNS: {:?}", err); MdnsWrapper::Disabled }, - } - } + }, MdnsWrapper::Ready(mdns) => return mdns.poll(cx, params), MdnsWrapper::Disabled => return Poll::Pending, } @@ -857,17 +924,20 @@ impl MdnsWrapper { #[cfg(test)] mod tests { + use super::{protocol_name_from_protocol_id, DiscoveryConfig, DiscoveryOut}; use crate::config::ProtocolId; use futures::prelude::*; - use libp2p::identity::Keypair; - use libp2p::{Multiaddr, PeerId}; - use libp2p::core::upgrade; - use libp2p::core::transport::{Transport, MemoryTransport}; - use libp2p::noise; - use libp2p::swarm::Swarm; - use libp2p::yamux; + use libp2p::{ + core::{ + transport::{MemoryTransport, Transport}, + upgrade, + }, + identity::Keypair, + noise, + swarm::Swarm, + yamux, Multiaddr, PeerId, + }; use std::{collections::HashSet, task::Poll}; - use 
super::{DiscoveryConfig, DiscoveryOut, protocol_name_from_protocol_id}; #[test] fn discovery_working() { @@ -876,50 +946,56 @@ mod tests { // Build swarms whose behaviour is `DiscoveryBehaviour`, each aware of // the first swarm via `with_user_defined`. - let mut swarms = (0..25).map(|i| { - let keypair = Keypair::generate_ed25519(); - - let noise_keys = noise::Keypair::::new() - .into_authentic(&keypair) - .unwrap(); - - let transport = MemoryTransport - .upgrade(upgrade::Version::V1) - .authenticate(noise::NoiseConfig::xx(noise_keys).into_authenticated()) - .multiplex(yamux::YamuxConfig::default()) - .boxed(); - - let behaviour = { - let mut config = DiscoveryConfig::new(keypair.public()); - config.with_user_defined(first_swarm_peer_id_and_addr.clone()) - .allow_private_ipv4(true) - .allow_non_globals_in_dht(true) - .discovery_limit(50) - .add_protocol(protocol_id.clone()); - - config.finish() - }; + let mut swarms = (0..25) + .map(|i| { + let keypair = Keypair::generate_ed25519(); + + let noise_keys = + noise::Keypair::::new().into_authentic(&keypair).unwrap(); + + let transport = MemoryTransport + .upgrade(upgrade::Version::V1) + .authenticate(noise::NoiseConfig::xx(noise_keys).into_authenticated()) + .multiplex(yamux::YamuxConfig::default()) + .boxed(); + + let behaviour = { + let mut config = DiscoveryConfig::new(keypair.public()); + config + .with_user_defined(first_swarm_peer_id_and_addr.clone()) + .allow_private_ipv4(true) + .allow_non_globals_in_dht(true) + .discovery_limit(50) + .add_protocol(protocol_id.clone()); + + config.finish() + }; - let mut swarm = Swarm::new(transport, behaviour, keypair.public().into_peer_id()); - let listen_addr: Multiaddr = format!("/memory/{}", rand::random::()).parse().unwrap(); + let mut swarm = Swarm::new(transport, behaviour, keypair.public().into_peer_id()); + let listen_addr: Multiaddr = + format!("/memory/{}", rand::random::()).parse().unwrap(); - if i == 0 { - first_swarm_peer_id_and_addr = 
Some((keypair.public().into_peer_id(), listen_addr.clone())) - } + if i == 0 { + first_swarm_peer_id_and_addr = + Some((keypair.public().into_peer_id(), listen_addr.clone())) + } - swarm.listen_on(listen_addr.clone()).unwrap(); - (swarm, listen_addr) - }).collect::>(); + swarm.listen_on(listen_addr.clone()).unwrap(); + (swarm, listen_addr) + }) + .collect::>(); // Build a `Vec>` with the list of nodes remaining to be discovered. - let mut to_discover = (0..swarms.len()).map(|n| { - (0..swarms.len()) - // Skip the first swarm as all other swarms already know it. - .skip(1) - .filter(|p| *p != n) - .map(|p| Swarm::local_peer_id(&swarms[p].0).clone()) - .collect::>() - }).collect::>(); + let mut to_discover = (0..swarms.len()) + .map(|n| { + (0..swarms.len()) + // Skip the first swarm as all other swarms already know it. + .skip(1) + .filter(|p| *p != n) + .map(|p| Swarm::local_peer_id(&swarms[p].0).clone()) + .collect::>() + }) + .collect::>(); let fut = futures::future::poll_fn(move |cx| { 'polling: loop { @@ -927,13 +1003,17 @@ mod tests { match swarms[swarm_n].0.poll_next_unpin(cx) { Poll::Ready(Some(e)) => { match e { - DiscoveryOut::UnroutablePeer(other) | DiscoveryOut::Discovered(other) => { + DiscoveryOut::UnroutablePeer(other) | + DiscoveryOut::Discovered(other) => { // Call `add_self_reported_address` to simulate identify happening. 
- let addr = swarms.iter().find_map(|(s, a)| - if s.behaviour().local_peer_id == other { - Some(a.clone()) - } else { - None + let addr = swarms + .iter() + .find_map(|(s, a)| { + if s.behaviour().local_peer_id == other { + Some(a.clone()) + } else { + None + } }) .unwrap(); swarms[swarm_n].0.behaviour_mut().add_self_reported_address( @@ -945,11 +1025,13 @@ mod tests { to_discover[swarm_n].remove(&other); }, DiscoveryOut::RandomKademliaStarted(_) => {}, - e => {panic!("Unexpected event: {:?}", e)}, + e => { + panic!("Unexpected event: {:?}", e) + }, } continue 'polling - } - _ => {} + }, + _ => {}, } } break @@ -973,7 +1055,8 @@ mod tests { let mut discovery = { let keypair = Keypair::generate_ed25519(); let mut config = DiscoveryConfig::new(keypair.public()); - config.allow_private_ipv4(true) + config + .allow_private_ipv4(true) .allow_non_globals_in_dht(true) .discovery_limit(50) .add_protocol(supported_protocol_id.clone()); @@ -992,7 +1075,8 @@ mod tests { for kademlia in discovery.kademlias.values_mut() { assert!( - kademlia.kbucket(remote_peer_id.clone()) + kademlia + .kbucket(remote_peer_id.clone()) .expect("Remote peer id not to be equal to local peer id.") .is_empty(), "Expect peer with unsupported protocol not to be added." @@ -1009,7 +1093,8 @@ mod tests { for kademlia in discovery.kademlias.values_mut() { assert_eq!( 1, - kademlia.kbucket(remote_peer_id.clone()) + kademlia + .kbucket(remote_peer_id.clone()) .expect("Remote peer id not to be equal to local peer id.") .num_entries(), "Expect peer with supported protocol to be added." 
@@ -1025,7 +1110,8 @@ mod tests { let mut discovery = { let keypair = Keypair::generate_ed25519(); let mut config = DiscoveryConfig::new(keypair.public()); - config.allow_private_ipv4(true) + config + .allow_private_ipv4(true) .allow_non_globals_in_dht(true) .discovery_limit(50) .add_protocol(protocol_a.clone()) @@ -1045,17 +1131,20 @@ mod tests { assert_eq!( 1, - discovery.kademlias.get_mut(&protocol_a) + discovery + .kademlias + .get_mut(&protocol_a) .expect("Kademlia instance to exist.") .kbucket(remote_peer_id.clone()) .expect("Remote peer id not to be equal to local peer id.") .num_entries(), "Expected remote peer to be added to `protocol_a` Kademlia instance.", - ); assert!( - discovery.kademlias.get_mut(&protocol_b) + discovery + .kademlias + .get_mut(&protocol_b) .expect("Kademlia instance to exist.") .kbucket(remote_peer_id.clone()) .expect("Remote peer id not to be equal to local peer id.") diff --git a/client/network/src/error.rs b/client/network/src/error.rs index 2a226b58b46a5..32fc6f9e1e31c 100644 --- a/client/network/src/error.rs +++ b/client/network/src/error.rs @@ -19,7 +19,7 @@ //! Substrate network possible errors. use crate::config::TransportConfig; -use libp2p::{PeerId, Multiaddr}; +use libp2p::{Multiaddr, PeerId}; use std::{borrow::Cow, fmt}; @@ -38,7 +38,7 @@ pub enum Error { fmt = "The same bootnode (`{}`) is registered with two different peer ids: `{}` and `{}`", address, first_id, - second_id, + second_id )] DuplicateBootnode { /// The address of the bootnode. @@ -53,7 +53,7 @@ pub enum Error { /// The network addresses are invalid because they don't match the transport. #[display( fmt = "The following addresses are invalid because they don't match the transport: {:?}", - addresses, + addresses )] AddressesForAnotherTransport { /// Transport used. 
diff --git a/client/network/src/lib.rs b/client/network/src/lib.rs index b43836cacaa54..90abf309c2530 100644 --- a/client/network/src/lib.rs +++ b/client/network/src/lib.rs @@ -247,9 +247,9 @@ mod behaviour; mod chain; -mod peer_info; mod discovery; mod on_demand_layer; +mod peer_info; mod protocol; mod request_responses; mod schema; @@ -257,22 +257,25 @@ mod service; mod transport; mod utils; -pub mod block_request_handler; pub mod bitswap; -pub mod light_client_requests; -pub mod state_request_handler; +pub mod block_request_handler; pub mod config; pub mod error; +pub mod light_client_requests; pub mod network_state; +pub mod state_request_handler; pub mod transactions; #[doc(inline)] pub use libp2p::{multiaddr, Multiaddr, PeerId}; -pub use protocol::{event::{DhtEvent, Event, ObservedRole}, PeerInfo}; -pub use protocol::sync::{SyncState, StateDownloadProgress}; +pub use protocol::{ + event::{DhtEvent, Event, ObservedRole}, + sync::{StateDownloadProgress, SyncState}, + PeerInfo, +}; pub use service::{ - NetworkService, NetworkWorker, RequestFailure, OutboundFailure, NotificationSender, - NotificationSenderReady, IfDisconnected, + IfDisconnected, NetworkService, NetworkWorker, NotificationSender, NotificationSenderReady, + OutboundFailure, RequestFailure, }; pub use sc_peerset::ReputationChange; diff --git a/client/network/src/light_client_requests.rs b/client/network/src/light_client_requests.rs index f859a35f45b24..8489585e28831 100644 --- a/client/network/src/light_client_requests.rs +++ b/client/network/src/light_client_requests.rs @@ -18,13 +18,12 @@ //! Helpers for outgoing and incoming light client requests. -/// For outgoing light client requests. -pub mod sender; /// For incoming light client requests. pub mod handler; +/// For outgoing light client requests. 
+pub mod sender; -use crate::config::ProtocolId; -use crate::request_responses::ProtocolConfig; +use crate::{config::ProtocolId, request_responses::ProtocolConfig}; use std::time::Duration; @@ -51,24 +50,30 @@ pub fn generate_protocol_config(protocol_id: &ProtocolId) -> ProtocolConfig { #[cfg(test)] mod tests { use super::*; - use crate::request_responses::IncomingRequest; - use crate::config::ProtocolId; + use crate::{config::ProtocolId, request_responses::IncomingRequest}; use assert_matches::assert_matches; - use futures::executor::{block_on, LocalPool}; - use futures::task::Spawn; - use futures::{channel::oneshot, prelude::*}; + use futures::{ + channel::oneshot, + executor::{block_on, LocalPool}, + prelude::*, + task::Spawn, + }; use libp2p::PeerId; - use sc_client_api::StorageProof; - use sc_client_api::light::{RemoteCallRequest, RemoteChangesRequest, RemoteHeaderRequest}; - use sc_client_api::light::{self, RemoteReadRequest, RemoteBodyRequest, ChangesProof}; - use sc_client_api::{FetchChecker, RemoteReadChildRequest}; + use sc_client_api::{ + light::{ + self, ChangesProof, RemoteBodyRequest, RemoteCallRequest, RemoteChangesRequest, + RemoteHeaderRequest, RemoteReadRequest, + }, + FetchChecker, RemoteReadChildRequest, StorageProof, + }; use sp_blockchain::Error as ClientError; use sp_core::storage::ChildInfo; - use sp_runtime::generic::Header; - use sp_runtime::traits::{BlakeTwo256, Block as BlockT, NumberFor}; - use std::collections::HashMap; - use std::sync::Arc; + use sp_runtime::{ + generic::Header, + traits::{BlakeTwo256, Block as BlockT, NumberFor}, + }; + use std::{collections::HashMap, sync::Arc}; pub struct DummyFetchChecker { pub ok: bool, @@ -94,12 +99,7 @@ mod tests { _: StorageProof, ) -> Result, Option>>, ClientError> { match self.ok { - true => Ok(request - .keys - .iter() - .cloned() - .map(|k| (k, Some(vec![42]))) - .collect()), + true => Ok(request.keys.iter().cloned().map(|k| (k, Some(vec![42]))).collect()), false => 
Err(ClientError::Backend("Test error".into())), } } @@ -110,12 +110,7 @@ mod tests { _: StorageProof, ) -> Result, Option>>, ClientError> { match self.ok { - true => Ok(request - .keys - .iter() - .cloned() - .map(|k| (k, Some(vec![42]))) - .collect()), + true => Ok(request.keys.iter().cloned().map(|k| (k, Some(vec![42]))).collect()), false => Err(ClientError::Backend("Test error".into())), } } @@ -184,7 +179,8 @@ mod tests { fn send_receive(request: sender::Request, pool: &LocalPool) { let client = Arc::new(substrate_test_runtime_client::new()); - let (handler, protocol_config) = handler::LightClientRequestHandler::new(&protocol_id(), client); + let (handler, protocol_config) = + handler::LightClientRequestHandler::new(&protocol_id(), client); pool.spawner().spawn_obj(handler.run().boxed().into()).unwrap(); let (_peer_set, peer_set_handle) = peerset(); @@ -199,18 +195,28 @@ mod tests { sender.inject_connected(PeerId::random()); sender.request(request).unwrap(); - let sender::OutEvent::SendRequest { pending_response, request, .. } = block_on(sender.next()).unwrap(); + let sender::OutEvent::SendRequest { pending_response, request, .. 
} = + block_on(sender.next()).unwrap(); let (tx, rx) = oneshot::channel(); block_on(protocol_config.inbound_queue.unwrap().send(IncomingRequest { peer: PeerId::random(), payload: request, pending_response: tx, - })).unwrap(); - pool.spawner().spawn_obj(async move { - pending_response.send(Ok(rx.await.unwrap().result.unwrap())).unwrap(); - }.boxed().into()).unwrap(); - - pool.spawner().spawn_obj(sender.for_each(|_| future::ready(())).boxed().into()).unwrap(); + })) + .unwrap(); + pool.spawner() + .spawn_obj( + async move { + pending_response.send(Ok(rx.await.unwrap().result.unwrap())).unwrap(); + } + .boxed() + .into(), + ) + .unwrap(); + + pool.spawner() + .spawn_obj(sender.for_each(|_| future::ready(())).boxed().into()) + .unwrap(); } #[test] @@ -225,10 +231,7 @@ mod tests { }; let mut pool = LocalPool::new(); - send_receive(sender::Request::Call { - request, - sender: chan.0, - }, &pool); + send_receive(sender::Request::Call { request, sender: chan.0 }, &pool); assert_eq!(vec![42], pool.run_until(chan.1).unwrap().unwrap()); // ^--- from `DummyFetchChecker::check_execution_proof` } @@ -243,17 +246,10 @@ mod tests { retry_count: None, }; let mut pool = LocalPool::new(); - send_receive(sender::Request::Read { - request, - sender: chan.0, - }, &pool); + send_receive(sender::Request::Read { request, sender: chan.0 }, &pool); assert_eq!( Some(vec![42]), - pool.run_until(chan.1) - .unwrap() - .unwrap() - .remove(&b":key"[..]) - .unwrap() + pool.run_until(chan.1).unwrap().unwrap().remove(&b":key"[..]).unwrap() ); // ^--- from `DummyFetchChecker::check_read_proof` } @@ -270,17 +266,10 @@ mod tests { retry_count: None, }; let mut pool = LocalPool::new(); - send_receive(sender::Request::ReadChild { - request, - sender: chan.0, - }, &pool); + send_receive(sender::Request::ReadChild { request, sender: chan.0 }, &pool); assert_eq!( Some(vec![42]), - pool.run_until(chan.1) - .unwrap() - .unwrap() - .remove(&b":key"[..]) - .unwrap() + 
pool.run_until(chan.1).unwrap().unwrap().remove(&b":key"[..]).unwrap() ); // ^--- from `DummyFetchChecker::check_read_child_proof` } @@ -295,15 +284,9 @@ mod tests { retry_count: None, }; let mut pool = LocalPool::new(); - send_receive(sender::Request::Header { - request, - sender: chan.0, - }, &pool); + send_receive(sender::Request::Header { request, sender: chan.0 }, &pool); // The remote does not know block 1: - assert_matches!( - pool.run_until(chan.1).unwrap(), - Err(ClientError::RemoteFetchFailed) - ); + assert_matches!(pool.run_until(chan.1).unwrap(), Err(ClientError::RemoteFetchFailed)); } #[test] @@ -324,10 +307,7 @@ mod tests { retry_count: None, }; let mut pool = LocalPool::new(); - send_receive(sender::Request::Changes { - request, - sender: chan.0, - }, &pool); + send_receive(sender::Request::Changes { request, sender: chan.0 }, &pool); assert_eq!(vec![(100, 2)], pool.run_until(chan.1).unwrap().unwrap()); // ^--- from `DummyFetchChecker::check_changes_proof` } diff --git a/client/network/src/light_client_requests/handler.rs b/client/network/src/light_client_requests/handler.rs index 1cfae0a3cb1df..b1ba92efe684c 100644 --- a/client/network/src/light_client_requests/handler.rs +++ b/client/network/src/light_client_requests/handler.rs @@ -22,34 +22,27 @@ //! [`crate::request_responses::RequestResponsesBehaviour`] with //! [`LightClientRequestHandler`](handler::LightClientRequestHandler). 
-use codec::{self, Encode, Decode}; use crate::{ chain::Client, config::ProtocolId, - schema, - PeerId, + request_responses::{IncomingRequest, OutgoingResponse, ProtocolConfig}, + schema, PeerId, }; -use crate::request_responses::{IncomingRequest, OutgoingResponse, ProtocolConfig}; -use futures::{channel::mpsc, prelude::*}; +use codec::{self, Decode, Encode}; +use futures::{channel::mpsc, prelude::*}; +use log::{debug, trace}; use prost::Message; -use sc_client_api::{ - StorageProof, - light -}; +use sc_client_api::{light, StorageProof}; use sc_peerset::ReputationChange; use sp_core::{ - storage::{ChildInfo, ChildType,StorageKey, PrefixedStorageKey}, hexdisplay::HexDisplay, + storage::{ChildInfo, ChildType, PrefixedStorageKey, StorageKey}, }; use sp_runtime::{ - traits::{Block, Zero}, generic::BlockId, + traits::{Block, Zero}, }; -use std::{ - collections::{BTreeMap}, - sync::Arc, -}; -use log::{trace, debug}; +use std::{collections::BTreeMap, sync::Arc}; const LOG_TARGET: &str = "light-client-request-handler"; @@ -62,10 +55,7 @@ pub struct LightClientRequestHandler { impl LightClientRequestHandler { /// Create a new [`crate::block_request_handler::BlockRequestHandler`]. - pub fn new( - protocol_id: &ProtocolId, - client: Arc>, - ) -> (Self, ProtocolConfig) { + pub fn new(protocol_id: &ProtocolId, client: Arc>) -> (Self, ProtocolConfig) { // For now due to lack of data on light client request handling in production systems, this // value is chosen to match the block request limit. 
let (tx, request_receiver) = mpsc::channel(20); @@ -86,7 +76,7 @@ impl LightClientRequestHandler { let response = OutgoingResponse { result: Ok(response_data), reputation_changes: Vec::new(), - sent_feedback: None + sent_feedback: None, }; match pending_response.send(response) { @@ -98,35 +88,36 @@ impl LightClientRequestHandler { Err(_) => debug!( target: LOG_TARGET, "Failed to handle light client request from {}: {}", - peer, HandleRequestError::SendResponse, + peer, + HandleRequestError::SendResponse, ), }; - } , + }, Err(e) => { debug!( target: LOG_TARGET, - "Failed to handle light client request from {}: {}", - peer, e, + "Failed to handle light client request from {}: {}", peer, e, ); let reputation_changes = match e { HandleRequestError::BadRequest(_) => { vec![ReputationChange::new(-(1 << 12), "bad request")] - } + }, _ => Vec::new(), }; let response = OutgoingResponse { result: Err(()), reputation_changes, - sent_feedback: None + sent_feedback: None, }; if pending_response.send(response).is_err() { debug!( target: LOG_TARGET, "Failed to handle light client request from {}: {}", - peer, HandleRequestError::SendResponse, + peer, + HandleRequestError::SendResponse, ); }; }, @@ -134,7 +125,6 @@ impl LightClientRequestHandler { } } - fn handle_request( &mut self, peer: PeerId, @@ -154,8 +144,8 @@ impl LightClientRequestHandler { Some(schema::v1::light::request::Request::RemoteChangesRequest(r)) => self.on_remote_changes_request(&peer, r)?, None => { - return Err(HandleRequestError::BadRequest("Remote request without request data.")); - } + return Err(HandleRequestError::BadRequest("Remote request without request data.")) + }, }; let mut data = Vec::new(); @@ -171,24 +161,30 @@ impl LightClientRequestHandler { ) -> Result { log::trace!( "Remote call request from {} ({} at {:?}).", - peer, request.method, request.block, + peer, + request.method, + request.block, ); let block = Decode::decode(&mut request.block.as_ref())?; - let proof = match 
self.client.execution_proof( - &BlockId::Hash(block), - &request.method, &request.data, - ) { - Ok((_, proof)) => proof, - Err(e) => { - log::trace!( - "remote call request from {} ({} at {:?}) failed with: {}", - peer, request.method, request.block, e, - ); - StorageProof::empty() - } - }; + let proof = + match self + .client + .execution_proof(&BlockId::Hash(block), &request.method, &request.data) + { + Ok((_, proof)) => proof, + Err(e) => { + log::trace!( + "remote call request from {} ({} at {:?}) failed with: {}", + peer, + request.method, + request.block, + e, + ); + StorageProof::empty() + }, + }; let response = { let r = schema::v1::light::RemoteCallResponse { proof: proof.encode() }; @@ -210,23 +206,28 @@ impl LightClientRequestHandler { log::trace!( "Remote read request from {} ({} at {:?}).", - peer, fmt_keys(request.keys.first(), request.keys.last()), request.block, + peer, + fmt_keys(request.keys.first(), request.keys.last()), + request.block, ); let block = Decode::decode(&mut request.block.as_ref())?; - let proof = match self.client.read_proof( - &BlockId::Hash(block), - &mut request.keys.iter().map(AsRef::as_ref), - ) { + let proof = match self + .client + .read_proof(&BlockId::Hash(block), &mut request.keys.iter().map(AsRef::as_ref)) + { Ok(proof) => proof, Err(error) => { log::trace!( "remote read request from {} ({} at {:?}) failed with: {}", - peer, fmt_keys(request.keys.first(), request.keys.last()), request.block, error, + peer, + fmt_keys(request.keys.first(), request.keys.last()), + request.block, + error, ); StorageProof::empty() - } + }, }; let response = { @@ -262,11 +263,13 @@ impl LightClientRequestHandler { Some((ChildType::ParentKeyId, storage_key)) => Ok(ChildInfo::new_default(storage_key)), None => Err(sp_blockchain::Error::InvalidChildStorageKey), }; - let proof = match child_info.and_then(|child_info| self.client.read_child_proof( - &BlockId::Hash(block), - &child_info, - &mut request.keys.iter().map(AsRef::as_ref) - )) { + let 
proof = match child_info.and_then(|child_info| { + self.client.read_child_proof( + &BlockId::Hash(block), + &child_info, + &mut request.keys.iter().map(AsRef::as_ref), + ) + }) { Ok(proof) => proof, Err(error) => { log::trace!( @@ -278,7 +281,7 @@ impl LightClientRequestHandler { error, ); StorageProof::empty() - } + }, }; let response = { @@ -302,10 +305,12 @@ impl LightClientRequestHandler { Err(error) => { log::trace!( "Remote header proof request from {} ({:?}) failed with: {}.", - peer, request.block, error + peer, + request.block, + error ); (Default::default(), StorageProof::empty()) - } + }, }; let response = { @@ -325,7 +330,11 @@ impl LightClientRequestHandler { "Remote changes proof request from {} for key {} ({:?}..{:?}).", peer, if !request.storage_key.is_empty() { - format!("{} : {}", HexDisplay::from(&request.storage_key), HexDisplay::from(&request.key)) + format!( + "{} : {}", + HexDisplay::from(&request.storage_key), + HexDisplay::from(&request.key) + ) } else { HexDisplay::from(&request.key).to_string() }, @@ -344,10 +353,11 @@ impl LightClientRequestHandler { Some(PrefixedStorageKey::new_ref(&request.storage_key)) }; - let proof = match self.client.key_changes_proof(first, last, min, max, storage_key, &key) { - Ok(proof) => proof, - Err(error) => { - log::trace!( + let proof = + match self.client.key_changes_proof(first, last, min, max, storage_key, &key) { + Ok(proof) => proof, + Err(error) => { + log::trace!( "Remote changes proof request from {} for key {} ({:?}..{:?}) failed with: {}.", peer, format!("{} : {}", HexDisplay::from(&request.storage_key), HexDisplay::from(&key.0)), @@ -356,20 +366,22 @@ impl LightClientRequestHandler { error, ); - light::ChangesProof:: { - max_block: Zero::zero(), - proof: Vec::new(), - roots: BTreeMap::new(), - roots_proof: StorageProof::empty(), - } - } - }; + light::ChangesProof:: { + max_block: Zero::zero(), + proof: Vec::new(), + roots: BTreeMap::new(), + roots_proof: StorageProof::empty(), + } + }, + }; let 
response = { let r = schema::v1::light::RemoteChangesResponse { max: proof.max_block.encode(), proof: proof.proof, - roots: proof.roots.into_iter() + roots: proof + .roots + .into_iter() .map(|(k, v)| schema::v1::light::Pair { fst: k.encode(), snd: v.encode() }) .collect(), roots_proof: proof.roots_proof.encode(), diff --git a/client/network/src/light_client_requests/sender.rs b/client/network/src/light_client_requests/sender.rs index 77efa1b982e7a..d99738680952b 100644 --- a/client/network/src/light_client_requests/sender.rs +++ b/client/network/src/light_client_requests/sender.rs @@ -29,28 +29,21 @@ //! 3. Wait for the response and forward the response via the [`futures::channel::oneshot::Sender`] provided earlier //! with [`LightClientRequestSender::request`](sender::LightClientRequestSender::request). -use codec::{self, Encode, Decode}; use crate::{ config::ProtocolId, - protocol::message::{BlockAttributes}, - schema, - PeerId, + protocol::message::BlockAttributes, + request_responses::{OutboundFailure, RequestFailure}, + schema, PeerId, }; -use crate::request_responses::{RequestFailure, OutboundFailure}; -use futures::{channel::{oneshot}, future::BoxFuture, prelude::*, stream::FuturesUnordered}; +use codec::{self, Decode, Encode}; +use futures::{channel::oneshot, future::BoxFuture, prelude::*, stream::FuturesUnordered}; use prost::Message; -use sc_client_api::{ - light::{ - self, RemoteBodyRequest, - } -}; +use sc_client_api::light::{self, RemoteBodyRequest}; use sc_peerset::ReputationChange; -use sp_blockchain::{Error as ClientError}; -use sp_runtime::{ - traits::{Block, Header, NumberFor}, -}; +use sp_blockchain::Error as ClientError; +use sp_runtime::traits::{Block, Header, NumberFor}; use std::{ - collections::{BTreeMap, VecDeque, HashMap}, + collections::{BTreeMap, HashMap, VecDeque}, pin::Pin, sync::Arc, task::{Context, Poll}, @@ -60,9 +53,11 @@ mod rep { use super::*; /// Reputation change for a peer when a request timed out. 
- pub const TIMEOUT: ReputationChange = ReputationChange::new(-(1 << 8), "light client request timeout"); + pub const TIMEOUT: ReputationChange = + ReputationChange::new(-(1 << 8), "light client request timeout"); /// Reputation change for a peer when a request is refused. - pub const REFUSED: ReputationChange = ReputationChange::new(-(1 << 8), "light client request refused"); + pub const REFUSED: ReputationChange = + ReputationChange::new(-(1 << 8), "light client request refused"); } /// Configuration options for [`LightClientRequestSender`]. @@ -95,9 +90,12 @@ pub struct LightClientRequestSender { /// Pending (local) requests. pending_requests: VecDeque>, /// Requests on their way to remote peers. - sent_requests: FuturesUnordered, Result, RequestFailure>, oneshot::Canceled>), - >>, + sent_requests: FuturesUnordered< + BoxFuture< + 'static, + (SentRequest, Result, RequestFailure>, oneshot::Canceled>), + >, + >, /// Handle to use for reporting misbehaviour of peers. peerset: sc_peerset::PeersetHandle, } @@ -121,11 +119,7 @@ impl PendingRequest { } fn into_sent(self, peer_id: PeerId) -> SentRequest { - SentRequest { - attempts_left: self.attempts_left, - request: self.request, - peer: peer_id, - } + SentRequest { attempts_left: self.attempts_left, request: self.request, peer: peer_id } } } @@ -142,10 +136,7 @@ struct SentRequest { impl SentRequest { fn into_pending(self) -> PendingRequest { - PendingRequest { - attempts_left: self.attempts_left, - request: self.request, - } + PendingRequest { attempts_left: self.attempts_left, request: self.request } } } @@ -206,7 +197,7 @@ where peer: PeerId, request: &Request, response: Response, - ) -> Result, Error> { + ) -> Result, Error> { log::trace!("response from {}", peer); match response { Response::Light(r) => self.on_response_light(request, r), @@ -222,27 +213,26 @@ where use schema::v1::light::response::Response; match response.response { Some(Response::RemoteCallResponse(response)) => - if let Request::Call { request 
, .. } = request { + if let Request::Call { request, .. } = request { let proof = Decode::decode(&mut response.proof.as_ref())?; let reply = self.checker.check_execution_proof(request, proof)?; Ok(Reply::VecU8(reply)) } else { Err(Error::UnexpectedResponse) - } - Some(Response::RemoteReadResponse(response)) => - match request { - Request::Read { request, .. } => { - let proof = Decode::decode(&mut response.proof.as_ref())?; - let reply = self.checker.check_read_proof(&request, proof)?; - Ok(Reply::MapVecU8OptVecU8(reply)) - } - Request::ReadChild { request, .. } => { - let proof = Decode::decode(&mut response.proof.as_ref())?; - let reply = self.checker.check_read_child_proof(&request, proof)?; - Ok(Reply::MapVecU8OptVecU8(reply)) - } - _ => Err(Error::UnexpectedResponse) - } + }, + Some(Response::RemoteReadResponse(response)) => match request { + Request::Read { request, .. } => { + let proof = Decode::decode(&mut response.proof.as_ref())?; + let reply = self.checker.check_read_proof(&request, proof)?; + Ok(Reply::MapVecU8OptVecU8(reply)) + }, + Request::ReadChild { request, .. } => { + let proof = Decode::decode(&mut response.proof.as_ref())?; + let reply = self.checker.check_read_child_proof(&request, proof)?; + Ok(Reply::MapVecU8OptVecU8(reply)) + }, + _ => Err(Error::UnexpectedResponse), + }, Some(Response::RemoteChangesResponse(response)) => if let Request::Changes { request, .. } = request { let max_block = Decode::decode(&mut response.max.as_ref())?; @@ -256,31 +246,33 @@ where } r }; - let reply = self.checker.check_changes_proof(&request, light::ChangesProof { - max_block, - proof: response.proof, - roots, - roots_proof, - })?; + let reply = self.checker.check_changes_proof( + &request, + light::ChangesProof { + max_block, + proof: response.proof, + roots, + roots_proof, + }, + )?; Ok(Reply::VecNumberU32(reply)) } else { Err(Error::UnexpectedResponse) - } + }, Some(Response::RemoteHeaderResponse(response)) => if let Request::Header { request, .. 
} = request { - let header = - if response.header.is_empty() { - None - } else { - Some(Decode::decode(&mut response.header.as_ref())?) - }; + let header = if response.header.is_empty() { + None + } else { + Some(Decode::decode(&mut response.header.as_ref())?) + }; let proof = Decode::decode(&mut response.proof.as_ref())?; let reply = self.checker.check_header_proof(&request, header, proof)?; Ok(Reply::Header(reply)) } else { Err(Error::UnexpectedResponse) - } - None => Err(Error::UnexpectedResponse) + }, + None => Err(Error::UnexpectedResponse), } } @@ -289,10 +281,10 @@ where request: &Request, response: schema::v1::BlockResponse, ) -> Result, Error> { - let request = if let Request::Body { request , .. } = &request { + let request = if let Request::Body { request, .. } = &request { request } else { - return Err(Error::UnexpectedResponse); + return Err(Error::UnexpectedResponse) }; let body: Vec<_> = match response.blocks.into_iter().next() { @@ -300,7 +292,8 @@ where None => return Err(Error::UnexpectedResponse), }; - let body = body.into_iter() + let body = body + .into_iter() .map(|extrinsic| B::Extrinsic::decode(&mut &extrinsic[..])) .collect::>()?; @@ -323,13 +316,14 @@ where } } - impl Stream for LightClientRequestSender { type Item = OutEvent; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll> { // If we have received responses to previously sent requests, check them and pass them on. 
- while let Poll::Ready(Some((sent_request, request_result))) = self.sent_requests.poll_next_unpin(cx) { + while let Poll::Ready(Some((sent_request, request_result))) = + self.sent_requests.poll_next_unpin(cx) + { if let Some(info) = self.peers.get_mut(&sent_request.peer) { if info.status != PeerStatus::Busy { // If we get here, something is wrong with our internal handling of peer status @@ -347,30 +341,38 @@ impl Stream for LightClientRequestSender { Err(oneshot::Canceled) => { log::debug!("Oneshot for request to peer {} was canceled.", sent_request.peer); self.remove_peer(sent_request.peer); - self.peerset.report_peer(sent_request.peer, ReputationChange::new_fatal("no response from peer")); + self.peerset.report_peer( + sent_request.peer, + ReputationChange::new_fatal("no response from peer"), + ); self.pending_requests.push_back(sent_request.into_pending()); - continue; - } + continue + }, }; let decoded_request_result = request_result.map(|response| { if sent_request.request.is_block_request() { - schema::v1::BlockResponse::decode(&response[..]) - .map(|r| Response::Block(r)) + schema::v1::BlockResponse::decode(&response[..]).map(|r| Response::Block(r)) } else { - schema::v1::light::Response::decode(&response[..]) - .map(|r| Response::Light(r)) + schema::v1::light::Response::decode(&response[..]).map(|r| Response::Light(r)) } }); let response = match decoded_request_result { Ok(Ok(response)) => response, Ok(Err(e)) => { - log::debug!("Failed to decode response from peer {}: {:?}.", sent_request.peer, e); + log::debug!( + "Failed to decode response from peer {}: {:?}.", + sent_request.peer, + e + ); self.remove_peer(sent_request.peer); - self.peerset.report_peer(sent_request.peer, ReputationChange::new_fatal("invalid response from peer")); + self.peerset.report_peer( + sent_request.peer, + ReputationChange::new_fatal("invalid response from peer"), + ); self.pending_requests.push_back(sent_request.into_pending()); - continue; + continue }, Err(e) => { 
log::debug!("Request to peer {} failed with {:?}.", sent_request.peer, e); @@ -379,22 +381,19 @@ impl Stream for LightClientRequestSender { RequestFailure::NotConnected => { self.remove_peer(sent_request.peer); self.pending_requests.push_back(sent_request.into_pending()); - } + }, RequestFailure::UnknownProtocol => { debug_assert!( false, "Light client and block request protocol should be known when \ sending requests.", ); - } + }, RequestFailure::Refused => { self.remove_peer(sent_request.peer); - self.peerset.report_peer( - sent_request.peer, - rep::REFUSED, - ); + self.peerset.report_peer(sent_request.peer, rep::REFUSED); self.pending_requests.push_back(sent_request.into_pending()); - } + }, RequestFailure::Obsolete => { debug_assert!( false, @@ -402,13 +401,10 @@ impl Stream for LightClientRequestSender { response receiver.", ); self.pending_requests.push_back(sent_request.into_pending()); - } + }, RequestFailure::Network(OutboundFailure::Timeout) => { self.remove_peer(sent_request.peer); - self.peerset.report_peer( - sent_request.peer, - rep::TIMEOUT, - ); + self.peerset.report_peer(sent_request.peer, rep::TIMEOUT); self.pending_requests.push_back(sent_request.into_pending()); }, RequestFailure::Network(OutboundFailure::UnsupportedProtocols) => { @@ -420,31 +416,27 @@ impl Stream for LightClientRequestSender { ), ); self.pending_requests.push_back(sent_request.into_pending()); - } + }, RequestFailure::Network(OutboundFailure::DialFailure) => { self.remove_peer(sent_request.peer); self.peerset.report_peer( sent_request.peer, - ReputationChange::new_fatal( - "failed to dial peer", - ), + ReputationChange::new_fatal("failed to dial peer"), ); self.pending_requests.push_back(sent_request.into_pending()); - } + }, RequestFailure::Network(OutboundFailure::ConnectionClosed) => { self.remove_peer(sent_request.peer); self.peerset.report_peer( sent_request.peer, - ReputationChange::new_fatal( - "connection to peer closed", - ), + ReputationChange::new_fatal("connection 
to peer closed"), ); self.pending_requests.push_back(sent_request.into_pending()); - } + }, } - continue; - } + continue + }, }; match self.on_response(sent_request.peer, &sent_request.request, response) { @@ -454,23 +446,23 @@ impl Stream for LightClientRequestSender { self.remove_peer(sent_request.peer); self.peerset.report_peer( sent_request.peer, - ReputationChange::new_fatal( - "unexpected response from peer", - ), + ReputationChange::new_fatal("unexpected response from peer"), ); self.pending_requests.push_back(sent_request.into_pending()); - } + }, Err(other) => { - log::debug!("error handling response from peer {}: {}", sent_request.peer, other); + log::debug!( + "error handling response from peer {}: {}", + sent_request.peer, + other + ); self.remove_peer(sent_request.peer); self.peerset.report_peer( sent_request.peer, - ReputationChange::new_fatal( - "invalid response from peer", - ), + ReputationChange::new_fatal("invalid response from peer"), ); self.pending_requests.push_back(sent_request.into_pending()) - } + }, } } @@ -497,7 +489,7 @@ impl Stream for LightClientRequestSender { peer = Some((*peer_id, peer_info)); break }, - _ => peer = Some((*peer_id, peer_info)) + _ => peer = Some((*peer_id, peer_info)), } } } @@ -509,8 +501,8 @@ impl Stream for LightClientRequestSender { self.pending_requests.push_front(pending_request); log::debug!("No peer available to send request to."); - break; - } + break + }, }; let request_bytes = match pending_request.request.serialize_request() { @@ -519,7 +511,7 @@ impl Stream for LightClientRequestSender { log::debug!("failed to serialize request: {}", error); pending_request.request.return_reply(Err(ClientError::RemoteFetchFailed)); continue - } + }, }; let (tx, rx) = oneshot::channel(); @@ -528,16 +520,15 @@ impl Stream for LightClientRequestSender { pending_request.attempts_left -= 1; - self.sent_requests.push(async move { - (pending_request.into_sent(peer_id), rx.await) - }.boxed()); + self.sent_requests + 
.push(async move { (pending_request.into_sent(peer_id), rx.await) }.boxed()); return Poll::Ready(Some(OutEvent::SendRequest { target: peer_id, request: request_bytes, pending_response: tx, protocol_name: protocol, - })); + })) } Poll::Pending @@ -557,7 +548,7 @@ pub enum OutEvent { pending_response: oneshot::Sender, RequestFailure>>, /// The name of the protocol to use to send the request. protocol_name: String, - } + }, } /// Incoming response from remote. @@ -605,7 +596,6 @@ enum Reply { Extrinsics(Vec), } - /// Information we have about some peer. #[derive(Debug)] struct PeerInfo { @@ -615,10 +605,7 @@ struct PeerInfo { impl Default for PeerInfo { fn default() -> Self { - PeerInfo { - best_block: None, - status: PeerStatus::Idle, - } + PeerInfo { best_block: None, status: PeerStatus::Idle } } } @@ -645,43 +632,43 @@ pub enum Request { /// Request. request: RemoteBodyRequest, /// [`oneshot::Sender`] to return response. - sender: oneshot::Sender, ClientError>> + sender: oneshot::Sender, ClientError>>, }, /// Remote header request. Header { /// Request. request: light::RemoteHeaderRequest, /// [`oneshot::Sender`] to return response. - sender: oneshot::Sender> + sender: oneshot::Sender>, }, /// Remote read request. Read { /// Request. request: light::RemoteReadRequest, /// [`oneshot::Sender`] to return response. - sender: oneshot::Sender, Option>>, ClientError>> + sender: oneshot::Sender, Option>>, ClientError>>, }, /// Remote read child request. ReadChild { /// Request. request: light::RemoteReadChildRequest, /// [`oneshot::Sender`] to return response. - sender: oneshot::Sender, Option>>, ClientError>> + sender: oneshot::Sender, Option>>, ClientError>>, }, /// Remote call request. Call { /// Request. request: light::RemoteCallRequest, /// [`oneshot::Sender`] to return response. - sender: oneshot::Sender, ClientError>> + sender: oneshot::Sender, ClientError>>, }, /// Remote changes request. Changes { /// Request. 
request: light::RemoteChangesRequest, /// [`oneshot::Sender`] to return response. - sender: oneshot::Sender, u32)>, ClientError>> - } + sender: oneshot::Sender, u32)>, ClientError>>, + }, } impl Request { @@ -728,19 +715,19 @@ impl Request { let mut buf = Vec::with_capacity(rq.encoded_len()); rq.encode(&mut buf)?; - return Ok(buf); - } + return Ok(buf) + }, Request::Header { request, .. } => { let r = schema::v1::light::RemoteHeaderRequest { block: request.block.encode() }; schema::v1::light::request::Request::RemoteHeaderRequest(r) - } + }, Request::Read { request, .. } => { let r = schema::v1::light::RemoteReadRequest { block: request.block.encode(), keys: request.keys.clone(), }; schema::v1::light::request::Request::RemoteReadRequest(r) - } + }, Request::ReadChild { request, .. } => { let r = schema::v1::light::RemoteReadChildRequest { block: request.block.encode(), @@ -748,7 +735,7 @@ impl Request { keys: request.keys.clone(), }; schema::v1::light::request::Request::RemoteReadChildRequest(r) - } + }, Request::Call { request, .. } => { let r = schema::v1::light::RemoteCallRequest { block: request.block.encode(), @@ -756,19 +743,22 @@ impl Request { data: request.call_data.clone(), }; schema::v1::light::request::Request::RemoteCallRequest(r) - } + }, Request::Changes { request, .. 
} => { let r = schema::v1::light::RemoteChangesRequest { first: request.first_block.1.encode(), last: request.last_block.1.encode(), min: request.tries_roots.1.encode(), max: request.max_block.1.encode(), - storage_key: request.storage_key.clone().map(|s| s.into_inner()) + storage_key: request + .storage_key + .clone() + .map(|s| s.into_inner()) .unwrap_or_default(), key: request.key.clone(), }; schema::v1::light::request::Request::RemoteChangesRequest(r) - } + }, }; let rq = schema::v1::light::Request { request: Some(request) }; @@ -786,32 +776,35 @@ impl Request { Err(e) => send(Err(e), sender), Ok(Reply::Extrinsics(x)) => send(Ok(x), sender), reply => log::error!("invalid reply for body request: {:?}, {:?}", reply, request), - } + }, Request::Header { request, sender } => match result { Err(e) => send(Err(e), sender), Ok(Reply::Header(x)) => send(Ok(x), sender), - reply => log::error!("invalid reply for header request: {:?}, {:?}", reply, request), - } + reply => + log::error!("invalid reply for header request: {:?}, {:?}", reply, request), + }, Request::Read { request, sender } => match result { Err(e) => send(Err(e), sender), Ok(Reply::MapVecU8OptVecU8(x)) => send(Ok(x), sender), reply => log::error!("invalid reply for read request: {:?}, {:?}", reply, request), - } + }, Request::ReadChild { request, sender } => match result { Err(e) => send(Err(e), sender), Ok(Reply::MapVecU8OptVecU8(x)) => send(Ok(x), sender), - reply => log::error!("invalid reply for read child request: {:?}, {:?}", reply, request), - } + reply => + log::error!("invalid reply for read child request: {:?}, {:?}", reply, request), + }, Request::Call { request, sender } => match result { Err(e) => send(Err(e), sender), Ok(Reply::VecU8(x)) => send(Ok(x), sender), reply => log::error!("invalid reply for call request: {:?}, {:?}", reply, request), - } + }, Request::Changes { request, sender } => match result { Err(e) => send(Err(e), sender), Ok(Reply::VecNumberU32(x)) => send(Ok(x), sender), - 
reply => log::error!("invalid reply for changes request: {:?}, {:?}", reply, request), - } + reply => + log::error!("invalid reply for changes request: {:?}, {:?}", reply, request), + }, } } } @@ -819,19 +812,17 @@ impl Request { #[cfg(test)] mod tests { use super::*; - use crate::light_client_requests::tests::{DummyFetchChecker, protocol_id, peerset, dummy_header}; - use crate::request_responses::OutboundFailure; + use crate::{ + light_client_requests::tests::{dummy_header, peerset, protocol_id, DummyFetchChecker}, + request_responses::OutboundFailure, + }; use assert_matches::assert_matches; - use futures::channel::oneshot; - use futures::executor::block_on; - use futures::poll; + use futures::{channel::oneshot, executor::block_on, poll}; use sc_client_api::StorageProof; use sp_core::storage::ChildInfo; - use sp_runtime::generic::Header; - use sp_runtime::traits::BlakeTwo256; - use std::collections::HashSet; - use std::iter::FromIterator; + use sp_runtime::{generic::Header, traits::BlakeTwo256}; + use std::{collections::HashSet, iter::FromIterator}; fn empty_proof() -> Vec { StorageProof::empty().encode() @@ -843,10 +834,7 @@ mod tests { let (_peer_set, peer_set_handle) = peerset(); let mut sender = LightClientRequestSender::::new( &protocol_id(), - Arc::new(DummyFetchChecker { - ok: true, - _mark: std::marker::PhantomData, - }), + Arc::new(DummyFetchChecker { ok: true, _mark: std::marker::PhantomData }), peer_set_handle, ); @@ -864,17 +852,15 @@ mod tests { fn body_request_fields_encoded_properly() { let (sender, _receiver) = oneshot::channel(); let request = Request::::Body { - request: RemoteBodyRequest { - header: dummy_header(), - retry_count: None, - }, + request: RemoteBodyRequest { header: dummy_header(), retry_count: None }, sender, }; let serialized_request = request.serialize_request().unwrap(); - let deserialized_request = schema::v1::BlockRequest::decode(&serialized_request[..]).unwrap(); + let deserialized_request = + 
schema::v1::BlockRequest::decode(&serialized_request[..]).unwrap(); assert!(BlockAttributes::from_be_u32(deserialized_request.fields) - .unwrap() - .contains(BlockAttributes::BODY)); + .unwrap() + .contains(BlockAttributes::BODY)); } #[test] @@ -916,29 +902,26 @@ mod tests { sender.request(Request::Call { request, sender: chan.0 }).unwrap(); assert_eq!(1, sender.pending_requests.len(), "Expect one pending request."); - let OutEvent::SendRequest { target, pending_response, .. } = block_on(sender.next()).unwrap(); - assert!( - target == peer0 || target == peer1, - "Expect request to originate from known peer.", - ); + let OutEvent::SendRequest { target, pending_response, .. } = + block_on(sender.next()).unwrap(); + assert!(target == peer0 || target == peer1, "Expect request to originate from known peer.",); // And we should have one busy peer. assert!({ - let (idle, busy): (Vec<_>, Vec<_>) = sender - .peers - .iter() - .partition(|(_, info)| info.status == PeerStatus::Idle); - idle.len() == 1 - && busy.len() == 1 - && (idle[0].0 == &peer0 || busy[0].0 == &peer0) - && (idle[0].0 == &peer1 || busy[0].0 == &peer1) + let (idle, busy): (Vec<_>, Vec<_>) = + sender.peers.iter().partition(|(_, info)| info.status == PeerStatus::Idle); + idle.len() == 1 && + busy.len() == 1 && (idle[0].0 == &peer0 || busy[0].0 == &peer0) && + (idle[0].0 == &peer1 || busy[0].0 == &peer1) }); assert_eq!(0, sender.pending_requests.len(), "Expect no pending request."); assert_eq!(1, sender.sent_requests.len(), "Expect one request to be sent."); // Report first attempt as timed out. - pending_response.send(Err(RequestFailure::Network(OutboundFailure::Timeout))).unwrap(); + pending_response + .send(Err(RequestFailure::Network(OutboundFailure::Timeout))) + .unwrap(); // Expect a new request to be issued. let OutEvent::SendRequest { pending_response, .. 
} = block_on(sender.next()).unwrap(); @@ -948,13 +931,17 @@ mod tests { assert_eq!(1, sender.sent_requests.len(), "Expect new request to be issued."); // Report second attempt as timed out. - pending_response.send(Err(RequestFailure::Network(OutboundFailure::Timeout))).unwrap(); + pending_response + .send(Err(RequestFailure::Network(OutboundFailure::Timeout))) + .unwrap(); assert_matches!( - block_on(async { poll!(sender.next()) }), Poll::Pending, + block_on(async { poll!(sender.next()) }), + Poll::Pending, "Expect sender to not issue another attempt.", ); assert_matches!( - block_on(chan.1).unwrap(), Err(ClientError::RemoteFetchFailed), + block_on(chan.1).unwrap(), + Err(ClientError::RemoteFetchFailed), "Expect request failure to be reported.", ); assert_eq!(0, sender.peers.len(), "Expect no peer to be left"); @@ -988,12 +975,7 @@ mod tests { call_data: vec![], retry_count: Some(1), }; - sender - .request(Request::Call { - request, - sender: chan.0, - }) - .unwrap(); + sender.request(Request::Call { request, sender: chan.0 }).unwrap(); assert_eq!(1, sender.pending_requests.len(), "Expect one pending request."); assert_eq!(0, sender.sent_requests.len(), "Expect zero sent requests."); @@ -1003,9 +985,7 @@ mod tests { assert_eq!(1, sender.sent_requests.len(), "Expect one sent request."); let response = { - let r = schema::v1::light::RemoteCallResponse { - proof: empty_proof(), - }; + let r = schema::v1::light::RemoteCallResponse { proof: empty_proof() }; let response = schema::v1::light::Response { response: Some(schema::v1::light::response::Response::RemoteCallResponse(r)), }; @@ -1017,7 +997,8 @@ mod tests { pending_response.send(Ok(response)).unwrap(); assert_matches!( - block_on(async { poll!(sender.next()) }), Poll::Pending, + block_on(async { poll!(sender.next()) }), + Poll::Pending, "Expect sender to not issue another attempt, given that there is no peer left.", ); @@ -1050,12 +1031,7 @@ mod tests { call_data: vec![], retry_count: Some(1), }; - sender - 
.request(Request::Call { - request, - sender: chan.0, - }) - .unwrap(); + sender.request(Request::Call { request, sender: chan.0 }).unwrap(); assert_eq!(1, sender.pending_requests.len()); assert_eq!(0, sender.sent_requests.len()); @@ -1064,9 +1040,7 @@ mod tests { assert_eq!(1, sender.sent_requests.len(), "Expect one sent request."); let response = { - let r = schema::v1::light::RemoteReadResponse { - proof: empty_proof(), - }; // Not a RemoteCallResponse! + let r = schema::v1::light::RemoteReadResponse { proof: empty_proof() }; // Not a RemoteCallResponse! let response = schema::v1::light::Response { response: Some(schema::v1::light::response::Response::RemoteReadResponse(r)), }; @@ -1077,7 +1051,8 @@ mod tests { pending_response.send(Ok(response)).unwrap(); assert_matches!( - block_on(async { poll!(sender.next()) }), Poll::Pending, + block_on(async { poll!(sender.next()) }), + Poll::Pending, "Expect sender to not issue another attempt, given that there is no peer left.", ); @@ -1114,12 +1089,7 @@ mod tests { call_data: vec![], retry_count: Some(3), // Attempt up to three retries. }; - sender - .request(Request::Call { - request, - sender: chan.0, - }) - .unwrap(); + sender.request(Request::Call { request, sender: chan.0 }).unwrap(); assert_eq!(1, sender.pending_requests.len()); assert_eq!(0, sender.sent_requests.len()); @@ -1132,9 +1102,7 @@ mod tests { for (i, _peer) in peers.iter().enumerate() { // Construct an invalid response let response = { - let r = schema::v1::light::RemoteCallResponse { - proof: empty_proof(), - }; + let r = schema::v1::light::RemoteCallResponse { proof: empty_proof() }; let response = schema::v1::light::Response { response: Some(schema::v1::light::response::Response::RemoteCallResponse(r)), }; @@ -1152,13 +1120,11 @@ mod tests { } else { // Last peer and last attempt. 
assert_matches!( - block_on(async { poll!(sender.next()) }), Poll::Pending, + block_on(async { poll!(sender.next()) }), + Poll::Pending, "Expect sender to not issue another attempt, given that there is no peer left.", ); - assert_matches!( - chan.1.try_recv(), - Ok(Some(Err(ClientError::RemoteFetchFailed))) - ) + assert_matches!(chan.1.try_recv(), Ok(Some(Err(ClientError::RemoteFetchFailed)))) } } } @@ -1187,35 +1153,27 @@ mod tests { proof: empty_proof(), }; schema::v1::light::Response { - response: Some(schema::v1::light::response::Response::RemoteHeaderResponse( - r, - )), + response: Some(schema::v1::light::response::Response::RemoteHeaderResponse(r)), } - } + }, Request::Read { .. } => { - let r = schema::v1::light::RemoteReadResponse { - proof: empty_proof(), - }; + let r = schema::v1::light::RemoteReadResponse { proof: empty_proof() }; schema::v1::light::Response { response: Some(schema::v1::light::response::Response::RemoteReadResponse(r)), } - } + }, Request::ReadChild { .. } => { - let r = schema::v1::light::RemoteReadResponse { - proof: empty_proof(), - }; + let r = schema::v1::light::RemoteReadResponse { proof: empty_proof() }; schema::v1::light::Response { response: Some(schema::v1::light::response::Response::RemoteReadResponse(r)), } - } + }, Request::Call { .. } => { - let r = schema::v1::light::RemoteCallResponse { - proof: empty_proof(), - }; + let r = schema::v1::light::RemoteCallResponse { proof: empty_proof() }; schema::v1::light::Response { response: Some(schema::v1::light::response::Response::RemoteCallResponse(r)), } - } + }, Request::Changes { .. 
} => { let r = schema::v1::light::RemoteChangesResponse { max: std::iter::repeat(1).take(32).collect(), @@ -1226,7 +1184,7 @@ mod tests { schema::v1::light::Response { response: Some(schema::v1::light::response::Response::RemoteChangesResponse(r)), } - } + }, }; let response = { @@ -1245,7 +1203,8 @@ mod tests { pending_response.send(Ok(response)).unwrap(); assert_matches!( - block_on(async { poll!(sender.next()) }), Poll::Pending, + block_on(async { poll!(sender.next()) }), + Poll::Pending, "Expect sender to not issue another attempt, given that there is no peer left.", ); @@ -1263,10 +1222,7 @@ mod tests { call_data: vec![], retry_count: None, }; - issue_request(Request::Call { - request, - sender: chan.0, - }); + issue_request(Request::Call { request, sender: chan.0 }); assert_matches!(chan.1.try_recv(), Ok(Some(Ok(_)))) } @@ -1279,10 +1235,7 @@ mod tests { keys: vec![b":key".to_vec()], retry_count: None, }; - issue_request(Request::Read { - request, - sender: chan.0, - }); + issue_request(Request::Read { request, sender: chan.0 }); assert_matches!(chan.1.try_recv(), Ok(Some(Ok(_)))) } @@ -1297,10 +1250,7 @@ mod tests { keys: vec![b":key".to_vec()], retry_count: None, }; - issue_request(Request::ReadChild { - request, - sender: chan.0, - }); + issue_request(Request::ReadChild { request, sender: chan.0 }); assert_matches!(chan.1.try_recv(), Ok(Some(Ok(_)))) } @@ -1312,10 +1262,7 @@ mod tests { block: 1, retry_count: None, }; - issue_request(Request::Header { - request, - sender: chan.0, - }); + issue_request(Request::Header { request, sender: chan.0 }); assert_matches!(chan.1.try_recv(), Ok(Some(Ok(_)))) } @@ -1336,10 +1283,7 @@ mod tests { storage_key: None, retry_count: None, }; - issue_request(Request::Changes { - request, - sender: chan.0, - }); + issue_request(Request::Changes { request, sender: chan.0 }); assert_matches!(chan.1.try_recv(), Ok(Some(Ok(_)))) } } diff --git a/client/network/src/network_state.rs b/client/network/src/network_state.rs index 
4ddfadda172e4..3f3d0596f16a0 100644 --- a/client/network/src/network_state.rs +++ b/client/network/src/network_state.rs @@ -22,7 +22,10 @@ use libp2p::{core::ConnectedPoint, Multiaddr}; use serde::{Deserialize, Serialize}; -use std::{collections::{HashMap, HashSet}, time::Duration}; +use std::{ + collections::{HashMap, HashSet}, + time::Duration, +}; /// Returns general information about the networking. /// @@ -90,13 +93,9 @@ pub enum PeerEndpoint { impl From for PeerEndpoint { fn from(endpoint: ConnectedPoint) -> Self { match endpoint { - ConnectedPoint::Dialer { address } => - PeerEndpoint::Dialing(address), + ConnectedPoint::Dialer { address } => PeerEndpoint::Dialing(address), ConnectedPoint::Listener { local_addr, send_back_addr } => - PeerEndpoint::Listening { - local_addr, - send_back_addr - } + PeerEndpoint::Listening { local_addr, send_back_addr }, } } } diff --git a/client/network/src/on_demand_layer.rs b/client/network/src/on_demand_layer.rs index ef8076e8cbed7..ebcf012c0faef 100644 --- a/client/network/src/on_demand_layer.rs +++ b/client/network/src/on_demand_layer.rs @@ -23,13 +23,19 @@ use crate::light_client_requests; use futures::{channel::oneshot, prelude::*}; use parking_lot::Mutex; use sc_client_api::{ - FetchChecker, Fetcher, RemoteBodyRequest, RemoteCallRequest, RemoteChangesRequest, - RemoteHeaderRequest, RemoteReadChildRequest, RemoteReadRequest, StorageProof, ChangesProof, + ChangesProof, FetchChecker, Fetcher, RemoteBodyRequest, RemoteCallRequest, + RemoteChangesRequest, RemoteHeaderRequest, RemoteReadChildRequest, RemoteReadRequest, + StorageProof, }; -use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; use sp_blockchain::Error as ClientError; use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor}; -use std::{collections::HashMap, pin::Pin, sync::Arc, task::Context, task::Poll}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use std::{ + 
collections::HashMap, + pin::Pin, + sync::Arc, + task::{Context, Poll}, +}; /// Implements the `Fetcher` trait of the client. Makes it possible for the light client to perform /// network requests for some state. @@ -45,13 +51,13 @@ pub struct OnDemand { /// Note that a better alternative would be to use a MPMC queue here, and add a `poll` method /// from the `OnDemand`. However there exists no popular implementation of MPMC channels in /// asynchronous Rust at the moment - requests_queue: Mutex>>>, + requests_queue: + Mutex>>>, /// Sending side of `requests_queue`. requests_send: TracingUnboundedSender>, } - #[derive(Debug, thiserror::Error)] #[error("AlwaysBadChecker")] struct ErrorAlwaysBadChecker; @@ -83,7 +89,7 @@ impl FetchChecker for AlwaysBadChecker { &self, _request: &RemoteReadRequest, _remote_proof: StorageProof, - ) -> Result,Option>>, ClientError> { + ) -> Result, Option>>, ClientError> { Err(ErrorAlwaysBadChecker.into()) } @@ -106,7 +112,7 @@ impl FetchChecker for AlwaysBadChecker { fn check_changes_proof( &self, _request: &RemoteChangesRequest, - _remote_proof: ChangesProof + _remote_proof: ChangesProof, ) -> Result, u32)>, ClientError> { Err(ErrorAlwaysBadChecker.into()) } @@ -114,7 +120,7 @@ impl FetchChecker for AlwaysBadChecker { fn check_body_proof( &self, _request: &RemoteBodyRequest, - _body: Vec + _body: Vec, ) -> Result, ClientError> { Err(ErrorAlwaysBadChecker.into()) } @@ -129,11 +135,7 @@ where let (requests_send, requests_queue) = tracing_unbounded("mpsc_ondemand"); let requests_queue = Mutex::new(Some(requests_queue)); - OnDemand { - checker, - requests_queue, - requests_send, - } + OnDemand { checker, requests_queue, requests_send } } /// Get checker reference. @@ -148,9 +150,9 @@ where /// /// If this function returns `None`, that means that the receiver has already been extracted in /// the past, and therefore that something already handles the requests. 
- pub(crate) fn extract_receiver(&self) - -> Option>> - { + pub(crate) fn extract_receiver( + &self, + ) -> Option>> { self.requests_queue.lock().take() } } diff --git a/client/network/src/peer_info.rs b/client/network/src/peer_info.rs index 39bbd1d870460..a123482be0727 100644 --- a/client/network/src/peer_info.rs +++ b/client/network/src/peer_info.rs @@ -16,24 +16,33 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use crate::utils::interval; use fnv::FnvHashMap; use futures::prelude::*; -use libp2p::Multiaddr; -use libp2p::core::connection::{ConnectionId, ListenerId}; -use libp2p::core::{ConnectedPoint, either::EitherOutput, PeerId, PublicKey}; -use libp2p::swarm::{IntoProtocolsHandler, IntoProtocolsHandlerSelect, ProtocolsHandler}; -use libp2p::swarm::{NetworkBehaviour, NetworkBehaviourAction, PollParameters}; -use libp2p::identify::{Identify, IdentifyConfig, IdentifyEvent, IdentifyInfo}; -use libp2p::ping::{Ping, PingConfig, PingEvent, PingSuccess}; -use log::{debug, trace, error}; +use libp2p::{ + core::{ + connection::{ConnectionId, ListenerId}, + either::EitherOutput, + ConnectedPoint, PeerId, PublicKey, + }, + identify::{Identify, IdentifyConfig, IdentifyEvent, IdentifyInfo}, + ping::{Ping, PingConfig, PingEvent, PingSuccess}, + swarm::{ + IntoProtocolsHandler, IntoProtocolsHandlerSelect, NetworkBehaviour, NetworkBehaviourAction, + PollParameters, ProtocolsHandler, + }, + Multiaddr, +}; +use log::{debug, error, trace}; use smallvec::SmallVec; -use std::{error, io}; -use std::collections::hash_map::Entry; -use std::pin::Pin; -use std::task::{Context, Poll}; -use std::time::Duration; +use std::{ + collections::hash_map::Entry, + error, io, + pin::Pin, + task::{Context, Poll}, + time::Duration, +}; use wasm_timer::Instant; -use crate::utils::interval; /// Time after we disconnect from a node before we purge its information from the cache. 
const CACHE_EXPIRE: Duration = Duration::from_secs(10 * 60); @@ -70,21 +79,13 @@ impl NodeInfo { fn new(endpoint: ConnectedPoint) -> Self { let mut endpoints = SmallVec::new(); endpoints.push(endpoint); - NodeInfo { - info_expire: None, - endpoints, - client_version: None, - latest_ping: None, - } + NodeInfo { info_expire: None, endpoints, client_version: None, latest_ping: None } } } impl PeerInfoBehaviour { /// Builds a new `PeerInfoBehaviour`. - pub fn new( - user_agent: String, - local_public_key: PublicKey, - ) -> Self { + pub fn new(user_agent: String, local_public_key: PublicKey) -> Self { let identify = { let cfg = IdentifyConfig::new("/substrate/1.0".to_string(), local_public_key) .with_agent_version(user_agent); @@ -172,7 +173,7 @@ pub enum PeerInfoEvent { impl NetworkBehaviour for PeerInfoBehaviour { type ProtocolsHandler = IntoProtocolsHandlerSelect< ::ProtocolsHandler, - ::ProtocolsHandler + ::ProtocolsHandler, >; type OutEvent = PeerInfoEvent; @@ -191,13 +192,18 @@ impl NetworkBehaviour for PeerInfoBehaviour { self.identify.inject_connected(peer_id); } - fn inject_connection_established(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn inject_connection_established( + &mut self, + peer_id: &PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { self.ping.inject_connection_established(peer_id, conn, endpoint); self.identify.inject_connection_established(peer_id, conn, endpoint); match self.nodes_info.entry(peer_id.clone()) { Entry::Vacant(e) => { e.insert(NodeInfo::new(endpoint.clone())); - } + }, Entry::Occupied(e) => { let e = e.into_mut(); if e.info_expire.as_ref().map(|exp| *exp < Instant::now()).unwrap_or(false) { @@ -206,11 +212,16 @@ impl NetworkBehaviour for PeerInfoBehaviour { } e.info_expire = None; e.endpoints.push(endpoint.clone()); - } + }, } } - fn inject_connection_closed(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn inject_connection_closed( + &mut self, 
+ peer_id: &PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { self.ping.inject_connection_closed(peer_id, conn, endpoint); self.identify.inject_connection_closed(peer_id, conn, endpoint); @@ -238,7 +249,7 @@ impl NetworkBehaviour for PeerInfoBehaviour { &mut self, peer_id: PeerId, connection: ConnectionId, - event: <::Handler as ProtocolsHandler>::OutEvent + event: <::Handler as ProtocolsHandler>::OutEvent, ) { match event { EitherOutput::First(event) => self.ping.inject_event(peer_id, connection, event), @@ -246,7 +257,12 @@ impl NetworkBehaviour for PeerInfoBehaviour { } } - fn inject_addr_reach_failure(&mut self, peer_id: Option<&PeerId>, addr: &Multiaddr, error: &dyn std::error::Error) { + fn inject_addr_reach_failure( + &mut self, + peer_id: Option<&PeerId>, + addr: &Multiaddr, + error: &dyn std::error::Error, + ) { self.ping.inject_addr_reach_failure(peer_id, addr, error); self.identify.inject_addr_reach_failure(peer_id, addr, error); } @@ -300,7 +316,7 @@ impl NetworkBehaviour for PeerInfoBehaviour { <::Handler as ProtocolsHandler>::InEvent, Self::OutEvent > - > { + >{ loop { match self.ping.poll(cx, params) { Poll::Pending => break, @@ -317,28 +333,29 @@ impl NetworkBehaviour for PeerInfoBehaviour { return Poll::Ready(NetworkBehaviourAction::NotifyHandler { peer_id, handler, - event: EitherOutput::First(event) + event: EitherOutput::First(event), }), Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { address, score }) => - return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { address, score }), + return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { + address, + score, + }), } } loop { match self.identify.poll(cx, params) { Poll::Pending => break, - Poll::Ready(NetworkBehaviourAction::GenerateEvent(event)) => { - match event { - IdentifyEvent::Received { peer_id, info, .. 
} => { - self.handle_identify_report(&peer_id, &info); - let event = PeerInfoEvent::Identified { peer_id, info }; - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(event)); - } - IdentifyEvent::Error { peer_id, error } => - debug!(target: "sub-libp2p", "Identification with peer {:?} failed => {}", peer_id, error), - IdentifyEvent::Pushed { .. } => {} - IdentifyEvent::Sent { .. } => {} - } + Poll::Ready(NetworkBehaviourAction::GenerateEvent(event)) => match event { + IdentifyEvent::Received { peer_id, info, .. } => { + self.handle_identify_report(&peer_id, &info); + let event = PeerInfoEvent::Identified { peer_id, info }; + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(event)) + }, + IdentifyEvent::Error { peer_id, error } => + debug!(target: "sub-libp2p", "Identification with peer {:?} failed => {}", peer_id, error), + IdentifyEvent::Pushed { .. } => {}, + IdentifyEvent::Sent { .. } => {}, }, Poll::Ready(NetworkBehaviourAction::DialAddress { address }) => return Poll::Ready(NetworkBehaviourAction::DialAddress { address }), @@ -348,10 +365,13 @@ impl NetworkBehaviour for PeerInfoBehaviour { return Poll::Ready(NetworkBehaviourAction::NotifyHandler { peer_id, handler, - event: EitherOutput::Second(event) + event: EitherOutput::Second(event), }), Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { address, score }) => - return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { address, score }), + return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { + address, + score, + }), } } diff --git a/client/network/src/protocol.rs b/client/network/src/protocol.rs index eaed7ffcccace..0838657fae530 100644 --- a/client/network/src/protocol.rs +++ b/client/network/src/protocol.rs @@ -21,49 +21,64 @@ use crate::{ config::{self, ProtocolId}, error, request_responses::RequestFailure, - utils::{interval, LruHashSet}, schema::v1::StateResponse, + utils::{interval, LruHashSet}, }; use bytes::Bytes; use codec::{Decode, DecodeAll, Encode}; use 
futures::{channel::oneshot, prelude::*}; +use libp2p::{ + core::{ + connection::{ConnectionId, ListenerId}, + ConnectedPoint, + }, + request_response::OutboundFailure, + swarm::{ + IntoProtocolsHandler, NetworkBehaviour, NetworkBehaviourAction, PollParameters, + ProtocolsHandler, + }, + Multiaddr, PeerId, +}; +use log::{debug, error, log, trace, warn, Level}; +use message::{ + generic::{Message as GenericMessage, Roles}, + BlockAnnounce, Message, +}; use notifications::{Notifications, NotificationsOut}; -use libp2p::core::{ConnectedPoint, connection::{ConnectionId, ListenerId}}; -use libp2p::request_response::OutboundFailure; -use libp2p::swarm::{NetworkBehaviour, NetworkBehaviourAction, PollParameters}; -use libp2p::swarm::{ProtocolsHandler, IntoProtocolsHandler}; -use libp2p::{Multiaddr, PeerId}; -use log::{log, Level, trace, debug, warn, error}; -use message::{BlockAnnounce, Message}; -use message::generic::{Message as GenericMessage, Roles}; -use prometheus_endpoint::{Registry, Gauge, GaugeVec, PrometheusError, Opts, register, U64}; +use prometheus_endpoint::{register, Gauge, GaugeVec, Opts, PrometheusError, Registry, U64}; use prost::Message as _; +use sp_arithmetic::traits::SaturatedConversion; use sp_consensus::{ - BlockOrigin, block_validation::BlockAnnounceValidator, - import_queue::{BlockImportResult, BlockImportError, IncomingBlock, Origin} + import_queue::{BlockImportError, BlockImportResult, IncomingBlock, Origin}, + BlockOrigin, }; use sp_runtime::{ - Justifications, generic::BlockId, - traits::{Block as BlockT, Header as HeaderT, NumberFor, Zero, CheckedSub}, + traits::{Block as BlockT, CheckedSub, Header as HeaderT, NumberFor, Zero}, + Justifications, +}; +use std::{ + borrow::Cow, + collections::{HashMap, HashSet, VecDeque}, + convert::TryFrom as _, + io, iter, + num::NonZeroUsize, + pin::Pin, + sync::Arc, + task::Poll, + time, }; -use sp_arithmetic::traits::SaturatedConversion; use sync::{ChainSync, Status as SyncStatus}; -use std::borrow::Cow; 
-use std::convert::TryFrom as _; -use std::collections::{HashMap, HashSet, VecDeque}; -use std::sync::Arc; -use std::{io, iter, num::NonZeroUsize, pin::Pin, task::Poll, time}; mod notifications; -pub mod message; pub mod event; +pub mod message; pub mod sync; -pub use notifications::{NotificationsSink, Ready, NotifsHandlerError}; +pub use notifications::{NotificationsSink, NotifsHandlerError, Ready}; /// Interval at which we perform time based maintenance const TICK_TIMEOUT: time::Duration = time::Duration::from_millis(1100); @@ -134,7 +149,7 @@ impl Metrics { let g = GaugeVec::new( Opts::new( "sync_extra_justifications", - "Number of extra justifications requests" + "Number of extra justifications requests", ), &["status"], )?; @@ -191,10 +206,7 @@ enum PeerRequest { struct Peer { info: PeerInfo, /// Current request, if any. Started by emitting [`CustomMessageOutcome::BlockRequest`]. - request: Option<( - PeerRequest, - oneshot::Receiver, RequestFailure>>, - )>, + request: Option<(PeerRequest, oneshot::Receiver, RequestFailure>>)>, /// Holds a set of blocks known to this peer. 
known_blocks: LruHashSet, } @@ -228,13 +240,8 @@ impl ProtocolConfig { } else { match self.sync_mode { config::SyncMode::Full => sync::SyncMode::Full, - config::SyncMode::Fast { - skip_proofs, - storage_chain_mode, - } => sync::SyncMode::LightState { - skip_proofs, - storage_chain_mode - }, + config::SyncMode::Fast { skip_proofs, storage_chain_mode } => + sync::SyncMode::LightState { skip_proofs, storage_chain_mode }, } } } @@ -296,7 +303,8 @@ impl Protocol { chain.clone(), block_announce_validator, config.max_parallel_downloads, - ).map_err(Box::new)?; + ) + .map_err(Box::new)?; let boot_node_ids = { let mut list = HashSet::new(); @@ -312,7 +320,11 @@ impl Protocol { for reserved in &network_config.default_peers_set.reserved_nodes { imp_p.insert(reserved.peer_id.clone()); } - for reserved in network_config.extra_sets.iter().flat_map(|s| s.set_config.reserved_nodes.iter()) { + for reserved in network_config + .extra_sets + .iter() + .flat_map(|s| s.set_config.reserved_nodes.iter()) + { imp_p.insert(reserved.peer_id.clone()); } imp_p.shrink_to_fit(); @@ -322,7 +334,8 @@ impl Protocol { let mut known_addresses = Vec::new(); let (peerset, peerset_handle) = { - let mut sets = Vec::with_capacity(NUM_HARDCODED_PEERSETS + network_config.extra_sets.len()); + let mut sets = + Vec::with_capacity(NUM_HARDCODED_PEERSETS + network_config.extra_sets.len()); let mut default_sets_reserved = HashSet::new(); for reserved in network_config.default_peers_set.reserved_nodes.iter() { @@ -342,8 +355,8 @@ impl Protocol { out_peers: network_config.default_peers_set.out_peers, bootnodes, reserved_nodes: default_sets_reserved.clone(), - reserved_only: network_config.default_peers_set.non_reserved_mode - == config::NonReservedPeerMode::Deny, + reserved_only: network_config.default_peers_set.non_reserved_mode == + config::NonReservedPeerMode::Deny, }); for set_cfg in &network_config.extra_sets { @@ -365,9 +378,7 @@ impl Protocol { }); } - 
sc_peerset::Peerset::from_config(sc_peerset::PeersetConfig { - sets, - }) + sc_peerset::Peerset::from_config(sc_peerset::PeersetConfig { sets }) }; let block_announces_protocol: Cow<'static, str> = Cow::from({ @@ -383,12 +394,9 @@ impl Protocol { let best_hash = info.best_hash; let genesis_hash = info.genesis_hash; - let block_announces_handshake = BlockAnnouncesHandshake::::build( - &config, - best_number, - best_hash, - genesis_hash, - ).encode(); + let block_announces_handshake = + BlockAnnouncesHandshake::::build(&config, best_number, best_hash, genesis_hash) + .encode(); let sync_protocol_config = notifications::ProtocolConfig { name: block_announces_protocol, @@ -399,22 +407,22 @@ impl Protocol { Notifications::new( peerset, - iter::once(sync_protocol_config) - .chain(network_config.extra_sets.iter() - .zip(notifications_protocols_handshakes) - .map(|(s, hs)| notifications::ProtocolConfig { + iter::once(sync_protocol_config).chain( + network_config.extra_sets.iter().zip(notifications_protocols_handshakes).map( + |(s, hs)| notifications::ProtocolConfig { name: s.notifications_protocol.clone(), fallback_names: s.fallback_names.clone(), handshake: hs, max_notification_size: s.max_notification_size, - }) + }, ), + ), ) }; let block_announce_data_cache = lru::LruCache::new( - network_config.default_peers_set.in_peers as usize - + network_config.default_peers_set.out_peers as usize, + network_config.default_peers_set.in_peers as usize + + network_config.default_peers_set.out_peers as usize, ); let protocol = Protocol { @@ -428,8 +436,11 @@ impl Protocol { important_peers, peerset_handle: peerset_handle.clone(), behaviour, - notification_protocols: - network_config.extra_sets.iter().map(|s| s.notifications_protocol.clone()).collect(), + notification_protocols: network_config + .extra_sets + .iter() + .map(|s| s.notifications_protocol.clone()) + .collect(), bad_handshake_substreams: Default::default(), metrics: if let Some(r) = metrics_registry { 
Some(Metrics::register(r)?) @@ -461,8 +472,12 @@ impl Protocol { /// Disconnects the given peer if we are connected to it. pub fn disconnect_peer(&mut self, peer_id: &PeerId, protocol_name: &str) { - if let Some(position) = self.notification_protocols.iter().position(|p| *p == protocol_name) { - self.behaviour.disconnect_peer(peer_id, sc_peerset::SetId::from(position + NUM_HARDCODED_PEERSETS)); + if let Some(position) = self.notification_protocols.iter().position(|p| *p == protocol_name) + { + self.behaviour.disconnect_peer( + peer_id, + sc_peerset::SetId::from(position + NUM_HARDCODED_PEERSETS), + ); } else { log::warn!(target: "sub-libp2p", "disconnect_peer() with invalid protocol name") } @@ -480,10 +495,7 @@ impl Protocol { /// Returns the number of peers we're connected to and that are being queried. pub fn num_active_peers(&self) -> usize { - self.peers - .values() - .filter(|p| p.request.is_some()) - .count() + self.peers.values().filter(|p| p.request.is_some()).count() } /// Current global sync state. 
@@ -524,12 +536,8 @@ impl Protocol { self.behaviour.set_notif_protocol_handshake( HARDCODED_PEERSETS_SYNC, - BlockAnnouncesHandshake::::build( - &self.config, - number, - hash, - self.genesis_hash, - ).encode() + BlockAnnouncesHandshake::::build(&self.config, number, hash, self.genesis_hash) + .encode(), ); } @@ -566,8 +574,11 @@ impl Protocol { } if let Some(_peer_data) = self.peers.remove(&peer) { - if let Some(sync::OnBlockData::Import(origin, blocks)) = self.sync.peer_disconnected(&peer) { - self.pending_messages.push_back(CustomMessageOutcome::BlockImport(origin, blocks)); + if let Some(sync::OnBlockData::Import(origin, blocks)) = + self.sync.peer_disconnected(&peer) + { + self.pending_messages + .push_back(CustomMessageOutcome::BlockImport(origin, blocks)); } Ok(()) } else { @@ -588,67 +599,76 @@ impl Protocol { request: message::BlockRequest, response: crate::schema::v1::BlockResponse, ) -> CustomMessageOutcome { - let blocks = response.blocks.into_iter().map(|block_data| { - Ok(message::BlockData:: { - hash: Decode::decode(&mut block_data.hash.as_ref())?, - header: if !block_data.header.is_empty() { - Some(Decode::decode(&mut block_data.header.as_ref())?) - } else { - None - }, - body: if request.fields.contains(message::BlockAttributes::BODY) { - Some(block_data.body.iter().map(|body| { - Decode::decode(&mut body.as_ref()) - }).collect::, _>>()?) 
- } else { - None - }, - indexed_body: if request.fields.contains(message::BlockAttributes::INDEXED_BODY) { - Some(block_data.indexed_body) - } else { - None - }, - receipt: if !block_data.message_queue.is_empty() { - Some(block_data.receipt) - } else { - None - }, - message_queue: if !block_data.message_queue.is_empty() { - Some(block_data.message_queue) - } else { - None - }, - justification: if !block_data.justification.is_empty() { - Some(block_data.justification) - } else if block_data.is_empty_justification { - Some(Vec::new()) - } else { - None - }, - justifications: if !block_data.justifications.is_empty() { - Some(DecodeAll::decode_all(&mut block_data.justifications.as_ref())?) - } else { - None - }, + let blocks = response + .blocks + .into_iter() + .map(|block_data| { + Ok(message::BlockData:: { + hash: Decode::decode(&mut block_data.hash.as_ref())?, + header: if !block_data.header.is_empty() { + Some(Decode::decode(&mut block_data.header.as_ref())?) + } else { + None + }, + body: if request.fields.contains(message::BlockAttributes::BODY) { + Some( + block_data + .body + .iter() + .map(|body| Decode::decode(&mut body.as_ref())) + .collect::, _>>()?, + ) + } else { + None + }, + indexed_body: if request.fields.contains(message::BlockAttributes::INDEXED_BODY) + { + Some(block_data.indexed_body) + } else { + None + }, + receipt: if !block_data.message_queue.is_empty() { + Some(block_data.receipt) + } else { + None + }, + message_queue: if !block_data.message_queue.is_empty() { + Some(block_data.message_queue) + } else { + None + }, + justification: if !block_data.justification.is_empty() { + Some(block_data.justification) + } else if block_data.is_empty_justification { + Some(Vec::new()) + } else { + None + }, + justifications: if !block_data.justifications.is_empty() { + Some(DecodeAll::decode_all(&mut block_data.justifications.as_ref())?) 
+ } else { + None + }, + }) }) - }).collect::, codec::Error>>(); + .collect::, codec::Error>>(); let blocks = match blocks { Ok(blocks) => blocks, Err(err) => { debug!(target: "sync", "Failed to decode block response from {}: {}", peer_id, err); self.peerset_handle.report_peer(peer_id, rep::BAD_MESSAGE); - return CustomMessageOutcome::None; - } + return CustomMessageOutcome::None + }, }; - let block_response = message::BlockResponse:: { - id: request.id, - blocks, - }; + let block_response = message::BlockResponse:: { id: request.id, blocks }; let blocks_range = || match ( - block_response.blocks.first().and_then(|b| b.header.as_ref().map(|h| h.number())), + block_response + .blocks + .first() + .and_then(|b| b.header.as_ref().map(|h| h.number())), block_response.blocks.last().and_then(|b| b.header.as_ref().map(|h| h.number())), ) { (Some(first), Some(last)) if first != last => format!(" ({}..{})", first, last), @@ -671,20 +691,18 @@ impl Protocol { self.behaviour.disconnect_peer(&id, HARDCODED_PEERSETS_SYNC); self.peerset_handle.report_peer(id, repu); CustomMessageOutcome::None - } + }, } } else { match self.sync.on_block_data(&peer_id, Some(request), block_response) { Ok(sync::OnBlockData::Import(origin, blocks)) => CustomMessageOutcome::BlockImport(origin, blocks), - Ok(sync::OnBlockData::Request(peer, req)) => { - self.prepare_block_request(peer, req) - } + Ok(sync::OnBlockData::Request(peer, req)) => self.prepare_block_request(peer, req), Err(sync::BadPeer(id, repu)) => { self.behaviour.disconnect_peer(&id, HARDCODED_PEERSETS_SYNC); self.peerset_handle.report_peer(id, repu); CustomMessageOutcome::None - } + }, } } } @@ -699,14 +717,13 @@ impl Protocol { match self.sync.on_state_data(&peer_id, response) { Ok(sync::OnStateData::Import(origin, block)) => CustomMessageOutcome::BlockImport(origin, vec![block]), - Ok(sync::OnStateData::Request(peer, req)) => { - prepare_state_request::(&mut self.peers, peer, req) - } + Ok(sync::OnStateData::Request(peer, req)) => + 
prepare_state_request::(&mut self.peers, peer, req), Err(sync::BadPeer(id, repu)) => { self.behaviour.disconnect_peer(&id, HARDCODED_PEERSETS_SYNC); self.peerset_handle.report_peer(id, repu); CustomMessageOutcome::None - } + }, } } @@ -732,7 +749,7 @@ impl Protocol { if self.peers.contains_key(&who) { log::error!(target: "sync", "Called on_sync_peer_connected with already connected peer {}", who); debug_assert!(false); - return Err(()); + return Err(()) } if status.genesis_hash != self.genesis_hash { @@ -755,7 +772,7 @@ impl Protocol { ); } - return Err(()); + return Err(()) } if self.config.roles.is_light() { @@ -764,14 +781,11 @@ impl Protocol { debug!(target: "sync", "Peer {} is unable to serve light requests", who); self.peerset_handle.report_peer(who.clone(), rep::BAD_ROLE); self.behaviour.disconnect_peer(&who, HARDCODED_PEERSETS_SYNC); - return Err(()); + return Err(()) } // we don't interested in peers that are far behind us - let self_best_block = self - .chain - .info() - .best_number; + let self_best_block = self.chain.info().best_number; let blocks_difference = self_best_block .checked_sub(&status.best_number) .unwrap_or_else(Zero::zero) @@ -780,7 +794,7 @@ impl Protocol { debug!(target: "sync", "Peer {} is far behind us and will unable to serve light requests", who); self.peerset_handle.report_peer(who.clone(), rep::PEER_BEHIND_US_LIGHT); self.behaviour.disconnect_peer(&who, HARDCODED_PEERSETS_SYNC); - return Err(()); + return Err(()) } } @@ -788,11 +802,12 @@ impl Protocol { info: PeerInfo { roles: status.roles, best_hash: status.best_hash, - best_number: status.best_number + best_number: status.best_number, }, request: None, - known_blocks: LruHashSet::new(NonZeroUsize::new(MAX_KNOWN_BLOCKS) - .expect("Constant is nonzero")), + known_blocks: LruHashSet::new( + NonZeroUsize::new(MAX_KNOWN_BLOCKS).expect("Constant is nonzero"), + ), }; let req = if peer.info.roles.is_full() { @@ -802,7 +817,7 @@ impl Protocol { self.behaviour.disconnect_peer(&id, 
HARDCODED_PEERSETS_SYNC); self.peerset_handle.report_peer(id, repu); return Err(()) - } + }, } } else { None @@ -811,7 +826,8 @@ impl Protocol { debug!(target: "sync", "Connected {}", who); self.peers.insert(who.clone(), peer); - self.pending_messages.push_back(CustomMessageOutcome::PeerNewBest(who.clone(), status.best_number)); + self.pending_messages + .push_back(CustomMessageOutcome::PeerNewBest(who.clone(), status.best_number)); if let Some(req) = req { let event = self.prepare_block_request(who.clone(), req); @@ -830,23 +846,25 @@ impl Protocol { Ok(Some(header)) => header, Ok(None) => { warn!("Trying to announce unknown block: {}", hash); - return; - } + return + }, Err(e) => { warn!("Error reading block header {}: {:?}", hash, e); - return; - } + return + }, }; // don't announce genesis block since it will be ignored if header.number().is_zero() { - return; + return } let is_best = self.chain.info().best_hash == hash; debug!(target: "sync", "Reannouncing block {:?} is_best: {}", hash, is_best); - let data = data.or_else(|| self.block_announce_data_cache.get(&hash).cloned()).unwrap_or_default(); + let data = data + .or_else(|| self.block_announce_data_cache.get(&hash).cloned()) + .unwrap_or_default(); for (who, ref mut peer) in self.peers.iter_mut() { let inserted = peer.known_blocks.insert(hash); @@ -862,11 +880,8 @@ impl Protocol { data: Some(data.clone()), }; - self.behaviour.write_notification( - who, - HARDCODED_PEERSETS_SYNC, - message.encode() - ); + self.behaviour + .write_notification(who, HARDCODED_PEERSETS_SYNC, message.encode()); } } } @@ -884,11 +899,7 @@ impl Protocol { /// in the task before being polled once. So, it is required to call /// [`ChainSync::poll_block_announce_validation`] to ensure that the future is /// registered properly and will wake up the task when being ready. 
- fn push_block_announce_validation( - &mut self, - who: PeerId, - announce: BlockAnnounce, - ) { + fn push_block_announce_validation(&mut self, who: PeerId, announce: BlockAnnounce) { let hash = announce.header.hash(); let peer = match self.peers.get_mut(&who) { @@ -896,8 +907,8 @@ impl Protocol { None => { log::error!(target: "sync", "Received block announce from disconnected peer {}", who); debug_assert!(false); - return; - } + return + }, }; peer.known_blocks.insert(hash.clone()); @@ -918,8 +929,7 @@ impl Protocol { validation_result: sync::PollBlockAnnounceValidation, ) -> CustomMessageOutcome { let (header, is_best, who) = match validation_result { - sync::PollBlockAnnounceValidation::Skip => - return CustomMessageOutcome::None, + sync::PollBlockAnnounceValidation::Skip => return CustomMessageOutcome::None, sync::PollBlockAnnounceValidation::Nothing { is_best, who, announce } => { self.update_peer_info(&who); @@ -940,7 +950,7 @@ impl Protocol { } else { return CustomMessageOutcome::None } - } + }, sync::PollBlockAnnounceValidation::ImportHeader { announce, is_best, who } => { self.update_peer_info(&who); @@ -951,7 +961,7 @@ impl Protocol { } (announce.header, is_best, who) - } + }, sync::PollBlockAnnounceValidation::Failure { who, disconnect } => { if disconnect { self.behaviour.disconnect_peer(&who, HARDCODED_PEERSETS_SYNC); @@ -959,7 +969,7 @@ impl Protocol { self.report_peer(who, rep::BAD_BLOCK_ANNOUNCEMENT); return CustomMessageOutcome::None - } + }, }; let number = *header.number(); @@ -971,39 +981,32 @@ impl Protocol { None, message::generic::BlockResponse { id: 0, - blocks: vec![ - message::generic::BlockData { - hash: header.hash(), - header: Some(header), - body: None, - indexed_body: None, - receipt: None, - message_queue: None, - justification: None, - justifications: None, - }, - ], + blocks: vec![message::generic::BlockData { + hash: header.hash(), + header: Some(header), + body: None, + indexed_body: None, + receipt: None, + message_queue: None, 
+ justification: None, + justifications: None, + }], }, ); if is_best { - self.pending_messages.push_back( - CustomMessageOutcome::PeerNewBest(who, number), - ); + self.pending_messages.push_back(CustomMessageOutcome::PeerNewBest(who, number)); } match blocks_to_import { - Ok(sync::OnBlockData::Import(origin, blocks)) => { - CustomMessageOutcome::BlockImport(origin, blocks) - }, - Ok(sync::OnBlockData::Request(peer, req)) => { - self.prepare_block_request(peer, req) - } + Ok(sync::OnBlockData::Import(origin, blocks)) => + CustomMessageOutcome::BlockImport(origin, blocks), + Ok(sync::OnBlockData::Request(peer, req)) => self.prepare_block_request(peer, req), Err(sync::BadPeer(id, repu)) => { self.behaviour.disconnect_peer(&id, HARDCODED_PEERSETS_SYNC); self.peerset_handle.report_peer(id, repu); CustomMessageOutcome::None - } + }, } } @@ -1029,7 +1032,12 @@ impl Protocol { /// Request syncing for the given block from given set of peers. /// Uses `protocol` to queue a new block download request and tries to dispatch all pending /// requests. 
- pub fn set_sync_fork_request(&mut self, peers: Vec, hash: &B::Hash, number: NumberFor) { + pub fn set_sync_fork_request( + &mut self, + peers: Vec, + hash: &B::Hash, + number: NumberFor, + ) { self.sync.set_sync_fork_request(peers, hash, number) } @@ -1040,39 +1048,41 @@ impl Protocol { &mut self, imported: usize, count: usize, - results: Vec<(Result>, BlockImportError>, B::Hash)> + results: Vec<(Result>, BlockImportError>, B::Hash)>, ) { - let results = self.sync.on_blocks_processed( - imported, - count, - results, - ); + let results = self.sync.on_blocks_processed(imported, count, results); for result in results { match result { Ok((id, req)) => { - self.pending_messages.push_back( - prepare_block_request(&mut self.peers, id, req) - ); - } + self.pending_messages.push_back(prepare_block_request( + &mut self.peers, + id, + req, + )); + }, Err(sync::BadPeer(id, repu)) => { self.behaviour.disconnect_peer(&id, HARDCODED_PEERSETS_SYNC); self.peerset_handle.report_peer(id, repu) - } + }, } } } /// Call this when a justification has been processed by the import queue, with or without /// errors. - pub fn justification_import_result(&mut self, who: PeerId, hash: B::Hash, number: NumberFor, success: bool) { + pub fn justification_import_result( + &mut self, + who: PeerId, + hash: B::Hash, + number: NumberFor, + success: bool, + ) { self.sync.on_justification_import(hash, number, success); if !success { log::info!("💔 Invalid justification provided by {} for #{}", who, hash); self.behaviour.disconnect_peer(&who, HARDCODED_PEERSETS_SYNC); - self.peerset_handle.report_peer( - who, - sc_peerset::ReputationChange::new_fatal("Invalid justification") - ); + self.peerset_handle + .report_peer(who, sc_peerset::ReputationChange::new_fatal("Invalid justification")); } } @@ -1104,7 +1114,10 @@ impl Protocol { /// Removes a `PeerId` from the list of reserved peers. 
pub fn remove_set_reserved_peer(&self, protocol: Cow<'static, str>, peer: PeerId) { if let Some(index) = self.notification_protocols.iter().position(|p| *p == protocol) { - self.peerset_handle.remove_reserved_peer(sc_peerset::SetId::from(index + NUM_HARDCODED_PEERSETS), peer); + self.peerset_handle.remove_reserved_peer( + sc_peerset::SetId::from(index + NUM_HARDCODED_PEERSETS), + peer, + ); } else { log::error!( target: "sub-libp2p", @@ -1117,7 +1130,8 @@ impl Protocol { /// Adds a `PeerId` to the list of reserved peers. pub fn add_set_reserved_peer(&self, protocol: Cow<'static, str>, peer: PeerId) { if let Some(index) = self.notification_protocols.iter().position(|p| *p == protocol) { - self.peerset_handle.add_reserved_peer(sc_peerset::SetId::from(index + NUM_HARDCODED_PEERSETS), peer); + self.peerset_handle + .add_reserved_peer(sc_peerset::SetId::from(index + NUM_HARDCODED_PEERSETS), peer); } else { log::error!( target: "sub-libp2p", @@ -1139,7 +1153,8 @@ impl Protocol { /// Add a peer to a peers set. pub fn add_to_peers_set(&self, protocol: Cow<'static, str>, peer: PeerId) { if let Some(index) = self.notification_protocols.iter().position(|p| *p == protocol) { - self.peerset_handle.add_to_peers_set(sc_peerset::SetId::from(index + NUM_HARDCODED_PEERSETS), peer); + self.peerset_handle + .add_to_peers_set(sc_peerset::SetId::from(index + NUM_HARDCODED_PEERSETS), peer); } else { log::error!( target: "sub-libp2p", @@ -1152,7 +1167,10 @@ impl Protocol { /// Remove a peer from a peers set. 
pub fn remove_from_peers_set(&self, protocol: Cow<'static, str>, peer: PeerId) { if let Some(index) = self.notification_protocols.iter().position(|p| *p == protocol) { - self.peerset_handle.remove_from_peers_set(sc_peerset::SetId::from(index + NUM_HARDCODED_PEERSETS), peer); + self.peerset_handle.remove_from_peers_set( + sc_peerset::SetId::from(index + NUM_HARDCODED_PEERSETS), + peer, + ); } else { log::error!( target: "sub-libp2p", @@ -1172,13 +1190,21 @@ impl Protocol { metrics.fork_targets.set(m.fork_targets.into()); metrics.queued_blocks.set(m.queued_blocks.into()); - metrics.justifications.with_label_values(&["pending"]) + metrics + .justifications + .with_label_values(&["pending"]) .set(m.justifications.pending_requests.into()); - metrics.justifications.with_label_values(&["active"]) + metrics + .justifications + .with_label_values(&["active"]) .set(m.justifications.active_requests.into()); - metrics.justifications.with_label_values(&["failed"]) + metrics + .justifications + .with_label_values(&["failed"]) .set(m.justifications.failed_requests.into()); - metrics.justifications.with_label_values(&["importing"]) + metrics + .justifications + .with_label_values(&["importing"]) .set(m.justifications.importing_requests.into()); } } @@ -1209,11 +1235,7 @@ fn prepare_block_request( support_multiple_justifications: true, }; - CustomMessageOutcome::BlockRequest { - target: who, - request: request, - pending_response: tx, - } + CustomMessageOutcome::BlockRequest { target: who, request, pending_response: tx } } fn prepare_state_request( @@ -1226,11 +1248,7 @@ fn prepare_state_request( if let Some(ref mut peer) = peers.get_mut(&who) { peer.request = Some((PeerRequest::State, rx)); } - CustomMessageOutcome::StateRequest { - target: who, - request: request, - pending_response: tx, - } + CustomMessageOutcome::StateRequest { target: who, request, pending_response: tx } } /// Outcome of an incoming custom message. 
@@ -1246,7 +1264,7 @@ pub enum CustomMessageOutcome { /// See [`crate::Event::NotificationStreamOpened::negotiated_fallback`]. negotiated_fallback: Option>, roles: Roles, - notifications_sink: NotificationsSink + notifications_sink: NotificationsSink, }, /// The [`NotificationsSink`] of some notification protocols need an update. NotificationStreamReplaced { @@ -1255,9 +1273,15 @@ pub enum CustomMessageOutcome { notifications_sink: NotificationsSink, }, /// Notification protocols have been closed with a remote. - NotificationStreamClosed { remote: PeerId, protocol: Cow<'static, str> }, + NotificationStreamClosed { + remote: PeerId, + protocol: Cow<'static, str>, + }, /// Messages have been received on one or more notifications protocols. - NotificationsReceived { remote: PeerId, messages: Vec<(Cow<'static, str>, Bytes)> }, + NotificationsReceived { + remote: PeerId, + messages: Vec<(Cow<'static, str>, Bytes)>, + }, /// A new block request must be emitted. BlockRequest { target: PeerId, @@ -1291,11 +1315,21 @@ impl NetworkBehaviour for Protocol { self.behaviour.addresses_of_peer(peer_id) } - fn inject_connection_established(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn inject_connection_established( + &mut self, + peer_id: &PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { self.behaviour.inject_connection_established(peer_id, conn, endpoint) } - fn inject_connection_closed(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn inject_connection_closed( + &mut self, + peer_id: &PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { self.behaviour.inject_connection_closed(peer_id, conn, endpoint) } @@ -1325,9 +1359,9 @@ impl NetworkBehaviour for Protocol { <::Handler as ProtocolsHandler>::InEvent, Self::OutEvent > - > { + >{ if let Some(message) = self.pending_messages.pop_front() { - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(message)); + return 
Poll::Ready(NetworkBehaviourAction::GenerateEvent(message)) } // Check for finished outgoing requests. @@ -1340,38 +1374,44 @@ impl NetworkBehaviour for Protocol { let (req, _) = peer.request.take().unwrap(); match req { PeerRequest::Block(req) => { - let protobuf_response = match crate::schema::v1::BlockResponse::decode(&resp[..]) { - Ok(proto) => proto, - Err(e) => { - debug!( - target: "sync", - "Failed to decode block response from peer {:?}: {:?}.", - id, - e - ); - self.peerset_handle.report_peer(id.clone(), rep::BAD_MESSAGE); - self.behaviour.disconnect_peer(id, HARDCODED_PEERSETS_SYNC); - continue; - } - }; + let protobuf_response = + match crate::schema::v1::BlockResponse::decode(&resp[..]) { + Ok(proto) => proto, + Err(e) => { + debug!( + target: "sync", + "Failed to decode block response from peer {:?}: {:?}.", + id, + e + ); + self.peerset_handle + .report_peer(id.clone(), rep::BAD_MESSAGE); + self.behaviour + .disconnect_peer(id, HARDCODED_PEERSETS_SYNC); + continue + }, + }; finished_block_requests.push((id.clone(), req, protobuf_response)); }, PeerRequest::State => { - let protobuf_response = match crate::schema::v1::StateResponse::decode(&resp[..]) { - Ok(proto) => proto, - Err(e) => { - debug!( - target: "sync", - "Failed to decode state response from peer {:?}: {:?}.", - id, - e - ); - self.peerset_handle.report_peer(id.clone(), rep::BAD_MESSAGE); - self.behaviour.disconnect_peer(id, HARDCODED_PEERSETS_SYNC); - continue; - } - }; + let protobuf_response = + match crate::schema::v1::StateResponse::decode(&resp[..]) { + Ok(proto) => proto, + Err(e) => { + debug!( + target: "sync", + "Failed to decode state response from peer {:?}: {:?}.", + id, + e + ); + self.peerset_handle + .report_peer(id.clone(), rep::BAD_MESSAGE); + self.behaviour + .disconnect_peer(id, HARDCODED_PEERSETS_SYNC); + continue + }, + }; finished_state_requests.push((id.clone(), protobuf_response)); }, @@ -1385,32 +1425,35 @@ impl NetworkBehaviour for Protocol { 
RequestFailure::Network(OutboundFailure::Timeout) => { self.peerset_handle.report_peer(id.clone(), rep::TIMEOUT); self.behaviour.disconnect_peer(id, HARDCODED_PEERSETS_SYNC); - } + }, RequestFailure::Network(OutboundFailure::UnsupportedProtocols) => { self.peerset_handle.report_peer(id.clone(), rep::BAD_PROTOCOL); self.behaviour.disconnect_peer(id, HARDCODED_PEERSETS_SYNC); - } + }, RequestFailure::Network(OutboundFailure::DialFailure) => { self.behaviour.disconnect_peer(id, HARDCODED_PEERSETS_SYNC); - } + }, RequestFailure::Refused => { self.peerset_handle.report_peer(id.clone(), rep::REFUSED); self.behaviour.disconnect_peer(id, HARDCODED_PEERSETS_SYNC); - } - RequestFailure::Network(OutboundFailure::ConnectionClosed) - | RequestFailure::NotConnected => { + }, + RequestFailure::Network(OutboundFailure::ConnectionClosed) | + RequestFailure::NotConnected => { self.behaviour.disconnect_peer(id, HARDCODED_PEERSETS_SYNC); }, RequestFailure::UnknownProtocol => { - debug_assert!(false, "Block request protocol should always be known."); - } + debug_assert!( + false, + "Block request protocol should always be known." 
+ ); + }, RequestFailure::Obsolete => { debug_assert!( false, "Can not receive `RequestFailure::Obsolete` after dropping the \ response receiver.", ); - } + }, } }, Poll::Ready(Err(oneshot::Canceled)) => { @@ -1461,7 +1504,7 @@ impl NetworkBehaviour for Protocol { } if let Some(message) = self.pending_messages.pop_front() { - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(message)); + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(message)) } let event = match self.behaviour.poll(cx, params) { @@ -1472,14 +1515,22 @@ impl NetworkBehaviour for Protocol { Poll::Ready(NetworkBehaviourAction::DialPeer { peer_id, condition }) => return Poll::Ready(NetworkBehaviourAction::DialPeer { peer_id, condition }), Poll::Ready(NetworkBehaviourAction::NotifyHandler { peer_id, handler, event }) => - return Poll::Ready(NetworkBehaviourAction::NotifyHandler { peer_id, handler, event }), + return Poll::Ready(NetworkBehaviourAction::NotifyHandler { + peer_id, + handler, + event, + }), Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { address, score }) => return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { address, score }), }; let outcome = match event { NotificationsOut::CustomProtocolOpen { - peer_id, set_id, received_handshake, notifications_sink, negotiated_fallback + peer_id, + set_id, + received_handshake, + notifications_sink, + negotiated_fallback, } => { // Set number 0 is hardcoded the default set of peers we sync from. 
if set_id == HARDCODED_PEERSETS_SYNC { @@ -1512,16 +1563,21 @@ impl NetworkBehaviour for Protocol { ); self.peerset_handle.report_peer(peer_id, rep::BAD_MESSAGE); CustomMessageOutcome::None - } + }, Err(err) => { - match as DecodeAll>::decode_all(&mut &received_handshake[..]) { + match as DecodeAll>::decode_all( + &mut &received_handshake[..], + ) { Ok(handshake) => { - if self.on_sync_peer_connected(peer_id.clone(), handshake).is_ok() { + if self + .on_sync_peer_connected(peer_id.clone(), handshake) + .is_ok() + { CustomMessageOutcome::SyncConnected(peer_id) } else { CustomMessageOutcome::None } - } + }, Err(err2) => { debug!( target: "sync", @@ -1533,21 +1589,24 @@ impl NetworkBehaviour for Protocol { ); self.peerset_handle.report_peer(peer_id, rep::BAD_MESSAGE); CustomMessageOutcome::None - } + }, } - } + }, } - } else { - match (message::Roles::decode_all(&received_handshake[..]), self.peers.get(&peer_id)) { - (Ok(roles), _) => - CustomMessageOutcome::NotificationStreamOpened { - remote: peer_id, - protocol: self.notification_protocols[usize::from(set_id) - NUM_HARDCODED_PEERSETS].clone(), - negotiated_fallback, - roles, - notifications_sink, - }, + match ( + message::Roles::decode_all(&received_handshake[..]), + self.peers.get(&peer_id), + ) { + (Ok(roles), _) => CustomMessageOutcome::NotificationStreamOpened { + remote: peer_id, + protocol: self.notification_protocols + [usize::from(set_id) - NUM_HARDCODED_PEERSETS] + .clone(), + negotiated_fallback, + roles, + notifications_sink, + }, (Err(_), Some(peer)) if received_handshake.is_empty() => { // As a convenience, we allow opening substreams for "external" // notification protocols with an empty handshake. 
This fetches the @@ -1555,7 +1614,9 @@ impl NetworkBehaviour for Protocol { // TODO: remove this after https://github.com/paritytech/substrate/issues/5685 CustomMessageOutcome::NotificationStreamOpened { remote: peer_id, - protocol: self.notification_protocols[usize::from(set_id) - NUM_HARDCODED_PEERSETS].clone(), + protocol: self.notification_protocols + [usize::from(set_id) - NUM_HARDCODED_PEERSETS] + .clone(), negotiated_fallback, roles: peer.info.roles, notifications_sink, @@ -1567,11 +1628,11 @@ impl NetworkBehaviour for Protocol { self.behaviour.disconnect_peer(&peer_id, set_id); self.peerset_handle.report_peer(peer_id, rep::BAD_MESSAGE); CustomMessageOutcome::None - } + }, } } - } - NotificationsOut::CustomProtocolReplaced { peer_id, notifications_sink, set_id } => { + }, + NotificationsOut::CustomProtocolReplaced { peer_id, notifications_sink, set_id } => if set_id == HARDCODED_PEERSETS_SYNC { CustomMessageOutcome::None } else if self.bad_handshake_substreams.contains(&(peer_id.clone(), set_id)) { @@ -1579,11 +1640,12 @@ impl NetworkBehaviour for Protocol { } else { CustomMessageOutcome::NotificationStreamReplaced { remote: peer_id, - protocol: self.notification_protocols[usize::from(set_id) - NUM_HARDCODED_PEERSETS].clone(), + protocol: self.notification_protocols + [usize::from(set_id) - NUM_HARDCODED_PEERSETS] + .clone(), notifications_sink, } - } - }, + }, NotificationsOut::CustomProtocolClosed { peer_id, set_id } => { // Set number 0 is hardcoded the default set of peers we sync from. 
if set_id == HARDCODED_PEERSETS_SYNC { @@ -1605,55 +1667,57 @@ impl NetworkBehaviour for Protocol { } else { CustomMessageOutcome::NotificationStreamClosed { remote: peer_id, - protocol: self.notification_protocols[usize::from(set_id) - NUM_HARDCODED_PEERSETS].clone(), + protocol: self.notification_protocols + [usize::from(set_id) - NUM_HARDCODED_PEERSETS] + .clone(), } } }, - NotificationsOut::Notification { peer_id, set_id, message } => - match set_id { - HARDCODED_PEERSETS_SYNC if self.peers.contains_key(&peer_id) => { - if let Ok(announce) = message::BlockAnnounce::decode(&mut message.as_ref()) { - self.push_block_announce_validation(peer_id, announce); - - // Make sure that the newly added block announce validation future was - // polled once to be registered in the task. - if let Poll::Ready(res) = self.sync.poll_block_announce_validation(cx) { - self.process_block_announce_validation_result(res) - } else { - CustomMessageOutcome::None - } + NotificationsOut::Notification { peer_id, set_id, message } => match set_id { + HARDCODED_PEERSETS_SYNC if self.peers.contains_key(&peer_id) => { + if let Ok(announce) = message::BlockAnnounce::decode(&mut message.as_ref()) { + self.push_block_announce_validation(peer_id, announce); + + // Make sure that the newly added block announce validation future was + // polled once to be registered in the task. 
+ if let Poll::Ready(res) = self.sync.poll_block_announce_validation(cx) { + self.process_block_announce_validation_result(res) } else { - warn!(target: "sub-libp2p", "Failed to decode block announce"); CustomMessageOutcome::None } - } - HARDCODED_PEERSETS_SYNC => { - trace!( - target: "sync", - "Received sync for peer earlier refused by sync layer: {}", - peer_id - ); - CustomMessageOutcome::None - } - _ if self.bad_handshake_substreams.contains(&(peer_id.clone(), set_id)) => { + } else { + warn!(target: "sub-libp2p", "Failed to decode block announce"); CustomMessageOutcome::None } - _ => { - let protocol_name = self.notification_protocols[usize::from(set_id) - NUM_HARDCODED_PEERSETS].clone(); - CustomMessageOutcome::NotificationsReceived { - remote: peer_id, - messages: vec![(protocol_name, message.freeze())], - } + }, + HARDCODED_PEERSETS_SYNC => { + trace!( + target: "sync", + "Received sync for peer earlier refused by sync layer: {}", + peer_id + ); + CustomMessageOutcome::None + }, + _ if self.bad_handshake_substreams.contains(&(peer_id.clone(), set_id)) => + CustomMessageOutcome::None, + _ => { + let protocol_name = self.notification_protocols + [usize::from(set_id) - NUM_HARDCODED_PEERSETS] + .clone(); + CustomMessageOutcome::NotificationsReceived { + remote: peer_id, + messages: vec![(protocol_name, message.freeze())], } - } + }, + }, }; if !matches!(outcome, CustomMessageOutcome::::None) { - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(outcome)); + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(outcome)) } if let Some(message) = self.pending_messages.pop_front() { - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(message)); + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(message)) } // This block can only be reached if an event was pulled from the behaviour and that @@ -1667,7 +1731,7 @@ impl NetworkBehaviour for Protocol { &mut self, peer_id: Option<&PeerId>, addr: &Multiaddr, - error: &dyn std::error::Error + 
error: &dyn std::error::Error, ) { self.behaviour.inject_addr_reach_failure(peer_id, addr, error) } diff --git a/client/network/src/protocol/event.rs b/client/network/src/protocol/event.rs index c13980b3f4302..df56f426ad1fe 100644 --- a/client/network/src/protocol/event.rs +++ b/client/network/src/protocol/event.rs @@ -20,8 +20,7 @@ //! events that happen on the network like DHT get/put results received. use bytes::Bytes; -use libp2p::core::PeerId; -use libp2p::kad::record::Key; +use libp2p::{core::PeerId, kad::record::Key}; use std::borrow::Cow; /// Events generated by DHT as a response to get_value and put_value requests. diff --git a/client/network/src/protocol/message.rs b/client/network/src/protocol/message.rs index 50d0fd7969021..95f5ffa3a545a 100644 --- a/client/network/src/protocol/message.rs +++ b/client/network/src/protocol/message.rs @@ -18,16 +18,17 @@ //! Network packet message types. These get serialized and put into the lower level protocol payload. -use bitflags::bitflags; -use sp_runtime::{ConsensusEngineId, traits::{Block as BlockT, Header as HeaderT}}; -use codec::{Encode, Decode, Input, Output, Error}; pub use self::generic::{ - BlockAnnounce, RemoteCallRequest, RemoteReadRequest, - RemoteHeaderRequest, RemoteHeaderResponse, - RemoteChangesRequest, RemoteChangesResponse, - FromBlock, RemoteReadChildRequest, Roles, + BlockAnnounce, FromBlock, RemoteCallRequest, RemoteChangesRequest, RemoteChangesResponse, + RemoteHeaderRequest, RemoteHeaderResponse, RemoteReadChildRequest, RemoteReadRequest, Roles, }; +use bitflags::bitflags; +use codec::{Decode, Encode, Error, Input, Output}; use sc_client_api::StorageProof; +use sp_runtime::{ + traits::{Block as BlockT, Header as HeaderT}, + ConsensusEngineId, +}; /// A unique ID of a request. pub type RequestId = u64; @@ -41,24 +42,16 @@ pub type Message = generic::Message< >; /// Type alias for using the block request type using block type parameters. 
-pub type BlockRequest = generic::BlockRequest< - ::Hash, - <::Header as HeaderT>::Number, ->; +pub type BlockRequest = + generic::BlockRequest<::Hash, <::Header as HeaderT>::Number>; /// Type alias for using the BlockData type using block type parameters. -pub type BlockData = generic::BlockData< - ::Header, - ::Hash, - ::Extrinsic, ->; +pub type BlockData = + generic::BlockData<::Header, ::Hash, ::Extrinsic>; /// Type alias for using the BlockResponse type using block type parameters. -pub type BlockResponse = generic::BlockResponse< - ::Header, - ::Hash, - ::Extrinsic, ->; +pub type BlockResponse = + generic::BlockResponse<::Header, ::Hash, ::Extrinsic>; /// A set of transactions. pub type Transactions = Vec; @@ -168,14 +161,13 @@ impl generic::BlockAnnounce { /// Generic types. pub mod generic { - use bitflags::bitflags; - use codec::{Encode, Decode, Input, Output}; - use sp_runtime::{EncodedJustification, Justifications}; use super::{ - RemoteReadResponse, Transactions, Direction, - RequestId, BlockAttributes, RemoteCallResponse, ConsensusEngineId, - BlockState, StorageProof, + BlockAttributes, BlockState, ConsensusEngineId, Direction, RemoteCallResponse, + RemoteReadResponse, RequestId, StorageProof, Transactions, }; + use bitflags::bitflags; + use codec::{Decode, Encode, Input, Output}; + use sp_runtime::{EncodedJustification, Justifications}; bitflags! { /// Bitmask of the roles that a node fulfills. 
@@ -358,11 +350,12 @@ pub mod generic { let compact = CompactStatus::decode(value)?; let chain_status = match >::decode(value) { Ok(v) => v, - Err(e) => if compact.version <= LAST_CHAIN_STATUS_VERSION { - return Err(e) - } else { - Vec::new() - } + Err(e) => + if compact.version <= LAST_CHAIN_STATUS_VERSION { + return Err(e) + } else { + Vec::new() + }, }; let CompactStatus { @@ -443,11 +436,7 @@ pub mod generic { let header = H::decode(input)?; let state = BlockState::decode(input).ok(); let data = Vec::decode(input).ok(); - Ok(BlockAnnounce { - header, - state, - data, - }) + Ok(BlockAnnounce { header, state, data }) } } diff --git a/client/network/src/protocol/notifications.rs b/client/network/src/protocol/notifications.rs index 8739eb4948b77..e489970e987c6 100644 --- a/client/network/src/protocol/notifications.rs +++ b/client/network/src/protocol/notifications.rs @@ -19,10 +19,12 @@ //! Implementation of libp2p's `NetworkBehaviour` trait that establishes communications and opens //! notifications substreams. -pub use self::behaviour::{Notifications, NotificationsOut, ProtocolConfig}; -pub use self::handler::{NotifsHandlerError, NotificationsSink, Ready}; +pub use self::{ + behaviour::{Notifications, NotificationsOut, ProtocolConfig}, + handler::{NotificationsSink, NotifsHandlerError, Ready}, +}; mod behaviour; mod handler; -mod upgrade; mod tests; +mod upgrade; diff --git a/client/network/src/protocol/notifications/behaviour.rs b/client/network/src/protocol/notifications/behaviour.rs index f95f6870e5fae..3bf65560faa4f 100644 --- a/client/network/src/protocol/notifications/behaviour.rs +++ b/client/network/src/protocol/notifications/behaviour.rs @@ -16,28 +16,34 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::protocol::notifications::{ - handler::{self, NotificationsSink, NotifsHandlerProto, NotifsHandlerOut, NotifsHandlerIn} +use crate::protocol::notifications::handler::{ + self, NotificationsSink, NotifsHandlerIn, NotifsHandlerOut, NotifsHandlerProto, }; use bytes::BytesMut; use fnv::FnvHashMap; use futures::prelude::*; -use libp2p::core::{ConnectedPoint, Multiaddr, PeerId, connection::ConnectionId}; -use libp2p::swarm::{ - DialPeerCondition, - NetworkBehaviour, - NetworkBehaviourAction, - NotifyHandler, - PollParameters +use libp2p::{ + core::{connection::ConnectionId, ConnectedPoint, Multiaddr, PeerId}, + swarm::{ + DialPeerCondition, NetworkBehaviour, NetworkBehaviourAction, NotifyHandler, PollParameters, + }, }; use log::{error, trace, warn}; use parking_lot::RwLock; use rand::distributions::{Distribution as _, Uniform}; use smallvec::SmallVec; -use std::task::{Context, Poll}; -use std::{borrow::Cow, cmp, collections::{hash_map::Entry, VecDeque}}; -use std::{error, mem, pin::Pin, str, sync::Arc, time::Duration}; +use std::{ + borrow::Cow, + cmp, + collections::{hash_map::Entry, VecDeque}, + error, mem, + pin::Pin, + str, + sync::Arc, + task::{Context, Poll}, + time::Duration, +}; use wasm_timer::Instant; /// Network behaviour that handles opening substreams for custom protocols with other peers. @@ -111,7 +117,9 @@ pub struct Notifications { /// /// By design, we never remove elements from this list. Elements are removed only when the /// `Delay` triggers. As such, this stream may produce obsolete elements. - delays: stream::FuturesUnordered + Send>>>, + delays: stream::FuturesUnordered< + Pin + Send>>, + >, /// [`DelayId`] to assign to the next delay. 
next_delay_id: DelayId, @@ -401,7 +409,7 @@ impl Notifications { pub fn set_notif_protocol_handshake( &mut self, set_id: sc_peerset::SetId, - handshake_message: impl Into> + handshake_message: impl Into>, ) { if let Some(p) = self.notif_protocols.get_mut(usize::from(set_id)) { *p.handshake.write() = handshake_message.into(); @@ -438,9 +446,10 @@ impl Notifications { &mut self, peer_id: &PeerId, set_id: sc_peerset::SetId, - ban: Option + ban: Option, ) { - let mut entry = if let Entry::Occupied(entry) = self.peers.entry((peer_id.clone(), set_id)) { + let mut entry = if let Entry::Occupied(entry) = self.peers.entry((peer_id.clone(), set_id)) + { entry } else { return @@ -454,11 +463,7 @@ impl Notifications { st @ PeerState::Backoff { .. } => *entry.into_mut() = st, // DisabledPendingEnable => Disabled. - PeerState::DisabledPendingEnable { - connections, - timer_deadline, - timer: _ - } => { + PeerState::DisabledPendingEnable { connections, timer_deadline, timer: _ } => { trace!(target: "sub-libp2p", "PSM <= Dropped({}, {:?})", peer_id, set_id); self.peerset.dropped(set_id, peer_id.clone(), sc_peerset::DropReason::Unknown); let backoff_until = Some(if let Some(ban) = ban { @@ -466,10 +471,7 @@ impl Notifications { } else { timer_deadline }); - *entry.into_mut() = PeerState::Disabled { - connections, - backoff_until - } + *entry.into_mut() = PeerState::Disabled { connections, backoff_until } }, // Enabled => Disabled. 
@@ -481,15 +483,13 @@ impl Notifications { if connections.iter().any(|(_, s)| matches!(s, ConnectionState::Open(_))) { trace!(target: "sub-libp2p", "External API <= Closed({}, {:?})", peer_id, set_id); - let event = NotificationsOut::CustomProtocolClosed { - peer_id: peer_id.clone(), - set_id, - }; + let event = + NotificationsOut::CustomProtocolClosed { peer_id: peer_id.clone(), set_id }; self.events.push_back(NetworkBehaviourAction::GenerateEvent(event)); } - for (connec_id, connec_state) in connections.iter_mut() - .filter(|(_, s)| matches!(s, ConnectionState::Open(_))) + for (connec_id, connec_state) in + connections.iter_mut().filter(|(_, s)| matches!(s, ConnectionState::Open(_))) { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Close({:?})", peer_id, *connec_id, set_id); self.events.push_back(NetworkBehaviourAction::NotifyHandler { @@ -500,8 +500,8 @@ impl Notifications { *connec_state = ConnectionState::Closing; } - for (connec_id, connec_state) in connections.iter_mut() - .filter(|(_, s)| matches!(s, ConnectionState::Opening)) + for (connec_id, connec_state) in + connections.iter_mut().filter(|(_, s)| matches!(s, ConnectionState::Opening)) { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Close({:?})", peer_id, *connec_id, set_id); self.events.push_back(NetworkBehaviourAction::NotifyHandler { @@ -512,21 +512,25 @@ impl Notifications { *connec_state = ConnectionState::OpeningThenClosing; } - debug_assert!(!connections.iter().any(|(_, s)| matches!(s, ConnectionState::Open(_)))); - debug_assert!(!connections.iter().any(|(_, s)| matches!(s, ConnectionState::Opening))); + debug_assert!(!connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::Open(_)))); + debug_assert!(!connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::Opening))); let backoff_until = ban.map(|dur| Instant::now() + dur); - *entry.into_mut() = PeerState::Disabled { - connections, - backoff_until - } + *entry.into_mut() = PeerState::Disabled { connections, 
backoff_until } }, // Incoming => Disabled. // Ongoing opening requests from the remote are rejected. PeerState::Incoming { mut connections, backoff_until } => { - let inc = if let Some(inc) = self.incoming.iter_mut() - .find(|i| i.peer_id == entry.key().0 && i.set_id == set_id && i.alive) { + let inc = if let Some(inc) = self + .incoming + .iter_mut() + .find(|i| i.peer_id == entry.key().0 && i.set_id == set_id && i.alive) + { inc } else { error!(target: "sub-libp2p", "State mismatch in libp2p: no entry in \ @@ -536,7 +540,8 @@ impl Notifications { inc.alive = false; - for (connec_id, connec_state) in connections.iter_mut() + for (connec_id, connec_state) in connections + .iter_mut() .filter(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote)) { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Close({:?})", peer_id, *connec_id, set_id); @@ -555,11 +560,10 @@ impl Notifications { (None, None) => None, }; - debug_assert!(!connections.iter().any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); - *entry.into_mut() = PeerState::Disabled { - connections, - backoff_until - } + debug_assert!(!connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); + *entry.into_mut() = PeerState::Disabled { connections, backoff_until } }, PeerState::Poisoned => @@ -568,14 +572,21 @@ impl Notifications { } /// Returns the list of all the peers that the peerset currently requests us to be connected to. - pub fn requested_peers<'a>(&'a self, set_id: sc_peerset::SetId) -> impl Iterator + 'a { - self.peers.iter() + pub fn requested_peers<'a>( + &'a self, + set_id: sc_peerset::SetId, + ) -> impl Iterator + 'a { + self.peers + .iter() .filter(move |((_, set), state)| *set == set_id && state.is_requested()) .map(|((id, _), _)| id) } /// Returns the list of reserved peers. 
- pub fn reserved_peers<'a>(&'a self, set_id: sc_peerset::SetId) -> impl Iterator + 'a { + pub fn reserved_peers<'a>( + &'a self, + set_id: sc_peerset::SetId, + ) -> impl Iterator + 'a { self.peerset.reserved_peers(set_id) } @@ -595,14 +606,15 @@ impl Notifications { set_id: sc_peerset::SetId, message: impl Into>, ) { - let notifs_sink = match self.peers.get(&(target.clone(), set_id)).and_then(|p| p.get_open()) { + let notifs_sink = match self.peers.get(&(target.clone(), set_id)).and_then(|p| p.get_open()) + { None => { trace!(target: "sub-libp2p", "Tried to sent notification to {:?} without an open channel.", target); return }, - Some(sink) => sink + Some(sink) => sink, }; let message = message.into(); @@ -637,11 +649,11 @@ impl Notifications { // The `DialPeerCondition` ensures that dial attempts are de-duplicated self.events.push_back(NetworkBehaviourAction::DialPeer { peer_id: entry.key().0.clone(), - condition: DialPeerCondition::Disconnected + condition: DialPeerCondition::Disconnected, }); entry.insert(PeerState::Requested); - return; - } + return + }, }; let now = Instant::now(); @@ -652,10 +664,8 @@ impl Notifications { let peer_id = occ_entry.key().0.clone(); trace!(target: "sub-libp2p", "PSM => Connect({}, {:?}): Will start to connect at \ until {:?}", peer_id, set_id, timer_deadline); - *occ_entry.into_mut() = PeerState::PendingRequest { - timer: *timer, - timer_deadline: *timer_deadline, - }; + *occ_entry.into_mut() = + PeerState::PendingRequest { timer: *timer, timer_deadline: *timer_deadline }; }, // Backoff (expired) => Requested @@ -666,16 +676,15 @@ impl Notifications { // The `DialPeerCondition` ensures that dial attempts are de-duplicated self.events.push_back(NetworkBehaviourAction::DialPeer { peer_id: occ_entry.key().0.clone(), - condition: DialPeerCondition::Disconnected + condition: DialPeerCondition::Disconnected, }); *occ_entry.into_mut() = PeerState::Requested; }, // Disabled (with non-expired ban) => DisabledPendingEnable - 
PeerState::Disabled { - connections, - backoff_until: Some(ref backoff) - } if *backoff > now => { + PeerState::Disabled { connections, backoff_until: Some(ref backoff) } + if *backoff > now => + { let peer_id = occ_entry.key().0.clone(); trace!(target: "sub-libp2p", "PSM => Connect({}, {:?}): But peer is backed-off until {:?}", peer_id, set_id, backoff); @@ -683,27 +692,30 @@ impl Notifications { let delay_id = self.next_delay_id; self.next_delay_id.0 += 1; let delay = futures_timer::Delay::new(*backoff - now); - self.delays.push(async move { - delay.await; - (delay_id, peer_id, set_id) - }.boxed()); + self.delays.push( + async move { + delay.await; + (delay_id, peer_id, set_id) + } + .boxed(), + ); *occ_entry.into_mut() = PeerState::DisabledPendingEnable { connections, timer: delay_id, timer_deadline: *backoff, }; - }, + } // Disabled => Enabled PeerState::Disabled { mut connections, backoff_until } => { - debug_assert!(!connections.iter().any(|(_, s)| { - matches!(s, ConnectionState::Open(_)) - })); + debug_assert!(!connections + .iter() + .any(|(_, s)| { matches!(s, ConnectionState::Open(_)) })); // The first element of `closed` is chosen to open the notifications substream. 
- if let Some((connec_id, connec_state)) = connections.iter_mut() - .find(|(_, s)| matches!(s, ConnectionState::Closed)) + if let Some((connec_id, connec_state)) = + connections.iter_mut().find(|(_, s)| matches!(s, ConnectionState::Closed)) { trace!(target: "sub-libp2p", "PSM => Connect({}, {:?}): Enabling connections.", occ_entry.key().0, set_id); @@ -740,10 +752,13 @@ impl Notifications { self.next_delay_id.0 += 1; debug_assert!(timer_deadline > now); let delay = futures_timer::Delay::new(timer_deadline - now); - self.delays.push(async move { - delay.await; - (delay_id, peer_id, set_id) - }.boxed()); + self.delays.push( + async move { + delay.await; + (delay_id, peer_id, set_id) + } + .boxed(), + ); *occ_entry.into_mut() = PeerState::DisabledPendingEnable { connections, @@ -757,16 +772,22 @@ impl Notifications { PeerState::Incoming { mut connections, .. } => { trace!(target: "sub-libp2p", "PSM => Connect({}, {:?}): Enabling connections.", occ_entry.key().0, set_id); - if let Some(inc) = self.incoming.iter_mut() - .find(|i| i.peer_id == occ_entry.key().0 && i.set_id == set_id && i.alive) { + if let Some(inc) = self + .incoming + .iter_mut() + .find(|i| i.peer_id == occ_entry.key().0 && i.set_id == set_id && i.alive) + { inc.alive = false; } else { error!(target: "sub-libp2p", "State mismatch in libp2p: no entry in \ incoming for incoming peer") } - debug_assert!(connections.iter().any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); - for (connec_id, connec_state) in connections.iter_mut() + debug_assert!(connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); + for (connec_id, connec_state) in connections + .iter_mut() .filter(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote)) { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Open({:?})", @@ -820,7 +841,7 @@ impl Notifications { trace!(target: "sub-libp2p", "PSM => Drop({}, {:?}): Already disabled.", entry.key().0, set_id); return - } + }, }; match 
mem::replace(entry.get_mut(), PeerState::Poisoned) { @@ -836,10 +857,8 @@ impl Notifications { trace!(target: "sub-libp2p", "PSM => Drop({}, {:?}): Interrupting pending enabling.", entry.key().0, set_id); - *entry.into_mut() = PeerState::Disabled { - connections, - backoff_until: Some(timer_deadline), - }; + *entry.into_mut() = + PeerState::Disabled { connections, backoff_until: Some(timer_deadline) }; }, // Enabled => Disabled @@ -847,8 +866,10 @@ impl Notifications { trace!(target: "sub-libp2p", "PSM => Drop({}, {:?}): Disabling connections.", entry.key().0, set_id); - debug_assert!(connections.iter().any(|(_, s)| - matches!(s, ConnectionState::Opening | ConnectionState::Open(_)))); + debug_assert!(connections.iter().any(|(_, s)| matches!( + s, + ConnectionState::Opening | ConnectionState::Open(_) + ))); if connections.iter().any(|(_, s)| matches!(s, ConnectionState::Open(_))) { trace!(target: "sub-libp2p", "External API <= Closed({}, {:?})", entry.key().0, set_id); @@ -859,8 +880,8 @@ impl Notifications { self.events.push_back(NetworkBehaviourAction::GenerateEvent(event)); } - for (connec_id, connec_state) in connections.iter_mut() - .filter(|(_, s)| matches!(s, ConnectionState::Opening)) + for (connec_id, connec_state) in + connections.iter_mut().filter(|(_, s)| matches!(s, ConnectionState::Opening)) { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Close({:?})", entry.key(), *connec_id, set_id); @@ -872,8 +893,8 @@ impl Notifications { *connec_state = ConnectionState::OpeningThenClosing; } - for (connec_id, connec_state) in connections.iter_mut() - .filter(|(_, s)| matches!(s, ConnectionState::Open(_))) + for (connec_id, connec_state) in + connections.iter_mut().filter(|(_, s)| matches!(s, ConnectionState::Open(_))) { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Close({:?})", entry.key(), *connec_id, set_id); @@ -922,7 +943,8 @@ impl Notifications { /// Function that is called when the peerset wants us to accept a connection /// request from a 
peer. fn peerset_report_accept(&mut self, index: sc_peerset::IncomingIndex) { - let incoming = if let Some(pos) = self.incoming.iter().position(|i| i.incoming_id == index) { + let incoming = if let Some(pos) = self.incoming.iter().position(|i| i.incoming_id == index) + { self.incoming.remove(pos) } else { error!(target: "sub-libp2p", "PSM => Accept({:?}): Invalid index", index); @@ -933,12 +955,16 @@ impl Notifications { trace!(target: "sub-libp2p", "PSM => Accept({:?}, {}, {:?}): Obsolete incoming", index, incoming.peer_id, incoming.set_id); match self.peers.get_mut(&(incoming.peer_id.clone(), incoming.set_id)) { - Some(PeerState::DisabledPendingEnable { .. }) | - Some(PeerState::Enabled { .. }) => {} + Some(PeerState::DisabledPendingEnable { .. }) | Some(PeerState::Enabled { .. }) => { + }, _ => { trace!(target: "sub-libp2p", "PSM <= Dropped({}, {:?})", incoming.peer_id, incoming.set_id); - self.peerset.dropped(incoming.set_id, incoming.peer_id, sc_peerset::DropReason::Unknown); + self.peerset.dropped( + incoming.set_id, + incoming.peer_id, + sc_peerset::DropReason::Unknown, + ); }, } return @@ -948,8 +974,8 @@ impl Notifications { Some(s) => s, None => { debug_assert!(false); - return; - } + return + }, }; match mem::replace(state, PeerState::Poisoned) { @@ -958,8 +984,11 @@ impl Notifications { trace!(target: "sub-libp2p", "PSM => Accept({:?}, {}, {:?}): Enabling connections.", index, incoming.peer_id, incoming.set_id); - debug_assert!(connections.iter().any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); - for (connec_id, connec_state) in connections.iter_mut() + debug_assert!(connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); + for (connec_id, connec_state) in connections + .iter_mut() .filter(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote)) { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Open({:?})", @@ -973,7 +1002,7 @@ impl Notifications { } *state = PeerState::Enabled { connections }; 
- } + }, // Any state other than `Incoming` is invalid. peer => { @@ -981,13 +1010,14 @@ impl Notifications { "State mismatch in libp2p: Expected alive incoming. Got {:?}.", peer); debug_assert!(false); - } + }, } } /// Function that is called when the peerset wants us to reject an incoming peer. fn peerset_report_reject(&mut self, index: sc_peerset::IncomingIndex) { - let incoming = if let Some(pos) = self.incoming.iter().position(|i| i.incoming_id == index) { + let incoming = if let Some(pos) = self.incoming.iter().position(|i| i.incoming_id == index) + { self.incoming.remove(pos) } else { error!(target: "sub-libp2p", "PSM => Reject({:?}): Invalid index", index); @@ -1004,8 +1034,8 @@ impl Notifications { Some(s) => s, None => { debug_assert!(false); - return; - } + return + }, }; match mem::replace(state, PeerState::Poisoned) { @@ -1014,8 +1044,11 @@ impl Notifications { trace!(target: "sub-libp2p", "PSM => Reject({:?}, {}, {:?}): Rejecting connections.", index, incoming.peer_id, incoming.set_id); - debug_assert!(connections.iter().any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); - for (connec_id, connec_state) in connections.iter_mut() + debug_assert!(connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); + for (connec_id, connec_state) in connections + .iter_mut() .filter(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote)) { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Close({:?})", @@ -1029,10 +1062,10 @@ impl Notifications { } *state = PeerState::Disabled { connections, backoff_until }; - } + }, peer => error!(target: "sub-libp2p", "State mismatch in libp2p: Expected alive incoming. 
Got {:?}.", - peer) + peer), } } } @@ -1049,15 +1082,18 @@ impl NetworkBehaviour for Notifications { Vec::new() } - fn inject_connected(&mut self, _: &PeerId) { - } + fn inject_connected(&mut self, _: &PeerId) {} - fn inject_connection_established(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn inject_connection_established( + &mut self, + peer_id: &PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { for set_id in (0..self.notif_protocols.len()).map(sc_peerset::SetId::from) { match self.peers.entry((peer_id.clone(), set_id)).or_insert(PeerState::Poisoned) { // Requested | PendingRequest => Enabled - st @ &mut PeerState::Requested | - st @ &mut PeerState::PendingRequest { .. } => { + st @ &mut PeerState::Requested | st @ &mut PeerState::PendingRequest { .. } => { trace!(target: "sub-libp2p", "Libp2p => Connected({}, {:?}, {:?}): Connection was requested by PSM.", peer_id, set_id, endpoint @@ -1072,12 +1108,11 @@ impl NetworkBehaviour for Notifications { let mut connections = SmallVec::new(); connections.push((*conn, ConnectionState::Opening)); *st = PeerState::Enabled { connections }; - } + }, // Poisoned gets inserted above if the entry was missing. // Ø | Backoff => Disabled - st @ &mut PeerState::Poisoned | - st @ &mut PeerState::Backoff { .. } => { + st @ &mut PeerState::Poisoned | st @ &mut PeerState::Backoff { .. } => { let backoff_until = if let PeerState::Backoff { timer_deadline, .. } = st { Some(*timer_deadline) } else { @@ -1090,7 +1125,7 @@ impl NetworkBehaviour for Notifications { let mut connections = SmallVec::new(); connections.push((*conn, ConnectionState::Closed)); *st = PeerState::Disabled { connections, backoff_until }; - } + }, // In all other states, add this new connection to the list of closed inactive // connections. @@ -1102,14 +1137,21 @@ impl NetworkBehaviour for Notifications { "Libp2p => Connected({}, {:?}, {:?}, {:?}): Secondary connection. 
Leaving closed.", peer_id, set_id, endpoint, *conn); connections.push((*conn, ConnectionState::Closed)); - } + }, } } } - fn inject_connection_closed(&mut self, peer_id: &PeerId, conn: &ConnectionId, _endpoint: &ConnectedPoint) { + fn inject_connection_closed( + &mut self, + peer_id: &PeerId, + conn: &ConnectionId, + _endpoint: &ConnectedPoint, + ) { for set_id in (0..self.notif_protocols.len()).map(sc_peerset::SetId::from) { - let mut entry = if let Entry::Occupied(entry) = self.peers.entry((peer_id.clone(), set_id)) { + let mut entry = if let Entry::Occupied(entry) = + self.peers.entry((peer_id.clone(), set_id)) + { entry } else { error!(target: "sub-libp2p", "inject_connection_closed: State mismatch in the custom protos handler"); @@ -1139,15 +1181,16 @@ impl NetworkBehaviour for Notifications { self.next_delay_id.0 += 1; let delay = futures_timer::Delay::new(until - now); let peer_id = peer_id.clone(); - self.delays.push(async move { - delay.await; - (delay_id, peer_id, set_id) - }.boxed()); - - *entry.get_mut() = PeerState::Backoff { - timer: delay_id, - timer_deadline: until, - }; + self.delays.push( + async move { + delay.await; + (delay_id, peer_id, set_id) + } + .boxed(), + ); + + *entry.get_mut() = + PeerState::Backoff { timer: delay_id, timer_deadline: until }; } else { entry.remove(); } @@ -1177,13 +1220,15 @@ impl NetworkBehaviour for Notifications { if connections.is_empty() { trace!(target: "sub-libp2p", "PSM <= Dropped({}, {:?})", peer_id, set_id); - self.peerset.dropped(set_id, peer_id.clone(), sc_peerset::DropReason::Unknown); + self.peerset.dropped( + set_id, + peer_id.clone(), + sc_peerset::DropReason::Unknown, + ); *entry.get_mut() = PeerState::Backoff { timer, timer_deadline }; - } else { - *entry.get_mut() = PeerState::DisabledPendingEnable { - connections, timer_deadline, timer - }; + *entry.get_mut() = + PeerState::DisabledPendingEnable { connections, timer_deadline, timer }; } }, @@ -1195,7 +1240,9 @@ impl NetworkBehaviour for 
Notifications { peer_id, set_id, *conn ); - debug_assert!(connections.iter().any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); + debug_assert!(connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); if let Some(pos) = connections.iter().position(|(c, _)| *c == *conn) { connections.remove(pos); @@ -1205,16 +1252,18 @@ impl NetworkBehaviour for Notifications { "inject_connection_closed: State mismatch in the custom protos handler"); } - let no_desired_left = !connections.iter().any(|(_, s)| { - matches!(s, ConnectionState::OpenDesiredByRemote) - }); + let no_desired_left = !connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote)); // If no connection is `OpenDesiredByRemote` anymore, clean up the peerset incoming // request. if no_desired_left { // In the incoming state, we don't report "Dropped". Instead we will just // ignore the corresponding Accept/Reject. - if let Some(state) = self.incoming.iter_mut() + if let Some(state) = self + .incoming + .iter_mut() .find(|i| i.alive && i.set_id == set_id && i.peer_id == *peer_id) { state.alive = false; @@ -1233,29 +1282,29 @@ impl NetworkBehaviour for Notifications { self.next_delay_id.0 += 1; let delay = futures_timer::Delay::new(until - now); let peer_id = peer_id.clone(); - self.delays.push(async move { - delay.await; - (delay_id, peer_id, set_id) - }.boxed()); - - *entry.get_mut() = PeerState::Backoff { - timer: delay_id, - timer_deadline: until, - }; + self.delays.push( + async move { + delay.await; + (delay_id, peer_id, set_id) + } + .boxed(), + ); + + *entry.get_mut() = + PeerState::Backoff { timer: delay_id, timer_deadline: until }; } else { entry.remove(); } } else { entry.remove(); } - } else if no_desired_left { // If no connection is `OpenDesiredByRemote` anymore, switch to `Disabled`. 
*entry.get_mut() = PeerState::Disabled { connections, backoff_until }; } else { *entry.get_mut() = PeerState::Incoming { connections, backoff_until }; } - } + }, // Enabled => Enabled | Backoff // Peers are always backed-off when disconnecting while Enabled. @@ -1266,8 +1315,10 @@ impl NetworkBehaviour for Notifications { peer_id, set_id, *conn ); - debug_assert!(connections.iter().any(|(_, s)| - matches!(s, ConnectionState::Opening | ConnectionState::Open(_)))); + debug_assert!(connections.iter().any(|(_, s)| matches!( + s, + ConnectionState::Opening | ConnectionState::Open(_) + ))); if let Some(pos) = connections.iter().position(|(c, _)| *c == *conn) { let (_, state) = connections.remove(pos); @@ -1275,11 +1326,9 @@ impl NetworkBehaviour for Notifications { if let Some((replacement_pos, replacement_sink)) = connections .iter() .enumerate() - .filter_map(|(num, (_, s))| { - match s { - ConnectionState::Open(s) => Some((num, s.clone())), - _ => None - } + .filter_map(|(num, (_, s))| match s { + ConnectionState::Open(s) => Some((num, s.clone())), + _ => None, }) .next() { @@ -1294,7 +1343,8 @@ impl NetworkBehaviour for Notifications { set_id, notifications_sink: replacement_sink, }; - self.events.push_back(NetworkBehaviourAction::GenerateEvent(event)); + self.events + .push_back(NetworkBehaviourAction::GenerateEvent(event)); } } else { trace!( @@ -1308,7 +1358,6 @@ impl NetworkBehaviour for Notifications { self.events.push_back(NetworkBehaviourAction::GenerateEvent(event)); } } - } else { error!(target: "sub-libp2p", "inject_connection_closed: State mismatch in the custom protos handler"); @@ -1317,38 +1366,44 @@ impl NetworkBehaviour for Notifications { if connections.is_empty() { trace!(target: "sub-libp2p", "PSM <= Dropped({}, {:?})", peer_id, set_id); - self.peerset.dropped(set_id, peer_id.clone(), sc_peerset::DropReason::Unknown); + self.peerset.dropped( + set_id, + peer_id.clone(), + sc_peerset::DropReason::Unknown, + ); let ban_dur = Uniform::new(5, 
10).sample(&mut rand::thread_rng()); let delay_id = self.next_delay_id; self.next_delay_id.0 += 1; let delay = futures_timer::Delay::new(Duration::from_secs(ban_dur)); let peer_id = peer_id.clone(); - self.delays.push(async move { - delay.await; - (delay_id, peer_id, set_id) - }.boxed()); + self.delays.push( + async move { + delay.await; + (delay_id, peer_id, set_id) + } + .boxed(), + ); *entry.get_mut() = PeerState::Backoff { timer: delay_id, timer_deadline: Instant::now() + Duration::from_secs(ban_dur), }; - - } else if !connections.iter().any(|(_, s)| - matches!(s, ConnectionState::Opening | ConnectionState::Open(_))) - { + } else if !connections.iter().any(|(_, s)| { + matches!(s, ConnectionState::Opening | ConnectionState::Open(_)) + }) { trace!(target: "sub-libp2p", "PSM <= Dropped({}, {:?})", peer_id, set_id); - self.peerset.dropped(set_id, peer_id.clone(), sc_peerset::DropReason::Unknown); - - *entry.get_mut() = PeerState::Disabled { - connections, - backoff_until: None - }; + self.peerset.dropped( + set_id, + peer_id.clone(), + sc_peerset::DropReason::Unknown, + ); + *entry.get_mut() = PeerState::Disabled { connections, backoff_until: None }; } else { *entry.get_mut() = PeerState::Enabled { connections }; } - } + }, PeerState::Requested | PeerState::PendingRequest { .. } | @@ -1367,10 +1422,14 @@ impl NetworkBehaviour for Notifications { } } - fn inject_disconnected(&mut self, _peer_id: &PeerId) { - } + fn inject_disconnected(&mut self, _peer_id: &PeerId) {} - fn inject_addr_reach_failure(&mut self, peer_id: Option<&PeerId>, addr: &Multiaddr, error: &dyn error::Error) { + fn inject_addr_reach_failure( + &mut self, + peer_id: Option<&PeerId>, + addr: &Multiaddr, + error: &dyn error::Error, + ) { trace!(target: "sub-libp2p", "Libp2p => Reach failure for {:?} through {:?}: {:?}", peer_id, addr, error); } @@ -1386,26 +1445,33 @@ impl NetworkBehaviour for Notifications { }, // "Basic" situation: we failed to reach a peer that the peerset requested. 
- st @ PeerState::Requested | - st @ PeerState::PendingRequest { .. } => { + st @ PeerState::Requested | st @ PeerState::PendingRequest { .. } => { trace!(target: "sub-libp2p", "PSM <= Dropped({}, {:?})", peer_id, set_id); - self.peerset.dropped(set_id, peer_id.clone(), sc_peerset::DropReason::Unknown); + self.peerset.dropped( + set_id, + peer_id.clone(), + sc_peerset::DropReason::Unknown, + ); let now = Instant::now(); let ban_duration = match st { - PeerState::PendingRequest { timer_deadline, .. } if timer_deadline > now => + PeerState::PendingRequest { timer_deadline, .. } + if timer_deadline > now => cmp::max(timer_deadline - now, Duration::from_secs(5)), - _ => Duration::from_secs(5) + _ => Duration::from_secs(5), }; let delay_id = self.next_delay_id; self.next_delay_id.0 += 1; let delay = futures_timer::Delay::new(ban_duration); let peer_id = peer_id.clone(); - self.delays.push(async move { - delay.await; - (delay_id, peer_id, set_id) - }.boxed()); + self.delays.push( + async move { + delay.await; + (delay_id, peer_id, set_id) + } + .boxed(), + ); *entry.into_mut() = PeerState::Backoff { timer: delay_id, @@ -1415,8 +1481,10 @@ impl NetworkBehaviour for Notifications { // We can still get dial failures even if we are already connected to the peer, // as an extra diagnostic for an earlier attempt. - st @ PeerState::Disabled { .. } | st @ PeerState::Enabled { .. } | - st @ PeerState::DisabledPendingEnable { .. } | st @ PeerState::Incoming { .. } => { + st @ PeerState::Disabled { .. } | + st @ PeerState::Enabled { .. } | + st @ PeerState::DisabledPendingEnable { .. } | + st @ PeerState::Incoming { .. 
} => { *entry.into_mut() = st; }, @@ -1429,12 +1497,7 @@ impl NetworkBehaviour for Notifications { } } - fn inject_event( - &mut self, - source: PeerId, - connection: ConnectionId, - event: NotifsHandlerOut, - ) { + fn inject_event(&mut self, source: PeerId, connection: ConnectionId, event: NotifsHandlerOut) { match event { NotifsHandlerOut::OpenDesiredByRemote { protocol_index } => { let set_id = sc_peerset::SetId::from(protocol_index); @@ -1443,20 +1506,24 @@ impl NetworkBehaviour for Notifications { "Handler({:?}, {:?}]) => OpenDesiredByRemote({:?})", source, connection, set_id); - let mut entry = if let Entry::Occupied(entry) = self.peers.entry((source.clone(), set_id)) { - entry - } else { - error!(target: "sub-libp2p", "OpenDesiredByRemote: State mismatch in the custom protos handler"); - debug_assert!(false); - return - }; + let mut entry = + if let Entry::Occupied(entry) = self.peers.entry((source.clone(), set_id)) { + entry + } else { + error!(target: "sub-libp2p", "OpenDesiredByRemote: State mismatch in the custom protos handler"); + debug_assert!(false); + return + }; match mem::replace(entry.get_mut(), PeerState::Poisoned) { // Incoming => Incoming PeerState::Incoming { mut connections, backoff_until } => { - debug_assert!(connections.iter().any(|(_, s)| - matches!(s, ConnectionState::OpenDesiredByRemote))); - if let Some((_, connec_state)) = connections.iter_mut().find(|(c, _)| *c == connection) { + debug_assert!(connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::OpenDesiredByRemote))); + if let Some((_, connec_state)) = + connections.iter_mut().find(|(c, _)| *c == connection) + { if let ConnectionState::Closed = *connec_state { *connec_state = ConnectionState::OpenDesiredByRemote; } else { @@ -1482,10 +1549,14 @@ impl NetworkBehaviour for Notifications { }, PeerState::Enabled { mut connections } => { - debug_assert!(connections.iter().any(|(_, s)| - matches!(s, ConnectionState::Opening | ConnectionState::Open(_)))); + 
debug_assert!(connections.iter().any(|(_, s)| matches!( + s, + ConnectionState::Opening | ConnectionState::Open(_) + ))); - if let Some((_, connec_state)) = connections.iter_mut().find(|(c, _)| *c == connection) { + if let Some((_, connec_state)) = + connections.iter_mut().find(|(c, _)| *c == connection) + { if let ConnectionState::Closed = *connec_state { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Open({:?})", source, connection, set_id); @@ -1504,7 +1575,7 @@ impl NetworkBehaviour for Notifications { debug_assert!(matches!( connec_state, ConnectionState::OpenDesiredByRemote | - ConnectionState::Closing | ConnectionState::Opening + ConnectionState::Closing | ConnectionState::Opening )); } } else { @@ -1520,7 +1591,9 @@ impl NetworkBehaviour for Notifications { // Disabled => Disabled | Incoming PeerState::Disabled { mut connections, backoff_until } => { - if let Some((_, connec_state)) = connections.iter_mut().find(|(c, _)| *c == connection) { + if let Some((_, connec_state)) = + connections.iter_mut().find(|(c, _)| *c == connection) + { if let ConnectionState::Closed = *connec_state { *connec_state = ConnectionState::OpenDesiredByRemote; @@ -1537,8 +1610,8 @@ impl NetworkBehaviour for Notifications { incoming_id, }); - *entry.into_mut() = PeerState::Incoming { connections, backoff_until }; - + *entry.into_mut() = + PeerState::Incoming { connections, backoff_until }; } else { // Connections in `OpeningThenClosing` and `Closing` state can be // in a Closed phase, and as such can emit `OpenDesiredByRemote` @@ -1548,7 +1621,8 @@ impl NetworkBehaviour for Notifications { connec_state, ConnectionState::OpeningThenClosing | ConnectionState::Closing )); - *entry.into_mut() = PeerState::Disabled { connections, backoff_until }; + *entry.into_mut() = + PeerState::Disabled { connections, backoff_until }; } } else { error!( @@ -1557,11 +1631,13 @@ impl NetworkBehaviour for Notifications { ); debug_assert!(false); } - } + }, // DisabledPendingEnable => Enabled | 
DisabledPendingEnable PeerState::DisabledPendingEnable { mut connections, timer, timer_deadline } => { - if let Some((_, connec_state)) = connections.iter_mut().find(|(c, _)| *c == connection) { + if let Some((_, connec_state)) = + connections.iter_mut().find(|(c, _)| *c == connection) + { if let ConnectionState::Closed = *connec_state { trace!(target: "sub-libp2p", "Handler({:?}, {:?}) <= Open({:?})", source, connection, set_id); @@ -1573,7 +1649,6 @@ impl NetworkBehaviour for Notifications { *connec_state = ConnectionState::Opening; *entry.into_mut() = PeerState::Enabled { connections }; - } else { // Connections in `OpeningThenClosing` and `Closing` state can be // in a Closed phase, and as such can emit `OpenDesiredByRemote` @@ -1596,7 +1671,7 @@ impl NetworkBehaviour for Notifications { ); debug_assert!(false); } - } + }, state => { error!(target: "sub-libp2p", @@ -1604,9 +1679,9 @@ impl NetworkBehaviour for Notifications { state); debug_assert!(false); return - } + }, }; - } + }, NotifsHandlerOut::CloseDesired { protocol_index } => { let set_id = sc_peerset::SetId::from(protocol_index); @@ -1615,32 +1690,37 @@ impl NetworkBehaviour for Notifications { "Handler({}, {:?}) => CloseDesired({:?})", source, connection, set_id); - let mut entry = if let Entry::Occupied(entry) = self.peers.entry((source.clone(), set_id)) { - entry - } else { - error!(target: "sub-libp2p", "CloseDesired: State mismatch in the custom protos handler"); - debug_assert!(false); - return - }; + let mut entry = + if let Entry::Occupied(entry) = self.peers.entry((source.clone(), set_id)) { + entry + } else { + error!(target: "sub-libp2p", "CloseDesired: State mismatch in the custom protos handler"); + debug_assert!(false); + return + }; match mem::replace(entry.get_mut(), PeerState::Poisoned) { // Enabled => Enabled | Disabled PeerState::Enabled { mut connections } => { - debug_assert!(connections.iter().any(|(_, s)| - matches!(s, ConnectionState::Opening | ConnectionState::Open(_)))); + 
debug_assert!(connections.iter().any(|(_, s)| matches!( + s, + ConnectionState::Opening | ConnectionState::Open(_) + ))); - let pos = if let Some(pos) = connections.iter().position(|(c, _)| *c == connection) { + let pos = if let Some(pos) = + connections.iter().position(|(c, _)| *c == connection) + { pos } else { error!(target: "sub-libp2p", "CloseDesired: State mismatch in the custom protos handler"); debug_assert!(false); - return; + return }; if matches!(connections[pos].1, ConnectionState::Closing) { *entry.into_mut() = PeerState::Enabled { connections }; - return; + return } debug_assert!(matches!(connections[pos].1, ConnectionState::Open(_))); @@ -1656,11 +1736,9 @@ impl NetworkBehaviour for Notifications { if let Some((replacement_pos, replacement_sink)) = connections .iter() .enumerate() - .filter_map(|(num, (_, s))| { - match s { - ConnectionState::Open(s) => Some((num, s.clone())), - _ => None - } + .filter_map(|(num, (_, s))| match s { + ConnectionState::Open(s) => Some((num, s.clone())), + _ => None, }) .next() { @@ -1675,24 +1753,27 @@ impl NetworkBehaviour for Notifications { } *entry.into_mut() = PeerState::Enabled { connections }; - } else { // List of open connections wasn't empty before but now it is. 
- if !connections.iter().any(|(_, s)| matches!(s, ConnectionState::Opening)) { + if !connections + .iter() + .any(|(_, s)| matches!(s, ConnectionState::Opening)) + { trace!(target: "sub-libp2p", "PSM <= Dropped({}, {:?})", source, set_id); - self.peerset.dropped(set_id, source.clone(), sc_peerset::DropReason::Refused); - *entry.into_mut() = PeerState::Disabled { - connections, backoff_until: None - }; + self.peerset.dropped( + set_id, + source.clone(), + sc_peerset::DropReason::Refused, + ); + *entry.into_mut() = + PeerState::Disabled { connections, backoff_until: None }; } else { *entry.into_mut() = PeerState::Enabled { connections }; } trace!(target: "sub-libp2p", "External API <= Closed({}, {:?})", source, set_id); - let event = NotificationsOut::CustomProtocolClosed { - peer_id: source, - set_id, - }; + let event = + NotificationsOut::CustomProtocolClosed { peer_id: source, set_id }; self.events.push_back(NetworkBehaviourAction::GenerateEvent(event)); } }, @@ -1702,16 +1783,16 @@ impl NetworkBehaviour for Notifications { state @ PeerState::Disabled { .. } | state @ PeerState::DisabledPendingEnable { .. } => { *entry.into_mut() = state; - return; + return }, state => { error!(target: "sub-libp2p", "Unexpected state in the custom protos handler: {:?}", state); return - } + }, } - } + }, NotifsHandlerOut::CloseResult { protocol_index } => { let set_id = sc_peerset::SetId::from(protocol_index); @@ -1726,10 +1807,9 @@ impl NetworkBehaviour for Notifications { Some(PeerState::DisabledPendingEnable { connections, .. }) | Some(PeerState::Disabled { connections, .. }) | Some(PeerState::Enabled { connections, .. 
}) => { - if let Some((_, connec_state)) = connections - .iter_mut() - .find(|(c, s)| *c == connection && matches!(s, ConnectionState::Closing)) - { + if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| { + *c == connection && matches!(s, ConnectionState::Closing) + }) { *connec_state = ConnectionState::Closed; } else { error!(target: "sub-libp2p", @@ -1743,12 +1823,16 @@ impl NetworkBehaviour for Notifications { "CloseResult: Unexpected state in the custom protos handler: {:?}", state); debug_assert!(false); - } + }, } - } + }, NotifsHandlerOut::OpenResultOk { - protocol_index, negotiated_fallback, received_handshake, notifications_sink, .. + protocol_index, + negotiated_fallback, + received_handshake, + notifications_sink, + .. } => { let set_id = sc_peerset::SetId::from(protocol_index); trace!(target: "sub-libp2p", @@ -1757,13 +1841,16 @@ impl NetworkBehaviour for Notifications { match self.peers.get_mut(&(source.clone(), set_id)) { Some(PeerState::Enabled { connections, .. 
}) => { - debug_assert!(connections.iter().any(|(_, s)| - matches!(s, ConnectionState::Opening | ConnectionState::Open(_)))); - let any_open = connections.iter().any(|(_, s)| matches!(s, ConnectionState::Open(_))); - - if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| - *c == connection && matches!(s, ConnectionState::Opening)) - { + debug_assert!(connections.iter().any(|(_, s)| matches!( + s, + ConnectionState::Opening | ConnectionState::Open(_) + ))); + let any_open = + connections.iter().any(|(_, s)| matches!(s, ConnectionState::Open(_))); + + if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| { + *c == connection && matches!(s, ConnectionState::Opening) + }) { if !any_open { trace!(target: "sub-libp2p", "External API <= Open({}, {:?})", source, set_id); let event = NotificationsOut::CustomProtocolOpen { @@ -1776,9 +1863,10 @@ impl NetworkBehaviour for Notifications { self.events.push_back(NetworkBehaviourAction::GenerateEvent(event)); } *connec_state = ConnectionState::Open(notifications_sink); - } else if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| - *c == connection && matches!(s, ConnectionState::OpeningThenClosing)) - { + } else if let Some((_, connec_state)) = + connections.iter_mut().find(|(c, s)| { + *c == connection && matches!(s, ConnectionState::OpeningThenClosing) + }) { *connec_state = ConnectionState::Closing; } else { debug_assert!(false); @@ -1790,16 +1878,16 @@ impl NetworkBehaviour for Notifications { Some(PeerState::Incoming { connections, .. }) | Some(PeerState::DisabledPendingEnable { connections, .. }) | Some(PeerState::Disabled { connections, .. 
}) => { - if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| - *c == connection && matches!(s, ConnectionState::OpeningThenClosing)) - { + if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| { + *c == connection && matches!(s, ConnectionState::OpeningThenClosing) + }) { *connec_state = ConnectionState::Closing; } else { error!(target: "sub-libp2p", "OpenResultOk State mismatch in the custom protos handler"); debug_assert!(false); } - } + }, state => { error!(target: "sub-libp2p", @@ -1807,9 +1895,9 @@ impl NetworkBehaviour for Notifications { state); debug_assert!(false); return - } + }, } - } + }, NotifsHandlerOut::OpenResultErr { protocol_index } => { let set_id = sc_peerset::SetId::from(protocol_index); @@ -1817,27 +1905,31 @@ impl NetworkBehaviour for Notifications { "Handler({:?}, {:?}) => OpenResultErr({:?})", source, connection, set_id); - let mut entry = if let Entry::Occupied(entry) = self.peers.entry((source.clone(), set_id)) { - entry - } else { - error!(target: "sub-libp2p", "OpenResultErr: State mismatch in the custom protos handler"); - debug_assert!(false); - debug_assert!(false); - return - }; + let mut entry = + if let Entry::Occupied(entry) = self.peers.entry((source.clone(), set_id)) { + entry + } else { + error!(target: "sub-libp2p", "OpenResultErr: State mismatch in the custom protos handler"); + debug_assert!(false); + debug_assert!(false); + return + }; match mem::replace(entry.get_mut(), PeerState::Poisoned) { PeerState::Enabled { mut connections } => { - debug_assert!(connections.iter().any(|(_, s)| - matches!(s, ConnectionState::Opening | ConnectionState::Open(_)))); - - if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| - *c == connection && matches!(s, ConnectionState::Opening)) - { + debug_assert!(connections.iter().any(|(_, s)| matches!( + s, + ConnectionState::Opening | ConnectionState::Open(_) + ))); + + if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| { + 
*c == connection && matches!(s, ConnectionState::Opening) + }) { *connec_state = ConnectionState::Closed; - } else if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| - *c == connection && matches!(s, ConnectionState::OpeningThenClosing)) - { + } else if let Some((_, connec_state)) = + connections.iter_mut().find(|(c, s)| { + *c == connection && matches!(s, ConnectionState::OpeningThenClosing) + }) { *connec_state = ConnectionState::Closing; } else { error!(target: "sub-libp2p", @@ -1845,16 +1937,20 @@ impl NetworkBehaviour for Notifications { debug_assert!(false); } - if !connections.iter().any(|(_, s)| - matches!(s, ConnectionState::Opening | ConnectionState::Open(_))) - { + if !connections.iter().any(|(_, s)| { + matches!(s, ConnectionState::Opening | ConnectionState::Open(_)) + }) { trace!(target: "sub-libp2p", "PSM <= Dropped({:?})", source); - self.peerset.dropped(set_id, source.clone(), sc_peerset::DropReason::Refused); + self.peerset.dropped( + set_id, + source.clone(), + sc_peerset::DropReason::Refused, + ); let ban_dur = Uniform::new(5, 10).sample(&mut rand::thread_rng()); *entry.into_mut() = PeerState::Disabled { connections, - backoff_until: Some(Instant::now() + Duration::from_secs(ban_dur)) + backoff_until: Some(Instant::now() + Duration::from_secs(ban_dur)), }; } else { *entry.into_mut() = PeerState::Enabled { connections }; @@ -1867,9 +1963,11 @@ impl NetworkBehaviour for Notifications { PeerState::Incoming { connections, .. } | PeerState::Disabled { connections, .. } | PeerState::DisabledPendingEnable { connections, .. 
} => { - if let Some((_, connec_state)) = connections.iter_mut().find(|(c, s)| - *c == connection && matches!(s, ConnectionState::OpeningThenClosing)) - { + if let Some((_, connec_state)) = + connections.iter_mut().find(|(c, s)| { + *c == connection && + matches!(s, ConnectionState::OpeningThenClosing) + }) { *connec_state = ConnectionState::Closing; } else { error!(target: "sub-libp2p", @@ -1877,20 +1975,22 @@ impl NetworkBehaviour for Notifications { debug_assert!(false); } }, - _ => unreachable!("Match branches are the same as the one on which we - enter this block; qed"), + _ => unreachable!( + "Match branches are the same as the one on which we + enter this block; qed" + ), }; *entry.into_mut() = state; - } + }, state => { error!(target: "sub-libp2p", "Unexpected state in the custom protos handler: {:?}", state); debug_assert!(false); - } + }, }; - } + }, NotifsHandlerOut::Notification { protocol_index, message } => { let set_id = sc_peerset::SetId::from(protocol_index); @@ -1905,11 +2005,7 @@ impl NetworkBehaviour for Notifications { ); trace!(target: "sub-libp2p", "External API <= Message({}, {:?})", source, set_id); - let event = NotificationsOut::Notification { - peer_id: source, - set_id, - message, - }; + let event = NotificationsOut::Notification { peer_id: source, set_id, message }; self.events.push_back(NetworkBehaviourAction::GenerateEvent(event)); } else { @@ -1922,7 +2018,7 @@ impl NetworkBehaviour for Notifications { message.len() ); } - } + }, } } @@ -1930,14 +2026,9 @@ impl NetworkBehaviour for Notifications { &mut self, cx: &mut Context, _params: &mut impl PollParameters, - ) -> Poll< - NetworkBehaviourAction< - NotifsHandlerIn, - Self::OutEvent, - >, - > { + ) -> Poll> { if let Some(event) = self.events.pop_front() { - return Poll::Ready(event); + return Poll::Ready(event) } // Poll for instructions from the peerset. 
@@ -1946,26 +2037,27 @@ impl NetworkBehaviour for Notifications { match futures::Stream::poll_next(Pin::new(&mut self.peerset), cx) { Poll::Ready(Some(sc_peerset::Message::Accept(index))) => { self.peerset_report_accept(index); - } + }, Poll::Ready(Some(sc_peerset::Message::Reject(index))) => { self.peerset_report_reject(index); - } + }, Poll::Ready(Some(sc_peerset::Message::Connect { peer_id, set_id, .. })) => { self.peerset_report_connect(peer_id, set_id); - } + }, Poll::Ready(Some(sc_peerset::Message::Drop { peer_id, set_id, .. })) => { self.peerset_report_disconnect(peer_id, set_id); - } + }, Poll::Ready(None) => { error!(target: "sub-libp2p", "Peerset receiver stream has returned None"); - break; - } + break + }, Poll::Pending => break, } } while let Poll::Ready(Some((delay_id, peer_id, set_id))) = - Pin::new(&mut self.delays).poll_next(cx) { + Pin::new(&mut self.delays).poll_next(cx) + { let peer_state = match self.peers.get_mut(&(peer_id.clone(), set_id)) { Some(s) => s, // We intentionally never remove elements from `delays`, and it may @@ -1977,24 +2069,24 @@ impl NetworkBehaviour for Notifications { PeerState::Backoff { timer, .. } if *timer == delay_id => { trace!(target: "sub-libp2p", "Libp2p <= Clean up ban of {:?} from the state", peer_id); self.peers.remove(&(peer_id, set_id)); - } + }, PeerState::PendingRequest { timer, .. } if *timer == delay_id => { trace!(target: "sub-libp2p", "Libp2p <= Dial {:?} now that ban has expired", peer_id); // The `DialPeerCondition` ensures that dial attempts are de-duplicated self.events.push_back(NetworkBehaviourAction::DialPeer { peer_id, - condition: DialPeerCondition::Disconnected + condition: DialPeerCondition::Disconnected, }); *peer_state = PeerState::Requested; - } + }, PeerState::DisabledPendingEnable { connections, timer, timer_deadline } if *timer == delay_id => { // The first element of `closed` is chosen to open the notifications substream. 
- if let Some((connec_id, connec_state)) = connections.iter_mut() - .find(|(_, s)| matches!(s, ConnectionState::Closed)) + if let Some((connec_id, connec_state)) = + connections.iter_mut().find(|(_, s)| matches!(s, ConnectionState::Closed)) { trace!(target: "sub-libp2p", "Handler({}, {:?}) <= Open({:?}) (ban expired)", peer_id, *connec_id, set_id); @@ -2011,10 +2103,13 @@ impl NetworkBehaviour for Notifications { *timer_deadline = Instant::now() + Duration::from_secs(5); let delay = futures_timer::Delay::new(Duration::from_secs(5)); let timer = *timer; - self.delays.push(async move { - delay.await; - (timer, peer_id, set_id) - }.boxed()); + self.delays.push( + async move { + delay.await; + (timer, peer_id, set_id) + } + .boxed(), + ); } } @@ -2025,7 +2120,7 @@ impl NetworkBehaviour for Notifications { } if let Some(event) = self.events.pop_front() { - return Poll::Ready(event); + return Poll::Ready(event) } Poll::Pending diff --git a/client/network/src/protocol/notifications/handler.rs b/client/network/src/protocol/notifications/handler.rs index 3d38182c3c9d6..dcb32865b1e8d 100644 --- a/client/network/src/protocol/notifications/handler.rs +++ b/client/network/src/protocol/notifications/handler.rs @@ -57,31 +57,39 @@ //! It is illegal to send a [`NotifsHandlerIn::Open`] before a previously-emitted //! [`NotifsHandlerIn::Open`] has gotten an answer. 
-use crate::protocol::notifications::{ - upgrade::{ - NotificationsIn, NotificationsOut, NotificationsInSubstream, NotificationsOutSubstream, - NotificationsHandshakeError, UpgradeCollec - }, +use crate::protocol::notifications::upgrade::{ + NotificationsHandshakeError, NotificationsIn, NotificationsInSubstream, NotificationsOut, + NotificationsOutSubstream, UpgradeCollec, }; use bytes::BytesMut; -use libp2p::core::{ConnectedPoint, PeerId, upgrade::{InboundUpgrade, OutboundUpgrade}}; -use libp2p::swarm::{ - ProtocolsHandler, ProtocolsHandlerEvent, - IntoProtocolsHandler, - KeepAlive, - ProtocolsHandlerUpgrErr, - SubstreamProtocol, - NegotiatedSubstream, -}; use futures::{ channel::mpsc, lock::{Mutex as FuturesMutex, MutexGuard as FuturesMutexGuard}, - prelude::* + prelude::*, +}; +use libp2p::{ + core::{ + upgrade::{InboundUpgrade, OutboundUpgrade}, + ConnectedPoint, PeerId, + }, + swarm::{ + IntoProtocolsHandler, KeepAlive, NegotiatedSubstream, ProtocolsHandler, + ProtocolsHandlerEvent, ProtocolsHandlerUpgrErr, SubstreamProtocol, + }, }; use log::error; use parking_lot::{Mutex, RwLock}; -use std::{borrow::Cow, collections::VecDeque, mem, pin::Pin, str, sync::Arc, task::{Context, Poll}, time::Duration}; +use std::{ + borrow::Cow, + collections::VecDeque, + mem, + pin::Pin, + str, + sync::Arc, + task::{Context, Poll}, + time::Duration, +}; use wasm_timer::Instant; /// Number of pending notifications in asynchronous contexts. @@ -131,7 +139,7 @@ pub struct NotifsHandler { /// Events to return in priority from `poll`. events_queue: VecDeque< - ProtocolsHandlerEvent + ProtocolsHandlerEvent, >, } @@ -195,10 +203,12 @@ enum State { /// We use two different channels in order to have two different channel sizes, but from /// the receiving point of view, the two channels are the same. /// The receivers are fused in case the user drops the [`NotificationsSink`] entirely. 
- notifications_sink_rx: stream::Peekable>, - stream::Fuse> - >>, + notifications_sink_rx: stream::Peekable< + stream::Select< + stream::Fuse>, + stream::Fuse>, + >, + >, /// Outbound substream that has been accepted by the remote. /// @@ -220,28 +230,33 @@ impl IntoProtocolsHandler for NotifsHandlerProto { type Handler = NotifsHandler; fn inbound_protocol(&self) -> UpgradeCollec { - self.protocols.iter() - .map(|cfg| NotificationsIn::new(cfg.name.clone(), cfg.fallback_names.clone(), cfg.max_notification_size)) + self.protocols + .iter() + .map(|cfg| { + NotificationsIn::new( + cfg.name.clone(), + cfg.fallback_names.clone(), + cfg.max_notification_size, + ) + }) .collect::>() } fn into_handler(self, peer_id: &PeerId, connected_point: &ConnectedPoint) -> Self::Handler { NotifsHandler { - protocols: self.protocols.into_iter().map(|config| { - let in_upgrade = NotificationsIn::new( - config.name.clone(), - config.fallback_names.clone(), - config.max_notification_size - ); - - Protocol { - config, - in_upgrade, - state: State::Closed { - pending_opening: false, - }, - } - }).collect(), + protocols: self + .protocols + .into_iter() + .map(|config| { + let in_upgrade = NotificationsIn::new( + config.name.clone(), + config.fallback_names.clone(), + config.max_notification_size, + ); + + Protocol { config, in_upgrade, state: State::Closed { pending_opening: false } } + }) + .collect(), peer_id: peer_id.clone(), endpoint: connected_point.clone(), when_connection_open: Instant::now(), @@ -363,9 +378,7 @@ struct NotificationsSinkInner { enum NotificationsSinkMessage { /// Message emitted by [`NotificationsSink::reserve_notification`] and /// [`NotificationsSink::write_notification_now`]. - Notification { - message: Vec, - }, + Notification { message: Vec }, /// Must close the connection. ForceClose, @@ -386,14 +399,10 @@ impl NotificationsSink { /// error to send a notification using an unknown protocol. /// /// This method will be removed in a future version. 
- pub fn send_sync_notification<'a>( - &'a self, - message: impl Into> - ) { + pub fn send_sync_notification<'a>(&'a self, message: impl Into>) { let mut lock = self.inner.sync_channel.lock(); - let result = lock.try_send(NotificationsSinkMessage::Notification { - message: message.into() - }); + let result = + lock.try_send(NotificationsSinkMessage::Notification { message: message.into() }); if result.is_err() { // Cloning the `mpsc::Sender` guarantees the allocation of an extra spot in the @@ -433,13 +442,10 @@ impl<'a> Ready<'a> { /// Consumes this slots reservation and actually queues the notification. /// /// Returns an error if the substream has been closed. - pub fn send( - mut self, - notification: impl Into> - ) -> Result<(), ()> { - self.lock.start_send(NotificationsSinkMessage::Notification { - message: notification.into(), - }).map_err(|_| ()) + pub fn send(mut self, notification: impl Into>) -> Result<(), ()> { + self.lock + .start_send(NotificationsSinkMessage::Notification { message: notification.into() }) + .map_err(|_| ()) } } @@ -457,12 +463,8 @@ impl NotifsHandlerProto { /// handshake, and the maximum allowed size of a notification. At the moment, the message /// is always the same whether we open a substream ourselves or respond to handshake from /// the remote. 
- pub fn new( - list: impl Into>, - ) -> Self { - NotifsHandlerProto { - protocols: list.into(), - } + pub fn new(list: impl Into>) -> Self { + NotifsHandlerProto { protocols: list.into() } } } @@ -477,7 +479,9 @@ impl ProtocolsHandler for NotifsHandler { type InboundOpenInfo = (); fn listen_protocol(&self) -> SubstreamProtocol { - let protocols = self.protocols.iter() + let protocols = self + .protocols + .iter() .map(|p| p.in_upgrade.clone()) .collect::>(); @@ -486,17 +490,16 @@ impl ProtocolsHandler for NotifsHandler { fn inject_fully_negotiated_inbound( &mut self, - (mut in_substream_open, protocol_index): - >::Output, - (): () + (mut in_substream_open, protocol_index): >::Output, + (): (), ) { let mut protocol_info = &mut self.protocols[protocol_index]; match protocol_info.state { State::Closed { pending_opening } => { self.events_queue.push_back(ProtocolsHandlerEvent::Custom( - NotifsHandlerOut::OpenDesiredByRemote { - protocol_index, - } + NotifsHandlerOut::OpenDesiredByRemote { protocol_index }, )); protocol_info.state = State::OpenDesiredByRemote { @@ -512,13 +515,13 @@ impl ProtocolsHandler for NotifsHandler { // in mind that it is invalid for the remote to open multiple such // substreams, and therefore sending a "RST" is the most correct thing // to do. - return; + return }, State::Opening { ref mut in_substream, .. } | State::Open { ref mut in_substream, .. } => { if in_substream.is_some() { // Same remark as above. - return; + return } // Create `handshake_message` on a separate line to be sure that the @@ -533,18 +536,18 @@ impl ProtocolsHandler for NotifsHandler { fn inject_fully_negotiated_outbound( &mut self, new_open: >::Output, - protocol_index: Self::OutboundOpenInfo + protocol_index: Self::OutboundOpenInfo, ) { match self.protocols[protocol_index].state { State::Closed { ref mut pending_opening } | State::OpenDesiredByRemote { ref mut pending_opening, .. 
} => { debug_assert!(*pending_opening); *pending_opening = false; - } + }, State::Open { .. } => { error!(target: "sub-libp2p", "☎️ State mismatch in notifications handler"); debug_assert!(false); - } + }, State::Opening { ref mut in_substream } => { let (async_tx, async_rx) = mpsc::channel(ASYNC_NOTIFICATIONS_BUFFER_SIZE); let (sync_tx, sync_rx) = mpsc::channel(SYNC_NOTIFICATIONS_BUFFER_SIZE); @@ -557,7 +560,8 @@ impl ProtocolsHandler for NotifsHandler { }; self.protocols[protocol_index].state = State::Open { - notifications_sink_rx: stream::select(async_rx.fuse(), sync_rx.fuse()).peekable(), + notifications_sink_rx: stream::select(async_rx.fuse(), sync_rx.fuse()) + .peekable(), out_substream: Some(new_open.substream), in_substream: in_substream.take(), }; @@ -568,10 +572,10 @@ impl ProtocolsHandler for NotifsHandler { negotiated_fallback: new_open.negotiated_fallback, endpoint: self.endpoint.clone(), received_handshake: new_open.handshake, - notifications_sink - } + notifications_sink, + }, )); - } + }, } } @@ -586,18 +590,18 @@ impl ProtocolsHandler for NotifsHandler { protocol_info.config.name.clone(), protocol_info.config.fallback_names.clone(), protocol_info.config.handshake.read().clone(), - protocol_info.config.max_notification_size + protocol_info.config.max_notification_size, ); - self.events_queue.push_back(ProtocolsHandlerEvent::OutboundSubstreamRequest { - protocol: SubstreamProtocol::new(proto, protocol_index) - .with_timeout(OPEN_TIMEOUT), - }); + self.events_queue.push_back( + ProtocolsHandlerEvent::OutboundSubstreamRequest { + protocol: SubstreamProtocol::new(proto, protocol_index) + .with_timeout(OPEN_TIMEOUT), + }, + ); } - protocol_info.state = State::Opening { - in_substream: None, - }; + protocol_info.state = State::Opening { in_substream: None }; }, State::OpenDesiredByRemote { pending_opening, in_substream } => { let handshake_message = protocol_info.config.handshake.read().clone(); @@ -610,27 +614,27 @@ impl ProtocolsHandler for 
NotifsHandler { protocol_info.config.max_notification_size, ); - self.events_queue.push_back(ProtocolsHandlerEvent::OutboundSubstreamRequest { - protocol: SubstreamProtocol::new(proto, protocol_index) - .with_timeout(OPEN_TIMEOUT), - }); + self.events_queue.push_back( + ProtocolsHandlerEvent::OutboundSubstreamRequest { + protocol: SubstreamProtocol::new(proto, protocol_index) + .with_timeout(OPEN_TIMEOUT), + }, + ); } in_substream.send_handshake(handshake_message); // The state change is done in two steps because of borrowing issues. - let in_substream = match - mem::replace(&mut protocol_info.state, State::Opening { in_substream: None }) - { + let in_substream = match mem::replace( + &mut protocol_info.state, + State::Opening { in_substream: None }, + ) { State::OpenDesiredByRemote { in_substream, .. } => in_substream, - _ => unreachable!() - }; - protocol_info.state = State::Opening { - in_substream: Some(in_substream), + _ => unreachable!(), }; + protocol_info.state = State::Opening { in_substream: Some(in_substream) }; }, - State::Opening { .. } | - State::Open { .. } => { + State::Opening { .. } | State::Open { .. } => { // As documented, it is forbidden to send an `Open` while there is already // one in the fly. error!(target: "sub-libp2p", "opening already-opened handler"); @@ -642,34 +646,26 @@ impl ProtocolsHandler for NotifsHandler { NotifsHandlerIn::Close { protocol_index } => { match self.protocols[protocol_index].state { State::Open { .. } => { - self.protocols[protocol_index].state = State::Closed { - pending_opening: false, - }; + self.protocols[protocol_index].state = + State::Closed { pending_opening: false }; }, State::Opening { .. 
} => { - self.protocols[protocol_index].state = State::Closed { - pending_opening: true, - }; + self.protocols[protocol_index].state = + State::Closed { pending_opening: true }; self.events_queue.push_back(ProtocolsHandlerEvent::Custom( - NotifsHandlerOut::OpenResultErr { - protocol_index, - } + NotifsHandlerOut::OpenResultErr { protocol_index }, )); }, State::OpenDesiredByRemote { pending_opening, .. } => { - self.protocols[protocol_index].state = State::Closed { - pending_opening, - }; - } + self.protocols[protocol_index].state = State::Closed { pending_opening }; + }, State::Closed { .. } => {}, } - self.events_queue.push_back( - ProtocolsHandlerEvent::Custom(NotifsHandlerOut::CloseResult { - protocol_index, - }) - ); + self.events_queue.push_back(ProtocolsHandlerEvent::Custom( + NotifsHandlerOut::CloseResult { protocol_index }, + )); }, } } @@ -677,26 +673,22 @@ impl ProtocolsHandler for NotifsHandler { fn inject_dial_upgrade_error( &mut self, num: usize, - _: ProtocolsHandlerUpgrErr + _: ProtocolsHandlerUpgrErr, ) { match self.protocols[num].state { State::Closed { ref mut pending_opening } | State::OpenDesiredByRemote { ref mut pending_opening, .. } => { debug_assert!(*pending_opening); *pending_opening = false; - } + }, State::Opening { .. } => { - self.protocols[num].state = State::Closed { - pending_opening: false, - }; + self.protocols[num].state = State::Closed { pending_opening: false }; self.events_queue.push_back(ProtocolsHandlerEvent::Custom( - NotifsHandlerOut::OpenResultErr { - protocol_index: num, - } + NotifsHandlerOut::OpenResultErr { protocol_index: num }, )); - } + }, // No substream is being open when already `Open`. State::Open { .. } => debug_assert!(false), @@ -706,7 +698,7 @@ impl ProtocolsHandler for NotifsHandler { fn connection_keep_alive(&self) -> KeepAlive { // `Yes` if any protocol has some activity. if self.protocols.iter().any(|p| !matches!(p.state, State::Closed { .. 
})) { - return KeepAlive::Yes; + return KeepAlive::Yes } // A grace period of `INITIAL_KEEPALIVE_TIME` must be given to leave time for the remote @@ -718,17 +710,23 @@ impl ProtocolsHandler for NotifsHandler { &mut self, cx: &mut Context, ) -> Poll< - ProtocolsHandlerEvent + ProtocolsHandlerEvent< + Self::OutboundProtocol, + Self::OutboundOpenInfo, + Self::OutEvent, + Self::Error, + >, > { if let Some(ev) = self.events_queue.pop_front() { - return Poll::Ready(ev); + return Poll::Ready(ev) } // For each open substream, try send messages from `notifications_sink_rx` to the // substream. for protocol_index in 0..self.protocols.len() { - if let State::Open { notifications_sink_rx, out_substream: Some(out_substream), .. } - = &mut self.protocols[protocol_index].state + if let State::Open { + notifications_sink_rx, out_substream: Some(out_substream), .. + } = &mut self.protocols[protocol_index].state { loop { // Only proceed with `out_substream.poll_ready_unpin` if there is an element @@ -736,9 +734,9 @@ impl ProtocolsHandler for NotifsHandler { // a substream is ready to send if there isn't actually something to send. match Pin::new(&mut *notifications_sink_rx).as_mut().poll_peek(cx) { Poll::Ready(Some(&NotificationsSinkMessage::ForceClose)) => { - return Poll::Ready( - ProtocolsHandlerEvent::Close(NotifsHandlerError::SyncNotificationsClogged) - ); + return Poll::Ready(ProtocolsHandlerEvent::Close( + NotifsHandlerError::SyncNotificationsClogged, + )) }, Poll::Ready(Some(&NotificationsSinkMessage::Notification { .. })) => {}, Poll::Ready(None) | Poll::Pending => break, @@ -748,19 +746,20 @@ impl ProtocolsHandler for NotifsHandler { // substream is ready to accept a message. match out_substream.poll_ready_unpin(cx) { Poll::Ready(_) => {}, - Poll::Pending => break + Poll::Pending => break, } // Now that the substream is ready for a message, grab what to send. 
let message = match notifications_sink_rx.poll_next_unpin(cx) { - Poll::Ready(Some(NotificationsSinkMessage::Notification { message })) => message, - Poll::Ready(Some(NotificationsSinkMessage::ForceClose)) - | Poll::Ready(None) - | Poll::Pending => { + Poll::Ready(Some(NotificationsSinkMessage::Notification { message })) => + message, + Poll::Ready(Some(NotificationsSinkMessage::ForceClose)) | + Poll::Ready(None) | + Poll::Pending => { // Should never be reached, as per `poll_peek` above. debug_assert!(false); - break; - } + break + }, }; let _ = out_substream.start_send_unpin(message); @@ -784,15 +783,15 @@ impl ProtocolsHandler for NotifsHandler { Poll::Ready(Err(_)) => { *out_substream = None; let event = NotifsHandlerOut::CloseDesired { protocol_index }; - return Poll::Ready(ProtocolsHandlerEvent::Custom(event)); - } + return Poll::Ready(ProtocolsHandlerEvent::Custom(event)) + }, }; - } + }, State::Closed { .. } | State::Opening { .. } | State::Open { out_substream: None, .. } | - State::OpenDesiredByRemote { .. } => {} + State::OpenDesiredByRemote { .. } => {}, } } @@ -803,45 +802,40 @@ impl ProtocolsHandler for NotifsHandler { match &mut self.protocols[protocol_index].state { State::Closed { .. } | State::Open { in_substream: None, .. } | - State::Opening { in_substream: None } => {} + State::Opening { in_substream: None } => {}, - State::Open { in_substream: in_substream @ Some(_), .. } => { + State::Open { in_substream: in_substream @ Some(_), .. 
} => match Stream::poll_next(Pin::new(in_substream.as_mut().unwrap()), cx) { Poll::Pending => {}, Poll::Ready(Some(Ok(message))) => { - let event = NotifsHandlerOut::Notification { - protocol_index, - message, - }; + let event = NotifsHandlerOut::Notification { protocol_index, message }; return Poll::Ready(ProtocolsHandlerEvent::Custom(event)) }, - Poll::Ready(None) | Poll::Ready(Some(Err(_))) => - *in_substream = None, - } - } + Poll::Ready(None) | Poll::Ready(Some(Err(_))) => *in_substream = None, + }, - State::OpenDesiredByRemote { in_substream, pending_opening } => { + State::OpenDesiredByRemote { in_substream, pending_opening } => match NotificationsInSubstream::poll_process(Pin::new(in_substream), cx) { Poll::Pending => {}, Poll::Ready(Ok(void)) => match void {}, Poll::Ready(Err(_)) => { - self.protocols[protocol_index].state = State::Closed { - pending_opening: *pending_opening, - }; + self.protocols[protocol_index].state = + State::Closed { pending_opening: *pending_opening }; return Poll::Ready(ProtocolsHandlerEvent::Custom( - NotifsHandlerOut::CloseDesired { protocol_index } + NotifsHandlerOut::CloseDesired { protocol_index }, )) }, - } - } + }, - State::Opening { in_substream: in_substream @ Some(_), .. } => { - match NotificationsInSubstream::poll_process(Pin::new(in_substream.as_mut().unwrap()), cx) { + State::Opening { in_substream: in_substream @ Some(_), .. 
} => + match NotificationsInSubstream::poll_process( + Pin::new(in_substream.as_mut().unwrap()), + cx, + ) { Poll::Pending => {}, Poll::Ready(Ok(void)) => match void {}, Poll::Ready(Err(_)) => *in_substream = None, - } - } + }, } } diff --git a/client/network/src/protocol/notifications/tests.rs b/client/network/src/protocol/notifications/tests.rs index 4c7461c94b20d..a80315050830b 100644 --- a/client/network/src/protocol/notifications/tests.rs +++ b/client/network/src/protocol/notifications/tests.rs @@ -21,19 +21,24 @@ use crate::protocol::notifications::{Notifications, NotificationsOut, ProtocolConfig}; use futures::prelude::*; -use libp2p::{PeerId, Multiaddr, Transport}; -use libp2p::core::{ - connection::{ConnectionId, ListenerId}, - ConnectedPoint, - transport::MemoryTransport, - upgrade +use libp2p::{ + core::{ + connection::{ConnectionId, ListenerId}, + transport::MemoryTransport, + upgrade, ConnectedPoint, + }, + identity, noise, + swarm::{ + IntoProtocolsHandler, NetworkBehaviour, NetworkBehaviourAction, PollParameters, + ProtocolsHandler, Swarm, + }, + yamux, Multiaddr, PeerId, Transport, }; -use libp2p::{identity, noise, yamux}; -use libp2p::swarm::{ - Swarm, ProtocolsHandler, IntoProtocolsHandler, PollParameters, - NetworkBehaviour, NetworkBehaviourAction +use std::{ + error, io, iter, + task::{Context, Poll}, + time::Duration, }; -use std::{error, io, iter, task::{Context, Poll}, time::Duration}; /// Builds two nodes that have each other as bootstrap nodes. /// This is to be used only for testing, and a panic will happen if something goes wrong. @@ -45,12 +50,11 @@ fn build_nodes() -> (Swarm, Swarm) { .map(|_| format!("/memory/{}", rand::random::()).parse().unwrap()) .collect(); - for index in 0 .. 
2 { + for index in 0..2 { let keypair = keypairs[index].clone(); - let noise_keys = noise::Keypair::::new() - .into_authentic(&keypair) - .unwrap(); + let noise_keys = + noise::Keypair::::new().into_authentic(&keypair).unwrap(); let transport = MemoryTransport .upgrade(upgrade::Version::V1) @@ -60,48 +64,43 @@ fn build_nodes() -> (Swarm, Swarm) { .boxed(); let (peerset, _) = sc_peerset::Peerset::from_config(sc_peerset::PeersetConfig { - sets: vec![ - sc_peerset::SetConfig { - in_peers: 25, - out_peers: 25, - bootnodes: if index == 0 { - keypairs - .iter() - .skip(1) - .map(|keypair| keypair.public().into_peer_id()) - .collect() - } else { - vec![] - }, - reserved_nodes: Default::default(), - reserved_only: false, - } - ], + sets: vec![sc_peerset::SetConfig { + in_peers: 25, + out_peers: 25, + bootnodes: if index == 0 { + keypairs.iter().skip(1).map(|keypair| keypair.public().into_peer_id()).collect() + } else { + vec![] + }, + reserved_nodes: Default::default(), + reserved_only: false, + }], }); let behaviour = CustomProtoWithAddr { - inner: Notifications::new(peerset, iter::once(ProtocolConfig { - name: "/foo".into(), - fallback_names: Vec::new(), - handshake: Vec::new(), - max_notification_size: 1024 * 1024 - })), + inner: Notifications::new( + peerset, + iter::once(ProtocolConfig { + name: "/foo".into(), + fallback_names: Vec::new(), + handshake: Vec::new(), + max_notification_size: 1024 * 1024, + }), + ), addrs: addrs .iter() .enumerate() - .filter_map(|(n, a)| if n != index { - Some((keypairs[n].public().into_peer_id(), a.clone())) - } else { - None + .filter_map(|(n, a)| { + if n != index { + Some((keypairs[n].public().into_peer_id(), a.clone())) + } else { + None + } }) .collect(), }; - let mut swarm = Swarm::new( - transport, - behaviour, - keypairs[index].public().into_peer_id() - ); + let mut swarm = Swarm::new(transport, behaviour, keypairs[index].public().into_peer_id()); swarm.listen_on(addrs[index].clone()).unwrap(); out.push(swarm); } @@ -159,11 
+158,21 @@ impl NetworkBehaviour for CustomProtoWithAddr { self.inner.inject_disconnected(peer_id) } - fn inject_connection_established(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn inject_connection_established( + &mut self, + peer_id: &PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { self.inner.inject_connection_established(peer_id, conn, endpoint) } - fn inject_connection_closed(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn inject_connection_closed( + &mut self, + peer_id: &PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { self.inner.inject_connection_closed(peer_id, conn, endpoint) } @@ -171,7 +180,7 @@ impl NetworkBehaviour for CustomProtoWithAddr { &mut self, peer_id: PeerId, connection: ConnectionId, - event: <::Handler as ProtocolsHandler>::OutEvent + event: <::Handler as ProtocolsHandler>::OutEvent, ) { self.inner.inject_event(peer_id, connection, event) } @@ -185,11 +194,16 @@ impl NetworkBehaviour for CustomProtoWithAddr { <::Handler as ProtocolsHandler>::InEvent, Self::OutEvent > - > { + >{ self.inner.poll(cx, params) } - fn inject_addr_reach_failure(&mut self, peer_id: Option<&PeerId>, addr: &Multiaddr, error: &dyn std::error::Error) { + fn inject_addr_reach_failure( + &mut self, + peer_id: Option<&PeerId>, + addr: &Multiaddr, + error: &dyn std::error::Error, + ) { self.inner.inject_addr_reach_failure(peer_id, addr, error) } @@ -235,7 +249,12 @@ fn reconnect_after_disconnect() { // For this test, the services can be in the following states. 
#[derive(Debug, Copy, Clone, PartialEq, Eq)] - enum ServiceState { NotConnected, FirstConnec, Disconnected, ConnectedAgain } + enum ServiceState { + NotConnected, + FirstConnec, + Disconnected, + ConnectedAgain, + } let mut service1_state = ServiceState::NotConnected; let mut service2_state = ServiceState::NotConnected; @@ -253,55 +272,55 @@ fn reconnect_after_disconnect() { }; match event { - future::Either::Left(NotificationsOut::CustomProtocolOpen { .. }) => { + future::Either::Left(NotificationsOut::CustomProtocolOpen { .. }) => match service1_state { ServiceState::NotConnected => { service1_state = ServiceState::FirstConnec; if service2_state == ServiceState::FirstConnec { service1.behaviour_mut().disconnect_peer( Swarm::local_peer_id(&service2), - sc_peerset::SetId::from(0) + sc_peerset::SetId::from(0), ); } }, ServiceState::Disconnected => service1_state = ServiceState::ConnectedAgain, ServiceState::FirstConnec | ServiceState::ConnectedAgain => panic!(), - } - }, - future::Either::Left(NotificationsOut::CustomProtocolClosed { .. }) => { + }, + future::Either::Left(NotificationsOut::CustomProtocolClosed { .. }) => match service1_state { ServiceState::FirstConnec => service1_state = ServiceState::Disconnected, - ServiceState::ConnectedAgain| ServiceState::NotConnected | + ServiceState::ConnectedAgain | + ServiceState::NotConnected | ServiceState::Disconnected => panic!(), - } - }, - future::Either::Right(NotificationsOut::CustomProtocolOpen { .. }) => { + }, + future::Either::Right(NotificationsOut::CustomProtocolOpen { .. 
}) => match service2_state { ServiceState::NotConnected => { service2_state = ServiceState::FirstConnec; if service1_state == ServiceState::FirstConnec { service1.behaviour_mut().disconnect_peer( Swarm::local_peer_id(&service2), - sc_peerset::SetId::from(0) + sc_peerset::SetId::from(0), ); } }, ServiceState::Disconnected => service2_state = ServiceState::ConnectedAgain, ServiceState::FirstConnec | ServiceState::ConnectedAgain => panic!(), - } - }, - future::Either::Right(NotificationsOut::CustomProtocolClosed { .. }) => { + }, + future::Either::Right(NotificationsOut::CustomProtocolClosed { .. }) => match service2_state { ServiceState::FirstConnec => service2_state = ServiceState::Disconnected, - ServiceState::ConnectedAgain| ServiceState::NotConnected | + ServiceState::ConnectedAgain | + ServiceState::NotConnected | ServiceState::Disconnected => panic!(), - } - }, - _ => {} + }, + _ => {}, } - if service1_state == ServiceState::ConnectedAgain && service2_state == ServiceState::ConnectedAgain { - break; + if service1_state == ServiceState::ConnectedAgain && + service2_state == ServiceState::ConnectedAgain + { + break } } @@ -316,7 +335,7 @@ fn reconnect_after_disconnect() { let s2 = service2.next(); futures::pin_mut!(s1, s2); match future::select(future::select(s1, s2), &mut delay).await { - future::Either::Right(_) => break, // success + future::Either::Right(_) => break, // success future::Either::Left((future::Either::Left((ev, _)), _)) => ev, future::Either::Left((future::Either::Right((ev, _)), _)) => ev, } @@ -325,7 +344,7 @@ fn reconnect_after_disconnect() { match event { NotificationsOut::CustomProtocolOpen { .. } | NotificationsOut::CustomProtocolClosed { .. 
} => panic!(), - _ => {} + _ => {}, } } }); diff --git a/client/network/src/protocol/notifications/upgrade.rs b/client/network/src/protocol/notifications/upgrade.rs index 35ae6917272a2..196b4f44f81f7 100644 --- a/client/network/src/protocol/notifications/upgrade.rs +++ b/client/network/src/protocol/notifications/upgrade.rs @@ -16,16 +16,13 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -pub use self::collec::UpgradeCollec; -pub use self::notifications::{ - NotificationsIn, - NotificationsInOpen, - NotificationsInSubstream, - NotificationsOut, - NotificationsOutOpen, - NotificationsOutSubstream, - NotificationsHandshakeError, - NotificationsOutError, +pub use self::{ + collec::UpgradeCollec, + notifications::{ + NotificationsHandshakeError, NotificationsIn, NotificationsInOpen, + NotificationsInSubstream, NotificationsOut, NotificationsOutError, NotificationsOutOpen, + NotificationsOutSubstream, + }, }; mod collec; diff --git a/client/network/src/protocol/notifications/upgrade/collec.rs b/client/network/src/protocol/notifications/upgrade/collec.rs index 8531fb8bdfdbf..8a2a7f7942025 100644 --- a/client/network/src/protocol/notifications/upgrade/collec.rs +++ b/client/network/src/protocol/notifications/upgrade/collec.rs @@ -18,7 +18,12 @@ use futures::prelude::*; use libp2p::core::upgrade::{InboundUpgrade, ProtocolName, UpgradeInfo}; -use std::{iter::FromIterator, pin::Pin, task::{Context, Poll}, vec}; +use std::{ + iter::FromIterator, + pin::Pin, + task::{Context, Poll}, + vec, +}; // TODO: move this to libp2p => https://github.com/libp2p/rust-libp2p/issues/1445 @@ -44,9 +49,10 @@ impl UpgradeInfo for UpgradeCollec { type InfoIter = vec::IntoIter; fn protocol_info(&self) -> Self::InfoIter { - self.0.iter().enumerate() - .flat_map(|(n, p)| - p.protocol_info().into_iter().map(move |i| ProtoNameWithUsize(i, n))) + self.0 + .iter() + .enumerate() + .flat_map(|(n, p)| p.protocol_info().into_iter().map(move 
|i| ProtoNameWithUsize(i, n))) .collect::>() .into_iter() } diff --git a/client/network/src/protocol/notifications/upgrade/notifications.rs b/client/network/src/protocol/notifications/upgrade/notifications.rs index 26bb92d77656b..90a2c1ce54cc4 100644 --- a/client/network/src/protocol/notifications/upgrade/notifications.rs +++ b/client/network/src/protocol/notifications/upgrade/notifications.rs @@ -16,6 +16,7 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use asynchronous_codec::Framed; /// Notifications protocol. /// /// The Substrate notifications protocol consists in the following: @@ -35,13 +36,18 @@ /// Notification substreams are unidirectional. If A opens a substream with B, then B is /// encouraged but not required to open a substream to A as well. /// - use bytes::BytesMut; use futures::prelude::*; -use asynchronous_codec::Framed; -use libp2p::core::{UpgradeInfo, InboundUpgrade, OutboundUpgrade, upgrade}; +use libp2p::core::{upgrade, InboundUpgrade, OutboundUpgrade, UpgradeInfo}; use log::error; -use std::{borrow::Cow, convert::{Infallible, TryFrom as _}, io, mem, pin::Pin, task::{Context, Poll}, vec}; +use std::{ + borrow::Cow, + convert::{Infallible, TryFrom as _}, + io, mem, + pin::Pin, + task::{Context, Poll}, + vec, +}; use unsigned_varint::codec::UviBytes; /// Maximum allowed size of the two handshake messages, in bytes. 
@@ -111,15 +117,12 @@ impl NotificationsIn { pub fn new( main_protocol_name: impl Into>, fallback_names: Vec>, - max_notification_size: u64 + max_notification_size: u64, ) -> Self { let mut protocol_names = fallback_names; protocol_names.insert(0, main_protocol_name.into()); - NotificationsIn { - protocol_names, - max_notification_size, - } + NotificationsIn { protocol_names, max_notification_size } } } @@ -128,29 +131,31 @@ impl UpgradeInfo for NotificationsIn { type InfoIter = vec::IntoIter; fn protocol_info(&self) -> Self::InfoIter { - self.protocol_names.iter().cloned().map(StringProtocolName).collect::>().into_iter() + self.protocol_names + .iter() + .cloned() + .map(StringProtocolName) + .collect::>() + .into_iter() } } impl InboundUpgrade for NotificationsIn -where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + 'static, +where + TSubstream: AsyncRead + AsyncWrite + Unpin + Send + 'static, { type Output = NotificationsInOpen; type Future = Pin> + Send>>; type Error = NotificationsHandshakeError; - fn upgrade_inbound( - self, - mut socket: TSubstream, - negotiated_name: Self::Info, - ) -> Self::Future { + fn upgrade_inbound(self, mut socket: TSubstream, negotiated_name: Self::Info) -> Self::Future { Box::pin(async move { let handshake_len = unsigned_varint::aio::read_usize(&mut socket).await?; if handshake_len > MAX_HANDSHAKE_SIZE { return Err(NotificationsHandshakeError::TooLarge { requested: handshake_len, max: MAX_HANDSHAKE_SIZE, - }); + }) } let mut handshake = vec![0u8; handshake_len]; @@ -191,13 +196,14 @@ pub struct NotificationsInOpen { } impl NotificationsInSubstream -where TSubstream: AsyncRead + AsyncWrite + Unpin, +where + TSubstream: AsyncRead + AsyncWrite + Unpin, { /// Sends the handshake in order to inform the remote that we accept the substream. 
pub fn send_handshake(&mut self, message: impl Into>) { if !matches!(self.handshake, NotificationsInSubstreamHandshake::NotSent) { error!(target: "sub-libp2p", "Tried to send handshake twice"); - return; + return } self.handshake = NotificationsInSubstreamHandshake::PendingSend(message.into()); @@ -205,7 +211,10 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin, /// Equivalent to `Stream::poll_next`, except that it only drives the handshake and is /// guaranteed to not generate any notification. - pub fn poll_process(self: Pin<&mut Self>, cx: &mut Context) -> Poll> { + pub fn poll_process( + self: Pin<&mut Self>, + cx: &mut Context, + ) -> Poll> { let mut this = self.project(); loop { @@ -222,7 +231,7 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin, Poll::Pending => { *this.handshake = NotificationsInSubstreamHandshake::PendingSend(msg); return Poll::Pending - } + }, }, NotificationsInSubstreamHandshake::Flush => match Sink::poll_flush(this.socket.as_mut(), cx)? { @@ -231,7 +240,7 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin, Poll::Pending => { *this.handshake = NotificationsInSubstreamHandshake::Flush; return Poll::Pending - } + }, }, st @ NotificationsInSubstreamHandshake::NotSent | @@ -239,15 +248,16 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin, st @ NotificationsInSubstreamHandshake::ClosingInResponseToRemote | st @ NotificationsInSubstreamHandshake::BothSidesClosed => { *this.handshake = st; - return Poll::Pending; - } + return Poll::Pending + }, } } } } impl Stream for NotificationsInSubstream -where TSubstream: AsyncRead + AsyncWrite + Unpin, +where + TSubstream: AsyncRead + AsyncWrite + Unpin, { type Item = Result; @@ -273,7 +283,7 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin, Poll::Pending => { *this.handshake = NotificationsInSubstreamHandshake::PendingSend(msg); return Poll::Pending - } + }, }, NotificationsInSubstreamHandshake::Flush => match Sink::poll_flush(this.socket.as_mut(), cx)? 
{ @@ -282,13 +292,14 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin, Poll::Pending => { *this.handshake = NotificationsInSubstreamHandshake::Flush; return Poll::Pending - } + }, }, NotificationsInSubstreamHandshake::Sent => { match Stream::poll_next(this.socket.as_mut(), cx) { - Poll::Ready(None) => *this.handshake = - NotificationsInSubstreamHandshake::ClosingInResponseToRemote, + Poll::Ready(None) => + *this.handshake = + NotificationsInSubstreamHandshake::ClosingInResponseToRemote, Poll::Ready(Some(msg)) => { *this.handshake = NotificationsInSubstreamHandshake::Sent; return Poll::Ready(Some(msg)) @@ -305,13 +316,13 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin, Poll::Ready(()) => *this.handshake = NotificationsInSubstreamHandshake::BothSidesClosed, Poll::Pending => { - *this.handshake = NotificationsInSubstreamHandshake::ClosingInResponseToRemote; + *this.handshake = + NotificationsInSubstreamHandshake::ClosingInResponseToRemote; return Poll::Pending - } + }, }, - NotificationsInSubstreamHandshake::BothSidesClosed => - return Poll::Ready(None), + NotificationsInSubstreamHandshake::BothSidesClosed => return Poll::Ready(None), } } } @@ -333,11 +344,7 @@ impl NotificationsOut { let mut protocol_names = fallback_names; protocol_names.insert(0, main_protocol_name.into()); - NotificationsOut { - protocol_names, - initial_message, - max_notification_size, - } + NotificationsOut { protocol_names, initial_message, max_notification_size } } } @@ -356,22 +363,24 @@ impl UpgradeInfo for NotificationsOut { type InfoIter = vec::IntoIter; fn protocol_info(&self) -> Self::InfoIter { - self.protocol_names.iter().cloned().map(StringProtocolName).collect::>().into_iter() + self.protocol_names + .iter() + .cloned() + .map(StringProtocolName) + .collect::>() + .into_iter() } } impl OutboundUpgrade for NotificationsOut -where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + 'static, +where + TSubstream: AsyncRead + AsyncWrite + Unpin + Send + 'static, { type Output = 
NotificationsOutOpen; type Future = Pin> + Send>>; type Error = NotificationsHandshakeError; - fn upgrade_outbound( - self, - mut socket: TSubstream, - negotiated_name: Self::Info, - ) -> Self::Future { + fn upgrade_outbound(self, mut socket: TSubstream, negotiated_name: Self::Info) -> Self::Future { Box::pin(async move { upgrade::write_with_len_prefix(&mut socket, &self.initial_message).await?; @@ -381,7 +390,7 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + 'static, return Err(NotificationsHandshakeError::TooLarge { requested: handshake_len, max: MAX_HANDSHAKE_SIZE, - }); + }) } let mut handshake = vec![0u8; handshake_len]; @@ -399,9 +408,7 @@ where TSubstream: AsyncRead + AsyncWrite + Unpin + Send + 'static, } else { Some(negotiated_name.0) }, - substream: NotificationsOutSubstream { - socket: Framed::new(socket, codec), - } + substream: NotificationsOutSubstream { socket: Framed::new(socket, codec) }, }) }) } @@ -419,14 +426,14 @@ pub struct NotificationsOutOpen { } impl Sink> for NotificationsOutSubstream - where TSubstream: AsyncRead + AsyncWrite + Unpin, +where + TSubstream: AsyncRead + AsyncWrite + Unpin, { type Error = NotificationsOutError; fn poll_ready(self: Pin<&mut Self>, cx: &mut Context) -> Poll> { let mut this = self.project(); - Sink::poll_ready(this.socket.as_mut(), cx) - .map_err(NotificationsOutError::Io) + Sink::poll_ready(this.socket.as_mut(), cx).map_err(NotificationsOutError::Io) } fn start_send(self: Pin<&mut Self>, item: Vec) -> Result<(), Self::Error> { @@ -437,14 +444,12 @@ impl Sink> for NotificationsOutSubstream fn poll_flush(self: Pin<&mut Self>, cx: &mut Context) -> Poll> { let mut this = self.project(); - Sink::poll_flush(this.socket.as_mut(), cx) - .map_err(NotificationsOutError::Io) + Sink::poll_flush(this.socket.as_mut(), cx).map_err(NotificationsOutError::Io) } fn poll_close(self: Pin<&mut Self>, cx: &mut Context) -> Poll> { let mut this = self.project(); - Sink::poll_close(this.socket.as_mut(), cx) - 
.map_err(NotificationsOutError::Io) + Sink::poll_close(this.socket.as_mut(), cx).map_err(NotificationsOutError::Io) } } @@ -471,11 +476,12 @@ impl From for NotificationsHandshakeError { fn from(err: unsigned_varint::io::ReadError) -> Self { match err { unsigned_varint::io::ReadError::Io(err) => NotificationsHandshakeError::Io(err), - unsigned_varint::io::ReadError::Decode(err) => NotificationsHandshakeError::VarintDecode(err), + unsigned_varint::io::ReadError::Decode(err) => + NotificationsHandshakeError::VarintDecode(err), _ => { log::warn!("Unrecognized varint decoding error"); NotificationsHandshakeError::Io(From::from(io::ErrorKind::InvalidData)) - } + }, } } } @@ -492,7 +498,7 @@ mod tests { use super::{NotificationsIn, NotificationsInOpen, NotificationsOut, NotificationsOutOpen}; use async_std::net::{TcpListener, TcpStream}; - use futures::{prelude::*, channel::oneshot}; + use futures::{channel::oneshot, prelude::*}; use libp2p::core::upgrade; use std::borrow::Cow; @@ -506,8 +512,10 @@ mod tests { let NotificationsOutOpen { handshake, mut substream, .. } = upgrade::apply_outbound( socket, NotificationsOut::new(PROTO_NAME, Vec::new(), &b"initial message"[..], 1024 * 1024), - upgrade::Version::V1 - ).await.unwrap(); + upgrade::Version::V1, + ) + .await + .unwrap(); assert_eq!(handshake, b"hello world"); substream.send(b"test message".to_vec()).await.unwrap(); @@ -520,8 +528,10 @@ mod tests { let (socket, _) = listener.accept().await.unwrap(); let NotificationsInOpen { handshake, mut substream, .. } = upgrade::apply_inbound( socket, - NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024) - ).await.unwrap(); + NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024), + ) + .await + .unwrap(); assert_eq!(handshake, b"initial message"); substream.send_handshake(&b"hello world"[..]); @@ -545,8 +555,10 @@ mod tests { let NotificationsOutOpen { handshake, mut substream, .. 
} = upgrade::apply_outbound( socket, NotificationsOut::new(PROTO_NAME, Vec::new(), vec![], 1024 * 1024), - upgrade::Version::V1 - ).await.unwrap(); + upgrade::Version::V1, + ) + .await + .unwrap(); assert!(handshake.is_empty()); substream.send(Default::default()).await.unwrap(); @@ -559,8 +571,10 @@ mod tests { let (socket, _) = listener.accept().await.unwrap(); let NotificationsInOpen { handshake, mut substream, .. } = upgrade::apply_inbound( socket, - NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024) - ).await.unwrap(); + NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024), + ) + .await + .unwrap(); assert!(handshake.is_empty()); substream.send_handshake(vec![]); @@ -582,8 +596,9 @@ mod tests { let outcome = upgrade::apply_outbound( socket, NotificationsOut::new(PROTO_NAME, Vec::new(), &b"hello"[..], 1024 * 1024), - upgrade::Version::V1 - ).await; + upgrade::Version::V1, + ) + .await; // Despite the protocol negotiation being successfully conducted on the listener // side, we have to receive an error here because the listener didn't send the @@ -598,8 +613,10 @@ mod tests { let (socket, _) = listener.accept().await.unwrap(); let NotificationsInOpen { handshake, substream, .. } = upgrade::apply_inbound( socket, - NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024) - ).await.unwrap(); + NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024), + ) + .await + .unwrap(); assert_eq!(handshake, b"hello"); @@ -620,9 +637,15 @@ mod tests { let ret = upgrade::apply_outbound( socket, // We check that an initial message that is too large gets refused. 
- NotificationsOut::new(PROTO_NAME, Vec::new(), (0..32768).map(|_| 0).collect::>(), 1024 * 1024), - upgrade::Version::V1 - ).await; + NotificationsOut::new( + PROTO_NAME, + Vec::new(), + (0..32768).map(|_| 0).collect::>(), + 1024 * 1024, + ), + upgrade::Version::V1, + ) + .await; assert!(ret.is_err()); }); @@ -633,8 +656,9 @@ mod tests { let (socket, _) = listener.accept().await.unwrap(); let ret = upgrade::apply_inbound( socket, - NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024) - ).await; + NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024), + ) + .await; assert!(ret.is_err()); }); @@ -651,8 +675,9 @@ mod tests { let ret = upgrade::apply_outbound( socket, NotificationsOut::new(PROTO_NAME, Vec::new(), &b"initial message"[..], 1024 * 1024), - upgrade::Version::V1 - ).await; + upgrade::Version::V1, + ) + .await; assert!(ret.is_err()); }); @@ -663,8 +688,10 @@ mod tests { let (socket, _) = listener.accept().await.unwrap(); let NotificationsInOpen { handshake, mut substream, .. } = upgrade::apply_inbound( socket, - NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024) - ).await.unwrap(); + NotificationsIn::new(PROTO_NAME, Vec::new(), 1024 * 1024), + ) + .await + .unwrap(); assert_eq!(handshake, b"initial message"); // We check that a handshake that is too large gets refused. diff --git a/client/network/src/protocol/sync.rs b/client/network/src/protocol/sync.rs index 55b64c157c65e..1c7431f74daf7 100644 --- a/client/network/src/protocol/sync.rs +++ b/client/network/src/protocol/sync.rs @@ -29,35 +29,40 @@ //! order to update it. //! 
-use codec::Encode; -use blocks::BlockCollection; -use state::StateSync; -use sp_blockchain::{Error as ClientError, HeaderMetadata}; -use sp_consensus::{BlockOrigin, BlockStatus, - block_validation::{BlockAnnounceValidator, Validation}, - import_queue::{IncomingBlock, BlockImportResult, BlockImportError} +use crate::{ + protocol::message::{self, BlockAnnounce, BlockAttributes, BlockRequest, BlockResponse}, + schema::v1::{StateRequest, StateResponse}, }; -use crate::protocol::message::{ - self, BlockAnnounce, BlockAttributes, BlockRequest, BlockResponse, -}; -use crate::schema::v1::{StateResponse, StateRequest}; +use blocks::BlockCollection; +use codec::Encode; use either::Either; use extra_requests::ExtraRequests; +use futures::{stream::FuturesUnordered, task::Poll, Future, FutureExt, StreamExt}; use libp2p::PeerId; -use log::{debug, trace, warn, info, error}; +use log::{debug, error, info, trace, warn}; +use sp_arithmetic::traits::Saturating; +use sp_blockchain::{Error as ClientError, HeaderMetadata}; +use sp_consensus::{ + block_validation::{BlockAnnounceValidator, Validation}, + import_queue::{BlockImportError, BlockImportResult, IncomingBlock}, + BlockOrigin, BlockStatus, +}; use sp_runtime::{ - EncodedJustification, Justifications, generic::BlockId, traits::{ - Block as BlockT, Header as HeaderT, NumberFor, Zero, One, CheckedSub, SaturatedConversion, - Hash, HashFor, + Block as BlockT, CheckedSub, Hash, HashFor, Header as HeaderT, NumberFor, One, + SaturatedConversion, Zero, }, + EncodedJustification, Justifications, }; -use sp_arithmetic::traits::Saturating; +use state::StateSync; use std::{ - fmt, ops::Range, collections::{HashMap, hash_map::Entry, HashSet}, sync::Arc, pin::Pin, + collections::{hash_map::Entry, HashMap, HashSet}, + fmt, + ops::Range, + pin::Pin, + sync::Arc, }; -use futures::{task::Poll, Future, stream::FuturesUnordered, FutureExt, StreamExt}; mod blocks; mod extra_requests; @@ -126,7 +131,7 @@ mod rep { pub const BAD_JUSTIFICATION: Rep = 
Rep::new(-(1 << 16), "Bad justification"); /// Reputation change when a peer sent us invlid ancestry result. - pub const UNKNOWN_ANCESTOR:Rep = Rep::new(-(1 << 16), "DB Error"); + pub const UNKNOWN_ANCESTOR: Rep = Rep::new(-(1 << 16), "DB Error"); /// Peer response data does not have requested bits. pub const BAD_RESPONSE: Rep = Rep::new(-(1 << 12), "Incomplete response"); @@ -142,7 +147,7 @@ impl PendingRequests { match self { PendingRequests::Some(set) => { set.insert(id.clone()); - } + }, PendingRequests::All => {}, } } @@ -207,9 +212,8 @@ pub struct ChainSync { /// Total number of downloaded blocks. downloaded_blocks: usize, /// All block announcement that are currently being validated. - block_announce_validation: FuturesUnordered< - Pin> + Send>> - >, + block_announce_validation: + FuturesUnordered> + Send>>>, /// Stats per peer about the number of concurrent block announce validations. block_announce_validation_per_peer_stats: HashMap, /// State sync in progress, if any. @@ -258,7 +262,7 @@ pub struct PeerInfo { /// Their best block hash. pub best_hash: B::Hash, /// Their best block number. - pub best_number: NumberFor + pub best_number: NumberFor, } struct ForkTarget { @@ -276,11 +280,7 @@ pub enum PeerSyncState { /// Available for sync requests. Available, /// Searching for ancestors the Peer has in common with us. - AncestorSearch { - start: NumberFor, - current: NumberFor, - state: AncestorSearchState, - }, + AncestorSearch { start: NumberFor, current: NumberFor, state: AncestorSearchState }, /// Actively downloading new blocks, starting from the given Number. DownloadingNew(NumberFor), /// Downloading a stale block with given Hash. Stale means that it is a @@ -305,7 +305,7 @@ pub enum SyncState { /// Initial sync is complete, keep-up sync is active. Idle, /// Actively catching up with the chain. - Downloading + Downloading, } /// Reported state download progress. @@ -350,7 +350,7 @@ pub enum OnBlockData { /// The block should be imported. 
Import(BlockOrigin, Vec>), /// A new block request needs to be made to the given peer. - Request(PeerId, BlockRequest) + Request(PeerId, BlockRequest), } impl OnBlockData { @@ -371,7 +371,7 @@ pub enum OnStateData { /// The block and state that should be imported. Import(BlockOrigin, IncomingBlock), /// A new state request needs to be made to the given peer. - Request(PeerId, StateRequest) + Request(PeerId, StateRequest), } /// Result of [`ChainSync::poll_block_announce_validation`]. @@ -435,9 +435,7 @@ enum PreValidateBlockAnnounce { /// An error means that *this* node failed to validate it because some internal error happened. /// If the block announcement was invalid, [`Self::Failure`] is the correct variant to express /// this. - Error { - who: PeerId, - }, + Error { who: PeerId }, /// The block announcement should be skipped. /// /// This should *only* be returned when there wasn't a slot registered @@ -451,15 +449,9 @@ pub enum OnBlockJustification { /// The justification needs no further handling. Nothing, /// The justification should be imported. - Import { - peer: PeerId, - hash: B::Hash, - number: NumberFor, - justifications: Justifications - } + Import { peer: PeerId, hash: B::Hash, number: NumberFor, justifications: Justifications }, } - /// Operation mode. #[derive(Debug, PartialEq, Eq)] pub enum SyncMode { @@ -468,10 +460,7 @@ pub enum SyncMode { // Sync headers and block bodies Full, // Sync headers and the last finalied state - LightState { - storage_chain_mode: bool, - skip_proofs: bool, - }, + LightState { storage_chain_mode: bool, skip_proofs: bool }, } /// Result of [`ChainSync::has_slot_for_block_announce_validation`]. 
@@ -517,12 +506,15 @@ impl ChainSync { fn required_block_attributes(&self) -> BlockAttributes { match self.mode { - SyncMode::Full => BlockAttributes::HEADER | BlockAttributes::JUSTIFICATION | BlockAttributes::BODY, + SyncMode::Full => + BlockAttributes::HEADER | BlockAttributes::JUSTIFICATION | BlockAttributes::BODY, SyncMode::Light => BlockAttributes::HEADER | BlockAttributes::JUSTIFICATION, SyncMode::LightState { storage_chain_mode: false, .. } => BlockAttributes::HEADER | BlockAttributes::JUSTIFICATION | BlockAttributes::BODY, SyncMode::LightState { storage_chain_mode: true, .. } => - BlockAttributes::HEADER | BlockAttributes::JUSTIFICATION | BlockAttributes::INDEXED_BODY, + BlockAttributes::HEADER | + BlockAttributes::JUSTIFICATION | + BlockAttributes::INDEXED_BODY, } } @@ -538,24 +530,26 @@ impl ChainSync { /// /// Returns `None` if the peer is unknown. pub fn peer_info(&self, who: &PeerId) -> Option> { - self.peers.get(who).map(|p| PeerInfo { best_hash: p.best_hash, best_number: p.best_number }) + self.peers + .get(who) + .map(|p| PeerInfo { best_hash: p.best_hash, best_number: p.best_number }) } /// Returns the current sync status. pub fn status(&self) -> Status { let best_seen = self.peers.values().map(|p| p.best_number).max(); - let sync_state = - if let Some(n) = best_seen { - // A chain is classified as downloading if the provided best block is - // more than `MAJOR_SYNC_BLOCKS` behind the best queued block. - if n > self.best_queued_number && n - self.best_queued_number > MAJOR_SYNC_BLOCKS.into() { - SyncState::Downloading - } else { - SyncState::Idle - } + let sync_state = if let Some(n) = best_seen { + // A chain is classified as downloading if the provided best block is + // more than `MAJOR_SYNC_BLOCKS` behind the best queued block. 
+ if n > self.best_queued_number && n - self.best_queued_number > MAJOR_SYNC_BLOCKS.into() + { + SyncState::Downloading } else { SyncState::Idle - }; + } + } else { + SyncState::Idle + }; Status { state: sync_state, @@ -569,7 +563,10 @@ impl ChainSync { /// Number of active forks requests. This includes /// requests that are pending or could be issued right away. pub fn num_sync_requests(&self) -> usize { - self.fork_targets.values().filter(|f| f.number <= self.best_queued_number).count() + self.fork_targets + .values() + .filter(|f| f.number <= self.best_queued_number) + .count() } /// Number of downloaded blocks. @@ -580,23 +577,26 @@ impl ChainSync { /// Handle a new connected peer. /// /// Call this method whenever we connect to a new peer. - pub fn new_peer(&mut self, who: PeerId, best_hash: B::Hash, best_number: NumberFor) - -> Result>, BadPeer> - { + pub fn new_peer( + &mut self, + who: PeerId, + best_hash: B::Hash, + best_number: NumberFor, + ) -> Result>, BadPeer> { // There is nothing sync can get from the node that has no blockchain data. 
match self.block_status(&best_hash) { Err(e) => { debug!(target:"sync", "Error reading blockchain: {:?}", e); Err(BadPeer(who, rep::BLOCKCHAIN_READ_ERROR)) - } + }, Ok(BlockStatus::KnownBad) => { info!("💔 New peer with known bad best block {} ({}).", best_hash, best_number); Err(BadPeer(who, rep::BAD_BLOCK)) - } + }, Ok(BlockStatus::Unknown) => { if best_number.is_zero() { info!("💔 New peer with unknown genesis hash {} ({}).", best_hash, best_number); - return Err(BadPeer(who, rep::GENESIS_MISMATCH)); + return Err(BadPeer(who, rep::GENESIS_MISMATCH)) } // If there are more than `MAJOR_SYNC_BLOCKS` in the import queue then we have // enough to do in the import queue that it's not worth kicking off @@ -608,13 +608,16 @@ impl ChainSync { self.best_queued_hash, self.best_queued_number ); - self.peers.insert(who.clone(), PeerSync { - peer_id: who, - common_number: self.best_queued_number, - best_hash, - best_number, - state: PeerSyncState::Available, - }); + self.peers.insert( + who.clone(), + PeerSync { + peer_id: who, + common_number: self.best_queued_number, + best_hash, + best_number, + state: PeerSyncState::Available, + }, + ); return Ok(None) } @@ -644,38 +647,46 @@ impl ChainSync { start: self.best_queued_number, state: AncestorSearchState::ExponentialBackoff(One::one()), }, - Some(ancestry_request::(common_best)) + Some(ancestry_request::(common_best)), ) }; self.pending_requests.add(&who); - self.peers.insert(who.clone(), PeerSync { - peer_id: who, - common_number: Zero::zero(), - best_hash, - best_number, - state, - }); + self.peers.insert( + who.clone(), + PeerSync { + peer_id: who, + common_number: Zero::zero(), + best_hash, + best_number, + state, + }, + ); Ok(req) - } - Ok(BlockStatus::Queued) | Ok(BlockStatus::InChainWithState) | Ok(BlockStatus::InChainPruned) => { + }, + Ok(BlockStatus::Queued) | + Ok(BlockStatus::InChainWithState) | + Ok(BlockStatus::InChainPruned) => { debug!( target: "sync", "New peer with known best hash {} ({}).", best_hash, 
best_number, ); - self.peers.insert(who.clone(), PeerSync { - peer_id: who.clone(), - common_number: std::cmp::min(self.best_queued_number, best_number), - best_hash, - best_number, - state: PeerSyncState::Available, - }); + self.peers.insert( + who.clone(), + PeerSync { + peer_id: who.clone(), + common_number: std::cmp::min(self.best_queued_number, best_number), + best_hash, + best_number, + state: PeerSyncState::Available, + }, + ); self.pending_requests.add(&who); Ok(None) - } + }, } } @@ -688,9 +699,8 @@ impl ChainSync { /// Schedule a justification request for the given block. pub fn request_justification(&mut self, hash: &B::Hash, number: NumberFor) { let client = &self.client; - self.extra_justifications.schedule((*hash, number), |base, block| { - is_descendent_of(&**client, base, block) - }) + self.extra_justifications + .schedule((*hash, number), |base, block| is_descendent_of(&**client, base, block)) } /// Clear all pending justification requests. @@ -707,7 +717,9 @@ impl ChainSync { number: NumberFor, ) { if peers.is_empty() { - peers = self.peers.iter() + peers = self + .peers + .iter() // Only request blocks from peers who are ahead or on a par. .filter(|(_, peer)| peer.best_number >= number) .map(|(id, _)| id.clone()) @@ -725,14 +737,14 @@ impl ChainSync { if self.is_known(&hash) { debug!(target: "sync", "Refusing to sync known hash {:?}", hash); - return; + return } trace!(target: "sync", "Downloading requested old fork {:?}", hash); for peer_id in &peers { if let Some(peer) = self.peers.get_mut(peer_id) { - if let PeerSyncState::AncestorSearch {..} = peer.state { - continue; + if let PeerSyncState::AncestorSearch { .. 
} = peer.state { + continue } if number > peer.best_number { @@ -745,22 +757,24 @@ impl ChainSync { self.fork_targets .entry(hash.clone()) - .or_insert_with(|| ForkTarget { - number, - peers: Default::default(), - parent_hash: None, - }) - .peers.extend(peers); + .or_insert_with(|| ForkTarget { number, peers: Default::default(), parent_hash: None }) + .peers + .extend(peers); } /// Get an iterator over all scheduled justification requests. - pub fn justification_requests(&mut self) -> impl Iterator)> + '_ { + pub fn justification_requests( + &mut self, + ) -> impl Iterator)> + '_ { let peers = &mut self.peers; let mut matcher = self.extra_justifications.matcher(); std::iter::from_fn(move || { if let Some((peer, request)) = matcher.next(&peers) { - peers.get_mut(&peer) - .expect("`Matcher::next` guarantees the `PeerId` comes from the given peers; qed") + peers + .get_mut(&peer) + .expect( + "`Matcher::next` guarantees the `PeerId` comes from the given peers; qed", + ) .state = PeerSyncState::DownloadingJustification(request.0); let req = message::generic::BlockRequest { id: 0, @@ -768,7 +782,7 @@ impl ChainSync { from: message::FromBlock::Hash(request.0), to: None, direction: message::Direction::Ascending, - max: Some(1) + max: Some(1), }; Some((peer, req)) } else { @@ -790,7 +804,8 @@ impl ChainSync { let attrs = self.required_block_attributes(); let blocks = &mut self.blocks; let fork_targets = &mut self.fork_targets; - let last_finalized = std::cmp::min(self.best_queued_number, self.client.info().finalized_number); + let last_finalized = + std::cmp::min(self.best_queued_number, self.client.info().finalized_number); let best_queued = self.best_queued_number; let client = &self.client; let queue = &self.queue_blocks; @@ -806,9 +821,10 @@ impl ChainSync { // number is smaller than the last finalized block number, we should do an ancestor // search to find a better common block. If the queue is full we wait till all blocks are // imported though. 
- if best_queued.saturating_sub(peer.common_number) > MAX_BLOCKS_TO_LOOK_BACKWARDS.into() - && best_queued < peer.best_number && peer.common_number < last_finalized - && queue.len() <= MAJOR_SYNC_BLOCKS.into() + if best_queued.saturating_sub(peer.common_number) > MAX_BLOCKS_TO_LOOK_BACKWARDS.into() && + best_queued < peer.best_number && + peer.common_number < last_finalized && + queue.len() <= MAJOR_SYNC_BLOCKS.into() { trace!( target: "sync", @@ -843,18 +859,14 @@ impl ChainSync { req, ); Some((id, req)) - } else if let Some((hash, req)) = fork_sync_request( - id, - fork_targets, - best_queued, - last_finalized, - attrs, - |hash| if queue.contains(hash) { - BlockStatus::Queued - } else { - client.block_status(&BlockId::Hash(*hash)).unwrap_or(BlockStatus::Unknown) - }, - ) { + } else if let Some((hash, req)) = + fork_sync_request(id, fork_targets, best_queued, last_finalized, attrs, |hash| { + if queue.contains(hash) { + BlockStatus::Queued + } else { + client.block_status(&BlockId::Hash(*hash)).unwrap_or(BlockStatus::Unknown) + } + }) { trace!(target: "sync", "Downloading fork {:?} from {}", hash, id); peer.state = PeerSyncState::DownloadingStale(hash); Some((id, req)) @@ -869,11 +881,11 @@ impl ChainSync { pub fn state_request(&mut self) -> Option<(PeerId, StateRequest)> { if let Some(sync) = &self.state_sync { if sync.is_complete() { - return None; + return None } if self.peers.iter().any(|(_, peer)| peer.state == PeerSyncState::DownloadingState) { // Only one pending state request is allowed. 
- return None; + return None } for (id, peer) in self.peers.iter_mut() { if peer.state.is_available() && peer.common_number >= sync.target_block_num() { @@ -898,38 +910,42 @@ impl ChainSync { &mut self, who: &PeerId, request: Option>, - response: BlockResponse + response: BlockResponse, ) -> Result, BadPeer> { self.downloaded_blocks += response.blocks.len(); - let new_blocks: Vec> = - if let Some(peer) = self.peers.get_mut(who) { - let mut blocks = response.blocks; - if request.as_ref().map_or(false, |r| r.direction == message::Direction::Descending) { - trace!(target: "sync", "Reversing incoming block list"); - blocks.reverse() - } - self.pending_requests.add(who); - if let Some(request) = request { - match &mut peer.state { - PeerSyncState::DownloadingNew(start_block) => { - self.blocks.clear_peer_download(who); - let start_block = *start_block; - peer.state = PeerSyncState::Available; - validate_blocks::(&blocks, who, Some(request))?; - self.blocks.insert(start_block, blocks, who.clone()); - self.drain_blocks() + let new_blocks: Vec> = if let Some(peer) = self.peers.get_mut(who) { + let mut blocks = response.blocks; + if request + .as_ref() + .map_or(false, |r| r.direction == message::Direction::Descending) + { + trace!(target: "sync", "Reversing incoming block list"); + blocks.reverse() + } + self.pending_requests.add(who); + if let Some(request) = request { + match &mut peer.state { + PeerSyncState::DownloadingNew(start_block) => { + self.blocks.clear_peer_download(who); + let start_block = *start_block; + peer.state = PeerSyncState::Available; + validate_blocks::(&blocks, who, Some(request))?; + self.blocks.insert(start_block, blocks, who.clone()); + self.drain_blocks() + }, + PeerSyncState::DownloadingStale(_) => { + peer.state = PeerSyncState::Available; + if blocks.is_empty() { + debug!(target: "sync", "Empty block response from {}", who); + return Err(BadPeer(who.clone(), rep::NO_BLOCK)) } - PeerSyncState::DownloadingStale(_) => { - peer.state = 
PeerSyncState::Available; - if blocks.is_empty() { - debug!(target: "sync", "Empty block response from {}", who); - return Err(BadPeer(who.clone(), rep::NO_BLOCK)); - } - validate_blocks::(&blocks, who, Some(request))?; - blocks.into_iter().map(|b| { - let justifications = b.justifications.or( - legacy_justification_mapping(b.justification) - ); + validate_blocks::(&blocks, who, Some(request))?; + blocks + .into_iter() + .map(|b| { + let justifications = b + .justifications + .or(legacy_justification_mapping(b.justification)); IncomingBlock { hash: b.hash, header: b.header, @@ -942,110 +958,114 @@ impl ChainSync { skip_execution: self.skip_execution(), state: None, } - }).collect() + }) + .collect() + }, + PeerSyncState::AncestorSearch { current, start, state } => { + let matching_hash = match (blocks.get(0), self.client.hash(*current)) { + (Some(block), Ok(maybe_our_block_hash)) => { + trace!( + target: "sync", + "Got ancestry block #{} ({}) from peer {}", + current, + block.hash, + who, + ); + maybe_our_block_hash.filter(|x| x == &block.hash) + }, + (None, _) => { + debug!( + target: "sync", + "Invalid response when searching for ancestor from {}", + who, + ); + return Err(BadPeer(who.clone(), rep::UNKNOWN_ANCESTOR)) + }, + (_, Err(e)) => { + info!( + target: "sync", + "❌ Error answering legitimate blockchain query: {:?}", + e, + ); + return Err(BadPeer(who.clone(), rep::BLOCKCHAIN_READ_ERROR)) + }, + }; + if matching_hash.is_some() { + if *start < self.best_queued_number && + self.best_queued_number <= peer.best_number + { + // We've made progress on this chain since the search was started. + // Opportunistically set common number to updated number + // instead of the one that started the search. 
+ peer.common_number = self.best_queued_number; + } else if peer.common_number < *current { + peer.common_number = *current; + } } - PeerSyncState::AncestorSearch { current, start, state } => { - let matching_hash = match (blocks.get(0), self.client.hash(*current)) { - (Some(block), Ok(maybe_our_block_hash)) => { - trace!( - target: "sync", - "Got ancestry block #{} ({}) from peer {}", - current, - block.hash, - who, - ); - maybe_our_block_hash.filter(|x| x == &block.hash) - }, - (None, _) => { - debug!( - target: "sync", - "Invalid response when searching for ancestor from {}", - who, - ); - return Err(BadPeer(who.clone(), rep::UNKNOWN_ANCESTOR)) - }, - (_, Err(e)) => { - info!( - target: "sync", - "❌ Error answering legitimate blockchain query: {:?}", - e, - ); - return Err(BadPeer(who.clone(), rep::BLOCKCHAIN_READ_ERROR)) - } + if matching_hash.is_none() && current.is_zero() { + trace!(target:"sync", "Ancestry search: genesis mismatch for peer {}", who); + return Err(BadPeer(who.clone(), rep::GENESIS_MISMATCH)) + } + if let Some((next_state, next_num)) = + handle_ancestor_search_state(state, *current, matching_hash.is_some()) + { + peer.state = PeerSyncState::AncestorSearch { + current: next_num, + start: *start, + state: next_state, }; - if matching_hash.is_some() { - if *start < self.best_queued_number && self.best_queued_number <= peer.best_number { - // We've made progress on this chain since the search was started. - // Opportunistically set common number to updated number - // instead of the one that started the search. 
- peer.common_number = self.best_queued_number; - } - else if peer.common_number < *current { - peer.common_number = *current; - } - } - if matching_hash.is_none() && current.is_zero() { - trace!(target:"sync", "Ancestry search: genesis mismatch for peer {}", who); - return Err(BadPeer(who.clone(), rep::GENESIS_MISMATCH)) - } - if let Some((next_state, next_num)) = - handle_ancestor_search_state(state, *current, matching_hash.is_some()) + return Ok(OnBlockData::Request( + who.clone(), + ancestry_request::(next_num), + )) + } else { + // Ancestry search is complete. Check if peer is on a stale fork unknown to us and + // add it to sync targets if necessary. + trace!( + target: "sync", + "Ancestry search complete. Ours={} ({}), Theirs={} ({}), Common={:?} ({})", + self.best_queued_hash, + self.best_queued_number, + peer.best_hash, + peer.best_number, + matching_hash, + peer.common_number, + ); + if peer.common_number < peer.best_number && + peer.best_number < self.best_queued_number { - peer.state = PeerSyncState::AncestorSearch { - current: next_num, - start: *start, - state: next_state, - }; - return Ok( - OnBlockData::Request(who.clone(), ancestry_request::(next_num)) - ) - } else { - // Ancestry search is complete. Check if peer is on a stale fork unknown to us and - // add it to sync targets if necessary. trace!( target: "sync", - "Ancestry search complete. 
Ours={} ({}), Theirs={} ({}), Common={:?} ({})", - self.best_queued_hash, - self.best_queued_number, + "Added fork target {} for {}", peer.best_hash, - peer.best_number, - matching_hash, - peer.common_number, + who, ); - if peer.common_number < peer.best_number - && peer.best_number < self.best_queued_number - { - trace!( - target: "sync", - "Added fork target {} for {}", - peer.best_hash, - who, - ); - self.fork_targets - .entry(peer.best_hash.clone()) - .or_insert_with(|| ForkTarget { - number: peer.best_number, - parent_hash: None, - peers: Default::default(), - }) - .peers.insert(who.clone()); - } - peer.state = PeerSyncState::Available; - Vec::new() + self.fork_targets + .entry(peer.best_hash.clone()) + .or_insert_with(|| ForkTarget { + number: peer.best_number, + parent_hash: None, + peers: Default::default(), + }) + .peers + .insert(who.clone()); } - }, - PeerSyncState::Available - | PeerSyncState::DownloadingJustification(..) - | PeerSyncState::DownloadingState - => Vec::new() - } - } else { - // When request.is_none() this is a block announcement. Just accept blocks. - validate_blocks::(&blocks, who, None)?; - blocks.into_iter().map(|b| { - let justifications = b.justifications.or( - legacy_justification_mapping(b.justification) - ); + peer.state = PeerSyncState::Available; + Vec::new() + } + }, + PeerSyncState::Available | + PeerSyncState::DownloadingJustification(..) | + PeerSyncState::DownloadingState => Vec::new(), + } + } else { + // When request.is_none() this is a block announcement. Just accept blocks. + validate_blocks::(&blocks, who, None)?; + blocks + .into_iter() + .map(|b| { + let justifications = + b.justifications.or(legacy_justification_mapping(b.justification)); IncomingBlock { hash: b.hash, header: b.header, @@ -1058,12 +1078,13 @@ impl ChainSync { skip_execution: true, state: None, } - }).collect() - } - } else { - // We don't know of this peer, so we also did not request anything from it. 
- return Err(BadPeer(who.clone(), rep::NOT_REQUESTED)); - }; + }) + .collect() + } + } else { + // We don't know of this peer, so we also did not request anything from it. + return Err(BadPeer(who.clone(), rep::NOT_REQUESTED)) + }; Ok(self.validate_and_queue_blocks(new_blocks)) } @@ -1087,7 +1108,7 @@ impl ChainSync { sync.import(response) } else { debug!(target: "sync", "Ignored obsolete state response from {}", who); - return Err(BadPeer(who.clone(), rep::NOT_REQUESTED)); + return Err(BadPeer(who.clone(), rep::NOT_REQUESTED)) }; match import_result { @@ -1112,14 +1133,13 @@ impl ChainSync { }; debug!(target: "sync", "State sync is complete. Import is queued"); Ok(OnStateData::Import(origin, block)) - } - state::ImportResult::Continue(request) => { - Ok(OnStateData::Request(who.clone(), request)) - } + }, + state::ImportResult::Continue(request) => + Ok(OnStateData::Request(who.clone(), request)), state::ImportResult::BadResponse => { debug!(target: "sync", "Bad state data received from {}", who); Err(BadPeer(who.clone(), rep::BAD_BLOCK)) - } + }, } } @@ -1139,7 +1159,10 @@ impl ChainSync { BlockOrigin::NetworkInitialSync }; - if let Some((h, n)) = new_blocks.last().and_then(|b| b.header.as_ref().map(|h| (&b.hash, *h.number()))) { + if let Some((h, n)) = new_blocks + .last() + .and_then(|b| b.header.as_ref().map(|h| (&b.hash, *h.number()))) + { trace!( target:"sync", "Accepted {} blocks ({:?}) with origin {:?}", @@ -1159,16 +1182,17 @@ impl ChainSync { /// /// Returns `Some` if this produces a justification that must be imported /// into the import queue. 
- pub fn on_block_justification - (&mut self, who: PeerId, response: BlockResponse) -> Result, BadPeer> - { - let peer = - if let Some(peer) = self.peers.get_mut(&who) { - peer - } else { - error!(target: "sync", "💔 Called on_block_justification with a bad peer ID"); - return Ok(OnBlockJustification::Nothing) - }; + pub fn on_block_justification( + &mut self, + who: PeerId, + response: BlockResponse, + ) -> Result, BadPeer> { + let peer = if let Some(peer) = self.peers.get_mut(&who) { + peer + } else { + error!(target: "sync", "💔 Called on_block_justification with a bad peer ID"); + return Ok(OnBlockJustification::Nothing) + }; self.pending_requests.add(&who); if let PeerSyncState::DownloadingJustification(hash) = peer.state { @@ -1181,7 +1205,7 @@ impl ChainSync { target: "sync", "💔 Invalid block justification provided by {}: requested: {:?} got: {:?}", who, hash, block.hash ); - return Err(BadPeer(who, rep::BAD_JUSTIFICATION)); + return Err(BadPeer(who, rep::BAD_JUSTIFICATION)) } block.justifications.or(legacy_justification_mapping(block.justification)) @@ -1197,9 +1221,8 @@ impl ChainSync { None }; - if let Some((peer, hash, number, j)) = self - .extra_justifications - .on_response(who, justification) + if let Some((peer, hash, number, j)) = + self.extra_justifications.on_response(who, justification) { return Ok(OnBlockJustification::Import { peer, hash, number, justifications: j }) } @@ -1230,7 +1253,7 @@ impl ChainSync { } for (result, hash) in results { if has_error { - continue; + continue } if result.is_err() { @@ -1242,7 +1265,7 @@ impl ChainSync { if let Some(peer) = who.and_then(|p| self.peers.get_mut(&p)) { peer.update_common_number(number); } - } + }, Ok(BlockImportResult::ImportedUnknown(number, aux, who)) => { if aux.clear_justification_requests { trace!( @@ -1274,7 +1297,8 @@ impl ChainSync { if let Some(peer) = who.and_then(|p| self.peers.get_mut(&p)) { peer.update_common_number(number); } - let state_sync_complete = 
self.state_sync.as_ref().map_or(false, |s| s.target() == hash); + let state_sync_complete = + self.state_sync.as_ref().map_or(false, |s| s.target() == hash); if state_sync_complete { info!( target: "sync", @@ -1286,7 +1310,7 @@ impl ChainSync { output.extend(self.restart()); } }, - Err(BlockImportError::IncompleteHeader(who)) => { + Err(BlockImportError::IncompleteHeader(who)) => if let Some(peer) = who { warn!( target: "sync", @@ -1294,9 +1318,8 @@ impl ChainSync { ); output.push(Err(BadPeer(peer, rep::INCOMPLETE_HEADER))); output.extend(self.restart()); - } - }, - Err(BlockImportError::VerificationFailed(who, e)) => { + }, + Err(BlockImportError::VerificationFailed(who, e)) => if let Some(peer) = who { warn!( target: "sync", @@ -1307,9 +1330,8 @@ impl ChainSync { ); output.push(Err(BadPeer(peer, rep::VERIFICATION_FAIL))); output.extend(self.restart()); - } - }, - Err(BlockImportError::BadBlock(who)) => { + }, + Err(BlockImportError::BadBlock(who)) => if let Some(peer) = who { warn!( target: "sync", @@ -1318,21 +1340,19 @@ impl ChainSync { peer, ); output.push(Err(BadPeer(peer, rep::BAD_BLOCK))); - } - }, + }, Err(BlockImportError::MissingState) => { // This may happen if the chain we were requesting upon has been discarded // in the meantime because other chain has been finalized. // Don't mark it as bad as it still may be synced if explicitly requested. trace!(target: "sync", "Obsolete block {:?}", hash); }, - e @ Err(BlockImportError::UnknownParent) | - e @ Err(BlockImportError::Other(_)) => { + e @ Err(BlockImportError::UnknownParent) | e @ Err(BlockImportError::Other(_)) => { warn!(target: "sync", "💔 Error importing block {:?}: {:?}", hash, e); self.state_sync = None; output.extend(self.restart()); }, - Err(BlockImportError::Cancelled) => {} + Err(BlockImportError::Cancelled) => {}, }; } @@ -1344,7 +1364,8 @@ impl ChainSync { /// with or without errors. 
pub fn on_justification_import(&mut self, hash: B::Hash, number: NumberFor, success: bool) { let finalization_result = if success { Ok((hash, number)) } else { Err(()) }; - self.extra_justifications.try_finalize_root((hash, number), finalization_result, true); + self.extra_justifications + .try_finalize_root((hash, number), finalization_result, true); self.pending_requests.set_all(); } @@ -1356,12 +1377,10 @@ impl ChainSync { }); if let SyncMode::LightState { skip_proofs, .. } = &self.mode { - if self.state_sync.is_none() - && !self.peers.is_empty() - && self.queue_blocks.is_empty() - { + if self.state_sync.is_none() && !self.peers.is_empty() && self.queue_blocks.is_empty() { // Finalized a recent block. - let mut heads: Vec<_> = self.peers.iter().map(|(_, peer)| peer.best_number).collect(); + let mut heads: Vec<_> = + self.peers.iter().map(|(_, peer)| peer.best_number).collect(); heads.sort(); let median = heads[heads.len() / 2]; if number + STATE_SYNC_FINALITY_THRESHOLD.saturated_into() >= median { @@ -1372,7 +1391,8 @@ impl ChainSync { number, hash, ); - self.state_sync = Some(StateSync::new(self.client.clone(), header, *skip_proofs)); + self.state_sync = + Some(StateSync::new(self.client.clone(), header, *skip_proofs)); } } } @@ -1400,15 +1420,12 @@ impl ChainSync { self.best_queued_hash = *hash; // Update common blocks for (n, peer) in self.peers.iter_mut() { - if let PeerSyncState::AncestorSearch {..} = peer.state { + if let PeerSyncState::AncestorSearch { .. } = peer.state { // Wait for ancestry search to complete first. 
- continue; + continue } - let new_common_number = if peer.best_number >= number { - number - } else { - peer.best_number - }; + let new_common_number = + if peer.best_number >= number { number } else { peer.best_number }; trace!( target: "sync", "Updating peer {} info, ours={}, common={}->{}, their best={}", @@ -1435,7 +1452,10 @@ impl ChainSync { /// /// It is *required* to call [`Self::peer_block_announce_validation_finished`] when the /// validation is finished to clear the slot. - fn has_slot_for_block_announce_validation(&mut self, peer: &PeerId) -> HasSlotForBlockAnnounceValidation { + fn has_slot_for_block_announce_validation( + &mut self, + peer: &PeerId, + ) -> HasSlotForBlockAnnounceValidation { if self.block_announce_validation.len() >= MAX_CONCURRENT_BLOCK_ANNOUNCE_VALIDATIONS { return HasSlotForBlockAnnounceValidation::TotalMaximumSlotsReached } @@ -1478,15 +1498,18 @@ impl ChainSync { ); if number.is_zero() { - self.block_announce_validation.push(async move { - warn!( - target: "sync", - "💔 Ignored genesis block (#0) announcement from {}: {}", - who, - hash, - ); - PreValidateBlockAnnounce::Skip - }.boxed()); + self.block_announce_validation.push( + async move { + warn!( + target: "sync", + "💔 Ignored genesis block (#0) announcement from {}: {}", + who, + hash, + ); + PreValidateBlockAnnounce::Skip + } + .boxed(), + ); return } @@ -1494,18 +1517,21 @@ impl ChainSync { match self.has_slot_for_block_announce_validation(&who) { HasSlotForBlockAnnounceValidation::Yes => {}, HasSlotForBlockAnnounceValidation::TotalMaximumSlotsReached => { - self.block_announce_validation.push(async move { - warn!( - target: "sync", - "💔 Ignored block (#{} -- {}) announcement from {} because all validation slots are occupied.", - number, - hash, - who, - ); - PreValidateBlockAnnounce::Skip - }.boxed()); + self.block_announce_validation.push( + async move { + warn!( + target: "sync", + "💔 Ignored block (#{} -- {}) announcement from {} because all validation slots are 
occupied.", + number, + hash, + who, + ); + PreValidateBlockAnnounce::Skip + } + .boxed(), + ); return - } + }, HasSlotForBlockAnnounceValidation::MaximumPeerSlotsReached => { self.block_announce_validation.push(async move { warn!( @@ -1518,7 +1544,7 @@ impl ChainSync { PreValidateBlockAnnounce::Skip }.boxed()); return - } + }, } // Let external validator check the block announcement. @@ -1526,33 +1552,36 @@ impl ChainSync { let future = self.block_announce_validator.validate(&header, assoc_data); let hash = hash.clone(); - self.block_announce_validation.push(async move { - match future.await { - Ok(Validation::Success { is_new_best }) => PreValidateBlockAnnounce::Process { - is_new_best: is_new_best || is_best, - announce, - who, - }, - Ok(Validation::Failure { disconnect }) => { - debug!( - target: "sync", - "Block announcement validation of block {:?} from {} failed", - hash, + self.block_announce_validation.push( + async move { + match future.await { + Ok(Validation::Success { is_new_best }) => PreValidateBlockAnnounce::Process { + is_new_best: is_new_best || is_best, + announce, who, - ); - PreValidateBlockAnnounce::Failure { who, disconnect } - } - Err(e) => { - debug!( - target: "sync", - "💔 Block announcement validation of block {:?} errored: {}", - hash, - e, - ); - PreValidateBlockAnnounce::Error { who } + }, + Ok(Validation::Failure { disconnect }) => { + debug!( + target: "sync", + "Block announcement validation of block {:?} from {} failed", + hash, + who, + ); + PreValidateBlockAnnounce::Failure { who, disconnect } + }, + Err(e) => { + debug!( + target: "sync", + "💔 Block announcement validation of block {:?} errored: {}", + hash, + e, + ); + PreValidateBlockAnnounce::Error { who } + }, } } - }.boxed()); + .boxed(), + ); } /// Poll block announce validation. 
@@ -1603,7 +1632,7 @@ impl ChainSync { if *entry.get() == 0 { entry.remove(); } - } + }, } } @@ -1622,9 +1651,8 @@ impl ChainSync { ); return PollBlockAnnounceValidation::Failure { who, disconnect } }, - PreValidateBlockAnnounce::Process { announce, is_new_best, who } => { - (announce, is_new_best, who) - }, + PreValidateBlockAnnounce::Process { announce, is_new_best, who } => + (announce, is_new_best, who), PreValidateBlockAnnounce::Error { .. } | PreValidateBlockAnnounce::Skip => { debug!( target: "sync", @@ -1644,7 +1672,8 @@ impl ChainSync { let number = *announce.header.number(); let hash = announce.header.hash(); - let parent_status = self.block_status(announce.header.parent_hash()).unwrap_or(BlockStatus::Unknown); + let parent_status = + self.block_status(announce.header.parent_hash()).unwrap_or(BlockStatus::Unknown); let known_parent = parent_status != BlockStatus::Unknown; let ancient_parent = parent_status == BlockStatus::InChainPruned; @@ -1662,7 +1691,7 @@ impl ChainSync { peer.best_hash = hash; } - if let PeerSyncState::AncestorSearch {..} = peer.state { + if let PeerSyncState::AncestorSearch { .. 
} = peer.state { trace!(target: "sync", "Peer state is ancestor search."); return PollBlockAnnounceValidation::Nothing { is_best, who, announce } } @@ -1672,8 +1701,8 @@ impl ChainSync { if is_best { if known && self.best_queued_number >= number { peer.update_common_number(number); - } else if announce.header.parent_hash() == &self.best_queued_hash - || known_parent && self.best_queued_number >= number + } else if announce.header.parent_hash() == &self.best_queued_hash || + known_parent && self.best_queued_number >= number { peer.update_common_number(number - One::one()); } @@ -1727,7 +1756,8 @@ impl ChainSync { parent_hash: Some(*announce.header.parent_hash()), peers: Default::default(), }) - .peers.insert(who.clone()); + .peers + .insert(who.clone()); } PollBlockAnnounceValidation::Nothing { is_best, who, announce } @@ -1775,9 +1805,9 @@ impl ChainSync { // We make sure our commmon number is at least something we have. p.common_number = self.best_queued_number; self.peers.insert(id, p); - return None; - } - _ => {} + return None + }, + _ => {}, } // handle peers that were in other states. @@ -1792,7 +1822,7 @@ impl ChainSync { /// Find a block to start sync from. If we sync with state, that's the latest block we have state for. fn reset_sync_start_point(&mut self) -> Result<(), ClientError> { let info = self.client.info(); - if matches!(self.mode, SyncMode::LightState {..}) && info.finalized_state.is_some() { + if matches!(self.mode, SyncMode::LightState { .. }) && info.finalized_state.is_some() { log::warn!( target: "sync", "Can't use fast sync mode with a partially synced database. Reverting to full sync mode." @@ -1803,7 +1833,9 @@ impl ChainSync { self.best_queued_hash = info.best_hash; self.best_queued_number = info.best_number; if self.mode == SyncMode::Full { - if self.client.block_status(&BlockId::hash(info.best_hash))? != BlockStatus::InChainWithState { + if self.client.block_status(&BlockId::hash(info.best_hash))? 
!= + BlockStatus::InChainWithState + { self.import_existing = true; // Latest state is missing, start with the last finalized state or genesis instead. if let Some((hash, number)) = info.finalized_state { @@ -1836,7 +1868,9 @@ impl ChainSync { /// Is any peer downloading the given hash? fn is_already_downloading(&self, hash: &B::Hash) -> bool { - self.peers.iter().any(|(_, p)| p.state == PeerSyncState::DownloadingStale(*hash)) + self.peers + .iter() + .any(|(_, p)| p.state == PeerSyncState::DownloadingStale(*hash)) } /// Return some key metrics. @@ -1846,7 +1880,7 @@ impl ChainSync { queued_blocks: self.queue_blocks.len().try_into().unwrap_or(std::u32::MAX), fork_targets: self.fork_targets.len().try_into().unwrap_or(std::u32::MAX), justifications: self.extra_justifications.metrics(), - _priv: () + _priv: (), } } @@ -1856,9 +1890,10 @@ impl ChainSync { .drain(self.best_queued_number + One::one()) .into_iter() .map(|block_data| { - let justifications = block_data.block.justifications.or( - legacy_justification_mapping(block_data.block.justification) - ); + let justifications = block_data + .block + .justifications + .or(legacy_justification_mapping(block_data.block.justification)); IncomingBlock { hash: block_data.block.hash, header: block_data.block.header, @@ -1871,16 +1906,18 @@ impl ChainSync { skip_execution: self.skip_execution(), state: None, } - }).collect() + }) + .collect() } - } // This is purely during a backwards compatible transitionary period and should be removed // once we can assume all nodes can send and receive multiple Justifications // The ID tag is hardcoded here to avoid depending on the GRANDPA crate. 
// See: https://github.com/paritytech/substrate/issues/8172 -fn legacy_justification_mapping(justification: Option) -> Option { +fn legacy_justification_mapping( + justification: Option, +) -> Option { justification.map(|just| (*b"FRNK", just).into()) } @@ -1889,7 +1926,7 @@ pub(crate) struct Metrics { pub(crate) queued_blocks: u32, pub(crate) fork_targets: u32, pub(crate) justifications: extra_requests::Metrics, - _priv: () + _priv: (), } /// Request the ancestry for a block. Sends a request for header and justification for the given @@ -1901,7 +1938,7 @@ fn ancestry_request(block: NumberFor) -> BlockRequest { from: message::FromBlock::Number(block), to: None, direction: message::Direction::Ascending, - max: Some(1) + max: Some(1), } } @@ -1935,7 +1972,7 @@ fn handle_ancestor_search_state( let next_distance_to_tip = *next_distance_to_tip; if block_hash_match && next_distance_to_tip == One::one() { // We found the ancestor in the first step so there is no need to execute binary search. - return None; + return None } if block_hash_match { let left = curr_block_num; @@ -1943,15 +1980,18 @@ fn handle_ancestor_search_state( let middle = left + (right - left) / two; Some((AncestorSearchState::BinarySearch(left, right), middle)) } else { - let next_block_num = curr_block_num.checked_sub(&next_distance_to_tip) - .unwrap_or_else(Zero::zero); + let next_block_num = + curr_block_num.checked_sub(&next_distance_to_tip).unwrap_or_else(Zero::zero); let next_distance_to_tip = next_distance_to_tip * two; - Some((AncestorSearchState::ExponentialBackoff(next_distance_to_tip), next_block_num)) + Some(( + AncestorSearchState::ExponentialBackoff(next_distance_to_tip), + next_block_num, + )) } - } + }, AncestorSearchState::BinarySearch(mut left, mut right) => { if left >= curr_block_num { - return None; + return None } if block_hash_match { left = curr_block_num; @@ -1961,7 +2001,7 @@ fn handle_ancestor_search_state( assert!(right >= left); let middle = left + (right - left) / two; 
Some((AncestorSearchState::BinarySearch(left, right), middle)) - } + }, } } @@ -1977,7 +2017,7 @@ fn peer_block_request( ) -> Option<(Range>, BlockRequest)> { if best_num >= peer.best_number { // Will be downloaded as alternative fork instead. - return None; + return None } else if peer.common_number < finalized { trace!( target: "sync", @@ -2009,7 +2049,7 @@ fn peer_block_request( from, to: None, direction: message::Direction::Descending, - max: Some((range.end - range.start).saturated_into::()) + max: Some((range.end - range.start).saturated_into::()), }; Some((range, request)) @@ -2027,11 +2067,11 @@ fn fork_sync_request( targets.retain(|hash, r| { if r.number <= finalized { trace!(target: "sync", "Removed expired fork sync request {:?} (#{})", hash, r.number); - return false; + return false } if check_block(hash) != BlockStatus::Unknown { trace!(target: "sync", "Removed obsolete fork sync request {:?} (#{})", hash, r.number); - return false; + return false } true }); @@ -2048,27 +2088,34 @@ fn fork_sync_request( 1 }; trace!(target: "sync", "Downloading requested fork {:?} from {}, {} blocks", hash, id, count); - return Some((hash.clone(), message::generic::BlockRequest { - id: 0, - fields: attributes.clone(), - from: message::FromBlock::Hash(hash.clone()), - to: None, - direction: message::Direction::Descending, - max: Some(count), - })) + return Some(( + hash.clone(), + message::generic::BlockRequest { + id: 0, + fields: attributes.clone(), + from: message::FromBlock::Hash(hash.clone()), + to: None, + direction: message::Direction::Descending, + max: Some(count), + }, + )) } } None } /// Returns `true` if the given `block` is a descendent of `base`. 
-fn is_descendent_of(client: &T, base: &Block::Hash, block: &Block::Hash) -> sp_blockchain::Result - where - Block: BlockT, - T: HeaderMetadata + ?Sized, +fn is_descendent_of( + client: &T, + base: &Block::Hash, + block: &Block::Hash, +) -> sp_blockchain::Result +where + Block: BlockT, + T: HeaderMetadata + ?Sized, { if base == block { - return Ok(false); + return Ok(false) } let ancestor = sp_blockchain::lowest_common_ancestor(client, *block, *base)?; @@ -2101,13 +2148,13 @@ fn validate_blocks( blocks.last() } else { blocks.first() - }.and_then(|b| b.header.as_ref()); + } + .and_then(|b| b.header.as_ref()); - let expected_block = block_header.as_ref() - .map_or(false, |h| match request.from { - message::FromBlock::Hash(hash) => h.hash() == hash, - message::FromBlock::Number(n) => h.number() == &n, - }); + let expected_block = block_header.as_ref().map_or(false, |h| match request.from { + message::FromBlock::Hash(hash) => h.hash() == hash, + message::FromBlock::Number(n) => h.number() == &n, + }); if !expected_block { debug!( @@ -2120,8 +2167,8 @@ fn validate_blocks( return Err(BadPeer(who.clone(), rep::NOT_REQUESTED)) } - if request.fields.contains(message::BlockAttributes::HEADER) - && blocks.iter().any(|b| b.header.is_none()) + if request.fields.contains(message::BlockAttributes::HEADER) && + blocks.iter().any(|b| b.header.is_none()) { trace!( target: "sync", @@ -2132,8 +2179,8 @@ fn validate_blocks( return Err(BadPeer(who.clone(), rep::BAD_RESPONSE)) } - if request.fields.contains(message::BlockAttributes::BODY) - && blocks.iter().any(|b| b.body.is_none()) + if request.fields.contains(message::BlockAttributes::BODY) && + blocks.iter().any(|b| b.body.is_none()) { trace!( target: "sync", @@ -2161,7 +2208,8 @@ fn validate_blocks( } if let (Some(header), Some(body)) = (&b.header, &b.body) { let expected = *header.extrinsics_root(); - let got = HashFor::::ordered_trie_root(body.iter().map(Encode::encode).collect()); + let got = + 
HashFor::::ordered_trie_root(body.iter().map(Encode::encode).collect()); if expected != got { debug!( target:"sync", @@ -2181,17 +2229,19 @@ fn validate_blocks( #[cfg(test)] mod test { - use super::message::{FromBlock, BlockState, BlockData}; - use super::*; + use super::{ + message::{BlockData, BlockState, FromBlock}, + *, + }; + use futures::{executor::block_on, future::poll_fn}; use sc_block_builder::BlockBuilderProvider; use sp_blockchain::HeaderBackend; use sp_consensus::block_validation::DefaultBlockAnnounceValidator; use substrate_test_runtime_client::{ runtime::{Block, Hash, Header}, - ClientBlockImportExt, DefaultTestClientBuilderExt, TestClientBuilder, TestClientBuilderExt, - BlockBuilderExt, TestClient, ClientExt, + BlockBuilderExt, ClientBlockImportExt, ClientExt, DefaultTestClientBuilderExt, TestClient, + TestClientBuilder, TestClientBuilderExt, }; - use futures::{future::poll_fn, executor::block_on}; #[test] fn processes_empty_response_on_justification_request_for_unknown_block() { @@ -2203,12 +2253,8 @@ mod test { let block_announce_validator = Box::new(DefaultBlockAnnounceValidator); let peer_id = PeerId::random(); - let mut sync = ChainSync::new( - SyncMode::Full, - client.clone(), - block_announce_validator, - 1, - ).unwrap(); + let mut sync = + ChainSync::new(SyncMode::Full, client.clone(), block_announce_validator, 1).unwrap(); let (a1_hash, a1_number) = { let a1 = client.new_block(Default::default()).unwrap().build().unwrap().block; @@ -2222,50 +2268,36 @@ mod test { sync.request_justification(&a1_hash, a1_number); // the justification request should be scheduled to that peer - assert!( - sync.justification_requests().any(|(who, request)| { - who == peer_id && request.from == FromBlock::Hash(a1_hash) - }) - ); + assert!(sync + .justification_requests() + .any(|(who, request)| { who == peer_id && request.from == FromBlock::Hash(a1_hash) })); // there are no extra pending requests - assert_eq!( - 
sync.extra_justifications.pending_requests().count(), - 0, - ); + assert_eq!(sync.extra_justifications.pending_requests().count(), 0,); // there's one in-flight extra request to the expected peer - assert!( - sync.extra_justifications.active_requests().any(|(who, (hash, number))| { - *who == peer_id && *hash == a1_hash && *number == a1_number - }) - ); + assert!(sync.extra_justifications.active_requests().any(|(who, (hash, number))| { + *who == peer_id && *hash == a1_hash && *number == a1_number + })); // if the peer replies with an empty response (i.e. it doesn't know the block), // the active request should be cleared. assert_eq!( sync.on_block_justification( peer_id.clone(), - BlockResponse:: { - id: 0, - blocks: vec![], - } + BlockResponse:: { id: 0, blocks: vec![] } ), Ok(OnBlockJustification::Nothing), ); // there should be no in-flight requests - assert_eq!( - sync.extra_justifications.active_requests().count(), - 0, - ); + assert_eq!(sync.extra_justifications.active_requests().count(), 0,); // and the request should now be pending again, waiting for reschedule - assert!( - sync.extra_justifications.pending_requests().any(|(hash, number)| { - *hash == a1_hash && *number == a1_number - }) - ); + assert!(sync + .extra_justifications + .pending_requests() + .any(|(hash, number)| { *hash == a1_hash && *number == a1_number })); } #[test] @@ -2276,7 +2308,8 @@ mod test { client.clone(), Box::new(DefaultBlockAnnounceValidator), 1, - ).unwrap(); + ) + .unwrap(); let peer_id1 = PeerId::random(); let peer_id2 = PeerId::random(); @@ -2311,10 +2344,10 @@ mod test { // the justification request should be scheduled to the // new peer which is at the given block assert!(sync.justification_requests().any(|(p, r)| { - p == peer_id3 - && r.fields == BlockAttributes::JUSTIFICATION - && r.from == message::FromBlock::Hash(b1_hash) - && r.to == None + p == peer_id3 && + r.fields == BlockAttributes::JUSTIFICATION && + r.from == message::FromBlock::Hash(b1_hash) && + r.to == None 
})); assert_eq!( @@ -2326,7 +2359,9 @@ mod test { let block_requests = sync.restart(); // which should make us send out block requests to the first two peers - assert!(block_requests.map(|r| r.unwrap()).all(|(p, _)| { p == peer_id1 || p == peer_id2 })); + assert!(block_requests + .map(|r| r.unwrap()) + .all(|(p, _)| { p == peer_id1 || p == peer_id2 })); // peer 3 should be unaffected it was downloading finality data assert_eq!( @@ -2337,30 +2372,18 @@ mod test { // Set common block to something that we don't have (e.g. failed import) sync.peers.get_mut(&peer_id3).unwrap().common_number = 100; let _ = sync.restart().count(); - assert_eq!( - sync.peers.get(&peer_id3).unwrap().common_number, - 50 - ); + assert_eq!(sync.peers.get(&peer_id3).unwrap().common_number, 50); } /// Send a block annoucnement for the given `header`. - fn send_block_announce( - header: Header, - peer_id: &PeerId, - sync: &mut ChainSync, - ) { + fn send_block_announce(header: Header, peer_id: &PeerId, sync: &mut ChainSync) { let block_annnounce = BlockAnnounce { header: header.clone(), state: Some(BlockState::Best), data: Some(Vec::new()), }; - sync.push_block_announce_validation( - peer_id.clone(), - header.hash(), - block_annnounce, - true, - ); + sync.push_block_announce_validation(peer_id.clone(), header.hash(), block_annnounce, true); // Poll until we have procssed the block announcement block_on(poll_fn(|cx| loop { @@ -2374,8 +2397,9 @@ mod test { fn create_block_response(blocks: Vec) -> BlockResponse { BlockResponse:: { id: 0, - blocks: blocks.into_iter().map(|b| - BlockData:: { + blocks: blocks + .into_iter() + .map(|b| BlockData:: { hash: b.hash(), header: Some(b.header().clone()), body: Some(b.deconstruct().1), @@ -2384,8 +2408,8 @@ mod test { message_queue: None, justification: None, justifications: None, - } - ).collect(), + }) + .collect(), } } @@ -2414,11 +2438,8 @@ mod test { fn build_block(client: &mut Arc, at: Option, fork: bool) -> Block { let at = at.unwrap_or_else(|| 
client.info().best_hash); - let mut block_builder = client.new_block_at( - &BlockId::Hash(at), - Default::default(), - false, - ).unwrap(); + let mut block_builder = + client.new_block_at(&BlockId::Hash(at), Default::default(), false).unwrap(); if fork { block_builder.push_storage_change(vec![1, 2, 3], Some(vec![4, 5, 6])).unwrap(); @@ -2450,15 +2471,16 @@ mod test { client.clone(), Box::new(DefaultBlockAnnounceValidator), 5, - ).unwrap(); + ) + .unwrap(); let peer_id1 = PeerId::random(); let peer_id2 = PeerId::random(); let mut client2 = client.clone(); let mut build_block_at = |at, import| { - let mut block_builder = client2.new_block_at(&BlockId::Hash(at), Default::default(), false) - .unwrap(); + let mut block_builder = + client2.new_block_at(&BlockId::Hash(at), Default::default(), false).unwrap(); // Make sure we generate a different block as fork block_builder.push_storage_change(vec![1, 2, 3], Some(vec![4, 5, 6])).unwrap(); @@ -2517,13 +2539,11 @@ mod test { let response = create_block_response(vec![block2.clone()]); let res = sync.on_block_data(&peer_id1, Some(request2), response).unwrap(); - assert!( - matches!( - res, - OnBlockData::Import(_, blocks) - if blocks.iter().all(|b| [2, 3, 4].contains(b.header.as_ref().unwrap().number())) - ) - ); + assert!(matches!( + res, + OnBlockData::Import(_, blocks) + if blocks.iter().all(|b| [2, 3, 4].contains(b.header.as_ref().unwrap().number())) + )); let response = create_block_response(vec![block2.clone()]); let res = sync.on_block_data(&peer_id2, Some(request3), response).unwrap(); @@ -2552,7 +2572,9 @@ mod test { let blocks = { let mut client = Arc::new(TestClientBuilder::new().build()); - (0..MAX_DOWNLOAD_AHEAD * 2).map(|_| build_block(&mut client, None, false)).collect::>() + (0..MAX_DOWNLOAD_AHEAD * 2) + .map(|_| build_block(&mut client, None, false)) + .collect::>() }; let mut client = Arc::new(TestClientBuilder::new().build()); @@ -2563,14 +2585,16 @@ mod test { client.clone(), 
Box::new(DefaultBlockAnnounceValidator), 5, - ).unwrap(); + ) + .unwrap(); let peer_id1 = PeerId::random(); let peer_id2 = PeerId::random(); let best_block = blocks.last().unwrap().clone(); // Connect the node we will sync from - sync.new_peer(peer_id1.clone(), best_block.hash(), *best_block.header().number()).unwrap(); + sync.new_peer(peer_id1.clone(), best_block.hash(), *best_block.header().number()) + .unwrap(); sync.new_peer(peer_id2.clone(), info.best_hash, 0).unwrap(); let mut best_block_num = 0; @@ -2590,18 +2614,17 @@ mod test { let response = create_block_response(resp_blocks.clone()); let res = sync.on_block_data(&peer_id1, Some(request), response).unwrap(); - assert!( - matches!( - res, - OnBlockData::Import(_, blocks) if blocks.len() == MAX_BLOCKS_TO_REQUEST - ), - ); + assert!(matches!( + res, + OnBlockData::Import(_, blocks) if blocks.len() == MAX_BLOCKS_TO_REQUEST + ),); best_block_num += MAX_BLOCKS_TO_REQUEST as u32; - resp_blocks.into_iter() - .rev() - .for_each(|b| block_on(client.import_as_final(BlockOrigin::Own, b)).unwrap()); + resp_blocks + .into_iter() + .rev() + .for_each(|b| block_on(client.import_as_final(BlockOrigin::Own, b)).unwrap()); } // "Wait" for the queue to clear @@ -2627,12 +2650,10 @@ mod test { let response = create_block_response(vec![blocks[(best_block_num - 1) as usize].clone()]); let res = sync.on_block_data(&peer_id2, Some(peer2_req), response).unwrap(); - assert!( - matches!( - res, - OnBlockData::Import(_, blocks) if blocks.is_empty() - ), - ); + assert!(matches!( + res, + OnBlockData::Import(_, blocks) if blocks.is_empty() + ),); let peer1_from = unwrap_from_block_number(peer1_req.unwrap().from); @@ -2671,10 +2692,13 @@ mod test { .cloned() .collect::>(); - fork_blocks.into_iter().chain( + fork_blocks + .into_iter() + .chain( (0..MAX_BLOCKS_TO_LOOK_BACKWARDS * 2 + 1) - .map(|_| build_block(&mut client, None, true)) - ).collect::>() + .map(|_| build_block(&mut client, None, true)), + ) + .collect::>() }; let info = 
client.info(); @@ -2684,27 +2708,27 @@ mod test { client.clone(), Box::new(DefaultBlockAnnounceValidator), 5, - ).unwrap(); + ) + .unwrap(); let finalized_block = blocks[MAX_BLOCKS_TO_LOOK_BACKWARDS as usize * 2 - 1].clone(); let just = (*b"TEST", Vec::new()); - client.finalize_block(BlockId::Hash(finalized_block.hash()), Some(just)).unwrap(); + client + .finalize_block(BlockId::Hash(finalized_block.hash()), Some(just)) + .unwrap(); sync.update_chain_info(&info.best_hash, info.best_number); let peer_id1 = PeerId::random(); let common_block = blocks[MAX_BLOCKS_TO_LOOK_BACKWARDS as usize / 2].clone(); // Connect the node we will sync from - sync.new_peer(peer_id1.clone(), common_block.hash(), *common_block.header().number()).unwrap(); + sync.new_peer(peer_id1.clone(), common_block.hash(), *common_block.header().number()) + .unwrap(); send_block_announce(fork_blocks.last().unwrap().header().clone(), &peer_id1, &mut sync); - let mut request = get_block_request( - &mut sync, - FromBlock::Number(info.best_number), - 1, - &peer_id1, - ); + let mut request = + get_block_request(&mut sync, FromBlock::Number(info.best_number), 1, &peer_id1); // Do the ancestor search loop { @@ -2739,36 +2763,34 @@ mod test { let response = create_block_response(resp_blocks.clone()); let res = sync.on_block_data(&peer_id1, Some(request), response).unwrap(); - assert!( - matches!( - res, - OnBlockData::Import(_, blocks) if blocks.len() == MAX_BLOCKS_TO_REQUEST - ), - ); + assert!(matches!( + res, + OnBlockData::Import(_, blocks) if blocks.len() == MAX_BLOCKS_TO_REQUEST + ),); best_block_num += MAX_BLOCKS_TO_REQUEST as u32; let _ = sync.on_blocks_processed( MAX_BLOCKS_TO_REQUEST as usize, MAX_BLOCKS_TO_REQUEST as usize, - resp_blocks.iter() + resp_blocks + .iter() .rev() - .map(|b| + .map(|b| { ( - Ok( - BlockImportResult::ImportedUnknown( - b.header().number().clone(), - Default::default(), - Some(peer_id1.clone()), - ) - ), + Ok(BlockImportResult::ImportedUnknown( + 
b.header().number().clone(), + Default::default(), + Some(peer_id1.clone()), + )), b.hash(), ) - ) - .collect() + }) + .collect(), ); - resp_blocks.into_iter() + resp_blocks + .into_iter() .rev() .for_each(|b| block_on(client.import(BlockOrigin::Own, b)).unwrap()); } @@ -2786,21 +2808,21 @@ mod test { fn removes_target_fork_on_disconnect() { sp_tracing::try_init_simple(); let mut client = Arc::new(TestClientBuilder::new().build()); - let blocks = (0..3) - .map(|_| build_block(&mut client, None, false)) - .collect::>(); + let blocks = (0..3).map(|_| build_block(&mut client, None, false)).collect::>(); let mut sync = ChainSync::new( SyncMode::Full, client.clone(), Box::new(DefaultBlockAnnounceValidator), 1, - ).unwrap(); + ) + .unwrap(); let peer_id1 = PeerId::random(); let common_block = blocks[1].clone(); // Connect the node we will sync from - sync.new_peer(peer_id1.clone(), common_block.hash(), *common_block.header().number()).unwrap(); + sync.new_peer(peer_id1.clone(), common_block.hash(), *common_block.header().number()) + .unwrap(); // Create a "new" header and announce it let mut header = blocks[0].header().clone(); diff --git a/client/network/src/protocol/sync/blocks.rs b/client/network/src/protocol/sync/blocks.rs index 01b5f6016f8a5..640c47c08416d 100644 --- a/client/network/src/protocol/sync/blocks.rs +++ b/client/network/src/protocol/sync/blocks.rs @@ -16,13 +16,15 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::cmp; -use std::ops::Range; -use std::collections::{HashMap, BTreeMap}; -use log::trace; +use crate::protocol::message; use libp2p::PeerId; +use log::trace; use sp_runtime::traits::{Block as BlockT, NumberFor, One}; -use crate::protocol::message; +use std::{ + cmp, + collections::{BTreeMap, HashMap}, + ops::Range, +}; /// Block data with origin. 
#[derive(Debug, Clone, PartialEq, Eq)] @@ -35,10 +37,7 @@ pub struct BlockData { #[derive(Debug)] enum BlockRangeState { - Downloading { - len: NumberFor, - downloading: u32, - }, + Downloading { len: NumberFor, downloading: u32 }, Complete(Vec>), } @@ -62,10 +61,7 @@ pub struct BlockCollection { impl BlockCollection { /// Create a new instance. pub fn new() -> Self { - BlockCollection { - blocks: BTreeMap::new(), - peer_requests: HashMap::new(), - } + BlockCollection { blocks: BTreeMap::new(), peer_requests: HashMap::new() } } /// Clear everything. @@ -77,7 +73,7 @@ impl BlockCollection { /// Insert a set of blocks into collection. pub fn insert(&mut self, start: NumberFor, blocks: Vec>, who: PeerId) { if blocks.is_empty() { - return; + return } match self.blocks.get(&start) { @@ -86,13 +82,20 @@ impl BlockCollection { }, Some(&BlockRangeState::Complete(ref existing)) if existing.len() >= blocks.len() => { trace!(target: "sync", "Ignored block data already downloaded: {}", start); - return; + return }, _ => (), } - self.blocks.insert(start, BlockRangeState::Complete(blocks.into_iter() - .map(|b| BlockData { origin: Some(who.clone()), block: b }).collect())); + self.blocks.insert( + start, + BlockRangeState::Complete( + blocks + .into_iter() + .map(|b| BlockData { origin: Some(who.clone()), block: b }) + .collect(), + ), + ); } /// Returns a set of block hashes that require a header download. The returned set is marked as being downloaded. @@ -107,7 +110,7 @@ impl BlockCollection { ) -> Option>> { if peer_best <= common { // Bail out early - return None; + return None } // First block number that we need to download let first_different = common + >::one(); @@ -120,15 +123,13 @@ impl BlockCollection { break match (prev, next) { (Some((start, &BlockRangeState::Downloading { ref len, downloading })), _) if downloading < max_parallel => - (*start .. 
*start + *len, downloading), + (*start..*start + *len, downloading), (Some((start, r)), Some((next_start, _))) if *start + r.len() < *next_start => - (*start + r.len() .. cmp::min(*next_start, *start + r.len() + count), 0), // gap - (Some((start, r)), None) => - (*start + r.len() .. *start + r.len() + count, 0), // last range - (None, None) => - (first_different .. first_different + count, 0), // empty + (*start + r.len()..cmp::min(*next_start, *start + r.len() + count), 0), // gap + (Some((start, r)), None) => (*start + r.len()..*start + r.len() + count, 0), // last range + (None, None) => (first_different..first_different + count, 0), // empty (None, Some((start, _))) if *start > first_different => - (first_different .. cmp::min(first_different + count, *start), 0), // gap at the start + (first_different..cmp::min(first_different + count, *start), 0), // gap at the start _ => { prev = next; continue @@ -139,23 +140,33 @@ impl BlockCollection { // crop to peers best if range.start > peer_best { trace!(target: "sync", "Out of range for peer {} ({} vs {})", who, range.start, peer_best); - return None; + return None } range.end = cmp::min(peer_best + One::one(), range.end); - if self.blocks.iter().next().map_or(false, |(n, _)| range.start > *n + max_ahead.into()) { + if self + .blocks + .iter() + .next() + .map_or(false, |(n, _)| range.start > *n + max_ahead.into()) + { trace!(target: "sync", "Too far ahead for peer {} ({})", who, range.start); - return None; + return None } self.peer_requests.insert(who, range.start); - self.blocks.insert(range.start, BlockRangeState::Downloading { - len: range.end - range.start, - downloading: downloading + 1 - }); + self.blocks.insert( + range.start, + BlockRangeState::Downloading { + len: range.end - range.start, + downloading: downloading + 1, + }, + ); if range.end <= range.start { - panic!("Empty range {:?}, count={}, peer_best={}, common={}, blocks={:?}", - range, count, peer_best, common, self.blocks); + panic!( + "Empty 
range {:?}, count={}, peer_best={}, common={}, blocks={:?}", + range, count, peer_best, common, self.blocks + ); } Some(range) } @@ -188,16 +199,14 @@ impl BlockCollection { pub fn clear_peer_download(&mut self, who: &PeerId) { if let Some(start) = self.peer_requests.remove(who) { let remove = match self.blocks.get_mut(&start) { - Some(&mut BlockRangeState::Downloading { ref mut downloading, .. }) if *downloading > 1 => { + Some(&mut BlockRangeState::Downloading { ref mut downloading, .. }) + if *downloading > 1 => + { *downloading -= 1; false - }, - Some(&mut BlockRangeState::Downloading { .. }) => { - true - }, - _ => { - false } + Some(&mut BlockRangeState::Downloading { .. }) => true, + _ => false, }; if remove { self.blocks.remove(&start); @@ -210,27 +219,28 @@ impl BlockCollection { mod test { use super::{BlockCollection, BlockData, BlockRangeState}; use crate::{protocol::message, PeerId}; - use sp_runtime::testing::{Block as RawBlock, ExtrinsicWrapper}; use sp_core::H256; + use sp_runtime::testing::{Block as RawBlock, ExtrinsicWrapper}; type Block = RawBlock>; fn is_empty(bc: &BlockCollection) -> bool { - bc.blocks.is_empty() && - bc.peer_requests.is_empty() + bc.blocks.is_empty() && bc.peer_requests.is_empty() } fn generate_blocks(n: usize) -> Vec> { - (0 .. n).map(|_| message::generic::BlockData { - hash: H256::random(), - header: None, - body: None, - indexed_body: None, - message_queue: None, - receipt: None, - justification: None, - justifications: None, - }).collect() + (0..n) + .map(|_| message::generic::BlockData { + hash: H256::random(), + header: None, + body: None, + indexed_body: None, + message_queue: None, + receipt: None, + justification: None, + justifications: None, + }) + .collect() } #[test] @@ -252,32 +262,47 @@ mod test { let peer2 = PeerId::random(); let blocks = generate_blocks(150); - assert_eq!(bc.needed_blocks(peer0.clone(), 40, 150, 0, 1, 200), Some(1 .. 
41)); - assert_eq!(bc.needed_blocks(peer1.clone(), 40, 150, 0, 1, 200), Some(41 .. 81)); - assert_eq!(bc.needed_blocks(peer2.clone(), 40, 150, 0, 1, 200), Some(81 .. 121)); + assert_eq!(bc.needed_blocks(peer0.clone(), 40, 150, 0, 1, 200), Some(1..41)); + assert_eq!(bc.needed_blocks(peer1.clone(), 40, 150, 0, 1, 200), Some(41..81)); + assert_eq!(bc.needed_blocks(peer2.clone(), 40, 150, 0, 1, 200), Some(81..121)); bc.clear_peer_download(&peer1); bc.insert(41, blocks[41..81].to_vec(), peer1.clone()); assert_eq!(bc.drain(1), vec![]); - assert_eq!(bc.needed_blocks(peer1.clone(), 40, 150, 0, 1, 200), Some(121 .. 151)); + assert_eq!(bc.needed_blocks(peer1.clone(), 40, 150, 0, 1, 200), Some(121..151)); bc.clear_peer_download(&peer0); bc.insert(1, blocks[1..11].to_vec(), peer0.clone()); - assert_eq!(bc.needed_blocks(peer0.clone(), 40, 150, 0, 1, 200), Some(11 .. 41)); - assert_eq!(bc.drain(1), blocks[1..11].iter() - .map(|b| BlockData { block: b.clone(), origin: Some(peer0.clone()) }).collect::>()); + assert_eq!(bc.needed_blocks(peer0.clone(), 40, 150, 0, 1, 200), Some(11..41)); + assert_eq!( + bc.drain(1), + blocks[1..11] + .iter() + .map(|b| BlockData { block: b.clone(), origin: Some(peer0.clone()) }) + .collect::>() + ); bc.clear_peer_download(&peer0); bc.insert(11, blocks[11..41].to_vec(), peer0.clone()); let drained = bc.drain(12); - assert_eq!(drained[..30], blocks[11..41].iter() - .map(|b| BlockData { block: b.clone(), origin: Some(peer0.clone()) }).collect::>()[..]); - assert_eq!(drained[30..], blocks[41..81].iter() - .map(|b| BlockData { block: b.clone(), origin: Some(peer1.clone()) }).collect::>()[..]); + assert_eq!( + drained[..30], + blocks[11..41] + .iter() + .map(|b| BlockData { block: b.clone(), origin: Some(peer0.clone()) }) + .collect::>()[..] + ); + assert_eq!( + drained[30..], + blocks[41..81] + .iter() + .map(|b| BlockData { block: b.clone(), origin: Some(peer1.clone()) }) + .collect::>()[..] 
+ ); bc.clear_peer_download(&peer2); - assert_eq!(bc.needed_blocks(peer2.clone(), 40, 150, 80, 1, 200), Some(81 .. 121)); + assert_eq!(bc.needed_blocks(peer2.clone(), 40, 150, 80, 1, 200), Some(81..121)); bc.clear_peer_download(&peer2); bc.insert(81, blocks[81..121].to_vec(), peer2.clone()); bc.clear_peer_download(&peer1); @@ -285,25 +310,38 @@ mod test { assert_eq!(bc.drain(80), vec![]); let drained = bc.drain(81); - assert_eq!(drained[..40], blocks[81..121].iter() - .map(|b| BlockData { block: b.clone(), origin: Some(peer2.clone()) }).collect::>()[..]); - assert_eq!(drained[40..], blocks[121..150].iter() - .map(|b| BlockData { block: b.clone(), origin: Some(peer1.clone()) }).collect::>()[..]); + assert_eq!( + drained[..40], + blocks[81..121] + .iter() + .map(|b| BlockData { block: b.clone(), origin: Some(peer2.clone()) }) + .collect::>()[..] + ); + assert_eq!( + drained[40..], + blocks[121..150] + .iter() + .map(|b| BlockData { block: b.clone(), origin: Some(peer1.clone()) }) + .collect::>()[..] + ); } #[test] fn large_gap() { let mut bc: BlockCollection = BlockCollection::new(); - bc.blocks.insert(100, BlockRangeState::Downloading { - len: 128, - downloading: 1, - }); - let blocks = generate_blocks(10).into_iter().map(|b| BlockData { block: b, origin: None }).collect(); + bc.blocks.insert(100, BlockRangeState::Downloading { len: 128, downloading: 1 }); + let blocks = generate_blocks(10) + .into_iter() + .map(|b| BlockData { block: b, origin: None }) + .collect(); bc.blocks.insert(114305, BlockRangeState::Complete(blocks)); let peer0 = PeerId::random(); - assert_eq!(bc.needed_blocks(peer0.clone(), 128, 10000, 000, 1, 200), Some(1 .. 100)); + assert_eq!(bc.needed_blocks(peer0.clone(), 128, 10000, 000, 1, 200), Some(1..100)); assert_eq!(bc.needed_blocks(peer0.clone(), 128, 10000, 600, 1, 200), None); // too far ahead - assert_eq!(bc.needed_blocks(peer0.clone(), 128, 10000, 600, 1, 200000), Some(100 + 128 .. 
100 + 128 + 128)); + assert_eq!( + bc.needed_blocks(peer0.clone(), 128, 10000, 600, 1, 200000), + Some(100 + 128..100 + 128 + 128) + ); } } diff --git a/client/network/src/protocol/sync/extra_requests.rs b/client/network/src/protocol/sync/extra_requests.rs index 3de79b3f48734..52419b5d7702c 100644 --- a/client/network/src/protocol/sync/extra_requests.rs +++ b/client/network/src/protocol/sync/extra_requests.rs @@ -16,14 +16,16 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use sp_blockchain::Error as ClientError; use crate::protocol::sync::{PeerSync, PeerSyncState}; use fork_tree::ForkTree; use libp2p::PeerId; use log::{debug, trace, warn}; +use sp_blockchain::Error as ClientError; use sp_runtime::traits::{Block as BlockT, NumberFor, Zero}; -use std::collections::{HashMap, HashSet, VecDeque}; -use std::time::Duration; +use std::{ + collections::{HashMap, HashSet, VecDeque}, + time::Duration, +}; use wasm_timer::Instant; // Time to wait before trying to get the same extra data from the same peer. @@ -61,7 +63,7 @@ pub(crate) struct Metrics { pub(crate) active_requests: u32, pub(crate) importing_requests: u32, pub(crate) failed_requests: u32, - _priv: () + _priv: (), } impl ExtraRequests { @@ -93,13 +95,14 @@ impl ExtraRequests { /// Queue an extra data request to be considered by the `Matcher`. pub(crate) fn schedule(&mut self, request: ExtraRequest, is_descendent_of: F) - where F: Fn(&B::Hash, &B::Hash) -> Result + where + F: Fn(&B::Hash, &B::Hash) -> Result, { match self.tree.import(request.0, request.1, (), &is_descendent_of) { Ok(true) => { // this is a new root so we add it to the current `pending_requests` self.pending_requests.push_back((request.0, request.1)); - } + }, Err(fork_tree::Error::Revert) => { // we have finalized further than the given request, presumably // by some other part of the system (not sync). 
we can safely @@ -107,8 +110,8 @@ impl ExtraRequests { }, Err(err) => { debug!(target: "sync", "Failed to insert request {:?} into tree: {:?}", request, err); - } - _ => () + }, + _ => (), } } @@ -120,7 +123,11 @@ impl ExtraRequests { } /// Processes the response for the request previously sent to the given peer. - pub(crate) fn on_response(&mut self, who: PeerId, resp: Option) -> Option<(PeerId, B::Hash, NumberFor, R)> { + pub(crate) fn on_response( + &mut self, + who: PeerId, + resp: Option, + ) -> Option<(PeerId, B::Hash, NumberFor, R)> { // we assume that the request maps to the given response, this is // currently enforced by the outer network protocol before passing on // messages to chain sync. @@ -157,9 +164,10 @@ impl ExtraRequests { &mut self, best_finalized_hash: &B::Hash, best_finalized_number: NumberFor, - is_descendent_of: F + is_descendent_of: F, ) -> Result<(), fork_tree::Error> - where F: Fn(&B::Hash, &B::Hash) -> Result + where + F: Fn(&B::Hash, &B::Hash) -> Result, { let request = (*best_finalized_hash, best_finalized_number); @@ -203,9 +211,8 @@ impl ExtraRequests { &mut self, request: ExtraRequest, result: Result, E>, - reschedule_on_failure: bool - ) -> bool - { + reschedule_on_failure: bool, + ) -> bool { if !self.importing_requests.remove(&request) { return false } @@ -217,7 +224,7 @@ impl ExtraRequests { self.pending_requests.push_front(request); } return true - } + }, }; if self.tree.finalize_root(&finalized_hash).is_none() { @@ -258,7 +265,7 @@ impl ExtraRequests { active_requests: self.active_requests.len().try_into().unwrap_or(std::u32::MAX), failed_requests: self.failed_requests.len().try_into().unwrap_or(std::u32::MAX), importing_requests: self.importing_requests.len().try_into().unwrap_or(std::u32::MAX), - _priv: () + _priv: (), } } } @@ -269,15 +276,12 @@ pub(crate) struct Matcher<'a, B: BlockT> { /// Length of pending requests collection. /// Used to ensure we do not loop more than once over all pending requests. 
remaining: usize, - extras: &'a mut ExtraRequests + extras: &'a mut ExtraRequests, } impl<'a, B: BlockT> Matcher<'a, B> { fn new(extras: &'a mut ExtraRequests) -> Self { - Matcher { - remaining: extras.pending_requests.len(), - extras - } + Matcher { remaining: extras.pending_requests.len(), extras } } /// Finds a peer to which a pending request can be sent. @@ -294,7 +298,10 @@ impl<'a, B: BlockT> Matcher<'a, B> { /// /// The returned `PeerId` (if any) is guaranteed to come from the given `peers` /// argument. - pub(crate) fn next(&mut self, peers: &HashMap>) -> Option<(PeerId, ExtraRequest)> { + pub(crate) fn next( + &mut self, + peers: &HashMap>, + ) -> Option<(PeerId, ExtraRequest)> { if self.remaining == 0 { return None } @@ -305,7 +312,9 @@ impl<'a, B: BlockT> Matcher<'a, B> { } while let Some(request) = self.extras.pending_requests.pop_front() { - for (peer, sync) in peers.iter().filter(|(_, sync)| sync.state == PeerSyncState::Available) { + for (peer, sync) in + peers.iter().filter(|(_, sync)| sync.state == PeerSyncState::Available) + { // only ask peers that have synced at least up to the block number that we're asking the extra for if sync.best_number < request.1 { continue @@ -315,7 +324,13 @@ impl<'a, B: BlockT> Matcher<'a, B> { continue } // only ask if the same request has not failed for this peer before - if self.extras.failed_requests.get(&request).map(|rr| rr.iter().any(|i| &i.0 == peer)).unwrap_or(false) { + if self + .extras + .failed_requests + .get(&request) + .map(|rr| rr.iter().any(|i| &i.0 == peer)) + .unwrap_or(false) + { continue } self.extras.active_requests.insert(peer.clone(), request); @@ -343,22 +358,22 @@ impl<'a, B: BlockT> Matcher<'a, B> { #[cfg(test)] mod tests { + use super::*; use crate::protocol::sync::PeerSync; - use sp_blockchain::Error as ClientError; use quickcheck::{Arbitrary, Gen, QuickCheck}; - use std::collections::{HashMap, HashSet}; - use super::*; + use sp_blockchain::Error as ClientError; use 
sp_test_primitives::{Block, BlockNumber, Hash}; + use std::collections::{HashMap, HashSet}; #[test] fn requests_are_processed_in_order() { fn property(mut peers: ArbitraryPeers) { let mut requests = ExtraRequests::::new("test"); - let num_peers_available = peers.0.values() - .filter(|s| s.state == PeerSyncState::Available).count(); + let num_peers_available = + peers.0.values().filter(|s| s.state == PeerSyncState::Available).count(); - for i in 0 .. num_peers_available { + for i in 0..num_peers_available { requests.schedule((Hash::random(), i as u64), |a, b| Ok(a[0] >= b[0])) } @@ -368,12 +383,12 @@ mod tests { for p in &pending { let (peer, r) = m.next(&peers.0).unwrap(); assert_eq!(p, &r); - peers.0.get_mut(&peer).unwrap().state = PeerSyncState::DownloadingJustification(r.0); + peers.0.get_mut(&peer).unwrap().state = + PeerSyncState::DownloadingJustification(r.0); } } - QuickCheck::new() - .quickcheck(property as fn(ArbitraryPeers)) + QuickCheck::new().quickcheck(property as fn(ArbitraryPeers)) } #[test] @@ -398,22 +413,24 @@ mod tests { fn property(mut peers: ArbitraryPeers) -> bool { let mut requests = ExtraRequests::::new("test"); - let num_peers_available = peers.0.values() - .filter(|s| s.state == PeerSyncState::Available).count(); + let num_peers_available = + peers.0.values().filter(|s| s.state == PeerSyncState::Available).count(); - for i in 0 .. 
num_peers_available { + for i in 0..num_peers_available { requests.schedule((Hash::random(), i as u64), |a, b| Ok(a[0] >= b[0])) } let mut m = requests.matcher(); while let Some((peer, r)) = m.next(&peers.0) { - peers.0.get_mut(&peer).unwrap().state = PeerSyncState::DownloadingJustification(r.0); + peers.0.get_mut(&peer).unwrap().state = + PeerSyncState::DownloadingJustification(r.0); } assert!(requests.pending_requests.is_empty()); let active_peers = requests.active_requests.keys().cloned().collect::>(); - let previously_active = requests.active_requests.values().cloned().collect::>(); + let previously_active = + requests.active_requests.values().cloned().collect::>(); for peer in &active_peers { requests.peer_disconnected(peer) @@ -424,8 +441,7 @@ mod tests { previously_active == requests.pending_requests.iter().cloned().collect::>() } - QuickCheck::new() - .quickcheck(property as fn(ArbitraryPeers) -> bool) + QuickCheck::new().quickcheck(property as fn(ArbitraryPeers) -> bool) } #[test] @@ -433,31 +449,44 @@ mod tests { fn property(mut peers: ArbitraryPeers) { let mut requests = ExtraRequests::::new("test"); - let num_peers_available = peers.0.values() - .filter(|s| s.state == PeerSyncState::Available).count(); + let num_peers_available = + peers.0.values().filter(|s| s.state == PeerSyncState::Available).count(); - for i in 0 .. 
num_peers_available { + for i in 0..num_peers_available { requests.schedule((Hash::random(), i as u64), |a, b| Ok(a[0] >= b[0])) } let mut m = requests.matcher(); while let Some((peer, r)) = m.next(&peers.0) { - peers.0.get_mut(&peer).unwrap().state = PeerSyncState::DownloadingJustification(r.0); + peers.0.get_mut(&peer).unwrap().state = + PeerSyncState::DownloadingJustification(r.0); } - let active = requests.active_requests.iter().map(|(p, &r)| (p.clone(), r)).collect::>(); + let active = requests + .active_requests + .iter() + .map(|(p, &r)| (p.clone(), r)) + .collect::>(); for (peer, req) in &active { assert!(requests.failed_requests.get(req).is_none()); assert!(!requests.pending_requests.contains(req)); assert!(requests.on_response::<()>(peer.clone(), None).is_none()); assert!(requests.pending_requests.contains(req)); - assert_eq!(1, requests.failed_requests.get(req).unwrap().iter().filter(|(p, _)| p == peer).count()) + assert_eq!( + 1, + requests + .failed_requests + .get(req) + .unwrap() + .iter() + .filter(|(p, _)| p == peer) + .count() + ) } } - QuickCheck::new() - .quickcheck(property as fn(ArbitraryPeers)) + QuickCheck::new().quickcheck(property as fn(ArbitraryPeers)) } #[test] @@ -497,7 +526,10 @@ mod tests { finality_proofs.try_finalize_root::<()>((hash6, 6), Ok((hash7, 7)), true); // ensure that there's no request for #6 - assert_eq!(finality_proofs.pending_requests.iter().collect::>(), Vec::<&(Hash, u64)>::new()); + assert_eq!( + finality_proofs.pending_requests.iter().collect::>(), + Vec::<&(Hash, u64)>::new() + ); } #[test] @@ -560,7 +592,7 @@ mod tests { impl Arbitrary for ArbitraryPeers { fn arbitrary(g: &mut Gen) -> Self { let mut peers = HashMap::with_capacity(g.size()); - for _ in 0 .. 
g.size() { + for _ in 0..g.size() { let ps = ArbitraryPeerSync::arbitrary(g).0; peers.insert(ps.peer_id.clone(), ps); } diff --git a/client/network/src/protocol/sync/state.rs b/client/network/src/protocol/sync/state.rs index fc9dfdbb8c376..73e4eac1f5bc0 100644 --- a/client/network/src/protocol/sync/state.rs +++ b/client/network/src/protocol/sync/state.rs @@ -16,13 +16,15 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::sync::Arc; -use codec::{Encode, Decode}; -use sp_runtime::traits::{Block as BlockT, Header, NumberFor}; -use sc_client_api::StorageProof; -use crate::schema::v1::{StateRequest, StateResponse, StateEntry}; -use crate::chain::{Client, ImportedState}; use super::StateDownloadProgress; +use crate::{ + chain::{Client, ImportedState}, + schema::v1::{StateEntry, StateRequest, StateResponse}, +}; +use codec::{Decode, Encode}; +use sc_client_api::StorageProof; +use sp_runtime::traits::{Block as BlockT, Header, NumberFor}; +use std::sync::Arc; /// State sync support. 
@@ -73,14 +75,14 @@ impl StateSync { target: "sync", "Bad state response", ); - return ImportResult::BadResponse; + return ImportResult::BadResponse } if !self.skip_proof && response.proof.is_empty() { log::debug!( target: "sync", "Missing proof", ); - return ImportResult::BadResponse; + return ImportResult::BadResponse } let complete = if !self.skip_proof { log::debug!( @@ -93,24 +95,21 @@ impl StateSync { Ok(proof) => proof, Err(e) => { log::debug!(target: "sync", "Error decoding proof: {:?}", e); - return ImportResult::BadResponse; - } - }; - let (values, complete) = match self.client.verify_range_proof( - self.target_root, - proof, - &self.last_key - ) { - Err(e) => { - log::debug!( - target: "sync", - "StateResponse failed proof verification: {:?}", - e, - ); - return ImportResult::BadResponse; + return ImportResult::BadResponse }, - Ok(values) => values, }; + let (values, complete) = + match self.client.verify_range_proof(self.target_root, proof, &self.last_key) { + Err(e) => { + log::debug!( + target: "sync", + "StateResponse failed proof verification: {:?}", + e, + ); + return ImportResult::BadResponse + }, + Ok(values) => values, + }; log::debug!(target: "sync", "Imported with {} keys", values.len()); if let Some(last) = values.last().map(|(k, _)| k) { @@ -120,7 +119,7 @@ impl StateSync { for (key, value) in values { self.imported_bytes += key.len() as u64; self.state.push((key, value)) - }; + } self.imported_bytes += proof_size; complete } else { @@ -142,10 +141,14 @@ impl StateSync { }; if complete { self.complete = true; - ImportResult::Import(self.target_block.clone(), self.target_header.clone(), ImportedState { - block: self.target_block.clone(), - state: std::mem::take(&mut self.state) - }) + ImportResult::Import( + self.target_block.clone(), + self.target_header.clone(), + ImportedState { + block: self.target_block.clone(), + state: std::mem::take(&mut self.state), + }, + ) } else { ImportResult::Continue(self.next_request()) } @@ -178,10 +181,6 @@ 
impl StateSync { /// Returns state sync estimated progress. pub fn progress(&self) -> StateDownloadProgress { let percent_done = (*self.last_key.get(0).unwrap_or(&0u8) as u32) * 100 / 256; - StateDownloadProgress { - percentage: percent_done, - size: self.imported_bytes, - } + StateDownloadProgress { percentage: percent_done, size: self.imported_bytes } } } - diff --git a/client/network/src/request_responses.rs b/client/network/src/request_responses.rs index 20469e143d41e..efc1085883f07 100644 --- a/client/network/src/request_responses.rs +++ b/client/network/src/request_responses.rs @@ -35,15 +35,19 @@ //! is used to handle incoming requests. //! -use futures::{channel::{mpsc, oneshot}, prelude::*}; +use crate::ReputationChange; +use futures::{ + channel::{mpsc, oneshot}, + prelude::*, +}; use libp2p::{ core::{ connection::{ConnectionId, ListenerId}, ConnectedPoint, Multiaddr, PeerId, }, request_response::{ - RequestResponse, RequestResponseCodec, RequestResponseConfig, RequestResponseEvent, - RequestResponseMessage, ResponseChannel, ProtocolSupport + ProtocolSupport, RequestResponse, RequestResponseCodec, RequestResponseConfig, + RequestResponseEvent, RequestResponseMessage, ResponseChannel, }, swarm::{ protocols_handler::multi::MultiHandler, NetworkBehaviour, NetworkBehaviourAction, @@ -51,58 +55,62 @@ use libp2p::{ }, }; use std::{ - borrow::Cow, collections::{hash_map::Entry, HashMap}, convert::TryFrom as _, io, iter, - pin::Pin, task::{Context, Poll}, time::Duration, + borrow::Cow, + collections::{hash_map::Entry, HashMap}, + convert::TryFrom as _, + io, iter, + pin::Pin, + task::{Context, Poll}, + time::Duration, }; use wasm_timer::Instant; -use crate::ReputationChange; pub use libp2p::request_response::{InboundFailure, OutboundFailure, RequestId}; /// Configuration for a single request-response protocol. #[derive(Debug, Clone)] pub struct ProtocolConfig { - /// Name of the protocol on the wire. Should be something like `/foo/bar`. 
- pub name: Cow<'static, str>, - - /// Maximum allowed size, in bytes, of a request. - /// - /// Any request larger than this value will be declined as a way to avoid allocating too - /// much memory for it. - pub max_request_size: u64, - - /// Maximum allowed size, in bytes, of a response. - /// - /// Any response larger than this value will be declined as a way to avoid allocating too - /// much memory for it. - pub max_response_size: u64, - - /// Duration after which emitted requests are considered timed out. - /// - /// If you expect the response to come back quickly, you should set this to a smaller duration. - pub request_timeout: Duration, - - /// Channel on which the networking service will send incoming requests. - /// - /// Every time a peer sends a request to the local node using this protocol, the networking - /// service will push an element on this channel. The receiving side of this channel then has - /// to pull this element, process the request, and send back the response to send back to the - /// peer. - /// - /// The size of the channel has to be carefully chosen. If the channel is full, the networking - /// service will discard the incoming request send back an error to the peer. Consequently, - /// the channel being full is an indicator that the node is overloaded. - /// - /// You can typically set the size of the channel to `T / d`, where `T` is the - /// `request_timeout` and `d` is the expected average duration of CPU and I/O it takes to - /// build a response. - /// - /// Can be `None` if the local node does not support answering incoming requests. - /// If this is `None`, then the local node will not advertise support for this protocol towards - /// other peers. If this is `Some` but the channel is closed, then the local node will - /// advertise support for this protocol, but any incoming request will lead to an error being - /// sent back. - pub inbound_queue: Option>, + /// Name of the protocol on the wire. 
Should be something like `/foo/bar`. + pub name: Cow<'static, str>, + + /// Maximum allowed size, in bytes, of a request. + /// + /// Any request larger than this value will be declined as a way to avoid allocating too + /// much memory for it. + pub max_request_size: u64, + + /// Maximum allowed size, in bytes, of a response. + /// + /// Any response larger than this value will be declined as a way to avoid allocating too + /// much memory for it. + pub max_response_size: u64, + + /// Duration after which emitted requests are considered timed out. + /// + /// If you expect the response to come back quickly, you should set this to a smaller duration. + pub request_timeout: Duration, + + /// Channel on which the networking service will send incoming requests. + /// + /// Every time a peer sends a request to the local node using this protocol, the networking + /// service will push an element on this channel. The receiving side of this channel then has + /// to pull this element, process the request, and send back the response to send back to the + /// peer. + /// + /// The size of the channel has to be carefully chosen. If the channel is full, the networking + /// service will discard the incoming request send back an error to the peer. Consequently, + /// the channel being full is an indicator that the node is overloaded. + /// + /// You can typically set the size of the channel to `T / d`, where `T` is the + /// `request_timeout` and `d` is the expected average duration of CPU and I/O it takes to + /// build a response. + /// + /// Can be `None` if the local node does not support answering incoming requests. + /// If this is `None`, then the local node will not advertise support for this protocol towards + /// other peers. If this is `Some` but the channel is closed, then the local node will + /// advertise support for this protocol, but any incoming request will lead to an error being + /// sent back. 
+ pub inbound_queue: Option>, } /// A single request received by a peer on a request-response protocol. @@ -179,14 +187,11 @@ pub enum Event { /// Duration the request took. duration: Duration, /// Result of the request. - result: Result<(), RequestFailure> + result: Result<(), RequestFailure>, }, /// A request protocol handler issued reputation changes for the given peer. - ReputationChanges { - peer: PeerId, - changes: Vec, - } + ReputationChanges { peer: PeerId, changes: Vec }, } /// Combination of a protocol name and a request id. @@ -234,19 +239,17 @@ pub struct RequestResponsesBehaviour { /// "response builder" used to build responses for incoming requests. protocols: HashMap< Cow<'static, str>, - (RequestResponse, Option>) + (RequestResponse, Option>), >, /// Pending requests, passed down to a [`RequestResponse`] behaviour, awaiting a reply. - pending_requests: HashMap< - ProtocolRequestId, - (Instant, oneshot::Sender, RequestFailure>>), - >, + pending_requests: + HashMap, RequestFailure>>)>, /// Whenever an incoming request arrives, a `Future` is added to this list and will yield the /// start time and the response to send back to the remote. pending_responses: stream::FuturesUnordered< - Pin> + Send>> + Pin> + Send>>, >, /// Whenever an incoming request arrives, the arrival [`Instant`] is recorded here. 
@@ -282,15 +285,18 @@ impl RequestResponsesBehaviour { ProtocolSupport::Outbound }; - let rq_rp = RequestResponse::new(GenericCodec { - max_request_size: protocol.max_request_size, - max_response_size: protocol.max_response_size, - }, iter::once((protocol.name.as_bytes().to_vec(), protocol_support)), cfg); + let rq_rp = RequestResponse::new( + GenericCodec { + max_request_size: protocol.max_request_size, + max_response_size: protocol.max_response_size, + }, + iter::once((protocol.name.as_bytes().to_vec(), protocol_support)), + cfg, + ); match protocols.entry(protocol.name) { Entry::Vacant(e) => e.insert((rq_rp, protocol.inbound_queue)), - Entry::Occupied(e) => - return Err(RegisterError::DuplicateProtocol(e.key().clone())), + Entry::Occupied(e) => return Err(RegisterError::DuplicateProtocol(e.key().clone())), }; } @@ -348,19 +354,20 @@ impl RequestResponsesBehaviour { } impl NetworkBehaviour for RequestResponsesBehaviour { - type ProtocolsHandler = MultiHandler< - String, - as NetworkBehaviour>::ProtocolsHandler, - >; + type ProtocolsHandler = + MultiHandler as NetworkBehaviour>::ProtocolsHandler>; type OutEvent = Event; fn new_handler(&mut self) -> Self::ProtocolsHandler { - let iter = self.protocols.iter_mut() + let iter = self + .protocols + .iter_mut() .map(|(p, (r, _))| (p.to_string(), NetworkBehaviour::new_handler(r))); - MultiHandler::try_from_iter(iter) - .expect("Protocols are in a HashMap and there can be at most one handler per \ - protocol name, which is the only possible error; qed") + MultiHandler::try_from_iter(iter).expect( + "Protocols are in a HashMap and there can be at most one handler per \ + protocol name, which is the only possible error; qed", + ) } fn addresses_of_peer(&mut self, _: &PeerId) -> Vec { @@ -384,7 +391,12 @@ impl NetworkBehaviour for RequestResponsesBehaviour { } } - fn inject_connection_closed(&mut self, peer_id: &PeerId, conn: &ConnectionId, endpoint: &ConnectedPoint) { + fn inject_connection_closed( + &mut self, + peer_id: 
&PeerId, + conn: &ConnectionId, + endpoint: &ConnectedPoint, + ) { for (p, _) in self.protocols.values_mut() { NetworkBehaviour::inject_connection_closed(p, peer_id, conn, endpoint) } @@ -400,7 +412,7 @@ impl NetworkBehaviour for RequestResponsesBehaviour { &mut self, peer_id: Option<&PeerId>, addr: &Multiaddr, - error: &dyn std::error::Error + error: &dyn std::error::Error, ) { for (p, _) in self.protocols.values_mut() { NetworkBehaviour::inject_addr_reach_failure(p, peer_id, addr, error) @@ -488,11 +500,7 @@ impl NetworkBehaviour for RequestResponsesBehaviour { request_id, protocol: protocol_name, inner_channel, - response: OutgoingResponse { - result, - reputation_changes, - sent_feedback, - }, + response: OutgoingResponse { result, reputation_changes, sent_feedback }, } = match outcome { Some(outcome) => outcome, // The response builder was too busy or handling the request failed. This is @@ -514,10 +522,8 @@ impl NetworkBehaviour for RequestResponsesBehaviour { ); } else { if let Some(sent_feedback) = sent_feedback { - self.send_feedback.insert( - (protocol_name, request_id).into(), - sent_feedback - ); + self.send_feedback + .insert((protocol_name, request_id).into(), sent_feedback); } } } @@ -525,11 +531,8 @@ impl NetworkBehaviour for RequestResponsesBehaviour { if !reputation_changes.is_empty() { return Poll::Ready(NetworkBehaviourAction::GenerateEvent( - Event::ReputationChanges{ - peer, - changes: reputation_changes, - }, - )); + Event::ReputationChanges { peer, changes: reputation_changes }, + )) } } @@ -543,38 +546,35 @@ impl NetworkBehaviour for RequestResponsesBehaviour { // Other events generated by the underlying behaviour are transparently // passed through. 
NetworkBehaviourAction::DialAddress { address } => { - log::error!("The request-response isn't supposed to start dialing peers"); + log::error!( + "The request-response isn't supposed to start dialing peers" + ); return Poll::Ready(NetworkBehaviourAction::DialAddress { address }) - } - NetworkBehaviourAction::DialPeer { peer_id, condition } => { + }, + NetworkBehaviourAction::DialPeer { peer_id, condition } => return Poll::Ready(NetworkBehaviourAction::DialPeer { peer_id, condition, - }) - } - NetworkBehaviourAction::NotifyHandler { - peer_id, - handler, - event, - } => { + }), + NetworkBehaviourAction::NotifyHandler { peer_id, handler, event } => return Poll::Ready(NetworkBehaviourAction::NotifyHandler { peer_id, handler, event: ((*protocol).to_string(), event), - }) - } - NetworkBehaviourAction::ReportObservedAddr { address, score } => { + }), + NetworkBehaviourAction::ReportObservedAddr { address, score } => return Poll::Ready(NetworkBehaviourAction::ReportObservedAddr { - address, score, - }) - } + address, + score, + }), }; match ev { // Received a request from a remote. RequestResponseEvent::Message { peer, - message: RequestResponseMessage::Request { request_id, request, channel, .. }, + message: + RequestResponseMessage::Request { request_id, request, channel, .. }, } => { self.pending_responses_arrival_time.insert( (protocol.clone(), request_id.clone()).into(), @@ -605,7 +605,11 @@ impl NetworkBehaviour for RequestResponsesBehaviour { // `InboundFailure::Omission` event. if let Ok(response) = rx.await { Some(RequestProcessingOutcome { - peer, request_id, protocol, inner_channel: channel, response + peer, + request_id, + protocol, + inner_channel: channel, + response, }) } else { None @@ -614,27 +618,25 @@ impl NetworkBehaviour for RequestResponsesBehaviour { // This `continue` makes sure that `pending_responses` gets polled // after we have added the new element. 
- continue 'poll_all; - } + continue 'poll_all + }, // Received a response from a remote to one of our requests. RequestResponseEvent::Message { peer, - message: RequestResponseMessage::Response { - request_id, - response, - }, + message: RequestResponseMessage::Response { request_id, response }, .. } => { - let (started, delivered) = match self.pending_requests.remove( - &(protocol.clone(), request_id).into(), - ) { + let (started, delivered) = match self + .pending_requests + .remove(&(protocol.clone(), request_id).into()) + { Some((started, pending_response)) => { - let delivered = pending_response.send( - response.map_err(|()| RequestFailure::Refused), - ).map_err(|_| RequestFailure::Obsolete); + let delivered = pending_response + .send(response.map_err(|()| RequestFailure::Refused)) + .map_err(|_| RequestFailure::Obsolete); (started, delivered) - } + }, None => { log::warn!( target: "sub-libp2p", @@ -642,8 +644,8 @@ impl NetworkBehaviour for RequestResponsesBehaviour { request_id, ); debug_assert!(false); - continue; - } + continue + }, }; let out = Event::RequestFinished { @@ -653,21 +655,22 @@ impl NetworkBehaviour for RequestResponsesBehaviour { result: delivered, }; - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(out)); - } + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(out)) + }, // One of our requests has failed. RequestResponseEvent::OutboundFailure { - peer, - request_id, - error, - .. + peer, request_id, error, .. } => { - let started = match self.pending_requests.remove(&(protocol.clone(), request_id).into()) { + let started = match self + .pending_requests + .remove(&(protocol.clone(), request_id).into()) + { Some((started, pending_response)) => { - if pending_response.send( - Err(RequestFailure::Network(error.clone())), - ).is_err() { + if pending_response + .send(Err(RequestFailure::Network(error.clone()))) + .is_err() + { log::debug!( target: "sub-libp2p", "Request with id {:?} failed. 
At the same time local \ @@ -676,7 +679,7 @@ impl NetworkBehaviour for RequestResponsesBehaviour { ); } started - } + }, None => { log::warn!( target: "sub-libp2p", @@ -684,8 +687,8 @@ impl NetworkBehaviour for RequestResponsesBehaviour { request_id, ); debug_assert!(false); - continue; - } + continue + }, }; let out = Event::RequestFinished { @@ -695,29 +698,30 @@ impl NetworkBehaviour for RequestResponsesBehaviour { result: Err(RequestFailure::Network(error)), }; - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(out)); - } + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(out)) + }, // An inbound request failed, either while reading the request or due to failing // to send a response. - RequestResponseEvent::InboundFailure { request_id, peer, error, .. } => { - self.pending_responses_arrival_time.remove( - &(protocol.clone(), request_id).into(), - ); + RequestResponseEvent::InboundFailure { + request_id, peer, error, .. + } => { + self.pending_responses_arrival_time + .remove(&(protocol.clone(), request_id).into()); self.send_feedback.remove(&(protocol.clone(), request_id).into()); let out = Event::InboundRequest { peer, protocol: protocol.clone(), result: Err(ResponseFailure::Network(error)), }; - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(out)); - } + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(out)) + }, // A response to an inbound request has been sent. 
RequestResponseEvent::ResponseSent { request_id, peer } => { - let arrival_time = self.pending_responses_arrival_time.remove( - &(protocol.clone(), request_id).into(), - ) + let arrival_time = self + .pending_responses_arrival_time + .remove(&(protocol.clone(), request_id).into()) .map(|t| t.elapsed()) .expect( "Time is added for each inbound request on arrival and only \ @@ -727,9 +731,9 @@ impl NetworkBehaviour for RequestResponsesBehaviour { failed; qed.", ); - if let Some(send_feedback) = self.send_feedback.remove( - &(protocol.clone(), request_id).into() - ) { + if let Some(send_feedback) = + self.send_feedback.remove(&(protocol.clone(), request_id).into()) + { let _ = send_feedback.send(()); } @@ -739,14 +743,13 @@ impl NetworkBehaviour for RequestResponsesBehaviour { result: Ok(arrival_time), }; - return Poll::Ready(NetworkBehaviourAction::GenerateEvent(out)); - - } + return Poll::Ready(NetworkBehaviourAction::GenerateEvent(out)) + }, }; } } - break Poll::Pending; + break Poll::Pending } } } @@ -786,7 +789,7 @@ pub enum ResponseFailure { /// Implements the libp2p [`RequestResponseCodec`] trait. Defines how streams of bytes are turned /// into requests and responses and vice-versa. #[derive(Debug, Clone)] -#[doc(hidden)] // Needs to be public in order to satisfy the Rust compiler. +#[doc(hidden)] // Needs to be public in order to satisfy the Rust compiler. pub struct GenericCodec { max_request_size: u64, max_response_size: u64, @@ -807,13 +810,14 @@ impl RequestResponseCodec for GenericCodec { T: AsyncRead + Unpin + Send, { // Read the length. 
- let length = unsigned_varint::aio::read_usize(&mut io).await + let length = unsigned_varint::aio::read_usize(&mut io) + .await .map_err(|err| io::Error::new(io::ErrorKind::InvalidInput, err))?; if length > usize::try_from(self.max_request_size).unwrap_or(usize::MAX) { return Err(io::Error::new( io::ErrorKind::InvalidInput, - format!("Request size exceeds limit: {} > {}", length, self.max_request_size) - )); + format!("Request size exceeds limit: {} > {}", length, self.max_request_size), + )) } // Read the payload. @@ -841,7 +845,7 @@ impl RequestResponseCodec for GenericCodec { Err(unsigned_varint::io::ReadError::Io(err)) if matches!(err.kind(), io::ErrorKind::UnexpectedEof) => { - return Ok(Err(())); + return Ok(Err(())) } Err(err) => return Err(io::Error::new(io::ErrorKind::InvalidInput, err)), }; @@ -849,8 +853,8 @@ impl RequestResponseCodec for GenericCodec { if length > usize::try_from(self.max_response_size).unwrap_or(usize::MAX) { return Err(io::Error::new( io::ErrorKind::InvalidInput, - format!("Response size exceeds limit: {} > {}", length, self.max_response_size) - )); + format!("Response size exceeds limit: {} > {}", length, self.max_response_size), + )) } // Read the payload. 
@@ -913,23 +917,30 @@ impl RequestResponseCodec for GenericCodec { mod tests { use super::*; - use futures::channel::{mpsc, oneshot}; - use futures::executor::LocalPool; - use futures::task::Spawn; - use libp2p::identity::Keypair; - use libp2p::Multiaddr; - use libp2p::core::upgrade; - use libp2p::core::transport::{Transport, MemoryTransport}; - use libp2p::noise; - use libp2p::swarm::{Swarm, SwarmEvent}; + use futures::{ + channel::{mpsc, oneshot}, + executor::LocalPool, + task::Spawn, + }; + use libp2p::{ + core::{ + transport::{MemoryTransport, Transport}, + upgrade, + }, + identity::Keypair, + noise, + swarm::{Swarm, SwarmEvent}, + Multiaddr, + }; use std::{iter, time::Duration}; - fn build_swarm(list: impl Iterator) -> (Swarm, Multiaddr) { + fn build_swarm( + list: impl Iterator, + ) -> (Swarm, Multiaddr) { let keypair = Keypair::generate_ed25519(); - let noise_keys = noise::Keypair::::new() - .into_authentic(&keypair) - .unwrap(); + let noise_keys = + noise::Keypair::::new().into_authentic(&keypair).unwrap(); let transport = MemoryTransport .upgrade(upgrade::Version::V1) @@ -956,18 +967,24 @@ mod tests { .map(|_| { let (tx, mut rx) = mpsc::channel::(64); - pool.spawner().spawn_obj(async move { - while let Some(rq) = rx.next().await { - let (fb_tx, fb_rx) = oneshot::channel(); - assert_eq!(rq.payload, b"this is a request"); - let _ = rq.pending_response.send(super::OutgoingResponse { - result: Ok(b"this is a response".to_vec()), - reputation_changes: Vec::new(), - sent_feedback: Some(fb_tx), - }); - fb_rx.await.unwrap(); - } - }.boxed().into()).unwrap(); + pool.spawner() + .spawn_obj( + async move { + while let Some(rq) = rx.next().await { + let (fb_tx, fb_rx) = oneshot::channel(); + assert_eq!(rq.payload, b"this is a request"); + let _ = rq.pending_response.send(super::OutgoingResponse { + result: Ok(b"this is a response".to_vec()), + reputation_changes: Vec::new(), + sent_feedback: Some(fb_tx), + }); + fb_rx.await.unwrap(); + } + } + .boxed() + .into(), + ) 
+ .unwrap(); let protocol_config = ProtocolConfig { name: From::from(protocol_name), @@ -989,19 +1006,23 @@ mod tests { } // Running `swarm[0]` in the background. - pool.spawner().spawn_obj({ - let (mut swarm, _) = swarms.remove(0); - async move { - loop { - match swarm.next_event().await { - SwarmEvent::Behaviour(Event::InboundRequest { result, .. }) => { - result.unwrap(); - }, - _ => {} + pool.spawner() + .spawn_obj({ + let (mut swarm, _) = swarms.remove(0); + async move { + loop { + match swarm.next_event().await { + SwarmEvent::Behaviour(Event::InboundRequest { result, .. }) => { + result.unwrap(); + }, + _ => {}, + } } } - }.boxed().into() - }).unwrap(); + .boxed() + .into() + }) + .unwrap(); // Remove and run the remaining swarm. let (mut swarm, _) = swarms.remove(0); @@ -1021,14 +1042,12 @@ mod tests { ); assert!(response_receiver.is_none()); response_receiver = Some(receiver); - } - SwarmEvent::Behaviour(Event::RequestFinished { - result, .. - }) => { + }, + SwarmEvent::Behaviour(Event::RequestFinished { result, .. 
}) => { result.unwrap(); - break; - } - _ => {} + break + }, + _ => {}, } } @@ -1046,21 +1065,27 @@ mod tests { .map(|_| { let (tx, mut rx) = mpsc::channel::(64); - pool.spawner().spawn_obj(async move { - while let Some(rq) = rx.next().await { - assert_eq!(rq.payload, b"this is a request"); - let _ = rq.pending_response.send(super::OutgoingResponse { - result: Ok(b"this response exceeds the limit".to_vec()), - reputation_changes: Vec::new(), - sent_feedback: None, - }); - } - }.boxed().into()).unwrap(); + pool.spawner() + .spawn_obj( + async move { + while let Some(rq) = rx.next().await { + assert_eq!(rq.payload, b"this is a request"); + let _ = rq.pending_response.send(super::OutgoingResponse { + result: Ok(b"this response exceeds the limit".to_vec()), + reputation_changes: Vec::new(), + sent_feedback: None, + }); + } + } + .boxed() + .into(), + ) + .unwrap(); let protocol_config = ProtocolConfig { name: From::from(protocol_name), max_request_size: 1024, - max_response_size: 8, // <-- important for the test + max_response_size: 8, // <-- important for the test request_timeout: Duration::from_secs(30), inbound_queue: Some(tx), }; @@ -1078,20 +1103,24 @@ mod tests { // Running `swarm[0]` in the background until a `InboundRequest` event happens, // which is a hint about the test having ended. - pool.spawner().spawn_obj({ - let (mut swarm, _) = swarms.remove(0); - async move { - loop { - match swarm.next_event().await { - SwarmEvent::Behaviour(Event::InboundRequest { result, .. }) => { - assert!(result.is_ok()); - break - }, - _ => {} + pool.spawner() + .spawn_obj({ + let (mut swarm, _) = swarms.remove(0); + async move { + loop { + match swarm.next_event().await { + SwarmEvent::Behaviour(Event::InboundRequest { result, .. }) => { + assert!(result.is_ok()); + break + }, + _ => {}, + } } } - }.boxed().into() - }).unwrap(); + .boxed() + .into() + }) + .unwrap(); // Remove and run the remaining swarm. 
let (mut swarm, _) = swarms.remove(0); @@ -1111,20 +1140,18 @@ mod tests { ); assert!(response_receiver.is_none()); response_receiver = Some(receiver); - } - SwarmEvent::Behaviour(Event::RequestFinished { - result, .. - }) => { + }, + SwarmEvent::Behaviour(Event::RequestFinished { result, .. }) => { assert!(result.is_err()); - break; - } - _ => {} + break + }, + _ => {}, } } match response_receiver.unwrap().await.unwrap().unwrap_err() { RequestFailure::Network(OutboundFailure::ConnectionClosed) => {}, - _ => panic!() + _ => panic!(), } }); } @@ -1197,89 +1224,97 @@ mod tests { swarm_1.dial_addr(listen_add_2).unwrap(); // Run swarm 2 in the background, receiving two requests. - pool.spawner().spawn_obj( - async move { - loop { - match swarm_2.next_event().await { - SwarmEvent::Behaviour(Event::InboundRequest { result, .. }) => { - result.unwrap(); - }, - _ => {} + pool.spawner() + .spawn_obj( + async move { + loop { + match swarm_2.next_event().await { + SwarmEvent::Behaviour(Event::InboundRequest { result, .. }) => { + result.unwrap(); + }, + _ => {}, + } } } - }.boxed().into() - ).unwrap(); + .boxed() + .into(), + ) + .unwrap(); // Handle both requests sent by swarm 1 to swarm 2 in the background. // // Make sure both requests overlap, by answering the first only after receiving the // second. 
- pool.spawner().spawn_obj(async move { - let protocol_1_request = swarm_2_handler_1.next().await; - let protocol_2_request = swarm_2_handler_2.next().await; - - protocol_1_request.unwrap() - .pending_response - .send(OutgoingResponse { - result: Ok(b"this is a response".to_vec()), - reputation_changes: Vec::new(), - sent_feedback: None, - }) - .unwrap(); - protocol_2_request.unwrap() - .pending_response - .send(OutgoingResponse { - result: Ok(b"this is a response".to_vec()), - reputation_changes: Vec::new(), - sent_feedback: None, - }) - .unwrap(); - }.boxed().into()).unwrap(); + pool.spawner() + .spawn_obj( + async move { + let protocol_1_request = swarm_2_handler_1.next().await; + let protocol_2_request = swarm_2_handler_2.next().await; + + protocol_1_request + .unwrap() + .pending_response + .send(OutgoingResponse { + result: Ok(b"this is a response".to_vec()), + reputation_changes: Vec::new(), + sent_feedback: None, + }) + .unwrap(); + protocol_2_request + .unwrap() + .pending_response + .send(OutgoingResponse { + result: Ok(b"this is a response".to_vec()), + reputation_changes: Vec::new(), + sent_feedback: None, + }) + .unwrap(); + } + .boxed() + .into(), + ) + .unwrap(); // Have swarm 1 send two requests to swarm 2 and await responses. - pool.run_until( - async move { - let mut response_receivers = None; - let mut num_responses = 0; - - loop { - match swarm_1.next_event().await { - SwarmEvent::ConnectionEstablished { peer_id, .. 
} => { - let (sender_1, receiver_1) = oneshot::channel(); - let (sender_2, receiver_2) = oneshot::channel(); - swarm_1.behaviour_mut().send_request( - &peer_id, - protocol_name_1, - b"this is a request".to_vec(), - sender_1, - IfDisconnected::ImmediateError, - ); - swarm_1.behaviour_mut().send_request( - &peer_id, - protocol_name_2, - b"this is a request".to_vec(), - sender_2, - IfDisconnected::ImmediateError, - ); - assert!(response_receivers.is_none()); - response_receivers = Some((receiver_1, receiver_2)); - } - SwarmEvent::Behaviour(Event::RequestFinished { - result, .. - }) => { - num_responses += 1; - result.unwrap(); - if num_responses == 2 { - break; - } + pool.run_until(async move { + let mut response_receivers = None; + let mut num_responses = 0; + + loop { + match swarm_1.next_event().await { + SwarmEvent::ConnectionEstablished { peer_id, .. } => { + let (sender_1, receiver_1) = oneshot::channel(); + let (sender_2, receiver_2) = oneshot::channel(); + swarm_1.behaviour_mut().send_request( + &peer_id, + protocol_name_1, + b"this is a request".to_vec(), + sender_1, + IfDisconnected::ImmediateError, + ); + swarm_1.behaviour_mut().send_request( + &peer_id, + protocol_name_2, + b"this is a request".to_vec(), + sender_2, + IfDisconnected::ImmediateError, + ); + assert!(response_receivers.is_none()); + response_receivers = Some((receiver_1, receiver_2)); + }, + SwarmEvent::Behaviour(Event::RequestFinished { result, .. 
}) => { + num_responses += 1; + result.unwrap(); + if num_responses == 2 { + break } - _ => {} - } + }, + _ => {}, } - let (response_receiver_1, response_receiver_2) = response_receivers.unwrap(); - assert_eq!(response_receiver_1.await.unwrap().unwrap(), b"this is a response"); - assert_eq!(response_receiver_2.await.unwrap().unwrap(), b"this is a response"); } - ); + let (response_receiver_1, response_receiver_2) = response_receivers.unwrap(); + assert_eq!(response_receiver_1.await.unwrap().unwrap(), b"this is a response"); + assert_eq!(response_receiver_2.await.unwrap().unwrap(), b"this is a response"); + }); } } diff --git a/client/network/src/service.rs b/client/network/src/service.rs index fb303312093cd..b119a446021f5 100644 --- a/client/network/src/service.rs +++ b/client/network/src/service.rs @@ -28,59 +28,45 @@ //! which is then processed by [`NetworkWorker::poll`]. use crate::{ - ExHashT, NetworkStateInfo, NetworkStatus, behaviour::{self, Behaviour, BehaviourOut}, + bitswap::Bitswap, config::{parse_str_addr, Params, TransportConfig}, - DhtEvent, discovery::DiscoveryConfig, error::Error, + light_client_requests, network_state::{ NetworkState, NotConnectedPeer as NetworkStateNotConnectedPeer, Peer as NetworkStatePeer, }, on_demand_layer::AlwaysBadChecker, - light_client_requests, protocol::{ self, - message::generic::Roles, - NotifsHandlerError, - NotificationsSink, - PeerInfo, - Protocol, - Ready, event::Event, - sync::{SyncState, Status as SyncStatus}, + message::generic::Roles, + sync::{Status as SyncStatus, SyncState}, + NotificationsSink, NotifsHandlerError, PeerInfo, Protocol, Ready, }, - transactions, - transport, ReputationChange, - - bitswap::Bitswap, + transactions, transport, DhtEvent, ExHashT, NetworkStateInfo, NetworkStatus, ReputationChange, }; use codec::Encode as _; use futures::{channel::oneshot, prelude::*}; -use libp2p::{PeerId, multiaddr, Multiaddr}; -use libp2p::core::{ - ConnectedPoint, - Executor, - connection::{ - ConnectionLimits, - 
ConnectionError, - PendingConnectionError +use libp2p::{ + core::{ + connection::{ConnectionError, ConnectionLimits, PendingConnectionError}, + either::EitherError, + upgrade, ConnectedPoint, Executor, }, - either::EitherError, - upgrade -}; -use libp2p::kad::record; -use libp2p::ping::handler::PingFailure; -use libp2p::swarm::{ - AddressScore, - NetworkBehaviour, - SwarmBuilder, - SwarmEvent, - protocols_handler::NodeHandlerWrapperError + kad::record, + multiaddr, + ping::handler::PingFailure, + swarm::{ + protocols_handler::NodeHandlerWrapperError, AddressScore, NetworkBehaviour, SwarmBuilder, + SwarmEvent, + }, + Multiaddr, PeerId, }; -use log::{error, info, trace, debug, warn}; -use metrics::{Metrics, MetricSources, Histogram, HistogramVec}; +use log::{debug, error, info, trace, warn}; +use metrics::{Histogram, HistogramVec, MetricSources, Metrics}; use parking_lot::Mutex; use sc_peerset::PeersetHandle; use sp_consensus::import_queue::{BlockImportError, BlockImportResult, ImportQueue, Link}; @@ -91,10 +77,9 @@ use std::{ cmp, collections::{HashMap, HashSet}, convert::TryFrom as _, - fs, - iter, + fs, iter, marker::PhantomData, - num:: NonZeroUsize, + num::NonZeroUsize, pin::Pin, str, sync::{ @@ -104,7 +89,9 @@ use std::{ task::Poll, }; -pub use behaviour::{ResponseFailure, InboundFailure, RequestFailure, OutboundFailure, IfDisconnected}; +pub use behaviour::{ + IfDisconnected, InboundFailure, OutboundFailure, RequestFailure, ResponseFailure, +}; mod metrics; mod out_events; @@ -156,7 +143,12 @@ impl NetworkWorker { ¶ms.network_config.transport, )?; ensure_addresses_consistent_with_transport( - params.network_config.default_peers_set.reserved_nodes.iter().map(|x| &x.multiaddr), + params + .network_config + .default_peers_set + .reserved_nodes + .iter() + .map(|x| &x.multiaddr), ¶ms.network_config.transport, )?; for extra_set in ¶ms.network_config.extra_sets { @@ -176,10 +168,12 @@ impl NetworkWorker { fs::create_dir_all(path)?; } - let transactions_handler_proto 
= transactions::TransactionsHandlerPrototype::new( - params.protocol_id.clone() - ); - params.network_config.extra_sets.insert(0, transactions_handler_proto.set_config()); + let transactions_handler_proto = + transactions::TransactionsHandlerPrototype::new(params.protocol_id.clone()); + params + .network_config + .extra_sets + .insert(0, transactions_handler_proto.set_config()); // Private and public keys configuration. let local_identity = params.network_config.node_key.clone().into_keypair()?; @@ -201,8 +195,12 @@ impl NetworkWorker { params.chain.clone(), params.protocol_id.clone(), ¶ms.network_config, - iter::once(Vec::new()).chain((0..params.network_config.extra_sets.len() - 1) - .map(|_| default_notif_handshake_message.clone())).collect(), + iter::once(Vec::new()) + .chain( + (0..params.network_config.extra_sets.len() - 1) + .map(|_| default_notif_handshake_message.clone()), + ) + .collect(), params.block_announce_validator, params.metrics_registry.as_ref(), )?; @@ -221,23 +219,21 @@ impl NetworkWorker { let boot_node_ids = Arc::new(boot_node_ids); // Check for duplicate bootnodes. 
- known_addresses.iter() - .try_for_each(|(peer_id, addr)| - if let Some(other) = known_addresses - .iter() - .find(|o| o.1 == *addr && o.0 != *peer_id) - { - Err(Error::DuplicateBootnode { - address: addr.clone(), - first_id: peer_id.clone(), - second_id: other.0.clone(), - }) - } else { - Ok(()) - } - )?; + known_addresses.iter().try_for_each(|(peer_id, addr)| { + if let Some(other) = known_addresses.iter().find(|o| o.1 == *addr && o.0 != *peer_id) { + Err(Error::DuplicateBootnode { + address: addr.clone(), + first_id: peer_id.clone(), + second_id: other.0.clone(), + }) + } else { + Ok(()) + } + })?; - let checker = params.on_demand.as_ref() + let checker = params + .on_demand + .as_ref() .map(|od| od.checker().clone()) .unwrap_or_else(|| Arc::new(AlwaysBadChecker)); @@ -249,8 +245,7 @@ impl NetworkWorker { let (mut swarm, bandwidth): (Swarm, _) = { let user_agent = format!( "{} ({})", - params.network_config.client_version, - params.network_config.node_name + params.network_config.client_version, params.network_config.node_name ); let light_client_request_sender = { @@ -264,21 +259,25 @@ impl NetworkWorker { let discovery_config = { let mut config = DiscoveryConfig::new(local_public.clone()); config.with_user_defined(known_addresses); - config.discovery_limit(u64::from(params.network_config.default_peers_set.out_peers) + 15); + config.discovery_limit( + u64::from(params.network_config.default_peers_set.out_peers) + 15, + ); config.add_protocol(params.protocol_id.clone()); config.with_dht_random_walk(params.network_config.enable_dht_random_walk); config.allow_non_globals_in_dht(params.network_config.allow_non_globals_in_dht); - config.use_kademlia_disjoint_query_paths(params.network_config.kademlia_disjoint_query_paths); + config.use_kademlia_disjoint_query_paths( + params.network_config.kademlia_disjoint_query_paths, + ); match params.network_config.transport { TransportConfig::MemoryOnly => { config.with_mdns(false); config.allow_private_ipv4(false); - } + }, 
TransportConfig::Normal { enable_mdns, allow_private_ipv4, .. } => { config.with_mdns(enable_mdns); config.allow_private_ipv4(allow_private_ipv4); - } + }, } config @@ -288,7 +287,7 @@ impl NetworkWorker { let (config_mem, config_wasm) = match params.network_config.transport { TransportConfig::MemoryOnly => (true, None), TransportConfig::Normal { wasm_external_transport, .. } => - (false, wasm_external_transport) + (false, wasm_external_transport), }; // The yamux buffer size limit is configured to be equal to the maximum frame size @@ -298,27 +297,33 @@ impl NetworkWorker { // a variable-length-encoding 64bits number. In other words, we make the // assumption that no notification larger than 2^64 will ever be sent. let yamux_maximum_buffer_size = { - let requests_max = params.network_config - .request_response_protocols.iter() + let requests_max = params + .network_config + .request_response_protocols + .iter() .map(|cfg| usize::try_from(cfg.max_request_size).unwrap_or(usize::MAX)); - let responses_max = params.network_config - .request_response_protocols.iter() - .map(|cfg| usize::try_from(cfg.max_response_size).unwrap_or(usize::MAX)); - let notifs_max = params.network_config - .extra_sets.iter() - .map(|cfg| usize::try_from(cfg.max_notification_size).unwrap_or(usize::MAX)); + let responses_max = + params.network_config.request_response_protocols.iter().map(|cfg| { + usize::try_from(cfg.max_response_size).unwrap_or(usize::MAX) + }); + let notifs_max = params.network_config.extra_sets.iter().map(|cfg| { + usize::try_from(cfg.max_notification_size).unwrap_or(usize::MAX) + }); // A "default" max is added to cover all the other protocols: ping, identify, // kademlia, block announces, and transactions. 
let default_max = cmp::max( 1024 * 1024, usize::try_from(protocol::BLOCK_ANNOUNCES_TRANSACTIONS_SUBSTREAM_SIZE) - .unwrap_or(usize::MAX) + .unwrap_or(usize::MAX), ); iter::once(default_max) - .chain(requests_max).chain(responses_max).chain(notifs_max) - .max().expect("iterator known to always yield at least one element; qed") + .chain(requests_max) + .chain(responses_max) + .chain(notifs_max) + .max() + .expect("iterator known to always yield at least one element; qed") .saturating_add(10) }; @@ -327,7 +332,7 @@ impl NetworkWorker { config_mem, config_wasm, params.network_config.yamux_window_size, - yamux_maximum_buffer_size + yamux_maximum_buffer_size, ) }; @@ -348,18 +353,18 @@ impl NetworkWorker { match result { Ok(b) => b, - Err(crate::request_responses::RegisterError::DuplicateProtocol(proto)) => { - return Err(Error::DuplicateRequestResponseProtocol { - protocol: proto, - }) - }, + Err(crate::request_responses::RegisterError::DuplicateProtocol(proto)) => + return Err(Error::DuplicateRequestResponseProtocol { protocol: proto }), } }; let mut builder = SwarmBuilder::new(transport, behaviour, local_peer_id.clone()) - .connection_limits(ConnectionLimits::default() - .with_max_established_per_peer(Some(crate::MAX_CONNECTIONS_PER_PEER as u32)) - .with_max_established_incoming(Some(crate::MAX_CONNECTIONS_ESTABLISHED_INCOMING)) + .connection_limits( + ConnectionLimits::default() + .with_max_established_per_peer(Some(crate::MAX_CONNECTIONS_PER_PEER as u32)) + .with_max_established_incoming(Some( + crate::MAX_CONNECTIONS_ESTABLISHED_INCOMING, + )), ) .substream_upgrade_protocol_override(upgrade::Version::V1Lazy) .notify_handler_buffer_size(NonZeroUsize::new(32).expect("32 != 0; qed")) @@ -378,14 +383,15 @@ impl NetworkWorker { // Initialize the metrics. 
let metrics = match ¶ms.metrics_registry { - Some(registry) => { - Some(metrics::register(registry, MetricSources { + Some(registry) => Some(metrics::register( + registry, + MetricSources { bandwidth: bandwidth.clone(), major_syncing: is_major_syncing.clone(), connected_peers: num_connected.clone(), - })?) - } - None => None + }, + )?), + None => None, }; // Listen on multiaddresses. @@ -412,8 +418,9 @@ impl NetworkWorker { local_peer_id, to_worker, peers_notifications_sinks: peers_notifications_sinks.clone(), - notifications_sizes_metric: - metrics.as_ref().map(|metrics| metrics.notifications_sizes.clone()), + notifications_sizes_metric: metrics + .as_ref() + .map(|metrics| metrics.notifications_sizes.clone()), _marker: PhantomData, }); @@ -421,7 +428,7 @@ impl NetworkWorker { service.clone(), params.role, params.transaction_pool, - params.metrics_registry.as_ref() + params.metrics_registry.as_ref(), )?; (params.transactions_handler_executor)(tx_handler.run().boxed()); @@ -520,12 +527,18 @@ impl NetworkWorker { /// You must call this when a new block is finalized by the client. pub fn on_block_finalized(&mut self, hash: B::Hash, header: B::Header) { - self.network_service.behaviour_mut().user_protocol_mut().on_block_finalized(hash, &header); + self.network_service + .behaviour_mut() + .user_protocol_mut() + .on_block_finalized(hash, &header); } /// Inform the network service about new best imported block. pub fn new_best_block_imported(&mut self, hash: B::Hash, number: NumberFor) { - self.network_service.behaviour_mut().user_protocol_mut().new_best_block_imported(hash, number); + self.network_service + .behaviour_mut() + .user_protocol_mut() + .new_best_block_imported(hash, number); } /// Returns the local `PeerId`. 
@@ -550,50 +563,76 @@ impl NetworkWorker { let connected_peers = { let swarm = &mut *swarm; - open.iter().filter_map(move |peer_id| { - let known_addresses = NetworkBehaviour::addresses_of_peer(swarm.behaviour_mut(), peer_id) - .into_iter().collect(); - - let endpoint = if let Some(e) = swarm.behaviour_mut().node(peer_id).map(|i| i.endpoint()).flatten() { - e.clone().into() - } else { - error!(target: "sub-libp2p", "Found state inconsistency between custom protocol \ + open.iter() + .filter_map(move |peer_id| { + let known_addresses = + NetworkBehaviour::addresses_of_peer(swarm.behaviour_mut(), peer_id) + .into_iter() + .collect(); + + let endpoint = if let Some(e) = + swarm.behaviour_mut().node(peer_id).map(|i| i.endpoint()).flatten() + { + e.clone().into() + } else { + error!(target: "sub-libp2p", "Found state inconsistency between custom protocol \ and debug information about {:?}", peer_id); - return None - }; - - Some((peer_id.to_base58(), NetworkStatePeer { - endpoint, - version_string: swarm.behaviour_mut().node(peer_id) - .and_then(|i| i.client_version().map(|s| s.to_owned())), - latest_ping_time: swarm.behaviour_mut().node(peer_id).and_then(|i| i.latest_ping()), - known_addresses, - })) - }).collect() + return None + }; + + Some(( + peer_id.to_base58(), + NetworkStatePeer { + endpoint, + version_string: swarm + .behaviour_mut() + .node(peer_id) + .and_then(|i| i.client_version().map(|s| s.to_owned())), + latest_ping_time: swarm + .behaviour_mut() + .node(peer_id) + .and_then(|i| i.latest_ping()), + known_addresses, + }, + )) + }) + .collect() }; let not_connected_peers = { let swarm = &mut *swarm; - swarm.behaviour_mut().known_peers().into_iter() + swarm + .behaviour_mut() + .known_peers() + .into_iter() .filter(|p| open.iter().all(|n| n != p)) .map(move |peer_id| { - (peer_id.to_base58(), NetworkStateNotConnectedPeer { - version_string: swarm.behaviour_mut().node(&peer_id) - .and_then(|i| i.client_version().map(|s| s.to_owned())), - latest_ping_time: 
swarm.behaviour_mut().node(&peer_id).and_then(|i| i.latest_ping()), - known_addresses: NetworkBehaviour::addresses_of_peer(swarm.behaviour_mut(), &peer_id) - .into_iter().collect(), - }) + ( + peer_id.to_base58(), + NetworkStateNotConnectedPeer { + version_string: swarm + .behaviour_mut() + .node(&peer_id) + .and_then(|i| i.client_version().map(|s| s.to_owned())), + latest_ping_time: swarm + .behaviour_mut() + .node(&peer_id) + .and_then(|i| i.latest_ping()), + known_addresses: NetworkBehaviour::addresses_of_peer( + swarm.behaviour_mut(), + &peer_id, + ) + .into_iter() + .collect(), + }, + ) }) .collect() }; let peer_id = Swarm::::local_peer_id(&swarm).to_base58(); let listened_addresses = swarm.listeners().cloned().collect(); - let external_addresses = swarm.external_addresses() - .map(|r| &r.addr) - .cloned() - .collect(); + let external_addresses = swarm.external_addresses().map(|r| &r.addr).cloned().collect(); NetworkState { peer_id, @@ -607,7 +646,9 @@ impl NetworkWorker { /// Get currently connected peers. pub fn peers_debug_info(&mut self) -> Vec<(PeerId, PeerInfo)> { - self.network_service.behaviour_mut().user_protocol_mut() + self.network_service + .behaviour_mut() + .user_protocol_mut() .peers_info() .map(|(id, info)| (id.clone(), info.clone())) .collect() @@ -641,9 +682,7 @@ impl NetworkService { /// Need a better solution to manage authorized peers, but now just use reserved peers for /// prototyping. pub fn set_authorized_peers(&self, peers: HashSet) { - let _ = self - .to_worker - .unbounded_send(ServiceToWorkerMsg::SetReserved(peers)); + let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::SetReserved(peers)); } /// Set authorized_only flag. @@ -683,7 +722,12 @@ impl NetworkService { /// The protocol must have been registered with /// [`NetworkConfiguration::notifications_protocols`](crate::config::NetworkConfiguration::notifications_protocols). 
/// - pub fn write_notification(&self, target: PeerId, protocol: Cow<'static, str>, message: Vec) { + pub fn write_notification( + &self, + target: PeerId, + protocol: Cow<'static, str>, + message: Vec, + ) { // We clone the `NotificationsSink` in order to be able to unlock the network-wide // `peers_notifications_sinks` mutex as soon as possible. let sink = { @@ -697,7 +741,7 @@ impl NetworkService { "Attempted to send notification on missing or closed substream: {}, {:?}", target, protocol, ); - return; + return } }; @@ -798,19 +842,16 @@ impl NetworkService { if let Some(sink) = peers_notifications_sinks.get(&(target, protocol.clone())) { sink.clone() } else { - return Err(NotificationSenderError::Closed); + return Err(NotificationSenderError::Closed) } }; - let notification_size_metric = self.notifications_sizes_metric.as_ref().map(|histogram| { - histogram.with_label_values(&["out", &protocol]) - }); + let notification_size_metric = self + .notifications_sizes_metric + .as_ref() + .map(|histogram| histogram.with_label_values(&["out", &protocol])); - Ok(NotificationSender { - sink, - protocol_name: protocol, - notification_size_metric, - }) + Ok(NotificationSender { sink, protocol_name: protocol, notification_size_metric }) } /// Returns a stream containing the events that happen on the network. 
@@ -898,9 +939,9 @@ impl NetworkService { pub async fn status(&self) -> Result, ()> { let (tx, rx) = oneshot::channel(); - let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::NetworkStatus { - pending_response: tx, - }); + let _ = self + .to_worker + .unbounded_send(ServiceToWorkerMsg::NetworkStatus { pending_response: tx }); match rx.await { Ok(v) => v.map_err(|_| ()), @@ -918,9 +959,9 @@ impl NetworkService { pub async fn network_state(&self) -> Result { let (tx, rx) = oneshot::channel(); - let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::NetworkState { - pending_response: tx, - }); + let _ = self + .to_worker + .unbounded_send(ServiceToWorkerMsg::NetworkState { pending_response: tx }); match rx.await { Ok(v) => v.map_err(|_| ()), @@ -967,7 +1008,9 @@ impl NetworkService { /// prevents the local node from re-establishing an outgoing substream to this peer until it /// is added again. pub fn disconnect_peer(&self, who: PeerId, protocol: impl Into>) { - let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::DisconnectPeer(who, protocol.into())); + let _ = self + .to_worker + .unbounded_send(ServiceToWorkerMsg::DisconnectPeer(who, protocol.into())); } /// Request a justification for the given block from the network. @@ -982,9 +1025,7 @@ impl NetworkService { /// Clear all pending justification requests. pub fn clear_justification_requests(&self) { - let _ = self - .to_worker - .unbounded_send(ServiceToWorkerMsg::ClearJustificationRequests); + let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::ClearJustificationRequests); } /// Are we in the process of downloading the chain? @@ -997,9 +1038,7 @@ impl NetworkService { /// This will generate either a `ValueFound` or a `ValueNotFound` event and pass it as an /// item on the [`NetworkWorker`] stream. 
pub fn get_value(&self, key: &record::Key) { - let _ = self - .to_worker - .unbounded_send(ServiceToWorkerMsg::GetValue(key.clone())); + let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::GetValue(key.clone())); } /// Start putting a value in the DHT. @@ -1007,24 +1046,18 @@ impl NetworkService { /// This will generate either a `ValuePut` or a `ValuePutFailed` event and pass it as an /// item on the [`NetworkWorker`] stream. pub fn put_value(&self, key: record::Key, value: Vec) { - let _ = self - .to_worker - .unbounded_send(ServiceToWorkerMsg::PutValue(key, value)); + let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::PutValue(key, value)); } /// Connect to unreserved peers and allow unreserved peers to connect for syncing purposes. pub fn accept_unreserved_peers(&self) { - let _ = self - .to_worker - .unbounded_send(ServiceToWorkerMsg::SetReservedOnly(false)); + let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::SetReservedOnly(false)); } /// Disconnect from unreserved peers and deny new unreserved peers to connect for syncing /// purposes. pub fn deny_unreserved_peers(&self) { - let _ = self - .to_worker - .unbounded_send(ServiceToWorkerMsg::SetReservedOnly(true)); + let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::SetReservedOnly(true)); } /// Adds a `PeerId` and its address as reserved. The string should encode the address @@ -1042,17 +1075,13 @@ impl NetworkService { let _ = self .to_worker .unbounded_send(ServiceToWorkerMsg::AddKnownAddress(peer_id.clone(), addr)); - let _ = self - .to_worker - .unbounded_send(ServiceToWorkerMsg::AddReserved(peer_id)); + let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::AddReserved(peer_id)); Ok(()) } /// Removes a `PeerId` from the list of reserved peers. 
pub fn remove_reserved_peer(&self, peer_id: PeerId) { - let _ = self - .to_worker - .unbounded_send(ServiceToWorkerMsg::RemoveReserved(peer_id)); + let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::RemoveReserved(peer_id)); } /// Add peers to a peer set. @@ -1062,7 +1091,11 @@ impl NetworkService { /// /// Returns an `Err` if one of the given addresses is invalid or contains an /// invalid peer ID (which includes the local peer ID). - pub fn add_peers_to_reserved_set(&self, protocol: Cow<'static, str>, peers: HashSet) -> Result<(), String> { + pub fn add_peers_to_reserved_set( + &self, + protocol: Cow<'static, str>, + peers: HashSet, + ) -> Result<(), String> { let peers = self.split_multiaddr_and_peer_id(peers)?; for (peer_id, addr) in peers.into_iter() { @@ -1095,7 +1128,7 @@ impl NetworkService { pub fn remove_peers_from_reserved_set( &self, protocol: Cow<'static, str>, - peers: HashSet + peers: HashSet, ) -> Result<(), String> { let peers = self.split_multiaddr_and_peer_id(peers)?; for (peer_id, _) in peers.into_iter() { @@ -1113,9 +1146,7 @@ impl NetworkService { /// a stale fork missing. /// Passing empty `peers` set effectively removes the sync request. pub fn set_sync_fork_request(&self, peers: Vec, hash: B::Hash, number: NumberFor) { - let _ = self - .to_worker - .unbounded_send(ServiceToWorkerMsg::SyncFork(peers, hash, number)); + let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::SyncFork(peers, hash, number)); } /// Add a peer to a set of peers. @@ -1127,7 +1158,11 @@ impl NetworkService { /// /// Returns an `Err` if one of the given addresses is invalid or contains an /// invalid peer ID (which includes the local peer ID). 
- pub fn add_to_peers_set(&self, protocol: Cow<'static, str>, peers: HashSet) -> Result<(), String> { + pub fn add_to_peers_set( + &self, + protocol: Cow<'static, str>, + peers: HashSet, + ) -> Result<(), String> { let peers = self.split_multiaddr_and_peer_id(peers)?; for (peer_id, addr) in peers.into_iter() { @@ -1159,7 +1194,11 @@ impl NetworkService { /// invalid peer ID (which includes the local peer ID). // // NOTE: technically, this function only needs `Vec`, but we use `Multiaddr` here for convenience. - pub fn remove_from_peers_set(&self, protocol: Cow<'static, str>, peers: HashSet) -> Result<(), String> { + pub fn remove_from_peers_set( + &self, + protocol: Cow<'static, str>, + peers: HashSet, + ) -> Result<(), String> { let peers = self.split_multiaddr_and_peer_id(peers)?; for (peer_id, _) in peers.into_iter() { let _ = self @@ -1185,8 +1224,12 @@ impl NetworkService { /// /// Returns an `Err` if one of the given addresses is invalid or contains an /// invalid peer ID (which includes the local peer ID). 
- fn split_multiaddr_and_peer_id(&self, peers: HashSet) -> Result, String> { - peers.into_iter() + fn split_multiaddr_and_peer_id( + &self, + peers: HashSet, + ) -> Result, String> { + peers + .into_iter() .map(|mut addr| { let peer = match addr.pop() { Some(multiaddr::Protocol::P2p(key)) => PeerId::from_multihash(key) @@ -1206,9 +1249,7 @@ impl NetworkService { } } -impl sp_consensus::SyncOracle - for NetworkService -{ +impl sp_consensus::SyncOracle for NetworkService { fn is_major_syncing(&mut self) -> bool { NetworkService::is_major_syncing(self) } @@ -1218,9 +1259,7 @@ impl sp_consensus::SyncOracle } } -impl<'a, B: BlockT + 'static, H: ExHashT> sp_consensus::SyncOracle - for &'a NetworkService -{ +impl<'a, B: BlockT + 'static, H: ExHashT> sp_consensus::SyncOracle for &'a NetworkService { fn is_major_syncing(&mut self) -> bool { NetworkService::is_major_syncing(self) } @@ -1241,9 +1280,9 @@ impl sp_consensus::JustificationSyncLink for NetworkSe } impl NetworkStateInfo for NetworkService - where - B: sp_runtime::traits::Block, - H: ExHashT, +where + B: sp_runtime::traits::Block, + H: ExHashT, { /// Returns the local external addresses. fn external_addresses(&self) -> Vec { @@ -1271,7 +1310,9 @@ pub struct NotificationSender { impl NotificationSender { /// Returns a future that resolves when the `NotificationSender` is ready to send a notification. 
- pub async fn ready<'a>(&'a self) -> Result, NotificationSenderError> { + pub async fn ready<'a>( + &'a self, + ) -> Result, NotificationSenderError> { Ok(NotificationSenderReady { ready: match self.sink.reserve_notification().await { Ok(r) => r, @@ -1318,9 +1359,7 @@ impl<'a> NotificationSenderReady<'a> { ); trace!(target: "sub-libp2p", "Handler({:?}) <= Async notification", self.peer_id); - self.ready - .send(notification) - .map_err(|()| NotificationSenderError::Closed) + self.ready.send(notification).map_err(|()| NotificationSenderError::Closed) } } @@ -1417,9 +1456,8 @@ impl Future for NetworkWorker { let this = &mut *self; // Poll the import queue for actions to perform. - this.import_queue.poll_actions(cx, &mut NetworkLink { - protocol: &mut this.network_service, - }); + this.import_queue + .poll_actions(cx, &mut NetworkLink { protocol: &mut this.network_service }); // Check for new incoming light client requests. if let Some(light_client_rqs) = this.light_client_rqs.as_mut() { @@ -1428,8 +1466,10 @@ impl Future for NetworkWorker { match result { Ok(()) => {}, Err(light_client_requests::sender::SendRequestError::TooManyRequests) => { - log::warn!("Couldn't start light client request: too many pending requests"); - } + log::warn!( + "Couldn't start light client request: too many pending requests" + ); + }, } if let Some(metrics) = this.metrics.as_ref() { @@ -1451,7 +1491,7 @@ impl Future for NetworkWorker { num_iterations += 1; if num_iterations >= 100 { cx.waker().wake_by_ref(); - break; + break } // Process the next message coming from the `NetworkService`. 
@@ -1462,12 +1502,21 @@ impl Future for NetworkWorker { }; match msg { - ServiceToWorkerMsg::AnnounceBlock(hash, data) => - this.network_service.behaviour_mut().user_protocol_mut().announce_block(hash, data), - ServiceToWorkerMsg::RequestJustification(hash, number) => - this.network_service.behaviour_mut().user_protocol_mut().request_justification(&hash, number), - ServiceToWorkerMsg::ClearJustificationRequests => - this.network_service.behaviour_mut().user_protocol_mut().clear_justification_requests(), + ServiceToWorkerMsg::AnnounceBlock(hash, data) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .announce_block(hash, data), + ServiceToWorkerMsg::RequestJustification(hash, number) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .request_justification(&hash, number), + ServiceToWorkerMsg::ClearJustificationRequests => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .clear_justification_requests(), ServiceToWorkerMsg::PropagateTransaction(hash) => this.tx_handler_controller.propagate_transaction(hash), ServiceToWorkerMsg::PropagateTransactions => @@ -1476,30 +1525,68 @@ impl Future for NetworkWorker { this.network_service.behaviour_mut().get_value(&key), ServiceToWorkerMsg::PutValue(key, value) => this.network_service.behaviour_mut().put_value(key, value), - ServiceToWorkerMsg::SetReservedOnly(reserved_only) => - this.network_service.behaviour_mut().user_protocol_mut().set_reserved_only(reserved_only), - ServiceToWorkerMsg::SetReserved(peers) => - this.network_service.behaviour_mut().user_protocol_mut().set_reserved_peers(peers), - ServiceToWorkerMsg::AddReserved(peer_id) => - this.network_service.behaviour_mut().user_protocol_mut().add_reserved_peer(peer_id), - ServiceToWorkerMsg::RemoveReserved(peer_id) => - this.network_service.behaviour_mut().user_protocol_mut().remove_reserved_peer(peer_id), - ServiceToWorkerMsg::AddSetReserved(protocol, peer_id) => - 
this.network_service.behaviour_mut().user_protocol_mut().add_set_reserved_peer(protocol, peer_id), - ServiceToWorkerMsg::RemoveSetReserved(protocol, peer_id) => - this.network_service.behaviour_mut().user_protocol_mut().remove_set_reserved_peer(protocol, peer_id), + ServiceToWorkerMsg::SetReservedOnly(reserved_only) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .set_reserved_only(reserved_only), + ServiceToWorkerMsg::SetReserved(peers) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .set_reserved_peers(peers), + ServiceToWorkerMsg::AddReserved(peer_id) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .add_reserved_peer(peer_id), + ServiceToWorkerMsg::RemoveReserved(peer_id) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .remove_reserved_peer(peer_id), + ServiceToWorkerMsg::AddSetReserved(protocol, peer_id) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .add_set_reserved_peer(protocol, peer_id), + ServiceToWorkerMsg::RemoveSetReserved(protocol, peer_id) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .remove_set_reserved_peer(protocol, peer_id), ServiceToWorkerMsg::AddKnownAddress(peer_id, addr) => this.network_service.behaviour_mut().add_known_address(peer_id, addr), - ServiceToWorkerMsg::AddToPeersSet(protocol, peer_id) => - this.network_service.behaviour_mut().user_protocol_mut().add_to_peers_set(protocol, peer_id), - ServiceToWorkerMsg::RemoveFromPeersSet(protocol, peer_id) => - this.network_service.behaviour_mut().user_protocol_mut().remove_from_peers_set(protocol, peer_id), - ServiceToWorkerMsg::SyncFork(peer_ids, hash, number) => - this.network_service.behaviour_mut().user_protocol_mut().set_sync_fork_request(peer_ids, &hash, number), - ServiceToWorkerMsg::EventStream(sender) => - this.event_streams.push(sender), - ServiceToWorkerMsg::Request { target, protocol, request, pending_response, connect } => { - 
this.network_service.behaviour_mut().send_request(&target, &protocol, request, pending_response, connect); + ServiceToWorkerMsg::AddToPeersSet(protocol, peer_id) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .add_to_peers_set(protocol, peer_id), + ServiceToWorkerMsg::RemoveFromPeersSet(protocol, peer_id) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .remove_from_peers_set(protocol, peer_id), + ServiceToWorkerMsg::SyncFork(peer_ids, hash, number) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .set_sync_fork_request(peer_ids, &hash, number), + ServiceToWorkerMsg::EventStream(sender) => this.event_streams.push(sender), + ServiceToWorkerMsg::Request { + target, + protocol, + request, + pending_response, + connect, + } => { + this.network_service.behaviour_mut().send_request( + &target, + &protocol, + request, + pending_response, + connect, + ); }, ServiceToWorkerMsg::NetworkStatus { pending_response } => { let _ = pending_response.send(Ok(this.status())); @@ -1507,10 +1594,16 @@ impl Future for NetworkWorker { ServiceToWorkerMsg::NetworkState { pending_response } => { let _ = pending_response.send(Ok(this.network_state())); }, - ServiceToWorkerMsg::DisconnectPeer(who, protocol_name) => - this.network_service.behaviour_mut().user_protocol_mut().disconnect_peer(&who, &protocol_name), - ServiceToWorkerMsg::NewBestBlockImported(hash, number) => - this.network_service.behaviour_mut().user_protocol_mut().new_best_block_imported(hash, number), + ServiceToWorkerMsg::DisconnectPeer(who, protocol_name) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .disconnect_peer(&who, &protocol_name), + ServiceToWorkerMsg::NewBestBlockImported(hash, number) => this + .network_service + .behaviour_mut() + .user_protocol_mut() + .new_best_block_imported(hash, number), } } @@ -1521,7 +1614,7 @@ impl Future for NetworkWorker { num_iterations += 1; if num_iterations >= 1000 { cx.waker().wake_by_ref(); 
- break; + break } // Process the next action coming from the network. @@ -1537,28 +1630,40 @@ impl Future for NetworkWorker { } this.import_queue.import_blocks(origin, blocks); }, - Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::JustificationImport(origin, hash, nb, justifications))) => { + Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::JustificationImport( + origin, + hash, + nb, + justifications, + ))) => { if let Some(metrics) = this.metrics.as_ref() { metrics.import_queue_justifications_submitted.inc(); } this.import_queue.import_justifications(origin, hash, nb, justifications); }, - Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::InboundRequest { protocol, result, .. })) => { + Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::InboundRequest { + protocol, + result, + .. + })) => { if let Some(metrics) = this.metrics.as_ref() { match result { Ok(serve_time) => { - metrics.requests_in_success_total + metrics + .requests_in_success_total .with_label_values(&[&protocol]) .observe(serve_time.as_secs_f64()); - } + }, Err(err) => { let reason = match err { ResponseFailure::Network(InboundFailure::Timeout) => "timeout", - ResponseFailure::Network(InboundFailure::UnsupportedProtocols) => - // `UnsupportedProtocols` is reported for every single - // inbound request whenever a request with an unsupported - // protocol is received. This is not reported in order to - // avoid confusions. + ResponseFailure::Network( + InboundFailure::UnsupportedProtocols, + ) => + // `UnsupportedProtocols` is reported for every single + // inbound request whenever a request with an unsupported + // protocol is received. This is not reported in order to + // avoid confusions. 
continue, ResponseFailure::Network(InboundFailure::ResponseOmission) => "busy-omitted", @@ -1566,23 +1671,28 @@ impl Future for NetworkWorker { "connection-closed", }; - metrics.requests_in_failure_total + metrics + .requests_in_failure_total .with_label_values(&[&protocol, reason]) .inc(); - } + }, } } }, Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::RequestFinished { - protocol, duration, result, .. - })) => { + protocol, + duration, + result, + .. + })) => if let Some(metrics) = this.metrics.as_ref() { match result { Ok(_) => { - metrics.requests_out_success_total + metrics + .requests_out_success_total .with_label_values(&[&protocol]) .observe(duration.as_secs_f64()); - } + }, Err(err) => { let reason = match err { RequestFailure::NotConnected => "not-connected", @@ -1591,34 +1701,42 @@ impl Future for NetworkWorker { RequestFailure::Obsolete => "obsolete", RequestFailure::Network(OutboundFailure::DialFailure) => "dial-failure", - RequestFailure::Network(OutboundFailure::Timeout) => - "timeout", + RequestFailure::Network(OutboundFailure::Timeout) => "timeout", RequestFailure::Network(OutboundFailure::ConnectionClosed) => "connection-closed", - RequestFailure::Network(OutboundFailure::UnsupportedProtocols) => - "unsupported", + RequestFailure::Network( + OutboundFailure::UnsupportedProtocols, + ) => "unsupported", }; - metrics.requests_out_failure_total + metrics + .requests_out_failure_total .with_label_values(&[&protocol, reason]) .inc(); - } + }, } - } - }, - Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::RandomKademliaStarted(protocol))) => { + }, + Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::RandomKademliaStarted( + protocol, + ))) => if let Some(metrics) = this.metrics.as_ref() { - metrics.kademlia_random_queries_total + metrics + .kademlia_random_queries_total .with_label_values(&[&protocol.as_ref()]) .inc(); - } - }, + }, Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::NotificationStreamOpened { - remote, protocol, negotiated_fallback, 
notifications_sink, role + remote, + protocol, + negotiated_fallback, + notifications_sink, + role, })) => { if let Some(metrics) = this.metrics.as_ref() { - metrics.notifications_streams_opened_total - .with_label_values(&[&protocol]).inc(); + metrics + .notifications_streams_opened_total + .with_label_values(&[&protocol]) + .inc(); } { let mut peers_notifications_sinks = this.peers_notifications_sinks.lock(); @@ -1634,7 +1752,9 @@ impl Future for NetworkWorker { }); }, Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::NotificationStreamReplaced { - remote, protocol, notifications_sink + remote, + protocol, + notifications_sink, })) => { let mut peers_notifications_sinks = this.peers_notifications_sinks.lock(); if let Some(s) = peers_notifications_sinks.get_mut(&(remote, protocol)) { @@ -1668,10 +1788,15 @@ impl Future for NetworkWorker { role, });*/ }, - Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::NotificationStreamClosed { remote, protocol })) => { + Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::NotificationStreamClosed { + remote, + protocol, + })) => { if let Some(metrics) = this.metrics.as_ref() { - metrics.notifications_streams_closed_total - .with_label_values(&[&protocol[..]]).inc(); + metrics + .notifications_streams_closed_total + .with_label_values(&[&protocol[..]]) + .inc(); } this.event_streams.send(Event::NotificationStreamClosed { remote: remote.clone(), @@ -1679,23 +1804,24 @@ impl Future for NetworkWorker { }); { let mut peers_notifications_sinks = this.peers_notifications_sinks.lock(); - let _previous_value = peers_notifications_sinks - .remove(&(remote.clone(), protocol)); + let _previous_value = + peers_notifications_sinks.remove(&(remote.clone(), protocol)); debug_assert!(_previous_value.is_some()); } }, - Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::NotificationsReceived { remote, messages })) => { + Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::NotificationsReceived { + remote, + messages, + })) => { if let Some(metrics) = 
this.metrics.as_ref() { for (protocol, message) in &messages { - metrics.notifications_sizes + metrics + .notifications_sizes .with_label_values(&["in", protocol]) .observe(message.len() as f64); } } - this.event_streams.send(Event::NotificationsReceived { - remote, - messages, - }); + this.event_streams.send(Event::NotificationsReceived { remote, messages }); }, Poll::Ready(SwarmEvent::Behaviour(BehaviourOut::SyncConnected(remote))) => { this.event_streams.send(Event::SyncConnected { remote }); @@ -1711,13 +1837,19 @@ impl Future for NetworkWorker { DhtEvent::ValuePut(_) => "value-put", DhtEvent::ValuePutFailed(_) => "value-put-failed", }; - metrics.kademlia_query_duration.with_label_values(&[query_type]) + metrics + .kademlia_query_duration + .with_label_values(&[query_type]) .observe(duration.as_secs_f64()); } this.event_streams.send(Event::Dht(event)); }, - Poll::Ready(SwarmEvent::ConnectionEstablished { peer_id, endpoint, num_established }) => { + Poll::Ready(SwarmEvent::ConnectionEstablished { + peer_id, + endpoint, + num_established, + }) => { debug!(target: "sub-libp2p", "Libp2p => Connected({:?})", peer_id); if let Some(metrics) = this.metrics.as_ref() { @@ -1732,7 +1864,12 @@ impl Future for NetworkWorker { } } }, - Poll::Ready(SwarmEvent::ConnectionClosed { peer_id, cause, endpoint, num_established }) => { + Poll::Ready(SwarmEvent::ConnectionClosed { + peer_id, + cause, + endpoint, + num_established, + }) => { debug!(target: "sub-libp2p", "Libp2p => Disconnected({:?}, {:?})", peer_id, cause); if let Some(metrics) = this.metrics.as_ref() { let direction = match endpoint { @@ -1741,17 +1878,27 @@ impl Future for NetworkWorker { }; let reason = match cause { Some(ConnectionError::IO(_)) => "transport-error", - Some(ConnectionError::Handler(NodeHandlerWrapperError::Handler(EitherError::A(EitherError::A( - EitherError::A(EitherError::B(EitherError::A( - PingFailure::Timeout)))))))) => "ping-timeout", - 
Some(ConnectionError::Handler(NodeHandlerWrapperError::Handler(EitherError::A(EitherError::A( - EitherError::A(EitherError::A( - NotifsHandlerError::SyncNotificationsClogged))))))) => "sync-notifications-clogged", - Some(ConnectionError::Handler(NodeHandlerWrapperError::Handler(_))) => "protocol-error", - Some(ConnectionError::Handler(NodeHandlerWrapperError::KeepAliveTimeout)) => "keep-alive-timeout", + Some(ConnectionError::Handler(NodeHandlerWrapperError::Handler( + EitherError::A(EitherError::A(EitherError::A(EitherError::B( + EitherError::A(PingFailure::Timeout), + )))), + ))) => "ping-timeout", + Some(ConnectionError::Handler(NodeHandlerWrapperError::Handler( + EitherError::A(EitherError::A(EitherError::A(EitherError::A( + NotifsHandlerError::SyncNotificationsClogged, + )))), + ))) => "sync-notifications-clogged", + Some(ConnectionError::Handler(NodeHandlerWrapperError::Handler(_))) => + "protocol-error", + Some(ConnectionError::Handler( + NodeHandlerWrapperError::KeepAliveTimeout, + )) => "keep-alive-timeout", None => "actively-closed", }; - metrics.connections_closed_total.with_label_values(&[direction, reason]).inc(); + metrics + .connections_closed_total + .with_label_values(&[direction, reason]) + .inc(); // `num_established` represents the number of *remaining* connections. 
if num_established == 0 { @@ -1791,15 +1938,22 @@ impl Future for NetworkWorker { if let Some(metrics) = this.metrics.as_ref() { match error { - PendingConnectionError::ConnectionLimit(_) => - metrics.pending_connections_errors_total.with_label_values(&["limit-reached"]).inc(), - PendingConnectionError::InvalidPeerId => - metrics.pending_connections_errors_total.with_label_values(&["invalid-peer-id"]).inc(), - PendingConnectionError::Transport(_) | PendingConnectionError::IO(_) => - metrics.pending_connections_errors_total.with_label_values(&["transport-error"]).inc(), + PendingConnectionError::ConnectionLimit(_) => metrics + .pending_connections_errors_total + .with_label_values(&["limit-reached"]) + .inc(), + PendingConnectionError::InvalidPeerId => metrics + .pending_connections_errors_total + .with_label_values(&["invalid-peer-id"]) + .inc(), + PendingConnectionError::Transport(_) | + PendingConnectionError::IO(_) => metrics + .pending_connections_errors_total + .with_label_values(&["transport-error"]) + .inc(), } } - } + }, Poll::Ready(SwarmEvent::Dialing(peer_id)) => trace!(target: "sub-libp2p", "Libp2p => Dialing({:?})", peer_id), Poll::Ready(SwarmEvent::IncomingConnection { local_addr, send_back_addr }) => { @@ -1809,7 +1963,11 @@ impl Future for NetworkWorker { metrics.incoming_connections_total.inc(); } }, - Poll::Ready(SwarmEvent::IncomingConnectionError { local_addr, send_back_addr, error }) => { + Poll::Ready(SwarmEvent::IncomingConnectionError { + local_addr, + send_back_addr, + error, + }) => { debug!(target: "sub-libp2p", "Libp2p => IncomingConnectionError({},{}): {}", local_addr, send_back_addr, error); if let Some(metrics) = this.metrics.as_ref() { @@ -1820,14 +1978,20 @@ impl Future for NetworkWorker { PendingConnectionError::IO(_) => "transport-error", }; - metrics.incoming_connections_errors_total.with_label_values(&[reason]).inc(); + metrics + .incoming_connections_errors_total + .with_label_values(&[reason]) + .inc(); } }, 
Poll::Ready(SwarmEvent::BannedPeer { peer_id, endpoint }) => { debug!(target: "sub-libp2p", "Libp2p => BannedPeer({}). Connected via {:?}.", peer_id, endpoint); if let Some(metrics) = this.metrics.as_ref() { - metrics.incoming_connections_errors_total.with_label_values(&["banned"]).inc(); + metrics + .incoming_connections_errors_total + .with_label_values(&["banned"]) + .inc(); } }, Poll::Ready(SwarmEvent::UnknownPeerUnreachableAddr { address, error }) => @@ -1837,8 +2001,8 @@ impl Future for NetworkWorker { if let Some(metrics) = this.metrics.as_ref() { metrics.listeners_local_addresses.sub(addresses.len() as u64); } - let addrs = addresses.into_iter().map(|a| a.to_string()) - .collect::>().join(", "); + let addrs = + addresses.into_iter().map(|a| a.to_string()).collect::>().join(", "); match reason { Ok(()) => error!( target: "sub-libp2p", @@ -1861,7 +2025,8 @@ impl Future for NetworkWorker { }; } - let num_connected_peers = this.network_service.behaviour_mut().user_protocol_mut().num_connected_peers(); + let num_connected_peers = + this.network_service.behaviour_mut().user_protocol_mut().num_connected_peers(); // Update the variables shared with the `NetworkService`. 
this.num_connected.store(num_connected_peers, Ordering::Relaxed); @@ -1873,10 +2038,11 @@ impl Future for NetworkWorker { *this.external_addresses.lock() = external_addresses; } - let is_major_syncing = match this.network_service.behaviour_mut().user_protocol_mut().sync_state().state { - SyncState::Idle => false, - SyncState::Downloading => true, - }; + let is_major_syncing = + match this.network_service.behaviour_mut().user_protocol_mut().sync_state().state { + SyncState::Idle => false, + SyncState::Downloading => true, + }; this.tx_handler_controller.set_gossip_enabled(!is_major_syncing); @@ -1885,25 +2051,41 @@ impl Future for NetworkWorker { if let Some(metrics) = this.metrics.as_ref() { for (proto, buckets) in this.network_service.behaviour_mut().num_entries_per_kbucket() { for (lower_ilog2_bucket_bound, num_entries) in buckets { - metrics.kbuckets_num_nodes - .with_label_values(&[&proto.as_ref(), &lower_ilog2_bucket_bound.to_string()]) + metrics + .kbuckets_num_nodes + .with_label_values(&[ + &proto.as_ref(), + &lower_ilog2_bucket_bound.to_string(), + ]) .set(num_entries as u64); } } - for (proto, num_entries) in this.network_service.behaviour_mut().num_kademlia_records() { - metrics.kademlia_records_count.with_label_values(&[&proto.as_ref()]).set(num_entries as u64); + for (proto, num_entries) in this.network_service.behaviour_mut().num_kademlia_records() + { + metrics + .kademlia_records_count + .with_label_values(&[&proto.as_ref()]) + .set(num_entries as u64); } - for (proto, num_entries) in this.network_service.behaviour_mut().kademlia_records_total_size() { - metrics.kademlia_records_sizes_total.with_label_values(&[&proto.as_ref()]).set(num_entries as u64); + for (proto, num_entries) in + this.network_service.behaviour_mut().kademlia_records_total_size() + { + metrics + .kademlia_records_sizes_total + .with_label_values(&[&proto.as_ref()]) + .set(num_entries as u64); } - metrics.peerset_num_discovered.set( - 
this.network_service.behaviour_mut().user_protocol().num_discovered_peers() as u64 - ); + metrics + .peerset_num_discovered + .set(this.network_service.behaviour_mut().user_protocol().num_discovered_peers() + as u64); metrics.peerset_num_requested.set( - this.network_service.behaviour_mut().user_protocol().requested_peers().count() as u64 + this.network_service.behaviour_mut().user_protocol().requested_peers().count() + as u64, ); metrics.pending_connections.set( - Swarm::network_info(&this.network_service).connection_counters().num_pending() as u64 + Swarm::network_info(&this.network_service).connection_counters().num_pending() + as u64, ); } @@ -1911,8 +2093,7 @@ impl Future for NetworkWorker { } } -impl Unpin for NetworkWorker { -} +impl Unpin for NetworkWorker {} /// The libp2p swarm, customized for our needs. type Swarm = libp2p::swarm::Swarm>; @@ -1927,15 +2108,32 @@ impl<'a, B: BlockT> Link for NetworkLink<'a, B> { &mut self, imported: usize, count: usize, - results: Vec<(Result>, BlockImportError>, B::Hash)> + results: Vec<(Result>, BlockImportError>, B::Hash)>, ) { - self.protocol.behaviour_mut().user_protocol_mut().on_blocks_processed(imported, count, results) + self.protocol + .behaviour_mut() + .user_protocol_mut() + .on_blocks_processed(imported, count, results) } - fn justification_imported(&mut self, who: PeerId, hash: &B::Hash, number: NumberFor, success: bool) { - self.protocol.behaviour_mut().user_protocol_mut().justification_import_result(who, hash.clone(), number, success); + fn justification_imported( + &mut self, + who: PeerId, + hash: &B::Hash, + number: NumberFor, + success: bool, + ) { + self.protocol.behaviour_mut().user_protocol_mut().justification_import_result( + who, + hash.clone(), + number, + success, + ); } fn request_justification(&mut self, hash: &B::Hash, number: NumberFor) { - self.protocol.behaviour_mut().user_protocol_mut().request_justification(hash, number) + self.protocol + .behaviour_mut() + .user_protocol_mut() + 
.request_justification(hash, number) } } @@ -1945,9 +2143,9 @@ fn ensure_addresses_consistent_with_transport<'a>( ) -> Result<(), Error> { if matches!(transport, TransportConfig::MemoryOnly) { let addresses: Vec<_> = addresses - .filter(|x| x.iter() - .any(|y| !matches!(y, libp2p::core::multiaddr::Protocol::Memory(_))) - ) + .filter(|x| { + x.iter().any(|y| !matches!(y, libp2p::core::multiaddr::Protocol::Memory(_))) + }) .cloned() .collect(); @@ -1955,13 +2153,11 @@ fn ensure_addresses_consistent_with_transport<'a>( return Err(Error::AddressesForAnotherTransport { transport: transport.clone(), addresses, - }); + }) } } else { let addresses: Vec<_> = addresses - .filter(|x| x.iter() - .any(|y| matches!(y, libp2p::core::multiaddr::Protocol::Memory(_))) - ) + .filter(|x| x.iter().any(|y| matches!(y, libp2p::core::multiaddr::Protocol::Memory(_)))) .cloned() .collect(); @@ -1969,7 +2165,7 @@ fn ensure_addresses_consistent_with_transport<'a>( return Err(Error::AddressesForAnotherTransport { transport: transport.clone(), addresses, - }); + }) } } diff --git a/client/network/src/service/metrics.rs b/client/network/src/service/metrics.rs index 40d65ea45f111..e33cd4b194d69 100644 --- a/client/network/src/service/metrics.rs +++ b/client/network/src/service/metrics.rs @@ -18,10 +18,8 @@ use crate::transport::BandwidthSinks; use prometheus_endpoint::{ - self as prometheus, - Counter, CounterVec, Gauge, GaugeVec, HistogramOpts, - PrometheusError, Registry, U64, Opts, - SourcedCounter, SourcedGauge, MetricSource, + self as prometheus, Counter, CounterVec, Gauge, GaugeVec, HistogramOpts, MetricSource, Opts, + PrometheusError, Registry, SourcedCounter, SourcedGauge, U64, }; use std::{ str, @@ -267,13 +265,14 @@ impl BandwidthCounters { /// Registers the `BandwidthCounters` metric whose values are /// obtained from the given sinks. 
fn register(registry: &Registry, sinks: Arc) -> Result<(), PrometheusError> { - prometheus::register(SourcedCounter::new( - &Opts::new( - "sub_libp2p_network_bytes_total", - "Total bandwidth usage" - ).variable_label("direction"), - BandwidthCounters(sinks), - )?, registry)?; + prometheus::register( + SourcedCounter::new( + &Opts::new("sub_libp2p_network_bytes_total", "Total bandwidth usage") + .variable_label("direction"), + BandwidthCounters(sinks), + )?, + registry, + )?; Ok(()) } @@ -296,13 +295,16 @@ impl MajorSyncingGauge { /// Registers the `MajorSyncGauge` metric whose value is /// obtained from the given `AtomicBool`. fn register(registry: &Registry, value: Arc) -> Result<(), PrometheusError> { - prometheus::register(SourcedGauge::new( - &Opts::new( - "sub_libp2p_is_major_syncing", - "Whether the node is performing a major sync or not.", - ), - MajorSyncingGauge(value), - )?, registry)?; + prometheus::register( + SourcedGauge::new( + &Opts::new( + "sub_libp2p_is_major_syncing", + "Whether the node is performing a major sync or not.", + ), + MajorSyncingGauge(value), + )?, + registry, + )?; Ok(()) } @@ -324,13 +326,13 @@ impl NumConnectedGauge { /// Registers the `MajorSyncingGauge` metric whose value is /// obtained from the given `AtomicUsize`. 
fn register(registry: &Registry, value: Arc) -> Result<(), PrometheusError> { - prometheus::register(SourcedGauge::new( - &Opts::new( - "sub_libp2p_peers_count", - "Number of connected peers", - ), - NumConnectedGauge(value), - )?, registry)?; + prometheus::register( + SourcedGauge::new( + &Opts::new("sub_libp2p_peers_count", "Number of connected peers"), + NumConnectedGauge(value), + )?, + registry, + )?; Ok(()) } diff --git a/client/network/src/service/out_events.rs b/client/network/src/service/out_events.rs index 7ec6c608a8fcf..82e94fabd82aa 100644 --- a/client/network/src/service/out_events.rs +++ b/client/network/src/service/out_events.rs @@ -34,13 +34,15 @@ use crate::Event; -use futures::{prelude::*, channel::mpsc, ready, stream::FusedStream}; +use futures::{channel::mpsc, prelude::*, ready, stream::FusedStream}; use parking_lot::Mutex; use prometheus_endpoint::{register, CounterVec, GaugeVec, Opts, PrometheusError, Registry, U64}; use std::{ convert::TryFrom as _, - fmt, pin::Pin, sync::Arc, - task::{Context, Poll} + fmt, + pin::Pin, + sync::Arc, + task::{Context, Poll}, }; /// Creates a new channel that can be associated to a [`OutChannels`]. @@ -100,8 +102,10 @@ impl Stream for Receiver { let metrics = self.metrics.lock().clone(); match metrics.as_ref().map(|m| m.as_ref()) { Some(Some(metrics)) => metrics.event_out(&ev, self.name), - Some(None) => (), // no registry - None => log::warn!("Inconsistency in out_events: event happened before sender associated"), + Some(None) => (), // no registry + None => log::warn!( + "Inconsistency in out_events: event happened before sender associated" + ), } Poll::Ready(Some(ev)) } else { @@ -136,16 +140,10 @@ pub struct OutChannels { impl OutChannels { /// Creates a new empty collection of senders. pub fn new(registry: Option<&Registry>) -> Result { - let metrics = if let Some(registry) = registry { - Some(Metrics::register(registry)?) 
- } else { - None - }; + let metrics = + if let Some(registry) = registry { Some(Metrics::register(registry)?) } else { None }; - Ok(OutChannels { - event_streams: Vec::new(), - metrics: Arc::new(metrics), - }) + Ok(OutChannels { event_streams: Vec::new(), metrics: Arc::new(metrics) }) } /// Adds a new [`Sender`] to the collection. @@ -164,9 +162,8 @@ impl OutChannels { /// Sends an event. pub fn send(&mut self, event: Event) { - self.event_streams.retain(|sender| { - sender.inner.unbounded_send(event.clone()).is_ok() - }); + self.event_streams + .retain(|sender| sender.inner.unbounded_send(event.clone()).is_ok()); if let Some(metrics) = &*self.metrics { for ev in &self.event_streams { @@ -223,20 +220,18 @@ impl Metrics { fn event_in(&self, event: &Event, num: u64, name: &str) { match event { Event::Dht(_) => { - self.events_total - .with_label_values(&["dht", "sent", name]) - .inc_by(num); - } + self.events_total.with_label_values(&["dht", "sent", name]).inc_by(num); + }, Event::SyncConnected { .. } => { self.events_total .with_label_values(&["sync-connected", "sent", name]) .inc_by(num); - } + }, Event::SyncDisconnected { .. } => { self.events_total .with_label_values(&["sync-disconnected", "sent", name]) .inc_by(num); - } + }, Event::NotificationStreamOpened { protocol, .. } => { self.events_total .with_label_values(&[&format!("notif-open-{:?}", protocol), "sent", name]) @@ -247,36 +242,31 @@ impl Metrics { .with_label_values(&[&format!("notif-closed-{:?}", protocol), "sent", name]) .inc_by(num); }, - Event::NotificationsReceived { messages, .. } => { + Event::NotificationsReceived { messages, .. 
} => for (protocol, message) in messages { self.events_total .with_label_values(&[&format!("notif-{:?}", protocol), "sent", name]) .inc_by(num); - self.notifications_sizes - .with_label_values(&[protocol, "sent", name]) - .inc_by(num.saturating_mul(u64::try_from(message.len()).unwrap_or(u64::MAX))); - } - }, + self.notifications_sizes.with_label_values(&[protocol, "sent", name]).inc_by( + num.saturating_mul(u64::try_from(message.len()).unwrap_or(u64::MAX)), + ); + }, } } fn event_out(&self, event: &Event, name: &str) { match event { Event::Dht(_) => { - self.events_total - .with_label_values(&["dht", "received", name]) - .inc(); - } + self.events_total.with_label_values(&["dht", "received", name]).inc(); + }, Event::SyncConnected { .. } => { - self.events_total - .with_label_values(&["sync-connected", "received", name]) - .inc(); - } + self.events_total.with_label_values(&["sync-connected", "received", name]).inc(); + }, Event::SyncDisconnected { .. } => { self.events_total .with_label_values(&["sync-disconnected", "received", name]) .inc(); - } + }, Event::NotificationStreamOpened { protocol, .. } => { self.events_total .with_label_values(&[&format!("notif-open-{:?}", protocol), "received", name]) @@ -287,7 +277,7 @@ impl Metrics { .with_label_values(&[&format!("notif-closed-{:?}", protocol), "received", name]) .inc(); }, - Event::NotificationsReceived { messages, .. } => { + Event::NotificationsReceived { messages, .. 
} => for (protocol, message) in messages { self.events_total .with_label_values(&[&format!("notif-{:?}", protocol), "received", name]) @@ -295,8 +285,7 @@ impl Metrics { self.notifications_sizes .with_label_values(&[&protocol, "received", name]) .inc_by(u64::try_from(message.len()).unwrap_or(u64::MAX)); - } - }, + }, } } } diff --git a/client/network/src/service/tests.rs b/client/network/src/service/tests.rs index 4a739e50628a5..7acfeadcae13b 100644 --- a/client/network/src/service/tests.rs +++ b/client/network/src/service/tests.rs @@ -16,13 +16,14 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use crate::{config, Event, NetworkService, NetworkWorker}; -use crate::block_request_handler::BlockRequestHandler; -use crate::state_request_handler::StateRequestHandler; -use crate::light_client_requests::handler::LightClientRequestHandler; +use crate::{ + block_request_handler::BlockRequestHandler, config, + light_client_requests::handler::LightClientRequestHandler, + state_request_handler::StateRequestHandler, Event, NetworkService, NetworkWorker, +}; -use libp2p::PeerId; use futures::prelude::*; +use libp2p::PeerId; use sp_runtime::traits::{Block as BlockT, Header as _}; use std::{borrow::Cow, sync::Arc, time::Duration}; use substrate_test_runtime_client::{TestClientBuilder, TestClientBuilderExt as _}; @@ -37,14 +38,10 @@ type TestNetworkService = NetworkService< /// /// > **Note**: We return the events stream in order to not possibly lose events between the /// > construction of the service and the moment the events stream is grabbed. 
-fn build_test_full_node(config: config::NetworkConfiguration) - -> (Arc, impl Stream) -{ - let client = Arc::new( - TestClientBuilder::with_default_backend() - .build_with_longest_chain() - .0, - ); +fn build_test_full_node( + config: config::NetworkConfiguration, +) -> (Arc, impl Stream) { + let client = Arc::new(TestClientBuilder::with_default_backend().build_with_longest_chain().0); #[derive(Clone)] struct PassThroughVerifier(bool); @@ -69,14 +66,13 @@ fn build_test_full_node(config: config::NetworkConfiguration) .log(|l| { l.try_as_raw(sp_runtime::generic::OpaqueDigestItemId::Consensus(b"aura")) .or_else(|| { - l.try_as_raw(sp_runtime::generic::OpaqueDigestItemId::Consensus(b"babe")) + l.try_as_raw(sp_runtime::generic::OpaqueDigestItemId::Consensus( + b"babe", + )) }) }) .map(|blob| { - vec![( - sp_blockchain::well_known_cache_keys::AUTHORITIES, - blob.to_vec(), - )] + vec![(sp_blockchain::well_known_cache_keys::AUTHORITIES, blob.to_vec())] }); let mut import = sp_consensus::BlockImportParams::new(origin, header); @@ -99,30 +95,20 @@ fn build_test_full_node(config: config::NetworkConfiguration) let protocol_id = config::ProtocolId::from("/test-protocol-name"); let block_request_protocol_config = { - let (handler, protocol_config) = BlockRequestHandler::new( - &protocol_id, - client.clone(), - 50, - ); + let (handler, protocol_config) = BlockRequestHandler::new(&protocol_id, client.clone(), 50); async_std::task::spawn(handler.run().boxed()); protocol_config }; let state_request_protocol_config = { - let (handler, protocol_config) = StateRequestHandler::new( - &protocol_id, - client.clone(), - 50, - ); + let (handler, protocol_config) = StateRequestHandler::new(&protocol_id, client.clone(), 50); async_std::task::spawn(handler.run().boxed()); protocol_config }; let light_client_request_protocol_config = { - let (handler, protocol_config) = LightClientRequestHandler::new( - &protocol_id, - client.clone(), - ); + let (handler, protocol_config) = + 
LightClientRequestHandler::new(&protocol_id, client.clone()); async_std::task::spawn(handler.run().boxed()); protocol_config }; @@ -130,7 +116,9 @@ fn build_test_full_node(config: config::NetworkConfiguration) let worker = NetworkWorker::new(config::Params { role: config::Role::Full, executor: None, - transactions_handler_executor: Box::new(|task| { async_std::task::spawn(task); }), + transactions_handler_executor: Box::new(|task| { + async_std::task::spawn(task); + }), network_config: config, chain: client.clone(), on_demand: None, @@ -162,43 +150,42 @@ const PROTOCOL_NAME: Cow<'static, str> = Cow::Borrowed("/foo"); /// Builds two nodes and their associated events stream. /// The nodes are connected together and have the `PROTOCOL_NAME` protocol registered. -fn build_nodes_one_proto() - -> (Arc, impl Stream, Arc, impl Stream) -{ +fn build_nodes_one_proto() -> ( + Arc, + impl Stream, + Arc, + impl Stream, +) { let listen_addr = config::build_multiaddr![Memory(rand::random::())]; let (node1, events_stream1) = build_test_full_node(config::NetworkConfiguration { - extra_sets: vec![ - config::NonDefaultSetConfig { - notifications_protocol: PROTOCOL_NAME, - fallback_names: Vec::new(), - max_notification_size: 1024 * 1024, - set_config: Default::default() - } - ], + extra_sets: vec![config::NonDefaultSetConfig { + notifications_protocol: PROTOCOL_NAME, + fallback_names: Vec::new(), + max_notification_size: 1024 * 1024, + set_config: Default::default(), + }], listen_addresses: vec![listen_addr.clone()], transport: config::TransportConfig::MemoryOnly, - .. 
config::NetworkConfiguration::new_local() + ..config::NetworkConfiguration::new_local() }); let (node2, events_stream2) = build_test_full_node(config::NetworkConfiguration { - extra_sets: vec![ - config::NonDefaultSetConfig { - notifications_protocol: PROTOCOL_NAME, - fallback_names: Vec::new(), - max_notification_size: 1024 * 1024, - set_config: config::SetConfig { - reserved_nodes: vec![config::MultiaddrWithPeerId { - multiaddr: listen_addr, - peer_id: node1.local_peer_id().clone(), - }], - .. Default::default() - } - } - ], + extra_sets: vec![config::NonDefaultSetConfig { + notifications_protocol: PROTOCOL_NAME, + fallback_names: Vec::new(), + max_notification_size: 1024 * 1024, + set_config: config::SetConfig { + reserved_nodes: vec![config::MultiaddrWithPeerId { + multiaddr: listen_addr, + peer_id: node1.local_peer_id().clone(), + }], + ..Default::default() + }, + }], listen_addresses: vec![], transport: config::TransportConfig::MemoryOnly, - .. config::NetworkConfiguration::new_local() + ..config::NetworkConfiguration::new_local() }); (node1, events_stream1, node2, events_stream2) @@ -214,10 +201,18 @@ fn notifications_state_consistent() { // Write some initial notifications that shouldn't get through. for _ in 0..(rand::random::() % 5) { - node1.write_notification(node2.local_peer_id().clone(), PROTOCOL_NAME, b"hello world".to_vec()); + node1.write_notification( + node2.local_peer_id().clone(), + PROTOCOL_NAME, + b"hello world".to_vec(), + ); } for _ in 0..(rand::random::() % 5) { - node2.write_notification(node1.local_peer_id().clone(), PROTOCOL_NAME, b"hello world".to_vec()); + node2.write_notification( + node1.local_peer_id().clone(), + PROTOCOL_NAME, + b"hello world".to_vec(), + ); } async_std::task::block_on(async move { @@ -234,16 +229,24 @@ fn notifications_state_consistent() { iterations += 1; if iterations >= 1_000 { assert!(something_happened); - break; + break } // Start by sending a notification from node1 to node2 and vice-versa. 
Part of the // test consists in ensuring that notifications get ignored if the stream isn't open. if rand::random::() % 5 >= 3 { - node1.write_notification(node2.local_peer_id().clone(), PROTOCOL_NAME, b"hello world".to_vec()); + node1.write_notification( + node2.local_peer_id().clone(), + PROTOCOL_NAME, + b"hello world".to_vec(), + ); } if rand::random::() % 5 >= 3 { - node2.write_notification(node1.local_peer_id().clone(), PROTOCOL_NAME, b"hello world".to_vec()); + node2.write_notification( + node1.local_peer_id().clone(), + PROTOCOL_NAME, + b"hello world".to_vec(), + ); } // Also randomly disconnect the two nodes from time to time. @@ -272,32 +275,40 @@ fn notifications_state_consistent() { }; match next_event { - future::Either::Left(Event::NotificationStreamOpened { remote, protocol, .. }) => { + future::Either::Left(Event::NotificationStreamOpened { + remote, protocol, .. + }) => { something_happened = true; assert!(!node1_to_node2_open); node1_to_node2_open = true; assert_eq!(remote, *node2.local_peer_id()); assert_eq!(protocol, PROTOCOL_NAME); - } - future::Either::Right(Event::NotificationStreamOpened { remote, protocol, .. }) => { + }, + future::Either::Right(Event::NotificationStreamOpened { + remote, protocol, .. + }) => { something_happened = true; assert!(!node2_to_node1_open); node2_to_node1_open = true; assert_eq!(remote, *node1.local_peer_id()); assert_eq!(protocol, PROTOCOL_NAME); - } - future::Either::Left(Event::NotificationStreamClosed { remote, protocol, .. }) => { + }, + future::Either::Left(Event::NotificationStreamClosed { + remote, protocol, .. + }) => { assert!(node1_to_node2_open); node1_to_node2_open = false; assert_eq!(remote, *node2.local_peer_id()); assert_eq!(protocol, PROTOCOL_NAME); - } - future::Either::Right(Event::NotificationStreamClosed { remote, protocol, .. }) => { + }, + future::Either::Right(Event::NotificationStreamClosed { + remote, protocol, .. 
+ }) => { assert!(node2_to_node1_open); node2_to_node1_open = false; assert_eq!(remote, *node1.local_peer_id()); assert_eq!(protocol, PROTOCOL_NAME); - } + }, future::Either::Left(Event::NotificationsReceived { remote, .. }) => { assert!(node1_to_node2_open); assert_eq!(remote, *node2.local_peer_id()); @@ -305,10 +316,10 @@ fn notifications_state_consistent() { node1.write_notification( node2.local_peer_id().clone(), PROTOCOL_NAME, - b"hello world".to_vec() + b"hello world".to_vec(), ); } - } + }, future::Either::Right(Event::NotificationsReceived { remote, .. }) => { assert!(node2_to_node1_open); assert_eq!(remote, *node1.local_peer_id()); @@ -316,18 +327,18 @@ fn notifications_state_consistent() { node2.write_notification( node1.local_peer_id().clone(), PROTOCOL_NAME, - b"hello world".to_vec() + b"hello world".to_vec(), ); } - } + }, // Add new events here. - future::Either::Left(Event::SyncConnected { .. }) => {} - future::Either::Right(Event::SyncConnected { .. }) => {} - future::Either::Left(Event::SyncDisconnected { .. }) => {} - future::Either::Right(Event::SyncDisconnected { .. }) => {} - future::Either::Left(Event::Dht(_)) => {} - future::Either::Right(Event::Dht(_)) => {} + future::Either::Left(Event::SyncConnected { .. }) => {}, + future::Either::Right(Event::SyncConnected { .. }) => {}, + future::Either::Left(Event::SyncDisconnected { .. }) => {}, + future::Either::Right(Event::SyncDisconnected { .. }) => {}, + future::Either::Left(Event::Dht(_)) => {}, + future::Either::Right(Event::Dht(_)) => {}, }; } }); @@ -339,19 +350,14 @@ fn lots_of_incoming_peers_works() { let (main_node, _) = build_test_full_node(config::NetworkConfiguration { listen_addresses: vec![listen_addr.clone()], - extra_sets: vec![ - config::NonDefaultSetConfig { - notifications_protocol: PROTOCOL_NAME, - fallback_names: Vec::new(), - max_notification_size: 1024 * 1024, - set_config: config::SetConfig { - in_peers: u32::MAX, - .. 
Default::default() - }, - } - ], + extra_sets: vec![config::NonDefaultSetConfig { + notifications_protocol: PROTOCOL_NAME, + fallback_names: Vec::new(), + max_notification_size: 1024 * 1024, + set_config: config::SetConfig { in_peers: u32::MAX, ..Default::default() }, + }], transport: config::TransportConfig::MemoryOnly, - .. config::NetworkConfiguration::new_local() + ..config::NetworkConfiguration::new_local() }); let main_node_peer_id = main_node.local_peer_id().clone(); @@ -365,22 +371,20 @@ fn lots_of_incoming_peers_works() { let (_dialing_node, event_stream) = build_test_full_node(config::NetworkConfiguration { listen_addresses: vec![], - extra_sets: vec![ - config::NonDefaultSetConfig { - notifications_protocol: PROTOCOL_NAME, - fallback_names: Vec::new(), - max_notification_size: 1024 * 1024, - set_config: config::SetConfig { - reserved_nodes: vec![config::MultiaddrWithPeerId { - multiaddr: listen_addr.clone(), - peer_id: main_node_peer_id.clone(), - }], - .. Default::default() - }, - } - ], + extra_sets: vec![config::NonDefaultSetConfig { + notifications_protocol: PROTOCOL_NAME, + fallback_names: Vec::new(), + max_notification_size: 1024 * 1024, + set_config: config::SetConfig { + reserved_nodes: vec![config::MultiaddrWithPeerId { + multiaddr: listen_addr.clone(), + peer_id: main_node_peer_id.clone(), + }], + ..Default::default() + }, + }], transport: config::TransportConfig::MemoryOnly, - .. 
config::NetworkConfiguration::new_local() + ..config::NetworkConfiguration::new_local() }); background_tasks_to_wait.push(async_std::task::spawn(async move { @@ -416,9 +420,7 @@ fn lots_of_incoming_peers_works() { })); } - futures::executor::block_on(async move { - future::join_all(background_tasks_to_wait).await - }); + futures::executor::block_on(async move { future::join_all(background_tasks_to_wait).await }); } #[test] @@ -437,14 +439,13 @@ fn notifications_back_pressure() { while received_notifications < TOTAL_NOTIFS { match events_stream2.next().await.unwrap() { Event::NotificationStreamClosed { .. } => panic!(), - Event::NotificationsReceived { messages, .. } => { + Event::NotificationsReceived { messages, .. } => for message in messages { assert_eq!(message.0, PROTOCOL_NAME); assert_eq!(message.1, format!("hello #{}", received_notifications)); received_notifications += 1; - } - } - _ => {} + }, + _ => {}, }; if rand::random::() < 2 { @@ -458,7 +459,7 @@ fn notifications_back_pressure() { loop { match events_stream1.next().await.unwrap() { Event::NotificationStreamOpened { .. } => break, - _ => {} + _ => {}, }; } @@ -483,37 +484,33 @@ fn fallback_name_working() { let listen_addr = config::build_multiaddr![Memory(rand::random::())]; let (node1, mut events_stream1) = build_test_full_node(config::NetworkConfiguration { - extra_sets: vec![ - config::NonDefaultSetConfig { - notifications_protocol: NEW_PROTOCOL_NAME.clone(), - fallback_names: vec![PROTOCOL_NAME], - max_notification_size: 1024 * 1024, - set_config: Default::default() - } - ], + extra_sets: vec![config::NonDefaultSetConfig { + notifications_protocol: NEW_PROTOCOL_NAME.clone(), + fallback_names: vec![PROTOCOL_NAME], + max_notification_size: 1024 * 1024, + set_config: Default::default(), + }], listen_addresses: vec![listen_addr.clone()], transport: config::TransportConfig::MemoryOnly, - .. 
config::NetworkConfiguration::new_local() + ..config::NetworkConfiguration::new_local() }); let (_, mut events_stream2) = build_test_full_node(config::NetworkConfiguration { - extra_sets: vec![ - config::NonDefaultSetConfig { - notifications_protocol: PROTOCOL_NAME, - fallback_names: Vec::new(), - max_notification_size: 1024 * 1024, - set_config: config::SetConfig { - reserved_nodes: vec![config::MultiaddrWithPeerId { - multiaddr: listen_addr, - peer_id: node1.local_peer_id().clone(), - }], - .. Default::default() - } - } - ], + extra_sets: vec![config::NonDefaultSetConfig { + notifications_protocol: PROTOCOL_NAME, + fallback_names: Vec::new(), + max_notification_size: 1024 * 1024, + set_config: config::SetConfig { + reserved_nodes: vec![config::MultiaddrWithPeerId { + multiaddr: listen_addr, + peer_id: node1.local_peer_id().clone(), + }], + ..Default::default() + }, + }], listen_addresses: vec![], transport: config::TransportConfig::MemoryOnly, - .. config::NetworkConfiguration::new_local() + ..config::NetworkConfiguration::new_local() }); let receiver = async_std::task::spawn(async move { @@ -525,7 +522,7 @@ fn fallback_name_working() { assert_eq!(negotiated_fallback, None); break }, - _ => {} + _ => {}, }; } }); @@ -539,7 +536,7 @@ fn fallback_name_working() { assert_eq!(negotiated_fallback, Some(PROTOCOL_NAME)); break }, - _ => {} + _ => {}, }; } @@ -555,7 +552,7 @@ fn ensure_listen_addresses_consistent_with_transport_memory() { let _ = build_test_full_node(config::NetworkConfiguration { listen_addresses: vec![listen_addr.clone()], transport: config::TransportConfig::MemoryOnly, - .. config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) + ..config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) }); } @@ -566,7 +563,7 @@ fn ensure_listen_addresses_consistent_with_transport_not_memory() { let _ = build_test_full_node(config::NetworkConfiguration { listen_addresses: vec![listen_addr.clone()], - .. 
config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) + ..config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) }); } @@ -583,7 +580,7 @@ fn ensure_boot_node_addresses_consistent_with_transport_memory() { listen_addresses: vec![listen_addr.clone()], transport: config::TransportConfig::MemoryOnly, boot_nodes: vec![boot_node], - .. config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) + ..config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) }); } @@ -599,7 +596,7 @@ fn ensure_boot_node_addresses_consistent_with_transport_not_memory() { let _ = build_test_full_node(config::NetworkConfiguration { listen_addresses: vec![listen_addr.clone()], boot_nodes: vec![boot_node], - .. config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) + ..config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) }); } @@ -617,9 +614,9 @@ fn ensure_reserved_node_addresses_consistent_with_transport_memory() { transport: config::TransportConfig::MemoryOnly, default_peers_set: config::SetConfig { reserved_nodes: vec![reserved_node], - .. Default::default() + ..Default::default() }, - .. config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) + ..config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) }); } @@ -636,9 +633,9 @@ fn ensure_reserved_node_addresses_consistent_with_transport_not_memory() { listen_addresses: vec![listen_addr.clone()], default_peers_set: config::SetConfig { reserved_nodes: vec![reserved_node], - .. Default::default() + ..Default::default() }, - .. 
config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) + ..config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) }); } @@ -652,7 +649,7 @@ fn ensure_public_addresses_consistent_with_transport_memory() { listen_addresses: vec![listen_addr.clone()], transport: config::TransportConfig::MemoryOnly, public_addresses: vec![public_address], - .. config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) + ..config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) }); } @@ -665,6 +662,6 @@ fn ensure_public_addresses_consistent_with_transport_not_memory() { let _ = build_test_full_node(config::NetworkConfiguration { listen_addresses: vec![listen_addr.clone()], public_addresses: vec![public_address], - .. config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) + ..config::NetworkConfiguration::new("test-node", "test-client", Default::default(), None) }); } diff --git a/client/network/src/state_request_handler.rs b/client/network/src/state_request_handler.rs index d340ff21bd449..a15ee246a2ef8 100644 --- a/client/network/src/state_request_handler.rs +++ b/client/network/src/state_request_handler.rs @@ -17,22 +17,27 @@ //! Helper for handling (i.e. answering) state requests from a remote peer via the //! [`crate::request_responses::RequestResponsesBehaviour`]. 
-use codec::{Encode, Decode}; -use crate::chain::Client; -use crate::config::ProtocolId; -use crate::request_responses::{IncomingRequest, OutgoingResponse, ProtocolConfig}; -use crate::schema::v1::{StateResponse, StateRequest, StateEntry}; -use crate::{PeerId, ReputationChange}; -use futures::channel::{mpsc, oneshot}; -use futures::stream::StreamExt; +use crate::{ + chain::Client, + config::ProtocolId, + request_responses::{IncomingRequest, OutgoingResponse, ProtocolConfig}, + schema::v1::{StateEntry, StateRequest, StateResponse}, + PeerId, ReputationChange, +}; +use codec::{Decode, Encode}; +use futures::{ + channel::{mpsc, oneshot}, + stream::StreamExt, +}; use log::debug; use lru::LruCache; use prost::Message; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::Block as BlockT; -use std::sync::Arc; -use std::time::Duration; -use std::hash::{Hasher, Hash}; +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; +use std::{ + hash::{Hash, Hasher}, + sync::Arc, + time::Duration, +}; const LOG_TARGET: &str = "sync"; const MAX_RESPONSE_BYTES: usize = 2 * 1024 * 1024; // Actual reponse may be bigger. 
@@ -127,9 +132,7 @@ impl StateRequestHandler { Ok(()) => debug!(target: LOG_TARGET, "Handled block request from {}.", peer), Err(e) => debug!( target: LOG_TARGET, - "Failed to handle state request from {}: {}", - peer, - e, + "Failed to handle state request from {}: {}", peer, e, ), } } @@ -144,11 +147,8 @@ impl StateRequestHandler { let request = StateRequest::decode(&payload[..])?; let block: B::Hash = Decode::decode(&mut request.block.as_ref())?; - let key = SeenRequestsKey { - peer: *peer, - block: block.clone(), - start: request.start.clone(), - }; + let key = + SeenRequestsKey { peer: *peer, block: block.clone(), start: request.start.clone() }; let mut reputation_changes = Vec::new(); @@ -163,7 +163,7 @@ impl StateRequestHandler { }, None => { self.seen_requests.put(key.clone(), SeenRequestsValue::First); - } + }, } log::trace!( @@ -194,7 +194,8 @@ impl StateRequestHandler { &request.start, MAX_RESPONSE_BYTES, )?; - response.entries = entries.into_iter().map(|(key, value)| StateEntry { key, value }).collect(); + response.entries = + entries.into_iter().map(|(key, value)| StateEntry { key, value }).collect(); if response.entries.is_empty() { response.complete = true; } @@ -224,11 +225,9 @@ impl StateRequestHandler { Err(()) }; - pending_response.send(OutgoingResponse { - result, - reputation_changes, - sent_feedback: None, - }).map_err(|_| HandleRequestError::SendResponse) + pending_response + .send(OutgoingResponse { result, reputation_changes, sent_feedback: None }) + .map_err(|_| HandleRequestError::SendResponse) } } diff --git a/client/network/src/transactions.rs b/client/network/src/transactions.rs index 8a7dd78c834ce..e09daaa9a19b7 100644 --- a/client/network/src/transactions.rs +++ b/client/network/src/transactions.rs @@ -28,23 +28,33 @@ //! 
use crate::{ - ExHashT, Event, ObservedRole, - config::{self, ProtocolId, TransactionPool, TransactionImportFuture, TransactionImport}, - error, protocol::message, service::NetworkService, utils::{interval, LruHashSet}, + config::{self, ProtocolId, TransactionImport, TransactionImportFuture, TransactionPool}, + error, + protocol::message, + service::NetworkService, + utils::{interval, LruHashSet}, + Event, ExHashT, ObservedRole, }; use codec::{Decode, Encode}; use futures::{channel::mpsc, prelude::*, stream::FuturesUnordered}; use libp2p::{multiaddr, PeerId}; -use log::{trace, debug, warn}; -use prometheus_endpoint::{ - Registry, Counter, PrometheusError, register, U64 -}; +use log::{debug, trace, warn}; +use prometheus_endpoint::{register, Counter, PrometheusError, Registry, U64}; use sp_runtime::traits::Block as BlockT; -use std::borrow::Cow; -use std::collections::{HashMap, hash_map::Entry}; -use std::sync::{atomic::{AtomicBool, Ordering}, Arc}; -use std::{iter, num::NonZeroUsize, pin::Pin, task::Poll, time}; +use std::{ + borrow::Cow, + collections::{hash_map::Entry, HashMap}, + iter, + num::NonZeroUsize, + pin::Pin, + sync::{ + atomic::{AtomicBool, Ordering}, + Arc, + }, + task::Poll, + time, +}; /// Interval at which we propagate transactions; const PROPAGATE_TIMEOUT: time::Duration = time::Duration::from_millis(2900); @@ -84,10 +94,13 @@ struct Metrics { impl Metrics { fn register(r: &Registry) -> Result { Ok(Metrics { - propagated_transactions: register(Counter::new( - "sync_propagated_transactions", - "Number of transactions propagated to at least one peer", - )?, r)?, + propagated_transactions: register( + Counter::new( + "sync_propagated_transactions", + "Number of transactions propagated to at least one peer", + )?, + r, + )?, }) } } @@ -106,7 +119,7 @@ impl Future for PendingTransaction { let mut this = self.project(); if let Poll::Ready(import_result) = Pin::new(&mut this.validation).poll_unpin(cx) { - return Poll::Ready((this.tx_hash.clone(), 
import_result)); + return Poll::Ready((this.tx_hash.clone(), import_result)) } Poll::Pending @@ -128,7 +141,7 @@ impl TransactionsHandlerPrototype { proto.push_str(protocol_id.as_ref()); proto.push_str("/transactions/1"); proto - }) + }), } } @@ -143,7 +156,7 @@ impl TransactionsHandlerPrototype { out_peers: 0, reserved_nodes: Vec::new(), non_reserved_mode: config::NonReservedPeerMode::Deny, - } + }, } } @@ -182,10 +195,7 @@ impl TransactionsHandlerPrototype { }, }; - let controller = TransactionsHandlerController { - to_handler, - gossip_enabled, - }; + let controller = TransactionsHandlerController { to_handler, gossip_enabled }; Ok((handler, controller)) } @@ -264,7 +274,7 @@ impl TransactionsHandler { /// interrupted. pub async fn run(mut self) { loop { - futures::select!{ + futures::select! { _ = self.propagate_timeout.next().fuse() => { self.propagate_transactions(); }, @@ -301,7 +311,7 @@ impl TransactionsHandler { .collect::(); let result = self.service.add_peers_to_reserved_set( self.protocol_name.clone(), - iter::once(addr).collect() + iter::once(addr).collect(), ); if let Err(err) = result { log::error!(target: "sync", "Add reserved peer failed: {}", err); @@ -312,22 +322,30 @@ impl TransactionsHandler { .collect::(); let result = self.service.remove_peers_from_reserved_set( self.protocol_name.clone(), - iter::once(addr).collect() + iter::once(addr).collect(), ); if let Err(err) = result { log::error!(target: "sync", "Removing reserved peer failed: {}", err); } }, - Event::NotificationStreamOpened { remote, protocol, role, .. } if protocol == self.protocol_name => { - let _was_in = self.peers.insert(remote, Peer { - known_transactions: LruHashSet::new(NonZeroUsize::new(MAX_KNOWN_TRANSACTIONS) - .expect("Constant is nonzero")), - role, - }); + Event::NotificationStreamOpened { remote, protocol, role, .. 
} + if protocol == self.protocol_name => + { + let _was_in = self.peers.insert( + remote, + Peer { + known_transactions: LruHashSet::new( + NonZeroUsize::new(MAX_KNOWN_TRANSACTIONS).expect("Constant is nonzero"), + ), + role, + }, + ); debug_assert!(_was_in.is_none()); } - Event::NotificationStreamClosed { remote, protocol } if protocol == self.protocol_name => { + Event::NotificationStreamClosed { remote, protocol } + if protocol == self.protocol_name => + { let _peer = self.peers.remove(&remote); debug_assert!(_peer.is_some()); } @@ -335,7 +353,7 @@ impl TransactionsHandler { Event::NotificationsReceived { remote, messages } => { for (protocol, message) in messages { if protocol != self.protocol_name { - continue; + continue } if let Ok(m) = as Decode>::decode( @@ -349,28 +367,24 @@ impl TransactionsHandler { }, // Not our concern. - Event::NotificationStreamOpened { .. } | Event::NotificationStreamClosed { .. } => {} + Event::NotificationStreamOpened { .. } | Event::NotificationStreamClosed { .. 
} => {}, } } /// Called when peer sends us new transactions - fn on_transactions( - &mut self, - who: PeerId, - transactions: message::Transactions, - ) { + fn on_transactions(&mut self, who: PeerId, transactions: message::Transactions) { // sending transaction to light node is considered a bad behavior if matches!(self.local_role, config::Role::Light) { debug!(target: "sync", "Peer {} is trying to send transactions to the light node", who); self.service.disconnect_peer(who, self.protocol_name.clone()); self.service.report_peer(who, rep::UNEXPECTED_TRANSACTIONS); - return; + return } // Accept transactions only when enabled if !self.gossip_enabled.load(Ordering::Relaxed) { trace!(target: "sync", "{} Ignoring transactions while disabled", who); - return; + return } trace!(target: "sync", "Received {} transactions from {}", transactions.len(), who); @@ -382,7 +396,7 @@ impl TransactionsHandler { "Ignoring any further transactions that exceed `MAX_PENDING_TRANSACTIONS`({}) limit", MAX_PENDING_TRANSACTIONS, ); - break; + break } let hash = self.transaction_pool.hash_of(&t); @@ -400,7 +414,7 @@ impl TransactionsHandler { }, Entry::Occupied(mut entry) => { entry.get_mut().push(who.clone()); - } + }, } } } @@ -408,7 +422,8 @@ impl TransactionsHandler { fn on_handle_transaction_import(&mut self, who: PeerId, import: TransactionImport) { match import { - TransactionImport::KnownGood => self.service.report_peer(who, rep::ANY_TRANSACTION_REFUND), + TransactionImport::KnownGood => + self.service.report_peer(who, rep::ANY_TRANSACTION_REFUND), TransactionImport::NewGood => self.service.report_peer(who, rep::GOOD_TRANSACTION), TransactionImport::Bad => self.service.report_peer(who, rep::BAD_TRANSACTION), TransactionImport::None => {}, @@ -416,14 +431,11 @@ impl TransactionsHandler { } /// Propagate one transaction. 
- pub fn propagate_transaction( - &mut self, - hash: &H, - ) { + pub fn propagate_transaction(&mut self, hash: &H) { debug!(target: "sync", "Propagating transaction [{:?}]", hash); // Accept transactions only when enabled if !self.gossip_enabled.load(Ordering::Relaxed) { - return; + return } if let Some(transaction) = self.transaction_pool.transaction(hash) { let propagated_to = self.do_propagate_transactions(&[(hash.clone(), transaction)]); @@ -441,7 +453,7 @@ impl TransactionsHandler { for (who, peer) in self.peers.iter_mut() { // never send transactions to the light node if matches!(peer.role, ObservedRole::Light) { - continue; + continue } let (hashes, to_send): (Vec<_>, Vec<_>) = transactions @@ -454,16 +466,13 @@ impl TransactionsHandler { if !to_send.is_empty() { for hash in hashes { - propagated_to - .entry(hash) - .or_default() - .push(who.to_base58()); + propagated_to.entry(hash).or_default().push(who.to_base58()); } trace!(target: "sync", "Sending {} transactions to {}", to_send.len(), who); self.service.write_notification( who.clone(), self.protocol_name.clone(), - to_send.encode() + to_send.encode(), ); } } @@ -479,7 +488,7 @@ impl TransactionsHandler { fn propagate_transactions(&mut self) { // Accept transactions only when enabled if !self.gossip_enabled.load(Ordering::Relaxed) { - return; + return } debug!(target: "sync", "Propagating transactions"); let transactions = self.transaction_pool.transactions(); diff --git a/client/network/src/transport.rs b/client/network/src/transport.rs index ab587e01a875b..710d4775993b0 100644 --- a/client/network/src/transport.rs +++ b/client/network/src/transport.rs @@ -17,15 +17,18 @@ // along with this program. If not, see . 
use libp2p::{ - PeerId, Transport, + bandwidth, core::{ - self, either::EitherTransport, muxing::StreamMuxerBox, - transport::{Boxed, OptionalTransport}, upgrade + self, + either::EitherTransport, + muxing::StreamMuxerBox, + transport::{Boxed, OptionalTransport}, + upgrade, }, - mplex, identity, bandwidth, wasm_ext, noise + identity, mplex, noise, wasm_ext, PeerId, Transport, }; #[cfg(not(target_os = "unknown"))] -use libp2p::{tcp, dns, websocket}; +use libp2p::{dns, tcp, websocket}; use std::{sync::Arc, time::Duration}; pub use self::bandwidth::BandwidthSinks; @@ -61,8 +64,8 @@ pub fn build_transport( #[cfg(not(target_os = "unknown"))] let transport = transport.or_transport(if !memory_only { let desktop_trans = tcp::TcpConfig::new().nodelay(true); - let desktop_trans = websocket::WsConfig::new(desktop_trans.clone()) - .or_transport(desktop_trans); + let desktop_trans = + websocket::WsConfig::new(desktop_trans.clone()).or_transport(desktop_trans); let dns_init = futures::executor::block_on(dns::DnsConfig::system(desktop_trans.clone())); OptionalTransport::some(if let Ok(dns) = dns_init { EitherTransport::Left(dns) @@ -81,23 +84,24 @@ pub fn build_transport( let (transport, bandwidth) = bandwidth::BandwidthLogging::new(transport); - let authentication_config = { - // For more information about these two panics, see in "On the Importance of - // Checking Cryptographic Protocols for Faults" by Dan Boneh, Richard A. DeMillo, - // and Richard J. Lipton. - let noise_keypair = noise::Keypair::::new().into_authentic(&keypair) + let authentication_config = + { + // For more information about these two panics, see in "On the Importance of + // Checking Cryptographic Protocols for Faults" by Dan Boneh, Richard A. DeMillo, + // and Richard J. Lipton. 
+ let noise_keypair = noise::Keypair::::new().into_authentic(&keypair) .expect("can only fail in case of a hardware bug; since this signing is performed only \ once and at initialization, we're taking the bet that the inconvenience of a very \ rare panic here is basically zero"); - // Legacy noise configurations for backward compatibility. - let mut noise_legacy = noise::LegacyConfig::default(); - noise_legacy.recv_legacy_handshake = true; + // Legacy noise configurations for backward compatibility. + let mut noise_legacy = noise::LegacyConfig::default(); + noise_legacy.recv_legacy_handshake = true; - let mut xx_config = noise::NoiseConfig::xx(noise_keypair); - xx_config.set_legacy_config(noise_legacy.clone()); - xx_config.into_authenticated() - }; + let mut xx_config = noise::NoiseConfig::xx(noise_keypair); + xx_config.set_legacy_config(noise_legacy.clone()); + xx_config.into_authenticated() + }; let multiplexing_config = { let mut mplex_config = mplex::MplexConfig::new(); @@ -117,7 +121,8 @@ pub fn build_transport( core::upgrade::SelectUpgrade::new(yamux_config, mplex_config) }; - let transport = transport.upgrade(upgrade::Version::V1Lazy) + let transport = transport + .upgrade(upgrade::Version::V1Lazy) .authenticate(authentication_config) .multiplex(multiplexing_config) .timeout(Duration::from_secs(20)) diff --git a/client/network/src/utils.rs b/client/network/src/utils.rs index 02673ef49fb4c..b23b7e0c101e0 100644 --- a/client/network/src/utils.rs +++ b/client/network/src/utils.rs @@ -19,8 +19,7 @@ use futures::{stream::unfold, FutureExt, Stream, StreamExt}; use futures_timer::Delay; use linked_hash_set::LinkedHashSet; -use std::time::Duration; -use std::{hash::Hash, num::NonZeroUsize}; +use std::{hash::Hash, num::NonZeroUsize, time::Duration}; /// Creates a stream that returns a new value every `duration`. 
pub fn interval(duration: Duration) -> impl Stream + Unpin { @@ -39,10 +38,7 @@ pub struct LruHashSet { impl LruHashSet { /// Create a new `LruHashSet` with the given (exclusive) limit. pub fn new(limit: NonZeroUsize) -> Self { - Self { - set: LinkedHashSet::new(), - limit, - } + Self { set: LinkedHashSet::new(), limit } } /// Insert element into the set. @@ -55,7 +51,7 @@ impl LruHashSet { if self.set.len() == usize::from(self.limit) { self.set.pop_front(); // remove oldest entry } - return true; + return true } false } diff --git a/client/network/test/src/block_import.rs b/client/network/test/src/block_import.rs index 6d3ceb4a933d8..4593e06250d36 100644 --- a/client/network/test/src/block_import.rs +++ b/client/network/test/src/block_import.rs @@ -18,16 +18,21 @@ //! Testing block import logic. -use sp_consensus::ImportedAux; -use sp_consensus::import_queue::{ - import_single_block, BasicQueue, BlockImportError, BlockImportResult, IncomingBlock, +use super::*; +use futures::executor::block_on; +use sc_block_builder::BlockBuilderProvider; +use sp_consensus::{ + import_queue::{ + import_single_block, BasicQueue, BlockImportError, BlockImportResult, IncomingBlock, + }, + ImportedAux, }; -use substrate_test_runtime_client::{self, prelude::*}; -use substrate_test_runtime_client::runtime::{Block, Hash}; use sp_runtime::generic::BlockId; -use sc_block_builder::BlockBuilderProvider; -use futures::executor::block_on; -use super::*; +use substrate_test_runtime_client::{ + self, + prelude::*, + runtime::{Block, Hash}, +}; fn prepare_good_block() -> (TestClient, Hash, u64, PeerId, IncomingBlock) { let mut client = substrate_test_runtime_client::new(); @@ -38,18 +43,24 @@ fn prepare_good_block() -> (TestClient, Hash, u64, PeerId, IncomingBlock) let header = client.header(&BlockId::Number(1)).unwrap(); let justifications = client.justifications(&BlockId::Number(1)).unwrap(); let peer_id = PeerId::random(); - (client, hash, number, peer_id.clone(), IncomingBlock { + ( + client, 
hash, - header, - body: Some(Vec::new()), - indexed_body: None, - justifications, - origin: Some(peer_id.clone()), - allow_missing_state: false, - import_existing: false, - state: None, - skip_execution: false, - }) + number, + peer_id.clone(), + IncomingBlock { + hash, + header, + body: Some(Vec::new()), + indexed_body: None, + justifications, + origin: Some(peer_id.clone()), + allow_missing_state: false, + import_existing: false, + state: None, + skip_execution: false, + }, + ) } #[test] @@ -63,11 +74,11 @@ fn import_single_good_block_works() { &mut substrate_test_runtime_client::new(), BlockOrigin::File, block, - &mut PassThroughVerifier::new(true) + &mut PassThroughVerifier::new(true), )) { Ok(BlockImportResult::ImportedUnknown(ref num, ref aux, ref org)) - if *num == number && *aux == expected_aux && *org == Some(peer_id) => {} - r @ _ => panic!("{:?}", r) + if *num == number && *aux == expected_aux && *org == Some(peer_id) => {}, + r @ _ => panic!("{:?}", r), } } @@ -78,10 +89,10 @@ fn import_single_good_known_block_is_ignored() { &mut client, BlockOrigin::File, block, - &mut PassThroughVerifier::new(true) + &mut PassThroughVerifier::new(true), )) { - Ok(BlockImportResult::ImportedKnown(ref n, _)) if *n == number => {} - _ => panic!() + Ok(BlockImportResult::ImportedKnown(ref n, _)) if *n == number => {}, + _ => panic!(), } } @@ -93,10 +104,10 @@ fn import_single_good_block_without_header_fails() { &mut substrate_test_runtime_client::new(), BlockOrigin::File, block, - &mut PassThroughVerifier::new(true) + &mut PassThroughVerifier::new(true), )) { - Err(BlockImportError::IncompleteHeader(ref org)) if *org == Some(peer_id) => {} - _ => panic!() + Err(BlockImportError::IncompleteHeader(ref org)) if *org == Some(peer_id) => {}, + _ => panic!(), } } diff --git a/client/network/test/src/lib.rs b/client/network/test/src/lib.rs index 900e05e26a78f..0bdaa0d14e4fb 100644 --- a/client/network/test/src/lib.rs +++ b/client/network/test/src/lib.rs @@ -23,52 +23,58 @@ mod 
block_import; mod sync; use std::{ - borrow::Cow, collections::HashMap, pin::Pin, sync::Arc, task::{Poll, Context as FutureContext} + borrow::Cow, + collections::HashMap, + pin::Pin, + sync::Arc, + task::{Context as FutureContext, Poll}, }; -use libp2p::build_multiaddr; +use futures::{future::BoxFuture, prelude::*}; +use libp2p::{build_multiaddr, PeerId}; use log::trace; -use sc_network::block_request_handler::{self, BlockRequestHandler}; -use sc_network::state_request_handler::{self, StateRequestHandler}; -use sc_network::light_client_requests::{self, handler::LightClientRequestHandler}; -use sp_blockchain::{ - HeaderBackend, Result as ClientResult, - well_known_cache_keys::{self, Id as CacheKeyId}, - Info as BlockchainInfo, -}; +use parking_lot::Mutex; +use sc_block_builder::{BlockBuilder, BlockBuilderProvider}; use sc_client_api::{ - BlockchainEvents, BlockImportNotification, FinalityNotifications, ImportNotifications, FinalityNotification, - backend::{TransactionFor, AuxStore, Backend, Finalizer}, BlockBackend, + backend::{AuxStore, Backend, Finalizer, TransactionFor}, + BlockBackend, BlockImportNotification, BlockchainEvents, FinalityNotification, + FinalityNotifications, ImportNotifications, }; use sc_consensus::LongestChain; -use sc_block_builder::{BlockBuilder, BlockBuilderProvider}; -use sc_network::config::Role; -use sp_consensus::block_validation::{DefaultBlockAnnounceValidator, BlockAnnounceValidator}; -use sp_consensus::import_queue::{ - BasicQueue, BoxJustificationImport, Verifier, -}; -use sp_consensus::block_import::{BlockImport, ImportResult}; -use sp_consensus::Error as ConsensusError; -use sp_consensus::{BlockOrigin, ForkChoiceStrategy, BlockImportParams, BlockCheckParams, JustificationImport}; -use futures::prelude::*; -use futures::future::BoxFuture; +pub use sc_network::config::EmptyTransactionPool; use sc_network::{ - NetworkWorker, NetworkService, config::{ProtocolId, MultiaddrWithPeerId, NonReservedPeerMode}, - Multiaddr, + 
block_request_handler::{self, BlockRequestHandler}, + config::{ + MultiaddrWithPeerId, NetworkConfiguration, NonDefaultSetConfig, NonReservedPeerMode, + ProtocolConfig, ProtocolId, Role, SyncMode, TransportConfig, + }, + light_client_requests::{self, handler::LightClientRequestHandler}, + state_request_handler::{self, StateRequestHandler}, + Multiaddr, NetworkService, NetworkWorker, +}; +use sc_service::client::Client; +use sp_blockchain::{ + well_known_cache_keys::{self, Id as CacheKeyId}, + HeaderBackend, Info as BlockchainInfo, Result as ClientResult, +}; +use sp_consensus::{ + block_import::{BlockImport, ImportResult}, + block_validation::{BlockAnnounceValidator, DefaultBlockAnnounceValidator}, + import_queue::{BasicQueue, BoxJustificationImport, Verifier}, + BlockCheckParams, BlockImportParams, BlockOrigin, Error as ConsensusError, ForkChoiceStrategy, + JustificationImport, }; -use sc_network::config::{NetworkConfiguration, NonDefaultSetConfig, TransportConfig, SyncMode}; -use libp2p::PeerId; -use parking_lot::Mutex; use sp_core::H256; -use sc_network::config::ProtocolConfig; -use sp_runtime::generic::{BlockId, OpaqueDigestItemId}; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor}; -use sp_runtime::{Justification, Justifications}; +use sp_runtime::{ + generic::{BlockId, OpaqueDigestItemId}, + traits::{Block as BlockT, Header as HeaderT, NumberFor}, + Justification, Justifications, +}; use substrate_test_runtime_client::AccountKeyring; -use sc_service::client::Client; -pub use sc_network::config::EmptyTransactionPool; -pub use substrate_test_runtime_client::runtime::{Block, Extrinsic, Hash, Transfer}; -pub use substrate_test_runtime_client::{TestClient, TestClientBuilder, TestClientBuilderExt}; +pub use substrate_test_runtime_client::{ + runtime::{Block, Extrinsic, Hash, Transfer}, + TestClient, TestClientBuilder, TestClientBuilderExt, +}; type AuthorityId = sp_consensus_babe::AuthorityId; @@ -85,10 +91,7 @@ impl PassThroughVerifier { /// 
/// Every verified block will use `finalized` for the `BlockImportParams`. pub fn new(finalized: bool) -> Self { - Self { - finalized, - fork_choice: ForkChoiceStrategy::LongestChain, - } + Self { finalized, fork_choice: ForkChoiceStrategy::LongestChain } } /// Create a new instance. @@ -96,10 +99,7 @@ impl PassThroughVerifier { /// Every verified block will use `finalized` for the `BlockImportParams` and /// the given [`ForkChoiceStrategy`]. pub fn new_with_fork_choice(finalized: bool, fork_choice: ForkChoiceStrategy) -> Self { - Self { - finalized, - fork_choice, - } + Self { finalized, fork_choice } } } @@ -111,12 +111,14 @@ impl Verifier for PassThroughVerifier { origin: BlockOrigin, header: B::Header, justifications: Option, - body: Option> + body: Option>, ) -> Result<(BlockImportParams, Option)>>), String> { - let maybe_keys = header.digest() - .log(|l| l.try_as_raw(OpaqueDigestItemId::Consensus(b"aura")) - .or_else(|| l.try_as_raw(OpaqueDigestItemId::Consensus(b"babe"))) - ) + let maybe_keys = header + .digest() + .log(|l| { + l.try_as_raw(OpaqueDigestItemId::Consensus(b"aura")) + .or_else(|| l.try_as_raw(OpaqueDigestItemId::Consensus(b"babe"))) + }) .map(|blob| vec![(well_known_cache_keys::AUTHORITIES, blob.to_vec())]); let mut import = BlockImportParams::new(origin, header); import.body = body; @@ -132,13 +134,13 @@ pub type PeersFullClient = Client< substrate_test_runtime_client::Backend, substrate_test_runtime_client::Executor, Block, - substrate_test_runtime_client::runtime::RuntimeApi + substrate_test_runtime_client::runtime::RuntimeApi, >; pub type PeersLightClient = Client< substrate_test_runtime_client::LightBackend, substrate_test_runtime_client::LightExecutor, Block, - substrate_test_runtime_client::runtime::RuntimeApi + substrate_test_runtime_client::runtime::RuntimeApi, >; #[derive(Clone)] @@ -173,7 +175,10 @@ impl PeersClient { } } - pub fn header(&self, block: &BlockId) -> ClientResult::Header>> { + pub fn header( + &self, + block: &BlockId, 
+ ) -> ClientResult::Header>> { match *self { PeersClient::Full(ref client, ref _backend) => client.header(block), PeersClient::Light(ref client, ref _backend) => client.header(block), @@ -207,7 +212,7 @@ impl PeersClient { } } - pub fn import_notification_stream(&self) -> ImportNotifications{ + pub fn import_notification_stream(&self) -> ImportNotifications { match *self { PeersClient::Full(ref client, ref _backend) => client.import_notification_stream(), PeersClient::Light(ref client, ref _backend) => client.import_notification_stream(), @@ -218,11 +223,13 @@ impl PeersClient { &self, id: BlockId, justification: Option, - notify: bool + notify: bool, ) -> ClientResult<()> { match *self { - PeersClient::Full(ref client, ref _backend) => client.finalize_block(id, justification, notify), - PeersClient::Light(ref client, ref _backend) => client.finalize_block(id, justification, notify), + PeersClient::Full(ref client, ref _backend) => + client.finalize_block(id, justification, notify), + PeersClient::Light(ref client, ref _backend) => + client.finalize_block(id, justification, notify), } } } @@ -273,7 +280,8 @@ pub struct Peer { listen_addr: Multiaddr, } -impl Peer where +impl Peer +where B: BlockImport + Send + Sync, B::Transaction: Send, { @@ -288,7 +296,9 @@ impl Peer where } // Returns a clone of the local SelectChain, only available on full nodes - pub fn select_chain(&self) -> Option> { + pub fn select_chain( + &self, + ) -> Option> { self.select_chain.clone() } @@ -328,17 +338,22 @@ impl Peer where } /// Add blocks to the peer -- edit the block before adding - pub fn generate_blocks( - &mut self, - count: usize, - origin: BlockOrigin, - edit_block: F, - ) -> H256 - where - F: FnMut(BlockBuilder) -> Block + pub fn generate_blocks(&mut self, count: usize, origin: BlockOrigin, edit_block: F) -> H256 + where + F: FnMut( + BlockBuilder, + ) -> Block, { let best_hash = self.client.info().best_hash; - self.generate_blocks_at(BlockId::Hash(best_hash), count, origin, 
edit_block, false, true, true) + self.generate_blocks_at( + BlockId::Hash(best_hash), + count, + origin, + edit_block, + false, + true, + true, + ) } /// Add blocks to the peer -- edit the block before adding. The chain will @@ -352,16 +367,18 @@ impl Peer where headers_only: bool, inform_sync_about_new_best_block: bool, announce_block: bool, - ) -> H256 where F: FnMut(BlockBuilder) -> Block { - let full_client = self.client.as_full() - .expect("blocks could only be generated by full clients"); + ) -> H256 + where + F: FnMut( + BlockBuilder, + ) -> Block, + { + let full_client = + self.client.as_full().expect("blocks could only be generated by full clients"); let mut at = full_client.header(&at).unwrap().unwrap().hash(); - for _ in 0..count { - let builder = full_client.new_block_at( - &BlockId::Hash(at), - Default::default(), - false, - ).unwrap(); + for _ in 0..count { + let builder = + full_client.new_block_at(&BlockId::Hash(at), Default::default(), false).unwrap(); let block = edit_block(builder); let hash = block.header.hash(); trace!( @@ -377,16 +394,16 @@ impl Peer where header.clone(), None, if headers_only { None } else { Some(block.extrinsics) }, - )).unwrap(); + )) + .unwrap(); let cache = if let Some(cache) = cache { cache.into_iter().collect() } else { Default::default() }; - futures::executor::block_on( - self.block_import.import_block(import_block, cache) - ).expect("block_import failed"); + futures::executor::block_on(self.block_import.import_block(import_block, cache)) + .expect("block_import failed"); if announce_block { self.network.service().announce_block(hash, None); } @@ -458,7 +475,8 @@ impl Peer where self.generate_blocks_at( at, count, - BlockOrigin::File, |mut builder| { + BlockOrigin::File, + |mut builder| { let transfer = Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Alice.into(), @@ -520,9 +538,10 @@ impl Peer where /// Count the total number of imported blocks. 
pub fn blocks_count(&self) -> u64 { - self.backend.as_ref().map( - |backend| backend.blockchain().info().best_number - ).unwrap_or(0) + self.backend + .as_ref() + .map(|backend| backend.blockchain().info().best_number) + .unwrap_or(0) } /// Return a collection of block hashes that failed verification @@ -531,9 +550,10 @@ impl Peer where } pub fn has_block(&self, hash: &H256) -> bool { - self.backend.as_ref().map( - |backend| backend.blockchain().header(BlockId::hash(*hash)).unwrap().is_some() - ).unwrap_or(false) + self.backend + .as_ref() + .map(|backend| backend.blockchain().header(BlockId::hash(*hash)).unwrap().is_some()) + .unwrap_or(false) } } @@ -542,22 +562,22 @@ pub trait BlockImportAdapterFull: Block, Transaction = TransactionFor, Error = ConsensusError, - > + - Send + - Sync + - Clone -{} + > + Send + + Sync + + Clone +{ +} impl BlockImportAdapterFull for T where T: BlockImport< - Block, - Transaction = TransactionFor, - Error = ConsensusError, - > + - Send + - Sync + - Clone -{} + Block, + Transaction = TransactionFor, + Error = ConsensusError, + > + Send + + Sync + + Clone +{ +} /// Implements `BlockImport` for any `Transaction`. Internally the transaction is /// "converted", aka the field is set to `None`. @@ -572,14 +592,13 @@ pub struct BlockImportAdapter { impl BlockImportAdapter { /// Create a new instance of `Self::Full`. 
pub fn new(inner: I) -> Self { - Self { - inner, - } + Self { inner } } } #[async_trait::async_trait] -impl BlockImport for BlockImportAdapter where +impl BlockImport for BlockImportAdapter +where I: BlockImport + Send + Sync, I::Transaction: Send, { @@ -615,13 +634,18 @@ impl Verifier for VerifierAdapter { origin: BlockOrigin, header: B::Header, justifications: Option, - body: Option> + body: Option>, ) -> Result<(BlockImportParams, Option)>>), String> { let hash = header.hash(); - self.verifier.lock().await.verify(origin, header, justifications, body).await.map_err(|e| { - self.failed_verifications.lock().insert(hash, e.clone()); - e - }) + self.verifier + .lock() + .await + .verify(origin, header, justifications, body) + .await + .map_err(|e| { + self.failed_verifications.lock().insert(hash, e.clone()); + e + }) } } @@ -664,7 +688,10 @@ pub struct FullPeerConfig { pub storage_chain: bool, } -pub trait TestNetFactory: Sized where >::Transaction: Send { +pub trait TestNetFactory: Sized +where + >::Transaction: Send, +{ type Verifier: 'static + Verifier; type BlockImport: BlockImport + Clone + Send + Sync + 'static; type PeerData: Default; @@ -687,12 +714,14 @@ pub trait TestNetFactory: Sized where >: ); /// Get custom block import handle for fresh client, along with peer data. - fn make_block_import(&self, client: PeersClient) - -> ( - BlockImportAdapter, - Option>, - Self::PeerData, - ); + fn make_block_import( + &self, + client: PeersClient, + ) -> ( + BlockImportAdapter, + Option>, + Self::PeerData, + ); fn default_config() -> ProtocolConfig { ProtocolConfig::default() @@ -723,18 +752,15 @@ pub trait TestNetFactory: Sized where >: (Some(keep_blocks), false) => TestClientBuilder::with_pruning_window(keep_blocks), (None, false) => TestClientBuilder::with_default_backend(), }; - if matches!(config.sync_mode, SyncMode::Fast{..}) { + if matches!(config.sync_mode, SyncMode::Fast { .. 
}) { test_client_builder = test_client_builder.set_no_genesis(); } let backend = test_client_builder.backend(); let (c, longest_chain) = test_client_builder.build_with_longest_chain(); let client = Arc::new(c); - let ( - block_import, - justification_import, - data, - ) = self.make_block_import(PeersClient::Full(client.clone(), backend.clone())); + let (block_import, justification_import, data) = + self.make_block_import(PeersClient::Full(client.clone(), backend.clone())); let verifier = self.make_verifier( PeersClient::Full(client.clone(), backend.clone()), @@ -753,30 +779,31 @@ pub trait TestNetFactory: Sized where >: let listen_addr = build_multiaddr![Memory(rand::random::())]; - let mut network_config = NetworkConfiguration::new( - "test-node", - "test-client", - Default::default(), - None, - ); + let mut network_config = + NetworkConfiguration::new("test-node", "test-client", Default::default(), None); network_config.sync_mode = config.sync_mode; network_config.transport = TransportConfig::MemoryOnly; network_config.listen_addresses = vec![listen_addr.clone()]; network_config.allow_non_globals_in_dht = true; - network_config.extra_sets = config.notifications_protocols.into_iter().map(|p| { - NonDefaultSetConfig { + network_config.extra_sets = config + .notifications_protocols + .into_iter() + .map(|p| NonDefaultSetConfig { notifications_protocol: p, fallback_names: Vec::new(), max_notification_size: 1024 * 1024, - set_config: Default::default() - } - }).collect(); + set_config: Default::default(), + }) + .collect(); if let Some(connect_to) = config.connect_to_peers { - let addrs = connect_to.iter().map(|v| { - let peer_id = self.peer(*v).network_service().local_peer_id().clone(); - let multiaddr = self.peer(*v).listen_addr.clone(); - MultiaddrWithPeerId { peer_id, multiaddr } - }).collect(); + let addrs = connect_to + .iter() + .map(|v| { + let peer_id = self.peer(*v).network_service().local_peer_id().clone(); + let multiaddr = 
self.peer(*v).listen_addr.clone(); + MultiaddrWithPeerId { peer_id, multiaddr } + }) + .collect(); network_config.default_peers_set.reserved_nodes = addrs; network_config.default_peers_set.non_reserved_mode = NonReservedPeerMode::Deny; } @@ -784,27 +811,22 @@ pub trait TestNetFactory: Sized where >: let protocol_id = ProtocolId::from("test-protocol-name"); let block_request_protocol_config = { - let (handler, protocol_config) = BlockRequestHandler::new( - &protocol_id, - client.clone(), - 50, - ); + let (handler, protocol_config) = + BlockRequestHandler::new(&protocol_id, client.clone(), 50); self.spawn_task(handler.run().boxed()); protocol_config }; let state_request_protocol_config = { - let (handler, protocol_config) = StateRequestHandler::new( - &protocol_id, - client.clone(), - 50, - ); + let (handler, protocol_config) = + StateRequestHandler::new(&protocol_id, client.clone(), 50); self.spawn_task(handler.run().boxed()); protocol_config }; let light_client_request_protocol_config = { - let (handler, protocol_config) = LightClientRequestHandler::new(&protocol_id, client.clone()); + let (handler, protocol_config) = + LightClientRequestHandler::new(&protocol_id, client.clone()); self.spawn_task(handler.run().boxed()); protocol_config }; @@ -812,20 +834,24 @@ pub trait TestNetFactory: Sized where >: let network = NetworkWorker::new(sc_network::config::Params { role: if config.is_authority { Role::Authority } else { Role::Full }, executor: None, - transactions_handler_executor: Box::new(|task| { async_std::task::spawn(task); }), + transactions_handler_executor: Box::new(|task| { + async_std::task::spawn(task); + }), network_config, chain: client.clone(), on_demand: None, transaction_pool: Arc::new(EmptyTransactionPool), protocol_id, import_queue, - block_announce_validator: config.block_announce_validator + block_announce_validator: config + .block_announce_validator .unwrap_or_else(|| Box::new(DefaultBlockAnnounceValidator)), metrics_registry: None, 
block_request_protocol_config, state_request_protocol_config, light_client_request_protocol_config, - }).unwrap(); + }) + .unwrap(); trace!(target: "test_network", "Peer identifier: {}", network.service().local_peer_id()); @@ -838,7 +864,8 @@ pub trait TestNetFactory: Sized where >: } let imported_blocks_stream = Box::pin(client.import_notification_stream().fuse()); - let finality_notification_stream = Box::pin(client.finality_notification_stream().fuse()); + let finality_notification_stream = + Box::pin(client.finality_notification_stream().fuse()); peers.push(Peer { data, @@ -859,11 +886,8 @@ pub trait TestNetFactory: Sized where >: fn add_light_peer(&mut self) { let (c, backend) = substrate_test_runtime_client::new_light(); let client = Arc::new(c); - let ( - block_import, - justification_import, - data, - ) = self.make_block_import(PeersClient::Light(client.clone(), backend.clone())); + let (block_import, justification_import, data) = + self.make_block_import(PeersClient::Light(client.clone(), backend.clone())); let verifier = self.make_verifier( PeersClient::Light(client.clone(), backend.clone()), @@ -882,24 +906,18 @@ pub trait TestNetFactory: Sized where >: let listen_addr = build_multiaddr![Memory(rand::random::())]; - let mut network_config = NetworkConfiguration::new( - "test-node", - "test-client", - Default::default(), - None, - ); + let mut network_config = + NetworkConfiguration::new("test-node", "test-client", Default::default(), None); network_config.transport = TransportConfig::MemoryOnly; network_config.listen_addresses = vec![listen_addr.clone()]; network_config.allow_non_globals_in_dht = true; let protocol_id = ProtocolId::from("test-protocol-name"); - let block_request_protocol_config = block_request_handler::generate_protocol_config( - &protocol_id, - ); - let state_request_protocol_config = state_request_handler::generate_protocol_config( - &protocol_id, - ); + let block_request_protocol_config = + 
block_request_handler::generate_protocol_config(&protocol_id); + let state_request_protocol_config = + state_request_handler::generate_protocol_config(&protocol_id); let light_client_request_protocol_config = light_client_requests::generate_protocol_config(&protocol_id); @@ -907,7 +925,9 @@ pub trait TestNetFactory: Sized where >: let network = NetworkWorker::new(sc_network::config::Params { role: Role::Light, executor: None, - transactions_handler_executor: Box::new(|task| { async_std::task::spawn(task); }), + transactions_handler_executor: Box::new(|task| { + async_std::task::spawn(task); + }), network_config, chain: client.clone(), on_demand: None, @@ -919,15 +939,20 @@ pub trait TestNetFactory: Sized where >: block_request_protocol_config, state_request_protocol_config, light_client_request_protocol_config, - }).unwrap(); + }) + .unwrap(); self.mut_peers(|peers| { for peer in peers.iter_mut() { - peer.network.add_known_address(network.service().local_peer_id().clone(), listen_addr.clone()); + peer.network.add_known_address( + network.service().local_peer_id().clone(), + listen_addr.clone(), + ); } let imported_blocks_stream = Box::pin(client.import_notification_stream().fuse()); - let finality_notification_stream = Box::pin(client.finality_notification_stream().fuse()); + let finality_notification_stream = + Box::pin(client.finality_notification_stream().fuse()); peers.push(Peer { data, @@ -967,7 +992,7 @@ pub trait TestNetFactory: Sized where >: match (highest, peer.client.info().best_hash) { (None, b) => highest = Some(b), (Some(ref a), ref b) if a == b => {}, - (Some(_), _) => return Poll::Pending + (Some(_), _) => return Poll::Pending, } } Poll::Ready(()) @@ -1008,23 +1033,27 @@ pub trait TestNetFactory: Sized where >: /// /// Calls `poll_until_sync` repeatedly. 
fn block_until_sync(&mut self) { - futures::executor::block_on(futures::future::poll_fn::<(), _>(|cx| self.poll_until_sync(cx))); + futures::executor::block_on(futures::future::poll_fn::<(), _>(|cx| { + self.poll_until_sync(cx) + })); } /// Blocks the current thread until there are no pending packets. /// /// Calls `poll_until_idle` repeatedly with the runtime passed as parameter. fn block_until_idle(&mut self) { - futures::executor::block_on(futures::future::poll_fn::<(), _>(|cx| self.poll_until_idle(cx))); + futures::executor::block_on(futures::future::poll_fn::<(), _>(|cx| { + self.poll_until_idle(cx) + })); } /// Blocks the current thread until all peers are connected to each other. /// /// Calls `poll_until_connected` repeatedly with the runtime passed as parameter. fn block_until_connected(&mut self) { - futures::executor::block_on( - futures::future::poll_fn::<(), _>(|cx| self.poll_until_connected(cx)), - ); + futures::executor::block_on(futures::future::poll_fn::<(), _>(|cx| { + self.poll_until_connected(cx) + })); } /// Polls the testnet. Processes all the pending actions. @@ -1038,13 +1067,17 @@ pub trait TestNetFactory: Sized where >: trace!(target: "sync", "-- Polling complete {}: {}", i, peer.id()); // We poll `imported_blocks_stream`. - while let Poll::Ready(Some(notification)) = peer.imported_blocks_stream.as_mut().poll_next(cx) { + while let Poll::Ready(Some(notification)) = + peer.imported_blocks_stream.as_mut().poll_next(cx) + { peer.network.service().announce_block(notification.hash, None); } // We poll `finality_notification_stream`, but we only take the last event. 
let mut last = None; - while let Poll::Ready(Some(item)) = peer.finality_notification_stream.as_mut().poll_next(cx) { + while let Poll::Ready(Some(item)) = + peer.finality_notification_stream.as_mut().poll_next(cx) + { last = Some(item); } if let Some(notification) = last { @@ -1063,10 +1096,7 @@ pub struct TestNet { impl TestNet { /// Create a `TestNet` that used the given fork choice rule. pub fn with_fork_choice(fork_choice: ForkChoiceStrategy) -> Self { - Self { - peers: Vec::new(), - fork_choice, - } + Self { peers: Vec::new(), fork_choice } } } @@ -1077,25 +1107,26 @@ impl TestNetFactory for TestNet { /// Create new test network with peers and given config. fn from_config(_config: &ProtocolConfig) -> Self { - TestNet { - peers: Vec::new(), - fork_choice: ForkChoiceStrategy::LongestChain, - } + TestNet { peers: Vec::new(), fork_choice: ForkChoiceStrategy::LongestChain } } - fn make_verifier(&self, _client: PeersClient, _config: &ProtocolConfig, _peer_data: &()) - -> Self::Verifier - { + fn make_verifier( + &self, + _client: PeersClient, + _config: &ProtocolConfig, + _peer_data: &(), + ) -> Self::Verifier { PassThroughVerifier::new_with_fork_choice(false, self.fork_choice.clone()) } - fn make_block_import(&self, client: PeersClient) - -> ( - BlockImportAdapter, - Option>, - Self::PeerData, - ) - { + fn make_block_import( + &self, + client: PeersClient, + ) -> ( + BlockImportAdapter, + Option>, + Self::PeerData, + ) { (client.as_block_import(), None, ()) } @@ -1128,7 +1159,8 @@ impl JustificationImport for ForceFinalized { _number: NumberFor, justification: Justification, ) -> Result<(), Self::Error> { - self.0.finalize_block(BlockId::Hash(hash), Some(justification), true) + self.0 + .finalize_block(BlockId::Hash(hash), Some(justification), true) .map_err(|_| ConsensusError::InvalidJustification.into()) } } @@ -1144,7 +1176,12 @@ impl TestNetFactory for JustificationTestNet { JustificationTestNet(TestNet::from_config(config)) } - fn make_verifier(&self, client: 
PeersClient, config: &ProtocolConfig, peer_data: &()) -> Self::Verifier { + fn make_verifier( + &self, + client: PeersClient, + config: &ProtocolConfig, + peer_data: &(), + ) -> Self::Verifier { self.0.make_verifier(client, config, peer_data) } @@ -1156,23 +1193,21 @@ impl TestNetFactory for JustificationTestNet { self.0.peers() } - fn mut_peers>, - )>(&mut self, closure: F) { + fn mut_peers>)>( + &mut self, + closure: F, + ) { self.0.mut_peers(closure) } - fn make_block_import(&self, client: PeersClient) - -> ( - BlockImportAdapter, - Option>, - Self::PeerData, - ) - { - ( - client.as_block_import(), - Some(Box::new(ForceFinalized(client))), - Default::default(), - ) + fn make_block_import( + &self, + client: PeersClient, + ) -> ( + BlockImportAdapter, + Option>, + Self::PeerData, + ) { + (client.as_block_import(), Some(Box::new(ForceFinalized(client))), Default::default()) } } diff --git a/client/network/test/src/sync.rs b/client/network/test/src/sync.rs index f998c9ebde757..153a0f905bff7 100644 --- a/client/network/test/src/sync.rs +++ b/client/network/test/src/sync.rs @@ -16,13 +16,12 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use sp_consensus::BlockOrigin; -use std::time::Duration; -use futures::{Future, executor::block_on}; use super::*; -use sp_consensus::block_validation::Validation; -use substrate_test_runtime::Header; +use futures::{executor::block_on, Future}; +use sp_consensus::{block_validation::Validation, BlockOrigin}; use sp_runtime::Justifications; +use std::time::Duration; +use substrate_test_runtime::Header; fn test_ancestor_search_when_common_is(n: usize) { sp_tracing::try_init_simple(); @@ -254,9 +253,18 @@ fn sync_justifications() { // we finalize block #10, #15 and #20 for peer 0 with a justification let just = (*b"FRNK", Vec::new()); - net.peer(0).client().finalize_block(BlockId::Number(10), Some(just.clone()), true).unwrap(); - net.peer(0).client().finalize_block(BlockId::Number(15), Some(just.clone()), true).unwrap(); - net.peer(0).client().finalize_block(BlockId::Number(20), Some(just.clone()), true).unwrap(); + net.peer(0) + .client() + .finalize_block(BlockId::Number(10), Some(just.clone()), true) + .unwrap(); + net.peer(0) + .client() + .finalize_block(BlockId::Number(15), Some(just.clone()), true) + .unwrap(); + net.peer(0) + .client() + .finalize_block(BlockId::Number(20), Some(just.clone()), true) + .unwrap(); let h1 = net.peer(1).client().header(&BlockId::Number(10)).unwrap().unwrap(); let h2 = net.peer(1).client().header(&BlockId::Number(15)).unwrap().unwrap(); @@ -271,21 +279,15 @@ fn sync_justifications() { net.poll(cx); for height in (10..21).step_by(5) { - if net - .peer(0) - .client() - .justifications(&BlockId::Number(height)) - .unwrap() != Some(Justifications::from((*b"FRNK", Vec::new()))) + if net.peer(0).client().justifications(&BlockId::Number(height)).unwrap() != + Some(Justifications::from((*b"FRNK", Vec::new()))) { - return Poll::Pending; + return Poll::Pending } - if net - .peer(1) - .client() - .justifications(&BlockId::Number(height)) - .unwrap() != Some(Justifications::from((*b"FRNK", Vec::new()))) + if 
net.peer(1).client().justifications(&BlockId::Number(height)).unwrap() != + Some(Justifications::from((*b"FRNK", Vec::new()))) { - return Poll::Pending; + return Poll::Pending } } @@ -308,7 +310,10 @@ fn sync_justifications_across_forks() { net.block_until_sync(); let just = (*b"FRNK", Vec::new()); - net.peer(0).client().finalize_block(BlockId::Hash(f1_best), Some(just), true).unwrap(); + net.peer(0) + .client() + .finalize_block(BlockId::Hash(f1_best), Some(just), true) + .unwrap(); net.peer(1).request_justification(&f1_best, 10); net.peer(1).request_justification(&f2_best, 11); @@ -316,16 +321,10 @@ fn sync_justifications_across_forks() { block_on(futures::future::poll_fn::<(), _>(|cx| { net.poll(cx); - if net - .peer(0) - .client() - .justifications(&BlockId::Number(10)) - .unwrap() == Some(Justifications::from((*b"FRNK", Vec::new()))) - && net - .peer(1) - .client() - .justifications(&BlockId::Number(10)) - .unwrap() == Some(Justifications::from((*b"FRNK", Vec::new()))) + if net.peer(0).client().justifications(&BlockId::Number(10)).unwrap() == + Some(Justifications::from((*b"FRNK", Vec::new()))) && + net.peer(1).client().justifications(&BlockId::Number(10)).unwrap() == + Some(Justifications::from((*b"FRNK", Vec::new()))) { Poll::Ready(()) } else { @@ -380,7 +379,8 @@ fn own_blocks_are_announced() { sp_tracing::try_init_simple(); let mut net = TestNet::new(3); net.block_until_sync(); // connect'em - net.peer(0).generate_blocks(1, BlockOrigin::Own, |builder| builder.build().unwrap().block); + net.peer(0) + .generate_blocks(1, BlockOrigin::Own, |builder| builder.build().unwrap().block); net.block_until_sync(); @@ -573,7 +573,7 @@ fn can_sync_explicit_forks() { // poll until the two nodes connect, otherwise announcing the block will not work block_on(futures::future::poll_fn::<(), _>(|cx| { net.poll(cx); - if net.peer(0).num_peers() == 0 || net.peer(1).num_peers() == 0 { + if net.peer(0).num_peers() == 0 || net.peer(1).num_peers() == 0 { Poll::Pending } else { 
Poll::Ready(()) @@ -658,7 +658,7 @@ fn full_sync_requires_block_body() { // Wait for nodes to connect block_on(futures::future::poll_fn::<(), _>(|cx| { net.poll(cx); - if net.peer(0).num_peers() == 0 || net.peer(1).num_peers() == 0 { + if net.peer(0).num_peers() == 0 || net.peer(1).num_peers() == 0 { Poll::Pending } else { Poll::Ready(()) @@ -718,8 +718,14 @@ fn can_sync_to_peers_with_wrong_common_block() { // both peers re-org to the same fork without notifying each other let just = Some((*b"FRNK", Vec::new())); - net.peer(0).client().finalize_block(BlockId::Hash(fork_hash), just.clone(), true).unwrap(); - net.peer(1).client().finalize_block(BlockId::Hash(fork_hash), just, true).unwrap(); + net.peer(0) + .client() + .finalize_block(BlockId::Hash(fork_hash), just.clone(), true) + .unwrap(); + net.peer(1) + .client() + .finalize_block(BlockId::Hash(fork_hash), just, true) + .unwrap(); let final_hash = net.peer(0).push_blocks(1, false); net.block_until_sync(); @@ -735,7 +741,8 @@ impl BlockAnnounceValidator for NewBestBlockAnnounceValidator { &mut self, _: &Header, _: &[u8], - ) -> Pin>> + Send>> { + ) -> Pin>> + Send>> + { async { Ok(Validation::Success { is_new_best: true }) }.boxed() } } @@ -748,16 +755,18 @@ impl BlockAnnounceValidator for FailingBlockAnnounceValidator { &mut self, header: &Header, _: &[u8], - ) -> Pin>> + Send>> { + ) -> Pin>> + Send>> + { let number = *header.number(); let target_number = self.0; - async move { Ok( - if number == target_number { + async move { + Ok(if number == target_number { Validation::Failure { disconnect: false } } else { Validation::Success { is_new_best: true } - } - ) }.boxed() + }) + } + .boxed() } } @@ -794,11 +803,13 @@ impl BlockAnnounceValidator for DeferredBlockAnnounceValidator { &mut self, _: &Header, _: &[u8], - ) -> Pin>> + Send>> { + ) -> Pin>> + Send>> + { async { futures_timer::Delay::new(std::time::Duration::from_millis(500)).await; Ok(Validation::Success { is_new_best: false }) - }.boxed() + } + .boxed() 
} } @@ -863,17 +874,12 @@ fn sync_to_tip_when_we_sync_together_with_multiple_peers() { let mut net = TestNet::new(3); - let block_hash = net.peer(0).push_blocks_at_without_informing_sync( - BlockId::Number(0), - 10_000, - false, - ); + let block_hash = + net.peer(0) + .push_blocks_at_without_informing_sync(BlockId::Number(0), 10_000, false); - net.peer(1).push_blocks_at_without_informing_sync( - BlockId::Number(0), - 5_000, - false, - ); + net.peer(1) + .push_blocks_at_without_informing_sync(BlockId::Number(0), 5_000, false); net.block_until_connected(); net.block_until_idle(); @@ -897,7 +903,9 @@ fn block_announce_data_is_propagated() { &mut self, _: &Header, data: &[u8], - ) -> Pin>> + Send>> { + ) -> Pin< + Box>> + Send>, + > { let correct = data.get(0) == Some(&137); async move { if correct { @@ -905,7 +913,8 @@ fn block_announce_data_is_propagated() { } else { Ok(Validation::Failure { disconnect: false }) } - }.boxed() + } + .boxed() } } @@ -950,15 +959,19 @@ fn continue_to_sync_after_some_block_announcement_verifications_failed() { &mut self, header: &Header, _: &[u8], - ) -> Pin>> + Send>> { + ) -> Pin< + Box>> + Send>, + > { let number = *header.number(); async move { if number < 100 { - Err(Box::::from(String::from("error")) as Box<_>) + Err(Box::::from(String::from("error")) + as Box<_>) } else { Ok(Validation::Success { is_new_best: false }) } - }.boxed() + } + .boxed() } } @@ -1010,22 +1023,18 @@ fn multiple_requests_are_accepted_as_long_as_they_are_not_fulfilled() { } // Finalize the block and make the justification available. 
- net.peer(0).client().finalize_block( - BlockId::Number(10), - Some((*b"FRNK", Vec::new())), - true, - ).unwrap(); + net.peer(0) + .client() + .finalize_block(BlockId::Number(10), Some((*b"FRNK", Vec::new())), true) + .unwrap(); block_on(futures::future::poll_fn::<(), _>(|cx| { net.poll(cx); - if net - .peer(1) - .client() - .justifications(&BlockId::Number(10)) - .unwrap() != Some(Justifications::from((*b"FRNK", Vec::new()))) + if net.peer(1).client().justifications(&BlockId::Number(10)).unwrap() != + Some(Justifications::from((*b"FRNK", Vec::new()))) { - return Poll::Pending; + return Poll::Pending } Poll::Ready(()) @@ -1091,7 +1100,7 @@ fn syncs_after_missing_announcement() { #[test] fn syncs_state() { sp_tracing::try_init_simple(); - for skip_proofs in &[ false, true ] { + for skip_proofs in &[false, true] { let mut net = TestNet::new(0); net.add_full_peer_with_config(Default::default()); net.add_full_peer_with_config(FullPeerConfig { @@ -1104,7 +1113,10 @@ fn syncs_state() { assert!(!net.peer(1).client().has_state_at(&BlockId::Number(64))); let just = (*b"FRNK", Vec::new()); - net.peer(1).client().finalize_block(BlockId::Number(60), Some(just), true).unwrap(); + net.peer(1) + .client() + .finalize_block(BlockId::Number(60), Some(just), true) + .unwrap(); // Wait for state sync. 
block_on(futures::future::poll_fn::<(), _>(|cx| { net.poll(cx); @@ -1133,10 +1145,7 @@ fn syncs_indexed_blocks() { sp_tracing::try_init_simple(); let mut net = TestNet::new(0); let mut n: u64 = 0; - net.add_full_peer_with_config(FullPeerConfig { - storage_chain: true, - ..Default::default() - }); + net.add_full_peer_with_config(FullPeerConfig { storage_chain: true, ..Default::default() }); net.add_full_peer_with_config(FullPeerConfig { storage_chain: true, sync_mode: SyncMode::Fast { skip_proofs: false, storage_chain_mode: true }, @@ -1145,7 +1154,8 @@ fn syncs_indexed_blocks() { net.peer(0).generate_blocks_at( BlockId::number(0), 64, - BlockOrigin::Own, |mut builder| { + BlockOrigin::Own, + |mut builder| { let ex = Extrinsic::Store(n.to_le_bytes().to_vec()); n += 1; builder.push(ex).unwrap(); @@ -1156,10 +1166,30 @@ fn syncs_indexed_blocks() { true, ); let indexed_key = sp_runtime::traits::BlakeTwo256::hash(&42u64.to_le_bytes()); - assert!(net.peer(0).client().as_full().unwrap().indexed_transaction(&indexed_key).unwrap().is_some()); - assert!(net.peer(1).client().as_full().unwrap().indexed_transaction(&indexed_key).unwrap().is_none()); + assert!(net + .peer(0) + .client() + .as_full() + .unwrap() + .indexed_transaction(&indexed_key) + .unwrap() + .is_some()); + assert!(net + .peer(1) + .client() + .as_full() + .unwrap() + .indexed_transaction(&indexed_key) + .unwrap() + .is_none()); net.block_until_sync(); - assert!(net.peer(1).client().as_full().unwrap().indexed_transaction(&indexed_key).unwrap().is_some()); + assert!(net + .peer(1) + .client() + .as_full() + .unwrap() + .indexed_transaction(&indexed_key) + .unwrap() + .is_some()); } - diff --git a/client/offchain/src/api.rs b/client/offchain/src/api.rs index 9b5ff69b726a8..46ba1a0f3cbc6 100644 --- a/client/offchain/src/api.rs +++ b/client/offchain/src/api.rs @@ -16,25 +16,21 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use std::{ - str::FromStr, - sync::Arc, - convert::TryFrom, - thread::sleep, - collections::HashSet, -}; +use std::{collections::HashSet, convert::TryFrom, str::FromStr, sync::Arc, thread::sleep}; use crate::NetworkProvider; +use codec::{Decode, Encode}; use futures::Future; -use sc_network::{PeerId, Multiaddr}; -use codec::{Encode, Decode}; -use sp_core::OpaquePeerId; -use sp_core::offchain::{ - self, HttpRequestId, Timestamp, HttpRequestStatus, HttpError, - OffchainStorage, OpaqueNetworkState, OpaqueMultiaddr, StorageKind, +pub use http::SharedClient; +use sc_network::{Multiaddr, PeerId}; +use sp_core::{ + offchain::{ + self, HttpError, HttpRequestId, HttpRequestStatus, OffchainStorage, OpaqueMultiaddr, + OpaqueNetworkState, StorageKind, Timestamp, + }, + OpaquePeerId, }; pub use sp_offchain::STORAGE_PREFIX; -pub use http::SharedClient; #[cfg(not(target_os = "unknown"))] mod http; @@ -71,16 +67,15 @@ impl Db { } /// Create new instance of Offchain DB, backed by given backend. - pub fn factory_from_backend(backend: &Backend) -> Option< - Box - > where + pub fn factory_from_backend( + backend: &Backend, + ) -> Option> + where Backend: sc_client_api::Backend, Block: sp_runtime::traits::Block, Storage: 'static, { - sc_client_api::Backend::offchain_storage(backend).map(|db| - Box::new(Self::new(db)) as _ - ) + sc_client_api::Backend::offchain_storage(backend).map(|db| Box::new(Self::new(db)) as _) } } @@ -123,9 +118,8 @@ impl offchain::DbExternalities for Db { old_value.as_ref().map(hex::encode), ); match kind { - StorageKind::PERSISTENT => { - self.persistent.compare_and_set(STORAGE_PREFIX, key, old_value, new_value) - }, + StorageKind::PERSISTENT => + self.persistent.compare_and_set(STORAGE_PREFIX, key, old_value, new_value), StorageKind::LOCAL => unavailable_yet(LOCAL_DB), } } @@ -167,10 +161,7 @@ impl offchain::Externalities for Api { fn network_state(&self) -> Result { let external_addresses = self.network_provider.external_addresses(); - let state = 
NetworkState::new( - self.network_provider.local_peer_id(), - external_addresses, - ); + let state = NetworkState::new(self.network_provider.local_peer_id(), external_addresses); Ok(OpaqueNetworkState::from(state)) } @@ -190,7 +181,7 @@ impl offchain::Externalities for Api { &mut self, method: &str, uri: &str, - _meta: &[u8] + _meta: &[u8], ) -> Result { self.http.request_start(method, uri) } @@ -199,7 +190,7 @@ impl offchain::Externalities for Api { &mut self, request_id: HttpRequestId, name: &str, - value: &str + value: &str, ) -> Result<(), ()> { self.http.request_add_header(request_id, name, value) } @@ -208,7 +199,7 @@ impl offchain::Externalities for Api { &mut self, request_id: HttpRequestId, chunk: &[u8], - deadline: Option + deadline: Option, ) -> Result<(), HttpError> { self.http.request_write_body(request_id, chunk, deadline) } @@ -216,15 +207,12 @@ impl offchain::Externalities for Api { fn http_response_wait( &mut self, ids: &[HttpRequestId], - deadline: Option + deadline: Option, ) -> Vec { self.http.response_wait(ids, deadline) } - fn http_response_headers( - &mut self, - request_id: HttpRequestId - ) -> Vec<(Vec, Vec)> { + fn http_response_headers(&mut self, request_id: HttpRequestId) -> Vec<(Vec, Vec)> { self.http.response_headers(request_id) } @@ -232,15 +220,14 @@ impl offchain::Externalities for Api { &mut self, request_id: HttpRequestId, buffer: &mut [u8], - deadline: Option + deadline: Option, ) -> Result { self.http.response_read_body(request_id, buffer, deadline) } fn set_authorized_nodes(&mut self, nodes: Vec, authorized_only: bool) { - let peer_ids: HashSet = nodes.into_iter() - .filter_map(|node| PeerId::from_bytes(&node.0).ok()) - .collect(); + let peer_ids: HashSet = + nodes.into_iter().filter_map(|node| PeerId::from_bytes(&node.0).ok()).collect(); self.network_provider.set_authorized_peers(peer_ids); self.network_provider.set_authorized_only(authorized_only); @@ -256,10 +243,7 @@ pub struct NetworkState { impl NetworkState { fn 
new(peer_id: PeerId, external_addresses: Vec) -> Self { - NetworkState { - peer_id, - external_addresses, - } + NetworkState { peer_id, external_addresses } } } @@ -277,10 +261,7 @@ impl From for OpaqueNetworkState { }) .collect(); - OpaqueNetworkState { - peer_id, - external_addresses, - } + OpaqueNetworkState { peer_id, external_addresses } } } @@ -293,7 +274,8 @@ impl TryFrom for NetworkState { let bytes: Vec = Decode::decode(&mut &inner_vec[..]).map_err(|_| ())?; let peer_id = PeerId::from_bytes(&bytes).map_err(|_| ())?; - let external_addresses: Result, Self::Error> = state.external_addresses + let external_addresses: Result, Self::Error> = state + .external_addresses .iter() .map(|enc_multiaddr| -> Result { let inner_vec = &enc_multiaddr.0; @@ -305,10 +287,7 @@ impl TryFrom for NetworkState { .collect(); let external_addresses = external_addresses?; - Ok(NetworkState { - peer_id, - external_addresses, - }) + Ok(NetworkState { peer_id, external_addresses }) } } @@ -329,15 +308,9 @@ impl AsyncApi { ) -> (Api, Self) { let (http_api, http_worker) = http::http(shared_client); - let api = Api { - network_provider, - is_validator, - http: http_api, - }; + let api = Api { network_provider, is_validator, http: http_api }; - let async_api = Self { - http: Some(http_worker), - }; + let async_api = Self { http: Some(http_worker) }; (api, async_api) } @@ -355,8 +328,11 @@ mod tests { use super::*; use sc_client_db::offchain::LocalStorage; use sc_network::{NetworkStateInfo, PeerId}; - use sp_core::offchain::{Externalities, DbExternalities}; - use std::{convert::{TryFrom, TryInto}, time::SystemTime}; + use sp_core::offchain::{DbExternalities, Externalities}; + use std::{ + convert::{TryFrom, TryInto}, + time::SystemTime, + }; struct TestNetwork(); @@ -385,11 +361,7 @@ mod tests { let mock = Arc::new(TestNetwork()); let shared_client = SharedClient::new(); - AsyncApi::new( - mock, - false, - shared_client, - ) + AsyncApi::new(mock, false, shared_client) } fn offchain_db() -> 
Db { @@ -402,7 +374,12 @@ mod tests { // Get timestamp from std. let now = SystemTime::now(); - let d: u64 = now.duration_since(SystemTime::UNIX_EPOCH).unwrap().as_millis().try_into().unwrap(); + let d: u64 = now + .duration_since(SystemTime::UNIX_EPOCH) + .unwrap() + .as_millis() + .try_into() + .unwrap(); // Get timestamp from offchain api. let timestamp = api.timestamp(); diff --git a/client/offchain/src/api/http.rs b/client/offchain/src/api/http.rs index f03f7a93b856c..75a27f0c7cfbe 100644 --- a/client/offchain/src/api/http.rs +++ b/client/offchain/src/api/http.rs @@ -28,16 +28,22 @@ //! actively calling any function. use crate::api::timestamp; -use bytes::buf::ext::{Reader, BufExt}; +use bytes::buf::ext::{BufExt, Reader}; use fnv::FnvHashMap; -use futures::{prelude::*, future, channel::mpsc}; -use log::error; -use sp_core::offchain::{HttpRequestId, Timestamp, HttpRequestStatus, HttpError}; -use std::{convert::TryFrom, fmt, io::Read as _, pin::Pin, task::{Context, Poll}}; -use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedSender, TracingUnboundedReceiver}; -use std::sync::Arc; -use hyper::{Client as HyperClient, Body, client}; +use futures::{channel::mpsc, future, prelude::*}; +use hyper::{client, Body, Client as HyperClient}; use hyper_rustls::HttpsConnector; +use log::error; +use sp_core::offchain::{HttpError, HttpRequestId, HttpRequestStatus, Timestamp}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use std::{ + convert::TryFrom, + fmt, + io::Read as _, + pin::Pin, + sync::Arc, + task::{Context, Poll}, +}; /// Wrapper struct used for keeping the hyper_rustls client running. 
#[derive(Clone)] @@ -63,12 +69,8 @@ pub fn http(shared_client: SharedClient) -> (HttpApi, HttpWorker) { requests: FnvHashMap::default(), }; - let engine = HttpWorker { - to_api, - from_api, - http_client: shared_client.0, - requests: Vec::new(), - }; + let engine = + HttpWorker { to_api, from_api, http_client: shared_client.0, requests: Vec::new() }; (api, engine) } @@ -127,11 +129,7 @@ struct HttpApiRequestRp { impl HttpApi { /// Mimics the corresponding method in the offchain API. - pub fn request_start( - &mut self, - method: &str, - uri: &str - ) -> Result { + pub fn request_start(&mut self, method: &str, uri: &str) -> Result { // Start by building the prototype of the request. // We do this first so that we don't touch anything in `self` if building the prototype // fails. @@ -146,10 +144,11 @@ impl HttpApi { Some(new_id) => self.next_id.0 = new_id, None => { error!("Overflow in offchain worker HTTP request ID assignment"); - return Err(()); - } + return Err(()) + }, }; - self.requests.insert(new_id, HttpApiRequest::NotDispatched(request, body_sender)); + self.requests + .insert(new_id, HttpApiRequest::NotDispatched(request, body_sender)); Ok(new_id) } @@ -159,11 +158,11 @@ impl HttpApi { &mut self, request_id: HttpRequestId, name: &str, - value: &str + value: &str, ) -> Result<(), ()> { let request = match self.requests.get_mut(&request_id) { Some(&mut HttpApiRequest::NotDispatched(ref mut rq, _)) => rq, - _ => return Err(()) + _ => return Err(()), }; let name = hyper::header::HeaderName::try_from(name).map_err(drop)?; @@ -179,7 +178,7 @@ impl HttpApi { &mut self, request_id: HttpRequestId, chunk: &[u8], - deadline: Option + deadline: Option, ) -> Result<(), HttpError> { // Extract the request from the list. // Don't forget to add it back if necessary when returning. 
@@ -193,76 +192,83 @@ impl HttpApi { let mut when_ready = future::maybe_done(future::poll_fn(|cx| sender.poll_ready(cx))); futures::executor::block_on(future::select(&mut when_ready, &mut deadline)); match when_ready { - future::MaybeDone::Done(Ok(())) => {} + future::MaybeDone::Done(Ok(())) => {}, future::MaybeDone::Done(Err(_)) => return Err(HttpError::IoError), - future::MaybeDone::Future(_) | - future::MaybeDone::Gone => { + future::MaybeDone::Future(_) | future::MaybeDone::Gone => { debug_assert!(matches!(deadline, future::MaybeDone::Done(..))); return Err(HttpError::DeadlineReached) - } + }, }; - futures::executor::block_on(sender.send_data(hyper::body::Bytes::from(chunk.to_owned()))) - .map_err(|_| { - error!("HTTP sender refused data despite being ready"); - HttpError::IoError - }) + futures::executor::block_on( + sender.send_data(hyper::body::Bytes::from(chunk.to_owned())), + ) + .map_err(|_| { + error!("HTTP sender refused data despite being ready"); + HttpError::IoError + }) }; loop { request = match request { HttpApiRequest::NotDispatched(request, sender) => { // If the request is not dispatched yet, dispatch it and loop again. - let _ = self.to_worker.unbounded_send(ApiToWorker::Dispatch { - id: request_id, - request - }); + let _ = self + .to_worker + .unbounded_send(ApiToWorker::Dispatch { id: request_id, request }); HttpApiRequest::Dispatched(Some(sender)) - } + }, HttpApiRequest::Dispatched(Some(mut sender)) => if !chunk.is_empty() { match poll_sender(&mut sender) { Err(HttpError::IoError) => return Err(HttpError::IoError), other => { - self.requests.insert( - request_id, - HttpApiRequest::Dispatched(Some(sender)) - ); + self.requests + .insert(request_id, HttpApiRequest::Dispatched(Some(sender))); return other - } + }, } } else { // Writing an empty body is a hint that we should stop writing. Dropping // the sender. 
self.requests.insert(request_id, HttpApiRequest::Dispatched(None)); return Ok(()) - } + }, - HttpApiRequest::Response(mut response @ HttpApiRequestRp { sending_body: Some(_), .. }) => + HttpApiRequest::Response( + mut response @ HttpApiRequestRp { sending_body: Some(_), .. }, + ) => if !chunk.is_empty() { - match poll_sender(response.sending_body.as_mut() - .expect("Can only enter this match branch if Some; qed")) { + match poll_sender( + response + .sending_body + .as_mut() + .expect("Can only enter this match branch if Some; qed"), + ) { Err(HttpError::IoError) => return Err(HttpError::IoError), other => { - self.requests.insert(request_id, HttpApiRequest::Response(response)); + self.requests + .insert(request_id, HttpApiRequest::Response(response)); return other - } + }, } - } else { // Writing an empty body is a hint that we should stop writing. Dropping // the sender. - self.requests.insert(request_id, HttpApiRequest::Response(HttpApiRequestRp { - sending_body: None, - ..response - })); + self.requests.insert( + request_id, + HttpApiRequest::Response(HttpApiRequestRp { + sending_body: None, + ..response + }), + ); return Ok(()) - } + }, HttpApiRequest::Fail(_) => - // If the request has already failed, return without putting back the request - // in the list. + // If the request has already failed, return without putting back the request + // in the list. return Err(HttpError::IoError), v @ HttpApiRequest::Dispatched(None) | @@ -270,7 +276,7 @@ impl HttpApi { // We have already finished sending this body. self.requests.insert(request_id, v); return Err(HttpError::Invalid) - } + }, } } } @@ -279,30 +285,27 @@ impl HttpApi { pub fn response_wait( &mut self, ids: &[HttpRequestId], - deadline: Option + deadline: Option, ) -> Vec { // First of all, dispatch all the non-dispatched requests and drop all senders so that the // user can't write anymore data. 
for id in ids { match self.requests.get_mut(id) { - Some(HttpApiRequest::NotDispatched(_, _)) => {} + Some(HttpApiRequest::NotDispatched(_, _)) => {}, Some(HttpApiRequest::Dispatched(sending_body)) | Some(HttpApiRequest::Response(HttpApiRequestRp { sending_body, .. })) => { let _ = sending_body.take(); continue - } - _ => continue + }, + _ => continue, }; let (request, _sender) = match self.requests.remove(id) { Some(HttpApiRequest::NotDispatched(rq, s)) => (rq, s), - _ => unreachable!("we checked for NotDispatched above; qed") + _ => unreachable!("we checked for NotDispatched above; qed"), }; - let _ = self.to_worker.unbounded_send(ApiToWorker::Dispatch { - id: *id, - request - }); + let _ = self.to_worker.unbounded_send(ApiToWorker::Dispatch { id: *id, request }); // We also destroy the sender in order to forbid writing more data. self.requests.insert(*id, HttpApiRequest::Dispatched(None)); @@ -319,25 +322,24 @@ impl HttpApi { for id in ids { output.push(match self.requests.get(id) { None => HttpRequestStatus::Invalid, - Some(HttpApiRequest::NotDispatched(_, _)) => - unreachable!("we replaced all the NotDispatched with Dispatched earlier; qed"), + Some(HttpApiRequest::NotDispatched(_, _)) => unreachable!( + "we replaced all the NotDispatched with Dispatched earlier; qed" + ), Some(HttpApiRequest::Dispatched(_)) => { must_wait_more = true; HttpRequestStatus::DeadlineReached }, Some(HttpApiRequest::Fail(_)) => HttpRequestStatus::IoError, - Some(HttpApiRequest::Response(HttpApiRequestRp { status_code, .. })) => - HttpRequestStatus::Finished(status_code.as_u16()), + Some(HttpApiRequest::Response(HttpApiRequestRp { + status_code, .. + })) => HttpRequestStatus::Finished(status_code.as_u16()), }); } debug_assert_eq!(output.len(), ids.len()); // Are we ready to call `return`? 
- let is_done = if let future::MaybeDone::Done(_) = deadline { - true - } else { - !must_wait_more - }; + let is_done = + if let future::MaybeDone::Done(_) = deadline { true } else { !must_wait_more }; if is_done { // Requests in "fail" mode are purged before returning. @@ -369,47 +371,45 @@ impl HttpApi { Some(WorkerToApi::Response { id, status_code, headers, body }) => match self.requests.remove(&id) { Some(HttpApiRequest::Dispatched(sending_body)) => { - self.requests.insert(id, HttpApiRequest::Response(HttpApiRequestRp { - sending_body, - status_code, - headers, - body: body.fuse(), - current_read_chunk: None, - })); - } - None => {} // can happen if we detected an IO error when sending the body + self.requests.insert( + id, + HttpApiRequest::Response(HttpApiRequestRp { + sending_body, + status_code, + headers, + body: body.fuse(), + current_read_chunk: None, + }), + ); + }, + None => {}, // can happen if we detected an IO error when sending the body _ => error!("State mismatch between the API and worker"), - } + }, - Some(WorkerToApi::Fail { id, error }) => - match self.requests.remove(&id) { - Some(HttpApiRequest::Dispatched(_)) => { - self.requests.insert(id, HttpApiRequest::Fail(error)); - } - None => {} // can happen if we detected an IO error when sending the body - _ => error!("State mismatch between the API and worker"), - } + Some(WorkerToApi::Fail { id, error }) => match self.requests.remove(&id) { + Some(HttpApiRequest::Dispatched(_)) => { + self.requests.insert(id, HttpApiRequest::Fail(error)); + }, + None => {}, // can happen if we detected an IO error when sending the body + _ => error!("State mismatch between the API and worker"), + }, None => { error!("Worker has crashed"); return ids.iter().map(|_| HttpRequestStatus::IoError).collect() - } + }, } - } } /// Mimics the corresponding method in the offchain API. 
- pub fn response_headers( - &mut self, - request_id: HttpRequestId - ) -> Vec<(Vec, Vec)> { + pub fn response_headers(&mut self, request_id: HttpRequestId) -> Vec<(Vec, Vec)> { // Do an implicit non-blocking wait on the request. let _ = self.response_wait(&[request_id], Some(timestamp::now())); let headers = match self.requests.get(&request_id) { Some(HttpApiRequest::Response(HttpApiRequestRp { headers, .. })) => headers, - _ => return Vec::new() + _ => return Vec::new(), }; headers @@ -423,7 +423,7 @@ impl HttpApi { &mut self, request_id: HttpRequestId, buffer: &mut [u8], - deadline: Option + deadline: Option, ) -> Result { // Do an implicit wait on the request. let _ = self.response_wait(&[request_id], deadline); @@ -439,14 +439,13 @@ impl HttpApi { return Err(HttpError::DeadlineReached) }, // The request has failed. - Some(HttpApiRequest::Fail { .. }) => - return Err(HttpError::IoError), + Some(HttpApiRequest::Fail { .. }) => return Err(HttpError::IoError), // Request hasn't been dispatched yet; reading the body is invalid. Some(rq @ HttpApiRequest::NotDispatched(_, _)) => { self.requests.insert(request_id, rq); return Err(HttpError::Invalid) - } - None => return Err(HttpError::Invalid) + }, + None => return Err(HttpError::Invalid), }; // Convert the deadline into a `Future` that resolves when the deadline is reached. @@ -456,19 +455,22 @@ impl HttpApi { // First read from `current_read_chunk`. if let Some(mut current_read_chunk) = response.current_read_chunk.take() { match current_read_chunk.read(buffer) { - Ok(0) => {} + Ok(0) => {}, Ok(n) => { - self.requests.insert(request_id, HttpApiRequest::Response(HttpApiRequestRp { - current_read_chunk: Some(current_read_chunk), - .. response - })); + self.requests.insert( + request_id, + HttpApiRequest::Response(HttpApiRequestRp { + current_read_chunk: Some(current_read_chunk), + ..response + }), + ); return Ok(n) }, Err(err) => { // This code should never be reached unless there's a logic error somewhere. 
error!("Failed to read from current read chunk: {:?}", err); return Err(HttpError::IoError) - } + }, } } @@ -482,7 +484,7 @@ impl HttpApi { match next_body { Some(Ok(chunk)) => response.current_read_chunk = Some(chunk.reader()), Some(Err(_)) => return Err(HttpError::IoError), - None => return Ok(0), // eof + None => return Ok(0), // eof } } @@ -496,9 +498,7 @@ impl HttpApi { impl fmt::Debug for HttpApi { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list() - .entries(self.requests.iter()) - .finish() + f.debug_list().entries(self.requests.iter()).finish() } } @@ -507,12 +507,13 @@ impl fmt::Debug for HttpApiRequest { match self { HttpApiRequest::NotDispatched(_, _) => f.debug_tuple("HttpApiRequest::NotDispatched").finish(), - HttpApiRequest::Dispatched(_) => - f.debug_tuple("HttpApiRequest::Dispatched").finish(), - HttpApiRequest::Response(HttpApiRequestRp { status_code, headers, .. }) => - f.debug_tuple("HttpApiRequest::Response").field(status_code).field(headers).finish(), - HttpApiRequest::Fail(err) => - f.debug_tuple("HttpApiRequest::Fail").field(err).finish(), + HttpApiRequest::Dispatched(_) => f.debug_tuple("HttpApiRequest::Dispatched").finish(), + HttpApiRequest::Response(HttpApiRequestRp { status_code, headers, .. }) => f + .debug_tuple("HttpApiRequest::Response") + .field(status_code) + .field(headers) + .finish(), + HttpApiRequest::Fail(err) => f.debug_tuple("HttpApiRequest::Fail").field(err).finish(), } } } @@ -525,7 +526,7 @@ enum ApiToWorker { id: HttpRequestId, /// Request to start executing. request: hyper::Request, - } + }, } /// Message send from the API to the worker. @@ -605,8 +606,8 @@ impl Future for HttpWorker { Poll::Ready(Ok(response)) => response, Poll::Ready(Err(error)) => { let _ = me.to_api.unbounded_send(WorkerToApi::Fail { id, error }); - continue; // don't insert the request back - } + continue // don't insert the request back + }, }; // We received a response! Decompose it into its parts. 
@@ -622,20 +623,20 @@ impl Future for HttpWorker { }); me.requests.push((id, HttpWorkerRequest::ReadBody { body, tx: body_tx })); - cx.waker().wake_by_ref(); // reschedule in order to poll the new future + cx.waker().wake_by_ref(); // reschedule in order to poll the new future continue - } + }, HttpWorkerRequest::ReadBody { mut body, mut tx } => { // Before reading from the HTTP response, check that `tx` is ready to accept // a new chunk. match tx.poll_ready(cx) { - Poll::Ready(Ok(())) => {} - Poll::Ready(Err(_)) => continue, // don't insert the request back + Poll::Ready(Ok(())) => {}, + Poll::Ready(Err(_)) => continue, // don't insert the request back Poll::Pending => { me.requests.push((id, HttpWorkerRequest::ReadBody { body, tx })); continue - } + }, } // `tx` is ready. Read a chunk from the socket and send it to the channel. @@ -643,31 +644,31 @@ impl Future for HttpWorker { Poll::Ready(Some(Ok(chunk))) => { let _ = tx.start_send(Ok(chunk)); me.requests.push((id, HttpWorkerRequest::ReadBody { body, tx })); - cx.waker().wake_by_ref(); // reschedule in order to continue reading - } + cx.waker().wake_by_ref(); // reschedule in order to continue reading + }, Poll::Ready(Some(Err(err))) => { let _ = tx.start_send(Err(err)); // don't insert the request back }, - Poll::Ready(None) => {} // EOF; don't insert the request back + Poll::Ready(None) => {}, // EOF; don't insert the request back Poll::Pending => { me.requests.push((id, HttpWorkerRequest::ReadBody { body, tx })); }, } - } + }, } } // Check for messages coming from the [`HttpApi`]. 
match Stream::poll_next(Pin::new(&mut me.from_api), cx) { Poll::Pending => {}, - Poll::Ready(None) => return Poll::Ready(()), // stops the worker + Poll::Ready(None) => return Poll::Ready(()), // stops the worker Poll::Ready(Some(ApiToWorker::Dispatch { id, request })) => { let future = me.http_client.request(request); debug_assert!(me.requests.iter().all(|(i, _)| *i != id)); me.requests.push((id, HttpWorkerRequest::Dispatched(future))); - cx.waker().wake_by_ref(); // reschedule the task to poll the request - } + cx.waker().wake_by_ref(); // reschedule the task to poll the request + }, } Poll::Pending @@ -676,9 +677,7 @@ impl Future for HttpWorker { impl fmt::Debug for HttpWorker { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list() - .entries(self.requests.iter()) - .finish() + f.debug_list().entries(self.requests.iter()).finish() } } @@ -695,13 +694,13 @@ impl fmt::Debug for HttpWorkerRequest { #[cfg(test)] mod tests { - use core::convert::Infallible; - use crate::api::timestamp; use super::{http, SharedClient}; - use sp_core::offchain::{HttpError, HttpRequestId, HttpRequestStatus, Duration}; + use crate::api::timestamp; + use core::convert::Infallible; use futures::future; use lazy_static::lazy_static; - + use sp_core::offchain::{Duration, HttpError, HttpRequestId, HttpRequestStatus}; + // Using lazy_static to avoid spawning lots of different SharedClients, // as spawning a SharedClient is CPU-intensive and opens lots of fds. lazy_static! 
{ @@ -720,14 +719,17 @@ mod tests { let mut rt = tokio::runtime::Runtime::new().unwrap(); let worker = rt.spawn(worker); let server = rt.spawn(async move { - let server = hyper::Server::bind(&"127.0.0.1:0".parse().unwrap()) - .serve(hyper::service::make_service_fn(|_| { async move { - Ok::<_, Infallible>(hyper::service::service_fn(move |_req| async move { - Ok::<_, Infallible>( - hyper::Response::new(hyper::Body::from("Hello World!")) - ) - })) - }})); + let server = hyper::Server::bind(&"127.0.0.1:0".parse().unwrap()).serve( + hyper::service::make_service_fn(|_| async move { + Ok::<_, Infallible>(hyper::service::service_fn( + move |_req| async move { + Ok::<_, Infallible>(hyper::Response::new(hyper::Body::from( + "Hello World!", + ))) + }, + )) + }), + ); let _ = addr_tx.send(server.local_addr()); server.await.map_err(drop) }); @@ -750,7 +752,7 @@ mod tests { match api.response_wait(&[id], Some(deadline))[0] { HttpRequestStatus::Finished(200) => {}, - v => panic!("Connecting to localhost failed: {:?}", v) + v => panic!("Connecting to localhost failed: {:?}", v), } let headers = api.response_headers(id); @@ -766,13 +768,13 @@ mod tests { let (mut api, addr) = build_api_server!(); match api.request_start("\0", &format!("http://{}", addr)) { - Err(()) => {} - Ok(_) => panic!() + Err(()) => {}, + Ok(_) => panic!(), }; match api.request_start("GET", "http://\0localhost") { - Err(()) => {} - Ok(_) => panic!() + Err(()) => {}, + Ok(_) => panic!(), }; } @@ -781,42 +783,42 @@ mod tests { let (mut api, addr) = build_api_server!(); match api.request_add_header(HttpRequestId(0xdead), "Foo", "bar") { - Err(()) => {} - Ok(_) => panic!() + Err(()) => {}, + Ok(_) => panic!(), }; let id = api.request_start("GET", &format!("http://{}", addr)).unwrap(); match api.request_add_header(id, "\0", "bar") { - Err(()) => {} - Ok(_) => panic!() + Err(()) => {}, + Ok(_) => panic!(), }; let id = api.request_start("POST", &format!("http://{}", addr)).unwrap(); match api.request_add_header(id, 
"Foo", "\0") { - Err(()) => {} - Ok(_) => panic!() + Err(()) => {}, + Ok(_) => panic!(), }; let id = api.request_start("POST", &format!("http://{}", addr)).unwrap(); api.request_add_header(id, "Foo", "Bar").unwrap(); api.request_write_body(id, &[1, 2, 3, 4], None).unwrap(); match api.request_add_header(id, "Foo2", "Bar") { - Err(()) => {} - Ok(_) => panic!() + Err(()) => {}, + Ok(_) => panic!(), }; let id = api.request_start("GET", &format!("http://{}", addr)).unwrap(); api.response_headers(id); match api.request_add_header(id, "Foo2", "Bar") { - Err(()) => {} - Ok(_) => panic!() + Err(()) => {}, + Ok(_) => panic!(), }; let id = api.request_start("GET", &format!("http://{}", addr)).unwrap(); api.response_read_body(id, &mut [], None).unwrap(); match api.request_add_header(id, "Foo2", "Bar") { - Err(()) => {} - Ok(_) => panic!() + Err(()) => {}, + Ok(_) => panic!(), }; } @@ -825,13 +827,13 @@ mod tests { let (mut api, addr) = build_api_server!(); match api.request_write_body(HttpRequestId(0xdead), &[1, 2, 3], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), }; match api.request_write_body(HttpRequestId(0xdead), &[], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), }; let id = api.request_start("POST", &format!("http://{}", addr)).unwrap(); @@ -839,8 +841,8 @@ mod tests { api.request_write_body(id, &[1, 2, 3, 4], None).unwrap(); api.request_write_body(id, &[], None).unwrap(); match api.request_write_body(id, &[], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), }; let id = api.request_start("POST", &format!("http://{}", addr)).unwrap(); @@ -848,52 +850,52 @@ mod tests { api.request_write_body(id, &[1, 2, 3, 4], None).unwrap(); api.request_write_body(id, &[], None).unwrap(); match api.request_write_body(id, &[1, 2, 3, 4], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + 
Err(HttpError::Invalid) => {}, + _ => panic!(), }; let id = api.request_start("POST", &format!("http://{}", addr)).unwrap(); api.request_write_body(id, &[1, 2, 3, 4], None).unwrap(); api.response_wait(&[id], None); match api.request_write_body(id, &[], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), }; let id = api.request_start("POST", &format!("http://{}", addr)).unwrap(); api.request_write_body(id, &[1, 2, 3, 4], None).unwrap(); api.response_wait(&[id], None); match api.request_write_body(id, &[1, 2, 3, 4], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), }; let id = api.request_start("POST", &format!("http://{}", addr)).unwrap(); api.response_headers(id); match api.request_write_body(id, &[1, 2, 3, 4], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), }; let id = api.request_start("GET", &format!("http://{}", addr)).unwrap(); api.response_headers(id); match api.request_write_body(id, &[], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), }; let id = api.request_start("POST", &format!("http://{}", addr)).unwrap(); api.response_read_body(id, &mut [], None).unwrap(); match api.request_write_body(id, &[1, 2, 3, 4], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), }; let id = api.request_start("POST", &format!("http://{}", addr)).unwrap(); api.response_read_body(id, &mut [], None).unwrap(); match api.request_write_body(id, &[], None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), }; } @@ -948,15 +950,15 @@ mod tests { let mut buf = [0; 512]; match api.response_read_body(HttpRequestId(0xdead), &mut buf, None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), } let id = 
api.request_start("GET", &format!("http://{}", addr)).unwrap(); while api.response_read_body(id, &mut buf, None).unwrap() != 0 {} match api.response_read_body(id, &mut buf, None) { - Err(HttpError::Invalid) => {} - _ => panic!() + Err(HttpError::Invalid) => {}, + _ => panic!(), } } @@ -973,16 +975,26 @@ mod tests { for _ in 0..250 { match rand::random::() % 6 { - 0 => { let _ = api.request_add_header(id, "Foo", "Bar"); } - 1 => { let _ = api.request_write_body(id, &[1, 2, 3, 4], None); } - 2 => { let _ = api.request_write_body(id, &[], None); } - 3 => { let _ = api.response_wait(&[id], None); } - 4 => { let _ = api.response_headers(id); } + 0 => { + let _ = api.request_add_header(id, "Foo", "Bar"); + }, + 1 => { + let _ = api.request_write_body(id, &[1, 2, 3, 4], None); + }, + 2 => { + let _ = api.request_write_body(id, &[], None); + }, + 3 => { + let _ = api.response_wait(&[id], None); + }, + 4 => { + let _ = api.response_headers(id); + }, 5 => { let mut buf = [0; 512]; let _ = api.response_read_body(id, &mut buf, None); - } - 6 ..= 255 => unreachable!() + }, + 6..=255 => unreachable!(), } } } diff --git a/client/offchain/src/api/http_dummy.rs b/client/offchain/src/api/http_dummy.rs index ff9c2fb2aa029..386fc445d4e99 100644 --- a/client/offchain/src/api/http_dummy.rs +++ b/client/offchain/src/api/http_dummy.rs @@ -18,8 +18,12 @@ //! Contains the same API as the `http` module, except that everything returns an error. -use sp_core::offchain::{HttpRequestId, Timestamp, HttpRequestStatus, HttpError}; -use std::{future::Future, pin::Pin, task::Context, task::Poll}; +use sp_core::offchain::{HttpError, HttpRequestId, HttpRequestStatus, Timestamp}; +use std::{ + future::Future, + pin::Pin, + task::{Context, Poll}, +}; /// Wrapper struct (wrapping nothing in case of http_dummy) used for keeping the hyper_rustls client running. #[derive(Clone)] @@ -46,24 +50,17 @@ pub struct HttpWorker; impl HttpApi { /// Mimics the corresponding method in the offchain API. 
- pub fn request_start( - &mut self, - _: &str, - _: &str - ) -> Result { + pub fn request_start(&mut self, _: &str, _: &str) -> Result { /// Because this always returns an error, none of the other methods should ever be called. Err(()) } /// Mimics the corresponding method in the offchain API. - pub fn request_add_header( - &mut self, - _: HttpRequestId, - _: &str, - _: &str - ) -> Result<(), ()> { - unreachable!("Creating a request always fails, thus this function will \ - never be called; qed") + pub fn request_add_header(&mut self, _: HttpRequestId, _: &str, _: &str) -> Result<(), ()> { + unreachable!( + "Creating a request always fails, thus this function will \ + never be called; qed" + ) } /// Mimics the corresponding method in the offchain API. @@ -71,33 +68,36 @@ impl HttpApi { &mut self, _: HttpRequestId, _: &[u8], - _: Option + _: Option, ) -> Result<(), HttpError> { - unreachable!("Creating a request always fails, thus this function will \ - never be called; qed") + unreachable!( + "Creating a request always fails, thus this function will \ + never be called; qed" + ) } /// Mimics the corresponding method in the offchain API. pub fn response_wait( &mut self, requests: &[HttpRequestId], - _: Option + _: Option, ) -> Vec { if requests.is_empty() { Vec::new() } else { - unreachable!("Creating a request always fails, thus the list of requests should \ - always be empty; qed") + unreachable!( + "Creating a request always fails, thus the list of requests should \ + always be empty; qed" + ) } } /// Mimics the corresponding method in the offchain API. 
- pub fn response_headers( - &mut self, - _: HttpRequestId - ) -> Vec<(Vec, Vec)> { - unreachable!("Creating a request always fails, thus this function will \ - never be called; qed") + pub fn response_headers(&mut self, _: HttpRequestId) -> Vec<(Vec, Vec)> { + unreachable!( + "Creating a request always fails, thus this function will \ + never be called; qed" + ) } /// Mimics the corresponding method in the offchain API. @@ -105,10 +105,12 @@ impl HttpApi { &mut self, _: HttpRequestId, _: &mut [u8], - _: Option + _: Option, ) -> Result { - unreachable!("Creating a request always fails, thus this function will \ - never be called; qed") + unreachable!( + "Creating a request always fails, thus this function will \ + never be called; qed" + ) } } diff --git a/client/offchain/src/api/timestamp.rs b/client/offchain/src/api/timestamp.rs index 6ea0f000f8d19..f1c8c004a0198 100644 --- a/client/offchain/src/api/timestamp.rs +++ b/client/offchain/src/api/timestamp.rs @@ -19,8 +19,10 @@ //! Helper methods dedicated to timestamps. use sp_core::offchain::Timestamp; -use std::convert::TryInto; -use std::time::{SystemTime, Duration}; +use std::{ + convert::TryInto, + time::{Duration, SystemTime}, +}; /// Returns the current time as a `Timestamp`. pub fn now() -> Timestamp { @@ -34,9 +36,12 @@ pub fn now() -> Timestamp { Ok(d) => { let duration = d.as_millis(); // Assuming overflow won't happen for a few hundred years. 
- Timestamp::from_unix_millis(duration.try_into() - .expect("epoch milliseconds won't overflow u64 for hundreds of years; qed")) - } + Timestamp::from_unix_millis( + duration + .try_into() + .expect("epoch milliseconds won't overflow u64 for hundreds of years; qed"), + ) + }, } } @@ -60,7 +65,6 @@ pub fn deadline_to_future( // Only apply delay if we need to wait a non-zero duration Some(duration) if duration <= Duration::from_secs(0) => Either::Right(Either::Left(future::ready(()))), - Some(duration) => - Either::Right(Either::Right(futures_timer::Delay::new(duration))), + Some(duration) => Either::Right(Either::Right(futures_timer::Delay::new(duration))), }) } diff --git a/client/offchain/src/lib.rs b/client/offchain/src/lib.rs index 21b1b7b7d21ca..be6e4238ca5f1 100644 --- a/client/offchain/src/lib.rs +++ b/client/offchain/src/lib.rs @@ -35,20 +35,22 @@ #![warn(missing_docs)] -use std::{ - fmt, marker::PhantomData, sync::Arc, - collections::HashSet, -}; +use std::{collections::HashSet, fmt, marker::PhantomData, sync::Arc}; -use parking_lot::Mutex; -use threadpool::ThreadPool; -use sp_api::{ApiExt, ProvideRuntimeApi}; -use futures::future::Future; +use futures::{ + future::{ready, Future}, + prelude::*, +}; use log::{debug, warn}; +use parking_lot::Mutex; use sc_network::{ExHashT, NetworkService, NetworkStateInfo, PeerId}; -use sp_core::{offchain, ExecutionContext, traits::SpawnNamed}; -use sp_runtime::{generic::BlockId, traits::{self, Header}}; -use futures::{prelude::*, future::ready}; +use sp_api::{ApiExt, ProvideRuntimeApi}; +use sp_core::{offchain, traits::SpawnNamed, ExecutionContext}; +use sp_runtime::{ + generic::BlockId, + traits::{self, Header}, +}; +use threadpool::ThreadPool; mod api; @@ -94,25 +96,23 @@ impl OffchainWorkers { Self { client, _block: PhantomData, - thread_pool: Mutex::new(ThreadPool::with_name("offchain-worker".into(), num_cpus::get())), + thread_pool: Mutex::new(ThreadPool::with_name( + "offchain-worker".into(), + num_cpus::get(), + )), 
shared_client, } } } -impl fmt::Debug for OffchainWorkers< - Client, - Block, -> { +impl fmt::Debug for OffchainWorkers { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("OffchainWorkers").finish() } } -impl OffchainWorkers< - Client, - Block, -> where +impl OffchainWorkers +where Block: traits::Block, Client: ProvideRuntimeApi + Send + Sync + 'static, Client::Api: OffchainWorkerApi, @@ -127,28 +127,22 @@ impl OffchainWorkers< ) -> impl Future { let runtime = self.client.runtime_api(); let at = BlockId::hash(header.hash()); - let has_api_v1 = runtime.has_api_with::, _>( - &at, |v| v == 1 - ); - let has_api_v2 = runtime.has_api_with::, _>( - &at, |v| v == 2 - ); + let has_api_v1 = runtime.has_api_with::, _>(&at, |v| v == 1); + let has_api_v2 = runtime.has_api_with::, _>(&at, |v| v == 2); let version = match (has_api_v1, has_api_v2) { (_, Ok(true)) => 2, (Ok(true), _) => 1, err => { - let help = "Consider turning off offchain workers if they are not part of your runtime."; + let help = + "Consider turning off offchain workers if they are not part of your runtime."; log::error!("Unsupported Offchain Worker API version: {:?}. 
{}.", err, help); 0 - } + }, }; debug!("Checking offchain workers at {:?}: version:{}", at, version); if version > 0 { - let (api, runner) = api::AsyncApi::new( - network_provider, - is_validator, - self.shared_client.clone(), - ); + let (api, runner) = + api::AsyncApi::new(network_provider, is_validator, self.shared_client.clone()); debug!("Spawning offchain workers at {:?}", at); let header = header.clone(); let client = self.client.clone(); @@ -156,18 +150,19 @@ impl OffchainWorkers< let runtime = client.runtime_api(); let api = Box::new(api); debug!("Running offchain workers at {:?}", at); - let context = ExecutionContext::OffchainCall(Some( - (api, offchain::Capabilities::all()) - )); + let context = + ExecutionContext::OffchainCall(Some((api, offchain::Capabilities::all()))); let run = if version == 2 { runtime.offchain_worker_with_context(&at, context, &header) } else { #[allow(deprecated)] runtime.offchain_worker_before_version_2_with_context( - &at, context, *header.number() + &at, + context, + *header.number(), ) }; - if let Err(e) = run { + if let Err(e) = run { log::error!("Error running offchain workers at {:?}: {:?}", at, e); } }); @@ -197,50 +192,51 @@ pub async fn notification_future( offchain: Arc>, spawner: Spawner, network_provider: Arc, -) - where - Block: traits::Block, - Client: ProvideRuntimeApi + sc_client_api::BlockchainEvents + Send + Sync + 'static, - Client::Api: OffchainWorkerApi, - Spawner: SpawnNamed +) where + Block: traits::Block, + Client: + ProvideRuntimeApi + sc_client_api::BlockchainEvents + Send + Sync + 'static, + Client::Api: OffchainWorkerApi, + Spawner: SpawnNamed, { - client.import_notification_stream().for_each(move |n| { - if n.is_new_best { - spawner.spawn( - "offchain-on-block", - offchain.on_block_imported( - &n.header, - network_provider.clone(), - is_validator, - ).boxed(), - ); - } else { - log::debug!( - target: "sc_offchain", - "Skipping offchain workers for non-canon block: {:?}", - n.header, - ) - } + client + 
.import_notification_stream() + .for_each(move |n| { + if n.is_new_best { + spawner.spawn( + "offchain-on-block", + offchain + .on_block_imported(&n.header, network_provider.clone(), is_validator) + .boxed(), + ); + } else { + log::debug!( + target: "sc_offchain", + "Skipping offchain workers for non-canon block: {:?}", + n.header, + ) + } - ready(()) - }).await; + ready(()) + }) + .await; } #[cfg(test)] mod tests { use super::*; - use std::sync::Arc; + use futures::executor::block_on; + use sc_block_builder::BlockBuilderProvider as _; + use sc_client_api::Backend as _; use sc_network::{Multiaddr, PeerId}; - use substrate_test_runtime_client::{ - TestClient, runtime::Block, TestClientBuilderExt, - DefaultTestClientBuilderExt, ClientBlockImportExt, - }; use sc_transaction_pool::{BasicPool, FullChainApi}; - use sc_transaction_pool_api::{TransactionPool, InPoolTransaction}; + use sc_transaction_pool_api::{InPoolTransaction, TransactionPool}; use sp_consensus::BlockOrigin; - use sc_client_api::Backend as _; - use sc_block_builder::BlockBuilderProvider as _; - use futures::executor::block_on; + use std::sync::Arc; + use substrate_test_runtime_client::{ + runtime::Block, ClientBlockImportExt, DefaultTestClientBuilderExt, TestClient, + TestClientBuilderExt, + }; struct TestNetwork(); @@ -264,9 +260,7 @@ mod tests { } } - struct TestPool( - Arc, Block>> - ); + struct TestPool(Arc, Block>>); impl sc_transaction_pool_api::OffchainSubmitTransaction for TestPool { fn submit_at( @@ -299,9 +293,7 @@ mod tests { // when let offchain = OffchainWorkers::new(client); - futures::executor::block_on( - offchain.on_block_imported(&header, network, false) - ); + futures::executor::block_on(offchain.on_block_imported(&header, network, false)); // then assert_eq!(pool.0.status().ready, 1); @@ -314,22 +306,21 @@ mod tests { sp_tracing::try_init_simple(); - let (client, backend) = - substrate_test_runtime_client::TestClientBuilder::new() - .enable_offchain_indexing_api() - 
.build_with_backend(); + let (client, backend) = substrate_test_runtime_client::TestClientBuilder::new() + .enable_offchain_indexing_api() + .build_with_backend(); let mut client = Arc::new(client); let offchain_db = backend.offchain_storage().unwrap(); let key = &b"hello"[..]; let value = &b"world"[..]; let mut block_builder = client.new_block(Default::default()).unwrap(); - block_builder.push( - substrate_test_runtime_client::runtime::Extrinsic::OffchainIndexSet( + block_builder + .push(substrate_test_runtime_client::runtime::Extrinsic::OffchainIndexSet( key.to_vec(), value.to_vec(), - ), - ).unwrap(); + )) + .unwrap(); let block = block_builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); @@ -337,9 +328,11 @@ mod tests { assert_eq!(value, &offchain_db.get(sp_offchain::STORAGE_PREFIX, &key).unwrap()); let mut block_builder = client.new_block(Default::default()).unwrap(); - block_builder.push( - substrate_test_runtime_client::runtime::Extrinsic::OffchainIndexClear(key.to_vec()), - ).unwrap(); + block_builder + .push(substrate_test_runtime_client::runtime::Extrinsic::OffchainIndexClear( + key.to_vec(), + )) + .unwrap(); let block = block_builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); diff --git a/client/peerset/src/lib.rs b/client/peerset/src/lib.rs index 1efb21dd5389e..26ff3edabe18f 100644 --- a/client/peerset/src/lib.rs +++ b/client/peerset/src/lib.rs @@ -34,13 +34,17 @@ mod peersstate; -use std::{collections::HashSet, collections::VecDeque}; use futures::prelude::*; use log::{debug, error, trace}; use serde_json::json; -use std::{collections::HashMap, pin::Pin, task::{Context, Poll}, time::Duration}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use std::{ + collections::{HashMap, HashSet, VecDeque}, + pin::Pin, + task::{Context, Poll}, + time::Duration, +}; use wasm_timer::{Delay, Instant}; -use sp_utils::mpsc::{tracing_unbounded, 
TracingUnboundedSender, TracingUnboundedReceiver}; pub use libp2p::PeerId; @@ -262,23 +266,22 @@ impl Peerset { pub fn from_config(config: PeersetConfig) -> (Peerset, PeersetHandle) { let (tx, rx) = tracing_unbounded("mpsc_peerset_messages"); - let handle = PeersetHandle { - tx: tx.clone(), - }; + let handle = PeersetHandle { tx: tx.clone() }; let mut peerset = { let now = Instant::now(); Peerset { - data: peersstate::PeersState::new(config.sets.iter().map(|set| peersstate::SetConfig { - in_peers: set.in_peers, - out_peers: set.out_peers, + data: peersstate::PeersState::new(config.sets.iter().map(|set| { + peersstate::SetConfig { in_peers: set.in_peers, out_peers: set.out_peers } })), tx, rx, - reserved_nodes: config.sets.iter().map(|set| { - (set.reserved_nodes.clone(), set.reserved_only) - }).collect(), + reserved_nodes: config + .sets + .iter() + .map(|set| (set.reserved_nodes.clone(), set.reserved_only)) + .collect(), message_queue: VecDeque::new(), created: now, latest_time_update: now, @@ -310,7 +313,7 @@ impl Peerset { fn on_add_reserved_peer(&mut self, set_id: SetId, peer_id: PeerId) { let newly_inserted = self.reserved_nodes[set_id.0].0.insert(peer_id.clone()); if !newly_inserted { - return; + return } self.data.add_no_slot_node(set_id.0, peer_id); @@ -319,34 +322,36 @@ impl Peerset { fn on_remove_reserved_peer(&mut self, set_id: SetId, peer_id: PeerId) { if !self.reserved_nodes[set_id.0].0.remove(&peer_id) { - return; + return } self.data.remove_no_slot_node(set_id.0, &peer_id); // Nothing more to do if not in reserved-only mode. if !self.reserved_nodes[set_id.0].1 { - return; + return } // If, however, the peerset is in reserved-only mode, then the removed node needs to be // disconnected. 
if let peersstate::Peer::Connected(peer) = self.data.peer(set_id.0, &peer_id) { peer.disconnect(); - self.message_queue.push_back(Message::Drop { - set_id, - peer_id, - }); + self.message_queue.push_back(Message::Drop { set_id, peer_id }); } } fn on_set_reserved_peers(&mut self, set_id: SetId, peer_ids: HashSet) { // Determine the difference between the current group and the new list. let (to_insert, to_remove) = { - let to_insert = peer_ids.difference(&self.reserved_nodes[set_id.0].0) - .cloned().collect::>(); - let to_remove = self.reserved_nodes[set_id.0].0.difference(&peer_ids) - .cloned().collect::>(); + let to_insert = peer_ids + .difference(&self.reserved_nodes[set_id.0].0) + .cloned() + .collect::>(); + let to_remove = self.reserved_nodes[set_id.0] + .0 + .difference(&peer_ids) + .cloned() + .collect::>(); (to_insert, to_remove) }; @@ -364,20 +369,19 @@ impl Peerset { if reserved_only { // Disconnect all the nodes that aren't reserved. - for peer_id in self.data.connected_peers(set_id.0).cloned().collect::>().into_iter() { + for peer_id in + self.data.connected_peers(set_id.0).cloned().collect::>().into_iter() + { if self.reserved_nodes[set_id.0].0.contains(&peer_id) { - continue; + continue } - let peer = self.data.peer(set_id.0, &peer_id).into_connected() - .expect("We are enumerating connected peers, therefore the peer is connected; qed"); + let peer = self.data.peer(set_id.0, &peer_id).into_connected().expect( + "We are enumerating connected peers, therefore the peer is connected; qed", + ); peer.disconnect(); - self.message_queue.push_back(Message::Drop { - set_id, - peer_id - }); + self.message_queue.push_back(Message::Drop { set_id, peer_id }); } - } else { self.alloc_slots(set_id); } @@ -402,19 +406,19 @@ impl Peerset { fn on_remove_from_peers_set(&mut self, set_id: SetId, peer_id: PeerId) { // Don't do anything if node is reserved. 
if self.reserved_nodes[set_id.0].0.contains(&peer_id) { - return; + return } match self.data.peer(set_id.0, &peer_id) { peersstate::Peer::Connected(peer) => { - self.message_queue.push_back(Message::Drop { - set_id, - peer_id: peer.peer_id().clone(), - }); + self.message_queue + .push_back(Message::Drop { set_id, peer_id: peer.peer_id().clone() }); peer.disconnect().forget_peer(); - } - peersstate::Peer::NotConnected(peer) => { peer.forget_peer(); } - peersstate::Peer::Unknown(_) => {} + }, + peersstate::Peer::NotConnected(peer) => { + peer.forget_peer(); + }, + peersstate::Peer::Unknown(_) => {}, } } @@ -428,7 +432,7 @@ impl Peerset { trace!(target: "peerset", "Report {}: {:+} to {}. Reason: {}", peer_id, change.value, reputation.reputation(), change.reason ); - return; + return } debug!(target: "peerset", "Report {}: {:+} to {}. Reason: {}, Disconnecting", @@ -490,7 +494,7 @@ impl Peerset { peer_reputation.set_reputation(after); if after != 0 { - continue; + continue } drop(peer_reputation); @@ -499,15 +503,15 @@ impl Peerset { // forget it. for set_index in 0..self.data.num_sets() { match self.data.peer(set_index, &peer_id) { - peersstate::Peer::Connected(_) => {} + peersstate::Peer::Connected(_) => {}, peersstate::Peer::NotConnected(peer) => { if peer.last_connected_or_discovered() + FORGET_AFTER < now { peer.forget_peer(); } - } + }, peersstate::Peer::Unknown(_) => { // Happens if this peer does not belong to this set. - } + }, } } } @@ -531,14 +535,13 @@ impl Peerset { // remove that check. If necessary, the peerset should be refactored to give more // control over what happens in that situation. 
if entry.reputation() < BANNED_THRESHOLD { - break; + break } match entry.try_outgoing() { - Ok(conn) => self.message_queue.push_back(Message::Connect { - set_id, - peer_id: conn.into_peer_id() - }), + Ok(conn) => self + .message_queue + .push_back(Message::Connect { set_id, peer_id: conn.into_peer_id() }), Err(_) => { // An error is returned only if no slot is available. Reserved nodes are // marked in the state machine with a flag saying "doesn't occupy a slot", @@ -548,7 +551,7 @@ impl Peerset { target: "peerset", "Not enough slots to connect to reserved node" ); - } + }, } } @@ -556,7 +559,7 @@ impl Peerset { // Nothing more to do if we're in reserved mode. if self.reserved_nodes[set_id.0].1 { - return; + return } // Try to grab the next node to attempt to connect to. @@ -565,25 +568,24 @@ impl Peerset { while self.data.has_free_outgoing_slot(set_id.0) { let next = match self.data.highest_not_connected_peer(set_id.0) { Some(n) => n, - None => break + None => break, }; // Don't connect to nodes with an abysmal reputation. if next.reputation() < BANNED_THRESHOLD { - break; + break } match next.try_outgoing() { - Ok(conn) => self.message_queue.push_back(Message::Connect { - set_id, - peer_id: conn.into_peer_id() - }), + Ok(conn) => self + .message_queue + .push_back(Message::Connect { set_id, peer_id: conn.into_peer_id() }), Err(_) => { // This branch can only be entered if there is no free slot, which is // checked above. 
debug_assert!(false); - break; - } + break + }, } } } @@ -606,7 +608,7 @@ impl Peerset { if self.reserved_nodes[set_id.0].1 { if !self.reserved_nodes[set_id.0].0.contains(&peer_id) { self.message_queue.push_back(Message::Reject(index)); - return; + return } } @@ -646,7 +648,7 @@ impl Peerset { trace!(target: "peerset", "Dropping {}: {:+} to {}", peer_id, DISCONNECT_REPUTATION_CHANGE, entry.reputation()); entry.disconnect(); - } + }, peersstate::Peer::NotConnected(_) | peersstate::Peer::Unknown(_) => error!(target: "peerset", "Received dropped() for non-connected node"), } @@ -710,10 +712,11 @@ impl Stream for Peerset { fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll> { loop { if let Some(message) = self.message_queue.pop_front() { - return Poll::Ready(Some(message)); + return Poll::Ready(Some(message)) } - if let Poll::Ready(_) = Future::poll(Pin::new(&mut self.next_periodic_alloc_slots), cx) { + if let Poll::Ready(_) = Future::poll(Pin::new(&mut self.next_periodic_alloc_slots), cx) + { self.next_periodic_alloc_slots = Delay::new(Duration::new(1, 0)); for set_index in 0..self.data.num_sets() { @@ -736,8 +739,7 @@ impl Stream for Peerset { self.on_set_reserved_peers(set_id, peer_ids), Action::SetReservedOnly(set_id, reserved) => self.on_set_reserved_only(set_id, reserved), - Action::ReportPeer(peer_id, score_diff) => - self.on_report_peer(peer_id, score_diff), + Action::ReportPeer(peer_id, score_diff) => self.on_report_peer(peer_id, score_diff), Action::AddToPeersSet(sets_name, peer_id) => self.add_to_peers_set(sets_name, peer_id), Action::RemoveFromPeersSet(sets_name, peer_id) => @@ -760,9 +762,12 @@ pub enum DropReason { #[cfg(test)] mod tests { - use libp2p::PeerId; + use super::{ + IncomingIndex, Message, Peerset, PeersetConfig, ReputationChange, SetConfig, SetId, + BANNED_THRESHOLD, + }; use futures::prelude::*; - use super::{PeersetConfig, Peerset, Message, IncomingIndex, ReputationChange, SetConfig, SetId, BANNED_THRESHOLD}; + use 
libp2p::PeerId; use std::{pin::Pin, task::Poll, thread, time::Duration}; fn assert_messages(mut peerset: Peerset, messages: Vec) -> Peerset { @@ -799,10 +804,13 @@ mod tests { handle.add_reserved_peer(SetId::from(0), reserved_peer.clone()); handle.add_reserved_peer(SetId::from(0), reserved_peer2.clone()); - assert_messages(peerset, vec![ - Message::Connect { set_id: SetId::from(0), peer_id: reserved_peer }, - Message::Connect { set_id: SetId::from(0), peer_id: reserved_peer2 } - ]); + assert_messages( + peerset, + vec![ + Message::Connect { set_id: SetId::from(0), peer_id: reserved_peer }, + Message::Connect { set_id: SetId::from(0), peer_id: reserved_peer2 }, + ], + ); } #[test] @@ -831,12 +839,15 @@ mod tests { peerset.incoming(SetId::from(0), incoming2.clone(), ii2); peerset.incoming(SetId::from(0), incoming3.clone(), ii3); - assert_messages(peerset, vec![ - Message::Connect { set_id: SetId::from(0), peer_id: bootnode.clone() }, - Message::Accept(ii), - Message::Accept(ii2), - Message::Reject(ii3), - ]); + assert_messages( + peerset, + vec![ + Message::Connect { set_id: SetId::from(0), peer_id: bootnode.clone() }, + Message::Accept(ii), + Message::Accept(ii2), + Message::Reject(ii3), + ], + ); } #[test] @@ -856,9 +867,7 @@ mod tests { let (mut peerset, _) = Peerset::from_config(config); peerset.incoming(SetId::from(0), incoming.clone(), ii); - assert_messages(peerset, vec![ - Message::Reject(ii), - ]); + assert_messages(peerset, vec![Message::Reject(ii)]); } #[test] @@ -881,10 +890,13 @@ mod tests { peerset.add_to_peers_set(SetId::from(0), discovered.clone()); peerset.add_to_peers_set(SetId::from(0), discovered2); - assert_messages(peerset, vec![ - Message::Connect { set_id: SetId::from(0), peer_id: bootnode }, - Message::Connect { set_id: SetId::from(0), peer_id: discovered }, - ]); + assert_messages( + peerset, + vec![ + Message::Connect { set_id: SetId::from(0), peer_id: bootnode }, + Message::Connect { set_id: SetId::from(0), peer_id: discovered }, + ], + ); 
} #[test] diff --git a/client/peerset/src/peersstate.rs b/client/peerset/src/peersstate.rs index 9f54a7714fd05..b034158d7f9d1 100644 --- a/client/peerset/src/peersstate.rs +++ b/client/peerset/src/peersstate.rs @@ -32,7 +32,10 @@ use libp2p::PeerId; use log::error; use std::{ borrow::Cow, - collections::{HashMap, HashSet, hash_map::{Entry, OccupiedEntry}}, + collections::{ + hash_map::{Entry, OccupiedEntry}, + HashMap, HashSet, + }, }; use wasm_timer::Instant; @@ -104,10 +107,7 @@ struct Node { impl Node { fn new(num_sets: usize) -> Node { - Node { - sets: (0..num_sets).map(|_| MembershipState::NotMember).collect(), - reputation: 0, - } + Node { sets: (0..num_sets).map(|_| MembershipState::NotMember).collect(), reputation: 0 } } } @@ -192,18 +192,10 @@ impl PeersState { assert!(set < self.sets.len()); match self.nodes.get_mut(peer_id).map(|p| &p.sets[set]) { - None | Some(MembershipState::NotMember) => Peer::Unknown(UnknownPeer { - parent: self, - set, - peer_id: Cow::Borrowed(peer_id), - }), - Some(MembershipState::In) | Some(MembershipState::Out) => { - Peer::Connected(ConnectedPeer { - state: self, - set, - peer_id: Cow::Borrowed(peer_id), - }) - } + None | Some(MembershipState::NotMember) => + Peer::Unknown(UnknownPeer { parent: self, set, peer_id: Cow::Borrowed(peer_id) }), + Some(MembershipState::In) | Some(MembershipState::Out) => + Peer::Connected(ConnectedPeer { state: self, set, peer_id: Cow::Borrowed(peer_id) }), Some(MembershipState::NotConnected { .. }) => Peer::NotConnected(NotConnectedPeer { state: self, set, @@ -254,18 +246,16 @@ impl PeersState { let outcome = self .nodes .iter_mut() - .filter(|(_, Node { sets, .. })| { - match sets[set] { - MembershipState::NotMember => false, - MembershipState::In => false, - MembershipState::Out => false, - MembershipState::NotConnected { .. } => true, - } + .filter(|(_, Node { sets, .. 
})| match sets[set] { + MembershipState::NotMember => false, + MembershipState::In => false, + MembershipState::Out => false, + MembershipState::NotConnected { .. } => true, }) .fold(None::<(&PeerId, &mut Node)>, |mut cur_node, to_try| { if let Some(cur_node) = cur_node.take() { if cur_node.1.reputation >= to_try.1.reputation { - return Some(cur_node); + return Some(cur_node) } } Some(to_try) @@ -273,10 +263,10 @@ impl PeersState { .map(|(peer_id, _)| peer_id.clone()); outcome.map(move |peer_id| NotConnectedPeer { - state: self, - set, - peer_id: Cow::Owned(peer_id), - }) + state: self, + set, + peer_id: Cow::Owned(peer_id), + }) } /// Returns `true` if there is a free outgoing slot available related to this set. @@ -290,14 +280,14 @@ impl PeersState { pub fn add_no_slot_node(&mut self, set: usize, peer_id: PeerId) { // Reminder: `HashSet::insert` returns false if the node was already in the set if !self.sets[set].no_slot_nodes.insert(peer_id.clone()) { - return; + return } if let Some(peer) = self.nodes.get_mut(&peer_id) { match peer.sets[set] { MembershipState::In => self.sets[set].num_in -= 1, MembershipState::Out => self.sets[set].num_out -= 1, - MembershipState::NotConnected { .. } | MembershipState::NotMember => {} + MembershipState::NotConnected { .. } | MembershipState::NotMember => {}, } } } @@ -308,14 +298,14 @@ impl PeersState { pub fn remove_no_slot_node(&mut self, set: usize, peer_id: &PeerId) { // Reminder: `HashSet::remove` returns false if the node was already not in the set if !self.sets[set].no_slot_nodes.remove(peer_id) { - return; + return } if let Some(peer) = self.nodes.get_mut(peer_id) { match peer.sets[set] { MembershipState::In => self.sets[set].num_in += 1, MembershipState::Out => self.sets[set].num_out += 1, - MembershipState::NotConnected { .. } | MembershipState::NotMember => {} + MembershipState::NotConnected { .. 
} | MembershipState::NotMember => {}, } } } @@ -396,24 +386,15 @@ impl<'a> ConnectedPeer<'a> { false, "State inconsistency: disconnecting a disconnected node" ) - } + }, } } - node.sets[self.set] = MembershipState::NotConnected { - last_connected: Instant::now(), - }; + node.sets[self.set] = MembershipState::NotConnected { last_connected: Instant::now() }; } else { - debug_assert!( - false, - "State inconsistency: disconnecting a disconnected node" - ); + debug_assert!(false, "State inconsistency: disconnecting a disconnected node"); } - NotConnectedPeer { - state: self.state, - set: self.set, - peer_id: self.peer_id, - } + NotConnectedPeer { state: self.state, set: self.set, peer_id: self.peer_id } } /// Performs an arithmetic addition on the reputation score of that peer. @@ -425,10 +406,7 @@ impl<'a> ConnectedPeer<'a> { if let Some(node) = self.state.nodes.get_mut(&*self.peer_id) { node.reputation = node.reputation.saturating_add(modifier); } else { - debug_assert!( - false, - "State inconsistency: add_reputation on an unknown node" - ); + debug_assert!(false, "State inconsistency: add_reputation on an unknown node"); } } @@ -436,10 +414,7 @@ impl<'a> ConnectedPeer<'a> { /// /// > **Note**: Reputation values aren't specific to a set but are global per peer. 
pub fn reputation(&self) -> i32 { - self.state - .nodes - .get(&*self.peer_id) - .map_or(0, |p| p.reputation) + self.state.nodes.get(&*self.peer_id).map_or(0, |p| p.reputation) } } @@ -483,8 +458,8 @@ impl<'a> NotConnectedPeer<'a> { "State inconsistency with {}; not connected after borrow", self.peer_id ); - return Instant::now(); - } + return Instant::now() + }, }; match state.sets[self.set] { @@ -492,7 +467,7 @@ impl<'a> NotConnectedPeer<'a> { _ => { error!(target: "peerset", "State inconsistency with {}", self.peer_id); Instant::now() - } + }, } } @@ -508,7 +483,7 @@ impl<'a> NotConnectedPeer<'a> { // Note that it is possible for num_out to be strictly superior to the max, in case we were // connected to reserved node then marked them as not reserved. if !self.state.has_free_outgoing_slot(self.set) && !is_no_slot_occupy { - return Err(self); + return Err(self) } if let Some(peer) = self.state.nodes.get_mut(&*self.peer_id) { @@ -517,17 +492,10 @@ impl<'a> NotConnectedPeer<'a> { self.state.sets[self.set].num_out += 1; } } else { - debug_assert!( - false, - "State inconsistency: try_outgoing on an unknown node" - ); + debug_assert!(false, "State inconsistency: try_outgoing on an unknown node"); } - Ok(ConnectedPeer { - state: self.state, - set: self.set, - peer_id: self.peer_id, - }) + Ok(ConnectedPeer { state: self.state, set: self.set, peer_id: self.peer_id }) } /// Tries to accept the peer as an incoming connection. @@ -541,10 +509,10 @@ impl<'a> NotConnectedPeer<'a> { // Note that it is possible for num_in to be strictly superior to the max, in case we were // connected to reserved node then marked them as not reserved. 
- if self.state.sets[self.set].num_in >= self.state.sets[self.set].max_in - && !is_no_slot_occupy + if self.state.sets[self.set].num_in >= self.state.sets[self.set].max_in && + !is_no_slot_occupy { - return Err(self); + return Err(self) } if let Some(peer) = self.state.nodes.get_mut(&*self.peer_id) { @@ -553,27 +521,17 @@ impl<'a> NotConnectedPeer<'a> { self.state.sets[self.set].num_in += 1; } } else { - debug_assert!( - false, - "State inconsistency: try_accept_incoming on an unknown node" - ); + debug_assert!(false, "State inconsistency: try_accept_incoming on an unknown node"); } - Ok(ConnectedPeer { - state: self.state, - set: self.set, - peer_id: self.peer_id, - }) + Ok(ConnectedPeer { state: self.state, set: self.set, peer_id: self.peer_id }) } /// Returns the reputation value of the node. /// /// > **Note**: Reputation values aren't specific to a set but are global per peer. pub fn reputation(&self) -> i32 { - self.state - .nodes - .get(&*self.peer_id) - .map_or(0, |p| p.reputation) + self.state.nodes.get(&*self.peer_id).map_or(0, |p| p.reputation) } /// Sets the reputation of the peer. @@ -584,10 +542,7 @@ impl<'a> NotConnectedPeer<'a> { if let Some(node) = self.state.nodes.get_mut(&*self.peer_id) { node.reputation = value; } else { - debug_assert!( - false, - "State inconsistency: set_reputation on an unknown node" - ); + debug_assert!(false, "State inconsistency: set_reputation on an unknown node"); } } @@ -598,10 +553,8 @@ impl<'a> NotConnectedPeer<'a> { peer.sets[self.set] = MembershipState::NotMember; // Remove the peer from `self.state.nodes` entirely if it isn't a member of any set. 
- if peer.reputation == 0 && peer - .sets - .iter() - .all(|set| matches!(set, MembershipState::NotMember)) + if peer.reputation == 0 && + peer.sets.iter().all(|set| matches!(set, MembershipState::NotMember)) { self.state.nodes.remove(&*self.peer_id); } @@ -614,11 +567,7 @@ impl<'a> NotConnectedPeer<'a> { ); }; - UnknownPeer { - parent: self.state, - set: self.set, - peer_id: self.peer_id, - } + UnknownPeer { parent: self.state, set: self.set, peer_id: self.peer_id } } } @@ -641,15 +590,9 @@ impl<'a> UnknownPeer<'a> { .nodes .entry(self.peer_id.clone().into_owned()) .or_insert_with(|| Node::new(num_sets)) - .sets[self.set] = MembershipState::NotConnected { - last_connected: Instant::now(), - }; + .sets[self.set] = MembershipState::NotConnected { last_connected: Instant::now() }; - NotConnectedPeer { - state: self.parent, - set: self.set, - peer_id: self.peer_id, - } + NotConnectedPeer { state: self.parent, set: self.set, peer_id: self.peer_id } } } @@ -699,10 +642,7 @@ mod tests { #[test] fn full_slots_in() { - let mut peers_state = PeersState::new(iter::once(SetConfig { - in_peers: 1, - out_peers: 1, - })); + let mut peers_state = PeersState::new(iter::once(SetConfig { in_peers: 1, out_peers: 1 })); let id1 = PeerId::random(); let id2 = PeerId::random(); @@ -717,10 +657,7 @@ mod tests { #[test] fn no_slot_node_doesnt_use_slot() { - let mut peers_state = PeersState::new(iter::once(SetConfig { - in_peers: 1, - out_peers: 1, - })); + let mut peers_state = PeersState::new(iter::once(SetConfig { in_peers: 1, out_peers: 1 })); let id1 = PeerId::random(); let id2 = PeerId::random(); @@ -740,10 +677,7 @@ mod tests { #[test] fn disconnecting_frees_slot() { - let mut peers_state = PeersState::new(iter::once(SetConfig { - in_peers: 1, - out_peers: 1, - })); + let mut peers_state = PeersState::new(iter::once(SetConfig { in_peers: 1, out_peers: 1 })); let id1 = PeerId::random(); let id2 = PeerId::random(); @@ -761,11 +695,7 @@ mod tests { .discover() .try_accept_incoming() 
.is_err()); - peers_state - .peer(0, &id1) - .into_connected() - .unwrap() - .disconnect(); + peers_state.peer(0, &id1).into_connected().unwrap().disconnect(); assert!(peers_state .peer(0, &id2) .into_not_connected() @@ -776,41 +706,21 @@ mod tests { #[test] fn highest_not_connected_peer() { - let mut peers_state = PeersState::new(iter::once(SetConfig { - in_peers: 25, - out_peers: 25, - })); + let mut peers_state = + PeersState::new(iter::once(SetConfig { in_peers: 25, out_peers: 25 })); let id1 = PeerId::random(); let id2 = PeerId::random(); assert!(peers_state.highest_not_connected_peer(0).is_none()); - peers_state - .peer(0, &id1) - .into_unknown() - .unwrap() - .discover() - .set_reputation(50); - peers_state - .peer(0, &id2) - .into_unknown() - .unwrap() - .discover() - .set_reputation(25); + peers_state.peer(0, &id1).into_unknown().unwrap().discover().set_reputation(50); + peers_state.peer(0, &id2).into_unknown().unwrap().discover().set_reputation(25); assert_eq!( - peers_state - .highest_not_connected_peer(0) - .map(|p| p.into_peer_id()), + peers_state.highest_not_connected_peer(0).map(|p| p.into_peer_id()), Some(id1.clone()) ); - peers_state - .peer(0, &id2) - .into_not_connected() - .unwrap() - .set_reputation(75); + peers_state.peer(0, &id2).into_not_connected().unwrap().set_reputation(75); assert_eq!( - peers_state - .highest_not_connected_peer(0) - .map(|p| p.into_peer_id()), + peers_state.highest_not_connected_peer(0).map(|p| p.into_peer_id()), Some(id2.clone()) ); peers_state @@ -820,46 +730,25 @@ mod tests { .try_accept_incoming() .unwrap(); assert_eq!( - peers_state - .highest_not_connected_peer(0) - .map(|p| p.into_peer_id()), + peers_state.highest_not_connected_peer(0).map(|p| p.into_peer_id()), Some(id1.clone()) ); - peers_state - .peer(0, &id1) - .into_not_connected() - .unwrap() - .set_reputation(100); - peers_state - .peer(0, &id2) - .into_connected() - .unwrap() - .disconnect(); + peers_state.peer(0, 
&id1).into_not_connected().unwrap().set_reputation(100); + peers_state.peer(0, &id2).into_connected().unwrap().disconnect(); assert_eq!( - peers_state - .highest_not_connected_peer(0) - .map(|p| p.into_peer_id()), + peers_state.highest_not_connected_peer(0).map(|p| p.into_peer_id()), Some(id1.clone()) ); - peers_state - .peer(0, &id1) - .into_not_connected() - .unwrap() - .set_reputation(-100); + peers_state.peer(0, &id1).into_not_connected().unwrap().set_reputation(-100); assert_eq!( - peers_state - .highest_not_connected_peer(0) - .map(|p| p.into_peer_id()), + peers_state.highest_not_connected_peer(0).map(|p| p.into_peer_id()), Some(id2.clone()) ); } #[test] fn disconnect_no_slot_doesnt_panic() { - let mut peers_state = PeersState::new(iter::once(SetConfig { - in_peers: 1, - out_peers: 1, - })); + let mut peers_state = PeersState::new(iter::once(SetConfig { in_peers: 1, out_peers: 1 })); let id = PeerId::random(); peers_state.add_no_slot_node(0, id.clone()); let peer = peers_state diff --git a/client/peerset/tests/fuzz.rs b/client/peerset/tests/fuzz.rs index 96d1a48683f18..3a9ba686ee95c 100644 --- a/client/peerset/tests/fuzz.rs +++ b/client/peerset/tests/fuzz.rs @@ -18,10 +18,18 @@ use futures::prelude::*; use libp2p::PeerId; -use rand::distributions::{Distribution, Uniform, WeightedIndex}; -use rand::seq::IteratorRandom; -use sc_peerset::{DropReason, IncomingIndex, Message, Peerset, PeersetConfig, ReputationChange, SetConfig, SetId}; -use std::{collections::HashMap, collections::HashSet, pin::Pin, task::Poll}; +use rand::{ + distributions::{Distribution, Uniform, WeightedIndex}, + seq::IteratorRandom, +}; +use sc_peerset::{ + DropReason, IncomingIndex, Message, Peerset, PeersetConfig, ReputationChange, SetConfig, SetId, +}; +use std::{ + collections::{HashMap, HashSet}, + pin::Pin, + task::Poll, +}; #[test] fn run() { @@ -40,30 +48,28 @@ fn test_once() { let mut reserved_nodes = HashSet::::new(); let (mut peerset, peerset_handle) = 
Peerset::from_config(PeersetConfig { - sets: vec![ - SetConfig { - bootnodes: (0..Uniform::new_inclusive(0, 4).sample(&mut rng)) + sets: vec![SetConfig { + bootnodes: (0..Uniform::new_inclusive(0, 4).sample(&mut rng)) + .map(|_| { + let id = PeerId::random(); + known_nodes.insert(id.clone()); + id + }) + .collect(), + reserved_nodes: { + (0..Uniform::new_inclusive(0, 2).sample(&mut rng)) .map(|_| { let id = PeerId::random(); known_nodes.insert(id.clone()); + reserved_nodes.insert(id.clone()); id }) - .collect(), - reserved_nodes: { - (0..Uniform::new_inclusive(0, 2).sample(&mut rng)) - .map(|_| { - let id = PeerId::random(); - known_nodes.insert(id.clone()); - reserved_nodes.insert(id.clone()); - id - }) - .collect() - }, - in_peers: Uniform::new_inclusive(0, 25).sample(&mut rng), - out_peers: Uniform::new_inclusive(0, 25).sample(&mut rng), - reserved_only: Uniform::new_inclusive(0, 10).sample(&mut rng) == 0, + .collect() }, - ], + in_peers: Uniform::new_inclusive(0, 25).sample(&mut rng), + out_peers: Uniform::new_inclusive(0, 25).sample(&mut rng), + reserved_only: Uniform::new_inclusive(0, 10).sample(&mut rng) == 0, + }], }); futures::executor::block_on(futures::future::poll_fn(move |cx| { @@ -81,33 +87,28 @@ fn test_once() { for _ in 0..2500 { // Each of these weights corresponds to an action that we may perform. let action_weights = [150, 90, 90, 30, 30, 1, 1, 4, 4]; - match WeightedIndex::new(&action_weights) - .unwrap() - .sample(&mut rng) - { + match WeightedIndex::new(&action_weights).unwrap().sample(&mut rng) { // If we generate 0, poll the peerset. 0 => match Stream::poll_next(Pin::new(&mut peerset), cx) { Poll::Ready(Some(Message::Connect { peer_id, .. 
})) => { - if let Some(id) = incoming_nodes - .iter() - .find(|(_, v)| **v == peer_id) - .map(|(&id, _)| id) + if let Some(id) = + incoming_nodes.iter().find(|(_, v)| **v == peer_id).map(|(&id, _)| id) { incoming_nodes.remove(&id); } assert!(connected_nodes.insert(peer_id)); - } + }, Poll::Ready(Some(Message::Drop { peer_id, .. })) => { connected_nodes.remove(&peer_id); - } + }, Poll::Ready(Some(Message::Accept(n))) => { assert!(connected_nodes.insert(incoming_nodes.remove(&n).unwrap())) - } + }, Poll::Ready(Some(Message::Reject(n))) => { assert!(!connected_nodes.contains(&incoming_nodes.remove(&n).unwrap())) - } + }, Poll::Ready(None) => panic!(), - Poll::Pending => {} + Poll::Pending => {}, }, // If we generate 1, discover a new node. @@ -115,32 +116,29 @@ fn test_once() { let new_id = PeerId::random(); known_nodes.insert(new_id.clone()); peerset.add_to_peers_set(SetId::from(0), new_id); - } + }, // If we generate 2, adjust a random reputation. - 2 => { + 2 => if let Some(id) = known_nodes.iter().choose(&mut rng) { - let val = Uniform::new_inclusive(i32::MIN, i32::MAX) - .sample(&mut rng); + let val = Uniform::new_inclusive(i32::MIN, i32::MAX).sample(&mut rng); peerset_handle.report_peer(id.clone(), ReputationChange::new(val, "")); - } - } + }, // If we generate 3, disconnect from a random node. - 3 => { + 3 => if let Some(id) = connected_nodes.iter().choose(&mut rng).cloned() { connected_nodes.remove(&id); peerset.dropped(SetId::from(0), id, DropReason::Unknown); - } - } + }, // If we generate 4, connect to a random node. 4 => { if let Some(id) = known_nodes .iter() .filter(|n| { - incoming_nodes.values().all(|m| m != *n) - && !connected_nodes.contains(*n) + incoming_nodes.values().all(|m| m != *n) && + !connected_nodes.contains(*n) }) .choose(&mut rng) { @@ -148,7 +146,7 @@ fn test_once() { incoming_nodes.insert(next_incoming_id.clone(), id.clone()); next_incoming_id.0 += 1; } - } + }, // 5 and 6 are the reserved-only mode. 
5 => peerset_handle.set_reserved_only(SetId::from(0), true), @@ -156,21 +154,18 @@ fn test_once() { // 7 and 8 are about switching a random node in or out of reserved mode. 7 => { - if let Some(id) = known_nodes - .iter() - .filter(|n| !reserved_nodes.contains(*n)) - .choose(&mut rng) + if let Some(id) = + known_nodes.iter().filter(|n| !reserved_nodes.contains(*n)).choose(&mut rng) { peerset_handle.add_reserved_peer(SetId::from(0), id.clone()); reserved_nodes.insert(id.clone()); } - } - 8 => { + }, + 8 => if let Some(id) = reserved_nodes.iter().choose(&mut rng).cloned() { reserved_nodes.remove(&id); peerset_handle.remove_reserved_peer(SetId::from(0), id); - } - } + }, _ => unreachable!(), } diff --git a/client/proposer-metrics/src/lib.rs b/client/proposer-metrics/src/lib.rs index 8fec9779de472..da29fb2951995 100644 --- a/client/proposer-metrics/src/lib.rs +++ b/client/proposer-metrics/src/lib.rs @@ -18,7 +18,9 @@ //! Prometheus basic proposer metrics. -use prometheus_endpoint::{register, PrometheusError, Registry, Histogram, HistogramOpts, Gauge, U64}; +use prometheus_endpoint::{ + register, Gauge, Histogram, HistogramOpts, PrometheusError, Registry, U64, +}; /// Optional shareable link to basic authorship metrics. 
#[derive(Clone, Default)] @@ -26,13 +28,13 @@ pub struct MetricsLink(Option); impl MetricsLink { pub fn new(registry: Option<&Registry>) -> Self { - Self( - registry.and_then(|registry| - Metrics::register(registry) - .map_err(|err| log::warn!("Failed to register proposer prometheus metrics: {}", err)) - .ok() - ) - ) + Self(registry.and_then(|registry| { + Metrics::register(registry) + .map_err(|err| { + log::warn!("Failed to register proposer prometheus metrics: {}", err) + }) + .ok() + })) } pub fn report(&self, do_this: impl FnOnce(&Metrics) -> O) -> Option { diff --git a/client/rpc-api/src/author/error.rs b/client/rpc-api/src/author/error.rs index 009a0a290d6ba..0c963d4e4c259 100644 --- a/client/rpc-api/src/author/error.rs +++ b/client/rpc-api/src/author/error.rs @@ -32,33 +32,33 @@ pub type FutureResult = Box #[derive(Debug, derive_more::Display, derive_more::From)] pub enum Error { /// Client error. - #[display(fmt="Client error: {}", _0)] + #[display(fmt = "Client error: {}", _0)] #[from(ignore)] Client(Box), /// Transaction pool error, - #[display(fmt="Transaction pool error: {}", _0)] + #[display(fmt = "Transaction pool error: {}", _0)] Pool(sc_transaction_pool_api::error::Error), /// Verification error - #[display(fmt="Extrinsic verification error: {}", _0)] + #[display(fmt = "Extrinsic verification error: {}", _0)] #[from(ignore)] Verification(Box), /// Incorrect extrinsic format. - #[display(fmt="Invalid extrinsic format: {}", _0)] + #[display(fmt = "Invalid extrinsic format: {}", _0)] BadFormat(codec::Error), /// Incorrect seed phrase. - #[display(fmt="Invalid seed phrase/SURI")] + #[display(fmt = "Invalid seed phrase/SURI")] BadSeedPhrase, /// Key type ID has an unknown format. - #[display(fmt="Invalid key type ID format (should be of length four)")] + #[display(fmt = "Invalid key type ID format (should be of length four)")] BadKeyType, /// Key type ID has some unsupported crypto. 
- #[display(fmt="The crypto of key type ID is unknown")] + #[display(fmt = "The crypto of key type ID is unknown")] UnsupportedKeyType, /// Some random issue with the key store. Shouldn't happen. - #[display(fmt="The key store is unavailable")] + #[display(fmt = "The key store is unavailable")] KeyStoreUnavailable, /// Invalid session keys encoding. - #[display(fmt="Session keys are not encoded correctly")] + #[display(fmt = "Session keys are not encoded correctly")] InvalidSessionKeys, /// Call to an unsafe RPC was denied. UnsafeRpcCalled(crate::policy::UnsafeRpcError), @@ -105,7 +105,7 @@ const POOL_UNACTIONABLE: i64 = POOL_INVALID_TX + 8; impl From for rpc::Error { fn from(e: Error) -> Self { - use sc_transaction_pool_api::error::{Error as PoolError}; + use sc_transaction_pool_api::error::Error as PoolError; match e { Error::BadFormat(e) => rpc::Error { diff --git a/client/rpc-api/src/author/hash.rs b/client/rpc-api/src/author/hash.rs index 618159a8ad4d5..c4acfb819ddbb 100644 --- a/client/rpc-api/src/author/hash.rs +++ b/client/rpc-api/src/author/hash.rs @@ -18,8 +18,8 @@ //! Extrinsic helpers for author RPC module. +use serde::{Deserialize, Serialize}; use sp_core::Bytes; -use serde::{Serialize, Deserialize}; /// RPC Extrinsic or hash /// diff --git a/client/rpc-api/src/author/mod.rs b/client/rpc-api/src/author/mod.rs index 70da73ee8a00e..dbf729ea18adc 100644 --- a/client/rpc-api/src/author/mod.rs +++ b/client/rpc-api/src/author/mod.rs @@ -21,11 +21,11 @@ pub mod error; pub mod hash; +use self::error::{FutureResult, Result}; use jsonrpc_derive::rpc; use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId}; -use sp_core::Bytes; use sc_transaction_pool_api::TransactionStatus; -use self::error::{FutureResult, Result}; +use sp_core::Bytes; pub use self::gen_client::Client as AuthorClient; @@ -41,12 +41,7 @@ pub trait AuthorApi { /// Insert a key into the keystore. 
#[rpc(name = "author_insertKey")] - fn insert_key( - &self, - key_type: String, - suri: String, - public: Bytes, - ) -> Result<()>; + fn insert_key(&self, key_type: String, suri: String, public: Bytes) -> Result<()>; /// Generate new session keys and returns the corresponding public keys. #[rpc(name = "author_rotateKeys")] @@ -72,8 +67,9 @@ pub trait AuthorApi { /// Remove given extrinsic from the pool and temporarily ban it to prevent reimporting. #[rpc(name = "author_removeExtrinsic")] - fn remove_extrinsic(&self, - bytes_or_hash: Vec> + fn remove_extrinsic( + &self, + bytes_or_hash: Vec>, ) -> Result>; /// Submit an extrinsic to watch. @@ -85,10 +81,11 @@ pub trait AuthorApi { subscribe, name = "author_submitAndWatchExtrinsic" )] - fn watch_extrinsic(&self, + fn watch_extrinsic( + &self, metadata: Self::Metadata, subscriber: Subscriber>, - bytes: Bytes + bytes: Bytes, ); /// Unsubscribe from extrinsic watching. @@ -97,8 +94,9 @@ pub trait AuthorApi { unsubscribe, name = "author_unwatchExtrinsic" )] - fn unwatch_extrinsic(&self, + fn unwatch_extrinsic( + &self, metadata: Option, - id: SubscriptionId + id: SubscriptionId, ) -> Result; } diff --git a/client/rpc-api/src/chain/error.rs b/client/rpc-api/src/chain/error.rs index 59a0c0a2f840f..9bedd328d0015 100644 --- a/client/rpc-api/src/chain/error.rs +++ b/client/rpc-api/src/chain/error.rs @@ -31,7 +31,7 @@ pub type FutureResult = Box #[derive(Debug, derive_more::Display, derive_more::From)] pub enum Error { /// Client error. - #[display(fmt="Client error: {}", _0)] + #[display(fmt = "Client error: {}", _0)] Client(Box), /// Other error type. 
Other(String), diff --git a/client/rpc-api/src/chain/mod.rs b/client/rpc-api/src/chain/mod.rs index 5e2d484413047..79ae80d0c4d1d 100644 --- a/client/rpc-api/src/chain/mod.rs +++ b/client/rpc-api/src/chain/mod.rs @@ -20,11 +20,11 @@ pub mod error; +use self::error::{FutureResult, Result}; use jsonrpc_core::Result as RpcResult; use jsonrpc_derive::rpc; use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId}; -use sp_rpc::{number::NumberOrHex, list::ListOrValue}; -use self::error::{FutureResult, Result}; +use sp_rpc::{list::ListOrValue, number::NumberOrHex}; pub use self::gen_client::Client as ChainClient; diff --git a/client/rpc-api/src/child_state/mod.rs b/client/rpc-api/src/child_state/mod.rs index 99990017fd826..7abda0a63134d 100644 --- a/client/rpc-api/src/child_state/mod.rs +++ b/client/rpc-api/src/child_state/mod.rs @@ -18,9 +18,9 @@ //! Substrate state API. -use jsonrpc_derive::rpc; -use sp_core::storage::{StorageKey, PrefixedStorageKey, StorageData}; use crate::state::error::FutureResult; +use jsonrpc_derive::rpc; +use sp_core::storage::{PrefixedStorageKey, StorageData, StorageKey}; pub use self::gen_client::Client as ChildStateClient; use crate::state::ReadProof; @@ -41,7 +41,7 @@ pub trait ChildStateApi { &self, child_storage_key: PrefixedStorageKey, prefix: StorageKey, - hash: Option + hash: Option, ) -> FutureResult>; /// Returns the keys with prefix from a child storage with pagination support. @@ -63,7 +63,7 @@ pub trait ChildStateApi { &self, child_storage_key: PrefixedStorageKey, key: StorageKey, - hash: Option + hash: Option, ) -> FutureResult>; /// Returns the hash of a child storage entry at a block's state. @@ -72,7 +72,7 @@ pub trait ChildStateApi { &self, child_storage_key: PrefixedStorageKey, key: StorageKey, - hash: Option + hash: Option, ) -> FutureResult>; /// Returns the size of a child storage entry at a block's state. 
@@ -81,7 +81,7 @@ pub trait ChildStateApi { &self, child_storage_key: PrefixedStorageKey, key: StorageKey, - hash: Option + hash: Option, ) -> FutureResult>; /// Returns proof of storage for child key entries at a specific block's state. diff --git a/client/rpc-api/src/helpers.rs b/client/rpc-api/src/helpers.rs index e85c26062b50d..bb37cfbbb780e 100644 --- a/client/rpc-api/src/helpers.rs +++ b/client/rpc-api/src/helpers.rs @@ -16,8 +16,8 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use jsonrpc_core::futures::prelude::*; use futures::{channel::oneshot, compat::Compat}; +use jsonrpc_core::futures::prelude::*; /// Wraps around `oneshot::Receiver` and adjusts the error type to produce an internal error if the /// sender gets dropped. diff --git a/client/rpc-api/src/lib.rs b/client/rpc-api/src/lib.rs index 814319add2a3e..488ae429c1f42 100644 --- a/client/rpc-api/src/lib.rs +++ b/client/rpc-api/src/lib.rs @@ -34,7 +34,7 @@ pub use policy::DenyUnsafe; pub mod author; pub mod chain; +pub mod child_state; pub mod offchain; pub mod state; -pub mod child_state; pub mod system; diff --git a/client/rpc-api/src/metadata.rs b/client/rpc-api/src/metadata.rs index efe090acc621e..bda7b8f7ba36b 100644 --- a/client/rpc-api/src/metadata.rs +++ b/client/rpc-api/src/metadata.rs @@ -20,7 +20,7 @@ use std::sync::Arc; use jsonrpc_core::futures::sync::mpsc; -use jsonrpc_pubsub::{Session, PubSubMetadata}; +use jsonrpc_pubsub::{PubSubMetadata, Session}; /// RPC Metadata. /// @@ -42,9 +42,7 @@ impl PubSubMetadata for Metadata { impl Metadata { /// Create new `Metadata` with session (Pub/Sub) support. pub fn new(transport: mpsc::Sender) -> Self { - Metadata { - session: Some(Arc::new(Session::new(transport))), - } + Metadata { session: Some(Arc::new(Session::new(transport))) } } /// Create new `Metadata` for tests. 
diff --git a/client/rpc-api/src/offchain/error.rs b/client/rpc-api/src/offchain/error.rs index f74d419e54424..f2567707bc5f2 100644 --- a/client/rpc-api/src/offchain/error.rs +++ b/client/rpc-api/src/offchain/error.rs @@ -27,7 +27,7 @@ pub type Result = std::result::Result; #[derive(Debug, derive_more::Display, derive_more::From)] pub enum Error { /// Unavailable storage kind error. - #[display(fmt="This storage kind is not available yet.")] + #[display(fmt = "This storage kind is not available yet.")] UnavailableStorageKind, /// Call to an unsafe RPC was denied. UnsafeRpcCalled(crate::policy::UnsafeRpcError), @@ -50,7 +50,7 @@ impl From for rpc::Error { match e { Error::UnavailableStorageKind => rpc::Error { code: rpc::ErrorCode::ServerError(BASE_ERROR + 1), - message: "This storage kind is not available yet" .into(), + message: "This storage kind is not available yet".into(), data: None, }, Error::UnsafeRpcCalled(e) => e.into(), diff --git a/client/rpc-api/src/offchain/mod.rs b/client/rpc-api/src/offchain/mod.rs index 7a1f6db9e80be..333892fc19c4c 100644 --- a/client/rpc-api/src/offchain/mod.rs +++ b/client/rpc-api/src/offchain/mod.rs @@ -20,9 +20,9 @@ pub mod error; -use jsonrpc_derive::rpc; use self::error::Result; -use sp_core::{Bytes, offchain::StorageKind}; +use jsonrpc_derive::rpc; +use sp_core::{offchain::StorageKind, Bytes}; pub use self::gen_client::Client as OffchainClient; diff --git a/client/rpc-api/src/state/error.rs b/client/rpc-api/src/state/error.rs index 4f2a2c854ae00..30437246e6ea7 100644 --- a/client/rpc-api/src/state/error.rs +++ b/client/rpc-api/src/state/error.rs @@ -31,7 +31,7 @@ pub type FutureResult = Box #[derive(Debug, derive_more::Display, derive_more::From)] pub enum Error { /// Client error. - #[display(fmt="Client error: {}", _0)] + #[display(fmt = "Client error: {}", _0)] Client(Box), /// Provided block range couldn't be resolved to a list of blocks. #[display(fmt = "Cannot resolve a block range ['{:?}' ... '{:?}]. 
{}", from, to, details)] diff --git a/client/rpc-api/src/state/helpers.rs b/client/rpc-api/src/state/helpers.rs index cb7bd380afa51..718ad69ac232c 100644 --- a/client/rpc-api/src/state/helpers.rs +++ b/client/rpc-api/src/state/helpers.rs @@ -18,8 +18,8 @@ //! Substrate state API helpers. +use serde::{Deserialize, Serialize}; use sp_core::Bytes; -use serde::{Serialize, Deserialize}; /// ReadProof struct returned by the RPC #[derive(Debug, PartialEq, Serialize, Deserialize)] diff --git a/client/rpc-api/src/state/mod.rs b/client/rpc-api/src/state/mod.rs index 0ebc553b41178..d54921c0d606b 100644 --- a/client/rpc-api/src/state/mod.rs +++ b/client/rpc-api/src/state/mod.rs @@ -21,16 +21,17 @@ pub mod error; pub mod helpers; +use self::error::FutureResult; use jsonrpc_core::Result as RpcResult; use jsonrpc_derive::rpc; use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId}; -use sp_core::Bytes; -use sp_core::storage::{StorageKey, StorageData, StorageChangeSet}; +use sp_core::{ + storage::{StorageChangeSet, StorageData, StorageKey}, + Bytes, +}; use sp_version::RuntimeVersion; -use self::error::FutureResult; -pub use self::gen_client::Client as StateClient; -pub use self::helpers::ReadProof; +pub use self::{gen_client::Client as StateClient, helpers::ReadProof}; /// Substrate state API #[rpc] @@ -45,11 +46,16 @@ pub trait StateApi { /// DEPRECATED: Please use `state_getKeysPaged` with proper paging support. /// Returns the keys with prefix, leave empty to get all the keys. #[rpc(name = "state_getKeys")] - fn storage_keys(&self, prefix: StorageKey, hash: Option) -> FutureResult>; + fn storage_keys(&self, prefix: StorageKey, hash: Option) + -> FutureResult>; /// Returns the keys with prefix, leave empty to get all the keys #[rpc(name = "state_getPairs")] - fn storage_pairs(&self, prefix: StorageKey, hash: Option) -> FutureResult>; + fn storage_pairs( + &self, + prefix: StorageKey, + hash: Option, + ) -> FutureResult>; /// Returns the keys with prefix with pagination support. 
/// Up to `count` keys will be returned. @@ -92,7 +98,7 @@ pub trait StateApi { &self, keys: Vec, block: Hash, - hash: Option + hash: Option, ) -> FutureResult>>; /// Query storage entries (by key) starting at block hash given as the second parameter. @@ -105,7 +111,11 @@ pub trait StateApi { /// Returns proof of storage entries at a specific block's state. #[rpc(name = "state_getReadProof")] - fn read_proof(&self, keys: Vec, hash: Option) -> FutureResult>; + fn read_proof( + &self, + keys: Vec, + hash: Option, + ) -> FutureResult>; /// New runtime version subscription #[pubsub( @@ -114,7 +124,11 @@ pub trait StateApi { name = "state_subscribeRuntimeVersion", alias("chain_subscribeRuntimeVersion") )] - fn subscribe_runtime_version(&self, metadata: Self::Metadata, subscriber: Subscriber); + fn subscribe_runtime_version( + &self, + metadata: Self::Metadata, + subscriber: Subscriber, + ); /// Unsubscribe from runtime version subscription #[pubsub( @@ -123,18 +137,27 @@ pub trait StateApi { name = "state_unsubscribeRuntimeVersion", alias("chain_unsubscribeRuntimeVersion") )] - fn unsubscribe_runtime_version(&self, metadata: Option, id: SubscriptionId) -> RpcResult; + fn unsubscribe_runtime_version( + &self, + metadata: Option, + id: SubscriptionId, + ) -> RpcResult; /// New storage subscription #[pubsub(subscription = "state_storage", subscribe, name = "state_subscribeStorage")] fn subscribe_storage( - &self, metadata: Self::Metadata, subscriber: Subscriber>, keys: Option> + &self, + metadata: Self::Metadata, + subscriber: Subscriber>, + keys: Option>, ); /// Unsubscribe from storage subscription #[pubsub(subscription = "state_storage", unsubscribe, name = "state_unsubscribeStorage")] fn unsubscribe_storage( - &self, metadata: Option, id: SubscriptionId + &self, + metadata: Option, + id: SubscriptionId, ) -> RpcResult; /// The `state_traceBlock` RPC provides a way to trace the re-execution of a single diff --git a/client/rpc-api/src/system/error.rs 
b/client/rpc-api/src/system/error.rs index a0dfd863ce3aa..b16a7abb6ea52 100644 --- a/client/rpc-api/src/system/error.rs +++ b/client/rpc-api/src/system/error.rs @@ -48,10 +48,10 @@ impl From for rpc::Error { data: serde_json::to_value(h).ok(), }, Error::MalformattedPeerArg(ref e) => rpc::Error { - code :rpc::ErrorCode::ServerError(BASE_ERROR + 2), + code: rpc::ErrorCode::ServerError(BASE_ERROR + 2), message: e.clone(), data: None, - } + }, } } } diff --git a/client/rpc-api/src/system/helpers.rs b/client/rpc-api/src/system/helpers.rs index c8124d9c67526..96e8aeb1ae3d7 100644 --- a/client/rpc-api/src/system/helpers.rs +++ b/client/rpc-api/src/system/helpers.rs @@ -18,9 +18,9 @@ //! Substrate system API helpers. +use sc_chain_spec::{ChainType, Properties}; +use serde::{Deserialize, Serialize}; use std::fmt; -use serde::{Serialize, Deserialize}; -use sc_chain_spec::{Properties, ChainType}; /// Running node's static details. #[derive(Clone, Debug)] @@ -53,9 +53,7 @@ pub struct Health { impl fmt::Display for Health { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "{} peers ({})", self.peers, if self.is_syncing { - "syncing" - } else { "idle" }) + write!(fmt, "{} peers ({})", self.peers, if self.is_syncing { "syncing" } else { "idle" }) } } @@ -107,7 +105,8 @@ mod tests { peers: 1, is_syncing: false, should_have_peers: true, - }).unwrap(), + }) + .unwrap(), r#"{"peers":1,"isSyncing":false,"shouldHavePeers":true}"#, ); } @@ -120,7 +119,8 @@ mod tests { roles: "a".into(), best_hash: 5u32, best_number: 6u32, - }).unwrap(), + }) + .unwrap(), r#"{"peerId":"2","roles":"a","bestHash":5,"bestNumber":6}"#, ); } @@ -132,7 +132,8 @@ mod tests { starting_block: 12u32, current_block: 50u32, highest_block: Some(128u32), - }).unwrap(), + }) + .unwrap(), r#"{"startingBlock":12,"currentBlock":50,"highestBlock":128}"#, ); @@ -141,7 +142,8 @@ mod tests { starting_block: 12u32, current_block: 50u32, highest_block: None, - }).unwrap(), + }) + .unwrap(), 
r#"{"startingBlock":12,"currentBlock":50}"#, ); } diff --git a/client/rpc-api/src/system/mod.rs b/client/rpc-api/src/system/mod.rs index e820fb2e702e3..2f9ed45cd2e25 100644 --- a/client/rpc-api/src/system/mod.rs +++ b/client/rpc-api/src/system/mod.rs @@ -22,13 +22,15 @@ pub mod error; pub mod helpers; use crate::helpers::Receiver; +use futures::{compat::Compat, future::BoxFuture}; use jsonrpc_derive::rpc; -use futures::{future::BoxFuture, compat::Compat}; use self::error::Result as SystemResult; -pub use self::helpers::{SystemInfo, Health, PeerInfo, NodeRole, SyncState}; -pub use self::gen_client::Client as SystemClient; +pub use self::{ + gen_client::Client as SystemClient, + helpers::{Health, NodeRole, PeerInfo, SyncState, SystemInfo}, +}; /// Substrate system RPC API #[rpc] @@ -74,8 +76,9 @@ pub trait SystemApi { /// Returns currently connected peers #[rpc(name = "system_peers", returns = "Vec>")] - fn system_peers(&self) - -> Compat>>>>; + fn system_peers( + &self, + ) -> Compat>>>>; /// Returns current state of the network. /// @@ -84,8 +87,9 @@ pub trait SystemApi { // TODO: the future of this call is uncertain: https://github.com/paritytech/substrate/issues/1890 // https://github.com/paritytech/substrate/issues/5541 #[rpc(name = "system_unstable_networkState", returns = "jsonrpc_core::Value")] - fn system_network_state(&self) - -> Compat>>; + fn system_network_state( + &self, + ) -> Compat>>; /// Adds a reserved peer. Returns the empty string or an error. The string /// parameter should encode a `p2p` multiaddr. @@ -93,14 +97,18 @@ pub trait SystemApi { /// `/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV` /// is an example of a valid, passing multiaddr with PeerId attached. #[rpc(name = "system_addReservedPeer", returns = "()")] - fn system_add_reserved_peer(&self, peer: String) - -> Compat>>; + fn system_add_reserved_peer( + &self, + peer: String, + ) -> Compat>>; /// Remove a reserved peer. 
Returns the empty string or an error. The string /// should encode only the PeerId e.g. `QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV`. #[rpc(name = "system_removeReservedPeer", returns = "()")] - fn system_remove_reserved_peer(&self, peer_id: String) - -> Compat>>; + fn system_remove_reserved_peer( + &self, + peer_id: String, + ) -> Compat>>; /// Returns the list of reserved peers #[rpc(name = "system_reservedPeers", returns = "Vec")] @@ -121,11 +129,9 @@ pub trait SystemApi { /// /// `sync=debug,state=trace` #[rpc(name = "system_addLogFilter", returns = "()")] - fn system_add_log_filter(&self, directives: String) - -> Result<(), jsonrpc_core::Error>; + fn system_add_log_filter(&self, directives: String) -> Result<(), jsonrpc_core::Error>; /// Resets the log filter to Substrate defaults #[rpc(name = "system_resetLogFilter", returns = "()")] - fn system_reset_log_filter(&self) - -> Result<(), jsonrpc_core::Error>; + fn system_reset_log_filter(&self) -> Result<(), jsonrpc_core::Error>; } diff --git a/client/rpc-servers/src/lib.rs b/client/rpc-servers/src/lib.rs index c93451e5cc678..7f14cee39f20f 100644 --- a/client/rpc-servers/src/lib.rs +++ b/client/rpc-servers/src/lib.rs @@ -22,10 +22,10 @@ mod middleware; -use std::io; use jsonrpc_core::{IoHandlerExtension, MetaIoHandler}; use log::error; use pubsub::PubSubMetadata; +use std::io; const MEGABYTE: usize = 1024 * 1024; @@ -42,7 +42,7 @@ const HTTP_THREADS: usize = 4; pub type RpcHandler = pubsub::PubSubHandler; pub use self::inner::*; -pub use middleware::{RpcMiddleware, RpcMetrics}; +pub use middleware::{RpcMetrics, RpcMiddleware}; /// Construct rpc `IoHandler` pub fn rpc_handler( @@ -60,10 +60,12 @@ pub fn rpc_handler( let methods = serde_json::to_value(&methods) .expect("Serialization of Vec is infallible; qed"); - move |_| Ok(serde_json::json!({ - "version": 1, - "methods": methods.clone(), - })) + move |_| { + Ok(serde_json::json!({ + "version": 1, + "methods": methods.clone(), + })) + } }); io } @@ -89,17 
+91,14 @@ mod inner { io: RpcHandler, maybe_max_payload_mb: Option, ) -> io::Result { - let max_request_body_size = maybe_max_payload_mb.map(|mb| mb.saturating_mul(MEGABYTE)) + let max_request_body_size = maybe_max_payload_mb + .map(|mb| mb.saturating_mul(MEGABYTE)) .unwrap_or(RPC_MAX_PAYLOAD_DEFAULT); http::ServerBuilder::new(io) .threads(thread_pool_size.unwrap_or(HTTP_THREADS)) .health_api(("/health", "system_health")) .allowed_hosts(hosts_filtering(cors.is_some())) - .rest_api(if cors.is_some() { - http::RestApi::Secure - } else { - http::RestApi::Unsecure - }) + .rest_api(if cors.is_some() { http::RestApi::Secure } else { http::RestApi::Unsecure }) .cors(map_cors::(cors)) .max_request_body_size(max_request_body_size) .start_http(addr) @@ -134,28 +133,32 @@ mod inner { io: RpcHandler, maybe_max_payload_mb: Option, ) -> io::Result { - let rpc_max_payload = maybe_max_payload_mb.map(|mb| mb.saturating_mul(MEGABYTE)) + let rpc_max_payload = maybe_max_payload_mb + .map(|mb| mb.saturating_mul(MEGABYTE)) .unwrap_or(RPC_MAX_PAYLOAD_DEFAULT); - ws::ServerBuilder::with_meta_extractor(io, |context: &ws::RequestContext| context.sender().into()) - .max_payload(rpc_max_payload) - .max_connections(max_connections.unwrap_or(WS_MAX_CONNECTIONS)) - .allowed_origins(map_cors(cors)) - .allowed_hosts(hosts_filtering(cors.is_some())) - .start(addr) - .map_err(|err| match err { - ws::Error::Io(io) => io, - ws::Error::ConnectionClosed => io::ErrorKind::BrokenPipe.into(), - e => { - error!("{}", e); - io::ErrorKind::Other.into() - } - }) + ws::ServerBuilder::with_meta_extractor(io, |context: &ws::RequestContext| { + context.sender().into() + }) + .max_payload(rpc_max_payload) + .max_connections(max_connections.unwrap_or(WS_MAX_CONNECTIONS)) + .allowed_origins(map_cors(cors)) + .allowed_hosts(hosts_filtering(cors.is_some())) + .start(addr) + .map_err(|err| match err { + ws::Error::Io(io) => io, + ws::Error::ConnectionClosed => io::ErrorKind::BrokenPipe.into(), + e => { + error!("{}", 
e); + io::ErrorKind::Other.into() + }, + }) } fn map_cors From<&'a str>>( - cors: Option<&Vec> + cors: Option<&Vec>, ) -> http::DomainsValidation { - cors.map(|x| x.iter().map(AsRef::as_ref).map(Into::into).collect::>()).into() + cors.map(|x| x.iter().map(AsRef::as_ref).map(Into::into).collect::>()) + .into() } fn hosts_filtering(enable: bool) -> http::DomainsValidation { @@ -171,5 +174,4 @@ mod inner { } #[cfg(target_os = "unknown")] -mod inner { -} +mod inner {} diff --git a/client/rpc-servers/src/middleware.rs b/client/rpc-servers/src/middleware.rs index 2cbc61716c317..d87c653e2b250 100644 --- a/client/rpc-servers/src/middleware.rs +++ b/client/rpc-servers/src/middleware.rs @@ -19,13 +19,9 @@ //! Middleware for RPC requests. use jsonrpc_core::{ - Middleware as RequestMiddleware, Metadata, - Request, Response, FutureResponse, FutureOutput -}; -use prometheus_endpoint::{ - Registry, CounterVec, PrometheusError, - Opts, register, U64 + FutureOutput, FutureResponse, Metadata, Middleware as RequestMiddleware, Request, Response, }; +use prometheus_endpoint::{register, CounterVec, Opts, PrometheusError, Registry, U64}; use futures::{future::Either, Future}; @@ -39,18 +35,17 @@ impl RpcMetrics { /// Create an instance of metrics pub fn new(metrics_registry: Option<&Registry>) -> Result { Ok(Self { - rpc_calls: metrics_registry.map(|r| - register( - CounterVec::new( - Opts::new( - "rpc_calls_total", - "Number of rpc calls received", - ), - &["protocol"] - )?, - r, - ) - ).transpose()?, + rpc_calls: metrics_registry + .map(|r| { + register( + CounterVec::new( + Opts::new("rpc_calls_total", "Number of rpc calls received"), + &["protocol"], + )?, + r, + ) + }) + .transpose()?, }) } } @@ -67,10 +62,7 @@ impl RpcMiddleware { /// - `metrics`: Will be used to report statistics. /// - `transport_label`: The label that is used when reporting the statistics. 
pub fn new(metrics: RpcMetrics, transport_label: &str) -> Self { - RpcMiddleware { - metrics, - transport_label: String::from(transport_label), - } + RpcMiddleware { metrics, transport_label: String::from(transport_label) } } } diff --git a/client/rpc/src/author/mod.rs b/client/rpc/src/author/mod.rs index ed7899d524801..966959050c172 100644 --- a/client/rpc/src/author/mod.rs +++ b/client/rpc/src/author/mod.rs @@ -21,30 +21,33 @@ #[cfg(test)] mod tests; -use std::{sync::Arc, convert::TryInto}; use log::warn; +use std::{convert::TryInto, sync::Arc}; use sp_blockchain::HeaderBackend; -use rpc::futures::{Sink, Future, future::result}; -use futures::{StreamExt as _, compat::Compat}; -use futures::future::{ready, FutureExt, TryFutureExt}; +use codec::{Decode, Encode}; +use futures::{ + compat::Compat, + future::{ready, FutureExt, TryFutureExt}, + StreamExt as _, +}; +use jsonrpc_pubsub::{manager::SubscriptionManager, typed::Subscriber, SubscriptionId}; +use rpc::futures::{future::result, Future, Sink}; use sc_rpc_api::DenyUnsafe; -use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId, manager::SubscriptionManager}; -use codec::{Encode, Decode}; -use sp_core::Bytes; -use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; -use sp_api::ProvideRuntimeApi; -use sp_runtime::generic; use sc_transaction_pool_api::{ - TransactionPool, InPoolTransaction, TransactionStatus, TransactionSource, - BlockHash, TxHash, TransactionFor, error::IntoPoolError, + error::IntoPoolError, BlockHash, InPoolTransaction, TransactionFor, TransactionPool, + TransactionSource, TransactionStatus, TxHash, }; +use sp_api::ProvideRuntimeApi; +use sp_core::Bytes; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; +use sp_runtime::generic; use sp_session::SessionKeys; +use self::error::{Error, FutureResult, Result}; /// Re-export the API for backward compatibility. 
pub use sc_rpc_api::author::*; -use self::error::{Error, FutureResult, Result}; /// Authoring API pub struct Author { @@ -69,13 +72,7 @@ impl Author { keystore: SyncCryptoStorePtr, deny_unsafe: DenyUnsafe, ) -> Self { - Author { - client, - pool, - subscriptions, - keystore, - deny_unsafe, - } + Author { client, pool, subscriptions, keystore, deny_unsafe } } } @@ -87,19 +84,14 @@ impl Author { const TX_SOURCE: TransactionSource = TransactionSource::External; impl AuthorApi, BlockHash

> for Author - where - P: TransactionPool + Sync + Send + 'static, - Client: HeaderBackend + ProvideRuntimeApi + Send + Sync + 'static, - Client::Api: SessionKeys, +where + P: TransactionPool + Sync + Send + 'static, + Client: HeaderBackend + ProvideRuntimeApi + Send + Sync + 'static, + Client::Api: SessionKeys, { type Metadata = crate::Metadata; - fn insert_key( - &self, - key_type: String, - suri: String, - public: Bytes, - ) -> Result<()> { + fn insert_key(&self, key_type: String, suri: String, public: Bytes) -> Result<()> { self.deny_unsafe.check_if_safe()?; let key_type = key_type.as_str().try_into().map_err(|_| Error::BadKeyType)?; @@ -112,20 +104,22 @@ impl AuthorApi, BlockHash

> for Author self.deny_unsafe.check_if_safe()?; let best_block_hash = self.client.info().best_hash; - self.client.runtime_api().generate_session_keys( - &generic::BlockId::Hash(best_block_hash), - None, - ).map(Into::into).map_err(|e| Error::Client(Box::new(e))) + self.client + .runtime_api() + .generate_session_keys(&generic::BlockId::Hash(best_block_hash), None) + .map(Into::into) + .map_err(|e| Error::Client(Box::new(e))) } fn has_session_keys(&self, session_keys: Bytes) -> Result { self.deny_unsafe.check_if_safe()?; let best_block_hash = self.client.info().best_hash; - let keys = self.client.runtime_api().decode_session_keys( - &generic::BlockId::Hash(best_block_hash), - session_keys.to_vec(), - ).map_err(|e| Error::Client(Box::new(e)))? + let keys = self + .client + .runtime_api() + .decode_session_keys(&generic::BlockId::Hash(best_block_hash), session_keys.to_vec()) + .map_err(|e| Error::Client(Box::new(e)))? .ok_or_else(|| Error::InvalidSessionKeys)?; Ok(SyncCryptoStore::has_keys(&*self.keystore, &keys)) @@ -144,12 +138,15 @@ impl AuthorApi, BlockHash

> for Author Err(err) => return Box::new(result(Err(err.into()))), }; let best_block_hash = self.client.info().best_hash; - Box::new(self.pool - .submit_one(&generic::BlockId::hash(best_block_hash), TX_SOURCE, xt) - .compat() - .map_err(|e| e.into_pool_error() - .map(Into::into) - .unwrap_or_else(|e| error::Error::Verification(Box::new(e)).into())) + Box::new( + self.pool + .submit_one(&generic::BlockId::hash(best_block_hash), TX_SOURCE, xt) + .compat() + .map_err(|e| { + e.into_pool_error() + .map(Into::into) + .unwrap_or_else(|e| error::Error::Verification(Box::new(e)).into()) + }), ) } @@ -163,7 +160,8 @@ impl AuthorApi, BlockHash

> for Author ) -> Result>> { self.deny_unsafe.check_if_safe()?; - let hashes = bytes_or_hash.into_iter() + let hashes = bytes_or_hash + .into_iter() .map(|x| match x { hash::ExtrinsicOrHash::Hash(h) => Ok(h), hash::ExtrinsicOrHash::Extrinsic(bytes) => { @@ -173,32 +171,31 @@ impl AuthorApi, BlockHash

> for Author }) .collect::>>()?; - Ok( - self.pool - .remove_invalid(&hashes) - .into_iter() - .map(|tx| tx.hash().clone()) - .collect() - ) + Ok(self + .pool + .remove_invalid(&hashes) + .into_iter() + .map(|tx| tx.hash().clone()) + .collect()) } - fn watch_extrinsic(&self, + fn watch_extrinsic( + &self, _metadata: Self::Metadata, subscriber: Subscriber, BlockHash

>>, xt: Bytes, ) { let submit = || -> Result<_> { let best_block_hash = self.client.info().best_hash; - let dxt = TransactionFor::

::decode(&mut &xt[..]) - .map_err(error::Error::from)?; - Ok( - self.pool - .submit_and_watch(&generic::BlockId::hash(best_block_hash), TX_SOURCE, dxt) - .map_err(|e| e.into_pool_error() + let dxt = TransactionFor::

::decode(&mut &xt[..]).map_err(error::Error::from)?; + Ok(self + .pool + .submit_and_watch(&generic::BlockId::hash(best_block_hash), TX_SOURCE, dxt) + .map_err(|e| { + e.into_pool_error() .map(error::Error::from) .unwrap_or_else(|e| error::Error::Verification(Box::new(e)).into()) - ) - ) + })) }; let subscriptions = self.subscriptions.clone(); @@ -211,8 +208,7 @@ impl AuthorApi, BlockHash

> for Author .map(move |result| match result { Ok(watcher) => { subscriptions.add(subscriber, move |sink| { - sink - .sink_map_err(|e| log::debug!("Subscription sink failed: {:?}", e)) + sink.sink_map_err(|e| log::debug!("Subscription sink failed: {:?}", e)) .send_all(Compat::new(watcher)) .map(|_| ()) }); @@ -224,14 +220,20 @@ impl AuthorApi, BlockHash

> for Author }, }); - let res = self.subscriptions.executor() + let res = self + .subscriptions + .executor() .execute(Box::new(Compat::new(future.map(|_| Ok(()))))); if res.is_err() { warn!("Error spawning subscription RPC task."); } } - fn unwatch_extrinsic(&self, _metadata: Option, id: SubscriptionId) -> Result { + fn unwatch_extrinsic( + &self, + _metadata: Option, + id: SubscriptionId, + ) -> Result { Ok(self.subscriptions.cancel(id)) } } diff --git a/client/rpc/src/author/tests.rs b/client/rpc/src/author/tests.rs index 0e7cb5539501d..80cef0a58f03a 100644 --- a/client/rpc/src/author/tests.rs +++ b/client/rpc/src/author/tests.rs @@ -18,37 +18,35 @@ use super::*; -use std::{mem, sync::Arc}; use assert_matches::assert_matches; use codec::Encode; +use futures::{compat::Future01CompatExt, executor}; +use rpc::futures::Stream as _; +use sc_transaction_pool::{BasicPool, FullChainApi}; use sp_core::{ - ed25519, sr25519, - H256, blake2_256, hexdisplay::HexDisplay, testing::{ED25519, SR25519}, + blake2_256, crypto::{CryptoTypePublicPair, Pair, Public}, + ed25519, + hexdisplay::HexDisplay, + sr25519, + testing::{ED25519, SR25519}, + H256, }; use sp_keystore::testing::KeyStore; -use rpc::futures::Stream as _; +use std::{mem, sync::Arc}; use substrate_test_runtime_client::{ - self, AccountKeyring, runtime::{Extrinsic, Transfer, SessionKeys, Block}, - DefaultTestClientBuilderExt, TestClientBuilderExt, Backend, Client, + self, + runtime::{Block, Extrinsic, SessionKeys, Transfer}, + AccountKeyring, Backend, Client, DefaultTestClientBuilderExt, TestClientBuilderExt, }; -use sc_transaction_pool::{BasicPool, FullChainApi}; -use futures::{executor, compat::Future01CompatExt}; fn uxt(sender: AccountKeyring, nonce: u64) -> Extrinsic { - let tx = Transfer { - amount: Default::default(), - nonce, - from: sender.into(), - to: Default::default(), - }; + let tx = + Transfer { amount: Default::default(), nonce, from: sender.into(), to: Default::default() }; tx.into_signed_tx() } -type 
FullTransactionPool = BasicPool< - FullChainApi, Block>, - Block, ->; +type FullTransactionPool = BasicPool, Block>, Block>; struct TestSetup { pub client: Arc>, @@ -63,18 +61,9 @@ impl Default for TestSetup { let client = Arc::new(client_builder.set_keystore(keystore.clone()).build()); let spawner = sp_core::testing::TaskExecutor::new(); - let pool = BasicPool::new_full( - Default::default(), - true.into(), - None, - spawner, - client.clone(), - ); - TestSetup { - client, - keystore, - pool, - } + let pool = + BasicPool::new_full(Default::default(), true.into(), None, spawner, client.clone()); + TestSetup { client, keystore, pool } } } @@ -100,9 +89,7 @@ fn submit_transaction_should_not_cause_error() { AuthorApi::submit_extrinsic(&p, xt.clone().into()).wait(), Ok(h2) if h == h2 ); - assert!( - AuthorApi::submit_extrinsic(&p, xt.into()).wait().is_err() - ); + assert!(AuthorApi::submit_extrinsic(&p, xt.into()).wait().is_err()); } #[test] @@ -115,9 +102,7 @@ fn submit_rich_transaction_should_not_cause_error() { AuthorApi::submit_extrinsic(&p, xt.clone().into()).wait(), Ok(h2) if h == h2 ); - assert!( - AuthorApi::submit_extrinsic(&p, xt.into()).wait().is_err() - ); + assert!(AuthorApi::submit_extrinsic(&p, xt.into()).wait().is_err()); } #[test] @@ -182,7 +167,11 @@ fn should_return_watch_validation_error() { let (subscriber, id_rx, _data) = jsonrpc_pubsub::typed::Subscriber::new_test("test"); // when - p.watch_extrinsic(Default::default(), subscriber, uxt(AccountKeyring::Alice, 179).encode().into()); + p.watch_extrinsic( + Default::default(), + subscriber, + uxt(AccountKeyring::Alice, 179).encode().into(), + ); // then let res = executor::block_on(id_rx.compat()).unwrap(); @@ -215,11 +204,13 @@ fn should_remove_extrinsics() { assert_eq!(setup.pool.status().ready, 3); // now remove all 3 - let removed = p.remove_extrinsic(vec![ - hash::ExtrinsicOrHash::Hash(hash3), - // Removing this one will also remove ex2 - hash::ExtrinsicOrHash::Extrinsic(ex1.encode().into()), - 
]).unwrap(); + let removed = p + .remove_extrinsic(vec![ + hash::ExtrinsicOrHash::Hash(hash3), + // Removing this one will also remove ex2 + hash::ExtrinsicOrHash::Extrinsic(ex1.encode().into()), + ]) + .unwrap(); assert_eq!(removed.len(), 3); } @@ -235,11 +226,13 @@ fn should_insert_key() { String::from_utf8(ED25519.0.to_vec()).expect("Keytype is a valid string"), suri.to_string(), key_pair.public().0.to_vec().into(), - ).expect("Insert key"); + ) + .expect("Insert key"); let public_keys = SyncCryptoStore::keys(&*setup.keystore, ED25519).unwrap(); - assert!(public_keys.contains(&CryptoTypePublicPair(ed25519::CRYPTO_ID, key_pair.public().to_raw_vec()))); + assert!(public_keys + .contains(&CryptoTypePublicPair(ed25519::CRYPTO_ID, key_pair.public().to_raw_vec()))); } #[test] @@ -249,14 +242,16 @@ fn should_rotate_keys() { let new_public_keys = p.rotate_keys().expect("Rotates the keys"); - let session_keys = SessionKeys::decode(&mut &new_public_keys[..]) - .expect("SessionKeys decode successfully"); + let session_keys = + SessionKeys::decode(&mut &new_public_keys[..]).expect("SessionKeys decode successfully"); let ed25519_public_keys = SyncCryptoStore::keys(&*setup.keystore, ED25519).unwrap(); let sr25519_public_keys = SyncCryptoStore::keys(&*setup.keystore, SR25519).unwrap(); - assert!(ed25519_public_keys.contains(&CryptoTypePublicPair(ed25519::CRYPTO_ID, session_keys.ed25519.to_raw_vec()))); - assert!(sr25519_public_keys.contains(&CryptoTypePublicPair(sr25519::CRYPTO_ID, session_keys.sr25519.to_raw_vec()))); + assert!(ed25519_public_keys + .contains(&CryptoTypePublicPair(ed25519::CRYPTO_ID, session_keys.ed25519.to_raw_vec()))); + assert!(sr25519_public_keys + .contains(&CryptoTypePublicPair(sr25519::CRYPTO_ID, session_keys.sr25519.to_raw_vec()))); } #[test] @@ -264,10 +259,8 @@ fn test_has_session_keys() { let setup = TestSetup::default(); let p = setup.author(); - let non_existent_public_keys = TestSetup::default() - .author() - .rotate_keys() - .expect("Rotates 
the keys"); + let non_existent_public_keys = + TestSetup::default().author().rotate_keys().expect("Rotates the keys"); let public_keys = p.rotate_keys().expect("Rotates the keys"); let test_vectors = vec![ @@ -295,7 +288,8 @@ fn test_has_key() { String::from_utf8(ED25519.0.to_vec()).expect("Keytype is a valid string"), suri.to_string(), alice_key_pair.public().0.to_vec().into(), - ).expect("Insert key"); + ) + .expect("Insert key"); let bob_key_pair = ed25519::Pair::from_string("//Bob", None).expect("Generates keypair"); let test_vectors = vec![ @@ -310,7 +304,8 @@ fn test_has_key() { p.has_key( key, String::from_utf8(key_type.0.to_vec()).expect("Keytype is a valid string"), - ).map_err(|e| mem::discriminant(&e)), + ) + .map_err(|e| mem::discriminant(&e)), ); } } diff --git a/client/rpc/src/chain/chain_full.rs b/client/rpc/src/chain/chain_full.rs index 9687b13d50fc7..8d0f622d1e7aa 100644 --- a/client/rpc/src/chain/chain_full.rs +++ b/client/rpc/src/chain/chain_full.rs @@ -18,16 +18,19 @@ //! Blockchain API backend for full nodes. -use std::sync::Arc; -use rpc::futures::future::result; use jsonrpc_pubsub::manager::SubscriptionManager; +use rpc::futures::future::result; +use std::sync::Arc; -use sc_client_api::{BlockchainEvents, BlockBackend}; -use sp_runtime::{generic::{BlockId, SignedBlock}, traits::{Block as BlockT}}; +use sc_client_api::{BlockBackend, BlockchainEvents}; +use sp_runtime::{ + generic::{BlockId, SignedBlock}, + traits::Block as BlockT, +}; -use super::{ChainBackend, client_err, error::FutureResult}; -use std::marker::PhantomData; +use super::{client_err, error::FutureResult, ChainBackend}; use sp_blockchain::HeaderBackend; +use std::marker::PhantomData; /// Blockchain API backend for full nodes. Reads all the data from local database. pub struct FullChain { @@ -42,15 +45,12 @@ pub struct FullChain { impl FullChain { /// Create new Chain API RPC handler. 
pub fn new(client: Arc, subscriptions: SubscriptionManager) -> Self { - Self { - client, - subscriptions, - _phantom: PhantomData, - } + Self { client, subscriptions, _phantom: PhantomData } } } -impl ChainBackend for FullChain where +impl ChainBackend for FullChain +where Block: BlockT + 'static, Client: BlockBackend + HeaderBackend + BlockchainEvents + 'static, { @@ -63,18 +63,14 @@ impl ChainBackend for FullChain whe } fn header(&self, hash: Option) -> FutureResult> { - Box::new(result(self.client - .header(BlockId::Hash(self.unwrap_or_best(hash))) - .map_err(client_err) + Box::new(result( + self.client.header(BlockId::Hash(self.unwrap_or_best(hash))).map_err(client_err), )) } - fn block(&self, hash: Option) - -> FutureResult>> - { - Box::new(result(self.client - .block(&BlockId::Hash(self.unwrap_or_best(hash))) - .map_err(client_err) + fn block(&self, hash: Option) -> FutureResult>> { + Box::new(result( + self.client.block(&BlockId::Hash(self.unwrap_or_best(hash))).map_err(client_err), )) } } diff --git a/client/rpc/src/chain/chain_light.rs b/client/rpc/src/chain/chain_light.rs index a3f3db9b7116c..ebca664c0f23d 100644 --- a/client/rpc/src/chain/chain_light.rs +++ b/client/rpc/src/chain/chain_light.rs @@ -18,20 +18,20 @@ //! Blockchain API backend for light nodes. 
-use std::sync::Arc; use futures::{future::ready, FutureExt, TryFutureExt}; -use rpc::futures::future::{result, Future, Either}; use jsonrpc_pubsub::manager::SubscriptionManager; +use rpc::futures::future::{result, Either, Future}; +use std::sync::Arc; -use sc_client_api::light::{Fetcher, RemoteBodyRequest, RemoteBlockchain}; +use sc_client_api::light::{Fetcher, RemoteBlockchain, RemoteBodyRequest}; use sp_runtime::{ generic::{BlockId, SignedBlock}, - traits::{Block as BlockT}, + traits::Block as BlockT, }; -use super::{ChainBackend, client_err, error::FutureResult}; -use sp_blockchain::HeaderBackend; +use super::{client_err, error::FutureResult, ChainBackend}; use sc_client_api::BlockchainEvents; +use sp_blockchain::HeaderBackend; /// Blockchain API backend for light nodes. Reads all the data from local /// database, if available, or fetches it from remote node otherwise. @@ -54,16 +54,12 @@ impl> LightChain { remote_blockchain: Arc>, fetcher: Arc, ) -> Self { - Self { - client, - subscriptions, - remote_blockchain, - fetcher, - } + Self { client, subscriptions, remote_blockchain, fetcher } } } -impl ChainBackend for LightChain where +impl ChainBackend for LightChain +where Block: BlockT + 'static, Client: BlockchainEvents + HeaderBackend + Send + Sync + 'static, F: Fetcher + Send + Sync + 'static, @@ -86,32 +82,32 @@ impl ChainBackend for LightChain) - -> FutureResult>> - { + fn block(&self, hash: Option) -> FutureResult>> { let fetcher = self.fetcher.clone(); - let block = self.header(hash) - .and_then(move |header| match header { - Some(header) => Either::A(fetcher + let block = self.header(hash).and_then(move |header| match header { + Some(header) => Either::A( + fetcher .remote_body(RemoteBodyRequest { header: header.clone(), retry_count: Default::default(), }) .boxed() .compat() - .map(move |body| Some(SignedBlock { - block: Block::new(header, body), - justifications: None, - })) - .map_err(client_err) - ), - None => Either::B(result(Ok(None))), - }); + 
.map(move |body| { + Some(SignedBlock { block: Block::new(header, body), justifications: None }) + }) + .map_err(client_err), + ), + None => Either::B(result(Ok(None))), + }); Box::new(block) } diff --git a/client/rpc/src/chain/mod.rs b/client/rpc/src/chain/mod.rs index 1380927bca2f4..f78188249f6fd 100644 --- a/client/rpc/src/chain/mod.rs +++ b/client/rpc/src/chain/mod.rs @@ -24,33 +24,36 @@ mod chain_light; #[cfg(test)] mod tests; -use std::sync::Arc; use futures::{future, StreamExt, TryStreamExt}; use log::warn; use rpc::{ - Result as RpcResult, futures::{stream, Future, Sink, Stream}, + Result as RpcResult, }; +use std::sync::Arc; -use sc_client_api::{BlockchainEvents, light::{Fetcher, RemoteBlockchain}}; -use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId, manager::SubscriptionManager}; -use sp_rpc::{number::NumberOrHex, list::ListOrValue}; +use jsonrpc_pubsub::{manager::SubscriptionManager, typed::Subscriber, SubscriptionId}; +use sc_client_api::{ + light::{Fetcher, RemoteBlockchain}, + BlockchainEvents, +}; +use sp_rpc::{list::ListOrValue, number::NumberOrHex}; use sp_runtime::{ generic::{BlockId, SignedBlock}, traits::{Block as BlockT, Header, NumberFor}, }; -use self::error::{Result, Error, FutureResult}; +use self::error::{Error, FutureResult, Result}; +use sc_client_api::BlockBackend; pub use sc_rpc_api::chain::*; use sp_blockchain::HeaderBackend; -use sc_client_api::BlockBackend; /// Blockchain backend API trait ChainBackend: Send + Sync + 'static - where - Block: BlockT + 'static, - Client: HeaderBackend + BlockchainEvents + 'static, +where + Block: BlockT + 'static, + Client: HeaderBackend + BlockchainEvents + 'static, { /// Get client reference. fn client(&self) -> &Arc; @@ -94,7 +97,7 @@ trait ChainBackend: Send + Sync + 'static .header(BlockId::number(block_num)) .map_err(client_err)? 
.map(|h| h.hash())) - } + }, } } @@ -114,9 +117,12 @@ trait ChainBackend: Send + Sync + 'static self.subscriptions(), subscriber, || self.client().info().best_hash, - || self.client().import_notification_stream() - .map(|notification| Ok::<_, ()>(notification.header)) - .compat(), + || { + self.client() + .import_notification_stream() + .map(|notification| Ok::<_, ()>(notification.header)) + .compat() + }, ) } @@ -140,10 +146,13 @@ trait ChainBackend: Send + Sync + 'static self.subscriptions(), subscriber, || self.client().info().best_hash, - || self.client().import_notification_stream() - .filter(|notification| future::ready(notification.is_new_best)) - .map(|notification| Ok::<_, ()>(notification.header)) - .compat(), + || { + self.client() + .import_notification_stream() + .filter(|notification| future::ready(notification.is_new_best)) + .map(|notification| Ok::<_, ()>(notification.header)) + .compat() + }, ) } @@ -167,9 +176,12 @@ trait ChainBackend: Send + Sync + 'static self.subscriptions(), subscriber, || self.client().info().finalized_hash, - || self.client().finality_notification_stream() - .map(|notification| Ok::<_, ()>(notification.header)) - .compat(), + || { + self.client() + .finality_notification_stream() + .map(|notification| Ok::<_, ()>(notification.header)) + .compat() + }, ) } @@ -188,13 +200,11 @@ pub fn new_full( client: Arc, subscriptions: SubscriptionManager, ) -> Chain - where - Block: BlockT + 'static, - Client: BlockBackend + HeaderBackend + BlockchainEvents + 'static, +where + Block: BlockT + 'static, + Client: BlockBackend + HeaderBackend + BlockchainEvents + 'static, { - Chain { - backend: Box::new(self::chain_full::FullChain::new(client, subscriptions)), - } + Chain { backend: Box::new(self::chain_full::FullChain::new(client, subscriptions)) } } /// Create new state API that works on light node. 
@@ -204,10 +214,10 @@ pub fn new_light>( remote_blockchain: Arc>, fetcher: Arc, ) -> Chain - where - Block: BlockT + 'static, - Client: BlockBackend + HeaderBackend + BlockchainEvents + 'static, - F: Send + Sync + 'static, +where + Block: BlockT + 'static, + Client: BlockBackend + HeaderBackend + BlockchainEvents + 'static, + F: Send + Sync + 'static, { Chain { backend: Box::new(self::chain_light::LightChain::new( @@ -224,11 +234,11 @@ pub struct Chain { backend: Box>, } -impl ChainApi, Block::Hash, Block::Header, SignedBlock> for - Chain - where - Block: BlockT + 'static, - Client: HeaderBackend + BlockchainEvents + 'static, +impl ChainApi, Block::Hash, Block::Header, SignedBlock> + for Chain +where + Block: BlockT + 'static, + Client: HeaderBackend + BlockchainEvents + 'static, { type Metadata = crate::Metadata; @@ -236,8 +246,7 @@ impl ChainApi, Block::Hash, Block::Header, Signe self.backend.header(hash) } - fn block(&self, hash: Option) -> FutureResult>> - { + fn block(&self, hash: Option) -> FutureResult>> { self.backend.block(hash) } @@ -247,12 +256,13 @@ impl ChainApi, Block::Hash, Block::Header, Signe ) -> Result>> { match number { None => self.backend.block_hash(None).map(ListOrValue::Value), - Some(ListOrValue::Value(number)) => self.backend.block_hash(Some(number)).map(ListOrValue::Value), - Some(ListOrValue::List(list)) => Ok(ListOrValue::List(list - .into_iter() - .map(|number| self.backend.block_hash(Some(number))) - .collect::>()? 
- )) + Some(ListOrValue::Value(number)) => + self.backend.block_hash(Some(number)).map(ListOrValue::Value), + Some(ListOrValue::List(list)) => Ok(ListOrValue::List( + list.into_iter() + .map(|number| self.backend.block_hash(Some(number))) + .collect::>()?, + )), } } @@ -264,7 +274,11 @@ impl ChainApi, Block::Hash, Block::Header, Signe self.backend.subscribe_all_heads(metadata, subscriber) } - fn unsubscribe_all_heads(&self, metadata: Option, id: SubscriptionId) -> RpcResult { + fn unsubscribe_all_heads( + &self, + metadata: Option, + id: SubscriptionId, + ) -> RpcResult { self.backend.unsubscribe_all_heads(metadata, id) } @@ -272,15 +286,27 @@ impl ChainApi, Block::Hash, Block::Header, Signe self.backend.subscribe_new_heads(metadata, subscriber) } - fn unsubscribe_new_heads(&self, metadata: Option, id: SubscriptionId) -> RpcResult { + fn unsubscribe_new_heads( + &self, + metadata: Option, + id: SubscriptionId, + ) -> RpcResult { self.backend.unsubscribe_new_heads(metadata, id) } - fn subscribe_finalized_heads(&self, metadata: Self::Metadata, subscriber: Subscriber) { + fn subscribe_finalized_heads( + &self, + metadata: Self::Metadata, + subscriber: Subscriber, + ) { self.backend.subscribe_finalized_heads(metadata, subscriber) } - fn unsubscribe_finalized_heads(&self, metadata: Option, id: SubscriptionId) -> RpcResult { + fn unsubscribe_finalized_heads( + &self, + metadata: Option, + id: SubscriptionId, + ) -> RpcResult { self.backend.unsubscribe_finalized_heads(metadata, id) } } @@ -298,15 +324,14 @@ fn subscribe_headers( F: FnOnce() -> S, G: FnOnce() -> Block::Hash, ERR: ::std::fmt::Debug, - S: Stream + Send + 'static, + S: Stream + Send + 'static, { subscriptions.add(subscriber, |sink| { // send current head right at the start. 
- let header = client.header(BlockId::Hash(best_block_hash())) + let header = client + .header(BlockId::Hash(best_block_hash())) .map_err(client_err) - .and_then(|header| { - header.ok_or_else(|| "Best header missing.".to_owned().into()) - }) + .and_then(|header| header.ok_or_else(|| "Best header missing.".to_owned().into())) .map_err(Into::into); // send further subscriptions @@ -314,12 +339,8 @@ fn subscribe_headers( .map(|res| Ok(res)) .map_err(|e| warn!("Block notification stream error: {:?}", e)); - sink - .sink_map_err(|e| warn!("Error sending notifications: {:?}", e)) - .send_all( - stream::iter_result(vec![Ok(header)]) - .chain(stream) - ) + sink.sink_map_err(|e| warn!("Error sending notifications: {:?}", e)) + .send_all(stream::iter_result(vec![Ok(header)]).chain(stream)) // we ignore the resulting Stream (if the first stream is over we are unsubscribed) .map(|_| ()) }); diff --git a/client/rpc/src/chain/tests.rs b/client/rpc/src/chain/tests.rs index bb673d65ea0f2..9bd08a1796adc 100644 --- a/client/rpc/src/chain/tests.rs +++ b/client/rpc/src/chain/tests.rs @@ -17,16 +17,19 @@ // along with this program. If not, see . 
use super::*; +use crate::testing::TaskExecutor; use assert_matches::assert_matches; +use futures::{ + compat::{Future01CompatExt, Stream01CompatExt}, + executor, +}; +use sc_block_builder::BlockBuilderProvider; +use sp_rpc::list::ListOrValue; use substrate_test_runtime_client::{ prelude::*, + runtime::{Block, Header, H256}, sp_consensus::BlockOrigin, - runtime::{H256, Block, Header}, }; -use sp_rpc::list::ListOrValue; -use sc_block_builder::BlockBuilderProvider; -use futures::{executor, compat::{Future01CompatExt, Stream01CompatExt}}; -use crate::testing::TaskExecutor; #[test] fn should_return_header() { @@ -105,10 +108,7 @@ fn should_return_a_block() { } ); - assert_matches!( - api.block(Some(H256::from_low_u64_be(5)).into()).wait(), - Ok(None) - ); + assert_matches!(api.block(Some(H256::from_low_u64_be(5)).into()).wait(), Ok(None)); } #[test] @@ -121,7 +121,6 @@ fn should_return_block_hash() { Ok(ListOrValue::Value(Some(ref x))) if x == &client.genesis_hash() ); - assert_matches!( api.block_hash(Some(ListOrValue::Value(0u64.into())).into()), Ok(ListOrValue::Value(Some(ref x))) if x == &client.genesis_hash() @@ -154,7 +153,6 @@ fn should_return_block_hash() { ); } - #[test] fn should_return_finalized_hash() { let mut client = Arc::new(substrate_test_runtime_client::new()); @@ -193,10 +191,7 @@ fn should_notify_about_latest_block() { api.subscribe_all_heads(Default::default(), subscriber); // assert id assigned - assert!(matches!( - executor::block_on(id.compat()), - Ok(Ok(SubscriptionId::String(_))) - )); + assert!(matches!(executor::block_on(id.compat()), Ok(Ok(SubscriptionId::String(_))))); let block = client.new_block(Default::default()).unwrap().build().unwrap().block; executor::block_on(client.import(BlockOrigin::Own, block)).unwrap(); @@ -223,10 +218,7 @@ fn should_notify_about_best_block() { api.subscribe_new_heads(Default::default(), subscriber); // assert id assigned - assert!(matches!( - executor::block_on(id.compat()), - 
Ok(Ok(SubscriptionId::String(_))) - )); + assert!(matches!(executor::block_on(id.compat()), Ok(Ok(SubscriptionId::String(_))))); let block = client.new_block(Default::default()).unwrap().build().unwrap().block; executor::block_on(client.import(BlockOrigin::Own, block)).unwrap(); @@ -253,10 +245,7 @@ fn should_notify_about_finalized_block() { api.subscribe_finalized_heads(Default::default(), subscriber); // assert id assigned - assert!(matches!( - executor::block_on(id.compat()), - Ok(Ok(SubscriptionId::String(_))) - )); + assert!(matches!(executor::block_on(id.compat()), Ok(Ok(SubscriptionId::String(_))))); let block = client.new_block(Default::default()).unwrap().build().unwrap().block; executor::block_on(client.import(BlockOrigin::Own, block)).unwrap(); diff --git a/client/rpc/src/lib.rs b/client/rpc/src/lib.rs index 7b3af8cb2f328..ebdec6647f43a 100644 --- a/client/rpc/src/lib.rs +++ b/client/rpc/src/lib.rs @@ -23,12 +23,12 @@ #![warn(missing_docs)] use futures::{compat::Future01CompatExt, FutureExt}; -use rpc::futures::future::{Executor, ExecuteError, Future}; +use rpc::futures::future::{ExecuteError, Executor, Future}; use sp_core::traits::SpawnNamed; use std::sync::Arc; -pub use sc_rpc_api::{DenyUnsafe, Metadata}; pub use rpc::IoHandlerExtension as RpcExtension; +pub use sc_rpc_api::{DenyUnsafe, Metadata}; pub mod author; pub mod chain; diff --git a/client/rpc/src/offchain/mod.rs b/client/rpc/src/offchain/mod.rs index dbb48a9e51934..9d1cc702b51e0 100644 --- a/client/rpc/src/offchain/mod.rs +++ b/client/rpc/src/offchain/mod.rs @@ -21,15 +21,15 @@ #[cfg(test)] mod tests; +use self::error::{Error, Result}; +use parking_lot::RwLock; /// Re-export the API for backward compatibility. 
pub use sc_rpc_api::offchain::*; use sc_rpc_api::DenyUnsafe; -use self::error::{Error, Result}; use sp_core::{ - Bytes, offchain::{OffchainStorage, StorageKind}, + Bytes, }; -use parking_lot::RwLock; use std::sync::Arc; /// Offchain API @@ -43,10 +43,7 @@ pub struct Offchain { impl Offchain { /// Create new instance of Offchain API. pub fn new(storage: T, deny_unsafe: DenyUnsafe) -> Self { - Offchain { - storage: Arc::new(RwLock::new(storage)), - deny_unsafe, - } + Offchain { storage: Arc::new(RwLock::new(storage)), deny_unsafe } } } diff --git a/client/rpc/src/offchain/tests.rs b/client/rpc/src/offchain/tests.rs index b8054d816325f..f9629e70198a3 100644 --- a/client/rpc/src/offchain/tests.rs +++ b/client/rpc/src/offchain/tests.rs @@ -18,7 +18,7 @@ use super::*; use assert_matches::assert_matches; -use sp_core::{Bytes, offchain::storage::InMemOffchainStorage}; +use sp_core::{offchain::storage::InMemOffchainStorage, Bytes}; #[test] fn local_storage_should_work() { diff --git a/client/rpc/src/state/mod.rs b/client/rpc/src/state/mod.rs index 35680b0fa41dd..9137404df3ee2 100644 --- a/client/rpc/src/state/mod.rs +++ b/client/rpc/src/state/mod.rs @@ -24,34 +24,39 @@ mod state_light; #[cfg(test)] mod tests; +use jsonrpc_pubsub::{manager::SubscriptionManager, typed::Subscriber, SubscriptionId}; +use rpc::{ + futures::{future::result, Future}, + Result as RpcResult, +}; use std::sync::Arc; -use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId, manager::SubscriptionManager}; -use rpc::{Result as RpcResult, futures::{Future, future::result}}; -use sc_rpc_api::{DenyUnsafe, state::ReadProof}; -use sc_client_api::light::{RemoteBlockchain, Fetcher}; -use sp_core::{Bytes, storage::{StorageKey, PrefixedStorageKey, StorageData, StorageChangeSet}}; -use sp_version::RuntimeVersion; +use sc_client_api::light::{Fetcher, RemoteBlockchain}; +use sc_rpc_api::{state::ReadProof, DenyUnsafe}; +use sp_core::{ + storage::{PrefixedStorageKey, StorageChangeSet, StorageData, StorageKey}, + 
Bytes, +}; use sp_runtime::traits::Block as BlockT; +use sp_version::RuntimeVersion; -use sp_api::{Metadata, ProvideRuntimeApi, CallApiAt}; +use sp_api::{CallApiAt, Metadata, ProvideRuntimeApi}; use self::error::{Error, FutureResult}; -pub use sc_rpc_api::state::*; -pub use sc_rpc_api::child_state::*; use sc_client_api::{ - ExecutorProvider, StorageProvider, BlockchainEvents, Backend, BlockBackend, ProofProvider + Backend, BlockBackend, BlockchainEvents, ExecutorProvider, ProofProvider, StorageProvider, }; -use sp_blockchain::{HeaderMetadata, HeaderBackend}; +pub use sc_rpc_api::{child_state::*, state::*}; +use sp_blockchain::{HeaderBackend, HeaderMetadata}; const STORAGE_KEYS_PAGED_MAX_COUNT: u32 = 1000; /// State backend API. pub trait StateBackend: Send + Sync + 'static - where - Block: BlockT + 'static, - Client: Send + Sync + 'static, +where + Block: BlockT + 'static, + Client: Send + Sync + 'static, { /// Call runtime method at given block. fn call( @@ -129,7 +134,7 @@ pub trait StateBackend: Send + Sync + 'static fn query_storage_at( &self, keys: Vec, - at: Option + at: Option, ) -> FutureResult>>; /// Returns proof of storage entries at a specific block's state. 
@@ -184,21 +189,30 @@ pub fn new_full( deny_unsafe: DenyUnsafe, rpc_max_payload: Option, ) -> (State, ChildState) - where - Block: BlockT + 'static, - BE: Backend + 'static, - Client: ExecutorProvider + StorageProvider + ProofProvider - + HeaderMetadata + BlockchainEvents - + CallApiAt + HeaderBackend - + BlockBackend + ProvideRuntimeApi + Send + Sync + 'static, - Client::Api: Metadata, +where + Block: BlockT + 'static, + BE: Backend + 'static, + Client: ExecutorProvider + + StorageProvider + + ProofProvider + + HeaderMetadata + + BlockchainEvents + + CallApiAt + + HeaderBackend + + BlockBackend + + ProvideRuntimeApi + + Send + + Sync + + 'static, + Client::Api: Metadata, { - let child_backend = Box::new( - self::state_full::FullState::new( - client.clone(), subscriptions.clone(), rpc_max_payload - ) - ); - let backend = Box::new(self::state_full::FullState::new(client, subscriptions, rpc_max_payload)); + let child_backend = Box::new(self::state_full::FullState::new( + client.clone(), + subscriptions.clone(), + rpc_max_payload, + )); + let backend = + Box::new(self::state_full::FullState::new(client, subscriptions, rpc_max_payload)); (State { backend, deny_unsafe }, ChildState { backend: child_backend }) } @@ -210,27 +224,32 @@ pub fn new_light>( fetcher: Arc, deny_unsafe: DenyUnsafe, ) -> (State, ChildState) - where - Block: BlockT + 'static, - BE: Backend + 'static, - Client: ExecutorProvider + StorageProvider - + HeaderMetadata - + ProvideRuntimeApi + HeaderBackend + BlockchainEvents - + Send + Sync + 'static, - F: Send + Sync + 'static, +where + Block: BlockT + 'static, + BE: Backend + 'static, + Client: ExecutorProvider + + StorageProvider + + HeaderMetadata + + ProvideRuntimeApi + + HeaderBackend + + BlockchainEvents + + Send + + Sync + + 'static, + F: Send + Sync + 'static, { let child_backend = Box::new(self::state_light::LightState::new( - client.clone(), - subscriptions.clone(), - remote_blockchain.clone(), - fetcher.clone(), + client.clone(), + 
subscriptions.clone(), + remote_blockchain.clone(), + fetcher.clone(), )); let backend = Box::new(self::state_light::LightState::new( - client, - subscriptions, - remote_blockchain, - fetcher, + client, + subscriptions, + remote_blockchain, + fetcher, )); (State { backend, deny_unsafe }, ChildState { backend: child_backend }) } @@ -243,9 +262,9 @@ pub struct State { } impl StateApi for State - where - Block: BlockT + 'static, - Client: Send + Sync + 'static, +where + Block: BlockT + 'static, + Client: Send + Sync + 'static, { type Metadata = crate::Metadata; @@ -281,25 +300,35 @@ impl StateApi for State block: Option, ) -> FutureResult> { if count > STORAGE_KEYS_PAGED_MAX_COUNT { - return Box::new(result(Err( - Error::InvalidCount { - value: count, - max: STORAGE_KEYS_PAGED_MAX_COUNT, - } - ))); + return Box::new(result(Err(Error::InvalidCount { + value: count, + max: STORAGE_KEYS_PAGED_MAX_COUNT, + }))) } self.backend.storage_keys_paged(block, prefix, count, start_key) } - fn storage(&self, key: StorageKey, block: Option) -> FutureResult> { + fn storage( + &self, + key: StorageKey, + block: Option, + ) -> FutureResult> { self.backend.storage(block, key) } - fn storage_hash(&self, key: StorageKey, block: Option) -> FutureResult> { + fn storage_hash( + &self, + key: StorageKey, + block: Option, + ) -> FutureResult> { self.backend.storage_hash(block, key) } - fn storage_size(&self, key: StorageKey, block: Option) -> FutureResult> { + fn storage_size( + &self, + key: StorageKey, + block: Option, + ) -> FutureResult> { self.backend.storage_size(block, key) } @@ -311,7 +340,7 @@ impl StateApi for State &self, keys: Vec, from: Block::Hash, - to: Option + to: Option, ) -> FutureResult>> { if let Err(err) = self.deny_unsafe.check_if_safe() { return Box::new(result(Err(err.into()))) @@ -323,12 +352,16 @@ impl StateApi for State fn query_storage_at( &self, keys: Vec, - at: Option + at: Option, ) -> FutureResult>> { self.backend.query_storage_at(keys, at) } - fn 
read_proof(&self, keys: Vec, block: Option) -> FutureResult> { + fn read_proof( + &self, + keys: Vec, + block: Option, + ) -> FutureResult> { self.backend.read_proof(block, keys) } @@ -336,12 +369,16 @@ impl StateApi for State &self, meta: Self::Metadata, subscriber: Subscriber>, - keys: Option> + keys: Option>, ) { self.backend.subscribe_storage(meta, subscriber, keys); } - fn unsubscribe_storage(&self, meta: Option, id: SubscriptionId) -> RpcResult { + fn unsubscribe_storage( + &self, + meta: Option, + id: SubscriptionId, + ) -> RpcResult { self.backend.unsubscribe_storage(meta, id) } @@ -349,7 +386,11 @@ impl StateApi for State self.backend.runtime_version(at) } - fn subscribe_runtime_version(&self, meta: Self::Metadata, subscriber: Subscriber) { + fn subscribe_runtime_version( + &self, + meta: Self::Metadata, + subscriber: Subscriber, + ) { self.backend.subscribe_runtime_version(meta, subscriber); } @@ -367,9 +408,10 @@ impl StateApi for State /// Note: requires the node to run with `--rpc-methods=Unsafe`. /// Note: requires runtimes compiled with wasm tracing support, `--features with-tracing`. fn trace_block( - &self, block: Block::Hash, + &self, + block: Block::Hash, targets: Option, - storage_keys: Option + storage_keys: Option, ) -> FutureResult { if let Err(err) = self.deny_unsafe.check_if_safe() { return Box::new(result(Err(err.into()))) @@ -381,9 +423,9 @@ impl StateApi for State /// Child state backend API. pub trait ChildStateBackend: Send + Sync + 'static - where - Block: BlockT + 'static, - Client: Send + Sync + 'static, +where + Block: BlockT + 'static, + Client: Send + Sync + 'static, { /// Returns proof of storage for a child key entries at a specific block's state. 
fn read_child_proof( @@ -435,8 +477,7 @@ pub trait ChildStateBackend: Send + Sync + 'static storage_key: PrefixedStorageKey, key: StorageKey, ) -> FutureResult> { - Box::new(self.storage(block, storage_key, key) - .map(|x| x.map(|x| x.0.len() as u64))) + Box::new(self.storage(block, storage_key, key).map(|x| x.map(|x| x.0.len() as u64))) } } @@ -446,9 +487,9 @@ pub struct ChildState { } impl ChildStateApi for ChildState - where - Block: BlockT + 'static, - Client: Send + Sync + 'static, +where + Block: BlockT + 'static, + Client: Send + Sync + 'static, { type Metadata = crate::Metadata; @@ -465,7 +506,7 @@ impl ChildStateApi for ChildState &self, storage_key: PrefixedStorageKey, key: StorageKey, - block: Option + block: Option, ) -> FutureResult> { self.backend.storage(block, storage_key, key) } @@ -474,7 +515,7 @@ impl ChildStateApi for ChildState &self, storage_key: PrefixedStorageKey, key_prefix: StorageKey, - block: Option + block: Option, ) -> FutureResult> { self.backend.storage_keys(block, storage_key, key_prefix) } @@ -494,7 +535,7 @@ impl ChildStateApi for ChildState &self, storage_key: PrefixedStorageKey, key: StorageKey, - block: Option + block: Option, ) -> FutureResult> { self.backend.storage_hash(block, storage_key, key) } @@ -503,11 +544,10 @@ impl ChildStateApi for ChildState &self, storage_key: PrefixedStorageKey, key: StorageKey, - block: Option + block: Option, ) -> FutureResult> { self.backend.storage_size(block, storage_key, key) } - } fn client_err(err: sp_blockchain::Error) -> Error { diff --git a/client/rpc/src/state/state_full.rs b/client/rpc/src/state/state_full.rs index 58209e452e818..313e89bdf80b4 100644 --- a/client/rpc/src/state/state_full.rs +++ b/client/rpc/src/state/state_full.rs @@ -18,36 +18,49 @@ //! State API backend for full nodes. 
-use std::collections::{BTreeMap, HashMap}; -use std::sync::Arc; -use std::ops::Range; use futures::{future, StreamExt as _, TryStreamExt as _}; +use jsonrpc_pubsub::{manager::SubscriptionManager, typed::Subscriber, SubscriptionId}; use log::warn; -use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId, manager::SubscriptionManager}; -use rpc::{Result as RpcResult, futures::{stream, Future, Sink, Stream, future::result}}; +use rpc::{ + futures::{future::result, stream, Future, Sink, Stream}, + Result as RpcResult, +}; +use std::{ + collections::{BTreeMap, HashMap}, + ops::Range, + sync::Arc, +}; use sc_rpc_api::state::ReadProof; use sp_blockchain::{ - Result as ClientResult, Error as ClientError, HeaderMetadata, CachedHeaderMetadata, - HeaderBackend + CachedHeaderMetadata, Error as ClientError, HeaderBackend, HeaderMetadata, + Result as ClientResult, }; use sp_core::{ - Bytes, storage::{well_known_keys, StorageKey, StorageData, StorageChangeSet, - ChildInfo, ChildType, PrefixedStorageKey}, + storage::{ + well_known_keys, ChildInfo, ChildType, PrefixedStorageKey, StorageChangeSet, StorageData, + StorageKey, + }, + Bytes, }; -use sp_version::RuntimeVersion; use sp_runtime::{ - generic::BlockId, traits::{Block as BlockT, NumberFor, SaturatedConversion, CheckedSub}, + generic::BlockId, + traits::{Block as BlockT, CheckedSub, NumberFor, SaturatedConversion}, }; +use sp_version::RuntimeVersion; -use sp_api::{Metadata, ProvideRuntimeApi, CallApiAt}; +use sp_api::{CallApiAt, Metadata, ProvideRuntimeApi}; -use super::{StateBackend, ChildStateBackend, error::{FutureResult, Error, Result}, client_err}; -use std::marker::PhantomData; +use super::{ + client_err, + error::{Error, FutureResult, Result}, + ChildStateBackend, StateBackend, +}; use sc_client_api::{ - Backend, BlockBackend, BlockchainEvents, CallExecutor, StorageProvider, ExecutorProvider, - ProofProvider + Backend, BlockBackend, BlockchainEvents, CallExecutor, ExecutorProvider, ProofProvider, + StorageProvider, }; 
+use std::marker::PhantomData; /// Ranges to query in state_queryStorage. struct QueryStorageRange { @@ -72,11 +85,13 @@ pub struct FullState { } impl FullState - where - BE: Backend, - Client: StorageProvider + HeaderBackend + BlockBackend - + HeaderMetadata, - Block: BlockT + 'static, +where + BE: Backend, + Client: StorageProvider + + HeaderBackend + + BlockBackend + + HeaderMetadata, + Block: BlockT + 'static, { /// Create new state API backend for full nodes. pub fn new( @@ -98,16 +113,23 @@ impl FullState fn split_query_storage_range( &self, from: Block::Hash, - to: Option + to: Option, ) -> Result> { - let to = self.block_or_best(to).map_err(|e| invalid_block::(from, to, e.to_string()))?; + let to = self + .block_or_best(to) + .map_err(|e| invalid_block::(from, to, e.to_string()))?; - let invalid_block_err = |e: ClientError| invalid_block::(from, Some(to), e.to_string()); + let invalid_block_err = + |e: ClientError| invalid_block::(from, Some(to), e.to_string()); let from_meta = self.client.header_metadata(from).map_err(invalid_block_err)?; let to_meta = self.client.header_metadata(to).map_err(invalid_block_err)?; if from_meta.number > to_meta.number { - return Err(invalid_block_range(&from_meta, &to_meta, "from number > to number".to_owned())) + return Err(invalid_block_range( + &from_meta, + &to_meta, + "from number > to number".to_owned(), + )) } // check if we can get from `to` to `from` by going through parent_hashes. 
@@ -116,28 +138,33 @@ impl FullState let mut hashes = vec![to_meta.hash]; let mut last = to_meta.clone(); while last.number > from_number { - let header_metadata = self.client + let header_metadata = self + .client .header_metadata(last.parent) .map_err(|e| invalid_block_range::(&last, &to_meta, e.to_string()))?; hashes.push(header_metadata.hash); last = header_metadata; } if last.hash != from_meta.hash { - return Err(invalid_block_range(&from_meta, &to_meta, "from and to are on different forks".to_owned())) + return Err(invalid_block_range( + &from_meta, + &to_meta, + "from and to are on different forks".to_owned(), + )) } hashes.reverse(); hashes }; // check if we can filter blocks-with-changes from some (sub)range using changes tries - let changes_trie_range = self.client + let changes_trie_range = self + .client .max_key_changes_range(from_number, BlockId::Hash(to_meta.hash)) .map_err(client_err)?; - let filtered_range_begin = changes_trie_range - .and_then(|(begin, _)| { - // avoids a corner case where begin < from_number (happens when querying genesis) - begin.checked_sub(&from_number).map(|x| x.saturated_into::()) - }); + let filtered_range_begin = changes_trie_range.and_then(|(begin, _)| { + // avoids a corner case where begin < from_number (happens when querying genesis) + begin.checked_sub(&from_number).map(|x| x.saturated_into::()) + }); let (unfiltered_range, filtered_range) = split_range(hashes.len(), filtered_range_begin); Ok(QueryStorageRange { @@ -158,7 +185,8 @@ impl FullState ) -> Result<()> { for block in range.unfiltered_range.start..range.unfiltered_range.end { let block_hash = range.hashes[block].clone(); - let mut block_changes = StorageChangeSet { block: block_hash.clone(), changes: Vec::new() }; + let mut block_changes = + StorageChangeSet { block: block_hash.clone(), changes: Vec::new() }; let id = BlockId::hash(block_hash); for key in keys { let (has_changed, data) = { @@ -191,30 +219,34 @@ impl FullState let (begin, end) = match 
range.filtered_range { Some(ref filtered_range) => ( range.first_number + filtered_range.start.saturated_into(), - BlockId::Hash(range.hashes[filtered_range.end - 1].clone()) + BlockId::Hash(range.hashes[filtered_range.end - 1].clone()), ), None => return Ok(()), }; - let mut changes_map: BTreeMap, StorageChangeSet> = BTreeMap::new(); + let mut changes_map: BTreeMap, StorageChangeSet> = + BTreeMap::new(); for key in keys { let mut last_block = None; let mut last_value = last_values.get(key).cloned().unwrap_or_default(); let key_changes = self.client.key_changes(begin, end, None, key).map_err(client_err)?; for (block, _) in key_changes.into_iter().rev() { if last_block == Some(block) { - continue; + continue } - let block_hash = range.hashes[(block - range.first_number).saturated_into::()].clone(); + let block_hash = + range.hashes[(block - range.first_number).saturated_into::()].clone(); let id = BlockId::Hash(block_hash); let value_at_block = self.client.storage(&id, key).map_err(client_err)?; if last_value == value_at_block { - continue; + continue } - changes_map.entry(block) + changes_map + .entry(block) .or_insert_with(|| StorageChangeSet { block: block_hash, changes: Vec::new() }) - .changes.push((key.clone(), value_at_block.clone())); + .changes + .push((key.clone(), value_at_block.clone())); last_block = Some(block); last_value = value_at_block; } @@ -227,15 +259,22 @@ impl FullState } } -impl StateBackend for FullState where +impl StateBackend for FullState +where Block: BlockT + 'static, BE: Backend + 'static, - Client: ExecutorProvider + StorageProvider - + ProofProvider + HeaderBackend - + HeaderMetadata + BlockchainEvents - + CallApiAt + ProvideRuntimeApi + Client: ExecutorProvider + + StorageProvider + + ProofProvider + + HeaderBackend + + HeaderMetadata + + BlockchainEvents + + CallApiAt + + ProvideRuntimeApi + BlockBackend - + Send + Sync + 'static, + + Send + + Sync + + 'static, Client::Api: Metadata, { fn call( @@ -244,19 +283,21 @@ impl 
StateBackend for FullState FutureResult { - let r = self.block_or_best(block) - .and_then(|block| self - .client - .executor() - .call( - &BlockId::Hash(block), - &method, - &*call_data, - self.client.execution_extensions().strategies().other, - None, - ) - .map(Into::into) - ).map_err(client_err); + let r = self + .block_or_best(block) + .and_then(|block| { + self.client + .executor() + .call( + &BlockId::Hash(block), + &method, + &*call_data, + self.client.execution_extensions().strategies().other, + None, + ) + .map(Into::into) + }) + .map_err(client_err); Box::new(result(r)) } @@ -268,7 +309,8 @@ impl StateBackend for FullState StateBackend for FullState StateBackend for FullState FutureResult> { Box::new(result( self.block_or_best(block) - .and_then(|block| + .and_then(|block| { self.client.storage_keys_iter( - &BlockId::Hash(block), prefix.as_ref(), start_key.as_ref() + &BlockId::Hash(block), + prefix.as_ref(), + start_key.as_ref(), ) - ) + }) .map(|iter| iter.take(count as usize).collect()) - .map_err(client_err))) + .map_err(client_err), + )) } fn storage( @@ -308,7 +354,8 @@ impl StateBackend for FullState StateBackend for FullState(); if item_sum > 0 { @@ -337,7 +385,7 @@ impl StateBackend for FullState StateBackend for FullState) -> FutureResult { - Box::new(result( - self.block_or_best(block) - .map_err(client_err) - .and_then(|block| - self.client.runtime_api().metadata(&BlockId::Hash(block)) - .map(Into::into) - .map_err(|e| Error::Client(Box::new(e)))) - )) + Box::new(result(self.block_or_best(block).map_err(client_err).and_then(|block| { + self.client + .runtime_api() + .metadata(&BlockId::Hash(block)) + .map(Into::into) + .map_err(|e| Error::Client(Box::new(e))) + }))) } fn runtime_version(&self, block: Option) -> FutureResult { - Box::new(result( - self.block_or_best(block) - .map_err(client_err) - .and_then(|block| - self.client.runtime_version_at(&BlockId::Hash(block)) - .map_err(|e| Error::Client(Box::new(e))) - ) - )) + 
Box::new(result(self.block_or_best(block).map_err(client_err).and_then(|block| { + self.client + .runtime_version_at(&BlockId::Hash(block)) + .map_err(|e| Error::Client(Box::new(e))) + }))) } fn query_storage( @@ -394,7 +439,7 @@ impl StateBackend for FullState, - at: Option + at: Option, ) -> FutureResult>> { let at = at.unwrap_or_else(|| self.client.info().best_hash); self.query_storage(at, Some(at), keys) @@ -432,14 +477,12 @@ impl StateBackend for FullState stream, Err(err) => { let _ = subscriber.reject(Error::from(client_err(err)).into()); - return; - } + return + }, }; self.subscriptions.add(subscriber, |sink| { - let version = self.runtime_version(None.into()) - .map_err(Into::into) - .wait(); + let version = self.runtime_version(None.into()).map_err(Into::into).wait(); let client = self.client.clone(); let mut previous_version = version.clone(); @@ -460,12 +503,8 @@ impl StateBackend for FullState StateBackend for FullState>, ) { let keys = Into::>>::into(keys); - let stream = match self.client.storage_changes_notification_stream( - keys.as_ref().map(|x| &**x), - None - ) { + let stream = match self + .client + .storage_changes_notification_stream(keys.as_ref().map(|x| &**x), None) + { Ok(stream) => stream, Err(err) => { let _ = subscriber.reject(client_err(err).into()); - return; + return }, }; // initial values - let initial = stream::iter_result(keys - .map(|keys| { + let initial = stream::iter_result( + keys.map(|keys| { let block = self.client.info().best_hash; let changes = keys .into_iter() - .map(|key| StateBackend::storage(self, Some(block.clone()).into(), key.clone()) - .map(|val| (key.clone(), val)) - .wait() - .unwrap_or_else(|_| (key, None)) - ) + .map(|key| { + StateBackend::storage(self, Some(block.clone()).into(), key.clone()) + .map(|val| (key.clone(), val)) + .wait() + .unwrap_or_else(|_| (key, None)) + }) .collect(); vec![Ok(Ok(StorageChangeSet { block, changes }))] - }).unwrap_or_default()); + }) + .unwrap_or_default(), + ); 
self.subscriptions.add(subscriber, |sink| { let stream = stream - .map(|(block, changes)| Ok::<_, ()>(Ok(StorageChangeSet { - block, - changes: changes.iter() - .filter_map(|(o_sk, k, v)| if o_sk.is_none() { - Some((k.clone(),v.cloned())) - } else { None }).collect(), - }))) + .map(|(block, changes)| { + Ok::<_, ()>(Ok(StorageChangeSet { + block, + changes: changes + .iter() + .filter_map(|(o_sk, k, v)| { + if o_sk.is_none() { + Some((k.clone(), v.cloned())) + } else { + None + } + }) + .collect(), + })) + }) .compat(); - sink - .sink_map_err(|e| warn!("Error sending notifications: {:?}", e)) + sink.sink_map_err(|e| warn!("Error sending notifications: {:?}", e)) .send_all(initial.chain(stream)) // we ignore the resulting Stream (if the first stream is over we are unsubscribed) .map(|_| ()) @@ -553,21 +602,29 @@ impl StateBackend for FullState(block, None, e.to_string())) + block_executor + .trace_block() + .map_err(|e| invalid_block::(block, None, e.to_string())), )) } } -impl ChildStateBackend for FullState where +impl ChildStateBackend for FullState +where Block: BlockT + 'static, BE: Backend + 'static, - Client: ExecutorProvider + StorageProvider + Client: ExecutorProvider + + StorageProvider + ProofProvider - + HeaderBackend + BlockBackend - + HeaderMetadata + BlockchainEvents - + CallApiAt + ProvideRuntimeApi - + Send + Sync + 'static, + + HeaderBackend + + BlockBackend + + HeaderMetadata + + BlockchainEvents + + CallApiAt + + ProvideRuntimeApi + + Send + + Sync + + 'static, Client::Api: Metadata, { fn read_child_proof( @@ -580,7 +637,8 @@ impl ChildStateBackend for FullState ChildInfo::new_default(storage_key), + Some((ChildType::ParentKeyId, storage_key)) => + ChildInfo::new_default(storage_key), None => return Err(sp_blockchain::Error::InvalidChildStorageKey), }; self.client @@ -606,16 +664,14 @@ impl ChildStateBackend for FullState ChildInfo::new_default(storage_key), + Some((ChildType::ParentKeyId, storage_key)) => + ChildInfo::new_default(storage_key), 
None => return Err(sp_blockchain::Error::InvalidChildStorageKey), }; - self.client.child_storage_keys( - &BlockId::Hash(block), - &child_info, - &prefix, - ) + self.client.child_storage_keys(&BlockId::Hash(block), &child_info, &prefix) }) - .map_err(client_err))) + .map_err(client_err), + )) } fn storage_keys_paged( @@ -630,15 +686,20 @@ impl ChildStateBackend for FullState ChildInfo::new_default(storage_key), + Some((ChildType::ParentKeyId, storage_key)) => + ChildInfo::new_default(storage_key), None => return Err(sp_blockchain::Error::InvalidChildStorageKey), }; self.client.child_storage_keys_iter( - &BlockId::Hash(block), child_info, prefix.as_ref(), start_key.as_ref(), + &BlockId::Hash(block), + child_info, + prefix.as_ref(), + start_key.as_ref(), ) }) .map(|iter| iter.take(count as usize).collect()) - .map_err(client_err))) + .map_err(client_err), + )) } fn storage( @@ -651,16 +712,14 @@ impl ChildStateBackend for FullState ChildInfo::new_default(storage_key), + Some((ChildType::ParentKeyId, storage_key)) => + ChildInfo::new_default(storage_key), None => return Err(sp_blockchain::Error::InvalidChildStorageKey), }; - self.client.child_storage( - &BlockId::Hash(block), - &child_info, - &key, - ) + self.client.child_storage(&BlockId::Hash(block), &child_info, &key) }) - .map_err(client_err))) + .map_err(client_err), + )) } fn storage_hash( @@ -673,23 +732,24 @@ impl ChildStateBackend for FullState ChildInfo::new_default(storage_key), + Some((ChildType::ParentKeyId, storage_key)) => + ChildInfo::new_default(storage_key), None => return Err(sp_blockchain::Error::InvalidChildStorageKey), }; - self.client.child_storage_hash( - &BlockId::Hash(block), - &child_info, - &key, - ) + self.client.child_storage_hash(&BlockId::Hash(block), &child_info, &key) }) - .map_err(client_err))) + .map_err(client_err), + )) } } /// Splits passed range into two subranges where: /// - first range has at least one element in it; /// - second range (optionally) starts at given `middle` 
element. -pub(crate) fn split_range(size: usize, middle: Option) -> (Range, Option>) { +pub(crate) fn split_range( + size: usize, + middle: Option, +) -> (Range, Option>) { // check if we can filter blocks-with-changes from some (sub)range using changes tries let range2_begin = match middle { // some of required changes tries are pruned => use available tries @@ -714,21 +774,9 @@ fn invalid_block_range( ) -> Error { let to_string = |h: &CachedHeaderMetadata| format!("{} ({:?})", h.number, h.hash); - Error::InvalidBlockRange { - from: to_string(from), - to: to_string(to), - details, - } + Error::InvalidBlockRange { from: to_string(from), to: to_string(to), details } } -fn invalid_block( - from: B::Hash, - to: Option, - details: String, -) -> Error { - Error::InvalidBlockRange { - from: format!("{:?}", from), - to: format!("{:?}", to), - details, - } +fn invalid_block(from: B::Hash, to: Option, details: String) -> Error { + Error::InvalidBlockRange { from: format!("{:?}", from), to: format!("{:?}", to), details } } diff --git a/client/rpc/src/state/state_light.rs b/client/rpc/src/state/state_light.rs index a2f69df9d0271..274eabe376d98 100644 --- a/client/rpc/src/state/state_light.rs +++ b/client/rpc/src/state/state_light.rs @@ -18,45 +18,53 @@ //! State API backend for light nodes. 
-use std::{ - sync::Arc, - collections::{HashSet, HashMap, hash_map::Entry}, -}; use codec::Decode; use futures::{ - future::{ready, Either}, channel::oneshot::{channel, Sender}, - FutureExt, TryFutureExt, - StreamExt as _, TryStreamExt as _, + future::{ready, Either}, + FutureExt, StreamExt as _, TryFutureExt, TryStreamExt as _, }; use hash_db::Hasher; -use jsonrpc_pubsub::{typed::Subscriber, SubscriptionId, manager::SubscriptionManager}; +use jsonrpc_pubsub::{manager::SubscriptionManager, typed::Subscriber, SubscriptionId}; use log::warn; use parking_lot::Mutex; use rpc::{ + futures::{ + future::{result, Future}, + stream::Stream, + Sink, + }, Result as RpcResult, - futures::Sink, - futures::future::{result, Future}, - futures::stream::Stream, +}; +use std::{ + collections::{hash_map::Entry, HashMap, HashSet}, + sync::Arc, }; -use sc_rpc_api::state::ReadProof; -use sp_blockchain::{Error as ClientError, HeaderBackend}; use sc_client_api::{ - BlockchainEvents, light::{ - RemoteCallRequest, RemoteReadRequest, RemoteReadChildRequest, - RemoteBlockchain, Fetcher, future_header, + future_header, Fetcher, RemoteBlockchain, RemoteCallRequest, RemoteReadChildRequest, + RemoteReadRequest, }, + BlockchainEvents, }; +use sc_rpc_api::state::ReadProof; +use sp_blockchain::{Error as ClientError, HeaderBackend}; use sp_core::{ + storage::{PrefixedStorageKey, StorageChangeSet, StorageData, StorageKey}, Bytes, OpaqueMetadata, - storage::{StorageKey, PrefixedStorageKey, StorageData, StorageChangeSet}, +}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, HashFor}, }; use sp_version::RuntimeVersion; -use sp_runtime::{generic::BlockId, traits::{Block as BlockT, HashFor}}; -use super::{StateBackend, ChildStateBackend, error::{FutureResult, Error}, client_err}; +use super::{ + client_err, + error::{Error, FutureResult}, + ChildStateBackend, StateBackend, +}; /// Storage data map of storage keys => (optional) storage value. 
type StorageMap = HashMap>; @@ -77,11 +85,7 @@ trait SharedRequests: Clone + Send + Sync { /// Tries to listen for already issued request, or issues request. /// /// Returns true if requests has been issued. - fn listen_request( - &self, - block: Hash, - sender: Sender>, - ) -> bool; + fn listen_request(&self, block: Hash, sender: Sender>) -> bool; /// Returns (and forgets) all listeners for given request. fn on_response_received(&self, block: Hash) -> Vec>>; @@ -97,12 +101,10 @@ struct StorageSubscriptions { subscriptions_by_key: HashMap>, } -impl SharedRequests for Arc>> { - fn listen_request( - &self, - block: Block::Hash, - sender: Sender>, - ) -> bool { +impl SharedRequests + for Arc>> +{ + fn listen_request(&self, block: Block::Hash, sender: Sender>) -> bool { let mut subscriptions = self.lock(); let active_requests_at = subscriptions.active_requests.entry(block).or_default(); active_requests_at.push(sender); @@ -117,15 +119,12 @@ impl SharedRequests for Arc = Arc>>>>>; -impl SharedRequests for SimpleSubscriptions where +impl SharedRequests for SimpleSubscriptions +where Hash: Send + Eq + std::hash::Hash, V: Send, { - fn listen_request( - &self, - block: Hash, - sender: Sender>, - ) -> bool { + fn listen_request(&self, block: Hash, sender: Sender>) -> bool { let mut subscriptions = self.lock(); let active_requests_at = subscriptions.entry(block).or_default(); active_requests_at.push(sender); @@ -138,9 +137,9 @@ impl SharedRequests for SimpleSubscriptions where } impl + 'static, Client> LightState - where - Block: BlockT, - Client: HeaderBackend + Send + Sync + 'static, +where + Block: BlockT, + Client: HeaderBackend + Send + Sync + 'static, { /// Create new state API backend for light nodes. 
pub fn new( @@ -170,10 +169,10 @@ impl + 'static, Client> LightState StateBackend for LightState - where - Block: BlockT, - Client: BlockchainEvents + HeaderBackend + Send + Sync + 'static, - F: Fetcher + 'static +where + Block: BlockT, + Client: BlockchainEvents + HeaderBackend + Send + Sync + 'static, + F: Fetcher + 'static, { fn call( &self, @@ -181,13 +180,17 @@ impl StateBackend for LightState FutureResult { - Box::new(call( - &*self.remote_blockchain, - self.fetcher.clone(), - self.block_or_best(block), - method, - call_data, - ).boxed().compat()) + Box::new( + call( + &*self.remote_blockchain, + self.fetcher.clone(), + self.block_or_best(block), + method, + call_data, + ) + .boxed() + .compat(), + ) } fn storage_keys( @@ -216,11 +219,7 @@ impl StateBackend for LightState, - _: StorageKey, - ) -> FutureResult> { + fn storage_size(&self, _: Option, _: StorageKey) -> FutureResult> { Box::new(result(Err(client_err(ClientError::NotAvailableOnLightClient)))) } @@ -229,15 +228,21 @@ impl StateBackend for LightState, key: StorageKey, ) -> FutureResult> { - Box::new(storage( - &*self.remote_blockchain, - self.fetcher.clone(), - self.block_or_best(block), - vec![key.0.clone()], - ).boxed().compat().map(move |mut values| values - .remove(&key) - .expect("successful request has entries for all requested keys; qed") - )) + Box::new( + storage( + &*self.remote_blockchain, + self.fetcher.clone(), + self.block_or_best(block), + vec![key.0.clone()], + ) + .boxed() + .compat() + .map(move |mut values| { + values + .remove(&key) + .expect("successful request has entries for all requested keys; qed") + }), + ) } fn storage_hash( @@ -245,31 +250,38 @@ impl StateBackend for LightState, key: StorageKey, ) -> FutureResult> { - Box::new(StateBackend::storage(self, block, key) - .and_then(|maybe_storage| - result(Ok(maybe_storage.map(|storage| HashFor::::hash(&storage.0)))) - ) - ) + Box::new(StateBackend::storage(self, block, key).and_then(|maybe_storage| { + 
result(Ok(maybe_storage.map(|storage| HashFor::::hash(&storage.0)))) + })) } fn metadata(&self, block: Option) -> FutureResult { - let metadata = self.call(block, "Metadata_metadata".into(), Bytes(Vec::new())) - .and_then(|metadata| OpaqueMetadata::decode(&mut &metadata.0[..]) - .map(Into::into) - .map_err(|decode_err| client_err(ClientError::CallResultDecode( - "Unable to decode metadata", - decode_err, - )))); + let metadata = + self.call(block, "Metadata_metadata".into(), Bytes(Vec::new())) + .and_then(|metadata| { + OpaqueMetadata::decode(&mut &metadata.0[..]).map(Into::into).map_err( + |decode_err| { + client_err(ClientError::CallResultDecode( + "Unable to decode metadata", + decode_err, + )) + }, + ) + }); Box::new(metadata) } fn runtime_version(&self, block: Option) -> FutureResult { - Box::new(runtime_version( - &*self.remote_blockchain, - self.fetcher.clone(), - self.block_or_best(block), - ).boxed().compat()) + Box::new( + runtime_version( + &*self.remote_blockchain, + self.fetcher.clone(), + self.block_or_best(block), + ) + .boxed() + .compat(), + ) } fn query_storage( @@ -284,7 +296,7 @@ impl StateBackend for LightState, - _at: Option + _at: Option, ) -> FutureResult>> { Box::new(result(Err(client_err(ClientError::NotAvailableOnLightClient)))) } @@ -301,14 +313,14 @@ impl StateBackend for LightState>, - keys: Option> + keys: Option>, ) { let keys = match keys { Some(keys) if !keys.is_empty() => keys, _ => { warn!("Cannot subscribe to all keys on light client. 
Subscription rejected."); - return; - } + return + }, }; let keys = keys.iter().cloned().collect::>(); @@ -326,12 +338,10 @@ impl StateBackend for LightState(notification.hash)) .compat(), - display_error(storage( - &*remote_blockchain, - fetcher.clone(), - initial_block, - initial_keys, - ).map(move |r| r.map(|r| (initial_block, r)))), + display_error( + storage(&*remote_blockchain, fetcher.clone(), initial_block, initial_keys) + .map(move |r| r.map(|r| (initial_block, r))), + ), move |block| { // there'll be single request per block for all active subscriptions // with all subscribed keys @@ -342,12 +352,7 @@ impl StateBackend for LightState StateBackend for LightState None, } - } + }, ); - sink - .sink_map_err(|e| warn!("Error sending notifications: {:?}", e)) + sink.sink_map_err(|e| warn!("Error sending notifications: {:?}", e)) .send_all(changes_stream.map(|changes| Ok(changes))) // we ignore the resulting Stream (if the first stream is over we are unsubscribed) .map(|_| ()) @@ -382,7 +386,9 @@ impl StateBackend for LightState StateBackend for LightState RpcResult { if !self.subscriptions.cancel(id.clone()) { - return Ok(false); + return Ok(false) } // forget subscription keys @@ -406,14 +412,16 @@ impl StateBackend for LightState unreachable!("every key from keys_by_subscription has\ - corresponding entry in subscriptions_by_key; qed"), + Entry::Vacant(_) => unreachable!( + "every key from keys_by_subscription has\ + corresponding entry in subscriptions_by_key; qed" + ), Entry::Occupied(mut entry) => { entry.get_mut().remove(&id); if entry.get().is_empty() { entry.remove(); } - } + }, } } @@ -437,16 +445,11 @@ impl StateBackend for LightState(notification.hash)) .compat(), - display_error(runtime_version( - &*remote_blockchain, - fetcher.clone(), - initial_block, - ).map(move |r| r.map(|r| (initial_block, r)))), - move |block| runtime_version( - &*remote_blockchain, - fetcher.clone(), - block, + display_error( + runtime_version(&*remote_blockchain, 
fetcher.clone(), initial_block) + .map(move |r| r.map(|r| (initial_block, r))), ), + move |block| runtime_version(&*remote_blockchain, fetcher.clone(), block), |_, old_version, new_version| { let version_differs = old_version .as_ref() @@ -456,11 +459,10 @@ impl StateBackend for LightState Some(new_version.clone()), false => None, } - } + }, ); - sink - .sink_map_err(|e| warn!("Error sending notifications: {:?}", e)) + sink.sink_map_err(|e| warn!("Error sending notifications: {:?}", e)) .send_all(versions_stream.map(|version| Ok(version))) // we ignore the resulting Stream (if the first stream is over we are unsubscribed) .map(|_| ()) @@ -486,10 +488,10 @@ impl StateBackend for LightState ChildStateBackend for LightState - where - Block: BlockT, - Client: BlockchainEvents + HeaderBackend + Send + Sync + 'static, - F: Fetcher + 'static +where + Block: BlockT, + Client: BlockchainEvents + HeaderBackend + Send + Sync + 'static, + F: Fetcher + 'static, { fn read_child_proof( &self, @@ -528,23 +530,34 @@ impl ChildStateBackend for LightState FutureResult> { let block = self.block_or_best(block); let fetcher = self.fetcher.clone(); - let child_storage = resolve_header(&*self.remote_blockchain, &*self.fetcher, block) - .then(move |result| match result { - Ok(header) => Either::Left(fetcher.remote_read_child(RemoteReadChildRequest { - block, - header, - storage_key, - keys: vec![key.0.clone()], - retry_count: Default::default(), - }).then(move |result| ready(result - .map(|mut data| data - .remove(&key.0) - .expect("successful result has entry for all keys; qed") - .map(StorageData) - ) - .map_err(client_err) - ))), - Err(error) => Either::Right(ready(Err(error))), + let child_storage = + resolve_header(&*self.remote_blockchain, &*self.fetcher, block).then(move |result| { + match result { + Ok(header) => Either::Left( + fetcher + .remote_read_child(RemoteReadChildRequest { + block, + header, + storage_key, + keys: vec![key.0.clone()], + retry_count: Default::default(), + 
}) + .then(move |result| { + ready( + result + .map(|mut data| { + data.remove(&key.0) + .expect( + "successful result has entry for all keys; qed", + ) + .map(StorageData) + }) + .map_err(client_err), + ) + }), + ), + Err(error) => Either::Right(ready(Err(error))), + } }); Box::new(child_storage.boxed().compat()) @@ -556,11 +569,11 @@ impl ChildStateBackend for LightState FutureResult> { - Box::new(ChildStateBackend::storage(self, block, storage_key, key) - .and_then(|maybe_storage| + Box::new(ChildStateBackend::storage(self, block, storage_key, key).and_then( + |maybe_storage| { result(Ok(maybe_storage.map(|storage| HashFor::::hash(&storage.0)))) - ) - ) + }, + )) } } @@ -570,17 +583,17 @@ fn resolve_header>( fetcher: &F, block: Block::Hash, ) -> impl std::future::Future> { - let maybe_header = future_header( - remote_blockchain, - fetcher, - BlockId::Hash(block), - ); - - maybe_header.then(move |result| - ready(result.and_then(|maybe_header| - maybe_header.ok_or_else(|| ClientError::UnknownBlock(format!("{}", block))) - ).map_err(client_err)), - ) + let maybe_header = future_header(remote_blockchain, fetcher, BlockId::Hash(block)); + + maybe_header.then(move |result| { + ready( + result + .and_then(|maybe_header| { + maybe_header.ok_or_else(|| ClientError::UnknownBlock(format!("{}", block))) + }) + .map_err(client_err), + ) + }) } /// Call runtime method at given block @@ -591,17 +604,20 @@ fn call>( method: String, call_data: Bytes, ) -> impl std::future::Future> { - resolve_header(remote_blockchain, &*fetcher, block) - .then(move |result| match result { - Ok(header) => Either::Left(fetcher.remote_call(RemoteCallRequest { - block, - header, - method, - call_data: call_data.0, - retry_count: Default::default(), - }).then(|result| ready(result.map(Bytes).map_err(client_err)))), - Err(error) => Either::Right(ready(Err(error))), - }) + resolve_header(remote_blockchain, &*fetcher, block).then(move |result| match result { + Ok(header) => Either::Left( + fetcher + 
.remote_call(RemoteCallRequest { + block, + header, + method, + call_data: call_data.0, + retry_count: Default::default(), + }) + .then(|result| ready(result.map(Bytes).map_err(client_err))), + ), + Err(error) => Either::Right(ready(Err(error))), + }) } /// Get runtime version at given block. @@ -610,17 +626,14 @@ fn runtime_version>( fetcher: Arc, block: Block::Hash, ) -> impl std::future::Future> { - call( - remote_blockchain, - fetcher, - block, - "Core_version".into(), - Bytes(Vec::new()), + call(remote_blockchain, fetcher, block, "Core_version".into(), Bytes(Vec::new())).then( + |version| { + ready(version.and_then(|version| { + Decode::decode(&mut &version.0[..]) + .map_err(|e| client_err(ClientError::VersionInvalid(e.to_string()))) + })) + }, ) - .then(|version| ready(version.and_then(|version| - Decode::decode(&mut &version.0[..]) - .map_err(|e| client_err(ClientError::VersionInvalid(e.to_string()))) - ))) } /// Get storage value at given key at given block. @@ -630,22 +643,30 @@ fn storage>( block: Block::Hash, keys: Vec>, ) -> impl std::future::Future>, Error>> { - resolve_header(remote_blockchain, &*fetcher, block) - .then(move |result| match result { - Ok(header) => Either::Left(fetcher.remote_read(RemoteReadRequest { - block, - header, - keys, - retry_count: Default::default(), - }).then(|result| ready(result - .map(|result| result - .into_iter() - .map(|(key, value)| (StorageKey(key), value.map(StorageData))) - .collect() - ).map_err(client_err) - ))), - Err(error) => Either::Right(ready(Err(error))), - }) + resolve_header(remote_blockchain, &*fetcher, block).then(move |result| match result { + Ok(header) => Either::Left( + fetcher + .remote_read(RemoteReadRequest { + block, + header, + keys, + retry_count: Default::default(), + }) + .then(|result| { + ready( + result + .map(|result| { + result + .into_iter() + .map(|(key, value)| (StorageKey(key), value.map(StorageData))) + .collect() + }) + .map_err(client_err), + ) + }), + ), + Err(error) => 
Either::Right(ready(Err(error))), + }) } /// Returns subscription stream that issues request on every imported block and @@ -654,9 +675,11 @@ fn subscription_stream< Block, Requests, FutureBlocksStream, - V, N, + V, + N, InitialRequestFuture, - IssueRequest, IssueRequestFuture, + IssueRequest, + IssueRequestFuture, CompareValues, >( shared_requests: Requests, @@ -664,12 +687,14 @@ fn subscription_stream< initial_request: InitialRequestFuture, issue_request: IssueRequest, compare_values: CompareValues, -) -> impl Stream where +) -> impl Stream +where Block: BlockT, Requests: 'static + SharedRequests, - FutureBlocksStream: Stream, + FutureBlocksStream: Stream, V: Send + 'static + Clone, - InitialRequestFuture: std::future::Future> + Send + 'static, + InitialRequestFuture: + std::future::Future> + Send + 'static, IssueRequest: 'static + Fn(Block::Hash) -> IssueRequestFuture, IssueRequestFuture: std::future::Future> + Send + 'static, CompareValues: Fn(Block::Hash, Option<&V>, &V) -> Option, @@ -678,33 +703,39 @@ fn subscription_stream< let previous_value = Arc::new(Mutex::new(None)); // prepare 'stream' of initial values - let initial_value_stream = ignore_error(initial_request) - .boxed() - .compat() - .into_stream(); + let initial_value_stream = ignore_error(initial_request).boxed().compat().into_stream(); // prepare stream of future values // // we do not want to stop stream if single request fails // (the warning should have been already issued by the request issuer) - let future_values_stream = future_blocks_stream - .and_then(move |block| ignore_error(maybe_share_remote_request::( - shared_requests.clone(), - block, - &issue_request, - ).map(move |r| r.map(|v| (block, v)))).boxed().compat()); + let future_values_stream = future_blocks_stream.and_then(move |block| { + ignore_error( + maybe_share_remote_request::( + shared_requests.clone(), + block, + &issue_request, + ) + .map(move |r| r.map(|v| (block, v))), + ) + .boxed() + .compat() + }); // now let's return 
changed values for selected blocks initial_value_stream .chain(future_values_stream) - .filter_map(move |block_and_new_value| block_and_new_value.and_then(|(block, new_value)| { - let mut previous_value = previous_value.lock(); - compare_values(block, previous_value.as_ref(), &new_value) - .map(|notification_value| { - *previous_value = Some(new_value); - notification_value - }) - })) + .filter_map(move |block_and_new_value| { + block_and_new_value.and_then(|(block, new_value)| { + let mut previous_value = previous_value.lock(); + compare_values(block, previous_value.as_ref(), &new_value).map( + |notification_value| { + *previous_value = Some(new_value); + notification_value + }, + ) + }) + }) .map_err(|_| ()) } @@ -714,7 +745,8 @@ fn maybe_share_remote_request impl std::future::Future> where +) -> impl std::future::Future> +where V: Clone, Requests: SharedRequests, IssueRequest: Fn(Block::Hash) -> IssueRequestFuture, @@ -725,55 +757,58 @@ fn maybe_share_remote_request(future: F) -> impl std::future::Future> where - F: std::future::Future> +fn display_error(future: F) -> impl std::future::Future> +where + F: std::future::Future>, { - future.then(|result| ready(result.or_else(|err| { + future.then(|result| { + ready(result.or_else(|err| { warn!("Remote request for subscription data has failed with: {:?}", err); Err(()) - }))) + })) + }) } /// Convert successful future result into Ok(Some(result)) and error into Ok(None), /// displaying warning. 
-fn ignore_error(future: F) -> impl std::future::Future, ()>> where - F: std::future::Future> +fn ignore_error(future: F) -> impl std::future::Future, ()>> +where + F: std::future::Future>, { - future.then(|result| ready(match result { - Ok(result) => Ok(Some(result)), - Err(()) => Ok(None), - })) + future.then(|result| { + ready(match result { + Ok(result) => Ok(Some(result)), + Err(()) => Ok(None), + }) + }) } #[cfg(test)] mod tests { + use super::*; use rpc::futures::stream::futures_ordered; - use substrate_test_runtime_client::runtime::Block; use sp_core::H256; - use super::*; + use substrate_test_runtime_client::runtime::Block; #[test] fn subscription_stream_works() { @@ -789,13 +824,10 @@ mod tests { |_, old_value, new_value| match old_value == Some(new_value) { true => None, false => Some(new_value.clone()), - } + }, ); - assert_eq!( - stream.collect().wait(), - Ok(vec![100, 200]) - ); + assert_eq!(stream.collect().wait(), Ok(vec![100, 200])); } #[test] @@ -812,13 +844,10 @@ mod tests { |_, old_value, new_value| match old_value == Some(new_value) { true => None, false => Some(new_value.clone()), - } + }, ); - assert_eq!( - stream.collect().wait(), - Ok(vec![100, 200]) - ); + assert_eq!(stream.collect().wait(), Ok(vec![100, 200])); } #[test] @@ -828,10 +857,7 @@ mod tests { let shared_requests = SimpleSubscriptions::default(); // let's 'issue' requests for B1 - shared_requests.lock().insert( - H256::from([1; 32]), - vec![channel().0], - ); + shared_requests.lock().insert(H256::from([1; 32]), vec![channel().0]); // make sure that no additional requests are issued when we're asking for B1 let _ = maybe_share_remote_request::( diff --git a/client/rpc/src/state/tests.rs b/client/rpc/src/state/tests.rs index c9cb0bde89c1a..2a73ae31f357d 100644 --- a/client/rpc/src/state/tests.rs +++ b/client/rpc/src/state/tests.rs @@ -16,26 +16,20 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use super::*; -use super::state_full::split_range; use self::error::Error; +use super::{state_full::split_range, *}; -use std::sync::Arc; +use crate::testing::TaskExecutor; use assert_matches::assert_matches; +use futures::{compat::Future01CompatExt, executor}; use futures01::stream::Stream; -use sp_core::{storage::ChildInfo, ChangesTrieConfiguration}; -use sp_core::hash::H256; use sc_block_builder::BlockBuilderProvider; -use sp_io::hashing::blake2_256; -use substrate_test_runtime_client::{ - prelude::*, - sp_consensus::BlockOrigin, - runtime, -}; use sc_rpc_api::DenyUnsafe; +use sp_core::{hash::H256, storage::ChildInfo, ChangesTrieConfiguration}; +use sp_io::hashing::blake2_256; use sp_runtime::generic::BlockId; -use crate::testing::TaskExecutor; -use futures::{executor, compat::Future01CompatExt}; +use std::sync::Arc; +use substrate_test_runtime_client::{prelude::*, runtime, sp_consensus::BlockOrigin}; const STORAGE_KEY: &[u8] = b"child"; @@ -68,12 +62,18 @@ fn should_return_storage() { let key = StorageKey(KEY.to_vec()); assert_eq!( - client.storage(key.clone(), Some(genesis_hash).into()).wait() - .map(|x| x.map(|x| x.0.len())).unwrap().unwrap() as usize, + client + .storage(key.clone(), Some(genesis_hash).into()) + .wait() + .map(|x| x.map(|x| x.0.len())) + .unwrap() + .unwrap() as usize, VALUE.len(), ); assert_matches!( - client.storage_hash(key.clone(), Some(genesis_hash).into()).wait() + client + .storage_hash(key.clone(), Some(genesis_hash).into()) + .wait() .map(|x| x.is_some()), Ok(true) ); @@ -87,10 +87,13 @@ fn should_return_storage() { ); assert_eq!( executor::block_on( - child.storage(prefixed_storage_key(), key, Some(genesis_hash).into()) + child + .storage(prefixed_storage_key(), key, Some(genesis_hash).into()) .map(|x| x.map(|x| x.0.len())) .compat(), - ).unwrap().unwrap() as usize, + ) + .unwrap() + .unwrap() as usize, CHILD_VALUE.len(), ); } @@ -98,20 +101,17 @@ fn should_return_storage() { #[test] fn should_return_child_storage() { let 
child_info = ChildInfo::new_default(STORAGE_KEY); - let client = Arc::new(substrate_test_runtime_client::TestClientBuilder::new() - .add_child_storage(&child_info, "key", vec![42_u8]) - .build()); - let genesis_hash = client.genesis_hash(); - let (_client, child) = new_full( - client, - SubscriptionManager::new(Arc::new(TaskExecutor)), - DenyUnsafe::No, - None, + let client = Arc::new( + substrate_test_runtime_client::TestClientBuilder::new() + .add_child_storage(&child_info, "key", vec![42_u8]) + .build(), ); + let genesis_hash = client.genesis_hash(); + let (_client, child) = + new_full(client, SubscriptionManager::new(Arc::new(TaskExecutor)), DenyUnsafe::No, None); let child_key = prefixed_storage_key(); let key = StorageKey(b"key".to_vec()); - assert_matches!( child.storage( child_key.clone(), @@ -121,36 +121,26 @@ fn should_return_child_storage() { Ok(Some(StorageData(ref d))) if d[0] == 42 && d.len() == 1 ); assert_matches!( - child.storage_hash( - child_key.clone(), - key.clone(), - Some(genesis_hash).into(), - ).wait().map(|x| x.is_some()), + child + .storage_hash(child_key.clone(), key.clone(), Some(genesis_hash).into(),) + .wait() + .map(|x| x.is_some()), Ok(true) ); - assert_matches!( - child.storage_size( - child_key.clone(), - key.clone(), - None, - ).wait(), - Ok(Some(1)) - ); + assert_matches!(child.storage_size(child_key.clone(), key.clone(), None,).wait(), Ok(Some(1))); } #[test] fn should_call_contract() { let client = Arc::new(substrate_test_runtime_client::new()); let genesis_hash = client.genesis_hash(); - let (client, _child) = new_full( - client, - SubscriptionManager::new(Arc::new(TaskExecutor)), - DenyUnsafe::No, - None, - ); + let (client, _child) = + new_full(client, SubscriptionManager::new(Arc::new(TaskExecutor)), DenyUnsafe::No, None); assert_matches!( - client.call("balanceOf".into(), Bytes(vec![1,2,3]), Some(genesis_hash).into()).wait(), + client + .call("balanceOf".into(), Bytes(vec![1, 2, 3]), Some(genesis_hash).into()) + .wait(), 
Err(Error::Client(_)) ) } @@ -171,18 +161,17 @@ fn should_notify_about_storage_changes() { api.subscribe_storage(Default::default(), subscriber, None.into()); // assert id assigned - assert!(matches!( - executor::block_on(id.compat()), - Ok(Ok(SubscriptionId::String(_))) - )); + assert!(matches!(executor::block_on(id.compat()), Ok(Ok(SubscriptionId::String(_))))); let mut builder = client.new_block(Default::default()).unwrap(); - builder.push_transfer(runtime::Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 42, - nonce: 0, - }).unwrap(); + builder + .push_transfer(runtime::Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 42, + nonce: 0, + }) + .unwrap(); let block = builder.build().unwrap().block; executor::block_on(client.import(BlockOrigin::Own, block)).unwrap(); } @@ -207,25 +196,27 @@ fn should_send_initial_storage_changes_and_notifications() { None, ); - let alice_balance_key = blake2_256(&runtime::system::balance_of_key(AccountKeyring::Alice.into())); + let alice_balance_key = + blake2_256(&runtime::system::balance_of_key(AccountKeyring::Alice.into())); - api.subscribe_storage(Default::default(), subscriber, Some(vec![ - StorageKey(alice_balance_key.to_vec()), - ]).into()); + api.subscribe_storage( + Default::default(), + subscriber, + Some(vec![StorageKey(alice_balance_key.to_vec())]).into(), + ); // assert id assigned - assert!(matches!( - executor::block_on(id.compat()), - Ok(Ok(SubscriptionId::String(_))) - )); + assert!(matches!(executor::block_on(id.compat()), Ok(Ok(SubscriptionId::String(_))))); let mut builder = client.new_block(Default::default()).unwrap(); - builder.push_transfer(runtime::Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 42, - nonce: 0, - }).unwrap(); + builder + .push_transfer(runtime::Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 42, + nonce: 
0, + }) + .unwrap(); let block = builder.build().unwrap().block; executor::block_on(client.import(BlockOrigin::Own, block)).unwrap(); } @@ -257,9 +248,13 @@ fn should_query_storage() { // fake change: None -> Some(value) -> Some(value) builder.push_storage_change(vec![2], Some(vec![2])).unwrap(); // actual change: None -> Some(value) -> None - builder.push_storage_change(vec![3], if nonce == 0 { Some(vec![3]) } else { None }).unwrap(); + builder + .push_storage_change(vec![3], if nonce == 0 { Some(vec![3]) } else { None }) + .unwrap(); // actual change: None -> Some(value) - builder.push_storage_change(vec![4], if nonce == 0 { None } else { Some(vec![4]) }).unwrap(); + builder + .push_storage_change(vec![4], if nonce == 0 { None } else { Some(vec![4]) }) + .unwrap(); // actual change: Some(value1) -> Some(value2) builder.push_storage_change(vec![5], Some(vec![nonce as u8])).unwrap(); let block = builder.build().unwrap().block; @@ -301,20 +296,12 @@ fn should_query_storage() { // Query changes only up to block1 let keys = (1..6).map(|k| StorageKey(vec![k])).collect::>(); - let result = api.query_storage( - keys.clone(), - genesis_hash, - Some(block1_hash).into(), - ); + let result = api.query_storage(keys.clone(), genesis_hash, Some(block1_hash).into()); assert_eq!(result.wait().unwrap(), expected); // Query all changes - let result = api.query_storage( - keys.clone(), - genesis_hash, - None.into(), - ); + let result = api.query_storage(keys.clone(), genesis_hash, None.into()); expected.push(StorageChangeSet { block: block2_hash, @@ -327,20 +314,12 @@ fn should_query_storage() { assert_eq!(result.wait().unwrap(), expected); // Query changes up to block2. - let result = api.query_storage( - keys.clone(), - genesis_hash, - Some(block2_hash), - ); + let result = api.query_storage(keys.clone(), genesis_hash, Some(block2_hash)); assert_eq!(result.wait().unwrap(), expected); // Inverted range. 
- let result = api.query_storage( - keys.clone(), - block1_hash, - Some(genesis_hash), - ); + let result = api.query_storage(keys.clone(), block1_hash, Some(genesis_hash)); assert_eq!( result.wait().map_err(|e| e.to_string()), @@ -348,18 +327,15 @@ fn should_query_storage() { from: format!("1 ({:?})", block1_hash), to: format!("0 ({:?})", genesis_hash), details: "from number > to number".to_owned(), - }).map_err(|e| e.to_string()) + }) + .map_err(|e| e.to_string()) ); let random_hash1 = H256::random(); let random_hash2 = H256::random(); // Invalid second hash. - let result = api.query_storage( - keys.clone(), - genesis_hash, - Some(random_hash1), - ); + let result = api.query_storage(keys.clone(), genesis_hash, Some(random_hash1)); assert_eq!( result.wait().map_err(|e| e.to_string()), @@ -367,15 +343,12 @@ fn should_query_storage() { from: format!("{:?}", genesis_hash), to: format!("{:?}", Some(random_hash1)), details: format!("UnknownBlock: header not found in db: {}", random_hash1), - }).map_err(|e| e.to_string()) + }) + .map_err(|e| e.to_string()) ); // Invalid first hash with Some other hash. - let result = api.query_storage( - keys.clone(), - random_hash1, - Some(genesis_hash), - ); + let result = api.query_storage(keys.clone(), random_hash1, Some(genesis_hash)); assert_eq!( result.wait().map_err(|e| e.to_string()), @@ -383,15 +356,12 @@ fn should_query_storage() { from: format!("{:?}", random_hash1), to: format!("{:?}", Some(genesis_hash)), details: format!("UnknownBlock: header not found in db: {}", random_hash1), - }).map_err(|e| e.to_string()), + }) + .map_err(|e| e.to_string()), ); // Invalid first hash with None. - let result = api.query_storage( - keys.clone(), - random_hash1, - None, - ); + let result = api.query_storage(keys.clone(), random_hash1, None); assert_eq!( result.wait().map_err(|e| e.to_string()), @@ -399,15 +369,12 @@ fn should_query_storage() { from: format!("{:?}", random_hash1), to: format!("{:?}", Some(block2_hash)), // Best block hash. 
details: format!("UnknownBlock: header not found in db: {}", random_hash1), - }).map_err(|e| e.to_string()), + }) + .map_err(|e| e.to_string()), ); // Both hashes invalid. - let result = api.query_storage( - keys.clone(), - random_hash1, - Some(random_hash2), - ); + let result = api.query_storage(keys.clone(), random_hash1, Some(random_hash2)); assert_eq!( result.wait().map_err(|e| e.to_string()), @@ -415,29 +382,25 @@ fn should_query_storage() { from: format!("{:?}", random_hash1), // First hash not found. to: format!("{:?}", Some(random_hash2)), details: format!("UnknownBlock: header not found in db: {}", random_hash1), - }).map_err(|e| e.to_string()), + }) + .map_err(|e| e.to_string()), ); // single block range - let result = api.query_storage_at( - keys.clone(), - Some(block1_hash), - ); + let result = api.query_storage_at(keys.clone(), Some(block1_hash)); assert_eq!( result.wait().unwrap(), - vec![ - StorageChangeSet { - block: block1_hash, - changes: vec![ - (StorageKey(vec![1_u8]), None), - (StorageKey(vec![2_u8]), Some(StorageData(vec![2_u8]))), - (StorageKey(vec![3_u8]), Some(StorageData(vec![3_u8]))), - (StorageKey(vec![4_u8]), None), - (StorageKey(vec![5_u8]), Some(StorageData(vec![0_u8]))), - ] - } - ] + vec![StorageChangeSet { + block: block1_hash, + changes: vec![ + (StorageKey(vec![1_u8]), None), + (StorageKey(vec![2_u8]), Some(StorageData(vec![2_u8]))), + (StorageKey(vec![3_u8]), Some(StorageData(vec![3_u8]))), + (StorageKey(vec![4_u8]), None), + (StorageKey(vec![5_u8]), Some(StorageData(vec![0_u8]))), + ] + }] ); } @@ -461,7 +424,6 @@ fn should_split_ranges() { assert_eq!(split_range(100, Some(99)), (0..99, Some(99..100))); } - #[test] fn should_return_runtime_version() { let client = Arc::new(substrate_test_runtime_client::new()); @@ -503,17 +465,13 @@ fn should_notify_on_runtime_version_initially() { api.subscribe_runtime_version(Default::default(), subscriber); // assert id assigned - assert!(matches!( - executor::block_on(id.compat()), - 
Ok(Ok(SubscriptionId::String(_))) - )); - + assert!(matches!(executor::block_on(id.compat()), Ok(Ok(SubscriptionId::String(_))))); } // assert initial version sent. let (notification, next) = executor::block_on(transport.into_future().compat()).unwrap(); assert!(notification.is_some()); - // no more notifications on this channel + // no more notifications on this channel assert_eq!(executor::block_on(next.into_future().compat()).unwrap().0, None); } diff --git a/client/rpc/src/system/mod.rs b/client/rpc/src/system/mod.rs index d405755731ccb..08258640ad7a0 100644 --- a/client/rpc/src/system/mod.rs +++ b/client/rpc/src/system/mod.rs @@ -21,24 +21,25 @@ #[cfg(test)] mod tests; -use futures::{future::BoxFuture, FutureExt, TryFutureExt}; -use futures::{channel::oneshot, compat::Compat}; +use futures::{channel::oneshot, compat::Compat, future::BoxFuture, FutureExt, TryFutureExt}; use sc_rpc_api::{DenyUnsafe, Receiver}; use sc_tracing::logging; -use sp_utils::mpsc::TracingUnboundedSender; use sp_runtime::traits::{self, Header as HeaderT}; +use sp_utils::mpsc::TracingUnboundedSender; use self::error::Result; +pub use self::{ + gen_client::Client as SystemClient, + helpers::{Health, NodeRole, PeerInfo, SyncState, SystemInfo}, +}; pub use sc_rpc_api::system::*; -pub use self::helpers::{SystemInfo, Health, PeerInfo, NodeRole, SyncState}; -pub use self::gen_client::Client as SystemClient; /// Early exit for RPCs that require `--rpc-methods=Unsafe` to be enabled macro_rules! 
bail_if_unsafe { ($value: expr) => { if let Err(err) = $value.check_if_safe() { - return async move { Err(err.into()) }.boxed().compat(); + return async move { Err(err.into()) }.boxed().compat() } }; } @@ -85,11 +86,7 @@ impl System { send_back: TracingUnboundedSender>, deny_unsafe: DenyUnsafe, ) -> Self { - System { - info, - send_back, - deny_unsafe, - } + System { info, send_back, deny_unsafe } } } @@ -132,35 +129,36 @@ impl SystemApi::Number> for Sy Receiver(Compat::new(rx)) } - fn system_peers(&self) - -> Compat::Number>>>>> - { + fn system_peers( + &self, + ) -> Compat< + BoxFuture<'static, rpc::Result::Number>>>>, + > { bail_if_unsafe!(self.deny_unsafe); let (tx, rx) = oneshot::channel(); let _ = self.send_back.unbounded_send(Request::Peers(tx)); - async move { - rx.await.map_err(|_| rpc::Error::internal_error()) - }.boxed().compat() + async move { rx.await.map_err(|_| rpc::Error::internal_error()) } + .boxed() + .compat() } - fn system_network_state(&self) - -> Compat>> - { + fn system_network_state(&self) -> Compat>> { bail_if_unsafe!(self.deny_unsafe); let (tx, rx) = oneshot::channel(); let _ = self.send_back.unbounded_send(Request::NetworkState(tx)); - async move { - rx.await.map_err(|_| rpc::Error::internal_error()) - }.boxed().compat() + async move { rx.await.map_err(|_| rpc::Error::internal_error()) } + .boxed() + .compat() } - fn system_add_reserved_peer(&self, peer: String) - -> Compat>> - { + fn system_add_reserved_peer( + &self, + peer: String, + ) -> Compat>> { bail_if_unsafe!(self.deny_unsafe); let (tx, rx) = oneshot::channel(); @@ -171,12 +169,15 @@ impl SystemApi::Number> for Sy Ok(Err(e)) => Err(rpc::Error::from(e)), Err(_) => Err(rpc::Error::internal_error()), } - }.boxed().compat() + } + .boxed() + .compat() } - fn system_remove_reserved_peer(&self, peer: String) - -> Compat>> - { + fn system_remove_reserved_peer( + &self, + peer: String, + ) -> Compat>> { bail_if_unsafe!(self.deny_unsafe); let (tx, rx) = oneshot::channel(); @@ -187,7 
+188,9 @@ impl SystemApi::Number> for Sy Ok(Err(e)) => Err(rpc::Error::from(e)), Err(_) => Err(rpc::Error::internal_error()), } - }.boxed().compat() + } + .boxed() + .compat() } fn system_reserved_peers(&self) -> Receiver> { @@ -214,7 +217,7 @@ impl SystemApi::Number> for Sy logging::reload_filter().map_err(|_e| rpc::Error::internal_error()) } - fn system_reset_log_filter(&self)-> std::result::Result<(), rpc::Error> { + fn system_reset_log_filter(&self) -> std::result::Result<(), rpc::Error> { self.deny_unsafe.check_if_safe()?; logging::reset_log_filter().map_err(|_e| rpc::Error::internal_error()) } diff --git a/client/rpc/src/system/tests.rs b/client/rpc/src/system/tests.rs index 6e22004cd65f7..906bd60229d1f 100644 --- a/client/rpc/src/system/tests.rs +++ b/client/rpc/src/system/tests.rs @@ -18,13 +18,17 @@ use super::*; -use sc_network::{self, PeerId}; -use sc_network::config::Role; -use substrate_test_runtime_client::runtime::Block; use assert_matches::assert_matches; use futures::prelude::*; +use sc_network::{self, config::Role, PeerId}; use sp_utils::mpsc::tracing_unbounded; -use std::{process::{Stdio, Command}, env, io::{BufReader, BufRead, Write}, thread}; +use std::{ + env, + io::{BufRead, BufReader, Write}, + process::{Command, Stdio}, + thread, +}; +use substrate_test_runtime_client::runtime::Block; struct Status { pub peers: usize, @@ -35,12 +39,7 @@ struct Status { impl Default for Status { fn default() -> Status { - Status { - peer_id: PeerId::random(), - peers: 0, - is_syncing: false, - is_dev: false, - } + Status { peer_id: PeerId::random(), peers: 0, is_syncing: false, is_dev: false } } } @@ -59,7 +58,8 @@ fn api>>(sync: T) -> System { }); }, Request::LocalPeerId(sender) => { - let _ = sender.send("QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV".to_string()); + let _ = + sender.send("QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV".to_string()); }, Request::LocalListenAddresses(sender) => { let _ = sender.send(vec![ @@ -78,42 +78,48 @@ fn api>>(sync: 
T) -> System { }); } let _ = sender.send(peers); - } + }, Request::NetworkState(sender) => { - let _ = sender.send(serde_json::to_value(&sc_network::network_state::NetworkState { - peer_id: String::new(), - listened_addresses: Default::default(), - external_addresses: Default::default(), - connected_peers: Default::default(), - not_connected_peers: Default::default(), - peerset: serde_json::Value::Null, - }).unwrap()); + let _ = sender.send( + serde_json::to_value(&sc_network::network_state::NetworkState { + peer_id: String::new(), + listened_addresses: Default::default(), + external_addresses: Default::default(), + connected_peers: Default::default(), + not_connected_peers: Default::default(), + peerset: serde_json::Value::Null, + }) + .unwrap(), + ); }, Request::NetworkAddReservedPeer(peer, sender) => { let _ = match sc_network::config::parse_str_addr(&peer) { Ok(_) => sender.send(Ok(())), - Err(s) => sender.send(Err(error::Error::MalformattedPeerArg(s.to_string()))), + Err(s) => + sender.send(Err(error::Error::MalformattedPeerArg(s.to_string()))), }; }, Request::NetworkRemoveReservedPeer(peer, sender) => { let _ = match peer.parse::() { Ok(_) => sender.send(Ok(())), - Err(s) => sender.send(Err(error::Error::MalformattedPeerArg(s.to_string()))), + Err(s) => + sender.send(Err(error::Error::MalformattedPeerArg(s.to_string()))), }; - } + }, Request::NetworkReservedPeers(sender) => { - let _ = sender.send(vec!["QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV".to_string()]); - } + let _ = sender + .send(vec!["QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV".to_string()]); + }, Request::NodeRoles(sender) => { let _ = sender.send(vec![NodeRole::Authority]); - } + }, Request::SyncState(sender) => { let _ = sender.send(SyncState { starting_block: 1, current_block: 2, highest_block: Some(3), }); - } + }, }; future::ready(()) @@ -128,7 +134,7 @@ fn api>>(sync: T) -> System { chain_type: Default::default(), }, tx, - sc_rpc_api::DenyUnsafe::No + sc_rpc_api::DenyUnsafe::No, ) } 
@@ -139,95 +145,58 @@ fn wait_receiver(rx: Receiver) -> T { #[test] fn system_name_works() { - assert_eq!( - api(None).system_name().unwrap(), - "testclient".to_owned(), - ); + assert_eq!(api(None).system_name().unwrap(), "testclient".to_owned(),); } #[test] fn system_version_works() { - assert_eq!( - api(None).system_version().unwrap(), - "0.2.0".to_owned(), - ); + assert_eq!(api(None).system_version().unwrap(), "0.2.0".to_owned(),); } #[test] fn system_chain_works() { - assert_eq!( - api(None).system_chain().unwrap(), - "testchain".to_owned(), - ); + assert_eq!(api(None).system_chain().unwrap(), "testchain".to_owned(),); } #[test] fn system_properties_works() { - assert_eq!( - api(None).system_properties().unwrap(), - serde_json::map::Map::new(), - ); + assert_eq!(api(None).system_properties().unwrap(), serde_json::map::Map::new(),); } #[test] fn system_type_works() { - assert_eq!( - api(None).system_type().unwrap(), - Default::default(), - ); + assert_eq!(api(None).system_type().unwrap(), Default::default(),); } #[test] fn system_health() { assert_matches!( wait_receiver(api(None).system_health()), - Health { - peers: 0, - is_syncing: false, - should_have_peers: true, - } + Health { peers: 0, is_syncing: false, should_have_peers: true } ); assert_matches!( - wait_receiver(api(Status { - peer_id: PeerId::random(), - peers: 5, - is_syncing: true, - is_dev: true, - }).system_health()), - Health { - peers: 5, - is_syncing: true, - should_have_peers: false, - } + wait_receiver( + api(Status { peer_id: PeerId::random(), peers: 5, is_syncing: true, is_dev: true }) + .system_health() + ), + Health { peers: 5, is_syncing: true, should_have_peers: false } ); assert_eq!( - wait_receiver(api(Status { - peer_id: PeerId::random(), - peers: 5, - is_syncing: false, - is_dev: false, - }).system_health()), - Health { - peers: 5, - is_syncing: false, - should_have_peers: true, - } + wait_receiver( + api(Status { peer_id: PeerId::random(), peers: 5, is_syncing: false, is_dev: false 
}) + .system_health() + ), + Health { peers: 5, is_syncing: false, should_have_peers: true } ); assert_eq!( - wait_receiver(api(Status { - peer_id: PeerId::random(), - peers: 0, - is_syncing: false, - is_dev: true, - }).system_health()), - Health { - peers: 0, - is_syncing: false, - should_have_peers: false, - } + wait_receiver( + api(Status { peer_id: PeerId::random(), peers: 0, is_syncing: false, is_dev: true }) + .system_health() + ), + Health { peers: 0, is_syncing: false, should_have_peers: false } ); } @@ -244,8 +213,10 @@ fn system_local_listen_addresses_works() { assert_eq!( wait_receiver(api(None).system_local_listen_addresses()), vec![ - "/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV".to_string(), - "/ip4/127.0.0.1/tcp/30334/ws/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV".to_string(), + "/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV" + .to_string(), + "/ip4/127.0.0.1/tcp/30334/ws/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV" + .to_string(), ] ); } @@ -255,12 +226,8 @@ fn system_peers() { let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); let peer_id = PeerId::random(); - let req = api(Status { - peer_id: peer_id.clone(), - peers: 1, - is_syncing: false, - is_dev: true, - }).system_peers(); + let req = api(Status { peer_id: peer_id.clone(), peers: 1, is_syncing: false, is_dev: true }) + .system_peers(); let res = runtime.block_on(req).unwrap(); assert_eq!( @@ -295,27 +262,21 @@ fn system_network_state() { #[test] fn system_node_roles() { - assert_eq!( - wait_receiver(api(None).system_node_roles()), - vec![NodeRole::Authority] - ); + assert_eq!(wait_receiver(api(None).system_node_roles()), vec![NodeRole::Authority]); } #[test] fn system_sync_state() { assert_eq!( wait_receiver(api(None).system_sync_state()), - SyncState { - starting_block: 1, - current_block: 2, - highest_block: Some(3), - } + SyncState { starting_block: 1, current_block: 2, 
highest_block: Some(3) } ); } #[test] fn system_network_add_reserved() { - let good_peer_id = "/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV"; + let good_peer_id = + "/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV"; let bad_peer_id = "/ip4/198.51.100.19/tcp/30333"; let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); @@ -328,7 +289,8 @@ fn system_network_add_reserved() { #[test] fn system_network_remove_reserved() { let good_peer_id = "QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV"; - let bad_peer_id = "/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV"; + let bad_peer_id = + "/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV"; let mut runtime = tokio::runtime::current_thread::Runtime::new().unwrap(); let good_fut = api(None).system_remove_reserved_peer(good_peer_id.into()); @@ -357,15 +319,17 @@ fn test_add_reset_log_filter() { for line in std::io::stdin().lock().lines() { let line = line.expect("Failed to read bytes"); if line.contains("add_reload") { - api(None).system_add_log_filter("test_after_add".into()) + api(None) + .system_add_log_filter("test_after_add".into()) .expect("`system_add_log_filter` failed"); } else if line.contains("add_trace") { - api(None).system_add_log_filter("test_before_add=trace".into()) + api(None) + .system_add_log_filter("test_before_add=trace".into()) .expect("`system_add_log_filter` failed"); } else if line.contains("reset") { api(None).system_reset_log_filter().expect("`system_reset_log_filter` failed"); } else if line.contains("exit") { - return; + return } log::trace!(target: "test_before_add", "{}", EXPECTED_WITH_TRACE); log::debug!(target: "test_before_add", "{}", EXPECTED_BEFORE_ADD); diff --git a/client/rpc/src/testing.rs b/client/rpc/src/testing.rs index b69cc7d4b1940..e6b30ecdb42b4 100644 --- a/client/rpc/src/testing.rs +++ b/client/rpc/src/testing.rs @@ -18,8 +18,8 @@ //! 
Testing utils used by the RPC tests. +use futures::{compat::Future01CompatExt, executor, FutureExt}; use rpc::futures::future as future01; -use futures::{executor, compat::Future01CompatExt, FutureExt}; // Executor shared by all tests. // @@ -38,7 +38,7 @@ impl future01::Executor for TaskExecutor { fn execute( &self, future: Boxed01Future01, - ) -> std::result::Result<(), future01::ExecuteError>{ + ) -> std::result::Result<(), future01::ExecuteError> { EXECUTOR.spawn_ok(future.compat().map(drop)); Ok(()) } diff --git a/client/service/src/builder.rs b/client/service/src/builder.rs index 46590ce8e8c6c..2885fb6deb54c 100644 --- a/client/service/src/builder.rs +++ b/client/service/src/builder.rs @@ -17,65 +17,52 @@ // along with this program. If not, see . use crate::{ - error::Error, MallocSizeOfWasm, RpcHandlers, - start_rpc_servers, build_network_future, TransactionPoolAdapter, TaskManager, SpawnTaskHandle, - metrics::MetricsService, + build_network_future, client::{light, Client, ClientConfig}, config::{Configuration, KeystoreConfig, PrometheusConfig, TransactionStorageMode}, + error::Error, + metrics::MetricsService, + start_rpc_servers, MallocSizeOfWasm, RpcHandlers, SpawnTaskHandle, TaskManager, + TransactionPoolAdapter, }; -use sc_client_api::{ - light::RemoteBlockchain, ForkBlocks, BadBlocks, UsageProvider, ExecutorProvider, -}; -use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedSender}; -use sc_chain_spec::get_extension; -use sp_consensus::{ - block_validation::{BlockAnnounceValidator, DefaultBlockAnnounceValidator, Chain}, - import_queue::ImportQueue, -}; +use futures::{channel::oneshot, future::ready, FutureExt, StreamExt}; use jsonrpc_pubsub::manager::SubscriptionManager; -use futures::{ - FutureExt, StreamExt, - future::ready, - channel::oneshot, -}; -use sc_keystore::LocalKeystore; use log::info; -use sc_network::config::{Role, OnDemand, SyncMode}; -use sc_network::NetworkService; -use sc_network::block_request_handler::{self, BlockRequestHandler}; 
-use sc_network::state_request_handler::{self, StateRequestHandler}; -use sc_network::light_client_requests::{self, handler::LightClientRequestHandler}; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{ - Block as BlockT, HashFor, Zero, BlockIdTo, +use prometheus_endpoint::Registry; +use sc_chain_spec::get_extension; +use sc_client_api::{ + execution_extensions::ExecutionExtensions, light::RemoteBlockchain, + proof_provider::ProofProvider, BadBlocks, BlockBackend, BlockchainEvents, ExecutorProvider, + ForkBlocks, StorageProvider, UsageProvider, }; -use sp_api::{ProvideRuntimeApi, CallApiAt}; -use sc_executor::{NativeExecutor, NativeExecutionDispatch, RuntimeInfo}; -use std::{sync::Arc, str::FromStr}; -use wasm_timer::SystemTime; -use sc_telemetry::{ - telemetry, - ConnectionMessage, - Telemetry, - TelemetryHandle, - SUBSTRATE_INFO, +use sc_client_db::{Backend, DatabaseSettings}; +use sc_executor::{NativeExecutionDispatch, NativeExecutor, RuntimeInfo}; +use sc_keystore::LocalKeystore; +use sc_network::{ + block_request_handler::{self, BlockRequestHandler}, + config::{OnDemand, Role, SyncMode}, + light_client_requests::{self, handler::LightClientRequestHandler}, + state_request_handler::{self, StateRequestHandler}, + NetworkService, }; +use sc_telemetry::{telemetry, ConnectionMessage, Telemetry, TelemetryHandle, SUBSTRATE_INFO}; use sc_transaction_pool_api::MaintainedTransactionPool; -use prometheus_endpoint::Registry; -use sc_client_db::{Backend, DatabaseSettings}; -use sp_core::traits::{ - CodeExecutor, - SpawnNamed, +use sp_api::{CallApiAt, ProvideRuntimeApi}; +use sp_blockchain::{HeaderBackend, HeaderMetadata}; +use sp_consensus::{ + block_validation::{BlockAnnounceValidator, Chain, DefaultBlockAnnounceValidator}, + import_queue::ImportQueue, }; +use sp_core::traits::{CodeExecutor, SpawnNamed}; use sp_keystore::{CryptoStore, SyncCryptoStore, SyncCryptoStorePtr}; -use sp_runtime::BuildStorage; -use sc_client_api::{ - BlockBackend, BlockchainEvents, - 
StorageProvider, - proof_provider::ProofProvider, - execution_extensions::ExecutionExtensions +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, BlockIdTo, HashFor, Zero}, + BuildStorage, }; -use sp_blockchain::{HeaderMetadata, HeaderBackend}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedSender}; +use std::{str::FromStr, sync::Arc}; +use wasm_timer::SystemTime; /// A utility trait for building an RPC extension given a `DenyUnsafe` instance. /// This is useful since at service definition time we don't know whether the @@ -95,7 +82,8 @@ pub trait RpcExtensionBuilder { ) -> Self::Output; } -impl RpcExtensionBuilder for F where +impl RpcExtensionBuilder for F +where F: Fn(sc_rpc::DenyUnsafe, sc_rpc::SubscriptionTaskExecutor) -> R, R: sc_rpc::RpcExtension, { @@ -115,7 +103,8 @@ impl RpcExtensionBuilder for F where /// `DenyUnsafe` instance and return a static `RpcExtension` instance. pub struct NoopRpcExtensionBuilder(pub R); -impl RpcExtensionBuilder for NoopRpcExtensionBuilder where +impl RpcExtensionBuilder for NoopRpcExtensionBuilder +where R: Clone + sc_rpc::RpcExtension, { type Output = R; @@ -129,7 +118,8 @@ impl RpcExtensionBuilder for NoopRpcExtensionBuilder where } } -impl From for NoopRpcExtensionBuilder where +impl From for NoopRpcExtensionBuilder +where R: sc_rpc::RpcExtension, { fn from(e: R) -> NoopRpcExtensionBuilder { @@ -137,58 +127,37 @@ impl From for NoopRpcExtensionBuilder where } } - /// Full client type. -pub type TFullClient = Client< - TFullBackend, - TFullCallExecutor, - TBl, - TRtApi, ->; +pub type TFullClient = + Client, TFullCallExecutor, TBl, TRtApi>; /// Full client backend type. pub type TFullBackend = sc_client_db::Backend; /// Full client call executor type. -pub type TFullCallExecutor = crate::client::LocalCallExecutor< - TBl, - sc_client_db::Backend, - NativeExecutor, ->; +pub type TFullCallExecutor = + crate::client::LocalCallExecutor, NativeExecutor>; /// Light client type. 
-pub type TLightClient = TLightClientWithBackend< - TBl, TRtApi, TExecDisp, TLightBackend ->; +pub type TLightClient = + TLightClientWithBackend>; /// Light client backend type. -pub type TLightBackend = sc_light::Backend< - sc_client_db::light::LightStorage, - HashFor, ->; +pub type TLightBackend = + sc_light::Backend, HashFor>; /// Light call executor type. pub type TLightCallExecutor = sc_light::GenesisCallExecutor< - sc_light::Backend< - sc_client_db::light::LightStorage, - HashFor - >, + sc_light::Backend, HashFor>, crate::client::LocalCallExecutor< TBl, - sc_light::Backend< - sc_client_db::light::LightStorage, - HashFor - >, - NativeExecutor + sc_light::Backend, HashFor>, + NativeExecutor, >, >; -type TFullParts = ( - TFullClient, - Arc>, - KeystoreContainer, - TaskManager, -); +type TFullParts = + (TFullClient, Arc>, KeystoreContainer, TaskManager); type TLightParts = ( Arc>, @@ -199,10 +168,8 @@ type TLightParts = ( ); /// Light client backend type with a specific hash type. -pub type TLightBackendWithHash = sc_light::Backend< - sc_client_db::light::LightStorage, - THash, ->; +pub type TLightBackendWithHash = + sc_light::Backend, THash>; /// Light client type with a specific backend. 
pub type TLightClientWithBackend = Client< @@ -220,7 +187,10 @@ trait AsCryptoStoreRef { fn sync_keystore_ref(&self) -> Arc; } -impl AsCryptoStoreRef for Arc where T: CryptoStore + SyncCryptoStore + 'static { +impl AsCryptoStoreRef for Arc +where + T: CryptoStore + SyncCryptoStore + 'static, +{ fn keystore_ref(&self) -> Arc { self.clone() } @@ -239,14 +209,12 @@ impl KeystoreContainer { /// Construct KeystoreContainer pub fn new(config: &KeystoreConfig) -> Result { let keystore = Arc::new(match config { - KeystoreConfig::Path { path, password } => LocalKeystore::open( - path.clone(), - password.clone(), - )?, + KeystoreConfig::Path { path, password } => + LocalKeystore::open(path.clone(), password.clone())?, KeystoreConfig::InMemory => LocalKeystore::in_memory(), }); - Ok(Self{remote: Default::default(), local: keystore}) + Ok(Self { remote: Default::default(), local: keystore }) } /// Set the remote keystore. @@ -255,7 +223,8 @@ impl KeystoreContainer { /// does not reset any references previously handed out - they will /// stick around. 
pub fn set_remote_keystore(&mut self, remote: Arc) - where T: CryptoStore + SyncCryptoStore + 'static + where + T: CryptoStore + SyncCryptoStore + 'static, { self.remote = Some(Box::new(remote)) } @@ -295,7 +264,8 @@ impl KeystoreContainer { pub fn new_full_client( config: &Configuration, telemetry: Option, -) -> Result, Error> where +) -> Result, Error> +where TBl: BlockT, TExecDisp: NativeExecutionDispatch + 'static, TBl::Hash: FromStr, @@ -307,7 +277,8 @@ pub fn new_full_client( pub fn new_full_parts( config: &Configuration, telemetry: Option, -) -> Result, Error> where +) -> Result, Error> +where TBl: BlockT, TExecDisp: NativeExecutionDispatch + 'static, TBl::Hash: FromStr, @@ -337,15 +308,13 @@ pub fn new_full_parts( let (client, backend) = { let db_config = sc_client_db::DatabaseSettings { state_cache_size: config.state_cache_size, - state_cache_child_ratio: - config.state_cache_child_ratio.map(|v| (v, 100)), + state_cache_child_ratio: config.state_cache_child_ratio.map(|v| (v, 100)), state_pruning: config.state_pruning.clone(), source: config.database.clone(), keep_blocks: config.keep_blocks.clone(), transaction_storage: config.transaction_storage.clone(), }; - let backend = new_db_backend(db_config)?; let extensions = sc_client_api::execution_extensions::ExecutionExtensions::new( @@ -354,15 +323,20 @@ pub fn new_full_parts( sc_offchain::OffchainDb::factory_from_backend(&*backend), ); - let wasm_runtime_substitutes = config.chain_spec.code_substitutes().into_iter().map(|(h, c)| { - let hash = TBl::Hash::from_str(&h) - .map_err(|_| - Error::Application(Box::from( - format!("Failed to parse `{}` as block hash for code substitutes.", h) - )) - )?; - Ok((hash, c)) - }).collect::, Error>>()?; + let wasm_runtime_substitutes = config + .chain_spec + .code_substitutes() + .into_iter() + .map(|(h, c)| { + let hash = TBl::Hash::from_str(&h).map_err(|_| { + Error::Application(Box::from(format!( + "Failed to parse `{}` as block hash for code substitutes.", + h + ))) + 
})?; + Ok((hash, c)) + }) + .collect::, Error>>()?; let client = new_client( backend.clone(), @@ -375,10 +349,13 @@ pub fn new_full_parts( config.prometheus_config.as_ref().map(|config| config.registry.clone()), telemetry, ClientConfig { - offchain_worker_enabled : config.offchain_worker.enabled, + offchain_worker_enabled: config.offchain_worker.enabled, offchain_indexing_api: config.offchain_worker.indexing_enabled, wasm_runtime_overrides: config.wasm_runtime_overrides.clone(), - no_genesis: matches!(config.network.sync_mode, sc_network::config::SyncMode::Fast {..}), + no_genesis: matches!( + config.network.sync_mode, + sc_network::config::SyncMode::Fast { .. } + ), wasm_runtime_substitutes, }, )?; @@ -386,19 +363,15 @@ pub fn new_full_parts( (client, backend) }; - Ok(( - client, - backend, - keystore_container, - task_manager, - )) + Ok((client, backend, keystore_container, task_manager)) } /// Create the initial parts of a light node. pub fn new_light_parts( config: &Configuration, telemetry: Option, -) -> Result, Error> where +) -> Result, Error> +where TBl: BlockT, TExecDisp: NativeExecutionDispatch + 'static, { @@ -417,8 +390,7 @@ pub fn new_light_parts( let db_storage = { let db_settings = sc_client_db::DatabaseSettings { state_cache_size: config.state_cache_size, - state_cache_child_ratio: - config.state_cache_child_ratio.map(|v| (v, 100)), + state_cache_child_ratio: config.state_cache_child_ratio.map(|v| (v, 100)), state_pruning: config.state_pruning.clone(), source: config.database.clone(), keep_blocks: config.keep_blocks.clone(), @@ -427,13 +399,11 @@ pub fn new_light_parts( sc_client_db::light::LightStorage::new(db_settings)? 
}; let light_blockchain = sc_light::new_light_blockchain(db_storage); - let fetch_checker = Arc::new( - sc_light::new_fetch_checker::<_, TBl, _>( - light_blockchain.clone(), - executor.clone(), - Box::new(task_manager.spawn_handle()), - ), - ); + let fetch_checker = Arc::new(sc_light::new_fetch_checker::<_, TBl, _>( + light_blockchain.clone(), + executor.clone(), + Box::new(task_manager.spawn_handle()), + )); let on_demand = Arc::new(sc_network::config::OnDemand::new(fetch_checker)); let backend = sc_light::new_light_backend(light_blockchain); let client = Arc::new(light::new_light( @@ -451,7 +421,8 @@ pub fn new_light_parts( /// Create an instance of default DB-backend backend. pub fn new_db_backend( settings: DatabaseSettings, -) -> Result>, sp_blockchain::Error> where +) -> Result>, sp_blockchain::Error> +where Block: BlockT, { const CANONICALIZATION_DELAY: u64 = 4096; @@ -480,11 +451,16 @@ pub fn new_client( >, sp_blockchain::Error, > - where - Block: BlockT, - E: CodeExecutor + RuntimeInfo, +where + Block: BlockT, + E: CodeExecutor + RuntimeInfo, { - let executor = crate::client::LocalCallExecutor::new(backend.clone(), executor, spawn_handle, config.clone())?; + let executor = crate::client::LocalCallExecutor::new( + backend.clone(), + executor, + spawn_handle, + config.clone(), + )?; Ok(crate::client::Client::new( backend, executor, @@ -534,10 +510,10 @@ pub fn build_offchain_workers( client: Arc, network: Arc::Hash>>, ) -> Option>> - where - TBl: BlockT, - TCl: Send + Sync + ProvideRuntimeApi + BlockchainEvents + 'static, - >::Api: sc_offchain::OffchainWorkerApi, +where + TBl: BlockT, + TCl: Send + Sync + ProvideRuntimeApi + BlockchainEvents + 'static, + >::Api: sc_offchain::OffchainWorkerApi, { let offchain_workers = Some(Arc::new(sc_offchain::OffchainWorkers::new(client.clone()))); @@ -551,7 +527,7 @@ pub fn build_offchain_workers( offchain, Clone::clone(&spawn_handle), network.clone(), - ) + ), ); } @@ -562,22 +538,32 @@ pub fn build_offchain_workers( pub 
fn spawn_tasks( params: SpawnTasksParams, ) -> Result - where - TCl: ProvideRuntimeApi + HeaderMetadata + Chain + - BlockBackend + BlockIdTo + ProofProvider + - HeaderBackend + BlockchainEvents + ExecutorProvider + UsageProvider + - StorageProvider + CallApiAt + Send + 'static, - >::Api: - sp_api::Metadata + - sc_offchain::OffchainWorkerApi + - sp_transaction_pool::runtime_api::TaggedTransactionQueue + - sp_session::SessionKeys + - sp_api::ApiExt, - TBl: BlockT, - TBackend: 'static + sc_client_api::backend::Backend + Send, - TExPool: MaintainedTransactionPool::Hash> + - MallocSizeOfWasm + 'static, - TRpc: sc_rpc::RpcExtension +where + TCl: ProvideRuntimeApi + + HeaderMetadata + + Chain + + BlockBackend + + BlockIdTo + + ProofProvider + + HeaderBackend + + BlockchainEvents + + ExecutorProvider + + UsageProvider + + StorageProvider + + CallApiAt + + Send + + 'static, + >::Api: sp_api::Metadata + + sc_offchain::OffchainWorkerApi + + sp_transaction_pool::runtime_api::TaggedTransactionQueue + + sp_session::SessionKeys + + sp_api::ApiExt, + TBl: BlockT, + TBackend: 'static + sc_client_api::backend::Backend + Send, + TExPool: MaintainedTransactionPool::Hash> + + MallocSizeOfWasm + + 'static, + TRpc: sc_rpc::RpcExtension, { let SpawnTasksParams { mut config, @@ -600,17 +586,11 @@ pub fn spawn_tasks( client.clone(), &BlockId::Hash(chain_info.best_hash), config.dev_key_seed.clone().map(|s| vec![s]).unwrap_or_default(), - ).map_err(|e| Error::Application(Box::new(e)))?; + ) + .map_err(|e| Error::Application(Box::new(e)))?; let telemetry = telemetry - .map(|telemetry| { - init_telemetry( - &mut config, - network.clone(), - client.clone(), - telemetry, - ) - }) + .map(|telemetry| init_telemetry(&mut config, network.clone(), client.clone(), telemetry)) .transpose()?; info!("📦 Highest known block at #{}", chain_info.best_number); @@ -625,63 +605,69 @@ pub fn spawn_tasks( spawn_handle.spawn( "on-transaction-imported", - transaction_notifications( - transaction_pool.clone(), - 
network.clone(), - telemetry.clone(), - ), + transaction_notifications(transaction_pool.clone(), network.clone(), telemetry.clone()), ); // Prometheus metrics. - let metrics_service = if let Some(PrometheusConfig { port, registry }) = - config.prometheus_config.clone() - { - // Set static metrics. - let metrics = MetricsService::with_prometheus(telemetry.clone(), ®istry, &config)?; - spawn_handle.spawn( - "prometheus-endpoint", - prometheus_endpoint::init_prometheus(port, registry).map(drop) - ); + let metrics_service = + if let Some(PrometheusConfig { port, registry }) = config.prometheus_config.clone() { + // Set static metrics. + let metrics = MetricsService::with_prometheus(telemetry.clone(), ®istry, &config)?; + spawn_handle.spawn( + "prometheus-endpoint", + prometheus_endpoint::init_prometheus(port, registry).map(drop), + ); - metrics - } else { - MetricsService::new(telemetry.clone()) - }; + metrics + } else { + MetricsService::new(telemetry.clone()) + }; // Periodically updated metrics and telemetry updates. 
- spawn_handle.spawn("telemetry-periodic-send", - metrics_service.run( - client.clone(), - transaction_pool.clone(), - network.clone(), - ) + spawn_handle.spawn( + "telemetry-periodic-send", + metrics_service.run(client.clone(), transaction_pool.clone(), network.clone()), ); // RPC - let gen_handler = | - deny_unsafe: sc_rpc::DenyUnsafe, - rpc_middleware: sc_rpc_server::RpcMiddleware - | gen_handler( - deny_unsafe, rpc_middleware, &config, task_manager.spawn_handle(), - client.clone(), transaction_pool.clone(), keystore.clone(), - on_demand.clone(), remote_blockchain.clone(), &*rpc_extensions_builder, - backend.offchain_storage(), system_rpc_tx.clone() - ); + let gen_handler = |deny_unsafe: sc_rpc::DenyUnsafe, + rpc_middleware: sc_rpc_server::RpcMiddleware| { + gen_handler( + deny_unsafe, + rpc_middleware, + &config, + task_manager.spawn_handle(), + client.clone(), + transaction_pool.clone(), + keystore.clone(), + on_demand.clone(), + remote_blockchain.clone(), + &*rpc_extensions_builder, + backend.offchain_storage(), + system_rpc_tx.clone(), + ) + }; let rpc_metrics = sc_rpc_server::RpcMetrics::new(config.prometheus_registry())?; let rpc = start_rpc_servers(&config, gen_handler, rpc_metrics.clone())?; // This is used internally, so don't restrict access to unsafe RPC - let rpc_handlers = RpcHandlers(Arc::new(gen_handler( - sc_rpc::DenyUnsafe::No, - sc_rpc_server::RpcMiddleware::new(rpc_metrics, "inbrowser") - ).into())); + let rpc_handlers = RpcHandlers(Arc::new( + gen_handler( + sc_rpc::DenyUnsafe::No, + sc_rpc_server::RpcMiddleware::new(rpc_metrics, "inbrowser"), + ) + .into(), + )); // Spawn informant task - spawn_handle.spawn("informant", sc_informant::build( - client.clone(), - network.clone(), - transaction_pool.clone(), - config.informant_output_format, - )); + spawn_handle.spawn( + "informant", + sc_informant::build( + client.clone(), + network.clone(), + transaction_pool.clone(), + config.informant_output_format, + ), + ); 
task_manager.keep_alive((config.base_path, rpc, rpc_handlers.clone())); @@ -692,10 +678,9 @@ async fn transaction_notifications( transaction_pool: Arc, network: Arc::Hash>>, telemetry: Option, -) - where - TBl: BlockT, - TExPool: MaintainedTransactionPool::Hash>, +) where + TBl: BlockT, + TExPool: MaintainedTransactionPool::Hash>, { // transaction notifications transaction_pool @@ -730,9 +715,11 @@ fn init_telemetry>( chain: config.chain_spec.name().to_owned(), genesis_hash: format!("{:?}", genesis_hash), authority: config.role.is_authority(), - startup_time: SystemTime::UNIX_EPOCH.elapsed() + startup_time: SystemTime::UNIX_EPOCH + .elapsed() .map(|dur| dur.as_millis()) - .unwrap_or(0).to_string(), + .unwrap_or(0) + .to_string(), network_id: network.local_peer_id().to_base58(), }; @@ -753,22 +740,28 @@ fn gen_handler( remote_blockchain: Option>>, rpc_extensions_builder: &(dyn RpcExtensionBuilder + Send), offchain_storage: Option<>::OffchainStorage>, - system_rpc_tx: TracingUnboundedSender> + system_rpc_tx: TracingUnboundedSender>, ) -> sc_rpc_server::RpcHandler - where - TBl: BlockT, - TCl: ProvideRuntimeApi + BlockchainEvents + HeaderBackend + - HeaderMetadata + ExecutorProvider + - CallApiAt + ProofProvider + - StorageProvider + BlockBackend + Send + Sync + 'static, - TExPool: MaintainedTransactionPool::Hash> + 'static, - TBackend: sc_client_api::backend::Backend + 'static, - TRpc: sc_rpc::RpcExtension, - >::Api: - sp_session::SessionKeys + - sp_api::Metadata, +where + TBl: BlockT, + TCl: ProvideRuntimeApi + + BlockchainEvents + + HeaderBackend + + HeaderMetadata + + ExecutorProvider + + CallApiAt + + ProofProvider + + StorageProvider + + BlockBackend + + Send + + Sync + + 'static, + TExPool: MaintainedTransactionPool::Hash> + 'static, + TBackend: sc_client_api::backend::Backend + 'static, + TRpc: sc_rpc::RpcExtension, + >::Api: sp_session::SessionKeys + sp_api::Metadata, { - use sc_rpc::{chain, state, author, system, offchain}; + use sc_rpc::{author, chain, 
offchain, state, system}; let system_info = sc_rpc::system::SystemInfo { chain_name: config.chain_spec.name().into(), @@ -781,43 +774,37 @@ fn gen_handler( let task_executor = sc_rpc::SubscriptionTaskExecutor::new(spawn_handle); let subscriptions = SubscriptionManager::new(Arc::new(task_executor.clone())); - let (chain, state, child_state) = if let (Some(remote_blockchain), Some(on_demand)) = - (remote_blockchain, on_demand) { - // Light clients - let chain = sc_rpc::chain::new_light( - client.clone(), - subscriptions.clone(), - remote_blockchain.clone(), - on_demand.clone(), - ); - let (state, child_state) = sc_rpc::state::new_light( - client.clone(), - subscriptions.clone(), - remote_blockchain.clone(), - on_demand, - deny_unsafe, - ); - (chain, state, child_state) - - } else { - // Full nodes - let chain = sc_rpc::chain::new_full(client.clone(), subscriptions.clone()); - let (state, child_state) = sc_rpc::state::new_full( - client.clone(), - subscriptions.clone(), - deny_unsafe, - config.rpc_max_payload, - ); - (chain, state, child_state) - }; + let (chain, state, child_state) = + if let (Some(remote_blockchain), Some(on_demand)) = (remote_blockchain, on_demand) { + // Light clients + let chain = sc_rpc::chain::new_light( + client.clone(), + subscriptions.clone(), + remote_blockchain.clone(), + on_demand.clone(), + ); + let (state, child_state) = sc_rpc::state::new_light( + client.clone(), + subscriptions.clone(), + remote_blockchain.clone(), + on_demand, + deny_unsafe, + ); + (chain, state, child_state) + } else { + // Full nodes + let chain = sc_rpc::chain::new_full(client.clone(), subscriptions.clone()); + let (state, child_state) = sc_rpc::state::new_full( + client.clone(), + subscriptions.clone(), + deny_unsafe, + config.rpc_max_payload, + ); + (chain, state, child_state) + }; - let author = sc_rpc::author::Author::new( - client, - transaction_pool, - subscriptions, - keystore, - deny_unsafe, - ); + let author = + sc_rpc::author::Author::new(client, 
transaction_pool, subscriptions, keystore, deny_unsafe); let system = system::System::new(system_info, system_rpc_tx, deny_unsafe); let maybe_offchain_rpc = offchain_storage.map(|storage| { @@ -835,7 +822,7 @@ fn gen_handler( system::SystemApi::to_delegate(system), rpc_extensions_builder.build(deny_unsafe, task_executor), ), - rpc_middleware + rpc_middleware, ) } @@ -854,32 +841,42 @@ pub struct BuildNetworkParams<'a, TBl: BlockT, TExPool, TImpQu, TCl> { /// An optional, shared data fetcher for light clients. pub on_demand: Option>>, /// A block announce validator builder. - pub block_announce_validator_builder: Option) -> Box + Send> + Send - >>, + pub block_announce_validator_builder: + Option) -> Box + Send> + Send>>, } /// Build the network service, the network status sinks and an RPC sender. pub fn build_network( - params: BuildNetworkParams + params: BuildNetworkParams, ) -> Result< ( Arc::Hash>>, TracingUnboundedSender>, NetworkStarter, ), - Error + Error, > - where - TBl: BlockT, - TCl: ProvideRuntimeApi + HeaderMetadata + Chain + - BlockBackend + BlockIdTo + ProofProvider + - HeaderBackend + BlockchainEvents + 'static, - TExPool: MaintainedTransactionPool::Hash> + 'static, - TImpQu: ImportQueue + 'static, +where + TBl: BlockT, + TCl: ProvideRuntimeApi + + HeaderMetadata + + Chain + + BlockBackend + + BlockIdTo + + ProofProvider + + HeaderBackend + + BlockchainEvents + + 'static, + TExPool: MaintainedTransactionPool::Hash> + 'static, + TImpQu: ImportQueue + 'static, { let BuildNetworkParams { - config, client, transaction_pool, spawn_handle, import_queue, on_demand, + config, + client, + transaction_pool, + spawn_handle, + import_queue, + on_demand, block_announce_validator_builder, } = params; @@ -906,8 +903,8 @@ pub fn build_network( let (handler, protocol_config) = BlockRequestHandler::new( &protocol_id, client.clone(), - config.network.default_peers_set.in_peers as usize - + config.network.default_peers_set.out_peers as usize, + 
config.network.default_peers_set.in_peers as usize + + config.network.default_peers_set.out_peers as usize, ); spawn_handle.spawn("block_request_handler", handler.run()); protocol_config @@ -923,8 +920,8 @@ pub fn build_network( let (handler, protocol_config) = StateRequestHandler::new( &protocol_id, client.clone(), - config.network.default_peers_set.in_peers as usize - + config.network.default_peers_set.out_peers as usize, + config.network.default_peers_set.in_peers as usize + + config.network.default_peers_set.out_peers as usize, ); spawn_handle.spawn("state_request_handler", handler.run()); protocol_config @@ -937,10 +934,8 @@ pub fn build_network( light_client_requests::generate_protocol_config(&protocol_id) } else { // Allow both outgoing and incoming requests. - let (handler, protocol_config) = LightClientRequestHandler::new( - &protocol_id, - client.clone(), - ); + let (handler, protocol_config) = + LightClientRequestHandler::new(&protocol_id, client.clone()); spawn_handle.spawn("light_client_request_handler", handler.run()); protocol_config } @@ -962,7 +957,7 @@ pub fn build_network( }, network_config: config.network.clone(), chain: client.clone(), - on_demand: on_demand, + on_demand, transaction_pool: transaction_pool_adapter as _, import_queue: Box::new(import_queue), protocol_id, @@ -976,10 +971,8 @@ pub fn build_network( // Storage chains don't keep full block history and can't be synced in full mode. // Force fast sync when storage chain mode is enabled. 
if matches!(config.transaction_storage, TransactionStorageMode::StorageChain) { - network_params.network_config.sync_mode = SyncMode::Fast { - storage_chain_mode: true, - skip_proofs: false, - }; + network_params.network_config.sync_mode = + SyncMode::Fast { storage_chain_mode: true, skip_proofs: false }; } let has_bootnodes = !network_params.network_config.boot_nodes.is_empty(); @@ -1028,7 +1021,7 @@ pub fn build_network( ); // This `return` might seem unnecessary, but we don't want to make it look like // everything is working as normal even though the user is clearly misusing the API. - return; + return } future.await diff --git a/client/service/src/chain_ops/check_block.rs b/client/service/src/chain_ops/check_block.rs index 94f6d25c9eb8f..ab924a3f7d9dd 100644 --- a/client/service/src/chain_ops/check_block.rs +++ b/client/service/src/chain_ops/check_block.rs @@ -17,22 +17,20 @@ // along with this program. If not, see . use crate::error::Error; -use futures::{future, prelude::*}; -use sp_runtime::traits::Block as BlockT; -use sp_runtime::generic::BlockId; use codec::Encode; -use sp_consensus::import_queue::ImportQueue; +use futures::{future, prelude::*}; use sc_client_api::{BlockBackend, UsageProvider}; +use sp_consensus::import_queue::ImportQueue; +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; -use std::pin::Pin; -use std::sync::Arc; use crate::chain_ops::import_blocks; +use std::{pin::Pin, sync::Arc}; /// Re-validate known block. 
pub fn check_block( client: Arc, import_queue: IQ, - block_id: BlockId + block_id: BlockId, ) -> Pin> + Send>> where C: BlockBackend + UsageProvider + Send + Sync + 'static, @@ -46,7 +44,7 @@ where block.encode_to(&mut buf); let reader = std::io::Cursor::new(buf); import_blocks(client, import_queue, reader, true, true) - } + }, Ok(None) => Box::pin(future::err("Unknown block".into())), Err(e) => Box::pin(future::err(format!("Error reading block: {:?}", e).into())), } diff --git a/client/service/src/chain_ops/export_blocks.rs b/client/service/src/chain_ops/export_blocks.rs index 1d9325d1d7452..8887180103182 100644 --- a/client/service/src/chain_ops/export_blocks.rs +++ b/client/service/src/chain_ops/export_blocks.rs @@ -17,18 +17,16 @@ // along with this program. If not, see . use crate::error::Error; -use log::info; +use codec::Encode; use futures::{future, prelude::*}; -use sp_runtime::traits::{ - Block as BlockT, NumberFor, One, Zero, SaturatedConversion +use log::info; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, NumberFor, One, SaturatedConversion, Zero}, }; -use sp_runtime::generic::BlockId; -use codec::Encode; -use std::{io::Write, pin::Pin}; use sc_client_api::{BlockBackend, UsageProvider}; -use std::sync::Arc; -use std::task::Poll; +use std::{io::Write, pin::Pin, sync::Arc, task::Poll}; /// Performs the blocks export. pub fn export_blocks( @@ -36,7 +34,7 @@ pub fn export_blocks( mut output: impl Write + 'static, from: NumberFor, to: Option>, - binary: bool + binary: bool, ) -> Pin>>> where C: BlockBackend + UsageProvider + 'static, @@ -63,7 +61,7 @@ where let client = &client; if last < block { - return Poll::Ready(Err("Invalid block range specified".into())); + return Poll::Ready(Err("Invalid block range specified".into())) } if !wrote_header { @@ -78,14 +76,13 @@ where } match client.block(&BlockId::number(block))? 
{ - Some(block) => { + Some(block) => if binary { output.write_all(&block.encode())?; } else { serde_json::to_writer(&mut output, &block) .map_err(|e| format!("Error writing JSON: {}", e))?; - } - }, + }, // Reached end of the chain. None => return Poll::Ready(Ok(())), } @@ -93,7 +90,7 @@ where info!("#{}", block); } if block == last { - return Poll::Ready(Ok(())); + return Poll::Ready(Ok(())) } block += One::one(); diff --git a/client/service/src/chain_ops/export_raw_state.rs b/client/service/src/chain_ops/export_raw_state.rs index 71822cf6275f8..975149c61cfab 100644 --- a/client/service/src/chain_ops/export_raw_state.rs +++ b/client/service/src/chain_ops/export_raw_state.rs @@ -17,10 +17,9 @@ // along with this program. If not, see . use crate::error::Error; -use sp_runtime::traits::Block as BlockT; -use sp_runtime::generic::BlockId; -use sp_core::storage::{StorageKey, well_known_keys, ChildInfo, Storage, StorageChild, StorageMap}; use sc_client_api::{StorageProvider, UsageProvider}; +use sp_core::storage::{well_known_keys, ChildInfo, Storage, StorageChild, StorageKey, StorageMap}; +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; use std::{collections::HashMap, sync::Arc}; @@ -35,9 +34,7 @@ where B: BlockT, BA: sc_client_api::backend::Backend, { - let block = block.unwrap_or_else( - || BlockId::Hash(client.usage_info().chain.best_hash) - ); + let block = block.unwrap_or_else(|| BlockId::Hash(client.usage_info().chain.best_hash)); let empty_key = StorageKey(Vec::new()); let mut top_storage = client.storage_pairs(&block, &empty_key)?; @@ -47,12 +44,12 @@ where // pairs. 
while let Some(pos) = top_storage .iter() - .position(|(k, _)| k.0.starts_with(well_known_keys::DEFAULT_CHILD_STORAGE_KEY_PREFIX)) { + .position(|(k, _)| k.0.starts_with(well_known_keys::DEFAULT_CHILD_STORAGE_KEY_PREFIX)) + { let (key, _) = top_storage.swap_remove(pos); - let key = StorageKey( - key.0[well_known_keys::DEFAULT_CHILD_STORAGE_KEY_PREFIX.len()..].to_vec(), - ); + let key = + StorageKey(key.0[well_known_keys::DEFAULT_CHILD_STORAGE_KEY_PREFIX.len()..].to_vec()); let child_info = ChildInfo::new_default(&key.0); let keys = client.child_storage_keys(&block, &child_info, &empty_key)?; diff --git a/client/service/src/chain_ops/import_blocks.rs b/client/service/src/chain_ops/import_blocks.rs index 75ea6670f3525..ecf028ffeb3f0 100644 --- a/client/service/src/chain_ops/import_blocks.rs +++ b/client/service/src/chain_ops/import_blocks.rs @@ -16,29 +16,31 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use crate::error; -use crate::error::Error; -use sc_chain_spec::ChainSpec; -use log::{warn, info}; -use futures::{future, prelude::*}; -use sp_runtime::traits::{ - Block as BlockT, NumberFor, Zero, Header, MaybeSerializeDeserialize, -}; -use sp_runtime::generic::SignedBlock; +use crate::{error, error::Error}; use codec::{Decode, IoReader as CodecIoReader}; +use futures::{future, prelude::*}; +use log::{info, warn}; +use sc_chain_spec::ChainSpec; use sp_consensus::{ + import_queue::{BlockImportError, BlockImportResult, ImportQueue, IncomingBlock, Link}, BlockOrigin, - import_queue::{IncomingBlock, Link, BlockImportError, BlockImportResult, ImportQueue}, +}; +use sp_runtime::{ + generic::SignedBlock, + traits::{Block as BlockT, Header, MaybeSerializeDeserialize, NumberFor, Zero}, }; -use std::{io::{Read, Seek}, pin::Pin}; -use std::time::{Duration, Instant}; use futures_timer::Delay; -use std::task::Poll; +use sc_client_api::UsageProvider; use serde_json::{de::IoRead as JsonIoRead, Deserializer, 
StreamDeserializer}; -use std::convert::{TryFrom, TryInto}; use sp_runtime::traits::{CheckedDiv, Saturating}; -use sc_client_api::UsageProvider; +use std::{ + convert::{TryFrom, TryInto}, + io::{Read, Seek}, + pin::Pin, + task::Poll, + time::{Duration, Instant}, +}; /// Number of blocks we will add to the queue before waiting for the queue to catch up. const MAX_PENDING_BLOCKS: u64 = 1_024; @@ -56,11 +58,11 @@ pub fn build_spec(spec: &dyn ChainSpec, raw: bool) -> error::Result { spec.as_json(raw).map_err(Into::into) } - /// Helper enum that wraps either a binary decoder (from parity-scale-codec), or a JSON decoder /// (from serde_json). Implements the Iterator Trait, calling `next()` will decode the next /// SignedBlock and return it. -enum BlockIter where +enum BlockIter +where R: std::io::Read + std::io::Seek, { Binary { @@ -79,7 +81,8 @@ enum BlockIter where }, } -impl BlockIter where +impl BlockIter +where R: Read + Seek + 'static, B: BlockT + MaybeSerializeDeserialize, { @@ -90,40 +93,32 @@ impl BlockIter where // of blocks that are going to be decoded. We read it and add it to our enum struct. let num_expected_blocks: u64 = Decode::decode(&mut reader) .map_err(|e| format!("Failed to decode the number of blocks: {:?}", e))?; - Ok(BlockIter::Binary { - num_expected_blocks, - read_block_count: 0, - reader, - }) + Ok(BlockIter::Binary { num_expected_blocks, read_block_count: 0, reader }) } else { - let stream_deser = Deserializer::from_reader(input) - .into_iter::>(); - Ok(BlockIter::Json { - reader: stream_deser, - read_block_count: 0, - }) + let stream_deser = Deserializer::from_reader(input).into_iter::>(); + Ok(BlockIter::Json { reader: stream_deser, read_block_count: 0 }) } } /// Returns the number of blocks read thus far. fn read_block_count(&self) -> u64 { match self { - BlockIter::Binary { read_block_count, .. } - | BlockIter::Json { read_block_count, .. } - => *read_block_count, + BlockIter::Binary { read_block_count, .. 
} | + BlockIter::Json { read_block_count, .. } => *read_block_count, } } /// Returns the total number of blocks to be imported, if possible. fn num_expected_blocks(&self) -> Option { match self { - BlockIter::Binary { num_expected_blocks, ..} => Some(*num_expected_blocks), - BlockIter::Json {..} => None + BlockIter::Binary { num_expected_blocks, .. } => Some(*num_expected_blocks), + BlockIter::Json { .. } => None, } } } -impl Iterator for BlockIter where +impl Iterator for BlockIter +where R: Read + Seek + 'static, B: BlockT + MaybeSerializeDeserialize, { @@ -133,20 +128,20 @@ impl Iterator for BlockIter where match self { BlockIter::Binary { num_expected_blocks, read_block_count, reader } => { if read_block_count < num_expected_blocks { - let block_result: Result, _> = SignedBlock::::decode(reader) - .map_err(|e| e.to_string()); + let block_result: Result, _> = + SignedBlock::::decode(reader).map_err(|e| e.to_string()); *read_block_count += 1; Some(block_result) } else { // `read_block_count` == `num_expected_blocks` so we've read enough blocks. None } - } + }, BlockIter::Json { reader, read_block_count } => { let res = Some(reader.next()?.map_err(|e| e.to_string())); *read_block_count += 1; res - } + }, } } } @@ -155,7 +150,7 @@ impl Iterator for BlockIter where fn import_block_to_queue( signed_block: SignedBlock, queue: &mut TImpQu, - force: bool + force: bool, ) where TBl: BlockT + MaybeSerializeDeserialize, TImpQu: 'static + ImportQueue, @@ -163,8 +158,9 @@ fn import_block_to_queue( let (header, extrinsics) = signed_block.block.deconstruct(); let hash = header.hash(); // import queue handles verification and importing it into the client. 
- queue.import_blocks(BlockOrigin::File, vec![ - IncomingBlock:: { + queue.import_blocks( + BlockOrigin::File, + vec![IncomingBlock:: { hash, header: Some(header), body: Some(extrinsics), @@ -175,15 +171,15 @@ fn import_block_to_queue( import_existing: force, state: None, skip_execution: false, - } - ]); + }], + ); } /// Returns true if we have imported every block we were supposed to import, else returns false. fn importing_is_done( num_expected_blocks: Option, read_block_count: u64, - imported_blocks: u64 + imported_blocks: u64, ) -> bool { if let Some(num_expected_blocks) = num_expected_blocks { imported_blocks >= num_expected_blocks @@ -209,7 +205,7 @@ impl Speedometer { } } - /// Calculates `(best_number - last_number) / (now - last_update)` and + /// Calculates `(best_number - last_number) / (now - last_update)` and /// logs the speed of import. fn display_speed(&self) { // Number of milliseconds elapsed since last time. @@ -223,24 +219,28 @@ impl Speedometer { // Number of blocks that have been imported since last time. let diff = match self.last_number { None => return, - Some(n) => self.best_number.saturating_sub(n) + Some(n) => self.best_number.saturating_sub(n), }; if let Ok(diff) = TryInto::::try_into(diff) { // If the number of blocks can be converted to a regular integer, then it's easy: just // do the math and turn it into a `f64`. - let speed = diff.saturating_mul(10_000).checked_div(u128::from(elapsed_ms)) - .map_or(0.0, |s| s as f64) / 10.0; + let speed = diff + .saturating_mul(10_000) + .checked_div(u128::from(elapsed_ms)) + .map_or(0.0, |s| s as f64) / + 10.0; info!("📦 Current best block: {} ({:4.1} bps)", self.best_number, speed); } else { // If the number of blocks can't be converted to a regular integer, then we need a more // algebraic approach and we stay within the realm of integers. 
let one_thousand = NumberFor::::from(1_000u32); - let elapsed = NumberFor::::from( - >::try_from(elapsed_ms).unwrap_or(u32::MAX) - ); + let elapsed = + NumberFor::::from(>::try_from(elapsed_ms).unwrap_or(u32::MAX)); - let speed = diff.saturating_mul(one_thousand).checked_div(&elapsed) + let speed = diff + .saturating_mul(one_thousand) + .checked_div(&elapsed) .unwrap_or_else(Zero::zero); info!("📦 Current best block: {} ({} bps)", self.best_number, speed) } @@ -265,22 +265,23 @@ impl Speedometer { } /// Different State that the `import_blocks` future could be in. -enum ImportState where +enum ImportState +where R: Read + Seek + 'static, B: BlockT + MaybeSerializeDeserialize, { /// We are reading from the BlockIter structure, adding those blocks to the queue if possible. - Reading{block_iter: BlockIter}, + Reading { block_iter: BlockIter }, /// The queue is full (contains at least MAX_PENDING_BLOCKS blocks) and we are waiting for it to /// catch up. - WaitingForImportQueueToCatchUp{ + WaitingForImportQueueToCatchUp { block_iter: BlockIter, delay: Delay, - block: SignedBlock + block: SignedBlock, }, // We have added all the blocks to the queue but they are still being processed. 
- WaitingForImportQueueToFinish{ - num_expected_blocks: Option, + WaitingForImportQueueToFinish { + num_expected_blocks: Option, read_block_count: u64, delay: Delay, }, @@ -306,10 +307,7 @@ where impl WaitLink { fn new() -> WaitLink { - WaitLink { - imported_blocks: 0, - has_error: false, - } + WaitLink { imported_blocks: 0, has_error: false } } } @@ -318,7 +316,7 @@ where &mut self, imported: usize, _num_expected_blocks: usize, - results: Vec<(Result>, BlockImportError>, B::Hash)> + results: Vec<(Result>, BlockImportError>, B::Hash)>, ) { self.imported_blocks += imported as u64; @@ -326,7 +324,7 @@ where if let (Err(err), hash) = result { warn!("There was an error importing block with hash {:?}: {:?}", hash, err); self.has_error = true; - break; + break } } } @@ -338,13 +336,13 @@ where let block_iter = match block_iter_res { Ok(block_iter) => block_iter, Err(e) => { - // We've encountered an error while creating the block iterator + // We've encountered an error while creating the block iterator // so we can just return a future that returns an error. return future::ready(Err(Error::Other(e))).boxed() - } + }, }; - let mut state = Some(ImportState::Reading{block_iter}); + let mut state = Some(ImportState::Reading { block_iter }); let mut speedometer = Speedometer::::new(); // Importing blocks is implemented as a future, because we want the operation to be @@ -358,7 +356,7 @@ where let client = &client; let queue = &mut import_queue; match state.take().expect("state should never be None; qed") { - ImportState::Reading{mut block_iter} => { + ImportState::Reading { mut block_iter } => { match block_iter.next() { None => { // The iterator is over: we now need to wait for the import queue to finish. 
@@ -366,7 +364,9 @@ where let read_block_count = block_iter.read_block_count(); let delay = Delay::new(Duration::from_millis(DELAY_TIME)); state = Some(ImportState::WaitingForImportQueueToFinish { - num_expected_blocks, read_block_count, delay + num_expected_blocks, + read_block_count, + delay, }); }, Some(block_result) => { @@ -378,32 +378,35 @@ where // until the queue has made some progress. let delay = Delay::new(Duration::from_millis(DELAY_TIME)); state = Some(ImportState::WaitingForImportQueueToCatchUp { - block_iter, delay, block + block_iter, + delay, + block, }); } else { // Queue is not full, we can keep on adding blocks to the queue. import_block_to_queue(block, queue, force); - state = Some(ImportState::Reading{block_iter}); + state = Some(ImportState::Reading { block_iter }); } - } - Err(e) => { - return Poll::Ready( - Err(Error::Other( - format!("Error reading block #{}: {}", read_block_count, e) - ))) - } + }, + Err(e) => + return Poll::Ready(Err(Error::Other(format!( + "Error reading block #{}: {}", + read_block_count, e + )))), } - } + }, } }, - ImportState::WaitingForImportQueueToCatchUp{block_iter, mut delay, block} => { + ImportState::WaitingForImportQueueToCatchUp { block_iter, mut delay, block } => { let read_block_count = block_iter.read_block_count(); if read_block_count - link.imported_blocks >= MAX_PENDING_BLOCKS { // Queue is still full, so wait until there is room to insert our block. match Pin::new(&mut delay).poll(cx) { Poll::Pending => { state = Some(ImportState::WaitingForImportQueueToCatchUp { - block_iter, delay, block + block_iter, + delay, + block, }); return Poll::Pending }, @@ -412,25 +415,30 @@ where }, } state = Some(ImportState::WaitingForImportQueueToCatchUp { - block_iter, delay, block + block_iter, + delay, + block, }); } else { // Queue is no longer full, so we can add our block to the queue. import_block_to_queue(block, queue, force); // Switch back to Reading state. 
- state = Some(ImportState::Reading{block_iter}); + state = Some(ImportState::Reading { block_iter }); } }, ImportState::WaitingForImportQueueToFinish { - num_expected_blocks, read_block_count, mut delay + num_expected_blocks, + read_block_count, + mut delay, } => { - // All the blocks have been added to the queue, which doesn't mean they + // All the blocks have been added to the queue, which doesn't mean they // have all been properly imported. if importing_is_done(num_expected_blocks, read_block_count, link.imported_blocks) { // Importing is done, we can log the result and return. info!( "🎉 Imported {} blocks. Best: #{}", - read_block_count, client.usage_info().chain.best_number + read_block_count, + client.usage_info().chain.best_number ); return Poll::Ready(Ok(())) } else { @@ -439,7 +447,9 @@ where match Pin::new(&mut delay).poll(cx) { Poll::Pending => { state = Some(ImportState::WaitingForImportQueueToFinish { - num_expected_blocks, read_block_count, delay + num_expected_blocks, + read_block_count, + delay, }); return Poll::Pending }, @@ -449,10 +459,12 @@ where } state = Some(ImportState::WaitingForImportQueueToFinish { - num_expected_blocks, read_block_count, delay + num_expected_blocks, + read_block_count, + delay, }); } - } + }, } queue.poll_actions(cx, &mut link); @@ -461,11 +473,10 @@ where speedometer.notify_user(best_number); if link.has_error { - return Poll::Ready(Err( - Error::Other( - format!("Stopping after #{} blocks because of an error", link.imported_blocks) - ) - )) + return Poll::Ready(Err(Error::Other(format!( + "Stopping after #{} blocks because of an error", + link.imported_blocks + )))) } cx.waker().wake_by_ref(); diff --git a/client/service/src/chain_ops/revert_chain.rs b/client/service/src/chain_ops/revert_chain.rs index e3301eb2627e2..63f1cbd15dd63 100644 --- a/client/service/src/chain_ops/revert_chain.rs +++ b/client/service/src/chain_ops/revert_chain.rs @@ -18,15 +18,15 @@ use crate::error::Error; use log::info; -use 
sp_runtime::traits::{Block as BlockT, NumberFor, Zero}; use sc_client_api::{Backend, UsageProvider}; +use sp_runtime::traits::{Block as BlockT, NumberFor, Zero}; use std::sync::Arc; /// Performs a revert of `blocks` blocks. pub fn revert_chain( client: Arc, backend: Arc, - blocks: NumberFor + blocks: NumberFor, ) -> Result<(), Error> where B: BlockT, diff --git a/client/service/src/client/block_rules.rs b/client/service/src/client/block_rules.rs index 1af06666339cc..4bdf338362960 100644 --- a/client/service/src/client/block_rules.rs +++ b/client/service/src/client/block_rules.rs @@ -20,11 +20,9 @@ use std::collections::{HashMap, HashSet}; -use sp_runtime::{ - traits::{Block as BlockT, NumberFor}, -}; +use sp_runtime::traits::{Block as BlockT, NumberFor}; -use sc_client_api::{ForkBlocks, BadBlocks}; +use sc_client_api::{BadBlocks, ForkBlocks}; /// Chain specification rules lookup result. pub enum LookupResult { @@ -33,7 +31,7 @@ pub enum LookupResult { /// The block is known to be bad and should not be imported KnownBad, /// There is a specified canonical block hash for the given height - Expected(B::Hash) + Expected(B::Hash), } /// Chain-specific block filtering rules. @@ -47,10 +45,7 @@ pub struct BlockRules { impl BlockRules { /// New block rules with provided black and white lists. 
- pub fn new( - fork_blocks: ForkBlocks, - bad_blocks: BadBlocks, - ) -> Self { + pub fn new(fork_blocks: ForkBlocks, bad_blocks: BadBlocks) -> Self { Self { bad: bad_blocks.unwrap_or_else(|| HashSet::new()), forks: fork_blocks.unwrap_or_else(|| vec![]).into_iter().collect(), @@ -66,7 +61,7 @@ impl BlockRules { pub fn lookup(&self, number: NumberFor, hash: &B::Hash) -> LookupResult { if let Some(hash_for_height) = self.forks.get(&number) { if hash_for_height != hash { - return LookupResult::Expected(hash_for_height.clone()); + return LookupResult::Expected(hash_for_height.clone()) } } diff --git a/client/service/src/client/call_executor.rs b/client/service/src/client/call_executor.rs index a444819947607..6d4fe3c360131 100644 --- a/client/service/src/client/call_executor.rs +++ b/client/service/src/client/call_executor.rs @@ -16,23 +16,25 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::{sync::Arc, panic::UnwindSafe, result, cell::RefCell}; -use codec::{Encode, Decode}; +use super::{client::ClientConfig, wasm_override::WasmOverride, wasm_substitutes::WasmSubstitutes}; +use codec::{Decode, Encode}; +use sc_client_api::{backend, call_executor::CallExecutor}; +use sc_executor::{NativeVersion, RuntimeInfo, RuntimeVersion}; +use sp_api::{ProofRecorder, StorageTransactionCache}; +use sp_core::{ + traits::{CodeExecutor, RuntimeCode, SpawnNamed}, + NativeOrEncoded, NeverNativeValue, +}; +use sp_externalities::Extensions; use sp_runtime::{ - generic::BlockId, traits::{Block as BlockT, HashFor, NumberFor}, + generic::BlockId, + traits::{Block as BlockT, HashFor, NumberFor}, }; use sp_state_machine::{ - self, OverlayedChanges, Ext, ExecutionManager, StateMachine, ExecutionStrategy, - backend::Backend as _, StorageProof, + self, backend::Backend as _, ExecutionManager, ExecutionStrategy, Ext, OverlayedChanges, + StateMachine, StorageProof, }; -use sc_executor::{RuntimeVersion, RuntimeInfo, 
NativeVersion}; -use sp_externalities::Extensions; -use sp_core::{ - NativeOrEncoded, NeverNativeValue, traits::{CodeExecutor, SpawnNamed, RuntimeCode}, -}; -use sp_api::{ProofRecorder, StorageTransactionCache}; -use sc_client_api::{backend, call_executor::CallExecutor}; -use super::{client::ClientConfig, wasm_override::WasmOverride, wasm_substitutes::WasmSubstitutes}; +use std::{cell::RefCell, panic::UnwindSafe, result, sync::Arc}; /// Call executor that executes methods locally, querying all required /// data from local backend. @@ -57,7 +59,8 @@ where spawn_handle: Box, client_config: ClientConfig, ) -> sp_blockchain::Result { - let wasm_override = client_config.wasm_runtime_overrides + let wasm_override = client_config + .wasm_runtime_overrides .as_ref() .map(|p| WasmOverride::new(p.clone(), executor.clone())) .transpose()?; @@ -91,10 +94,12 @@ where B: backend::Backend, { let spec = self.runtime_version(id)?.spec_version; - let code = if let Some(d) = self.wasm_override + let code = if let Some(d) = self + .wasm_override .as_ref() .map(|o| o.get(&spec, onchain_code.heap_pages)) - .flatten() { + .flatten() + { log::debug!(target: "wasm_overrides", "using WASM override for block {}", id); d } else if let Some(s) = self.wasm_substitutes.get(spec, onchain_code.heap_pages, id) { @@ -113,7 +118,10 @@ where } } -impl Clone for LocalCallExecutor where E: Clone { +impl Clone for LocalCallExecutor +where + E: Clone, +{ fn clone(&self) -> Self { LocalCallExecutor { backend: self.backend.clone(), @@ -145,13 +153,12 @@ where extensions: Option, ) -> sp_blockchain::Result> { let mut changes = OverlayedChanges::default(); - let changes_trie = backend::changes_tries_state_at_block( - id, self.backend.changes_trie_storage() - )?; + let changes_trie = + backend::changes_tries_state_at_block(id, self.backend.changes_trie_storage())?; let state = self.backend.state_at(*id)?; let state_runtime_code = sp_state_machine::backend::BackendRuntimeCode::new(&state); - let runtime_code = 
state_runtime_code.runtime_code() - .map_err(sp_blockchain::Error::RuntimeCode)?; + let runtime_code = + state_runtime_code.runtime_code().map_err(sp_blockchain::Error::RuntimeCode)?; let runtime_code = self.check_override(runtime_code, id)?; let return_data = StateMachine::new( @@ -164,7 +171,8 @@ where extensions.unwrap_or_default(), &runtime_code, self.spawn_handle.clone(), - ).execute_using_consensus_failure_handler::<_, NeverNativeValue, fn() -> _>( + ) + .execute_using_consensus_failure_handler::<_, NeverNativeValue, fn() -> _>( strategy.get_manager(), None, )?; @@ -175,7 +183,7 @@ where fn contextual_call< EM: Fn( Result, Self::Error>, - Result, Self::Error> + Result, Self::Error>, ) -> Result, Self::Error>, R: Encode + Decode + PartialEq, NC: FnOnce() -> result::Result + UnwindSafe, @@ -185,15 +193,17 @@ where method: &str, call_data: &[u8], changes: &RefCell, - storage_transaction_cache: Option<&RefCell< - StorageTransactionCache - >>, + storage_transaction_cache: Option<&RefCell>>, execution_manager: ExecutionManager, native_call: Option, recorder: &Option>, extensions: Option, - ) -> Result, sp_blockchain::Error> where ExecutionManager: Clone { - let changes_trie_state = backend::changes_tries_state_at_block(at, self.backend.changes_trie_storage())?; + ) -> Result, sp_blockchain::Error> + where + ExecutionManager: Clone, + { + let changes_trie_state = + backend::changes_tries_state_at_block(at, self.backend.changes_trie_storage())?; let mut storage_transaction_cache = storage_transaction_cache.map(|c| c.borrow_mut()); let mut state = self.backend.state_at(*at)?; @@ -202,16 +212,17 @@ where match recorder { Some(recorder) => { - let trie_state = state.as_trie_backend() - .ok_or_else(|| - Box::new(sp_state_machine::ExecutionError::UnableToGenerateProof) as Box - )?; + let trie_state = state.as_trie_backend().ok_or_else(|| { + Box::new(sp_state_machine::ExecutionError::UnableToGenerateProof) + as Box + })?; - let state_runtime_code = 
sp_state_machine::backend::BackendRuntimeCode::new(trie_state); + let state_runtime_code = + sp_state_machine::backend::BackendRuntimeCode::new(trie_state); // It is important to extract the runtime code here before we create the proof // recorder. - let runtime_code = state_runtime_code.runtime_code() - .map_err(sp_blockchain::Error::RuntimeCode)?; + let runtime_code = + state_runtime_code.runtime_code().map_err(sp_blockchain::Error::RuntimeCode)?; let runtime_code = self.check_override(runtime_code, at)?; let backend = sp_state_machine::ProvingBackend::new_with_recorder( @@ -239,8 +250,8 @@ where }, None => { let state_runtime_code = sp_state_machine::backend::BackendRuntimeCode::new(&state); - let runtime_code = state_runtime_code.runtime_code() - .map_err(sp_blockchain::Error::RuntimeCode)?; + let runtime_code = + state_runtime_code.runtime_code().map_err(sp_blockchain::Error::RuntimeCode)?; let runtime_code = self.check_override(runtime_code, at)?; let mut state_machine = StateMachine::new( @@ -253,34 +264,31 @@ where extensions.unwrap_or_default(), &runtime_code, self.spawn_handle.clone(), - ).with_storage_transaction_cache(storage_transaction_cache.as_mut().map(|c| &mut **c)); + ) + .with_storage_transaction_cache( + storage_transaction_cache.as_mut().map(|c| &mut **c), + ); state_machine.execute_using_consensus_failure_handler( execution_manager, native_call.map(|n| || (n)().map_err(|e| Box::new(e) as Box<_>)), ) - } - }.map_err(Into::into) + }, + } + .map_err(Into::into) } fn runtime_version(&self, id: &BlockId) -> sp_blockchain::Result { let mut overlay = OverlayedChanges::default(); - let changes_trie_state = backend::changes_tries_state_at_block( - id, - self.backend.changes_trie_storage(), - )?; + let changes_trie_state = + backend::changes_tries_state_at_block(id, self.backend.changes_trie_storage())?; let state = self.backend.state_at(*id)?; let mut cache = StorageTransactionCache::::default(); - let mut ext = Ext::new( - &mut overlay, - &mut cache, 
- &state, - changes_trie_state, - None, - ); + let mut ext = Ext::new(&mut overlay, &mut cache, &state, changes_trie_state, None); let state_runtime_code = sp_state_machine::backend::BackendRuntimeCode::new(&state); - let runtime_code = state_runtime_code.runtime_code() - .map_err(sp_blockchain::Error::RuntimeCode)?; - self.executor.runtime_version(&mut ext, &runtime_code) + let runtime_code = + state_runtime_code.runtime_code().map_err(sp_blockchain::Error::RuntimeCode)?; + self.executor + .runtime_version(&mut ext, &runtime_code) .map_err(|e| sp_blockchain::Error::VersionInvalid(format!("{:?}", e)).into()) } @@ -289,11 +297,11 @@ where trie_state: &sp_state_machine::TrieBackend>, overlay: &mut OverlayedChanges, method: &str, - call_data: &[u8] + call_data: &[u8], ) -> Result<(Vec, StorageProof), sp_blockchain::Error> { let state_runtime_code = sp_state_machine::backend::BackendRuntimeCode::new(trie_state); - let runtime_code = state_runtime_code.runtime_code() - .map_err(sp_blockchain::Error::RuntimeCode)?; + let runtime_code = + state_runtime_code.runtime_code().map_err(sp_blockchain::Error::RuntimeCode)?; sp_state_machine::prove_execution_on_trie_backend::<_, _, NumberFor, _, _>( trie_state, overlay, @@ -312,19 +320,16 @@ where } impl sp_version::GetRuntimeVersion for LocalCallExecutor - where - B: backend::Backend, - E: CodeExecutor + RuntimeInfo + Clone + 'static, - Block: BlockT, +where + B: backend::Backend, + E: CodeExecutor + RuntimeInfo + Clone + 'static, + Block: BlockT, { fn native_version(&self) -> &sp_version::NativeVersion { self.executor.native_version() } - fn runtime_version( - &self, - at: &BlockId, - ) -> Result { + fn runtime_version(&self, at: &BlockId) -> Result { CallExecutor::runtime_version(self, at).map_err(|e| format!("{:?}", e)) } } @@ -332,10 +337,13 @@ impl sp_version::GetRuntimeVersion for LocalCallExecutor = Mutex>>; /// Substrate Client -pub struct Client where Block: BlockT { +pub struct Client +where + Block: BlockT, +{ backend: 
Arc, executor: E, storage_notifications: Mutex>, @@ -157,7 +153,7 @@ enum PrepareStorageChangesResult, Block: BlockT> { } /// Create an instance of in-memory client. -#[cfg(feature="test-helpers")] +#[cfg(feature = "test-helpers")] pub fn new_in_mem( executor: E, genesis_storage: &S, @@ -166,12 +162,10 @@ pub fn new_in_mem( telemetry: Option, spawn_handle: Box, config: ClientConfig, -) -> sp_blockchain::Result, - LocalCallExecutor, E>, - Block, - RA ->> where +) -> sp_blockchain::Result< + Client, LocalCallExecutor, E>, Block, RA>, +> +where E: CodeExecutor + RuntimeInfo, S: BuildStorage, Block: BlockT, @@ -218,7 +212,7 @@ impl Default for ClientConfig { /// Create a client with the explicitly provided backend. /// This is useful for testing backend implementations. -#[cfg(feature="test-helpers")] +#[cfg(feature = "test-helpers")] pub fn new_with_backend( backend: Arc, executor: E, @@ -229,13 +223,14 @@ pub fn new_with_backend( telemetry: Option, config: ClientConfig, ) -> sp_blockchain::Result, Block, RA>> - where - E: CodeExecutor + RuntimeInfo, - S: BuildStorage, - Block: BlockT, - B: backend::LocalBackend + 'static, +where + E: CodeExecutor + RuntimeInfo, + S: BuildStorage, + Block: BlockT, + B: backend::LocalBackend + 'static, { - let call_executor = LocalCallExecutor::new(backend.clone(), executor, spawn_handle, config.clone())?; + let call_executor = + LocalCallExecutor::new(backend.clone(), executor, spawn_handle, config.clone())?; let extensions = ExecutionExtensions::new( Default::default(), keystore, @@ -254,7 +249,8 @@ pub fn new_with_backend( ) } -impl BlockOf for Client where +impl BlockOf for Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, @@ -263,15 +259,15 @@ impl BlockOf for Client where } impl LockImportRun for Client - where - B: backend::Backend, - E: CallExecutor, - Block: BlockT, +where + B: backend::Backend, + E: CallExecutor, + Block: BlockT, { fn lock_import_and_run(&self, f: F) -> Result - where - F: FnOnce(&mut 
ClientImportOperation) -> Result, - Err: From, + where + F: FnOnce(&mut ClientImportOperation) -> Result, + Err: From, { let inner = || { let _import_lock = self.backend.get_import_lock().write(); @@ -301,21 +297,22 @@ impl LockImportRun for Client } impl LockImportRun for &Client - where - Block: BlockT, - B: backend::Backend, - E: CallExecutor, +where + Block: BlockT, + B: backend::Backend, + E: CallExecutor, { fn lock_import_and_run(&self, f: F) -> Result - where - F: FnOnce(&mut ClientImportOperation) -> Result, - Err: From, + where + F: FnOnce(&mut ClientImportOperation) -> Result, + Err: From, { (**self).lock_import_and_run(f) } } -impl Client where +impl Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, @@ -335,12 +332,13 @@ impl Client where ) -> sp_blockchain::Result { let info = backend.blockchain().info(); if info.finalized_state.is_none() { - let genesis_storage = build_genesis_storage.build_storage() - .map_err(sp_blockchain::Error::Storage)?; + let genesis_storage = + build_genesis_storage.build_storage().map_err(sp_blockchain::Error::Storage)?; let mut op = backend.begin_operation()?; let state_root = op.set_genesis_state(genesis_storage, !config.no_genesis)?; let genesis_block = genesis::construct_genesis_block::(state_root.into()); - info!("🔨 Initializing Genesis block/state (state: {}, header-hash: {})", + info!( + "🔨 Initializing Genesis block/state (state: {}, header-hash: {})", genesis_block.header().state_root(), genesis_block.header().hash() ); @@ -396,8 +394,11 @@ impl Client where /// Get the code at a given block. pub fn code_at(&self, id: &BlockId) -> sp_blockchain::Result> { Ok(StorageProvider::storage(self, id, &StorageKey(well_known_keys::CODE.to_vec()))? 
- .expect("None is returned if there's no value stored for the given key;\ - ':code' key is always defined; qed").0) + .expect( + "None is returned if there's no value stored for the given key;\ + ':code' key is always defined; qed", + ) + .0) } /// Get the RuntimeVersion at a given block. @@ -411,7 +412,9 @@ impl Client where id: &BlockId, cht_size: NumberFor, ) -> sp_blockchain::Result<(Block::Header, StorageProof)> { - let proof_error = || sp_blockchain::Error::Backend(format!("Failed to generate header proof for {:?}", id)); + let proof_error = || { + sp_blockchain::Error::Backend(format!("Failed to generate header proof for {:?}", id)) + }; let header = self.backend.blockchain().expect_header(*id)?; let block_num = *header.number(); let cht_num = cht::block_to_cht_number(cht_size, block_num).ok_or_else(proof_error)?; @@ -449,12 +452,13 @@ impl Client where required_roots_proofs: Mutex, Block::Hash>>, } - impl<'a, Block: BlockT> ChangesTrieRootsStorage, NumberFor> for - AccessedRootsRecorder<'a, Block> + impl<'a, Block: BlockT> ChangesTrieRootsStorage, NumberFor> + for AccessedRootsRecorder<'a, Block> { - fn build_anchor(&self, hash: Block::Hash) - -> Result>, String> - { + fn build_anchor( + &self, + hash: Block::Hash, + ) -> Result>, String> { self.storage.build_anchor(hash) } @@ -466,22 +470,19 @@ impl Client where let root = self.storage.root(anchor, block)?; if block < self.min { if let Some(ref root) = root { - self.required_roots_proofs.lock().insert( - block, - root.clone() - ); + self.required_roots_proofs.lock().insert(block, root.clone()); } } Ok(root) } } - impl<'a, Block: BlockT> ChangesTrieStorage, NumberFor> for - AccessedRootsRecorder<'a, Block> + impl<'a, Block: BlockT> ChangesTrieStorage, NumberFor> + for AccessedRootsRecorder<'a, Block> { - fn as_roots_storage(&self) - -> &dyn sp_state_machine::ChangesTrieRootsStorage, NumberFor> - { + fn as_roots_storage( + &self, + ) -> &dyn sp_state_machine::ChangesTrieRootsStorage, NumberFor> { self } @@ 
-498,10 +499,11 @@ impl Client where } } - let first_number = self.backend.blockchain() - .expect_block_number_from_id(&BlockId::Hash(first))?; + let first_number = + self.backend.blockchain().expect_block_number_from_id(&BlockId::Hash(first))?; let (storage, configs) = self.require_changes_trie(first_number, last, true)?; - let min_number = self.backend.blockchain().expect_block_number_from_id(&BlockId::Hash(min))?; + let min_number = + self.backend.blockchain().expect_block_number_from_id(&BlockId::Hash(min))?; let recording_storage = AccessedRootsRecorder:: { storage: storage.storage(), @@ -517,8 +519,8 @@ impl Client where // fetch key changes proof let mut proof = Vec::new(); for (config_zero, config_end, config) in configs { - let last_number = self.backend.blockchain() - .expect_block_number_from_id(&BlockId::Hash(last))?; + let last_number = + self.backend.blockchain().expect_block_number_from_id(&BlockId::Hash(last))?; let config_range = ChangesTrieConfigurationRange { config: &config, zero: config_zero, @@ -528,10 +530,7 @@ impl Client where config_range, &recording_storage, first_number, - &ChangesTrieAnchorBlockId { - hash: convert_hash(&last), - number: last_number, - }, + &ChangesTrieAnchorBlockId { hash: convert_hash(&last), number: last_number }, max_number, storage_key, &key.0, @@ -554,20 +553,26 @@ impl Client where } /// Generate CHT-based proof for roots of changes tries at given blocks. 
- fn changes_trie_roots_proof>>( + fn changes_trie_roots_proof>>( &self, cht_size: NumberFor, - blocks: I + blocks: I, ) -> sp_blockchain::Result { // most probably we have touched several changes tries that are parts of the single CHT // => GroupBy changes tries by CHT number and then gather proof for the whole group at once let mut proofs = Vec::new(); - cht::for_each_cht_group::(cht_size, blocks, |_, cht_num, cht_blocks| { - let cht_proof = self.changes_trie_roots_proof_at_cht(cht_size, cht_num, cht_blocks)?; - proofs.push(cht_proof); - Ok(()) - }, ())?; + cht::for_each_cht_group::( + cht_size, + blocks, + |_, cht_num, cht_blocks| { + let cht_proof = + self.changes_trie_roots_proof_at_cht(cht_size, cht_num, cht_blocks)?; + proofs.push(cht_proof); + Ok(()) + }, + (), + )?; Ok(StorageProof::merge(proofs)) } @@ -577,7 +582,7 @@ impl Client where &self, cht_size: NumberFor, cht_num: NumberFor, - blocks: Vec> + blocks: Vec>, ) -> sp_blockchain::Result { let cht_start = cht::start_number(cht_size, cht_num); let mut current_num = cht_start; @@ -586,16 +591,14 @@ impl Client where current_num = current_num + One::one(); Some(old_current_num) }); - let roots = cht_range - .map(|num| self.header(&BlockId::Number(num)) - .map(|block| - block.and_then(|block| block.digest().log(DigestItem::as_changes_trie_root).cloned())) - ); + let roots = cht_range.map(|num| { + self.header(&BlockId::Number(num)).map(|block| { + block + .and_then(|block| block.digest().log(DigestItem::as_changes_trie_root).cloned()) + }) + }); let proof = cht::build_proof::, _, _>( - cht_size, - cht_num, - blocks, - roots, + cht_size, cht_num, blocks, roots, )?; Ok(proof) } @@ -616,7 +619,9 @@ impl Client where &dyn PrunableStateChangesTrieStorage, Vec<(NumberFor, Option<(NumberFor, Block::Hash)>, ChangesTrieConfiguration)>, )> { - let storage = self.backend.changes_trie_storage() + let storage = self + .backend + .changes_trie_storage() .ok_or_else(|| sp_blockchain::Error::ChangesTriesNotSupported)?; let 
mut configs = Vec::with_capacity(1); @@ -630,10 +635,14 @@ impl Client where } if config_range.zero.0 < first { - break; + break } - current = *self.backend.blockchain().expect_header(BlockId::Hash(config_range.zero.1))?.parent_hash(); + current = *self + .backend + .blockchain() + .expect_header(BlockId::Hash(config_range.zero.1))? + .parent_hash(); } Ok((storage, configs)) @@ -646,11 +655,14 @@ impl Client where operation: &mut ClientImportOperation, import_block: BlockImportParams>, new_cache: HashMap>, - storage_changes: Option>>, - ) -> sp_blockchain::Result where + storage_changes: Option< + sp_consensus::StorageChanges>, + >, + ) -> sp_blockchain::Result + where Self: ProvideRuntimeApi, - >::Api: CoreApi + - ApiExt, + >::Api: + CoreApi + ApiExt, { let BlockImportParams { origin, @@ -711,9 +723,7 @@ impl Client where // don't send telemetry block import events during initial sync for every // block to avoid spamming the telemetry server, these events will be randomly // sent at a rate of 1/10. 
- if origin != BlockOrigin::NetworkInitialSync || - rand::thread_rng().gen_bool(0.1) - { + if origin != BlockOrigin::NetworkInitialSync || rand::thread_rng().gen_bool(0.1) { telemetry!( self.telemetry; SUBSTRATE_INFO; @@ -738,23 +748,26 @@ impl Client where justifications: Option, body: Option>, indexed_body: Option>>, - storage_changes: Option>>, + storage_changes: Option< + sp_consensus::StorageChanges>, + >, new_cache: HashMap>, finalized: bool, aux: Vec<(Vec, Option>)>, fork_choice: ForkChoiceStrategy, import_existing: bool, - ) -> sp_blockchain::Result where + ) -> sp_blockchain::Result + where Self: ProvideRuntimeApi, - >::Api: CoreApi + - ApiExt, + >::Api: + CoreApi + ApiExt, { let parent_hash = import_headers.post().parent_hash().clone(); let status = self.backend.blockchain().status(BlockId::Hash(hash))?; match (import_existing, status) { (false, blockchain::BlockStatus::InChain) => return Ok(ImportResult::AlreadyInChain), (false, blockchain::BlockStatus::Unknown) => {}, - (true, blockchain::BlockStatus::InChain) => {}, + (true, blockchain::BlockStatus::InChain) => {}, (true, blockchain::BlockStatus::Unknown) => {}, } @@ -762,17 +775,18 @@ impl Client where // the block is lower than our last finalized block so it must revert // finality, refusing import. - if status == blockchain::BlockStatus::Unknown - && *import_headers.post().number() <= info.finalized_number + if status == blockchain::BlockStatus::Unknown && + *import_headers.post().number() <= info.finalized_number { - return Err(sp_blockchain::Error::NotInFinalizedChain); + return Err(sp_blockchain::Error::NotInFinalizedChain) } // this is a fairly arbitrary choice of where to draw the line on making notifications, // but the general goal is to only make notifications when we are already fully synced // and get a new chain head. 
let make_notifications = match origin { - BlockOrigin::NetworkBroadcast | BlockOrigin::Own | BlockOrigin::ConsensusBroadcast => true, + BlockOrigin::NetworkBroadcast | BlockOrigin::Own | BlockOrigin::ConsensusBroadcast => + true, BlockOrigin::Genesis | BlockOrigin::NetworkInitialSync | BlockOrigin::File => false, }; @@ -780,15 +794,10 @@ impl Client where Some(storage_changes) => { let storage_changes = match storage_changes { sp_consensus::StorageChanges::Changes(storage_changes) => { - self.backend.begin_state_operation(&mut operation.op, BlockId::Hash(parent_hash))?; - let ( - main_sc, - child_sc, - offchain_sc, - tx, _, - changes_trie_tx, - tx_index, - ) = storage_changes.into_inner(); + self.backend + .begin_state_operation(&mut operation.op, BlockId::Hash(parent_hash))?; + let (main_sc, child_sc, offchain_sc, tx, _, changes_trie_tx, tx_index) = + storage_changes.into_inner(); if self.config.offchain_indexing_api { operation.op.update_offchain_storage(offchain_sc)?; @@ -803,7 +812,7 @@ impl Client where } Some((main_sc, child_sc)) - } + }, sp_consensus::StorageChanges::Import(changes) => { let storage = sp_storage::Storage { top: changes.state.into_iter().collect(), @@ -815,10 +824,10 @@ impl Client where // State root mismatch when importing state. This should not happen in safe fast sync mode, // but may happen in unsafe mode. 
warn!("Error imporing state: State root mismatch."); - return Err(Error::InvalidStateRoot); + return Err(Error::InvalidStateRoot) } None - } + }, }; // ensure parent block is finalized to maintain invariant that @@ -835,15 +844,16 @@ impl Client where operation.op.update_cache(new_cache); storage_changes - }, None => None, }; - let is_new_best = finalized || match fork_choice { - ForkChoiceStrategy::LongestChain => import_headers.post().number() > &info.best_number, - ForkChoiceStrategy::Custom(v) => v, - }; + let is_new_best = finalized || + match fork_choice { + ForkChoiceStrategy::LongestChain => + import_headers.post().number() > &info.best_number, + ForkChoiceStrategy::Custom(v) => v, + }; let leaf_state = if finalized { NewBlockState::Final @@ -854,11 +864,8 @@ impl Client where }; let tree_route = if is_new_best && info.best_hash != parent_hash { - let route_from_best = sp_blockchain::tree_route( - self.backend.blockchain(), - info.best_hash, - parent_hash, - )?; + let route_from_best = + sp_blockchain::tree_route(self.backend.blockchain(), info.best_hash, parent_hash)?; Some(route_from_best) } else { None @@ -910,20 +917,24 @@ impl Client where &self, import_block: &mut BlockImportParams>, ) -> sp_blockchain::Result> - where - Self: ProvideRuntimeApi, - >::Api: CoreApi + - ApiExt, + where + Self: ProvideRuntimeApi, + >::Api: + CoreApi + ApiExt, { let parent_hash = import_block.header.parent_hash(); let at = BlockId::Hash(*parent_hash); let state_action = std::mem::replace(&mut import_block.state_action, StateAction::Skip); let (enact_state, storage_changes) = match (self.block_status(&at)?, state_action) { - (BlockStatus::Unknown, _) => return Ok(PrepareStorageChangesResult::Discard(ImportResult::UnknownParent)), - (BlockStatus::KnownBad, _) => return Ok(PrepareStorageChangesResult::Discard(ImportResult::KnownBad)), + (BlockStatus::Unknown, _) => + return Ok(PrepareStorageChangesResult::Discard(ImportResult::UnknownParent)), + (BlockStatus::KnownBad, _) => 
+ return Ok(PrepareStorageChangesResult::Discard(ImportResult::KnownBad)), (_, StateAction::Skip) => (false, None), - (BlockStatus::InChainPruned, StateAction::ApplyChanges(sp_consensus::StorageChanges::Changes(_))) => - return Ok(PrepareStorageChangesResult::Discard(ImportResult::MissingState)), + ( + BlockStatus::InChainPruned, + StateAction::ApplyChanges(sp_consensus::StorageChanges::Changes(_)), + ) => return Ok(PrepareStorageChangesResult::Discard(ImportResult::MissingState)), (BlockStatus::InChainPruned, StateAction::Execute) => return Ok(PrepareStorageChangesResult::Discard(ImportResult::MissingState)), (BlockStatus::InChainPruned, StateAction::ExecuteIfPossible) => (false, None), @@ -953,19 +964,14 @@ impl Client where )?; let state = self.backend.state_at(at)?; - let changes_trie_state = changes_tries_state_at_block( - &at, - self.backend.changes_trie_storage(), - )?; + let changes_trie_state = + changes_tries_state_at_block(&at, self.backend.changes_trie_storage())?; - let gen_storage_changes = runtime_api.into_storage_changes( - &state, - changes_trie_state.as_ref(), - *parent_hash, - ).map_err(sp_blockchain::Error::Storage)?; + let gen_storage_changes = runtime_api + .into_storage_changes(&state, changes_trie_state.as_ref(), *parent_hash) + .map_err(sp_blockchain::Error::Storage)?; - if import_block.header.state_root() - != &gen_storage_changes.transaction_storage_root + if import_block.header.state_root() != &gen_storage_changes.transaction_storage_root { return Err(Error::InvalidStateRoot) } @@ -992,20 +998,28 @@ impl Client where let last_finalized = self.backend.blockchain().last_finalized()?; if block == last_finalized { - warn!("Possible safety violation: attempted to re-finalize last finalized block {:?} ", last_finalized); - return Ok(()); + warn!( + "Possible safety violation: attempted to re-finalize last finalized block {:?} ", + last_finalized + ); + return Ok(()) } - let route_from_finalized = 
sp_blockchain::tree_route(self.backend.blockchain(), last_finalized, block)?; + let route_from_finalized = + sp_blockchain::tree_route(self.backend.blockchain(), last_finalized, block)?; if let Some(retracted) = route_from_finalized.retracted().get(0) { - warn!("Safety violation: attempted to revert finalized block {:?} which is not in the \ - same chain as last finalized {:?}", retracted, last_finalized); + warn!( + "Safety violation: attempted to revert finalized block {:?} which is not in the \ + same chain as last finalized {:?}", + retracted, last_finalized + ); - return Err(sp_blockchain::Error::NotInFinalizedChain); + return Err(sp_blockchain::Error::NotInFinalizedChain) } - let route_from_best = sp_blockchain::tree_route(self.backend.blockchain(), best_block, block)?; + let route_from_best = + sp_blockchain::tree_route(self.backend.blockchain(), best_block, block)?; // if the block is not a direct ancestor of the current best chain, // then some other block is the common ancestor. @@ -1042,10 +1056,7 @@ impl Client where Ok(()) } - fn notify_finalized( - &self, - notify_finalized: Vec, - ) -> sp_blockchain::Result<()> { + fn notify_finalized(&self, notify_finalized: Vec) -> sp_blockchain::Result<()> { let mut sinks = self.finality_notification_sinks.lock(); if notify_finalized.is_empty() { @@ -1054,17 +1065,16 @@ impl Client where // would also remove any closed sinks. sinks.retain(|sink| !sink.is_closed()); - return Ok(()); + return Ok(()) } // We assume the list is sorted and only want to inform the // telemetry once about the finalized block. if let Some(last) = notify_finalized.last() { - let header = self.header(&BlockId::Hash(*last))? 
- .expect( - "Header already known to exist in DB because it is \ - indicated in the tree route; qed" - ); + let header = self.header(&BlockId::Hash(*last))?.expect( + "Header already known to exist in DB because it is \ + indicated in the tree route; qed", + ); telemetry!( self.telemetry; @@ -1076,16 +1086,12 @@ impl Client where } for finalized_hash in notify_finalized { - let header = self.header(&BlockId::Hash(finalized_hash))? - .expect( - "Header already known to exist in DB because it is \ - indicated in the tree route; qed" - ); + let header = self.header(&BlockId::Hash(finalized_hash))?.expect( + "Header already known to exist in DB because it is \ + indicated in the tree route; qed", + ); - let notification = FinalityNotification { - header, - hash: finalized_hash, - }; + let notification = FinalityNotification { header, hash: finalized_hash }; sinks.retain(|sink| sink.unbounded_send(notification.clone()).is_ok()); } @@ -1106,22 +1112,19 @@ impl Client where // won't send any import notifications which could lead to a // temporary leak of closed/discarded notification sinks (e.g. // from consensus code). - self.import_notification_sinks - .lock() - .retain(|sink| !sink.is_closed()); + self.import_notification_sinks.lock().retain(|sink| !sink.is_closed()); - return Ok(()); - } + return Ok(()) + }, }; if let Some(storage_changes) = notify_import.storage_changes { // TODO [ToDr] How to handle re-orgs? Should we re-emit all storage changes? 
- self.storage_notifications.lock() - .trigger( - ¬ify_import.hash, - storage_changes.0.into_iter(), - storage_changes.1.into_iter().map(|(sk, v)| (sk, v.into_iter())), - ); + self.storage_notifications.lock().trigger( + ¬ify_import.hash, + storage_changes.0.into_iter(), + storage_changes.1.into_iter().map(|(sk, v)| (sk, v.into_iter())), + ); } let notification = BlockImportNotification:: { @@ -1132,7 +1135,8 @@ impl Client where tree_route: notify_import.tree_route.map(Arc::new), }; - self.import_notification_sinks.lock() + self.import_notification_sinks + .lock() .retain(|sink| sink.unbounded_send(notification.clone()).is_ok()); Ok(()) @@ -1179,7 +1183,7 @@ impl Client where // this can probably be implemented more efficiently if let BlockId::Hash(ref h) = id { if self.importing_block.read().as_ref().map_or(false, |importing| h == importing) { - return Ok(BlockStatus::Queued); + return Ok(BlockStatus::Queued) } } let hash_and_number = match id.clone() { @@ -1187,24 +1191,29 @@ impl Client where BlockId::Number(n) => self.backend.blockchain().hash(n)?.map(|hash| (hash, n)), }; match hash_and_number { - Some((hash, number)) => { + Some((hash, number)) => if self.backend.have_state_at(&hash, number) { Ok(BlockStatus::InChainWithState) } else { Ok(BlockStatus::InChainPruned) - } - } + }, None => Ok(BlockStatus::Unknown), } } /// Get block header by id. - pub fn header(&self, id: &BlockId) -> sp_blockchain::Result::Header>> { + pub fn header( + &self, + id: &BlockId, + ) -> sp_blockchain::Result::Header>> { self.backend.blockchain().header(*id) } /// Get block body by id. 
- pub fn body(&self, id: &BlockId) -> sp_blockchain::Result::Extrinsic>>> { + pub fn body( + &self, + id: &BlockId, + ) -> sp_blockchain::Result::Extrinsic>>> { self.backend.blockchain().body(*id) } @@ -1215,13 +1224,15 @@ impl Client where max_generation: NumberFor, ) -> sp_blockchain::Result> { let load_header = |id: Block::Hash| -> sp_blockchain::Result { - self.backend.blockchain().header(BlockId::Hash(id))? + self.backend + .blockchain() + .header(BlockId::Hash(id))? .ok_or_else(|| Error::UnknownBlock(format!("{:?}", id))) }; let genesis_hash = self.backend.blockchain().info().genesis_hash; if genesis_hash == target_hash { - return Ok(Vec::new()); + return Ok(Vec::new()) } let mut current_hash = target_hash; @@ -1237,7 +1248,7 @@ impl Client where current_hash = ancestor_hash; if genesis_hash == current_hash { - break; + break } current = ancestor; @@ -1250,21 +1261,20 @@ impl Client where } } -impl UsageProvider for Client where +impl UsageProvider for Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, { /// Get usage info about current client. 
fn usage_info(&self) -> ClientInfo { - ClientInfo { - chain: self.chain_info(), - usage: self.backend.usage_info(), - } + ClientInfo { chain: self.chain_info(), usage: self.backend.usage_info() } } } -impl ProofProvider for Client where +impl ProofProvider for Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, @@ -1272,29 +1282,26 @@ impl ProofProvider for Client where fn read_proof( &self, id: &BlockId, - keys: &mut dyn Iterator, + keys: &mut dyn Iterator, ) -> sp_blockchain::Result { - self.state_at(id) - .and_then(|state| prove_read(state, keys) - .map_err(Into::into)) + self.state_at(id).and_then(|state| prove_read(state, keys).map_err(Into::into)) } fn read_child_proof( &self, id: &BlockId, child_info: &ChildInfo, - keys: &mut dyn Iterator, + keys: &mut dyn Iterator, ) -> sp_blockchain::Result { self.state_at(id) - .and_then(|state| prove_child_read(state, child_info, keys) - .map_err(Into::into)) + .and_then(|state| prove_child_read(state, child_info, keys).map_err(Into::into)) } fn execution_proof( &self, id: &BlockId, method: &str, - call_data: &[u8] + call_data: &[u8], ) -> sp_blockchain::Result<(Vec, StorageProof)> { // Make sure we include the `:code` and `:heap_pages` in the execution proof to be // backwards compatible. 
@@ -1306,17 +1313,14 @@ impl ProofProvider for Client where )?; let state = self.state_at(id)?; - prove_execution( - state, - &self.executor, - method, - call_data, - ).map(|(r, p)| { - (r, StorageProof::merge(vec![p, code_proof])) - }) + prove_execution(state, &self.executor, method, call_data) + .map(|(r, p)| (r, StorageProof::merge(vec![p, code_proof]))) } - fn header_proof(&self, id: &BlockId) -> sp_blockchain::Result<(Block::Header, StorageProof)> { + fn header_proof( + &self, + id: &BlockId, + ) -> sp_blockchain::Result<(Block::Header, StorageProof)> { self.header_proof_with_cht_size(id, cht::size()) } @@ -1329,15 +1333,7 @@ impl ProofProvider for Client where storage_key: Option<&PrefixedStorageKey>, key: &StorageKey, ) -> sp_blockchain::Result> { - self.key_changes_proof_with_cht_size( - first, - last, - min, - max, - storage_key, - key, - cht::size(), - ) + self.key_changes_proof_with_cht_size(first, last, min, max, storage_key, key, cht::size()) } fn read_proof_collection( @@ -1348,11 +1344,11 @@ impl ProofProvider for Client where ) -> sp_blockchain::Result<(StorageProof, u32)> { let state = self.state_at(id)?; Ok(prove_range_read_with_size::<_, HashFor>( - state, - None, - None, - size_limit, - Some(start_key) + state, + None, + None, + size_limit, + Some(start_key), )?) } @@ -1376,14 +1372,13 @@ impl ProofProvider for Client where .unwrap_or_default(); let size = value.len() + next_key.len(); if total_size + size > size_limit && !entries.is_empty() { - break; + break } total_size += size; entries.push((next_key.clone(), value)); current_key = next_key; } Ok(entries) - } fn verify_range_proof( @@ -1393,25 +1388,24 @@ impl ProofProvider for Client where start_key: &[u8], ) -> sp_blockchain::Result<(Vec<(Vec, Vec)>, bool)> { Ok(read_range_proof_check::>( - root, - proof, - None, - None, - None, - Some(start_key), + root, + proof, + None, + None, + None, + Some(start_key), )?) 
} } - impl BlockBuilderProvider for Client - where - B: backend::Backend + Send + Sync + 'static, - E: CallExecutor + Send + Sync + 'static, - Block: BlockT, - Self: ChainHeaderBackend + ProvideRuntimeApi, - >::Api: ApiExt> - + BlockBuilderApi, +where + B: backend::Backend + Send + Sync + 'static, + E: CallExecutor + Send + Sync + 'static, + Block: BlockT, + Self: ChainHeaderBackend + ProvideRuntimeApi, + >::Api: + ApiExt> + BlockBuilderApi, { fn new_block_at>( &self, @@ -1425,7 +1419,7 @@ impl BlockBuilderProvider for Client BlockBuilderProvider for Client ExecutorProvider for Client where +impl ExecutorProvider for Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, @@ -1461,19 +1456,26 @@ impl ExecutorProvider for Client where } } -impl StorageProvider for Client where +impl StorageProvider for Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, { - fn storage_keys(&self, id: &BlockId, key_prefix: &StorageKey) -> sp_blockchain::Result> { + fn storage_keys( + &self, + id: &BlockId, + key_prefix: &StorageKey, + ) -> sp_blockchain::Result> { let keys = self.state_at(id)?.keys(&key_prefix.0).into_iter().map(StorageKey).collect(); Ok(keys) } - fn storage_pairs(&self, id: &BlockId, key_prefix: &StorageKey) - -> sp_blockchain::Result> - { + fn storage_pairs( + &self, + id: &BlockId, + key_prefix: &StorageKey, + ) -> sp_blockchain::Result> { let state = self.state_at(id)?; let keys = state .keys(&key_prefix.0) @@ -1490,13 +1492,10 @@ impl StorageProvider for Client wher &self, id: &BlockId, prefix: Option<&'a StorageKey>, - start_key: Option<&StorageKey> + start_key: Option<&StorageKey>, ) -> sp_blockchain::Result> { let state = self.state_at(id)?; - let start_key = start_key - .or(prefix) - .map(|key| key.0.clone()) - .unwrap_or_else(Vec::new); + let start_key = start_key.or(prefix).map(|key| key.0.clone()).unwrap_or_else(Vec::new); Ok(KeyIterator::new(state, prefix, start_key)) } @@ -1505,13 +1504,10 @@ impl StorageProvider for Client 
wher id: &BlockId, child_info: ChildInfo, prefix: Option<&'a StorageKey>, - start_key: Option<&StorageKey> + start_key: Option<&StorageKey>, ) -> sp_blockchain::Result> { let state = self.state_at(id)?; - let start_key = start_key - .or(prefix) - .map(|key| key.0.clone()) - .unwrap_or_else(Vec::new); + let start_key = start_key.or(prefix).map(|key| key.0.clone()).unwrap_or_else(Vec::new); Ok(KeyIterator::new_child(state, child_info, prefix, start_key)) } @@ -1520,30 +1516,32 @@ impl StorageProvider for Client wher id: &BlockId, key: &StorageKey, ) -> sp_blockchain::Result> { - Ok(self.state_at(id)? - .storage(&key.0).map_err(|e| sp_blockchain::Error::from_state(Box::new(e)))? - .map(StorageData) - ) + Ok(self + .state_at(id)? + .storage(&key.0) + .map_err(|e| sp_blockchain::Error::from_state(Box::new(e)))? + .map(StorageData)) } - fn storage_hash( &self, id: &BlockId, key: &StorageKey, ) -> sp_blockchain::Result> { - Ok(self.state_at(id)? - .storage_hash(&key.0).map_err(|e| sp_blockchain::Error::from_state(Box::new(e)))? - ) + Ok(self + .state_at(id)? + .storage_hash(&key.0) + .map_err(|e| sp_blockchain::Error::from_state(Box::new(e)))?) } fn child_storage_keys( &self, id: &BlockId, child_info: &ChildInfo, - key_prefix: &StorageKey + key_prefix: &StorageKey, ) -> sp_blockchain::Result> { - let keys = self.state_at(id)? + let keys = self + .state_at(id)? .child_keys(child_info, &key_prefix.0) .into_iter() .map(StorageKey) @@ -1555,9 +1553,10 @@ impl StorageProvider for Client wher &self, id: &BlockId, child_info: &ChildInfo, - key: &StorageKey + key: &StorageKey, ) -> sp_blockchain::Result> { - Ok(self.state_at(id)? + Ok(self + .state_at(id)? .child_storage(child_info, &key.0) .map_err(|e| sp_blockchain::Error::from_state(Box::new(e)))? .map(StorageData)) @@ -1567,12 +1566,12 @@ impl StorageProvider for Client wher &self, id: &BlockId, child_info: &ChildInfo, - key: &StorageKey + key: &StorageKey, ) -> sp_blockchain::Result> { - Ok(self.state_at(id)? 
+ Ok(self + .state_at(id)? .child_storage_hash(child_info, &key.0) - .map_err(|e| sp_blockchain::Error::from_state(Box::new(e)))? - ) + .map_err(|e| sp_blockchain::Error::from_state(Box::new(e)))?) } fn max_key_changes_range( @@ -1583,7 +1582,9 @@ impl StorageProvider for Client wher let last_number = self.backend.blockchain().expect_block_number_from_id(&last)?; let last_hash = self.backend.blockchain().expect_block_hash_from_id(&last)?; if first > last_number { - return Err(sp_blockchain::Error::ChangesTrieAccessFailed("Invalid changes trie range".into())); + return Err(sp_blockchain::Error::ChangesTrieAccessFailed( + "Invalid changes trie range".into(), + )) } let (storage, configs) = match self.require_changes_trie(first, last_hash, false).ok() { @@ -1598,7 +1599,7 @@ impl StorageProvider for Client wher let first = std::cmp::max(first_available_changes_trie, oldest_unpruned); Ok(Some((first, last))) }, - None => Ok(None) + None => Ok(None), } } @@ -1607,7 +1608,7 @@ impl StorageProvider for Client wher first: NumberFor, last: BlockId, storage_key: Option<&PrefixedStorageKey>, - key: &StorageKey + key: &StorageKey, ) -> sp_blockchain::Result, u32)>> { let last_number = self.backend.blockchain().expect_block_number_from_id(&last)?; let last_hash = self.backend.blockchain().expect_block_hash_from_id(&last)?; @@ -1618,12 +1619,20 @@ impl StorageProvider for Client wher for (config_zero, config_end, config) in configs { let range_first = ::std::cmp::max(first, config_zero + One::one()); let range_anchor = match config_end { - Some((config_end_number, config_end_hash)) => if last_number > config_end_number { - ChangesTrieAnchorBlockId { hash: config_end_hash, number: config_end_number } - } else { - ChangesTrieAnchorBlockId { hash: convert_hash(&last_hash), number: last_number } - }, - None => ChangesTrieAnchorBlockId { hash: convert_hash(&last_hash), number: last_number }, + Some((config_end_number, config_end_hash)) => + if last_number > config_end_number { + 
ChangesTrieAnchorBlockId { + hash: config_end_hash, + number: config_end_number, + } + } else { + ChangesTrieAnchorBlockId { + hash: convert_hash(&last_hash), + number: last_number, + } + }, + None => + ChangesTrieAnchorBlockId { hash: convert_hash(&last_hash), number: last_number }, }; let config_range = ChangesTrieConfigurationRange { @@ -1638,9 +1647,10 @@ impl StorageProvider for Client wher &range_anchor, best_number, storage_key, - &key.0) - .and_then(|r| r.map(|r| r.map(|(block, tx)| (block, tx))).collect::>()) - .map_err(|err| sp_blockchain::Error::ChangesTrieAccessFailed(err))?; + &key.0, + ) + .and_then(|r| r.map(|r| r.map(|(block, tx)| (block, tx))).collect::>()) + .map_err(|err| sp_blockchain::Error::ChangesTrieAccessFailed(err))?; result.extend(result_range); } @@ -1648,14 +1658,18 @@ impl StorageProvider for Client wher } } -impl HeaderMetadata for Client where +impl HeaderMetadata for Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, { type Error = sp_blockchain::Error; - fn header_metadata(&self, hash: Block::Hash) -> Result, Self::Error> { + fn header_metadata( + &self, + hash: Block::Hash, + ) -> Result, Self::Error> { self.backend.blockchain().header_metadata(hash) } @@ -1668,21 +1682,26 @@ impl HeaderMetadata for Client where } } -impl ProvideUncles for Client where +impl ProvideUncles for Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, { - fn uncles(&self, target_hash: Block::Hash, max_generation: NumberFor) -> sp_blockchain::Result> { + fn uncles( + &self, + target_hash: Block::Hash, + max_generation: NumberFor, + ) -> sp_blockchain::Result> { Ok(Client::uncles(self, target_hash, max_generation)? 
.into_iter() .filter_map(|hash| Client::header(self, &BlockId::Hash(hash)).unwrap_or(None)) - .collect() - ) + .collect()) } } -impl ChainHeaderBackend for Client where +impl ChainHeaderBackend for Client +where B: backend::Backend, E: CallExecutor + Send + Sync, Block: BlockT, @@ -1700,7 +1719,10 @@ impl ChainHeaderBackend for Client wher self.backend.blockchain().status(id) } - fn number(&self, hash: Block::Hash) -> sp_blockchain::Result::Header as HeaderT>::Number>> { + fn number( + &self, + hash: Block::Hash, + ) -> sp_blockchain::Result::Header as HeaderT>::Number>> { self.backend.blockchain().number(hash) } @@ -1709,7 +1731,8 @@ impl ChainHeaderBackend for Client wher } } -impl sp_runtime::traits::BlockIdTo for Client where +impl sp_runtime::traits::BlockIdTo for Client +where B: backend::Backend, E: CallExecutor + Send + Sync, Block: BlockT, @@ -1721,12 +1744,16 @@ impl sp_runtime::traits::BlockIdTo for Client) -> sp_blockchain::Result>> { + fn to_number( + &self, + block_id: &BlockId, + ) -> sp_blockchain::Result>> { self.block_number_from_id(block_id) } } -impl ChainHeaderBackend for &Client where +impl ChainHeaderBackend for &Client +where B: backend::Backend, E: CallExecutor + Send + Sync, Block: BlockT, @@ -1744,7 +1771,10 @@ impl ChainHeaderBackend for &Client whe (**self).status(id) } - fn number(&self, hash: Block::Hash) -> sp_blockchain::Result::Header as HeaderT>::Number>> { + fn number( + &self, + hash: Block::Hash, + ) -> sp_blockchain::Result::Header as HeaderT>::Number>> { (**self).number(hash) } @@ -1753,7 +1783,8 @@ impl ChainHeaderBackend for &Client whe } } -impl ProvideCache for Client where +impl ProvideCache for Client +where B: backend::Backend, Block: BlockT, { @@ -1762,7 +1793,8 @@ impl ProvideCache for Client where } } -impl ProvideRuntimeApi for Client where +impl ProvideRuntimeApi for Client +where B: backend::Backend, E: CallExecutor + Send + Sync, Block: BlockT, @@ -1775,7 +1807,8 @@ impl ProvideRuntimeApi for Client where } } 
-impl CallApiAt for Client where +impl CallApiAt for Client +where B: backend::Backend, E: CallExecutor + Send + Sync, Block: BlockT, @@ -1792,28 +1825,25 @@ impl CallApiAt for Client where ) -> Result, sp_api::ApiError> { let at = params.at; - let (manager, extensions) = self.execution_extensions.manager_and_extensions( - at, - params.context, - ); - - self.executor.contextual_call:: _, _, _>( - at, - params.function, - ¶ms.arguments, - params.overlayed_changes, - Some(params.storage_transaction_cache), - manager, - params.native_call, - params.recorder, - Some(extensions), - ).map_err(Into::into) + let (manager, extensions) = + self.execution_extensions.manager_and_extensions(at, params.context); + + self.executor + .contextual_call:: _, _, _>( + at, + params.function, + ¶ms.arguments, + params.overlayed_changes, + Some(params.storage_transaction_cache), + manager, + params.native_call, + params.recorder, + Some(extensions), + ) + .map_err(Into::into) } - fn runtime_version_at( - &self, - at: &BlockId, - ) -> Result { + fn runtime_version_at(&self, at: &BlockId) -> Result { self.runtime_version_at(at).map_err(Into::into) } } @@ -1822,13 +1852,14 @@ impl CallApiAt for Client where /// objects. Otherwise, importing blocks directly into the client would be bypassing /// important verification work. 
#[async_trait::async_trait] -impl sp_consensus::BlockImport for &Client where +impl sp_consensus::BlockImport for &Client +where B: backend::Backend, E: CallExecutor + Send + Sync, Block: BlockT, Client: ProvideRuntimeApi, - as ProvideRuntimeApi>::Api: CoreApi + - ApiExt, + as ProvideRuntimeApi>::Api: + CoreApi + ApiExt, RA: Sync + Send, backend::TransactionFor: Send + 'static, { @@ -1852,17 +1883,19 @@ impl sp_consensus::BlockImport for &Client return Ok(res), - PrepareStorageChangesResult::Import(storage_changes) => storage_changes, - }; + let storage_changes = + match self.prepare_block_storage_changes(&mut import_block).map_err(|e| { + warn!("Block prepare storage changes error:\n{:?}", e); + ConsensusError::ClientImport(e.to_string()) + })? { + PrepareStorageChangesResult::Discard(res) => return Ok(res), + PrepareStorageChangesResult::Import(storage_changes) => storage_changes, + }; self.lock_import_and_run(|operation| { self.apply_block(operation, import_block, new_cache, storage_changes) - }).map_err(|e| { + }) + .map_err(|e| { warn!("Block import error:\n{:?}", e); ConsensusError::ClientImport(e.to_string()).into() }) @@ -1873,18 +1906,15 @@ impl sp_consensus::BlockImport for &Client, ) -> Result { - let BlockCheckParams { hash, number, parent_hash, allow_missing_state, import_existing } = block; + let BlockCheckParams { hash, number, parent_hash, allow_missing_state, import_existing } = + block; // Check the block against white and black lists if any are defined // (i.e. 
fork blocks and bad blocks respectively) match self.block_rules.lookup(number, &hash) { BlockLookupResult::KnownBad => { - trace!( - "Rejecting known bad block: #{} {:?}", - number, - hash, - ); - return Ok(ImportResult::KnownBad); + trace!("Rejecting known bad block: #{} {:?}", number, hash,); + return Ok(ImportResult::KnownBad) }, BlockLookupResult::Expected(expected_hash) => { trace!( @@ -1893,51 +1923,51 @@ impl sp_consensus::BlockImport for &Client {} + BlockLookupResult::NotSpecial => {}, } // Own status must be checked first. If the block and ancestry is pruned // this function must return `AlreadyInChain` rather than `MissingState` - match self.block_status(&BlockId::Hash(hash)) + match self + .block_status(&BlockId::Hash(hash)) .map_err(|e| ConsensusError::ClientImport(e.to_string()))? { - BlockStatus::InChainWithState | BlockStatus::Queued if !import_existing => { - return Ok(ImportResult::AlreadyInChain) - }, + BlockStatus::InChainWithState | BlockStatus::Queued if !import_existing => + return Ok(ImportResult::AlreadyInChain), BlockStatus::InChainWithState | BlockStatus::Queued => {}, - BlockStatus::InChainPruned if !import_existing => { - return Ok(ImportResult::AlreadyInChain) - }, + BlockStatus::InChainPruned if !import_existing => + return Ok(ImportResult::AlreadyInChain), BlockStatus::InChainPruned => {}, BlockStatus::Unknown => {}, BlockStatus::KnownBad => return Ok(ImportResult::KnownBad), } - match self.block_status(&BlockId::Hash(parent_hash)) + match self + .block_status(&BlockId::Hash(parent_hash)) .map_err(|e| ConsensusError::ClientImport(e.to_string()))? 
- { - BlockStatus::InChainWithState | BlockStatus::Queued => {}, - BlockStatus::Unknown => return Ok(ImportResult::UnknownParent), - BlockStatus::InChainPruned if allow_missing_state => {}, - BlockStatus::InChainPruned => return Ok(ImportResult::MissingState), - BlockStatus::KnownBad => return Ok(ImportResult::KnownBad), - } - + { + BlockStatus::InChainWithState | BlockStatus::Queued => {}, + BlockStatus::Unknown => return Ok(ImportResult::UnknownParent), + BlockStatus::InChainPruned if allow_missing_state => {}, + BlockStatus::InChainPruned => return Ok(ImportResult::MissingState), + BlockStatus::KnownBad => return Ok(ImportResult::KnownBad), + } Ok(ImportResult::imported(false)) } } #[async_trait::async_trait] -impl sp_consensus::BlockImport for Client where +impl sp_consensus::BlockImport for Client +where B: backend::Backend, E: CallExecutor + Send + Sync, Block: BlockT, Self: ProvideRuntimeApi, - >::Api: CoreApi + - ApiExt, + >::Api: + CoreApi + ApiExt, RA: Sync + Send, backend::TransactionFor: Send + 'static, { @@ -1960,7 +1990,8 @@ impl sp_consensus::BlockImport for Client Finalizer for Client where +impl Finalizer for Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, @@ -1995,8 +2026,8 @@ impl Finalizer for Client where } } - -impl Finalizer for &Client where +impl Finalizer for &Client +where B: backend::Backend, E: CallExecutor, Block: BlockT, @@ -2050,10 +2081,10 @@ where } impl BlockBackend for Client - where - B: backend::Backend, - E: CallExecutor, - Block: BlockT, +where + B: backend::Backend, + E: CallExecutor, + Block: BlockT, { fn block_body( &self, @@ -2092,35 +2123,37 @@ impl BlockBackend for Client fn block_indexed_body( &self, - id: &BlockId + id: &BlockId, ) -> sp_blockchain::Result>>> { self.backend.blockchain().block_indexed_body(*id) } } impl backend::AuxStore for Client - where - B: backend::Backend, - E: CallExecutor, - Block: BlockT, - Self: ProvideRuntimeApi, - >::Api: CoreApi, +where + B: backend::Backend, + E: 
CallExecutor, + Block: BlockT, + Self: ProvideRuntimeApi, + >::Api: CoreApi, { /// Insert auxiliary data into key-value store. fn insert_aux< 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, delete: D) -> sp_blockchain::Result<()> { + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> sp_blockchain::Result<()> { // Import is locked here because we may have other block import // operations that tries to set aux data. Note that for consensus // layer, one can always use atomic operations to make sure // import is only locked once. - self.lock_import_and_run(|operation| { - apply_aux(operation, insert, delete) - }) + self.lock_import_and_run(|operation| apply_aux(operation, insert, delete)) } /// Query auxiliary data from key-value store. fn get_aux(&self, key: &[u8]) -> sp_blockchain::Result>> { @@ -2129,20 +2162,24 @@ impl backend::AuxStore for Client } impl backend::AuxStore for &Client - where - B: backend::Backend, - E: CallExecutor, - Block: BlockT, - Client: ProvideRuntimeApi, - as ProvideRuntimeApi>::Api: CoreApi, +where + B: backend::Backend, + E: CallExecutor, + Block: BlockT, + Client: ProvideRuntimeApi, + as ProvideRuntimeApi>::Api: CoreApi, { fn insert_aux< 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, delete: D) -> sp_blockchain::Result<()> { + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + delete: D, + ) -> sp_blockchain::Result<()> { (**self).insert_aux(insert, delete) } @@ -2152,10 +2189,10 @@ impl backend::AuxStore for &Client } impl sp_consensus::block_validation::Chain for Client - where - BE: backend::Backend, - E: CallExecutor, - B: BlockT, +where + BE: backend::Backend, + E: CallExecutor, + B: BlockT, { fn block_status( &self, @@ -2174,8 +2211,10 @@ where fn block_indexed_body( &self, number: NumberFor, - ) ->Result>>, sp_transaction_storage_proof::Error> { - self.backend.blockchain().block_indexed_body(BlockId::number(number)) + ) 
-> Result>>, sp_transaction_storage_proof::Error> { + self.backend + .blockchain() + .block_indexed_body(BlockId::number(number)) .map_err(|e| sp_transaction_storage_proof::Error::Application(Box::new(e))) } @@ -2183,7 +2222,9 @@ where &self, hash: B::Hash, ) -> Result>, sp_transaction_storage_proof::Error> { - self.backend.blockchain().number(hash) + self.backend + .blockchain() + .number(hash) .map_err(|e| sp_transaction_storage_proof::Error::Application(Box::new(e))) } } diff --git a/client/service/src/client/genesis.rs b/client/service/src/client/genesis.rs index 08235f7efb6e3..e764e8e24f105 100644 --- a/client/service/src/client/genesis.rs +++ b/client/service/src/client/genesis.rs @@ -18,17 +18,12 @@ //! Tool for creating the genesis block. -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, Hash as HashT, Zero}; +use sp_runtime::traits::{Block as BlockT, Hash as HashT, Header as HeaderT, Zero}; /// Create a genesis block, given the initial storage. -pub fn construct_genesis_block< - Block: BlockT -> ( - state_root: Block::Hash -) -> Block { - let extrinsics_root = <<::Header as HeaderT>::Hashing as HashT>::trie_root( - Vec::new(), - ); +pub fn construct_genesis_block(state_root: Block::Hash) -> Block { + let extrinsics_root = + <<::Header as HeaderT>::Hashing as HashT>::trie_root(Vec::new()); Block::new( <::Header as HeaderT>::new( @@ -36,8 +31,8 @@ pub fn construct_genesis_block< extrinsics_root, state_root, Default::default(), - Default::default() + Default::default(), ), - Default::default() + Default::default(), ) } diff --git a/client/service/src/client/light.rs b/client/service/src/client/light.rs index 3a09bcbd78de5..82fe17e6855e9 100644 --- a/client/service/src/client/light.rs +++ b/client/service/src/client/light.rs @@ -20,15 +20,20 @@ use std::sync::Arc; +use prometheus_endpoint::Registry; use sc_executor::RuntimeInfo; -use sp_core::traits::{CodeExecutor, SpawnNamed}; use sc_telemetry::TelemetryHandle; -use sp_runtime::BuildStorage; -use 
sp_runtime::traits::{Block as BlockT, HashFor}; use sp_blockchain::Result as ClientResult; -use prometheus_endpoint::Registry; +use sp_core::traits::{CodeExecutor, SpawnNamed}; +use sp_runtime::{ + traits::{Block as BlockT, HashFor}, + BuildStorage, +}; -use super::{call_executor::LocalCallExecutor, client::{Client, ClientConfig}}; +use super::{ + call_executor::LocalCallExecutor, + client::{Client, ClientConfig}, +}; use sc_client_api::light::Storage as BlockchainStorage; use sc_light::{Backend, GenesisCallExecutor}; @@ -41,26 +46,26 @@ pub fn new_light( prometheus_registry: Option, telemetry: Option, ) -> ClientResult< - Client< + Client< + Backend>, + GenesisCallExecutor< Backend>, - GenesisCallExecutor< - Backend>, - LocalCallExecutor>, E> - >, - B, - RA - > - > - where - B: BlockT, - S: BlockchainStorage + 'static, - E: CodeExecutor + RuntimeInfo + Clone + 'static, + LocalCallExecutor>, E>, + >, + B, + RA, + >, +> +where + B: BlockT, + S: BlockchainStorage + 'static, + E: CodeExecutor + RuntimeInfo + Clone + 'static, { let local_executor = LocalCallExecutor::new( backend.clone(), code_executor, spawn_handle.clone(), - ClientConfig::default() + ClientConfig::default(), )?; let executor = GenesisCallExecutor::new(backend.clone(), local_executor); Client::new( diff --git a/client/service/src/client/mod.rs b/client/service/src/client/mod.rs index dd0b70b551bf4..754309e864ebd 100644 --- a/client/service/src/client/mod.rs +++ b/client/service/src/client/mod.rs @@ -45,11 +45,11 @@ //! the ways in which the runtime can interface with the outside. Any code that builds a `Client` //! is responsible for putting the right marker. 
-pub mod genesis; -pub mod light; +mod block_rules; mod call_executor; mod client; -mod block_rules; +pub mod genesis; +pub mod light; mod wasm_override; mod wasm_substitutes; @@ -58,5 +58,5 @@ pub use self::{ client::{Client, ClientConfig}, }; -#[cfg(feature="test-helpers")] -pub use self::client::{new_with_backend, new_in_mem}; +#[cfg(feature = "test-helpers")] +pub use self::client::{new_in_mem, new_with_backend}; diff --git a/client/service/src/client/wasm_override.rs b/client/service/src/client/wasm_override.rs index 06a719c346ca6..83c0fff3b5894 100644 --- a/client/service/src/client/wasm_override.rs +++ b/client/service/src/client/wasm_override.rs @@ -36,17 +36,17 @@ //! required to overrides multiple runtimes, multiple WASM blobs matching each of the spec versions //! needed must be provided in the given directory. //! +use sc_executor::RuntimeInfo; +use sp_blockchain::Result; +use sp_core::traits::{FetchRuntimeCode, RuntimeCode}; +use sp_state_machine::BasicExternalities; +use sp_version::RuntimeVersion; use std::{ - fs, collections::{HashMap, hash_map::DefaultHasher}, - path::{Path, PathBuf}, + collections::{hash_map::DefaultHasher, HashMap}, + fs, hash::Hasher as _, + path::{Path, PathBuf}, }; -use sp_core::traits::FetchRuntimeCode; -use sp_state_machine::BasicExternalities; -use sp_blockchain::Result; -use sc_executor::RuntimeInfo; -use sp_version::RuntimeVersion; -use sp_core::traits::RuntimeCode; #[derive(Clone, Debug, PartialEq)] /// Auxiliary structure that holds a wasm blob and its hash. @@ -62,11 +62,7 @@ impl WasmBlob { } fn runtime_code(&self, heap_pages: Option) -> RuntimeCode { - RuntimeCode { - code_fetcher: self, - hash: self.hash.clone(), - heap_pages, - } + RuntimeCode { code_fetcher: self, hash: self.hash.clone(), heap_pages } } } @@ -117,7 +113,7 @@ pub struct WasmOverride { impl WasmOverride where - E: RuntimeInfo + Clone + 'static + E: RuntimeInfo + Clone + 'static, { pub fn new

(path: P, executor: E) -> Result where @@ -130,26 +126,19 @@ where /// Gets an override by it's runtime spec version. /// /// Returns `None` if an override for a spec version does not exist. - pub fn get<'a, 'b: 'a>( - &'b self, - spec: &u32, - pages: Option, - ) -> Option> { - self.overrides - .get(spec) - .map(|w| w.runtime_code(pages)) + pub fn get<'a, 'b: 'a>(&'b self, spec: &u32, pages: Option) -> Option> { + self.overrides.get(spec).map(|w| w.runtime_code(pages)) } /// Scrapes a folder for WASM runtimes. /// Returns a hashmap of the runtime version and wasm runtime code. fn scrape_overrides(dir: &Path, executor: &E) -> Result> { - - let handle_err = |e: std::io::Error | -> sp_blockchain::Error { + let handle_err = |e: std::io::Error| -> sp_blockchain::Error { WasmOverrideError::Io(dir.to_owned(), e).into() }; if !dir.is_dir() { - return Err(WasmOverrideError::NotADirectory(dir.to_owned()).into()); + return Err(WasmOverrideError::NotADirectory(dir.to_owned()).into()) } let mut overrides = HashMap::new(); @@ -176,13 +165,13 @@ where ); duplicates.push(format!("{}", path.display())); } - } - _ => () + }, + _ => (), } } if !duplicates.is_empty() { - return Err(WasmOverrideError::DuplicateRuntime(duplicates).into()); + return Err(WasmOverrideError::DuplicateRuntime(duplicates).into()) } Ok(overrides) @@ -194,7 +183,8 @@ where heap_pages: Option, ) -> Result { let mut ext = BasicExternalities::default(); - executor.runtime_version(&mut ext, &code.runtime_code(heap_pages)) + executor + .runtime_version(&mut ext, &code.runtime_code(heap_pages)) .map_err(|e| WasmOverrideError::VersionInvalid(format!("{:?}", e)).into()) } } @@ -203,28 +193,25 @@ where #[cfg(test)] pub fn dummy_overrides(executor: &E) -> WasmOverride where - E: RuntimeInfo + Clone + 'static + E: RuntimeInfo + Clone + 'static, { let mut overrides = HashMap::new(); overrides.insert(0, WasmBlob::new(vec![0, 0, 0, 0, 0, 0, 0, 0])); overrides.insert(1, WasmBlob::new(vec![1, 1, 1, 1, 1, 1, 1, 1])); 
overrides.insert(2, WasmBlob::new(vec![2, 2, 2, 2, 2, 2, 2, 2])); - WasmOverride { - overrides, - executor: executor.clone() - } + WasmOverride { overrides, executor: executor.clone() } } #[cfg(test)] mod tests { use super::*; use sc_executor::{NativeExecutor, WasmExecutionMethod}; - use substrate_test_runtime_client::LocalExecutor; use std::fs::{self, File}; + use substrate_test_runtime_client::LocalExecutor; fn wasm_test(fun: F) where - F: Fn(&Path, &[u8], &NativeExecutor::) + F: Fn(&Path, &[u8], &NativeExecutor), { let exec = NativeExecutor::::new( WasmExecutionMethod::Interpreted, @@ -252,8 +239,8 @@ mod tests { fn should_scrape_wasm() { wasm_test(|dir, wasm_bytes, exec| { fs::write(dir.join("test.wasm"), wasm_bytes).expect("Create test file"); - let overrides = WasmOverride::scrape_overrides(dir, exec) - .expect("HashMap of u32 and WasmBlob"); + let overrides = + WasmOverride::scrape_overrides(dir, exec).expect("HashMap of u32 and WasmBlob"); let wasm = overrides.get(&2).expect("WASM binary"); assert_eq!(wasm.code, substrate_test_runtime::wasm_binary_unwrap().to_vec()) }); @@ -272,10 +259,10 @@ mod tests { Some(WasmOverrideError::DuplicateRuntime(duplicates)) => { assert_eq!(duplicates.len(), 1); }, - _ => panic!("Test should end with Msg Error Variant") + _ => panic!("Test should end with Msg Error Variant"), } }, - _ => panic!("Test should end in error") + _ => panic!("Test should end in error"), } }); } @@ -286,8 +273,8 @@ mod tests { File::create(dir.join("README.md")).expect("Create test file"); File::create(dir.join("LICENSE")).expect("Create a test file"); fs::write(dir.join("test0.wasm"), wasm_bytes).expect("Create test file"); - let scraped = WasmOverride::scrape_overrides(dir, exec) - .expect("HashMap of u32 and WasmBlob"); + let scraped = + WasmOverride::scrape_overrides(dir, exec).expect("HashMap of u32 and WasmBlob"); assert_eq!(scraped.len(), 1); }); } diff --git a/client/service/src/client/wasm_substitutes.rs 
b/client/service/src/client/wasm_substitutes.rs index e947e4566f332..ac48059fc2f37 100644 --- a/client/service/src/client/wasm_substitutes.rs +++ b/client/service/src/client/wasm_substitutes.rs @@ -18,15 +18,22 @@ //! # WASM substitutes -use std::{collections::{HashMap, hash_map::DefaultHasher}, hash::Hasher as _, sync::Arc}; +use parking_lot::RwLock; +use sc_client_api::backend; +use sc_executor::RuntimeInfo; +use sp_blockchain::{HeaderBackend, Result}; use sp_core::traits::{FetchRuntimeCode, RuntimeCode}; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, NumberFor}, +}; use sp_state_machine::BasicExternalities; -use sp_blockchain::{Result, HeaderBackend}; -use sc_executor::RuntimeInfo; use sp_version::RuntimeVersion; -use sc_client_api::backend; -use sp_runtime::{traits::{NumberFor, Block as BlockT}, generic::BlockId}; -use parking_lot::RwLock; +use std::{ + collections::{hash_map::DefaultHasher, HashMap}, + hash::Hasher as _, + sync::Arc, +}; /// A wasm substitute for the on chain wasm. #[derive(Debug)] @@ -51,11 +58,7 @@ impl WasmSubstitute { } fn runtime_code(&self, heap_pages: Option) -> RuntimeCode { - RuntimeCode { - code_fetcher: self, - hash: self.hash.clone(), - heap_pages, - } + RuntimeCode { code_fetcher: self, hash: self.hash.clone(), heap_pages } } /// Returns `true` when the substitute matches for the given `block_id`. 
@@ -82,7 +85,8 @@ impl WasmSubstitute { block_number }; - let requested_block_number = backend.blockchain().block_number_from_id(&block_id).ok().flatten(); + let requested_block_number = + backend.blockchain().block_number_from_id(&block_id).ok().flatten(); Some(block_number) <= requested_block_number } @@ -145,11 +149,14 @@ where executor: Executor, backend: Arc, ) -> Result { - let substitutes = substitutes.into_iter().map(|(parent_block_hash, code)| { - let substitute = WasmSubstitute::new(code, parent_block_hash, &*backend)?; - let version = Self::runtime_version(&executor, &substitute)?; - Ok((version.spec_version, substitute)) - }).collect::>>()?; + let substitutes = substitutes + .into_iter() + .map(|(parent_block_hash, code)| { + let substitute = WasmSubstitute::new(code, parent_block_hash, &*backend)?; + let version = Self::runtime_version(&executor, &substitute)?; + Ok((version.spec_version, substitute)) + }) + .collect::>>()?; Ok(Self { executor, substitutes: Arc::new(substitutes), backend }) } @@ -172,8 +179,8 @@ where code: &WasmSubstitute, ) -> Result { let mut ext = BasicExternalities::default(); - executor.runtime_version(&mut ext, &code.runtime_code(None)) + executor + .runtime_version(&mut ext, &code.runtime_code(None)) .map_err(|e| WasmSubstituteError::VersionInvalid(format!("{:?}", e)).into()) } } - diff --git a/client/service/src/config.rs b/client/service/src/config.rs index be14b4e322e76..c915978f5384e 100644 --- a/client/service/src/config.rs +++ b/client/service/src/config.rs @@ -18,25 +18,34 @@ //! Service configuration. 
+pub use sc_client_api::execution_extensions::{ExecutionStrategies, ExecutionStrategy}; pub use sc_client_db::{ - Database, PruningMode, DatabaseSettingsSrc as DatabaseConfig, - KeepBlocks, TransactionStorageMode -}; -pub use sc_network::Multiaddr; -pub use sc_network::config::{ - ExtTransport, MultiaddrWithPeerId, NetworkConfiguration, Role, NodeKeyConfig, - SetConfig, NonDefaultSetConfig, TransportConfig, - RequestResponseConfig, IncomingRequest, OutgoingResponse, + Database, DatabaseSettingsSrc as DatabaseConfig, KeepBlocks, PruningMode, + TransactionStorageMode, }; pub use sc_executor::WasmExecutionMethod; -pub use sc_client_api::execution_extensions::{ExecutionStrategies, ExecutionStrategy}; +pub use sc_network::{ + config::{ + ExtTransport, IncomingRequest, MultiaddrWithPeerId, NetworkConfiguration, NodeKeyConfig, + NonDefaultSetConfig, OutgoingResponse, RequestResponseConfig, Role, SetConfig, + TransportConfig, + }, + Multiaddr, +}; -use std::{io, future::Future, path::{PathBuf, Path}, pin::Pin, net::SocketAddr, sync::Arc}; -pub use sc_transaction_pool::Options as TransactionPoolOptions; +use prometheus_endpoint::Registry; use sc_chain_spec::ChainSpec; -use sp_core::crypto::SecretString; pub use sc_telemetry::TelemetryEndpoints; -use prometheus_endpoint::Registry; +pub use sc_transaction_pool::Options as TransactionPoolOptions; +use sp_core::crypto::SecretString; +use std::{ + future::Future, + io, + net::SocketAddr, + path::{Path, PathBuf}, + pin::Pin, + sync::Arc, +}; #[cfg(not(target_os = "unknown"))] use tempfile::TempDir; @@ -153,7 +162,7 @@ pub enum KeystoreConfig { /// The path of the keystore. path: PathBuf, /// Node keystore's password. - password: Option + password: Option, }, /// In-memory keystore. Recommended for in-browser nodes. 
InMemory, @@ -194,7 +203,7 @@ impl PrometheusConfig { Self { port, registry: Registry::new_custom(Some("substrate".into()), None) - .expect("this can only fail if the prefix is empty") + .expect("this can only fail if the prefix is empty"), } } } @@ -215,11 +224,13 @@ impl Configuration { let protocol_id_full = match self.chain_spec.protocol_id() { Some(pid) => pid, None => { - log::warn!("Using default protocol ID {:?} because none is configured in the \ - chain specs", crate::DEFAULT_PROTOCOL_ID + log::warn!( + "Using default protocol ID {:?} because none is configured in the \ + chain specs", + crate::DEFAULT_PROTOCOL_ID ); crate::DEFAULT_PROTOCOL_ID - } + }, }; sc_network::config::ProtocolId::from(protocol_id_full) } @@ -261,9 +272,7 @@ impl BasePath { /// instance is dropped. #[cfg(not(target_os = "unknown"))] pub fn new_temp_dir() -> io::Result { - Ok(BasePath::Temporary( - tempfile::Builder::new().prefix("substrate").tempdir()?, - )) + Ok(BasePath::Temporary(tempfile::Builder::new().prefix("substrate").tempdir()?)) } /// Create a `BasePath` instance based on an existing path on disk. diff --git a/client/service/src/error.rs b/client/service/src/error.rs index 9c653219ca130..1acd33ead6777 100644 --- a/client/service/src/error.rs +++ b/client/service/src/error.rs @@ -18,10 +18,10 @@ //! Errors that can occur during the service operation. -use sc_network; use sc_keystore; -use sp_consensus; +use sc_network; use sp_blockchain; +use sp_consensus; /// Service Result typedef. 
pub type Result = std::result::Result; diff --git a/client/service/src/lib.rs b/client/service/src/lib.rs index 40cb1aeea6a9e..5d7c490db6abf 100644 --- a/client/service/src/lib.rs +++ b/client/service/src/lib.rs @@ -22,65 +22,62 @@ #![warn(missing_docs)] #![recursion_limit = "1024"] -pub mod config; pub mod chain_ops; +pub mod config; pub mod error; -mod metrics; mod builder; #[cfg(feature = "test-helpers")] pub mod client; #[cfg(not(feature = "test-helpers"))] mod client; +mod metrics; mod task_manager; -use std::{io, pin::Pin}; -use std::net::SocketAddr; -use std::collections::HashMap; -use std::task::Poll; +use std::{collections::HashMap, io, net::SocketAddr, pin::Pin, task::Poll}; -use futures::{Future, FutureExt, Stream, StreamExt, stream, compat::*}; -use sc_network::PeerId; -use log::{warn, debug, error}; -use codec::{Encode, Decode}; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT}; +use codec::{Decode, Encode}; +use futures::{compat::*, stream, Future, FutureExt, Stream, StreamExt}; +use log::{debug, error, warn}; use parity_util_mem::MallocSizeOf; +use sc_network::PeerId; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT}, +}; use sp_utils::mpsc::TracingUnboundedReceiver; -pub use self::error::Error; -pub use self::builder::{ - new_full_client, new_db_backend, new_client, new_full_parts, new_light_parts, - spawn_tasks, build_network, build_offchain_workers, - BuildNetworkParams, KeystoreContainer, NetworkStarter, SpawnTasksParams, TFullClient, TLightClient, - TFullBackend, TLightBackend, TLightBackendWithHash, TLightClientWithBackend, - TFullCallExecutor, TLightCallExecutor, RpcExtensionBuilder, NoopRpcExtensionBuilder, +pub use self::{ + builder::{ + build_network, build_offchain_workers, new_client, new_db_backend, new_full_client, + new_full_parts, new_light_parts, spawn_tasks, BuildNetworkParams, KeystoreContainer, + NetworkStarter, NoopRpcExtensionBuilder, 
RpcExtensionBuilder, SpawnTasksParams, + TFullBackend, TFullCallExecutor, TFullClient, TLightBackend, TLightBackendWithHash, + TLightCallExecutor, TLightClient, TLightClientWithBackend, + }, + client::{ClientConfig, LocalCallExecutor}, + error::Error, }; pub use config::{ - BasePath, Configuration, DatabaseConfig, PruningMode, Role, RpcMethods, TaskExecutor, TaskType, - KeepBlocks, TransactionStorageMode, + BasePath, Configuration, DatabaseConfig, KeepBlocks, PruningMode, Role, RpcMethods, + TaskExecutor, TaskType, TransactionStorageMode, }; pub use sc_chain_spec::{ - ChainSpec, GenericChainSpec, Properties, RuntimeGenesis, Extension as ChainSpecExtension, - NoExtension, ChainType, + ChainSpec, ChainType, Extension as ChainSpecExtension, GenericChainSpec, NoExtension, + Properties, RuntimeGenesis, }; -pub use sc_transaction_pool_api::{TransactionPool, InPoolTransaction, error::IntoPoolError}; -pub use sc_transaction_pool::Options as TransactionPoolOptions; -pub use sc_rpc::Metadata as RpcMetadata; +use sc_client_api::{blockchain::HeaderBackend, BlockchainEvents}; pub use sc_executor::NativeExecutionDispatch; #[doc(hidden)] -pub use std::{ops::Deref, result::Result, sync::Arc}; -#[doc(hidden)] -pub use sc_network::config::{ - OnDemand, TransactionImport, - TransactionImportFuture, -}; +pub use sc_network::config::{OnDemand, TransactionImport, TransactionImportFuture}; +pub use sc_rpc::Metadata as RpcMetadata; pub use sc_tracing::TracingReceiver; -pub use task_manager::SpawnTaskHandle; -pub use task_manager::TaskManager; +pub use sc_transaction_pool::Options as TransactionPoolOptions; +pub use sc_transaction_pool_api::{error::IntoPoolError, InPoolTransaction, TransactionPool}; pub use sp_consensus::import_queue::ImportQueue; -pub use self::client::{LocalCallExecutor, ClientConfig}; -use sc_client_api::{blockchain::HeaderBackend, BlockchainEvents}; +#[doc(hidden)] +pub use std::{ops::Deref, result::Result, sync::Arc}; +pub use task_manager::{SpawnTaskHandle, 
TaskManager}; const DEFAULT_PROTOCOL_ID: &str = "sup"; @@ -96,7 +93,9 @@ impl MallocSizeOfWasm for T {} /// RPC handlers that can perform RPC queries. #[derive(Clone)] -pub struct RpcHandlers(Arc>); +pub struct RpcHandlers( + Arc>, +); impl RpcHandlers { /// Starts an RPC query. @@ -108,17 +107,22 @@ impl RpcHandlers { /// /// If the request subscribes you to events, the `Sender` in the `RpcSession` object is used to /// send back spontaneous events. - pub fn rpc_query(&self, mem: &RpcSession, request: &str) - -> Pin> + Send>> { - self.0.handle_request(request, mem.metadata.clone()) + pub fn rpc_query( + &self, + mem: &RpcSession, + request: &str, + ) -> Pin> + Send>> { + self.0 + .handle_request(request, mem.metadata.clone()) .compat() .map(|res| res.expect("this should never fail")) .boxed() } /// Provides access to the underlying `MetaIoHandler` - pub fn io_handler(&self) - -> Arc> { + pub fn io_handler( + &self, + ) -> Arc> { self.0.clone() } } @@ -149,8 +153,8 @@ pub struct PartialComponents + HeaderBackend, - H: sc_network::ExHashT -> ( + H: sc_network::ExHashT, +>( role: Role, mut network: sc_network::NetworkWorker, client: Arc, @@ -171,7 +175,9 @@ async fn build_network_future< // ready. This way, we only get the latest finalized block. stream::poll_fn(move |cx| { let mut last = None; - while let Poll::Ready(Some(item)) = Pin::new(&mut finality_notification_stream).poll_next(cx) { + while let Poll::Ready(Some(item)) = + Pin::new(&mut finality_notification_stream).poll_next(cx) + { last = Some(item); } if let Some(last) = last { @@ -179,11 +185,12 @@ async fn build_network_future< } else { Poll::Pending } - }).fuse() + }) + .fuse() }; loop { - futures::select!{ + futures::select! { // List of blocks that the client has imported. notification = imported_blocks_stream.next() => { let notification = match notification { @@ -338,79 +345,90 @@ mod waiting { /// Starts RPC servers that run in their own thread, and returns an opaque object that keeps them alive. 
#[cfg(not(target_os = "unknown"))] fn start_rpc_servers< - H: FnMut(sc_rpc::DenyUnsafe, sc_rpc_server::RpcMiddleware) - -> sc_rpc_server::RpcHandler + H: FnMut( + sc_rpc::DenyUnsafe, + sc_rpc_server::RpcMiddleware, + ) -> sc_rpc_server::RpcHandler, >( config: &Configuration, mut gen_handler: H, rpc_metrics: sc_rpc_server::RpcMetrics, ) -> Result, error::Error> { - fn maybe_start_server(address: Option, mut start: F) -> Result, io::Error> - where F: FnMut(&SocketAddr) -> Result, - { - address.map(|mut address| start(&address) - .or_else(|e| match e.kind() { - io::ErrorKind::AddrInUse | - io::ErrorKind::PermissionDenied => { + fn maybe_start_server( + address: Option, + mut start: F, + ) -> Result, io::Error> + where + F: FnMut(&SocketAddr) -> Result, + { + address + .map(|mut address| { + start(&address).or_else(|e| match e.kind() { + io::ErrorKind::AddrInUse | io::ErrorKind::PermissionDenied => { warn!("Unable to bind RPC server to {}. Trying random port.", address); address.set_port(0); start(&address) }, _ => Err(e), - } - ) ).transpose() - } + }) + }) + .transpose() + } fn deny_unsafe(addr: &SocketAddr, methods: &RpcMethods) -> sc_rpc::DenyUnsafe { let is_exposed_addr = !addr.ip().is_loopback(); match (is_exposed_addr, methods) { - | (_, RpcMethods::Unsafe) - | (false, RpcMethods::Auto) => sc_rpc::DenyUnsafe::No, - _ => sc_rpc::DenyUnsafe::Yes + | (_, RpcMethods::Unsafe) | (false, RpcMethods::Auto) => sc_rpc::DenyUnsafe::No, + _ => sc_rpc::DenyUnsafe::Yes, } } Ok(Box::new(( - config.rpc_ipc.as_ref().map(|path| sc_rpc_server::start_ipc( - &*path, gen_handler( - sc_rpc::DenyUnsafe::No, - sc_rpc_server::RpcMiddleware::new(rpc_metrics.clone(), "ipc") + config.rpc_ipc.as_ref().map(|path| { + sc_rpc_server::start_ipc( + &*path, + gen_handler( + sc_rpc::DenyUnsafe::No, + sc_rpc_server::RpcMiddleware::new(rpc_metrics.clone(), "ipc"), + ), ) - )), - maybe_start_server( - config.rpc_http, - |address| sc_rpc_server::start_http( + }), + maybe_start_server(config.rpc_http, 
|address| { + sc_rpc_server::start_http( address, config.rpc_http_threads, config.rpc_cors.as_ref(), gen_handler( deny_unsafe(&address, &config.rpc_methods), - sc_rpc_server::RpcMiddleware::new(rpc_metrics.clone(), "http") + sc_rpc_server::RpcMiddleware::new(rpc_metrics.clone(), "http"), ), - config.rpc_max_payload - ), - )?.map(|s| waiting::HttpServer(Some(s))), - maybe_start_server( - config.rpc_ws, - |address| sc_rpc_server::start_ws( + config.rpc_max_payload, + ) + })? + .map(|s| waiting::HttpServer(Some(s))), + maybe_start_server(config.rpc_ws, |address| { + sc_rpc_server::start_ws( address, config.rpc_ws_max_connections, config.rpc_cors.as_ref(), gen_handler( deny_unsafe(&address, &config.rpc_methods), - sc_rpc_server::RpcMiddleware::new(rpc_metrics.clone(), "ws") + sc_rpc_server::RpcMiddleware::new(rpc_metrics.clone(), "ws"), ), - config.rpc_max_payload - ), - )?.map(|s| waiting::WsServer(Some(s))), + config.rpc_max_payload, + ) + })? + .map(|s| waiting::WsServer(Some(s))), ))) } /// Starts RPC servers that run in their own thread, and returns an opaque object that keeps them alive. #[cfg(target_os = "unknown")] fn start_rpc_servers< - H: FnMut(sc_rpc::DenyUnsafe, sc_rpc_server::RpcMiddleware) - -> sc_rpc_server::RpcHandler + H: FnMut( + sc_rpc::DenyUnsafe, + sc_rpc_server::RpcMiddleware, + ) -> sc_rpc_server::RpcHandler, >( _: &Configuration, _: H, @@ -434,9 +452,7 @@ impl RpcSession { /// /// The `RpcSession` must be kept alive in order to receive messages on the sender. pub fn new(sender: futures01::sync::mpsc::Sender) -> RpcSession { - RpcSession { - metadata: sender.into(), - } + RpcSession { metadata: sender.into() } } } @@ -450,10 +466,9 @@ pub struct TransactionPoolAdapter { /// Get transactions for propagation. /// /// Function extracted to simplify the test and prevent creating `ServiceFactory`. 
-fn transactions_to_propagate(pool: &Pool) - -> Vec<(H, B::Extrinsic)> +fn transactions_to_propagate(pool: &Pool) -> Vec<(H, B::Extrinsic)> where - Pool: TransactionPool, + Pool: TransactionPool, B: BlockT, H: std::hash::Hash + Eq + sp_runtime::traits::Member + sp_runtime::traits::MaybeSerialize, E: IntoPoolError + From, @@ -468,11 +483,10 @@ where .collect() } -impl sc_network::config::TransactionPool for - TransactionPoolAdapter +impl sc_network::config::TransactionPool for TransactionPoolAdapter where C: sc_network::config::Client + Send + Sync, - Pool: 'static + TransactionPool, + Pool: 'static + TransactionPool, B: BlockT, H: std::hash::Hash + Eq + sp_runtime::traits::Member + sp_runtime::traits::MaybeSerialize, E: 'static + IntoPoolError + From, @@ -485,10 +499,7 @@ where self.pool.hash_of(transaction) } - fn import( - &self, - transaction: B::Extrinsic, - ) -> TransactionImportFuture { + fn import(&self, transaction: B::Extrinsic) -> TransactionImportFuture { if !self.imports_external_transactions { debug!("Transaction rejected"); Box::pin(futures::future::ready(TransactionImport::None)); @@ -499,28 +510,33 @@ where Ok(uxt) => uxt, Err(e) => { debug!("Transaction invalid: {:?}", e); - return Box::pin(futures::future::ready(TransactionImport::Bad)); - } + return Box::pin(futures::future::ready(TransactionImport::Bad)) + }, }; let best_block_id = BlockId::hash(self.client.info().best_hash); - let import_future = self.pool.submit_one(&best_block_id, sc_transaction_pool_api::TransactionSource::External, uxt); + let import_future = self.pool.submit_one( + &best_block_id, + sc_transaction_pool_api::TransactionSource::External, + uxt, + ); Box::pin(async move { match import_future.await { Ok(_) => TransactionImport::NewGood, Err(e) => match e.into_pool_error() { - Ok(sc_transaction_pool_api::error::Error::AlreadyImported(_)) => TransactionImport::KnownGood, + Ok(sc_transaction_pool_api::error::Error::AlreadyImported(_)) => + TransactionImport::KnownGood, Ok(e) => { 
debug!("Error adding transaction to the pool: {:?}", e); TransactionImport::Bad - } + }, Err(e) => { debug!("Error converting pool error: {:?}", e); // it is not bad at least, just some internal node logic error, so peer is innocent. TransactionImport::KnownGood - } - } + }, + }, } }) } @@ -530,11 +546,10 @@ where } fn transaction(&self, hash: &H) -> Option { - self.pool.ready_transaction(hash) - .and_then( - // Only propagable transactions should be resolved for network service. - |tx| if tx.is_propagable() { Some(tx.data().clone()) } else { None } - ) + self.pool.ready_transaction(hash).and_then( + // Only propagable transactions should be resolved for network service. + |tx| if tx.is_propagable() { Some(tx.data().clone()) } else { None }, + ) } } @@ -542,10 +557,13 @@ where mod tests { use super::*; use futures::executor::block_on; + use sc_transaction_pool::BasicPool; use sp_consensus::SelectChain; use sp_runtime::traits::BlindCheckable; - use substrate_test_runtime_client::{prelude::*, runtime::{Extrinsic, Transfer}}; - use sc_transaction_pool::BasicPool; + use substrate_test_runtime_client::{ + prelude::*, + runtime::{Extrinsic, Transfer}, + }; #[test] fn should_not_propagate_transactions_that_are_marked_as_such() { @@ -553,13 +571,8 @@ mod tests { let (client, longest_chain) = TestClientBuilder::new().build_with_longest_chain(); let client = Arc::new(client); let spawner = sp_core::testing::TaskExecutor::new(); - let pool = BasicPool::new_full( - Default::default(), - true.into(), - None, - spawner, - client.clone(), - ); + let pool = + BasicPool::new_full(Default::default(), true.into(), None, spawner, client.clone()); let source = sp_runtime::transaction_validity::TransactionSource::External; let best = block_on(longest_chain.best_chain()).unwrap(); let transaction = Transfer { @@ -569,12 +582,14 @@ mod tests { to: Default::default(), } .into_signed_tx(); + block_on(pool.submit_one(&BlockId::hash(best.hash()), source, transaction.clone())) + .unwrap(); 
block_on(pool.submit_one( - &BlockId::hash(best.hash()), source, transaction.clone()), - ).unwrap(); - block_on(pool.submit_one( - &BlockId::hash(best.hash()), source, Extrinsic::IncludeData(vec![1])), - ).unwrap(); + &BlockId::hash(best.hash()), + source, + Extrinsic::IncludeData(vec![1]), + )) + .unwrap(); assert_eq!(pool.status().ready, 2); // when diff --git a/client/service/src/metrics.rs b/client/service/src/metrics.rs index 7c74b327ea260..cd03916c9261b 100644 --- a/client/service/src/metrics.rs +++ b/client/service/src/metrics.rs @@ -20,16 +20,15 @@ use std::{convert::TryFrom, time::SystemTime}; use crate::config::Configuration; use futures_timer::Delay; -use prometheus_endpoint::{register, Gauge, U64, Registry, PrometheusError, Opts, GaugeVec}; +use prometheus_endpoint::{register, Gauge, GaugeVec, Opts, PrometheusError, Registry, U64}; +use sc_client_api::{ClientInfo, UsageProvider}; +use sc_network::{config::Role, NetworkService, NetworkStatus}; use sc_telemetry::{telemetry, TelemetryHandle, SUBSTRATE_INFO}; +use sc_transaction_pool_api::{MaintainedTransactionPool, PoolStatus}; use sp_api::ProvideRuntimeApi; -use sp_runtime::traits::{NumberFor, Block, SaturatedConversion, UniqueSaturatedInto}; -use sc_transaction_pool_api::{PoolStatus, MaintainedTransactionPool}; +use sp_runtime::traits::{Block, NumberFor, SaturatedConversion, UniqueSaturatedInto}; use sp_utils::metrics::register_globals; -use sc_client_api::{ClientInfo, UsageProvider}; -use sc_network::{config::Role, NetworkStatus, NetworkService}; -use std::sync::Arc; -use std::time::Duration; +use std::{sync::Arc, time::Duration}; use wasm_timer::Instant; struct PrometheusMetrics { @@ -51,54 +50,74 @@ impl PrometheusMetrics { version: &str, roles: u64, ) -> Result { - register(Gauge::::with_opts( - Opts::new( - "build_info", - "A metric with a constant '1' value labeled by name, version" - ) + register( + Gauge::::with_opts( + Opts::new( + "build_info", + "A metric with a constant '1' value labeled by 
name, version", + ) .const_label("name", name) - .const_label("version", version) - )?, ®istry)?.set(1); + .const_label("version", version), + )?, + ®istry, + )? + .set(1); - register(Gauge::::new( - "node_roles", "The roles the node is running as", - )?, ®istry)?.set(roles); + register(Gauge::::new("node_roles", "The roles the node is running as")?, ®istry)? + .set(roles); register_globals(registry)?; - let start_time_since_epoch = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) - .unwrap_or_default(); - register(Gauge::::new( - "process_start_time_seconds", - "Number of seconds between the UNIX epoch and the moment the process started", - )?, registry)?.set(start_time_since_epoch.as_secs()); + let start_time_since_epoch = + SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).unwrap_or_default(); + register( + Gauge::::new( + "process_start_time_seconds", + "Number of seconds between the UNIX epoch and the moment the process started", + )?, + registry, + )? + .set(start_time_since_epoch.as_secs()); Ok(Self { // generic internals - block_height: register(GaugeVec::new( - Opts::new("block_height", "Block height info of the chain"), - &["status"] - )?, registry)?, - - number_leaves: register(Gauge::new( - "number_leaves", "Number of known chain leaves (aka forks)", - )?, registry)?, - - ready_transactions_number: register(Gauge::new( - "ready_transactions_number", "Number of transactions in the ready queue", - )?, registry)?, + block_height: register( + GaugeVec::new( + Opts::new("block_height", "Block height info of the chain"), + &["status"], + )?, + registry, + )?, + + number_leaves: register( + Gauge::new("number_leaves", "Number of known chain leaves (aka forks)")?, + registry, + )?, + + ready_transactions_number: register( + Gauge::new( + "ready_transactions_number", + "Number of transactions in the ready queue", + )?, + registry, + )?, // I/ O - database_cache: register(Gauge::new( - "database_cache_bytes", "RocksDB cache size in bytes", - )?, 
registry)?, - state_cache: register(Gauge::new( - "state_cache_bytes", "State cache size in bytes", - )?, registry)?, - state_db: register(GaugeVec::new( - Opts::new("state_db_cache_bytes", "State DB cache in bytes"), - &["subtype"] - )?, registry)?, + database_cache: register( + Gauge::new("database_cache_bytes", "RocksDB cache size in bytes")?, + registry, + )?, + state_cache: register( + Gauge::new("state_cache_bytes", "State cache size in bytes")?, + registry, + )?, + state_db: register( + GaugeVec::new( + Opts::new("state_db_cache_bytes", "State DB cache in bytes"), + &["subtype"], + )?, + registry, + )?, }) } } @@ -179,11 +198,7 @@ impl MetricsService { let net_status = network.status().await.ok(); // Update / Send the metrics. - self.update( - &client.usage_info(), - &transactions.status(), - net_status, - ); + self.update(&client.usage_info(), &transactions.status(), net_status); // Schedule next tick. timer.reset(timer_interval); @@ -220,14 +235,8 @@ impl MetricsService { ); if let Some(metrics) = self.metrics.as_ref() { - metrics - .block_height - .with_label_values(&["finalized"]) - .set(finalized_number); - metrics - .block_height - .with_label_values(&["best"]) - .set(best_number); + metrics.block_height.with_label_values(&["finalized"]).set(finalized_number); + metrics.block_height.with_label_values(&["best"]).set(best_number); if let Ok(leaves) = u64::try_from(info.chain.number_leaves) { metrics.number_leaves.set(leaves); @@ -239,15 +248,17 @@ impl MetricsService { metrics.database_cache.set(info.memory.database_cache.as_bytes() as u64); metrics.state_cache.set(info.memory.state_cache.as_bytes() as u64); - metrics.state_db.with_label_values(&["non_canonical"]).set( - info.memory.state_db.non_canonical.as_bytes() as u64, - ); + metrics + .state_db + .with_label_values(&["non_canonical"]) + .set(info.memory.state_db.non_canonical.as_bytes() as u64); if let Some(pruning) = info.memory.state_db.pruning { 
metrics.state_db.with_label_values(&["pruning"]).set(pruning.as_bytes() as u64); } - metrics.state_db.with_label_values(&["pinned"]).set( - info.memory.state_db.pinned.as_bytes() as u64, - ); + metrics + .state_db + .with_label_values(&["pinned"]) + .set(info.memory.state_db.pinned.as_bytes() as u64); } } @@ -259,14 +270,13 @@ impl MetricsService { let diff_bytes_inbound = total_bytes_inbound - self.last_total_bytes_inbound; let diff_bytes_outbound = total_bytes_outbound - self.last_total_bytes_outbound; - let (avg_bytes_per_sec_inbound, avg_bytes_per_sec_outbound) = - if elapsed > 0 { - self.last_total_bytes_inbound = total_bytes_inbound; - self.last_total_bytes_outbound = total_bytes_outbound; - (diff_bytes_inbound / elapsed, diff_bytes_outbound / elapsed) - } else { - (diff_bytes_inbound, diff_bytes_outbound) - }; + let (avg_bytes_per_sec_inbound, avg_bytes_per_sec_outbound) = if elapsed > 0 { + self.last_total_bytes_inbound = total_bytes_inbound; + self.last_total_bytes_outbound = total_bytes_outbound; + (diff_bytes_inbound / elapsed, diff_bytes_outbound / elapsed) + } else { + (diff_bytes_inbound, diff_bytes_outbound) + }; telemetry!( self.telemetry; @@ -278,9 +288,10 @@ impl MetricsService { ); if let Some(metrics) = self.metrics.as_ref() { - let best_seen_block: Option = net_status - .best_seen_block - .map(|num: NumberFor| UniqueSaturatedInto::::unique_saturated_into(num)); + let best_seen_block: Option = + net_status.best_seen_block.map(|num: NumberFor| { + UniqueSaturatedInto::::unique_saturated_into(num) + }); if let Some(best_seen_block) = best_seen_block { metrics.block_height.with_label_values(&["sync_target"]).set(best_seen_block); diff --git a/client/service/src/task_manager/mod.rs b/client/service/src/task_manager/mod.rs index c7254f1f894de..d759798f744b6 100644 --- a/client/service/src/task_manager/mod.rs +++ b/client/service/src/task_manager/mod.rs @@ -18,22 +18,24 @@ //! Substrate service tasks management module. 
-use std::{panic, result::Result, pin::Pin}; +use crate::{ + config::{JoinFuture, TaskExecutor, TaskType}, + Error, +}; use exit_future::Signal; -use log::{debug, error}; use futures::{ - Future, FutureExt, StreamExt, - future::{select, Either, BoxFuture, join_all, try_join_all, pending}, + future::{join_all, pending, select, try_join_all, BoxFuture, Either}, sink::SinkExt, + Future, FutureExt, StreamExt, }; +use log::{debug, error}; use prometheus_endpoint::{ - exponential_buckets, register, - PrometheusError, - CounterVec, HistogramOpts, HistogramVec, Opts, Registry, U64 + exponential_buckets, register, CounterVec, HistogramOpts, HistogramVec, Opts, PrometheusError, + Registry, U64, }; -use sp_utils::mpsc::{TracingUnboundedSender, TracingUnboundedReceiver, tracing_unbounded}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use std::{panic, pin::Pin, result::Result}; use tracing_futures::Instrument; -use crate::{config::{TaskExecutor, TaskType, JoinFuture}, Error}; mod prometheus_future; #[cfg(test)] @@ -62,7 +64,11 @@ impl SpawnTaskHandle { } /// Spawns the blocking task with the given name. See also `spawn`. 
- pub fn spawn_blocking(&self, name: &'static str, task: impl Future + Send + 'static) { + pub fn spawn_blocking( + &self, + name: &'static str, + task: impl Future + Send + 'static, + ) { self.spawn_inner(name, task, TaskType::Blocking) } @@ -75,7 +81,7 @@ impl SpawnTaskHandle { ) { if self.task_notifier.is_closed() { debug!("Attempt to spawn a new task has been prevented: {}", name); - return; + return } let on_exit = self.on_exit.clone(); @@ -95,7 +101,8 @@ impl SpawnTaskHandle { let task = { let poll_duration = metrics.poll_duration.with_label_values(&[name]); let poll_start = metrics.poll_start.with_label_values(&[name]); - let inner = prometheus_future::with_poll_durations(poll_duration, poll_start, task); + let inner = + prometheus_future::with_poll_durations(poll_duration, poll_start, task); // The logic of `AssertUnwindSafe` here is ok considering that we throw // away the `Future` after it has panicked. panic::AssertUnwindSafe(inner).catch_unwind() @@ -106,16 +113,15 @@ impl SpawnTaskHandle { Either::Right((Err(payload), _)) => { metrics.tasks_ended.with_label_values(&[name, "panic"]).inc(); panic::resume_unwind(payload) - } + }, Either::Right((Ok(()), _)) => { metrics.tasks_ended.with_label_values(&[name, "finished"]).inc(); - } + }, Either::Left(((), _)) => { // The `on_exit` has triggered. metrics.tasks_ended.with_label_values(&[name, "interrupted"]).inc(); - } + }, } - } else { futures::pin_mut!(task); let _ = select(on_exit, task).await; @@ -162,10 +168,7 @@ impl SpawnEssentialTaskHandle { essential_failed_tx: TracingUnboundedSender<()>, spawn_task_handle: SpawnTaskHandle, ) -> SpawnEssentialTaskHandle { - SpawnEssentialTaskHandle { - essential_failed_tx, - inner: spawn_task_handle, - } + SpawnEssentialTaskHandle { essential_failed_tx, inner: spawn_task_handle } } /// Spawns the given task with the given name. 
@@ -193,12 +196,10 @@ impl SpawnEssentialTaskHandle { task_type: TaskType, ) { let essential_failed = self.essential_failed_tx.clone(); - let essential_task = std::panic::AssertUnwindSafe(task) - .catch_unwind() - .map(move |_| { - log::error!("Essential task `{}` failed. Shutting down service.", name); - let _ = essential_failed.close_channel(); - }); + let essential_task = std::panic::AssertUnwindSafe(task).catch_unwind().map(move |_| { + log::error!("Essential task `{}` failed. Shutting down service.", name); + let _ = essential_failed.close_channel(); + }); let _ = self.inner.spawn_inner(name, essential_task, task_type); } @@ -260,10 +261,8 @@ impl TaskManager { // NOTE: for_each_concurrent will await on all the JoinHandle futures at the same time. It // is possible to limit this but it's actually better for the memory foot print to await // them all to not accumulate anything on that stream. - let completion_future = executor.spawn( - Box::pin(background_tasks.for_each_concurrent(None, |x| x)), - TaskType::Async, - ); + let completion_future = executor + .spawn(Box::pin(background_tasks.for_each_concurrent(None, |x| x)), TaskType::Async); Ok(Self { on_exit, @@ -323,16 +322,21 @@ impl TaskManager { /// /// This function will not wait until the end of the remaining task. You must call and await /// `clean_shutdown()` after this. - pub fn future<'a>(&'a mut self) -> Pin> + Send + 'a>> { + pub fn future<'a>( + &'a mut self, + ) -> Pin> + Send + 'a>> { Box::pin(async move { let mut t1 = self.essential_failed_rx.next().fuse(); let mut t2 = self.on_exit.clone().fuse(); let mut t3 = try_join_all( - self.children.iter_mut().map(|x| x.future()) + self.children + .iter_mut() + .map(|x| x.future()) // Never end this future if there is no error because if there is no children, // it must not stop - .chain(std::iter::once(pending().boxed())) - ).fuse(); + .chain(std::iter::once(pending().boxed())), + ) + .fuse(); futures::select! 
{ _ = t1 => Err(Error::Other("Essential task failed.".into())), diff --git a/client/service/src/task_manager/prometheus_future.rs b/client/service/src/task_manager/prometheus_future.rs index 6d2a52354d6ca..43a76a0f596c2 100644 --- a/client/service/src/task_manager/prometheus_future.rs +++ b/client/service/src/task_manager/prometheus_future.rs @@ -20,20 +20,20 @@ use futures::prelude::*; use prometheus_endpoint::{Counter, Histogram, U64}; -use std::{fmt, pin::Pin, task::{Context, Poll}}; +use std::{ + fmt, + pin::Pin, + task::{Context, Poll}, +}; /// Wraps around a `Future`. Report the polling duration to the `Histogram` and when the polling /// starts to the `Counter`. pub fn with_poll_durations( poll_duration: Histogram, poll_start: Counter, - inner: T + inner: T, ) -> PrometheusFuture { - PrometheusFuture { - inner, - poll_duration, - poll_start, - } + PrometheusFuture { inner, poll_duration, poll_start } } /// Wraps around `Future` and adds diagnostics to it. diff --git a/client/service/src/task_manager/tests.rs b/client/service/src/task_manager/tests.rs index 09768a19339f2..d8789e556e1e9 100644 --- a/client/service/src/task_manager/tests.rs +++ b/client/service/src/task_manager/tests.rs @@ -16,8 +16,7 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use crate::config::TaskExecutor; -use crate::task_manager::TaskManager; +use crate::{config::TaskExecutor, task_manager::TaskManager}; use futures::{future::FutureExt, pin_mut, select}; use parking_lot::Mutex; use std::{any::Any, sync::Arc, time::Duration}; @@ -205,7 +204,9 @@ fn ensure_task_manager_future_ends_with_error_when_essential_task_fails() { runtime.block_on(async { tokio::time::delay_for(Duration::from_secs(1)).await }); assert_eq!(drop_tester, 2); spawn_essential_handle.spawn("task3", async { panic!("task failed") }); - runtime.block_on(task_manager.future()).expect_err("future()'s Result must be Err"); + runtime + .block_on(task_manager.future()) + .expect_err("future()'s Result must be Err"); assert_eq!(drop_tester, 2); runtime.block_on(task_manager.clean_shutdown()); assert_eq!(drop_tester, 0); @@ -265,7 +266,9 @@ fn ensure_task_manager_future_ends_with_error_when_childs_essential_task_fails() runtime.block_on(async { tokio::time::delay_for(Duration::from_secs(1)).await }); assert_eq!(drop_tester, 4); spawn_essential_handle_child_1.spawn("task5", async { panic!("task failed") }); - runtime.block_on(task_manager.future()).expect_err("future()'s Result must be Err"); + runtime + .block_on(task_manager.future()) + .expect_err("future()'s Result must be Err"); assert_eq!(drop_tester, 4); runtime.block_on(task_manager.clean_shutdown()); assert_eq!(drop_tester, 0); diff --git a/client/service/test/src/client/db.rs b/client/service/test/src/client/db.rs index a86e8f2de467c..5278c9a13a4d7 100644 --- a/client/service/test/src/client/db.rs +++ b/client/service/test/src/client/db.rs @@ -16,7 +16,7 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use sp_core::offchain::{OffchainStorage, storage::InMemOffchainStorage}; +use sp_core::offchain::{storage::InMemOffchainStorage, OffchainStorage}; use std::sync::Arc; type TestBackend = sc_client_api::in_mem::Backend; @@ -32,12 +32,13 @@ fn test_leaves_with_complex_block_tree() { fn test_blockchain_query_by_number_gets_canonical() { let backend = Arc::new(TestBackend::new()); - substrate_test_runtime_client::trait_tests::test_blockchain_query_by_number_gets_canonical(backend); + substrate_test_runtime_client::trait_tests::test_blockchain_query_by_number_gets_canonical( + backend, + ); } #[test] fn in_memory_offchain_storage() { - let mut storage = InMemOffchainStorage::default(); assert_eq!(storage.get(b"A", b"B"), None); assert_eq!(storage.get(b"B", b"A"), None); diff --git a/client/service/test/src/client/light.rs b/client/service/test/src/client/light.rs index 4d620139fa49e..8d1411214d346 100644 --- a/client/service/test/src/client/light.rs +++ b/client/service/test/src/client/light.rs @@ -16,53 +16,52 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
+use super::prepare_client_with_key_changes; +use parity_scale_codec::{Decode, Encode}; +use parking_lot::Mutex; +use sc_block_builder::BlockBuilderProvider; +use sc_client_api::{ + backend::NewBlockState, + blockchain::Info, + cht, + in_mem::{Backend as InMemBackend, Blockchain as InMemoryBlockchain}, + AuxStore, Backend as ClientBackend, BlockBackend, BlockImportOperation, CallExecutor, + ChangesProof, ExecutionStrategy, FetchChecker, ProofProvider, ProvideChtRoots, + RemoteBodyRequest, RemoteCallRequest, RemoteChangesRequest, RemoteHeaderRequest, + RemoteReadChildRequest, RemoteReadRequest, Storage, StorageProof, StorageProvider, +}; +use sc_executor::{NativeExecutor, NativeVersion, RuntimeVersion, WasmExecutionMethod}; use sc_light::{ - call_executor::{ - GenesisCallExecutor, - check_execution_proof, - }, - fetcher::LightDataChecker, - blockchain::{BlockchainCache, Blockchain}, backend::{Backend, GenesisOrUnavailableState}, + blockchain::{Blockchain, BlockchainCache}, + call_executor::{check_execution_proof, GenesisCallExecutor}, + fetcher::LightDataChecker, +}; +use sp_api::{ProofRecorder, StorageTransactionCache}; +use sp_blockchain::{ + well_known_cache_keys, BlockStatus, CachedHeaderMetadata, Error as ClientError, HeaderBackend, + Result as ClientResult, }; -use std::sync::Arc; +use sp_consensus::BlockOrigin; +use sp_core::{testing::TaskExecutor, NativeOrEncoded, H256}; +use sp_externalities::Extensions; use sp_runtime::{ generic::BlockId, traits::{BlakeTwo256, Block as _, HashFor, Header as HeaderT, NumberFor}, Digest, Justifications, }; -use std::collections::HashMap; -use parking_lot::Mutex; +use sp_state_machine::{ExecutionManager, OverlayedChanges}; +use std::{cell::RefCell, collections::HashMap, panic::UnwindSafe, sync::Arc}; use substrate_test_runtime_client::{ - runtime::{Hash, Block, Header}, TestClient, ClientBlockImportExt, -}; -use sp_api::{StorageTransactionCache, ProofRecorder}; -use sp_consensus::BlockOrigin; -use 
sc_executor::{NativeExecutor, WasmExecutionMethod, RuntimeVersion, NativeVersion}; -use sp_core::{H256, NativeOrEncoded, testing::TaskExecutor}; -use sc_client_api::{ - blockchain::Info, backend::NewBlockState, Backend as ClientBackend, ProofProvider, - in_mem::{Backend as InMemBackend, Blockchain as InMemoryBlockchain}, ProvideChtRoots, - AuxStore, Storage, CallExecutor, cht, ExecutionStrategy, StorageProof, BlockImportOperation, - RemoteCallRequest, StorageProvider, ChangesProof, RemoteBodyRequest, RemoteReadRequest, - RemoteChangesRequest, FetchChecker, RemoteReadChildRequest, RemoteHeaderRequest, BlockBackend, -}; -use sp_externalities::Extensions; -use sc_block_builder::BlockBuilderProvider; -use sp_blockchain::{ - BlockStatus, Result as ClientResult, Error as ClientError, CachedHeaderMetadata, - HeaderBackend, well_known_cache_keys -}; -use std::panic::UnwindSafe; -use std::cell::RefCell; -use sp_state_machine::{OverlayedChanges, ExecutionManager}; -use parity_scale_codec::{Decode, Encode}; -use super::prepare_client_with_key_changes; -use substrate_test_runtime_client::{ - AccountKeyring, runtime::{self, Extrinsic}, + runtime::{self, Block, Extrinsic, Hash, Header}, + AccountKeyring, ClientBlockImportExt, TestClient, }; -use sp_core::{blake2_256, ChangesTrieConfiguration, storage::{well_known_keys, StorageKey, ChildInfo}}; +use sp_core::{ + blake2_256, + storage::{well_known_keys, ChildInfo, StorageKey}, + ChangesTrieConfiguration, +}; use sp_state_machine::Backend as _; pub type DummyBlockchain = Blockchain; @@ -115,7 +114,8 @@ impl sp_blockchain::HeaderMetadata for DummyStorage { type Error = ClientError; fn header_metadata(&self, hash: Hash) -> Result, Self::Error> { - self.header(BlockId::hash(hash))?.map(|header| CachedHeaderMetadata::from(&header)) + self.header(BlockId::hash(hash))? 
+ .map(|header| CachedHeaderMetadata::from(&header)) .ok_or(ClientError::UnknownBlock("header not found".to_owned())) } fn insert_header_metadata(&self, _hash: Hash, _metadata: CachedHeaderMetadata) {} @@ -127,9 +127,13 @@ impl AuxStore for DummyStorage { 'a, 'b: 'a, 'c: 'a, - I: IntoIterator, - D: IntoIterator, - >(&self, insert: I, _delete: D) -> ClientResult<()> { + I: IntoIterator, + D: IntoIterator, + >( + &self, + insert: I, + _delete: D, + ) -> ClientResult<()> { for (k, v) in insert.into_iter() { self.aux_store.lock().insert(k.to_vec(), v.to_vec()); } @@ -182,9 +186,10 @@ impl ProvideChtRoots for DummyStorage { cht::block_to_cht_number(cht_size, block) .and_then(|cht_num| self.changes_tries_cht_roots.get(&cht_num)) .cloned() - .ok_or_else(|| ClientError::Backend( - format!("Test error: CHT for block #{} not found", block) - ).into()) + .ok_or_else(|| { + ClientError::Backend(format!("Test error: CHT for block #{} not found", block)) + .into() + }) .map(Some) } } @@ -210,7 +215,7 @@ impl CallExecutor for DummyCallExecutor { fn contextual_call< EM: Fn( Result, Self::Error>, - Result, Self::Error> + Result, Self::Error>, ) -> Result, Self::Error>, R: Encode + Decode + PartialEq, NC: FnOnce() -> Result + UnwindSafe, @@ -220,17 +225,22 @@ impl CallExecutor for DummyCallExecutor { _method: &str, _call_data: &[u8], _changes: &RefCell, - _storage_transaction_cache: Option<&RefCell< - StorageTransactionCache< - Block, - >::State, - > - >>, + _storage_transaction_cache: Option< + &RefCell< + StorageTransactionCache< + Block, + >::State, + >, + >, + >, _execution_manager: ExecutionManager, _native_call: Option, _proof_recorder: &Option>, _extensions: Option, - ) -> ClientResult> where ExecutionManager: Clone { + ) -> ClientResult> + where + ExecutionManager: Clone, + { unreachable!() } @@ -243,7 +253,7 @@ impl CallExecutor for DummyCallExecutor { _trie_state: &sp_state_machine::TrieBackend>, _overlay: &mut OverlayedChanges, _method: &str, - _call_data: &[u8] + 
_call_data: &[u8], ) -> Result<(Vec, StorageProof), ClientError> { unreachable!() } @@ -260,11 +270,11 @@ fn local_executor() -> NativeExecutor = Backend::new( - Arc::new(DummyBlockchain::new(DummyStorage::new())), - ); + let backend: Backend<_, BlakeTwo256> = + Backend::new(Arc::new(DummyBlockchain::new(DummyStorage::new()))); let mut op = backend.begin_operation().unwrap(); op.set_block_data(header0, None, None, None, NewBlockState::Final).unwrap(); op.set_genesis_state(Default::default(), true).unwrap(); @@ -278,9 +288,8 @@ fn local_state_is_created_when_genesis_state_is_available() { #[test] fn unavailable_state_is_created_when_genesis_state_is_unavailable() { - let backend: Backend<_, BlakeTwo256> = Backend::new( - Arc::new(DummyBlockchain::new(DummyStorage::new())), - ); + let backend: Backend<_, BlakeTwo256> = + Backend::new(Arc::new(DummyBlockchain::new(DummyStorage::new()))); match backend.state_at(BlockId::Number(0)).unwrap() { GenesisOrUnavailableState::Unavailable => (), @@ -305,11 +314,8 @@ fn execution_proof_is_generated_and_checked() { let remote_header = remote_client.header(&remote_block_id).unwrap().unwrap(); // 'fetch' execution proof from remote node - let (remote_result, remote_execution_proof) = remote_client.execution_proof( - &remote_block_id, - method, - &[] - ).unwrap(); + let (remote_result, remote_execution_proof) = + remote_client.execution_proof(&remote_block_id, method, &[]).unwrap(); // check remote execution proof locally let local_result = check_execution_proof::<_, _, BlakeTwo256>( @@ -323,7 +329,8 @@ fn execution_proof_is_generated_and_checked() { retry_count: None, }, remote_execution_proof, - ).unwrap(); + ) + .unwrap(); (remote_result, local_result) } @@ -333,17 +340,20 @@ fn execution_proof_is_generated_and_checked() { let remote_header = remote_client.header(&remote_block_id).unwrap().unwrap(); // 'fetch' execution proof from remote node - let (_, remote_execution_proof) = remote_client.execution_proof( - &remote_block_id, - 
"Core_initialize_block", - &Header::new( - at, - Default::default(), - Default::default(), - Default::default(), - Default::default(), - ).encode(), - ).unwrap(); + let (_, remote_execution_proof) = remote_client + .execution_proof( + &remote_block_id, + "Core_initialize_block", + &Header::new( + at, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + ) + .encode(), + ) + .unwrap(); // check remote execution proof locally let execution_result = check_execution_proof::<_, _, BlakeTwo256>( @@ -359,7 +369,8 @@ fn execution_proof_is_generated_and_checked() { Default::default(), remote_header.hash(), remote_header.digest().clone(), // this makes next header wrong - ).encode(), + ) + .encode(), retry_count: None, }, remote_execution_proof, @@ -379,7 +390,8 @@ fn execution_proof_is_generated_and_checked() { BlockOrigin::Own, remote_client.new_block(digest).unwrap().build().unwrap().block, Justifications::from((*b"TEST", Default::default())), - )).unwrap(); + )) + .unwrap(); } // check method that doesn't requires environment @@ -401,22 +413,26 @@ fn execution_proof_is_generated_and_checked() { fn code_is_executed_at_genesis_only() { let backend = Arc::new(InMemBackend::::new()); let def = H256::default(); - let header0 = substrate_test_runtime_client::runtime::Header::new(0, def, def, def, Default::default()); + let header0 = + substrate_test_runtime_client::runtime::Header::new(0, def, def, def, Default::default()); let hash0 = header0.hash(); - let header1 = substrate_test_runtime_client::runtime::Header::new(1, def, def, hash0, Default::default()); + let header1 = + substrate_test_runtime_client::runtime::Header::new(1, def, def, hash0, Default::default()); let hash1 = header1.hash(); - backend.blockchain().insert(hash0, header0, None, None, NewBlockState::Final).unwrap(); - backend.blockchain().insert(hash1, header1, None, None, NewBlockState::Final).unwrap(); + backend + .blockchain() + .insert(hash0, header0, None, None, 
NewBlockState::Final) + .unwrap(); + backend + .blockchain() + .insert(hash1, header1, None, None, NewBlockState::Final) + .unwrap(); let genesis_executor = GenesisCallExecutor::new(backend, DummyCallExecutor); assert_eq!( - genesis_executor.call( - &BlockId::Number(0), - "test_method", - &[], - ExecutionStrategy::NativeElseWasm, - None, - ).unwrap(), + genesis_executor + .call(&BlockId::Number(0), "test_method", &[], ExecutionStrategy::NativeElseWasm, None,) + .unwrap(), vec![42], ); @@ -434,7 +450,6 @@ fn code_is_executed_at_genesis_only() { } } - type TestChecker = LightDataChecker< NativeExecutor, BlakeTwo256, @@ -448,27 +463,28 @@ fn prepare_for_read_proof_check() -> (TestChecker, Header, StorageProof, u32) { let remote_block_id = BlockId::Number(0); let remote_block_hash = remote_client.block_hash(0).unwrap().unwrap(); let mut remote_block_header = remote_client.header(&remote_block_id).unwrap().unwrap(); - remote_block_header.state_root = remote_client.state_at(&remote_block_id).unwrap() - .storage_root(::std::iter::empty()).0.into(); + remote_block_header.state_root = remote_client + .state_at(&remote_block_id) + .unwrap() + .storage_root(::std::iter::empty()) + .0 + .into(); // 'fetch' read proof from remote node - let heap_pages = remote_client.storage(&remote_block_id, &StorageKey(well_known_keys::HEAP_PAGES.to_vec())) + let heap_pages = remote_client + .storage(&remote_block_id, &StorageKey(well_known_keys::HEAP_PAGES.to_vec())) .unwrap() - .and_then(|v| Decode::decode(&mut &v.0[..]).ok()).unwrap(); - let remote_read_proof = remote_client.read_proof( - &remote_block_id, - &mut std::iter::once(well_known_keys::HEAP_PAGES), - ).unwrap(); + .and_then(|v| Decode::decode(&mut &v.0[..]).ok()) + .unwrap(); + let remote_read_proof = remote_client + .read_proof(&remote_block_id, &mut std::iter::once(well_known_keys::HEAP_PAGES)) + .unwrap(); // check remote read proof locally let local_storage = InMemoryBlockchain::::new(); - local_storage.insert( - 
remote_block_hash, - remote_block_header.clone(), - None, - None, - NewBlockState::Final, - ).unwrap(); + local_storage + .insert(remote_block_hash, remote_block_header.clone(), None, None, NewBlockState::Final) + .unwrap(); let local_checker = LightDataChecker::new( Arc::new(DummyBlockchain::new(DummyStorage::new())), local_executor(), @@ -478,45 +494,39 @@ fn prepare_for_read_proof_check() -> (TestChecker, Header, StorageProof, u32) { } fn prepare_for_read_child_proof_check() -> (TestChecker, Header, StorageProof, Vec) { - use substrate_test_runtime_client::DefaultTestClientBuilderExt; - use substrate_test_runtime_client::TestClientBuilderExt; + use substrate_test_runtime_client::{DefaultTestClientBuilderExt, TestClientBuilderExt}; let child_info = ChildInfo::new_default(b"child1"); let child_info = &child_info; // prepare remote client let remote_client = substrate_test_runtime_client::TestClientBuilder::new() - .add_extra_child_storage( - child_info, - b"key1".to_vec(), - b"value1".to_vec(), - ).build(); + .add_extra_child_storage(child_info, b"key1".to_vec(), b"value1".to_vec()) + .build(); let remote_block_id = BlockId::Number(0); let remote_block_hash = remote_client.block_hash(0).unwrap().unwrap(); let mut remote_block_header = remote_client.header(&remote_block_id).unwrap().unwrap(); - remote_block_header.state_root = remote_client.state_at(&remote_block_id).unwrap() - .storage_root(::std::iter::empty()).0.into(); + remote_block_header.state_root = remote_client + .state_at(&remote_block_id) + .unwrap() + .storage_root(::std::iter::empty()) + .0 + .into(); // 'fetch' child read proof from remote node - let child_value = remote_client.child_storage( - &remote_block_id, - child_info, - &StorageKey(b"key1".to_vec()), - ).unwrap().unwrap().0; + let child_value = remote_client + .child_storage(&remote_block_id, child_info, &StorageKey(b"key1".to_vec())) + .unwrap() + .unwrap() + .0; assert_eq!(b"value1"[..], child_value[..]); - let remote_read_proof = 
remote_client.read_child_proof( - &remote_block_id, - child_info, - &mut std::iter::once("key1".as_bytes()), - ).unwrap(); + let remote_read_proof = remote_client + .read_child_proof(&remote_block_id, child_info, &mut std::iter::once("key1".as_bytes())) + .unwrap(); // check locally let local_storage = InMemoryBlockchain::::new(); - local_storage.insert( - remote_block_hash, - remote_block_header.clone(), - None, - None, - NewBlockState::Final, - ).unwrap(); + local_storage + .insert(remote_block_hash, remote_block_header.clone(), None, None, NewBlockState::Final) + .unwrap(); let local_checker = LightDataChecker::new( Arc::new(DummyBlockchain::new(DummyStorage::new())), local_executor(), @@ -533,18 +543,21 @@ fn prepare_for_header_proof_check(insert_cht: bool) -> (TestChecker, Hash, Heade let block = remote_client.new_block(Default::default()).unwrap().build().unwrap().block; futures::executor::block_on(remote_client.import(BlockOrigin::Own, block)).unwrap(); local_headers_hashes.push( - remote_client.block_hash(i + 1) - .map_err(|_| ClientError::Backend("TestError".into())) + remote_client + .block_hash(i + 1) + .map_err(|_| ClientError::Backend("TestError".into())), ); } // 'fetch' header proof from remote node let remote_block_id = BlockId::Number(1); - let (remote_block_header, remote_header_proof) = remote_client.header_proof_with_cht_size(&remote_block_id, 4).unwrap(); + let (remote_block_header, remote_header_proof) = + remote_client.header_proof_with_cht_size(&remote_block_id, 4).unwrap(); // check remote read proof locally let local_storage = InMemoryBlockchain::::new(); - let local_cht_root = cht::compute_root::(4, 0, local_headers_hashes).unwrap(); + let local_cht_root = + cht::compute_root::(4, 0, local_headers_hashes).unwrap(); if insert_cht { local_storage.insert_cht_root(1, local_cht_root); } @@ -557,7 +570,7 @@ fn prepare_for_header_proof_check(insert_cht: bool) -> (TestChecker, Hash, Heade } fn header_with_computed_extrinsics_root(extrinsics: Vec) 
-> Header { - use sp_trie::{TrieConfiguration, trie_types::Layout}; + use sp_trie::{trie_types::Layout, TrieConfiguration}; let iter = extrinsics.iter().map(Encode::encode); let extrinsics_root = Layout::::ordered_trie_root(iter); @@ -567,66 +580,106 @@ fn header_with_computed_extrinsics_root(extrinsics: Vec) -> Header { #[test] fn storage_read_proof_is_generated_and_checked() { - let (local_checker, remote_block_header, remote_read_proof, heap_pages) = prepare_for_read_proof_check(); - assert_eq!((&local_checker as &dyn FetchChecker).check_read_proof(&RemoteReadRequest::

{ - block: remote_block_header.hash(), - header: remote_block_header, - keys: vec![well_known_keys::HEAP_PAGES.to_vec()], - retry_count: None, - }, remote_read_proof).unwrap().remove(well_known_keys::HEAP_PAGES).unwrap().unwrap()[0], heap_pages as u8); + let (local_checker, remote_block_header, remote_read_proof, heap_pages) = + prepare_for_read_proof_check(); + assert_eq!( + (&local_checker as &dyn FetchChecker) + .check_read_proof( + &RemoteReadRequest::
{ + block: remote_block_header.hash(), + header: remote_block_header, + keys: vec![well_known_keys::HEAP_PAGES.to_vec()], + retry_count: None, + }, + remote_read_proof + ) + .unwrap() + .remove(well_known_keys::HEAP_PAGES) + .unwrap() + .unwrap()[0], + heap_pages as u8 + ); } #[test] fn storage_child_read_proof_is_generated_and_checked() { let child_info = ChildInfo::new_default(&b"child1"[..]); - let ( - local_checker, - remote_block_header, - remote_read_proof, - result, - ) = prepare_for_read_child_proof_check(); - assert_eq!((&local_checker as &dyn FetchChecker).check_read_child_proof( - &RemoteReadChildRequest::
{ - block: remote_block_header.hash(), - header: remote_block_header, - storage_key: child_info.prefixed_storage_key(), - keys: vec![b"key1".to_vec()], - retry_count: None, - }, - remote_read_proof - ).unwrap().remove(b"key1".as_ref()).unwrap().unwrap(), result); + let (local_checker, remote_block_header, remote_read_proof, result) = + prepare_for_read_child_proof_check(); + assert_eq!( + (&local_checker as &dyn FetchChecker) + .check_read_child_proof( + &RemoteReadChildRequest::
{ + block: remote_block_header.hash(), + header: remote_block_header, + storage_key: child_info.prefixed_storage_key(), + keys: vec![b"key1".to_vec()], + retry_count: None, + }, + remote_read_proof + ) + .unwrap() + .remove(b"key1".as_ref()) + .unwrap() + .unwrap(), + result + ); } #[test] fn header_proof_is_generated_and_checked() { - let (local_checker, local_cht_root, remote_block_header, remote_header_proof) = prepare_for_header_proof_check(true); - assert_eq!((&local_checker as &dyn FetchChecker).check_header_proof(&RemoteHeaderRequest::
{ - cht_root: local_cht_root, - block: 1, - retry_count: None, - }, Some(remote_block_header.clone()), remote_header_proof).unwrap(), remote_block_header); + let (local_checker, local_cht_root, remote_block_header, remote_header_proof) = + prepare_for_header_proof_check(true); + assert_eq!( + (&local_checker as &dyn FetchChecker) + .check_header_proof( + &RemoteHeaderRequest::
{ + cht_root: local_cht_root, + block: 1, + retry_count: None, + }, + Some(remote_block_header.clone()), + remote_header_proof + ) + .unwrap(), + remote_block_header + ); } #[test] fn check_header_proof_fails_if_cht_root_is_invalid() { - let (local_checker, _, mut remote_block_header, remote_header_proof) = prepare_for_header_proof_check(true); + let (local_checker, _, mut remote_block_header, remote_header_proof) = + prepare_for_header_proof_check(true); remote_block_header.number = 100; - assert!((&local_checker as &dyn FetchChecker).check_header_proof(&RemoteHeaderRequest::
{ - cht_root: Default::default(), - block: 1, - retry_count: None, - }, Some(remote_block_header.clone()), remote_header_proof).is_err()); + assert!((&local_checker as &dyn FetchChecker) + .check_header_proof( + &RemoteHeaderRequest::
{ + cht_root: Default::default(), + block: 1, + retry_count: None, + }, + Some(remote_block_header.clone()), + remote_header_proof + ) + .is_err()); } #[test] fn check_header_proof_fails_if_invalid_header_provided() { - let (local_checker, local_cht_root, mut remote_block_header, remote_header_proof) = prepare_for_header_proof_check(true); + let (local_checker, local_cht_root, mut remote_block_header, remote_header_proof) = + prepare_for_header_proof_check(true); remote_block_header.number = 100; - assert!((&local_checker as &dyn FetchChecker).check_header_proof(&RemoteHeaderRequest::
{ - cht_root: local_cht_root, - block: 1, - retry_count: None, - }, Some(remote_block_header.clone()), remote_header_proof).is_err()); + assert!((&local_checker as &dyn FetchChecker) + .check_header_proof( + &RemoteHeaderRequest::
{ + cht_root: local_cht_root, + block: 1, + retry_count: None, + }, + Some(remote_block_header.clone()), + remote_header_proof + ) + .is_err()); } #[test] @@ -647,9 +700,9 @@ fn changes_proof_is_generated_and_checked_when_headers_are_not_pruned() { // 'fetch' changes proof from remote node let key = StorageKey(key); - let remote_proof = remote_client.key_changes_proof( - begin_hash, end_hash, begin_hash, max_hash, None, &key - ).unwrap(); + let remote_proof = remote_client + .key_changes_proof(begin_hash, end_hash, begin_hash, max_hash, None, &key) + .unwrap(); // check proof on local client let local_roots_range = local_roots.clone()[(begin - 1) as usize..].to_vec(); @@ -668,20 +721,23 @@ fn changes_proof_is_generated_and_checked_when_headers_are_not_pruned() { storage_key: None, retry_count: None, }; - let local_result = local_checker.check_changes_proof(&request, ChangesProof { - max_block: remote_proof.max_block, - proof: remote_proof.proof, - roots: remote_proof.roots, - roots_proof: remote_proof.roots_proof, - }).unwrap(); + let local_result = local_checker + .check_changes_proof( + &request, + ChangesProof { + max_block: remote_proof.max_block, + proof: remote_proof.proof, + roots: remote_proof.roots, + roots_proof: remote_proof.roots_proof, + }, + ) + .unwrap(); // ..and ensure that result is the same as on remote node if local_result != expected_result { panic!( "Failed test {}: local = {:?}, expected = {:?}", - index, - local_result, - expected_result, + index, local_result, expected_result, ); } } @@ -702,12 +758,17 @@ fn changes_proof_is_generated_and_checked_when_headers_are_pruned() { let b1 = remote_client.block_hash_from_id(&BlockId::Number(1)).unwrap().unwrap(); let b3 = remote_client.block_hash_from_id(&BlockId::Number(3)).unwrap().unwrap(); let b4 = remote_client.block_hash_from_id(&BlockId::Number(4)).unwrap().unwrap(); - let remote_proof = remote_client.key_changes_proof_with_cht_size( - b1, b4, b3, b4, None, &dave, 4 - ).unwrap(); + let 
remote_proof = remote_client + .key_changes_proof_with_cht_size(b1, b4, b3, b4, None, &dave, 4) + .unwrap(); // prepare local checker, having a root of changes trie CHT#0 - let local_cht_root = cht::compute_root::(4, 0, remote_roots.iter().cloned().map(|ct| Ok(Some(ct)))).unwrap(); + let local_cht_root = cht::compute_root::( + 4, + 0, + remote_roots.iter().cloned().map(|ct| Ok(Some(ct))), + ) + .unwrap(); let mut local_storage = DummyStorage::new(); local_storage.changes_tries_cht_roots.insert(0, local_cht_root); let local_checker = TestChecker::new( @@ -732,12 +793,18 @@ fn changes_proof_is_generated_and_checked_when_headers_are_pruned() { key: dave.0, retry_count: None, }; - let local_result = local_checker.check_changes_proof_with_cht_size(&request, ChangesProof { - max_block: remote_proof.max_block, - proof: remote_proof.proof, - roots: remote_proof.roots, - roots_proof: remote_proof.roots_proof, - }, 4).unwrap(); + let local_result = local_checker + .check_changes_proof_with_cht_size( + &request, + ChangesProof { + max_block: remote_proof.max_block, + proof: remote_proof.proof, + roots: remote_proof.roots, + roots_proof: remote_proof.roots_proof, + }, + 4, + ) + .unwrap(); assert_eq!(local_result, vec![(4, 0), (1, 1), (1, 0)]); } @@ -760,8 +827,9 @@ fn check_changes_proof_fails_if_proof_is_wrong() { // 'fetch' changes proof from remote node let key = StorageKey(key); - let remote_proof = remote_client.key_changes_proof( - begin_hash, end_hash, begin_hash, max_hash, None, &key).unwrap(); + let remote_proof = remote_client + .key_changes_proof(begin_hash, end_hash, begin_hash, max_hash, None, &key) + .unwrap(); let local_roots_range = local_roots.clone()[(begin - 1) as usize..].to_vec(); let config = ChangesTrieConfiguration::new(4, 2); @@ -781,34 +849,54 @@ fn check_changes_proof_fails_if_proof_is_wrong() { }; // check proof on local client using max from the future - assert!(local_checker.check_changes_proof(&request, ChangesProof { - max_block: 
remote_proof.max_block + 1, - proof: remote_proof.proof.clone(), - roots: remote_proof.roots.clone(), - roots_proof: remote_proof.roots_proof.clone(), - }).is_err()); + assert!(local_checker + .check_changes_proof( + &request, + ChangesProof { + max_block: remote_proof.max_block + 1, + proof: remote_proof.proof.clone(), + roots: remote_proof.roots.clone(), + roots_proof: remote_proof.roots_proof.clone(), + } + ) + .is_err()); // check proof on local client using broken proof - assert!(local_checker.check_changes_proof(&request, ChangesProof { - max_block: remote_proof.max_block, - proof: local_roots_range.clone().into_iter().map(|v| v.as_ref().to_vec()).collect(), - roots: remote_proof.roots, - roots_proof: remote_proof.roots_proof, - }).is_err()); + assert!(local_checker + .check_changes_proof( + &request, + ChangesProof { + max_block: remote_proof.max_block, + proof: local_roots_range.clone().into_iter().map(|v| v.as_ref().to_vec()).collect(), + roots: remote_proof.roots, + roots_proof: remote_proof.roots_proof, + } + ) + .is_err()); // extra roots proofs are provided - assert!(local_checker.check_changes_proof(&request, ChangesProof { - max_block: remote_proof.max_block, - proof: remote_proof.proof.clone(), - roots: vec![(begin - 1, Default::default())].into_iter().collect(), - roots_proof: StorageProof::empty(), - }).is_err()); - assert!(local_checker.check_changes_proof(&request, ChangesProof { - max_block: remote_proof.max_block, - proof: remote_proof.proof.clone(), - roots: vec![(end + 1, Default::default())].into_iter().collect(), - roots_proof: StorageProof::empty(), - }).is_err()); + assert!(local_checker + .check_changes_proof( + &request, + ChangesProof { + max_block: remote_proof.max_block, + proof: remote_proof.proof.clone(), + roots: vec![(begin - 1, Default::default())].into_iter().collect(), + roots_proof: StorageProof::empty(), + } + ) + .is_err()); + assert!(local_checker + .check_changes_proof( + &request, + ChangesProof { + max_block: 
remote_proof.max_block, + proof: remote_proof.proof.clone(), + roots: vec![(end + 1, Default::default())].into_iter().collect(), + roots_proof: StorageProof::empty(), + } + ) + .is_err()); } #[test] @@ -817,7 +905,11 @@ fn check_changes_tries_proof_fails_if_proof_is_wrong() { // (1, 4, dave.clone(), vec![(4, 0), (1, 1), (1, 0)]), let (remote_client, remote_roots, _) = prepare_client_with_key_changes(); let local_cht_root = cht::compute_root::( - 4, 0, remote_roots.iter().cloned().map(|ct| Ok(Some(ct)))).unwrap(); + 4, + 0, + remote_roots.iter().cloned().map(|ct| Ok(Some(ct))), + ) + .unwrap(); let dave = blake2_256(&runtime::system::balance_of_key(AccountKeyring::Dave.into())).to_vec(); let dave = StorageKey(dave); @@ -828,9 +920,9 @@ fn check_changes_tries_proof_fails_if_proof_is_wrong() { let b1 = remote_client.block_hash_from_id(&BlockId::Number(1)).unwrap().unwrap(); let b3 = remote_client.block_hash_from_id(&BlockId::Number(3)).unwrap().unwrap(); let b4 = remote_client.block_hash_from_id(&BlockId::Number(4)).unwrap().unwrap(); - let remote_proof = remote_client.key_changes_proof_with_cht_size( - b1, b4, b3, b4, None, &dave, 4 - ).unwrap(); + let remote_proof = remote_client + .key_changes_proof_with_cht_size(b1, b4, b3, b4, None, &dave, 4) + .unwrap(); // fails when changes trie CHT is missing from the local db let local_checker = TestChecker::new( @@ -838,8 +930,9 @@ fn check_changes_tries_proof_fails_if_proof_is_wrong() { local_executor(), Box::new(TaskExecutor::new()), ); - assert!(local_checker.check_changes_tries_proof(4, &remote_proof.roots, - remote_proof.roots_proof.clone()).is_err()); + assert!(local_checker + .check_changes_tries_proof(4, &remote_proof.roots, remote_proof.roots_proof.clone()) + .is_err()); // fails when proof is broken let mut local_storage = DummyStorage::new(); @@ -849,17 +942,15 @@ fn check_changes_tries_proof_fails_if_proof_is_wrong() { local_executor(), Box::new(TaskExecutor::new()), ); - let result = 
local_checker.check_changes_tries_proof( - 4, &remote_proof.roots, StorageProof::empty() - ); + let result = + local_checker.check_changes_tries_proof(4, &remote_proof.roots, StorageProof::empty()); assert!(result.is_err()); } #[test] fn check_body_proof_faulty() { - let header = header_with_computed_extrinsics_root( - vec![Extrinsic::IncludeData(vec![1, 2, 3, 4])] - ); + let header = + header_with_computed_extrinsics_root(vec![Extrinsic::IncludeData(vec![1, 2, 3, 4])]); let block = Block::new(header.clone(), Vec::new()); let local_checker = TestChecker::new( @@ -868,10 +959,7 @@ fn check_body_proof_faulty() { Box::new(TaskExecutor::new()), ); - let body_request = RemoteBodyRequest { - header: header.clone(), - retry_count: None, - }; + let body_request = RemoteBodyRequest { header: header.clone(), retry_count: None }; assert!( local_checker.check_body_proof(&body_request, block.extrinsics).is_err(), @@ -892,10 +980,7 @@ fn check_body_proof_of_same_data_should_succeed() { Box::new(TaskExecutor::new()), ); - let body_request = RemoteBodyRequest { - header: header.clone(), - retry_count: None, - }; + let body_request = RemoteBodyRequest { header: header.clone(), retry_count: None }; assert!(local_checker.check_body_proof(&body_request, block.extrinsics).is_ok()); } diff --git a/client/service/test/src/client/mod.rs b/client/service/test/src/client/mod.rs index bdd693f57b2d0..794fc758443b5 100644 --- a/client/service/test/src/client/mod.rs +++ b/client/service/test/src/client/mod.rs @@ -16,48 +16,50 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use parity_scale_codec::{Encode, Decode, Joiner}; -use sc_executor::native_executor_instance; -use sp_state_machine::{StateMachine, OverlayedChanges, ExecutionStrategy, InMemoryBackend}; -use substrate_test_runtime_client::{ - prelude::*, - runtime::{ - self, genesismap::{GenesisConfig, insert_genesis_block}, - Hash, Transfer, Block, BlockNumber, Header, Digest, RuntimeApi, - }, - AccountKeyring, Sr25519Keyring, TestClientBuilder, ClientBlockImportExt, - BlockBuilderExt, DefaultTestClientBuilderExt, TestClientBuilderExt, ClientExt, -}; -use sc_client_api::{ - StorageProvider, BlockBackend, in_mem, BlockchainEvents, -}; +use futures::executor::block_on; +use hex_literal::hex; +use parity_scale_codec::{Decode, Encode, Joiner}; +use sc_block_builder::BlockBuilderProvider; +use sc_client_api::{in_mem, BlockBackend, BlockchainEvents, StorageProvider}; use sc_client_db::{ - Backend, DatabaseSettings, DatabaseSettingsSrc, PruningMode, KeepBlocks, TransactionStorageMode + Backend, DatabaseSettings, DatabaseSettingsSrc, KeepBlocks, PruningMode, TransactionStorageMode, }; -use sc_block_builder::BlockBuilderProvider; -use sc_service::client::{self, Client, LocalCallExecutor, new_in_mem}; +use sc_executor::native_executor_instance; +use sc_service::client::{self, new_in_mem, Client, LocalCallExecutor}; +use sp_api::ProvideRuntimeApi; +use sp_consensus::{ + BlockCheckParams, BlockImport, BlockImportParams, BlockOrigin, BlockStatus, + Error as ConsensusError, ForkChoiceStrategy, ImportResult, SelectChain, +}; +use sp_core::{blake2_256, testing::TaskExecutor, ChangesTrieConfiguration, H256}; use sp_runtime::{ - ConsensusEngineId, + generic::BlockId, traits::{BlakeTwo256, Block as BlockT, Header as HeaderT}, + ConsensusEngineId, DigestItem, Justifications, +}; +use sp_state_machine::{ + backend::Backend as _, ExecutionStrategy, InMemoryBackend, OverlayedChanges, StateMachine, +}; +use sp_storage::{ChildInfo, StorageKey}; +use sp_trie::{trie_types::Layout, TrieConfiguration}; +use 
std::{ + collections::{HashMap, HashSet}, + sync::Arc, }; use substrate_test_runtime::TestAPI; -use sp_state_machine::backend::Backend as _; -use sp_api::ProvideRuntimeApi; -use sp_core::{H256, ChangesTrieConfiguration, blake2_256, testing::TaskExecutor}; -use std::collections::{HashMap, HashSet}; -use std::sync::Arc; -use sp_consensus::{ - BlockOrigin, SelectChain, BlockImport, Error as ConsensusError, BlockCheckParams, ImportResult, - BlockStatus, BlockImportParams, ForkChoiceStrategy, +use substrate_test_runtime_client::{ + prelude::*, + runtime::{ + self, + genesismap::{insert_genesis_block, GenesisConfig}, + Block, BlockNumber, Digest, Hash, Header, RuntimeApi, Transfer, + }, + AccountKeyring, BlockBuilderExt, ClientBlockImportExt, ClientExt, DefaultTestClientBuilderExt, + Sr25519Keyring, TestClientBuilder, TestClientBuilderExt, }; -use sp_storage::{StorageKey, ChildInfo}; -use sp_trie::{TrieConfiguration, trie_types::Layout}; -use sp_runtime::{generic::BlockId, DigestItem, Justifications}; -use hex_literal::hex; -use futures::executor::block_on; -mod light; mod db; +mod light; const TEST_ENGINE_ID: ConsensusEngineId = *b"TEST"; @@ -68,11 +70,7 @@ native_executor_instance!( ); fn executor() -> sc_executor::NativeExecutor { - sc_executor::NativeExecutor::new( - sc_executor::WasmExecutionMethod::Interpreted, - None, - 8, - ) + sc_executor::NativeExecutor::new(sc_executor::WasmExecutionMethod::Interpreted, None, 8) } pub fn prepare_client_with_key_changes() -> ( @@ -80,14 +78,17 @@ pub fn prepare_client_with_key_changes() -> ( substrate_test_runtime_client::Backend, substrate_test_runtime_client::Executor, Block, - RuntimeApi + RuntimeApi, >, Vec, Vec<(u64, u64, Vec, Vec<(u64, u32)>)>, ) { // prepare block structure let blocks_transfers = vec![ - vec![(AccountKeyring::Alice, AccountKeyring::Dave), (AccountKeyring::Bob, AccountKeyring::Dave)], + vec![ + (AccountKeyring::Alice, AccountKeyring::Dave), + (AccountKeyring::Bob, AccountKeyring::Dave), + ], 
vec![(AccountKeyring::Charlie, AccountKeyring::Eve)], vec![], vec![(AccountKeyring::Alice, AccountKeyring::Dave)], @@ -101,18 +102,22 @@ pub fn prepare_client_with_key_changes() -> ( for (i, block_transfers) in blocks_transfers.into_iter().enumerate() { let mut builder = remote_client.new_block(Default::default()).unwrap(); for (from, to) in block_transfers { - builder.push_transfer(Transfer { - from: from.into(), - to: to.into(), - amount: 1, - nonce: *nonces.entry(from).and_modify(|n| { *n = *n + 1 }).or_default(), - }).unwrap(); + builder + .push_transfer(Transfer { + from: from.into(), + to: to.into(), + amount: 1, + nonce: *nonces.entry(from).and_modify(|n| *n = *n + 1).or_default(), + }) + .unwrap(); } let block = builder.build().unwrap().block; block_on(remote_client.import(BlockOrigin::Own, block)).unwrap(); let header = remote_client.header(&BlockId::Number(i as u64 + 1)).unwrap().unwrap(); - let trie_root = header.digest().log(DigestItem::as_changes_trie_root) + let trie_root = header + .digest() + .log(DigestItem::as_changes_trie_root) .map(|root| H256::from_slice(root.as_ref())) .unwrap(); local_roots.push(trie_root); @@ -121,10 +126,12 @@ pub fn prepare_client_with_key_changes() -> ( // prepare test cases let alice = blake2_256(&runtime::system::balance_of_key(AccountKeyring::Alice.into())).to_vec(); let bob = blake2_256(&runtime::system::balance_of_key(AccountKeyring::Bob.into())).to_vec(); - let charlie = blake2_256(&runtime::system::balance_of_key(AccountKeyring::Charlie.into())).to_vec(); + let charlie = + blake2_256(&runtime::system::balance_of_key(AccountKeyring::Charlie.into())).to_vec(); let dave = blake2_256(&runtime::system::balance_of_key(AccountKeyring::Dave.into())).to_vec(); let eve = blake2_256(&runtime::system::balance_of_key(AccountKeyring::Eve.into())).to_vec(); - let ferdie = blake2_256(&runtime::system::balance_of_key(AccountKeyring::Ferdie.into())).to_vec(); + let ferdie = + 
blake2_256(&runtime::system::balance_of_key(AccountKeyring::Ferdie.into())).to_vec(); let test_cases = vec![ (1, 4, alice.clone(), vec![(4, 0), (1, 0)]), (1, 3, alice.clone(), vec![(1, 0)]), @@ -181,9 +188,9 @@ fn construct_block( Default::default(), &runtime_code, task_executor.clone() as Box<_>, - ).execute( - ExecutionStrategy::NativeElseWasm, - ).unwrap(); + ) + .execute(ExecutionStrategy::NativeElseWasm) + .unwrap(); for tx in transactions.iter() { StateMachine::new( @@ -196,9 +203,9 @@ fn construct_block( Default::default(), &runtime_code, task_executor.clone() as Box<_>, - ).execute( - ExecutionStrategy::NativeElseWasm, - ).unwrap(); + ) + .execute(ExecutionStrategy::NativeElseWasm) + .unwrap(); } let ret_data = StateMachine::new( @@ -211,9 +218,9 @@ fn construct_block( Default::default(), &runtime_code, task_executor.clone() as Box<_>, - ).execute( - ExecutionStrategy::NativeElseWasm, - ).unwrap(); + ) + .execute(ExecutionStrategy::NativeElseWasm) + .unwrap(); header = Header::decode(&mut &ret_data[..]).unwrap(); (vec![].and(&Block { header, extrinsics: transactions }), hash) @@ -243,7 +250,8 @@ fn construct_genesis_should_work_with_native() { 1000, None, Default::default(), - ).genesis_map(); + ) + .genesis_map(); let genesis_hash = insert_genesis_block(&mut storage); let backend = InMemoryBackend::from(storage); @@ -263,9 +271,9 @@ fn construct_genesis_should_work_with_native() { Default::default(), &runtime_code, TaskExecutor::new(), - ).execute( - ExecutionStrategy::NativeElseWasm, - ).unwrap(); + ) + .execute(ExecutionStrategy::NativeElseWasm) + .unwrap(); } #[test] @@ -277,7 +285,8 @@ fn construct_genesis_should_work_with_wasm() { 1000, None, Default::default(), - ).genesis_map(); + ) + .genesis_map(); let genesis_hash = insert_genesis_block(&mut storage); let backend = InMemoryBackend::from(storage); @@ -297,9 +306,9 @@ fn construct_genesis_should_work_with_wasm() { Default::default(), &runtime_code, TaskExecutor::new(), - ).execute( - 
ExecutionStrategy::AlwaysWasm, - ).unwrap(); + ) + .execute(ExecutionStrategy::AlwaysWasm) + .unwrap(); } #[test] @@ -311,7 +320,8 @@ fn construct_genesis_with_bad_transaction_should_panic() { 68, None, Default::default(), - ).genesis_map(); + ) + .genesis_map(); let genesis_hash = insert_genesis_block(&mut storage); let backend = InMemoryBackend::from(storage); @@ -331,9 +341,8 @@ fn construct_genesis_with_bad_transaction_should_panic() { Default::default(), &runtime_code, TaskExecutor::new(), - ).execute( - ExecutionStrategy::NativeElseWasm, - ); + ) + .execute(ExecutionStrategy::NativeElseWasm); assert!(r.is_err()); } @@ -342,17 +351,23 @@ fn client_initializes_from_genesis_ok() { let client = substrate_test_runtime_client::new(); assert_eq!( - client.runtime_api().balance_of( - &BlockId::Number(client.chain_info().best_number), - AccountKeyring::Alice.into(), - ).unwrap(), + client + .runtime_api() + .balance_of( + &BlockId::Number(client.chain_info().best_number), + AccountKeyring::Alice.into(), + ) + .unwrap(), 1000 ); assert_eq!( - client.runtime_api().balance_of( - &BlockId::Number(client.chain_info().best_number), - AccountKeyring::Ferdie.into(), - ).unwrap(), + client + .runtime_api() + .balance_of( + &BlockId::Number(client.chain_info().best_number), + AccountKeyring::Ferdie.into(), + ) + .unwrap(), 0 ); } @@ -374,12 +389,14 @@ fn block_builder_works_with_transactions() { let mut builder = client.new_block(Default::default()).unwrap(); - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 42, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 42, + nonce: 0, + }) + .unwrap(); let block = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); @@ -390,17 +407,23 @@ fn block_builder_works_with_transactions() { 
client.state_at(&BlockId::Number(0)).unwrap().pairs() ); assert_eq!( - client.runtime_api().balance_of( - &BlockId::Number(client.chain_info().best_number), - AccountKeyring::Alice.into(), - ).unwrap(), + client + .runtime_api() + .balance_of( + &BlockId::Number(client.chain_info().best_number), + AccountKeyring::Alice.into(), + ) + .unwrap(), 958 ); assert_eq!( - client.runtime_api().balance_of( - &BlockId::Number(client.chain_info().best_number), - AccountKeyring::Ferdie.into(), - ).unwrap(), + client + .runtime_api() + .balance_of( + &BlockId::Number(client.chain_info().best_number), + AccountKeyring::Ferdie.into(), + ) + .unwrap(), 42 ); } @@ -411,21 +434,23 @@ fn block_builder_does_not_include_invalid() { let mut builder = client.new_block(Default::default()).unwrap(); - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 42, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 42, + nonce: 0, + }) + .unwrap(); - assert!( - builder.push_transfer(Transfer { + assert!(builder + .push_transfer(Transfer { from: AccountKeyring::Eve.into(), to: AccountKeyring::Alice.into(), amount: 42, nonce: 0, - }).is_err() - ); + }) + .is_err()); let block = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); @@ -462,12 +487,7 @@ fn best_containing_with_hash_not_found() { let (client, longest_chain_select) = TestClientBuilder::new().build_with_longest_chain(); - let uninserted_block = client - .new_block(Default::default()) - .unwrap() - .build() - .unwrap() - .block; + let uninserted_block = client.new_block(Default::default()).unwrap().build().unwrap().block; assert_eq!( None, @@ -507,98 +527,104 @@ fn uncles_with_multiple_forks() { block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); // A1 -> A2 - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - 
Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); // A2 -> A3 - let a3 = client.new_block_at( - &BlockId::Hash(a2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a3 = client + .new_block_at(&BlockId::Hash(a2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a3.clone())).unwrap(); // A3 -> A4 - let a4 = client.new_block_at( - &BlockId::Hash(a3.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a4 = client + .new_block_at(&BlockId::Hash(a3.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a4.clone())).unwrap(); // A4 -> A5 - let a5 = client.new_block_at( - &BlockId::Hash(a4.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a5 = client + .new_block_at(&BlockId::Hash(a4.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a5.clone())).unwrap(); // A1 -> B2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise B2 has the same hash as A2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 41, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 41, + nonce: 0, + }) + .unwrap(); let b2 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, 
b2.clone())).unwrap(); // B2 -> B3 - let b3 = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b3 = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b3.clone())).unwrap(); // B3 -> B4 - let b4 = client.new_block_at( - &BlockId::Hash(b3.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b4 = client + .new_block_at(&BlockId::Hash(b3.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b4.clone())).unwrap(); // // B2 -> C3 - let mut builder = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise C3 has the same hash as B3 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 1, - nonce: 1, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 1, + nonce: 1, + }) + .unwrap(); let c3 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, c3.clone())).unwrap(); // A1 -> D2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise D2 has the same hash as B2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 1, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: 
AccountKeyring::Ferdie.into(), + amount: 1, + nonce: 0, + }) + .unwrap(); let d2 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, d2.clone())).unwrap(); @@ -631,21 +657,11 @@ fn best_containing_on_longest_chain_with_single_chain_3_blocks() { let (mut client, longest_chain_select) = TestClientBuilder::new().build_with_longest_chain(); // G -> A1 - let a1 = client - .new_block(Default::default()) - .unwrap() - .build() - .unwrap() - .block; + let a1 = client.new_block(Default::default()).unwrap().build().unwrap().block; block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); // A1 -> A2 - let a2 = client - .new_block(Default::default()) - .unwrap() - .build() - .unwrap() - .block; + let a2 = client.new_block(Default::default()).unwrap().build().unwrap().block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); let genesis_hash = client.chain_info().genesis_hash; @@ -684,67 +700,73 @@ fn best_containing_on_longest_chain_with_multiple_forks() { block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); // A1 -> A2 - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); // A2 -> A3 - let a3 = client.new_block_at( - &BlockId::Hash(a2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a3 = client + .new_block_at(&BlockId::Hash(a2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a3.clone())).unwrap(); // A3 -> A4 - let a4 = client.new_block_at( - &BlockId::Hash(a3.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a4 = client + .new_block_at(&BlockId::Hash(a3.hash()), Default::default(), false) + .unwrap() + .build() + 
.unwrap() + .block; block_on(client.import(BlockOrigin::Own, a4.clone())).unwrap(); // A4 -> A5 - let a5 = client.new_block_at( - &BlockId::Hash(a4.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a5 = client + .new_block_at(&BlockId::Hash(a4.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a5.clone())).unwrap(); // A1 -> B2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise B2 has the same hash as A2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 41, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 41, + nonce: 0, + }) + .unwrap(); let b2 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, b2.clone())).unwrap(); // B2 -> B3 - let b3 = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b3 = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b3.clone())).unwrap(); // B3 -> B4 - let b4 = client.new_block_at( - &BlockId::Hash(b3.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b4 = client + .new_block_at(&BlockId::Hash(b3.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b4.clone())).unwrap(); // B2 -> C3 @@ -764,18 +786,18 @@ fn best_containing_on_longest_chain_with_multiple_forks() { block_on(client.import(BlockOrigin::Own, c3.clone())).unwrap(); // A1 
-> D2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise D2 has the same hash as B2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 1, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 1, + nonce: 0, + }) + .unwrap(); let d2 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, d2.clone())).unwrap(); @@ -957,10 +979,7 @@ fn best_containing_on_longest_chain_with_multiple_forks() { .unwrap() .unwrap() ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a5.hash(), Some(4))).unwrap() - ); + assert_eq!(None, block_on(longest_chain_select.finality_target(a5.hash(), Some(4))).unwrap()); assert_eq!( b4.hash(), block_on(longest_chain_select.finality_target(b2.hash(), Some(4))) @@ -1017,14 +1036,8 @@ fn best_containing_on_longest_chain_with_multiple_forks() { .unwrap() .unwrap() ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a4.hash(), Some(3))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a5.hash(), Some(3))).unwrap() - ); + assert_eq!(None, block_on(longest_chain_select.finality_target(a4.hash(), Some(3))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a5.hash(), Some(3))).unwrap()); assert_eq!( b3.hash(), block_on(longest_chain_select.finality_target(b2.hash(), Some(3))) @@ -1037,10 +1050,7 @@ fn best_containing_on_longest_chain_with_multiple_forks() { .unwrap() .unwrap() ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(b4.hash(), Some(3))).unwrap() - ); + assert_eq!(None, 
block_on(longest_chain_select.finality_target(b4.hash(), Some(3))).unwrap()); assert_eq!( c3.hash(), block_on(longest_chain_select.finality_target(c3.hash(), Some(3))) @@ -1073,36 +1083,18 @@ fn best_containing_on_longest_chain_with_multiple_forks() { .unwrap() .unwrap() ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a3.hash(), Some(2))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a4.hash(), Some(2))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a5.hash(), Some(2))).unwrap() - ); + assert_eq!(None, block_on(longest_chain_select.finality_target(a3.hash(), Some(2))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a4.hash(), Some(2))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a5.hash(), Some(2))).unwrap()); assert_eq!( b2.hash(), block_on(longest_chain_select.finality_target(b2.hash(), Some(2))) .unwrap() .unwrap() ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(b3.hash(), Some(2))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(b4.hash(), Some(2))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(c3.hash(), Some(2))).unwrap() - ); + assert_eq!(None, block_on(longest_chain_select.finality_target(b3.hash(), Some(2))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(b4.hash(), Some(2))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(c3.hash(), Some(2))).unwrap()); assert_eq!( d2.hash(), block_on(longest_chain_select.finality_target(d2.hash(), Some(2))) @@ -1123,83 +1115,32 @@ fn best_containing_on_longest_chain_with_multiple_forks() { .unwrap() .unwrap() ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a2.hash(), Some(1))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a3.hash(), 
Some(1))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a4.hash(), Some(1))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a5.hash(), Some(1))).unwrap() - ); - - assert_eq!( - None, - block_on(longest_chain_select.finality_target(b2.hash(), Some(1))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(b3.hash(), Some(1))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(b4.hash(), Some(1))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(c3.hash(), Some(1))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(d2.hash(), Some(1))).unwrap() - ); - - // search only blocks with number <= 0 + assert_eq!(None, block_on(longest_chain_select.finality_target(a2.hash(), Some(1))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a3.hash(), Some(1))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a4.hash(), Some(1))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a5.hash(), Some(1))).unwrap()); + + assert_eq!(None, block_on(longest_chain_select.finality_target(b2.hash(), Some(1))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(b3.hash(), Some(1))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(b4.hash(), Some(1))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(c3.hash(), Some(1))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(d2.hash(), Some(1))).unwrap()); + + // search only blocks with number <= 0 assert_eq!( genesis_hash, block_on(longest_chain_select.finality_target(genesis_hash, Some(0))) .unwrap() .unwrap() ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a1.hash(), Some(0))).unwrap() - ); - assert_eq!( - None, - 
block_on(longest_chain_select.finality_target(a2.hash(), Some(0))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a3.hash(), Some(0))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a4.hash(), Some(0))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(a5.hash(), Some(0))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(b2.hash(), Some(0))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(b3.hash(), Some(0))).unwrap() - ); - assert_eq!( - None, - block_on(longest_chain_select.finality_target(b4.hash(), Some(0))).unwrap() - ); + assert_eq!(None, block_on(longest_chain_select.finality_target(a1.hash(), Some(0))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a2.hash(), Some(0))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a3.hash(), Some(0))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a4.hash(), Some(0))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(a5.hash(), Some(0))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(b2.hash(), Some(0))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(b3.hash(), Some(0))).unwrap()); + assert_eq!(None, block_on(longest_chain_select.finality_target(b4.hash(), Some(0))).unwrap()); assert_eq!( None, block_on(longest_chain_select.finality_target(c3.hash().clone(), Some(0))).unwrap(), @@ -1218,21 +1159,11 @@ fn best_containing_on_longest_chain_with_max_depth_higher_than_best() { let (mut client, longest_chain_select) = TestClientBuilder::new().build_with_longest_chain(); // G -> A1 - let a1 = client - .new_block(Default::default()) - .unwrap() - .build() - .unwrap() - .block; + let a1 = client.new_block(Default::default()).unwrap().build().unwrap().block; 
block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); // A1 -> A2 - let a2 = client - .new_block(Default::default()) - .unwrap() - .build() - .unwrap() - .block; + let a2 = client.new_block(Default::default()).unwrap().build().unwrap().block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); let genesis_hash = client.chain_info().genesis_hash; @@ -1251,18 +1182,12 @@ fn key_changes_works() { for (index, (begin, end, key, expected_result)) in test_cases.into_iter().enumerate() { let end = client.block_hash(end).unwrap().unwrap(); - let actual_result = client.key_changes( - begin, - BlockId::Hash(end), - None, - &StorageKey(key), - ).unwrap(); + let actual_result = + client.key_changes(begin, BlockId::Hash(end), None, &StorageKey(key)).unwrap(); if actual_result != expected_result { panic!( "Failed test {}: actual = {:?}, expected = {:?}", - index, - actual_result, - expected_result, + index, actual_result, expected_result, ); } } @@ -1277,41 +1202,31 @@ fn import_with_justification() { block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); // A1 -> A2 - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); // A2 -> A3 let justification = Justifications::from((TEST_ENGINE_ID, vec![1, 2, 3])); - let a3 = client.new_block_at( - &BlockId::Hash(a2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a3 = client + .new_block_at(&BlockId::Hash(a2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import_justified(BlockOrigin::Own, a3.clone(), justification.clone())).unwrap(); - assert_eq!( - client.chain_info().finalized_hash, - a3.hash(), - ); + assert_eq!(client.chain_info().finalized_hash, 
a3.hash(),); - assert_eq!( - client.justifications(&BlockId::Hash(a3.hash())).unwrap(), - Some(justification), - ); + assert_eq!(client.justifications(&BlockId::Hash(a3.hash())).unwrap(), Some(justification),); - assert_eq!( - client.justifications(&BlockId::Hash(a1.hash())).unwrap(), - None, - ); + assert_eq!(client.justifications(&BlockId::Hash(a1.hash())).unwrap(), None,); - assert_eq!( - client.justifications(&BlockId::Hash(a2.hash())).unwrap(), - None, - ); + assert_eq!(client.justifications(&BlockId::Hash(a2.hash())).unwrap(), None,); } #[test] @@ -1321,54 +1236,44 @@ fn importing_diverged_finalized_block_should_trigger_reorg() { // G -> A1 -> A2 // \ // -> B1 - let a1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a1 = client + .new_block_at(&BlockId::Number(0), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); - let mut b1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap(); + let mut b1 = client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); // needed to make sure B1 gets a different hash from A1 b1.push_transfer(Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Ferdie.into(), amount: 1, nonce: 0, - }).unwrap(); + }) + .unwrap(); // create but don't import B1 just yet let b1 = b1.build().unwrap().block; // A2 is the current best since it's the longest chain - assert_eq!( - client.chain_info().best_hash, - a2.hash(), - ); + assert_eq!(client.chain_info().best_hash, a2.hash(),); // importing B1 as 
finalized should trigger a re-org and set it as new best let justification = Justifications::from((TEST_ENGINE_ID, vec![1, 2, 3])); block_on(client.import_justified(BlockOrigin::Own, b1.clone(), justification)).unwrap(); - assert_eq!( - client.chain_info().best_hash, - b1.hash(), - ); + assert_eq!(client.chain_info().best_hash, b1.hash(),); - assert_eq!( - client.chain_info().finalized_hash, - b1.hash(), - ); + assert_eq!(client.chain_info().finalized_hash, b1.hash(),); } #[test] @@ -1378,84 +1283,70 @@ fn finalizing_diverged_block_should_trigger_reorg() { // G -> A1 -> A2 // \ // -> B1 -> B2 - let a1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a1 = client + .new_block_at(&BlockId::Number(0), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); - let mut b1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap(); + let mut b1 = client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); // needed to make sure B1 gets a different hash from A1 b1.push_transfer(Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Ferdie.into(), amount: 1, nonce: 0, - }).unwrap(); + }) + .unwrap(); let b1 = b1.build().unwrap().block; block_on(client.import(BlockOrigin::Own, b1.clone())).unwrap(); - let b2 = client.new_block_at( - &BlockId::Hash(b1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b2 = client + .new_block_at(&BlockId::Hash(b1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + 
.block; block_on(client.import(BlockOrigin::Own, b2.clone())).unwrap(); // A2 is the current best since it's the longest chain - assert_eq!( - client.chain_info().best_hash, - a2.hash(), - ); + assert_eq!(client.chain_info().best_hash, a2.hash(),); // we finalize block B1 which is on a different branch from current best // which should trigger a re-org. ClientExt::finalize_block(&client, BlockId::Hash(b1.hash()), None).unwrap(); // B1 should now be the latest finalized - assert_eq!( - client.chain_info().finalized_hash, - b1.hash(), - ); + assert_eq!(client.chain_info().finalized_hash, b1.hash(),); // and B1 should be the new best block (`finalize_block` as no way of // knowing about B2) - assert_eq!( - client.chain_info().best_hash, - b1.hash(), - ); + assert_eq!(client.chain_info().best_hash, b1.hash(),); // `SelectChain` should report B2 as best block though - assert_eq!( - block_on(select_chain.best_chain()).unwrap().hash(), - b2.hash(), - ); + assert_eq!(block_on(select_chain.best_chain()).unwrap().hash(), b2.hash(),); // after we build B3 on top of B2 and import it // it should be the new best block, - let b3 = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b3 = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b3.clone())).unwrap(); - assert_eq!( - client.chain_info().best_hash, - b3.hash(), - ); + assert_eq!(client.chain_info().best_hash, b3.hash(),); } #[test] @@ -1473,55 +1364,53 @@ fn state_reverted_on_reorg() { sp_tracing::try_init_simple(); let mut client = substrate_test_runtime_client::new(); - let current_balance = |client: &substrate_test_runtime_client::TestClient| - client.runtime_api().balance_of( - &BlockId::number(client.chain_info().best_number), AccountKeyring::Alice.into(), - ).unwrap(); + let current_balance = |client: 
&substrate_test_runtime_client::TestClient| { + client + .runtime_api() + .balance_of( + &BlockId::number(client.chain_info().best_number), + AccountKeyring::Alice.into(), + ) + .unwrap() + }; // G -> A1 -> A2 // \ // -> B1 - let mut a1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap(); + let mut a1 = client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); a1.push_transfer(Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Bob.into(), amount: 10, nonce: 0, - }).unwrap(); + }) + .unwrap(); let a1 = a1.build().unwrap().block; block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); - let mut b1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap(); + let mut b1 = client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); b1.push_transfer(Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Ferdie.into(), amount: 50, nonce: 0, - }).unwrap(); + }) + .unwrap(); let b1 = b1.build().unwrap().block; // Reorg to B1 block_on(client.import_as_best(BlockOrigin::Own, b1.clone())).unwrap(); assert_eq!(950, current_balance(&client)); - let mut a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); a2.push_transfer(Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Charlie.into(), amount: 10, nonce: 1, - }).unwrap(); + }) + .unwrap(); let a2 = a2.build().unwrap().block; // Re-org to A2 block_on(client.import_as_best(BlockOrigin::Own, a2)).unwrap(); @@ -1535,20 +1424,20 @@ fn doesnt_import_blocks_that_revert_finality() { // we need to run with archive pruning to avoid pruning non-canonical // states - let backend = Arc::new(Backend::new( - DatabaseSettings { - state_cache_size: 1 << 20, - state_cache_child_ratio: None, - state_pruning: PruningMode::ArchiveAll, - 
keep_blocks: KeepBlocks::All, - transaction_storage: TransactionStorageMode::BlockBody, - source: DatabaseSettingsSrc::RocksDb { - path: tmp.path().into(), - cache_size: 1024, + let backend = Arc::new( + Backend::new( + DatabaseSettings { + state_cache_size: 1 << 20, + state_cache_child_ratio: None, + state_pruning: PruningMode::ArchiveAll, + keep_blocks: KeepBlocks::All, + transaction_storage: TransactionStorageMode::BlockBody, + source: DatabaseSettingsSrc::RocksDb { path: tmp.path().into(), cache_size: 1024 }, }, - }, - u64::MAX, - ).unwrap()); + u64::MAX, + ) + .unwrap(), + ); let mut client = TestClientBuilder::with_backend(backend).build(); @@ -1558,18 +1447,20 @@ fn doesnt_import_blocks_that_revert_finality() { // \ // -> B1 -> B2 -> B3 - let a1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a1 = client + .new_block_at(&BlockId::Number(0), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); let mut b1 = client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); @@ -1580,18 +1471,27 @@ fn doesnt_import_blocks_that_revert_finality() { to: AccountKeyring::Ferdie.into(), amount: 1, nonce: 0, - }).unwrap(); + }) + .unwrap(); let b1 = b1.build().unwrap().block; block_on(client.import(BlockOrigin::Own, b1.clone())).unwrap(); - let b2 = client.new_block_at(&BlockId::Hash(b1.hash()), Default::default(), false) - .unwrap().build().unwrap().block; + let b2 = client + .new_block_at(&BlockId::Hash(b1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + 
.block; block_on(client.import(BlockOrigin::Own, b2.clone())).unwrap(); // prepare B3 before we finalize A2, because otherwise we won't be able to // read changes trie configuration after A2 is finalized - let b3 = client.new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) - .unwrap().build().unwrap().block; + let b3 = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; // we will finalize A2 which should make it impossible to import a new // B3 at the same height but that doesn't include it @@ -1599,15 +1499,13 @@ fn doesnt_import_blocks_that_revert_finality() { let import_err = block_on(client.import(BlockOrigin::Own, b3)).err().unwrap(); let expected_err = ConsensusError::ClientImport( - sp_blockchain::Error::RuntimeApiError( - sp_api::ApiError::Application(Box::new(sp_blockchain::Error::NotInFinalizedChain)) - ).to_string() + sp_blockchain::Error::RuntimeApiError(sp_api::ApiError::Application(Box::new( + sp_blockchain::Error::NotInFinalizedChain, + ))) + .to_string(), ); - assert_eq!( - import_err.to_string(), - expected_err.to_string(), - ); + assert_eq!(import_err.to_string(), expected_err.to_string(),); // adding a C1 block which is lower than the last finalized should also // fail (with a cheaper check that doesn't require checking ancestry). 
@@ -1619,18 +1517,15 @@ fn doesnt_import_blocks_that_revert_finality() { to: AccountKeyring::Ferdie.into(), amount: 2, nonce: 0, - }).unwrap(); + }) + .unwrap(); let c1 = c1.build().unwrap().block; let import_err = block_on(client.import(BlockOrigin::Own, c1)).err().unwrap(); - let expected_err = ConsensusError::ClientImport( - sp_blockchain::Error::NotInFinalizedChain.to_string() - ); + let expected_err = + ConsensusError::ClientImport(sp_blockchain::Error::NotInFinalizedChain.to_string()); - assert_eq!( - import_err.to_string(), - expected_err.to_string(), - ); + assert_eq!(import_err.to_string(), expected_err.to_string(),); } #[test] @@ -1644,15 +1539,16 @@ fn respects_block_rules() { TestClientBuilder::new().build() } else { TestClientBuilder::new() - .set_block_rules( - Some(fork_rules.clone()), - Some(known_bad.clone()), - ) + .set_block_rules(Some(fork_rules.clone()), Some(known_bad.clone())) .build() }; - let block_ok = client.new_block_at(&BlockId::Number(0), Default::default(), false) - .unwrap().build().unwrap().block; + let block_ok = client + .new_block_at(&BlockId::Number(0), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; let params = BlockCheckParams { hash: block_ok.hash().clone(), @@ -1664,8 +1560,8 @@ fn respects_block_rules() { assert_eq!(block_on(client.check_block(params)).unwrap(), ImportResult::imported(false)); // this is 0x0d6d6612a10485370d9e085aeea7ec427fb3f34d961c6a816cdbe5cde2278864 - let mut block_not_ok = client.new_block_at(&BlockId::Number(0), Default::default(), false) - .unwrap(); + let mut block_not_ok = + client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); block_not_ok.push_storage_change(vec![0], Some(vec![1])).unwrap(); let block_not_ok = block_not_ok.build().unwrap().block; @@ -1686,8 +1582,8 @@ fn respects_block_rules() { block_on(client.import_as_final(BlockOrigin::Own, block_ok)).unwrap(); // And check good fork - let mut block_ok = client.new_block_at(&BlockId::Number(1), 
Default::default(), false) - .unwrap(); + let mut block_ok = + client.new_block_at(&BlockId::Number(1), Default::default(), false).unwrap(); block_ok.push_storage_change(vec![0], Some(vec![2])).unwrap(); let block_ok = block_ok.build().unwrap().block; @@ -1704,8 +1600,8 @@ fn respects_block_rules() { assert_eq!(block_on(client.check_block(params)).unwrap(), ImportResult::imported(false)); // And now try bad fork - let mut block_not_ok = client.new_block_at(&BlockId::Number(1), Default::default(), false) - .unwrap(); + let mut block_not_ok = + client.new_block_at(&BlockId::Number(1), Default::default(), false).unwrap(); block_not_ok.push_storage_change(vec![0], Some(vec![3])).unwrap(); let block_not_ok = block_not_ok.build().unwrap().block; @@ -1739,28 +1635,29 @@ fn returns_status_for_pruned_blocks() { // set to prune after 1 block // states - let backend = Arc::new(Backend::new( - DatabaseSettings { - state_cache_size: 1 << 20, - state_cache_child_ratio: None, - state_pruning: PruningMode::keep_blocks(1), - keep_blocks: KeepBlocks::All, - transaction_storage: TransactionStorageMode::BlockBody, - source: DatabaseSettingsSrc::RocksDb { - path: tmp.path().into(), - cache_size: 1024, + let backend = Arc::new( + Backend::new( + DatabaseSettings { + state_cache_size: 1 << 20, + state_cache_child_ratio: None, + state_pruning: PruningMode::keep_blocks(1), + keep_blocks: KeepBlocks::All, + transaction_storage: TransactionStorageMode::BlockBody, + source: DatabaseSettingsSrc::RocksDb { path: tmp.path().into(), cache_size: 1024 }, }, - }, - u64::MAX, - ).unwrap()); + u64::MAX, + ) + .unwrap(), + ); let mut client = TestClientBuilder::with_backend(backend).build(); - let a1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a1 = client + .new_block_at(&BlockId::Number(0), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; let mut b1 = client.new_block_at(&BlockId::Number(0), 
Default::default(), false).unwrap(); @@ -1770,7 +1667,8 @@ fn returns_status_for_pruned_blocks() { to: AccountKeyring::Ferdie.into(), amount: 1, nonce: 0, - }).unwrap(); + }) + .unwrap(); let b1 = b1.build().unwrap().block; let check_block_a1 = BlockCheckParams { @@ -1801,11 +1699,12 @@ fn returns_status_for_pruned_blocks() { BlockStatus::InChainWithState, ); - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import_as_final(BlockOrigin::Own, a2.clone())).unwrap(); let check_block_a2 = BlockCheckParams { @@ -1833,11 +1732,12 @@ fn returns_status_for_pruned_blocks() { BlockStatus::InChainWithState, ); - let a3 = client.new_block_at( - &BlockId::Hash(a2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a3 = client + .new_block_at(&BlockId::Hash(a2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import_as_final(BlockOrigin::Own, a3.clone())).unwrap(); let check_block_a3 = BlockCheckParams { @@ -1904,7 +1804,8 @@ fn imports_blocks_with_changes_tries_config_change() { .changes_trie_config(Some(ChangesTrieConfiguration { digest_interval: 4, digest_levels: 2, - })).build(); + })) + .build(); // =================================================================== // blocks 1,2,3,4,5,6,7,8,9,10 are empty @@ -1923,70 +1824,114 @@ fn imports_blocks_with_changes_tries_config_change() { // block 31 is L1 digest that covers this change // =================================================================== (1..11).for_each(|number| { - let block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false) - .unwrap().build().unwrap().block; + let block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap() + .build() + 
.unwrap() + .block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (11..12).for_each(|number| { - let mut block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false).unwrap(); - block.push_storage_change(vec![42], Some(number.to_le_bytes().to_vec())).unwrap(); + let mut block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap(); + block + .push_storage_change(vec![42], Some(number.to_le_bytes().to_vec())) + .unwrap(); let block = block.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (12..23).for_each(|number| { - let block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false) - .unwrap().build().unwrap().block; + let block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (23..24).for_each(|number| { - let mut block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false).unwrap(); - block.push_changes_trie_configuration_update(Some(ChangesTrieConfiguration { - digest_interval: 5, - digest_levels: 1, - })).unwrap(); + let mut block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap(); + block + .push_changes_trie_configuration_update(Some(ChangesTrieConfiguration { + digest_interval: 5, + digest_levels: 1, + })) + .unwrap(); let block = block.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (24..26).for_each(|number| { - let mut block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false).unwrap(); - block.push_storage_change(vec![42], Some(number.to_le_bytes().to_vec())).unwrap(); + let mut block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap(); + block + .push_storage_change(vec![42], 
Some(number.to_le_bytes().to_vec())) + .unwrap(); let block = block.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (26..27).for_each(|number| { - let block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false) - .unwrap().build().unwrap().block; + let block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (27..28).for_each(|number| { - let mut block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false).unwrap(); - block.push_storage_change(vec![42], Some(number.to_le_bytes().to_vec())).unwrap(); + let mut block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap(); + block + .push_storage_change(vec![42], Some(number.to_le_bytes().to_vec())) + .unwrap(); let block = block.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (28..29).for_each(|number| { - let mut block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false).unwrap(); - block.push_changes_trie_configuration_update(Some(ChangesTrieConfiguration { - digest_interval: 3, - digest_levels: 1, - })).unwrap(); + let mut block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap(); + block + .push_changes_trie_configuration_update(Some(ChangesTrieConfiguration { + digest_interval: 3, + digest_levels: 1, + })) + .unwrap(); let block = block.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (29..30).for_each(|number| { - let block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false) - .unwrap().build().unwrap().block; + let block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; 
block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (30..31).for_each(|number| { - let mut block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false).unwrap(); - block.push_storage_change(vec![42], Some(number.to_le_bytes().to_vec())).unwrap(); + let mut block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap(); + block + .push_storage_change(vec![42], Some(number.to_le_bytes().to_vec())) + .unwrap(); let block = block.build().unwrap().block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); (31..32).for_each(|number| { - let block = client.new_block_at(&BlockId::Number(number - 1), Default::default(), false) - .unwrap().build().unwrap().block; + let block = client + .new_block_at(&BlockId::Number(number - 1), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, block)).unwrap(); }); @@ -2010,44 +1955,53 @@ fn storage_keys_iter_prefix_and_start_key_works() { let prefix = StorageKey(hex!("3a").to_vec()); let child_prefix = StorageKey(b"sec".to_vec()); - let res: Vec<_> = client.storage_keys_iter(&BlockId::Number(0), Some(&prefix), None) + let res: Vec<_> = client + .storage_keys_iter(&BlockId::Number(0), Some(&prefix), None) .unwrap() .map(|x| x.0) .collect(); - assert_eq!(res, [ - child_root.clone(), - hex!("3a636f6465").to_vec(), - hex!("3a686561707061676573").to_vec(), - ]); + assert_eq!( + res, + [child_root.clone(), hex!("3a636f6465").to_vec(), hex!("3a686561707061676573").to_vec(),] + ); - let res: Vec<_> = client.storage_keys_iter(&BlockId::Number(0), Some(&prefix), Some(&StorageKey(hex!("3a636f6465").to_vec()))) + let res: Vec<_> = client + .storage_keys_iter( + &BlockId::Number(0), + Some(&prefix), + Some(&StorageKey(hex!("3a636f6465").to_vec())), + ) .unwrap() .map(|x| x.0) .collect(); assert_eq!(res, [hex!("3a686561707061676573").to_vec()]); - let res: Vec<_> = client.storage_keys_iter(&BlockId::Number(0), 
Some(&prefix), Some(&StorageKey(hex!("3a686561707061676573").to_vec()))) + let res: Vec<_> = client + .storage_keys_iter( + &BlockId::Number(0), + Some(&prefix), + Some(&StorageKey(hex!("3a686561707061676573").to_vec())), + ) .unwrap() .map(|x| x.0) .collect(); assert_eq!(res, Vec::>::new()); - let res: Vec<_> = client.child_storage_keys_iter( - &BlockId::Number(0), - child_info.clone(), - Some(&child_prefix), - None, - ).unwrap() + let res: Vec<_> = client + .child_storage_keys_iter(&BlockId::Number(0), child_info.clone(), Some(&child_prefix), None) + .unwrap() .map(|x| x.0) .collect(); assert_eq!(res, [b"second".to_vec()]); - let res: Vec<_> = client.child_storage_keys_iter( - &BlockId::Number(0), - child_info, - None, - Some(&StorageKey(b"second".to_vec())), - ).unwrap() + let res: Vec<_> = client + .child_storage_keys_iter( + &BlockId::Number(0), + child_info, + None, + Some(&StorageKey(b"second".to_vec())), + ) + .unwrap() .map(|x| x.0) .collect(); assert_eq!(res, [b"third".to_vec()]); @@ -2059,30 +2013,52 @@ fn storage_keys_iter_works() { let prefix = StorageKey(hex!("").to_vec()); - let res: Vec<_> = client.storage_keys_iter(&BlockId::Number(0), Some(&prefix), None) + let res: Vec<_> = client + .storage_keys_iter(&BlockId::Number(0), Some(&prefix), None) .unwrap() .take(2) .map(|x| x.0) .collect(); - assert_eq!(res, [hex!("0befda6e1ca4ef40219d588a727f1271").to_vec(), hex!("3a636f6465").to_vec()]); + assert_eq!( + res, + [hex!("0befda6e1ca4ef40219d588a727f1271").to_vec(), hex!("3a636f6465").to_vec()] + ); - let res: Vec<_> = client.storage_keys_iter(&BlockId::Number(0), Some(&prefix), Some(&StorageKey(hex!("3a636f6465").to_vec()))) + let res: Vec<_> = client + .storage_keys_iter( + &BlockId::Number(0), + Some(&prefix), + Some(&StorageKey(hex!("3a636f6465").to_vec())), + ) .unwrap() .take(3) .map(|x| x.0) .collect(); - assert_eq!(res, [ - hex!("3a686561707061676573").to_vec(), - hex!("6644b9b8bc315888ac8e41a7968dc2b4141a5403c58acdf70b7e8f7e07bf5081").to_vec(), 
- hex!("79c07e2b1d2e2abfd4855b936617eeff5e0621c4869aa60c02be9adcc98a0d1d").to_vec(), - ]); - - let res: Vec<_> = client.storage_keys_iter(&BlockId::Number(0), Some(&prefix), Some(&StorageKey(hex!("79c07e2b1d2e2abfd4855b936617eeff5e0621c4869aa60c02be9adcc98a0d1d").to_vec()))) + assert_eq!( + res, + [ + hex!("3a686561707061676573").to_vec(), + hex!("6644b9b8bc315888ac8e41a7968dc2b4141a5403c58acdf70b7e8f7e07bf5081").to_vec(), + hex!("79c07e2b1d2e2abfd4855b936617eeff5e0621c4869aa60c02be9adcc98a0d1d").to_vec(), + ] + ); + + let res: Vec<_> = client + .storage_keys_iter( + &BlockId::Number(0), + Some(&prefix), + Some(&StorageKey( + hex!("79c07e2b1d2e2abfd4855b936617eeff5e0621c4869aa60c02be9adcc98a0d1d").to_vec(), + )), + ) .unwrap() .take(1) .map(|x| x.0) .collect(); - assert_eq!(res, [hex!("cf722c0832b5231d35e29f319ff27389f5032bfc7bfc3ba5ed7839f2042fb99f").to_vec()]); + assert_eq!( + res, + [hex!("cf722c0832b5231d35e29f319ff27389f5032bfc7bfc3ba5ed7839f2042fb99f").to_vec()] + ); } #[test] @@ -2092,26 +2068,29 @@ fn cleans_up_closed_notification_sinks_on_block_import() { // NOTE: we need to build the client here instead of using the client // provided by test_runtime_client otherwise we can't access the private // `import_notification_sinks` and `finality_notification_sinks` fields. 
- let mut client = - new_in_mem::< - _, - substrate_test_runtime_client::runtime::Block, - _, - substrate_test_runtime_client::runtime::RuntimeApi, - >( - substrate_test_runtime_client::new_native_executor(), - &substrate_test_runtime_client::GenesisParameters::default().genesis_storage(), - None, - None, - None, - Box::new(TaskExecutor::new()), - Default::default(), - ) - .unwrap(); + let mut client = new_in_mem::< + _, + substrate_test_runtime_client::runtime::Block, + _, + substrate_test_runtime_client::runtime::RuntimeApi, + >( + substrate_test_runtime_client::new_native_executor(), + &substrate_test_runtime_client::GenesisParameters::default().genesis_storage(), + None, + None, + None, + Box::new(TaskExecutor::new()), + Default::default(), + ) + .unwrap(); type TestClient = Client< in_mem::Backend, - LocalCallExecutor, sc_executor::NativeExecutor>, + LocalCallExecutor< + Block, + in_mem::Backend, + sc_executor::NativeExecutor, + >, substrate_test_runtime_client::runtime::Block, substrate_test_runtime_client::runtime::RuntimeApi, >; @@ -2123,12 +2102,7 @@ fn cleans_up_closed_notification_sinks_on_block_import() { // for some reason I can't seem to use `ClientBlockImportExt` let bake_and_import_block = |client: &mut TestClient, origin| { - let block = client - .new_block(Default::default()) - .unwrap() - .build() - .unwrap() - .block; + let block = client.new_block(Default::default()).unwrap().build().unwrap().block; let (header, extrinsics) = block.deconstruct(); let mut import = BlockImportParams::new(origin, header); @@ -2168,44 +2142,43 @@ fn cleans_up_closed_notification_sinks_on_block_import() { fn reorg_triggers_a_notification_even_for_sources_that_should_not_trigger_notifications() { let mut client = TestClientBuilder::new().build(); - let mut notification_stream = futures::executor::block_on_stream( - client.import_notification_stream() - ); + let mut notification_stream = + futures::executor::block_on_stream(client.import_notification_stream()); - let 
a1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a1 = client + .new_block_at(&BlockId::Number(0), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::NetworkInitialSync, a1.clone())).unwrap(); - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::NetworkInitialSync, a2.clone())).unwrap(); - let mut b1 = client.new_block_at( - &BlockId::Number(0), - Default::default(), - false, - ).unwrap(); + let mut b1 = client.new_block_at(&BlockId::Number(0), Default::default(), false).unwrap(); // needed to make sure B1 gets a different hash from A1 b1.push_transfer(Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Ferdie.into(), amount: 1, nonce: 0, - }).unwrap(); + }) + .unwrap(); let b1 = b1.build().unwrap().block; block_on(client.import(BlockOrigin::NetworkInitialSync, b1.clone())).unwrap(); - let b2 = client.new_block_at( - &BlockId::Hash(b1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b2 = client + .new_block_at(&BlockId::Hash(b1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; // Should trigger a notification because we reorg block_on(client.import_as_best(BlockOrigin::NetworkInitialSync, b2.clone())).unwrap(); diff --git a/client/service/test/src/lib.rs b/client/service/test/src/lib.rs index 44228d1575cc2..9433ed0bde06f 100644 --- a/client/service/test/src/lib.rs +++ b/client/service/test/src/lib.rs @@ -18,38 +18,27 @@ //! Service integration test utils. 
-use std::iter; -use std::sync::Arc; -use std::net::Ipv4Addr; -use std::pin::Pin; -use std::time::Duration; -use log::{info, debug}; -use futures01::{Future, Stream, Poll}; use futures::{FutureExt as _, TryFutureExt as _}; -use tempfile::TempDir; -use tokio::{runtime::Runtime, prelude::FutureExt}; -use tokio::timer::Interval; +use futures01::{Future, Poll, Stream}; +use log::{debug, info}; +use parking_lot::Mutex; +use sc_client_api::{Backend, CallExecutor}; +use sc_network::{ + config::{NetworkConfiguration, TransportConfig}, + multiaddr, Multiaddr, +}; use sc_service::{ - TaskManager, - SpawnTaskHandle, - GenericChainSpec, - ChainSpecExtension, - Configuration, - KeepBlocks, TransactionStorageMode, - config::{BasePath, DatabaseConfig, KeystoreConfig}, - RuntimeGenesis, - Role, - Error, - TaskExecutor, client::Client, + config::{BasePath, DatabaseConfig, KeystoreConfig}, + ChainSpecExtension, Configuration, Error, GenericChainSpec, KeepBlocks, Role, RuntimeGenesis, + SpawnTaskHandle, TaskExecutor, TaskManager, TransactionStorageMode, }; +use sc_transaction_pool_api::TransactionPool; use sp_blockchain::HeaderBackend; -use sc_network::{multiaddr, Multiaddr}; -use sc_network::config::{NetworkConfiguration, TransportConfig}; use sp_runtime::{generic::BlockId, traits::Block as BlockT}; -use sc_transaction_pool_api::TransactionPool; -use sc_client_api::{Backend, CallExecutor}; -use parking_lot::Mutex; +use std::{iter, net::Ipv4Addr, pin::Pin, sync::Arc, time::Duration}; +use tempfile::TempDir; +use tokio::{prelude::FutureExt, runtime::Runtime, timer::Interval}; #[cfg(test)] mod client; @@ -67,7 +56,9 @@ struct TestNet { nodes: usize, } -pub trait TestNetNode: Clone + Future + Send + 'static { +pub trait TestNetNode: + Clone + Future + Send + 'static +{ type Block: BlockT; type Backend: Backend; type Executor: CallExecutor + Send + Sync; @@ -76,7 +67,9 @@ pub trait TestNetNode: Clone + Future + Se fn client(&self) -> Arc>; fn transaction_pool(&self) -> Arc; - fn 
network(&self) -> Arc::Hash>>; + fn network( + &self, + ) -> Arc::Hash>>; fn spawn_handle(&self) -> SpawnTaskHandle; } @@ -88,23 +81,21 @@ pub struct TestNetComponents { } impl -TestNetComponents { + TestNetComponents +{ pub fn new( task_manager: TaskManager, client: Arc>, network: Arc::Hash>>, transaction_pool: Arc, ) -> Self { - Self { - client, transaction_pool, network, - task_manager: Arc::new(Mutex::new(task_manager)), - } + Self { client, transaction_pool, network, task_manager: Arc::new(Mutex::new(task_manager)) } } } - -impl Clone for -TestNetComponents { +impl Clone + for TestNetComponents +{ fn clone(&self) -> Self { Self { task_manager: self.task_manager.clone(), @@ -115,8 +106,8 @@ TestNetComponents { } } -impl Future for - TestNetComponents +impl Future + for TestNetComponents { type Item = (); type Error = sc_service::Error; @@ -126,14 +117,14 @@ impl Future for } } -impl TestNetNode for -TestNetComponents - where - TBl: BlockT, - TBackend: sc_client_api::Backend + Send + Sync + 'static, - TExec: CallExecutor + Send + Sync + 'static, - TRtApi: Send + Sync + 'static, - TExPool: TransactionPool + Send + Sync + 'static, +impl TestNetNode + for TestNetComponents +where + TBl: BlockT, + TBackend: sc_client_api::Backend + Send + Sync + 'static, + TExec: CallExecutor + Send + Sync + 'static, + TRtApi: Send + Sync + 'static, + TExPool: TransactionPool + Send + Sync + 'static, { type Block = TBl; type Backend = TBackend; @@ -147,7 +138,9 @@ TestNetComponents fn transaction_pool(&self) -> Arc { self.transaction_pool.clone() } - fn network(&self) -> Arc::Hash>> { + fn network( + &self, + ) -> Arc::Hash>> { self.network.clone() } fn spawn_handle(&self) -> SpawnTaskHandle { @@ -156,33 +149,32 @@ TestNetComponents } impl TestNet -where F: Clone + Send + 'static, L: Clone + Send +'static, U: Clone + Send + 'static +where + F: Clone + Send + 'static, + L: Clone + Send + 'static, + U: Clone + Send + 'static, { - pub fn run_until_all_full( - &mut self, - 
full_predicate: FP, - light_predicate: LP, - ) - where - FP: Send + Fn(usize, &F) -> bool + 'static, - LP: Send + Fn(usize, &L) -> bool + 'static, + pub fn run_until_all_full(&mut self, full_predicate: FP, light_predicate: LP) + where + FP: Send + Fn(usize, &F) -> bool + 'static, + LP: Send + Fn(usize, &L) -> bool + 'static, { let full_nodes = self.full_nodes.clone(); let light_nodes = self.light_nodes.clone(); let interval = Interval::new_interval(Duration::from_millis(100)) .map_err(|_| ()) .for_each(move |_| { - let full_ready = full_nodes.iter().all(|&(ref id, ref service, _, _)| - full_predicate(*id, service) - ); + let full_ready = full_nodes + .iter() + .all(|&(ref id, ref service, _, _)| full_predicate(*id, service)); if !full_ready { - return Ok(()); + return Ok(()) } - let light_ready = light_nodes.iter().all(|&(ref id, ref service, _)| - light_predicate(*id, service) - ); + let light_ready = light_nodes + .iter() + .all(|&(ref id, ref service, _)| light_predicate(*id, service)); if !light_ready { Ok(()) @@ -200,7 +192,10 @@ where F: Clone + Send + 'static, L: Clone + Send +'static, U: Clone + Send + 'st } } -fn node_config ( +fn node_config< + G: RuntimeGenesis + 'static, + E: ChainSpecExtension + Clone + 'static + Send + Sync, +>( index: usize, spec: &GenericChainSpec, role: Role, @@ -208,8 +203,7 @@ fn node_config, base_port: u16, root: &TempDir, -) -> Configuration -{ +) -> Configuration { let root = root.path().join(format!("node-{}", index)); let mut network_config = NetworkConfiguration::new( @@ -224,7 +218,7 @@ fn node_config TestNet where +impl TestNet +where F: TestNetNode, L: TestNetNode, E: ChainSpecExtension + Clone + 'static + Send + Sync, @@ -295,11 +284,8 @@ impl TestNet where spec: GenericChainSpec, full: impl Iterator Result<(F, U), Error>>, light: impl Iterator Result>, - authorities: impl Iterator Result<(F, U), Error> - )>, - base_port: u16 + authorities: impl Iterator Result<(F, U), Error>)>, + base_port: u16, ) -> TestNet { 
sp_tracing::try_init_simple(); fdlimit::raise_fd_limit(); @@ -322,7 +308,7 @@ impl TestNet where temp: &TempDir, full: impl Iterator Result<(F, U), Error>>, light: impl Iterator Result>, - authorities: impl Iterator Result<(F, U), Error>)> + authorities: impl Iterator Result<(F, U), Error>)>, ) { let executor = self.runtime.executor(); let task_executor: TaskExecutor = { @@ -330,7 +316,8 @@ impl TestNet where (move |fut: Pin + Send>>, _| { executor.spawn(fut.unit_error().compat()); async {} - }).into() + }) + .into() }; for (key, authority) in authorities { @@ -344,10 +331,12 @@ impl TestNet where &temp, ); let addr = node_config.network.listen_addresses.iter().next().unwrap().clone(); - let (service, user_data) = authority(node_config).expect("Error creating test node service"); + let (service, user_data) = + authority(node_config).expect("Error creating test node service"); executor.spawn(service.clone().map_err(|_| ())); - let addr = addr.with(multiaddr::Protocol::P2p(service.network().local_peer_id().clone().into())); + let addr = addr + .with(multiaddr::Protocol::P2p(service.network().local_peer_id().clone().into())); self.authority_nodes.push((self.nodes, service, user_data, addr)); self.nodes += 1; } @@ -366,7 +355,8 @@ impl TestNet where let (service, user_data) = full(node_config).expect("Error creating test node service"); executor.spawn(service.clone().map_err(|_| ())); - let addr = addr.with(multiaddr::Protocol::P2p(service.network().local_peer_id().clone().into())); + let addr = addr + .with(multiaddr::Protocol::P2p(service.network().local_peer_id().clone().into())); self.full_nodes.push((self.nodes, service, user_data, addr)); self.nodes += 1; } @@ -385,7 +375,8 @@ impl TestNet where let service = light(node_config).expect("Error creating test node service"); executor.spawn(service.clone().map_err(|_| ())); - let addr = addr.with(multiaddr::Protocol::P2p(service.network().local_peer_id().clone().into())); + let addr = addr + 
.with(multiaddr::Protocol::P2p(service.network().local_peer_id().clone().into())); self.light_nodes.push((self.nodes, service, addr)); self.nodes += 1; } @@ -393,7 +384,10 @@ impl TestNet where } fn tempdir_with_prefix(prefix: &str) -> TempDir { - tempfile::Builder::new().prefix(prefix).tempdir().expect("Error creating test dir") + tempfile::Builder::new() + .prefix(prefix) + .tempdir() + .expect("Error creating test dir") } pub fn connectivity( @@ -420,8 +414,8 @@ pub fn connectivity( let mut network = TestNet::new( &temp, spec.clone(), - (0..NUM_FULL_NODES).map(|_| { |cfg| full_builder(cfg).map(|s| (s, ())) }), - (0..NUM_LIGHT_NODES).map(|_| { |cfg| light_builder(cfg) }), + (0..NUM_FULL_NODES).map(|_| |cfg| full_builder(cfg).map(|s| (s, ()))), + (0..NUM_LIGHT_NODES).map(|_| |cfg| light_builder(cfg)), // Note: this iterator is empty but we can't just use `iter::empty()`, otherwise // the type of the closure cannot be inferred. (0..0).map(|_| (String::new(), { |cfg| full_builder(cfg).map(|s| (s, ())) })), @@ -430,11 +424,15 @@ pub fn connectivity( info!("Checking star topology"); let first_address = network.full_nodes[0].3.clone(); for (_, service, _, _) in network.full_nodes.iter().skip(1) { - service.network().add_reserved_peer(first_address.to_string()) + service + .network() + .add_reserved_peer(first_address.to_string()) .expect("Error adding reserved peer"); } for (_, service, _) in network.light_nodes.iter() { - service.network().add_reserved_peer(first_address.to_string()) + service + .network() + .add_reserved_peer(first_address.to_string()) .expect("Error adding reserved peer"); } @@ -464,8 +462,8 @@ pub fn connectivity( let mut network = TestNet::new( &temp, spec, - (0..NUM_FULL_NODES).map(|_| { |cfg| full_builder(cfg).map(|s| (s, ())) }), - (0..NUM_LIGHT_NODES).map(|_| { |cfg| light_builder(cfg) }), + (0..NUM_FULL_NODES).map(|_| |cfg| full_builder(cfg).map(|s| (s, ()))), + (0..NUM_LIGHT_NODES).map(|_| |cfg| light_builder(cfg)), // Note: this iterator is 
empty but we can't just use `iter::empty()`, otherwise // the type of the closure cannot be inferred. (0..0).map(|_| (String::new(), { |cfg| full_builder(cfg).map(|s| (s, ())) })), @@ -477,14 +475,18 @@ pub fn connectivity( for i in 0..max_nodes { if i != 0 { if let Some((_, service, _, node_id)) = network.full_nodes.get(i) { - service.network().add_reserved_peer(address.to_string()) + service + .network() + .add_reserved_peer(address.to_string()) .expect("Error adding reserved peer"); address = node_id.clone(); } } if let Some((_, service, node_id)) = network.light_nodes.get(i) { - service.network().add_reserved_peer(address.to_string()) + service + .network() + .add_reserved_peer(address.to_string()) .expect("Error adding reserved peer"); address = node_id.clone(); } @@ -512,7 +514,7 @@ pub fn sync( full_builder: Fb, light_builder: Lb, mut make_block_and_import: B, - mut extrinsic_factory: ExF + mut extrinsic_factory: ExF, ) where Fb: Fn(Configuration) -> Result<(F, U), Error>, F: TestNetNode, @@ -532,8 +534,8 @@ pub fn sync( let mut network = TestNet::new( &temp, spec, - (0..NUM_FULL_NODES).map(|_| { |cfg| full_builder(cfg) }), - (0..NUM_LIGHT_NODES).map(|_| { |cfg| light_builder(cfg) }), + (0..NUM_FULL_NODES).map(|_| |cfg| full_builder(cfg)), + (0..NUM_LIGHT_NODES).map(|_| |cfg| light_builder(cfg)), // Note: this iterator is empty but we can't just use `iter::empty()`, otherwise // the type of the closure cannot be inferred. (0..0).map(|_| (String::new(), { |cfg| full_builder(cfg) })), @@ -542,7 +544,7 @@ pub fn sync( info!("Checking block sync"); let first_address = { let &mut (_, ref first_service, ref mut first_user_data, _) = &mut network.full_nodes[0]; - for i in 0 .. 
NUM_BLOCKS { + for i in 0..NUM_BLOCKS { if i % 128 == 0 { info!("Generating #{}", i + 1); } @@ -550,24 +552,29 @@ pub fn sync( make_block_and_import(&first_service, first_user_data); } let info = network.full_nodes[0].1.client().info(); - network.full_nodes[0].1.network().new_best_block_imported(info.best_hash, info.best_number); + network.full_nodes[0] + .1 + .network() + .new_best_block_imported(info.best_hash, info.best_number); network.full_nodes[0].3.clone() }; info!("Running sync"); for (_, service, _, _) in network.full_nodes.iter().skip(1) { - service.network().add_reserved_peer(first_address.to_string()) + service + .network() + .add_reserved_peer(first_address.to_string()) .expect("Error adding reserved peer"); } for (_, service, _) in network.light_nodes.iter() { - service.network().add_reserved_peer(first_address.to_string()) + service + .network() + .add_reserved_peer(first_address.to_string()) .expect("Error adding reserved peer"); } network.run_until_all_full( - |_index, service| - service.client().info().best_number == (NUM_BLOCKS as u32).into(), - |_index, service| - service.client().info().best_number == (NUM_BLOCKS as u32).into(), + |_index, service| service.client().info().best_number == (NUM_BLOCKS as u32).into(), + |_index, service| service.client().info().best_number == (NUM_BLOCKS as u32).into(), ); info!("Checking extrinsic propagation"); @@ -577,9 +584,12 @@ pub fn sync( let extrinsic = extrinsic_factory(&first_service, first_user_data); let source = sc_transaction_pool_api::TransactionSource::External; - futures::executor::block_on( - first_service.transaction_pool().submit_one(&best_block, source, extrinsic) - ).expect("failed to submit extrinsic"); + futures::executor::block_on(first_service.transaction_pool().submit_one( + &best_block, + source, + extrinsic, + )) + .expect("failed to submit extrinsic"); network.run_until_all_full( |_index, service| service.transaction_pool().ready().count() == 1, @@ -591,7 +601,7 @@ pub fn consensus( 
spec: GenericChainSpec, full_builder: Fb, light_builder: Lb, - authorities: impl IntoIterator + authorities: impl IntoIterator, ) where Fb: Fn(Configuration) -> Result, F: TestNetNode, @@ -607,54 +617,64 @@ pub fn consensus( let mut network = TestNet::new( &temp, spec, - (0..NUM_FULL_NODES / 2).map(|_| { |cfg| full_builder(cfg).map(|s| (s, ())) }), - (0..NUM_LIGHT_NODES / 2).map(|_| { |cfg| light_builder(cfg) }), - authorities.into_iter().map(|key| (key, { |cfg| full_builder(cfg).map(|s| (s, ())) })), + (0..NUM_FULL_NODES / 2).map(|_| |cfg| full_builder(cfg).map(|s| (s, ()))), + (0..NUM_LIGHT_NODES / 2).map(|_| |cfg| light_builder(cfg)), + authorities + .into_iter() + .map(|key| (key, { |cfg| full_builder(cfg).map(|s| (s, ())) })), 30600, ); info!("Checking consensus"); let first_address = network.authority_nodes[0].3.clone(); for (_, service, _, _) in network.full_nodes.iter() { - service.network().add_reserved_peer(first_address.to_string()) + service + .network() + .add_reserved_peer(first_address.to_string()) .expect("Error adding reserved peer"); } for (_, service, _) in network.light_nodes.iter() { - service.network().add_reserved_peer(first_address.to_string()) + service + .network() + .add_reserved_peer(first_address.to_string()) .expect("Error adding reserved peer"); } for (_, service, _, _) in network.authority_nodes.iter().skip(1) { - service.network().add_reserved_peer(first_address.to_string()) + service + .network() + .add_reserved_peer(first_address.to_string()) .expect("Error adding reserved peer"); } network.run_until_all_full( - |_index, service| - service.client().info().finalized_number >= (NUM_BLOCKS as u32 / 2).into(), - |_index, service| - service.client().info().best_number >= (NUM_BLOCKS as u32 / 2).into(), + |_index, service| { + service.client().info().finalized_number >= (NUM_BLOCKS as u32 / 2).into() + }, + |_index, service| service.client().info().best_number >= (NUM_BLOCKS as u32 / 2).into(), ); info!("Adding more peers"); 
network.insert_nodes( &temp, - (0..NUM_FULL_NODES / 2).map(|_| { |cfg| full_builder(cfg).map(|s| (s, ())) }), - (0..NUM_LIGHT_NODES / 2).map(|_| { |cfg| light_builder(cfg) }), + (0..NUM_FULL_NODES / 2).map(|_| |cfg| full_builder(cfg).map(|s| (s, ()))), + (0..NUM_LIGHT_NODES / 2).map(|_| |cfg| light_builder(cfg)), // Note: this iterator is empty but we can't just use `iter::empty()`, otherwise // the type of the closure cannot be inferred. (0..0).map(|_| (String::new(), { |cfg| full_builder(cfg).map(|s| (s, ())) })), ); for (_, service, _, _) in network.full_nodes.iter() { - service.network().add_reserved_peer(first_address.to_string()) + service + .network() + .add_reserved_peer(first_address.to_string()) .expect("Error adding reserved peer"); } for (_, service, _) in network.light_nodes.iter() { - service.network().add_reserved_peer(first_address.to_string()) + service + .network() + .add_reserved_peer(first_address.to_string()) .expect("Error adding reserved peer"); } network.run_until_all_full( - |_index, service| - service.client().info().finalized_number >= (NUM_BLOCKS as u32).into(), - |_index, service| - service.client().info().best_number >= (NUM_BLOCKS as u32).into(), + |_index, service| service.client().info().finalized_number >= (NUM_BLOCKS as u32).into(), + |_index, service| service.client().info().best_number >= (NUM_BLOCKS as u32).into(), ); } diff --git a/client/state-db/src/lib.rs b/client/state-db/src/lib.rs index 1340442061aba..cdff39895d229 100644 --- a/client/state-db/src/lib.rs +++ b/client/state-db/src/lib.rs @@ -44,15 +44,17 @@ mod pruning; #[cfg(test)] mod test; -use std::fmt; -use parking_lot::RwLock; use codec::Codec; -use std::collections::{HashMap, hash_map::Entry}; +use log::trace; use noncanonical::NonCanonicalOverlay; +use parity_util_mem::{malloc_size, MallocSizeOf}; +use parking_lot::RwLock; use pruning::RefWindow; -use log::trace; -use parity_util_mem::{MallocSizeOf, malloc_size}; -use sc_client_api::{StateDbMemoryInfo, 
MemorySize}; +use sc_client_api::{MemorySize, StateDbMemoryInfo}; +use std::{ + collections::{hash_map::Entry, HashMap}, + fmt, +}; const PRUNING_MODE: &[u8] = b"mode"; const PRUNING_MODE_ARCHIVE: &[u8] = b"archive"; @@ -63,8 +65,35 @@ const PRUNING_MODE_CONSTRAINED: &[u8] = b"constrained"; pub type DBValue = Vec; /// Basic set of requirements for the Block hash and node key types. -pub trait Hash: Send + Sync + Sized + Eq + PartialEq + Clone + Default + fmt::Debug + Codec + std::hash::Hash + 'static {} -impl Hash for T {} +pub trait Hash: + Send + + Sync + + Sized + + Eq + + PartialEq + + Clone + + Default + + fmt::Debug + + Codec + + std::hash::Hash + + 'static +{ +} +impl< + T: Send + + Sync + + Sized + + Eq + + PartialEq + + Clone + + Default + + fmt::Debug + + Codec + + std::hash::Hash + + 'static, + > Hash for T +{ +} /// Backend database trait. Read-only. pub trait MetaDb { @@ -168,17 +197,14 @@ pub enum PruningMode { impl PruningMode { /// Create a mode that keeps given number of blocks. pub fn keep_blocks(n: u32) -> PruningMode { - PruningMode::Constrained(Constraints { - max_blocks: Some(n), - max_mem: None, - }) + PruningMode::Constrained(Constraints { max_blocks: Some(n), max_mem: None }) } /// Is this an archive (either ArchiveAll or ArchiveCanonical) pruning mode? pub fn is_archive(&self) -> bool { match *self { PruningMode::ArchiveAll | PruningMode::ArchiveCanonical => true, - PruningMode::Constrained(_) => false + PruningMode::Constrained(_) => false, } } @@ -224,20 +250,12 @@ impl StateDbSync = NonCanonicalOverlay::new(db)?; let pruning: Option> = match mode { - PruningMode::Constrained(Constraints { - max_mem: Some(_), - .. - }) => unimplemented!(), + PruningMode::Constrained(Constraints { max_mem: Some(_), .. 
}) => unimplemented!(), PruningMode::Constrained(_) => Some(RefWindow::new(db, ref_counting)?), PruningMode::ArchiveAll | PruningMode::ArchiveCanonical => None, }; - Ok(StateDbSync { - mode, - non_canonical, - pruning, - pinned: Default::default(), - }) + Ok(StateDbSync { mode, non_canonical, pruning, pinned: Default::default() }) } fn check_meta(mode: &PruningMode, db: &D) -> Result<(), Error> { @@ -270,10 +288,7 @@ impl StateDbSync { changeset.deleted.clear(); // write changes immediately - Ok(CommitSet { - data: changeset, - meta, - }) + Ok(CommitSet { data: changeset, meta }) }, PruningMode::Constrained(_) | PruningMode::ArchiveCanonical => { let commit = self.non_canonical.insert(hash, number, parent_hash, changeset); @@ -281,7 +296,7 @@ impl StateDbSync StateDbSync { + Ok(()) => if self.mode == PruningMode::ArchiveCanonical { commit.data.deleted.clear(); - } - } + }, Err(e) => return Err(e), }; if let Some(ref mut pruning) = self.pruning { @@ -319,31 +333,30 @@ impl StateDbSync c).unwrap_or(true) { !self.non_canonical.have_block(hash) } else { - self.pruning - .as_ref() - .map_or( - false, - |pruning| number < pruning.pending() || !pruning.have_block(hash), - ) + self.pruning.as_ref().map_or(false, |pruning| { + number < pruning.pending() || !pruning.have_block(hash) + }) } - } + }, } } fn prune(&mut self, commit: &mut CommitSet) { - if let (&mut Some(ref mut pruning), &PruningMode::Constrained(ref constraints)) = (&mut self.pruning, &self.mode) { + if let (&mut Some(ref mut pruning), &PruningMode::Constrained(ref constraints)) = + (&mut self.pruning, &self.mode) + { loop { if pruning.window_size() <= constraints.max_blocks.unwrap_or(0) as u64 { - break; + break } if constraints.max_mem.map_or(false, |m| pruning.mem_used() > m) { - break; + break } let pinned = &self.pinned; if pruning.next_hash().map_or(false, |h| pinned.contains_key(&h)) { - break; + break } pruning.prune_one(commit); } @@ -355,23 +368,17 @@ impl StateDbSync Option> { match self.mode { - 
PruningMode::ArchiveAll => { - Some(CommitSet::default()) - }, - PruningMode::ArchiveCanonical | PruningMode::Constrained(_) => { - self.non_canonical.revert_one() - }, + PruningMode::ArchiveAll => Some(CommitSet::default()), + PruningMode::ArchiveCanonical | PruningMode::Constrained(_) => + self.non_canonical.revert_one(), } } fn remove(&mut self, hash: &BlockHash) -> Option> { match self.mode { - PruningMode::ArchiveAll => { - Some(CommitSet::default()) - }, - PruningMode::ArchiveCanonical | PruningMode::Constrained(_) => { - self.non_canonical.remove(hash) - }, + PruningMode::ArchiveAll => Some(CommitSet::default()), + PruningMode::ArchiveCanonical | PruningMode::Constrained(_) => + self.non_canonical.remove(hash), } } @@ -392,7 +399,7 @@ impl StateDbSync StateDbSync(&self, key: &Q, db: &D) -> Result, Error> + pub fn get( + &self, + key: &Q, + db: &D, + ) -> Result, Error> where Q: AsRef, Key: std::borrow::Borrow, Q: std::hash::Hash + Eq, { if let Some(value) = self.non_canonical.get(key) { - return Ok(Some(value)); + return Ok(Some(value)) } db.get(key.as_ref()).map_err(|e| Error::Db(e)) } @@ -469,9 +480,7 @@ impl StateDb Result, Error> { - Ok(StateDb { - db: RwLock::new(StateDbSync::new(mode, ref_counting, db)?) - }) + Ok(StateDb { db: RwLock::new(StateDbSync::new(mode, ref_counting, db)?) }) } /// Add a new non-canonical block. 
@@ -504,11 +513,15 @@ impl StateDb(&self, key: &Q, db: &D) -> Result, Error> - where - Q: AsRef, - Key: std::borrow::Borrow, - Q: std::hash::Hash + Eq, + pub fn get( + &self, + key: &Q, + db: &D, + ) -> Result, Error> + where + Q: AsRef, + Key: std::borrow::Borrow, + Q: std::hash::Hash + Eq, { self.db.read().get(key, db) } @@ -554,10 +567,12 @@ impl StateDb (TestDb, StateDb) { let mut db = make_db(&[91, 921, 922, 93, 94]); @@ -679,13 +694,13 @@ mod tests { let state_db = StateDb::new(PruningMode::ArchiveAll, false, &db).unwrap(); db.commit( &state_db - .insert_block::( - &H256::from_low_u64_be(0), - 0, - &H256::from_low_u64_be(0), - make_changeset(&[], &[]), - ) - .unwrap(), + .insert_block::( + &H256::from_low_u64_be(0), + 0, + &H256::from_low_u64_be(0), + make_changeset(&[], &[]), + ) + .unwrap(), ); let new_mode = PruningMode::Constrained(Constraints { max_blocks: Some(2), max_mem: None }); let state_db: Result, _> = StateDb::new(new_mode, false, &db); diff --git a/client/state-db/src/noncanonical.rs b/client/state-db/src/noncanonical.rs index de6d1bfcf8bb2..c9f04750335a6 100644 --- a/client/state-db/src/noncanonical.rs +++ b/client/state-db/src/noncanonical.rs @@ -22,11 +22,13 @@ //! All pending changes are kept in memory until next call to `apply_pending` or //! 
`revert_pending` -use std::fmt; -use std::collections::{HashMap, VecDeque, hash_map::Entry}; -use super::{Error, DBValue, ChangeSet, CommitSet, MetaDb, Hash, to_meta_key}; -use codec::{Encode, Decode}; +use super::{to_meta_key, ChangeSet, CommitSet, DBValue, Error, Hash, MetaDb}; +use codec::{Decode, Encode}; use log::trace; +use std::{ + collections::{hash_map::Entry, HashMap, VecDeque}, + fmt, +}; const NON_CANONICAL_JOURNAL: &[u8] = b"noncanonical_journal"; const LAST_CANONICAL: &[u8] = b"last_canonical"; @@ -69,10 +71,7 @@ impl OverlayLevel { } fn new() -> OverlayLevel { - OverlayLevel { - blocks: Vec::new(), - used_indicies: 0, - } + OverlayLevel { blocks: Vec::new(), used_indicies: 0 } } } @@ -98,7 +97,10 @@ struct BlockOverlay { deleted: Vec, } -fn insert_values(values: &mut HashMap, inserted: Vec<(Key, DBValue)>) { +fn insert_values( + values: &mut HashMap, + inserted: Vec<(Key, DBValue)>, +) { for (k, v) in inserted { debug_assert!(values.get(&k).map_or(true, |(_, value)| *value == v)); let (ref mut counter, _) = values.entry(k).or_insert_with(|| (0, v)); @@ -118,7 +120,7 @@ fn discard_values(values: &mut HashMap, inserted }, Entry::Vacant(_) => { debug_assert!(false, "Trying to discard missing value"); - } + }, } } } @@ -142,10 +144,12 @@ fn discard_descendants( }; let mut pinned_children = 0; if let Some(level) = first { - while let Some(i) = level.blocks.iter().position(|overlay| parents.get(&overlay.hash) - .expect("there is a parent entry for each entry in levels; qed") - == hash) - { + while let Some(i) = level.blocks.iter().position(|overlay| { + parents + .get(&overlay.hash) + .expect("there is a parent entry for each entry in levels; qed") == + hash + }) { let overlay = level.remove(i); let mut num_pinned = discard_descendants( &mut remainder, @@ -153,7 +157,7 @@ fn discard_descendants( parents, pinned, pinned_insertions, - &overlay.hash + &overlay.hash, ); if pinned.contains_key(&overlay.hash) { num_pinned += 1; @@ -175,10 +179,11 @@ fn 
discard_descendants( impl NonCanonicalOverlay { /// Creates a new instance. Does not expect any metadata to be present in the DB. pub fn new(db: &D) -> Result, Error> { - let last_canonicalized = db.get_meta(&to_meta_key(LAST_CANONICAL, &())) - .map_err(|e| Error::Db(e))?; + let last_canonicalized = + db.get_meta(&to_meta_key(LAST_CANONICAL, &())).map_err(|e| Error::Db(e))?; let last_canonicalized = last_canonicalized - .map(|buffer| <(BlockHash, u64)>::decode(&mut buffer.as_slice())).transpose()?; + .map(|buffer| <(BlockHash, u64)>::decode(&mut buffer.as_slice())) + .transpose()?; let mut levels = VecDeque::new(); let mut parents = HashMap::new(); let mut values = HashMap::new(); @@ -189,16 +194,17 @@ impl NonCanonicalOverlay { block += 1; loop { let mut level = OverlayLevel::new(); - for index in 0 .. MAX_BLOCKS_PER_LEVEL { + for index in 0..MAX_BLOCKS_PER_LEVEL { let journal_key = to_journal_key(block, index); if let Some(record) = db.get_meta(&journal_key).map_err(|e| Error::Db(e))? { - let record: JournalRecord = Decode::decode(&mut record.as_slice())?; + let record: JournalRecord = + Decode::decode(&mut record.as_slice())?; let inserted = record.inserted.iter().map(|(k, _)| k.clone()).collect(); let overlay = BlockOverlay { hash: record.hash.clone(), journal_index: index, journal_key, - inserted: inserted, + inserted, deleted: record.deleted, }; insert_values(&mut values, record.inserted); @@ -216,7 +222,7 @@ impl NonCanonicalOverlay { } } if level.blocks.is_empty() { - break; + break } levels.push_back(level); block += 1; @@ -231,38 +237,55 @@ impl NonCanonicalOverlay { pending_insertions: Default::default(), pinned: Default::default(), pinned_insertions: Default::default(), - values: values, + values, }) } /// Insert a new block into the overlay. If inserted on the second level or lover expects parent to be present in the window. 
- pub fn insert(&mut self, hash: &BlockHash, number: u64, parent_hash: &BlockHash, changeset: ChangeSet) -> Result, Error> { + pub fn insert( + &mut self, + hash: &BlockHash, + number: u64, + parent_hash: &BlockHash, + changeset: ChangeSet, + ) -> Result, Error> { let mut commit = CommitSet::default(); let front_block_number = self.front_block_number(); if self.levels.is_empty() && self.last_canonicalized.is_none() && number > 0 { // assume that parent was canonicalized let last_canonicalized = (parent_hash.clone(), number - 1); - commit.meta.inserted.push((to_meta_key(LAST_CANONICAL, &()), last_canonicalized.encode())); + commit + .meta + .inserted + .push((to_meta_key(LAST_CANONICAL, &()), last_canonicalized.encode())); self.last_canonicalized = Some(last_canonicalized); } else if self.last_canonicalized.is_some() { - if number < front_block_number || number >= front_block_number + self.levels.len() as u64 + 1 { + if number < front_block_number || + number >= front_block_number + self.levels.len() as u64 + 1 + { trace!(target: "state-db", "Failed to insert block {}, current is {} .. 
{})", number, front_block_number, front_block_number + self.levels.len() as u64, ); - return Err(Error::InvalidBlockNumber); + return Err(Error::InvalidBlockNumber) } // check for valid parent if inserting on second level or higher if number == front_block_number { - if !self.last_canonicalized.as_ref().map_or(false, |&(ref h, n)| h == parent_hash && n == number - 1) { - return Err(Error::InvalidParent); + if !self + .last_canonicalized + .as_ref() + .map_or(false, |&(ref h, n)| h == parent_hash && n == number - 1) + { + return Err(Error::InvalidParent) } } else if !self.parents.contains_key(&parent_hash) { - return Err(Error::InvalidParent); + return Err(Error::InvalidParent) } } - let level = if self.levels.is_empty() || number == front_block_number + self.levels.len() as u64 { + let level = if self.levels.is_empty() || + number == front_block_number + self.levels.len() as u64 + { self.levels.push_back(OverlayLevel::new()); self.levels.back_mut().expect("can't be empty after insertion; qed") } else { @@ -271,7 +294,7 @@ impl NonCanonicalOverlay { }; if level.blocks.len() >= MAX_BLOCKS_PER_LEVEL as usize { - return Err(Error::TooManySiblingBlocks); + return Err(Error::TooManySiblingBlocks) } let index = level.available_index(); @@ -282,7 +305,7 @@ impl NonCanonicalOverlay { hash: hash.clone(), journal_index: index, journal_key: journal_key.clone(), - inserted: inserted, + inserted, deleted: changeset.deleted.clone(), }; level.push(overlay); @@ -305,15 +328,24 @@ impl NonCanonicalOverlay { level_index: usize, discarded_journals: &mut Vec>, discarded_blocks: &mut Vec, - hash: &BlockHash + hash: &BlockHash, ) { if let Some(level) = self.levels.get(level_index) { level.blocks.iter().for_each(|overlay| { - let parent = self.parents.get(&overlay.hash).expect("there is a parent entry for each entry in levels; qed").clone(); + let parent = self + .parents + .get(&overlay.hash) + .expect("there is a parent entry for each entry in levels; qed") + .clone(); if parent == 
*hash { discarded_journals.push(overlay.journal_key.clone()); discarded_blocks.push(overlay.hash.clone()); - self.discard_journals(level_index + 1, discarded_journals, discarded_blocks, &overlay.hash); + self.discard_journals( + level_index + 1, + discarded_journals, + discarded_blocks, + &overlay.hash, + ); } }); } @@ -326,7 +358,8 @@ impl NonCanonicalOverlay { pub fn last_canonicalized_block_number(&self) -> Option { match self.last_canonicalized.as_ref().map(|&(_, n)| n) { Some(n) => Some(n + self.pending_canonicalizations.len() as u64), - None if !self.pending_canonicalizations.is_empty() => Some(self.pending_canonicalizations.len() as u64), + None if !self.pending_canonicalizations.is_empty() => + Some(self.pending_canonicalizations.len() as u64), _ => None, } } @@ -351,8 +384,12 @@ impl NonCanonicalOverlay { commit: &mut CommitSet, ) -> Result<(), Error> { trace!(target: "state-db", "Canonicalizing {:?}", hash); - let level = self.levels.get(self.pending_canonicalizations.len()).ok_or_else(|| Error::InvalidBlock)?; - let index = level.blocks + let level = self + .levels + .get(self.pending_canonicalizations.len()) + .ok_or_else(|| Error::InvalidBlock)?; + let index = level + .blocks .iter() .position(|overlay| overlay.hash == *hash) .ok_or_else(|| Error::InvalidBlock)?; @@ -365,7 +402,7 @@ impl NonCanonicalOverlay { self.pending_canonicalizations.len() + 1, &mut discarded_journals, &mut discarded_blocks, - &overlay.hash + &overlay.hash, ); } discarded_journals.push(overlay.journal_key.clone()); @@ -374,13 +411,25 @@ impl NonCanonicalOverlay { // get the one we need to canonicalize let overlay = &level.blocks[index]; - commit.data.inserted.extend(overlay.inserted.iter() - .map(|k| (k.clone(), self.values.get(k).expect("For each key in overlays there's a value in values").1.clone()))); + commit.data.inserted.extend(overlay.inserted.iter().map(|k| { + ( + k.clone(), + self.values + .get(k) + .expect("For each key in overlays there's a value in values") + .1 + 
.clone(), + ) + })); commit.data.deleted.extend(overlay.deleted.clone()); commit.meta.deleted.append(&mut discarded_journals); - let canonicalized = (hash.clone(), self.front_block_number() + self.pending_canonicalizations.len() as u64); - commit.meta.inserted.push((to_meta_key(LAST_CANONICAL, &()), canonicalized.encode())); + let canonicalized = + (hash.clone(), self.front_block_number() + self.pending_canonicalizations.len() as u64); + commit + .meta + .inserted + .push((to_meta_key(LAST_CANONICAL, &()), canonicalized.encode())); trace!(target: "state-db", "Discarding {} records", commit.meta.deleted.len()); self.pending_canonicalizations.push(hash.clone()); Ok(()) @@ -391,8 +440,10 @@ impl NonCanonicalOverlay { let count = self.pending_canonicalizations.len() as u64; for hash in self.pending_canonicalizations.drain(..) { trace!(target: "state-db", "Post canonicalizing {:?}", hash); - let level = self.levels.pop_front().expect("Hash validity is checked in `canonicalize`"); - let index = level.blocks + let level = + self.levels.pop_front().expect("Hash validity is checked in `canonicalize`"); + let index = level + .blocks .iter() .position(|overlay| overlay.hash == hash) .expect("Hash validity is checked in `canonicalize`"); @@ -415,7 +466,8 @@ impl NonCanonicalOverlay { pinned_children += 1; } if pinned_children != 0 { - self.pinned_insertions.insert(overlay.hash.clone(), (overlay.inserted, pinned_children)); + self.pinned_insertions + .insert(overlay.hash.clone(), (overlay.inserted, pinned_children)); } else { self.parents.remove(&overlay.hash); discard_values(&mut self.values, overlay.inserted); @@ -423,7 +475,10 @@ impl NonCanonicalOverlay { } } if let Some(hash) = last { - let last_canonicalized = (hash, self.last_canonicalized.as_ref().map(|(_, n)| n + count).unwrap_or(count - 1)); + let last_canonicalized = ( + hash, + self.last_canonicalized.as_ref().map(|(_, n)| n + count).unwrap_or(count - 1), + ); self.last_canonicalized = Some(last_canonicalized); } } 
@@ -435,15 +490,15 @@ impl NonCanonicalOverlay { Q: std::hash::Hash + Eq, { if let Some((_, value)) = self.values.get(&key) { - return Some(value.clone()); + return Some(value.clone()) } None } /// Check if the block is in the canonicalization queue. pub fn have_block(&self, hash: &BlockHash) -> bool { - (self.parents.contains_key(hash) || self.pending_insertions.contains(hash)) - && !self.pending_canonicalizations.contains(hash) + (self.parents.contains_key(hash) || self.pending_insertions.contains(hash)) && + !self.pending_canonicalizations.contains(hash) } /// Revert a single level. Returns commit set that deletes the journal or `None` if not possible. @@ -471,13 +526,13 @@ impl NonCanonicalOverlay { // Check that it does not have any children if (level_index != level_count - 1) && self.parents.values().any(|h| h == hash) { log::debug!(target: "state-db", "Trying to remove block {:?} with children", hash); - return None; + return None } let overlay = level.remove(index); commit.meta.deleted.push(overlay.journal_key); self.parents.remove(&overlay.hash); discard_values(&mut self.values, overlay.inserted); - break; + break } if self.levels.back().map_or(false, |l| l.blocks.is_empty()) { self.levels.pop_back(); @@ -494,9 +549,13 @@ impl NonCanonicalOverlay { for hash in self.pending_insertions.drain(..) { self.parents.remove(&hash); // find a level. When iterating insertions backwards the hash is always last in the level. - let level_index = - self.levels.iter().position(|level| - level.blocks.last().expect("Hash is added in `insert` in reverse order").hash == hash) + let level_index = self + .levels + .iter() + .position(|level| { + level.blocks.last().expect("Hash is added in `insert` in reverse order").hash == + hash + }) .expect("Hash is added in insert"); let overlay_index = self.levels[level_index].blocks.len() - 1; @@ -526,7 +585,7 @@ impl NonCanonicalOverlay { if self.pending_insertions.contains(hash) { // Pinning pending state is not implemented. 
Pending states // won't be pruned for quite some time anyway, so it's not a big deal. - return; + return } let refs = self.pinned.entry(hash.clone()).or_default(); if *refs == 0 { @@ -576,14 +635,17 @@ impl NonCanonicalOverlay { #[cfg(test)] mod tests { - use std::io; + use super::{to_journal_key, NonCanonicalOverlay}; + use crate::{ + test::{make_changeset, make_db}, + ChangeSet, CommitSet, MetaDb, + }; use sp_core::H256; - use super::{NonCanonicalOverlay, to_journal_key}; - use crate::{ChangeSet, CommitSet, MetaDb}; - use crate::test::{make_db, make_changeset}; + use std::io; fn contains(overlay: &NonCanonicalOverlay, key: u64) -> bool { - overlay.get(&H256::from_low_u64_be(key)) == Some(H256::from_low_u64_be(key).as_bytes().to_vec()) + overlay.get(&H256::from_low_u64_be(key)) == + Some(H256::from_low_u64_be(key).as_bytes().to_vec()) } #[test] @@ -611,7 +673,9 @@ mod tests { let h1 = H256::random(); let h2 = H256::random(); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); - overlay.insert::(&h1, 2, &H256::default(), ChangeSet::default()).unwrap(); + overlay + .insert::(&h1, 2, &H256::default(), ChangeSet::default()) + .unwrap(); overlay.insert::(&h2, 1, &h1, ChangeSet::default()).unwrap(); } @@ -622,7 +686,9 @@ mod tests { let h2 = H256::random(); let db = make_db(&[]); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); - overlay.insert::(&h1, 1, &H256::default(), ChangeSet::default()).unwrap(); + overlay + .insert::(&h1, 1, &H256::default(), ChangeSet::default()) + .unwrap(); overlay.insert::(&h2, 3, &h1, ChangeSet::default()).unwrap(); } @@ -633,8 +699,12 @@ mod tests { let h1 = H256::random(); let h2 = H256::random(); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); - overlay.insert::(&h1, 1, &H256::default(), ChangeSet::default()).unwrap(); - overlay.insert::(&h2, 2, &H256::default(), ChangeSet::default()).unwrap(); + overlay + .insert::(&h1, 1, &H256::default(), ChangeSet::default()) + .unwrap(); + overlay + .insert::(&h2, 2, 
&H256::default(), ChangeSet::default()) + .unwrap(); } #[test] @@ -644,7 +714,9 @@ mod tests { let h2 = H256::random(); let db = make_db(&[]); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); - overlay.insert::(&h1, 1, &H256::default(), ChangeSet::default()).unwrap(); + overlay + .insert::(&h1, 1, &H256::default(), ChangeSet::default()) + .unwrap(); let mut commit = CommitSet::default(); overlay.canonicalize::(&h2, &mut commit).unwrap(); } @@ -655,7 +727,9 @@ mod tests { let mut db = make_db(&[1, 2]); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); let changeset = make_changeset(&[3, 4], &[2]); - let insertion = overlay.insert::(&h1, 1, &H256::default(), changeset.clone()).unwrap(); + let insertion = overlay + .insert::(&h1, 1, &H256::default(), changeset.clone()) + .unwrap(); assert_eq!(insertion.data.inserted.len(), 0); assert_eq!(insertion.data.deleted.len(), 0); assert_eq!(insertion.meta.inserted.len(), 2); @@ -677,7 +751,11 @@ mod tests { let h2 = H256::random(); let mut db = make_db(&[1, 2]); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); - db.commit(&overlay.insert::(&h1, 10, &H256::default(), make_changeset(&[3, 4], &[2])).unwrap()); + db.commit( + &overlay + .insert::(&h1, 10, &H256::default(), make_changeset(&[3, 4], &[2])) + .unwrap(), + ); db.commit(&overlay.insert::(&h2, 11, &h1, make_changeset(&[5], &[3])).unwrap()); assert_eq!(db.meta.len(), 3); @@ -693,7 +771,11 @@ mod tests { let h2 = H256::random(); let mut db = make_db(&[1, 2]); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); - db.commit(&overlay.insert::(&h1, 10, &H256::default(), make_changeset(&[3, 4], &[2])).unwrap()); + db.commit( + &overlay + .insert::(&h1, 10, &H256::default(), make_changeset(&[3, 4], &[2])) + .unwrap(), + ); db.commit(&overlay.insert::(&h2, 11, &h1, make_changeset(&[5], &[3])).unwrap()); let mut commit = CommitSet::default(); overlay.canonicalize::(&h1, &mut commit).unwrap(); @@ -768,7 +850,11 @@ mod tests { let mut db = 
make_db(&[]); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); let changeset = make_changeset(&[], &[]); - db.commit(&overlay.insert::(&h1, 1, &H256::default(), changeset.clone()).unwrap()); + db.commit( + &overlay + .insert::(&h1, 1, &H256::default(), changeset.clone()) + .unwrap(), + ); db.commit(&overlay.insert::(&h2, 2, &h1, changeset.clone()).unwrap()); overlay.apply_pending(); let mut commit = CommitSet::default(); @@ -1035,14 +1121,18 @@ mod tests { let h21 = H256::random(); let mut db = make_db(&[]); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); - db.commit(&overlay.insert::(&root, 10, &H256::default(), make_changeset(&[], &[])).unwrap()); + db.commit( + &overlay + .insert::(&root, 10, &H256::default(), make_changeset(&[], &[])) + .unwrap(), + ); db.commit(&overlay.insert::(&h1, 11, &root, make_changeset(&[1], &[])).unwrap()); db.commit(&overlay.insert::(&h2, 11, &root, make_changeset(&[2], &[])).unwrap()); db.commit(&overlay.insert::(&h11, 12, &h1, make_changeset(&[11], &[])).unwrap()); db.commit(&overlay.insert::(&h21, 12, &h2, make_changeset(&[21], &[])).unwrap()); let mut commit = CommitSet::default(); overlay.canonicalize::(&root, &mut commit).unwrap(); - overlay.canonicalize::(&h2, &mut commit).unwrap(); // h11 should stay in the DB + overlay.canonicalize::(&h2, &mut commit).unwrap(); // h11 should stay in the DB db.commit(&commit); overlay.apply_pending(); assert_eq!(overlay.levels.len(), 1); @@ -1056,7 +1146,7 @@ mod tests { assert!(contains(&overlay, 21)); let mut commit = CommitSet::default(); - overlay.canonicalize::(&h21, &mut commit).unwrap(); // h11 should stay in the DB + overlay.canonicalize::(&h21, &mut commit).unwrap(); // h11 should stay in the DB db.commit(&commit); overlay.apply_pending(); assert!(!contains(&overlay, 21)); @@ -1073,19 +1163,23 @@ mod tests { let h21 = H256::random(); let mut db = make_db(&[]); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); - db.commit(&overlay.insert::(&root, 10, 
&H256::default(), make_changeset(&[], &[])).unwrap()); + db.commit( + &overlay + .insert::(&root, 10, &H256::default(), make_changeset(&[], &[])) + .unwrap(), + ); db.commit(&overlay.insert::(&h1, 11, &root, make_changeset(&[1], &[])).unwrap()); db.commit(&overlay.insert::(&h2, 11, &root, make_changeset(&[2], &[])).unwrap()); db.commit(&overlay.insert::(&h11, 12, &h1, make_changeset(&[11], &[])).unwrap()); db.commit(&overlay.insert::(&h21, 12, &h2, make_changeset(&[21], &[])).unwrap()); let mut commit = CommitSet::default(); overlay.canonicalize::(&root, &mut commit).unwrap(); - overlay.canonicalize::(&h2, &mut commit).unwrap(); // h11 should stay in the DB + overlay.canonicalize::(&h2, &mut commit).unwrap(); // h11 should stay in the DB db.commit(&commit); overlay.apply_pending(); // add another block at top level. It should reuse journal index 0 of previously discarded block - let h22 = H256::random(); + let h22 = H256::random(); db.commit(&overlay.insert::(&h22, 12, &h2, make_changeset(&[22], &[])).unwrap()); assert_eq!(overlay.levels[0].blocks[0].journal_index, 1); assert_eq!(overlay.levels[0].blocks[1].journal_index, 0); @@ -1106,7 +1200,11 @@ mod tests { let h21 = H256::random(); let mut db = make_db(&[]); let mut overlay = NonCanonicalOverlay::::new(&db).unwrap(); - db.commit(&overlay.insert::(&root, 10, &H256::default(), make_changeset(&[], &[])).unwrap()); + db.commit( + &overlay + .insert::(&root, 10, &H256::default(), make_changeset(&[], &[])) + .unwrap(), + ); db.commit(&overlay.insert::(&h1, 11, &root, make_changeset(&[1], &[])).unwrap()); db.commit(&overlay.insert::(&h2, 11, &root, make_changeset(&[2], &[])).unwrap()); db.commit(&overlay.insert::(&h11, 12, &h1, make_changeset(&[11], &[])).unwrap()); diff --git a/client/state-db/src/pruning.rs b/client/state-db/src/pruning.rs index 0c682d8954b13..bb0f7f7961446 100644 --- a/client/state-db/src/pruning.rs +++ b/client/state-db/src/pruning.rs @@ -24,10 +24,10 @@ //! the death list. //! 
The changes are journaled in the DB. -use std::collections::{HashMap, HashSet, VecDeque}; -use codec::{Encode, Decode}; -use crate::{CommitSet, Error, MetaDb, to_meta_key, Hash}; +use crate::{to_meta_key, CommitSet, Error, Hash, MetaDb}; +use codec::{Decode, Encode}; use log::{trace, warn}; +use std::collections::{HashMap, HashSet, VecDeque}; const LAST_PRUNED: &[u8] = b"last_pruned"; const PRUNING_JOURNAL: &[u8] = b"pruning_journal"; @@ -72,9 +72,11 @@ fn to_journal_key(block: u64) -> Vec { } impl RefWindow { - pub fn new(db: &D, count_insertions: bool) -> Result, Error> { - let last_pruned = db.get_meta(&to_meta_key(LAST_PRUNED, &())) - .map_err(|e| Error::Db(e))?; + pub fn new( + db: &D, + count_insertions: bool, + ) -> Result, Error> { + let last_pruned = db.get_meta(&to_meta_key(LAST_PRUNED, &())).map_err(|e| Error::Db(e))?; let pending_number: u64 = match last_pruned { Some(buffer) => u64::decode(&mut buffer.as_slice())? + 1, None => 0, @@ -83,7 +85,7 @@ impl RefWindow { let mut pruning = RefWindow { death_rows: Default::default(), death_index: Default::default(), - pending_number: pending_number, + pending_number, pending_canonicalizations: 0, pending_prunings: 0, count_insertions, @@ -94,9 +96,15 @@ impl RefWindow { let journal_key = to_journal_key(block); match db.get_meta(&journal_key).map_err(|e| Error::Db(e))? 
{ Some(record) => { - let record: JournalRecord = Decode::decode(&mut record.as_slice())?; + let record: JournalRecord = + Decode::decode(&mut record.as_slice())?; trace!(target: "state-db", "Pruning journal entry {} ({} inserted, {} deleted)", block, record.inserted.len(), record.deleted.len()); - pruning.import(&record.hash, journal_key, record.inserted.into_iter(), record.deleted); + pruning.import( + &record.hash, + journal_key, + record.inserted.into_iter(), + record.deleted, + ); }, None => break, } @@ -105,7 +113,13 @@ impl RefWindow { Ok(pruning) } - fn import>(&mut self, hash: &BlockHash, journal_key: Vec, inserted: I, deleted: Vec) { + fn import>( + &mut self, + hash: &BlockHash, + journal_key: Vec, + inserted: I, + deleted: Vec, + ) { if self.count_insertions { // remove all re-inserted keys from death rows for k in inserted { @@ -120,13 +134,11 @@ impl RefWindow { self.death_index.insert(k.clone(), imported_block); } } - self.death_rows.push_back( - DeathRow { - hash: hash.clone(), - deleted: deleted.into_iter().collect(), - journal_key: journal_key, - } - ); + self.death_rows.push_back(DeathRow { + hash: hash.clone(), + deleted: deleted.into_iter().collect(), + journal_key, + }); } pub fn window_size(&self) -> u64 { @@ -172,23 +184,27 @@ impl RefWindow { Default::default() }; let deleted = ::std::mem::take(&mut commit.data.deleted); - let journal_record = JournalRecord { - hash: hash.clone(), - inserted, - deleted, - }; + let journal_record = JournalRecord { hash: hash.clone(), inserted, deleted }; let block = self.pending_number + self.death_rows.len() as u64; let journal_key = to_journal_key(block); commit.meta.inserted.push((journal_key.clone(), journal_record.encode())); - self.import(&journal_record.hash, journal_key, journal_record.inserted.into_iter(), journal_record.deleted); + self.import( + &journal_record.hash, + journal_key, + journal_record.inserted.into_iter(), + journal_record.deleted, + ); self.pending_canonicalizations += 1; } /// 
Apply all pending changes pub fn apply_pending(&mut self) { self.pending_canonicalizations = 0; - for _ in 0 .. self.pending_prunings { - let pruned = self.death_rows.pop_front().expect("pending_prunings is always < death_rows.len()"); + for _ in 0..self.pending_prunings { + let pruned = self + .death_rows + .pop_front() + .expect("pending_prunings is always < death_rows.len()"); trace!(target: "state-db", "Applying pruning {:?} ({} deleted)", pruned.hash, pruned.deleted.len()); if self.count_insertions { for k in pruned.deleted.iter() { @@ -219,9 +235,11 @@ impl RefWindow { #[cfg(test)] mod tests { use super::RefWindow; + use crate::{ + test::{make_commit, make_db, TestDb}, + CommitSet, + }; use sp_core::H256; - use crate::CommitSet; - use crate::test::{make_db, make_commit, TestDb}; fn check_journal(pruning: &RefWindow, db: &TestDb) { let restored: RefWindow = RefWindow::new(db, pruning.count_insertions).unwrap(); @@ -419,5 +437,4 @@ mod tests { assert!(db.data_eq(&make_db(&[1, 3]))); assert!(pruning.death_index.is_empty()); } - } diff --git a/client/state-db/src/test.rs b/client/state-db/src/test.rs index e1bb6d01c37e4..ad5ce8e874cc7 100644 --- a/client/state-db/src/test.rs +++ b/client/state-db/src/test.rs @@ -18,9 +18,9 @@ //! 
Test utils -use std::collections::HashMap; +use crate::{ChangeSet, CommitSet, DBValue, MetaDb, NodeDb}; use sp_core::H256; -use crate::{DBValue, ChangeSet, CommitSet, MetaDb, NodeDb}; +use std::collections::HashMap; #[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct TestDb { @@ -67,30 +67,22 @@ pub fn make_changeset(inserted: &[u64], deleted: &[u64]) -> ChangeSet { ChangeSet { inserted: inserted .iter() - .map(|v| { - (H256::from_low_u64_be(*v), H256::from_low_u64_be(*v).as_bytes().to_vec()) - }) + .map(|v| (H256::from_low_u64_be(*v), H256::from_low_u64_be(*v).as_bytes().to_vec())) .collect(), deleted: deleted.iter().map(|v| H256::from_low_u64_be(*v)).collect(), } } pub fn make_commit(inserted: &[u64], deleted: &[u64]) -> CommitSet { - CommitSet { - data: make_changeset(inserted, deleted), - meta: ChangeSet::default(), - } + CommitSet { data: make_changeset(inserted, deleted), meta: ChangeSet::default() } } pub fn make_db(inserted: &[u64]) -> TestDb { TestDb { data: inserted .iter() - .map(|v| { - (H256::from_low_u64_be(*v), H256::from_low_u64_be(*v).as_bytes().to_vec()) - }) + .map(|v| (H256::from_low_u64_be(*v), H256::from_low_u64_be(*v).as_bytes().to_vec())) .collect(), meta: Default::default(), } } - diff --git a/client/sync-state-rpc/src/lib.rs b/client/sync-state-rpc/src/lib.rs index 4cb4955995540..e786a10cd4406 100644 --- a/client/sync-state-rpc/src/lib.rs +++ b/client/sync-state-rpc/src/lib.rs @@ -21,16 +21,19 @@ #![deny(unused_crate_dependencies)] -use sp_runtime::traits::{Block as BlockT, NumberFor}; use sp_blockchain::HeaderBackend; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, NumberFor}, +}; use std::sync::Arc; -use sp_runtime::generic::BlockId; use jsonrpc_derive::rpc; type SharedAuthoritySet = sc_finality_grandpa::SharedAuthoritySet<::Hash, NumberFor>; -type SharedEpochChanges = sc_consensus_epochs::SharedEpochChanges; +type SharedEpochChanges = + sc_consensus_epochs::SharedEpochChanges; #[derive(Debug, thiserror::Error)] 
#[allow(missing_docs)] @@ -51,11 +54,7 @@ impl From> for jsonrpc_core::Error { Error::JsonRpc(s) => s, _ => error.to_string(), }; - jsonrpc_core::Error { - message, - code: jsonrpc_core::ErrorCode::ServerError(1), - data: None, - } + jsonrpc_core::Error { message, code: jsonrpc_core::ErrorCode::ServerError(1), data: None } } } @@ -64,8 +63,7 @@ impl From> for jsonrpc_core::Error { pub trait SyncStateRpcApi { /// Returns the json-serialized chainspec running the node, with a sync state. #[rpc(name = "sync_state_genSyncSpec", returns = "jsonrpc_core::Value")] - fn system_gen_sync_spec(&self, raw: bool) - -> jsonrpc_core::Result; + fn system_gen_sync_spec(&self, raw: bool) -> jsonrpc_core::Result; } /// The handler for sync state RPC calls. @@ -78,9 +76,9 @@ pub struct SyncStateRpcHandler { } impl SyncStateRpcHandler - where - TBl: BlockT, - TCl: HeaderBackend + sc_client_api::AuxStore + 'static, +where + TBl: BlockT, + TCl: HeaderBackend + sc_client_api::AuxStore + 'static, { /// Create a new handler. pub fn new( @@ -90,21 +88,19 @@ impl SyncStateRpcHandler shared_epoch_changes: SharedEpochChanges, deny_unsafe: sc_rpc_api::DenyUnsafe, ) -> Self { - Self { - chain_spec, client, shared_authority_set, shared_epoch_changes, deny_unsafe, - } + Self { chain_spec, client, shared_authority_set, shared_epoch_changes, deny_unsafe } } fn build_sync_state(&self) -> Result, Error> { let finalized_hash = self.client.info().finalized_hash; - let finalized_header = self.client.header(BlockId::Hash(finalized_hash))? + let finalized_header = self + .client + .header(BlockId::Hash(finalized_hash))? .ok_or_else(|| sp_blockchain::Error::MissingHeader(finalized_hash.to_string()))?; - let finalized_block_weight = sc_consensus_babe::aux_schema::load_block_weight( - &*self.client, - finalized_hash, - )? - .ok_or_else(|| Error::LoadingBlockWeightFailed(finalized_hash))?; + let finalized_block_weight = + sc_consensus_babe::aux_schema::load_block_weight(&*self.client, finalized_hash)? 
+ .ok_or_else(|| Error::LoadingBlockWeightFailed(finalized_hash))?; Ok(sc_chain_spec::LightSyncState { finalized_block_header: finalized_header, @@ -116,26 +112,23 @@ impl SyncStateRpcHandler } impl SyncStateRpcApi for SyncStateRpcHandler - where - TBl: BlockT, - TCl: HeaderBackend + sc_client_api::AuxStore + 'static, +where + TBl: BlockT, + TCl: HeaderBackend + sc_client_api::AuxStore + 'static, { - fn system_gen_sync_spec(&self, raw: bool) - -> jsonrpc_core::Result - { + fn system_gen_sync_spec(&self, raw: bool) -> jsonrpc_core::Result { if let Err(err) = self.deny_unsafe.check_if_safe() { - return Err(err.into()); + return Err(err.into()) } let mut chain_spec = self.chain_spec.cloned_box(); - let sync_state = self.build_sync_state() - .map_err(map_error::>)?; + let sync_state = self.build_sync_state().map_err(map_error::>)?; chain_spec.set_light_sync_state(sync_state.to_serializable()); - let string = chain_spec.as_json(raw).map_err(map_error::)?; + let string = chain_spec.as_json(raw).map_err(map_error::)?; - serde_json::from_str(&string).map_err(|err| map_error::(err)) + serde_json::from_str(&string).map_err(|err| map_error::(err)) } } diff --git a/client/telemetry/src/endpoints.rs b/client/telemetry/src/endpoints.rs index fe4fa23974a64..62e6180311980 100644 --- a/client/telemetry/src/endpoints.rs +++ b/client/telemetry/src/endpoints.rs @@ -25,8 +25,7 @@ use serde::{Deserialize, Deserializer, Serialize}; /// The URL string can be either a URL or a multiaddress. #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash)] pub struct TelemetryEndpoints( - #[serde(deserialize_with = "url_or_multiaddr_deser")] - pub(crate) Vec<(Multiaddr, u8)>, + #[serde(deserialize_with = "url_or_multiaddr_deser")] pub(crate) Vec<(Multiaddr, u8)>, ); /// Custom deserializer for TelemetryEndpoints, used to convert urls or multiaddr to multiaddr. @@ -36,21 +35,15 @@ where { Vec::<(String, u8)>::deserialize(deserializer)? 
.iter() - .map(|e| { - url_to_multiaddr(&e.0) - .map_err(serde::de::Error::custom) - .map(|m| (m, e.1)) - }) + .map(|e| url_to_multiaddr(&e.0).map_err(serde::de::Error::custom).map(|m| (m, e.1))) .collect() } impl TelemetryEndpoints { /// Create a `TelemetryEndpoints` based on a list of `(String, u8)`. pub fn new(endpoints: Vec<(String, u8)>) -> Result { - let endpoints: Result, libp2p::multiaddr::Error> = endpoints - .iter() - .map(|e| Ok((url_to_multiaddr(&e.0)?, e.1))) - .collect(); + let endpoints: Result, libp2p::multiaddr::Error> = + endpoints.iter().map(|e| Ok((url_to_multiaddr(&e.0)?, e.1))).collect(); endpoints.map(Self) } } @@ -72,7 +65,7 @@ fn url_to_multiaddr(url: &str) -> Result { // If not, try the `ws://path/url` format. if let Ok(ma) = libp2p::multiaddr::from_url(url) { - return Ok(ma); + return Ok(ma) } // If we have no clue about the format of that string, assume that we were expecting a @@ -82,8 +75,7 @@ fn url_to_multiaddr(url: &str) -> Result { #[cfg(test)] mod tests { - use super::url_to_multiaddr; - use super::TelemetryEndpoints; + use super::{url_to_multiaddr, TelemetryEndpoints}; use libp2p::Multiaddr; #[test] @@ -96,10 +88,7 @@ mod tests { TelemetryEndpoints::new(endp.clone()).expect("Telemetry endpoint should be valid"); let mut res: Vec<(Multiaddr, u8)> = vec![]; for (a, b) in endp.iter() { - res.push(( - url_to_multiaddr(a).expect("provided url should be valid"), - *b, - )) + res.push((url_to_multiaddr(a).expect("provided url should be valid"), *b)) } assert_eq!(telem.0, res); } diff --git a/client/telemetry/src/lib.rs b/client/telemetry/src/lib.rs index 842d89d7edf07..5bd839e074952 100644 --- a/client/telemetry/src/lib.rs +++ b/client/telemetry/src/lib.rs @@ -41,8 +41,10 @@ use libp2p::Multiaddr; use log::{error, warn}; use parking_lot::Mutex; use serde::Serialize; -use std::collections::HashMap; -use std::sync::{atomic, Arc}; +use std::{ + collections::HashMap, + sync::{atomic, Arc}, +}; pub use libp2p::wasm_ext::ExtTransport; pub use 
log; @@ -191,11 +193,7 @@ impl TelemetryWorker { let input = input.expect("the stream is never closed; qed"); match input { - Register::Telemetry { - id, - endpoints, - connection_message, - } => { + Register::Telemetry { id, endpoints, connection_message } => { let endpoints = endpoints.0; let connection_message = match serde_json::to_value(&connection_message) { @@ -205,10 +203,10 @@ impl TelemetryWorker { obj.insert("id".to_string(), id.into()); obj.insert("payload".to_string(), value.into()); Some(obj) - } + }, Ok(_) => { unreachable!("ConnectionMessage always serialize to an object; qed") - } + }, Err(err) => { log::error!( target: "telemetry", @@ -216,7 +214,7 @@ impl TelemetryWorker { err, ); None - } + }, }; for (addr, verbosity) in endpoints { @@ -225,10 +223,7 @@ impl TelemetryWorker { "Initializing telemetry for: {:?}", addr, ); - node_map - .entry(id.clone()) - .or_default() - .push((verbosity, addr.clone())); + node_map.entry(id.clone()).or_default().push((verbosity, addr.clone())); let node = node_pool.entry(addr.clone()).or_insert_with(|| { Node::new(transport.clone(), addr.clone(), Vec::new(), Vec::new()) @@ -238,32 +233,27 @@ impl TelemetryWorker { pending_connection_notifications.retain(|(addr_b, connection_message)| { if *addr_b == addr { - node.telemetry_connection_notifier - .push(connection_message.clone()); + node.telemetry_connection_notifier.push(connection_message.clone()); false } else { true } }); } - } - Register::Notifier { - addresses, - connection_notifier, - } => { + }, + Register::Notifier { addresses, connection_notifier } => { for addr in addresses { // If the Node has been initialized, we directly push the connection_notifier. // Otherwise we push it to a queue that will be consumed when the connection // initializes, thus ensuring that the connection notifier will be sent to the // Node when it becomes available. 
if let Some(node) = node_pool.get_mut(&addr) { - node.telemetry_connection_notifier - .push(connection_notifier.clone()); + node.telemetry_connection_notifier.push(connection_notifier.clone()); } else { pending_connection_notifications.push((addr, connection_notifier.clone())); } } - } + }, } } @@ -297,12 +287,12 @@ impl TelemetryWorker { message, )), ); - return; + return }; for (node_max_verbosity, addr) in nodes { if verbosity > *node_max_verbosity { - continue; + continue } if let Some(node) = node_pool.get_mut(&addr) { @@ -376,11 +366,7 @@ impl Telemetry { let endpoints = self.endpoints.take().ok_or_else(|| Error::TelemetryAlreadyInitialized)?; self.register_sender - .unbounded_send(Register::Telemetry { - id: self.id, - endpoints, - connection_message, - }) + .unbounded_send(Register::Telemetry { id: self.id, endpoints, connection_message }) .map_err(|_| Error::TelemetryWorkerDropped) } @@ -407,12 +393,8 @@ pub struct TelemetryHandle { impl TelemetryHandle { /// Send telemetry messages. pub fn send_telemetry(&self, verbosity: VerbosityLevel, payload: TelemetryPayload) { - match self - .message_sender - .lock() - .try_send((self.id, verbosity, payload)) - { - Ok(()) => {} + match self.message_sender.lock().try_send((self.id, verbosity, payload)) { + Ok(()) => {}, Err(err) if err.is_full() => log::trace!( target: "telemetry", "Telemetry channel full.", @@ -461,15 +443,8 @@ impl TelemetryConnectionNotifier { #[derive(Debug)] enum Register { - Telemetry { - id: Id, - endpoints: TelemetryEndpoints, - connection_message: ConnectionMessage, - }, - Notifier { - addresses: Vec, - connection_notifier: ConnectionNotifierSender, - }, + Telemetry { id: Id, endpoints: TelemetryEndpoints, connection_message: ConnectionMessage }, + Notifier { addresses: Vec, connection_notifier: ConnectionNotifierSender }, } /// Report a telemetry. 
diff --git a/client/telemetry/src/node.rs b/client/telemetry/src/node.rs index 9ac7ada4e5d66..9e5738cb84773 100644 --- a/client/telemetry/src/node.rs +++ b/client/telemetry/src/node.rs @@ -17,12 +17,15 @@ // along with this program. If not, see . use crate::TelemetryPayload; -use futures::channel::mpsc; -use futures::prelude::*; -use libp2p::core::transport::Transport; -use libp2p::Multiaddr; +use futures::{channel::mpsc, prelude::*}; +use libp2p::{core::transport::Transport, Multiaddr}; use rand::Rng as _; -use std::{fmt, mem, pin::Pin, task::Context, task::Poll, time::Duration}; +use std::{ + fmt, mem, + pin::Pin, + task::{Context, Poll}, + time::Duration, +}; use wasm_timer::Delay; pub(crate) type ConnectionNotifierSender = mpsc::Sender<()>; @@ -122,7 +125,7 @@ where ) -> Poll> { while let Some(item) = conn.buf.pop() { if let Err(e) = conn.sink.start_send_unpin(item) { - return Poll::Ready(Err(e)); + return Poll::Ready(Err(e)) } futures::ready!(conn.sink.poll_ready_unpin(cx))?; } @@ -152,25 +155,25 @@ where Poll::Ready(Err(err)) => { log::warn!(target: "telemetry", "⚠️ Disconnected from {}: {:?}", self.addr, err); socket = NodeSocket::wait_reconnect(); - } + }, Poll::Ready(Ok(())) => { self.socket = NodeSocket::Connected(conn); - return Poll::Ready(Ok(())); - } + return Poll::Ready(Ok(())) + }, Poll::Pending => { self.socket = NodeSocket::Connected(conn); - return Poll::Pending; - } + return Poll::Pending + }, } - } + }, Poll::Ready(Err(err)) => { log::warn!(target: "telemetry", "⚠️ Disconnected from {}: {:?}", self.addr, err); socket = NodeSocket::wait_reconnect(); - } + }, Poll::Pending => { self.socket = NodeSocket::Connected(conn); - return Poll::Pending; - } + return Poll::Pending + }, }, NodeSocket::Dialing(mut s) => match Future::poll(Pin::new(&mut s), cx) { Poll::Ready(Ok(sink)) => { @@ -201,39 +204,39 @@ where err, ); None - } + }, }) .collect(); socket = NodeSocket::Connected(NodeSocketConnected { sink, buf }); - } + }, Poll::Pending => break 
NodeSocket::Dialing(s), Poll::Ready(Err(err)) => { log::warn!(target: "telemetry", "❌ Error while dialing {}: {:?}", self.addr, err); socket = NodeSocket::wait_reconnect(); - } + }, }, NodeSocket::ReconnectNow => match self.transport.clone().dial(self.addr.clone()) { Ok(d) => { log::trace!(target: "telemetry", "Re-dialing {}", self.addr); socket = NodeSocket::Dialing(d); - } + }, Err(err) => { log::warn!(target: "telemetry", "❌ Error while re-dialing {}: {:?}", self.addr, err); socket = NodeSocket::wait_reconnect(); - } + }, }, NodeSocket::WaitingReconnect(mut s) => { if let Poll::Ready(_) = Future::poll(Pin::new(&mut s), cx) { socket = NodeSocket::ReconnectNow; } else { - break NodeSocket::WaitingReconnect(s); + break NodeSocket::WaitingReconnect(s) } - } + }, NodeSocket::Poisoned => { log::error!(target: "telemetry", "‼️ Poisoned connection with {}", self.addr); - break NodeSocket::Poisoned; - } + break NodeSocket::Poisoned + }, } }; @@ -250,7 +253,7 @@ where Ok(data) => { log::trace!(target: "telemetry", "Sending {} bytes", data.len()); let _ = conn.sink.start_send_unpin(data); - } + }, Err(err) => log::debug!( target: "telemetry", "Could not serialize payload: {}", @@ -262,7 +265,7 @@ where // A new connection should be started as soon as possible. NodeSocket::ReconnectNow => log::trace!(target: "telemetry", "Reconnecting"), // Waiting before attempting to dial again. - NodeSocket::WaitingReconnect(_) => {} + NodeSocket::WaitingReconnect(_) => {}, // Temporary transition state. 
NodeSocket::Poisoned => log::trace!(target: "telemetry", "Poisoned"), } @@ -280,7 +283,7 @@ where log::trace!(target: "telemetry", "[poll_flush] Error: {:?}", e); self.socket = NodeSocket::wait_reconnect(); Poll::Ready(Ok(())) - } + }, Poll::Ready(Ok(())) => Poll::Ready(Ok(())), Poll::Pending => Poll::Pending, }, diff --git a/client/telemetry/src/transport.rs b/client/telemetry/src/transport.rs index 0aed263a7275d..2c309be0ffb68 100644 --- a/client/telemetry/src/transport.rs +++ b/client/telemetry/src/transport.rs @@ -26,9 +26,7 @@ use libp2p::{ core::transport::{timeout::TransportTimeout, OptionalTransport}, wasm_ext, Transport, }; -use std::io; -use std::pin::Pin; -use std::time::Duration; +use std::{io, pin::Pin, time::Duration}; /// Timeout after which a connection attempt is considered failed. Includes the WebSocket HTTP /// upgrading. @@ -111,7 +109,7 @@ impl Stream for StreamSink { Ok(n) => { buf.truncate(n); Poll::Ready(Some(Ok(buf))) - } + }, Err(err) => Poll::Ready(Some(Err(err))), } } @@ -126,7 +124,7 @@ impl StreamSink { log::error!(target: "telemetry", "Detected some internal buffering happening in the telemetry"); let err = io::Error::new(io::ErrorKind::Other, "Internal buffering detected"); - return Poll::Ready(Err(err)); + return Poll::Ready(Err(err)) } } diff --git a/client/tracing/proc-macro/src/lib.rs b/client/tracing/proc-macro/src/lib.rs index 7022d394ed954..e9a4f58705b41 100644 --- a/client/tracing/proc-macro/src/lib.rs +++ b/client/tracing/proc-macro/src/lib.rs @@ -113,7 +113,7 @@ pub fn prefix_logs_with(arg: TokenStream, item: TokenStream) -> TokenStream { "missing argument: name of the node. 
Example: sc_cli::prefix_logs_with()", ) .to_compile_error() - .into(); + .into() } let name = syn::parse_macro_input!(arg as Expr); @@ -124,12 +124,7 @@ pub fn prefix_logs_with(arg: TokenStream, item: TokenStream) -> TokenStream { Err(e) => return Error::new(Span::call_site(), e).to_compile_error().into(), }; - let ItemFn { - attrs, - vis, - sig, - block, - } = item_fn; + let ItemFn { attrs, vis, sig, block } = item_fn; (quote! { #(#attrs)* diff --git a/client/tracing/src/block/mod.rs b/client/tracing/src/block/mod.rs index cd5cf1052004b..a077f302f9abf 100644 --- a/client/tracing/src/block/mod.rs +++ b/client/tracing/src/block/mod.rs @@ -16,23 +16,34 @@ //! Utilities for tracing block execution -use std::{collections::HashMap, sync::{Arc, atomic::{AtomicU64, Ordering}}, time::Instant}; +use std::{ + collections::HashMap, + sync::{ + atomic::{AtomicU64, Ordering}, + Arc, + }, + time::Instant, +}; use parking_lot::Mutex; -use tracing::{Dispatch, dispatcher, Subscriber, Level, span::{Attributes, Record, Id}}; +use tracing::{ + dispatcher, + span::{Attributes, Id, Record}, + Dispatch, Level, Subscriber, +}; +use crate::{SpanDatum, TraceEvent, Values}; use sc_client_api::BlockBackend; use sc_rpc_server::RPC_MAX_PAYLOAD_DEFAULT; -use sp_api::{Core, Metadata, ProvideRuntimeApi, Encode}; +use sp_api::{Core, Encode, Metadata, ProvideRuntimeApi}; use sp_blockchain::HeaderBackend; +use sp_core::hexdisplay::HexDisplay; +use sp_rpc::tracing::{BlockTrace, Span, TraceBlockResponse, TraceError}; use sp_runtime::{ generic::BlockId, traits::{Block as BlockT, Header}, }; -use sp_rpc::tracing::{BlockTrace, Span, TraceError, TraceBlockResponse}; use sp_tracing::{WASM_NAME_KEY, WASM_TARGET_KEY, WASM_TRACE_IDENTIFIER}; -use sp_core::hexdisplay::HexDisplay; -use crate::{SpanDatum, TraceEvent, Values}; // Heuristic for average event size in bytes. 
const AVG_EVENT: usize = 600 * 8; @@ -53,7 +64,7 @@ const BASE_PAYLOAD: usize = 100; const DEFAULT_TARGETS: &str = "pallet,frame,state"; const TRACE_TARGET: &str = "block_trace"; // The name of a field required for all events. -const REQUIRED_EVENT_FIELD: &str = "method"; +const REQUIRED_EVENT_FIELD: &str = "method"; const MEGABYTE: usize = 1024 * 1024; /// Tracing Block Result type alias @@ -69,7 +80,7 @@ pub enum Error { #[error("Missing block component: {0}")] MissingBlockComponent(String), #[error("Dispatch error: {0}")] - Dispatch(String) + Dispatch(String), } struct BlockSubscriber { @@ -82,10 +93,7 @@ struct BlockSubscriber { impl BlockSubscriber { fn new(targets: &str) -> Self { let next_id = AtomicU64::new(1); - let mut targets: Vec<_> = targets - .split(',') - .map(crate::parse_target) - .collect(); + let mut targets: Vec<_> = targets.split(',').map(crate::parse_target).collect(); // Ensure that WASM traces are always enabled // Filtering happens when decoding the actual target / level targets.push((WASM_TRACE_IDENTIFIER.to_owned(), Level::TRACE)); @@ -101,11 +109,11 @@ impl BlockSubscriber { impl Subscriber for BlockSubscriber { fn enabled(&self, metadata: &tracing::Metadata<'_>) -> bool { if !metadata.is_span() && !metadata.fields().field(REQUIRED_EVENT_FIELD).is_some() { - return false; + return false } for (target, level) in &self.targets { if metadata.level() <= level && metadata.target().starts_with(target) { - return true; + return true } } false @@ -125,7 +133,7 @@ impl Subscriber for BlockSubscriber { line: attrs.metadata().line().unwrap_or(0), start_time: Instant::now(), values, - overall_time: Default::default() + overall_time: Default::default(), }; self.spans.lock().insert(id.clone(), span); @@ -158,11 +166,9 @@ impl Subscriber for BlockSubscriber { self.events.lock().push(trace_event); } - fn enter(&self, _id: &Id) { - } + fn enter(&self, _id: &Id) {} - fn exit(&self, _span: &Id) { - } + fn exit(&self, _span: &Id) {} } /// Holds a reference 
to the client in order to execute the given block. @@ -179,11 +185,15 @@ pub struct BlockExecutor { } impl BlockExecutor - where - Block: BlockT + 'static, - Client: HeaderBackend + BlockBackend + ProvideRuntimeApi - + Send + Sync + 'static, - Client::Api: Metadata, +where + Block: BlockT + 'static, + Client: HeaderBackend + + BlockBackend + + ProvideRuntimeApi + + Send + + Sync + + 'static, + Client::Api: Metadata, { /// Create a new `BlockExecutor` pub fn new( @@ -193,7 +203,8 @@ impl BlockExecutor storage_keys: Option, rpc_max_payload: Option, ) -> Self { - let rpc_max_payload = rpc_max_payload.map(|mb| mb.saturating_mul(MEGABYTE)) + let rpc_max_payload = rpc_max_payload + .map(|mb| mb.saturating_mul(MEGABYTE)) .unwrap_or(RPC_MAX_PAYLOAD_DEFAULT); Self { client, block, targets, storage_keys, rpc_max_payload } } @@ -205,10 +216,14 @@ impl BlockExecutor tracing::debug!(target: "state_tracing", "Tracing block: {}", self.block); // Prepare the block let id = BlockId::Hash(self.block); - let mut header = self.client.header(id) + let mut header = self + .client + .header(id) .map_err(|e| Error::InvalidBlockId(e))? .ok_or_else(|| Error::MissingBlockComponent("Header not found".to_string()))?; - let extrinsics = self.client.block_body(&id) + let extrinsics = self + .client + .block_body(&id) .map_err(|e| Error::InvalidBlockId(e))? 
.ok_or_else(|| Error::MissingBlockComponent("Extrinsics not found".to_string()))?; tracing::debug!(target: "state_tracing", "Found {} extrinsics", extrinsics.len()); @@ -231,45 +246,46 @@ impl BlockExecutor ); let _guard = dispatcher_span.enter(); if let Err(e) = dispatcher::with_default(&dispatch, || { - let span = tracing::info_span!( - target: TRACE_TARGET, - "trace_block", - ); + let span = tracing::info_span!(target: TRACE_TARGET, "trace_block",); let _enter = span.enter(); self.client.runtime_api().execute_block(&parent_id, block) }) { - return Err(Error::Dispatch(format!("Failed to collect traces and execute block: {:?}", e).to_string())); + return Err(Error::Dispatch( + format!("Failed to collect traces and execute block: {:?}", e).to_string(), + )) } } - let block_subscriber = dispatch.downcast_ref::() - .ok_or(Error::Dispatch( - "Cannot downcast Dispatch to BlockSubscriber after tracing block".to_string() + let block_subscriber = + dispatch.downcast_ref::().ok_or(Error::Dispatch( + "Cannot downcast Dispatch to BlockSubscriber after tracing block".to_string(), ))?; - let spans: Vec<_> = block_subscriber.spans + let spans: Vec<_> = block_subscriber + .spans .lock() .drain() // Patch wasm identifiers .filter_map(|(_, s)| patch_and_filter(SpanDatum::from(s), targets)) .collect(); - let events: Vec<_> = block_subscriber.events + let events: Vec<_> = block_subscriber + .events .lock() .drain(..) 
- .filter(|e| self.storage_keys - .as_ref() - .map(|keys| event_key_filter(e, keys)) - .unwrap_or(false) - ) + .filter(|e| { + self.storage_keys + .as_ref() + .map(|keys| event_key_filter(e, keys)) + .unwrap_or(false) + }) .map(|s| s.into()) .collect(); tracing::debug!(target: "state_tracing", "Captured {} spans and {} events", spans.len(), events.len()); let approx_payload_size = BASE_PAYLOAD + events.len() * AVG_EVENT + spans.len() * AVG_SPAN; let response = if approx_payload_size > self.rpc_max_payload { - TraceBlockResponse::TraceError(TraceError { - error: - "Payload likely exceeds max payload size of RPC server.".to_string() - }) + TraceBlockResponse::TraceError(TraceError { + error: "Payload likely exceeds max payload size of RPC server.".to_string(), + }) } else { TraceBlockResponse::BlockTrace(BlockTrace { block_hash: block_id_as_string(id), @@ -286,7 +302,10 @@ impl BlockExecutor } fn event_key_filter(event: &TraceEvent, storage_keys: &str) -> bool { - event.values.string_values.get("key") + event + .values + .string_values + .get("key") .and_then(|key| Some(check_target(storage_keys, key, &event.level))) .unwrap_or(false) } @@ -310,7 +329,7 @@ fn patch_and_filter(mut span: SpanDatum, targets: &str) -> Option { span.target = t; } if !check_target(targets, &span.target, &span.level) { - return None; + return None } } Some(span.into()) @@ -320,15 +339,15 @@ fn patch_and_filter(mut span: SpanDatum, targets: &str) -> Option { fn check_target(targets: &str, target: &str, level: &Level) -> bool { for (t, l) in targets.split(',').map(crate::parse_target) { if target.starts_with(t.as_str()) && level <= &l { - return true; + return true } } false } fn block_id_as_string(block_id: BlockId) -> String { - match block_id { + match block_id { BlockId::Hash(h) => HexDisplay::from(&h.encode()).to_string(), - BlockId::Number(n) => HexDisplay::from(&n.encode()).to_string() + BlockId::Number(n) => HexDisplay::from(&n.encode()).to_string(), } } diff --git 
a/client/tracing/src/lib.rs b/client/tracing/src/lib.rs index 9f02bb96e4f77..45240c37474bb 100644 --- a/client/tracing/src/lib.rs +++ b/client/tracing/src/lib.rs @@ -34,8 +34,10 @@ pub mod logging; use rustc_hash::FxHashMap; use serde::ser::{Serialize, SerializeMap, Serializer}; use sp_tracing::{WASM_NAME_KEY, WASM_TARGET_KEY, WASM_TRACE_IDENTIFIER}; -use std::fmt; -use std::time::{Duration, Instant}; +use std::{ + fmt, + time::{Duration, Instant}, +}; use tracing::{ event::Event, field::{Field, Visit}, @@ -43,8 +45,10 @@ use tracing::{ subscriber::Subscriber, Level, }; -use tracing_subscriber::layer::{Context, Layer}; -use tracing_subscriber::registry::LookupSpan; +use tracing_subscriber::{ + layer::{Context, Layer}, + registry::LookupSpan, +}; #[doc(hidden)] pub use tracing; @@ -137,10 +141,10 @@ impl Values { /// Checks if all individual collections are empty pub fn is_empty(&self) -> bool { - self.bool_values.is_empty() - && self.i64_values.is_empty() - && self.u64_values.is_empty() - && self.string_values.is_empty() + self.bool_values.is_empty() && + self.i64_values.is_empty() && + self.u64_values.is_empty() && + self.string_values.is_empty() } } @@ -162,15 +166,20 @@ impl Visit for Values { } fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) { - self.string_values.insert(field.name().to_string(), format!("{:?}", value).to_owned()); + self.string_values + .insert(field.name().to_string(), format!("{:?}", value).to_owned()); } } impl Serialize for Values { fn serialize(&self, serializer: S) -> Result - where S: Serializer, + where + S: Serializer, { - let len = self.bool_values.len() + self.i64_values.len() + self.u64_values.len() + self.string_values.len(); + let len = self.bool_values.len() + + self.i64_values.len() + + self.u64_values.len() + + self.string_values.len(); let mut map = serializer.serialize_map(Some(len))?; for (k, v) in &self.bool_values { map.serialize_entry(k, v)?; @@ -194,7 +203,12 @@ impl fmt::Display for Values { let 
i64_iter = self.i64_values.iter().map(|(k, v)| format!("{}={}", k, v)); let u64_iter = self.u64_values.iter().map(|(k, v)| format!("{}={}", k, v)); let string_iter = self.string_values.iter().map(|(k, v)| format!("{}=\"{}\"", k, v)); - let values = bool_iter.chain(i64_iter).chain(u64_iter).chain(string_iter).collect::>().join(", "); + let values = bool_iter + .chain(i64_iter) + .chain(u64_iter) + .chain(string_iter) + .collect::>() + .join(", "); write!(f, "{}", values) } } @@ -217,16 +231,13 @@ impl ProfilingLayer { /// wasm_tracing indicates whether to enable wasm traces pub fn new_with_handler(trace_handler: Box, targets: &str) -> Self { let targets: Vec<_> = targets.split(',').map(|s| parse_target(s)).collect(); - Self { - targets, - trace_handler, - } + Self { targets, trace_handler } } fn check_target(&self, target: &str, level: &Level) -> bool { for t in &self.targets { if target.starts_with(t.0.as_str()) && level <= &t.1 { - return true; + return true } } false @@ -245,8 +256,8 @@ fn parse_target(s: &str) -> (String, Level) { } else { (target, Level::TRACE) } - } - None => (s.to_string(), Level::TRACE) + }, + None => (s.to_string(), Level::TRACE), } } @@ -329,10 +340,7 @@ where if let Some(mut span_datum) = extensions.remove::() { span_datum.overall_time += end_time - span_datum.start_time; if span_datum.name == WASM_TRACE_IDENTIFIER { - span_datum - .values - .bool_values - .insert("wasm".to_owned(), true); + span_datum.values.bool_values.insert("wasm".to_owned(), true); if let Some(n) = span_datum.values.string_values.remove(WASM_NAME_KEY) { span_datum.name = n; } @@ -404,13 +412,11 @@ impl TraceHandler for LogTraceHandler { impl From for sp_rpc::tracing::Event { fn from(trace_event: TraceEvent) -> Self { - let data = sp_rpc::tracing::Data { - string_values: trace_event.values.string_values - }; + let data = sp_rpc::tracing::Data { string_values: trace_event.values.string_values }; sp_rpc::tracing::Event { target: trace_event.target, data, - parent_id: 
trace_event.parent_id.map(|id| id.into_u64()) + parent_id: trace_event.parent_id.map(|id| id.into_u64()), } } } @@ -453,18 +459,12 @@ mod tests { fn setup_subscriber() -> ( impl tracing::Subscriber + Send + Sync, Arc>>, - Arc>> + Arc>>, ) { let spans = Arc::new(Mutex::new(Vec::new())); let events = Arc::new(Mutex::new(Vec::new())); - let handler = TestTraceHandler { - spans: spans.clone(), - events: events.clone(), - }; - let layer = ProfilingLayer::new_with_handler( - Box::new(handler), - "test_target", - ); + let handler = TestTraceHandler { spans: spans.clone(), events: events.clone() }; + let layer = ProfilingLayer::new_with_handler(Box::new(handler), "test_target"); let subscriber = tracing_subscriber::fmt().with_writer(std::io::sink).finish().with(layer); (subscriber, spans, events) } @@ -542,7 +542,10 @@ mod tests { let _sub_guard = tracing::subscriber::set_default(sub); tracing::event!(target: "test_target", tracing::Level::INFO, "test_event"); let mut te1 = events.lock().remove(0); - assert_eq!(te1.values.string_values.remove(&"message".to_owned()).unwrap(), "test_event".to_owned()); + assert_eq!( + te1.values.string_values.remove(&"message".to_owned()).unwrap(), + "test_event".to_owned() + ); } #[test] @@ -596,7 +599,7 @@ mod tests { tracing::event!(target: "test_target", tracing::Level::INFO, "test_event1"); for msg in rx.recv() { if msg == false { - break; + break } } // guard2 and span2 dropped / exited diff --git a/client/tracing/src/logging/directives.rs b/client/tracing/src/logging/directives.rs index 0e6d949a41391..5aaeb4d17e7d3 100644 --- a/client/tracing/src/logging/directives.rs +++ b/client/tracing/src/logging/directives.rs @@ -63,12 +63,7 @@ pub fn reload_filter() -> Result<(), String> { let mut env_filter = EnvFilter::default(); if let Some(current_directives) = CURRENT_DIRECTIVES.get() { // Use join and then split in case any directives added together - for directive in current_directives - .lock() - .join(",") - .split(',') - .map(|d| 
d.parse()) - { + for directive in current_directives.lock().join(",").split(',').map(|d| d.parse()) { match directive { Ok(dir) => env_filter = env_filter.add_directive(dir), Err(invalid_directive) => { @@ -77,7 +72,7 @@ pub fn reload_filter() -> Result<(), String> { "Unable to parse directive while setting log filter: {:?}", invalid_directive, ); - } + }, } } } @@ -99,14 +94,9 @@ pub fn reload_filter() -> Result<(), String> { /// /// Includes substrate defaults and CLI supplied directives. pub fn reset_log_filter() -> Result<(), String> { - let directive = DEFAULT_DIRECTIVES - .get_or_init(|| Mutex::new(Vec::new())) - .lock() - .clone(); + let directive = DEFAULT_DIRECTIVES.get_or_init(|| Mutex::new(Vec::new())).lock().clone(); - *CURRENT_DIRECTIVES - .get_or_init(|| Mutex::new(Vec::new())) - .lock() = directive; + *CURRENT_DIRECTIVES.get_or_init(|| Mutex::new(Vec::new())).lock() = directive; reload_filter() } diff --git a/client/tracing/src/logging/event_format.rs b/client/tracing/src/logging/event_format.rs index 5e7a5246cca00..01847bc2b5cb4 100644 --- a/client/tracing/src/logging/event_format.rs +++ b/client/tracing/src/logging/event_format.rs @@ -79,11 +79,11 @@ where match current_thread.name() { Some(name) => { write!(writer, "{} ", FmtThreadName::new(name))?; - } + }, // fall-back to thread id when name is absent and ids are not enabled None => { write!(writer, "{:0>2?} ", current_thread.id())?; - } + }, } } @@ -98,7 +98,7 @@ where let exts = span.extensions(); if let Some(prefix) = exts.get::() { write!(writer, "{}", prefix.as_str())?; - break; + break } } } @@ -125,11 +125,11 @@ where writer: &mut dyn fmt::Write, event: &Event, ) -> fmt::Result { - if self.dup_to_stdout && ( - event.metadata().level() == &Level::INFO || - event.metadata().level() == &Level::WARN || - event.metadata().level() == &Level::ERROR - ) { + if self.dup_to_stdout && + (event.metadata().level() == &Level::INFO || + event.metadata().level() == &Level::WARN || + 
event.metadata().level() == &Level::ERROR) + { let mut out = String::new(); self.format_event_custom(CustomFmtContext::FmtContext(ctx), &mut out, event)?; writer.write_str(&out)?; @@ -271,9 +271,8 @@ where ) -> fmt::Result { match self { CustomFmtContext::FmtContext(fmt_ctx) => fmt_ctx.format_fields(writer, fields), - CustomFmtContext::ContextWithFormatFields(_ctx, fmt_fields) => { - fmt_fields.format_fields(writer, fields) - } + CustomFmtContext::ContextWithFormatFields(_ctx, fmt_fields) => + fmt_fields.format_fields(writer, fields), } } } @@ -321,11 +320,7 @@ impl<'a> fmt::Write for MaybeColorWriter<'a> { impl<'a> MaybeColorWriter<'a> { /// Creates a new instance. fn new(enable_color: bool, inner_writer: &'a mut dyn fmt::Write) -> Self { - Self { - enable_color, - inner_writer, - buffer: String::new(), - } + Self { enable_color, inner_writer, buffer: String::new() } } /// Write the buffered content to the `inner_writer`. diff --git a/client/tracing/src/logging/layers/console_log.rs b/client/tracing/src/logging/layers/console_log.rs index be992ae814235..77295110c8962 100644 --- a/client/tracing/src/logging/layers/console_log.rs +++ b/client/tracing/src/logging/layers/console_log.rs @@ -40,11 +40,7 @@ pub struct ConsoleLogLayer ConsoleLogLayer { /// Create a new [`ConsoleLogLayer`] using the `EventFormat` provided in argument. 
pub fn new(event_format: EventFormat) -> Self { - Self { - event_format, - fmt_fields: Default::default(), - _inner: std::marker::PhantomData, - } + Self { event_format, fmt_fields: Default::default(), _inner: std::marker::PhantomData } } } @@ -90,11 +86,11 @@ where Ok(buf) => { a = buf; &mut *a - } + }, _ => { b = String::new(); &mut b - } + }, }; if self.format_event(&ctx, &mut buf, event).is_ok() { diff --git a/client/tracing/src/logging/layers/prefix_layer.rs b/client/tracing/src/logging/layers/prefix_layer.rs index f35b59e8b9af9..2ad786a092233 100644 --- a/client/tracing/src/logging/layers/prefix_layer.rs +++ b/client/tracing/src/logging/layers/prefix_layer.rs @@ -42,12 +42,12 @@ where "newly created span with ID {:?} did not exist in the registry; this is a bug!", id ); - return; - } + return + }, }; if span.name() != PREFIX_LOG_SPAN { - return; + return } let mut extensions = span.extensions_mut(); diff --git a/client/tracing/src/logging/mod.rs b/client/tracing/src/logging/mod.rs index a3fa3a531b3e4..3d3b40a14d9fa 100644 --- a/client/tracing/src/logging/mod.rs +++ b/client/tracing/src/logging/mod.rs @@ -33,10 +33,9 @@ use std::io; use tracing::Subscriber; use tracing_subscriber::{ filter::LevelFilter, - fmt::time::ChronoLocal, fmt::{ - format, FormatEvent, FormatFields, Formatter, Layer as FmtLayer, MakeWriter, - SubscriberBuilder, + format, time::ChronoLocal, FormatEvent, FormatFields, Formatter, Layer as FmtLayer, + MakeWriter, SubscriberBuilder, }, layer::{self, SubscriberExt}, registry::LookupSpan, @@ -153,9 +152,7 @@ where let max_level_hint = Layer::::max_level_hint(&env_filter); let max_level = to_log_level_filter(max_level_hint); - tracing_log::LogTracer::builder() - .with_max_level(max_level) - .init()?; + tracing_log::LogTracer::builder().with_max_level(max_level).init()?; // If we're only logging `INFO` entries then we'll use a simplified logging format. 
let simple = match max_level_hint { @@ -276,23 +273,19 @@ impl LoggerBuilder { } } else { if self.log_reloading { - let subscriber = prepare_subscriber( - &self.directives, - None, - self.force_colors, - |builder| enable_log_reloading!(builder), - )?; + let subscriber = + prepare_subscriber(&self.directives, None, self.force_colors, |builder| { + enable_log_reloading!(builder) + })?; tracing::subscriber::set_global_default(subscriber)?; Ok(()) } else { - let subscriber = prepare_subscriber( - &self.directives, - None, - self.force_colors, - |builder| builder, - )?; + let subscriber = + prepare_subscriber(&self.directives, None, self.force_colors, |builder| { + builder + })?; tracing::subscriber::set_global_default(subscriber)?; @@ -410,12 +403,7 @@ mod tests { .unwrap(); let output = String::from_utf8(output.stderr).unwrap(); - assert!( - re.is_match(output.trim()), - "Expected:\n{}\nGot:\n{}", - re, - output, - ); + assert!(re.is_match(output.trim()), "Expected:\n{}\nGot:\n{}", re, output,); } /// This is not an actual test, it is used by the `prefix_in_log_lines` test. 
@@ -460,12 +448,7 @@ mod tests { .unwrap(); let output = String::from_utf8(output.stderr).unwrap(); - assert!( - re.is_match(output.trim()), - "Expected:\n{}\nGot:\n{}", - re, - output, - ); + assert!(re.is_match(output.trim()), "Expected:\n{}\nGot:\n{}", re, output,); } #[test] @@ -503,18 +486,9 @@ mod tests { eprint!("MAX_LOG_LEVEL={:?}", log::max_level()); } else { assert_eq!("MAX_LOG_LEVEL=Info", run_test(None, None)); - assert_eq!( - "MAX_LOG_LEVEL=Trace", - run_test(Some("test=trace".into()), None) - ); - assert_eq!( - "MAX_LOG_LEVEL=Debug", - run_test(Some("test=debug".into()), None) - ); - assert_eq!( - "MAX_LOG_LEVEL=Trace", - run_test(None, Some("test=info".into())) - ); + assert_eq!("MAX_LOG_LEVEL=Trace", run_test(Some("test=trace".into()), None)); + assert_eq!("MAX_LOG_LEVEL=Debug", run_test(Some("test=debug".into()), None)); + assert_eq!("MAX_LOG_LEVEL=Trace", run_test(None, Some("test=info".into()))); } } } diff --git a/client/transaction-pool/api/src/error.rs b/client/transaction-pool/api/src/error.rs index dd2d6401c1821..365d6a28d6b9b 100644 --- a/client/transaction-pool/api/src/error.rs +++ b/client/transaction-pool/api/src/error.rs @@ -18,7 +18,7 @@ //! Transaction pool errors. use sp_runtime::transaction_validity::{ - TransactionPriority as Priority, InvalidTransaction, UnknownTransaction, + InvalidTransaction, TransactionPriority as Priority, UnknownTransaction, }; /// Transaction pool result. @@ -52,7 +52,7 @@ pub enum Error { /// Transaction already in the pool. old: Priority, /// Transaction entering the pool. - new: Priority + new: Priority, }, #[error("Transaction with cyclic dependency")] CycleDetected, @@ -78,9 +78,13 @@ pub trait IntoPoolError: std::error::Error + Send + Sized { /// This implementation is optional and used only to /// provide more descriptive error messages for end users /// of RPC API. 
- fn into_pool_error(self) -> std::result::Result { Err(self) } + fn into_pool_error(self) -> std::result::Result { + Err(self) + } } impl IntoPoolError for Error { - fn into_pool_error(self) -> std::result::Result { Ok(self) } + fn into_pool_error(self) -> std::result::Result { + Ok(self) + } } diff --git a/client/transaction-pool/api/src/lib.rs b/client/transaction-pool/api/src/lib.rs index 198d67f71d1b5..8984001e1e54d 100644 --- a/client/transaction-pool/api/src/lib.rs +++ b/client/transaction-pool/api/src/lib.rs @@ -20,21 +20,16 @@ pub mod error; -use std::{ - collections::HashMap, - hash::Hash, - sync::Arc, - pin::Pin, -}; use futures::{Future, Stream}; use serde::{Deserialize, Serialize}; +pub use sp_runtime::transaction_validity::{ + TransactionLongevity, TransactionPriority, TransactionSource, TransactionTag, +}; use sp_runtime::{ generic::BlockId, traits::{Block as BlockT, Member, NumberFor}, }; -pub use sp_runtime::transaction_validity::{ - TransactionLongevity, TransactionPriority, TransactionTag, TransactionSource, -}; +use std::{collections::HashMap, hash::Hash, pin::Pin, sync::Arc}; /// Transaction pool status. #[derive(Debug)] @@ -130,7 +125,8 @@ pub enum TransactionStatus { } /// The stream of transaction events. -pub type TransactionStatusStream = dyn Stream> + Send + Unpin; +pub type TransactionStatusStream = + dyn Stream> + Send + Unpin; /// The import notification event stream. pub type ImportNotificationStream = futures::channel::mpsc::Receiver; @@ -147,7 +143,7 @@ pub type TransactionStatusStreamFor

= TransactionStatusStream, Bloc pub type LocalTransactionFor

= <

::Block as BlockT>::Extrinsic; /// Typical future type used in transaction pool api. -pub type PoolFuture = std::pin::Pin> + Send>>; +pub type PoolFuture = std::pin::Pin> + Send>>; /// In-pool transaction interface. /// @@ -184,7 +180,7 @@ pub trait TransactionPool: Send + Sync { /// In-pool transaction type. type InPoolTransaction: InPoolTransaction< Transaction = TransactionFor, - Hash = TxHash + Hash = TxHash, >; /// Error type. type Error: From + crate::error::IntoPoolError; @@ -220,11 +216,18 @@ pub trait TransactionPool: Send + Sync { /// /// Guarantees to return only when transaction pool got updated at `at` block. /// Guarantees to return immediately when `None` is passed. - fn ready_at(&self, at: NumberFor) - -> Pin> + Send>> + Send>>; + fn ready_at( + &self, + at: NumberFor, + ) -> Pin< + Box< + dyn Future> + Send>> + + Send, + >, + >; /// Get an iterator for ready transactions ordered by priority. - fn ready(&self) -> Box> + Send>; + fn ready(&self) -> Box> + Send>; // *** Block production /// Remove transactions identified by given hashes (and dependent transactions) from the pool. @@ -270,7 +273,7 @@ pub enum ChainEvent { /// Trait for transaction pool maintenance. pub trait MaintainedTransactionPool: TransactionPool { /// Perform maintenance - fn maintain(&self, event: ChainEvent) -> Pin + Send>>; + fn maintain(&self, event: ChainEvent) -> Pin + Send>>; } /// Transaction pool interface for submitting local transactions that exposes a @@ -306,11 +309,7 @@ pub trait OffchainSubmitTransaction: Send + Sync { /// Submit transaction. /// /// The transaction will end up in the pool and be propagated to others. 
- fn submit_at( - &self, - at: &BlockId, - extrinsic: Block::Extrinsic, - ) -> Result<(), ()>; + fn submit_at(&self, at: &BlockId, extrinsic: Block::Extrinsic) -> Result<(), ()>; } impl OffchainSubmitTransaction for TPool { diff --git a/client/transaction-pool/benches/basics.rs b/client/transaction-pool/benches/basics.rs index 6995491ea22c4..cf30a0200ad76 100644 --- a/client/transaction-pool/benches/basics.rs +++ b/client/transaction-pool/benches/basics.rs @@ -18,18 +18,22 @@ use criterion::{criterion_group, criterion_main, Criterion}; -use futures::{future::{ready, Ready}, executor::block_on}; -use sc_transaction_pool::{*, test_helpers::*}; use codec::Encode; -use substrate_test_runtime::{Block, Extrinsic, Transfer, H256, AccountId}; +use futures::{ + executor::block_on, + future::{ready, Ready}, +}; +use sc_transaction_pool::{test_helpers::*, *}; +use sp_core::blake2_256; use sp_runtime::{ - generic::BlockId, traits::Block as BlockT, + generic::BlockId, + traits::Block as BlockT, transaction_validity::{ - ValidTransaction, InvalidTransaction, TransactionValidity, TransactionTag as Tag, - TransactionSource, + InvalidTransaction, TransactionSource, TransactionTag as Tag, TransactionValidity, + ValidTransaction, }, }; -use sp_core::blake2_256; +use substrate_test_runtime::{AccountId, Block, Extrinsic, Transfer, H256}; #[derive(Clone, Debug, Default)] struct TestApi { @@ -65,25 +69,21 @@ impl ChainApi for TestApi { let from = uxt.transfer().from.clone(); match self.block_id_to_number(at) { - Ok(Some(num)) if num > 5 => { - return ready( - Ok(Err(InvalidTransaction::Stale.into())) - ) - }, + Ok(Some(num)) if num > 5 => return ready(Ok(Err(InvalidTransaction::Stale.into()))), _ => {}, } - ready( - Ok(Ok(ValidTransaction { - priority: 4, - requires: if nonce > 1 && self.nonce_dependant { - vec![to_tag(nonce-1, from.clone())] - } else { vec![] }, - provides: vec![to_tag(nonce, from)], - longevity: 10, - propagate: true, - })) - ) + ready(Ok(Ok(ValidTransaction { + 
priority: 4, + requires: if nonce > 1 && self.nonce_dependant { + vec![to_tag(nonce - 1, from.clone())] + } else { + vec![] + }, + provides: vec![to_tag(nonce, from)], + longevity: 10, + propagate: true, + }))) } fn block_id_to_number( @@ -156,11 +156,7 @@ fn bench_configured(pool: Pool, number: u64) { // Prune all transactions. let block_num = 6; - block_on(pool.prune_tags( - &BlockId::Number(block_num), - tags, - vec![], - )).expect("Prune failed"); + block_on(pool.prune_tags(&BlockId::Number(block_num), tags, vec![])).expect("Prune failed"); // pool is empty assert_eq!(pool.validated_pool().status().ready, 0); @@ -168,7 +164,6 @@ fn bench_configured(pool: Pool, number: u64) { } fn benchmark_main(c: &mut Criterion) { - c.bench_function("sequential 50 tx", |b| { b.iter(|| { bench_configured( diff --git a/client/transaction-pool/src/api.rs b/client/transaction-pool/src/api.rs index 2eb394f76d554..a7f2b462e946d 100644 --- a/client/transaction-pool/src/api.rs +++ b/client/transaction-pool/src/api.rs @@ -18,26 +18,35 @@ //! Chain api required for the transaction pool. 
-use std::{marker::PhantomData, pin::Pin, sync::Arc}; use codec::{Decode, Encode}; use futures::{ - channel::{oneshot, mpsc}, future::{Future, FutureExt, ready, Ready}, lock::Mutex, SinkExt, - StreamExt, + channel::{mpsc, oneshot}, + future::{ready, Future, FutureExt, Ready}, + lock::Mutex, + SinkExt, StreamExt, }; +use std::{marker::PhantomData, pin::Pin, sync::Arc}; +use prometheus_endpoint::Registry as PrometheusRegistry; use sc_client_api::{ - blockchain::HeaderBackend, light::{Fetcher, RemoteCallRequest, RemoteBodyRequest}, BlockBackend, + blockchain::HeaderBackend, + light::{Fetcher, RemoteBodyRequest, RemoteCallRequest}, + BlockBackend, }; +use sp_api::{ApiExt, ProvideRuntimeApi}; +use sp_core::traits::SpawnEssentialNamed; use sp_runtime::{ - generic::BlockId, traits::{self, Block as BlockT, BlockIdTo, Header as HeaderT, Hash as HashT}, - transaction_validity::{TransactionValidity, TransactionSource}, + generic::BlockId, + traits::{self, Block as BlockT, BlockIdTo, Hash as HashT, Header as HeaderT}, + transaction_validity::{TransactionSource, TransactionValidity}, }; use sp_transaction_pool::runtime_api::TaggedTransactionQueue; -use sp_api::{ProvideRuntimeApi, ApiExt}; -use prometheus_endpoint::Registry as PrometheusRegistry; -use sp_core::traits::SpawnEssentialNamed; -use crate::{metrics::{ApiMetrics, ApiMetricsExt}, error::{self, Error}, graph}; +use crate::{ + error::{self, Error}, + graph, + metrics::{ApiMetrics, ApiMetricsExt}, +}; /// The transaction pool logic for full client. 
pub struct FullChainApi { @@ -63,7 +72,8 @@ fn spawn_validation_pool_task( Some(task) => task.await, } } - }.boxed(), + } + .boxed(), ); } @@ -74,18 +84,16 @@ impl FullChainApi { prometheus: Option<&PrometheusRegistry>, spawner: &impl SpawnEssentialNamed, ) -> Self { - let metrics = prometheus.map(ApiMetrics::register).and_then(|r| { - match r { - Err(err) => { - log::warn!( - target: "txpool", - "Failed to register transaction pool api prometheus metrics: {:?}", - err, - ); - None - }, - Ok(api) => Some(Arc::new(api)) - } + let metrics = prometheus.map(ApiMetrics::register).and_then(|r| match r { + Err(err) => { + log::warn!( + target: "txpool", + "Failed to register transaction pool api prometheus metrics: {:?}", + err, + ); + None + }, + Ok(api) => Some(Arc::new(api)), }); let (sender, receiver) = mpsc::channel(0); @@ -106,15 +114,15 @@ impl FullChainApi { impl graph::ChainApi for FullChainApi where Block: BlockT, - Client: ProvideRuntimeApi + BlockBackend + BlockIdTo + HeaderBackend, + Client: + ProvideRuntimeApi + BlockBackend + BlockIdTo + HeaderBackend, Client: Send + Sync + 'static, Client::Api: TaggedTransactionQueue, { type Block = Block; type Error = error::Error; - type ValidationFuture = Pin< - Box> + Send> - >; + type ValidationFuture = + Pin> + Send>>; type BodyFuture = Ready::Extrinsic>>>>; fn block_body(&self, id: &BlockId) -> Self::BodyFuture { @@ -136,14 +144,16 @@ where async move { metrics.report(|m| m.validations_scheduled.inc()); - validation_pool.lock() + validation_pool + .lock() .await .send( async move { let res = validate_transaction_blocking(&*client, &at, source, uxt); let _ = tx.send(res); metrics.report(|m| m.validations_finished.inc()); - }.boxed() + } + .boxed(), ) .await .map_err(|e| Error::RuntimeApi(format!("Validation pool down: {:?}", e)))?; @@ -152,30 +162,33 @@ where Ok(r) => r, Err(_) => Err(Error::RuntimeApi("Validation was canceled".into())), } - }.boxed() + } + .boxed() } fn block_id_to_number( &self, at: &BlockId, ) -> 
error::Result>> { - self.client.to_number(at).map_err(|e| Error::BlockIdConversion(format!("{:?}", e))) + self.client + .to_number(at) + .map_err(|e| Error::BlockIdConversion(format!("{:?}", e))) } fn block_id_to_hash( &self, at: &BlockId, ) -> error::Result>> { - self.client.to_hash(at).map_err(|e| Error::BlockIdConversion(format!("{:?}", e))) + self.client + .to_hash(at) + .map_err(|e| Error::BlockIdConversion(format!("{:?}", e))) } fn hash_and_length( &self, ex: &graph::ExtrinsicFor, ) -> (graph::ExtrinsicHash, usize) { - ex.using_encoded(|x| { - ( as traits::Hash>::hash(x), x.len()) - }) + ex.using_encoded(|x| ( as traits::Hash>::hash(x), x.len())) } fn block_header( @@ -196,7 +209,8 @@ fn validate_transaction_blocking( ) -> error::Result where Block: BlockT, - Client: ProvideRuntimeApi + BlockBackend + BlockIdTo + HeaderBackend, + Client: + ProvideRuntimeApi + BlockBackend + BlockIdTo + HeaderBackend, Client: Send + Sync + 'static, Client::Api: TaggedTransactionQueue, { @@ -257,7 +271,8 @@ where impl FullChainApi where Block: BlockT, - Client: ProvideRuntimeApi + BlockBackend + BlockIdTo + HeaderBackend, + Client: + ProvideRuntimeApi + BlockBackend + BlockIdTo + HeaderBackend, Client: Send + Sync + 'static, Client::Api: TaggedTransactionQueue, { @@ -285,30 +300,25 @@ pub struct LightChainApi { impl LightChainApi { /// Create new transaction pool logic. 
pub fn new(client: Arc, fetcher: Arc) -> Self { - LightChainApi { - client, - fetcher, - _phantom: Default::default(), - } + LightChainApi { client, fetcher, _phantom: Default::default() } } } -impl graph::ChainApi for - LightChainApi where - Block: BlockT, - Client: HeaderBackend + 'static, - F: Fetcher + 'static, +impl graph::ChainApi for LightChainApi +where + Block: BlockT, + Client: HeaderBackend + 'static, + F: Fetcher + 'static, { type Block = Block; type Error = error::Error; - type ValidationFuture = Box< - dyn Future> + Send + Unpin - >; + type ValidationFuture = + Box> + Send + Unpin>; type BodyFuture = Pin< Box< dyn Future::Extrinsic>>>> - + Send - > + + Send, + >, >; fn validate_transaction( @@ -318,9 +328,11 @@ impl graph::ChainApi for uxt: graph::ExtrinsicFor, ) -> Self::ValidationFuture { let header_hash = self.client.expect_block_hash_from_id(at); - let header_and_hash = header_hash - .and_then(|header_hash| self.client.expect_header(BlockId::Hash(header_hash)) - .map(|header| (header_hash, header))); + let header_and_hash = header_hash.and_then(|header_hash| { + self.client + .expect_header(BlockId::Hash(header_hash)) + .map(|header| (header_hash, header)) + }); let (block, header) = match header_and_hash { Ok((header_hash, header)) => (header_hash, header), Err(err) => return Box::new(ready(Err(err.into()))), @@ -333,13 +345,12 @@ impl graph::ChainApi for retry_count: None, }); let remote_validation_request = remote_validation_request.then(move |result| { - let result: error::Result = result - .map_err(Into::into) - .and_then(|result| Decode::decode(&mut &result[..]) - .map_err(|e| Error::RuntimeApi( - format!("Error decoding tx validation result: {:?}", e) - )) - ); + let result: error::Result = + result.map_err(Into::into).and_then(|result| { + Decode::decode(&mut &result[..]).map_err(|e| { + Error::RuntimeApi(format!("Error decoding tx validation result: {:?}", e)) + }) + }); ready(result) }); @@ -364,30 +375,26 @@ impl graph::ChainApi for 
&self, ex: &graph::ExtrinsicFor, ) -> (graph::ExtrinsicHash, usize) { - ex.using_encoded(|x| { - (<::Hashing as HashT>::hash(x), x.len()) - }) + ex.using_encoded(|x| (<::Hashing as HashT>::hash(x), x.len())) } fn block_body(&self, id: &BlockId) -> Self::BodyFuture { - let header = self.client.header(*id) + let header = self + .client + .header(*id) .and_then(|h| h.ok_or_else(|| sp_blockchain::Error::UnknownBlock(format!("{}", id)))); let header = match header { Ok(header) => header, Err(err) => { log::warn!(target: "txpool", "Failed to query header: {:?}", err); - return Box::pin(ready(Ok(None))); - } + return Box::pin(ready(Ok(None))) + }, }; let fetcher = self.fetcher.clone(); async move { - let transactions = fetcher.remote_body({ - RemoteBodyRequest { - header, - retry_count: None, - } - }) + let transactions = fetcher + .remote_body({ RemoteBodyRequest { header, retry_count: None } }) .await .unwrap_or_else(|e| { log::warn!(target: "txpool", "Failed to fetch block body: {:?}", e); @@ -395,7 +402,8 @@ impl graph::ChainApi for }); Ok(Some(transactions)) - }.boxed() + } + .boxed() } fn block_header( diff --git a/client/transaction-pool/src/error.rs b/client/transaction-pool/src/error.rs index 23afab0c74a7b..b14e0569f0830 100644 --- a/client/transaction-pool/src/error.rs +++ b/client/transaction-pool/src/error.rs @@ -40,7 +40,6 @@ pub enum Error { RuntimeApi(String), } - impl sc_transaction_pool_api::error::IntoPoolError for Error { fn into_pool_error(self) -> std::result::Result { match self { diff --git a/client/transaction-pool/src/graph/base_pool.rs b/client/transaction-pool/src/graph/base_pool.rs index db5927ea0c998..86433bea49285 100644 --- a/client/transaction-pool/src/graph/base_pool.rs +++ b/client/transaction-pool/src/graph/base_pool.rs @@ -20,24 +20,19 @@ //! //! For a more full-featured pool, have a look at the `pool` module. 
-use std::{ - collections::HashSet, - fmt, - hash, - sync::Arc, -}; +use std::{collections::HashSet, fmt, hash, sync::Arc}; -use log::{trace, debug, warn}; +use log::{debug, trace, warn}; +use sc_transaction_pool_api::{error, InPoolTransaction, PoolStatus}; use serde::Serialize; use sp_core::hexdisplay::HexDisplay; -use sp_runtime::traits::Member; -use sp_runtime::transaction_validity::{ - TransactionTag as Tag, - TransactionLongevity as Longevity, - TransactionPriority as Priority, - TransactionSource as Source, +use sp_runtime::{ + traits::Member, + transaction_validity::{ + TransactionLongevity as Longevity, TransactionPriority as Priority, + TransactionSource as Source, TransactionTag as Tag, + }, }; -use sc_transaction_pool_api::{error, PoolStatus, InPoolTransaction}; use super::{ future::{FutureTransactions, WaitingTransaction}, @@ -62,7 +57,7 @@ pub enum Imported { Future { /// Hash of transaction that was successfully imported. hash: Hash, - } + }, } impl Imported { @@ -133,7 +128,7 @@ impl InPoolTransaction for Transaction { &self.priority } - fn longevity(&self) ->&Longevity { + fn longevity(&self) -> &Longevity { &self.valid_till } @@ -171,13 +166,17 @@ impl Transaction { } } -impl fmt::Debug for Transaction where +impl fmt::Debug for Transaction +where Hash: fmt::Debug, Extrinsic: fmt::Debug, { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let join_tags = |tags: &[Tag]| { - tags.iter().map(|tag| HexDisplay::from(tag).to_string()).collect::>().join(", ") + tags.iter() + .map(|tag| HexDisplay::from(tag).to_string()) + .collect::>() + .join(", ") }; write!(fmt, "Transaction {{ ")?; @@ -245,7 +244,10 @@ impl BasePool(&mut self, closure: impl FnOnce(&mut Self, bool) -> T) -> T { + pub(crate) fn with_futures_enabled( + &mut self, + closure: impl FnOnce(&mut Self, bool) -> T, + ) -> T { let previous = self.reject_future_transactions; self.reject_future_transactions = false; let return_value = closure(self, previous); @@ -265,19 +267,12 @@ impl 
BasePool, - ) -> error::Result> { + pub fn import(&mut self, tx: Transaction) -> error::Result> { if self.is_imported(&tx.hash) { return Err(error::Error::AlreadyImported(Box::new(tx.hash))) } - let tx = WaitingTransaction::new( - tx, - self.ready.provided_tags(), - &self.recently_pruned, - ); + let tx = WaitingTransaction::new(tx, self.ready.provided_tags(), &self.recently_pruned); trace!(target: "txpool", "[{:?}] {:?}", tx.transaction.hash, tx); debug!( target: "txpool", @@ -289,12 +284,12 @@ impl BasePool BasePool) -> error::Result> { + fn import_to_ready( + &mut self, + tx: WaitingTransaction, + ) -> error::Result> { let hash = tx.transaction.hash.clone(); let mut promoted = vec![]; let mut failed = vec![]; @@ -328,12 +326,13 @@ impl BasePool if first { - debug!(target: "txpool", "[{:?}] Error importing: {:?}", current_hash, e); - return Err(e) - } else { - failed.push(current_hash); - }, + Err(e) => + if first { + debug!(target: "txpool", "[{:?}] Error importing: {:?}", current_hash, e); + return Err(e) + } else { + failed.push(current_hash); + }, } first = false; } @@ -352,21 +351,16 @@ impl BasePool impl Iterator>> { + pub fn ready(&self) -> impl Iterator>> { self.ready.get() } /// Returns an iterator over future transactions in the pool. 
- pub fn futures(&self) -> impl Iterator> { + pub fn futures(&self) -> impl Iterator> { self.future.all() } @@ -378,11 +372,7 @@ impl BasePool BasePool Vec>> { + pub fn enforce_limits( + &mut self, + ready: &Limit, + future: &Limit, + ) -> Vec>> { let mut removed = vec![]; while ready.is_exceeded(self.ready.len(), self.ready.bytes()) { // find the worst transaction - let minimal = self.ready - .fold(|minimal, current| { - let transaction = ¤t.transaction; - match minimal { - None => Some(transaction.clone()), - Some(ref tx) if tx.insertion_id > transaction.insertion_id => { - Some(transaction.clone()) - }, - other => other, - } - }); + let minimal = self.ready.fold(|minimal, current| { + let transaction = ¤t.transaction; + match minimal { + None => Some(transaction.clone()), + Some(ref tx) if tx.insertion_id > transaction.insertion_id => + Some(transaction.clone()), + other => other, + } + }); if let Some(minimal) = minimal { removed.append(&mut self.remove_subtree(&[minimal.transaction.hash.clone()])) } else { - break; + break } } while future.is_exceeded(self.future.len(), self.future.bytes()) { // find the worst transaction - let minimal = self.future - .fold(|minimal, current| { - match minimal { - None => Some(current.clone()), - Some(ref tx) if tx.imported_at > current.imported_at => { - Some(current.clone()) - }, - other => other, - } - }); + let minimal = self.future.fold(|minimal, current| match minimal { + None => Some(current.clone()), + Some(ref tx) if tx.imported_at > current.imported_at => Some(current.clone()), + other => other, + }); if let Some(minimal) = minimal { removed.append(&mut self.remove_subtree(&[minimal.transaction.hash.clone()])) } else { - break; + break } } @@ -467,7 +454,7 @@ impl BasePool) -> PruneStatus { + pub fn prune_tags(&mut self, tags: impl IntoIterator) -> PruneStatus { let mut to_import = vec![]; let mut pruned = vec![]; let recently_pruned = &mut self.recently_pruned[self.recently_pruned_index]; @@ -496,11 +483,7 @@ impl 
BasePool> = Transaction { + const DEFAULT_TX: Transaction> = Transaction { data: vec![], bytes: 1, hash: 1u64, @@ -558,11 +541,8 @@ mod tests { let mut pool = pool(); // when - pool.import(Transaction { - data: vec![1u8], - provides: vec![vec![1]], - .. DEFAULT_TX.clone() - }).unwrap(); + pool.import(Transaction { data: vec![1u8], provides: vec![vec![1]], ..DEFAULT_TX.clone() }) + .unwrap(); // then assert_eq!(pool.ready().count(), 1); @@ -575,16 +555,10 @@ mod tests { let mut pool = pool(); // when - pool.import(Transaction { - data: vec![1u8], - provides: vec![vec![1]], - .. DEFAULT_TX.clone() - }).unwrap(); - pool.import(Transaction { - data: vec![1u8], - provides: vec![vec![1]], - .. DEFAULT_TX.clone() - }).unwrap_err(); + pool.import(Transaction { data: vec![1u8], provides: vec![vec![1]], ..DEFAULT_TX.clone() }) + .unwrap(); + pool.import(Transaction { data: vec![1u8], provides: vec![vec![1]], ..DEFAULT_TX.clone() }) + .unwrap_err(); // then assert_eq!(pool.ready().count(), 1); @@ -601,16 +575,18 @@ mod tests { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); assert_eq!(pool.ready().count(), 0); assert_eq!(pool.ready.len(), 0); pool.import(Transaction { data: vec![2u8], hash: 2, provides: vec![vec![0]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); // then assert_eq!(pool.ready().count(), 2); @@ -627,37 +603,43 @@ mod tests { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![3u8], hash: 3, requires: vec![vec![2]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![2u8], hash: 2, requires: vec![vec![1]], provides: vec![vec![3], vec![2]], - .. 
DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![4u8], hash: 4, priority: 1_000u64, requires: vec![vec![3], vec![4]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); assert_eq!(pool.ready().count(), 0); assert_eq!(pool.ready.len(), 0); - let res = pool.import(Transaction { - data: vec![5u8], - hash: 5, - provides: vec![vec![0], vec![4]], - .. DEFAULT_TX.clone() - }).unwrap(); + let res = pool + .import(Transaction { + data: vec![5u8], + hash: 5, + provides: vec![vec![0], vec![4]], + ..DEFAULT_TX.clone() + }) + .unwrap(); // then let mut it = pool.ready().into_iter().map(|tx| tx.data[0]); @@ -668,12 +650,15 @@ mod tests { assert_eq!(it.next(), Some(4)); assert_eq!(it.next(), Some(3)); assert_eq!(it.next(), None); - assert_eq!(res, Imported::Ready { - hash: 5, - promoted: vec![1, 2, 3, 4], - failed: vec![], - removed: vec![], - }); + assert_eq!( + res, + Imported::Ready { + hash: 5, + promoted: vec![1, 2, 3, 4], + failed: vec![], + removed: vec![], + } + ); } #[test] @@ -684,15 +669,17 @@ mod tests { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![3u8], hash: 3, requires: vec![vec![1]], provides: vec![vec![2]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); assert_eq!(pool.ready().count(), 0); assert_eq!(pool.ready.len(), 0); @@ -702,8 +689,9 @@ mod tests { hash: 2, requires: vec![vec![2]], provides: vec![vec![0]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); // then { @@ -714,24 +702,24 @@ mod tests { assert_eq!(pool.future.len(), 3); // let's close the cycle with one additional transaction - let res = pool.import(Transaction { - data: vec![4u8], - hash: 4, - priority: 50u64, - provides: vec![vec![0]], - .. 
DEFAULT_TX.clone() - }).unwrap(); + let res = pool + .import(Transaction { + data: vec![4u8], + hash: 4, + priority: 50u64, + provides: vec![vec![0]], + ..DEFAULT_TX.clone() + }) + .unwrap(); let mut it = pool.ready().into_iter().map(|tx| tx.data[0]); assert_eq!(it.next(), Some(4)); assert_eq!(it.next(), Some(1)); assert_eq!(it.next(), Some(3)); assert_eq!(it.next(), None); - assert_eq!(res, Imported::Ready { - hash: 4, - promoted: vec![1, 3], - failed: vec![2], - removed: vec![], - }); + assert_eq!( + res, + Imported::Ready { hash: 4, promoted: vec![1, 3], failed: vec![2], removed: vec![] } + ); assert_eq!(pool.future.len(), 0); } @@ -743,15 +731,17 @@ mod tests { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![3u8], hash: 3, requires: vec![vec![1]], provides: vec![vec![2]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); assert_eq!(pool.ready().count(), 0); assert_eq!(pool.ready.len(), 0); @@ -761,8 +751,9 @@ mod tests { hash: 2, requires: vec![vec![2]], provides: vec![vec![0]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); // then { @@ -773,13 +764,15 @@ mod tests { assert_eq!(pool.future.len(), 3); // let's close the cycle with one additional transaction - let err = pool.import(Transaction { - data: vec![4u8], - hash: 4, - priority: 1u64, // lower priority than Tx(2) - provides: vec![vec![0]], - .. DEFAULT_TX.clone() - }).unwrap_err(); + let err = pool + .import(Transaction { + data: vec![4u8], + hash: 4, + priority: 1u64, // lower priority than Tx(2) + provides: vec![vec![0]], + ..DEFAULT_TX.clone() + }) + .unwrap_err(); let mut it = pool.ready().into_iter().map(|tx| tx.data[0]); assert_eq!(it.next(), None); assert_eq!(pool.ready.len(), 0); @@ -797,14 +790,16 @@ mod tests { data: vec![5u8; 1024], hash: 5, provides: vec![vec![0], vec![4]], - .. 
DEFAULT_TX.clone() - }).expect("import 1 should be ok"); + ..DEFAULT_TX.clone() + }) + .expect("import 1 should be ok"); pool.import(Transaction { data: vec![3u8; 1024], hash: 7, provides: vec![vec![2], vec![7]], - .. DEFAULT_TX.clone() - }).expect("import 2 should be ok"); + ..DEFAULT_TX.clone() + }) + .expect("import 2 should be ok"); assert!(parity_util_mem::malloc_size(&pool) > 5000); } @@ -817,42 +812,48 @@ mod tests { data: vec![5u8], hash: 5, provides: vec![vec![0], vec![4]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![1u8], requires: vec![vec![0]], provides: vec![vec![1]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![3u8], hash: 3, requires: vec![vec![2]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![2u8], hash: 2, requires: vec![vec![1]], provides: vec![vec![3], vec![2]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![4u8], hash: 4, priority: 1_000u64, requires: vec![vec![3], vec![4]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); // future pool.import(Transaction { data: vec![6u8], hash: 6, priority: 1_000u64, requires: vec![vec![11]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); assert_eq!(pool.ready().count(), 5); assert_eq!(pool.future.len(), 1); @@ -874,36 +875,37 @@ mod tests { hash: 5, requires: vec![vec![0]], provides: vec![vec![100]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); // ready - pool.import(Transaction { - data: vec![1u8], - provides: vec![vec![1]], - .. 
DEFAULT_TX.clone() - }).unwrap(); + pool.import(Transaction { data: vec![1u8], provides: vec![vec![1]], ..DEFAULT_TX.clone() }) + .unwrap(); pool.import(Transaction { data: vec![2u8], hash: 2, requires: vec![vec![2]], provides: vec![vec![3]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![3u8], hash: 3, requires: vec![vec![1]], provides: vec![vec![2]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); pool.import(Transaction { data: vec![4u8], hash: 4, priority: 1_000u64, requires: vec![vec![3], vec![2]], provides: vec![vec![4]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); assert_eq!(pool.ready().count(), 4); assert_eq!(pool.future.len(), 1); @@ -914,12 +916,10 @@ mod tests { // then assert_eq!(result.pruned.len(), 2); assert_eq!(result.failed.len(), 0); - assert_eq!(result.promoted[0], Imported::Ready { - hash: 5, - promoted: vec![], - failed: vec![], - removed: vec![], - }); + assert_eq!( + result.promoted[0], + Imported::Ready { hash: 5, promoted: vec![], failed: vec![], removed: vec![] } + ); assert_eq!(result.promoted.len(), 1); assert_eq!(pool.future.len(), 0); assert_eq!(pool.ready.len(), 3); @@ -929,40 +929,52 @@ mod tests { #[test] fn transaction_debug() { assert_eq!( - format!("{:?}", Transaction { - data: vec![4u8], - hash: 4, - priority: 1_000u64, - requires: vec![vec![3], vec![2]], - provides: vec![vec![4]], - .. 
DEFAULT_TX.clone() - }), + format!( + "{:?}", + Transaction { + data: vec![4u8], + hash: 4, + priority: 1_000u64, + requires: vec![vec![3], vec![2]], + provides: vec![vec![4]], + ..DEFAULT_TX.clone() + } + ), "Transaction { \ hash: 4, priority: 1000, valid_till: 64, bytes: 1, propagate: true, \ -source: TransactionSource::External, requires: [03, 02], provides: [04], data: [4]}".to_owned() +source: TransactionSource::External, requires: [03, 02], provides: [04], data: [4]}" + .to_owned() ); } #[test] fn transaction_propagation() { - assert_eq!(Transaction { + assert_eq!( + Transaction { data: vec![4u8], hash: 4, priority: 1_000u64, requires: vec![vec![3], vec![2]], provides: vec![vec![4]], - .. DEFAULT_TX.clone() - }.is_propagable(), true); + ..DEFAULT_TX.clone() + } + .is_propagable(), + true + ); - assert_eq!(Transaction { + assert_eq!( + Transaction { data: vec![4u8], hash: 4, priority: 1_000u64, requires: vec![vec![3], vec![2]], provides: vec![vec![4]], propagate: false, - .. DEFAULT_TX.clone() - }.is_propagable(), false); + ..DEFAULT_TX.clone() + } + .is_propagable(), + false + ); } #[test] @@ -978,7 +990,7 @@ source: TransactionSource::External, requires: [03, 02], provides: [04], data: [ data: vec![5u8], hash: 5, requires: vec![vec![0]], - .. DEFAULT_TX.clone() + ..DEFAULT_TX.clone() }); if let Err(error::Error::RejectedFutureTransaction) = err { @@ -997,8 +1009,9 @@ source: TransactionSource::External, requires: [03, 02], provides: [04], data: [ data: vec![5u8], hash: 5, requires: vec![vec![0]], - .. DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); // then assert_eq!(pool.future.len(), 1); @@ -1022,8 +1035,9 @@ source: TransactionSource::External, requires: [03, 02], provides: [04], data: [ data: vec![5u8], hash: 5, requires: vec![vec![0]], - .. 
DEFAULT_TX.clone() - }).unwrap(); + ..DEFAULT_TX.clone() + }) + .unwrap(); flag }); diff --git a/client/transaction-pool/src/graph/future.rs b/client/transaction-pool/src/graph/future.rs index 083d3c7ec0613..b0e70698f3832 100644 --- a/client/transaction-pool/src/graph/future.rs +++ b/client/transaction-pool/src/graph/future.rs @@ -18,15 +18,12 @@ use std::{ collections::{HashMap, HashSet}, - fmt, - hash, + fmt, hash, sync::Arc, }; use sp_core::hexdisplay::HexDisplay; -use sp_runtime::transaction_validity::{ - TransactionTag as Tag, -}; +use sp_runtime::transaction_validity::TransactionTag as Tag; use wasm_timer::Instant; use super::base_pool::Transaction; @@ -48,10 +45,13 @@ impl fmt::Debug for WaitingTransaction>().join(", "), + fmt, + "missing_tags: {{{}}}", + self.missing_tags + .iter() + .map(|tag| HexDisplay::from(tag).to_string()) + .collect::>() + .join(", "), )?; write!(fmt, "}}") } @@ -77,22 +77,20 @@ impl WaitingTransaction { provided: &HashMap, recently_pruned: &[HashSet], ) -> Self { - let missing_tags = transaction.requires + let missing_tags = transaction + .requires .iter() .filter(|tag| { // is true if the tag is already satisfied either via transaction in the pool // or one that was recently included. - let is_provided = provided.contains_key(&**tag) || recently_pruned.iter().any(|x| x.contains(&**tag)); + let is_provided = provided.contains_key(&**tag) || + recently_pruned.iter().any(|x| x.contains(&**tag)); !is_provided }) .cloned() .collect(); - Self { - transaction: Arc::new(transaction), - missing_tags, - imported_at: Instant::now(), - } + Self { transaction: Arc::new(transaction), missing_tags, imported_at: Instant::now() } } /// Marks the tag as satisfied. 
@@ -121,10 +119,7 @@ pub struct FutureTransactions { impl Default for FutureTransactions { fn default() -> Self { - Self { - wanted_tags: Default::default(), - waiting: Default::default(), - } + Self { wanted_tags: Default::default(), waiting: Default::default() } } } @@ -144,7 +139,10 @@ impl FutureTransactions { /// we should remove the transactions from here and move them to the Ready queue. pub fn import(&mut self, tx: WaitingTransaction) { assert!(!tx.is_ready(), "Transaction is ready."); - assert!(!self.waiting.contains_key(&tx.transaction.hash), "Transaction is already imported."); + assert!( + !self.waiting.contains_key(&tx.transaction.hash), + "Transaction is already imported." + ); // Add all tags that are missing for tag in &tx.missing_tags { @@ -163,14 +161,20 @@ impl FutureTransactions { /// Returns a list of known transactions pub fn by_hashes(&self, hashes: &[Hash]) -> Vec>>> { - hashes.iter().map(|h| self.waiting.get(h).map(|x| x.transaction.clone())).collect() + hashes + .iter() + .map(|h| self.waiting.get(h).map(|x| x.transaction.clone())) + .collect() } /// Satisfies provided tags in transactions that are waiting for them. /// /// Returns (and removes) transactions that became ready after their last tag got /// satisfied and now we can remove them from Future and move to Ready queue. - pub fn satisfy_tags>(&mut self, tags: impl IntoIterator) -> Vec> { + pub fn satisfy_tags>( + &mut self, + tags: impl IntoIterator, + ) -> Vec> { let mut became_ready = vec![]; for tag in tags { @@ -205,7 +209,9 @@ impl FutureTransactions { let remove = if let Some(wanted) = self.wanted_tags.get_mut(&tag) { wanted.remove(hash); wanted.is_empty() - } else { false }; + } else { + false + }; if remove { self.wanted_tags.remove(&tag); } @@ -218,14 +224,15 @@ impl FutureTransactions { } /// Fold a list of future transactions to compute a single value. 
- pub fn fold, &WaitingTransaction) -> Option>(&mut self, f: F) -> Option { - self.waiting - .values() - .fold(None, f) + pub fn fold, &WaitingTransaction) -> Option>( + &mut self, + f: F, + ) -> Option { + self.waiting.values().fold(None, f) } /// Returns iterator over all future transactions - pub fn all(&self) -> impl Iterator> { + pub fn all(&self) -> impl Iterator> { self.waiting.values().map(|waiting| &*waiting.transaction) } @@ -265,7 +272,8 @@ mod tests { provides: vec![vec![3], vec![4]], propagate: true, source: TransactionSource::External, - }.into(), + } + .into(), missing_tags: vec![vec![1u8], vec![2u8]].into_iter().collect(), imported_at: std::time::Instant::now(), }); diff --git a/client/transaction-pool/src/graph/listener.rs b/client/transaction-pool/src/graph/listener.rs index a6987711f1dfb..b8149018f7836 100644 --- a/client/transaction-pool/src/graph/listener.rs +++ b/client/transaction-pool/src/graph/listener.rs @@ -1,4 +1,3 @@ - // This file is part of Substrate. // Copyright (C) 2018-2021 Parity Technologies (UK) Ltd. @@ -17,16 +16,14 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -use std::{ - collections::HashMap, hash, fmt::Debug, -}; +use std::{collections::HashMap, fmt::Debug, hash}; use linked_hash_map::LinkedHashMap; -use serde::Serialize; use log::{debug, trace}; +use serde::Serialize; use sp_runtime::traits; -use super::{watcher, ChainApi, ExtrinsicHash, BlockHash}; +use super::{watcher, BlockHash, ChainApi, ExtrinsicHash}; /// Extrinsic pool default listener. 
pub struct Listener { @@ -39,15 +36,15 @@ const MAX_FINALITY_WATCHERS: usize = 512; impl Default for Listener { fn default() -> Self { - Self { - watchers: Default::default(), - finality_watchers: Default::default(), - } + Self { watchers: Default::default(), finality_watchers: Default::default() } } } impl Listener { - fn fire(&mut self, hash: &H, fun: F) where F: FnOnce(&mut watcher::Sender>) { + fn fire(&mut self, hash: &H, fun: F) + where + F: FnOnce(&mut watcher::Sender>), + { let clean = if let Some(h) = self.watchers.get_mut(hash) { fun(h); h.is_done() diff --git a/client/transaction-pool/src/graph/mod.rs b/client/transaction-pool/src/graph/mod.rs index 92e76b3ecf90b..3ecfb8fe68c60 100644 --- a/client/transaction-pool/src/graph/mod.rs +++ b/client/transaction-pool/src/graph/mod.rs @@ -31,15 +31,17 @@ mod listener; mod pool; mod ready; mod rotator; -mod validated_pool; mod tracked_map; +mod validated_pool; pub mod base_pool; pub mod watcher; -pub use self::base_pool::Transaction; -pub use validated_pool::{IsValidator, ValidatedTransaction}; -pub use self::pool::{ - BlockHash, ChainApi, EventStream, ExtrinsicFor, ExtrinsicHash, - NumberFor, Options, Pool, TransactionFor, +pub use self::{ + base_pool::Transaction, + pool::{ + BlockHash, ChainApi, EventStream, ExtrinsicFor, ExtrinsicHash, NumberFor, Options, Pool, + TransactionFor, + }, }; +pub use validated_pool::{IsValidator, ValidatedTransaction}; diff --git a/client/transaction-pool/src/graph/pool.rs b/client/transaction-pool/src/graph/pool.rs index 2c24f3779f0e9..c04c167bc750f 100644 --- a/client/transaction-pool/src/graph/pool.rs +++ b/client/transaction-pool/src/graph/pool.rs @@ -16,26 +16,23 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
-use std::{ - collections::HashMap, - sync::Arc, -}; +use std::{collections::HashMap, sync::Arc}; -use futures::Future; +use futures::{channel::mpsc::Receiver, Future}; +use sc_transaction_pool_api::error; use sp_runtime::{ generic::BlockId, - traits::{self, SaturatedConversion, Block as BlockT}, + traits::{self, Block as BlockT, SaturatedConversion}, transaction_validity::{ - TransactionValidity, TransactionTag as Tag, TransactionValidityError, TransactionSource, + TransactionSource, TransactionTag as Tag, TransactionValidity, TransactionValidityError, }, }; -use sc_transaction_pool_api::error; use wasm_timer::Instant; -use futures::channel::mpsc::Receiver; use super::{ - base_pool as base, watcher::Watcher, - validated_pool::{IsValidator, ValidatedTransaction, ValidatedPool}, + base_pool as base, + validated_pool::{IsValidator, ValidatedPool, ValidatedTransaction}, + watcher::Watcher, }; /// Modification notification event stream type; @@ -52,11 +49,8 @@ pub type NumberFor = traits::NumberFor<::Block>; /// A type of transaction stored in the pool pub type TransactionFor = Arc, ExtrinsicFor>>; /// A type of validated transaction stored in the pool. -pub type ValidatedTransactionFor = ValidatedTransaction< - ExtrinsicHash, - ExtrinsicFor, - ::Error, ->; +pub type ValidatedTransactionFor = + ValidatedTransaction, ExtrinsicFor, ::Error>; /// Concrete extrinsic validation and query logic. pub trait ChainApi: Send + Sync { @@ -65,11 +59,12 @@ pub trait ChainApi: Send + Sync { /// Error type. type Error: From + error::IntoPoolError; /// Validate transaction future. - type ValidationFuture: Future> + Send + Unpin; + type ValidationFuture: Future> + Send + Unpin; /// Body future (since block body might be remote) - type BodyFuture: Future< - Output = Result::Extrinsic>>, Self::Error> - > + Unpin + Send + 'static; + type BodyFuture: Future::Extrinsic>>, Self::Error>> + + Unpin + + Send + + 'static; /// Verify extrinsic at given block. 
fn validate_transaction( @@ -118,14 +113,8 @@ pub struct Options { impl Default for Options { fn default() -> Self { Self { - ready: base::Limit { - count: 8192, - total_bytes: 20 * 1024 * 1024, - }, - future: base::Limit { - count: 512, - total_bytes: 1 * 1024 * 1024, - }, + ready: base::Limit { count: 8192, total_bytes: 20 * 1024 * 1024 }, + future: base::Limit { count: 512, total_bytes: 1 * 1024 * 1024 }, reject_future_transactions: false, } } @@ -157,9 +146,7 @@ where impl Pool { /// Create a new transaction pool. pub fn new(options: Options, is_validator: IsValidator, api: Arc) -> Self { - Self { - validated_pool: Arc::new(ValidatedPool::new(options, is_validator, api)), - } + Self { validated_pool: Arc::new(ValidatedPool::new(options, is_validator, api)) } } /// Imports a bunch of unverified extrinsics to the pool @@ -167,7 +154,7 @@ impl Pool { &self, at: &BlockId, source: TransactionSource, - xts: impl IntoIterator>, + xts: impl IntoIterator>, ) -> Result, B::Error>>, B::Error> { let xts = xts.into_iter().map(|xt| (source, xt)); let validated_transactions = self.verify(at, xts, CheckBannedBeforeVerify::Yes).await?; @@ -181,7 +168,7 @@ impl Pool { &self, at: &BlockId, source: TransactionSource, - xts: impl IntoIterator>, + xts: impl IntoIterator>, ) -> Result, B::Error>>, B::Error> { let xts = xts.into_iter().map(|xt| (source, xt)); let validated_transactions = self.verify(at, xts, CheckBannedBeforeVerify::No).await?; @@ -207,13 +194,9 @@ impl Pool { xt: ExtrinsicFor, ) -> Result, ExtrinsicHash>, B::Error> { let block_number = self.resolve_block_number(at)?; - let (_, tx) = self.verify_one( - at, - block_number, - source, - xt, - CheckBannedBeforeVerify::Yes, - ).await; + let (_, tx) = self + .verify_one(at, block_number, source, xt, CheckBannedBeforeVerify::Yes) + .await; self.validated_pool.submit_and_watch(tx) } @@ -222,7 +205,6 @@ impl Pool { &self, revalidated_transactions: HashMap, ValidatedTransactionFor>, ) { - let now = Instant::now(); 
self.validated_pool.resubmit(revalidated_transactions); log::debug!(target: "txpool", @@ -243,13 +225,17 @@ impl Pool { hashes: &[ExtrinsicHash], ) -> Result<(), B::Error> { // Get details of all extrinsics that are already in the pool - let in_pool_tags = self.validated_pool.extrinsics_tags(hashes) - .into_iter().filter_map(|x| x).flatten(); + let in_pool_tags = self + .validated_pool + .extrinsics_tags(hashes) + .into_iter() + .filter_map(|x| x) + .flatten(); // Prune all transactions that provide given tags let prune_status = self.validated_pool.prune_tags(in_pool_tags)?; - let pruned_transactions = hashes.iter().cloned() - .chain(prune_status.pruned.iter().map(|tx| tx.hash)); + let pruned_transactions = + hashes.iter().cloned().chain(prune_status.pruned.iter().map(|tx| tx.hash)); self.validated_pool.fire_pruned(at, pruned_transactions) } @@ -272,7 +258,8 @@ impl Pool { extrinsics.len() ); // Get details of all extrinsics that are already in the pool - let in_pool_hashes = extrinsics.iter().map(|extrinsic| self.hash_of(extrinsic)).collect::>(); + let in_pool_hashes = + extrinsics.iter().map(|extrinsic| self.hash_of(extrinsic)).collect::>(); let in_pool_tags = self.validated_pool.extrinsics_tags(&in_pool_hashes); // Zip the ones from the pool with the full list (we get pairs `(Extrinsic, Option>)`) @@ -286,7 +273,9 @@ impl Pool { // if it's not found in the pool query the runtime at parent block // to get validity info and tags that the extrinsic provides. 
None => { - let validity = self.validated_pool.api() + let validity = self + .validated_pool + .api() .validate_transaction(parent, TransactionSource::InBlock, extrinsic.clone()) .await; @@ -324,8 +313,8 @@ impl Pool { pub async fn prune_tags( &self, at: &BlockId, - tags: impl IntoIterator, - known_imported_hashes: impl IntoIterator> + Clone, + tags: impl IntoIterator, + known_imported_hashes: impl IntoIterator> + Clone, ) -> Result<(), B::Error> { log::debug!(target: "txpool", "Pruning at {:?}", at); // Prune all transactions that provide given tags @@ -334,22 +323,17 @@ impl Pool { // Make sure that we don't revalidate extrinsics that were part of the recently // imported block. This is especially important for UTXO-like chains cause the // inputs are pruned so such transaction would go to future again. - self.validated_pool.ban(&Instant::now(), known_imported_hashes.clone().into_iter()); + self.validated_pool + .ban(&Instant::now(), known_imported_hashes.clone().into_iter()); // Try to re-validate pruned transactions since some of them might be still valid. // note that `known_imported_hashes` will be rejected here due to temporary ban. - let pruned_hashes = prune_status.pruned - .iter() - .map(|tx| tx.hash).collect::>(); - let pruned_transactions = prune_status.pruned - .into_iter() - .map(|tx| (tx.source, tx.data.clone())); + let pruned_hashes = prune_status.pruned.iter().map(|tx| tx.hash).collect::>(); + let pruned_transactions = + prune_status.pruned.into_iter().map(|tx| (tx.source, tx.data.clone())); - let reverified_transactions = self.verify( - at, - pruned_transactions, - CheckBannedBeforeVerify::Yes, - ).await?; + let reverified_transactions = + self.verify(at, pruned_transactions, CheckBannedBeforeVerify::Yes).await?; log::trace!(target: "txpool", "Pruning at {:?}. Resubmitting transactions.", at); // And finally - submit reverified transactions back to the pool @@ -369,16 +353,16 @@ impl Pool { /// Resolves block number by id. 
fn resolve_block_number(&self, at: &BlockId) -> Result, B::Error> { - self.validated_pool.api().block_id_to_number(at) - .and_then(|number| number.ok_or_else(|| - error::Error::InvalidBlockId(format!("{:?}", at)).into())) + self.validated_pool.api().block_id_to_number(at).and_then(|number| { + number.ok_or_else(|| error::Error::InvalidBlockId(format!("{:?}", at)).into()) + }) } /// Returns future that validates a bunch of transactions at given block. async fn verify( &self, at: &BlockId, - xts: impl IntoIterator)>, + xts: impl IntoIterator)>, check: CheckBannedBeforeVerify, ) -> Result, ValidatedTransactionFor>, B::Error> { // we need a block number to compute tx validity @@ -386,8 +370,11 @@ impl Pool { let res = futures::future::join_all( xts.into_iter() - .map(|(source, xt)| self.verify_one(at, block_number, source, xt, check)) - ).await.into_iter().collect::>(); + .map(|(source, xt)| self.verify_one(at, block_number, source, xt, check)), + ) + .await + .into_iter() + .collect::>(); Ok(res) } @@ -408,11 +395,11 @@ impl Pool { return (hash, ValidatedTransaction::Invalid(hash, err)) } - let validation_result = self.validated_pool.api().validate_transaction( - block_id, - source, - xt.clone(), - ).await; + let validation_result = self + .validated_pool + .api() + .validate_transaction(block_id, source, xt.clone()) + .await; let status = match validation_result { Ok(status) => status, @@ -420,7 +407,7 @@ impl Pool { }; let validity = match status { - Ok(validity) => { + Ok(validity) => if validity.provides.is_empty() { ValidatedTransaction::Invalid(hash, error::Error::NoTagsProvided.into()) } else { @@ -432,8 +419,7 @@ impl Pool { bytes, validity, ) - } - }, + }, Err(TransactionValidityError::Invalid(e)) => ValidatedTransaction::Invalid(hash, error::Error::InvalidTransaction(e).into()), Err(TransactionValidityError::Unknown(e)) => @@ -444,35 +430,32 @@ impl Pool { } /// get a reference to the underlying validated pool. 
- pub fn validated_pool(&self) -> &ValidatedPool { + pub fn validated_pool(&self) -> &ValidatedPool { &self.validated_pool } } impl Clone for Pool { fn clone(&self) -> Self { - Self { - validated_pool: self.validated_pool.clone(), - } + Self { validated_pool: self.validated_pool.clone() } } } #[cfg(test)] mod tests { - use std::collections::{HashMap, HashSet}; - use parking_lot::Mutex; + use super::{super::base_pool::Limit, *}; + use assert_matches::assert_matches; + use codec::Encode; use futures::executor::block_on; - use super::*; + use parking_lot::Mutex; use sc_transaction_pool_api::TransactionStatus; use sp_runtime::{ traits::Hash, - transaction_validity::{ValidTransaction, InvalidTransaction, TransactionSource}, + transaction_validity::{InvalidTransaction, TransactionSource, ValidTransaction}, }; - use codec::Encode; - use substrate_test_runtime::{Block, Extrinsic, Transfer, H256, AccountId, Hashing}; - use assert_matches::assert_matches; + use std::collections::{HashMap, HashSet}; + use substrate_test_runtime::{AccountId, Block, Extrinsic, Hashing, Transfer, H256}; use wasm_timer::Instant; - use super::super::base_pool::Limit; const INVALID_NONCE: u64 = 254; const SOURCE: TransactionSource = TransactionSource::External; @@ -522,8 +505,16 @@ mod tests { } else { let mut transaction = ValidTransaction { priority: 4, - requires: if nonce > block_number { vec![vec![nonce as u8 - 1]] } else { vec![] }, - provides: if nonce == INVALID_NONCE { vec![] } else { vec![vec![nonce as u8]] }, + requires: if nonce > block_number { + vec![vec![nonce as u8 - 1]] + } else { + vec![] + }, + provides: if nonce == INVALID_NONCE { + vec![] + } else { + vec![vec![nonce as u8]] + }, longevity: 3, propagate: true, }; @@ -539,15 +530,13 @@ mod tests { Ok(transaction) } }, - Extrinsic::IncludeData(_) => { - Ok(ValidTransaction { - priority: 9001, - requires: vec![], - provides: vec![vec![42]], - longevity: 9001, - propagate: false, - }) - }, + Extrinsic::IncludeData(_) => 
Ok(ValidTransaction { + priority: 9001, + requires: vec![], + provides: vec![vec![42]], + longevity: 9001, + propagate: false, + }), _ => unimplemented!(), }; @@ -613,12 +602,17 @@ mod tests { let pool = pool(); // when - let hash = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 0, - }))).unwrap(); + let hash = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 0, + }), + )) + .unwrap(); // then assert_eq!(pool.validated_pool().ready().map(|v| v.hash).collect::>(), vec![hash]); @@ -673,25 +667,40 @@ mod tests { let stream = pool.validated_pool().import_notification_stream(); // when - let hash0 = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 0, - }))).unwrap(); - let hash1 = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 1, - }))).unwrap(); + let hash0 = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 0, + }), + )) + .unwrap(); + let hash1 = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 1, + }), + )) + .unwrap(); // future doesn't count - let _hash = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: 
AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 3, - }))).unwrap(); + let _hash = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 3, + }), + )) + .unwrap(); assert_eq!(pool.validated_pool().status().ready, 2); assert_eq!(pool.validated_pool().status().future, 1); @@ -710,24 +719,39 @@ mod tests { fn should_clear_stale_transactions() { // given let pool = pool(); - let hash1 = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 0, - }))).unwrap(); - let hash2 = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 1, - }))).unwrap(); - let hash3 = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 3, - }))).unwrap(); + let hash1 = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 0, + }), + )) + .unwrap(); + let hash2 = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 1, + }), + )) + .unwrap(); + let hash3 = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: 
AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 3, + }), + )) + .unwrap(); // when pool.validated_pool.clear_stale(&BlockId::Number(5)).unwrap(); @@ -746,12 +770,17 @@ mod tests { fn should_ban_mined_transactions() { // given let pool = pool(); - let hash1 = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 0, - }))).unwrap(); + let hash1 = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 0, + }), + )) + .unwrap(); // when block_on(pool.prune_tags(&BlockId::Number(1), vec![vec![0]], vec![hash1.clone()])).unwrap(); @@ -763,34 +792,37 @@ mod tests { #[test] fn should_limit_futures() { // given - let limit = Limit { - count: 100, - total_bytes: 200, - }; + let limit = Limit { count: 100, total_bytes: 200 }; - let options = Options { - ready: limit.clone(), - future: limit.clone(), - ..Default::default() - }; + let options = Options { ready: limit.clone(), future: limit.clone(), ..Default::default() }; let pool = Pool::new(options, true.into(), TestApi::default().into()); - let hash1 = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 1, - }))).unwrap(); + let hash1 = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 1, + }), + )) + .unwrap(); assert_eq!(pool.validated_pool().status().future, 1); // when - let hash2 = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(2)), 
- to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 10, - }))).unwrap(); + let hash2 = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(2)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 10, + }), + )) + .unwrap(); // then assert_eq!(pool.validated_pool().status().future, 1); @@ -801,26 +833,24 @@ mod tests { #[test] fn should_error_if_reject_immediately() { // given - let limit = Limit { - count: 100, - total_bytes: 10, - }; + let limit = Limit { count: 100, total_bytes: 10 }; - let options = Options { - ready: limit.clone(), - future: limit.clone(), - ..Default::default() - }; + let options = Options { ready: limit.clone(), future: limit.clone(), ..Default::default() }; let pool = Pool::new(options, true.into(), TestApi::default().into()); // when - block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 1, - }))).unwrap_err(); + block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 1, + }), + )) + .unwrap_err(); // then assert_eq!(pool.validated_pool().status().ready, 0); @@ -833,12 +863,17 @@ mod tests { let pool = pool(); // when - let err = block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: INVALID_NONCE, - }))).unwrap_err(); + let err = block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: INVALID_NONCE, + }), + )) + .unwrap_err(); // 
then assert_eq!(pool.validated_pool().status().ready, 0); @@ -853,12 +888,17 @@ mod tests { fn should_trigger_ready_and_finalized() { // given let pool = pool(); - let watcher = block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 0, - }))).unwrap(); + let watcher = block_on(pool.submit_and_watch( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 0, + }), + )) + .unwrap(); assert_eq!(pool.validated_pool().status().ready, 1); assert_eq!(pool.validated_pool().status().future, 0); @@ -880,19 +920,27 @@ mod tests { fn should_trigger_ready_and_finalized_when_pruning_via_hash() { // given let pool = pool(); - let watcher = block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 0, - }))).unwrap(); + let watcher = block_on(pool.submit_and_watch( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 0, + }), + )) + .unwrap(); assert_eq!(pool.validated_pool().status().ready, 1); assert_eq!(pool.validated_pool().status().future, 0); // when - block_on( - pool.prune_tags(&BlockId::Number(2), vec![vec![0u8]], vec![watcher.hash().clone()]), - ).unwrap(); + block_on(pool.prune_tags( + &BlockId::Number(2), + vec![vec![0u8]], + vec![watcher.hash().clone()], + )) + .unwrap(); assert_eq!(pool.validated_pool().status().ready, 0); assert_eq!(pool.validated_pool().status().future, 0); @@ -909,22 +957,32 @@ mod tests { fn should_trigger_future_and_ready_after_promoted() { // given let pool = pool(); - let watcher = 
block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 1, - }))).unwrap(); + let watcher = block_on(pool.submit_and_watch( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 1, + }), + )) + .unwrap(); assert_eq!(pool.validated_pool().status().ready, 0); assert_eq!(pool.validated_pool().status().future, 1); // when - block_on(pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Transfer { - from: AccountId::from_h256(H256::from_low_u64_be(1)), - to: AccountId::from_h256(H256::from_low_u64_be(2)), - amount: 5, - nonce: 0, - }))).unwrap(); + block_on(pool.submit_one( + &BlockId::Number(0), + SOURCE, + uxt(Transfer { + from: AccountId::from_h256(H256::from_low_u64_be(1)), + to: AccountId::from_h256(H256::from_low_u64_be(2)), + amount: 5, + nonce: 0, + }), + )) + .unwrap(); assert_eq!(pool.validated_pool().status().ready, 2); // then @@ -943,13 +1001,13 @@ mod tests { amount: 5, nonce: 0, }); - let watcher = block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, uxt)).unwrap(); + let watcher = + block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, uxt)).unwrap(); assert_eq!(pool.validated_pool().status().ready, 1); // when pool.validated_pool.remove_invalid(&[*watcher.hash()]); - // then let mut stream = futures::executor::block_on_stream(watcher.into_stream()); assert_eq!(stream.next(), Some(TransactionStatus::Ready)); @@ -967,7 +1025,8 @@ mod tests { amount: 5, nonce: 0, }); - let watcher = block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, uxt)).unwrap(); + let watcher = + block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, uxt)).unwrap(); assert_eq!(pool.validated_pool().status().ready, 1); // when @@ -976,7 +1035,6 @@ mod tests { map.insert(*watcher.hash(), 
peers.clone()); pool.validated_pool().on_broadcasted(map); - // then let mut stream = futures::executor::block_on_stream(watcher.into_stream()); assert_eq!(stream.next(), Some(TransactionStatus::Ready)); @@ -986,15 +1044,9 @@ mod tests { #[test] fn should_trigger_dropped() { // given - let limit = Limit { - count: 1, - total_bytes: 1000, - }; - let options = Options { - ready: limit.clone(), - future: limit.clone(), - ..Default::default() - }; + let limit = Limit { count: 1, total_bytes: 1000 }; + let options = + Options { ready: limit.clone(), future: limit.clone(), ..Default::default() }; let pool = Pool::new(options, true.into(), TestApi::default().into()); @@ -1064,7 +1116,6 @@ mod tests { block_on(pool.prune_tags(&BlockId::Number(1), vec![provides], vec![])).unwrap(); assert_eq!(pool.validated_pool().status().ready, 0); - // so when we release the verification of the previous one it will have // something in `requires`, but should go to ready directly, since the previous transaction was imported // correctly. diff --git a/client/transaction-pool/src/graph/ready.rs b/client/transaction-pool/src/graph/ready.rs index 46f13f4e82dc9..dcf0e177dae62 100644 --- a/client/transaction-pool/src/graph/ready.rs +++ b/client/transaction-pool/src/graph/ready.rs @@ -17,19 +17,16 @@ // along with this program. If not, see . 
use std::{ - collections::{HashMap, HashSet, BTreeSet}, cmp, + collections::{BTreeSet, HashMap, HashSet}, hash, sync::Arc, }; -use serde::Serialize; use log::trace; -use sp_runtime::traits::Member; -use sp_runtime::transaction_validity::{ - TransactionTag as Tag, -}; use sc_transaction_pool_api::error; +use serde::Serialize; +use sp_runtime::{traits::Member, transaction_validity::TransactionTag as Tag}; use super::{ base_pool::Transaction, @@ -50,16 +47,15 @@ pub struct TransactionRef { impl Clone for TransactionRef { fn clone(&self) -> Self { - Self { - transaction: self.transaction.clone(), - insertion_id: self.insertion_id, - } + Self { transaction: self.transaction.clone(), insertion_id: self.insertion_id } } } impl Ord for TransactionRef { fn cmp(&self, other: &Self) -> cmp::Ordering { - self.transaction.priority.cmp(&other.transaction.priority) + self.transaction + .priority + .cmp(&other.transaction.priority) .then_with(|| other.transaction.valid_till.cmp(&self.transaction.valid_till)) .then_with(|| other.insertion_id.cmp(&self.insertion_id)) } @@ -157,7 +153,7 @@ impl ReadyTransactions { /// - transactions that are valid for a shorter time go first /// 4. Lastly we sort by the time in the queue /// - transactions that are longer in the queue go first - pub fn get(&self) -> impl Iterator>> { + pub fn get(&self) -> impl Iterator>> { BestIterator { all: self.ready.clone(), best: self.best.clone(), @@ -176,9 +172,13 @@ impl ReadyTransactions { ) -> error::Result>>> { assert!( tx.is_ready(), - "Only ready transactions can be imported. Missing: {:?}", tx.missing_tags + "Only ready transactions can be imported. Missing: {:?}", + tx.missing_tags + ); + assert!( + !self.ready.read().contains_key(&tx.transaction.hash), + "Transaction is already imported." 
); - assert!(!self.ready.read().contains_key(&tx.transaction.hash), "Transaction is already imported."); self.insertion_id += 1; let insertion_id = self.insertion_id; @@ -201,7 +201,7 @@ impl ReadyTransactions { } else { requires_offset += 1; } - } + } // update provided_tags // call to replace_previous guarantees that we will be overwriting @@ -210,10 +210,7 @@ impl ReadyTransactions { self.provided_tags.insert(tag.clone(), hash.clone()); } - let transaction = TransactionRef { - insertion_id, - transaction - }; + let transaction = TransactionRef { insertion_id, transaction }; // insert to best if it doesn't require any other transaction to be included before it if goes_to_best { @@ -221,21 +218,17 @@ impl ReadyTransactions { } // insert to Ready - ready.insert(hash, ReadyTx { - transaction, - unlocks, - requires_offset, - }); + ready.insert(hash, ReadyTx { transaction, unlocks, requires_offset }); Ok(replaced) } /// Fold a list of ready transactions to compute a single value. - pub fn fold, &ReadyTx) -> Option>(&mut self, f: F) -> Option { - self.ready - .read() - .values() - .fold(None, f) + pub fn fold, &ReadyTx) -> Option>( + &mut self, + f: F, + ) -> Option { + self.ready.read().values().fold(None, f) } /// Returns true if given transaction is part of the queue. @@ -251,9 +244,10 @@ impl ReadyTransactions { /// Retrieve transactions by hash pub fn by_hashes(&self, hashes: &[Hash]) -> Vec>>> { let ready = self.ready.read(); - hashes.iter().map(|hash| { - ready.get(hash).map(|x| x.transaction.transaction.clone()) - }).collect() + hashes + .iter() + .map(|hash| ready.get(hash).map(|x| x.transaction.transaction.clone())) + .collect() } /// Removes a subtree of transactions from the ready pool. 
@@ -280,13 +274,12 @@ impl ReadyTransactions { let mut ready = self.ready.write(); while let Some(hash) = to_remove.pop() { if let Some(mut tx) = ready.remove(&hash) { - let invalidated = tx.transaction.transaction.provides - .iter() - .filter(|tag| provides_tag_filter + let invalidated = tx.transaction.transaction.provides.iter().filter(|tag| { + provides_tag_filter .as_ref() .map(|filter| !filter.contains(&**tag)) .unwrap_or(true) - ); + }); let mut removed_some_tags = false; // remove entries from provided_tags @@ -331,7 +324,9 @@ impl ReadyTransactions { let mut to_remove = vec![tag]; while let Some(tag) = to_remove.pop() { - let res = self.provided_tags.remove(&tag) + let res = self + .provided_tags + .remove(&tag) .and_then(|hash| self.ready.write().remove(&hash)); if let Some(tx) = res { @@ -417,19 +412,18 @@ impl ReadyTransactions { fn replace_previous( &mut self, tx: &Transaction, - ) -> error::Result< - (Vec>>, Vec) - > { + ) -> error::Result<(Vec>>, Vec)> { let (to_remove, unlocks) = { // check if we are replacing a transaction - let replace_hashes = tx.provides + let replace_hashes = tx + .provides .iter() .filter_map(|tag| self.provided_tags.get(tag)) .collect::>(); // early exit if we are not replacing anything. if replace_hashes.is_empty() { - return Ok((vec![], vec![])); + return Ok((vec![], vec![])) } // now check if collective priority is lower than the replacement transaction. 
@@ -438,9 +432,9 @@ impl ReadyTransactions { replace_hashes .iter() .filter_map(|hash| ready.get(hash)) - .fold(0u64, |total, tx| + .fold(0u64, |total, tx| { total.saturating_add(tx.transaction.transaction.priority) - ) + }) }; // bail - the transaction has too low priority to replace the old ones @@ -451,28 +445,22 @@ impl ReadyTransactions { // construct a list of unlocked transactions let unlocks = { let ready = self.ready.read(); - replace_hashes - .iter() - .filter_map(|hash| ready.get(hash)) - .fold(vec![], |mut list, tx| { + replace_hashes.iter().filter_map(|hash| ready.get(hash)).fold( + vec![], + |mut list, tx| { list.extend(tx.unlocks.iter().cloned()); list - }) + }, + ) }; - ( - replace_hashes.into_iter().cloned().collect::>(), - unlocks - ) + (replace_hashes.into_iter().cloned().collect::>(), unlocks) }; let new_provides = tx.provides.iter().cloned().collect::>(); let removed = self.remove_subtree_with_tag_filter(to_remove, Some(new_provides)); - Ok(( - removed, - unlocks - )) + Ok((removed, unlocks)) } /// Returns number of transactions in this queue. 
@@ -500,7 +488,6 @@ impl BestIterator { if satisfied >= tx_ref.transaction.requires.len() { // If we have satisfied all deps insert to best self.best.insert(tx_ref); - } else { // otherwise we're still awaiting for some deps self.awaiting.insert(tx_ref.transaction.hash.clone(), (satisfied, tx_ref)); @@ -531,7 +518,10 @@ impl Iterator for BestIterator { Some((satisfied, tx_ref)) // then get from the pool } else { - self.all.read().get(hash).map(|next| (next.requires_offset + 1, next.transaction.clone())) + self.all + .read() + .get(hash) + .map(|next| (next.requires_offset + 1, next.transaction.clone())) }; if let Some((satisfied, tx_ref)) = res { self.best_or_awaiting(satisfied, tx_ref) @@ -571,7 +561,7 @@ mod tests { fn import( ready: &mut ReadyTransactions, - tx: Transaction + tx: Transaction, ) -> error::Result>>> { let x = WaitingTransaction::new(tx, ready.provided_tags(), &[]); ready.import(x) @@ -662,7 +652,7 @@ mod tests { bytes: 1, hash: 5, priority: 1, - valid_till: u64::MAX, // use the max here for testing. + valid_till: u64::MAX, // use the max here for testing. requires: vec![tx1.provides[0].clone()], provides: vec![], propagate: true, @@ -695,7 +685,7 @@ mod tests { bytes: 1, hash: 5, priority: 1, - valid_till: u64::MAX, // use the max here for testing. + valid_till: u64::MAX, // use the max here for testing. 
requires: vec![], provides: vec![], propagate: true, @@ -717,28 +707,19 @@ mod tests { tx }; // higher priority = better - assert!(TransactionRef { - transaction: Arc::new(with_priority(3, 3)), - insertion_id: 1, - } > TransactionRef { - transaction: Arc::new(with_priority(2, 3)), - insertion_id: 2, - }); + assert!( + TransactionRef { transaction: Arc::new(with_priority(3, 3)), insertion_id: 1 } > + TransactionRef { transaction: Arc::new(with_priority(2, 3)), insertion_id: 2 } + ); // lower validity = better - assert!(TransactionRef { - transaction: Arc::new(with_priority(3, 2)), - insertion_id: 1, - } > TransactionRef { - transaction: Arc::new(with_priority(3, 3)), - insertion_id: 2, - }); + assert!( + TransactionRef { transaction: Arc::new(with_priority(3, 2)), insertion_id: 1 } > + TransactionRef { transaction: Arc::new(with_priority(3, 3)), insertion_id: 2 } + ); // lower insertion_id = better - assert!(TransactionRef { - transaction: Arc::new(with_priority(3, 3)), - insertion_id: 1, - } > TransactionRef { - transaction: Arc::new(with_priority(3, 3)), - insertion_id: 2, - }); + assert!( + TransactionRef { transaction: Arc::new(with_priority(3, 3)), insertion_id: 1 } > + TransactionRef { transaction: Arc::new(with_priority(3, 3)), insertion_id: 2 } + ); } } diff --git a/client/transaction-pool/src/graph/rotator.rs b/client/transaction-pool/src/graph/rotator.rs index 0e4fd0abf2974..820fde35dac18 100644 --- a/client/transaction-pool/src/graph/rotator.rs +++ b/client/transaction-pool/src/graph/rotator.rs @@ -21,13 +21,8 @@ //! Keeps only recent extrinsic and discard the ones kept for a significant amount of time. //! Discarded extrinsics are banned so that they don't get re-imported again. 
-use std::{ - collections::HashMap, - hash, - iter, - time::Duration, -}; use parking_lot::RwLock; +use std::{collections::HashMap, hash, iter, time::Duration}; use wasm_timer::Instant; use super::base_pool::Transaction; @@ -48,10 +43,7 @@ pub struct PoolRotator { impl Default for PoolRotator { fn default() -> Self { - Self { - ban_time: Duration::from_secs(60 * 30), - banned_until: Default::default(), - } + Self { ban_time: Duration::from_secs(60 * 30), banned_until: Default::default() } } } @@ -62,7 +54,7 @@ impl PoolRotator { } /// Bans given set of hashes. - pub fn ban(&self, now: &Instant, hashes: impl IntoIterator) { + pub fn ban(&self, now: &Instant, hashes: impl IntoIterator) { let mut banned = self.banned_until.write(); for hash in hashes { @@ -81,9 +73,14 @@ impl PoolRotator { /// Bans extrinsic if it's stale. /// /// Returns `true` if extrinsic is stale and got banned. - pub fn ban_if_stale(&self, now: &Instant, current_block: u64, xt: &Transaction) -> bool { + pub fn ban_if_stale( + &self, + now: &Instant, + current_block: u64, + xt: &Transaction, + ) -> bool { if xt.valid_till > current_block { - return false; + return false } self.ban(now, iter::once(xt.hash.clone())); @@ -107,10 +104,7 @@ mod tests { type Ex = (); fn rotator() -> PoolRotator { - PoolRotator { - ban_time: Duration::from_millis(10), - ..Default::default() - } + PoolRotator { ban_time: Duration::from_millis(10), ..Default::default() } } fn tx() -> (Hash, Transaction) { @@ -160,7 +154,6 @@ mod tests { assert!(rotator.is_banned(&hash)); } - #[test] fn should_clear_banned() { // given @@ -201,14 +194,14 @@ mod tests { let past_block = 0; // when - for i in 0..2*EXPECTED_SIZE { + for i in 0..2 * EXPECTED_SIZE { let tx = tx_with(i as u64, past_block); assert!(rotator.ban_if_stale(&now, past_block, &tx)); } - assert_eq!(rotator.banned_until.read().len(), 2*EXPECTED_SIZE); + assert_eq!(rotator.banned_until.read().len(), 2 * EXPECTED_SIZE); // then - let tx = tx_with(2*EXPECTED_SIZE as u64, 
past_block); + let tx = tx_with(2 * EXPECTED_SIZE as u64, past_block); // trigger a garbage collection assert!(rotator.ban_if_stale(&now, past_block, &tx)); assert_eq!(rotator.banned_until.read().len(), EXPECTED_SIZE); diff --git a/client/transaction-pool/src/graph/tracked_map.rs b/client/transaction-pool/src/graph/tracked_map.rs index 98fd9e21b3160..c1fdda227c6ae 100644 --- a/client/transaction-pool/src/graph/tracked_map.rs +++ b/client/transaction-pool/src/graph/tracked_map.rs @@ -16,11 +16,14 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . +use parking_lot::{RwLock, RwLockReadGuard, RwLockWriteGuard}; use std::{ collections::HashMap, - sync::{Arc, atomic::{AtomicIsize, Ordering as AtomicOrdering}}, + sync::{ + atomic::{AtomicIsize, Ordering as AtomicOrdering}, + Arc, + }, }; -use parking_lot::{RwLock, RwLockWriteGuard, RwLockReadGuard}; /// Something that can report its size. pub trait Size { @@ -39,11 +42,7 @@ pub struct TrackedMap { impl Default for TrackedMap { fn default() -> Self { - Self { - index: Arc::new(HashMap::default().into()), - bytes: 0.into(), - length: 0.into(), - } + Self { index: Arc::new(HashMap::default().into()), bytes: 0.into(), length: 0.into() } } } @@ -65,9 +64,7 @@ impl TrackedMap { /// Lock map for read. pub fn read(&self) -> TrackedMapReadAccess { - TrackedMapReadAccess { - inner_guard: self.index.read(), - } + TrackedMapReadAccess { inner_guard: self.index.read() } } /// Lock map for write. @@ -87,13 +84,11 @@ pub struct ReadOnlyTrackedMap(Arc>>); impl ReadOnlyTrackedMap where - K: Eq + std::hash::Hash + K: Eq + std::hash::Hash, { /// Lock map for read. 
pub fn read(&self) -> TrackedMapReadAccess { - TrackedMapReadAccess { - inner_guard: self.0.read(), - } + TrackedMapReadAccess { inner_guard: self.0.read() } } } @@ -103,7 +98,7 @@ pub struct TrackedMapReadAccess<'a, K, V> { impl<'a, K, V> TrackedMapReadAccess<'a, K, V> where - K: Eq + std::hash::Hash + K: Eq + std::hash::Hash, { /// Returns true if map contains key. pub fn contains_key(&self, key: &K) -> bool { @@ -129,7 +124,8 @@ pub struct TrackedMapWriteAccess<'a, K, V> { impl<'a, K, V> TrackedMapWriteAccess<'a, K, V> where - K: Eq + std::hash::Hash, V: Size + K: Eq + std::hash::Hash, + V: Size, { /// Insert value and return previous (if any). pub fn insert(&mut self, key: K, val: V) -> Option { @@ -165,7 +161,9 @@ mod tests { use super::*; impl Size for i32 { - fn size(&self) -> usize { *self as usize / 10 } + fn size(&self) -> usize { + *self as usize / 10 + } } #[test] diff --git a/client/transaction-pool/src/graph/validated_pool.rs b/client/transaction-pool/src/graph/validated_pool.rs index 5feba94dc56b1..3ac7f002077cb 100644 --- a/client/transaction-pool/src/graph/validated_pool.rs +++ b/client/transaction-pool/src/graph/validated_pool.rs @@ -17,27 +17,31 @@ // along with this program. If not, see . 
use std::{ - collections::{HashSet, HashMap}, + collections::{HashMap, HashSet}, hash, sync::Arc, }; -use serde::Serialize; +use futures::channel::mpsc::{channel, Sender}; use parking_lot::{Mutex, RwLock}; +use retain_mut::RetainMut; +use sc_transaction_pool_api::{error, PoolStatus}; +use serde::Serialize; use sp_runtime::{ generic::BlockId, traits::{self, SaturatedConversion}, - transaction_validity::{TransactionTag as Tag, ValidTransaction, TransactionSource}, + transaction_validity::{TransactionSource, TransactionTag as Tag, ValidTransaction}, }; -use sc_transaction_pool_api::{error, PoolStatus}; use wasm_timer::Instant; -use futures::channel::mpsc::{channel, Sender}; -use retain_mut::RetainMut; use super::{ - base_pool::{self as base, PruneStatus}, watcher::Watcher, - listener::Listener, rotator::PoolRotator, - pool::{EventStream, Options, ChainApi, BlockHash, ExtrinsicHash, ExtrinsicFor, TransactionFor}, + base_pool::{self as base, PruneStatus}, + listener::Listener, + pool::{ + BlockHash, ChainApi, EventStream, ExtrinsicFor, ExtrinsicHash, Options, TransactionFor, + }, + rotator::PoolRotator, + watcher::Watcher, }; /// Pre-validated transaction. Validated pool only accepts transactions wrapped in this enum. @@ -72,19 +76,14 @@ impl ValidatedTransaction { requires: validity.requires, provides: validity.provides, propagate: validity.propagate, - valid_till: at - .saturated_into::() - .saturating_add(validity.longevity), + valid_till: at.saturated_into::().saturating_add(validity.longevity), }) } } /// A type of validated transaction stored in the pool. -pub type ValidatedTransactionFor = ValidatedTransaction< - ExtrinsicHash, - ExtrinsicFor, - ::Error, ->; +pub type ValidatedTransactionFor = + ValidatedTransaction, ExtrinsicFor, ::Error>; /// A closure that returns true if the local node is a validator that can author blocks. 
pub struct IsValidator(Box bool + Send + Sync>); @@ -107,10 +106,7 @@ pub struct ValidatedPool { is_validator: IsValidator, options: Options, listener: RwLock, B>>, - pool: RwLock, - ExtrinsicFor, - >>, + pool: RwLock, ExtrinsicFor>>, import_notification_sinks: Mutex>>>, rotator: PoolRotator>, } @@ -142,7 +138,7 @@ impl ValidatedPool { } /// Bans given set of hashes. - pub fn ban(&self, now: &Instant, hashes: impl IntoIterator>) { + pub fn ban(&self, now: &Instant, hashes: impl IntoIterator>) { self.rotator.ban(now, hashes) } @@ -173,9 +169,10 @@ impl ValidatedPool { /// Imports a bunch of pre-validated transactions to the pool. pub fn submit( &self, - txs: impl IntoIterator>, + txs: impl IntoIterator>, ) -> Vec, B::Error>> { - let results = txs.into_iter() + let results = txs + .into_iter() .map(|validated_tx| self.submit_one(validated_tx)) .collect::>(); @@ -186,10 +183,14 @@ impl ValidatedPool { Default::default() }; - results.into_iter().map(|res| match res { - Ok(ref hash) if removed.contains(hash) => Err(error::Error::ImmediatelyDropped.into()), - other => other, - }).collect() + results + .into_iter() + .map(|res| match res { + Ok(ref hash) if removed.contains(hash) => + Err(error::Error::ImmediatelyDropped.into()), + other => other, + }) + .collect() } /// Submit single pre-validated transaction to the pool. @@ -197,30 +198,28 @@ impl ValidatedPool { match tx { ValidatedTransaction::Valid(tx) => { if !tx.propagate && !(self.is_validator.0)() { - return Err(error::Error::Unactionable.into()); + return Err(error::Error::Unactionable.into()) } let imported = self.pool.write().import(tx)?; if let base::Imported::Ready { ref hash, .. 
} = imported { - self.import_notification_sinks.lock() - .retain_mut(|sink| { - match sink.try_send(*hash) { - Ok(()) => true, - Err(e) => { - if e.is_full() { - log::warn!( - target: "txpool", - "[{:?}] Trying to notify an import but the channel is full", - hash, - ); - true - } else { - false - } + self.import_notification_sinks.lock().retain_mut(|sink| { + match sink.try_send(*hash) { + Ok(()) => true, + Err(e) => + if e.is_full() { + log::warn!( + target: "txpool", + "[{:?}] Trying to notify an import but the channel is full", + hash, + ); + true + } else { + false }, - } - }); + } + }); } let mut listener = self.listener.write(); @@ -244,8 +243,8 @@ impl ValidatedPool { let future_limit = &self.options.future; log::debug!(target: "txpool", "Pool Status: {:?}", status); - if ready_limit.is_exceeded(status.ready, status.ready_bytes) - || future_limit.is_exceeded(status.future, status.future_bytes) + if ready_limit.is_exceeded(status.ready, status.ready_bytes) || + future_limit.is_exceeded(status.future, status.future_bytes) { log::debug!( target: "txpool", @@ -257,8 +256,11 @@ impl ValidatedPool { // clean up the pool let removed = { let mut pool = self.pool.write(); - let removed = pool.enforce_limits(ready_limit, future_limit) - .into_iter().map(|x| x.hash).collect::>(); + let removed = pool + .enforce_limits(ready_limit, future_limit) + .into_iter() + .map(|x| x.hash) + .collect::>(); // ban all removed transactions self.rotator.ban(&Instant::now(), removed.iter().copied()); removed @@ -305,9 +307,17 @@ impl ValidatedPool { /// /// Removes and then submits passed transactions and all dependent transactions. /// Transactions that are missing from the pool are not submitted. 
- pub fn resubmit(&self, mut updated_transactions: HashMap, ValidatedTransactionFor>) { + pub fn resubmit( + &self, + mut updated_transactions: HashMap, ValidatedTransactionFor>, + ) { #[derive(Debug, Clone, Copy, PartialEq)] - enum Status { Future, Ready, Failed, Dropped } + enum Status { + Future, + Ready, + Failed, + Dropped, + } let (mut initial_statuses, final_statuses) = { let mut pool = self.pool.write(); @@ -322,7 +332,11 @@ impl ValidatedPool { let mut initial_statuses = HashMap::new(); let mut txs_to_resubmit = Vec::with_capacity(updated_transactions.len()); while !updated_transactions.is_empty() { - let hash = updated_transactions.keys().next().cloned().expect("transactions is not empty; qed"); + let hash = updated_transactions + .keys() + .next() + .cloned() + .expect("transactions is not empty; qed"); // note we are not considering tx with hash invalid here - we just want // to remove it along with dependent transactions and `remove_subtree()` @@ -390,7 +404,8 @@ impl ValidatedPool { final_statuses.insert(hash, Status::Failed); }, }, - ValidatedTransaction::Invalid(_, _) | ValidatedTransaction::Unknown(_, _) => { + ValidatedTransaction::Invalid(_, _) | + ValidatedTransaction::Unknown(_, _) => { final_statuses.insert(hash, Status::Failed); }, } @@ -425,12 +440,13 @@ impl ValidatedPool { /// For each extrinsic, returns tags that it provides (if known), or None (if it is unknown). pub fn extrinsics_tags(&self, hashes: &[ExtrinsicHash]) -> Vec>> { - self.pool.read() + self.pool + .read() .by_hashes(&hashes) .into_iter() - .map(|existing_in_pool| + .map(|existing_in_pool| { existing_in_pool.map(|transaction| transaction.provides.to_vec()) - ) + }) .collect() } @@ -442,7 +458,7 @@ impl ValidatedPool { /// Prunes ready transactions that provide given list of tags. 
pub fn prune_tags( &self, - tags: impl IntoIterator, + tags: impl IntoIterator, ) -> Result, ExtrinsicFor>, B::Error> { // Perform tag-based pruning in the base pool let status = self.pool.write().prune_tags(tags); @@ -465,7 +481,7 @@ impl ValidatedPool { pub fn resubmit_pruned( &self, at: &BlockId, - known_imported_hashes: impl IntoIterator> + Clone, + known_imported_hashes: impl IntoIterator> + Clone, pruned_hashes: Vec>, pruned_xts: Vec>, ) -> Result<(), B::Error> { @@ -475,13 +491,12 @@ impl ValidatedPool { let results = self.submit(pruned_xts); // Collect the hashes of transactions that now became invalid (meaning that they are successfully pruned). - let hashes = results - .into_iter() - .enumerate() - .filter_map(|(idx, r)| match r.map_err(error::IntoPoolError::into_pool_error) { + let hashes = results.into_iter().enumerate().filter_map(|(idx, r)| { + match r.map_err(error::IntoPoolError::into_pool_error) { Err(Ok(error::Error::InvalidTransaction(_))) => Some(pruned_hashes[idx]), _ => None, - }); + } + }); // Fire `pruned` notifications for collected hashes and make sure to include // `known_imported_hashes` since they were just imported as part of the block. let hashes = hashes.chain(known_imported_hashes.into_iter()); @@ -497,9 +512,11 @@ impl ValidatedPool { pub fn fire_pruned( &self, at: &BlockId, - hashes: impl Iterator>, + hashes: impl Iterator>, ) -> Result<(), B::Error> { - let header_hash = self.api.block_id_to_hash(at)? + let header_hash = self + .api + .block_id_to_hash(at)? .ok_or_else(|| error::Error::InvalidBlockId(format!("{:?}", at)))?; let mut listener = self.listener.write(); let mut set = HashSet::with_capacity(hashes.size_hint().0); @@ -520,7 +537,9 @@ impl ValidatedPool { /// Note this function does not remove transactions that are already included in the chain. /// See `prune_tags` if you want this. pub fn clear_stale(&self, at: &BlockId) -> Result<(), B::Error> { - let block_number = self.api.block_id_to_number(at)? 
+ let block_number = self + .api + .block_id_to_number(at)? .ok_or_else(|| error::Error::InvalidBlockId(format!("{:?}", at)))? .saturated_into::(); let now = Instant::now(); @@ -589,7 +608,7 @@ impl ValidatedPool { pub fn remove_invalid(&self, hashes: &[ExtrinsicHash]) -> Vec> { // early exit in case there is no invalid transactions. if hashes.is_empty() { - return vec![]; + return vec![] } log::debug!(target: "txpool", "Removing invalid transactions: {:?}", hashes); @@ -610,13 +629,15 @@ impl ValidatedPool { } /// Get an iterator for ready transactions ordered by priority - pub fn ready(&self) -> impl Iterator> + Send { + pub fn ready(&self) -> impl Iterator> + Send { self.pool.read().ready() } /// Returns a Vec of hashes and extrinsics in the future pool. pub fn futures(&self) -> Vec<(ExtrinsicHash, ExtrinsicFor)> { - self.pool.read().futures() + self.pool + .read() + .futures() .map(|tx| (tx.hash.clone(), tx.data.clone())) .collect() } @@ -639,10 +660,8 @@ impl ValidatedPool { } } -fn fire_events( - listener: &mut Listener, - imported: &base::Imported, -) where +fn fire_events(listener: &mut Listener, imported: &base::Imported) +where H: hash::Hash + Eq + traits::Member + Serialize, B: ChainApi, { @@ -653,8 +672,6 @@ fn fire_events( removed.into_iter().for_each(|r| listener.dropped(&r.hash, Some(hash))); promoted.into_iter().for_each(|p| listener.ready(p, None)); }, - base::Imported::Future { ref hash } => { - listener.future(hash) - }, + base::Imported::Future { ref hash } => listener.future(hash), } } diff --git a/client/transaction-pool/src/graph/watcher.rs b/client/transaction-pool/src/graph/watcher.rs index 64e6032f0c2d5..91777117efe94 100644 --- a/client/transaction-pool/src/graph/watcher.rs +++ b/client/transaction-pool/src/graph/watcher.rs @@ -20,7 +20,7 @@ use futures::Stream; use sc_transaction_pool_api::TransactionStatus; -use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedSender, TracingUnboundedReceiver}; +use 
sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; /// Extrinsic watcher. /// @@ -41,7 +41,7 @@ impl Watcher { /// Pipe the notifications to given sink. /// /// Make sure to drive the future to completion. - pub fn into_stream(self) -> impl Stream> { + pub fn into_stream(self) -> impl Stream> { self.receiver } } @@ -55,10 +55,7 @@ pub struct Sender { impl Default for Sender { fn default() -> Self { - Sender { - receivers: Default::default(), - is_finalized: false, - } + Sender { receivers: Default::default(), is_finalized: false } } } @@ -67,10 +64,7 @@ impl Sender { pub fn new_watcher(&mut self, hash: H) -> Watcher { let (tx, receiver) = tracing_unbounded("mpsc_txpool_watcher"); self.receivers.push(tx); - Watcher { - receiver, - hash, - } + Watcher { receiver, hash } } /// Transaction became ready. diff --git a/client/transaction-pool/src/lib.rs b/client/transaction-pool/src/lib.rs index 8f89063657c04..302c7a1b59b65 100644 --- a/client/transaction-pool/src/lib.rs +++ b/client/transaction-pool/src/lib.rs @@ -18,14 +18,14 @@ //! Substrate transaction pool implementation. 
-#![recursion_limit="256"] +#![recursion_limit = "256"] #![warn(missing_docs)] #![warn(unused_extern_crates)] mod api; mod graph; -mod revalidation; mod metrics; +mod revalidation; pub mod error; @@ -33,53 +33,60 @@ pub mod error; #[cfg(feature = "test-helpers")] pub mod test_helpers { pub use super::{ - graph::{ChainApi, Pool, NumberFor, BlockHash, ExtrinsicFor}, + graph::{BlockHash, ChainApi, ExtrinsicFor, NumberFor, Pool}, revalidation::RevalidationQueue, }; } -pub use graph::{Options, Transaction}; pub use crate::api::{FullChainApi, LightChainApi}; -use std::{collections::{HashMap, HashSet}, sync::Arc, pin::Pin, convert::TryInto}; -use futures::{prelude::*, future::{self, ready}, channel::oneshot}; +use futures::{ + channel::oneshot, + future::{self, ready}, + prelude::*, +}; +pub use graph::{Options, Transaction}; use parking_lot::Mutex; +use std::{ + collections::{HashMap, HashSet}, + convert::TryInto, + pin::Pin, + sync::Arc, +}; -use sp_runtime::{ - generic::BlockId, - traits::{Block as BlockT, NumberFor, AtLeast32Bit, Extrinsic, Zero, Header as HeaderT}, +use graph::{ExtrinsicHash, IsValidator}; +use sc_transaction_pool_api::{ + ChainEvent, ImportNotificationStream, MaintainedTransactionPool, PoolFuture, PoolStatus, + TransactionFor, TransactionPool, TransactionSource, TransactionStatusStreamFor, TxHash, }; use sp_core::traits::SpawnEssentialNamed; -use sc_transaction_pool_api::{ - TransactionPool, PoolStatus, ImportNotificationStream, TxHash, TransactionFor, - TransactionStatusStreamFor, MaintainedTransactionPool, PoolFuture, ChainEvent, - TransactionSource, +use sp_runtime::{ + generic::BlockId, + traits::{AtLeast32Bit, Block as BlockT, Extrinsic, Header as HeaderT, NumberFor, Zero}, }; -use graph::{IsValidator, ExtrinsicHash}; use wasm_timer::Instant; -use prometheus_endpoint::Registry as PrometheusRegistry; use crate::metrics::MetricsLink as PrometheusMetrics; +use prometheus_endpoint::Registry as PrometheusRegistry; -type BoxedReadyIterator = Box< - 
dyn Iterator>> + Send ->; +type BoxedReadyIterator = + Box>> + Send>; -type ReadyIteratorFor = BoxedReadyIterator< - graph::ExtrinsicHash, graph::ExtrinsicFor ->; +type ReadyIteratorFor = + BoxedReadyIterator, graph::ExtrinsicFor>; -type PolledIterator = Pin> + Send>>; +type PolledIterator = Pin> + Send>>; /// A transaction pool for a full node. pub type FullPool = BasicPool, Block>; /// A transaction pool for a light node. -pub type LightPool = BasicPool, Block>; +pub type LightPool = + BasicPool, Block>; /// Basic implementation of transaction pool that can be customized by providing PoolApi. pub struct BasicPool - where - Block: BlockT, - PoolApi: graph::ChainApi, +where + Block: BlockT, + PoolApi: graph::ChainApi, { pool: Arc>, api: Arc, @@ -96,19 +103,13 @@ struct ReadyPoll { impl Default for ReadyPoll { fn default() -> Self { - Self { - updated_at: NumberFor::::zero(), - pollers: Default::default(), - } + Self { updated_at: NumberFor::::zero(), pollers: Default::default() } } } impl ReadyPoll { fn new(best_block_number: NumberFor) -> Self { - Self { - updated_at: best_block_number, - pollers: Default::default(), - } + Self { updated_at: best_block_number, pollers: Default::default() } } fn trigger(&mut self, number: NumberFor, iterator_factory: impl Fn() -> T) { @@ -140,7 +141,7 @@ impl ReadyPoll { #[cfg(not(target_os = "unknown"))] impl parity_util_mem::MallocSizeOf for BasicPool where - PoolApi: graph::ChainApi, + PoolApi: graph::ChainApi, Block: BlockT, { fn size_of(&self, ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { @@ -167,15 +168,15 @@ pub enum RevalidationType { } impl BasicPool - where - Block: BlockT, - PoolApi: graph::ChainApi + 'static, +where + Block: BlockT, + PoolApi: graph::ChainApi + 'static, { /// Create new basic transaction pool with provided api, for tests. 
#[cfg(feature = "test-helpers")] pub fn new_test( pool_api: Arc, - ) -> (Self, Pin + Send>>, intervalier::BackSignalControl) { + ) -> (Self, Pin + Send>>, intervalier::BackSignalControl) { let pool = Arc::new(graph::Pool::new(Default::default(), true.into(), pool_api.clone())); let (revalidation_queue, background_task, notifier) = revalidation::RevalidationQueue::new_test(pool_api.clone(), pool.clone()); @@ -206,15 +207,11 @@ impl BasicPool ) -> Self { let pool = Arc::new(graph::Pool::new(options, is_validator, pool_api.clone())); let (revalidation_queue, background_task) = match revalidation_type { - RevalidationType::Light => ( - revalidation::RevalidationQueue::new(pool_api.clone(), pool.clone()), - None, - ), + RevalidationType::Light => + (revalidation::RevalidationQueue::new(pool_api.clone(), pool.clone()), None), RevalidationType::Full => { - let (queue, background) = revalidation::RevalidationQueue::new_background( - pool_api.clone(), - pool.clone(), - ); + let (queue, background) = + revalidation::RevalidationQueue::new_background(pool_api.clone(), pool.clone()); (queue, Some(background)) }, }; @@ -227,12 +224,11 @@ impl BasicPool api: pool_api, pool, revalidation_queue: Arc::new(revalidation_queue), - revalidation_strategy: Arc::new(Mutex::new( - match revalidation_type { - RevalidationType::Light => RevalidationStrategy::Light(RevalidationStatus::NotScheduled), - RevalidationType::Full => RevalidationStrategy::Always, - } - )), + revalidation_strategy: Arc::new(Mutex::new(match revalidation_type { + RevalidationType::Light => + RevalidationStrategy::Light(RevalidationStatus::NotScheduled), + RevalidationType::Full => RevalidationStrategy::Always, + })), ready_poll: Arc::new(Mutex::new(ReadyPoll::new(best_block_number))), metrics: PrometheusMetrics::new(prometheus), } @@ -251,15 +247,13 @@ impl BasicPool } impl TransactionPool for BasicPool - where - Block: BlockT, - PoolApi: 'static + graph::ChainApi, +where + Block: BlockT, + PoolApi: 'static + 
graph::ChainApi, { type Block = PoolApi::Block; type Hash = graph::ExtrinsicHash; - type InPoolTransaction = graph::base_pool::Transaction< - TxHash, TransactionFor - >; + type InPoolTransaction = graph::base_pool::Transaction, TransactionFor>; type Error = PoolApi::Error; fn submit_at( @@ -271,7 +265,8 @@ impl TransactionPool for BasicPool let pool = self.pool.clone(); let at = *at; - self.metrics.report(|metrics| metrics.submitted_transactions.inc_by(xts.len() as u64)); + self.metrics + .report(|metrics| metrics.submitted_transactions.inc_by(xts.len() as u64)); async move { pool.submit_at(&at, source, xts).await }.boxed() } @@ -305,12 +300,14 @@ impl TransactionPool for BasicPool pool.submit_and_watch(&at, source, xt) .map(|result| result.map(|watcher| Box::new(watcher.into_stream()) as _)) .await - }.boxed() + } + .boxed() } fn remove_invalid(&self, hashes: &[TxHash]) -> Vec> { let removed = self.pool.validated_pool().remove_invalid(hashes); - self.metrics.report(|metrics| metrics.validations_invalid.inc_by(removed.len() as u64)); + self.metrics + .report(|metrics| metrics.validations_invalid.inc_by(removed.len() as u64)); removed } @@ -347,16 +344,18 @@ impl TransactionPool for BasicPool if self.ready_poll.lock().updated_at() >= at { log::trace!(target: "txpool", "Transaction pool already processed block #{}", at); let iterator: ReadyIteratorFor = Box::new(self.pool.validated_pool().ready()); - return async move { iterator }.boxed(); + return async move { iterator }.boxed() } self.ready_poll .lock() .add(at) - .map(|received| received.unwrap_or_else(|e| { - log::warn!("Error receiving pending set: {:?}", e); - Box::new(std::iter::empty()) - })) + .map(|received| { + received.unwrap_or_else(|e| { + log::warn!("Error receiving pending set: {:?}", e); + Box::new(std::iter::empty()) + }) + }) .boxed() } @@ -452,9 +451,10 @@ where at: &BlockId, xt: sc_transaction_pool_api::LocalTransactionFor, ) -> Result { - use graph::{ValidatedTransaction, ChainApi}; - use 
sp_runtime::traits::SaturatedConversion; - use sp_runtime::transaction_validity::TransactionValidityError; + use graph::{ChainApi, ValidatedTransaction}; + use sp_runtime::{ + traits::SaturatedConversion, transaction_validity::TransactionValidityError, + }; let validity = self .api @@ -527,10 +527,7 @@ impl RevalidationStrategy { ), resubmit: false, }, - Self::Always => RevalidationAction { - revalidate: true, - resubmit: true, - } + Self::Always => RevalidationAction { revalidate: true, resubmit: true }, } } } @@ -555,15 +552,16 @@ impl RevalidationStatus { revalidate_block_period.map(|period| block + period), ); false - } + }, Self::Scheduled(revalidate_at_time, revalidate_at_block) => { - let is_required = revalidate_at_time.map(|at| Instant::now() >= at).unwrap_or(false) - || revalidate_at_block.map(|at| block >= at).unwrap_or(false); + let is_required = + revalidate_at_time.map(|at| Instant::now() >= at).unwrap_or(false) || + revalidate_at_block.map(|at| block >= at).unwrap_or(false); if is_required { *self = Self::InProgress; } is_required - } + }, Self::InProgress => false, } } @@ -575,16 +573,16 @@ async fn prune_known_txs_for_block, ) -> Vec> { - let extrinsics = api.block_body(&block_id).await + let extrinsics = api + .block_body(&block_id) + .await .unwrap_or_else(|e| { log::warn!("Prune known transactions: error request {:?}!", e); None }) .unwrap_or_default(); - let hashes = extrinsics.iter() - .map(|tx| pool.hash_of(&tx)) - .collect::>(); + let hashes = extrinsics.iter().map(|tx| pool.hash_of(&tx)).collect::>(); log::trace!(target: "txpool", "Pruning transactions: {:?}", hashes); @@ -597,10 +595,11 @@ async fn prune_known_txs_for_block { log::debug!(target: "txpool", "Error retrieving header for {:?}: {:?}", block_id, e); return hashes - } + }, }; - if let Err(e) = pool.prune(&block_id, &BlockId::hash(*header.parent_hash()), &extrinsics).await { + if let Err(e) = pool.prune(&block_id, &BlockId::hash(*header.parent_hash()), &extrinsics).await + { 
log::error!("Cannot prune known in the pool {:?}!", e); } @@ -608,11 +607,11 @@ async fn prune_known_txs_for_block MaintainedTransactionPool for BasicPool - where - Block: BlockT, - PoolApi: 'static + graph::ChainApi, +where + Block: BlockT, + PoolApi: 'static + graph::ChainApi, { - fn maintain(&self, event: ChainEvent) -> Pin + Send>> { + fn maintain(&self, event: ChainEvent) -> Pin + Send>> { match event { ChainEvent::NewBestBlock { hash, tree_route } => { let pool = self.pool.clone(); @@ -627,8 +626,8 @@ impl MaintainedTransactionPool for BasicPool "Skipping chain event - no number for that block {:?}", id, ); - return Box::pin(ready(())); - } + return Box::pin(ready(())) + }, }; let next_action = self.revalidation_strategy.lock().next( @@ -657,27 +656,21 @@ impl MaintainedTransactionPool for BasicPool pool.validated_pool().on_block_retracted(retracted.hash.clone()); } - future::join_all( - tree_route - .enacted() - .iter() - .map(|h| - prune_known_txs_for_block( - BlockId::Hash(h.hash.clone()), - &*api, - &*pool, - ), - ), - ).await.into_iter().for_each(|enacted_log|{ + future::join_all(tree_route.enacted().iter().map(|h| { + prune_known_txs_for_block(BlockId::Hash(h.hash.clone()), &*api, &*pool) + })) + .await + .into_iter() + .for_each(|enacted_log| { pruned_log.extend(enacted_log); }) } pruned_log.extend(prune_known_txs_for_block(id.clone(), &*api, &*pool).await); - metrics.report( - |metrics| metrics.block_transactions_pruned.inc_by(pruned_log.len() as u64) - ); + metrics.report(|metrics| { + metrics.block_transactions_pruned.inc_by(pruned_log.len() as u64) + }); if let (true, Some(tree_route)) = (next_action.resubmit, tree_route) { let mut resubmit_transactions = Vec::new(); @@ -685,7 +678,8 @@ impl MaintainedTransactionPool for BasicPool for retracted in tree_route.retracted() { let hash = retracted.hash.clone(); - let block_transactions = api.block_body(&BlockId::hash(hash)) + let block_transactions = api + .block_body(&BlockId::hash(hash)) .await 
.unwrap_or_else(|e| { log::warn!("Failed to fetch block body {:?}!", e); @@ -697,8 +691,8 @@ impl MaintainedTransactionPool for BasicPool let mut resubmitted_to_report = 0; - resubmit_transactions.extend( - block_transactions.into_iter().filter(|tx| { + resubmit_transactions.extend(block_transactions.into_iter().filter( + |tx| { let tx_hash = pool.hash_of(&tx); let contains = pruned_log.contains(&tx_hash); @@ -714,21 +708,24 @@ impl MaintainedTransactionPool for BasicPool ); } !contains - }) - ); + }, + )); - metrics.report( - |metrics| metrics.block_transactions_resubmitted.inc_by(resubmitted_to_report) - ); + metrics.report(|metrics| { + metrics.block_transactions_resubmitted.inc_by(resubmitted_to_report) + }); } - if let Err(e) = pool.resubmit_at( - &id, - // These transactions are coming from retracted blocks, we should - // simply consider them external. - TransactionSource::External, - resubmit_transactions, - ).await { + if let Err(e) = pool + .resubmit_at( + &id, + // These transactions are coming from retracted blocks, we should + // simply consider them external. + TransactionSource::External, + resubmit_transactions, + ) + .await + { log::debug!( target: "txpool", "[{:?}] Error re-submitting transactions: {:?}", @@ -741,22 +738,20 @@ impl MaintainedTransactionPool for BasicPool let extra_pool = pool.clone(); // After #5200 lands, this arguably might be moved to the // handler of "all blocks notification". 
- ready_poll.lock().trigger( - block_number, - move || Box::new(extra_pool.validated_pool().ready()), - ); + ready_poll.lock().trigger(block_number, move || { + Box::new(extra_pool.validated_pool().ready()) + }); if next_action.revalidate { - let hashes = pool.validated_pool() - .ready() - .map(|tx| tx.hash.clone()) - .collect(); + let hashes = + pool.validated_pool().ready().map(|tx| tx.hash.clone()).collect(); revalidation_queue.revalidate_later(block_number, hashes).await; revalidation_strategy.lock().clear(); } - }.boxed() - } + } + .boxed() + }, ChainEvent::Finalized { hash } => { let pool = self.pool.clone(); async move { @@ -767,28 +762,25 @@ impl MaintainedTransactionPool for BasicPool e, hash ) } - }.boxed() - } + } + .boxed() + }, } } } /// Inform the transaction pool about imported and finalized blocks. -pub async fn notification_future( - client: Arc, - txpool: Arc -) - where - Block: BlockT, - Client: sc_client_api::BlockchainEvents, - Pool: MaintainedTransactionPool, +pub async fn notification_future(client: Arc, txpool: Arc) +where + Block: BlockT, + Client: sc_client_api::BlockchainEvents, + Pool: MaintainedTransactionPool, { - let import_stream = client.import_notification_stream() + let import_stream = client + .import_notification_stream() .filter_map(|n| ready(n.try_into().ok())) .fuse(); - let finality_stream = client.finality_notification_stream() - .map(Into::into) - .fuse(); + let finality_stream = client.finality_notification_stream().map(Into::into).fuse(); futures::stream::select(import_stream, finality_stream) .for_each(|evt| txpool.maintain(evt)) diff --git a/client/transaction-pool/src/metrics.rs b/client/transaction-pool/src/metrics.rs index e0b70183a86b2..d62d64f13a0a4 100644 --- a/client/transaction-pool/src/metrics.rs +++ b/client/transaction-pool/src/metrics.rs @@ -27,13 +27,13 @@ pub struct MetricsLink(Arc>); impl MetricsLink { pub fn new(registry: Option<&Registry>) -> Self { - Self(Arc::new( - registry.and_then(|registry| - 
Metrics::register(registry) - .map_err(|err| { log::warn!("Failed to register prometheus metrics: {}", err); }) - .ok() - ) - )) + Self(Arc::new(registry.and_then(|registry| { + Metrics::register(registry) + .map_err(|err| { + log::warn!("Failed to register prometheus metrics: {}", err); + }) + .ok() + }))) } pub fn report(&self, do_this: impl FnOnce(&Metrics)) { diff --git a/client/transaction-pool/src/revalidation.rs b/client/transaction-pool/src/revalidation.rs index ffc82bf619ccc..9f15185694d0a 100644 --- a/client/transaction-pool/src/revalidation.rs +++ b/client/transaction-pool/src/revalidation.rs @@ -18,13 +18,19 @@ //! Pool periodic revalidation. -use std::{sync::Arc, pin::Pin, collections::{HashMap, HashSet, BTreeMap}}; - -use crate::graph::{ChainApi, Pool, ExtrinsicHash, NumberFor, ValidatedTransaction}; -use sp_runtime::traits::{Zero, SaturatedConversion}; -use sp_runtime::generic::BlockId; -use sp_runtime::transaction_validity::TransactionValidityError; -use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedSender, TracingUnboundedReceiver}; +use std::{ + collections::{BTreeMap, HashMap, HashSet}, + pin::Pin, + sync::Arc, +}; + +use crate::graph::{ChainApi, ExtrinsicHash, NumberFor, Pool, ValidatedTransaction}; +use sp_runtime::{ + generic::BlockId, + traits::{SaturatedConversion, Zero}, + transaction_validity::TransactionValidityError, +}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; use futures::prelude::*; use std::time::Duration; @@ -63,19 +69,18 @@ async fn batch_revalidate( pool: Arc>, api: Arc, at: NumberFor, - batch: impl IntoIterator>, + batch: impl IntoIterator>, ) { let mut invalid_hashes = Vec::new(); let mut revalidated = HashMap::new(); - let validation_results = futures::future::join_all( - batch.into_iter().filter_map(|ext_hash| { - pool.validated_pool().ready_by_hash(&ext_hash).map(|ext| { - api.validate_transaction(&BlockId::Number(at), ext.source, ext.data.clone()) - .map(move 
|validation_result| (validation_result, ext_hash, ext)) - }) + let validation_results = futures::future::join_all(batch.into_iter().filter_map(|ext_hash| { + pool.validated_pool().ready_by_hash(&ext_hash).map(|ext| { + api.validate_transaction(&BlockId::Number(at), ext.source, ext.data.clone()) + .map(move |validation_result| (validation_result, ext_hash, ext)) }) - ).await; + })) + .await; for (validation_result, ext_hash, ext) in validation_results { match validation_result { @@ -98,7 +103,7 @@ async fn batch_revalidate( ext.data.clone(), api.hash_and_length(&ext.data).1, validity, - ) + ), ); }, Err(validation_err) => { @@ -109,7 +114,7 @@ async fn batch_revalidate( validation_err ); invalid_hashes.push(ext_hash); - } + }, } } @@ -120,10 +125,7 @@ async fn batch_revalidate( } impl RevalidationWorker { - fn new( - api: Arc, - pool: Arc>, - ) -> Self { + fn new(api: Arc, pool: Arc>) -> Self { Self { api, pool, @@ -135,7 +137,8 @@ impl RevalidationWorker { fn prepare_batch(&mut self) -> Vec> { let mut queued_exts = Vec::new(); - let mut left = std::cmp::max(MIN_BACKGROUND_REVALIDATION_BATCH_SIZE, self.members.len() / 4); + let mut left = + std::cmp::max(MIN_BACKGROUND_REVALIDATION_BATCH_SIZE, self.members.len() / 4); // Take maximum of count transaction by order // which they got into the pool @@ -188,11 +191,14 @@ impl RevalidationWorker { ext_hash, ); - continue; + continue } - self.block_ordered.entry(block_number) - .and_modify(|value| { value.insert(ext_hash.clone()); }) + self.block_ordered + .entry(block_number) + .and_modify(|value| { + value.insert(ext_hash.clone()); + }) .or_insert_with(|| { let mut bt = HashSet::new(); bt.insert(ext_hash.clone()); @@ -211,7 +217,10 @@ impl RevalidationWorker { mut self, from_queue: TracingUnboundedReceiver>, interval: R, - ) where R: Send, R::Guard: Send { + ) where + R: Send, + R::Guard: Send, + { let interval = interval.into_stream().fuse(); let from_queue = from_queue.fuse(); futures::pin_mut!(interval, from_queue); 
@@ -269,7 +278,6 @@ impl RevalidationWorker { } } - /// Revalidation queue. /// /// Can be configured background (`new_background`) @@ -286,11 +294,7 @@ where { /// New revalidation queue without background worker. pub fn new(api: Arc, pool: Arc>) -> Self { - Self { - api, - pool, - background: None, - } + Self { api, pool, background: None } } /// New revalidation queue with background worker. @@ -298,34 +302,40 @@ where api: Arc, pool: Arc>, interval: R, - ) -> (Self, Pin + Send>>) where R: Send + 'static, R::Guard: Send { + ) -> (Self, Pin + Send>>) + where + R: Send + 'static, + R::Guard: Send, + { let (to_worker, from_queue) = tracing_unbounded("mpsc_revalidation_queue"); let worker = RevalidationWorker::new(api.clone(), pool.clone()); - let queue = - Self { - api, - pool, - background: Some(to_worker), - }; + let queue = Self { api, pool, background: Some(to_worker) }; (queue, worker.run(from_queue, interval).boxed()) } /// New revalidation queue with background worker. - pub fn new_background(api: Arc, pool: Arc>) -> - (Self, Pin + Send>>) - { - Self::new_with_interval(api, pool, intervalier::Interval::new(BACKGROUND_REVALIDATION_INTERVAL)) + pub fn new_background( + api: Arc, + pool: Arc>, + ) -> (Self, Pin + Send>>) { + Self::new_with_interval( + api, + pool, + intervalier::Interval::new(BACKGROUND_REVALIDATION_INTERVAL), + ) } /// New revalidation queue with background worker and test signal. 
#[cfg(feature = "test-helpers")] - pub fn new_test(api: Arc, pool: Arc>) -> - (Self, Pin + Send>>, intervalier::BackSignalControl) - { - let (interval, notifier) = intervalier::BackSignalInterval::new(BACKGROUND_REVALIDATION_INTERVAL); + pub fn new_test( + api: Arc, + pool: Arc>, + ) -> (Self, Pin + Send>>, intervalier::BackSignalControl) { + let (interval, notifier) = + intervalier::BackSignalInterval::new(BACKGROUND_REVALIDATION_INTERVAL); let (queue, background) = Self::new_with_interval(api, pool, interval); (queue, background, notifier) @@ -361,6 +371,4 @@ where } #[cfg(test)] -mod tests { - -} +mod tests {} diff --git a/client/transaction-pool/tests/pool.rs b/client/transaction-pool/tests/pool.rs index 9a9d59214d0b9..6c34d05cd5dcb 100644 --- a/client/transaction-pool/tests/pool.rs +++ b/client/transaction-pool/tests/pool.rs @@ -17,37 +17,40 @@ // along with this program. If not, see . //! Tests for top-level transaction pool api -use sc_transaction_pool_api::{TransactionStatus, ChainEvent, MaintainedTransactionPool, TransactionPool}; -use futures::executor::{block_on, block_on_stream}; +use codec::Encode; +use futures::{ + executor::{block_on, block_on_stream}, + prelude::*, + task::Poll, +}; +use sc_block_builder::BlockBuilderProvider; +use sc_client_api::client::BlockchainEvents; +use sc_transaction_pool::{test_helpers::*, *}; +use sc_transaction_pool_api::{ + ChainEvent, MaintainedTransactionPool, TransactionPool, TransactionStatus, +}; +use sp_consensus::BlockOrigin; use sp_runtime::{ - generic::BlockId, traits::Block as _, - transaction_validity::{ValidTransaction, TransactionSource, InvalidTransaction}, + generic::BlockId, + traits::Block as _, + transaction_validity::{InvalidTransaction, TransactionSource, ValidTransaction}, }; +use std::{collections::BTreeSet, convert::TryInto, sync::Arc}; use substrate_test_runtime_client::{ - runtime::{Block, Hash, Index, Header, Extrinsic, Transfer}, AccountKeyring::*, + runtime::{Block, Extrinsic, Hash, Header, 
Index, Transfer}, + AccountKeyring::*, ClientBlockImportExt, }; -use substrate_test_runtime_transaction_pool::{TestApi, uxt}; -use futures::{prelude::*, task::Poll}; -use codec::Encode; -use std::{collections::BTreeSet, sync::Arc, convert::TryInto}; -use sc_client_api::client::BlockchainEvents; -use sc_block_builder::BlockBuilderProvider; -use sp_consensus::BlockOrigin; -use sc_transaction_pool::{*, test_helpers::*}; +use substrate_test_runtime_transaction_pool::{uxt, TestApi}; fn pool() -> Pool { Pool::new(Default::default(), true.into(), TestApi::with_alice_nonce(209).into()) } -fn maintained_pool() -> ( - BasicPool, - futures::executor::ThreadPool, - intervalier::BackSignalControl, -) { - let (pool, background_task, notifier) = BasicPool::new_test( - Arc::new(TestApi::with_alice_nonce(209)), - ); +fn maintained_pool( +) -> (BasicPool, futures::executor::ThreadPool, intervalier::BackSignalControl) { + let (pool, background_task, notifier) = + BasicPool::new_test(Arc::new(TestApi::with_alice_nonce(209))); let thread_pool = futures::executor::ThreadPool::new().unwrap(); thread_pool.spawn_ok(background_task); @@ -107,13 +110,8 @@ fn prune_tags_should_work() { assert_eq!(pending, vec![209, 210]); pool.validated_pool().api().push_block(1, Vec::new(), true); - block_on( - pool.prune_tags( - &BlockId::number(1), - vec![vec![209]], - vec![hash209], - ) - ).expect("Prune tags"); + block_on(pool.prune_tags(&BlockId::number(1), vec![vec![209]], vec![hash209])) + .expect("Prune tags"); let pending: Vec<_> = pool.validated_pool().ready().map(|a| a.data.transfer().nonce).collect(); assert_eq!(pending, vec![210]); @@ -140,17 +138,13 @@ fn only_prune_on_new_best() { let pool = maintained_pool().0; let uxt = uxt(Alice, 209); - let _ = block_on( - pool.submit_and_watch(&BlockId::number(0), SOURCE, uxt.clone()) - ).expect("1. Imported"); + let _ = block_on(pool.submit_and_watch(&BlockId::number(0), SOURCE, uxt.clone())) + .expect("1. 
Imported"); pool.api().push_block(1, vec![uxt.clone()], true); assert_eq!(pool.status().ready, 1); let header = pool.api().push_block(2, vec![uxt], true); - let event = ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - }; + let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; block_on(pool.maintain(event)); assert_eq!(pool.status().ready, 0); } @@ -193,10 +187,7 @@ fn should_correctly_prune_transactions_providing_more_than_one_tag() { } fn block_event(header: Header) -> ChainEvent { - ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - } + ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None } } fn block_event_with_retracted( @@ -204,12 +195,10 @@ fn block_event_with_retracted( retracted_start: Hash, api: &TestApi, ) -> ChainEvent { - let tree_route = api.tree_route(retracted_start, header.parent_hash).expect("Tree route exists"); + let tree_route = + api.tree_route(retracted_start, header.parent_hash).expect("Tree route exists"); - ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: Some(Arc::new(tree_route)), - } + ChainEvent::NewBestBlock { hash: header.hash(), tree_route: Some(Arc::new(tree_route)) } } #[test] @@ -266,7 +255,6 @@ fn should_resubmit_from_retracted_during_maintenance() { assert_eq!(pool.status().ready, 1); } - #[test] fn should_not_resubmit_from_retracted_during_maintenance_if_tx_is_also_in_enacted() { let xt = uxt(Alice, 209); @@ -334,7 +322,6 @@ fn should_revalidate_across_many_blocks() { assert_eq!(pool.api().validation_requests().len(), 7); } - #[test] fn should_push_watchers_during_maintenance() { fn alice_uxt(nonce: u64) -> Extrinsic { @@ -345,25 +332,20 @@ fn should_push_watchers_during_maintenance() { let (pool, _guard, mut notifier) = maintained_pool(); let tx0 = alice_uxt(0); - let watcher0 = block_on( - pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx0.clone()) - ).unwrap(); + let watcher0 = + 
block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx0.clone())).unwrap(); let tx1 = alice_uxt(1); - let watcher1 = block_on( - pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx1.clone()) - ).unwrap(); + let watcher1 = + block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx1.clone())).unwrap(); let tx2 = alice_uxt(2); - let watcher2 = block_on( - pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx2.clone()) - ).unwrap(); + let watcher2 = + block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx2.clone())).unwrap(); let tx3 = alice_uxt(3); - let watcher3 = block_on( - pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx3.clone()) - ).unwrap(); + let watcher3 = + block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx3.clone())).unwrap(); let tx4 = alice_uxt(4); - let watcher4 = block_on( - pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx4.clone()) - ).unwrap(); + let watcher4 = + block_on(pool.submit_and_watch(&BlockId::Number(0), SOURCE, tx4.clone())).unwrap(); assert_eq!(pool.status().ready, 5); // when @@ -405,21 +387,24 @@ fn should_push_watchers_during_maintenance() { vec![ TransactionStatus::Ready, TransactionStatus::InBlock(header_hash.clone()), - TransactionStatus::Finalized(header_hash.clone())], + TransactionStatus::Finalized(header_hash.clone()) + ], ); assert_eq!( futures::executor::block_on_stream(watcher1).collect::>(), vec![ TransactionStatus::Ready, TransactionStatus::InBlock(header_hash.clone()), - TransactionStatus::Finalized(header_hash.clone())], + TransactionStatus::Finalized(header_hash.clone()) + ], ); assert_eq!( futures::executor::block_on_stream(watcher2).collect::>(), vec![ TransactionStatus::Ready, TransactionStatus::InBlock(header_hash.clone()), - TransactionStatus::Finalized(header_hash.clone())], + TransactionStatus::Finalized(header_hash.clone()) + ], ); } @@ -440,16 +425,12 @@ fn finalization() { let api = TestApi::with_alice_nonce(209); api.push_block(1, vec![], true); let (pool, 
_background, _) = BasicPool::new_test(api.into()); - let watcher = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, xt.clone()) - ).expect("1. Imported"); + let watcher = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, xt.clone())) + .expect("1. Imported"); pool.api().push_block(2, vec![xt.clone()], true); let header = pool.api().chain().read().block_by_number.get(&2).unwrap()[0].0.header().clone(); - let event = ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - }; + let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; block_on(pool.maintain(event)); let event = ChainEvent::Finalized { hash: header.hash() }; @@ -489,17 +470,14 @@ fn fork_aware_finalization() { // block B1 { - let watcher = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone()) - ).expect("1. Imported"); + let watcher = + block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone())) + .expect("1. Imported"); let header = pool.api().push_block(2, vec![from_alice.clone()], true); canon_watchers.push((watcher, header.hash())); assert_eq!(pool.status().ready, 1); - let event = ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - }; + let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; b1 = header.hash(); block_on(pool.maintain(event)); assert_eq!(pool.status().ready, 0); @@ -510,14 +488,11 @@ fn fork_aware_finalization() { // block C2 { let header = pool.api().push_block_with_parent(b1, vec![from_dave.clone()], true); - from_dave_watcher = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, from_dave.clone()) - ).expect("1. Imported"); + from_dave_watcher = + block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_dave.clone())) + .expect("1. 
Imported"); assert_eq!(pool.status().ready, 1); - let event = ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - }; + let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; c2 = header.hash(); block_on(pool.maintain(event)); assert_eq!(pool.status().ready, 0); @@ -525,16 +500,13 @@ fn fork_aware_finalization() { // block D2 { - from_bob_watcher = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, from_bob.clone()) - ).expect("1. Imported"); + from_bob_watcher = + block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_bob.clone())) + .expect("1. Imported"); assert_eq!(pool.status().ready, 1); let header = pool.api().push_block_with_parent(c2, vec![from_bob.clone()], true); - let event = ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - }; + let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; d2 = header.hash(); block_on(pool.maintain(event)); assert_eq!(pool.status().ready, 0); @@ -542,9 +514,9 @@ fn fork_aware_finalization() { // block C1 { - let watcher = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, from_charlie.clone()) - ).expect("1.Imported"); + let watcher = + block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_charlie.clone())) + .expect("1.Imported"); assert_eq!(pool.status().ready, 1); let header = pool.api().push_block(3, vec![from_charlie.clone()], true); @@ -560,17 +532,13 @@ fn fork_aware_finalization() { // block D1 { let xt = uxt(Eve, 0); - let w = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, xt.clone()) - ).expect("1. Imported"); + let w = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, xt.clone())) + .expect("1. 
Imported"); assert_eq!(pool.status().ready, 3); let header = pool.api().push_block(4, vec![xt.clone()], true); canon_watchers.push((w, header.hash())); - let event = ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - }; + let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; d1 = header.hash(); block_on(pool.maintain(event)); assert_eq!(pool.status().ready, 2); @@ -584,16 +552,12 @@ fn fork_aware_finalization() { { let header = pool.api().push_block(5, vec![from_dave, from_bob], true); e1 = header.hash(); - let event = ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - }; + let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; block_on(pool.maintain(event)); assert_eq!(pool.status().ready, 0); block_on(pool.maintain(ChainEvent::Finalized { hash: e1 })); } - for (canon_watcher, h) in canon_watchers { let mut stream = futures::executor::block_on_stream(canon_watcher); assert_eq!(stream.next(), Some(TransactionStatus::Ready)); @@ -602,7 +566,6 @@ fn fork_aware_finalization() { assert_eq!(stream.next(), None); } - { let mut stream = futures::executor::block_on_stream(from_dave_watcher); assert_eq!(stream.next(), Some(TransactionStatus::Ready)); @@ -639,19 +602,15 @@ fn prune_and_retract_tx_at_same_time() { let from_alice = uxt(Alice, 1); pool.api().increment_nonce(Alice.into()); - let watcher = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone()) - ).expect("1. Imported"); + let watcher = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, from_alice.clone())) + .expect("1. 
Imported"); // Block B1 let b1 = { let header = pool.api().push_block(2, vec![from_alice.clone()], true); assert_eq!(pool.status().ready, 1); - let event = ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - }; + let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; block_on(pool.maintain(event)); assert_eq!(pool.status().ready, 0); header.hash() @@ -683,7 +642,6 @@ fn prune_and_retract_tx_at_same_time() { } } - /// This test ensures that transactions from a fork are re-submitted if /// the forked block is not part of the retracted blocks. This happens as the /// retracted block list only contains the route from the old best to the new @@ -716,16 +674,12 @@ fn resubmit_tx_of_fork_that_is_not_part_of_retracted() { // Block D0 { - let _ = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, tx0.clone()) - ).expect("1. Imported"); + let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx0.clone())) + .expect("1. Imported"); let header = pool.api().push_block(2, vec![tx0.clone()], true); assert_eq!(pool.status().ready, 1); - let event = ChainEvent::NewBestBlock { - hash: header.hash(), - tree_route: None, - }; + let event = ChainEvent::NewBestBlock { hash: header.hash(), tree_route: None }; d0 = header.hash(); block_on(pool.maintain(event)); assert_eq!(pool.status().ready, 0); @@ -733,9 +687,8 @@ fn resubmit_tx_of_fork_that_is_not_part_of_retracted() { // Block D1 { - let _ = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, tx1.clone()) - ).expect("1. Imported"); + let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx1.clone())) + .expect("1. Imported"); pool.api().push_block(2, vec![tx1.clone()], false); assert_eq!(pool.status().ready, 1); } @@ -775,9 +728,8 @@ fn resubmit_from_retracted_fork() { // Block D0 { - let _ = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, tx0.clone()) - ).expect("1. 
Imported"); + let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx0.clone())) + .expect("1. Imported"); let header = pool.api().push_block(2, vec![tx0.clone()], true); assert_eq!(pool.status().ready, 1); @@ -787,9 +739,8 @@ fn resubmit_from_retracted_fork() { // Block E0 { - let _ = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, tx1.clone()) - ).expect("1. Imported"); + let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx1.clone())) + .expect("1. Imported"); let header = pool.api().push_block(3, vec![tx1.clone()], true); block_on(pool.maintain(block_event(header))); assert_eq!(pool.status().ready, 0); @@ -797,9 +748,8 @@ fn resubmit_from_retracted_fork() { // Block F0 let f0 = { - let _ = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, tx2.clone()) - ).expect("1. Imported"); + let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx2.clone())) + .expect("1. Imported"); let header = pool.api().push_block(4, vec![tx2.clone()], true); block_on(pool.maintain(block_event(header.clone()))); assert_eq!(pool.status().ready, 0); @@ -808,9 +758,8 @@ fn resubmit_from_retracted_fork() { // Block D1 let d1 = { - let _ = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, tx3.clone()) - ).expect("1. Imported"); + let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx3.clone())) + .expect("1. Imported"); let header = pool.api().push_block(2, vec![tx3.clone()], true); assert_eq!(pool.status().ready, 1); header.hash() @@ -818,9 +767,8 @@ fn resubmit_from_retracted_fork() { // Block E1 let e1 = { - let _ = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, tx4.clone()) - ).expect("1. Imported"); + let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx4.clone())) + .expect("1. 
Imported"); let header = pool.api().push_block_with_parent(d1.clone(), vec![tx4.clone()], true); assert_eq!(pool.status().ready, 2); header.hash() @@ -828,9 +776,8 @@ fn resubmit_from_retracted_fork() { // Block F1 let f1_header = { - let _ = block_on( - pool.submit_and_watch(&BlockId::number(1), SOURCE, tx5.clone()) - ).expect("1. Imported"); + let _ = block_on(pool.submit_and_watch(&BlockId::number(1), SOURCE, tx5.clone())) + .expect("1. Imported"); let header = pool.api().push_block_with_parent(e1.clone(), vec![tx5.clone()], true); // Don't announce the block event to the pool directly, because we will // re-org to this block. @@ -892,14 +839,14 @@ fn ready_set_should_eventually_resolve_when_block_update_arrives() { block_on(pool.maintain(block_event(header))); - match ready_set_future.poll_unpin(&mut context) { + match ready_set_future.poll_unpin(&mut context) { Poll::Pending => { panic!("Ready set should become ready after block update!"); }, Poll::Ready(iterator) => { let data = iterator.collect::>(); assert_eq!(data.len(), 1); - } + }, } } @@ -914,22 +861,22 @@ fn should_not_accept_old_signatures() { client, None, &sp_core::testing::TaskExecutor::new(), - ))).0 + ))) + .0, ); - let transfer = Transfer { - from: Alice.into(), - to: Bob.into(), - nonce: 0, - amount: 1, - }; + let transfer = Transfer { from: Alice.into(), to: Bob.into(), nonce: 0, amount: 1 }; let _bytes: sp_core::sr25519::Signature = transfer.using_encoded(|e| Alice.sign(e)).into(); // generated with schnorrkel 0.1.1 from `_bytes` - let old_singature = sp_core::sr25519::Signature::try_from(&hex::decode( - "c427eb672e8c441c86d31f1a81b22b43102058e9ce237cabe9897ea5099ffd426\ - cd1c6a1f4f2869c3df57901d36bedcb295657adb3a4355add86ed234eb83108" - ).expect("hex invalid")[..]).expect("signature construction failed"); + let old_singature = sp_core::sr25519::Signature::try_from( + &hex::decode( + "c427eb672e8c441c86d31f1a81b22b43102058e9ce237cabe9897ea5099ffd426\ + 
cd1c6a1f4f2869c3df57901d36bedcb295657adb3a4355add86ed234eb83108", + ) + .expect("hex invalid")[..], + ) + .expect("signature construction failed"); let xt = Extrinsic::Transfer { transfer, @@ -939,9 +886,9 @@ fn should_not_accept_old_signatures() { assert_matches::assert_matches!( block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt.clone())), - Err(error::Error::Pool( - sc_transaction_pool_api::error::Error::InvalidTransaction(InvalidTransaction::BadProof) - )), + Err(error::Error::Pool(sc_transaction_pool_api::error::Error::InvalidTransaction( + InvalidTransaction::BadProof + ))), "Should be invalid transaction with bad proof", ); } @@ -955,7 +902,8 @@ fn import_notification_to_pool_maintain_works() { client.clone(), None, &sp_core::testing::TaskExecutor::new(), - ))).0 + ))) + .0, ); // Prepare the extrisic, push it to the pool and check that it was added. @@ -1021,32 +969,16 @@ fn stale_transactions_are_pruned() { // Our initial transactions let xts = vec![ - Transfer { - from: Alice.into(), - to: Bob.into(), - nonce: 1, - amount: 1, - }, - Transfer { - from: Alice.into(), - to: Bob.into(), - nonce: 2, - amount: 1, - }, - Transfer { - from: Alice.into(), - to: Bob.into(), - nonce: 3, - amount: 1, - }, + Transfer { from: Alice.into(), to: Bob.into(), nonce: 1, amount: 1 }, + Transfer { from: Alice.into(), to: Bob.into(), nonce: 2, amount: 1 }, + Transfer { from: Alice.into(), to: Bob.into(), nonce: 3, amount: 1 }, ]; let (pool, _guard, _notifier) = maintained_pool(); xts.into_iter().for_each(|xt| { - block_on( - pool.submit_one(&BlockId::number(0), SOURCE, xt.into_signed_tx()), - ).expect("1. Imported"); + block_on(pool.submit_one(&BlockId::number(0), SOURCE, xt.into_signed_tx())) + .expect("1. 
Imported"); }); assert_eq!(pool.status().ready, 0); assert_eq!(pool.status().future, 3); @@ -1054,24 +986,9 @@ fn stale_transactions_are_pruned() { // Almost the same as our initial transactions, but with some different `amount`s to make them // generate a different hash let xts = vec![ - Transfer { - from: Alice.into(), - to: Bob.into(), - nonce: 1, - amount: 2, - }.into_signed_tx(), - Transfer { - from: Alice.into(), - to: Bob.into(), - nonce: 2, - amount: 2, - }.into_signed_tx(), - Transfer { - from: Alice.into(), - to: Bob.into(), - nonce: 3, - amount: 2, - }.into_signed_tx(), + Transfer { from: Alice.into(), to: Bob.into(), nonce: 1, amount: 2 }.into_signed_tx(), + Transfer { from: Alice.into(), to: Bob.into(), nonce: 2, amount: 2 }.into_signed_tx(), + Transfer { from: Alice.into(), to: Bob.into(), nonce: 3, amount: 2 }.into_signed_tx(), ]; // Import block diff --git a/client/transaction-pool/tests/revalidation.rs b/client/transaction-pool/tests/revalidation.rs index d720f09a7fce5..b2c8225b78f58 100644 --- a/client/transaction-pool/tests/revalidation.rs +++ b/client/transaction-pool/tests/revalidation.rs @@ -1,32 +1,32 @@ +use futures::executor::block_on; use sc_transaction_pool::test_helpers::{Pool, RevalidationQueue}; use sc_transaction_pool_api::TransactionSource; -use substrate_test_runtime_transaction_pool::{TestApi, uxt}; -use futures::executor::block_on; -use substrate_test_runtime_client::AccountKeyring::*; -use std::sync::Arc; use sp_runtime::generic::BlockId; +use std::sync::Arc; +use substrate_test_runtime_client::AccountKeyring::*; +use substrate_test_runtime_transaction_pool::{uxt, TestApi}; fn setup() -> (Arc, Pool) { - let test_api = Arc::new(TestApi::empty()); - let pool = Pool::new(Default::default(), true.into(), test_api.clone()); - (test_api, pool) + let test_api = Arc::new(TestApi::empty()); + let pool = Pool::new(Default::default(), true.into(), test_api.clone()); + (test_api, pool) } #[test] fn smoky() { - let (api, pool) = setup(); - 
let pool = Arc::new(pool); - let queue = Arc::new(RevalidationQueue::new(api.clone(), pool.clone())); + let (api, pool) = setup(); + let pool = Arc::new(pool); + let queue = Arc::new(RevalidationQueue::new(api.clone(), pool.clone())); - let uxt = uxt(Alice, 0); - let uxt_hash = block_on( - pool.submit_one(&BlockId::number(0), TransactionSource::External, uxt.clone()) - ).expect("Should be valid"); + let uxt = uxt(Alice, 0); + let uxt_hash = + block_on(pool.submit_one(&BlockId::number(0), TransactionSource::External, uxt.clone())) + .expect("Should be valid"); - block_on(queue.revalidate_later(0, vec![uxt_hash])); + block_on(queue.revalidate_later(0, vec![uxt_hash])); - // revalidated in sync offload 2nd time - assert_eq!(api.validation_requests().len(), 2); - // number of ready - assert_eq!(pool.validated_pool().status().ready, 1); -} \ No newline at end of file + // revalidated in sync offload 2nd time + assert_eq!(api.validation_requests().len(), 2); + // number of ready + assert_eq!(pool.validated_pool().status().ready, 1); +} diff --git a/frame/assets/src/benchmarking.rs b/frame/assets/src/benchmarking.rs index c6925df9ad88f..89a1308db1712 100644 --- a/frame/assets/src/benchmarking.rs +++ b/frame/assets/src/benchmarking.rs @@ -19,23 +19,26 @@ #![cfg(feature = "runtime-benchmarks")] -use sp_std::prelude::*; use super::*; -use sp_runtime::traits::Bounded; -use frame_system::RawOrigin as SystemOrigin; use frame_benchmarking::{ - benchmarks_instance_pallet, account, whitelisted_caller, whitelist_account, impl_benchmark_test_suite + account, benchmarks_instance_pallet, impl_benchmark_test_suite, whitelist_account, + whitelisted_caller, +}; +use frame_support::{ + dispatch::UnfilteredDispatchable, + traits::{EnsureOrigin, Get}, }; -use frame_support::traits::Get; -use frame_support::{traits::EnsureOrigin, dispatch::UnfilteredDispatchable}; +use frame_system::RawOrigin as SystemOrigin; +use sp_runtime::traits::Bounded; +use sp_std::prelude::*; use crate::Pallet as 
Assets; const SEED: u32 = 0; -fn create_default_asset, I: 'static>(is_sufficient: bool) - -> (T::AccountId, ::Source) -{ +fn create_default_asset, I: 'static>( + is_sufficient: bool, +) -> (T::AccountId, ::Source) { let caller: T::AccountId = whitelisted_caller(); let caller_lookup = T::Lookup::unlookup(caller.clone()); let root = SystemOrigin::Root.into(); @@ -45,14 +48,16 @@ fn create_default_asset, I: 'static>(is_sufficient: bool) caller_lookup.clone(), is_sufficient, 1u32.into(), - ).is_ok()); + ) + .is_ok()); (caller, caller_lookup) } -fn create_default_minted_asset, I: 'static>(is_sufficient: bool, amount: T::Balance) - -> (T::AccountId, ::Source) -{ - let (caller, caller_lookup) = create_default_asset::(is_sufficient); +fn create_default_minted_asset, I: 'static>( + is_sufficient: bool, + amount: T::Balance, +) -> (T::AccountId, ::Source) { + let (caller, caller_lookup) = create_default_asset::(is_sufficient); if !is_sufficient { T::Currency::make_free_balance_be(&caller, T::Currency::minimum_balance()); } @@ -61,14 +66,17 @@ fn create_default_minted_asset, I: 'static>(is_sufficient: bool, am Default::default(), caller_lookup.clone(), amount, - ).is_ok()); + ) + .is_ok()); (caller, caller_lookup) } fn swap_is_sufficient, I: 'static>(s: &mut bool) { - Asset::::mutate(&T::AssetId::default(), |maybe_a| - if let Some(ref mut a) = maybe_a { sp_std::mem::swap(s, &mut a.is_sufficient) } - ); + Asset::::mutate(&T::AssetId::default(), |maybe_a| { + if let Some(ref mut a) = maybe_a { + sp_std::mem::swap(s, &mut a.is_sufficient) + } + }); } fn add_consumers, I: 'static>(minter: T::AccountId, n: u32) { @@ -79,7 +87,13 @@ fn add_consumers, I: 'static>(minter: T::AccountId, n: u32) { let target = account("consumer", i, SEED); T::Currency::make_free_balance_be(&target, T::Currency::minimum_balance()); let target_lookup = T::Lookup::unlookup(target); - assert!(Assets::::mint(origin.clone().into(), Default::default(), target_lookup, 100u32.into()).is_ok()); + 
assert!(Assets::::mint( + origin.clone().into(), + Default::default(), + target_lookup, + 100u32.into() + ) + .is_ok()); } swap_is_sufficient::(&mut s); } @@ -91,7 +105,13 @@ fn add_sufficients, I: 'static>(minter: T::AccountId, n: u32) { for i in 0..n { let target = account("sufficient", i, SEED); let target_lookup = T::Lookup::unlookup(target); - assert!(Assets::::mint(origin.clone().into(), Default::default(), target_lookup, 100u32.into()).is_ok()); + assert!(Assets::::mint( + origin.clone().into(), + Default::default(), + target_lookup, + 100u32.into() + ) + .is_ok()); } swap_is_sufficient::(&mut s); } @@ -105,7 +125,8 @@ fn add_approvals, I: 'static>(minter: T::AccountId, n: u32) { Default::default(), minter_lookup, (100 * (n + 1)).into(), - ).unwrap(); + ) + .unwrap(); for i in 0..n { let target = account("approval", i, SEED); T::Currency::make_free_balance_be(&target, T::Currency::minimum_balance()); @@ -115,7 +136,8 @@ fn add_approvals, I: 'static>(minter: T::AccountId, n: u32) { Default::default(), target_lookup, 100u32.into(), - ).unwrap(); + ) + .unwrap(); } } diff --git a/frame/assets/src/extra_mutator.rs b/frame/assets/src/extra_mutator.rs index d86d78ce3e376..8c601b746346c 100644 --- a/frame/assets/src/extra_mutator.rs +++ b/frame/assets/src/extra_mutator.rs @@ -34,10 +34,7 @@ pub struct ExtraMutator, I: 'static = ()> { impl, I: 'static> Drop for ExtraMutator { fn drop(&mut self) { - debug_assert!( - self.commit().is_ok(), - "attempt to write to non-existent asset account" - ); + debug_assert!(self.commit().is_ok(), "attempt to write to non-existent asset account"); } } diff --git a/frame/assets/src/functions.rs b/frame/assets/src/functions.rs index c6b5391cff860..6e6847ad7dfb6 100644 --- a/frame/assets/src/functions.rs +++ b/frame/assets/src/functions.rs @@ -38,9 +38,7 @@ impl, I: 'static> Pallet { /// Get the total supply of an asset `id`. 
pub fn total_supply(id: T::AssetId) -> T::Balance { - Asset::::get(id) - .map(|x| x.supply) - .unwrap_or_else(Zero::zero) + Asset::::get(id).map(|x| x.supply).unwrap_or_else(Zero::zero) } pub(super) fn new_account( @@ -134,7 +132,7 @@ impl, I: 'static> Pallet { match frozen.checked_add(&details.min_balance) { Some(required) if rest < required => return Frozen, None => return Overflow, - _ => {} + _ => {}, } } @@ -171,9 +169,8 @@ impl, I: 'static> Pallet { let amount = if let Some(frozen) = T::Freezer::frozen_balance(id, who) { // Frozen balance: account CANNOT be deleted - let required = frozen - .checked_add(&details.min_balance) - .ok_or(ArithmeticError::Overflow)?; + let required = + frozen.checked_add(&details.min_balance).ok_or(ArithmeticError::Overflow)?; account.balance.saturating_sub(required) } else { let is_provider = false; @@ -219,7 +216,7 @@ impl, I: 'static> Pallet { Err(e) => { debug_assert!(false, "passed from reducible_balance; qed"); return Err(e.into()) - } + }, }; Ok(actual) @@ -268,12 +265,12 @@ impl, I: 'static> Pallet { ) -> DispatchResult { Self::increase_balance(id, beneficiary, amount, |details| -> DispatchResult { if let Some(check_issuer) = maybe_check_issuer { - ensure!( - &check_issuer == &details.issuer, - Error::::NoPermission - ); + ensure!(&check_issuer == &details.issuer, Error::::NoPermission); } - debug_assert!(T::Balance::max_value() - details.supply >= amount, "checked in prep; qed"); + debug_assert!( + T::Balance::max_value() - details.supply >= amount, + "checked in prep; qed" + ); details.supply = details.supply.saturating_add(amount); Ok(()) })?; @@ -295,7 +292,9 @@ impl, I: 'static> Pallet { &mut AssetDetails>, ) -> DispatchResult, ) -> DispatchResult { - if amount.is_zero() { return Ok(()) } + if amount.is_zero() { + return Ok(()) + } Self::can_increase(id, beneficiary, amount).into_result()?; Asset::::try_mutate(id, |maybe_details| -> DispatchResult { @@ -364,7 +363,9 @@ impl, I: 'static> Pallet { &mut AssetDetails>, ) 
-> DispatchResult, ) -> Result { - if amount.is_zero() { return Ok(amount) } + if amount.is_zero() { + return Ok(amount) + } let actual = Self::prep_debit(id, target, amount, f)?; diff --git a/frame/assets/src/impl_fungibles.rs b/frame/assets/src/impl_fungibles.rs index 71951bae11165..4e85b20a1fbb1 100644 --- a/frame/assets/src/impl_fungibles.rs +++ b/frame/assets/src/impl_fungibles.rs @@ -24,15 +24,11 @@ impl, I: 'static> fungibles::Inspect<::AccountId type Balance = T::Balance; fn total_issuance(asset: Self::AssetId) -> Self::Balance { - Asset::::get(asset) - .map(|x| x.supply) - .unwrap_or_else(Zero::zero) + Asset::::get(asset).map(|x| x.supply).unwrap_or_else(Zero::zero) } fn minimum_balance(asset: Self::AssetId) -> Self::Balance { - Asset::::get(asset) - .map(|x| x.min_balance) - .unwrap_or_else(Zero::zero) + Asset::::get(asset).map(|x| x.min_balance).unwrap_or_else(Zero::zero) } fn balance(asset: Self::AssetId, who: &::AccountId) -> Self::Balance { @@ -78,10 +74,7 @@ impl, I: 'static> fungibles::Mutate<::AccountId> who: &::AccountId, amount: Self::Balance, ) -> Result { - let f = DebitFlags { - keep_alive: false, - best_effort: false, - }; + let f = DebitFlags { keep_alive: false, best_effort: false }; Self::do_burn(asset, who, amount, None, f) } @@ -90,10 +83,7 @@ impl, I: 'static> fungibles::Mutate<::AccountId> who: &::AccountId, amount: Self::Balance, ) -> Result { - let f = DebitFlags { - keep_alive: false, - best_effort: true, - }; + let f = DebitFlags { keep_alive: false, best_effort: true }; Self::do_burn(asset, who, amount, None, f) } } @@ -106,11 +96,7 @@ impl, I: 'static> fungibles::Transfer for Pallet Result { - let f = TransferFlags { - keep_alive, - best_effort: false, - burn_dust: false - }; + let f = TransferFlags { keep_alive, best_effort: false, burn_dust: false }; Self::do_transfer(asset, source, dest, amount, None, f) } } @@ -126,28 +112,35 @@ impl, I: 'static> fungibles::Unbalanced for Pallet Result - { + fn decrease_balance( + asset: 
T::AssetId, + who: &T::AccountId, + amount: Self::Balance, + ) -> Result { let f = DebitFlags { keep_alive: false, best_effort: false }; Self::decrease_balance(asset, who, amount, f, |_, _| Ok(())) } - fn decrease_balance_at_most(asset: T::AssetId, who: &T::AccountId, amount: Self::Balance) - -> Self::Balance - { + fn decrease_balance_at_most( + asset: T::AssetId, + who: &T::AccountId, + amount: Self::Balance, + ) -> Self::Balance { let f = DebitFlags { keep_alive: false, best_effort: true }; - Self::decrease_balance(asset, who, amount, f, |_, _| Ok(())) - .unwrap_or(Zero::zero()) + Self::decrease_balance(asset, who, amount, f, |_, _| Ok(())).unwrap_or(Zero::zero()) } - fn increase_balance(asset: T::AssetId, who: &T::AccountId, amount: Self::Balance) - -> Result - { + fn increase_balance( + asset: T::AssetId, + who: &T::AccountId, + amount: Self::Balance, + ) -> Result { Self::increase_balance(asset, who, amount, |_| Ok(()))?; Ok(amount) } - fn increase_balance_at_most(asset: T::AssetId, who: &T::AccountId, amount: Self::Balance) - -> Self::Balance - { + fn increase_balance_at_most( + asset: T::AssetId, + who: &T::AccountId, + amount: Self::Balance, + ) -> Self::Balance { match Self::increase_balance(asset, who, amount, |_| Ok(())) { Ok(()) => amount, Err(_) => Zero::zero(), diff --git a/frame/assets/src/lib.rs b/frame/assets/src/lib.rs index 9f8d0eafcc734..89f95b42675fe 100644 --- a/frame/assets/src/lib.rs +++ b/frame/assets/src/lib.rs @@ -122,40 +122,47 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] -pub mod weights; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; #[cfg(test)] pub mod mock; #[cfg(test)] mod tests; +pub mod weights; mod extra_mutator; pub use extra_mutator::*; -mod impl_stored_map; -mod impl_fungibles; mod functions; +mod impl_fungibles; +mod impl_stored_map; mod types; pub use types::*; -use sp_std::{prelude::*, borrow::Borrow, convert::TryInto}; -use sp_runtime::{ - TokenError, ArithmeticError, - traits::{AtLeast32BitUnsigned, Zero, StaticLookup, Saturating, CheckedSub, CheckedAdd, Bounded} -}; use codec::HasCompact; -use frame_support::pallet_prelude::*; -use frame_support::traits::{Currency, ReservableCurrency, BalanceStatus::Reserved, StoredMap}; -use frame_support::traits::tokens::{WithdrawConsequence, DepositConsequence, fungibles}; +use frame_support::{ + pallet_prelude::*, + traits::{ + tokens::{fungibles, DepositConsequence, WithdrawConsequence}, + BalanceStatus::Reserved, + Currency, ReservableCurrency, StoredMap, + }, +}; use frame_system::Config as SystemConfig; +use sp_runtime::{ + traits::{ + AtLeast32BitUnsigned, Bounded, CheckedAdd, CheckedSub, Saturating, StaticLookup, Zero, + }, + ArithmeticError, TokenError, +}; +use sp_std::{borrow::Borrow, convert::TryInto, prelude::*}; -pub use weights::WeightInfo; pub use pallet::*; +pub use weights::WeightInfo; #[frame_support::pallet] pub mod pallet { - use frame_system::pallet_prelude::*; use super::*; + use frame_system::pallet_prelude::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -509,13 +516,12 @@ pub mod pallet { } Self::deposit_event(Event::Destroyed(id)); - Ok( - Some(T::WeightInfo::destroy( - details.accounts.saturating_sub(details.sufficients), - details.sufficients, - details.approvals, - )).into() - ) + Ok(Some(T::WeightInfo::destroy( + details.accounts.saturating_sub(details.sufficients), + details.sufficients, + details.approvals, + )) + .into()) }) } @@ -536,7 +542,7 @@ pub mod pallet { 
origin: OriginFor, #[pallet::compact] id: T::AssetId, beneficiary: ::Source, - #[pallet::compact] amount: T::Balance + #[pallet::compact] amount: T::Balance, ) -> DispatchResult { let origin = ensure_signed(origin)?; let beneficiary = T::Lookup::lookup(beneficiary)?; @@ -564,7 +570,7 @@ pub mod pallet { origin: OriginFor, #[pallet::compact] id: T::AssetId, who: ::Source, - #[pallet::compact] amount: T::Balance + #[pallet::compact] amount: T::Balance, ) -> DispatchResult { let origin = ensure_signed(origin)?; let who = T::Lookup::lookup(who)?; @@ -597,16 +603,12 @@ pub mod pallet { origin: OriginFor, #[pallet::compact] id: T::AssetId, target: ::Source, - #[pallet::compact] amount: T::Balance + #[pallet::compact] amount: T::Balance, ) -> DispatchResult { let origin = ensure_signed(origin)?; let dest = T::Lookup::lookup(target)?; - let f = TransferFlags { - keep_alive: false, - best_effort: false, - burn_dust: false - }; + let f = TransferFlags { keep_alive: false, best_effort: false, burn_dust: false }; Self::do_transfer(id, &origin, &dest, amount, None, f).map(|_| ()) } @@ -633,16 +635,12 @@ pub mod pallet { origin: OriginFor, #[pallet::compact] id: T::AssetId, target: ::Source, - #[pallet::compact] amount: T::Balance + #[pallet::compact] amount: T::Balance, ) -> DispatchResult { let source = ensure_signed(origin)?; let dest = T::Lookup::lookup(target)?; - let f = TransferFlags { - keep_alive: true, - best_effort: false, - burn_dust: false - }; + let f = TransferFlags { keep_alive: true, best_effort: false, burn_dust: false }; Self::do_transfer(id, &source, &dest, amount, None, f).map(|_| ()) } @@ -677,11 +675,7 @@ pub mod pallet { let source = T::Lookup::lookup(source)?; let dest = T::Lookup::lookup(dest)?; - let f = TransferFlags { - keep_alive: false, - best_effort: false, - burn_dust: false - }; + let f = TransferFlags { keep_alive: false, best_effort: false, burn_dust: false }; Self::do_transfer(id, &source, &dest, amount, Some(origin), f).map(|_| ()) } @@ 
-699,17 +693,14 @@ pub mod pallet { pub fn freeze( origin: OriginFor, #[pallet::compact] id: T::AssetId, - who: ::Source + who: ::Source, ) -> DispatchResult { let origin = ensure_signed(origin)?; let d = Asset::::get(id).ok_or(Error::::Unknown)?; ensure!(&origin == &d.freezer, Error::::NoPermission); let who = T::Lookup::lookup(who)?; - ensure!( - Account::::contains_key(id, &who), - Error::::BalanceZero - ); + ensure!(Account::::contains_key(id, &who), Error::::BalanceZero); Account::::mutate(id, &who, |a| a.is_frozen = true); @@ -730,19 +721,15 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::thaw())] pub fn thaw( origin: OriginFor, - #[pallet::compact] - id: T::AssetId, - who: ::Source + #[pallet::compact] id: T::AssetId, + who: ::Source, ) -> DispatchResult { let origin = ensure_signed(origin)?; let details = Asset::::get(id).ok_or(Error::::Unknown)?; ensure!(&origin == &details.admin, Error::::NoPermission); let who = T::Lookup::lookup(who)?; - ensure!( - Account::::contains_key(id, &who), - Error::::BalanceZero - ); + ensure!(Account::::contains_key(id, &who), Error::::BalanceZero); Account::::mutate(id, &who, |a| a.is_frozen = false); @@ -762,7 +749,7 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::freeze_asset())] pub fn freeze_asset( origin: OriginFor, - #[pallet::compact] id: T::AssetId + #[pallet::compact] id: T::AssetId, ) -> DispatchResult { let origin = ensure_signed(origin)?; @@ -789,7 +776,7 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::thaw_asset())] pub fn thaw_asset( origin: OriginFor, - #[pallet::compact] id: T::AssetId + #[pallet::compact] id: T::AssetId, ) -> DispatchResult { let origin = ensure_signed(origin)?; @@ -827,7 +814,7 @@ pub mod pallet { let details = maybe_details.as_mut().ok_or(Error::::Unknown)?; ensure!(&origin == &details.owner, Error::::NoPermission); if details.owner == owner { - return Ok(()); + return Ok(()) } let metadata_deposit = Metadata::::get(id).deposit; @@ -907,14 +894,10 @@ pub mod pallet { ) -> 
DispatchResult { let origin = ensure_signed(origin)?; - let bounded_name: BoundedVec = name - .clone() - .try_into() - .map_err(|_| Error::::BadMetadata)?; - let bounded_symbol: BoundedVec = symbol - .clone() - .try_into() - .map_err(|_| Error::::BadMetadata)?; + let bounded_name: BoundedVec = + name.clone().try_into().map_err(|_| Error::::BadMetadata)?; + let bounded_symbol: BoundedVec = + symbol.clone().try_into().map_err(|_| Error::::BadMetadata)?; let d = Asset::::get(id).ok_or(Error::::Unknown)?; ensure!(&origin == &d.owner, Error::::NoPermission); @@ -1003,15 +986,11 @@ pub mod pallet { ) -> DispatchResult { T::ForceOrigin::ensure_origin(origin)?; - let bounded_name: BoundedVec = name - .clone() - .try_into() - .map_err(|_| Error::::BadMetadata)?; + let bounded_name: BoundedVec = + name.clone().try_into().map_err(|_| Error::::BadMetadata)?; - let bounded_symbol: BoundedVec = symbol - .clone() - .try_into() - .map_err(|_| Error::::BadMetadata)?; + let bounded_symbol: BoundedVec = + symbol.clone().try_into().map_err(|_| Error::::BadMetadata)?; ensure!(Asset::::contains_key(id), Error::::Unknown); Metadata::::try_mutate_exists(id, |metadata| { @@ -1140,25 +1119,28 @@ pub mod pallet { let mut d = Asset::::get(id).ok_or(Error::::Unknown)?; ensure!(!d.is_frozen, Error::::Frozen); - Approvals::::try_mutate((id, &owner, &delegate), |maybe_approved| -> DispatchResult { - let mut approved = match maybe_approved.take() { - // an approval already exists and is being updated - Some(a) => a, - // a new approval is created - None => { - d.approvals.saturating_inc(); - Default::default() + Approvals::::try_mutate( + (id, &owner, &delegate), + |maybe_approved| -> DispatchResult { + let mut approved = match maybe_approved.take() { + // an approval already exists and is being updated + Some(a) => a, + // a new approval is created + None => { + d.approvals.saturating_inc(); + Default::default() + }, + }; + let deposit_required = T::ApprovalDeposit::get(); + if approved.deposit < 
deposit_required { + T::Currency::reserve(&owner, deposit_required - approved.deposit)?; + approved.deposit = deposit_required; } - }; - let deposit_required = T::ApprovalDeposit::get(); - if approved.deposit < deposit_required { - T::Currency::reserve(&owner, deposit_required - approved.deposit)?; - approved.deposit = deposit_required; - } - approved.amount = approved.amount.saturating_add(amount); - *maybe_approved = Some(approved); - Ok(()) - })?; + approved.amount = approved.amount.saturating_add(amount); + *maybe_approved = Some(approved); + Ok(()) + }, + )?; Asset::::insert(id, d); Self::deposit_event(Event::ApprovedTransfer(id, owner, delegate, amount)); @@ -1187,7 +1169,8 @@ pub mod pallet { let owner = ensure_signed(origin)?; let delegate = T::Lookup::lookup(delegate)?; let mut d = Asset::::get(id).ok_or(Error::::Unknown)?; - let approval = Approvals::::take((id, &owner, &delegate)).ok_or(Error::::Unknown)?; + let approval = + Approvals::::take((id, &owner, &delegate)).ok_or(Error::::Unknown)?; T::Currency::unreserve(&owner, approval.deposit); d.approvals.saturating_dec(); @@ -1229,7 +1212,8 @@ pub mod pallet { let owner = T::Lookup::lookup(owner)?; let delegate = T::Lookup::lookup(delegate)?; - let approval = Approvals::::take((id, &owner, &delegate)).ok_or(Error::::Unknown)?; + let approval = + Approvals::::take((id, &owner, &delegate)).ok_or(Error::::Unknown)?; T::Currency::unreserve(&owner, approval.deposit); d.approvals.saturating_dec(); Asset::::insert(id, d); @@ -1268,33 +1252,31 @@ pub mod pallet { let owner = T::Lookup::lookup(owner)?; let destination = T::Lookup::lookup(destination)?; - Approvals::::try_mutate_exists((id, &owner, delegate), |maybe_approved| -> DispatchResult { - let mut approved = maybe_approved.take().ok_or(Error::::Unapproved)?; - let remaining = approved - .amount - .checked_sub(&amount) - .ok_or(Error::::Unapproved)?; - - let f = TransferFlags { - keep_alive: false, - best_effort: false, - burn_dust: false - }; - 
Self::do_transfer(id, &owner, &destination, amount, None, f)?; - - if remaining.is_zero() { - T::Currency::unreserve(&owner, approved.deposit); - Asset::::mutate(id, |maybe_details| { - if let Some(details) = maybe_details { - details.approvals.saturating_dec(); - } - }); - } else { - approved.amount = remaining; - *maybe_approved = Some(approved); - } - Ok(()) - })?; + Approvals::::try_mutate_exists( + (id, &owner, delegate), + |maybe_approved| -> DispatchResult { + let mut approved = maybe_approved.take().ok_or(Error::::Unapproved)?; + let remaining = + approved.amount.checked_sub(&amount).ok_or(Error::::Unapproved)?; + + let f = + TransferFlags { keep_alive: false, best_effort: false, burn_dust: false }; + Self::do_transfer(id, &owner, &destination, amount, None, f)?; + + if remaining.is_zero() { + T::Currency::unreserve(&owner, approved.deposit); + Asset::::mutate(id, |maybe_details| { + if let Some(details) = maybe_details { + details.approvals.saturating_dec(); + } + }); + } else { + approved.amount = remaining; + *maybe_approved = Some(approved); + } + Ok(()) + }, + )?; Ok(()) } } diff --git a/frame/assets/src/mock.rs b/frame/assets/src/mock.rs index 429548a5d1c28..e4f5763f149fe 100644 --- a/frame/assets/src/mock.rs +++ b/frame/assets/src/mock.rs @@ -20,9 +20,12 @@ use super::*; use crate as pallet_assets; +use frame_support::{construct_runtime, parameter_types}; use sp_core::H256; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header}; -use frame_support::{parameter_types, construct_runtime}; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, +}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -108,8 +111,7 @@ impl Config for Test { type Extra = (); } -use std::cell::RefCell; -use std::collections::HashMap; +use std::{cell::RefCell, collections::HashMap}; #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub(crate) enum Hook { diff --git 
a/frame/assets/src/tests.rs b/frame/assets/src/tests.rs index c2cf9acf29bdb..99c362c3577ba 100644 --- a/frame/assets/src/tests.rs +++ b/frame/assets/src/tests.rs @@ -18,10 +18,10 @@ //! Tests for Assets pallet. use super::*; -use crate::{Error, mock::*}; -use sp_runtime::{TokenError, traits::ConvertInto}; -use frame_support::{assert_ok, assert_noop, traits::Currency}; +use crate::{mock::*, Error}; +use frame_support::{assert_noop, assert_ok, traits::Currency}; use pallet_balances::Error as BalancesError; +use sp_runtime::{traits::ConvertInto, TokenError}; #[test] fn basic_minting_should_work() { @@ -151,13 +151,25 @@ fn force_cancel_approval_works() { assert_eq!(Asset::::get(0).unwrap().approvals, 1); let e = Error::::NoPermission; assert_noop!(Assets::force_cancel_approval(Origin::signed(2), 0, 1, 2), e); - assert_noop!(Assets::force_cancel_approval(Origin::signed(1), 1, 1, 2), Error::::Unknown); - assert_noop!(Assets::force_cancel_approval(Origin::signed(1), 0, 2, 2), Error::::Unknown); - assert_noop!(Assets::force_cancel_approval(Origin::signed(1), 0, 1, 3), Error::::Unknown); + assert_noop!( + Assets::force_cancel_approval(Origin::signed(1), 1, 1, 2), + Error::::Unknown + ); + assert_noop!( + Assets::force_cancel_approval(Origin::signed(1), 0, 2, 2), + Error::::Unknown + ); + assert_noop!( + Assets::force_cancel_approval(Origin::signed(1), 0, 1, 3), + Error::::Unknown + ); assert_eq!(Asset::::get(0).unwrap().approvals, 1); assert_ok!(Assets::force_cancel_approval(Origin::signed(1), 0, 1, 2)); assert_eq!(Asset::::get(0).unwrap().approvals, 0); - assert_noop!(Assets::force_cancel_approval(Origin::signed(1), 0, 1, 2), Error::::Unknown); + assert_noop!( + Assets::force_cancel_approval(Origin::signed(1), 0, 1, 2), + Error::::Unknown + ); }); } @@ -222,7 +234,6 @@ fn destroy_with_bad_witness_should_not_work() { w.accounts += 2; w.sufficients += 2; assert_ok!(Assets::destroy(Origin::signed(1), 0, w)); - }); } @@ -259,7 +270,10 @@ fn non_providing_should_work() { // 
...or transfer... assert_noop!(Assets::transfer(Origin::signed(0), 0, 1, 50), TokenError::CannotCreate); // ...or force-transfer - assert_noop!(Assets::force_transfer(Origin::signed(1), 0, 0, 1, 50), TokenError::CannotCreate); + assert_noop!( + Assets::force_transfer(Origin::signed(1), 0, 0, 1, 50), + TokenError::CannotCreate + ); Balances::make_free_balance_be(&1, 100); Balances::make_free_balance_be(&2, 100); @@ -278,7 +292,10 @@ fn min_balance_should_work() { // Cannot create a new account with a balance that is below minimum... assert_noop!(Assets::mint(Origin::signed(1), 0, 2, 9), TokenError::BelowMinimum); assert_noop!(Assets::transfer(Origin::signed(1), 0, 2, 9), TokenError::BelowMinimum); - assert_noop!(Assets::force_transfer(Origin::signed(1), 0, 1, 2, 9), TokenError::BelowMinimum); + assert_noop!( + Assets::force_transfer(Origin::signed(1), 0, 1, 2, 9), + TokenError::BelowMinimum + ); // When deducting from an account to below minimum, it should be reaped. assert_ok!(Assets::transfer(Origin::signed(1), 0, 2, 91)); @@ -333,7 +350,10 @@ fn transferring_enough_to_kill_source_when_keep_alive_should_fail() { assert_ok!(Assets::force_create(Origin::root(), 0, 1, true, 10)); assert_ok!(Assets::mint(Origin::signed(1), 0, 1, 100)); assert_eq!(Assets::balance(0, 1), 100); - assert_noop!(Assets::transfer_keep_alive(Origin::signed(1), 0, 2, 91), Error::::BalanceLow); + assert_noop!( + Assets::transfer_keep_alive(Origin::signed(1), 0, 2, 91), + Error::::BalanceLow + ); assert_ok!(Assets::transfer_keep_alive(Origin::signed(1), 0, 2, 90)); assert_eq!(Assets::balance(0, 1), 10); assert_eq!(Assets::balance(0, 2), 90); @@ -385,13 +405,19 @@ fn origin_guards_should_work() { new_test_ext().execute_with(|| { assert_ok!(Assets::force_create(Origin::root(), 0, 1, true, 1)); assert_ok!(Assets::mint(Origin::signed(1), 0, 1, 100)); - assert_noop!(Assets::transfer_ownership(Origin::signed(2), 0, 2), Error::::NoPermission); + assert_noop!( + 
Assets::transfer_ownership(Origin::signed(2), 0, 2), + Error::::NoPermission + ); assert_noop!(Assets::set_team(Origin::signed(2), 0, 2, 2, 2), Error::::NoPermission); assert_noop!(Assets::freeze(Origin::signed(2), 0, 1), Error::::NoPermission); assert_noop!(Assets::thaw(Origin::signed(2), 0, 2), Error::::NoPermission); assert_noop!(Assets::mint(Origin::signed(2), 0, 2, 100), Error::::NoPermission); assert_noop!(Assets::burn(Origin::signed(2), 0, 1, 100), Error::::NoPermission); - assert_noop!(Assets::force_transfer(Origin::signed(2), 0, 1, 2, 100), Error::::NoPermission); + assert_noop!( + Assets::force_transfer(Origin::signed(2), 0, 1, 2, 100), + Error::::NoPermission + ); let w = Asset::::get(0).unwrap().destroy_witness(); assert_noop!(Assets::destroy(Origin::signed(2), 0, w), Error::::NoPermission); }); @@ -410,7 +436,10 @@ fn transfer_owner_should_work() { assert_eq!(Balances::reserved_balance(&2), 1); assert_eq!(Balances::reserved_balance(&1), 0); - assert_noop!(Assets::transfer_ownership(Origin::signed(1), 0, 1), Error::::NoPermission); + assert_noop!( + Assets::transfer_ownership(Origin::signed(1), 0, 1), + Error::::NoPermission + ); // Set metadata now and make sure that deposit gets transferred back. 
assert_ok!(Assets::set_metadata(Origin::signed(2), 0, vec![0u8; 10], vec![0u8; 10], 12)); @@ -513,25 +542,25 @@ fn set_metadata_should_work() { new_test_ext().execute_with(|| { // Cannot add metadata to unknown asset assert_noop!( - Assets::set_metadata(Origin::signed(1), 0, vec![0u8; 10], vec![0u8; 10], 12), - Error::::Unknown, - ); + Assets::set_metadata(Origin::signed(1), 0, vec![0u8; 10], vec![0u8; 10], 12), + Error::::Unknown, + ); assert_ok!(Assets::force_create(Origin::root(), 0, 1, true, 1)); // Cannot add metadata to unowned asset assert_noop!( - Assets::set_metadata(Origin::signed(2), 0, vec![0u8; 10], vec![0u8; 10], 12), - Error::::NoPermission, - ); + Assets::set_metadata(Origin::signed(2), 0, vec![0u8; 10], vec![0u8; 10], 12), + Error::::NoPermission, + ); // Cannot add oversized metadata assert_noop!( - Assets::set_metadata(Origin::signed(1), 0, vec![0u8; 100], vec![0u8; 10], 12), - Error::::BadMetadata, - ); + Assets::set_metadata(Origin::signed(1), 0, vec![0u8; 100], vec![0u8; 10], 12), + Error::::BadMetadata, + ); assert_noop!( - Assets::set_metadata(Origin::signed(1), 0, vec![0u8; 10], vec![0u8; 100], 12), - Error::::BadMetadata, - ); + Assets::set_metadata(Origin::signed(1), 0, vec![0u8; 10], vec![0u8; 100], 12), + Error::::BadMetadata, + ); // Successfully add metadata and take deposit Balances::make_free_balance_be(&1, 30); @@ -546,9 +575,9 @@ fn set_metadata_should_work() { // Cannot over-reserve assert_noop!( - Assets::set_metadata(Origin::signed(1), 0, vec![0u8; 20], vec![0u8; 20], 12), - BalancesError::::InsufficientBalance, - ); + Assets::set_metadata(Origin::signed(1), 0, vec![0u8; 20], vec![0u8; 20], 12), + BalancesError::::InsufficientBalance, + ); // Clear Metadata assert!(Metadata::::contains_key(0)); @@ -566,7 +595,6 @@ fn freezer_should_work() { assert_ok!(Assets::mint(Origin::signed(1), 0, 1, 100)); assert_eq!(Assets::balance(0, 1), 100); - // freeze 50 of it. 
set_frozen_balance(0, 1, 50); @@ -626,12 +654,26 @@ fn force_metadata_should_work() { new_test_ext().execute_with(|| { //force set metadata works assert_ok!(Assets::force_create(Origin::root(), 0, 1, true, 1)); - assert_ok!(Assets::force_set_metadata(Origin::root(), 0, vec![0u8; 10], vec![0u8; 10], 8, false)); + assert_ok!(Assets::force_set_metadata( + Origin::root(), + 0, + vec![0u8; 10], + vec![0u8; 10], + 8, + false + )); assert!(Metadata::::contains_key(0)); //overwrites existing metadata let asset_original_metadata = Metadata::::get(0); - assert_ok!(Assets::force_set_metadata(Origin::root(), 0, vec![1u8; 10], vec![1u8; 10], 8, false)); + assert_ok!(Assets::force_set_metadata( + Origin::root(), + 0, + vec![1u8; 10], + vec![1u8; 10], + 8, + false + )); assert_ne!(Metadata::::get(0), asset_original_metadata); //attempt to set metadata for non-existent asset class @@ -643,11 +685,25 @@ fn force_metadata_should_work() { //string length limit check let limit = StringLimit::get() as usize; assert_noop!( - Assets::force_set_metadata(Origin::root(), 0, vec![0u8; limit + 1], vec![0u8; 10], 8, false), + Assets::force_set_metadata( + Origin::root(), + 0, + vec![0u8; limit + 1], + vec![0u8; 10], + 8, + false + ), Error::::BadMetadata ); assert_noop!( - Assets::force_set_metadata(Origin::root(), 0, vec![0u8; 10], vec![0u8; limit + 1], 8, false), + Assets::force_set_metadata( + Origin::root(), + 0, + vec![0u8; 10], + vec![0u8; limit + 1], + 8, + false + ), Error::::BadMetadata ); @@ -662,7 +718,7 @@ fn force_metadata_should_work() { } #[test] -fn force_asset_status_should_work(){ +fn force_asset_status_should_work() { new_test_ext().execute_with(|| { Balances::make_free_balance_be(&1, 10); Balances::make_free_balance_be(&2, 10); @@ -680,7 +736,7 @@ fn force_asset_status_should_work(){ //account on outbound transfer will cleanup for balance < min_balance assert_ok!(Assets::transfer(Origin::signed(1), 0, 2, 1)); - assert_eq!(Assets::balance(0,1), 0); + 
assert_eq!(Assets::balance(0, 1), 0); //won't create new account with balance below min_balance assert_noop!(Assets::transfer(Origin::signed(2), 0, 3, 50), TokenError::BelowMinimum); @@ -715,7 +771,10 @@ fn balance_conversion_should_work() { Err(ConversionError::AssetMissing) ); assert_eq!( - BalanceToAssetBalance::::to_asset_balance(100, not_sufficient), + BalanceToAssetBalance::::to_asset_balance( + 100, + not_sufficient + ), Err(ConversionError::AssetNotSufficient) ); // 10 / 1 == 10 -> the conversion should 10x the value diff --git a/frame/assets/src/types.rs b/frame/assets/src/types.rs index 72b5e2fc25238..fd85195265c97 100644 --- a/frame/assets/src/types.rs +++ b/frame/assets/src/types.rs @@ -18,22 +18,17 @@ //! Various basic types for use in the assets pallet. use super::*; -use scale_info::TypeInfo; use frame_support::pallet_prelude::*; +use scale_info::TypeInfo; use frame_support::traits::{fungible, tokens::BalanceConversion}; -use sp_runtime::{FixedPointNumber, FixedPointOperand, FixedU128}; -use sp_runtime::traits::Convert; +use sp_runtime::{traits::Convert, FixedPointNumber, FixedPointOperand, FixedU128}; pub(super) type DepositBalanceOf = <>::Currency as Currency<::AccountId>>::Balance; #[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] -pub struct AssetDetails< - Balance, - AccountId, - DepositBalance, -> { +pub struct AssetDetails { /// Can change `owner`, `issuer`, `freezer` and `admin` accounts. pub(super) owner: AccountId, /// Can mint tokens. 
@@ -145,7 +140,9 @@ pub trait FrozenBalance { } impl FrozenBalance for () { - fn frozen_balance(_: AssetId, _: &AccountId) -> Option { None } + fn frozen_balance(_: AssetId, _: &AccountId) -> Option { + None + } fn died(_: AssetId, _: &AccountId) {} } @@ -176,10 +173,7 @@ pub(super) struct DebitFlags { impl From for DebitFlags { fn from(f: TransferFlags) -> Self { - Self { - keep_alive: f.keep_alive, - best_effort: f.best_effort, - } + Self { keep_alive: f.keep_alive, best_effort: f.best_effort } } } @@ -206,7 +200,7 @@ type BalanceOf = >>::Balance; /// minimum balance and the minimum asset balance. pub struct BalanceToAssetBalance(PhantomData<(F, T, CON, I)>); impl BalanceConversion, AssetIdOf, AssetBalanceOf> -for BalanceToAssetBalance + for BalanceToAssetBalance where F: fungible::Inspect>, T: Config, diff --git a/frame/assets/src/weights.rs b/frame/assets/src/weights.rs index ae5462288a306..51565e6a27ba3 100644 --- a/frame/assets/src/weights.rs +++ b/frame/assets/src/weights.rs @@ -35,18 +35,20 @@ // --output=./frame/assets/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_assets. 
pub trait WeightInfo { fn create() -> Weight; fn force_create() -> Weight; - fn destroy(c: u32, s: u32, a: u32, ) -> Weight; + fn destroy(c: u32, s: u32, a: u32) -> Weight; fn mint() -> Weight; fn burn() -> Weight; fn transfer() -> Weight; @@ -58,9 +60,9 @@ pub trait WeightInfo { fn thaw_asset() -> Weight; fn transfer_ownership() -> Weight; fn set_team() -> Weight; - fn set_metadata(n: u32, s: u32, ) -> Weight; + fn set_metadata(n: u32, s: u32) -> Weight; fn clear_metadata() -> Weight; - fn force_set_metadata(n: u32, s: u32, ) -> Weight; + fn force_set_metadata(n: u32, s: u32) -> Weight; fn force_clear_metadata() -> Weight; fn force_asset_status() -> Weight; fn approve_transfer() -> Weight; @@ -82,7 +84,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn destroy(c: u32, s: u32, a: u32, ) -> Weight { + fn destroy(c: u32, s: u32, a: u32) -> Weight { (0 as Weight) // Standard Error: 34_000 .saturating_add((22_206_000 as Weight).saturating_mul(c as Weight)) @@ -154,7 +156,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn set_metadata(_n: u32, s: u32, ) -> Weight { + fn set_metadata(_n: u32, s: u32) -> Weight { (47_510_000 as Weight) // Standard Error: 0 .saturating_add((6_000 as Weight).saturating_mul(s as Weight)) @@ -166,7 +168,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn force_set_metadata(_n: u32, s: u32, ) -> Weight { + fn force_set_metadata(_n: u32, s: u32) -> Weight { (24_297_000 as Weight) // Standard Error: 0 .saturating_add((7_000 as Weight).saturating_mul(s as Weight)) @@ -217,7 +219,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } 
- fn destroy(c: u32, s: u32, a: u32, ) -> Weight { + fn destroy(c: u32, s: u32, a: u32) -> Weight { (0 as Weight) // Standard Error: 34_000 .saturating_add((22_206_000 as Weight).saturating_mul(c as Weight)) @@ -289,7 +291,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn set_metadata(_n: u32, s: u32, ) -> Weight { + fn set_metadata(_n: u32, s: u32) -> Weight { (47_510_000 as Weight) // Standard Error: 0 .saturating_add((6_000 as Weight).saturating_mul(s as Weight)) @@ -301,7 +303,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn force_set_metadata(_n: u32, s: u32, ) -> Weight { + fn force_set_metadata(_n: u32, s: u32) -> Weight { (24_297_000 as Weight) // Standard Error: 0 .saturating_add((7_000 as Weight).saturating_mul(s as Weight)) diff --git a/frame/atomic-swap/src/lib.rs b/frame/atomic-swap/src/lib.rs index febdb50975665..ce8dc83f584a8 100644 --- a/frame/atomic-swap/src/lib.rs +++ b/frame/atomic-swap/src/lib.rs @@ -42,17 +42,21 @@ mod tests; -use sp_std::{prelude::*, marker::PhantomData, ops::{Deref, DerefMut}}; -use sp_io::hashing::blake2_256; +use codec::{Decode, Encode}; use frame_support::{ - RuntimeDebugNoBound, - traits::{Get, Currency, ReservableCurrency, BalanceStatus}, - weights::Weight, dispatch::DispatchResult, + traits::{BalanceStatus, Currency, Get, ReservableCurrency}, + weights::Weight, + RuntimeDebugNoBound, }; -use codec::{Encode, Decode}; use scale_info::TypeInfo; +use sp_io::hashing::blake2_256; use sp_runtime::RuntimeDebug; +use sp_std::{ + marker::PhantomData, + ops::{Deref, DerefMut}, + prelude::*, +}; /// Pending atomic swap operation. 
#[derive(Clone, Eq, PartialEq, RuntimeDebugNoBound, Encode, Decode, TypeInfo)] @@ -96,14 +100,20 @@ pub struct BalanceSwapAction> { _marker: PhantomData, } -impl BalanceSwapAction where C: ReservableCurrency { +impl BalanceSwapAction +where + C: ReservableCurrency, +{ /// Create a new swap action value of balance. pub fn new(value: >::Balance) -> Self { Self { value, _marker: PhantomData } } } -impl Deref for BalanceSwapAction where C: ReservableCurrency { +impl Deref for BalanceSwapAction +where + C: ReservableCurrency, +{ type Target = >::Balance; fn deref(&self) -> &Self::Target { @@ -111,14 +121,18 @@ impl Deref for BalanceSwapAction where C: Reservable } } -impl DerefMut for BalanceSwapAction where C: ReservableCurrency { +impl DerefMut for BalanceSwapAction +where + C: ReservableCurrency, +{ fn deref_mut(&mut self) -> &mut Self::Target { &mut self.value } } impl SwapAction for BalanceSwapAction - where C: ReservableCurrency +where + C: ReservableCurrency, { fn reserve(&self, source: &AccountId) -> DispatchResult { C::reserve(&source, self.value) @@ -141,9 +155,9 @@ pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; /// Atomic swap's pallet configuration trait. #[pallet::config] @@ -171,9 +185,12 @@ pub mod pallet { pub struct Pallet(PhantomData); #[pallet::storage] - pub type PendingSwaps = StorageDoubleMap<_, - Twox64Concat, T::AccountId, - Blake2_128Concat, HashedProof, + pub type PendingSwaps = StorageDoubleMap< + _, + Twox64Concat, + T::AccountId, + Blake2_128Concat, + HashedProof, PendingSwap, >; @@ -211,7 +228,7 @@ pub mod pallet { } /// Old name generated by `decl_event`. 
- #[deprecated(note="use `Event` instead")] + #[deprecated(note = "use `Event` instead")] pub type RawEvent = Event; #[pallet::call] @@ -251,9 +268,7 @@ pub mod pallet { }; PendingSwaps::::insert(target.clone(), hashed_proof.clone(), swap.clone()); - Self::deposit_event( - Event::NewSwap(target, hashed_proof, swap) - ); + Self::deposit_event(Event::NewSwap(target, hashed_proof, swap)); Ok(()) } @@ -276,25 +291,20 @@ pub mod pallet { proof: Vec, action: T::SwapAction, ) -> DispatchResult { - ensure!( - proof.len() <= T::ProofLimit::get() as usize, - Error::::ProofTooLarge, - ); + ensure!(proof.len() <= T::ProofLimit::get() as usize, Error::::ProofTooLarge,); let target = ensure_signed(origin)?; let hashed_proof = blake2_256(&proof); - let swap = PendingSwaps::::get(&target, hashed_proof) - .ok_or(Error::::InvalidProof)?; + let swap = + PendingSwaps::::get(&target, hashed_proof).ok_or(Error::::InvalidProof)?; ensure!(swap.action == action, Error::::ClaimActionMismatch); let succeeded = swap.action.claim(&swap.source, &target); PendingSwaps::::remove(target.clone(), hashed_proof.clone()); - Self::deposit_event( - Event::SwapClaimed(target, hashed_proof, succeeded) - ); + Self::deposit_event(Event::SwapClaimed(target, hashed_proof, succeeded)); Ok(()) } @@ -313,12 +323,8 @@ pub mod pallet { ) -> DispatchResult { let source = ensure_signed(origin)?; - let swap = PendingSwaps::::get(&target, hashed_proof) - .ok_or(Error::::NotExist)?; - ensure!( - swap.source == source, - Error::::SourceMismatch, - ); + let swap = PendingSwaps::::get(&target, hashed_proof).ok_or(Error::::NotExist)?; + ensure!(swap.source == source, Error::::SourceMismatch,); ensure!( frame_system::Pallet::::block_number() >= swap.end_block, Error::::DurationNotPassed, @@ -327,9 +333,7 @@ pub mod pallet { swap.action.cancel(&swap.source); PendingSwaps::::remove(&target, hashed_proof.clone()); - Self::deposit_event( - Event::SwapCancelled(target, hashed_proof) - ); + 
Self::deposit_event(Event::SwapCancelled(target, hashed_proof)); Ok(()) } diff --git a/frame/atomic-swap/src/tests.rs b/frame/atomic-swap/src/tests.rs index 11e74be9b4e7f..2165b403dd35d 100644 --- a/frame/atomic-swap/src/tests.rs +++ b/frame/atomic-swap/src/tests.rs @@ -84,12 +84,7 @@ const B: u64 = 2; pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - let genesis = pallet_balances::GenesisConfig:: { - balances: vec![ - (A, 100), - (B, 200), - ], - }; + let genesis = pallet_balances::GenesisConfig:: { balances: vec![(A, 100), (B, 200)] }; genesis.assimilate_storage(&mut t).unwrap(); t.into() } @@ -112,7 +107,8 @@ fn two_party_successful_swap() { hashed_proof.clone(), BalanceSwapAction::new(50), 1000, - ).unwrap(); + ) + .unwrap(); assert_eq!(Balances::free_balance(A), 100 - 50); assert_eq!(Balances::free_balance(B), 200); @@ -126,7 +122,8 @@ fn two_party_successful_swap() { hashed_proof.clone(), BalanceSwapAction::new(75), 1000, - ).unwrap(); + ) + .unwrap(); assert_eq!(Balances::free_balance(A), 100); assert_eq!(Balances::free_balance(B), 200 - 75); @@ -134,11 +131,8 @@ fn two_party_successful_swap() { // A reveals the proof and claims the swap on chain2. chain2.execute_with(|| { - AtomicSwap::claim_swap( - Origin::signed(A), - proof.to_vec(), - BalanceSwapAction::new(75), - ).unwrap(); + AtomicSwap::claim_swap(Origin::signed(A), proof.to_vec(), BalanceSwapAction::new(75)) + .unwrap(); assert_eq!(Balances::free_balance(A), 100 + 75); assert_eq!(Balances::free_balance(B), 200 - 75); @@ -146,11 +140,8 @@ fn two_party_successful_swap() { // B use the revealed proof to claim the swap on chain1. 
chain1.execute_with(|| { - AtomicSwap::claim_swap( - Origin::signed(B), - proof.to_vec(), - BalanceSwapAction::new(50), - ).unwrap(); + AtomicSwap::claim_swap(Origin::signed(B), proof.to_vec(), BalanceSwapAction::new(50)) + .unwrap(); assert_eq!(Balances::free_balance(A), 100 - 50); assert_eq!(Balances::free_balance(B), 200 + 50); diff --git a/frame/aura/src/lib.rs b/frame/aura/src/lib.rs index 7cc9412776df7..41fb69dfb5454 100644 --- a/frame/aura/src/lib.rs +++ b/frame/aura/src/lib.rs @@ -37,20 +37,22 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::prelude::*; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use frame_support::{ - Parameter, traits::{Get, FindAuthor, OneSessionHandler, OnTimestampSet}, ConsensusEngineId, + traits::{FindAuthor, Get, OnTimestampSet, OneSessionHandler}, + ConsensusEngineId, Parameter, }; +use sp_consensus_aura::{AuthorityIndex, ConsensusLog, Slot, AURA_ENGINE_ID}; use sp_runtime::{ + generic::DigestItem, + traits::{IsMember, Member, SaturatedConversion, Saturating, Zero}, RuntimeAppPublic, - traits::{SaturatedConversion, Saturating, Zero, Member, IsMember}, generic::DigestItem, }; -use sp_consensus_aura::{AURA_ENGINE_ID, ConsensusLog, AuthorityIndex, Slot}; +use sp_std::prelude::*; +pub mod migrations; mod mock; mod tests; -pub mod migrations; pub use pallet::*; @@ -63,7 +65,11 @@ pub mod pallet { #[pallet::config] pub trait Config: pallet_timestamp::Config + frame_system::Config { /// The identifier type for an authority. 
- type AuthorityId: Member + Parameter + RuntimeAppPublic + Default + MaybeSerializeDeserialize; + type AuthorityId: Member + + Parameter + + RuntimeAppPublic + + Default + + MaybeSerializeDeserialize; } #[pallet::pallet] @@ -123,10 +129,8 @@ impl Pallet { fn change_authorities(new: Vec) { >::put(&new); - let log: DigestItem = DigestItem::Consensus( - AURA_ENGINE_ID, - ConsensusLog::AuthoritiesChange(new).encode() - ); + let log: DigestItem = + DigestItem::Consensus(AURA_ENGINE_ID, ConsensusLog::AuthoritiesChange(new).encode()); >::deposit_log(log.into()); } @@ -143,7 +147,7 @@ impl Pallet { let pre_runtime_digests = digest.logs.iter().filter_map(|d| d.as_pre_runtime()); for (id, mut data) in pre_runtime_digests { if id == AURA_ENGINE_ID { - return Slot::decode(&mut data).ok(); + return Slot::decode(&mut data).ok() } } @@ -166,14 +170,16 @@ impl OneSessionHandler for Pallet { type Key = T::AuthorityId; fn on_genesis_session<'a, I: 'a>(validators: I) - where I: Iterator + where + I: Iterator, { let authorities = validators.map(|(_, k)| k).collect::>(); Self::initialize_authorities(&authorities); } fn on_new_session<'a, I: 'a>(changed: bool, validators: I, _queued_validators: I) - where I: Iterator + where + I: Iterator, { // instant changes if changed { @@ -196,8 +202,9 @@ impl OneSessionHandler for Pallet { } impl FindAuthor for Pallet { - fn find_author<'a, I>(digests: I) -> Option where - I: 'a + IntoIterator + fn find_author<'a, I>(digests: I) -> Option + where + I: 'a + IntoIterator, { for (id, mut data) in digests.into_iter() { if id == AURA_ENGINE_ID { @@ -220,7 +227,8 @@ impl> FindAuthor for FindAccountFromAuthorIndex { fn find_author<'a, I>(digests: I) -> Option - where I: 'a + IntoIterator + where + I: 'a + IntoIterator, { let i = Inner::find_author(digests)?; @@ -234,9 +242,7 @@ pub type AuraAuthorId = FindAccountFromAuthorIndex>; impl IsMember for Pallet { fn is_member(authority_id: &T::AuthorityId) -> bool { - Self::authorities() - .iter() - .any(|id| 
id == authority_id) + Self::authorities().iter().any(|id| id == authority_id) } } @@ -248,6 +254,9 @@ impl OnTimestampSet for Pallet { let timestamp_slot = moment / slot_duration; let timestamp_slot = Slot::from(timestamp_slot.saturated_into::()); - assert!(CurrentSlot::::get() == timestamp_slot, "Timestamp slot must match `CurrentSlot`"); + assert!( + CurrentSlot::::get() == timestamp_slot, + "Timestamp slot must match `CurrentSlot`" + ); } } diff --git a/frame/aura/src/migrations.rs b/frame/aura/src/migrations.rs index 038c5b3f3f18b..e194c17406b63 100644 --- a/frame/aura/src/migrations.rs +++ b/frame/aura/src/migrations.rs @@ -17,11 +17,13 @@ //! Migrations for the AURA pallet. -use frame_support::{traits::Get, weights::Weight, pallet_prelude::*}; +use frame_support::{pallet_prelude::*, traits::Get, weights::Weight}; struct __LastTimestamp(sp_std::marker::PhantomData); impl frame_support::traits::StorageInstance for __LastTimestamp { - fn pallet_prefix() -> &'static str { T::PalletPrefix::get() } + fn pallet_prefix() -> &'static str { + T::PalletPrefix::get() + } const STORAGE_PREFIX: &'static str = "LastTimestamp"; } diff --git a/frame/aura/src/mock.rs b/frame/aura/src/mock.rs index aff6b76a7a49f..72d457165d3c0 100644 --- a/frame/aura/src/mock.rs +++ b/frame/aura/src/mock.rs @@ -20,10 +20,13 @@ #![cfg(test)] use crate as pallet_aura; -use sp_consensus_aura::ed25519::AuthorityId; -use sp_runtime::{traits::IdentityLookup, testing::{Header, UintAuthorityId}}; use frame_support::{parameter_types, traits::GenesisBuild}; +use sp_consensus_aura::ed25519::AuthorityId; use sp_core::H256; +use sp_runtime::{ + testing::{Header, UintAuthorityId}, + traits::IdentityLookup, +}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -86,8 +89,10 @@ impl pallet_aura::Config for Test { pub fn new_test_ext(authorities: Vec) -> sp_io::TestExternalities { let mut t = 
frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_aura::GenesisConfig::{ + pallet_aura::GenesisConfig:: { authorities: authorities.into_iter().map(|a| UintAuthorityId(a).to_public_key()).collect(), - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); t.into() } diff --git a/frame/aura/src/tests.rs b/frame/aura/src/tests.rs index 18e14e802bd32..14e79ab54753c 100644 --- a/frame/aura/src/tests.rs +++ b/frame/aura/src/tests.rs @@ -19,7 +19,7 @@ #![cfg(test)] -use crate::mock::{Aura, new_test_ext}; +use crate::mock::{new_test_ext, Aura}; #[test] fn initial_values() { diff --git a/frame/authority-discovery/src/lib.rs b/frame/authority-discovery/src/lib.rs index 1f480926209ec..e30bcb6296620 100644 --- a/frame/authority-discovery/src/lib.rs +++ b/frame/authority-discovery/src/lib.rs @@ -23,16 +23,16 @@ // Ensure we're `no_std` when compiling for Wasm. #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::prelude::*; use frame_support::traits::OneSessionHandler; use sp_authority_discovery::AuthorityId; +use sp_std::prelude::*; pub use pallet::*; #[frame_support::pallet] pub mod pallet { - use frame_support::pallet_prelude::*; use super::*; + use frame_support::pallet_prelude::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -45,20 +45,12 @@ pub mod pallet { #[pallet::storage] #[pallet::getter(fn keys)] /// Keys of the current authority set. - pub(super) type Keys = StorageValue< - _, - Vec, - ValueQuery, - >; + pub(super) type Keys = StorageValue<_, Vec, ValueQuery>; #[pallet::storage] #[pallet::getter(fn next_keys)] /// Keys of the next authority set. 
- pub(super) type NextKeys = StorageValue< - _, - Vec, - ValueQuery, - >; + pub(super) type NextKeys = StorageValue<_, Vec, ValueQuery>; #[pallet::genesis_config] pub struct GenesisConfig { @@ -68,9 +60,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - Self { - keys: Default::default(), - } + Self { keys: Default::default() } } } #[pallet::genesis_build] @@ -148,18 +138,18 @@ impl OneSessionHandler for Pallet { #[cfg(test)] mod tests { - use crate as pallet_authority_discovery; use super::*; - use sp_authority_discovery::AuthorityPair; + use crate as pallet_authority_discovery; + use frame_support::{parameter_types, traits::GenesisBuild}; use sp_application_crypto::Pair; + use sp_authority_discovery::AuthorityPair; use sp_core::{crypto::key_types, H256}; use sp_io::TestExternalities; use sp_runtime::{ - testing::{Header, UintAuthorityId}, traits::{ConvertInto, IdentityLookup, OpaqueKeys}, - Perbill, KeyTypeId, + testing::{Header, UintAuthorityId}, + traits::{ConvertInto, IdentityLookup, OpaqueKeys}, + KeyTypeId, Perbill, }; - use frame_support::parameter_types; - use frame_support::traits::GenesisBuild; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -260,41 +250,44 @@ mod tests { // everywhere. let account_id = AuthorityPair::from_seed_slice(vec![10; 32].as_ref()).unwrap().public(); - let mut first_authorities: Vec = vec![0, 1].into_iter() + let mut first_authorities: Vec = vec![0, 1] + .into_iter() .map(|i| AuthorityPair::from_seed_slice(vec![i; 32].as_ref()).unwrap().public()) .map(AuthorityId::from) .collect(); - let second_authorities: Vec = vec![2, 3].into_iter() + let second_authorities: Vec = vec![2, 3] + .into_iter() .map(|i| AuthorityPair::from_seed_slice(vec![i; 32].as_ref()).unwrap().public()) .map(AuthorityId::from) .collect(); // Needed for `pallet_session::OneSessionHandler::on_new_session`. 
- let second_authorities_and_account_ids = second_authorities.clone() + let second_authorities_and_account_ids = second_authorities + .clone() .into_iter() .map(|id| (&account_id, id)) - .collect:: >(); + .collect::>(); - let mut third_authorities: Vec = vec![4, 5].into_iter() + let mut third_authorities: Vec = vec![4, 5] + .into_iter() .map(|i| AuthorityPair::from_seed_slice(vec![i; 32].as_ref()).unwrap().public()) .map(AuthorityId::from) .collect(); // Needed for `pallet_session::OneSessionHandler::on_new_session`. - let third_authorities_and_account_ids = third_authorities.clone() + let third_authorities_and_account_ids = third_authorities + .clone() .into_iter() .map(|id| (&account_id, id)) - .collect:: >(); + .collect::>(); // Build genesis. - let mut t = frame_system::GenesisConfig::default() - .build_storage::() - .unwrap(); - + let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); GenesisBuild::::assimilate_storage( - &pallet_authority_discovery::GenesisConfig{keys: vec![]}, - &mut t - ).unwrap(); + &pallet_authority_discovery::GenesisConfig { keys: vec![] }, + &mut t, + ) + .unwrap(); // Create externalities. 
let mut externalities = TestExternalities::new(t); @@ -303,7 +296,7 @@ mod tests { use frame_support::traits::OneSessionHandler; AuthorityDiscovery::on_genesis_session( - first_authorities.iter().map(|id| (id, id.clone())) + first_authorities.iter().map(|id| (id, id.clone())), ); first_authorities.sort(); let mut authorities_returned = AuthorityDiscovery::authorities(); @@ -318,8 +311,7 @@ mod tests { ); let authorities_returned = AuthorityDiscovery::authorities(); assert_eq!( - first_authorities, - authorities_returned, + first_authorities, authorities_returned, "Expected authority set not to change as `changed` was set to false.", ); @@ -329,7 +321,8 @@ mod tests { second_authorities_and_account_ids.into_iter(), third_authorities_and_account_ids.clone().into_iter(), ); - let mut second_and_third_authorities = second_authorities.iter() + let mut second_and_third_authorities = second_authorities + .iter() .chain(third_authorities.iter()) .cloned() .collect::>(); diff --git a/frame/authorship/src/lib.rs b/frame/authorship/src/lib.rs index de96a1cead7ad..358b5185bbacd 100644 --- a/frame/authorship/src/lib.rs +++ b/frame/authorship/src/lib.rs @@ -21,13 +21,14 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::{result, prelude::*, collections::btree_set::BTreeSet}; +use codec::{Decode, Encode}; use frame_support::{ - dispatch, traits::{FindAuthor, VerifySeal, Get}, + dispatch, + traits::{FindAuthor, Get, VerifySeal}, }; -use codec::{Encode, Decode}; +use sp_authorship::{InherentError, UnclesInherentData, INHERENT_IDENTIFIER}; use sp_runtime::traits::{Header as HeaderT, One, Saturating}; -use sp_authorship::{INHERENT_IDENTIFIER, UnclesInherentData, InherentError}; +use sp_std::{collections::btree_set::BTreeSet, prelude::*, result}; const MAX_UNCLES: usize = 10; @@ -56,15 +57,15 @@ pub trait FilterUncle { /// Do additional filtering on a seal-checked uncle block, with the accumulated /// filter. 
- fn filter_uncle(header: &Header, acc: &mut Self::Accumulator) - -> Result, &'static str>; + fn filter_uncle( + header: &Header, + acc: &mut Self::Accumulator, + ) -> Result, &'static str>; } impl FilterUncle for () { type Accumulator = (); - fn filter_uncle(_: &H, _acc: &mut Self::Accumulator) - -> Result, &'static str> - { + fn filter_uncle(_: &H, _acc: &mut Self::Accumulator) -> Result, &'static str> { Ok(None) } } @@ -74,14 +75,10 @@ impl FilterUncle for () { /// equivocating is high. pub struct SealVerify(sp_std::marker::PhantomData); -impl> FilterUncle - for SealVerify -{ +impl> FilterUncle for SealVerify { type Accumulator = (); - fn filter_uncle(header: &Header, _acc: &mut ()) - -> Result, &'static str> - { + fn filter_uncle(header: &Header, _acc: &mut ()) -> Result, &'static str> { T::verify_seal(header) } } @@ -92,8 +89,7 @@ impl> FilterUncle /// This does O(n log n) work in the number of uncles included. pub struct OnePerAuthorPerHeight(sp_std::marker::PhantomData<(T, N)>); -impl FilterUncle - for OnePerAuthorPerHeight +impl FilterUncle for OnePerAuthorPerHeight where Header: HeaderT + PartialEq, Header::Number: Ord, @@ -102,15 +98,16 @@ where { type Accumulator = BTreeSet<(Header::Number, Author)>; - fn filter_uncle(header: &Header, acc: &mut Self::Accumulator) - -> Result, &'static str> - { + fn filter_uncle( + header: &Header, + acc: &mut Self::Accumulator, + ) -> Result, &'static str> { let author = T::verify_seal(header)?; let number = header.number(); if let Some(ref author) = author { if !acc.insert((number.clone(), author.clone())) { - return Err("more than one uncle per number per author included"); + return Err("more than one uncle per number per author included") } } @@ -126,9 +123,9 @@ enum UncleEntryItem { } #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::config] pub trait Config: frame_system::Config { @@ -161,10 +158,8 @@ pub mod 
pallet { #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet(_); - #[pallet::hooks] impl Hooks> for Pallet { - fn on_initialize(now: T::BlockNumber) -> Weight { let uncle_generations = T::UncleGenerations::get(); // prune uncles that are older than the allowed number of generations. @@ -189,11 +184,8 @@ pub mod pallet { #[pallet::storage] /// Uncles - pub(super) type Uncles = StorageValue< - _, - Vec>, - ValueQuery, - >; + pub(super) type Uncles = + StorageValue<_, Vec>, ValueQuery>; #[pallet::storage] /// Author of current block. @@ -203,7 +195,6 @@ pub mod pallet { /// Whether uncles were already set in this block. pub(super) type DidSetUncles = StorageValue<_, bool, ValueQuery>; - #[pallet::error] pub enum Error { /// The uncle parent not in the chain. @@ -251,14 +242,16 @@ pub mod pallet { if !uncles.is_empty() { let prev_uncles = >::get(); - let mut existing_hashes: Vec<_> = prev_uncles.into_iter().filter_map(|entry| - match entry { + let mut existing_hashes: Vec<_> = prev_uncles + .into_iter() + .filter_map(|entry| match entry { UncleEntryItem::InclusionHeight(_) => None, UncleEntryItem::Uncle(h, _) => Some(h), - } - ).collect(); + }) + .collect(); - let mut acc: >::Accumulator = Default::default(); + let mut acc: >::Accumulator = + Default::default(); for uncle in uncles { match Self::verify_uncle(&uncle, &existing_hashes, &mut acc) { @@ -270,10 +263,10 @@ pub mod pallet { if new_uncles.len() == MAX_UNCLES { break } - } + }, Err(_) => { // skip this uncle - } + }, } } } @@ -285,14 +278,14 @@ pub mod pallet { } } - fn check_inherent(call: &Self::Call, _data: &InherentData) -> result::Result<(), Self::Error> { + fn check_inherent( + call: &Self::Call, + _data: &InherentData, + ) -> result::Result<(), Self::Error> { match call { - Call::set_uncles { ref new_uncles } if new_uncles.len() > MAX_UNCLES => { - Err(InherentError::Uncles(Error::::TooManyUncles.as_str().into())) - } - _ => { - Ok(()) - }, + Call::set_uncles { ref new_uncles } if 
new_uncles.len() > MAX_UNCLES => + Err(InherentError::Uncles(Error::::TooManyUncles.as_str().into())), + _ => Ok(()), } } @@ -310,7 +303,7 @@ impl Pallet { pub fn author() -> T::AccountId { // Check the memoized storage value. if let Some(author) = >::get() { - return author; + return author } let digest = >::digest(); @@ -332,11 +325,10 @@ impl Pallet { let mut acc: >::Accumulator = Default::default(); for uncle in new_uncles { - let prev_uncles = uncles.iter().filter_map(|entry| - match entry { - UncleEntryItem::InclusionHeight(_) => None, - UncleEntryItem::Uncle(h, _) => Some(h), - }); + let prev_uncles = uncles.iter().filter_map(|entry| match entry { + UncleEntryItem::InclusionHeight(_) => None, + UncleEntryItem::Uncle(h, _) => Some(h), + }); let author = Self::verify_uncle(&uncle, prev_uncles, &mut acc)?; let hash = uncle.hash(); @@ -351,7 +343,7 @@ impl Pallet { Ok(()) } - fn verify_uncle<'a, I: IntoIterator>( + fn verify_uncle<'a, I: IntoIterator>( uncle: &T::Header, existing_uncles: I, accumulator: &mut >::Accumulator, @@ -368,23 +360,23 @@ impl Pallet { let hash = uncle.hash(); if uncle.number() < &One::one() { - return Err(Error::::GenesisUncle.into()); + return Err(Error::::GenesisUncle.into()) } if uncle.number() > &maximum_height { - return Err(Error::::TooHighUncle.into()); + return Err(Error::::TooHighUncle.into()) } { let parent_number = uncle.number().clone() - One::one(); let parent_hash = >::block_hash(&parent_number); if &parent_hash != uncle.parent_hash() { - return Err(Error::::InvalidUncleParent.into()); + return Err(Error::::InvalidUncleParent.into()) } } if uncle.number() < &minimum_height { - return Err(Error::::OldUncle.into()); + return Err(Error::::OldUncle.into()) } let duplicate = existing_uncles.into_iter().any(|h| *h == hash); @@ -412,13 +404,15 @@ impl Pallet { #[cfg(test)] mod tests { - use crate as pallet_authorship; use super::*; + use crate as pallet_authorship; + use frame_support::{parameter_types, ConsensusEngineId}; use 
sp_core::H256; use sp_runtime::{ - traits::{BlakeTwo256, IdentityLookup}, testing::Header, generic::DigestItem, + generic::DigestItem, + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, }; - use frame_support::{parameter_types, ConsensusEngineId}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -483,11 +477,12 @@ mod tests { impl FindAuthor for AuthorGiven { fn find_author<'a, I>(digests: I) -> Option - where I: 'a + IntoIterator + where + I: 'a + IntoIterator, { for (id, data) in digests { if id == TEST_ID { - return u64::decode(&mut &data[..]).ok(); + return u64::decode(&mut &data[..]).ok() } } @@ -502,7 +497,8 @@ mod tests { let pre_runtime_digests = header.digest.logs.iter().filter_map(|d| d.as_pre_runtime()); let seals = header.digest.logs.iter().filter_map(|d| d.as_seal()); - let author = AuthorGiven::find_author(pre_runtime_digests).ok_or_else(|| "no author")?; + let author = + AuthorGiven::find_author(pre_runtime_digests).ok_or_else(|| "no author")?; for (id, seal) in seals { if id == TEST_ID { @@ -510,10 +506,10 @@ mod tests { Err(_) => return Err("wrong seal"), Ok(a) => { if a != author { - return Err("wrong author in seal"); + return Err("wrong author in seal") } break - } + }, } } } @@ -533,13 +529,7 @@ mod tests { } fn create_header(number: u64, parent_hash: H256, state_root: H256) -> Header { - Header::new( - number, - Default::default(), - state_root, - parent_hash, - Default::default(), - ) + Header::new(number, Default::default(), state_root, parent_hash, Default::default()) } fn new_test_ext() -> sp_io::TestExternalities { @@ -554,9 +544,14 @@ mod tests { let hash = Default::default(); let author = Default::default(); let uncles = vec![ - InclusionHeight(1u64), Uncle(hash, Some(author)), Uncle(hash, None), Uncle(hash, None), - InclusionHeight(2u64), Uncle(hash, None), - InclusionHeight(3u64), Uncle(hash, None), + InclusionHeight(1u64), + Uncle(hash, Some(author)), 
+ Uncle(hash, None), + Uncle(hash, None), + InclusionHeight(2u64), + Uncle(hash, None), + InclusionHeight(3u64), + Uncle(hash, None), ]; ::Uncles::put(uncles); @@ -595,15 +590,15 @@ mod tests { } let mut canon_chain = CanonChain { - inner: vec![seal_header(create_header(0, Default::default(), Default::default()), 999)], + inner: vec![seal_header( + create_header(0, Default::default(), Default::default()), + 999, + )], }; - let initialize_block = |number, hash: H256| System::initialize( - &number, - &hash, - &Default::default(), - Default::default() - ); + let initialize_block = |number, hash: H256| { + System::initialize(&number, &hash, &Default::default(), Default::default()) + }; for number in 1..8 { initialize_block(number, canon_chain.best_hash()); @@ -691,18 +686,11 @@ mod tests { fn sets_author_lazily() { new_test_ext().execute_with(|| { let author = 42; - let mut header = seal_header( - create_header(1, Default::default(), [1; 32].into()), - author, - ); + let mut header = + seal_header(create_header(1, Default::default(), [1; 32].into()), author); header.digest_mut().pop(); // pop the seal off. 
- System::initialize( - &1, - &Default::default(), - header.digest(), - Default::default(), - ); + System::initialize(&1, &Default::default(), header.digest(), Default::default()); assert_eq!(Authorship::author(), author); }); @@ -716,27 +704,15 @@ mod tests { let author_b = 43; let mut acc: >::Accumulator = Default::default(); - let header_a1 = seal_header( - create_header(1, Default::default(), [1; 32].into()), - author_a, - ); - let header_b1 = seal_header( - create_header(1, Default::default(), [1; 32].into()), - author_b, - ); - - let header_a2_1 = seal_header( - create_header(2, Default::default(), [1; 32].into()), - author_a, - ); - let header_a2_2 = seal_header( - create_header(2, Default::default(), [2; 32].into()), - author_a, - ); - - let mut check_filter = move |uncle| { - Filter::filter_uncle(uncle, &mut acc) - }; + let header_a1 = seal_header(create_header(1, Default::default(), [1; 32].into()), author_a); + let header_b1 = seal_header(create_header(1, Default::default(), [1; 32].into()), author_b); + + let header_a2_1 = + seal_header(create_header(2, Default::default(), [1; 32].into()), author_a); + let header_a2_2 = + seal_header(create_header(2, Default::default(), [2; 32].into()), author_a); + + let mut check_filter = move |uncle| Filter::filter_uncle(uncle, &mut acc); // same height, different author is OK. 
assert_eq!(check_filter(&header_a1), Ok(Some(author_a))); diff --git a/frame/babe/src/benchmarking.rs b/frame/babe/src/benchmarking.rs index 145a82c4f8049..b8a85daf6e666 100644 --- a/frame/babe/src/benchmarking.rs +++ b/frame/babe/src/benchmarking.rs @@ -95,10 +95,7 @@ mod tests { ); println!("equivocation_proof: {:?}", equivocation_proof); - println!( - "equivocation_proof.encode(): {:?}", - equivocation_proof.encode() - ); + println!("equivocation_proof.encode(): {:?}", equivocation_proof.encode()); }); } } diff --git a/frame/babe/src/default_weights.rs b/frame/babe/src/default_weights.rs index f16f589a77cd4..20ac9b961fc8d 100644 --- a/frame/babe/src/default_weights.rs +++ b/frame/babe/src/default_weights.rs @@ -19,7 +19,8 @@ //! This file was not auto-generated. use frame_support::weights::{ - Weight, constants::{WEIGHT_PER_MICROS, WEIGHT_PER_NANOS, RocksDbWeight as DbWeight}, + constants::{RocksDbWeight as DbWeight, WEIGHT_PER_MICROS, WEIGHT_PER_NANOS}, + Weight, }; impl crate::WeightInfo for () { diff --git a/frame/babe/src/equivocation.rs b/frame/babe/src/equivocation.rs index e4a1e81becad3..efc5fdab9f2c0 100644 --- a/frame/babe/src/equivocation.rs +++ b/frame/babe/src/equivocation.rs @@ -37,18 +37,20 @@ use frame_support::traits::{Get, KeyOwnerProofSystem}; use sp_consensus_babe::{EquivocationProof, Slot}; -use sp_runtime::transaction_validity::{ - InvalidTransaction, TransactionPriority, TransactionSource, TransactionValidity, - TransactionValidityError, ValidTransaction, +use sp_runtime::{ + transaction_validity::{ + InvalidTransaction, TransactionPriority, TransactionSource, TransactionValidity, + TransactionValidityError, ValidTransaction, + }, + DispatchResult, Perbill, }; -use sp_runtime::{DispatchResult, Perbill}; use sp_staking::{ offence::{Kind, Offence, OffenceError, ReportOffence}, SessionIndex, }; use sp_std::prelude::*; -use crate::{Call, Pallet, Config}; +use crate::{Call, Config, Pallet}; /// A trait with utility methods for handling 
equivocation reports in BABE. /// The trait provides methods for reporting an offence triggered by a valid @@ -115,9 +117,7 @@ pub struct EquivocationHandler { impl Default for EquivocationHandler { fn default() -> Self { - Self { - _phantom: Default::default(), - } + Self { _phantom: Default::default() } } } @@ -188,30 +188,28 @@ impl Pallet { if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { // discard equivocation report not coming from the local node match source { - TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ } + TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ }, _ => { log::warn!( target: "runtime::babe", "rejecting unsigned report equivocation transaction because it is not local/in-block.", ); - return InvalidTransaction::Call.into(); - } + return InvalidTransaction::Call.into() + }, } // check report staleness is_known_offence::(equivocation_proof, key_owner_proof)?; - let longevity = >::ReportLongevity::get(); + let longevity = + >::ReportLongevity::get(); ValidTransaction::with_tag_prefix("BabeEquivocation") // We assign the maximum priority for any equivocation report. .priority(TransactionPriority::max_value()) // Only one equivocation report for the same offender at the same slot. - .and_provides(( - equivocation_proof.offender.clone(), - *equivocation_proof.slot, - )) + .and_provides((equivocation_proof.offender.clone(), *equivocation_proof.slot)) .longevity(longevity) // We don't propagate this. This can never be included on a remote node. 
.propagate(false) @@ -235,10 +233,7 @@ fn is_known_offence( key_owner_proof: &T::KeyOwnerProof, ) -> Result<(), TransactionValidityError> { // check the membership proof to extract the offender's id - let key = ( - sp_consensus_babe::KEY_TYPE, - equivocation_proof.offender.clone(), - ); + let key = (sp_consensus_babe::KEY_TYPE, equivocation_proof.offender.clone()); let offender = T::KeyOwnerProofSystem::check_proof(key, key_owner_proof.clone()) .ok_or(InvalidTransaction::BadProof)?; diff --git a/frame/babe/src/lib.rs b/frame/babe/src/lib.rs index b52868d1d023f..624531d47b335 100644 --- a/frame/babe/src/lib.rs +++ b/frame/babe/src/lib.rs @@ -24,7 +24,7 @@ use codec::{Decode, Encode}; use frame_support::{ dispatch::DispatchResultWithPostInfo, - traits::{FindAuthor, Get, KeyOwnerProofSystem, OneSessionHandler, OnTimestampSet}, + traits::{FindAuthor, Get, KeyOwnerProofSystem, OnTimestampSet, OneSessionHandler}, weights::{Pays, Weight}, }; use sp_application_crypto::Public; @@ -38,8 +38,8 @@ use sp_std::prelude::*; use sp_consensus_babe::{ digests::{NextConfigDescriptor, NextEpochDescriptor, PreDigest}, - BabeAuthorityWeight, BabeEpochConfiguration, ConsensusLog, Epoch, - EquivocationProof, Slot, BABE_ENGINE_ID, + BabeAuthorityWeight, BabeEpochConfiguration, ConsensusLog, Epoch, EquivocationProof, Slot, + BABE_ENGINE_ID, }; use sp_consensus_vrf::schnorrkel; @@ -80,7 +80,7 @@ pub trait EpochChangeTrigger { pub struct ExternalTrigger; impl EpochChangeTrigger for ExternalTrigger { - fn trigger(_: T::BlockNumber) { } // nothing - trigger is external. + fn trigger(_: T::BlockNumber) {} // nothing - trigger is external. 
} /// A type signifying to BABE that it should perform epoch changes @@ -104,9 +104,9 @@ type MaybeRandomness = Option; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; /// The BABE Pallet #[pallet::pallet] @@ -222,11 +222,8 @@ pub mod pallet { /// Next epoch authorities. #[pallet::storage] - pub(super) type NextAuthorities = StorageValue< - _, - Vec<(AuthorityId, BabeAuthorityWeight)>, - ValueQuery, - >; + pub(super) type NextAuthorities = + StorageValue<_, Vec<(AuthorityId, BabeAuthorityWeight)>, ValueQuery>; /// Randomness under construction. /// @@ -242,13 +239,8 @@ pub mod pallet { /// TWOX-NOTE: `SegmentIndex` is an increasing integer, so this is okay. #[pallet::storage] - pub(super) type UnderConstruction = StorageMap< - _, - Twox64Concat, - u32, - Vec, - ValueQuery, - >; + pub(super) type UnderConstruction = + StorageMap<_, Twox64Concat, u32, Vec, ValueQuery>; /// Temporary value (cleared at block finalization) which is `Some` /// if per-block initialization has already been called for current block. @@ -270,11 +262,8 @@ pub mod pallet { /// entropy was fixed (i.e. it was known to chain observers). Since epochs are defined in /// slots, which may be skipped, the block numbers may not line up with the slot numbers. #[pallet::storage] - pub(super) type EpochStart = StorageValue< - _, - (T::BlockNumber, T::BlockNumber), - ValueQuery, - >; + pub(super) type EpochStart = + StorageValue<_, (T::BlockNumber, T::BlockNumber), ValueQuery>; /// How late the current block is compared to its parent. 
/// @@ -303,10 +292,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - GenesisConfig { - authorities: Default::default(), - epoch_config: Default::default(), - } + GenesisConfig { authorities: Default::default(), epoch_config: Default::default() } } } @@ -315,7 +301,9 @@ pub mod pallet { fn build(&self) { SegmentIndex::::put(0); Pallet::::initialize_authorities(&self.authorities); - EpochConfig::::put(self.epoch_config.clone().expect("epoch_config must not be None")); + EpochConfig::::put( + self.epoch_config.clone().expect("epoch_config must not be None"), + ); } } @@ -359,11 +347,7 @@ pub mod pallet { ) -> DispatchResultWithPostInfo { let reporter = ensure_signed(origin)?; - Self::do_report_equivocation( - Some(reporter), - equivocation_proof, - key_owner_proof, - ) + Self::do_report_equivocation(Some(reporter), equivocation_proof, key_owner_proof) } /// Report authority equivocation/misbehavior. This method will verify @@ -423,8 +407,9 @@ pub mod pallet { pub type BabeKey = [u8; PUBLIC_KEY_LENGTH]; impl FindAuthor for Pallet { - fn find_author<'a, I>(digests: I) -> Option where - I: 'a + IntoIterator + fn find_author<'a, I>(digests: I) -> Option + where + I: 'a + IntoIterator, { for (id, mut data) in digests.into_iter() { if id == BABE_ENGINE_ID { @@ -433,15 +418,13 @@ impl FindAuthor for Pallet { } } - return None; + return None } } impl IsMember for Pallet { fn is_member(authority_id: &AuthorityId) -> bool { - >::authorities() - .iter() - .any(|id| &id.0 == authority_id) + >::authorities().iter().any(|id| &id.0 == authority_id) } } @@ -500,13 +483,11 @@ impl Pallet { // update this function, you must also update the corresponding weight. pub fn next_expected_epoch_change(now: T::BlockNumber) -> Option { let next_slot = Self::current_epoch_start().saturating_add(T::EpochDuration::get()); - next_slot - .checked_sub(*CurrentSlot::::get()) - .map(|slots_remaining| { - // This is a best effort guess. 
Drifts in the slot/block ratio will cause errors here. - let blocks_remaining: T::BlockNumber = slots_remaining.saturated_into(); - now.saturating_add(blocks_remaining) - }) + next_slot.checked_sub(*CurrentSlot::::get()).map(|slots_remaining| { + // This is a best effort guess. Drifts in the slot/block ratio will cause errors here. + let blocks_remaining: T::BlockNumber = slots_remaining.saturated_into(); + now.saturating_add(blocks_remaining) + }) } /// DANGEROUS: Enact an epoch change. Should be done on every block where `should_epoch_change` has returned `true`, @@ -553,10 +534,8 @@ impl Pallet { // so that nodes can track changes. let next_randomness = NextRandomness::::get(); - let next_epoch = NextEpochDescriptor { - authorities: next_authorities, - randomness: next_randomness, - }; + let next_epoch = + NextEpochDescriptor { authorities: next_authorities, randomness: next_randomness }; Self::deposit_consensus(ConsensusLog::NextEpochData(next_epoch)); if let Some(next_config) = NextEpochConfig::::get() { @@ -587,7 +566,8 @@ impl Pallet { duration: T::EpochDuration::get(), authorities: Self::authorities(), randomness: Self::randomness(), - config: EpochConfig::::get().expect("EpochConfig is initialized in genesis; we never `take` or `kill` it; qed"), + config: EpochConfig::::get() + .expect("EpochConfig is initialized in genesis; we never `take` or `kill` it; qed"), } } @@ -606,7 +586,9 @@ impl Pallet { authorities: NextAuthorities::::get(), randomness: NextRandomness::::get(), config: NextEpochConfig::::get().unwrap_or_else(|| { - EpochConfig::::get().expect("EpochConfig is initialized in genesis; we never `take` or `kill` it; qed") + EpochConfig::::get().expect( + "EpochConfig is initialized in genesis; we never `take` or `kill` it; qed", + ) }), } } @@ -617,9 +599,7 @@ impl Pallet { const PROOF: &str = "slot number is u64; it should relate in some way to wall clock time; \ if u64 is not enough we should crash for safety; qed."; - let epoch_start = 
epoch_index - .checked_mul(T::EpochDuration::get()) - .expect(PROOF); + let epoch_start = epoch_index.checked_mul(T::EpochDuration::get()).expect(PROOF); epoch_start.checked_add(*GenesisSlot::::get()).expect(PROOF).into() } @@ -649,19 +629,22 @@ impl Pallet { // => let's ensure that we only modify the storage once per block let initialized = Self::initialized().is_some(); if initialized { - return; + return } - let maybe_pre_digest: Option = >::digest() - .logs - .iter() - .filter_map(|s| s.as_pre_runtime()) - .filter_map(|(id, mut data)| if id == BABE_ENGINE_ID { - PreDigest::decode(&mut data).ok() - } else { - None - }) - .next(); + let maybe_pre_digest: Option = + >::digest() + .logs + .iter() + .filter_map(|s| s.as_pre_runtime()) + .filter_map(|(id, mut data)| { + if id == BABE_ENGINE_ID { + PreDigest::decode(&mut data).ok() + } else { + None + } + }) + .next(); let is_primary = matches!(maybe_pre_digest, Some(PreDigest::Primary(..))); @@ -697,31 +680,22 @@ impl Pallet { let authority_index = digest.authority_index(); // Extract out the VRF output if we have it - digest - .vrf_output() - .and_then(|vrf_output| { - // Reconstruct the bytes of VRFInOut using the authority id. - Authorities::::get() - .get(authority_index as usize) - .and_then(|author| { - schnorrkel::PublicKey::from_bytes(author.0.as_slice()).ok() - }) - .and_then(|pubkey| { - let transcript = sp_consensus_babe::make_transcript( - &Self::randomness(), - current_slot, - EpochIndex::::get(), - ); - - vrf_output.0.attach_input_hash( - &pubkey, - transcript - ).ok() - }) - .map(|inout| { - inout.make_bytes(&sp_consensus_babe::BABE_VRF_INOUT_CONTEXT) - }) - }) + digest.vrf_output().and_then(|vrf_output| { + // Reconstruct the bytes of VRFInOut using the authority id. 
+ Authorities::::get() + .get(authority_index as usize) + .and_then(|author| schnorrkel::PublicKey::from_bytes(author.0.as_slice()).ok()) + .and_then(|pubkey| { + let transcript = sp_consensus_babe::make_transcript( + &Self::randomness(), + current_slot, + EpochIndex::::get(), + ); + + vrf_output.0.attach_input_hash(&pubkey, transcript).ok() + }) + .map(|inout| inout.make_bytes(&sp_consensus_babe::BABE_VRF_INOUT_CONTEXT)) + }) }); // For primary VRF output we place it in the `Initialized` storage @@ -774,7 +748,7 @@ impl Pallet { // validate the equivocation proof if !sp_consensus_babe::check_equivocation_proof(equivocation_proof) { - return Err(Error::::InvalidEquivocationProof.into()); + return Err(Error::::InvalidEquivocationProof.into()) } let validator_set_count = key_owner_proof.validator_count(); @@ -786,7 +760,7 @@ impl Pallet { // check that the slot number is consistent with the session index // in the key ownership proof (i.e. slot is for that epoch) if epoch_index != session_index { - return Err(Error::::InvalidKeyOwnershipProof.into()); + return Err(Error::::InvalidKeyOwnershipProof.into()) } // check the membership proof and extract the offender's id @@ -794,12 +768,8 @@ impl Pallet { let offender = T::KeyOwnerProofSystem::check_proof(key, key_owner_proof) .ok_or(Error::::InvalidKeyOwnershipProof)?; - let offence = BabeEquivocationOffence { - slot, - validator_set_count, - offender, - session_index, - }; + let offence = + BabeEquivocationOffence { slot, validator_set_count, offender, session_index }; let reporters = match reporter { Some(id) => vec![id], @@ -837,7 +807,10 @@ impl OnTimestampSet for Pallet { let timestamp_slot = moment / slot_duration; let timestamp_slot = Slot::from(timestamp_slot.saturated_into::()); - assert!(CurrentSlot::::get() == timestamp_slot, "Timestamp slot must match `CurrentSlot`"); + assert!( + CurrentSlot::::get() == timestamp_slot, + "Timestamp slot must match `CurrentSlot`" + ); } } @@ -850,10 +823,7 @@ impl 
frame_support::traits::EstimateNextSessionRotation::get().saturating_sub(Self::current_epoch_start()) + 1; ( - Some(Permill::from_rational( - *elapsed, - T::EpochDuration::get(), - )), + Some(Permill::from_rational(*elapsed, T::EpochDuration::get())), // Read: Current Slot, Epoch Index, Genesis Slot T::DbWeight::get().reads(3), ) @@ -882,22 +852,20 @@ impl OneSessionHandler for Pallet { type Key = AuthorityId; fn on_genesis_session<'a, I: 'a>(validators: I) - where I: Iterator + where + I: Iterator, { let authorities = validators.map(|(_, k)| (k, 1)).collect::>(); Self::initialize_authorities(&authorities); } fn on_new_session<'a, I: 'a>(_changed: bool, validators: I, queued_validators: I) - where I: Iterator + where + I: Iterator, { - let authorities = validators.map(|(_account, k)| { - (k, 1) - }).collect::>(); + let authorities = validators.map(|(_account, k)| (k, 1)).collect::>(); - let next_authorities = queued_validators.map(|(_account, k)| { - (k, 1) - }).collect::>(); + let next_authorities = queued_validators.map(|(_account, k)| (k, 1)).collect::>(); Self::enact_epoch_change(authorities, next_authorities) } @@ -914,7 +882,7 @@ impl OneSessionHandler for Pallet { fn compute_randomness( last_epoch_randomness: schnorrkel::Randomness, epoch_index: u64, - rho: impl Iterator, + rho: impl Iterator, rho_size_hint: Option, ) -> schnorrkel::Randomness { let mut s = Vec::with_capacity(40 + rho_size_hint.unwrap_or(0) * VRF_OUTPUT_LENGTH); @@ -930,7 +898,7 @@ fn compute_randomness( pub mod migrations { use super::*; - use frame_support::pallet_prelude::{ValueQuery, StorageValue}; + use frame_support::pallet_prelude::{StorageValue, ValueQuery}; /// Something that can return the storage prefix of the `Babe` pallet. 
pub trait BabePalletPrefix: Config { @@ -939,13 +907,14 @@ pub mod migrations { struct __OldNextEpochConfig(sp_std::marker::PhantomData); impl frame_support::traits::StorageInstance for __OldNextEpochConfig { - fn pallet_prefix() -> &'static str { T::pallet_prefix() } + fn pallet_prefix() -> &'static str { + T::pallet_prefix() + } const STORAGE_PREFIX: &'static str = "NextEpochConfig"; } - type OldNextEpochConfig = StorageValue< - __OldNextEpochConfig, Option, ValueQuery - >; + type OldNextEpochConfig = + StorageValue<__OldNextEpochConfig, Option, ValueQuery>; /// A storage migration that adds the current epoch configuration for Babe /// to storage. diff --git a/frame/babe/src/mock.rs b/frame/babe/src/mock.rs index ea54e9f7cea83..795d51e5876f8 100644 --- a/frame/babe/src/mock.rs +++ b/frame/babe/src/mock.rs @@ -17,27 +17,31 @@ //! Test utilities -use codec::Encode; use crate::{self as pallet_babe, Config, CurrentSlot}; -use sp_runtime::{ - Perbill, impl_opaque_keys, - curve::PiecewiseLinear, - testing::{Digest, DigestItem, Header, TestXt,}, - traits::{Header as _, IdentityLookup, OpaqueKeys}, -}; -use frame_system::InitKind; +use codec::Encode; +use frame_election_provider_support::onchain; use frame_support::{ parameter_types, - traits::{KeyOwnerProofSystem, OnInitialize, GenesisBuild}, + traits::{GenesisBuild, KeyOwnerProofSystem, OnInitialize}, }; -use sp_io; -use sp_core::{H256, U256, crypto::{IsWrappedBy, KeyTypeId, Pair}}; +use frame_system::InitKind; +use pallet_session::historical as pallet_session_historical; +use pallet_staking::EraIndex; use sp_consensus_babe::{AuthorityId, AuthorityPair, Slot}; use sp_consensus_vrf::schnorrkel::{VRFOutput, VRFProof}; +use sp_core::{ + crypto::{IsWrappedBy, KeyTypeId, Pair}, + H256, U256, +}; +use sp_io; +use sp_runtime::{ + curve::PiecewiseLinear, + impl_opaque_keys, + testing::{Digest, DigestItem, Header, TestXt}, + traits::{Header as _, IdentityLookup, OpaqueKeys}, + Perbill, +}; use sp_staking::SessionIndex; -use 
pallet_staking::EraIndex; -use frame_election_provider_support::onchain; -use pallet_session::historical as pallet_session_historical; type DummyValidatorId = u64; @@ -277,7 +281,7 @@ pub fn go_to_block(n: u64, s: u64) { /// Slots will grow accordingly to blocks pub fn progress_to_block(n: u64) { let mut slot = u64::from(Babe::current_slot()) + 1; - for i in System::block_number() + 1 ..= n { + for i in System::block_number() + 1..=n { go_to_block(i, slot); slot += 1; } @@ -308,7 +312,7 @@ pub fn make_primary_pre_digest( slot, vrf_output, vrf_proof, - } + }, ); let log = DigestItem::PreRuntime(sp_consensus_babe::BABE_ENGINE_ID, digest_data.encode()); Digest { logs: vec![log] } @@ -319,10 +323,7 @@ pub fn make_secondary_plain_pre_digest( slot: sp_consensus_babe::Slot, ) -> Digest { let digest_data = sp_consensus_babe::digests::PreDigest::SecondaryPlain( - sp_consensus_babe::digests::SecondaryPlainPreDigest { - authority_index, - slot, - } + sp_consensus_babe::digests::SecondaryPlainPreDigest { authority_index, slot }, ); let log = DigestItem::PreRuntime(sp_consensus_babe::BABE_ENGINE_ID, digest_data.encode()); Digest { logs: vec![log] } @@ -340,7 +341,7 @@ pub fn make_secondary_vrf_pre_digest( slot, vrf_output, vrf_proof, - } + }, ); let log = DigestItem::PreRuntime(sp_consensus_babe::BABE_ENGINE_ID, digest_data.encode()); Digest { logs: vec![log] } @@ -348,13 +349,13 @@ pub fn make_secondary_vrf_pre_digest( pub fn make_vrf_output( slot: Slot, - pair: &sp_consensus_babe::AuthorityPair + pair: &sp_consensus_babe::AuthorityPair, ) -> (VRFOutput, VRFProof, [u8; 32]) { let pair = sp_core::sr25519::Pair::from_ref(pair).as_ref(); let transcript = sp_consensus_babe::make_transcript(&Babe::randomness(), slot, 0); let vrf_inout = pair.vrf_sign(transcript); - let vrf_randomness: sp_consensus_vrf::schnorrkel::Randomness = vrf_inout.0 - .make_bytes::<[u8; 32]>(&sp_consensus_babe::BABE_VRF_INOUT_CONTEXT); + let vrf_randomness: sp_consensus_vrf::schnorrkel::Randomness = + 
vrf_inout.0.make_bytes::<[u8; 32]>(&sp_consensus_babe::BABE_VRF_INOUT_CONTEXT); let vrf_output = VRFOutput(vrf_inout.0.to_output()); let vrf_proof = VRFProof(vrf_inout.1); @@ -365,10 +366,12 @@ pub fn new_test_ext(authorities_len: usize) -> sp_io::TestExternalities { new_test_ext_with_pairs(authorities_len).1 } -pub fn new_test_ext_with_pairs(authorities_len: usize) -> (Vec, sp_io::TestExternalities) { - let pairs = (0..authorities_len).map(|i| { - AuthorityPair::from_seed(&U256::from(i).into()) - }).collect::>(); +pub fn new_test_ext_with_pairs( + authorities_len: usize, +) -> (Vec, sp_io::TestExternalities) { + let pairs = (0..authorities_len) + .map(|i| AuthorityPair::from_seed(&U256::from(i).into())) + .collect::>(); let public = pairs.iter().map(|p| p.public()).collect(); @@ -376,13 +379,9 @@ pub fn new_test_ext_with_pairs(authorities_len: usize) -> (Vec, s } pub fn new_test_ext_raw_authorities(authorities: Vec) -> sp_io::TestExternalities { - let mut t = frame_system::GenesisConfig::default() - .build_storage::() - .unwrap(); + let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - let balances: Vec<_> = (0..authorities.len()) - .map(|i| (i as u64, 10_000_000)) - .collect(); + let balances: Vec<_> = (0..authorities.len()).map(|i| (i as u64, 10_000_000)).collect(); pallet_balances::GenesisConfig:: { balances } .assimilate_storage(&mut t) @@ -393,13 +392,7 @@ pub fn new_test_ext_raw_authorities(authorities: Vec) -> sp_io::Tes .iter() .enumerate() .map(|(i, k)| { - ( - i as u64, - i as u64, - MockSessionKeys { - babe_authority: AuthorityId::from(k.clone()), - }, - ) + (i as u64, i as u64, MockSessionKeys { babe_authority: AuthorityId::from(k.clone()) }) }) .collect(); @@ -412,12 +405,7 @@ pub fn new_test_ext_raw_authorities(authorities: Vec) -> sp_io::Tes // controllers are the index + 1000 let stakers: Vec<_> = (0..authorities.len()) .map(|i| { - ( - i as u64, - i as u64 + 1000, - 10_000, - pallet_staking::StakerStatus::::Validator, - 
) + (i as u64, i as u64 + 1000, 10_000, pallet_staking::StakerStatus::::Validator) }) .collect(); diff --git a/frame/babe/src/randomness.rs b/frame/babe/src/randomness.rs index a7e8b31577681..7d18629050213 100644 --- a/frame/babe/src/randomness.rs +++ b/frame/babe/src/randomness.rs @@ -21,7 +21,7 @@ use super::{ AuthorVrfRandomness, Config, EpochStart, NextRandomness, Randomness, VRF_OUTPUT_LENGTH, }; -use frame_support::{traits::Randomness as RandomnessT}; +use frame_support::traits::Randomness as RandomnessT; use sp_runtime::traits::Hash; /// Randomness usable by consensus protocols that **depend** upon finality and take action diff --git a/frame/babe/src/tests.rs b/frame/babe/src/tests.rs index 520a808ab4a57..00ffc7b4edacf 100644 --- a/frame/babe/src/tests.rs +++ b/frame/babe/src/tests.rs @@ -19,7 +19,7 @@ use super::{Call, *}; use frame_support::{ - assert_err, assert_ok, assert_noop, + assert_err, assert_noop, assert_ok, traits::{Currency, EstimateNextSessionRotation, OnFinalize}, weights::{GetDispatchInfo, Pays}, }; @@ -29,10 +29,8 @@ use sp_consensus_babe::{AllowedSlots, BabeEpochConfiguration, Slot}; use sp_core::crypto::Pair; const EMPTY_RANDOMNESS: [u8; 32] = [ - 74, 25, 49, 128, 53, 97, 244, 49, - 222, 202, 176, 2, 231, 66, 95, 10, - 133, 49, 213, 228, 86, 161, 164, 127, - 217, 153, 138, 37, 48, 192, 248, 0, + 74, 25, 49, 128, 53, 97, 244, 49, 222, 202, 176, 2, 231, 66, 95, 10, 133, 49, 213, 228, 86, + 161, 164, 127, 217, 153, 138, 37, 48, 192, 248, 0, ]; #[test] @@ -43,17 +41,17 @@ fn empty_randomness_is_correct() { #[test] fn initial_values() { - new_test_ext(4).execute_with(|| { - assert_eq!(Babe::authorities().len(), 4) - }) + new_test_ext(4).execute_with(|| assert_eq!(Babe::authorities().len(), 4)) } #[test] fn check_module() { new_test_ext(4).execute_with(|| { assert!(!Babe::should_end_session(0), "Genesis does not change sessions"); - assert!(!Babe::should_end_session(200000), - "BABE does not include the block number in epoch calculations"); + 
assert!( + !Babe::should_end_session(200000), + "BABE does not include the block number in epoch calculations" + ); }) } @@ -66,20 +64,10 @@ fn first_block_epoch_zero_start() { let (vrf_output, vrf_proof, vrf_randomness) = make_vrf_output(genesis_slot, &pairs[0]); let first_vrf = vrf_output; - let pre_digest = make_primary_pre_digest( - 0, - genesis_slot, - first_vrf.clone(), - vrf_proof, - ); + let pre_digest = make_primary_pre_digest(0, genesis_slot, first_vrf.clone(), vrf_proof); assert_eq!(Babe::genesis_slot(), Slot::from(0)); - System::initialize( - &1, - &Default::default(), - &pre_digest, - Default::default(), - ); + System::initialize(&1, &Default::default(), &pre_digest, Default::default()); // see implementation of the function for details why: we issue an // epoch-change digest but don't do it via the normal session mechanism. @@ -106,7 +94,7 @@ fn first_block_epoch_zero_start() { sp_consensus_babe::digests::NextEpochDescriptor { authorities: Babe::authorities(), randomness: Babe::randomness(), - } + }, ); let consensus_digest = DigestItem::Consensus(BABE_ENGINE_ID, consensus_log.encode()); @@ -124,12 +112,7 @@ fn author_vrf_output_for_primary() { let (vrf_output, vrf_proof, vrf_randomness) = make_vrf_output(genesis_slot, &pairs[0]); let primary_pre_digest = make_primary_pre_digest(0, genesis_slot, vrf_output, vrf_proof); - System::initialize( - &1, - &Default::default(), - &primary_pre_digest, - Default::default(), - ); + System::initialize(&1, &Default::default(), &primary_pre_digest, Default::default()); Babe::do_initialize(1); assert_eq!(Babe::author_vrf_randomness(), Some(vrf_randomness)); @@ -147,14 +130,10 @@ fn author_vrf_output_for_secondary_vrf() { ext.execute_with(|| { let genesis_slot = Slot::from(10); let (vrf_output, vrf_proof, vrf_randomness) = make_vrf_output(genesis_slot, &pairs[0]); - let secondary_vrf_pre_digest = make_secondary_vrf_pre_digest(0, genesis_slot, vrf_output, vrf_proof); + let secondary_vrf_pre_digest = + 
make_secondary_vrf_pre_digest(0, genesis_slot, vrf_output, vrf_proof); - System::initialize( - &1, - &Default::default(), - &secondary_vrf_pre_digest, - Default::default(), - ); + System::initialize(&1, &Default::default(), &secondary_vrf_pre_digest, Default::default()); Babe::do_initialize(1); assert_eq!(Babe::author_vrf_randomness(), Some(vrf_randomness)); @@ -192,8 +171,10 @@ fn no_author_vrf_output_for_secondary_plain() { fn authority_index() { new_test_ext(4).execute_with(|| { assert_eq!( - Babe::find_author((&[(BABE_ENGINE_ID, &[][..])]).into_iter().cloned()), None, - "Trivially invalid authorities are ignored") + Babe::find_author((&[(BABE_ENGINE_ID, &[][..])]).into_iter().cloned()), + None, + "Trivially invalid authorities are ignored" + ) }) } @@ -237,7 +218,10 @@ fn can_estimate_current_epoch_progress() { Permill::from_percent(100) ); } else { - assert!(Babe::estimate_current_session_progress(i).0.unwrap() < Permill::from_percent(100)); + assert!( + Babe::estimate_current_session_progress(i).0.unwrap() < + Permill::from_percent(100) + ); } } @@ -287,7 +271,8 @@ fn can_enact_next_config() { c: next_next_config.c, allowed_slots: next_next_config.allowed_slots, }, - ).unwrap(); + ) + .unwrap(); progress_to_block(4); Babe::on_finalize(9); @@ -296,12 +281,11 @@ fn can_enact_next_config() { assert_eq!(EpochConfig::::get(), Some(next_config)); assert_eq!(NextEpochConfig::::get(), Some(next_next_config.clone())); - let consensus_log = sp_consensus_babe::ConsensusLog::NextConfigData( - NextConfigDescriptor::V1 { + let consensus_log = + sp_consensus_babe::ConsensusLog::NextConfigData(NextConfigDescriptor::V1 { c: next_next_config.c, allowed_slots: next_next_config.allowed_slots, - } - ); + }); let consensus_digest = DigestItem::Consensus(BABE_ENGINE_ID, consensus_log.encode()); assert_eq!(header.digest.logs[2], consensus_digest.clone()) @@ -313,29 +297,18 @@ fn only_root_can_enact_config_change() { use sp_runtime::DispatchError; new_test_ext(1).execute_with(|| { - 
let next_config = NextConfigDescriptor::V1 { - c: (1, 4), - allowed_slots: AllowedSlots::PrimarySlots, - }; + let next_config = + NextConfigDescriptor::V1 { c: (1, 4), allowed_slots: AllowedSlots::PrimarySlots }; - let res = Babe::plan_config_change( - Origin::none(), - next_config.clone(), - ); + let res = Babe::plan_config_change(Origin::none(), next_config.clone()); assert_noop!(res, DispatchError::BadOrigin); - let res = Babe::plan_config_change( - Origin::signed(1), - next_config.clone(), - ); + let res = Babe::plan_config_change(Origin::signed(1), next_config.clone()); assert_noop!(res, DispatchError::BadOrigin); - let res = Babe::plan_config_change( - Origin::root(), - next_config, - ); + let res = Babe::plan_config_change(Origin::root(), next_config); assert!(res.is_ok()); }); @@ -350,10 +323,7 @@ fn can_fetch_current_and_next_epoch_data() { }); // genesis authorities should be used for the first and second epoch - assert_eq!( - Babe::current_epoch().authorities, - Babe::next_epoch().authorities, - ); + assert_eq!(Babe::current_epoch().authorities, Babe::next_epoch().authorities,); // 1 era = 3 epochs // 1 epoch = 3 slots // Eras start from 0. @@ -420,11 +390,7 @@ fn report_equivocation_current_session_works() { assert_eq!( Staking::eras_stakers(1, validator), - pallet_staking::Exposure { - total: 10_000, - own: 10_000, - others: vec![], - }, + pallet_staking::Exposure { total: 10_000, own: 10_000, others: vec![] }, ); } @@ -445,10 +411,7 @@ fn report_equivocation_current_session_works() { ); // create the key ownership proof - let key = ( - sp_consensus_babe::KEY_TYPE, - &offending_authority_pair.public(), - ); + let key = (sp_consensus_babe::KEY_TYPE, &offending_authority_pair.public()); let key_owner_proof = Historical::prove(key).unwrap(); // report the equivocation @@ -460,35 +423,24 @@ fn report_equivocation_current_session_works() { start_era(2); // check that the balance of offending validator is slashed 100%. 
- assert_eq!( - Balances::total_balance(&offending_validator_id), - 10_000_000 - 10_000 - ); + assert_eq!(Balances::total_balance(&offending_validator_id), 10_000_000 - 10_000); assert_eq!(Staking::slashable_balance_of(&offending_validator_id), 0); assert_eq!( Staking::eras_stakers(2, offending_validator_id), - pallet_staking::Exposure { - total: 0, - own: 0, - others: vec![], - }, + pallet_staking::Exposure { total: 0, own: 0, others: vec![] }, ); // check that the balances of all other validators are left intact. for validator in &validators { if *validator == offending_validator_id { - continue; + continue } assert_eq!(Balances::total_balance(validator), 10_000_000); assert_eq!(Staking::slashable_balance_of(validator), 10_000); assert_eq!( Staking::eras_stakers(2, validator), - pallet_staking::Exposure { - total: 10_000, - own: 10_000, - others: vec![], - }, + pallet_staking::Exposure { total: 10_000, own: 10_000, others: vec![] }, ); } }) @@ -519,10 +471,7 @@ fn report_equivocation_old_session_works() { ); // create the key ownership proof - let key = ( - sp_consensus_babe::KEY_TYPE, - &offending_authority_pair.public(), - ); + let key = (sp_consensus_babe::KEY_TYPE, &offending_authority_pair.public()); let key_owner_proof = Historical::prove(key).unwrap(); // start a new era and report the equivocation @@ -531,10 +480,7 @@ fn report_equivocation_old_session_works() { // check the balance of the offending validator assert_eq!(Balances::total_balance(&offending_validator_id), 10_000_000); - assert_eq!( - Staking::slashable_balance_of(&offending_validator_id), - 10_000 - ); + assert_eq!(Staking::slashable_balance_of(&offending_validator_id), 10_000); // report the equivocation Babe::report_equivocation_unsigned(Origin::none(), equivocation_proof, key_owner_proof) @@ -545,18 +491,11 @@ fn report_equivocation_old_session_works() { start_era(3); // check that the balance of offending validator is slashed 100%. 
- assert_eq!( - Balances::total_balance(&offending_validator_id), - 10_000_000 - 10_000 - ); + assert_eq!(Balances::total_balance(&offending_validator_id), 10_000_000 - 10_000); assert_eq!(Staking::slashable_balance_of(&offending_validator_id), 0); assert_eq!( Staking::eras_stakers(3, offending_validator_id), - pallet_staking::Exposure { - total: 0, - own: 0, - others: vec![], - }, + pallet_staking::Exposure { total: 0, own: 0, others: vec![] }, ); }) } @@ -585,10 +524,7 @@ fn report_equivocation_invalid_key_owner_proof() { ); // create the key ownership proof - let key = ( - sp_consensus_babe::KEY_TYPE, - &offending_authority_pair.public(), - ); + let key = (sp_consensus_babe::KEY_TYPE, &offending_authority_pair.public()); let mut key_owner_proof = Historical::prove(key).unwrap(); // we change the session index in the key ownership proof @@ -640,10 +576,7 @@ fn report_equivocation_invalid_equivocation_proof() { .unwrap(); // create the key ownership proof - let key = ( - sp_consensus_babe::KEY_TYPE, - &offending_authority_pair.public(), - ); + let key = (sp_consensus_babe::KEY_TYPE, &offending_authority_pair.public()); let key_owner_proof = Historical::prove(key).unwrap(); let assert_invalid_equivocation = |equivocation_proof| { @@ -753,10 +686,7 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { CurrentSlot::::get(), ); - let key = ( - sp_consensus_babe::KEY_TYPE, - &offending_authority_pair.public(), - ); + let key = (sp_consensus_babe::KEY_TYPE, &offending_authority_pair.public()); let key_owner_proof = Historical::prove(key).unwrap(); let inner = @@ -815,23 +745,19 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { fn report_equivocation_has_valid_weight() { // the weight depends on the size of the validator set, // but there's a lower bound of 100 validators. 
- assert!( - (1..=100) - .map(::WeightInfo::report_equivocation) - .collect::>() - .windows(2) - .all(|w| w[0] == w[1]) - ); + assert!((1..=100) + .map(::WeightInfo::report_equivocation) + .collect::>() + .windows(2) + .all(|w| w[0] == w[1])); // after 100 validators the weight should keep increasing // with every extra validator. - assert!( - (100..=1000) - .map(::WeightInfo::report_equivocation) - .collect::>() - .windows(2) - .all(|w| w[0] < w[1]) - ); + assert!((100..=1000) + .map(::WeightInfo::report_equivocation) + .collect::>() + .windows(2) + .all(|w| w[0] < w[1])); } #[test] @@ -848,11 +774,9 @@ fn valid_equivocation_reports_dont_pay_fees() { generate_equivocation_proof(0, &offending_authority_pair, CurrentSlot::::get()); // create the key ownership proof. - let key_owner_proof = Historical::prove(( - sp_consensus_babe::KEY_TYPE, - &offending_authority_pair.public(), - )) - .unwrap(); + let key_owner_proof = + Historical::prove((sp_consensus_babe::KEY_TYPE, &offending_authority_pair.public())) + .unwrap(); // check the dispatch info for the call. 
let info = Call::::report_equivocation_unsigned( @@ -894,9 +818,7 @@ fn valid_equivocation_reports_dont_pay_fees() { #[test] fn add_epoch_configurations_migration_works() { - use frame_support::storage::migration::{ - put_storage_value, get_storage_value, - }; + use frame_support::storage::migration::{get_storage_value, put_storage_value}; impl crate::migrations::BabePalletPrefix for Test { fn pallet_prefix() -> &'static str { @@ -905,38 +827,31 @@ fn add_epoch_configurations_migration_works() { } new_test_ext(1).execute_with(|| { - let next_config_descriptor = NextConfigDescriptor::V1 { - c: (3, 4), - allowed_slots: AllowedSlots::PrimarySlots - }; + let next_config_descriptor = + NextConfigDescriptor::V1 { c: (3, 4), allowed_slots: AllowedSlots::PrimarySlots }; - put_storage_value( - b"Babe", - b"NextEpochConfig", - &[], - Some(next_config_descriptor.clone()) - ); + put_storage_value(b"Babe", b"NextEpochConfig", &[], Some(next_config_descriptor.clone())); assert!(get_storage_value::>( b"Babe", b"NextEpochConfig", &[], - ).is_some()); + ) + .is_some()); let current_epoch = BabeEpochConfiguration { c: (1, 4), allowed_slots: sp_consensus_babe::AllowedSlots::PrimarySlots, }; - crate::migrations::add_epoch_configuration::( - current_epoch.clone() - ); + crate::migrations::add_epoch_configuration::(current_epoch.clone()); assert!(get_storage_value::>( b"Babe", b"NextEpochConfig", &[], - ).is_none()); + ) + .is_none()); assert_eq!(EpochConfig::::get(), Some(current_epoch)); assert_eq!(PendingEpochConfigChange::::get(), Some(next_config_descriptor)); diff --git a/frame/balances/src/benchmarking.rs b/frame/balances/src/benchmarking.rs index 688bcbc262bdb..97c3c4309a80d 100644 --- a/frame/balances/src/benchmarking.rs +++ b/frame/balances/src/benchmarking.rs @@ -21,8 +21,10 @@ use super::*; +use frame_benchmarking::{ + account, benchmarks_instance_pallet, impl_benchmark_test_suite, whitelisted_caller, +}; use frame_system::RawOrigin; -use 
frame_benchmarking::{benchmarks_instance_pallet, account, whitelisted_caller, impl_benchmark_test_suite}; use sp_runtime::traits::Bounded; use crate::Pallet as Balances; @@ -31,7 +33,6 @@ const SEED: u32 = 0; // existential deposit multiplier const ED_MULTIPLIER: u32 = 10; - benchmarks_instance_pallet! { // Benchmark `transfer` extrinsic with the worst possible conditions: // * Transfer will kill the sender account. diff --git a/frame/balances/src/lib.rs b/frame/balances/src/lib.rs index 7ddd3ceda3c93..8e4886c2f0039 100644 --- a/frame/balances/src/lib.rs +++ b/frame/balances/src/lib.rs @@ -150,52 +150,60 @@ #[macro_use] mod tests; -mod tests_local; +mod benchmarking; mod tests_composite; +mod tests_local; mod tests_reentrancy; -mod benchmarking; pub mod weights; -use sp_std::prelude::*; -use sp_std::{cmp, result, mem, fmt::Debug, ops::BitOr}; -use codec::{Codec, Encode, Decode, MaxEncodedLen}; -use scale_info::TypeInfo; +pub use self::imbalances::{NegativeImbalance, PositiveImbalance}; +use codec::{Codec, Decode, Encode, MaxEncodedLen}; +#[cfg(feature = "std")] +use frame_support::traits::GenesisBuild; use frame_support::{ - ensure, WeakBoundedVec, + ensure, traits::{ - Currency, OnUnbalanced, TryDrop, StoredMap, - WithdrawReasons, LockIdentifier, LockableCurrency, ExistenceRequirement, - Imbalance, SignedImbalance, ReservableCurrency, Get, ExistenceRequirement::{AllowDeath, KeepAlive}, - NamedReservableCurrency, - tokens::{fungible, DepositConsequence, WithdrawConsequence, BalanceStatus as Status}, - } + tokens::{fungible, BalanceStatus as Status, DepositConsequence, WithdrawConsequence}, + Currency, ExistenceRequirement, + ExistenceRequirement::{AllowDeath, KeepAlive}, + Get, Imbalance, LockIdentifier, LockableCurrency, NamedReservableCurrency, OnUnbalanced, + ReservableCurrency, SignedImbalance, StoredMap, TryDrop, WithdrawReasons, + }, + WeakBoundedVec, }; -#[cfg(feature = "std")] -use frame_support::traits::GenesisBuild; +use frame_system as system; +use 
scale_info::TypeInfo; use sp_runtime::{ - RuntimeDebug, DispatchResult, DispatchError, ArithmeticError, traits::{ - Zero, AtLeast32BitUnsigned, StaticLookup, CheckedAdd, CheckedSub, - MaybeSerializeDeserialize, Saturating, Bounded, + AtLeast32BitUnsigned, Bounded, CheckedAdd, CheckedSub, MaybeSerializeDeserialize, + Saturating, StaticLookup, Zero, }, + ArithmeticError, DispatchError, DispatchResult, RuntimeDebug, }; -use frame_system as system; -pub use self::imbalances::{PositiveImbalance, NegativeImbalance}; +use sp_std::{cmp, fmt::Debug, mem, ops::BitOr, prelude::*, result}; pub use weights::WeightInfo; pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::config] pub trait Config: frame_system::Config { /// The balance of an account. - type Balance: Parameter + Member + AtLeast32BitUnsigned + Codec + Default + Copy + - MaybeSerializeDeserialize + Debug + MaxEncodedLen + TypeInfo; + type Balance: Parameter + + Member + + AtLeast32BitUnsigned + + Codec + + Default + + Copy + + MaybeSerializeDeserialize + + Debug + + MaxEncodedLen + + TypeInfo; /// Handler for the unbalanced reduction when removing a dust account. 
type DustRemoval: OnUnbalanced>; @@ -229,7 +237,7 @@ pub mod pallet { #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] #[pallet::generate_storage_info] - pub struct Pallet(PhantomData<(T, I)>); + pub struct Pallet(PhantomData<(T, I)>); #[pallet::call] impl, I: 'static> Pallet { @@ -268,7 +276,12 @@ pub mod pallet { ) -> DispatchResultWithPostInfo { let transactor = ensure_signed(origin)?; let dest = T::Lookup::lookup(dest)?; - >::transfer(&transactor, &dest, value, ExistenceRequirement::AllowDeath)?; + >::transfer( + &transactor, + &dest, + value, + ExistenceRequirement::AllowDeath, + )?; Ok(().into()) } @@ -346,7 +359,12 @@ pub mod pallet { ensure_root(origin)?; let source = T::Lookup::lookup(source)?; let dest = T::Lookup::lookup(dest)?; - >::transfer(&source, &dest, value, ExistenceRequirement::AllowDeath)?; + >::transfer( + &source, + &dest, + value, + ExistenceRequirement::AllowDeath, + )?; Ok(().into()) } @@ -402,7 +420,12 @@ pub mod pallet { let reducible_balance = Self::reducible_balance(&transactor, keep_alive); let dest = T::Lookup::lookup(dest)?; let keep_alive = if keep_alive { KeepAlive } else { AllowDeath }; - >::transfer(&transactor, &dest, reducible_balance, keep_alive.into())?; + >::transfer( + &transactor, + &dest, + reducible_balance, + keep_alive.into(), + )?; Ok(().into()) } } @@ -496,18 +519,15 @@ pub mod pallet { Blake2_128Concat, T::AccountId, BoundedVec, T::MaxReserves>, - ValueQuery + ValueQuery, >; /// Storage version of the pallet. /// /// This is set to v2.0.0 for new networks. 
#[pallet::storage] - pub(super) type StorageVersion, I: 'static = ()> = StorageValue< - _, - Releases, - ValueQuery - >; + pub(super) type StorageVersion, I: 'static = ()> = + StorageValue<_, Releases, ValueQuery>; #[pallet::genesis_config] pub struct GenesisConfig, I: 'static = ()> { @@ -517,18 +537,14 @@ pub mod pallet { #[cfg(feature = "std")] impl, I: 'static> Default for GenesisConfig { fn default() -> Self { - Self { - balances: Default::default(), - } + Self { balances: Default::default() } } } #[pallet::genesis_build] impl, I: 'static> GenesisBuild for GenesisConfig { fn build(&self) { - let total = self.balances - .iter() - .fold(Zero::zero(), |acc: T::Balance, &(_, n)| acc + n); + let total = self.balances.iter().fold(Zero::zero(), |acc: T::Balance, &(_, n)| acc + n); >::put(total); >::put(Releases::V2_0_0); @@ -541,12 +557,21 @@ pub mod pallet { } // ensure no duplicates exist. - let endowed_accounts = self.balances.iter().map(|(x, _)| x).cloned().collect::>(); + let endowed_accounts = self + .balances + .iter() + .map(|(x, _)| x) + .cloned() + .collect::>(); - assert!(endowed_accounts.len() == self.balances.len(), "duplicate balances in genesis."); + assert!( + endowed_accounts.len() == self.balances.len(), + "duplicate balances in genesis." + ); for &(ref who, free) in self.balances.iter() { - assert!(T::AccountStore::insert(who, AccountData { free, ..Default::default() }).is_ok()); + assert!(T::AccountStore::insert(who, AccountData { free, ..Default::default() }) + .is_ok()); } } } @@ -564,10 +589,7 @@ impl, I: 'static> GenesisConfig { /// Direct implementation of `GenesisBuild::assimilate_storage`. /// /// Kept in order not to break dependency. 
- pub fn assimilate_storage( - &self, - storage: &mut sp_runtime::Storage - ) -> Result<(), String> { + pub fn assimilate_storage(&self, storage: &mut sp_runtime::Storage) -> Result<(), String> { >::assimilate_storage(self, storage) } } @@ -598,7 +620,9 @@ impl From for Reasons { impl BitOr for Reasons { type Output = Reasons; fn bitor(self, other: Reasons) -> Reasons { - if self == other { return self } + if self == other { + return self + } Reasons::All } } @@ -684,7 +708,9 @@ impl Default for Releases { } } -pub struct DustCleaner, I: 'static = ()>(Option<(T::AccountId, NegativeImbalance)>); +pub struct DustCleaner, I: 'static = ()>( + Option<(T::AccountId, NegativeImbalance)>, +); impl, I: 'static> Drop for DustCleaner { fn drop(&mut self) { @@ -752,7 +778,9 @@ impl, I: 'static> Pallet { amount: T::Balance, account: &AccountData, ) -> DepositConsequence { - if amount.is_zero() { return DepositConsequence::Success } + if amount.is_zero() { + return DepositConsequence::Success + } if TotalIssuance::::get().checked_add(&amount).is_none() { return DepositConsequence::Overflow @@ -778,7 +806,9 @@ impl, I: 'static> Pallet { amount: T::Balance, account: &AccountData, ) -> WithdrawConsequence { - if amount.is_zero() { return WithdrawConsequence::Success } + if amount.is_zero() { + return WithdrawConsequence::Success + } if TotalIssuance::::get().checked_sub(&amount).is_none() { return WithdrawConsequence::Underflow @@ -847,11 +877,10 @@ impl, I: 'static> Pallet { who: &T::AccountId, f: impl FnOnce(&mut AccountData, bool) -> Result, ) -> Result { - Self::try_mutate_account_with_dust(who, f) - .map(|(result, dust_cleaner)| { - drop(dust_cleaner); - result - }) + Self::try_mutate_account_with_dust(who, f).map(|(result, dust_cleaner)| { + drop(dust_cleaner); + result + }) } /// Mutate an account to some new value, or delete it entirely with `None`. 
Will enforce @@ -944,7 +973,6 @@ impl, I: 'static> Pallet { } } - /// Move the reserved balance of one account into the balance of another, according to `status`. /// /// Is a no-op if: @@ -957,13 +985,15 @@ impl, I: 'static> Pallet { best_effort: bool, status: Status, ) -> Result { - if value.is_zero() { return Ok(Zero::zero()) } + if value.is_zero() { + return Ok(Zero::zero()) + } if slashed == beneficiary { return match status { Status::Free => Ok(Self::unreserve(slashed, value)), Status::Reserved => Ok(value.saturating_sub(Self::reserved_balance(slashed))), - }; + } } let ((actual, _maybe_one_dust), _maybe_other_dust) = Self::try_mutate_account_with_dust( @@ -976,21 +1006,30 @@ impl, I: 'static> Pallet { let actual = cmp::min(from_account.reserved, value); ensure!(best_effort || actual == value, Error::::InsufficientBalance); match status { - Status::Free => to_account.free = to_account.free - .checked_add(&actual) - .ok_or(ArithmeticError::Overflow)?, - Status::Reserved => to_account.reserved = to_account.reserved - .checked_add(&actual) - .ok_or(ArithmeticError::Overflow)?, + Status::Free => + to_account.free = to_account + .free + .checked_add(&actual) + .ok_or(ArithmeticError::Overflow)?, + Status::Reserved => + to_account.reserved = to_account + .reserved + .checked_add(&actual) + .ok_or(ArithmeticError::Overflow)?, } from_account.reserved -= actual; Ok(actual) - } + }, ) - } + }, )?; - Self::deposit_event(Event::ReserveRepatriated(slashed.clone(), beneficiary.clone(), actual, status)); + Self::deposit_event(Event::ReserveRepatriated( + slashed.clone(), + beneficiary.clone(), + actual, + status, + )); Ok(actual) } } @@ -1016,21 +1055,27 @@ impl, I: 'static> fungible::Inspect for Pallet } else { // `must_remain_to_exist` is the part of liquid balance which must remain to keep total over // ED. 
- let must_remain_to_exist = T::ExistentialDeposit::get().saturating_sub(a.total() - liquid); + let must_remain_to_exist = + T::ExistentialDeposit::get().saturating_sub(a.total() - liquid); liquid.saturating_sub(must_remain_to_exist) } } fn can_deposit(who: &T::AccountId, amount: Self::Balance) -> DepositConsequence { Self::deposit_consequence(who, amount, &Self::account(who)) } - fn can_withdraw(who: &T::AccountId, amount: Self::Balance) -> WithdrawConsequence { + fn can_withdraw( + who: &T::AccountId, + amount: Self::Balance, + ) -> WithdrawConsequence { Self::withdraw_consequence(who, amount, &Self::account(who)) } } impl, I: 'static> fungible::Mutate for Pallet { fn mint_into(who: &T::AccountId, amount: Self::Balance) -> DispatchResult { - if amount.is_zero() { return Ok(()) } + if amount.is_zero() { + return Ok(()) + } Self::try_mutate_account(who, |account, _is_new| -> DispatchResult { Self::deposit_consequence(who, amount, &account).into_result()?; account.free += amount; @@ -1040,14 +1085,22 @@ impl, I: 'static> fungible::Mutate for Pallet { Ok(()) } - fn burn_from(who: &T::AccountId, amount: Self::Balance) -> Result { - if amount.is_zero() { return Ok(Self::Balance::zero()); } - let actual = Self::try_mutate_account(who, |account, _is_new| -> Result { - let extra = Self::withdraw_consequence(who, amount, &account).into_result()?; - let actual = amount + extra; - account.free -= actual; - Ok(actual) - })?; + fn burn_from( + who: &T::AccountId, + amount: Self::Balance, + ) -> Result { + if amount.is_zero() { + return Ok(Self::Balance::zero()) + } + let actual = Self::try_mutate_account( + who, + |account, _is_new| -> Result { + let extra = Self::withdraw_consequence(who, amount, &account).into_result()?; + let actual = amount + extra; + account.free -= actual; + Ok(actual) + }, + )?; TotalIssuance::::mutate(|t| *t -= actual); Ok(actual) } @@ -1061,8 +1114,7 @@ impl, I: 'static> fungible::Transfer for Pallet keep_alive: bool, ) -> Result { let er = if 
keep_alive { KeepAlive } else { AllowDeath }; - >::transfer(source, dest, amount, er) - .map(|_| amount) + >::transfer(source, dest, amount, er).map(|_| amount) } } @@ -1084,7 +1136,9 @@ impl, I: 'static> fungible::InspectHold for Pallet bool { let a = Self::account(who); let min_balance = T::ExistentialDeposit::get().max(a.frozen(Reasons::All)); - if a.reserved.checked_add(&amount).is_none() { return false } + if a.reserved.checked_add(&amount).is_none() { + return false + } // We require it to be min_balance + amount to ensure that the full reserved funds may be // slashed without compromising locked funds or destroying the account. let required_free = match min_balance.checked_add(&amount) { @@ -1096,7 +1150,9 @@ impl, I: 'static> fungible::InspectHold for Pallet, I: 'static> fungible::MutateHold for Pallet { fn hold(who: &T::AccountId, amount: Self::Balance) -> DispatchResult { - if amount.is_zero() { return Ok(()) } + if amount.is_zero() { + return Ok(()) + } ensure!(Self::can_reserve(who, amount), Error::::InsufficientBalance); Self::mutate_account(who, |a| { a.free -= amount; @@ -1104,10 +1160,14 @@ impl, I: 'static> fungible::MutateHold for Pallet Result - { - if amount.is_zero() { return Ok(amount) } + fn release( + who: &T::AccountId, + amount: Self::Balance, + best_effort: bool, + ) -> Result { + if amount.is_zero() { + return Ok(amount) + } // Done on a best-effort basis. Self::try_mutate_account(who, |a, _| { let new_free = a.free.saturating_add(amount.min(a.reserved)); @@ -1134,12 +1194,9 @@ impl, I: 'static> fungible::MutateHold for Pallet, I: 'static> Drop for PositiveImbalance { /// Basic drop handler will just square up the total issuance. fn drop(&mut self) { - >::mutate( - |v| *v = v.saturating_add(self.0) - ); + >::mutate(|v| *v = v.saturating_add(self.0)); } } impl, I: 'static> Drop for NegativeImbalance { /// Basic drop handler will just square up the total issuance. 
fn drop(&mut self) { - >::mutate( - |v| *v = v.saturating_sub(self.0) - ); + >::mutate(|v| *v = v.saturating_sub(self.0)); } } } -impl, I: 'static> Currency for Pallet where - T::Balance: MaybeSerializeDeserialize + Debug +impl, I: 'static> Currency for Pallet +where + T::Balance: MaybeSerializeDeserialize + Debug, { type Balance = T::Balance; type PositiveImbalance = PositiveImbalance; @@ -1317,7 +1371,9 @@ impl, I: 'static> Currency for Pallet where // Check if `value` amount of free balance can be slashed from `who`. fn can_slash(who: &T::AccountId, value: Self::Balance) -> bool { - if value.is_zero() { return true } + if value.is_zero() { + return true + } Self::free_balance(who) >= value } @@ -1332,7 +1388,9 @@ impl, I: 'static> Currency for Pallet where // Burn funds from the total issuance, returning a positive imbalance for the amount burned. // Is a no-op if amount to be burned is zero. fn burn(mut amount: Self::Balance) -> Self::PositiveImbalance { - if amount.is_zero() { return PositiveImbalance::zero() } + if amount.is_zero() { + return PositiveImbalance::zero() + } >::mutate(|issued| { *issued = issued.checked_sub(&amount).unwrap_or_else(|| { amount = *issued; @@ -1346,13 +1404,15 @@ impl, I: 'static> Currency for Pallet where // for the amount issued. // Is a no-op if amount to be issued it zero. 
fn issue(mut amount: Self::Balance) -> Self::NegativeImbalance { - if amount.is_zero() { return NegativeImbalance::zero() } - >::mutate(|issued| + if amount.is_zero() { + return NegativeImbalance::zero() + } + >::mutate(|issued| { *issued = issued.checked_add(&amount).unwrap_or_else(|| { amount = Self::Balance::max_value() - *issued; Self::Balance::max_value() }) - ); + }); NegativeImbalance::new(amount) } @@ -1374,7 +1434,9 @@ impl, I: 'static> Currency for Pallet where reasons: WithdrawReasons, new_balance: T::Balance, ) -> DispatchResult { - if amount.is_zero() { return Ok(()) } + if amount.is_zero() { + return Ok(()) + } let min_balance = Self::account(who).frozen(reasons.into()); ensure!(new_balance >= min_balance, Error::::LiquidityRestrictions); Ok(()) @@ -1388,7 +1450,9 @@ impl, I: 'static> Currency for Pallet where value: Self::Balance, existence_requirement: ExistenceRequirement, ) -> DispatchResult { - if value.is_zero() || transactor == dest { return Ok(()) } + if value.is_zero() || transactor == dest { + return Ok(()) + } Self::try_mutate_account_with_dust( dest, @@ -1396,12 +1460,15 @@ impl, I: 'static> Currency for Pallet where Self::try_mutate_account_with_dust( transactor, |from_account, _| -> DispatchResult { - from_account.free = from_account.free.checked_sub(&value) + from_account.free = from_account + .free + .checked_sub(&value) .ok_or(Error::::InsufficientBalance)?; // NOTE: total stake being stored in the same type means that this could never overflow // but better to be safe than sorry. 
- to_account.free = to_account.free.checked_add(&value).ok_or(ArithmeticError::Overflow)?; + to_account.free = + to_account.free.checked_add(&value).ok_or(ArithmeticError::Overflow)?; let ed = T::ExistentialDeposit::get(); ensure!(to_account.total() >= ed, Error::::ExistentialDeposit); @@ -1411,18 +1478,24 @@ impl, I: 'static> Currency for Pallet where value, WithdrawReasons::TRANSFER, from_account.free, - ).map_err(|_| Error::::LiquidityRestrictions)?; + ) + .map_err(|_| Error::::LiquidityRestrictions)?; // TODO: This is over-conservative. There may now be other providers, and this pallet // may not even be a provider. let allow_death = existence_requirement == ExistenceRequirement::AllowDeath; - let allow_death = allow_death && !system::Pallet::::is_provider_required(transactor); - ensure!(allow_death || from_account.total() >= ed, Error::::KeepAlive); + let allow_death = + allow_death && !system::Pallet::::is_provider_required(transactor); + ensure!( + allow_death || from_account.total() >= ed, + Error::::KeepAlive + ); Ok(()) - } - ).map(|(_, maybe_dust_cleaner)| maybe_dust_cleaner) - } + }, + ) + .map(|(_, maybe_dust_cleaner)| maybe_dust_cleaner) + }, )?; // Emit transfer event. @@ -1440,23 +1513,30 @@ impl, I: 'static> Currency for Pallet where /// from in extreme circumstances. `can_slash()` should be used prior to `slash()` to avoid having /// to draw from reserved funds, however we err on the side of punishment if things are inconsistent /// or `can_slash` wasn't used appropriately. 
- fn slash( - who: &T::AccountId, - value: Self::Balance - ) -> (Self::NegativeImbalance, Self::Balance) { - if value.is_zero() { return (NegativeImbalance::zero(), Zero::zero()) } - if Self::total_balance(&who).is_zero() { return (NegativeImbalance::zero(), value) } + fn slash(who: &T::AccountId, value: Self::Balance) -> (Self::NegativeImbalance, Self::Balance) { + if value.is_zero() { + return (NegativeImbalance::zero(), Zero::zero()) + } + if Self::total_balance(&who).is_zero() { + return (NegativeImbalance::zero(), value) + } for attempt in 0..2 { - match Self::try_mutate_account(who, - |account, _is_new| -> Result<(Self::NegativeImbalance, Self::Balance), DispatchError> { + match Self::try_mutate_account( + who, + |account, + _is_new| + -> Result<(Self::NegativeImbalance, Self::Balance), DispatchError> { // Best value is the most amount we can slash following liveness rules. let best_value = match attempt { // First attempt we try to slash the full amount, and see if liveness issues happen. 0 => value, // If acting as a critical provider (i.e. first attempt failed), then slash // as much as possible while leaving at least at ED. - _ => value.min((account.free + account.reserved).saturating_sub(T::ExistentialDeposit::get())), + _ => value.min( + (account.free + account.reserved) + .saturating_sub(T::ExistentialDeposit::get()), + ), }; let free_slash = cmp::min(account.free, best_value); @@ -1478,7 +1558,7 @@ impl, I: 'static> Currency for Pallet where value - free_slash, // Safe because value is gt or eq to total slashed )) } - } + }, ) { Ok(r) => return r, Err(_) => (), @@ -1494,15 +1574,20 @@ impl, I: 'static> Currency for Pallet where /// Is a no-op if the `value` to be deposited is zero. 
fn deposit_into_existing( who: &T::AccountId, - value: Self::Balance + value: Self::Balance, ) -> Result { - if value.is_zero() { return Ok(PositiveImbalance::zero()) } + if value.is_zero() { + return Ok(PositiveImbalance::zero()) + } - Self::try_mutate_account(who, |account, is_new| -> Result { - ensure!(!is_new, Error::::DeadAccount); - account.free = account.free.checked_add(&value).ok_or(ArithmeticError::Overflow)?; - Ok(PositiveImbalance::new(value)) - }) + Self::try_mutate_account( + who, + |account, is_new| -> Result { + ensure!(!is_new, Error::::DeadAccount); + account.free = account.free.checked_add(&value).ok_or(ArithmeticError::Overflow)?; + Ok(PositiveImbalance::new(value)) + }, + ) } /// Deposit some `value` into the free balance of `who`, possibly creating a new account. @@ -1512,26 +1597,28 @@ impl, I: 'static> Currency for Pallet where /// - the `value` to be deposited is less than the required ED and the account does not yet exist; or /// - the deposit would necessitate the account to exist and there are no provider references; or /// - `value` is so large it would cause the balance of `who` to overflow. - fn deposit_creating( - who: &T::AccountId, - value: Self::Balance, - ) -> Self::PositiveImbalance { - if value.is_zero() { return Self::PositiveImbalance::zero() } - - let r = Self::try_mutate_account(who, |account, is_new| -> Result { - - let ed = T::ExistentialDeposit::get(); - ensure!(value >= ed || !is_new, Error::::ExistentialDeposit); + fn deposit_creating(who: &T::AccountId, value: Self::Balance) -> Self::PositiveImbalance { + if value.is_zero() { + return Self::PositiveImbalance::zero() + } - // defensive only: overflow should never happen, however in case it does, then this - // operation is a no-op. 
- account.free = match account.free.checked_add(&value) { - Some(x) => x, - None => return Ok(Self::PositiveImbalance::zero()), - }; + let r = Self::try_mutate_account( + who, + |account, is_new| -> Result { + let ed = T::ExistentialDeposit::get(); + ensure!(value >= ed || !is_new, Error::::ExistentialDeposit); + + // defensive only: overflow should never happen, however in case it does, then this + // operation is a no-op. + account.free = match account.free.checked_add(&value) { + Some(x) => x, + None => return Ok(Self::PositiveImbalance::zero()), + }; - Ok(PositiveImbalance::new(value)) - }).unwrap_or_else(|_| Self::PositiveImbalance::zero()); + Ok(PositiveImbalance::new(value)) + }, + ) + .unwrap_or_else(|_| Self::PositiveImbalance::zero()); r } @@ -1545,70 +1632,79 @@ impl, I: 'static> Currency for Pallet where reasons: WithdrawReasons, liveness: ExistenceRequirement, ) -> result::Result { - if value.is_zero() { return Ok(NegativeImbalance::zero()); } + if value.is_zero() { + return Ok(NegativeImbalance::zero()) + } - Self::try_mutate_account(who, |account, _| - -> Result - { - let new_free_account = account.free.checked_sub(&value) - .ok_or(Error::::InsufficientBalance)?; + Self::try_mutate_account( + who, + |account, _| -> Result { + let new_free_account = + account.free.checked_sub(&value).ok_or(Error::::InsufficientBalance)?; - // bail if we need to keep the account alive and this would kill it. - let ed = T::ExistentialDeposit::get(); - let would_be_dead = new_free_account + account.reserved < ed; - let would_kill = would_be_dead && account.free + account.reserved >= ed; - ensure!(liveness == AllowDeath || !would_kill, Error::::KeepAlive); + // bail if we need to keep the account alive and this would kill it. 
+ let ed = T::ExistentialDeposit::get(); + let would_be_dead = new_free_account + account.reserved < ed; + let would_kill = would_be_dead && account.free + account.reserved >= ed; + ensure!(liveness == AllowDeath || !would_kill, Error::::KeepAlive); - Self::ensure_can_withdraw(who, value, reasons, new_free_account)?; + Self::ensure_can_withdraw(who, value, reasons, new_free_account)?; - account.free = new_free_account; + account.free = new_free_account; - Ok(NegativeImbalance::new(value)) - }) + Ok(NegativeImbalance::new(value)) + }, + ) } /// Force the new free balance of a target account `who` to some new value `balance`. - fn make_free_balance_be(who: &T::AccountId, value: Self::Balance) - -> SignedImbalance - { - Self::try_mutate_account(who, |account, is_new| - -> Result, DispatchError> - { - let ed = T::ExistentialDeposit::get(); - let total = value.saturating_add(account.reserved); - // If we're attempting to set an existing account to less than ED, then - // bypass the entire operation. It's a no-op if you follow it through, but - // since this is an instance where we might account for a negative imbalance - // (in the dust cleaner of set_account) before we account for its actual - // equal and opposite cause (returned as an Imbalance), then in the - // instance that there's no other accounts on the system at all, we might - // underflow the issuance and our arithmetic will be off. 
- ensure!(total >= ed || !is_new, Error::::ExistentialDeposit); - - let imbalance = if account.free <= value { - SignedImbalance::Positive(PositiveImbalance::new(value - account.free)) - } else { - SignedImbalance::Negative(NegativeImbalance::new(account.free - value)) - }; - account.free = value; - Ok(imbalance) - }).unwrap_or_else(|_| SignedImbalance::Positive(Self::PositiveImbalance::zero())) + fn make_free_balance_be( + who: &T::AccountId, + value: Self::Balance, + ) -> SignedImbalance { + Self::try_mutate_account( + who, + |account, + is_new| + -> Result, DispatchError> { + let ed = T::ExistentialDeposit::get(); + let total = value.saturating_add(account.reserved); + // If we're attempting to set an existing account to less than ED, then + // bypass the entire operation. It's a no-op if you follow it through, but + // since this is an instance where we might account for a negative imbalance + // (in the dust cleaner of set_account) before we account for its actual + // equal and opposite cause (returned as an Imbalance), then in the + // instance that there's no other accounts on the system at all, we might + // underflow the issuance and our arithmetic will be off. + ensure!(total >= ed || !is_new, Error::::ExistentialDeposit); + + let imbalance = if account.free <= value { + SignedImbalance::Positive(PositiveImbalance::new(value - account.free)) + } else { + SignedImbalance::Negative(NegativeImbalance::new(account.free - value)) + }; + account.free = value; + Ok(imbalance) + }, + ) + .unwrap_or_else(|_| SignedImbalance::Positive(Self::PositiveImbalance::zero())) } } -impl, I: 'static> ReservableCurrency for Pallet where - T::Balance: MaybeSerializeDeserialize + Debug +impl, I: 'static> ReservableCurrency for Pallet +where + T::Balance: MaybeSerializeDeserialize + Debug, { /// Check if `who` can reserve `value` from their free balance. /// /// Always `true` if value to be reserved is zero. 
fn can_reserve(who: &T::AccountId, value: Self::Balance) -> bool { - if value.is_zero() { return true } - Self::account(who).free - .checked_sub(&value) - .map_or(false, |new_balance| - Self::ensure_can_withdraw(who, value, WithdrawReasons::RESERVE, new_balance).is_ok() - ) + if value.is_zero() { + return true + } + Self::account(who).free.checked_sub(&value).map_or(false, |new_balance| { + Self::ensure_can_withdraw(who, value, WithdrawReasons::RESERVE, new_balance).is_ok() + }) } fn reserved_balance(who: &T::AccountId) -> Self::Balance { @@ -1619,11 +1715,15 @@ impl, I: 'static> ReservableCurrency for Pallet /// /// Is a no-op if value to be reserved is zero. fn reserve(who: &T::AccountId, value: Self::Balance) -> DispatchResult { - if value.is_zero() { return Ok(()) } + if value.is_zero() { + return Ok(()) + } Self::try_mutate_account(who, |account, _| -> DispatchResult { - account.free = account.free.checked_sub(&value).ok_or(Error::::InsufficientBalance)?; - account.reserved = account.reserved.checked_add(&value).ok_or(ArithmeticError::Overflow)?; + account.free = + account.free.checked_sub(&value).ok_or(Error::::InsufficientBalance)?; + account.reserved = + account.reserved.checked_add(&value).ok_or(ArithmeticError::Overflow)?; Self::ensure_can_withdraw(&who, value.clone(), WithdrawReasons::RESERVE, account.free) })?; @@ -1635,8 +1735,12 @@ impl, I: 'static> ReservableCurrency for Pallet /// /// Is a no-op if the value to be unreserved is zero or the account does not exist. 
fn unreserve(who: &T::AccountId, value: Self::Balance) -> Self::Balance { - if value.is_zero() { return Zero::zero() } - if Self::total_balance(&who).is_zero() { return value } + if value.is_zero() { + return Zero::zero() + } + if Self::total_balance(&who).is_zero() { + return value + } let actual = match Self::mutate_account(who, |account| { let actual = cmp::min(account.reserved, value); @@ -1652,7 +1756,7 @@ impl, I: 'static> ReservableCurrency for Pallet // If it ever does, then we should fail gracefully though, indicating that nothing // could be done. return value - } + }, }; Self::deposit_event(Event::Unreserved(who.clone(), actual.clone())); @@ -1665,10 +1769,14 @@ impl, I: 'static> ReservableCurrency for Pallet /// Is a no-op if the value to be slashed is zero or the account does not exist. fn slash_reserved( who: &T::AccountId, - value: Self::Balance + value: Self::Balance, ) -> (Self::NegativeImbalance, Self::Balance) { - if value.is_zero() { return (NegativeImbalance::zero(), Zero::zero()) } - if Self::total_balance(&who).is_zero() { return (NegativeImbalance::zero(), value) } + if value.is_zero() { + return (NegativeImbalance::zero(), Zero::zero()) + } + if Self::total_balance(&who).is_zero() { + return (NegativeImbalance::zero(), value) + } // NOTE: `mutate_account` may fail if it attempts to reduce the balance to the point that an // account is attempted to be illegally destroyed. @@ -1679,7 +1787,10 @@ impl, I: 'static> ReservableCurrency for Pallet 0 => value, // If acting as a critical provider (i.e. first attempt failed), then ensure // slash leaves at least the ED. 
- _ => value.min((account.free + account.reserved).saturating_sub(T::ExistentialDeposit::get())), + _ => value.min( + (account.free + account.reserved) + .saturating_sub(T::ExistentialDeposit::get()), + ), }; let actual = cmp::min(account.reserved, best_value); @@ -1713,8 +1824,9 @@ impl, I: 'static> ReservableCurrency for Pallet } } -impl, I: 'static> NamedReservableCurrency for Pallet where - T::Balance: MaybeSerializeDeserialize + Debug +impl, I: 'static> NamedReservableCurrency for Pallet +where + T::Balance: MaybeSerializeDeserialize + Debug, { type ReserveIdentifier = T::ReserveIdentifier; @@ -1729,8 +1841,14 @@ impl, I: 'static> NamedReservableCurrency for Pallet< /// Move `value` from the free balance from `who` to a named reserve balance. /// /// Is a no-op if value to be reserved is zero. - fn reserve_named(id: &Self::ReserveIdentifier, who: &T::AccountId, value: Self::Balance) -> DispatchResult { - if value.is_zero() { return Ok(()) } + fn reserve_named( + id: &Self::ReserveIdentifier, + who: &T::AccountId, + value: Self::Balance, + ) -> DispatchResult { + if value.is_zero() { + return Ok(()) + } Reserves::::try_mutate(who, |reserves| -> DispatchResult { match reserves.binary_search_by_key(id, |data| data.id) { @@ -1739,10 +1857,9 @@ impl, I: 'static> NamedReservableCurrency for Pallet< reserves[index].amount = reserves[index].amount.saturating_add(value); }, Err(index) => { - reserves.try_insert(index, ReserveData { - id: id.clone(), - amount: value - }).map_err(|_| Error::::TooManyReserves)?; + reserves + .try_insert(index, ReserveData { id: id.clone(), amount: value }) + .map_err(|_| Error::::TooManyReserves)?; }, }; >::reserve(who, value)?; @@ -1753,8 +1870,14 @@ impl, I: 'static> NamedReservableCurrency for Pallet< /// Unreserve some funds, returning any amount that was unable to be unreserved. /// /// Is a no-op if the value to be unreserved is zero. 
- fn unreserve_named(id: &Self::ReserveIdentifier, who: &T::AccountId, value: Self::Balance) -> Self::Balance { - if value.is_zero() { return Zero::zero() } + fn unreserve_named( + id: &Self::ReserveIdentifier, + who: &T::AccountId, + value: Self::Balance, + ) -> Self::Balance { + if value.is_zero() { + return Zero::zero() + } Reserves::::mutate_exists(who, |maybe_reserves| -> Self::Balance { if let Some(reserves) = maybe_reserves.as_mut() { @@ -1782,9 +1905,7 @@ impl, I: 'static> NamedReservableCurrency for Pallet< value - actual }, - Err(_) => { - value - }, + Err(_) => value, } } else { value @@ -1799,16 +1920,19 @@ impl, I: 'static> NamedReservableCurrency for Pallet< fn slash_reserved_named( id: &Self::ReserveIdentifier, who: &T::AccountId, - value: Self::Balance + value: Self::Balance, ) -> (Self::NegativeImbalance, Self::Balance) { - if value.is_zero() { return (NegativeImbalance::zero(), Zero::zero()) } + if value.is_zero() { + return (NegativeImbalance::zero(), Zero::zero()) + } Reserves::::mutate(who, |reserves| -> (Self::NegativeImbalance, Self::Balance) { match reserves.binary_search_by_key(id, |data| data.id) { Ok(index) => { let to_change = cmp::min(reserves[index].amount, value); - let (imb, remain) = >::slash_reserved(who, to_change); + let (imb, remain) = + >::slash_reserved(who, to_change); // remain should always be zero but just to be defensive here let actual = to_change.saturating_sub(remain); @@ -1818,9 +1942,7 @@ impl, I: 'static> NamedReservableCurrency for Pallet< (imb, value - actual) }, - Err(_) => { - (NegativeImbalance::zero(), value) - }, + Err(_) => (NegativeImbalance::zero(), value), } }) } @@ -1838,13 +1960,16 @@ impl, I: 'static> NamedReservableCurrency for Pallet< value: Self::Balance, status: Status, ) -> Result { - if value.is_zero() { return Ok(Zero::zero()) } + if value.is_zero() { + return Ok(Zero::zero()) + } if slashed == beneficiary { return match status { Status::Free => Ok(Self::unreserve_named(id, slashed, value)), - 
Status::Reserved => Ok(value.saturating_sub(Self::reserved_balance_named(id, slashed))), - }; + Status::Reserved => + Ok(value.saturating_sub(Self::reserved_balance_named(id, slashed))), + } } Reserves::::try_mutate(slashed, |reserves| -> Result { @@ -1854,36 +1979,59 @@ impl, I: 'static> NamedReservableCurrency for Pallet< let actual = if status == Status::Reserved { // make it the reserved under same identifier - Reserves::::try_mutate(beneficiary, |reserves| -> Result { - match reserves.binary_search_by_key(id, |data| data.id) { - Ok(index) => { - let remain = >::repatriate_reserved(slashed, beneficiary, to_change, status)?; - - // remain should always be zero but just to be defensive here - let actual = to_change.saturating_sub(remain); - - // this add can't overflow but just to be defensive. - reserves[index].amount = reserves[index].amount.saturating_add(actual); - - Ok(actual) - }, - Err(index) => { - let remain = >::repatriate_reserved(slashed, beneficiary, to_change, status)?; - - // remain should always be zero but just to be defensive here - let actual = to_change.saturating_sub(remain); - - reserves.try_insert(index, ReserveData { - id: id.clone(), - amount: actual - }).map_err(|_| Error::::TooManyReserves)?; - - Ok(actual) - }, - } - })? + Reserves::::try_mutate( + beneficiary, + |reserves| -> Result { + match reserves.binary_search_by_key(id, |data| data.id) { + Ok(index) => { + let remain = + >::repatriate_reserved( + slashed, + beneficiary, + to_change, + status, + )?; + + // remain should always be zero but just to be defensive here + let actual = to_change.saturating_sub(remain); + + // this add can't overflow but just to be defensive. 
+ reserves[index].amount = + reserves[index].amount.saturating_add(actual); + + Ok(actual) + }, + Err(index) => { + let remain = + >::repatriate_reserved( + slashed, + beneficiary, + to_change, + status, + )?; + + // remain should always be zero but just to be defensive here + let actual = to_change.saturating_sub(remain); + + reserves + .try_insert( + index, + ReserveData { id: id.clone(), amount: actual }, + ) + .map_err(|_| Error::::TooManyReserves)?; + + Ok(actual) + }, + } + }, + )? } else { - let remain = >::repatriate_reserved(slashed, beneficiary, to_change, status)?; + let remain = >::repatriate_reserved( + slashed, + beneficiary, + to_change, + status, + )?; // remain should always be zero but just to be defensive here to_change.saturating_sub(remain) @@ -1894,9 +2042,7 @@ impl, I: 'static> NamedReservableCurrency for Pallet< Ok(value - actual) }, - Err(_) => { - Ok(value) - }, + Err(_) => Ok(value), } }) } @@ -1904,7 +2050,7 @@ impl, I: 'static> NamedReservableCurrency for Pallet< impl, I: 'static> LockableCurrency for Pallet where - T::Balance: MaybeSerializeDeserialize + Debug + T::Balance: MaybeSerializeDeserialize + Debug, { type Moment = T::BlockNumber; @@ -1918,9 +2064,12 @@ where amount: T::Balance, reasons: WithdrawReasons, ) { - if amount.is_zero() || reasons.is_empty() { return } + if amount.is_zero() || reasons.is_empty() { + return + } let mut new_lock = Some(BalanceLock { id, amount, reasons: reasons.into() }); - let mut locks = Self::locks(who).into_iter() + let mut locks = Self::locks(who) + .into_iter() .filter_map(|l| if l.id == id { new_lock.take() } else { Some(l) }) .collect::>(); if let Some(lock) = new_lock { @@ -1937,30 +2086,31 @@ where amount: T::Balance, reasons: WithdrawReasons, ) { - if amount.is_zero() || reasons.is_empty() { return } + if amount.is_zero() || reasons.is_empty() { + return + } let mut new_lock = Some(BalanceLock { id, amount, reasons: reasons.into() }); - let mut locks = 
Self::locks(who).into_iter().filter_map(|l| - if l.id == id { - new_lock.take().map(|nl| { - BalanceLock { + let mut locks = Self::locks(who) + .into_iter() + .filter_map(|l| { + if l.id == id { + new_lock.take().map(|nl| BalanceLock { id: l.id, amount: l.amount.max(nl.amount), reasons: l.reasons | nl.reasons, - } - }) - } else { - Some(l) - }).collect::>(); + }) + } else { + Some(l) + } + }) + .collect::>(); if let Some(lock) = new_lock { locks.push(lock) } Self::update_locks(who, &locks[..]); } - fn remove_lock( - id: LockIdentifier, - who: &T::AccountId, - ) { + fn remove_lock(id: LockIdentifier, who: &T::AccountId) { let mut locks = Self::locks(who); locks.retain(|l| l.id != id); Self::update_locks(who, &locks[..]); diff --git a/frame/balances/src/tests_composite.rs b/frame/balances/src/tests_composite.rs index 1d90b3e70b924..e2d50e8b88aad 100644 --- a/frame/balances/src/tests_composite.rs +++ b/frame/balances/src/tests_composite.rs @@ -19,19 +19,15 @@ #![cfg(test)] -use sp_runtime::{ - traits::IdentityLookup, - testing::Header, +use crate::{self as pallet_balances, decl_tests, Config, Pallet}; +use frame_support::{ + parameter_types, + weights::{DispatchInfo, IdentityFee, Weight}, }; +use pallet_transaction_payment::CurrencyAdapter; use sp_core::H256; use sp_io; -use frame_support::parameter_types; -use frame_support::weights::{Weight, DispatchInfo, IdentityFee}; -use pallet_transaction_payment::CurrencyAdapter; -use crate::{ - self as pallet_balances, - Pallet, Config, decl_tests, -}; +use sp_runtime::{testing::Header, traits::IdentityLookup}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -110,10 +106,7 @@ pub struct ExtBuilder { } impl Default for ExtBuilder { fn default() -> Self { - Self { - existential_deposit: 1, - monied: false, - } + Self { existential_deposit: 1, monied: false } } } impl ExtBuilder { @@ -138,12 +131,14 @@ impl ExtBuilder { (2, 20 * self.existential_deposit), 
(3, 30 * self.existential_deposit), (4, 40 * self.existential_deposit), - (12, 10 * self.existential_deposit) + (12, 10 * self.existential_deposit), ] } else { vec![] }, - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); let mut ext = sp_io::TestExternalities::new(t); ext.execute_with(|| System::set_block_number(1)); @@ -151,4 +146,4 @@ impl ExtBuilder { } } -decl_tests!{ Test, ExtBuilder, EXISTENTIAL_DEPOSIT } +decl_tests! { Test, ExtBuilder, EXISTENTIAL_DEPOSIT } diff --git a/frame/balances/src/tests_local.rs b/frame/balances/src/tests_local.rs index 36351252b445c..668c335376c60 100644 --- a/frame/balances/src/tests_local.rs +++ b/frame/balances/src/tests_local.rs @@ -19,20 +19,16 @@ #![cfg(test)] -use sp_runtime::{ - traits::IdentityLookup, - testing::Header, +use crate::{self as pallet_balances, decl_tests, Config, Pallet}; +use frame_support::{ + parameter_types, + traits::StorageMapShim, + weights::{DispatchInfo, IdentityFee, Weight}, }; +use pallet_transaction_payment::CurrencyAdapter; use sp_core::H256; use sp_io; -use frame_support::parameter_types; -use frame_support::traits::StorageMapShim; -use frame_support::weights::{Weight, DispatchInfo, IdentityFee}; -use crate::{ - self as pallet_balances, - Pallet, Config, decl_tests, -}; -use pallet_transaction_payment::CurrencyAdapter; +use sp_runtime::{testing::Header, traits::IdentityLookup}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -98,12 +94,8 @@ impl Config for Test { type DustRemoval = (); type Event = Event; type ExistentialDeposit = ExistentialDeposit; - type AccountStore = StorageMapShim< - super::Account, - system::Provider, - u64, - super::AccountData, - >; + type AccountStore = + StorageMapShim, system::Provider, u64, super::AccountData>; type MaxLocks = MaxLocks; type MaxReserves = MaxReserves; type ReserveIdentifier = [u8; 8]; @@ -116,10 +108,7 @@ pub struct ExtBuilder { } impl 
Default for ExtBuilder { fn default() -> Self { - Self { - existential_deposit: 1, - monied: false, - } + Self { existential_deposit: 1, monied: false } } } impl ExtBuilder { @@ -147,12 +136,14 @@ impl ExtBuilder { (2, 20 * self.existential_deposit), (3, 30 * self.existential_deposit), (4, 40 * self.existential_deposit), - (12, 10 * self.existential_deposit) + (12, 10 * self.existential_deposit), ] } else { vec![] }, - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); let mut ext = sp_io::TestExternalities::new(t); ext.execute_with(|| System::set_block_number(1)); @@ -160,40 +151,37 @@ impl ExtBuilder { } } -decl_tests!{ Test, ExtBuilder, EXISTENTIAL_DEPOSIT } +decl_tests! { Test, ExtBuilder, EXISTENTIAL_DEPOSIT } #[test] fn emit_events_with_no_existential_deposit_suicide_with_dust() { - ::default() - .existential_deposit(2) - .build() - .execute_with(|| { - assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 1, 100, 0)); - - assert_eq!( - events(), - [ - Event::System(system::Event::NewAccount(1)), - Event::Balances(crate::Event::Endowed(1, 100)), - Event::Balances(crate::Event::BalanceSet(1, 100, 0)), - ] - ); - - let res = Balances::slash(&1, 98); - assert_eq!(res, (NegativeImbalance::new(98), 0)); - - // no events - assert_eq!(events(), []); - - let res = Balances::slash(&1, 1); - assert_eq!(res, (NegativeImbalance::new(1), 0)); - - assert_eq!( - events(), - [ - Event::System(system::Event::KilledAccount(1)), - Event::Balances(crate::Event::DustLost(1, 1)), - ] - ); - }); + ::default().existential_deposit(2).build().execute_with(|| { + assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 1, 100, 0)); + + assert_eq!( + events(), + [ + Event::System(system::Event::NewAccount(1)), + Event::Balances(crate::Event::Endowed(1, 100)), + Event::Balances(crate::Event::BalanceSet(1, 100, 0)), + ] + ); + + let res = Balances::slash(&1, 98); + assert_eq!(res, (NegativeImbalance::new(98), 0)); + + // no events + 
assert_eq!(events(), []); + + let res = Balances::slash(&1, 1); + assert_eq!(res, (NegativeImbalance::new(1), 0)); + + assert_eq!( + events(), + [ + Event::System(system::Event::KilledAccount(1)), + Event::Balances(crate::Event::DustLost(1, 1)), + ] + ); + }); } diff --git a/frame/balances/src/tests_reentrancy.rs b/frame/balances/src/tests_reentrancy.rs index 2a3a60dfde842..8682949b2c55d 100644 --- a/frame/balances/src/tests_reentrancy.rs +++ b/frame/balances/src/tests_reentrancy.rs @@ -19,27 +19,17 @@ #![cfg(test)] -use sp_runtime::{ - traits::IdentityLookup, - testing::Header, -}; +use crate::{self as pallet_balances, Config, Pallet}; +use frame_support::{parameter_types, traits::StorageMapShim, weights::IdentityFee}; +use pallet_transaction_payment::CurrencyAdapter; use sp_core::H256; use sp_io; -use frame_support::parameter_types; -use frame_support::traits::StorageMapShim; -use frame_support::weights::{IdentityFee}; -use crate::{ - self as pallet_balances, - Pallet, Config, -}; -use pallet_transaction_payment::CurrencyAdapter; +use sp_runtime::{testing::Header, traits::IdentityLookup}; use crate::*; use frame_support::{ assert_ok, - traits::{ - Currency, ReservableCurrency, - } + traits::{Currency, ReservableCurrency}, }; use frame_system::RawOrigin; @@ -113,12 +103,8 @@ impl Config for Test { type DustRemoval = OnDustRemoval; type Event = Event; type ExistentialDeposit = ExistentialDeposit; - type AccountStore = StorageMapShim< - super::Account, - system::Provider, - u64, - super::AccountData, - >; + type AccountStore = + StorageMapShim, system::Provider, u64, super::AccountData>; type MaxLocks = MaxLocks; type MaxReserves = MaxReserves; type ReserveIdentifier = [u8; 8]; @@ -130,13 +116,10 @@ pub struct ExtBuilder { } impl Default for ExtBuilder { fn default() -> Self { - Self { - existential_deposit: 1, - } + Self { existential_deposit: 1 } } } impl ExtBuilder { - pub fn existential_deposit(mut self, existential_deposit: u64) -> Self { 
self.existential_deposit = existential_deposit; self @@ -149,9 +132,9 @@ impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { self.set_associated_consts(); let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig:: { - balances: vec![], - }.assimilate_storage(&mut t).unwrap(); + pallet_balances::GenesisConfig:: { balances: vec![] } + .assimilate_storage(&mut t) + .unwrap(); let mut ext = sp_io::TestExternalities::new(t); ext.execute_with(|| System::set_block_number(1)); ext @@ -160,112 +143,103 @@ impl ExtBuilder { #[test] fn transfer_dust_removal_tst1_should_work() { - ExtBuilder::default() - .existential_deposit(100) - .build() - .execute_with(|| { - // Verification of reentrancy in dust removal - assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 1, 1000, 0)); - assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 2, 500, 0)); - - // In this transaction, account 2 free balance - // drops below existential balance - // and dust balance is removed from account 2 - assert_ok!(Balances::transfer(RawOrigin::Signed(2).into(), 3, 450)); - - // As expected dust balance is removed. - assert_eq!(Balances::free_balance(&2), 0); - - // As expected beneficiary account 3 - // received the transfered fund. - assert_eq!(Balances::free_balance(&3), 450); - - // Dust balance is deposited to account 1 - // during the process of dust removal. 
- assert_eq!(Balances::free_balance(&1), 1050); - - // Verify the events - // Number of events expected is 8 - assert_eq!(System::events().len(), 11); - - System::assert_has_event(Event::Balances(crate::Event::Transfer(2, 3, 450))); - System::assert_has_event(Event::Balances(crate::Event::DustLost(2, 50))); - } - ); + ExtBuilder::default().existential_deposit(100).build().execute_with(|| { + // Verification of reentrancy in dust removal + assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 1, 1000, 0)); + assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 2, 500, 0)); + + // In this transaction, account 2 free balance + // drops below existential balance + // and dust balance is removed from account 2 + assert_ok!(Balances::transfer(RawOrigin::Signed(2).into(), 3, 450)); + + // As expected dust balance is removed. + assert_eq!(Balances::free_balance(&2), 0); + + // As expected beneficiary account 3 + // received the transfered fund. + assert_eq!(Balances::free_balance(&3), 450); + + // Dust balance is deposited to account 1 + // during the process of dust removal. + assert_eq!(Balances::free_balance(&1), 1050); + + // Verify the events + // Number of events expected is 8 + assert_eq!(System::events().len(), 11); + + System::assert_has_event(Event::Balances(crate::Event::Transfer(2, 3, 450))); + System::assert_has_event(Event::Balances(crate::Event::DustLost(2, 50))); + }); } #[test] fn transfer_dust_removal_tst2_should_work() { - ExtBuilder::default() - .existential_deposit(100) - .build() - .execute_with(|| { - // Verification of reentrancy in dust removal - assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 1, 1000, 0)); - assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 2, 500, 0)); - - // In this transaction, account 2 free balance - // drops below existential balance - // and dust balance is removed from account 2 - assert_ok!(Balances::transfer(RawOrigin::Signed(2).into(), 1, 450)); - - // As expected dust balance is removed. 
- assert_eq!(Balances::free_balance(&2), 0); - - // Dust balance is deposited to account 1 - // during the process of dust removal. - assert_eq!(Balances::free_balance(&1), 1500); - - // Verify the events - // Number of events expected is 8 - assert_eq!(System::events().len(), 9); - - System::assert_has_event(Event::Balances(crate::Event::Transfer(2, 1, 450))); - System::assert_has_event(Event::Balances(crate::Event::DustLost(2, 50))); - } - ); + ExtBuilder::default().existential_deposit(100).build().execute_with(|| { + // Verification of reentrancy in dust removal + assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 1, 1000, 0)); + assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 2, 500, 0)); + + // In this transaction, account 2 free balance + // drops below existential balance + // and dust balance is removed from account 2 + assert_ok!(Balances::transfer(RawOrigin::Signed(2).into(), 1, 450)); + + // As expected dust balance is removed. + assert_eq!(Balances::free_balance(&2), 0); + + // Dust balance is deposited to account 1 + // during the process of dust removal. + assert_eq!(Balances::free_balance(&1), 1500); + + // Verify the events + // Number of events expected is 8 + assert_eq!(System::events().len(), 9); + + System::assert_has_event(Event::Balances(crate::Event::Transfer(2, 1, 450))); + System::assert_has_event(Event::Balances(crate::Event::DustLost(2, 50))); + }); } #[test] fn repatriating_reserved_balance_dust_removal_should_work() { - ExtBuilder::default() - .existential_deposit(100) - .build() - .execute_with(|| { - // Verification of reentrancy in dust removal - assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 1, 1000, 0)); - assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 2, 500, 0)); - - // Reserve a value on account 2, - // Such that free balance is lower than - // Exestintial deposit. 
- assert_ok!(Balances::reserve(&2, 450)); - - // Transfer of reserved fund from slashed account 2 to - // beneficiary account 1 - assert_ok!(Balances::repatriate_reserved(&2, &1, 450, Status::Free), 0); - - // Since free balance of account 2 is lower than - // existential deposit, dust amount is - // removed from the account 2 - assert_eq!(Balances::reserved_balance(2), 0); - assert_eq!(Balances::free_balance(2), 0); - - // account 1 is credited with reserved amount - // together with dust balance during dust - // removal. - assert_eq!(Balances::reserved_balance(1), 0); - assert_eq!(Balances::free_balance(1), 1500); - - // Verify the events - // Number of events expected is 10 - assert_eq!(System::events().len(), 10); - - System::assert_has_event(Event::Balances( - crate::Event::ReserveRepatriated(2, 1, 450, Status::Free), - )); - - System::assert_last_event(Event::Balances(crate::Event::DustLost(2, 50))); - } - ); + ExtBuilder::default().existential_deposit(100).build().execute_with(|| { + // Verification of reentrancy in dust removal + assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 1, 1000, 0)); + assert_ok!(Balances::set_balance(RawOrigin::Root.into(), 2, 500, 0)); + + // Reserve a value on account 2, + // Such that free balance is lower than + // Exestintial deposit. + assert_ok!(Balances::reserve(&2, 450)); + + // Transfer of reserved fund from slashed account 2 to + // beneficiary account 1 + assert_ok!(Balances::repatriate_reserved(&2, &1, 450, Status::Free), 0); + + // Since free balance of account 2 is lower than + // existential deposit, dust amount is + // removed from the account 2 + assert_eq!(Balances::reserved_balance(2), 0); + assert_eq!(Balances::free_balance(2), 0); + + // account 1 is credited with reserved amount + // together with dust balance during dust + // removal. 
+ assert_eq!(Balances::reserved_balance(1), 0); + assert_eq!(Balances::free_balance(1), 1500); + + // Verify the events + // Number of events expected is 10 + assert_eq!(System::events().len(), 10); + + System::assert_has_event(Event::Balances(crate::Event::ReserveRepatriated( + 2, + 1, + 450, + Status::Free, + ))); + + System::assert_last_event(Event::Balances(crate::Event::DustLost(2, 50))); + }); } diff --git a/frame/balances/src/weights.rs b/frame/balances/src/weights.rs index 79e6445dd6bb6..2c87f2847d811 100644 --- a/frame/balances/src/weights.rs +++ b/frame/balances/src/weights.rs @@ -35,11 +35,13 @@ // --output=./frame/balances/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_balances. diff --git a/frame/benchmarking/src/analysis.rs b/frame/benchmarking/src/analysis.rs index f37ffba51f3d0..fffa6828cede9 100644 --- a/frame/benchmarking/src/analysis.rs +++ b/frame/benchmarking/src/analysis.rs @@ -17,10 +17,10 @@ //! Tools for analyzing the benchmark results. -use std::collections::BTreeMap; +use crate::BenchmarkResults; use core::convert::TryFrom; use linregress::{FormulaRegressionBuilder, RegressionDataBuilder}; -use crate::BenchmarkResults; +use std::collections::BTreeMap; pub use linregress::RegressionModel; @@ -63,14 +63,12 @@ impl TryFrom> for AnalysisChoice { fn try_from(s: Option) -> Result { match s { None => Ok(AnalysisChoice::default()), - Some(i) => { - match &i[..] { - "min-squares" | "min_squares" => Ok(AnalysisChoice::MinSquares), - "median-slopes" | "median_slopes" => Ok(AnalysisChoice::MedianSlopes), - "max" => Ok(AnalysisChoice::Max), - _ => Err("invalid analysis string") - } - } + Some(i) => match &i[..] 
{ + "min-squares" | "min_squares" => Ok(AnalysisChoice::MinSquares), + "median-slopes" | "median_slopes" => Ok(AnalysisChoice::MedianSlopes), + "max" => Ok(AnalysisChoice::Max), + _ => Err("invalid analysis string"), + }, } } } @@ -79,17 +77,20 @@ impl Analysis { // Useful for when there are no components, and we just need an median value of the benchmark results. // Note: We choose the median value because it is more robust to outliers. fn median_value(r: &Vec, selector: BenchmarkSelector) -> Option { - if r.is_empty() { return None } + if r.is_empty() { + return None + } - let mut values: Vec = r.iter().map(|result| - match selector { + let mut values: Vec = r + .iter() + .map(|result| match selector { BenchmarkSelector::ExtrinsicTime => result.extrinsic_time, BenchmarkSelector::StorageRootTime => result.storage_root_time, BenchmarkSelector::Reads => result.reads.into(), BenchmarkSelector::Writes => result.writes.into(), BenchmarkSelector::ProofSize => result.proof_size.into(), - } - ).collect(); + }) + .collect(); values.sort(); let mid = values.len() / 2; @@ -104,64 +105,80 @@ impl Analysis { } pub fn median_slopes(r: &Vec, selector: BenchmarkSelector) -> Option { - if r[0].components.is_empty() { return Self::median_value(r, selector) } - - let results = r[0].components.iter().enumerate().map(|(i, &(param, _))| { - let mut counted = BTreeMap::, usize>::new(); - for result in r.iter() { - let mut p = result.components.iter().map(|x| x.1).collect::>(); - p[i] = 0; - *counted.entry(p).or_default() += 1; - } - let others: Vec = counted.iter().max_by_key(|i| i.1).expect("r is not empty; qed").0.clone(); - let values = r.iter() - .filter(|v| - v.components.iter() - .map(|x| x.1) - .zip(others.iter()) - .enumerate() - .all(|(j, (v1, v2))| j == i || v1 == *v2) - ).map(|result| { - // Extract the data we are interested in analyzing - let data = match selector { - BenchmarkSelector::ExtrinsicTime => result.extrinsic_time, - BenchmarkSelector::StorageRootTime => 
result.storage_root_time, - BenchmarkSelector::Reads => result.reads.into(), - BenchmarkSelector::Writes => result.writes.into(), - BenchmarkSelector::ProofSize => result.proof_size.into(), - }; - (result.components[i].1, data) - }) - .collect::>(); - (format!("{:?}", param), i, others, values) - }).collect::>(); - - let models = results.iter().map(|(_, _, _, ref values)| { - let mut slopes = vec![]; - for (i, &(x1, y1)) in values.iter().enumerate() { - for &(x2, y2) in values.iter().skip(i + 1) { - if x1 != x2 { - slopes.push((y1 as f64 - y2 as f64) / (x1 as f64 - x2 as f64)); + if r[0].components.is_empty() { + return Self::median_value(r, selector) + } + + let results = r[0] + .components + .iter() + .enumerate() + .map(|(i, &(param, _))| { + let mut counted = BTreeMap::, usize>::new(); + for result in r.iter() { + let mut p = result.components.iter().map(|x| x.1).collect::>(); + p[i] = 0; + *counted.entry(p).or_default() += 1; + } + let others: Vec = + counted.iter().max_by_key(|i| i.1).expect("r is not empty; qed").0.clone(); + let values = r + .iter() + .filter(|v| { + v.components + .iter() + .map(|x| x.1) + .zip(others.iter()) + .enumerate() + .all(|(j, (v1, v2))| j == i || v1 == *v2) + }) + .map(|result| { + // Extract the data we are interested in analyzing + let data = match selector { + BenchmarkSelector::ExtrinsicTime => result.extrinsic_time, + BenchmarkSelector::StorageRootTime => result.storage_root_time, + BenchmarkSelector::Reads => result.reads.into(), + BenchmarkSelector::Writes => result.writes.into(), + BenchmarkSelector::ProofSize => result.proof_size.into(), + }; + (result.components[i].1, data) + }) + .collect::>(); + (format!("{:?}", param), i, others, values) + }) + .collect::>(); + + let models = results + .iter() + .map(|(_, _, _, ref values)| { + let mut slopes = vec![]; + for (i, &(x1, y1)) in values.iter().enumerate() { + for &(x2, y2) in values.iter().skip(i + 1) { + if x1 != x2 { + slopes.push((y1 as f64 - y2 as f64) / (x1 as f64 - 
x2 as f64)); + } } } - } - slopes.sort_by(|a, b| a.partial_cmp(b).expect("values well defined; qed")); - let slope = slopes[slopes.len() / 2]; + slopes.sort_by(|a, b| a.partial_cmp(b).expect("values well defined; qed")); + let slope = slopes[slopes.len() / 2]; - let mut offsets = vec![]; - for &(x, y) in values.iter() { - offsets.push(y as f64 - slope * x as f64); - } - offsets.sort_by(|a, b| a.partial_cmp(b).expect("values well defined; qed")); - let offset = offsets[offsets.len() / 2]; + let mut offsets = vec![]; + for &(x, y) in values.iter() { + offsets.push(y as f64 - slope * x as f64); + } + offsets.sort_by(|a, b| a.partial_cmp(b).expect("values well defined; qed")); + let offset = offsets[offsets.len() / 2]; - (offset, slope) - }).collect::>(); + (offset, slope) + }) + .collect::>(); - let models = models.iter() + let models = models + .iter() .zip(results.iter()) .map(|((offset, slope), (_, i, others, _))| { - let over = others.iter() + let over = others + .iter() .enumerate() .filter(|(j, _)| j != i) .map(|(j, v)| models[j].1 * *v as f64) @@ -183,18 +200,20 @@ impl Analysis { } pub fn min_squares_iqr(r: &Vec, selector: BenchmarkSelector) -> Option { - if r[0].components.is_empty() { return Self::median_value(r, selector) } + if r[0].components.is_empty() { + return Self::median_value(r, selector) + } let mut results = BTreeMap::, Vec>::new(); for result in r.iter() { let p = result.components.iter().map(|x| x.1).collect::>(); results.entry(p).or_default().push(match selector { - BenchmarkSelector::ExtrinsicTime => result.extrinsic_time, - BenchmarkSelector::StorageRootTime => result.storage_root_time, - BenchmarkSelector::Reads => result.reads.into(), - BenchmarkSelector::Writes => result.writes.into(), - BenchmarkSelector::ProofSize => result.proof_size.into(), - }) + BenchmarkSelector::ExtrinsicTime => result.extrinsic_time, + BenchmarkSelector::StorageRootTime => result.storage_root_time, + BenchmarkSelector::Reads => result.reads.into(), + 
BenchmarkSelector::Writes => result.writes.into(), + BenchmarkSelector::ProofSize => result.proof_size.into(), + }) } for (_, rs) in results.iter_mut() { @@ -203,21 +222,19 @@ impl Analysis { *rs = rs[ql..rs.len() - ql].to_vec(); } - let mut data = vec![("Y", results.iter().flat_map(|x| x.1.iter().map(|v| *v as f64)).collect())]; + let mut data = + vec![("Y", results.iter().flat_map(|x| x.1.iter().map(|v| *v as f64)).collect())]; let names = r[0].components.iter().map(|x| format!("{:?}", x.0)).collect::>(); - data.extend(names.iter() - .enumerate() - .map(|(i, p)| ( + data.extend(names.iter().enumerate().map(|(i, p)| { + ( p.as_str(), - results.iter() - .flat_map(|x| Some(x.0[i] as f64) - .into_iter() - .cycle() - .take(x.1.len()) - ).collect::>() - )) - ); + results + .iter() + .flat_map(|x| Some(x.0[i] as f64).into_iter().cycle().take(x.1.len())) + .collect::>(), + ) + })); let data = RegressionDataBuilder::new().build_from(data).ok()?; @@ -227,25 +244,31 @@ impl Analysis { .fit() .ok()?; - let slopes = model.parameters.regressor_values.iter() + let slopes = model + .parameters + .regressor_values + .iter() .enumerate() .map(|(_, x)| (*x + 0.5) as u128) .collect(); - let value_dists = results.iter().map(|(p, vs)| { - // Avoid divide by zero - if vs.len() == 0 { return (p.clone(), 0, 0) } - let total = vs.iter() - .fold(0u128, |acc, v| acc + *v); - let mean = total / vs.len() as u128; - let sum_sq_diff = vs.iter() - .fold(0u128, |acc, v| { + let value_dists = results + .iter() + .map(|(p, vs)| { + // Avoid divide by zero + if vs.len() == 0 { + return (p.clone(), 0, 0) + } + let total = vs.iter().fold(0u128, |acc, v| acc + *v); + let mean = total / vs.len() as u128; + let sum_sq_diff = vs.iter().fold(0u128, |acc, v| { let d = mean.max(*v) - mean.min(*v); acc + d * d }); - let stddev = (sum_sq_diff as f64 / vs.len() as f64).sqrt() as u128; - (p.clone(), mean, stddev) - }).collect::>(); + let stddev = (sum_sq_diff as f64 / vs.len() as f64).sqrt() as u128; + 
(p.clone(), mean, stddev) + }) + .collect::>(); Some(Self { base: (model.parameters.intercept_value + 0.5) as u128, @@ -261,32 +284,30 @@ impl Analysis { let min_squares = Self::min_squares_iqr(r, selector); if median_slopes.is_none() || min_squares.is_none() { - return None; + return None } let median_slopes = median_slopes.unwrap(); let min_squares = min_squares.unwrap(); let base = median_slopes.base.max(min_squares.base); - let slopes = median_slopes.slopes.into_iter() + let slopes = median_slopes + .slopes + .into_iter() .zip(min_squares.slopes.into_iter()) - .map(|(a, b): (u128, u128)| { a.max(b) }) + .map(|(a, b): (u128, u128)| a.max(b)) .collect::>(); // components should always be in the same order - median_slopes.names.iter() + median_slopes + .names + .iter() .zip(min_squares.names.iter()) .for_each(|(a, b)| assert!(a == b, "benchmark results not in the same order")); let names = median_slopes.names; let value_dists = min_squares.value_dists; let model = min_squares.model; - Some(Self { - base, - slopes, - names, - value_dists, - model, - }) + Some(Self { base, slopes, names, value_dists, model }) } } @@ -295,7 +316,7 @@ fn ms(mut nanos: u128) -> String { while x > 1 { if nanos > x * 1_000 { nanos = nanos / x * x; - break; + break } x /= 10; } @@ -306,19 +327,35 @@ impl std::fmt::Display for Analysis { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { if let Some(ref value_dists) = self.value_dists { writeln!(f, "\nData points distribution:")?; - writeln!(f, "{} mean µs sigma µs %", self.names.iter().map(|p| format!("{:>5}", p)).collect::>().join(" "))?; + writeln!( + f, + "{} mean µs sigma µs %", + self.names.iter().map(|p| format!("{:>5}", p)).collect::>().join(" ") + )?; for (param_values, mean, sigma) in value_dists.iter() { if *mean == 0 { - writeln!(f, "{} {:>8} {:>8} {:>3}.{}%", - param_values.iter().map(|v| format!("{:>5}", v)).collect::>().join(" "), + writeln!( + f, + "{} {:>8} {:>8} {:>3}.{}%", + param_values + .iter() + 
.map(|v| format!("{:>5}", v)) + .collect::>() + .join(" "), ms(*mean), ms(*sigma), "?", "?" )?; } else { - writeln!(f, "{} {:>8} {:>8} {:>3}.{}%", - param_values.iter().map(|v| format!("{:>5}", v)).collect::>().join(" "), + writeln!( + f, + "{} {:>8} {:>8} {:>3}.{}%", + param_values + .iter() + .map(|v| format!("{:>5}", v)) + .collect::>() + .join(" "), ms(*mean), ms(*sigma), (sigma * 100 / mean), @@ -350,7 +387,7 @@ impl std::fmt::Debug for Analysis { for (&m, n) in self.slopes.iter().zip(self.names.iter()) { write!(f, " + ({} * {})", m, n)?; } - write!(f,"") + write!(f, "") } } @@ -382,17 +419,66 @@ mod tests { #[test] fn analysis_median_slopes_should_work() { let data = vec![ - benchmark_result(vec![(BenchmarkParameter::n, 1), (BenchmarkParameter::m, 5)], 11_500_000, 0, 3, 10), - benchmark_result(vec![(BenchmarkParameter::n, 2), (BenchmarkParameter::m, 5)], 12_500_000, 0, 4, 10), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 5)], 13_500_000, 0, 5, 10), - benchmark_result(vec![(BenchmarkParameter::n, 4), (BenchmarkParameter::m, 5)], 14_500_000, 0, 6, 10), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 1)], 13_100_000, 0, 5, 2), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 3)], 13_300_000, 0, 5, 6), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 7)], 13_700_000, 0, 5, 14), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 10)], 14_000_000, 0, 5, 20), + benchmark_result( + vec![(BenchmarkParameter::n, 1), (BenchmarkParameter::m, 5)], + 11_500_000, + 0, + 3, + 10, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 2), (BenchmarkParameter::m, 5)], + 12_500_000, + 0, + 4, + 10, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 5)], + 13_500_000, + 0, + 5, + 10, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 4), (BenchmarkParameter::m, 5)], + 14_500_000, + 0, + 6, + 10, + ), + 
benchmark_result( + vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 1)], + 13_100_000, + 0, + 5, + 2, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 3)], + 13_300_000, + 0, + 5, + 6, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 7)], + 13_700_000, + 0, + 5, + 14, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 10)], + 14_000_000, + 0, + 5, + 20, + ), ]; - let extrinsic_time = Analysis::median_slopes(&data, BenchmarkSelector::ExtrinsicTime).unwrap(); + let extrinsic_time = + Analysis::median_slopes(&data, BenchmarkSelector::ExtrinsicTime).unwrap(); assert_eq!(extrinsic_time.base, 10_000_000); assert_eq!(extrinsic_time.slopes, vec![1_000_000, 100_000]); @@ -408,17 +494,66 @@ mod tests { #[test] fn analysis_median_min_squares_should_work() { let data = vec![ - benchmark_result(vec![(BenchmarkParameter::n, 1), (BenchmarkParameter::m, 5)], 11_500_000, 0, 3, 10), - benchmark_result(vec![(BenchmarkParameter::n, 2), (BenchmarkParameter::m, 5)], 12_500_000, 0, 4, 10), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 5)], 13_500_000, 0, 5, 10), - benchmark_result(vec![(BenchmarkParameter::n, 4), (BenchmarkParameter::m, 5)], 14_500_000, 0, 6, 10), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 1)], 13_100_000, 0, 5, 2), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 3)], 13_300_000, 0, 5, 6), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 7)], 13_700_000, 0, 5, 14), - benchmark_result(vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 10)], 14_000_000, 0, 5, 20), + benchmark_result( + vec![(BenchmarkParameter::n, 1), (BenchmarkParameter::m, 5)], + 11_500_000, + 0, + 3, + 10, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 2), (BenchmarkParameter::m, 5)], + 12_500_000, + 0, + 4, + 10, + ), + benchmark_result( + 
vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 5)], + 13_500_000, + 0, + 5, + 10, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 4), (BenchmarkParameter::m, 5)], + 14_500_000, + 0, + 6, + 10, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 1)], + 13_100_000, + 0, + 5, + 2, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 3)], + 13_300_000, + 0, + 5, + 6, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 7)], + 13_700_000, + 0, + 5, + 14, + ), + benchmark_result( + vec![(BenchmarkParameter::n, 3), (BenchmarkParameter::m, 10)], + 14_000_000, + 0, + 5, + 20, + ), ]; - let extrinsic_time = Analysis::min_squares_iqr(&data, BenchmarkSelector::ExtrinsicTime).unwrap(); + let extrinsic_time = + Analysis::min_squares_iqr(&data, BenchmarkSelector::ExtrinsicTime).unwrap(); assert_eq!(extrinsic_time.base, 10_000_000); assert_eq!(extrinsic_time.slopes, vec![1_000_000, 100_000]); diff --git a/frame/benchmarking/src/lib.rs b/frame/benchmarking/src/lib.rs index 98867d9b7b9c3..b78d2605afea7 100644 --- a/frame/benchmarking/src/lib.rs +++ b/frame/benchmarking/src/lib.rs @@ -19,35 +19,35 @@ #![cfg_attr(not(feature = "std"), no_std)] -mod tests; -mod utils; #[cfg(feature = "std")] mod analysis; +mod tests; +mod utils; -pub use utils::*; #[cfg(feature = "std")] -pub use analysis::{Analysis, BenchmarkSelector, RegressionModel, AnalysisChoice}; -#[doc(hidden)] -pub use sp_io::storage::root as storage_root; -#[doc(hidden)] -pub use sp_runtime::traits::Zero; +pub use analysis::{Analysis, AnalysisChoice, BenchmarkSelector, RegressionModel}; #[doc(hidden)] pub use frame_support; #[doc(hidden)] -pub use sp_std::{self, vec, prelude::Vec, boxed::Box}; +pub use log; #[doc(hidden)] pub use paste; #[doc(hidden)] -pub use sp_storage::TrackedStorageKey; +pub use sp_io::storage::root as storage_root; #[doc(hidden)] -pub use log; +pub use sp_runtime::traits::Zero; +#[doc(hidden)] 
+pub use sp_std::{self, boxed::Box, prelude::Vec, vec}; +#[doc(hidden)] +pub use sp_storage::TrackedStorageKey; +pub use utils::*; /// Whitelist the given account. #[macro_export] macro_rules! whitelist { ($acc:ident) => { frame_benchmarking::benchmarking::add_to_whitelist( - frame_system::Account::::hashed_key_for(&$acc).into() + frame_system::Account::::hashed_key_for(&$acc).into(), ); }; } @@ -1267,9 +1267,9 @@ pub fn show_benchmark_debug_info( * Verify: {:?}\n\ * Error message: {}", sp_std::str::from_utf8(instance_string) - .expect("it's all just strings ran through the wasm interface. qed"), + .expect("it's all just strings ran through the wasm interface. qed"), sp_std::str::from_utf8(benchmark) - .expect("it's all just strings ran through the wasm interface. qed"), + .expect("it's all just strings ran through the wasm interface. qed"), lowest_range_values, highest_range_values, steps, diff --git a/frame/benchmarking/src/tests.rs b/frame/benchmarking/src/tests.rs index 646609c7c1e16..7bb1f9d7d62cb 100644 --- a/frame/benchmarking/src/tests.rs +++ b/frame/benchmarking/src/tests.rs @@ -20,9 +20,13 @@ #![cfg(test)] use super::*; -use sp_std::prelude::*; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::{H256, Header}, BuildStorage}; use frame_support::parameter_types; +use sp_runtime::{ + testing::{Header, H256}, + traits::{BlakeTwo256, IdentityLookup}, + BuildStorage, +}; +use sp_std::prelude::*; mod pallet_test { use frame_support::pallet_prelude::Get; @@ -59,7 +63,8 @@ mod pallet_test { } pub trait Config: frame_system::Config + OtherConfig - where Self::OtherEvent: Into<::Event> + where + Self::OtherEvent: Into<::Event>, { type Event; type LowerBound: Get; @@ -107,7 +112,7 @@ impl frame_system::Config for Test { type OnSetCode = (); } -parameter_types!{ +parameter_types! 
{ pub const LowerBound: u32 = 1; pub const UpperBound: u32 = 100; } @@ -127,16 +132,20 @@ fn new_test_ext() -> sp_io::TestExternalities { } mod benchmarks { - use sp_std::prelude::*; + use super::{ + new_test_ext, + pallet_test::{self, Value}, + Test, + }; + use crate::{account, BenchmarkParameter, BenchmarkingSetup}; + use frame_support::{assert_err, assert_ok, ensure, traits::Get, StorageValue}; use frame_system::RawOrigin; - use super::{Test, pallet_test::{self, Value}, new_test_ext}; - use frame_support::{assert_ok, assert_err, ensure, traits::Get, StorageValue}; - use crate::{BenchmarkingSetup, BenchmarkParameter, account}; + use sp_std::prelude::*; // Additional used internally by the benchmark macro. use super::pallet_test::{Call, Config, Pallet}; - crate::benchmarks!{ + crate::benchmarks! { where_clause { where ::OtherEvent: Into<::Event> + Clone, @@ -204,7 +213,8 @@ mod benchmarks { &selected, &[(BenchmarkParameter::b, 1)], true, - ).expect("failed to create closure"); + ) + .expect("failed to create closure"); new_test_ext().execute_with(|| { assert_ok!(closure()); @@ -222,7 +232,8 @@ mod benchmarks { &selected, &[(BenchmarkParameter::b, 1)], true, - ).expect("failed to create closure"); + ) + .expect("failed to create closure"); new_test_ext().execute_with(|| { assert_ok!(closure()); @@ -240,7 +251,8 @@ mod benchmarks { &selected, &[(BenchmarkParameter::x, 1)], true, - ).expect("failed to create closure"); + ) + .expect("failed to create closure"); assert_ok!(closure()); } @@ -254,7 +266,8 @@ mod benchmarks { &selected, &[(BenchmarkParameter::b, 1)], true, - ).expect("failed to create closure"); + ) + .expect("failed to create closure"); new_test_ext().execute_with(|| { assert_ok!(closure()); @@ -267,7 +280,8 @@ mod benchmarks { &selected, &[(BenchmarkParameter::x, 10000)], true, - ).expect("failed to create closure"); + ) + .expect("failed to create closure"); new_test_ext().execute_with(|| { assert_err!(closure(), "You forgot to sort!"); diff --git 
a/frame/benchmarking/src/utils.rs b/frame/benchmarking/src/utils.rs index c40434fb1a584..33d479a0b54a7 100644 --- a/frame/benchmarking/src/utils.rs +++ b/frame/benchmarking/src/utils.rs @@ -17,18 +17,43 @@ //! Interfaces, types and utils for benchmarking a FRAME runtime. -use codec::{Encode, Decode}; -use sp_std::{vec::Vec, prelude::Box}; +use codec::{Decode, Encode}; +use frame_support::traits::StorageInfo; use sp_io::hashing::blake2_256; +use sp_std::{prelude::Box, vec::Vec}; use sp_storage::TrackedStorageKey; -use frame_support::traits::StorageInfo; /// An alphabet of possible parameters to use for benchmarking. #[derive(Encode, Decode, Clone, Copy, PartialEq, Debug)] #[allow(missing_docs)] #[allow(non_camel_case_types)] pub enum BenchmarkParameter { - a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u, v, w, x, y, z, + a, + b, + c, + d, + e, + f, + g, + h, + i, + j, + k, + l, + m, + n, + o, + p, + q, + r, + s, + t, + u, + v, + w, + x, + y, + z, } #[cfg(feature = "std")] @@ -105,7 +130,8 @@ pub trait Benchmarking { /// WARNING! This is a non-deterministic call. Do not use this within /// consensus critical logic. fn current_time() -> u128 { - std::time::SystemTime::now().duration_since(std::time::SystemTime::UNIX_EPOCH) + std::time::SystemTime::now() + .duration_since(std::time::SystemTime::UNIX_EPOCH) .expect("Unix time doesn't go backwards; qed") .as_nanos() } @@ -153,7 +179,7 @@ pub trait Benchmarking { // If the key does not exist, add it. None => { whitelist.push(add); - } + }, } self.set_whitelist(whitelist); } @@ -217,12 +243,16 @@ pub trait BenchmarkingSetup { fn instance( &self, components: &[(BenchmarkParameter, u32)], - verify: bool + verify: bool, ) -> Result Result<(), &'static str>>, &'static str>; } /// Grab an account, seeded by a name and index. 
-pub fn account(name: &'static str, index: u32, seed: u32) -> AccountId { +pub fn account( + name: &'static str, + index: u32, + seed: u32, +) -> AccountId { let entropy = (name, index, seed).using_encoded(blake2_256); AccountId::decode(&mut &entropy[..]).unwrap_or_default() } @@ -236,7 +266,7 @@ pub fn whitelisted_caller() -> AccountId { macro_rules! whitelist_account { ($acc:ident) => { frame_benchmarking::benchmarking::add_to_whitelist( - frame_system::Account::::hashed_key_for(&$acc).into() + frame_system::Account::::hashed_key_for(&$acc).into(), ); - } + }; } diff --git a/frame/bounties/src/benchmarking.rs b/frame/bounties/src/benchmarking.rs index 23542e6c31b80..c95c13649b6a4 100644 --- a/frame/bounties/src/benchmarking.rs +++ b/frame/bounties/src/benchmarking.rs @@ -21,10 +21,10 @@ use super::*; -use sp_runtime::traits::Bounded; -use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; use frame_support::traits::OnInitialize; +use frame_system::RawOrigin; +use sp_runtime::traits::Bounded; use crate::Module as Bounties; use pallet_treasury::Pallet as Treasury; @@ -33,7 +33,7 @@ const SEED: u32 = 0; // Create bounties that are approved for use in `on_initialize`. fn create_approved_bounties(n: u32) -> Result<(), &'static str> { - for i in 0 .. n { + for i in 0..n { let (caller, _curator, _fee, value, reason) = setup_bounty::(i, MAX_BYTES); Bounties::::propose_bounty(RawOrigin::Signed(caller).into(), value, reason)?; let bounty_id = BountyCount::get() - 1; @@ -44,13 +44,10 @@ fn create_approved_bounties(n: u32) -> Result<(), &'static str> { } // Create the pre-requisite information needed to create a treasury `propose_bounty`. 
-fn setup_bounty(u: u32, d: u32) -> ( - T::AccountId, - T::AccountId, - BalanceOf, - BalanceOf, - Vec, -) { +fn setup_bounty( + u: u32, + d: u32, +) -> (T::AccountId, T::AccountId, BalanceOf, BalanceOf, Vec) { let caller = account("caller", u, SEED); let value: BalanceOf = T::BountyValueMinimum::get().saturating_mul(100u32.into()); let fee = value / 2u32.into(); @@ -62,10 +59,8 @@ fn setup_bounty(u: u32, d: u32) -> ( (caller, curator, fee, value, reason) } -fn create_bounty() -> Result<( - ::Source, - BountyIndex, -), &'static str> { +fn create_bounty( +) -> Result<(::Source, BountyIndex), &'static str> { let (caller, curator, fee, value, reason) = setup_bounty::(0, MAX_BYTES); let curator_lookup = T::Lookup::unlookup(curator.clone()); Bounties::::propose_bounty(RawOrigin::Signed(caller).into(), value, reason)?; @@ -216,8 +211,4 @@ benchmarks! { } } -impl_benchmark_test_suite!( - Bounties, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Bounties, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/bounties/src/lib.rs b/frame/bounties/src/lib.rs index 03fb358ecfb9f..328c1dc13adcb 100644 --- a/frame/bounties/src/lib.rs +++ b/frame/bounties/src/lib.rs @@ -74,30 +74,30 @@ #![cfg_attr(not(feature = "std"), no_std)] -mod tests; mod benchmarking; +mod tests; pub mod weights; use sp_std::prelude::*; -use frame_support::{decl_module, decl_storage, decl_event, ensure, decl_error}; +use frame_support::{decl_error, decl_event, decl_module, decl_storage, ensure}; use frame_support::traits::{ - Currency, Get, Imbalance, OnUnbalanced, ExistenceRequirement::{AllowDeath}, - ReservableCurrency}; + Currency, ExistenceRequirement::AllowDeath, Get, Imbalance, OnUnbalanced, ReservableCurrency, +}; -use sp_runtime::{Permill, RuntimeDebug, DispatchResult, traits::{ - Zero, StaticLookup, AccountIdConversion, Saturating, BadOrigin -}}; +use sp_runtime::{ + traits::{AccountIdConversion, BadOrigin, Saturating, StaticLookup, Zero}, + 
DispatchResult, Permill, RuntimeDebug, +}; -use frame_support::dispatch::DispatchResultWithPostInfo; -use frame_support::traits::{EnsureOrigin}; +use frame_support::{dispatch::DispatchResultWithPostInfo, traits::EnsureOrigin}; -use frame_support::weights::{Weight}; +use frame_support::weights::Weight; -use codec::{Encode, Decode}; -use scale_info::TypeInfo; +use codec::{Decode, Encode}; use frame_system::{self as system, ensure_signed}; +use scale_info::TypeInfo; pub use weights::WeightInfo; type BalanceOf = pallet_treasury::BalanceOf; @@ -105,7 +105,6 @@ type BalanceOf = pallet_treasury::BalanceOf; type PositiveImbalanceOf = pallet_treasury::PositiveImbalanceOf; pub trait Config: frame_system::Config + pallet_treasury::Config { - /// The amount held on deposit for placing a bounty proposal. type BountyDepositBase: Get>; @@ -693,14 +692,17 @@ impl Module { description: Vec, value: BalanceOf, ) -> DispatchResult { - ensure!(description.len() <= T::MaximumReasonLength::get() as usize, Error::::ReasonTooBig); + ensure!( + description.len() <= T::MaximumReasonLength::get() as usize, + Error::::ReasonTooBig + ); ensure!(value >= T::BountyValueMinimum::get(), Error::::InvalidValue); let index = Self::bounty_count(); // reserve deposit for new bounty - let bond = T::BountyDepositBase::get() - + T::DataDepositPerByte::get() * (description.len() as u32).into(); + let bond = T::BountyDepositBase::get() + + T::DataDepositPerByte::get() * (description.len() as u32).into(); T::Currency::reserve(&proposer, bond) .map_err(|_| Error::::InsufficientProposersBalance)?; @@ -722,7 +724,6 @@ impl Module { Ok(()) } - } impl pallet_treasury::SpendFunds for Module { @@ -730,7 +731,7 @@ impl pallet_treasury::SpendFunds for Module { budget_remaining: &mut BalanceOf, imbalance: &mut PositiveImbalanceOf, total_weight: &mut Weight, - missed_any: &mut bool + missed_any: &mut bool, ) { let bounties_len = BountyApprovals::mutate(|v| { let bounties_approval_len = v.len() as u32; @@ -748,7 +749,10 
@@ impl pallet_treasury::SpendFunds for Module { debug_assert!(err_amount.is_zero()); // fund the bounty account - imbalance.subsume(T::Currency::deposit_creating(&Self::bounty_account_id(index), bounty.value)); + imbalance.subsume(T::Currency::deposit_creating( + &Self::bounty_account_id(index), + bounty.value, + )); Self::deposit_event(RawEvent::BountyBecameActive(index)); false diff --git a/frame/bounties/src/tests.rs b/frame/bounties/src/tests.rs index 2e96d8271e130..5ce1373ed9066 100644 --- a/frame/bounties/src/tests.rs +++ b/frame/bounties/src/tests.rs @@ -19,20 +19,20 @@ #![cfg(test)] -use crate as pallet_bounties; use super::*; +use crate as pallet_bounties; use std::cell::RefCell; use frame_support::{ - assert_noop, assert_ok, parameter_types, weights::Weight, traits::OnInitialize, - PalletId, pallet_prelude::GenesisBuild, + assert_noop, assert_ok, pallet_prelude::GenesisBuild, parameter_types, traits::OnInitialize, + weights::Weight, PalletId, }; use sp_core::H256; use sp_runtime::{ - Perbill, testing::Header, - traits::{BlakeTwo256, IdentityLookup, BadOrigin}, + traits::{BadOrigin, BlakeTwo256, IdentityLookup}, + Perbill, }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; @@ -121,7 +121,7 @@ impl pallet_treasury::Config for Test { type ProposalBondMinimum = ProposalBondMinimum; type SpendPeriod = SpendPeriod; type Burn = Burn; - type BurnDestination = (); // Just gets burned. + type BurnDestination = (); // Just gets burned. 
type WeightInfo = (); type SpendFunds = Bounties; type MaxApprovals = MaxApprovals; @@ -146,23 +146,25 @@ impl Config for Test { type WeightInfo = (); } -type TreasuryError = pallet_treasury::Error::; +type TreasuryError = pallet_treasury::Error; pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig::{ + pallet_balances::GenesisConfig:: { // Total issuance will be 200 with treasury account initialized at ED. balances: vec![(0, 100), (1, 98), (2, 1)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); GenesisBuild::::assimilate_storage(&pallet_treasury::GenesisConfig, &mut t).unwrap(); t.into() } fn last_event() -> RawEvent { - System::events().into_iter().map(|r| r.event) - .filter_map(|e| { - if let Event::Bounties(inner) = e { Some(inner) } else { None } - }) + System::events() + .into_iter() + .map(|r| r.event) + .filter_map(|e| if let Event::Bounties(inner) = e { Some(inner) } else { None }) .last() .unwrap() } @@ -267,8 +269,10 @@ fn reject_already_rejected_spend_proposal_fails() { #[test] fn reject_non_existent_spend_proposal_fails() { new_test_ext().execute_with(|| { - assert_noop!(Treasury::reject_proposal(Origin::root(), 0), - pallet_treasury::Error::::InvalidIndex); + assert_noop!( + Treasury::reject_proposal(Origin::root(), 0), + pallet_treasury::Error::::InvalidIndex + ); }); } @@ -353,9 +357,9 @@ fn treasury_account_doesnt_get_deleted() { #[test] fn inexistent_account_works() { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig::{ - balances: vec![(0, 100), (1, 99), (2, 1)], - }.assimilate_storage(&mut t).unwrap(); + pallet_balances::GenesisConfig:: { balances: vec![(0, 100), (1, 99), (2, 1)] } + .assimilate_storage(&mut t) + .unwrap(); // Treasury genesis config is not build thus treasury account does not exist let mut t: sp_io::TestExternalities 
= t.into(); @@ -398,14 +402,17 @@ fn propose_bounty_works() { assert_eq!(Balances::reserved_balance(0), deposit); assert_eq!(Balances::free_balance(0), 100 - deposit); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 0, - curator_deposit: 0, - value: 10, - bond: deposit, - status: BountyStatus::Proposed, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 0, + curator_deposit: 0, + value: 10, + bond: deposit, + status: BountyStatus::Proposed, + } + ); assert_eq!(Bounties::bounty_descriptions(0).unwrap(), b"1234567890".to_vec()); @@ -476,14 +483,17 @@ fn approve_bounty_works() { let deposit: u64 = 80 + 5; - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 0, - value: 50, - curator_deposit: 0, - bond: deposit, - status: BountyStatus::Approved, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 0, + value: 50, + curator_deposit: 0, + bond: deposit, + status: BountyStatus::Approved, + } + ); assert_eq!(Bounties::bounty_approvals(), vec![0]); assert_noop!(Bounties::close_bounty(Origin::root(), 0), Error::::UnexpectedStatus); @@ -498,14 +508,17 @@ fn approve_bounty_works() { assert_eq!(Balances::reserved_balance(0), 0); assert_eq!(Balances::free_balance(0), 100); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 0, - curator_deposit: 0, - value: 50, - bond: deposit, - status: BountyStatus::Funded, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 0, + curator_deposit: 0, + value: 50, + bond: deposit, + status: BountyStatus::Funded, + } + ); assert_eq!(Treasury::pot(), 100 - 50 - 25); // burn 25 assert_eq!(Balances::free_balance(Bounties::bounty_account_id(0)), 50); @@ -518,7 +531,10 @@ fn assign_curator_works() { System::set_block_number(1); Balances::make_free_balance_be(&Treasury::account_id(), 101); - assert_noop!(Bounties::propose_curator(Origin::root(), 0, 4, 4), 
Error::::InvalidIndex); + assert_noop!( + Bounties::propose_curator(Origin::root(), 0, 4, 4), + Error::::InvalidIndex + ); assert_ok!(Bounties::propose_bounty(Origin::signed(0), 50, b"12345".to_vec())); @@ -527,39 +543,46 @@ fn assign_curator_works() { System::set_block_number(2); >::on_initialize(2); - assert_noop!(Bounties::propose_curator(Origin::root(), 0, 4, 50), Error::::InvalidFee); + assert_noop!( + Bounties::propose_curator(Origin::root(), 0, 4, 50), + Error::::InvalidFee + ); assert_ok!(Bounties::propose_curator(Origin::root(), 0, 4, 4)); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 4, - curator_deposit: 0, - value: 50, - bond: 85, - status: BountyStatus::CuratorProposed { - curator: 4, - }, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 4, + curator_deposit: 0, + value: 50, + bond: 85, + status: BountyStatus::CuratorProposed { curator: 4 }, + } + ); assert_noop!(Bounties::accept_curator(Origin::signed(1), 0), Error::::RequireCurator); - assert_noop!(Bounties::accept_curator(Origin::signed(4), 0), pallet_balances::Error::::InsufficientBalance); + assert_noop!( + Bounties::accept_curator(Origin::signed(4), 0), + pallet_balances::Error::::InsufficientBalance + ); Balances::make_free_balance_be(&4, 10); assert_ok!(Bounties::accept_curator(Origin::signed(4), 0)); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 4, - curator_deposit: 2, - value: 50, - bond: 85, - status: BountyStatus::Active { - curator: 4, - update_due: 22, - }, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 4, + curator_deposit: 2, + value: 50, + bond: 85, + status: BountyStatus::Active { curator: 4, update_due: 22 }, + } + ); assert_eq!(Balances::free_balance(&4), 8); assert_eq!(Balances::reserved_balance(&4), 2); @@ -584,14 +607,17 @@ fn unassign_curator_works() { assert_ok!(Bounties::unassign_curator(Origin::signed(4), 0)); - 
assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 4, - curator_deposit: 0, - value: 50, - bond: 85, - status: BountyStatus::Funded, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 4, + curator_deposit: 0, + value: 50, + bond: 85, + status: BountyStatus::Funded, + } + ); assert_ok!(Bounties::propose_curator(Origin::root(), 0, 4, 4)); @@ -601,21 +627,23 @@ fn unassign_curator_works() { assert_ok!(Bounties::unassign_curator(Origin::root(), 0)); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 4, - curator_deposit: 0, - value: 50, - bond: 85, - status: BountyStatus::Funded, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 4, + curator_deposit: 0, + value: 50, + bond: 85, + status: BountyStatus::Funded, + } + ); assert_eq!(Balances::free_balance(&4), 8); assert_eq!(Balances::reserved_balance(&4), 0); // slashed 2 }); } - #[test] fn award_and_claim_bounty_works() { new_test_ext().execute_with(|| { @@ -634,22 +662,24 @@ fn award_and_claim_bounty_works() { assert_eq!(Balances::free_balance(4), 8); // inital 10 - 2 deposit - assert_noop!(Bounties::award_bounty(Origin::signed(1), 0, 3), Error::::RequireCurator); + assert_noop!( + Bounties::award_bounty(Origin::signed(1), 0, 3), + Error::::RequireCurator + ); assert_ok!(Bounties::award_bounty(Origin::signed(4), 0, 3)); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 4, - curator_deposit: 2, - value: 50, - bond: 85, - status: BountyStatus::PendingPayout { - curator: 4, - beneficiary: 3, - unlock_at: 5 - }, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 4, + curator_deposit: 2, + value: 50, + bond: 85, + status: BountyStatus::PendingPayout { curator: 4, beneficiary: 3, unlock_at: 5 }, + } + ); assert_noop!(Bounties::claim_bounty(Origin::signed(1), 0), Error::::Premature); @@ -713,7 +743,6 @@ fn claim_handles_high_fee() { #[test] fn 
cancel_and_refund() { new_test_ext().execute_with(|| { - System::set_block_number(1); Balances::make_free_balance_be(&Treasury::account_id(), 101); @@ -727,14 +756,17 @@ fn cancel_and_refund() { assert_ok!(Balances::transfer(Origin::signed(0), Bounties::bounty_account_id(0), 10)); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 0, - curator_deposit: 0, - value: 50, - bond: 85, - status: BountyStatus::Funded, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 0, + curator_deposit: 0, + value: 50, + bond: 85, + status: BountyStatus::Funded, + } + ); assert_eq!(Balances::free_balance(Bounties::bounty_account_id(0)), 60); @@ -743,9 +775,7 @@ fn cancel_and_refund() { assert_ok!(Bounties::close_bounty(Origin::root(), 0)); assert_eq!(Treasury::pot(), 85); // - 25 + 10 - }); - } #[test] @@ -816,18 +846,20 @@ fn expire_and_unassign() { assert_ok!(Bounties::unassign_curator(Origin::signed(0), 0)); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 10, - curator_deposit: 0, - value: 50, - bond: 85, - status: BountyStatus::Funded, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 10, + curator_deposit: 0, + value: 50, + bond: 85, + status: BountyStatus::Funded, + } + ); assert_eq!(Balances::free_balance(1), 93); assert_eq!(Balances::reserved_balance(1), 0); // slashed - }); } @@ -841,7 +873,10 @@ fn extend_expiry() { assert_ok!(Bounties::approve_bounty(Origin::root(), 0)); - assert_noop!(Bounties::extend_bounty_expiry(Origin::signed(1), 0, Vec::new()), Error::::UnexpectedStatus); + assert_noop!( + Bounties::extend_bounty_expiry(Origin::signed(1), 0, Vec::new()), + Error::::UnexpectedStatus + ); System::set_block_number(2); >::on_initialize(2); @@ -855,28 +890,37 @@ fn extend_expiry() { System::set_block_number(10); >::on_initialize(10); - assert_noop!(Bounties::extend_bounty_expiry(Origin::signed(0), 0, Vec::new()), Error::::RequireCurator); + 
assert_noop!( + Bounties::extend_bounty_expiry(Origin::signed(0), 0, Vec::new()), + Error::::RequireCurator + ); assert_ok!(Bounties::extend_bounty_expiry(Origin::signed(4), 0, Vec::new())); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 10, - curator_deposit: 5, - value: 50, - bond: 85, - status: BountyStatus::Active { curator: 4, update_due: 30 }, - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 10, + curator_deposit: 5, + value: 50, + bond: 85, + status: BountyStatus::Active { curator: 4, update_due: 30 }, + } + ); assert_ok!(Bounties::extend_bounty_expiry(Origin::signed(4), 0, Vec::new())); - assert_eq!(Bounties::bounties(0).unwrap(), Bounty { - proposer: 0, - fee: 10, - curator_deposit: 5, - value: 50, - bond: 85, - status: BountyStatus::Active { curator: 4, update_due: 30 }, // still the same - }); + assert_eq!( + Bounties::bounties(0).unwrap(), + Bounty { + proposer: 0, + fee: 10, + curator_deposit: 5, + value: 50, + bond: 85, + status: BountyStatus::Active { curator: 4, update_due: 30 }, // still the same + } + ); System::set_block_number(25); >::on_initialize(25); @@ -893,10 +937,12 @@ fn extend_expiry() { fn genesis_funding_works() { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); let initial_funding = 100; - pallet_balances::GenesisConfig::{ + pallet_balances::GenesisConfig:: { // Total issuance will be 200 with treasury account initialized with 100. 
balances: vec![(0, 100), (Treasury::account_id(), initial_funding)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); GenesisBuild::::assimilate_storage(&pallet_treasury::GenesisConfig, &mut t).unwrap(); let mut t: sp_io::TestExternalities = t.into(); diff --git a/frame/bounties/src/weights.rs b/frame/bounties/src/weights.rs index 9b50d438923c2..354c14a41a83d 100644 --- a/frame/bounties/src/weights.rs +++ b/frame/bounties/src/weights.rs @@ -35,16 +35,18 @@ // --output=./frame/bounties/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_bounties. pub trait WeightInfo { - fn propose_bounty(d: u32, ) -> Weight; + fn propose_bounty(d: u32) -> Weight; fn approve_bounty() -> Weight; fn propose_curator() -> Weight; fn unassign_curator() -> Weight; @@ -54,13 +56,13 @@ pub trait WeightInfo { fn close_bounty_proposed() -> Weight; fn close_bounty_active() -> Weight; fn extend_bounty_expiry() -> Weight; - fn spend_funds(b: u32, ) -> Weight; + fn spend_funds(b: u32) -> Weight; } /// Weights for pallet_bounties using the Substrate node and recommended hardware. 
pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn propose_bounty(d: u32, ) -> Weight { + fn propose_bounty(d: u32) -> Weight { (44_351_000 as Weight) // Standard Error: 0 .saturating_add((1_000 as Weight).saturating_mul(d as Weight)) @@ -112,7 +114,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn spend_funds(b: u32, ) -> Weight { + fn spend_funds(b: u32) -> Weight { (351_000 as Weight) // Standard Error: 13_000 .saturating_add((58_724_000 as Weight).saturating_mul(b as Weight)) @@ -125,7 +127,7 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn propose_bounty(d: u32, ) -> Weight { + fn propose_bounty(d: u32) -> Weight { (44_351_000 as Weight) // Standard Error: 0 .saturating_add((1_000 as Weight).saturating_mul(d as Weight)) @@ -177,7 +179,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn spend_funds(b: u32, ) -> Weight { + fn spend_funds(b: u32) -> Weight { (351_000 as Weight) // Standard Error: 13_000 .saturating_add((58_724_000 as Weight).saturating_mul(b as Weight)) diff --git a/frame/collective/src/benchmarking.rs b/frame/collective/src/benchmarking.rs index 7faaa31dc8012..2862c830959c9 100644 --- a/frame/collective/src/benchmarking.rs +++ b/frame/collective/src/benchmarking.rs @@ -19,19 +19,15 @@ use super::*; -use frame_system::RawOrigin as SystemOrigin; use frame_benchmarking::{ - benchmarks_instance, - account, - whitelisted_caller, - impl_benchmark_test_suite, + account, benchmarks_instance, impl_benchmark_test_suite, whitelisted_caller, }; +use frame_system::RawOrigin as SystemOrigin; use sp_runtime::traits::Bounded; use sp_std::mem::size_of; -use frame_system::Call as SystemCall; -use frame_system::Pallet as System; use crate::Module as Collective; 
+use frame_system::{Call as SystemCall, Pallet as System}; const SEED: u32 = 0; @@ -639,8 +635,4 @@ benchmarks_instance! { } } -impl_benchmark_test_suite!( - Collective, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Collective, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index 8787e7d732f53..a15297517da89 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -42,23 +42,25 @@ #![cfg_attr(not(feature = "std"), no_std)] #![recursion_limit = "128"] -use sp_std::{prelude::*, result}; use sp_core::u32_trait::Value as U32; use sp_io::storage; -use sp_runtime::{RuntimeDebug, traits::Hash}; +use sp_runtime::{traits::Hash, RuntimeDebug}; +use sp_std::{prelude::*, result}; use frame_support::{ - decl_error, decl_event, decl_module, decl_storage, ensure, BoundedVec, codec::{Decode, Encode}, - scale_info::TypeInfo, + decl_error, decl_event, decl_module, decl_storage, dispatch::{ DispatchError, DispatchResult, DispatchResultWithPostInfo, Dispatchable, Parameter, PostDispatchInfo, }, - traits::{ChangeMembers, EnsureOrigin, Get, InitializeMembers, GetBacking, Backing}, - weights::{DispatchClass, GetDispatchInfo, Weight, Pays}, + ensure, + scale_info::TypeInfo, + traits::{Backing, ChangeMembers, EnsureOrigin, Get, GetBacking, InitializeMembers}, + weights::{DispatchClass, GetDispatchInfo, Pays, Weight}, + BoundedVec, }; -use frame_system::{self as system, ensure_signed, ensure_root}; +use frame_system::{self as system, ensure_root, ensure_signed}; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; @@ -121,13 +123,13 @@ impl DefaultVote for MoreThanMajorityThenPrimeDefaultVote { } } -pub trait Config: frame_system::Config { +pub trait Config: frame_system::Config { /// The outer origin type. type Origin: From>; /// The outer call dispatch type. 
type Proposal: Parameter - + Dispatchable>::Origin, PostInfo=PostDispatchInfo> + + Dispatchable>::Origin, PostInfo = PostDispatchInfo> + From> + GetDispatchInfo; @@ -175,7 +177,7 @@ impl GetBacking for RawOrigin { } /// Origin for the collective module. -pub type Origin = RawOrigin<::AccountId, I>; +pub type Origin = RawOrigin<::AccountId, I>; #[derive(PartialEq, Eq, Clone, Encode, Decode, RuntimeDebug, TypeInfo)] /// Info for keeping track of a motion being voted on. @@ -288,7 +290,6 @@ fn get_result_weight(result: DispatchResultWithPostInfo) -> Option { } } - // Note that councillor operations are assigned to the operational class. decl_module! { pub struct Module, I: Instance=DefaultInstance> for enum Call where origin: ::Origin { @@ -735,12 +736,12 @@ impl, I: Instance> Module { fn validate_and_get_proposal( hash: &T::Hash, length_bound: u32, - weight_bound: Weight + weight_bound: Weight, ) -> Result<(>::Proposal, usize), DispatchError> { let key = ProposalOf::::hashed_key_for(hash); // read the length of the proposal storage entry directly - let proposal_len = storage::read(&key, &mut [0; 0], 0) - .ok_or(Error::::ProposalMissing)?; + let proposal_len = + storage::read(&key, &mut [0; 0], 0).ok_or(Error::::ProposalMissing)?; ensure!(proposal_len <= length_bound, Error::::WrongProposalLength); let proposal = ProposalOf::::get(hash).ok_or(Error::::ProposalMissing)?; let proposal_weight = proposal.get_dispatch_info().weight; @@ -773,9 +774,10 @@ impl, I: Instance> Module { let dispatch_weight = proposal.get_dispatch_info().weight; let origin = RawOrigin::Members(voting.threshold, seats).into(); let result = proposal.dispatch(origin); - Self::deposit_event( - RawEvent::Executed(proposal_hash, result.map(|_| ()).map_err(|e| e.error)) - ); + Self::deposit_event(RawEvent::Executed( + proposal_hash, + result.map(|_| ()).map_err(|e| e.error), + )); // default to the dispatch info weight for safety let proposal_weight = 
get_result_weight(result).unwrap_or(dispatch_weight); // P1 @@ -837,17 +839,21 @@ impl, I: Instance> ChangeMembers for Module { let mut outgoing = outgoing.to_vec(); outgoing.sort(); for h in Self::proposals().into_iter() { - >::mutate(h, |v| + >::mutate(h, |v| { if let Some(mut votes) = v.take() { - votes.ayes = votes.ayes.into_iter() + votes.ayes = votes + .ayes + .into_iter() .filter(|i| outgoing.binary_search(i).is_err()) .collect(); - votes.nays = votes.nays.into_iter() + votes.nays = votes + .nays + .into_iter() .filter(|i| outgoing.binary_search(i).is_err()) .collect(); *v = Some(votes); } - ); + }); } Members::::put(new); Prime::::kill(); @@ -873,10 +879,12 @@ impl, I: Instance> InitializeMembers for Module /// Ensure that the origin `o` represents at least `n` members. Returns `Ok` or an `Err` /// otherwise. -pub fn ensure_members(o: OuterOrigin, n: MemberCount) - -> result::Result +pub fn ensure_members( + o: OuterOrigin, + n: MemberCount, +) -> result::Result where - OuterOrigin: Into, OuterOrigin>> + OuterOrigin: Into, OuterOrigin>>, { match o.into() { Ok(RawOrigin::Members(x, _)) if x >= n => Ok(n), @@ -884,12 +892,15 @@ where } } -pub struct EnsureMember(sp_std::marker::PhantomData<(AccountId, I)>); +pub struct EnsureMember( + sp_std::marker::PhantomData<(AccountId, I)>, +); impl< - O: Into, O>> + From>, - AccountId: Default, - I, -> EnsureOrigin for EnsureMember { + O: Into, O>> + From>, + AccountId: Default, + I, + > EnsureOrigin for EnsureMember +{ type Success = AccountId; fn try_origin(o: O) -> Result { o.into().and_then(|o| match o { @@ -904,13 +915,16 @@ impl< } } -pub struct EnsureMembers(sp_std::marker::PhantomData<(N, AccountId, I)>); +pub struct EnsureMembers( + sp_std::marker::PhantomData<(N, AccountId, I)>, +); impl< - O: Into, O>> + From>, - N: U32, - AccountId, - I, -> EnsureOrigin for EnsureMembers { + O: Into, O>> + From>, + N: U32, + AccountId, + I, + > EnsureOrigin for EnsureMembers +{ type Success = (MemberCount, MemberCount); fn 
try_origin(o: O) -> Result { o.into().and_then(|o| match o { @@ -925,16 +939,17 @@ impl< } } -pub struct EnsureProportionMoreThan( - sp_std::marker::PhantomData<(N, D, AccountId, I)> +pub struct EnsureProportionMoreThan( + sp_std::marker::PhantomData<(N, D, AccountId, I)>, ); impl< - O: Into, O>> + From>, - N: U32, - D: U32, - AccountId, - I, -> EnsureOrigin for EnsureProportionMoreThan { + O: Into, O>> + From>, + N: U32, + D: U32, + AccountId, + I, + > EnsureOrigin for EnsureProportionMoreThan +{ type Success = (); fn try_origin(o: O) -> Result { o.into().and_then(|o| match o { @@ -949,16 +964,17 @@ impl< } } -pub struct EnsureProportionAtLeast( - sp_std::marker::PhantomData<(N, D, AccountId, I)> +pub struct EnsureProportionAtLeast( + sp_std::marker::PhantomData<(N, D, AccountId, I)>, ); impl< - O: Into, O>> + From>, - N: U32, - D: U32, - AccountId, - I, -> EnsureOrigin for EnsureProportionAtLeast { + O: Into, O>> + From>, + N: U32, + D: U32, + AccountId, + I, + > EnsureOrigin for EnsureProportionAtLeast +{ type Success = (); fn try_origin(o: O) -> Result { o.into().and_then(|o| match o { @@ -976,15 +992,16 @@ impl< #[cfg(test)] mod tests { use super::*; - use frame_support::{Hashable, assert_ok, assert_noop, parameter_types}; + use crate as collective; + use frame_support::{assert_noop, assert_ok, parameter_types, Hashable}; use frame_system::{self as system, EventRecord, Phase}; use hex_literal::hex; use sp_core::H256; use sp_runtime::{ - traits::{BlakeTwo256, IdentityLookup}, testing::Header, + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, BuildStorage, }; - use crate as collective; parameter_types! 
{ pub const BlockHashCount: u64 = 250; @@ -1077,7 +1094,10 @@ mod tests { phantom: Default::default(), }, default_collective: Default::default(), - }.build_storage().unwrap().into(); + } + .build_storage() + .unwrap() + .into(); ext.execute_with(|| System::set_block_number(1)); ext } @@ -1102,65 +1122,114 @@ mod tests { let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 0, true)); System::set_block_number(3); assert_noop!( - Collective::close(Origin::signed(4), hash.clone(), 0, proposal_weight, proposal_len), + Collective::close( + Origin::signed(4), + hash.clone(), + 0, + proposal_weight, + proposal_len + ), Error::::TooEarly ); System::set_block_number(4); - assert_ok!(Collective::close(Origin::signed(4), hash.clone(), 0, proposal_weight, proposal_len)); - - let record = |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; - assert_eq!(System::events(), vec![ - record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 3))), - record(Event::Collective(RawEvent::Voted(1, hash.clone(), true, 1, 0))), - record(Event::Collective(RawEvent::Voted(2, hash.clone(), true, 2, 0))), - record(Event::Collective(RawEvent::Closed(hash.clone(), 2, 1))), - record(Event::Collective(RawEvent::Disapproved(hash.clone()))) - ]); + assert_ok!(Collective::close( + Origin::signed(4), + hash.clone(), + 0, + proposal_weight, + proposal_len + )); + + let record = + |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; + assert_eq!( + System::events(), + vec![ + record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 3))), + 
record(Event::Collective(RawEvent::Voted(1, hash.clone(), true, 1, 0))), + record(Event::Collective(RawEvent::Voted(2, hash.clone(), true, 2, 0))), + record(Event::Collective(RawEvent::Closed(hash.clone(), 2, 1))), + record(Event::Collective(RawEvent::Disapproved(hash.clone()))) + ] + ); }); } #[test] fn proposal_weight_limit_works_on_approve() { new_test_ext().execute_with(|| { - let proposal = Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + let proposal = + Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); // Set 1 as prime voter Prime::::set(Some(1)); - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); // With 1's prime vote, this should pass System::set_block_number(4); assert_noop!( - Collective::close(Origin::signed(4), hash.clone(), 0, proposal_weight - 100, proposal_len), + Collective::close( + Origin::signed(4), + hash.clone(), + 0, + proposal_weight - 100, + proposal_len + ), Error::::WrongProposalWeight ); - assert_ok!(Collective::close(Origin::signed(4), hash.clone(), 0, proposal_weight, proposal_len)); + assert_ok!(Collective::close( + Origin::signed(4), + hash.clone(), + 0, + proposal_weight, + proposal_len + )); }) } #[test] fn proposal_weight_limit_ignored_on_disapprove() { new_test_ext().execute_with(|| { - let proposal = Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + let proposal = + Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let 
proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); // No votes, this proposal wont pass System::set_block_number(4); - assert_ok!( - Collective::close(Origin::signed(4), hash.clone(), 0, proposal_weight - 100, proposal_len) - ); + assert_ok!(Collective::close( + Origin::signed(4), + hash.clone(), + 0, + proposal_weight - 100, + proposal_len + )); }) } @@ -1171,23 +1240,43 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); - assert_ok!(Collective::set_members(Origin::root(), vec![1, 2, 3], Some(3), MaxMembers::get())); - - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::set_members( + Origin::root(), + vec![1, 2, 3], + Some(3), + MaxMembers::get() + )); + + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 0, true)); System::set_block_number(4); - assert_ok!(Collective::close(Origin::signed(4), hash.clone(), 0, proposal_weight, proposal_len)); - - let record = |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; - assert_eq!(System::events(), vec![ - record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 3))), - record(Event::Collective(RawEvent::Voted(1, hash.clone(), true, 1, 0))), - record(Event::Collective(RawEvent::Voted(2, hash.clone(), true, 2, 0))), - record(Event::Collective(RawEvent::Closed(hash.clone(), 2, 1))), - record(Event::Collective(RawEvent::Disapproved(hash.clone()))) 
- ]); + assert_ok!(Collective::close( + Origin::signed(4), + hash.clone(), + 0, + proposal_weight, + proposal_len + )); + + let record = + |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; + assert_eq!( + System::events(), + vec![ + record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 3))), + record(Event::Collective(RawEvent::Voted(1, hash.clone(), true, 1, 0))), + record(Event::Collective(RawEvent::Voted(2, hash.clone(), true, 2, 0))), + record(Event::Collective(RawEvent::Closed(hash.clone(), 2, 1))), + record(Event::Collective(RawEvent::Disapproved(hash.clone()))) + ] + ); }); } @@ -1198,24 +1287,47 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); - assert_ok!(Collective::set_members(Origin::root(), vec![1, 2, 3], Some(1), MaxMembers::get())); - - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::set_members( + Origin::root(), + vec![1, 2, 3], + Some(1), + MaxMembers::get() + )); + + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 0, true)); System::set_block_number(4); - assert_ok!(Collective::close(Origin::signed(4), hash.clone(), 0, proposal_weight, proposal_len)); - - let record = |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; - assert_eq!(System::events(), vec![ - record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 3))), - record(Event::Collective(RawEvent::Voted(1, hash.clone(), true, 1, 0))), - record(Event::Collective(RawEvent::Voted(2, hash.clone(), true, 2, 0))), - record(Event::Collective(RawEvent::Closed(hash.clone(), 3, 0))), - 
record(Event::Collective(RawEvent::Approved(hash.clone()))), - record(Event::Collective(RawEvent::Executed(hash.clone(), Err(DispatchError::BadOrigin)))) - ]); + assert_ok!(Collective::close( + Origin::signed(4), + hash.clone(), + 0, + proposal_weight, + proposal_len + )); + + let record = + |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; + assert_eq!( + System::events(), + vec![ + record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 3))), + record(Event::Collective(RawEvent::Voted(1, hash.clone(), true, 1, 0))), + record(Event::Collective(RawEvent::Voted(2, hash.clone(), true, 2, 0))), + record(Event::Collective(RawEvent::Closed(hash.clone(), 3, 0))), + record(Event::Collective(RawEvent::Approved(hash.clone()))), + record(Event::Collective(RawEvent::Executed( + hash.clone(), + Err(DispatchError::BadOrigin) + ))) + ] + ); }); } @@ -1226,26 +1338,49 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); - assert_ok!(CollectiveMajority::set_members(Origin::root(), vec![1, 2, 3, 4, 5], Some(5), MaxMembers::get())); - - assert_ok!(CollectiveMajority::propose(Origin::signed(1), 5, Box::new(proposal.clone()), proposal_len)); + assert_ok!(CollectiveMajority::set_members( + Origin::root(), + vec![1, 2, 3, 4, 5], + Some(5), + MaxMembers::get() + )); + + assert_ok!(CollectiveMajority::propose( + Origin::signed(1), + 5, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(CollectiveMajority::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(CollectiveMajority::vote(Origin::signed(2), hash.clone(), 0, true)); assert_ok!(CollectiveMajority::vote(Origin::signed(3), hash.clone(), 0, true)); System::set_block_number(4); - assert_ok!(CollectiveMajority::close(Origin::signed(4), hash.clone(), 0, proposal_weight, proposal_len)); - - let record = |event| EventRecord { phase: Phase::Initialization, 
event, topics: vec![] }; - assert_eq!(System::events(), vec![ - record(Event::CollectiveMajority(RawEvent::Proposed(1, 0, hash.clone(), 5))), - record(Event::CollectiveMajority(RawEvent::Voted(1, hash.clone(), true, 1, 0))), - record(Event::CollectiveMajority(RawEvent::Voted(2, hash.clone(), true, 2, 0))), - record(Event::CollectiveMajority(RawEvent::Voted(3, hash.clone(), true, 3, 0))), - record(Event::CollectiveMajority(RawEvent::Closed(hash.clone(), 5, 0))), - record(Event::CollectiveMajority(RawEvent::Approved(hash.clone()))), - record(Event::CollectiveMajority(RawEvent::Executed(hash.clone(), Err(DispatchError::BadOrigin)))) - ]); + assert_ok!(CollectiveMajority::close( + Origin::signed(4), + hash.clone(), + 0, + proposal_weight, + proposal_len + )); + + let record = + |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; + assert_eq!( + System::events(), + vec![ + record(Event::CollectiveMajority(RawEvent::Proposed(1, 0, hash.clone(), 5))), + record(Event::CollectiveMajority(RawEvent::Voted(1, hash.clone(), true, 1, 0))), + record(Event::CollectiveMajority(RawEvent::Voted(2, hash.clone(), true, 2, 0))), + record(Event::CollectiveMajority(RawEvent::Voted(3, hash.clone(), true, 3, 0))), + record(Event::CollectiveMajority(RawEvent::Closed(hash.clone(), 5, 0))), + record(Event::CollectiveMajority(RawEvent::Approved(hash.clone()))), + record(Event::CollectiveMajority(RawEvent::Executed( + hash.clone(), + Err(DispatchError::BadOrigin) + ))) + ] + ); }); } @@ -1256,7 +1391,12 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash = BlakeTwo256::hash_of(&proposal); let end = 4; - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(Collective::vote(Origin::signed(2), 
hash.clone(), 0, true)); assert_eq!( @@ -1272,7 +1412,12 @@ mod tests { let proposal = make_proposal(69); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash = BlakeTwo256::hash_of(&proposal); - assert_ok!(Collective::propose(Origin::signed(2), 2, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(2), + 2, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 1, true)); assert_ok!(Collective::vote(Origin::signed(3), hash.clone(), 1, false)); assert_eq!( @@ -1294,14 +1439,24 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash = BlakeTwo256::hash_of(&proposal); let end = 4; - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 0, true)); assert_eq!( Collective::voting(&hash), Some(Votes { index: 0, threshold: 3, ayes: vec![1, 2], nays: vec![], end }) ); - assert_ok!(Collective::set_members(Origin::root(), vec![2, 3, 4], None, MaxMembers::get())); + assert_ok!(Collective::set_members( + Origin::root(), + vec![2, 3, 4], + None, + MaxMembers::get() + )); assert_eq!( Collective::voting(&hash), Some(Votes { index: 0, threshold: 3, ayes: vec![2], nays: vec![], end }) @@ -1310,14 +1465,24 @@ mod tests { let proposal = make_proposal(69); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash = BlakeTwo256::hash_of(&proposal); - assert_ok!(Collective::propose(Origin::signed(2), 2, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(2), + 2, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 1, true)); 
assert_ok!(Collective::vote(Origin::signed(3), hash.clone(), 1, false)); assert_eq!( Collective::voting(&hash), Some(Votes { index: 1, threshold: 2, ayes: vec![2], nays: vec![3], end }) ); - assert_ok!(Collective::set_members(Origin::root(), vec![2, 4], None, MaxMembers::get())); + assert_ok!(Collective::set_members( + Origin::root(), + vec![2, 4], + None, + MaxMembers::get() + )); assert_eq!( Collective::voting(&hash), Some(Votes { index: 1, threshold: 2, ayes: vec![2], nays: vec![], end }) @@ -1332,7 +1497,12 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash = proposal.blake2_256().into(); let end = 4; - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_eq!(*Collective::proposals(), vec![hash]); assert_eq!(Collective::proposal_of(&hash), Some(proposal)); assert_eq!( @@ -1340,25 +1510,27 @@ mod tests { Some(Votes { index: 0, threshold: 3, ayes: vec![], nays: vec![], end }) ); - assert_eq!(System::events(), vec![ - EventRecord { + assert_eq!( + System::events(), + vec![EventRecord { phase: Phase::Initialization, event: Event::Collective(RawEvent::Proposed( 1, 0, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), + hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"] + .into(), 3, )), topics: vec![], - } - ]); + }] + ); }); } #[test] fn limit_active_proposals() { new_test_ext().execute_with(|| { - for i in 0 .. 
MaxProposals::get() { + for i in 0..MaxProposals::get() { let proposal = make_proposal(i as u64); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); assert_ok!(Collective::propose( @@ -1380,14 +1552,24 @@ mod tests { #[test] fn correct_validate_and_get_proposal() { new_test_ext().execute_with(|| { - let proposal = Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + let proposal = + Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); let length = proposal.encode().len() as u32; - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), length)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + length + )); let hash = BlakeTwo256::hash_of(&proposal); let weight = proposal.get_dispatch_info().weight; assert_noop!( - Collective::validate_and_get_proposal(&BlakeTwo256::hash_of(&vec![3; 4]), length, weight), + Collective::validate_and_get_proposal( + &BlakeTwo256::hash_of(&vec![3; 4]), + length, + weight + ), Error::::ProposalMissing ); assert_noop!( @@ -1412,7 +1594,12 @@ mod tests { let proposal = make_proposal(42); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); assert_noop!( - Collective::propose(Origin::signed(42), 3, Box::new(proposal.clone()), proposal_len), + Collective::propose( + Origin::signed(42), + 3, + Box::new(proposal.clone()), + proposal_len + ), Error::::NotMember ); }); @@ -1424,7 +1611,12 @@ mod tests { let proposal = make_proposal(42); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash: H256 = proposal.blake2_256().into(); - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_noop!( Collective::vote(Origin::signed(42), hash.clone(), 0, true), Error::::NotMember, @@ -1439,7 +1631,12 @@ mod tests { 
let proposal = make_proposal(42); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash: H256 = proposal.blake2_256().into(); - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_noop!( Collective::vote(Origin::signed(2), hash.clone(), 1, true), Error::::WrongIndex, @@ -1454,7 +1651,12 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash: H256 = proposal.blake2_256().into(); let end = 4; - assert_ok!(Collective::propose(Origin::signed(1), 2, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 2, + Box::new(proposal.clone()), + proposal_len + )); // Initially there a no votes when the motion is proposed. assert_eq!( Collective::voting(&hash), @@ -1483,41 +1685,52 @@ mod tests { Error::::DuplicateVote, ); - assert_eq!(System::events(), vec![ - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Proposed( - 1, - 0, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - 2, - )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Voted( - 1, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"] + assert_eq!( + System::events(), + vec![ + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Proposed( + 1, + 0, + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] .into(), - true, - 1, - 0, - )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Voted( - 1, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - false, - 0, - 1, - )), - topics: vec![], - } - ]); + 2, + )), + topics: vec![], + }, + EventRecord { + phase: 
Phase::Initialization, + event: Event::Collective(RawEvent::Voted( + 1, + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + true, + 1, + 0, + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Voted( + 1, + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + false, + 0, + 1, + )), + topics: vec![], + } + ] + ); }); } @@ -1528,62 +1741,40 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash: H256 = proposal.blake2_256().into(); let end = 4; - assert_ok!( - Collective::propose( - Origin::signed(1), - 2, - Box::new(proposal.clone()), - proposal_len, - ) - ); + assert_ok!(Collective::propose( + Origin::signed(1), + 2, + Box::new(proposal.clone()), + proposal_len, + )); assert_eq!( Collective::voting(&hash), Some(Votes { index: 0, threshold: 2, ayes: vec![], nays: vec![], end }) ); // For the motion, acc 2's first vote, expecting Ok with Pays::No. - let vote_rval: DispatchResultWithPostInfo = Collective::vote( - Origin::signed(2), - hash.clone(), - 0, - true, - ); + let vote_rval: DispatchResultWithPostInfo = + Collective::vote(Origin::signed(2), hash.clone(), 0, true); assert_eq!(vote_rval.unwrap().pays_fee, Pays::No); // Duplicate vote, expecting error with Pays::Yes. - let vote_rval: DispatchResultWithPostInfo = Collective::vote( - Origin::signed(2), - hash.clone(), - 0, - true, - ); + let vote_rval: DispatchResultWithPostInfo = + Collective::vote(Origin::signed(2), hash.clone(), 0, true); assert_eq!(vote_rval.unwrap_err().post_info.pays_fee, Pays::Yes); // Modifying vote, expecting ok with Pays::Yes. 
- let vote_rval: DispatchResultWithPostInfo = Collective::vote( - Origin::signed(2), - hash.clone(), - 0, - false, - ); + let vote_rval: DispatchResultWithPostInfo = + Collective::vote(Origin::signed(2), hash.clone(), 0, false); assert_eq!(vote_rval.unwrap().pays_fee, Pays::Yes); // For the motion, acc 3's first vote, expecting Ok with Pays::No. - let vote_rval: DispatchResultWithPostInfo = Collective::vote( - Origin::signed(3), - hash.clone(), - 0, - true, - ); + let vote_rval: DispatchResultWithPostInfo = + Collective::vote(Origin::signed(3), hash.clone(), 0, true); assert_eq!(vote_rval.unwrap().pays_fee, Pays::No); // acc 3 modify the vote, expecting Ok with Pays::Yes. - let vote_rval: DispatchResultWithPostInfo = Collective::vote( - Origin::signed(3), - hash.clone(), - 0, - false, - ); + let vote_rval: DispatchResultWithPostInfo = + Collective::vote(Origin::signed(3), hash.clone(), 0, false); assert_eq!(vote_rval.unwrap().pays_fee, Pays::Yes); // Test close() Extrincis | Check DispatchResultWithPostInfo with Pay Info @@ -1618,11 +1809,27 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash: H256 = proposal.blake2_256().into(); - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 0, false)); - assert_ok!(Collective::close(Origin::signed(2), hash.clone(), 0, proposal_weight, proposal_len)); + assert_ok!(Collective::close( + Origin::signed(2), + hash.clone(), + 0, + proposal_weight, + proposal_len + )); assert_eq!(*Collective::proposals(), vec![]); - assert_ok!(Collective::propose(Origin::signed(1), 2, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 2, + Box::new(proposal.clone()), + proposal_len + )); 
assert_eq!(*Collective::proposals(), vec![hash]); }); } @@ -1634,60 +1841,90 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash: H256 = proposal.blake2_256().into(); - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 0, false)); - assert_ok!(Collective::close(Origin::signed(2), hash.clone(), 0, proposal_weight, proposal_len)); + assert_ok!(Collective::close( + Origin::signed(2), + hash.clone(), + 0, + proposal_weight, + proposal_len + )); - assert_eq!(System::events(), vec![ - EventRecord { - phase: Phase::Initialization, - event: Event::Collective( - RawEvent::Proposed( + assert_eq!( + System::events(), + vec![ + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Proposed( 1, 0, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), 3, )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Voted( - 1, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - true, - 1, - 0, - )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Voted( - 2, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - false, - 1, - 1, - )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Closed( - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), 1, 1, - )), - topics: vec![], - }, - EventRecord { 
- phase: Phase::Initialization, - event: Event::Collective(RawEvent::Disapproved( - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - )), - topics: vec![], - } - ]); + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Voted( + 1, + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + true, + 1, + 0, + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Voted( + 2, + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + false, + 1, + 1, + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Closed( + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + 1, + 1, + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Disapproved( + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + )), + topics: vec![], + } + ] + ); }); } @@ -1698,85 +1935,134 @@ mod tests { let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash: H256 = proposal.blake2_256().into(); - assert_ok!(Collective::propose(Origin::signed(1), 2, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 2, + Box::new(proposal.clone()), + proposal_len + )); assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 0, true)); - assert_ok!(Collective::close(Origin::signed(2), hash.clone(), 0, proposal_weight, proposal_len)); + assert_ok!(Collective::close( + Origin::signed(2), + hash.clone(), + 0, + proposal_weight, + proposal_len + )); - assert_eq!(System::events(), vec![ - EventRecord { - phase: 
Phase::Initialization, - event: Event::Collective(RawEvent::Proposed( - 1, - 0, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - 2, - )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Voted( - 1, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - true, - 1, - 0, - )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Voted( - 2, - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - true, - 2, - 0, - )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Closed( - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), 2, 0, - )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Approved( - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - )), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Collective(RawEvent::Executed( - hex!["68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35"].into(), - Err(DispatchError::BadOrigin), - )), - topics: vec![], - } - ]); + assert_eq!( + System::events(), + vec![ + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Proposed( + 1, + 0, + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + 2, + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Voted( + 1, + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + true, + 1, + 0, + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Voted( + 2, + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + 
] + .into(), + true, + 2, + 0, + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Closed( + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + 2, + 0, + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Approved( + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Collective(RawEvent::Executed( + hex![ + "68eea8f20b542ec656c6ac2d10435ae3bd1729efc34d1354ab85af840aad2d35" + ] + .into(), + Err(DispatchError::BadOrigin), + )), + topics: vec![], + } + ] + ); }); } #[test] fn motion_with_no_votes_closes_with_disapproval() { new_test_ext().execute_with(|| { - let record = |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; + let record = + |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; let proposal = make_proposal(42); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash: H256 = proposal.blake2_256().into(); - assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), proposal_len)); - assert_eq!(System::events()[0], record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 3)))); + assert_ok!(Collective::propose( + Origin::signed(1), + 3, + Box::new(proposal.clone()), + proposal_len + )); + assert_eq!( + System::events()[0], + record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 3))) + ); // Closing the motion too early is not possible because it has neither // an approving or disapproving simple majority due to the lack of votes. 
assert_noop!( - Collective::close(Origin::signed(2), hash.clone(), 0, proposal_weight, proposal_len), + Collective::close( + Origin::signed(2), + hash.clone(), + 0, + proposal_weight, + proposal_len + ), Error::::TooEarly ); @@ -1784,13 +2070,24 @@ mod tests { let closing_block = System::block_number() + MotionDuration::get(); System::set_block_number(closing_block); // we can successfully close the motion. - assert_ok!(Collective::close(Origin::signed(2), hash.clone(), 0, proposal_weight, proposal_len)); + assert_ok!(Collective::close( + Origin::signed(2), + hash.clone(), + 0, + proposal_weight, + proposal_len + )); // Events show that the close ended in a disapproval. - assert_eq!(System::events()[1], record(Event::Collective(RawEvent::Closed(hash.clone(), 0, 3)))); - assert_eq!(System::events()[2], record(Event::Collective(RawEvent::Disapproved(hash.clone())))); + assert_eq!( + System::events()[1], + record(Event::Collective(RawEvent::Closed(hash.clone(), 0, 3))) + ); + assert_eq!( + System::events()[2], + record(Event::Collective(RawEvent::Disapproved(hash.clone()))) + ); }) - } #[test] @@ -1802,7 +2099,12 @@ mod tests { let proposal = make_proposal(42); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash: H256 = proposal.blake2_256().into(); - assert_ok!(Collective::propose(Origin::signed(1), 2, Box::new(proposal.clone()), proposal_len)); + assert_ok!(Collective::propose( + Origin::signed(1), + 2, + Box::new(proposal.clone()), + proposal_len + )); // First we make the proposal succeed assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 0, true)); @@ -1829,19 +2131,28 @@ mod tests { let proposal = make_proposal(42); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let hash: H256 = proposal.blake2_256().into(); - assert_ok!(Collective::propose(Origin::signed(1), 2, Box::new(proposal.clone()), proposal_len)); + 
assert_ok!(Collective::propose( + Origin::signed(1), + 2, + Box::new(proposal.clone()), + proposal_len + )); // Proposal would normally succeed assert_ok!(Collective::vote(Origin::signed(1), hash.clone(), 0, true)); assert_ok!(Collective::vote(Origin::signed(2), hash.clone(), 0, true)); // But Root can disapprove and remove it anyway assert_ok!(Collective::disapprove_proposal(Origin::root(), hash.clone())); - let record = |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; - assert_eq!(System::events(), vec![ - record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 2))), - record(Event::Collective(RawEvent::Voted(1, hash.clone(), true, 1, 0))), - record(Event::Collective(RawEvent::Voted(2, hash.clone(), true, 2, 0))), - record(Event::Collective(RawEvent::Disapproved(hash.clone()))), - ]); + let record = + |event| EventRecord { phase: Phase::Initialization, event, topics: vec![] }; + assert_eq!( + System::events(), + vec![ + record(Event::Collective(RawEvent::Proposed(1, 0, hash.clone(), 2))), + record(Event::Collective(RawEvent::Voted(1, hash.clone(), true, 1, 0))), + record(Event::Collective(RawEvent::Voted(2, hash.clone(), true, 2, 0))), + record(Event::Collective(RawEvent::Disapproved(hash.clone()))), + ] + ); }) } @@ -1851,6 +2162,8 @@ mod tests { collective::GenesisConfig:: { members: vec![1, 2, 3, 1], phantom: Default::default(), - }.build_storage().unwrap(); + } + .build_storage() + .unwrap(); } } diff --git a/frame/collective/src/weights.rs b/frame/collective/src/weights.rs index 2bbec4d7cc3d8..0d2db29060b2b 100644 --- a/frame/collective/src/weights.rs +++ b/frame/collective/src/weights.rs @@ -35,31 +35,33 @@ // --output=./frame/collective/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; 
use sp_std::marker::PhantomData; /// Weight functions needed for pallet_collective. pub trait WeightInfo { - fn set_members(m: u32, n: u32, p: u32, ) -> Weight; - fn execute(b: u32, m: u32, ) -> Weight; - fn propose_execute(b: u32, m: u32, ) -> Weight; - fn propose_proposed(b: u32, m: u32, p: u32, ) -> Weight; - fn vote(m: u32, ) -> Weight; - fn close_early_disapproved(m: u32, p: u32, ) -> Weight; - fn close_early_approved(b: u32, m: u32, p: u32, ) -> Weight; - fn close_disapproved(m: u32, p: u32, ) -> Weight; - fn close_approved(b: u32, m: u32, p: u32, ) -> Weight; - fn disapprove_proposal(p: u32, ) -> Weight; + fn set_members(m: u32, n: u32, p: u32) -> Weight; + fn execute(b: u32, m: u32) -> Weight; + fn propose_execute(b: u32, m: u32) -> Weight; + fn propose_proposed(b: u32, m: u32, p: u32) -> Weight; + fn vote(m: u32) -> Weight; + fn close_early_disapproved(m: u32, p: u32) -> Weight; + fn close_early_approved(b: u32, m: u32, p: u32) -> Weight; + fn close_disapproved(m: u32, p: u32) -> Weight; + fn close_approved(b: u32, m: u32, p: u32) -> Weight; + fn disapprove_proposal(p: u32) -> Weight; } /// Weights for pallet_collective using the Substrate node and recommended hardware. 
pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn set_members(m: u32, n: u32, p: u32, ) -> Weight { + fn set_members(m: u32, n: u32, p: u32) -> Weight { (0 as Weight) // Standard Error: 5_000 .saturating_add((14_534_000 as Weight).saturating_mul(m as Weight)) @@ -72,7 +74,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(2 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(p as Weight))) } - fn execute(b: u32, m: u32, ) -> Weight { + fn execute(b: u32, m: u32) -> Weight { (23_177_000 as Weight) // Standard Error: 0 .saturating_add((3_000 as Weight).saturating_mul(b as Weight)) @@ -80,7 +82,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add((89_000 as Weight).saturating_mul(m as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) } - fn propose_execute(b: u32, m: u32, ) -> Weight { + fn propose_execute(b: u32, m: u32) -> Weight { (28_063_000 as Weight) // Standard Error: 0 .saturating_add((3_000 as Weight).saturating_mul(b as Weight)) @@ -88,7 +90,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add((174_000 as Weight).saturating_mul(m as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) } - fn propose_proposed(b: u32, m: u32, p: u32, ) -> Weight { + fn propose_proposed(b: u32, m: u32, p: u32) -> Weight { (46_515_000 as Weight) // Standard Error: 0 .saturating_add((5_000 as Weight).saturating_mul(b as Weight)) @@ -99,14 +101,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(4 as Weight)) } - fn vote(m: u32, ) -> Weight { + fn vote(m: u32) -> Weight { (38_491_000 as Weight) // Standard Error: 0 .saturating_add((209_000 as Weight).saturating_mul(m as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn close_early_disapproved(m: u32, p: u32, ) -> Weight { + fn 
close_early_disapproved(m: u32, p: u32) -> Weight { (44_903_000 as Weight) // Standard Error: 0 .saturating_add((181_000 as Weight).saturating_mul(m as Weight)) @@ -115,7 +117,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn close_early_approved(b: u32, m: u32, p: u32, ) -> Weight { + fn close_early_approved(b: u32, m: u32, p: u32) -> Weight { (57_416_000 as Weight) // Standard Error: 0 .saturating_add((4_000 as Weight).saturating_mul(b as Weight)) @@ -126,7 +128,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn close_disapproved(m: u32, p: u32, ) -> Weight { + fn close_disapproved(m: u32, p: u32) -> Weight { (50_134_000 as Weight) // Standard Error: 0 .saturating_add((189_000 as Weight).saturating_mul(m as Weight)) @@ -135,7 +137,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn close_approved(b: u32, m: u32, p: u32, ) -> Weight { + fn close_approved(b: u32, m: u32, p: u32) -> Weight { (65_901_000 as Weight) // Standard Error: 0 .saturating_add((4_000 as Weight).saturating_mul(b as Weight)) @@ -146,7 +148,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn disapprove_proposal(p: u32, ) -> Weight { + fn disapprove_proposal(p: u32) -> Weight { (28_849_000 as Weight) // Standard Error: 1_000 .saturating_add((494_000 as Weight).saturating_mul(p as Weight)) @@ -157,7 +159,7 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn set_members(m: u32, n: u32, p: u32, ) -> Weight { + fn set_members(m: u32, n: u32, p: u32) -> Weight { (0 as Weight) // Standard Error: 5_000 
.saturating_add((14_534_000 as Weight).saturating_mul(m as Weight)) @@ -170,7 +172,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(2 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(p as Weight))) } - fn execute(b: u32, m: u32, ) -> Weight { + fn execute(b: u32, m: u32) -> Weight { (23_177_000 as Weight) // Standard Error: 0 .saturating_add((3_000 as Weight).saturating_mul(b as Weight)) @@ -178,7 +180,7 @@ impl WeightInfo for () { .saturating_add((89_000 as Weight).saturating_mul(m as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) } - fn propose_execute(b: u32, m: u32, ) -> Weight { + fn propose_execute(b: u32, m: u32) -> Weight { (28_063_000 as Weight) // Standard Error: 0 .saturating_add((3_000 as Weight).saturating_mul(b as Weight)) @@ -186,7 +188,7 @@ impl WeightInfo for () { .saturating_add((174_000 as Weight).saturating_mul(m as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) } - fn propose_proposed(b: u32, m: u32, p: u32, ) -> Weight { + fn propose_proposed(b: u32, m: u32, p: u32) -> Weight { (46_515_000 as Weight) // Standard Error: 0 .saturating_add((5_000 as Weight).saturating_mul(b as Weight)) @@ -197,14 +199,14 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(4 as Weight)) } - fn vote(m: u32, ) -> Weight { + fn vote(m: u32) -> Weight { (38_491_000 as Weight) // Standard Error: 0 .saturating_add((209_000 as Weight).saturating_mul(m as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn close_early_disapproved(m: u32, p: u32, ) -> Weight { + fn close_early_disapproved(m: u32, p: u32) -> Weight { (44_903_000 as Weight) // Standard Error: 0 .saturating_add((181_000 as Weight).saturating_mul(m as Weight)) @@ -213,7 +215,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(3 as Weight)) 
.saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn close_early_approved(b: u32, m: u32, p: u32, ) -> Weight { + fn close_early_approved(b: u32, m: u32, p: u32) -> Weight { (57_416_000 as Weight) // Standard Error: 0 .saturating_add((4_000 as Weight).saturating_mul(b as Weight)) @@ -224,7 +226,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn close_disapproved(m: u32, p: u32, ) -> Weight { + fn close_disapproved(m: u32, p: u32) -> Weight { (50_134_000 as Weight) // Standard Error: 0 .saturating_add((189_000 as Weight).saturating_mul(m as Weight)) @@ -233,7 +235,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn close_approved(b: u32, m: u32, p: u32, ) -> Weight { + fn close_approved(b: u32, m: u32, p: u32) -> Weight { (65_901_000 as Weight) // Standard Error: 0 .saturating_add((4_000 as Weight).saturating_mul(b as Weight)) @@ -244,7 +246,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn disapprove_proposal(p: u32, ) -> Weight { + fn disapprove_proposal(p: u32) -> Weight { (28_849_000 as Weight) // Standard Error: 1_000 .saturating_add((494_000 as Weight).saturating_mul(p as Weight)) diff --git a/frame/contracts/common/src/lib.rs b/frame/contracts/common/src/lib.rs index 098ffd64b8e8e..9260b3e05cf34 100644 --- a/frame/contracts/common/src/lib.rs +++ b/frame/contracts/common/src/lib.rs @@ -26,7 +26,7 @@ use sp_runtime::{DispatchError, RuntimeDebug}; use sp_std::prelude::*; #[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; /// Result type of a `bare_call` or `bare_instantiate` call. 
/// @@ -163,7 +163,7 @@ pub enum Code { #[cfg(feature = "std")] mod as_string { use super::*; - use serde::{Serializer, Deserializer, ser::Error}; + use serde::{ser::Error, Deserializer, Serializer}; pub fn serialize(bytes: &Vec, serializer: S) -> Result { std::str::from_utf8(bytes) diff --git a/frame/contracts/proc-macro/src/lib.rs b/frame/contracts/proc-macro/src/lib.rs index 3b8b1ea5e6636..302a0d01a93d9 100644 --- a/frame/contracts/proc-macro/src/lib.rs +++ b/frame/contracts/proc-macro/src/lib.rs @@ -21,11 +21,10 @@ extern crate alloc; +use alloc::string::ToString; use proc_macro2::TokenStream; use quote::{quote, quote_spanned}; -use syn::spanned::Spanned; -use syn::{parse_macro_input, Data, DataStruct, DeriveInput, Fields, Ident}; -use alloc::string::ToString; +use syn::{parse_macro_input, spanned::Spanned, Data, DataStruct, DeriveInput, Fields, Ident}; /// This derives `Debug` for a struct where each field must be of some numeric type. /// It interprets each field as its represents some weight and formats it as times so that @@ -44,7 +43,7 @@ pub fn derive_schedule_debug(input: proc_macro::TokenStream) -> proc_macro::Toke fn derive_debug( input: proc_macro::TokenStream, - fmt: impl Fn(&Ident) -> TokenStream + fmt: impl Fn(&Ident) -> TokenStream, ) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = &input.ident; @@ -55,7 +54,8 @@ fn derive_debug( return quote_spanned! 
{ name.span() => compile_error!("WeightDebug is only supported for structs."); - }.into(); + } + .into() }; #[cfg(feature = "full")] @@ -87,24 +87,22 @@ fn derive_debug( fn iterate_fields(data: &DataStruct, fmt: impl Fn(&Ident) -> TokenStream) -> TokenStream { match &data.fields { Fields::Named(fields) => { - let recurse = fields.named - .iter() - .filter_map(|f| { + let recurse = fields.named.iter().filter_map(|f| { let name = f.ident.as_ref()?; if name.to_string().starts_with('_') { - return None; + return None } let value = fmt(name); - let ret = quote_spanned!{ f.span() => + let ret = quote_spanned! { f.span() => formatter.field(stringify!(#name), #value); }; Some(ret) }); - quote!{ + quote! { #( #recurse )* } - } - Fields::Unnamed(fields) => quote_spanned!{ + }, + Fields::Unnamed(fields) => quote_spanned! { fields.span() => compile_error!("Unnamed fields are not supported") }, diff --git a/frame/contracts/rpc/runtime-api/src/lib.rs b/frame/contracts/rpc/runtime-api/src/lib.rs index bb65e1b837399..742c2997287d2 100644 --- a/frame/contracts/rpc/runtime-api/src/lib.rs +++ b/frame/contracts/rpc/runtime-api/src/lib.rs @@ -24,10 +24,10 @@ #![cfg_attr(not(feature = "std"), no_std)] use codec::Codec; -use sp_std::vec::Vec; use pallet_contracts_primitives::{ - ContractExecResult, GetStorageResult, RentProjectionResult, Code, ContractInstantiateResult, + Code, ContractExecResult, ContractInstantiateResult, GetStorageResult, RentProjectionResult, }; +use sp_std::vec::Vec; sp_api::decl_runtime_apis! { /// The API to interact with contracts without using executive. 
diff --git a/frame/contracts/rpc/src/lib.rs b/frame/contracts/rpc/src/lib.rs index 3b95e98501656..2586ec7903dd6 100644 --- a/frame/contracts/rpc/src/lib.rs +++ b/frame/contracts/rpc/src/lib.rs @@ -22,7 +22,9 @@ use std::sync::Arc; use codec::Codec; use jsonrpc_core::{Error, ErrorCode, Result}; use jsonrpc_derive::rpc; -use pallet_contracts_primitives::RentProjection; +use pallet_contracts_primitives::{ + Code, ContractExecResult, ContractInstantiateResult, RentProjection, +}; use serde::{Deserialize, Serialize}; use sp_api::ProvideRuntimeApi; use sp_blockchain::HeaderBackend; @@ -33,7 +35,6 @@ use sp_runtime::{ traits::{Block as BlockT, Header as HeaderT}, }; use std::convert::{TryFrom, TryInto}; -use pallet_contracts_primitives::{Code, ContractExecResult, ContractInstantiateResult}; pub use pallet_contracts_rpc_runtime_api::ContractsApi as ContractsRuntimeApi; @@ -164,10 +165,7 @@ pub struct Contracts { impl Contracts { /// Create new `Contracts` with the given reference to the client. pub fn new(client: Arc) -> Self { - Contracts { - client, - _marker: Default::default(), - } + Contracts { client, _marker: Default::default() } } } impl @@ -202,13 +200,7 @@ where // If the block hash is not supplied assume the best block. self.client.info().best_hash)); - let CallRequest { - origin, - dest, - value, - gas_limit, - input_data, - } = call_request; + let CallRequest { origin, dest, value, gas_limit, input_data } = call_request; let value: Balance = decode_hex(value, "balance")?; let gas_limit: Weight = decode_hex(gas_limit, "weight")?; @@ -225,20 +217,15 @@ where &self, instantiate_request: InstantiateRequest, at: Option<::Hash>, - ) -> Result::Header as HeaderT>::Number>> { + ) -> Result::Header as HeaderT>::Number>> + { let api = self.client.runtime_api(); let at = BlockId::hash(at.unwrap_or_else(|| // If the block hash is not supplied assume the best block. 
self.client.info().best_hash)); - let InstantiateRequest { - origin, - endowment, - gas_limit, - code, - data, - salt, - } = instantiate_request; + let InstantiateRequest { origin, endowment, gas_limit, code, data, salt } = + instantiate_request; let endowment: Balance = decode_hex(endowment, "balance")?; let gas_limit: Weight = decode_hex(gas_limit, "weight")?; @@ -337,7 +324,8 @@ mod tests { #[test] fn call_request_should_serialize_deserialize_properly() { type Req = CallRequest; - let req: Req = serde_json::from_str(r#" + let req: Req = serde_json::from_str( + r#" { "origin": "5CiPPseXPECbkjWCa6MnjNokrgYjMqmKndv2rSnekmSK2DjL", "dest": "5DRakbLVnjVrW6niwLfHGW24EeCEvDAFGEXrtaYS5M4ynoom", @@ -345,7 +333,9 @@ mod tests { "gasLimit": 1000000000000, "inputData": "0x8c97db39" } - "#).unwrap(); + "#, + ) + .unwrap(); assert_eq!(req.gas_limit.into_u256(), U256::from(0xe8d4a51000u64)); assert_eq!(req.value.into_u256(), U256::from(1234567890987654321u128)); } @@ -353,7 +343,8 @@ mod tests { #[test] fn instantiate_request_should_serialize_deserialize_properly() { type Req = InstantiateRequest; - let req: Req = serde_json::from_str(r#" + let req: Req = serde_json::from_str( + r#" { "origin": "5CiPPseXPECbkjWCa6MnjNokrgYjMqmKndv2rSnekmSK2DjL", "endowment": "0x88", @@ -362,7 +353,9 @@ mod tests { "data": "0x4299", "salt": "0x9988" } - "#).unwrap(); + "#, + ) + .unwrap(); assert_eq!(req.origin, "5CiPPseXPECbkjWCa6MnjNokrgYjMqmKndv2rSnekmSK2DjL"); assert_eq!(req.endowment.into_u256(), 0x88.into()); @@ -383,7 +376,8 @@ mod tests { let actual = serde_json::to_string(&res).unwrap(); assert_eq!(actual, trim(expected).as_str()); } - test(r#"{ + test( + r#"{ "gasConsumed": 5000, "gasRequired": 8000, "debugMessage": "HelloWorld", @@ -393,25 +387,30 @@ mod tests { "data": "0x1234" } } - }"#); - test(r#"{ + }"#, + ); + test( + r#"{ "gasConsumed": 3400, "gasRequired": 5200, "debugMessage": "HelloWorld", "result": { "Err": "BadOrigin" } - }"#); + }"#, + ); } #[test] fn 
instantiate_result_should_serialize_deserialize_properly() { fn test(expected: &str) { - let res: ContractInstantiateResult = serde_json::from_str(expected).unwrap(); + let res: ContractInstantiateResult = + serde_json::from_str(expected).unwrap(); let actual = serde_json::to_string(&res).unwrap(); assert_eq!(actual, trim(expected).as_str()); } - test(r#"{ + test( + r#"{ "gasConsumed": 5000, "gasRequired": 8000, "debugMessage": "HelloWorld", @@ -425,14 +424,17 @@ mod tests { "rentProjection": null } } - }"#); - test(r#"{ + }"#, + ); + test( + r#"{ "gasConsumed": 3400, "gasRequired": 5200, "debugMessage": "HelloWorld", "result": { "Err": "BadOrigin" } - }"#); + }"#, + ); } } diff --git a/frame/contracts/src/benchmarking/code.rs b/frame/contracts/src/benchmarking/code.rs index cd13e3be6df3a..0ffcec10269f5 100644 --- a/frame/contracts/src/benchmarking/code.rs +++ b/frame/contracts/src/benchmarking/code.rs @@ -25,21 +25,20 @@ //! compiles it down into a `WasmModule` that can be used as a contract's code. use crate::Config; +use frame_support::traits::Get; use pwasm_utils::{ - stack_height::inject_limiter, parity_wasm::{ + builder, elements::{ - self, Instruction, Instructions, FuncBody, ValueType, BlockType, Section, - CustomSection, + self, BlockType, CustomSection, FuncBody, Instruction, Instructions, Section, ValueType, }, - builder, }, + stack_height::inject_limiter, }; use sp_core::crypto::UncheckedFrom; use sp_runtime::traits::Hash; use sp_sandbox::{EnvironmentDefinitionBuilder, Memory}; -use sp_std::{prelude::*, convert::TryFrom, borrow::ToOwned}; -use frame_support::traits::Get; +use sp_std::{borrow::ToOwned, convert::TryFrom, prelude::*}; /// Pass to `create_code` in order to create a compiled `WasmModule`. /// @@ -117,7 +116,7 @@ pub struct ImportedFunction { /// A wasm module ready to be put on chain. 
#[derive(Clone)] -pub struct WasmModule { +pub struct WasmModule { pub code: Vec, pub hash: ::Output, memory: Option, @@ -136,27 +135,37 @@ where let mut contract = builder::module() // deploy function (first internal function) .function() - .signature().build() - .with_body(def.deploy_body.unwrap_or_else(|| - FuncBody::new(Vec::new(), Instructions::empty()) - )) - .build() + .signature() + .build() + .with_body( + def.deploy_body + .unwrap_or_else(|| FuncBody::new(Vec::new(), Instructions::empty())), + ) + .build() // call function (second internal function) .function() - .signature().build() - .with_body(def.call_body.unwrap_or_else(|| - FuncBody::new(Vec::new(), Instructions::empty()) - )) - .build() - .export().field("deploy").internal().func(func_offset).build() - .export().field("call").internal().func(func_offset + 1).build(); + .signature() + .build() + .with_body( + def.call_body + .unwrap_or_else(|| FuncBody::new(Vec::new(), Instructions::empty())), + ) + .build() + .export() + .field("deploy") + .internal() + .func(func_offset) + .build() + .export() + .field("call") + .internal() + .func(func_offset + 1) + .build(); // If specified we add an additional internal function if let Some(body) = def.aux_body { - let mut signature = contract - .function() - .signature(); - for _ in 0 .. def.aux_arg_num { + let mut signature = contract.function().signature(); + for _ in 0..def.aux_arg_num { signature = signature.with_param(ValueType::I64); } contract = signature.build().with_body(body).build(); @@ -164,9 +173,12 @@ where // Grant access to linear memory. 
if let Some(memory) = &def.memory { - contract = contract.import() - .module("env").field("memory") - .external().memory(memory.min_pages, Some(memory.max_pages)) + contract = contract + .import() + .module("env") + .field("memory") + .external() + .memory(memory.min_pages, Some(memory.max_pages)) .build(); } @@ -177,7 +189,8 @@ where .with_results(func.return_type.into_iter().collect()) .build_sig(); let sig = contract.push_signature(sig); - contract = contract.import() + contract = contract + .import() .module(func.module) .field(func.name) .with_external(elements::External::Function(sig)) @@ -186,7 +199,8 @@ where // Initialize memory for data in def.data_segments { - contract = contract.data() + contract = contract + .data() .offset(Instruction::I32Const(data.offset as i32)) .value(data.value) .build() @@ -194,12 +208,13 @@ where // Add global variables if def.num_globals > 0 { - use rand::{prelude::*, distributions::Standard}; + use rand::{distributions::Standard, prelude::*}; let rng = rand_pcg::Pcg32::seed_from_u64(3112244599778833558); for val in rng.sample_iter(Standard).take(def.num_globals as usize) { contract = contract .global() - .value_type().i64() + .value_type() + .i64() .mutable() .init_expr(Instruction::I64Const(val)) .build() @@ -218,31 +233,22 @@ where // Add the dummy section if def.dummy_section > 0 { - contract = contract.with_section( - Section::Custom( - CustomSection::new("dummy".to_owned(), vec![42; def.dummy_section as usize]) - ) - ); + contract = contract.with_section(Section::Custom(CustomSection::new( + "dummy".to_owned(), + vec![42; def.dummy_section as usize], + ))); } let mut code = contract.build(); // Inject stack height metering if def.inject_stack_metering { - code = inject_limiter( - code, - T::Schedule::get().limits.stack_height - ) - .unwrap(); + code = inject_limiter(code, T::Schedule::get().limits.stack_height).unwrap(); } let code = code.to_bytes().unwrap(); let hash = T::Hashing::hash(&code); - Self { - code, - hash, - 
memory: def.memory, - } + Self { code, hash, memory: def.memory } } } @@ -266,7 +272,7 @@ where ModuleDefinition { memory: Some(ImportedMemory::max::()), dummy_section: dummy_bytes.saturating_sub(module_overhead), - .. Default::default() + ..Default::default() } .into() } @@ -275,23 +281,18 @@ where /// `instantiate_with_code` for different sizes of wasm modules. The generated module maximizes /// instrumentation runtime by nesting blocks as deeply as possible given the byte budget. pub fn sized(target_bytes: u32) -> Self { - use self::elements::Instruction::{If, I32Const, Return, End}; + use self::elements::Instruction::{End, I32Const, If, Return}; // Base size of a contract is 63 bytes and each expansion adds 6 bytes. // We do one expansion less to account for the code section and function body // size fields inside the binary wasm module representation which are leb128 encoded // and therefore grow in size when the contract grows. We are not allowed to overshoot // because of the maximum code size that is enforced by `instantiate_with_code`. let expansions = (target_bytes.saturating_sub(63) / 6).saturating_sub(1); - const EXPANSION: [Instruction; 4] = [ - I32Const(0), - If(BlockType::NoResult), - Return, - End, - ]; + const EXPANSION: [Instruction; 4] = [I32Const(0), If(BlockType::NoResult), Return, End]; ModuleDefinition { call_body: Some(body::repeated(expansions, &EXPANSION)), memory: Some(ImportedMemory::max::()), - .. Default::default() + ..Default::default() } .into() } @@ -317,12 +318,15 @@ where offset: 0, value: (pages * 64 * 1024 - 4).to_le_bytes().to_vec(), }], - call_body: Some(body::repeated(repeat, &[ - Instruction::I32Const(4), // ptr where to store output - Instruction::I32Const(0), // ptr to length - Instruction::Call(0), // call the imported function - ])), - .. 
Default::default() + call_body: Some(body::repeated( + repeat, + &[ + Instruction::I32Const(4), // ptr where to store output + Instruction::I32Const(0), // ptr to length + Instruction::Call(0), // call the imported function + ], + )), + ..Default::default() } .into() } @@ -339,13 +343,16 @@ where params: vec![ValueType::I32, ValueType::I32, ValueType::I32], return_type: None, }], - call_body: Some(body::repeated(repeat, &[ - Instruction::I32Const(0), // input_ptr - Instruction::I32Const(data_size as i32), // input_len - Instruction::I32Const(0), // output_ptr - Instruction::Call(0), - ])), - .. Default::default() + call_body: Some(body::repeated( + repeat, + &[ + Instruction::I32Const(0), // input_ptr + Instruction::I32Const(data_size as i32), // input_len + Instruction::I32Const(0), // output_ptr + Instruction::Call(0), + ], + )), + ..Default::default() } .into() } @@ -357,7 +364,7 @@ where let memory = if let Some(memory) = &self.memory { memory } else { - return None; + return None }; let memory = Memory::new(memory.min_pages, Some(memory.max_pages)).unwrap(); env.add_memory("env", "memory", memory.clone()); @@ -367,25 +374,25 @@ where pub fn unary_instr(instr: Instruction, repeat: u32) -> Self { use body::DynInstr::{RandomI64Repeated, Regular}; ModuleDefinition { - call_body: Some(body::repeated_dyn(repeat, vec![ - RandomI64Repeated(1), - Regular(instr), - Regular(Instruction::Drop), - ])), - .. Default::default() - }.into() + call_body: Some(body::repeated_dyn( + repeat, + vec![RandomI64Repeated(1), Regular(instr), Regular(Instruction::Drop)], + )), + ..Default::default() + } + .into() } pub fn binary_instr(instr: Instruction, repeat: u32) -> Self { use body::DynInstr::{RandomI64Repeated, Regular}; ModuleDefinition { - call_body: Some(body::repeated_dyn(repeat, vec![ - RandomI64Repeated(2), - Regular(instr), - Regular(Instruction::Drop), - ])), - .. 
Default::default() - }.into() + call_body: Some(body::repeated_dyn( + repeat, + vec![RandomI64Repeated(2), Regular(instr), Regular(Instruction::Drop)], + )), + ..Default::default() + } + .into() } } @@ -426,7 +433,7 @@ pub mod body { RandomGetGlobal(u32, u32), /// Insert a SetGlobal with a random offset in [low, high). /// (low, high) - RandomSetGlobal(u32, u32) + RandomSetGlobal(u32, u32), } pub fn plain(instructions: Vec) -> FuncBody { @@ -441,13 +448,13 @@ pub mod body { .take(instructions.len() * usize::try_from(repetitions).unwrap()) .cloned() .chain(sp_std::iter::once(Instruction::End)) - .collect() + .collect(), ); FuncBody::new(Vec::new(), instructions) } pub fn repeated_dyn(repetitions: u32, mut instructions: Vec) -> FuncBody { - use rand::{prelude::*, distributions::Standard}; + use rand::{distributions::Standard, prelude::*}; // We do not need to be secure here. let mut rng = rand_pcg::Pcg32::seed_from_u64(8446744073709551615); @@ -456,50 +463,46 @@ pub mod body { let body = (0..instructions.len()) .cycle() .take(instructions.len() * usize::try_from(repetitions).unwrap()) - .flat_map(|idx| - match &mut instructions[idx] { - DynInstr::Regular(instruction) => vec![instruction.clone()], - DynInstr::Counter(offset, increment_by) => { - let current = *offset; - *offset += *increment_by; - vec![Instruction::I32Const(current as i32)] - }, - DynInstr::RandomUnaligned(low, high) => { - let unaligned = rng.gen_range(*low..*high) | 1; - vec![Instruction::I32Const(unaligned as i32)] - }, - DynInstr::RandomI32(low, high) => { - vec![Instruction::I32Const(rng.gen_range(*low..*high))] - }, - DynInstr::RandomI32Repeated(num) => { - (&mut rng).sample_iter(Standard).take(*num).map(|val| - Instruction::I32Const(val) - ) - .collect() - }, - DynInstr::RandomI64Repeated(num) => { - (&mut rng).sample_iter(Standard).take(*num).map(|val| - Instruction::I64Const(val) - ) - .collect() - }, - DynInstr::RandomGetLocal(low, high) => { - 
vec![Instruction::GetLocal(rng.gen_range(*low..*high))] - }, - DynInstr::RandomSetLocal(low, high) => { - vec![Instruction::SetLocal(rng.gen_range(*low..*high))] - }, - DynInstr::RandomTeeLocal(low, high) => { - vec![Instruction::TeeLocal(rng.gen_range(*low..*high))] - }, - DynInstr::RandomGetGlobal(low, high) => { - vec![Instruction::GetGlobal(rng.gen_range(*low..*high))] - }, - DynInstr::RandomSetGlobal(low, high) => { - vec![Instruction::SetGlobal(rng.gen_range(*low..*high))] - }, - } - ) + .flat_map(|idx| match &mut instructions[idx] { + DynInstr::Regular(instruction) => vec![instruction.clone()], + DynInstr::Counter(offset, increment_by) => { + let current = *offset; + *offset += *increment_by; + vec![Instruction::I32Const(current as i32)] + }, + DynInstr::RandomUnaligned(low, high) => { + let unaligned = rng.gen_range(*low..*high) | 1; + vec![Instruction::I32Const(unaligned as i32)] + }, + DynInstr::RandomI32(low, high) => { + vec![Instruction::I32Const(rng.gen_range(*low..*high))] + }, + DynInstr::RandomI32Repeated(num) => (&mut rng) + .sample_iter(Standard) + .take(*num) + .map(|val| Instruction::I32Const(val)) + .collect(), + DynInstr::RandomI64Repeated(num) => (&mut rng) + .sample_iter(Standard) + .take(*num) + .map(|val| Instruction::I64Const(val)) + .collect(), + DynInstr::RandomGetLocal(low, high) => { + vec![Instruction::GetLocal(rng.gen_range(*low..*high))] + }, + DynInstr::RandomSetLocal(low, high) => { + vec![Instruction::SetLocal(rng.gen_range(*low..*high))] + }, + DynInstr::RandomTeeLocal(low, high) => { + vec![Instruction::TeeLocal(rng.gen_range(*low..*high))] + }, + DynInstr::RandomGetGlobal(low, high) => { + vec![Instruction::GetGlobal(rng.gen_range(*low..*high))] + }, + DynInstr::RandomSetGlobal(low, high) => { + vec![Instruction::SetGlobal(rng.gen_range(*low..*high))] + }, + }) .chain(sp_std::iter::once(Instruction::End)) .collect(); FuncBody::new(Vec::new(), Instructions::new(body)) diff --git a/frame/contracts/src/benchmarking/mod.rs 
b/frame/contracts/src/benchmarking/mod.rs index cbe5e48a4f020..83c18f8f79e0a 100644 --- a/frame/contracts/src/benchmarking/mod.rs +++ b/frame/contracts/src/benchmarking/mod.rs @@ -22,28 +22,28 @@ mod code; mod sandbox; -use crate::{ - *, Pallet as Contracts, - exec::StorageKey, - rent::Rent, - schedule::{API_BENCHMARK_BATCH_SIZE, INSTR_BENCHMARK_BATCH_SIZE}, - storage::Storage, -}; use self::{ code::{ body::{self, DynInstr::*}, - ModuleDefinition, DataSegment, ImportedMemory, ImportedFunction, WasmModule, + DataSegment, ImportedFunction, ImportedMemory, ModuleDefinition, WasmModule, }, sandbox::Sandbox, }; +use crate::{ + exec::StorageKey, + rent::Rent, + schedule::{API_BENCHMARK_BATCH_SIZE, INSTR_BENCHMARK_BATCH_SIZE}, + storage::Storage, + Pallet as Contracts, *, +}; use codec::Encode; -use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; +use frame_support::weights::Weight; use frame_system::{Pallet as System, RawOrigin}; -use pwasm_utils::parity_wasm::elements::{Instruction, ValueType, BlockType, BrTableData}; -use sp_runtime::traits::{Hash, Bounded, Zero}; -use sp_std::{default::Default, convert::{TryInto}, vec::Vec, vec}; use pallet_contracts_primitives::RentProjection; -use frame_support::weights::Weight; +use pwasm_utils::parity_wasm::elements::{BlockType, BrTableData, Instruction, ValueType}; +use sp_runtime::traits::{Bounded, Hash, Zero}; +use sp_std::{convert::TryInto, default::Default, vec, vec::Vec}; /// How many batches we do per API benchmark. const API_BENCHMARK_BATCHES: u32 = 20; @@ -74,7 +74,7 @@ impl Endow { /// The maximum amount of balance a caller can transfer without being brought below /// the existential deposit. This assumes that every caller is funded with the amount /// returned by `caller_funding`. 
- fn max() -> BalanceOf { + fn max() -> BalanceOf { caller_funding::().saturating_sub(T::Currency::minimum_balance()) } } @@ -109,8 +109,7 @@ where module: WasmModule, data: Vec, endowment: Endow, - ) -> Result, &'static str> - { + ) -> Result, &'static str> { let (storage_size, endowment) = match endowment { Endow::CollectRent => { // storage_size cannot be zero because otherwise a contract that is just above @@ -182,7 +181,8 @@ where /// Get the `AliveContractInfo` of the `addr` or an error if it is no longer alive. fn address_alive_info(addr: &T::AccountId) -> Result, &'static str> { - ContractInfoOf::::get(addr).and_then(|c| c.get_alive()) + ContractInfoOf::::get(addr) + .and_then(|c| c.get_alive()) .ok_or("Expected contract to be alive at this point.") } @@ -193,7 +193,8 @@ where /// Return an error if this contract is no tombstone. fn ensure_tombstone(&self) -> Result<(), &'static str> { - ContractInfoOf::::get(&self.account_id).and_then(|c| c.get_tombstone()) + ContractInfoOf::::get(&self.account_id) + .and_then(|c| c.get_tombstone()) .ok_or("Expected contract to be a tombstone at this point.") .map(|_| ()) } @@ -236,16 +237,13 @@ where let contract = Contract::::new(code, vec![], Endow::CollectRent)?; let storage_items = create_storage::(stor_num, stor_size)?; contract.store(&storage_items)?; - Ok(Self { - contract, - storage: storage_items, - }) + Ok(Self { contract, storage: storage_items }) } /// Increase the system block number so that this contract is eligible for eviction. - fn set_block_num_for_eviction(&self) -> Result<(), &'static str> { + fn set_block_num_for_eviction(&self) -> Result<(), &'static str> { System::::set_block_number( - self.contract.eviction_at()? + T::SignedClaimHandicap::get() + 5u32.into() + self.contract.eviction_at()? + T::SignedClaimHandicap::get() + 5u32.into(), ); Ok(()) } @@ -261,15 +259,17 @@ where /// Generate `stor_num` storage items. Each has the size `stor_size`. 
fn create_storage( stor_num: u32, - stor_size: u32 + stor_size: u32, ) -> Result)>, &'static str> { - (0..stor_num).map(|i| { - let hash = T::Hashing::hash_of(&i) - .as_ref() - .try_into() - .map_err(|_| "Hash too big for storage key")?; - Ok((hash, vec![42u8; stor_size as usize])) - }).collect::, &'static str>>() + (0..stor_num) + .map(|i| { + let hash = T::Hashing::hash_of(&i) + .as_ref() + .try_into() + .map_err(|_| "Hash too big for storage key")?; + Ok((hash, vec![42u8; stor_size as usize])) + }) + .collect::, &'static str>>() } /// The funding that each account that either calls or instantiates contracts is funded with. diff --git a/frame/contracts/src/benchmarking/sandbox.rs b/frame/contracts/src/benchmarking/sandbox.rs index a97fcc2b113ec..7173482e90c22 100644 --- a/frame/contracts/src/benchmarking/sandbox.rs +++ b/frame/contracts/src/benchmarking/sandbox.rs @@ -18,11 +18,7 @@ ///! For instruction benchmarking we do no instantiate a full contract but merely the ///! sandbox to execute the wasm code. This is because we do not need the full ///! environment that provides the seal interface as imported functions. - -use super::{ - Config, - code::WasmModule, -}; +use super::{code::WasmModule, Config}; use sp_core::crypto::UncheckedFrom; use sp_sandbox::{EnvironmentDefinitionBuilder, Instance, Memory}; @@ -51,9 +47,6 @@ where let memory = module.add_memory(&mut env_builder); let instance = Instance::new(&module.code, &env_builder, &mut ()) .expect("Failed to create benchmarking Sandbox instance"); - Self { - instance, - _memory: memory, - } + Self { instance, _memory: memory } } } diff --git a/frame/contracts/src/chain_extension.rs b/frame/contracts/src/chain_extension.rs index bb352c3a93d6f..38370ce91e167 100644 --- a/frame/contracts/src/chain_extension.rs +++ b/frame/contracts/src/chain_extension.rs @@ -55,22 +55,19 @@ //! on how to use a chain extension in order to provide new features to ink! contracts. 
use crate::{ - Error, - wasm::{Runtime, RuntimeCosts}, gas::ChargedAmount, + wasm::{Runtime, RuntimeCosts}, + Error, }; use codec::{Decode, MaxEncodedLen}; use frame_support::weights::Weight; use sp_runtime::DispatchError; -use sp_std::{ - marker::PhantomData, - vec::Vec, -}; +use sp_std::{marker::PhantomData, vec::Vec}; +pub use crate::{exec::Ext, Config}; pub use frame_system::Config as SysConfig; pub use pallet_contracts_primitives::ReturnFlags; pub use sp_core::crypto::UncheckedFrom; -pub use crate::{Config, exec::Ext}; pub use state::Init as InitState; /// Result that returns a [`DispatchError`] on error. @@ -143,7 +140,7 @@ pub enum RetVal { /// The semantic is the same as for calling `seal_return`: The control returns to /// the caller of the currently executing contract yielding the supplied buffer and /// flags. - Diverging{flags: ReturnFlags, data: Vec}, + Diverging { flags: ReturnFlags, data: Vec }, } /// Grants the chain extension access to its parameters and execution environment. @@ -183,7 +180,9 @@ where /// This is when a maximum a priori amount was charged and then should be partially /// refunded to match the actual amount. pub fn adjust_weight(&mut self, charged: ChargedAmount, actual_weight: Weight) { - self.inner.runtime.adjust_gas(charged, RuntimeCosts::ChainExtension(actual_weight)) + self.inner + .runtime + .adjust_gas(charged, RuntimeCosts::ChainExtension(actual_weight)) } /// Grants access to the execution environment of the current contract call. @@ -204,46 +203,31 @@ impl<'a, 'b, E: Ext> Environment<'a, 'b, E, state::Init> { /// It is only available to this crate because only the wasm runtime module needs to /// ever create this type. Chain extensions merely consume it. 
pub(crate) fn new( - runtime: &'a mut Runtime::<'b, E>, + runtime: &'a mut Runtime<'b, E>, input_ptr: u32, input_len: u32, output_ptr: u32, output_len_ptr: u32, ) -> Self { Environment { - inner: Inner { - runtime, - input_ptr, - input_len, - output_ptr, - output_len_ptr, - }, + inner: Inner { runtime, input_ptr, input_len, output_ptr, output_len_ptr }, phantom: PhantomData, } } /// Use all arguments as integer values. pub fn only_in(self) -> Environment<'a, 'b, E, state::OnlyIn> { - Environment { - inner: self.inner, - phantom: PhantomData, - } + Environment { inner: self.inner, phantom: PhantomData } } /// Use input arguments as integer and output arguments as pointer to a buffer. pub fn prim_in_buf_out(self) -> Environment<'a, 'b, E, state::PrimInBufOut> { - Environment { - inner: self.inner, - phantom: PhantomData, - } + Environment { inner: self.inner, phantom: PhantomData } } /// Use input and output arguments as pointers to a buffer. pub fn buf_in_buf_out(self) -> Environment<'a, 'b, E, state::BufInBufOut> { - Environment { - inner: self.inner, - phantom: PhantomData, - } + Environment { inner: self.inner, phantom: PhantomData } } } @@ -287,10 +271,9 @@ where /// charge the overall costs either using `max_len` (worst case approximation) or using /// [`in_len()`](Self::in_len). pub fn read(&self, max_len: u32) -> Result> { - self.inner.runtime.read_sandbox_memory( - self.inner.input_ptr, - self.inner.input_len.min(max_len), - ) + self.inner + .runtime + .read_sandbox_memory(self.inner.input_ptr, self.inner.input_len.min(max_len)) } /// Reads `min(buffer.len(), in_len) from contract memory. @@ -304,10 +287,7 @@ where let buffer = core::mem::take(buffer); &mut buffer[..len.min(self.inner.input_len as usize)] }; - self.inner.runtime.read_sandbox_memory_into_buf( - self.inner.input_ptr, - sliced, - )?; + self.inner.runtime.read_sandbox_memory_into_buf(self.inner.input_ptr, sliced)?; *buffer = sliced; Ok(()) } @@ -377,7 +357,7 @@ where /// gets too large. 
struct Inner<'a, 'b, E: Ext> { /// The runtime contains all necessary functions to interact with the running contract. - runtime: &'a mut Runtime::<'b, E>, + runtime: &'a mut Runtime<'b, E>, /// Verbatim argument passed to `seal_call_chain_extension`. input_ptr: u32, /// Verbatim argument passed to `seal_call_chain_extension`. diff --git a/frame/contracts/src/exec.rs b/frame/contracts/src/exec.rs index ae1585afbb890..2967e4fa418ac 100644 --- a/frame/contracts/src/exec.rs +++ b/frame/contracts/src/exec.rs @@ -16,27 +16,29 @@ // limitations under the License. use crate::{ - CodeHash, Event, Config, Pallet as Contracts, - BalanceOf, ContractInfo, gas::GasMeter, rent::{Rent, RentStatus}, storage::Storage, - Error, ContractInfoOf, Schedule, AliveContractInfo, AccountCounter, + gas::GasMeter, + rent::{Rent, RentStatus}, + storage::Storage, + AccountCounter, AliveContractInfo, BalanceOf, CodeHash, Config, ContractInfo, ContractInfoOf, + Error, Event, Pallet as Contracts, Schedule, }; -use sp_core::crypto::UncheckedFrom; -use sp_std::{ - prelude::*, - marker::PhantomData, - mem, -}; -use sp_runtime::{Perbill, traits::{Convert, Saturating}}; use frame_support::{ - dispatch::{DispatchResult, DispatchError, DispatchResultWithPostInfo, Dispatchable}, + dispatch::{DispatchError, DispatchResult, DispatchResultWithPostInfo, Dispatchable}, + ensure, storage::{with_transaction, TransactionOutcome}, - traits::{ExistenceRequirement, Currency, Time, Randomness, Get, OriginTrait, Filter}, + traits::{Currency, ExistenceRequirement, Filter, Get, OriginTrait, Randomness, Time}, weights::Weight, - ensure, DefaultNoBound, + DefaultNoBound, }; use frame_system::RawOrigin; -use pallet_contracts_primitives::{ExecReturnValue}; -use smallvec::{SmallVec, Array}; +use pallet_contracts_primitives::ExecReturnValue; +use smallvec::{Array, SmallVec}; +use sp_core::crypto::UncheckedFrom; +use sp_runtime::{ + traits::{Convert, Saturating}, + Perbill, +}; +use sp_std::{marker::PhantomData, mem, 
prelude::*}; pub type AccountIdOf = ::AccountId; pub type MomentOf = <::Time as Time>::Moment; @@ -75,10 +77,7 @@ pub struct ExecError { impl> From for ExecError { fn from(error: T) -> Self { - Self { - error: error.into(), - origin: ErrorOrigin::Caller, - } + Self { error: error.into(), origin: ErrorOrigin::Caller } } } @@ -124,7 +123,7 @@ where account_id: &T::AccountId, value: &BalanceOf, contract: &AliveContractInfo, - executable: &E + executable: &E, ) -> Self { Self { total_balance: T::Currency::total_balance(account_id).saturating_add(*value), @@ -187,7 +186,7 @@ pub trait Ext: sealing::Sealed { value: BalanceOf, input_data: Vec, salt: &[u8], - ) -> Result<(AccountIdOf, ExecReturnValue ), ExecError>; + ) -> Result<(AccountIdOf, ExecReturnValue), ExecError>; /// Transfer all funds to `beneficiary` and delete the contract. /// @@ -218,11 +217,7 @@ pub trait Ext: sealing::Sealed { ) -> Result<(), DispatchError>; /// Transfer some amount of funds into the specified account. - fn transfer( - &mut self, - to: &AccountIdOf, - value: BalanceOf, - ) -> DispatchResult; + fn transfer(&mut self, to: &AccountIdOf, value: BalanceOf) -> DispatchResult; /// Returns the storage entry of the executing account by the given `key`. /// @@ -351,8 +346,7 @@ pub trait Executable: Sized { /// # Note /// /// Charges weight proportional to the code size from the gas meter. - fn add_user(code_hash: CodeHash, gas_meter: &mut GasMeter) - -> Result<(), DispatchError>; + fn add_user(code_hash: CodeHash, gas_meter: &mut GasMeter) -> Result<(), DispatchError>; /// Decrement the refcount by one and remove the code when it drops to zero. 
/// @@ -361,8 +355,10 @@ pub trait Executable: Sized { /// # Note /// /// Charges weight proportional to the code size from the gas meter - fn remove_user(code_hash: CodeHash, gas_meter: &mut GasMeter) - -> Result<(), DispatchError>; + fn remove_user( + code_hash: CodeHash, + gas_meter: &mut GasMeter, + ) -> Result<(), DispatchError>; /// Execute the specified exported function and return the result. /// @@ -550,15 +546,15 @@ macro_rules! get_cached_or_panic_after_load { qed" ); } - }} + }}; } impl CachedContract { /// Load the `contract_info` from storage if necessary. fn load(&mut self, account_id: &T::AccountId) { if let CachedContract::Invalidated = self { - let contract = >::get(&account_id) - .and_then(|contract| contract.get_alive()); + let contract = + >::get(&account_id).and_then(|contract| contract.get_alive()); if let Some(contract) = contract { *self = CachedContract::Cached(contract); } @@ -610,7 +606,7 @@ where debug_message: Option<&'a mut Vec>, ) -> Result { let (mut stack, executable) = Self::new( - FrameArgs::Call{dest, cached_info: None}, + FrameArgs::Call { dest, cached_info: None }, origin, gas_meter, schedule, @@ -692,18 +688,18 @@ where value_transferred: BalanceOf, gas_meter: &mut GasMeter, gas_limit: Weight, - schedule: &Schedule + schedule: &Schedule, ) -> Result<(Frame, E), ExecError> { let (account_id, contract_info, executable, entry_point) = match frame_args { - FrameArgs::Call{dest, cached_info} => { + FrameArgs::Call { dest, cached_info } => { let contract = if let Some(contract) = cached_info { contract } else { >::get(&dest) .ok_or(>::ContractNotFound.into()) - .and_then(|contract| + .and_then(|contract| { contract.get_alive().ok_or(>::ContractIsTombstone) - )? + })? }; let executable = E::from_storage(contract.code_hash, schedule, gas_meter)?; @@ -713,15 +709,14 @@ where // changes would be rolled back in case this contract is called by another // contract. 
// See: https://github.com/paritytech/substrate/issues/6439#issuecomment-648754324 - let contract = Rent:: - ::charge(&dest, contract, executable.occupied_storage())? - .ok_or(Error::::RentNotPaid)?; + let contract = + Rent::::charge(&dest, contract, executable.occupied_storage())? + .ok_or(Error::::RentNotPaid)?; (dest, contract, executable, ExportedFunction::Call) - } - FrameArgs::Instantiate{sender, trie_seed, executable, salt} => { - let account_id = >::contract_address( - &sender, executable.code_hash(), &salt, - ); + }, + FrameArgs::Instantiate { sender, trie_seed, executable, salt } => { + let account_id = + >::contract_address(&sender, executable.code_hash(), &salt); let trie_id = Storage::::generate_trie_id(&account_id, trie_seed); let contract = Storage::::new_contract( &account_id, @@ -729,12 +724,15 @@ where executable.code_hash().clone(), )?; (account_id, contract, executable, ExportedFunction::Constructor) - } + }, }; let frame = Frame { rent_params: RentParams::new( - &account_id, &value_transferred, &contract_info, &executable, + &account_id, + &value_transferred, + &contract_info, + &executable, ), value_transferred, contract_info: CachedContract::Cached(contract_info), @@ -755,7 +753,7 @@ where gas_limit: Weight, ) -> Result { if self.frames.len() == T::CallStack::size() { - return Err(Error::::MaxCallDepthReached.into()); + return Err(Error::::MaxCallDepthReached.into()) } // We need to make sure that changes made to the contract info are not discarded. 
@@ -772,17 +770,10 @@ where ); } - let nested_meter = &mut self.frames - .last_mut() - .unwrap_or(&mut self.first_frame) - .nested_meter; - let (frame, executable) = Self::new_frame( - frame_args, - value_transferred, - nested_meter, - gas_limit, - self.schedule, - )?; + let nested_meter = + &mut self.frames.last_mut().unwrap_or(&mut self.first_frame).nested_meter; + let (frame, executable) = + Self::new_frame(frame_args, value_transferred, nested_meter, gas_limit, self.schedule)?; self.frames.push(frame); Ok(executable) } @@ -790,11 +781,7 @@ where /// Run the current (top) frame. /// /// This can be either a call or an instantiate. - fn run( - &mut self, - executable: E, - input_data: Vec - ) -> Result { + fn run(&mut self, executable: E, input_data: Vec) -> Result { let entry_point = self.top_frame().entry_point; let do_transaction = || { // Cache the value before calling into the constructor because that @@ -807,11 +794,9 @@ where self.initial_transfer()?; // Call into the wasm blob. - let output = executable.execute( - self, - &entry_point, - input_data, - ).map_err(|e| ExecError { error: e.error, origin: ErrorOrigin::Callee })?; + let output = executable + .execute(self, &entry_point, input_data) + .map_err(|e| ExecError { error: e.error, origin: ErrorOrigin::Callee })?; // Additional work needs to be performed in case of an instantiation. if output.is_success() && entry_point == ExportedFunction::Constructor { @@ -820,7 +805,7 @@ where // It is not allowed to terminate a contract inside its constructor. 
if let CachedContract::Terminated = frame.contract_info { - return Err(Error::::TerminatedInConstructor.into()); + return Err(Error::::TerminatedInConstructor.into()) } // Collect the rent for the first block to prevent the creation of very large @@ -828,16 +813,13 @@ where // This also makes sure that it is above the subsistence threshold // in order to keep up the guarantuee that we always leave a tombstone behind // with the exception of a contract that called `seal_terminate`. - let contract = Rent:: - ::charge(&account_id, frame.invalidate(), occupied_storage)? - .ok_or(Error::::NewContractNotFunded)?; + let contract = + Rent::::charge(&account_id, frame.invalidate(), occupied_storage)? + .ok_or(Error::::NewContractNotFunded)?; frame.contract_info = CachedContract::Cached(contract); // Deposit an instantiation event. - deposit_event::(vec![], Event::Instantiated( - self.caller().clone(), - account_id, - )); + deposit_event::(vec![], Event::Instantiated(self.caller().clone(), account_id)); } Ok(output) @@ -849,9 +831,7 @@ where let (success, output) = with_transaction(|| { let output = do_transaction(); match &output { - Ok(result) if result.is_success() => { - TransactionOutcome::Commit((true, output)) - }, + Ok(result) if result.is_success() => TransactionOutcome::Commit((true, output)), _ => TransactionOutcome::Rollback((false, output)), } }); @@ -880,7 +860,7 @@ where prev.nested_meter.absorb_nested(frame.nested_meter); // Only gas counter changes are persisted in case of a failure. if !persist { - return; + return } if let CachedContract::Cached(contract) = frame.contract_info { // optimization: Predecessor is the same contract. @@ -889,7 +869,7 @@ where // trigger a rollback. 
if prev.account_id == *account_id { prev.contract_info = CachedContract::Cached(contract); - return; + return } // Predecessor is a different contract: We persist the info and invalidate the first @@ -914,12 +894,12 @@ where self.gas_meter.absorb_nested(mem::take(&mut self.first_frame.nested_meter)); // Only gas counter changes are persisted in case of a failure. if !persist { - return; + return } if let CachedContract::Cached(contract) = &self.first_frame.contract_info { >::insert( &self.first_frame.account_id, - ContractInfo::Alive(contract.clone()) + ContractInfo::Alive(contract.clone()), ); } if let Some(counter) = self.account_counter { @@ -942,7 +922,7 @@ where value: BalanceOf, ) -> DispatchResult { if value == 0u32.into() { - return Ok(()); + return Ok(()) } let existence_requirement = match (allow_death, sender_is_contract) { @@ -974,16 +954,10 @@ where // we can error out early. This avoids executing the constructor in cases where // we already know that the contract has too little balance. if frame.entry_point == ExportedFunction::Constructor && value < subsistence_threshold { - return Err(>::NewContractNotFunded.into()); + return Err(>::NewContractNotFunded.into()) } - Self::transfer( - self.caller_is_origin(), - false, - self.caller(), - &frame.account_id, - value, - ) + Self::transfer(self.caller_is_origin(), false, self.caller(), &frame.account_id, value) } /// Wether the caller is the initiator of the call stack. @@ -1004,17 +978,13 @@ where /// Iterator over all frames. /// /// The iterator starts with the top frame and ends with the root frame. - fn frames(&self) -> impl Iterator> { - sp_std::iter::once(&self.first_frame) - .chain(&self.frames) - .rev() + fn frames(&self) -> impl Iterator> { + sp_std::iter::once(&self.first_frame).chain(&self.frames).rev() } /// Same as `frames` but with a mutable reference as iterator item. 
- fn frames_mut(&mut self) -> impl Iterator> { - sp_std::iter::once(&mut self.first_frame) - .chain(&mut self.frames) - .rev() + fn frames_mut(&mut self) -> impl Iterator> { + sp_std::iter::once(&mut self.first_frame).chain(&mut self.frames).rev() } /// Returns whether the current contract is on the stack multiple times. @@ -1068,7 +1038,7 @@ where let try_call = || { if !self.allows_reentry(&to) { - return Err(>::ReentranceDenied.into()); + return Err(>::ReentranceDenied.into()) } // We ignore instantiate frames in our search for a cached contract. // Otherwise it would be possible to recursively call a contract from its own @@ -1076,17 +1046,12 @@ where let cached_info = self .frames() .find(|f| f.entry_point == ExportedFunction::Call && f.account_id == to) - .and_then(|f| { - match &f.contract_info { - CachedContract::Cached(contract) => Some(contract.clone()), - _ => None, - } + .and_then(|f| match &f.contract_info { + CachedContract::Cached(contract) => Some(contract.clone()), + _ => None, }); - let executable = self.push_frame( - FrameArgs::Call{dest: to, cached_info}, - value, - gas_limit - )?; + let executable = + self.push_frame(FrameArgs::Call { dest: to, cached_info }, value, gas_limit)?; self.run(executable, input_data) }; @@ -1125,7 +1090,7 @@ where fn terminate(&mut self, beneficiary: &AccountIdOf) -> Result<(), DispatchError> { if self.is_recursive() { - return Err(Error::::TerminatedWhileReentrant.into()); + return Err(Error::::TerminatedWhileReentrant.into()) } let frame = self.top_frame_mut(); let info = frame.terminate(); @@ -1139,9 +1104,10 @@ where )?; ContractInfoOf::::remove(&frame.account_id); E::remove_user(info.code_hash, &mut frame.nested_meter)?; - Contracts::::deposit_event( - Event::Terminated(frame.account_id.clone(), beneficiary.clone()), - ); + Contracts::::deposit_event(Event::Terminated( + frame.account_id.clone(), + beneficiary.clone(), + )); Ok(()) } @@ -1153,7 +1119,7 @@ where delta: Vec, ) -> Result<(), DispatchError> { if 
self.is_recursive() { - return Err(Error::::TerminatedWhileReentrant.into()); + return Err(Error::::TerminatedWhileReentrant.into()) } let frame = self.top_frame_mut(); let origin_contract = frame.contract_info().clone(); @@ -1170,23 +1136,14 @@ where if let Ok(_) = result { deposit_event::( vec![], - Event::Restored( - account_id, - dest, - code_hash, - rent_allowance, - ), + Event::Restored(account_id, dest, code_hash, rent_allowance), ); frame.terminate(); } result } - fn transfer( - &mut self, - to: &T::AccountId, - value: BalanceOf, - ) -> DispatchResult { + fn transfer(&mut self, to: &T::AccountId, value: BalanceOf) -> DispatchResult { Self::transfer(true, false, &self.top_frame().account_id, to, value) } @@ -1197,9 +1154,7 @@ where fn set_storage(&mut self, key: StorageKey, value: Option>) -> DispatchResult { let block_number = self.block_number; let frame = self.top_frame_mut(); - Storage::::write( - block_number, frame.contract_info(), &key, value, - ) + Storage::::write(block_number, frame.contract_info(), &key, value) } fn address(&self) -> &T::AccountId { @@ -1237,7 +1192,7 @@ where fn deposit_event(&mut self, topics: Vec, data: Vec) { deposit_event::( topics, - Event::ContractEmitted(self.top_frame().account_id.clone(), data) + Event::ContractEmitted(self.top_frame().account_id.clone(), data), ); } @@ -1249,7 +1204,9 @@ where self.top_frame_mut().contract_info().rent_allowance } - fn block_number(&self) -> T::BlockNumber { self.block_number } + fn block_number(&self) -> T::BlockNumber { + self.block_number + } fn max_value_size(&self) -> u32 { T::Schedule::get().limits.payload_len @@ -1303,10 +1260,7 @@ where } } -fn deposit_event( - topics: Vec, - event: Event, -) { +fn deposit_event(topics: Vec, event: Event) { >::deposit_event_indexed( &*topics, ::Event::from(event).into(), @@ -1336,24 +1290,27 @@ mod sealing { mod tests { use super::*; use crate::{ + exec::ExportedFunction::*, gas::GasMeter, storage::Storage, tests::{ - ALICE, BOB, CHARLIE, Call, 
TestFilter, ExtBuilder, Test, Event as MetaEvent, - test_utils::{place_contract, set_balance, get_balance}, + test_utils::{get_balance, place_contract, set_balance}, + Call, Event as MetaEvent, ExtBuilder, Test, TestFilter, ALICE, BOB, CHARLIE, }, - exec::ExportedFunction::*, Error, Weight, }; - use codec::{Encode, Decode}; - use sp_core::Bytes; - use sp_runtime::{DispatchError, traits::{BadOrigin, Hash}}; use assert_matches::assert_matches; - use std::{cell::RefCell, collections::HashMap, rc::Rc}; - use pretty_assertions::{assert_eq, assert_ne}; - use pallet_contracts_primitives::ReturnFlags; - use frame_support::{assert_ok, assert_err}; + use codec::{Decode, Encode}; + use frame_support::{assert_err, assert_ok}; use frame_system::{EventRecord, Phase}; + use pallet_contracts_primitives::ReturnFlags; + use pretty_assertions::{assert_eq, assert_ne}; + use sp_core::Bytes; + use sp_runtime::{ + traits::{BadOrigin, Hash}, + DispatchError, + }; + use std::{cell::RefCell, collections::HashMap, rc::Rc}; type System = frame_system::Pallet; @@ -1404,12 +1361,15 @@ mod tests { // Generate code hashes as monotonically increasing values. 
let hash = ::Hash::from_low_u64_be(loader.counter); loader.counter += 1; - loader.map.insert(hash, MockExecutable { - func: Rc::new(f), - func_type, - code_hash: hash.clone(), - refcount: 1, - }); + loader.map.insert( + hash, + MockExecutable { + func: Rc::new(f), + func_type, + code_hash: hash.clone(), + refcount: 1, + }, + ); hash }) } @@ -1417,7 +1377,8 @@ mod tests { fn increment_refcount(code_hash: CodeHash) { LOADER.with(|loader| { let mut loader = loader.borrow_mut(); - loader.map + loader + .map .entry(code_hash) .and_modify(|executable| executable.refcount += 1) .or_insert_with(|| panic!("code_hash does not exist")); @@ -1442,12 +1403,7 @@ mod tests { fn refcount(code_hash: &CodeHash) -> u32 { LOADER.with(|loader| { - loader - .borrow() - .map - .get(code_hash) - .expect("code_hash does not exist") - .refcount() + loader.borrow().map.get(code_hash).expect("code_hash does not exist").refcount() }) } } @@ -1463,7 +1419,8 @@ mod tests { fn from_storage_noinstr(code_hash: CodeHash) -> Result { LOADER.with(|loader| { - loader.borrow_mut() + loader + .borrow_mut() .map .get(&code_hash) .cloned() @@ -1475,16 +1432,18 @@ mod tests { MockLoader::decrement_refcount(self.code_hash); } - fn add_user(code_hash: CodeHash, _: &mut GasMeter) - -> Result<(), DispatchError> - { + fn add_user( + code_hash: CodeHash, + _: &mut GasMeter, + ) -> Result<(), DispatchError> { MockLoader::increment_refcount(code_hash); Ok(()) } - fn remove_user(code_hash: CodeHash, _: &mut GasMeter) - -> Result<(), DispatchError> - { + fn remove_user( + code_hash: CodeHash, + _: &mut GasMeter, + ) -> Result<(), DispatchError> { MockLoader::decrement_refcount(code_hash); Ok(()) } @@ -1499,10 +1458,7 @@ mod tests { MockLoader::increment_refcount(self.code_hash); } if function == &self.func_type { - (self.func)(MockCtx { - ext, - input_data, - }, &self) + (self.func)(MockCtx { ext, input_data }, &self) } else { exec_success() } @@ -1551,9 +1507,7 @@ mod tests { place_contract(&BOB, exec_ch); 
assert_matches!( - MockStack::run_call( - ALICE, BOB, &mut gas_meter, &schedule, value, vec![], None, - ), + MockStack::run_call(ALICE, BOB, &mut gas_meter, &schedule, value, vec![], None,), Ok(_) ); }); @@ -1572,13 +1526,7 @@ mod tests { set_balance(&origin, 100); set_balance(&dest, 0); - MockStack::transfer( - true, - false, - &origin, - &dest, - 55, - ).unwrap(); + MockStack::transfer(true, false, &origin, &dest, 55).unwrap(); assert_eq!(get_balance(&origin), 45); assert_eq!(get_balance(&dest), 55); @@ -1592,10 +1540,9 @@ mod tests { let origin = ALICE; let dest = BOB; - let return_ch = MockLoader::insert( - Call, - |_, _| Ok(ExecReturnValue { flags: ReturnFlags::REVERT, data: Bytes(Vec::new()) }) - ); + let return_ch = MockLoader::insert(Call, |_, _| { + Ok(ExecReturnValue { flags: ReturnFlags::REVERT, data: Bytes(Vec::new()) }) + }); ExtBuilder::default().build().execute_with(|| { let schedule = ::Schedule::get(); @@ -1611,7 +1558,8 @@ mod tests { 55, vec![], None, - ).unwrap(); + ) + .unwrap(); assert!(!output.is_success()); assert_eq!(get_balance(&origin), 100); @@ -1631,18 +1579,9 @@ mod tests { ExtBuilder::default().build().execute_with(|| { set_balance(&origin, 0); - let result = MockStack::transfer( - false, - false, - &origin, - &dest, - 100, - ); + let result = MockStack::transfer(false, false, &origin, &dest, 100); - assert_eq!( - result, - Err(Error::::TransferFailed.into()) - ); + assert_eq!(result, Err(Error::::TransferFailed.into())); assert_eq!(get_balance(&origin), 0); assert_eq!(get_balance(&dest), 0); }); @@ -1654,10 +1593,9 @@ mod tests { // is returned from the execution context. 
let origin = ALICE; let dest = BOB; - let return_ch = MockLoader::insert( - Call, - |_, _| Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes(vec![1, 2, 3, 4]) }) - ); + let return_ch = MockLoader::insert(Call, |_, _| { + Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes(vec![1, 2, 3, 4]) }) + }); ExtBuilder::default().build().execute_with(|| { let schedule = ::Schedule::get(); @@ -1685,10 +1623,9 @@ mod tests { // is returned from the execution context. let origin = ALICE; let dest = BOB; - let return_ch = MockLoader::insert( - Call, - |_, _| Ok(ExecReturnValue { flags: ReturnFlags::REVERT, data: Bytes(vec![1, 2, 3, 4]) }) - ); + let return_ch = MockLoader::insert(Call, |_, _| { + Ok(ExecReturnValue { flags: ReturnFlags::REVERT, data: Bytes(vec![1, 2, 3, 4]) }) + }); ExtBuilder::default().build().execute_with(|| { let schedule = ::Schedule::get(); @@ -1747,9 +1684,8 @@ mod tests { let schedule = ::Schedule::get(); let subsistence = Contracts::::subsistence_threshold(); let mut gas_meter = GasMeter::::new(GAS_LIMIT); - let executable = MockExecutable::from_storage( - input_data_ch, &schedule, &mut gas_meter - ).unwrap(); + let executable = + MockExecutable::from_storage(input_data_ch, &schedule, &mut gas_meter).unwrap(); set_balance(&ALICE, subsistence * 10); @@ -1784,10 +1720,7 @@ mod tests { if !*reached_bottom { // We are first time here, it means we just reached bottom. // Verify that we've got proper error and set `reached_bottom`. - assert_eq!( - r, - Err(Error::::MaxCallDepthReached.into()) - ); + assert_eq!(r, Err(Error::::MaxCallDepthReached.into())); *reached_bottom = true; } else { // We just unwinding stack here. @@ -1829,22 +1762,17 @@ mod tests { let bob_ch = MockLoader::insert(Call, |ctx, _| { // Record the caller for bob. 
- WITNESSED_CALLER_BOB.with(|caller| - *caller.borrow_mut() = Some(ctx.ext.caller().clone()) - ); + WITNESSED_CALLER_BOB + .with(|caller| *caller.borrow_mut() = Some(ctx.ext.caller().clone())); // Call into CHARLIE contract. - assert_matches!( - ctx.ext.call(0, CHARLIE, 0, vec![], true), - Ok(_) - ); + assert_matches!(ctx.ext.call(0, CHARLIE, 0, vec![], true), Ok(_)); exec_success() }); let charlie_ch = MockLoader::insert(Call, |ctx, _| { // Record the caller for charlie. - WITNESSED_CALLER_CHARLIE.with(|caller| - *caller.borrow_mut() = Some(ctx.ext.caller().clone()) - ); + WITNESSED_CALLER_CHARLIE + .with(|caller| *caller.borrow_mut() = Some(ctx.ext.caller().clone())); exec_success() }); @@ -1877,10 +1805,7 @@ mod tests { assert_eq!(*ctx.ext.address(), BOB); // Call into charlie contract. - assert_matches!( - ctx.ext.call(0, CHARLIE, 0, vec![], true), - Ok(_) - ); + assert_matches!(ctx.ext.call(0, CHARLIE, 0, vec![], true), Ok(_)); exec_success() }); let charlie_ch = MockLoader::insert(Call, |ctx, _| { @@ -1914,9 +1839,8 @@ mod tests { ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let schedule = ::Schedule::get(); let mut gas_meter = GasMeter::::new(GAS_LIMIT); - let executable = MockExecutable::from_storage( - dummy_ch, &schedule, &mut gas_meter - ).unwrap(); + let executable = + MockExecutable::from_storage(dummy_ch, &schedule, &mut gas_meter).unwrap(); assert_matches!( MockStack::run_instantiate( @@ -1936,17 +1860,15 @@ mod tests { #[test] fn instantiation_work_with_success_output() { - let dummy_ch = MockLoader::insert( - Constructor, - |_, _| Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes(vec![80, 65, 83, 83]) }) - ); + let dummy_ch = MockLoader::insert(Constructor, |_, _| { + Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes(vec![80, 65, 83, 83]) }) + }); ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let schedule = ::Schedule::get(); let mut gas_meter = GasMeter::::new(GAS_LIMIT); 
- let executable = MockExecutable::from_storage( - dummy_ch, &schedule, &mut gas_meter - ).unwrap(); + let executable = + MockExecutable::from_storage(dummy_ch, &schedule, &mut gas_meter).unwrap(); set_balance(&ALICE, 1000); let instantiated_contract_address = assert_matches!( @@ -1965,26 +1887,25 @@ mod tests { // Check that the newly created account has the expected code hash and // there are instantiation event. - assert_eq!(Storage::::code_hash(&instantiated_contract_address).unwrap(), dummy_ch); - assert_eq!(&events(), &[ - Event::Instantiated(ALICE, instantiated_contract_address) - ]); + assert_eq!( + Storage::::code_hash(&instantiated_contract_address).unwrap(), + dummy_ch + ); + assert_eq!(&events(), &[Event::Instantiated(ALICE, instantiated_contract_address)]); }); } #[test] fn instantiation_fails_with_failing_output() { - let dummy_ch = MockLoader::insert( - Constructor, - |_, _| Ok(ExecReturnValue { flags: ReturnFlags::REVERT, data: Bytes(vec![70, 65, 73, 76]) }) - ); + let dummy_ch = MockLoader::insert(Constructor, |_, _| { + Ok(ExecReturnValue { flags: ReturnFlags::REVERT, data: Bytes(vec![70, 65, 73, 76]) }) + }); ExtBuilder::default().existential_deposit(15).build().execute_with(|| { let schedule = ::Schedule::get(); let mut gas_meter = GasMeter::::new(GAS_LIMIT); - let executable = MockExecutable::from_storage( - dummy_ch, &schedule, &mut gas_meter - ).unwrap(); + let executable = + MockExecutable::from_storage(dummy_ch, &schedule, &mut gas_meter).unwrap(); set_balance(&ALICE, 1000); let instantiated_contract_address = assert_matches!( @@ -2016,13 +1937,16 @@ mod tests { let instantiated_contract_address = Rc::clone(&instantiated_contract_address); move |ctx, _| { // Instantiate a contract and save it's address in `instantiated_contract_address`. 
- let (address, output) = ctx.ext.instantiate( - 0, - dummy_ch, - Contracts::::subsistence_threshold() * 3, - vec![], - &[48, 49, 50], - ).unwrap(); + let (address, output) = ctx + .ext + .instantiate( + 0, + dummy_ch, + Contracts::::subsistence_threshold() * 3, + vec![], + &[48, 49, 50], + ) + .unwrap(); *instantiated_contract_address.borrow_mut() = address.into(); Ok(output) @@ -2036,27 +1960,33 @@ mod tests { assert_matches!( MockStack::run_call( - ALICE, BOB, &mut GasMeter::::new(GAS_LIMIT), &schedule, 20, vec![], None, + ALICE, + BOB, + &mut GasMeter::::new(GAS_LIMIT), + &schedule, + 20, + vec![], + None, ), Ok(_) ); - let instantiated_contract_address = instantiated_contract_address.borrow().as_ref().unwrap().clone(); + let instantiated_contract_address = + instantiated_contract_address.borrow().as_ref().unwrap().clone(); // Check that the newly created account has the expected code hash and // there are instantiation event. - assert_eq!(Storage::::code_hash(&instantiated_contract_address).unwrap(), dummy_ch); - assert_eq!(&events(), &[ - Event::Instantiated(BOB, instantiated_contract_address) - ]); + assert_eq!( + Storage::::code_hash(&instantiated_contract_address).unwrap(), + dummy_ch + ); + assert_eq!(&events(), &[Event::Instantiated(BOB, instantiated_contract_address)]); }); } #[test] fn instantiation_traps() { - let dummy_ch = MockLoader::insert(Constructor, - |_, _| Err("It's a trap!".into()) - ); + let dummy_ch = MockLoader::insert(Constructor, |_, _| Err("It's a trap!".into())); let instantiator_ch = MockLoader::insert(Call, { let dummy_ch = dummy_ch.clone(); move |ctx, _| { @@ -2087,7 +2017,13 @@ mod tests { assert_matches!( MockStack::run_call( - ALICE, BOB, &mut GasMeter::::new(GAS_LIMIT), &schedule, 20, vec![], None, + ALICE, + BOB, + &mut GasMeter::::new(GAS_LIMIT), + &schedule, + 20, + vec![], + None, ), Ok(_) ); @@ -2105,36 +2041,29 @@ mod tests { exec_success() }); - ExtBuilder::default() - .existential_deposit(15) - .build() - 
.execute_with(|| { - let schedule = ::Schedule::get(); - let mut gas_meter = GasMeter::::new(GAS_LIMIT); - let executable = MockExecutable::from_storage( - terminate_ch, &schedule, &mut gas_meter - ).unwrap(); - set_balance(&ALICE, 1000); - - assert_eq!( - MockStack::run_instantiate( - ALICE, - executable, - &mut gas_meter, - &schedule, - 100, - vec![], - &[], - None, - ), - Err(Error::::TerminatedInConstructor.into()) - ); + ExtBuilder::default().existential_deposit(15).build().execute_with(|| { + let schedule = ::Schedule::get(); + let mut gas_meter = GasMeter::::new(GAS_LIMIT); + let executable = + MockExecutable::from_storage(terminate_ch, &schedule, &mut gas_meter).unwrap(); + set_balance(&ALICE, 1000); - assert_eq!( - &events(), - &[] - ); - }); + assert_eq!( + MockStack::run_instantiate( + ALICE, + executable, + &mut gas_meter, + &schedule, + 100, + vec![], + &[], + None, + ), + Err(Error::::TerminatedInConstructor.into()) + ); + + assert_eq!(&events(), &[]); + }); } #[test] @@ -2152,9 +2081,8 @@ mod tests { let subsistence = Contracts::::subsistence_threshold(); let schedule = ::Schedule::get(); let mut gas_meter = GasMeter::::new(GAS_LIMIT); - let executable = MockExecutable::from_storage( - rent_allowance_ch, &schedule, &mut gas_meter - ).unwrap(); + let executable = + MockExecutable::from_storage(rent_allowance_ch, &schedule, &mut gas_meter).unwrap(); set_balance(&ALICE, subsistence * 10); let result = MockStack::run_instantiate( @@ -2175,9 +2103,8 @@ mod tests { fn rent_params_works() { let code_hash = MockLoader::insert(Call, |ctx, executable| { let address = ctx.ext.address(); - let contract = >::get(address) - .and_then(|c| c.get_alive()) - .unwrap(); + let contract = + >::get(address).and_then(|c| c.get_alive()).unwrap(); assert_eq!(ctx.ext.rent_params(), &RentParams::new(address, &0, &contract, executable)); exec_success() }); @@ -2188,15 +2115,7 @@ mod tests { let mut gas_meter = GasMeter::::new(GAS_LIMIT); set_balance(&ALICE, subsistence * 10); 
place_contract(&BOB, code_hash); - MockStack::run_call( - ALICE, - BOB, - &mut gas_meter, - &schedule, - 0, - vec![], - None, - ).unwrap(); + MockStack::run_call(ALICE, BOB, &mut gas_meter, &schedule, 0, vec![], None).unwrap(); }); } @@ -2205,9 +2124,8 @@ mod tests { let code_hash = MockLoader::insert(Call, |ctx, executable| { let subsistence = Contracts::::subsistence_threshold(); let address = ctx.ext.address(); - let contract = >::get(address) - .and_then(|c| c.get_alive()) - .unwrap(); + let contract = + >::get(address).and_then(|c| c.get_alive()).unwrap(); let rent_params = RentParams::new(address, &0, &contract, executable); // Changing the allowance during the call: rent params stay unchanged. @@ -2219,13 +2137,9 @@ mod tests { // Creating another instance from the same code_hash increases the refcount. // This is also not reflected in the rent params. assert_eq!(MockLoader::refcount(&executable.code_hash), 1); - ctx.ext.instantiate( - 0, - executable.code_hash, - subsistence * 25, - vec![], - &[], - ).unwrap(); + ctx.ext + .instantiate(0, executable.code_hash, subsistence * 25, vec![], &[]) + .unwrap(); assert_eq!(MockLoader::refcount(&executable.code_hash), 2); assert_eq!(ctx.ext.rent_params(), &rent_params); @@ -2246,31 +2160,38 @@ mod tests { subsistence * 50, vec![], None, - ).unwrap(); + ) + .unwrap(); }); } #[test] fn rent_status_works() { let code_hash = MockLoader::insert(Call, |ctx, _| { - assert_eq!(ctx.ext.rent_status(0), RentStatus { - max_deposit: 80000, - current_deposit: 80000, - custom_refcount_deposit: None, - max_rent: 32, - current_rent: 32, - custom_refcount_rent: None, - _reserved: None, - }); - assert_eq!(ctx.ext.rent_status(1), RentStatus { - max_deposit: 80000, - current_deposit: 80000, - custom_refcount_deposit: Some(80000), - max_rent: 32, - current_rent: 32, - custom_refcount_rent: Some(32), - _reserved: None, - }); + assert_eq!( + ctx.ext.rent_status(0), + RentStatus { + max_deposit: 80000, + current_deposit: 80000, + 
custom_refcount_deposit: None, + max_rent: 32, + current_rent: 32, + custom_refcount_rent: None, + _reserved: None, + } + ); + assert_eq!( + ctx.ext.rent_status(1), + RentStatus { + max_deposit: 80000, + current_deposit: 80000, + custom_refcount_deposit: Some(80000), + max_rent: 32, + current_rent: 32, + custom_refcount_rent: Some(32), + _reserved: None, + } + ); exec_success() }); @@ -2280,15 +2201,7 @@ mod tests { let mut gas_meter = GasMeter::::new(GAS_LIMIT); set_balance(&ALICE, subsistence * 10); place_contract(&BOB, code_hash); - MockStack::run_call( - ALICE, - BOB, - &mut gas_meter, - &schedule, - 0, - vec![], - None, - ).unwrap(); + MockStack::run_call(ALICE, BOB, &mut gas_meter, &schedule, 0, vec![], None).unwrap(); }); } @@ -2308,10 +2221,7 @@ mod tests { let changed_allowance = >::max_value() / 2; assert_ne!(original_allowance, changed_allowance); ctx.ext.set_rent_allowance(changed_allowance); - assert_eq!( - ctx.ext.call(0, CHARLIE, 0, vec![], true), - exec_trapped() - ); + assert_eq!(ctx.ext.call(0, CHARLIE, 0, vec![], true), exec_trapped()); assert_eq!(ctx.ext.rent_allowance(), changed_allowance); assert_ne!(ctx.ext.rent_allowance(), original_allowance); } @@ -2356,9 +2266,7 @@ mod tests { let schedule = ::Schedule::get(); let subsistence = Contracts::::subsistence_threshold(); let mut gas_meter = GasMeter::::new(GAS_LIMIT); - let executable = MockExecutable::from_storage( - code, &schedule, &mut gas_meter - ).unwrap(); + let executable = MockExecutable::from_storage(code, &schedule, &mut gas_meter).unwrap(); set_balance(&ALICE, subsistence * 10); @@ -2400,7 +2308,8 @@ mod tests { 0, vec![], Some(&mut debug_buffer), - ).unwrap(); + ) + .unwrap(); }); assert_eq!(&String::from_utf8(debug_buffer).unwrap(), "This is a testMore text"); @@ -2445,9 +2354,7 @@ mod tests { ctx.ext.call(0, dest, 0, vec![], false) }); - let code_charlie = MockLoader::insert(Call, |_, _| { - exec_success() - }); + let code_charlie = MockLoader::insert(Call, |_, _| 
exec_success()); ExtBuilder::default().build().execute_with(|| { let schedule = ::Schedule::get(); @@ -2475,7 +2382,8 @@ mod tests { 0, BOB.encode(), None, - ).map_err(|e| e.error), + ) + .map_err(|e| e.error), >::ReentranceDenied, ); }); @@ -2492,9 +2400,8 @@ mod tests { }); // call BOB with input set to '1' - let code_charlie = MockLoader::insert(Call, |ctx, _| { - ctx.ext.call(0, BOB, 0, vec![1], true) - }); + let code_charlie = + MockLoader::insert(Call, |ctx, _| ctx.ext.call(0, BOB, 0, vec![1], true)); ExtBuilder::default().build().execute_with(|| { let schedule = ::Schedule::get(); @@ -2511,7 +2418,8 @@ mod tests { 0, vec![0], None, - ).map_err(|e| e.error), + ) + .map_err(|e| e.error), >::ReentranceDenied, ); }); @@ -2532,24 +2440,17 @@ mod tests { set_balance(&ALICE, subsistence * 10); place_contract(&BOB, code_hash); System::reset_events(); - MockStack::run_call( - ALICE, - BOB, - &mut gas_meter, - &schedule, - 0, - vec![], - None, - ).unwrap(); + MockStack::run_call(ALICE, BOB, &mut gas_meter, &schedule, 0, vec![], None).unwrap(); let remark_hash = ::Hashing::hash(b"Hello World"); - assert_eq!(System::events(), vec![ - EventRecord { + assert_eq!( + System::events(), + vec![EventRecord { phase: Phase::Initialization, event: MetaEvent::System(frame_system::Event::Remarked(BOB, remark_hash)), topics: vec![], - }, - ]); + },] + ); }); } @@ -2567,17 +2468,14 @@ mod tests { let forbidden_call = Call::Balances(BalanceCall::transfer(CHARLIE, 22)); // simple cases: direct call - assert_err!( - ctx.ext.call_runtime(forbidden_call.clone()), - BadOrigin, - ); + assert_err!(ctx.ext.call_runtime(forbidden_call.clone()), BadOrigin,); // as part of a patch: return is OK (but it interrupted the batch) - assert_ok!( - ctx.ext.call_runtime(Call::Utility(UtilCall::batch(vec![ - allowed_call.clone(), forbidden_call, allowed_call - ]))), - ); + assert_ok!(ctx.ext.call_runtime(Call::Utility(UtilCall::batch(vec![ + allowed_call.clone(), + forbidden_call, + allowed_call + ]))),); 
// the transfer wasn't performed assert_eq!(get_balance(&CHARLIE), 0); @@ -2585,11 +2483,9 @@ mod tests { exec_success() }); - TestFilter::set_filter(|call| { - match call { - Call::Balances(pallet_balances::Call::transfer(_, _)) => false, - _ => true, - } + TestFilter::set_filter(|call| match call { + Call::Balances(pallet_balances::Call::transfer(_, _)) => false, + _ => true, }); ExtBuilder::default().build().execute_with(|| { @@ -2599,31 +2495,27 @@ mod tests { set_balance(&ALICE, subsistence * 10); place_contract(&BOB, code_hash); System::reset_events(); - MockStack::run_call( - ALICE, - BOB, - &mut gas_meter, - &schedule, - 0, - vec![], - None, - ).unwrap(); + MockStack::run_call(ALICE, BOB, &mut gas_meter, &schedule, 0, vec![], None).unwrap(); let remark_hash = ::Hashing::hash(b"Hello"); - assert_eq!(System::events(), vec![ - EventRecord { - phase: Phase::Initialization, - event: MetaEvent::System(frame_system::Event::Remarked(BOB, remark_hash)), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: MetaEvent::Utility( - pallet_utility::Event::BatchInterrupted(1, BadOrigin.into()), - ), - topics: vec![], - }, - ]); + assert_eq!( + System::events(), + vec![ + EventRecord { + phase: Phase::Initialization, + event: MetaEvent::System(frame_system::Event::Remarked(BOB, remark_hash)), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: MetaEvent::Utility(pallet_utility::Event::BatchInterrupted( + 1, + BadOrigin.into() + ),), + topics: vec![], + }, + ] + ); }); } } diff --git a/frame/contracts/src/gas.rs b/frame/contracts/src/gas.rs index 64f410c4cef2b..38d18c1e24c19 100644 --- a/frame/contracts/src/gas.rs +++ b/frame/contracts/src/gas.rs @@ -15,17 +15,17 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::{Config, Error, exec::ExecError}; -use sp_std::marker::PhantomData; -use sp_runtime::traits::Zero; +use crate::{exec::ExecError, Config, Error}; use frame_support::{ dispatch::{ - DispatchResultWithPostInfo, PostDispatchInfo, DispatchErrorWithPostInfo, DispatchError, + DispatchError, DispatchErrorWithPostInfo, DispatchResultWithPostInfo, PostDispatchInfo, }, weights::Weight, DefaultNoBound, }; use sp_core::crypto::UncheckedFrom; +use sp_runtime::traits::Zero; +use sp_std::marker::PhantomData; #[cfg(test)] use std::{any::Any, fmt::Debug}; @@ -88,7 +88,7 @@ pub struct GasMeter { impl GasMeter where - T::AccountId: UncheckedFrom<::Hash> + AsRef<[u8]> + T::AccountId: UncheckedFrom<::Hash> + AsRef<[u8]>, { pub fn new(gas_limit: Weight) -> Self { GasMeter { @@ -107,11 +107,7 @@ where /// /// Passing `0` as amount is interpreted as "all remaining gas". pub fn nested(&mut self, amount: Weight) -> Result { - let amount = if amount == 0 { - self.gas_left - } else { - amount - }; + let amount = if amount == 0 { self.gas_left } else { amount }; // NOTE that it is ok to allocate all available gas since it still ensured // by `charge` that it doesn't reach zero. @@ -155,10 +151,8 @@ where #[cfg(test)] { // Unconditionally add the token to the storage. - let erased_tok = ErasedToken { - description: format!("{:?}", token), - token: Box::new(token), - }; + let erased_tok = + ErasedToken { description: format!("{:?}", token), token: Box::new(token) }; self.tokens.push(erased_tok); } @@ -277,7 +271,9 @@ mod tests { #[derive(Copy, Clone, PartialEq, Eq, Debug)] struct SimpleToken(u64); impl Token for SimpleToken { - fn weight(&self) -> u64 { self.0 } + fn weight(&self) -> u64 { + self.0 + } } #[test] @@ -318,7 +314,6 @@ mod tests { assert!(gas_meter.charge(SimpleToken(1)).is_err()); } - // Charging the exact amount that the user paid for should be // possible. 
#[test] diff --git a/frame/contracts/src/lib.rs b/frame/contracts/src/lib.rs index 296ab443cee00..596d48332fc4f 100644 --- a/frame/contracts/src/lib.rs +++ b/frame/contracts/src/lib.rs @@ -78,17 +78,17 @@ //! WebAssembly based smart contracts in the Rust programming language. This is a work in progress. #![cfg_attr(not(feature = "std"), no_std)] -#![cfg_attr(feature = "runtime-benchmarks", recursion_limit="512")] +#![cfg_attr(feature = "runtime-benchmarks", recursion_limit = "512")] #[macro_use] mod gas; -mod storage; +mod benchmarking; mod exec; -mod wasm; +mod migration; mod rent; -mod benchmarking; mod schedule; -mod migration; +mod storage; +mod wasm; pub mod chain_extension; pub mod weights; @@ -97,49 +97,48 @@ pub mod weights; mod tests; pub use crate::{ - pallet::*, - schedule::{Schedule, Limits, InstructionWeights, HostFnWeights}, exec::Frame, + pallet::*, + schedule::{HostFnWeights, InstructionWeights, Limits, Schedule}, }; use crate::{ + exec::{Executable, Stack as ExecStack}, gas::GasMeter, - exec::{Stack as ExecStack, Executable}, rent::Rent, - storage::{Storage, DeletedContract, ContractInfo, AliveContractInfo, TombstoneContractInfo}, - weights::WeightInfo, + storage::{AliveContractInfo, ContractInfo, DeletedContract, Storage, TombstoneContractInfo}, wasm::PrefabWasmModule, -}; -use sp_core::{Bytes, crypto::UncheckedFrom}; -use sp_std::prelude::*; -use sp_runtime::{ - traits::{ - Hash, StaticLookup, Convert, Saturating, Zero, - }, - Perbill, + weights::WeightInfo, }; use frame_support::{ - traits::{OnUnbalanced, Currency, Get, Time, Randomness, Filter}, - weights::{Weight, PostDispatchInfo, WithPostDispatchInfo, GetDispatchInfo}, dispatch::Dispatchable, + traits::{Currency, Filter, Get, OnUnbalanced, Randomness, Time}, + weights::{GetDispatchInfo, PostDispatchInfo, Weight, WithPostDispatchInfo}, }; use frame_system::Pallet as System; use pallet_contracts_primitives::{ - RentProjectionResult, GetStorageResult, ContractAccessError, ContractExecResult, - 
ContractInstantiateResult, Code, InstantiateReturnValue, + Code, ContractAccessError, ContractExecResult, ContractInstantiateResult, GetStorageResult, + InstantiateReturnValue, RentProjectionResult, +}; +use sp_core::{crypto::UncheckedFrom, Bytes}; +use sp_runtime::{ + traits::{Convert, Hash, Saturating, StaticLookup, Zero}, + Perbill, }; +use sp_std::prelude::*; type CodeHash = ::Hash; type TrieId = Vec; type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; -type NegativeImbalanceOf = - <::Currency as Currency<::AccountId>>::NegativeImbalance; +type NegativeImbalanceOf = <::Currency as Currency< + ::AccountId, +>>::NegativeImbalance; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::config] pub trait Config: frame_system::Config { @@ -156,11 +155,10 @@ pub mod pallet { type Event: From> + IsType<::Event>; /// The overarching call type. - type Call: - Dispatchable + - GetDispatchInfo + - codec::Decode + - IsType<::Call>; + type Call: Dispatchable + + GetDispatchInfo + + codec::Decode + + IsType<::Call>; /// Filter that is applied to calls dispatched by contracts. /// @@ -263,7 +261,7 @@ pub mod pallet { /// The allowed depth is `CallStack::size() + 1`. /// Therefore a size of `0` means that a contract cannot use call or instantiate. /// In other words only the origin called "root contract" is allowed to execute then. - type CallStack: smallvec::Array>; + type CallStack: smallvec::Array>; /// The maximum number of tries that can be queued for deletion. #[pallet::constant] @@ -286,7 +284,8 @@ pub mod pallet { fn on_initialize(_block: T::BlockNumber) -> Weight { // We do not want to go above the block limit and rather avoid lazy deletion // in that case. This should only happen on runtime upgrades. 
- let weight_limit = T::BlockWeights::get().max_block + let weight_limit = T::BlockWeights::get() + .max_block .saturating_sub(System::::block_weight().total()) .min(T::DeletionWeightLimit::get()); Storage::::process_deletion_queue_batch(weight_limit) @@ -317,14 +316,20 @@ pub mod pallet { dest: ::Source, #[pallet::compact] value: BalanceOf, #[pallet::compact] gas_limit: Weight, - data: Vec + data: Vec, ) -> DispatchResultWithPostInfo { let origin = ensure_signed(origin)?; let dest = T::Lookup::lookup(dest)?; let mut gas_meter = GasMeter::new(gas_limit); let schedule = T::Schedule::get(); let result = ExecStack::>::run_call( - origin, dest, &mut gas_meter, &schedule, value, data, None, + origin, + dest, + &mut gas_meter, + &schedule, + value, + data, + None, ); gas_meter.into_dispatch_result(result, T::WeightInfo::call()) } @@ -374,11 +379,19 @@ pub mod pallet { let code_len = executable.code_len(); ensure!(code_len <= T::Schedule::get().limits.code_len, Error::::CodeTooLarge); let result = ExecStack::>::run_instantiate( - origin, executable, &mut gas_meter, &schedule, endowment, data, &salt, None, - ).map(|(_address, output)| output); + origin, + executable, + &mut gas_meter, + &schedule, + endowment, + data, + &salt, + None, + ) + .map(|(_address, output)| output); gas_meter.into_dispatch_result( result, - T::WeightInfo::instantiate_with_code(code_len / 1024, salt.len() as u32 / 1024) + T::WeightInfo::instantiate_with_code(code_len / 1024, salt.len() as u32 / 1024), ) } @@ -403,12 +416,18 @@ pub mod pallet { let schedule = T::Schedule::get(); let executable = PrefabWasmModule::from_storage(code_hash, &schedule, &mut gas_meter)?; let result = ExecStack::>::run_instantiate( - origin, executable, &mut gas_meter, &schedule, endowment, data, &salt, None, - ).map(|(_address, output)| output); - gas_meter.into_dispatch_result( - result, - T::WeightInfo::instantiate(salt.len() as u32 / 1024), + origin, + executable, + &mut gas_meter, + &schedule, + endowment, + data, + 
&salt, + None, ) + .map(|(_address, output)| output); + gas_meter + .into_dispatch_result(result, T::WeightInfo::instantiate(salt.len() as u32 / 1024)) } /// Allows block producers to claim a small reward for evicting a contract. If a block @@ -424,44 +443,33 @@ pub mod pallet { pub fn claim_surcharge( origin: OriginFor, dest: T::AccountId, - aux_sender: Option + aux_sender: Option, ) -> DispatchResultWithPostInfo { let origin = origin.into(); let (signed, rewarded) = match (origin, aux_sender) { - (Ok(frame_system::RawOrigin::Signed(account)), None) => { - (true, account) - }, - (Ok(frame_system::RawOrigin::None), Some(aux_sender)) => { - (false, aux_sender) - }, + (Ok(frame_system::RawOrigin::Signed(account)), None) => (true, account), + (Ok(frame_system::RawOrigin::None), Some(aux_sender)) => (false, aux_sender), _ => Err(Error::::InvalidSurchargeClaim)?, }; // Add some advantage for block producers (who send unsigned extrinsics) by // adding a handicap: for signed extrinsics we use a slightly older block number // for the eviction check. This can be viewed as if we pushed regular users back in past. - let handicap = if signed { - T::SignedClaimHandicap::get() - } else { - Zero::zero() - }; + let handicap = if signed { T::SignedClaimHandicap::get() } else { Zero::zero() }; // If poking the contract has lead to eviction of the contract, give out the rewards. match Rent::>::try_eviction(&dest, handicap)? 
{ - (Some(rent_paid), code_len) => { - T::Currency::deposit_into_existing( - &rewarded, - T::SurchargeReward::get().min(rent_paid), - ) - .map(|_| PostDispatchInfo { - actual_weight: Some(T::WeightInfo::claim_surcharge(code_len / 1024)), - pays_fee: Pays::No, - }) - .map_err(Into::into) - } - (None, code_len) => Err(Error::::ContractNotEvictable.with_weight( - T::WeightInfo::claim_surcharge(code_len / 1024) - )), + (Some(rent_paid), code_len) => T::Currency::deposit_into_existing( + &rewarded, + T::SurchargeReward::get().min(rent_paid), + ) + .map(|_| PostDispatchInfo { + actual_weight: Some(T::WeightInfo::claim_surcharge(code_len / 1024)), + pays_fee: Pays::No, + }) + .map_err(Into::into), + (None, code_len) => Err(Error::::ContractNotEvictable + .with_weight(T::WeightInfo::claim_surcharge(code_len / 1024))), } } } @@ -637,7 +645,8 @@ pub mod pallet { /// A mapping between an original code hash and instrumented wasm code, ready for execution. #[pallet::storage] - pub(crate) type CodeStorage = StorageMap<_, Identity, CodeHash, PrefabWasmModule>; + pub(crate) type CodeStorage = + StorageMap<_, Identity, CodeHash, PrefabWasmModule>; /// The subtrie counter. #[pallet::storage] @@ -647,7 +656,8 @@ pub mod pallet { /// /// TWOX-NOTE: SAFE since `AccountId` is a secure hash. #[pallet::storage] - pub(crate) type ContractInfoOf = StorageMap<_, Twox64Concat, T::AccountId, ContractInfo>; + pub(crate) type ContractInfoOf = + StorageMap<_, Twox64Concat, T::AccountId, ContractInfo>; /// Evicted contracts that await child trie deletion. 
/// @@ -683,13 +693,15 @@ where ) -> ContractExecResult { let mut gas_meter = GasMeter::new(gas_limit); let schedule = T::Schedule::get(); - let mut debug_message = if debug { - Some(Vec::new()) - } else { - None - }; + let mut debug_message = if debug { Some(Vec::new()) } else { None }; let result = ExecStack::>::run_call( - origin, dest, &mut gas_meter, &schedule, value, input_data, debug_message.as_mut(), + origin, + dest, + &mut gas_meter, + &schedule, + value, + input_data, + debug_message.as_mut(), ); ContractExecResult { result: result.map_err(|r| r.error), @@ -733,34 +745,36 @@ where }; let executable = match executable { Ok(executable) => executable, - Err(error) => return ContractInstantiateResult { - result: Err(error.into()), - gas_consumed: gas_meter.gas_consumed(), - gas_required: gas_meter.gas_required(), - debug_message: Vec::new(), - } - }; - let mut debug_message = if debug { - Some(Vec::new()) - } else { - None + Err(error) => + return ContractInstantiateResult { + result: Err(error.into()), + gas_consumed: gas_meter.gas_consumed(), + gas_required: gas_meter.gas_required(), + debug_message: Vec::new(), + }, }; + let mut debug_message = if debug { Some(Vec::new()) } else { None }; let result = ExecStack::>::run_instantiate( - origin, executable, &mut gas_meter, &schedule, - endowment, data, &salt, debug_message.as_mut(), - ).and_then(|(account_id, result)| { + origin, + executable, + &mut gas_meter, + &schedule, + endowment, + data, + &salt, + debug_message.as_mut(), + ) + .and_then(|(account_id, result)| { let rent_projection = if compute_projection { - Some(Rent::>::compute_projection(&account_id) - .map_err(|_| >::NewContractNotFunded)?) 
+ Some( + Rent::>::compute_projection(&account_id) + .map_err(|_| >::NewContractNotFunded)?, + ) } else { None }; - Ok(InstantiateReturnValue { - result, - account_id, - rent_projection, - }) + Ok(InstantiateReturnValue { result, account_id, rent_projection }) }); ContractInstantiateResult { result: result.map_err(|e| e.error), @@ -799,9 +813,10 @@ where deploying_address: &T::AccountId, code_hash: &CodeHash, salt: &[u8], - ) -> T::AccountId - { - let buf: Vec<_> = deploying_address.as_ref().iter() + ) -> T::AccountId { + let buf: Vec<_> = deploying_address + .as_ref() + .iter() .chain(code_hash.as_ref()) .chain(salt) .cloned() @@ -846,7 +861,7 @@ where #[cfg(feature = "runtime-benchmarks")] fn reinstrument_module( module: &mut PrefabWasmModule, - schedule: &Schedule + schedule: &Schedule, ) -> frame_support::dispatch::DispatchResult { self::wasm::reinstrument(module, schedule) } diff --git a/frame/contracts/src/migration.rs b/frame/contracts/src/migration.rs index 8c5c06fde7ab1..a28cb87bb60bd 100644 --- a/frame/contracts/src/migration.rs +++ b/frame/contracts/src/migration.rs @@ -15,10 +15,10 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::{Config, Weight, Pallet}; +use crate::{Config, Pallet, Weight}; use frame_support::{ storage::migration, - traits::{GetPalletVersion, PalletVersion, PalletInfoAccess, Get}, + traits::{Get, GetPalletVersion, PalletInfoAccess, PalletVersion}, }; pub fn migrate() -> Weight { @@ -32,7 +32,7 @@ pub fn migrate() -> Weight { b"CurrentSchedule", b"", ); - } + }, _ => (), } diff --git a/frame/contracts/src/rent.rs b/frame/contracts/src/rent.rs index 3135862e88c90..9446b027ec1f4 100644 --- a/frame/contracts/src/rent.rs +++ b/frame/contracts/src/rent.rs @@ -18,23 +18,23 @@ //! A module responsible for computing the right amount of weight and charging it. 
use crate::{ - AliveContractInfo, BalanceOf, ContractInfo, ContractInfoOf, Pallet, Event, - TombstoneContractInfo, Config, CodeHash, Error, - storage::Storage, wasm::PrefabWasmModule, exec::Executable, gas::GasMeter, + exec::Executable, gas::GasMeter, storage::Storage, wasm::PrefabWasmModule, AliveContractInfo, + BalanceOf, CodeHash, Config, ContractInfo, ContractInfoOf, Error, Event, Pallet, + TombstoneContractInfo, }; -use sp_std::prelude::*; -use sp_io::hashing::blake2_256; -use sp_core::crypto::UncheckedFrom; use frame_support::{ storage::child, traits::{Currency, ExistenceRequirement, Get, OnUnbalanced, WithdrawReasons}, DefaultNoBound, }; use pallet_contracts_primitives::{ContractAccessError, RentProjection, RentProjectionResult}; +use sp_core::crypto::UncheckedFrom; +use sp_io::hashing::blake2_256; use sp_runtime::{ - DispatchError, traits::{Bounded, CheckedDiv, CheckedMul, SaturatedConversion, Saturating, Zero}, + DispatchError, }; +use sp_std::prelude::*; /// Information about the required deposit and resulting rent. /// @@ -83,13 +83,8 @@ where code_size: u32, ) -> Result>, DispatchError> { let current_block_number = >::block_number(); - let verdict = Self::consider_case( - account, - current_block_number, - Zero::zero(), - &contract, - code_size, - ); + let verdict = + Self::consider_case(account, current_block_number, Zero::zero(), &contract, code_size); Self::enact_verdict(account, contract, current_block_number, verdict, None) } @@ -136,10 +131,14 @@ where .unwrap_or_else(|| >::zero()) .saturating_add(contract.rent_paid); Self::enact_verdict( - account, contract, current_block_number, verdict, Some(module), + account, + contract, + current_block_number, + verdict, + Some(module), )?; Ok((Some(rent_paid), code_len)) - } + }, _ => Ok((None, code_len)), } } @@ -155,9 +154,7 @@ where /// NOTE that this is not a side-effect free function! It will actually collect rent and then /// compute the projection. 
This function is only used for implementation of an RPC method through /// `RuntimeApi` meaning that the changes will be discarded anyway. - pub fn compute_projection( - account: &T::AccountId, - ) -> RentProjectionResult { + pub fn compute_projection(account: &T::AccountId) -> RentProjectionResult { use ContractAccessError::IsTombstone; let contract_info = >::get(account); @@ -179,45 +176,42 @@ where // We skip the eviction in case one is in order. // Evictions should only be performed by [`try_eviction`]. - let new_contract_info = Self::enact_verdict( - account, alive_contract_info, current_block_number, verdict, None, - ); + let new_contract_info = + Self::enact_verdict(account, alive_contract_info, current_block_number, verdict, None); // Check what happened after enaction of the verdict. - let alive_contract_info = new_contract_info.map_err(|_| IsTombstone)?.ok_or_else(|| IsTombstone)?; + let alive_contract_info = + new_contract_info.map_err(|_| IsTombstone)?.ok_or_else(|| IsTombstone)?; // Compute how much would the fee per block be with the *updated* balance. let total_balance = T::Currency::total_balance(account); let free_balance = T::Currency::free_balance(account); - let fee_per_block = Self::fee_per_block( - &free_balance, &alive_contract_info, code_size, - ); + let fee_per_block = Self::fee_per_block(&free_balance, &alive_contract_info, code_size); if fee_per_block.is_zero() { - return Ok(RentProjection::NoEviction); + return Ok(RentProjection::NoEviction) } // Then compute how much the contract will sustain under these circumstances. 
- let rent_budget = Self::rent_budget(&total_balance, &free_balance, &alive_contract_info).expect( - "the contract exists and in the alive state; + let rent_budget = Self::rent_budget(&total_balance, &free_balance, &alive_contract_info) + .expect( + "the contract exists and in the alive state; the updated balance must be greater than subsistence deposit; this function doesn't return `None`; qed ", - ); + ); let blocks_left = match rent_budget.checked_div(&fee_per_block) { Some(blocks_left) => blocks_left, None => { // `fee_per_block` is not zero here, so `checked_div` can return `None` if // there is an overflow. This cannot happen with integers though. Return // `NoEviction` here just in case. - return Ok(RentProjection::NoEviction); - } + return Ok(RentProjection::NoEviction) + }, }; let blocks_left = blocks_left.saturated_into::().into(); - Ok(RentProjection::EvictionAt( - current_block_number + blocks_left, - )) + Ok(RentProjection::EvictionAt(current_block_number + blocks_left)) } /// Restores the destination account using the origin as prototype. @@ -246,18 +240,15 @@ where let current_block = >::block_number(); if origin_contract.last_write == Some(current_block) { - return Err(Error::::InvalidContractOrigin.into()); + return Err(Error::::InvalidContractOrigin.into()) } let dest_tombstone = >::get(&dest) .and_then(|c| c.get_tombstone()) .ok_or(Error::::InvalidDestinationContract)?; - let last_write = if !delta.is_empty() { - Some(current_block) - } else { - origin_contract.last_write - }; + let last_write = + if !delta.is_empty() { Some(current_block) } else { origin_contract.last_write }; // Fails if the code hash does not exist on chain E::add_user(code_hash, gas_meter)?; @@ -266,7 +257,8 @@ where // fail later due to tombstones not matching. This is because the restoration // is always called from a contract and therefore in a storage transaction. // The failure of this function will lead to this transaction's rollback. 
- let bytes_taken: u32 = delta.iter() + let bytes_taken: u32 = delta + .iter() .filter_map(|key| { let key = blake2_256(key); child::get_raw(&child_trie_info, &key).map(|value| { @@ -284,21 +276,24 @@ where ); if tombstone != dest_tombstone { - return Err(Error::::InvalidTombstone.into()); + return Err(Error::::InvalidTombstone.into()) } origin_contract.storage_size -= bytes_taken; >::remove(&origin); E::remove_user(origin_contract.code_hash, gas_meter)?; - >::insert(&dest, ContractInfo::Alive(AliveContractInfo:: { - code_hash, - rent_allowance, - rent_paid: >::zero(), - deduct_block: current_block, - last_write, - .. origin_contract - })); + >::insert( + &dest, + ContractInfo::Alive(AliveContractInfo:: { + code_hash, + rent_allowance, + rent_paid: >::zero(), + deduct_block: current_block, + last_write, + ..origin_contract + }), + ); let origin_free_balance = T::Currency::free_balance(&origin); T::Currency::make_free_balance_be(&origin, >::zero()); @@ -314,42 +309,34 @@ where current_refcount: u32, at_refcount: u32, ) -> RentStatus { - let calc_share = |refcount: u32| { - aggregated_code_size.checked_div(refcount).unwrap_or(0) - }; + let calc_share = |refcount: u32| aggregated_code_size.checked_div(refcount).unwrap_or(0); let current_share = calc_share(current_refcount); let custom_share = calc_share(at_refcount); RentStatus { max_deposit: Self::required_deposit(contract, aggregated_code_size), current_deposit: Self::required_deposit(contract, current_share), - custom_refcount_deposit: - if at_refcount > 0 { - Some(Self::required_deposit(contract, custom_share)) - } else { - None - }, + custom_refcount_deposit: if at_refcount > 0 { + Some(Self::required_deposit(contract, custom_share)) + } else { + None + }, max_rent: Self::fee_per_block(free_balance, contract, aggregated_code_size), current_rent: Self::fee_per_block(free_balance, contract, current_share), - custom_refcount_rent: - if at_refcount > 0 { - Some(Self::fee_per_block(free_balance, contract, 
custom_share)) - } else { - None - }, + custom_refcount_rent: if at_refcount > 0 { + Some(Self::fee_per_block(free_balance, contract, custom_share)) + } else { + None + }, _reserved: None, } } /// Returns how much deposit is required to not pay rent. - fn required_deposit( - contract: &AliveContractInfo, - code_size_share: u32, - ) -> BalanceOf { + fn required_deposit(contract: &AliveContractInfo, code_size_share: u32) -> BalanceOf { T::DepositPerStorageByte::get() .saturating_mul(contract.storage_size.saturating_add(code_size_share).into()) .saturating_add( - T::DepositPerStorageItem::get() - .saturating_mul(contract.pair_count.into()) + T::DepositPerStorageItem::get().saturating_mul(contract.pair_count.into()), ) .saturating_add(T::DepositPerContract::get()) } @@ -363,8 +350,8 @@ where contract: &AliveContractInfo, code_size_share: u32, ) -> BalanceOf { - let missing_deposit = Self::required_deposit(contract, code_size_share) - .saturating_sub(*free_balance); + let missing_deposit = + Self::required_deposit(contract, code_size_share).saturating_sub(*free_balance); T::RentFraction::get().mul_ceil(missing_deposit) } @@ -383,16 +370,13 @@ where // Reserved balance contributes towards the subsistence threshold to stay consistent // with the existential deposit where the reserved balance is also counted. if *total_balance < subsistence_threshold { - return None; + return None } // However, reserved balance cannot be charged so we need to use the free balance // to calculate the actual budget (which can be 0). let rent_allowed_to_charge = free_balance.saturating_sub(subsistence_threshold); - Some(>::min( - contract.rent_allowance, - rent_allowed_to_charge, - )) + Some(>::min(contract.rent_allowance, rent_allowed_to_charge)) } /// Consider the case for rent payment of the given account and returns a `Verdict`. 
@@ -414,7 +398,7 @@ where }; if blocks_passed.is_zero() { // Rent has already been paid - return Verdict::Exempt; + return Verdict::Exempt } let total_balance = T::Currency::total_balance(account); @@ -425,7 +409,7 @@ where if fee_per_block.is_zero() { // The rent deposit offset reduced the fee to 0. This means that the contract // gets the rent for free. - return Verdict::Exempt; + return Verdict::Exempt } let rent_budget = match Self::rent_budget(&total_balance, &free_balance, contract) { @@ -443,7 +427,7 @@ where account, ); 0u32.into() - } + }, }; let dues = fee_per_block @@ -469,18 +453,15 @@ where if insufficient_rent || !can_withdraw_rent { // The contract cannot afford the rent payment and has a balance above the subsistence // threshold, so it leaves a tombstone. - let amount = if can_withdraw_rent { - Some(OutstandingAmount::new(dues_limited)) - } else { - None - }; - return Verdict::Evict { amount }; + let amount = + if can_withdraw_rent { Some(OutstandingAmount::new(dues_limited)) } else { None }; + return Verdict::Evict { amount } } return Verdict::Charge { // We choose to use `dues_limited` here instead of `dues` just to err on the safer side. amount: OutstandingAmount::new(dues_limited), - }; + } } /// Enacts the given verdict and returns the updated `ContractInfo`. @@ -511,9 +492,7 @@ where } // Note: this operation is heavy. 
- let child_storage_root = child::root( - &alive_contract_info.child_trie_info(), - ); + let child_storage_root = child::root(&alive_contract_info.child_trie_info()); let tombstone = >::new( &child_storage_root[..], @@ -524,11 +503,9 @@ where code.drop_from_storage(); >::deposit_event(Event::Evicted(account.clone())); Ok(None) - } - (Verdict::Evict { amount: _ }, None) => { - Ok(None) - } - (Verdict::Exempt, _) => { + }, + (Verdict::Evict { amount: _ }, None) => Ok(None), + (Verdict::Exempt, _) => { let contract = ContractInfo::Alive(AliveContractInfo:: { deduct_block: current_block_number, ..alive_contract_info @@ -546,11 +523,9 @@ where >::insert(account, &contract); amount.withdraw(account); Ok(Some(contract.get_alive().expect("We just constructed it as alive. qed"))) - } + }, } } - - } /// The amount to charge. @@ -596,9 +571,7 @@ enum Verdict { Exempt, /// The contract cannot afford payment within its rent budget so it gets evicted. However, /// because its balance is greater than the subsistence threshold it leaves a tombstone. - Evict { - amount: Option>, - }, + Evict { amount: Option> }, /// Everything is OK, we just only take some charge. Charge { amount: OutstandingAmount }, } diff --git a/frame/contracts/src/schedule.rs b/frame/contracts/src/schedule.rs index f17b05a2d3abb..1db5804c41871 100644 --- a/frame/contracts/src/schedule.rs +++ b/frame/contracts/src/schedule.rs @@ -18,17 +18,17 @@ //! This module contains the cost schedule and supporting code that constructs a //! sane default schedule from a `WeightInfo` implementation. 
-use crate::{Config, weights::WeightInfo}; +use crate::{weights::WeightInfo, Config}; -#[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; +use codec::{Decode, Encode}; +use frame_support::{weights::Weight, DefaultNoBound}; use pallet_contracts_proc_macro::{ScheduleDebug, WeightDebug}; -use frame_support::{DefaultNoBound, weights::Weight}; -use sp_std::{marker::PhantomData, vec::Vec}; -use codec::{Encode, Decode}; -use scale_info::TypeInfo; use pwasm_utils::{parity_wasm::elements, rules}; +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; use sp_runtime::RuntimeDebug; +use sp_std::{marker::PhantomData, vec::Vec}; /// How many API calls are executed in a single batch. The reason for increasing the amount /// of API calls in batches (per benchmark component increase) is so that the linear regression @@ -396,11 +396,13 @@ pub struct HostFnWeights { /// The type parameter is used in the default implementation. #[codec(skip)] - pub _phantom: PhantomData + pub _phantom: PhantomData, } macro_rules! replace_token { - ($_in:tt $replacement:tt) => { $replacement }; + ($_in:tt $replacement:tt) => { + $replacement + }; } macro_rules! call_zero { @@ -424,20 +426,22 @@ macro_rules! cost_batched_args { macro_rules! cost_instr_no_params_with_batch_size { ($name:ident, $batch_size:expr) => { (cost_args!($name, 1) / Weight::from($batch_size)) as u32 - } + }; } macro_rules! cost_instr_with_batch_size { ($name:ident, $num_params:expr, $batch_size:expr) => { - cost_instr_no_params_with_batch_size!($name, $batch_size) - .saturating_sub((cost_instr_no_params_with_batch_size!(instr_i64const, $batch_size) / 2).saturating_mul($num_params)) - } + cost_instr_no_params_with_batch_size!($name, $batch_size).saturating_sub( + (cost_instr_no_params_with_batch_size!(instr_i64const, $batch_size) / 2) + .saturating_mul($num_params), + ) + }; } macro_rules! 
cost_instr { ($name:ident, $num_params:expr) => { cost_instr_with_batch_size!($name, $num_params, INSTR_BENCHMARK_BATCH_SIZE) - } + }; } macro_rules! cost_byte_args { @@ -455,25 +459,25 @@ macro_rules! cost_byte_batched_args { macro_rules! cost { ($name:ident) => { cost_args!($name, 1) - } + }; } macro_rules! cost_batched { ($name:ident) => { cost_batched_args!($name, 1) - } + }; } macro_rules! cost_byte { ($name:ident) => { cost_byte_args!($name, 1) - } + }; } macro_rules! cost_byte_batched { ($name:ident) => { cost_byte_batched_args!($name, 1) - } + }; } impl Default for Limits { @@ -582,7 +586,11 @@ impl Default for HostFnWeights { random: cost_batched!(seal_random), deposit_event: cost_batched!(seal_deposit_event), deposit_event_per_topic: cost_batched_args!(seal_deposit_event_per_topic_and_kb, 1, 0), - deposit_event_per_byte: cost_byte_batched_args!(seal_deposit_event_per_topic_and_kb, 0, 1), + deposit_event_per_byte: cost_byte_batched_args!( + seal_deposit_event_per_topic_and_kb, + 0, + 1 + ), debug_message: cost_batched!(seal_debug_message), set_rent_allowance: cost_batched!(seal_set_rent_allowance), set_storage: cost_batched!(seal_set_storage), @@ -592,13 +600,43 @@ impl Default for HostFnWeights { get_storage_per_byte: cost_byte_batched!(seal_get_storage_per_kb), transfer: cost_batched!(seal_transfer), call: cost_batched!(seal_call), - call_transfer_surcharge: cost_batched_args!(seal_call_per_transfer_input_output_kb, 1, 0, 0), - call_per_input_byte: cost_byte_batched_args!(seal_call_per_transfer_input_output_kb, 0, 1, 0), - call_per_output_byte: cost_byte_batched_args!(seal_call_per_transfer_input_output_kb, 0, 0, 1), + call_transfer_surcharge: cost_batched_args!( + seal_call_per_transfer_input_output_kb, + 1, + 0, + 0 + ), + call_per_input_byte: cost_byte_batched_args!( + seal_call_per_transfer_input_output_kb, + 0, + 1, + 0 + ), + call_per_output_byte: cost_byte_batched_args!( + seal_call_per_transfer_input_output_kb, + 0, + 0, + 1 + ), instantiate: 
cost_batched!(seal_instantiate), - instantiate_per_input_byte: cost_byte_batched_args!(seal_instantiate_per_input_output_salt_kb, 1, 0, 0), - instantiate_per_output_byte: cost_byte_batched_args!(seal_instantiate_per_input_output_salt_kb, 0, 1, 0), - instantiate_per_salt_byte: cost_byte_batched_args!(seal_instantiate_per_input_output_salt_kb, 0, 0, 1), + instantiate_per_input_byte: cost_byte_batched_args!( + seal_instantiate_per_input_output_salt_kb, + 1, + 0, + 0 + ), + instantiate_per_output_byte: cost_byte_batched_args!( + seal_instantiate_per_input_output_salt_kb, + 0, + 1, + 0 + ), + instantiate_per_salt_byte: cost_byte_batched_args!( + seal_instantiate_per_input_output_salt_kb, + 0, + 0, + 1 + ), hash_sha2_256: cost_batched!(seal_hash_sha2_256), hash_sha2_256_per_byte: cost_byte_batched!(seal_hash_sha2_256_per_kb), hash_keccak_256: cost_batched!(seal_hash_keccak_256), @@ -629,7 +667,7 @@ impl Schedule { let elements::Type::Function(func) = func; func.params().len() as u32 }) - .collect() + .collect(), } } } @@ -643,12 +681,25 @@ impl<'a, T: Config> rules::Rules for ScheduleRules<'a, T> { let weight = match *instruction { End | Unreachable | Return | Else => 0, I32Const(_) | I64Const(_) | Block(_) | Loop(_) | Nop | Drop => w.i64const, - I32Load(_, _) | I32Load8S(_, _) | I32Load8U(_, _) | I32Load16S(_, _) | - I32Load16U(_, _) | I64Load(_, _) | I64Load8S(_, _) | I64Load8U(_, _) | - I64Load16S(_, _) | I64Load16U(_, _) | I64Load32S(_, _) | I64Load32U(_, _) - => w.i64load, - I32Store(_, _) | I32Store8(_, _) | I32Store16(_, _) | I64Store(_, _) | - I64Store8(_, _) | I64Store16(_, _) | I64Store32(_, _) => w.i64store, + I32Load(_, _) | + I32Load8S(_, _) | + I32Load8U(_, _) | + I32Load16S(_, _) | + I32Load16U(_, _) | + I64Load(_, _) | + I64Load8S(_, _) | + I64Load8U(_, _) | + I64Load16S(_, _) | + I64Load16U(_, _) | + I64Load32S(_, _) | + I64Load32U(_, _) => w.i64load, + I32Store(_, _) | + I32Store8(_, _) | + I32Store16(_, _) | + I64Store(_, _) | + I64Store8(_, _) | + 
I64Store16(_, _) | + I64Store32(_, _) => w.i64store, Select => w.select, If(_) => w.r#if, Br(_) => w.br, @@ -662,10 +713,9 @@ impl<'a, T: Config> rules::Rules for ScheduleRules<'a, T> { CurrentMemory(_) => w.memory_current, GrowMemory(_) => w.memory_grow, CallIndirect(idx, _) => *self.params.get(idx as usize).unwrap_or(&max_params), - BrTable(ref data) => - w.br_table.saturating_add( - w.br_table_per_entry.saturating_mul(data.table.len() as u32) - ), + BrTable(ref data) => w + .br_table + .saturating_add(w.br_table_per_entry.saturating_mul(data.table.len() as u32)), I32Clz | I64Clz => w.i64clz, I32Ctz | I64Ctz => w.i64ctz, I32Popcnt | I64Popcnt => w.i64popcnt, @@ -715,8 +765,8 @@ impl<'a, T: Config> rules::Rules for ScheduleRules<'a, T> { #[cfg(test)] mod test { - use crate::tests::Test; use super::*; + use crate::tests::Test; #[test] fn print_test_schedule() { diff --git a/frame/contracts/src/storage.rs b/frame/contracts/src/storage.rs index a7c46cbe37ff5..1d335b650cdce 100644 --- a/frame/contracts/src/storage.rs +++ b/frame/contracts/src/storage.rs @@ -19,30 +19,31 @@ use crate::{ exec::{AccountIdOf, StorageKey}, - BalanceOf, CodeHash, ContractInfoOf, Config, TrieId, DeletionQueue, Error, weights::WeightInfo, + BalanceOf, CodeHash, Config, ContractInfoOf, DeletionQueue, Error, TrieId, }; -use codec::{Codec, Encode, Decode}; -use scale_info::TypeInfo; -use sp_std::prelude::*; -use sp_std::{marker::PhantomData, fmt::Debug}; -use sp_io::hashing::blake2_256; -use sp_runtime::{ - RuntimeDebug, - traits::{Bounded, Saturating, Zero, Hash, Member, MaybeSerializeDeserialize}, -}; -use sp_core::crypto::UncheckedFrom; +use codec::{Codec, Decode, Encode}; use frame_support::{ dispatch::{DispatchError, DispatchResult}, - storage::child::{self, KillStorageResult, ChildInfo}, + storage::child::{self, ChildInfo, KillStorageResult}, traits::Get, weights::Weight, }; +use scale_info::TypeInfo; +use sp_core::crypto::UncheckedFrom; +use sp_io::hashing::blake2_256; +use sp_runtime::{ 
+ traits::{Bounded, Hash, MaybeSerializeDeserialize, Member, Saturating, Zero}, + RuntimeDebug, +}; +use sp_std::{fmt::Debug, marker::PhantomData, prelude::*}; pub type AliveContractInfo = RawAliveContractInfo, BalanceOf, ::BlockNumber>; -pub type TombstoneContractInfo = - RawTombstoneContractInfo<::Hash, ::Hashing>; +pub type TombstoneContractInfo = RawTombstoneContractInfo< + ::Hash, + ::Hashing, +>; /// Information for managing an account and its sub trie abstraction. /// This is the required info to cache for an account @@ -128,10 +129,16 @@ pub struct RawTombstoneContractInfo(H, PhantomData); impl RawTombstoneContractInfo where - H: Member + MaybeSerializeDeserialize+ Debug - + AsRef<[u8]> + AsMut<[u8]> + Copy + Default - + sp_std::hash::Hash + Codec, - Hasher: Hash, + H: Member + + MaybeSerializeDeserialize + + Debug + + AsRef<[u8]> + + AsMut<[u8]> + + Copy + + Default + + sp_std::hash::Hash + + Codec, + Hasher: Hash, { pub fn new(storage_root: &[u8], code_hash: H) -> Self { let mut buf = Vec::new(); @@ -158,7 +165,7 @@ pub struct Storage(PhantomData); impl Storage where T: Config, - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { /// Reads a storage kv pair of a contract. /// @@ -189,11 +196,15 @@ where // Update the total number of KV pairs and the number of empty pairs. match (&opt_prev_len, &opt_new_value) { (Some(_), None) => { - new_info.pair_count = new_info.pair_count.checked_sub(1) + new_info.pair_count = new_info + .pair_count + .checked_sub(1) .ok_or_else(|| Error::::StorageExhausted)?; }, (None, Some(_)) => { - new_info.pair_count = new_info.pair_count.checked_add(1) + new_info.pair_count = new_info + .pair_count + .checked_add(1) .ok_or_else(|| Error::::StorageExhausted)?; }, (Some(_), Some(_)) => {}, @@ -202,10 +213,8 @@ where // Update the total storage size. 
let prev_value_len = opt_prev_len.unwrap_or(0); - let new_value_len = opt_new_value - .as_ref() - .map(|new_value| new_value.len() as u32) - .unwrap_or(0); + let new_value_len = + opt_new_value.as_ref().map(|new_value| new_value.len() as u32).unwrap_or(0); new_info.storage_size = new_info .storage_size .checked_sub(prev_value_len) @@ -232,7 +241,7 @@ where ch: CodeHash, ) -> Result, DispatchError> { if >::contains_key(account) { - return Err(Error::::DuplicateContract.into()); + return Err(Error::::DuplicateContract.into()) } let contract = AliveContractInfo:: { @@ -299,19 +308,17 @@ where pub fn process_deletion_queue_batch(weight_limit: Weight) -> Weight { let queue_len = >::decode_len().unwrap_or(0); if queue_len == 0 { - return weight_limit; + return weight_limit } - let (weight_per_key, mut remaining_key_budget) = Self::deletion_budget( - queue_len, - weight_limit, - ); + let (weight_per_key, mut remaining_key_budget) = + Self::deletion_budget(queue_len, weight_limit); // We want to check whether we have enough weight to decode the queue before // proceeding. Too little weight for decoding might happen during runtime upgrades // which consume the whole block before the other `on_initialize` blocks are called. 
if remaining_key_budget == 0 { - return weight_limit; + return weight_limit } let mut queue = >::get(); @@ -320,10 +327,8 @@ where // Cannot panic due to loop condition let trie = &mut queue[0]; let pair_count = trie.pair_count; - let outcome = child::kill_storage( - &child_trie_info(&trie.trie_id), - Some(remaining_key_budget), - ); + let outcome = + child::kill_storage(&child_trie_info(&trie.trie_id), Some(remaining_key_budget)); if pair_count > remaining_key_budget { // Cannot underflow because of the if condition trie.pair_count -= remaining_key_budget; @@ -343,8 +348,8 @@ where KillStorageResult::AllRemoved(_) => (), } } - remaining_key_budget = remaining_key_budget - .saturating_sub(remaining_key_budget.min(pair_count)); + remaining_key_budget = + remaining_key_budget.saturating_sub(remaining_key_budget.min(pair_count)); } >::put(queue); @@ -354,29 +359,22 @@ where /// This generator uses inner counter for account id and applies the hash over `AccountId + /// accountid_counter`. pub fn generate_trie_id(account_id: &AccountIdOf, seed: u64) -> TrieId { - let buf: Vec<_> = account_id.as_ref().iter() - .chain(&seed.to_le_bytes()) - .cloned() - .collect(); + let buf: Vec<_> = account_id.as_ref().iter().chain(&seed.to_le_bytes()).cloned().collect(); T::Hashing::hash(&buf).as_ref().into() } /// Returns the code hash of the contract specified by `account` ID. #[cfg(test)] - pub fn code_hash(account: &AccountIdOf) -> Option> - { - >::get(account) - .and_then(|i| i.as_alive().map(|i| i.code_hash)) + pub fn code_hash(account: &AccountIdOf) -> Option> { + >::get(account).and_then(|i| i.as_alive().map(|i| i.code_hash)) } /// Fill up the queue in order to exercise the limits during testing. 
#[cfg(test)] pub fn fill_queue_with_dummies() { - let queue: Vec<_> = (0..T::DeletionQueueDepth::get()).map(|_| DeletedContract { - pair_count: 0, - trie_id: vec![], - }) - .collect(); + let queue: Vec<_> = (0..T::DeletionQueueDepth::get()) + .map(|_| DeletedContract { pair_count: 0, trie_id: vec![] }) + .collect(); >::put(queue); } } diff --git a/frame/contracts/src/tests.rs b/frame/contracts/src/tests.rs index ea5fbccb0f2a1..68195211ebe75 100644 --- a/frame/contracts/src/tests.rs +++ b/frame/contracts/src/tests.rs @@ -16,37 +16,35 @@ // limitations under the License. use crate::{ - BalanceOf, ContractInfo, ContractInfoOf, Pallet, - Config, Schedule, - Error, storage::Storage, chain_extension::{ - Result as ExtensionResult, Environment, ChainExtension, Ext, SysConfig, RetVal, - UncheckedFrom, InitState, ReturnFlags, + ChainExtension, Environment, Ext, InitState, Result as ExtensionResult, RetVal, + ReturnFlags, SysConfig, UncheckedFrom, }, - exec::{AccountIdOf, Executable, Frame}, wasm::PrefabWasmModule, + exec::{AccountIdOf, Executable, Frame}, + storage::{RawAliveContractInfo, Storage}, + wasm::{PrefabWasmModule, ReturnCode as RuntimeReturnCode}, weights::WeightInfo, - wasm::ReturnCode as RuntimeReturnCode, - storage::RawAliveContractInfo, + BalanceOf, Config, ContractInfo, ContractInfoOf, Error, Pallet, Schedule, }; use assert_matches::assert_matches; use codec::Encode; -use sp_core::Bytes; -use sp_runtime::{ - traits::{BlakeTwo256, Hash, IdentityLookup, Convert}, - testing::{Header, H256}, - AccountId32, Perbill, -}; -use sp_io::hashing::blake2_256; use frame_support::{ - assert_ok, assert_err, assert_err_ignore_postinfo, - parameter_types, assert_storage_noop, - traits::{Currency, ReservableCurrency, OnInitialize, Filter}, - weights::{Weight, PostDispatchInfo, DispatchClass, constants::WEIGHT_PER_SECOND}, + assert_err, assert_err_ignore_postinfo, assert_ok, assert_storage_noop, dispatch::DispatchErrorWithPostInfo, + parameter_types, storage::child, + 
traits::{Currency, Filter, OnInitialize, ReservableCurrency}, + weights::{constants::WEIGHT_PER_SECOND, DispatchClass, PostDispatchInfo, Weight}, }; use frame_system::{self as system, EventRecord, Phase}; use pretty_assertions::assert_eq; +use sp_core::Bytes; +use sp_io::hashing::blake2_256; +use sp_runtime::{ + testing::{Header, H256}, + traits::{BlakeTwo256, Convert, Hash, IdentityLookup}, + AccountId32, Perbill, +}; use std::cell::RefCell; use crate as pallet_contracts; @@ -71,23 +69,21 @@ frame_support::construct_runtime!( #[macro_use] pub mod test_utils { - use super::{Test, Balances, System}; + use super::{Balances, System, Test}; use crate::{ - ContractInfoOf, CodeHash, - storage::{Storage, ContractInfo}, - exec::{StorageKey, AccountIdOf}, - Pallet as Contracts, - TrieId, AccountCounter, + exec::{AccountIdOf, StorageKey}, + storage::{ContractInfo, Storage}, + AccountCounter, CodeHash, ContractInfoOf, Pallet as Contracts, TrieId, }; use frame_support::traits::Currency; pub fn set_storage(addr: &AccountIdOf, key: &StorageKey, value: Option>) { - let mut contract_info = >::get(&addr).unwrap().get_alive().unwrap(); + let mut contract_info = >::get(&addr).unwrap().get_alive().unwrap(); let block_number = System::block_number(); Storage::::write(block_number, &mut contract_info, key, value).unwrap(); } pub fn get_storage(addr: &AccountIdOf, key: &StorageKey) -> Option> { - let contract_info = >::get(&addr).unwrap().get_alive().unwrap(); + let contract_info = >::get(&addr).unwrap().get_alive().unwrap(); Storage::::read(&contract_info.trie_id, key) } pub fn generate_trie_id(address: &AccountIdOf) -> TrieId { @@ -114,15 +110,13 @@ pub mod test_utils { ( $x:expr , $y:expr $(,)? ) => {{ use sp_std::convert::TryInto; assert_eq!(u32::from_le_bytes($x.data[..].try_into().unwrap()), $y as u32); - }} + }}; } macro_rules! assert_refcount { ( $code_hash:expr , $should:expr $(,)? 
) => {{ - let is = crate::CodeStorage::::get($code_hash) - .map(|m| m.refcount()) - .unwrap_or(0); + let is = crate::CodeStorage::::get($code_hash).map(|m| m.refcount()).unwrap_or(0); assert_eq!(is, $should); - }} + }}; } } @@ -152,11 +146,7 @@ impl TestExtension { impl Default for TestExtension { fn default() -> Self { - Self { - enabled: true, - last_seen_buffer: vec![], - last_seen_inputs: (0, 0, 0, 0), - } + Self { enabled: true, last_seen_buffer: vec![], last_seen_inputs: (0, 0, 0, 0) } } } @@ -176,11 +166,10 @@ impl ChainExtension for TestExtension { }, 1 => { let env = env.only_in(); - TEST_EXTENSION.with(|e| - e.borrow_mut().last_seen_inputs = ( - env.val0(), env.val1(), env.val2(), env.val3() - ) - ); + TEST_EXTENSION.with(|e| { + e.borrow_mut().last_seen_inputs = + (env.val0(), env.val1(), env.val2(), env.val3()) + }); Ok(RetVal::Converging(func_id)) }, 2 => { @@ -189,15 +178,10 @@ impl ChainExtension for TestExtension { env.charge_weight(weight)?; Ok(RetVal::Converging(func_id)) }, - 3 => { - Ok(RetVal::Diverging{ - flags: ReturnFlags::REVERT, - data: vec![42, 99], - }) - }, + 3 => Ok(RetVal::Diverging { flags: ReturnFlags::REVERT, data: vec![42, 99] }), _ => { panic!("Passed unknown func_id to test chain extension: {}", func_id); - } + }, } } @@ -340,9 +324,7 @@ pub struct ExtBuilder { } impl Default for ExtBuilder { fn default() -> Self { - Self { - existential_deposit: 1, - } + Self { existential_deposit: 1 } } } impl ExtBuilder { @@ -356,9 +338,9 @@ impl ExtBuilder { pub fn build(self) -> sp_io::TestExternalities { self.set_associated_consts(); let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig:: { - balances: vec![], - }.assimilate_storage(&mut t).unwrap(); + pallet_balances::GenesisConfig:: { balances: vec![] } + .assimilate_storage(&mut t) + .unwrap(); let mut ext = sp_io::TestExternalities::new(t); ext.execute_with(|| System::set_block_number(1)); ext @@ -369,9 +351,7 @@ impl ExtBuilder 
{ /// with it's hash. /// /// The fixture files are located under the `fixtures/` directory. -fn compile_module( - fixture_name: &str, -) -> wat::Result<(Vec, ::Output)> +fn compile_module(fixture_name: &str) -> wat::Result<(Vec, ::Output)> where T: frame_system::Config, { @@ -392,22 +372,20 @@ fn calling_plain_account_fails() { assert_eq!( Contracts::call(Origin::signed(ALICE), BOB, 0, GAS_LIMIT, Vec::new()), - Err( - DispatchErrorWithPostInfo { - error: Error::::ContractNotFound.into(), - post_info: PostDispatchInfo { - actual_weight: Some(base_cost), - pays_fee: Default::default(), - }, - } - ) + Err(DispatchErrorWithPostInfo { + error: Error::::ContractNotFound.into(), + post_info: PostDispatchInfo { + actual_weight: Some(base_cost), + pays_fee: Default::default(), + }, + }) ); }); } #[test] fn account_removal_does_not_remove_storage() { - use self::test_utils::{set_storage, get_storage}; + use self::test_utils::{get_storage, set_storage}; ExtBuilder::default().existential_deposit(100).build().execute_with(|| { let trie_id1 = test_utils::generate_trie_id(&ALICE); @@ -461,23 +439,11 @@ fn account_removal_does_not_remove_storage() { // Verify that no entries are removed. 
{ - assert_eq!( - get_storage(&ALICE, key1), - Some(b"1".to_vec()) - ); - assert_eq!( - get_storage(&ALICE, key2), - Some(b"2".to_vec()) - ); + assert_eq!(get_storage(&ALICE, key1), Some(b"1".to_vec())); + assert_eq!(get_storage(&ALICE, key2), Some(b"2".to_vec())); - assert_eq!( - get_storage(&BOB, key1), - Some(b"3".to_vec()) - ); - assert_eq!( - get_storage(&BOB, key2), - Some(b"4".to_vec()) - ); + assert_eq!(get_storage(&BOB, key1), Some(b"3".to_vec())); + assert_eq!(get_storage(&BOB, key2), Some(b"4".to_vec())); } }); } @@ -486,25 +452,24 @@ fn account_removal_does_not_remove_storage() { fn instantiate_and_call_and_deposit_event() { let (wasm, code_hash) = compile_module::("return_from_start_fn").unwrap(); - ExtBuilder::default() - .existential_deposit(100) - .build() - .execute_with(|| { - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - let subsistence = Pallet::::subsistence_threshold(); + ExtBuilder::default().existential_deposit(100).build().execute_with(|| { + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + let subsistence = Pallet::::subsistence_threshold(); - // Check at the end to get hash on error easily - let creation = Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - wasm, - vec![], - vec![], - ); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + // Check at the end to get hash on error easily + let creation = Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + wasm, + vec![], + vec![], + ); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - assert_eq!(System::events(), vec![ + assert_eq!( + System::events(), + vec![ EventRecord { phase: Phase::Initialization, event: Event::System(frame_system::Event::NewAccount(ALICE.clone())), @@ -512,9 +477,7 @@ fn instantiate_and_call_and_deposit_event() { }, EventRecord { phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Endowed(ALICE, 
1_000_000) - ), + event: Event::Balances(pallet_balances::Event::Endowed(ALICE, 1_000_000)), topics: vec![], }, EventRecord { @@ -524,16 +487,19 @@ fn instantiate_and_call_and_deposit_event() { }, EventRecord { phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Endowed(addr.clone(), subsistence * 100) - ), + event: Event::Balances(pallet_balances::Event::Endowed( + addr.clone(), + subsistence * 100 + )), topics: vec![], }, EventRecord { phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Transfer(ALICE, addr.clone(), subsistence * 100) - ), + event: Event::Balances(pallet_balances::Event::Transfer( + ALICE, + addr.clone(), + subsistence * 100 + )), topics: vec![], }, EventRecord { @@ -543,9 +509,10 @@ fn instantiate_and_call_and_deposit_event() { }, EventRecord { phase: Phase::Initialization, - event: Event::Contracts( - crate::Event::ContractEmitted(addr.clone(), vec![1, 2, 3, 4]) - ), + event: Event::Contracts(crate::Event::ContractEmitted( + addr.clone(), + vec![1, 2, 3, 4] + )), topics: vec![], }, EventRecord { @@ -553,54 +520,52 @@ fn instantiate_and_call_and_deposit_event() { event: Event::Contracts(crate::Event::Instantiated(ALICE, addr.clone())), topics: vec![], }, - ]); + ] + ); - assert_ok!(creation); - assert!(ContractInfoOf::::contains_key(&addr)); - }); + assert_ok!(creation); + assert!(ContractInfoOf::::contains_key(&addr)); + }); } #[test] fn deposit_event_max_value_limit() { let (wasm, code_hash) = compile_module::("event_size").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 30_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = 
Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, + GAS_LIMIT, + wasm, + vec![], + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - // Call contract with allowed storage value. - assert_ok!(Contracts::call( + // Call contract with allowed storage value. + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr.clone(), + 0, + GAS_LIMIT * 2, // we are copying a huge buffer, + ::Schedule::get().limits.payload_len.encode(), + )); + + // Call contract with too large a storage value. + assert_err_ignore_postinfo!( + Contracts::call( Origin::signed(ALICE), - addr.clone(), + addr, 0, - GAS_LIMIT * 2, // we are copying a huge buffer, - ::Schedule::get().limits.payload_len.encode(), - )); - - // Call contract with too large a storage value. - assert_err_ignore_postinfo!( - Contracts::call( - Origin::signed(ALICE), - addr, - 0, - GAS_LIMIT, - (::Schedule::get().limits.payload_len + 1).encode(), - ), - Error::::ValueTooLarge, - ); - }); + GAS_LIMIT, + (::Schedule::get().limits.payload_len + 1).encode(), + ), + Error::::ValueTooLarge, + ); + }); } #[test] @@ -608,47 +573,50 @@ fn run_out_of_gas() { let (wasm, code_hash) = compile_module::("run_out_of_gas").unwrap(); let subsistence = Pallet::::subsistence_threshold(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - let _ = Balances::deposit_creating(&ALICE, 1_000_000); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 100 * subsistence, + GAS_LIMIT, + wasm, + vec![], + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + + // Call the contract with a fixed gas limit. It must run out of gas because it just + // loops forever. 
+ assert_err_ignore_postinfo!( + Contracts::call( Origin::signed(ALICE), - 100 * subsistence, - GAS_LIMIT, - wasm, - vec![], + addr, // newly created account + 0, + 67_500_000, vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - - // Call the contract with a fixed gas limit. It must run out of gas because it just - // loops forever. - assert_err_ignore_postinfo!( - Contracts::call( - Origin::signed(ALICE), - addr, // newly created account - 0, - 67_500_000, - vec![], - ), - Error::::OutOfGas, - ); - }); + ), + Error::::OutOfGas, + ); + }); } /// Input data for each call in set_rent code mod call { use super::{AccountIdOf, Test}; - pub fn set_storage_4_byte() -> Vec { 0u32.to_le_bytes().to_vec() } - pub fn remove_storage_4_byte() -> Vec { 1u32.to_le_bytes().to_vec() } + pub fn set_storage_4_byte() -> Vec { + 0u32.to_le_bytes().to_vec() + } + pub fn remove_storage_4_byte() -> Vec { + 1u32.to_le_bytes().to_vec() + } #[allow(dead_code)] pub fn transfer(to: &AccountIdOf) -> Vec { 2u32.to_le_bytes().iter().chain(AsRef::<[u8]>::as_ref(to)).cloned().collect() } - pub fn null() -> Vec { 3u32.to_le_bytes().to_vec() } + pub fn null() -> Vec { + 3u32.to_le_bytes().to_vec() + } } #[test] @@ -656,117 +624,71 @@ fn storage_size() { let (wasm, code_hash) = compile_module::("set_rent").unwrap(); // Storage size - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 30_000, - GAS_LIMIT, - wasm, - // rent_allowance - ::Balance::from(10_000u32).encode(), - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let bob_contract = ContractInfoOf::::get(&addr) - .unwrap() - .get_alive() - .unwrap(); - assert_eq!( - bob_contract.storage_size, - 4 - ); - assert_eq!( - bob_contract.pair_count, - 1, - ); + 
ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, + GAS_LIMIT, + wasm, + // rent_allowance + ::Balance::from(10_000u32).encode(), + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + let bob_contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); + assert_eq!(bob_contract.storage_size, 4); + assert_eq!(bob_contract.pair_count, 1,); - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - call::set_storage_4_byte() - )); - let bob_contract = ContractInfoOf::::get(&addr) - .unwrap() - .get_alive() - .unwrap(); - assert_eq!( - bob_contract.storage_size, - 4 + 4 - ); - assert_eq!( - bob_contract.pair_count, - 2, - ); + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr.clone(), + 0, + GAS_LIMIT, + call::set_storage_4_byte() + )); + let bob_contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); + assert_eq!(bob_contract.storage_size, 4 + 4); + assert_eq!(bob_contract.pair_count, 2,); - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - call::remove_storage_4_byte() - )); - let bob_contract = ContractInfoOf::::get(&addr) - .unwrap() - .get_alive() - .unwrap(); - assert_eq!( - bob_contract.storage_size, - 4 - ); - assert_eq!( - bob_contract.pair_count, - 1, - ); - }); + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr.clone(), + 0, + GAS_LIMIT, + call::remove_storage_4_byte() + )); + let bob_contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); + assert_eq!(bob_contract.storage_size, 4); + assert_eq!(bob_contract.pair_count, 1,); + }); } #[test] fn empty_kv_pairs() { let (wasm, code_hash) = compile_module::("set_empty_storage").unwrap(); - ExtBuilder::default() - .build() - .execute_with(|| { - let _ = 
Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 30_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let bob_contract = ContractInfoOf::::get(&addr) - .unwrap() - .get_alive() - .unwrap(); + ExtBuilder::default().build().execute_with(|| { + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, + GAS_LIMIT, + wasm, + vec![], + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + let bob_contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); - assert_eq!( - bob_contract.storage_size, - 0, - ); - assert_eq!( - bob_contract.pair_count, - 1, - ); - }); + assert_eq!(bob_contract.storage_size, 0,); + assert_eq!(bob_contract.pair_count, 1,); + }); } fn initialize_block(number: u64) { - System::initialize( - &number, - &[0u8; 32].into(), - &Default::default(), - Default::default(), - ); + System::initialize(&number, &[0u8; 32].into(), &Default::default(), Default::default()); } #[test] @@ -775,83 +697,92 @@ fn deduct_blocks() { let endowment: BalanceOf = 100_000; let allowance: BalanceOf = 70_000; - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - endowment, - GAS_LIMIT, - wasm, - allowance.encode(), - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); - let code_len: BalanceOf = - PrefabWasmModule::::from_storage_noinstr(contract.code_hash) - .unwrap() - .occupied_storage() - .into(); - - // The instantiation deducted the rent for one block immediately - let rent0 = ::RentFraction::get() + 
ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + endowment, + GAS_LIMIT, + wasm, + allowance.encode(), + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); + let code_len: BalanceOf = + PrefabWasmModule::::from_storage_noinstr(contract.code_hash) + .unwrap() + .occupied_storage() + .into(); + + // The instantiation deducted the rent for one block immediately + let rent0 = ::RentFraction::get() // (base_deposit(8) + bytes in storage(4) + size of code) * byte_price // + 1 storage item (10_000) - free_balance .mul_ceil((8 + 4 + code_len) * 10_000 + 10_000 - endowment) // blocks to rent * 1; - assert!(rent0 > 0); - assert_eq!(contract.rent_allowance, allowance - rent0); - assert_eq!(contract.deduct_block, 1); - assert_eq!(Balances::free_balance(&addr), endowment - rent0); + assert!(rent0 > 0); + assert_eq!(contract.rent_allowance, allowance - rent0); + assert_eq!(contract.deduct_block, 1); + assert_eq!(Balances::free_balance(&addr), endowment - rent0); - // Advance 4 blocks - initialize_block(5); + // Advance 4 blocks + initialize_block(5); - // Trigger rent through call - assert_ok!( - Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, call::null()) - ); + // Trigger rent through call + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr.clone(), + 0, + GAS_LIMIT, + call::null() + )); - // Check result - let rent = ::RentFraction::get() - .mul_ceil((8 + 4 + code_len) * 10_000 + 10_000 - (endowment - rent0)) - * 4; - let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); - assert_eq!(contract.rent_allowance, allowance - rent0 - rent); - assert_eq!(contract.deduct_block, 5); - assert_eq!(Balances::free_balance(&addr), endowment - rent0 - rent); - - // Advance 
2 blocks more - initialize_block(7); - - // Trigger rent through call - assert_ok!( - Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, call::null()) - ); + // Check result + let rent = ::RentFraction::get() + .mul_ceil((8 + 4 + code_len) * 10_000 + 10_000 - (endowment - rent0)) * + 4; + let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); + assert_eq!(contract.rent_allowance, allowance - rent0 - rent); + assert_eq!(contract.deduct_block, 5); + assert_eq!(Balances::free_balance(&addr), endowment - rent0 - rent); - // Check result - let rent_2 = ::RentFraction::get() - .mul_ceil((8 + 4 + code_len) * 10_000 + 10_000 - (endowment - rent0 - rent)) - * 2; - let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); - assert_eq!(contract.rent_allowance, allowance - rent0 - rent - rent_2); - assert_eq!(contract.deduct_block, 7); - assert_eq!(Balances::free_balance(&addr), endowment - rent0 - rent - rent_2); - - // Second call on same block should have no effect on rent - assert_ok!( - Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, call::null()) - ); - let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); - assert_eq!(contract.rent_allowance, allowance - rent0 - rent - rent_2); - assert_eq!(contract.deduct_block, 7); - assert_eq!(Balances::free_balance(&addr), endowment - rent0 - rent - rent_2) - }); + // Advance 2 blocks more + initialize_block(7); + + // Trigger rent through call + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr.clone(), + 0, + GAS_LIMIT, + call::null() + )); + + // Check result + let rent_2 = ::RentFraction::get() + .mul_ceil((8 + 4 + code_len) * 10_000 + 10_000 - (endowment - rent0 - rent)) * + 2; + let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); + assert_eq!(contract.rent_allowance, allowance - rent0 - rent - rent_2); + assert_eq!(contract.deduct_block, 7); + assert_eq!(Balances::free_balance(&addr), endowment - rent0 
- rent - rent_2); + + // Second call on same block should have no effect on rent + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr.clone(), + 0, + GAS_LIMIT, + call::null() + )); + let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); + assert_eq!(contract.rent_allowance, allowance - rent0 - rent - rent_2); + assert_eq!(contract.deduct_block, 7); + assert_eq!(Balances::free_balance(&addr), endowment - rent0 - rent - rent_2) + }); } #[test] @@ -867,16 +798,48 @@ fn signed_claim_surcharge_contract_removals() { #[test] fn claim_surcharge_malus() { // Test surcharge malus for inherent - claim_surcharge(8, |addr| Contracts::claim_surcharge(Origin::none(), addr, Some(ALICE)).is_ok(), true); - claim_surcharge(7, |addr| Contracts::claim_surcharge(Origin::none(), addr, Some(ALICE)).is_ok(), true); - claim_surcharge(6, |addr| Contracts::claim_surcharge(Origin::none(), addr, Some(ALICE)).is_ok(), true); - claim_surcharge(5, |addr| Contracts::claim_surcharge(Origin::none(), addr, Some(ALICE)).is_ok(), false); + claim_surcharge( + 8, + |addr| Contracts::claim_surcharge(Origin::none(), addr, Some(ALICE)).is_ok(), + true, + ); + claim_surcharge( + 7, + |addr| Contracts::claim_surcharge(Origin::none(), addr, Some(ALICE)).is_ok(), + true, + ); + claim_surcharge( + 6, + |addr| Contracts::claim_surcharge(Origin::none(), addr, Some(ALICE)).is_ok(), + true, + ); + claim_surcharge( + 5, + |addr| Contracts::claim_surcharge(Origin::none(), addr, Some(ALICE)).is_ok(), + false, + ); // Test surcharge malus for signed - claim_surcharge(8, |addr| Contracts::claim_surcharge(Origin::signed(ALICE), addr, None).is_ok(), true); - claim_surcharge(7, |addr| Contracts::claim_surcharge(Origin::signed(ALICE), addr, None).is_ok(), false); - claim_surcharge(6, |addr| Contracts::claim_surcharge(Origin::signed(ALICE), addr, None).is_ok(), false); - claim_surcharge(5, |addr| Contracts::claim_surcharge(Origin::signed(ALICE), addr, None).is_ok(), false); + claim_surcharge( + 
8, + |addr| Contracts::claim_surcharge(Origin::signed(ALICE), addr, None).is_ok(), + true, + ); + claim_surcharge( + 7, + |addr| Contracts::claim_surcharge(Origin::signed(ALICE), addr, None).is_ok(), + false, + ); + claim_surcharge( + 6, + |addr| Contracts::claim_surcharge(Origin::signed(ALICE), addr, None).is_ok(), + false, + ); + claim_surcharge( + 5, + |addr| Contracts::claim_surcharge(Origin::signed(ALICE), addr, None).is_ok(), + false, + ); } /// Claim surcharge with the given trigger_call at the given blocks. @@ -884,34 +847,31 @@ fn claim_surcharge_malus() { fn claim_surcharge(blocks: u64, trigger_call: impl Fn(AccountIdOf) -> bool, removes: bool) { let (wasm, code_hash) = compile_module::("set_rent").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 100_000, - GAS_LIMIT, - wasm, - ::Balance::from(30_000u32).encode(), // rent allowance - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 100_000, + GAS_LIMIT, + wasm, + ::Balance::from(30_000u32).encode(), // rent allowance + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - // Advance blocks - initialize_block(blocks); + // Advance blocks + initialize_block(blocks); - // Trigger rent through call - assert_eq!(trigger_call(addr.clone()), removes); + // Trigger rent through call + assert_eq!(trigger_call(addr.clone()), removes); - if removes { - assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); - } else { - assert!(ContractInfoOf::::get(&addr).unwrap().get_alive().is_some()); - } - }); + if removes { + 
assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); + } else { + assert!(ContractInfoOf::::get(&addr).unwrap().get_alive().is_some()); + } + }); } /// Test for all kind of removals for the given trigger: @@ -923,164 +883,138 @@ fn removals(trigger_call: impl Fn(AccountIdOf) -> bool) { let (wasm, code_hash) = compile_module::("set_rent").unwrap(); // Balance reached and superior to subsistence threshold - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 70_000, - GAS_LIMIT, - wasm.clone(), - ::Balance::from(100_000u32).encode(), // rent allowance - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let allowance = ContractInfoOf::::get(&addr) - .unwrap().get_alive().unwrap().rent_allowance; - let balance = Balances::free_balance(&addr); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 70_000, + GAS_LIMIT, + wasm.clone(), + ::Balance::from(100_000u32).encode(), // rent allowance + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + let allowance = + ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap().rent_allowance; + let balance = Balances::free_balance(&addr); - let subsistence_threshold = Pallet::::subsistence_threshold(); + let subsistence_threshold = Pallet::::subsistence_threshold(); - // Trigger rent must have no effect - assert!(!trigger_call(addr.clone())); - assert_eq!( - ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap().rent_allowance, - allowance, - ); - assert_eq!(Balances::free_balance(&addr), balance); + // Trigger rent must have no effect + assert!(!trigger_call(addr.clone())); + assert_eq!( + 
ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap().rent_allowance, + allowance, + ); + assert_eq!(Balances::free_balance(&addr), balance); - // Advance blocks - initialize_block(27); + // Advance blocks + initialize_block(27); - // Trigger rent through call (should remove the contract) - assert!(trigger_call(addr.clone())); - assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); - assert_eq!(Balances::free_balance(&addr), subsistence_threshold); + // Trigger rent through call (should remove the contract) + assert!(trigger_call(addr.clone())); + assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); + assert_eq!(Balances::free_balance(&addr), subsistence_threshold); - // Advance blocks - initialize_block(30); + // Advance blocks + initialize_block(30); - // Trigger rent must have no effect - assert!(!trigger_call(addr.clone())); - assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); - assert_eq!(Balances::free_balance(&addr), subsistence_threshold); - }); + // Trigger rent must have no effect + assert!(!trigger_call(addr.clone())); + assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); + assert_eq!(Balances::free_balance(&addr), subsistence_threshold); + }); // Allowance exceeded - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 100_000, - GAS_LIMIT, - wasm.clone(), - ::Balance::from(70_000u32).encode(), // rent allowance - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let allowance = ContractInfoOf::::get(&addr) - .unwrap().get_alive().unwrap().rent_allowance; - let balance = Balances::free_balance(&addr); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + 
assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 100_000, + GAS_LIMIT, + wasm.clone(), + ::Balance::from(70_000u32).encode(), // rent allowance + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + let allowance = + ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap().rent_allowance; + let balance = Balances::free_balance(&addr); - // Trigger rent must have no effect - assert!(!trigger_call(addr.clone())); - assert_eq!( - ContractInfoOf::::get(&addr) - .unwrap() - .get_alive() - .unwrap() - .rent_allowance, - allowance, - ); - assert_eq!(Balances::free_balance(&addr), balance); + // Trigger rent must have no effect + assert!(!trigger_call(addr.clone())); + assert_eq!( + ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap().rent_allowance, + allowance, + ); + assert_eq!(Balances::free_balance(&addr), balance); - // Advance blocks - initialize_block(27); + // Advance blocks + initialize_block(27); - // Trigger rent through call - assert!(trigger_call(addr.clone())); - assert!(ContractInfoOf::::get(&addr) - .unwrap() - .get_tombstone() - .is_some()); - // Balance should be initial balance - initial rent_allowance - assert_eq!(Balances::free_balance(&addr), 30_000); + // Trigger rent through call + assert!(trigger_call(addr.clone())); + assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); + // Balance should be initial balance - initial rent_allowance + assert_eq!(Balances::free_balance(&addr), 30_000); - // Advance blocks - initialize_block(20); + // Advance blocks + initialize_block(20); - // Trigger rent must have no effect - assert!(!trigger_call(addr.clone())); - assert!(ContractInfoOf::::get(&addr) - .unwrap() - .get_tombstone() - .is_some()); - assert_eq!(Balances::free_balance(&addr), 30_000); - }); + // Trigger rent must have no effect + assert!(!trigger_call(addr.clone())); + assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); + 
assert_eq!(Balances::free_balance(&addr), 30_000); + }); // Balance reached and inferior to subsistence threshold - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let subsistence_threshold = Pallet::::subsistence_threshold(); - let _ = Balances::deposit_creating(&ALICE, subsistence_threshold * 1000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence_threshold * 100, - GAS_LIMIT, - wasm, - (subsistence_threshold * 100).encode(), // rent allowance - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let allowance = ContractInfoOf::::get(&addr) - .unwrap().get_alive().unwrap().rent_allowance; - let balance = Balances::free_balance(&addr); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let subsistence_threshold = Pallet::::subsistence_threshold(); + let _ = Balances::deposit_creating(&ALICE, subsistence_threshold * 1000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence_threshold * 100, + GAS_LIMIT, + wasm, + (subsistence_threshold * 100).encode(), // rent allowance + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + let allowance = + ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap().rent_allowance; + let balance = Balances::free_balance(&addr); - // Trigger rent must have no effect - assert!(!trigger_call(addr.clone())); - assert_eq!( - ContractInfoOf::::get(&addr) - .unwrap() - .get_alive() - .unwrap() - .rent_allowance, - allowance, - ); - assert_eq!( - Balances::free_balance(&addr), - balance, - ); + // Trigger rent must have no effect + assert!(!trigger_call(addr.clone())); + assert_eq!( + ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap().rent_allowance, + allowance, + ); + assert_eq!(Balances::free_balance(&addr), balance,); - // Make contract have exactly the subsistence threshold - Balances::make_free_balance_be(&addr, 
subsistence_threshold); - assert_eq!(Balances::free_balance(&addr), subsistence_threshold); + // Make contract have exactly the subsistence threshold + Balances::make_free_balance_be(&addr, subsistence_threshold); + assert_eq!(Balances::free_balance(&addr), subsistence_threshold); - // Advance blocks (should remove as balance is exactly subsistence) - initialize_block(10); + // Advance blocks (should remove as balance is exactly subsistence) + initialize_block(10); - // Trigger rent through call - assert!(trigger_call(addr.clone())); - assert_matches!(ContractInfoOf::::get(&addr), Some(ContractInfo::Tombstone(_))); - assert_eq!(Balances::free_balance(&addr), subsistence_threshold); + // Trigger rent through call + assert!(trigger_call(addr.clone())); + assert_matches!(ContractInfoOf::::get(&addr), Some(ContractInfo::Tombstone(_))); + assert_eq!(Balances::free_balance(&addr), subsistence_threshold); - // Advance blocks - initialize_block(20); + // Advance blocks + initialize_block(20); - // Trigger rent must have no effect - assert!(!trigger_call(addr.clone())); - assert_matches!(ContractInfoOf::::get(&addr), Some(ContractInfo::Tombstone(_))); - assert_eq!(Balances::free_balance(&addr), subsistence_threshold); - }); + // Trigger rent must have no effect + assert!(!trigger_call(addr.clone())); + assert_matches!(ContractInfoOf::::get(&addr), Some(ContractInfo::Tombstone(_))); + assert_eq!(Balances::free_balance(&addr), subsistence_threshold); + }); } #[test] @@ -1088,97 +1022,99 @@ fn call_removed_contract() { let (wasm, code_hash) = compile_module::("set_rent").unwrap(); // Balance reached and superior to subsistence threshold - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 30_000, - GAS_LIMIT, - wasm, - // rent allowance - ::Balance::from(10_000u32).encode(), - vec![], - )); - let addr = 
Contracts::contract_address(&ALICE, &code_hash, &[]); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, + GAS_LIMIT, + wasm, + // rent allowance + ::Balance::from(10_000u32).encode(), + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - // Calling contract should succeed. - assert_ok!( - Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, call::null()) - ); + // Calling contract should succeed. + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr.clone(), + 0, + GAS_LIMIT, + call::null() + )); - // Advance blocks - initialize_block(27); + // Advance blocks + initialize_block(27); - // Calling contract should deny access because rent cannot be paid. - assert_err_ignore_postinfo!( - Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, call::null()), - Error::::RentNotPaid, - ); - // No event is generated because the contract is not actually removed. - assert_eq!(System::events(), vec![]); + // Calling contract should deny access because rent cannot be paid. + assert_err_ignore_postinfo!( + Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, call::null()), + Error::::RentNotPaid, + ); + // No event is generated because the contract is not actually removed. + assert_eq!(System::events(), vec![]); - // Subsequent contract calls should also fail. - assert_err_ignore_postinfo!( - Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, call::null()), - Error::::RentNotPaid, - ); + // Subsequent contract calls should also fail. 
+ assert_err_ignore_postinfo!( + Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, call::null()), + Error::::RentNotPaid, + ); - // A snitch can now remove the contract - assert_ok!(Contracts::claim_surcharge(Origin::none(), addr.clone(), Some(ALICE))); - assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); - }) + // A snitch can now remove the contract + assert_ok!(Contracts::claim_surcharge(Origin::none(), addr.clone(), Some(ALICE))); + assert!(ContractInfoOf::::get(&addr).unwrap().get_tombstone().is_some()); + }) } #[test] fn default_rent_allowance_on_instantiate() { let (wasm, code_hash) = compile_module::("check_default_rent_allowance").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 30_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); - let code_len: BalanceOf = + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, + GAS_LIMIT, + wasm, + vec![], + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + let contract = ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); + let code_len: BalanceOf = PrefabWasmModule::::from_storage_noinstr(contract.code_hash) .unwrap() .occupied_storage() .into(); - // The instantiation deducted the rent for one block immediately - let first_rent = ::RentFraction::get() + // The instantiation deducted the rent for one block immediately + let first_rent = ::RentFraction::get() // (base_deposit(8) + code_len) * byte_price - free_balance .mul_ceil((8 + code_len) * 
10_000 - 30_000) // blocks to rent * 1; - assert_eq!(contract.rent_allowance, >::max_value() - first_rent); + assert_eq!(contract.rent_allowance, >::max_value() - first_rent); - // Advance blocks - initialize_block(5); + // Advance blocks + initialize_block(5); - // Trigger rent through call - assert_ok!( - Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, call::null()) - ); + // Trigger rent through call + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr.clone(), + 0, + GAS_LIMIT, + call::null() + )); - // Check contract is still alive - let contract = ContractInfoOf::::get(&addr).unwrap().get_alive(); - assert!(contract.is_some()) - }); + // Check contract is still alive + let contract = ContractInfoOf::::get(&addr).unwrap().get_alive(); + assert!(contract.is_some()) + }); } #[test] @@ -1209,92 +1145,84 @@ fn restoration_success() { fn restoration( test_different_storage: bool, test_restore_to_with_dirty_storage: bool, - test_code_evicted: bool + test_code_evicted: bool, ) { let (set_rent_wasm, set_rent_code_hash) = compile_module::("set_rent").unwrap(); let (restoration_wasm, restoration_code_hash) = compile_module::("restoration").unwrap(); let allowance: ::Balance = 10_000; - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - let _ = Balances::deposit_creating(&ALICE, 1_000_000); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + + // Create an account with address `BOB` with code `CODE_SET_RENT`. + // The input parameter sets the rent allowance to 0. + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, + GAS_LIMIT, + set_rent_wasm.clone(), + allowance.encode(), + vec![], + )); + let addr_bob = Contracts::contract_address(&ALICE, &set_rent_code_hash, &[]); - // Create an account with address `BOB` with code `CODE_SET_RENT`. - // The input parameter sets the rent allowance to 0. 
+ let mut events = vec![ + EventRecord { + phase: Phase::Initialization, + event: Event::System(frame_system::Event::NewAccount(ALICE)), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Balances(pallet_balances::Event::Endowed(ALICE, 1_000_000)), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::System(frame_system::Event::NewAccount(addr_bob.clone())), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Balances(pallet_balances::Event::Endowed(addr_bob.clone(), 30_000)), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Balances(pallet_balances::Event::Transfer( + ALICE, + addr_bob.clone(), + 30_000, + )), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Contracts(crate::Event::CodeStored(set_rent_code_hash.into())), + topics: vec![], + }, + EventRecord { + phase: Phase::Initialization, + event: Event::Contracts(crate::Event::Instantiated(ALICE, addr_bob.clone())), + topics: vec![], + }, + ]; + + // Create another contract from the same code in order to increment the codes + // refcounter so that it stays on chain. 
+ if !test_code_evicted { assert_ok!(Contracts::instantiate_with_code( Origin::signed(ALICE), - 30_000, + 20_000, GAS_LIMIT, - set_rent_wasm.clone(), + set_rent_wasm, allowance.encode(), - vec![], + vec![1], )); - let addr_bob = Contracts::contract_address(&ALICE, &set_rent_code_hash, &[]); - - let mut events = vec![ - EventRecord { - phase: Phase::Initialization, - event: Event::System(frame_system::Event::NewAccount(ALICE)), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Endowed(ALICE, 1_000_000) - ), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::System(frame_system::Event::NewAccount(addr_bob.clone())), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Endowed(addr_bob.clone(), 30_000) - ), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Transfer(ALICE, addr_bob.clone(), 30_000) - ), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Contracts( - crate::Event::CodeStored(set_rent_code_hash.into()) - ), - topics: vec![], - }, - EventRecord { - phase: Phase::Initialization, - event: Event::Contracts( - crate::Event::Instantiated(ALICE, addr_bob.clone()) - ), - topics: vec![], - }, - ]; - - // Create another contract from the same code in order to increment the codes - // refcounter so that it stays on chain. 
- if !test_code_evicted { - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 20_000, - GAS_LIMIT, - set_rent_wasm, - allowance.encode(), - vec![1], - )); - assert_refcount!(set_rent_code_hash, 2); - let addr_dummy = Contracts::contract_address(&ALICE, &set_rent_code_hash, &[1]); - events.extend([ + assert_refcount!(set_rent_code_hash, 2); + let addr_dummy = Contracts::contract_address(&ALICE, &set_rent_code_hash, &[1]); + events.extend( + [ EventRecord { phase: Phase::Initialization, event: Event::System(frame_system::Event::NewAccount(addr_dummy.clone())), @@ -1302,146 +1230,144 @@ fn restoration( }, EventRecord { phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Endowed(addr_dummy.clone(), 20_000) - ), + event: Event::Balances(pallet_balances::Event::Endowed( + addr_dummy.clone(), + 20_000, + )), topics: vec![], }, EventRecord { phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Transfer(ALICE, addr_dummy.clone(), 20_000) - ), + event: Event::Balances(pallet_balances::Event::Transfer( + ALICE, + addr_dummy.clone(), + 20_000, + )), topics: vec![], }, EventRecord { phase: Phase::Initialization, - event: Event::Contracts( - crate::Event::Instantiated(ALICE, addr_dummy.clone()) - ), + event: Event::Contracts(crate::Event::Instantiated( + ALICE, + addr_dummy.clone(), + )), topics: vec![], }, - ].iter().cloned()); - } - - assert_eq!(System::events(), events); + ] + .iter() + .cloned(), + ); + } - // Check if `BOB` was created successfully and that the rent allowance is below what - // we specified as the first rent was already collected. 
- let bob_contract = ContractInfoOf::::get(&addr_bob).unwrap().get_alive().unwrap(); - assert!(bob_contract.rent_allowance < allowance); - - if test_different_storage { - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr_bob.clone(), 0, GAS_LIMIT, - call::set_storage_4_byte()) - ); - } + assert_eq!(System::events(), events); - // Advance blocks in order to make the contract run out of money for rent. - initialize_block(27); - - // Call `BOB`, which makes it pay rent. Since the rent allowance is set to 20_000 - // we expect that it is no longer callable but keeps existing until someone - // calls `claim_surcharge`. - assert_err_ignore_postinfo!( - Contracts::call( - Origin::signed(ALICE), addr_bob.clone(), 0, GAS_LIMIT, call::null() - ), - Error::::RentNotPaid, - ); - assert!(System::events().is_empty()); - assert!(ContractInfoOf::::get(&addr_bob).unwrap().get_alive().is_some()); - assert_ok!(Contracts::claim_surcharge(Origin::none(), addr_bob.clone(), Some(ALICE))); - assert!(ContractInfoOf::::get(&addr_bob).unwrap().get_tombstone().is_some()); - if test_code_evicted { - assert_refcount!(set_rent_code_hash, 0); - } else { - assert_refcount!(set_rent_code_hash, 1); - } + // Check if `BOB` was created successfully and that the rent allowance is below what + // we specified as the first rent was already collected. + let bob_contract = ContractInfoOf::::get(&addr_bob).unwrap().get_alive().unwrap(); + assert!(bob_contract.rent_allowance < allowance); - // Create another account with the address `DJANGO` with `CODE_RESTORATION`. - // - // Note that we can't use `ALICE` for creating `DJANGO` so we create yet another - // account `CHARLIE` and create `DJANGO` with it. 
- let _ = Balances::deposit_creating(&CHARLIE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(CHARLIE), - 30_000, + if test_different_storage { + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr_bob.clone(), + 0, GAS_LIMIT, - restoration_wasm, - vec![], - vec![], + call::set_storage_4_byte() )); - let addr_django = Contracts::contract_address(&CHARLIE, &restoration_code_hash, &[]); + } - // Before performing a call to `DJANGO` save its original trie id. - let django_trie_id = ContractInfoOf::::get(&addr_django).unwrap() - .get_alive().unwrap().trie_id; + // Advance blocks in order to make the contract run out of money for rent. + initialize_block(27); - // The trie is regarded as 'dirty' when it was written to in the current block. - if !test_restore_to_with_dirty_storage { - // Advance 1 block. - initialize_block(28); - } + // Call `BOB`, which makes it pay rent. Since the rent allowance is set to 20_000 + // we expect that it is no longer callable but keeps existing until someone + // calls `claim_surcharge`. + assert_err_ignore_postinfo!( + Contracts::call(Origin::signed(ALICE), addr_bob.clone(), 0, GAS_LIMIT, call::null()), + Error::::RentNotPaid, + ); + assert!(System::events().is_empty()); + assert!(ContractInfoOf::::get(&addr_bob).unwrap().get_alive().is_some()); + assert_ok!(Contracts::claim_surcharge(Origin::none(), addr_bob.clone(), Some(ALICE))); + assert!(ContractInfoOf::::get(&addr_bob).unwrap().get_tombstone().is_some()); + if test_code_evicted { + assert_refcount!(set_rent_code_hash, 0); + } else { + assert_refcount!(set_rent_code_hash, 1); + } - // Perform a call to `DJANGO`. This should either perform restoration successfully or - // fail depending on the test parameters. 
- let perform_the_restoration = || { - Contracts::call( - Origin::signed(ALICE), - addr_django.clone(), - 0, - GAS_LIMIT, - set_rent_code_hash - .as_ref() - .iter() - .chain(AsRef::<[u8]>::as_ref(&addr_bob)) - .cloned() - .collect(), - ) - }; + // Create another account with the address `DJANGO` with `CODE_RESTORATION`. + // + // Note that we can't use `ALICE` for creating `DJANGO` so we create yet another + // account `CHARLIE` and create `DJANGO` with it. + let _ = Balances::deposit_creating(&CHARLIE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(CHARLIE), + 30_000, + GAS_LIMIT, + restoration_wasm, + vec![], + vec![], + )); + let addr_django = Contracts::contract_address(&CHARLIE, &restoration_code_hash, &[]); - // The key that is used in the restorer contract but is not in the target contract. - // Is supplied as delta to the restoration. We need it to check whether the key - // is properly removed on success but still there on failure. - let delta_key = { - let mut key = [0u8; 32]; - key[0] = 1; - key - }; + // Before performing a call to `DJANGO` save its original trie id. + let django_trie_id = + ContractInfoOf::::get(&addr_django).unwrap().get_alive().unwrap().trie_id; - if test_different_storage || test_restore_to_with_dirty_storage || test_code_evicted { - // Parametrization of the test imply restoration failure. Check that `DJANGO` aka - // restoration contract is still in place and also that `BOB` doesn't exist. 
- let result = perform_the_restoration(); - assert!(ContractInfoOf::::get(&addr_bob).unwrap().get_tombstone().is_some()); - let django_contract = ContractInfoOf::::get(&addr_django).unwrap() - .get_alive().unwrap(); - assert_eq!(django_contract.storage_size, 8); - assert_eq!(django_contract.trie_id, django_trie_id); - assert_eq!(django_contract.deduct_block, System::block_number()); - assert_eq!( - Storage::::read(&django_trie_id, &delta_key), - Some(vec![40, 0, 0, 0]), - ); - match ( - test_different_storage, - test_restore_to_with_dirty_storage, - test_code_evicted - ) { - (true, false, false) => { - assert_err_ignore_postinfo!( - result, Error::::InvalidTombstone, - ); - assert_eq!(System::events(), vec![]); - } - (_, true, false) => { - assert_err_ignore_postinfo!( - result, Error::::InvalidContractOrigin, - ); - assert_eq!(System::events(), vec![ + // The trie is regarded as 'dirty' when it was written to in the current block. + if !test_restore_to_with_dirty_storage { + // Advance 1 block. + initialize_block(28); + } + + // Perform a call to `DJANGO`. This should either perform restoration successfully or + // fail depending on the test parameters. + let perform_the_restoration = || { + Contracts::call( + Origin::signed(ALICE), + addr_django.clone(), + 0, + GAS_LIMIT, + set_rent_code_hash + .as_ref() + .iter() + .chain(AsRef::<[u8]>::as_ref(&addr_bob)) + .cloned() + .collect(), + ) + }; + + // The key that is used in the restorer contract but is not in the target contract. + // Is supplied as delta to the restoration. We need it to check whether the key + // is properly removed on success but still there on failure. + let delta_key = { + let mut key = [0u8; 32]; + key[0] = 1; + key + }; + + if test_different_storage || test_restore_to_with_dirty_storage || test_code_evicted { + // Parametrization of the test imply restoration failure. Check that `DJANGO` aka + // restoration contract is still in place and also that `BOB` doesn't exist. 
+ let result = perform_the_restoration(); + assert!(ContractInfoOf::::get(&addr_bob).unwrap().get_tombstone().is_some()); + let django_contract = + ContractInfoOf::::get(&addr_django).unwrap().get_alive().unwrap(); + assert_eq!(django_contract.storage_size, 8); + assert_eq!(django_contract.trie_id, django_trie_id); + assert_eq!(django_contract.deduct_block, System::block_number()); + assert_eq!(Storage::::read(&django_trie_id, &delta_key), Some(vec![40, 0, 0, 0]),); + match (test_different_storage, test_restore_to_with_dirty_storage, test_code_evicted) { + (true, false, false) => { + assert_err_ignore_postinfo!(result, Error::::InvalidTombstone,); + assert_eq!(System::events(), vec![]); + }, + (_, true, false) => { + assert_err_ignore_postinfo!(result, Error::::InvalidContractOrigin,); + assert_eq!( + System::events(), + vec![ EventRecord { phase: Phase::Initialization, event: Event::Contracts(crate::Event::Evicted(addr_bob)), @@ -1454,67 +1380,76 @@ fn restoration( }, EventRecord { phase: Phase::Initialization, - event: Event::Balances(pallet_balances::Event::Endowed(CHARLIE, 1_000_000)), + event: Event::Balances(pallet_balances::Event::Endowed( + CHARLIE, 1_000_000 + )), topics: vec![], }, EventRecord { phase: Phase::Initialization, - event: Event::System(frame_system::Event::NewAccount(addr_django.clone())), + event: Event::System(frame_system::Event::NewAccount( + addr_django.clone() + )), topics: vec![], }, EventRecord { phase: Phase::Initialization, - event: Event::Balances(pallet_balances::Event::Endowed(addr_django.clone(), 30_000)), + event: Event::Balances(pallet_balances::Event::Endowed( + addr_django.clone(), + 30_000 + )), topics: vec![], }, EventRecord { phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Transfer(CHARLIE, addr_django.clone(), 30_000) - ), + event: Event::Balances(pallet_balances::Event::Transfer( + CHARLIE, + addr_django.clone(), + 30_000 + )), topics: vec![], }, EventRecord { phase: 
Phase::Initialization, - event: Event::Contracts( - crate::Event::CodeStored(restoration_code_hash) - ), + event: Event::Contracts(crate::Event::CodeStored( + restoration_code_hash + )), topics: vec![], }, EventRecord { phase: Phase::Initialization, - event: Event::Contracts( - crate::Event::Instantiated(CHARLIE, addr_django.clone()) - ), + event: Event::Contracts(crate::Event::Instantiated( + CHARLIE, + addr_django.clone() + )), topics: vec![], }, + ] + ); + }, + (false, false, true) => { + assert_err_ignore_postinfo!(result, Error::::CodeNotFound,); + assert_refcount!(set_rent_code_hash, 0); + assert_eq!(System::events(), vec![]); + }, + _ => unreachable!(), + } + } else { + assert_ok!(perform_the_restoration()); + assert_refcount!(set_rent_code_hash, 2); - ]); - }, - (false, false, true) => { - assert_err_ignore_postinfo!( - result, Error::::CodeNotFound, - ); - assert_refcount!(set_rent_code_hash, 0); - assert_eq!(System::events(), vec![]); - }, - _ => unreachable!(), - } - } else { - assert_ok!(perform_the_restoration()); - assert_refcount!(set_rent_code_hash, 2); - - // Here we expect that the restoration is succeeded. Check that the restoration - // contract `DJANGO` ceased to exist and that `BOB` returned back. - let bob_contract = ContractInfoOf::::get(&addr_bob).unwrap() - .get_alive().unwrap(); - assert_eq!(bob_contract.rent_allowance, 50); - assert_eq!(bob_contract.storage_size, 4); - assert_eq!(bob_contract.trie_id, django_trie_id); - assert_eq!(bob_contract.deduct_block, System::block_number()); - assert!(ContractInfoOf::::get(&addr_django).is_none()); - assert_matches!(Storage::::read(&django_trie_id, &delta_key), None); - assert_eq!(System::events(), vec![ + // Here we expect that the restoration is succeeded. Check that the restoration + // contract `DJANGO` ceased to exist and that `BOB` returned back. 
+ let bob_contract = ContractInfoOf::::get(&addr_bob).unwrap().get_alive().unwrap(); + assert_eq!(bob_contract.rent_allowance, 50); + assert_eq!(bob_contract.storage_size, 4); + assert_eq!(bob_contract.trie_id, django_trie_id); + assert_eq!(bob_contract.deduct_block, System::block_number()); + assert!(ContractInfoOf::::get(&addr_django).is_none()); + assert_matches!(Storage::::read(&django_trie_id, &delta_key), None); + assert_eq!( + System::events(), + vec![ EventRecord { phase: Phase::Initialization, event: Event::Contracts(crate::Event::CodeRemoved(restoration_code_hash)), @@ -1527,60 +1462,59 @@ fn restoration( }, EventRecord { phase: Phase::Initialization, - event: Event::Contracts( - crate::Event::Restored( - addr_django, addr_bob, bob_contract.code_hash, 50 - ) - ), + event: Event::Contracts(crate::Event::Restored( + addr_django, + addr_bob, + bob_contract.code_hash, + 50 + )), topics: vec![], }, - ]); - } - }); + ] + ); + } + }); } #[test] fn storage_max_value_limit() { let (wasm, code_hash) = compile_module::("storage_size").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 30_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, + GAS_LIMIT, + wasm, + vec![], + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + ContractInfoOf::::get(&addr).unwrap().get_alive().unwrap(); - // Call contract with allowed storage value. - assert_ok!(Contracts::call( + // Call contract with allowed storage value. 
+ assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr.clone(), + 0, + GAS_LIMIT * 2, // we are copying a huge buffer + ::Schedule::get().limits.payload_len.encode(), + )); + + // Call contract with too large a storage value. + assert_err_ignore_postinfo!( + Contracts::call( Origin::signed(ALICE), - addr.clone(), + addr, 0, - GAS_LIMIT * 2, // we are copying a huge buffer - ::Schedule::get().limits.payload_len.encode(), - )); - - // Call contract with too large a storage value. - assert_err_ignore_postinfo!( - Contracts::call( - Origin::signed(ALICE), - addr, - 0, - GAS_LIMIT, - (::Schedule::get().limits.payload_len + 1).encode(), - ), - Error::::ValueTooLarge, - ); - }); + GAS_LIMIT, + (::Schedule::get().limits.payload_len + 1).encode(), + ), + Error::::ValueTooLarge, + ); + }); } #[test] @@ -1588,187 +1522,145 @@ fn deploy_and_call_other_contract() { let (callee_wasm, callee_code_hash) = compile_module::("return_with_data").unwrap(); let (caller_wasm, caller_code_hash) = compile_module::("caller_contract").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 100_000, - GAS_LIMIT, - caller_wasm, - vec![], - vec![], - )); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 100_000, - GAS_LIMIT, - callee_wasm, - 0u32.to_le_bytes().encode(), - vec![42], - )); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 100_000, + GAS_LIMIT, + caller_wasm, + vec![], + vec![], + )); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 100_000, + GAS_LIMIT, + callee_wasm, + 0u32.to_le_bytes().encode(), + vec![42], + )); - // Call BOB contract, which attempts to instantiate and call the 
callee contract and - // makes various assertions on the results from those calls. - assert_ok!(Contracts::call( - Origin::signed(ALICE), - Contracts::contract_address(&ALICE, &caller_code_hash, &[]), - 0, - GAS_LIMIT, - callee_code_hash.as_ref().to_vec(), - )); - }); + // Call BOB contract, which attempts to instantiate and call the callee contract and + // makes various assertions on the results from those calls. + assert_ok!(Contracts::call( + Origin::signed(ALICE), + Contracts::contract_address(&ALICE, &caller_code_hash, &[]), + 0, + GAS_LIMIT, + callee_code_hash.as_ref().to_vec(), + )); + }); } #[test] fn cannot_self_destruct_through_draning() { let (wasm, code_hash) = compile_module::("drain").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - let _ = Balances::deposit_creating(&ALICE, 1_000_000); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + let _ = Balances::deposit_creating(&ALICE, 1_000_000); - // Instantiate the BOB contract. - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 100_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + // Instantiate the BOB contract. + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 100_000, + GAS_LIMIT, + wasm, + vec![], + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - // Check that the BOB contract has been instantiated. - assert_matches!( - ContractInfoOf::::get(&addr), - Some(ContractInfo::Alive(_)) - ); + // Check that the BOB contract has been instantiated. + assert_matches!(ContractInfoOf::::get(&addr), Some(ContractInfo::Alive(_))); - // Call BOB which makes it send all funds to the zero address - // The contract code asserts that the correct error value is returned. 
- assert_ok!( - Contracts::call( - Origin::signed(ALICE), - addr, - 0, - GAS_LIMIT, - vec![], - ) - ); - }); + // Call BOB which makes it send all funds to the zero address + // The contract code asserts that the correct error value is returned. + assert_ok!(Contracts::call(Origin::signed(ALICE), addr, 0, GAS_LIMIT, vec![],)); + }); } #[test] fn cannot_self_destruct_while_live() { let (wasm, code_hash) = compile_module::("self_destruct").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - let _ = Balances::deposit_creating(&ALICE, 1_000_000); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + let _ = Balances::deposit_creating(&ALICE, 1_000_000); - // Instantiate the BOB contract. - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 100_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + // Instantiate the BOB contract. + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 100_000, + GAS_LIMIT, + wasm, + vec![], + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - // Check that the BOB contract has been instantiated. - assert_matches!( - ContractInfoOf::::get(&addr), - Some(ContractInfo::Alive(_)) - ); + // Check that the BOB contract has been instantiated. + assert_matches!(ContractInfoOf::::get(&addr), Some(ContractInfo::Alive(_))); - // Call BOB with input data, forcing it make a recursive call to itself to - // self-destruct, resulting in a trap. - assert_err_ignore_postinfo!( - Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - vec![0], - ), - Error::::ContractTrapped, - ); + // Call BOB with input data, forcing it make a recursive call to itself to + // self-destruct, resulting in a trap. 
+ assert_err_ignore_postinfo!( + Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, vec![0],), + Error::::ContractTrapped, + ); - // Check that BOB is still alive. - assert_matches!( - ContractInfoOf::::get(&addr), - Some(ContractInfo::Alive(_)) - ); - }); + // Check that BOB is still alive. + assert_matches!(ContractInfoOf::::get(&addr), Some(ContractInfo::Alive(_))); + }); } #[test] fn self_destruct_works() { let (wasm, code_hash) = compile_module::("self_destruct").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - let _ = Balances::deposit_creating(&DJANGO, 1_000_000); - - // Instantiate the BOB contract. - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 100_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + let _ = Balances::deposit_creating(&DJANGO, 1_000_000); - // Check that the BOB contract has been instantiated. - assert_matches!( - ContractInfoOf::::get(&addr), - Some(ContractInfo::Alive(_)) - ); + // Instantiate the BOB contract. + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 100_000, + GAS_LIMIT, + wasm, + vec![], + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - // Drop all previous events - initialize_block(2); - - // Call BOB without input data which triggers termination. - assert_matches!( - Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - vec![], - ), - Ok(_) - ); + // Check that the BOB contract has been instantiated. + assert_matches!(ContractInfoOf::::get(&addr), Some(ContractInfo::Alive(_))); + + // Drop all previous events + initialize_block(2); + + // Call BOB without input data which triggers termination. 
+ assert_matches!( + Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, vec![],), + Ok(_) + ); - // The call triggers rent collection that reduces the amount of balance - // that remains for the beneficiary. - let balance_after_rent = 93_078; + // The call triggers rent collection that reduces the amount of balance + // that remains for the beneficiary. + let balance_after_rent = 93_078; - pretty_assertions::assert_eq!(System::events(), vec![ + pretty_assertions::assert_eq!( + System::events(), + vec![ EventRecord { phase: Phase::Initialization, - event: Event::System( - frame_system::Event::KilledAccount(addr.clone()) - ), + event: Event::System(frame_system::Event::KilledAccount(addr.clone())), topics: vec![], }, EventRecord { phase: Phase::Initialization, - event: Event::Balances( - pallet_balances::Event::Transfer(addr.clone(), DJANGO, balance_after_rent) - ), + event: Event::Balances(pallet_balances::Event::Transfer( + addr.clone(), + DJANGO, + balance_after_rent + )), topics: vec![], }, EventRecord { @@ -1778,20 +1670,19 @@ fn self_destruct_works() { }, EventRecord { phase: Phase::Initialization, - event: Event::Contracts( - crate::Event::Terminated(addr.clone(), DJANGO) - ), + event: Event::Contracts(crate::Event::Terminated(addr.clone(), DJANGO)), topics: vec![], }, - ]); + ] + ); - // Check that account is gone - assert!(ContractInfoOf::::get(&addr).is_none()); + // Check that account is gone + assert!(ContractInfoOf::::get(&addr).is_none()); - // check that the beneficiary (django) got remaining balance - // some rent was deducted before termination - assert_eq!(Balances::free_balance(DJANGO), 1_000_000 + balance_after_rent); - }); + // check that the beneficiary (django) got remaining balance + // some rent was deducted before termination + assert_eq!(Balances::free_balance(DJANGO), 1_000_000 + balance_after_rent); + }); } // This tests that one contract cannot prevent another from self-destructing by sending it @@ -1801,134 +1692,116 @@ 
fn destroy_contract_and_transfer_funds() { let (callee_wasm, callee_code_hash) = compile_module::("self_destruct").unwrap(); let (caller_wasm, caller_code_hash) = compile_module::("destroy_and_transfer").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - // Create - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 200_000, - GAS_LIMIT, - callee_wasm, - vec![], - vec![42] - )); - - // This deploys the BOB contract, which in turn deploys the CHARLIE contract during - // construction. - assert_ok!(Contracts::instantiate_with_code( - Origin::signed(ALICE), - 200_000, - GAS_LIMIT, - caller_wasm, - callee_code_hash.as_ref().to_vec(), - vec![], - )); - let addr_bob = Contracts::contract_address(&ALICE, &caller_code_hash, &[]); - let addr_charlie = Contracts::contract_address( - &addr_bob, &callee_code_hash, &[0x47, 0x11] - ); - - // Check that the CHARLIE contract has been instantiated. - assert_matches!( - ContractInfoOf::::get(&addr_charlie), - Some(ContractInfo::Alive(_)) - ); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + // Create + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 200_000, + GAS_LIMIT, + callee_wasm, + vec![], + vec![42] + )); - // Call BOB, which calls CHARLIE, forcing CHARLIE to self-destruct. - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr_bob, - 0, - GAS_LIMIT, - addr_charlie.encode(), - )); + // This deploys the BOB contract, which in turn deploys the CHARLIE contract during + // construction. 
+ assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 200_000, + GAS_LIMIT, + caller_wasm, + callee_code_hash.as_ref().to_vec(), + vec![], + )); + let addr_bob = Contracts::contract_address(&ALICE, &caller_code_hash, &[]); + let addr_charlie = Contracts::contract_address(&addr_bob, &callee_code_hash, &[0x47, 0x11]); - // Check that CHARLIE has moved on to the great beyond (ie. died). - assert!(ContractInfoOf::::get(&addr_charlie).is_none()); - }); -} + // Check that the CHARLIE contract has been instantiated. + assert_matches!(ContractInfoOf::::get(&addr_charlie), Some(ContractInfo::Alive(_))); -#[test] -fn cannot_self_destruct_in_constructor() { - let (wasm, _) = compile_module::("self_destructing_constructor").unwrap(); - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - let _ = Balances::deposit_creating(&ALICE, 1_000_000); - - // Fail to instantiate the BOB because the contructor calls seal_terminate. - assert_err_ignore_postinfo!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - 100_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - ), - Error::::TerminatedInConstructor, - ); - }); + // Call BOB, which calls CHARLIE, forcing CHARLIE to self-destruct. + assert_ok!(Contracts::call( + Origin::signed(ALICE), + addr_bob, + 0, + GAS_LIMIT, + addr_charlie.encode(), + )); + + // Check that CHARLIE has moved on to the great beyond (ie. died). 
+ assert!(ContractInfoOf::::get(&addr_charlie).is_none()); + }); } #[test] -fn crypto_hashes() { - let (wasm, code_hash) = compile_module::("crypto_hashes").unwrap(); - - ExtBuilder::default() - .existential_deposit(50) - .build() - .execute_with(|| { - let _ = Balances::deposit_creating(&ALICE, 1_000_000); +fn cannot_self_destruct_in_constructor() { + let (wasm, _) = compile_module::("self_destructing_constructor").unwrap(); + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + let _ = Balances::deposit_creating(&ALICE, 1_000_000); - // Instantiate the CRYPTO_HASHES contract. - assert_ok!(Contracts::instantiate_with_code( + // Fail to instantiate the BOB because the contructor calls seal_terminate. + assert_err_ignore_postinfo!( + Contracts::instantiate_with_code( Origin::signed(ALICE), 100_000, GAS_LIMIT, wasm, vec![], vec![], - )); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - // Perform the call. - let input = b"_DEAD_BEEF"; - use sp_io::hashing::*; - // Wraps a hash function into a more dynamic form usable for testing. - macro_rules! dyn_hash_fn { - ($name:ident) => { - Box::new(|input| $name(input).as_ref().to_vec().into_boxed_slice()) - }; - } - // All hash functions and their associated output byte lengths. - let test_cases: &[(Box Box<[u8]>>, usize)] = &[ - (dyn_hash_fn!(sha2_256), 32), - (dyn_hash_fn!(keccak_256), 32), - (dyn_hash_fn!(blake2_256), 32), - (dyn_hash_fn!(blake2_128), 16), - ]; - // Test the given hash functions for the input: "_DEAD_BEEF" - for (n, (hash_fn, expected_size)) in test_cases.iter().enumerate() { - // We offset data in the contract tables by 1. 
- let mut params = vec![(n + 1) as u8]; - params.extend_from_slice(input); - let result = >::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - params, - false, - ).result.unwrap(); - assert!(result.is_success()); - let expected = hash_fn(input.as_ref()); - assert_eq!(&result.data[..*expected_size], &*expected); - } - }) + ), + Error::::TerminatedInConstructor, + ); + }); +} + +#[test] +fn crypto_hashes() { + let (wasm, code_hash) = compile_module::("crypto_hashes").unwrap(); + + ExtBuilder::default().existential_deposit(50).build().execute_with(|| { + let _ = Balances::deposit_creating(&ALICE, 1_000_000); + + // Instantiate the CRYPTO_HASHES contract. + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 100_000, + GAS_LIMIT, + wasm, + vec![], + vec![], + )); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + // Perform the call. + let input = b"_DEAD_BEEF"; + use sp_io::hashing::*; + // Wraps a hash function into a more dynamic form usable for testing. + macro_rules! dyn_hash_fn { + ($name:ident) => { + Box::new(|input| $name(input).as_ref().to_vec().into_boxed_slice()) + }; + } + // All hash functions and their associated output byte lengths. + let test_cases: &[(Box Box<[u8]>>, usize)] = &[ + (dyn_hash_fn!(sha2_256), 32), + (dyn_hash_fn!(keccak_256), 32), + (dyn_hash_fn!(blake2_256), 32), + (dyn_hash_fn!(blake2_128), 16), + ]; + // Test the given hash functions for the input: "_DEAD_BEEF" + for (n, (hash_fn, expected_size)) in test_cases.iter().enumerate() { + // We offset data in the contract tables by 1. 
+ let mut params = vec![(n + 1) as u8]; + params.extend_from_slice(input); + let result = + >::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, params, false) + .result + .unwrap(); + assert!(result.is_success()); + let expected = hash_fn(input.as_ref()); + assert_eq!(&result.data[..*expected_size], &*expected); + } + }) } #[test] @@ -1938,28 +1811,21 @@ fn transfer_return_code() { let subsistence = Pallet::::subsistence_threshold(); let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - wasm, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + wasm, + vec![], + vec![], + ),); let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); // Contract has only the minimal balance so any transfer will return BelowSubsistence. Balances::make_free_balance_be(&addr, subsistence); - let result = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - vec![], - false, - ).result.unwrap(); + let result = Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, vec![], false) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::BelowSubsistenceThreshold); // Contract has enough total balance in order to not go below the subsistence @@ -1967,14 +1833,7 @@ fn transfer_return_code() { // the transfer still fails but with another return code. 
Balances::make_free_balance_be(&addr, subsistence + 100); Balances::reserve(&addr, subsistence + 100).unwrap(); - let result = Contracts::bare_call( - ALICE, - addr, - 0, - GAS_LIMIT, - vec![], - false, - ).result.unwrap(); + let result = Contracts::bare_call(ALICE, addr, 0, GAS_LIMIT, vec![], false).result.unwrap(); assert_return_code!(result, RuntimeReturnCode::TransferFailed); }); } @@ -1988,16 +1847,14 @@ fn call_return_code() { let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); let _ = Balances::deposit_creating(&CHARLIE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - caller_code, - vec![0], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + caller_code, + vec![0], + vec![], + ),); let addr_bob = Contracts::contract_address(&ALICE, &caller_hash, &[]); Balances::make_free_balance_be(&addr_bob, subsistence); @@ -2009,19 +1866,19 @@ fn call_return_code() { GAS_LIMIT, AsRef::<[u8]>::as_ref(&DJANGO).to_vec(), false, - ).result.unwrap(); + ) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::NotCallable); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(CHARLIE), - subsistence * 100, - GAS_LIMIT, - callee_code, - vec![0], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(CHARLIE), + subsistence * 100, + GAS_LIMIT, + callee_code, + vec![0], + vec![], + ),); let addr_django = Contracts::contract_address(&CHARLIE, &callee_hash, &[]); Balances::make_free_balance_be(&addr_django, subsistence); @@ -2031,9 +1888,15 @@ fn call_return_code() { addr_bob.clone(), 0, GAS_LIMIT, - AsRef::<[u8]>::as_ref(&addr_django).iter().chain(&0u32.to_le_bytes()).cloned().collect(), + AsRef::<[u8]>::as_ref(&addr_django) + .iter() + .chain(&0u32.to_le_bytes()) + .cloned() + .collect(), false, - ).result.unwrap(); + ) + .result + .unwrap(); 
assert_return_code!(result, RuntimeReturnCode::BelowSubsistenceThreshold); // Contract has enough total balance in order to not go below the subsistence @@ -2046,9 +1909,15 @@ fn call_return_code() { addr_bob.clone(), 0, GAS_LIMIT, - AsRef::<[u8]>::as_ref(&addr_django).iter().chain(&0u32.to_le_bytes()).cloned().collect(), + AsRef::<[u8]>::as_ref(&addr_django) + .iter() + .chain(&0u32.to_le_bytes()) + .cloned() + .collect(), false, - ).result.unwrap(); + ) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::TransferFailed); // Contract has enough balance but callee reverts because "1" is passed. @@ -2058,9 +1927,15 @@ fn call_return_code() { addr_bob.clone(), 0, GAS_LIMIT, - AsRef::<[u8]>::as_ref(&addr_django).iter().chain(&1u32.to_le_bytes()).cloned().collect(), + AsRef::<[u8]>::as_ref(&addr_django) + .iter() + .chain(&1u32.to_le_bytes()) + .cloned() + .collect(), false, - ).result.unwrap(); + ) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::CalleeReverted); // Contract has enough balance but callee traps because "2" is passed. 
@@ -2069,11 +1944,16 @@ fn call_return_code() { addr_bob, 0, GAS_LIMIT, - AsRef::<[u8]>::as_ref(&addr_django).iter().chain(&2u32.to_le_bytes()).cloned().collect(), + AsRef::<[u8]>::as_ref(&addr_django) + .iter() + .chain(&2u32.to_le_bytes()) + .cloned() + .collect(), false, - ).result.unwrap(); + ) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::CalleeTrapped); - }); } @@ -2087,39 +1967,31 @@ fn instantiate_return_code() { let _ = Balances::deposit_creating(&CHARLIE, 1000 * subsistence); let callee_hash = callee_hash.as_ref().to_vec(); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - callee_code, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + callee_code, + vec![], + vec![], + ),); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - caller_code, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + caller_code, + vec![], + vec![], + ),); let addr = Contracts::contract_address(&ALICE, &caller_hash, &[]); // Contract has only the minimal balance so any transfer will return BelowSubsistence. Balances::make_free_balance_be(&addr, subsistence); - let result = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - callee_hash.clone(), - false, - ).result.unwrap(); + let result = + Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, callee_hash.clone(), false) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::BelowSubsistenceThreshold); // Contract has enough total balance in order to not go below the subsistence @@ -2127,26 +1999,17 @@ fn instantiate_return_code() { // the transfer still fails but with another return code. 
Balances::make_free_balance_be(&addr, subsistence + 10_000); Balances::reserve(&addr, subsistence + 10_000).unwrap(); - let result = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - callee_hash.clone(), - false, - ).result.unwrap(); + let result = + Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, callee_hash.clone(), false) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::TransferFailed); // Contract has enough balance but the passed code hash is invalid Balances::make_free_balance_be(&addr, subsistence + 10_000); - let result = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - vec![0; 33], - false, - ).result.unwrap(); + let result = Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, vec![0; 33], false) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::CodeNotFound); // Contract has enough balance but callee reverts because "1" is passed. @@ -2157,7 +2020,9 @@ fn instantiate_return_code() { GAS_LIMIT, callee_hash.iter().chain(&1u32.to_le_bytes()).cloned().collect(), false, - ).result.unwrap(); + ) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::CalleeReverted); // Contract has enough balance but callee traps because "2" is passed. 
@@ -2168,9 +2033,10 @@ fn instantiate_return_code() { GAS_LIMIT, callee_hash.iter().chain(&2u32.to_le_bytes()).cloned().collect(), false, - ).result.unwrap(); + ) + .result + .unwrap(); assert_return_code!(result, RuntimeReturnCode::CalleeTrapped); - }); } @@ -2201,26 +2067,18 @@ fn disabled_chain_extension_errors_on_call() { ExtBuilder::default().existential_deposit(50).build().execute_with(|| { let subsistence = Pallet::::subsistence_threshold(); let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - code, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + code, + vec![], + vec![], + ),); let addr = Contracts::contract_address(&ALICE, &hash, &[]); TestExtension::disable(); assert_err_ignore_postinfo!( - Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - vec![], - ), + Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, vec![],), Error::::NoChainExtension, ); }); @@ -2232,16 +2090,14 @@ fn chain_extension_works() { ExtBuilder::default().existential_deposit(50).build().execute_with(|| { let subsistence = Pallet::::subsistence_threshold(); let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - code, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + code, + vec![], + vec![], + ),); let addr = Contracts::contract_address(&ALICE, &hash, &[]); // The contract takes a up to 2 byte buffer where the first byte passed is used as @@ -2249,51 +2105,27 @@ fn chain_extension_works() { // func_id. 
// 0 = read input buffer and pass it through as output - let result = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - vec![0, 99], - false, - ); + let result = Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, vec![0, 99], false); let gas_consumed = result.gas_consumed; assert_eq!(TestExtension::last_seen_buffer(), vec![0, 99]); assert_eq!(result.result.unwrap().data, Bytes(vec![0, 99])); // 1 = treat inputs as integer primitives and store the supplied integers - Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - vec![1], - false, - ).result.unwrap(); + Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, vec![1], false) + .result + .unwrap(); // those values passed in the fixture assert_eq!(TestExtension::last_seen_inputs(), (4, 1, 16, 12)); // 2 = charge some extra weight (amount supplied in second byte) - let result = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - vec![2, 42], - false, - ); + let result = Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, vec![2, 42], false); assert_ok!(result.result); assert_eq!(result.gas_consumed, gas_consumed + 42); // 3 = diverging chain extension call that sets flags to 0x1 and returns a fixed buffer - let result = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - vec![3], - false, - ).result.unwrap(); + let result = Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, vec![3], false) + .result + .unwrap(); assert_eq!(result.flags, ReturnFlags::REVERT); assert_eq!(result.data, Bytes(vec![42, 99])); }); @@ -2306,32 +2138,24 @@ fn lazy_removal_works() { let subsistence = Pallet::::subsistence_threshold(); let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - code, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + code, + vec![], + 
vec![], + ),); let addr = Contracts::contract_address(&ALICE, &hash, &[]); - let info = >::get(&addr).unwrap().get_alive().unwrap(); + let info = >::get(&addr).unwrap().get_alive().unwrap(); let trie = &info.child_trie_info(); // Put value into the contracts child trie child::put(trie, &[99], &42); // Terminate the contract - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - vec![], - )); + assert_ok!(Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, vec![],)); // Contract info should be gone assert!(!>::contains_key(&addr)); @@ -2355,10 +2179,9 @@ fn lazy_removal_partial_remove_works() { let extra_keys = 7u32; let weight_limit = 5_000_000_000; let (_, max_keys) = Storage::::deletion_budget(1, weight_limit); - let vals: Vec<_> = (0..max_keys + extra_keys).map(|i| { - (blake2_256(&i.encode()), (i as u32), (i as u32).encode()) - }) - .collect(); + let vals: Vec<_> = (0..max_keys + extra_keys) + .map(|i| (blake2_256(&i.encode()), (i as u32), (i as u32).encode())) + .collect(); let mut ext = ExtBuilder::default().existential_deposit(50).build(); @@ -2366,39 +2189,27 @@ fn lazy_removal_partial_remove_works() { let subsistence = Pallet::::subsistence_threshold(); let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - code, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + code, + vec![], + vec![], + ),); let addr = Contracts::contract_address(&ALICE, &hash, &[]); - let mut info = >::get(&addr).unwrap().get_alive().unwrap(); + let mut info = >::get(&addr).unwrap().get_alive().unwrap(); // Put value into the contracts child trie for val in &vals { - Storage::::write( - System::block_number(), - &mut info, - &val.0, - Some(val.2.clone()), - ).unwrap(); - } - >::insert(&addr, ContractInfo::Alive(info.clone())); - - 
// Terminate the contract - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - vec![], - )); + Storage::::write(System::block_number(), &mut info, &val.0, Some(val.2.clone())) + .unwrap(); + } + >::insert(&addr, ContractInfo::Alive(info.clone())); + + // Terminate the contract + assert_ok!(Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, vec![],)); // Contract info should be gone assert!(!>::contains_key(&addr)); @@ -2449,46 +2260,33 @@ fn lazy_removal_does_no_run_on_full_block() { let subsistence = Pallet::::subsistence_threshold(); let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - code, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + code, + vec![], + vec![], + ),); let addr = Contracts::contract_address(&ALICE, &hash, &[]); - let mut info = >::get(&addr).unwrap().get_alive().unwrap(); + let mut info = >::get(&addr).unwrap().get_alive().unwrap(); let max_keys = 30; // Create some storage items for the contract. 
- let vals: Vec<_> = (0..max_keys).map(|i| { - (blake2_256(&i.encode()), (i as u32), (i as u32).encode()) - }) - .collect(); + let vals: Vec<_> = (0..max_keys) + .map(|i| (blake2_256(&i.encode()), (i as u32), (i as u32).encode())) + .collect(); // Put value into the contracts child trie for val in &vals { - Storage::::write( - System::block_number(), - &mut info, - &val.0, - Some(val.2.clone()), - ).unwrap(); + Storage::::write(System::block_number(), &mut info, &val.0, Some(val.2.clone())) + .unwrap(); } >::insert(&addr, ContractInfo::Alive(info.clone())); // Terminate the contract - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - vec![], - )); + assert_ok!(Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, vec![],)); // Contract info should be gone assert!(!>::contains_key(&addr)); @@ -2527,7 +2325,6 @@ fn lazy_removal_does_no_run_on_full_block() { }); } - #[test] fn lazy_removal_does_not_use_all_weight() { let (code, hash) = compile_module::("self_destruct").unwrap(); @@ -2535,47 +2332,34 @@ fn lazy_removal_does_not_use_all_weight() { let subsistence = Pallet::::subsistence_threshold(); let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - code, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + code, + vec![], + vec![], + ),); let addr = Contracts::contract_address(&ALICE, &hash, &[]); - let mut info = >::get(&addr).unwrap().get_alive().unwrap(); + let mut info = >::get(&addr).unwrap().get_alive().unwrap(); let weight_limit = 5_000_000_000; let (weight_per_key, max_keys) = Storage::::deletion_budget(1, weight_limit); // We create a contract with one less storage item than we can remove within the limit - let vals: Vec<_> = (0..max_keys - 1).map(|i| { - (blake2_256(&i.encode()), (i as u32), (i as 
u32).encode()) - }) - .collect(); + let vals: Vec<_> = (0..max_keys - 1) + .map(|i| (blake2_256(&i.encode()), (i as u32), (i as u32).encode())) + .collect(); // Put value into the contracts child trie for val in &vals { - Storage::::write( - System::block_number(), - &mut info, - &val.0, - Some(val.2.clone()), - ).unwrap(); + Storage::::write(System::block_number(), &mut info, &val.0, Some(val.2.clone())) + .unwrap(); } >::insert(&addr, ContractInfo::Alive(info.clone())); // Terminate the contract - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - vec![], - )); + assert_ok!(Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, vec![],)); // Contract info should be gone assert!(!>::contains_key(&addr)); @@ -2607,16 +2391,14 @@ fn deletion_queue_full() { let subsistence = Pallet::::subsistence_threshold(); let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - code, - vec![], - vec![], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + code, + vec![], + vec![], + ),); let addr = Contracts::contract_address(&ALICE, &hash, &[]); @@ -2625,18 +2407,12 @@ fn deletion_queue_full() { // Terminate the contract should fail assert_err_ignore_postinfo!( - Contracts::call( - Origin::signed(ALICE), - addr.clone(), - 0, - GAS_LIMIT, - vec![], - ), + Contracts::call(Origin::signed(ALICE), addr.clone(), 0, GAS_LIMIT, vec![],), Error::::DeletionQueueFull, ); // Contract should be alive because removal failed - >::get(&addr).unwrap().get_alive().unwrap(); + >::get(&addr).unwrap().get_alive().unwrap(); // make the contract ripe for eviction initialize_block(5); @@ -2648,7 +2424,7 @@ fn deletion_queue_full() { ); // Contract should be alive because removal failed - >::get(&addr).unwrap().get_alive().unwrap(); + 
>::get(&addr).unwrap().get_alive().unwrap(); }); } @@ -2672,8 +2448,7 @@ fn not_deployed_if_endowment_too_low_for_first_rent() { 30_000, GAS_LIMIT, wasm, - (BalanceOf::::from(first_rent) - BalanceOf::::from(1u32)) - .encode(), // rent allowance + (BalanceOf::::from(first_rent) - BalanceOf::::from(1u32)).encode(), // rent allowance vec![], ), Error::::NewContractNotFunded, @@ -2697,7 +2472,7 @@ fn surcharge_reward_is_capped() { vec![], )); let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let contract = >::get(&addr).unwrap().get_alive().unwrap(); + let contract = >::get(&addr).unwrap().get_alive().unwrap(); let balance = Balances::free_balance(&ALICE); let reward = ::SurchargeReward::get(); @@ -2768,13 +2543,7 @@ fn refcounter() { let addr2 = Contracts::contract_address(&ALICE, &code_hash, &[2]); // Terminating one contract should decrement the refcount - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr0, - 0, - GAS_LIMIT, - vec![], - )); + assert_ok!(Contracts::call(Origin::signed(ALICE), addr0, 0, GAS_LIMIT, vec![],)); assert_refcount!(code_hash, 2); // make remaining contracts eligible for eviction @@ -2819,24 +2588,10 @@ fn reinstrument_does_charge() { // Call the contract two times without reinstrument - let result0 = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - zero.clone(), - false, - ); + let result0 = Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, zero.clone(), false); assert!(result0.result.unwrap().is_success()); - let result1 = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - zero.clone(), - false, - ); + let result1 = Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, zero.clone(), false); assert!(result1.result.unwrap().is_success()); // They should match because both where called with the same schedule. 
@@ -2849,14 +2604,7 @@ fn reinstrument_does_charge() { }); // This call should trigger reinstrumentation - let result2 = Contracts::bare_call( - ALICE, - addr.clone(), - 0, - GAS_LIMIT, - zero.clone(), - false, - ); + let result2 = Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, zero.clone(), false); assert!(result2.result.unwrap().is_success()); assert!(result2.gas_consumed > result1.gas_consumed); assert_eq!( @@ -2873,25 +2621,16 @@ fn debug_message_works() { ExtBuilder::default().existential_deposit(50).build().execute_with(|| { let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - 30_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - ), - ); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let result = Contracts::bare_call( - ALICE, - addr, - 0, + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, GAS_LIMIT, + wasm, vec![], - true, - ); + vec![], + ),); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + let result = Contracts::bare_call(ALICE, addr, 0, GAS_LIMIT, vec![], true); assert_matches!(result.result, Ok(_)); assert_eq!(std::str::from_utf8(&result.debug_message).unwrap(), "Hello World!"); @@ -2905,35 +2644,20 @@ fn debug_message_logging_disabled() { ExtBuilder::default().existential_deposit(50).build().execute_with(|| { let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - 30_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - ), - ); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - // disable logging by passing `false` - let result = Contracts::bare_call( - ALICE, - addr.clone(), - 0, + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, GAS_LIMIT, + wasm, vec![], - false, - ); + vec![], + ),); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + // disable logging by 
passing `false` + let result = Contracts::bare_call(ALICE, addr.clone(), 0, GAS_LIMIT, vec![], false); assert_matches!(result.result, Ok(_)); // the dispatchables always run without debugging - assert_ok!(Contracts::call( - Origin::signed(ALICE), - addr, - 0, - GAS_LIMIT, - vec![], - )); + assert_ok!(Contracts::call(Origin::signed(ALICE), addr, 0, GAS_LIMIT, vec![],)); assert!(result.debug_message.is_empty()); }); } @@ -2945,25 +2669,16 @@ fn debug_message_invalid_utf8() { ExtBuilder::default().existential_deposit(50).build().execute_with(|| { let _ = Balances::deposit_creating(&ALICE, 1_000_000); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - 30_000, - GAS_LIMIT, - wasm, - vec![], - vec![], - ), - ); - let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); - let result = Contracts::bare_call( - ALICE, - addr, - 0, + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + 30_000, GAS_LIMIT, + wasm, vec![], - true, - ); + vec![], + ),); + let addr = Contracts::contract_address(&ALICE, &code_hash, &[]); + let result = Contracts::bare_call(ALICE, addr, 0, GAS_LIMIT, vec![], true); assert_err!(result.result, >::DebugMessageInvalidUTF8); }); } @@ -2977,28 +2692,24 @@ fn gas_estimation_nested_call_fixed_limit() { let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); let _ = Balances::deposit_creating(&CHARLIE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - caller_code, - vec![], - vec![0], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + caller_code, + vec![], + vec![0], + ),); let addr_caller = Contracts::contract_address(&ALICE, &caller_hash, &[0]); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - callee_code, - vec![], - vec![1], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + 
Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + callee_code, + vec![], + vec![1], + ),); let addr_callee = Contracts::contract_address(&ALICE, &callee_hash, &[1]); let input: Vec = AsRef::<[u8]>::as_ref(&addr_callee) @@ -3008,27 +2719,16 @@ fn gas_estimation_nested_call_fixed_limit() { .collect(); // Call in order to determine the gas that is required for this call - let result = Contracts::bare_call( - ALICE, - addr_caller.clone(), - 0, - GAS_LIMIT, - input.clone(), - false, - ); + let result = + Contracts::bare_call(ALICE, addr_caller.clone(), 0, GAS_LIMIT, input.clone(), false); assert_ok!(&result.result); assert!(result.gas_required > result.gas_consumed); // Make the same call using the estimated gas. Should succeed. - assert_ok!(Contracts::bare_call( - ALICE, - addr_caller, - 0, - result.gas_required, - input, - false, - ).result); + assert_ok!( + Contracts::bare_call(ALICE, addr_caller, 0, result.gas_required, input, false,).result + ); }); } @@ -3042,53 +2742,39 @@ fn gas_estimation_call_runtime() { let _ = Balances::deposit_creating(&ALICE, 1000 * subsistence); let _ = Balances::deposit_creating(&CHARLIE, 1000 * subsistence); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - caller_code, - vec![], - vec![0], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + caller_code, + vec![], + vec![0], + ),); let addr_caller = Contracts::contract_address(&ALICE, &caller_hash, &[0]); - assert_ok!( - Contracts::instantiate_with_code( - Origin::signed(ALICE), - subsistence * 100, - GAS_LIMIT, - callee_code, - vec![], - vec![1], - ), - ); + assert_ok!(Contracts::instantiate_with_code( + Origin::signed(ALICE), + subsistence * 100, + GAS_LIMIT, + callee_code, + vec![], + vec![1], + ),); let addr_callee = Contracts::contract_address(&ALICE, &callee_hash, &[1]); // Call something trivial with a huge gas limit so that we can observe the 
effects // of pre-charging. This should create a difference between consumed and required. let call = Call::Contracts(crate::Call::call(addr_callee, 0, GAS_LIMIT / 3, vec![])); - let result = Contracts::bare_call( - ALICE, - addr_caller.clone(), - 0, - GAS_LIMIT, - call.encode(), - false, - ); + let result = + Contracts::bare_call(ALICE, addr_caller.clone(), 0, GAS_LIMIT, call.encode(), false); assert_ok!(&result.result); assert!(result.gas_required > result.gas_consumed); // Make the same call using the required gas. Should succeed. - assert_ok!(Contracts::bare_call( - ALICE, - addr_caller, - 0, - result.gas_required, - call.encode(), - false, - ).result); + assert_ok!( + Contracts::bare_call(ALICE, addr_caller, 0, result.gas_required, call.encode(), false,) + .result + ); }); } diff --git a/frame/contracts/src/wasm/code_cache.rs b/frame/contracts/src/wasm/code_cache.rs index a2aa2b55e1657..06329a7e81ad9 100644 --- a/frame/contracts/src/wasm/code_cache.rs +++ b/frame/contracts/src/wasm/code_cache.rs @@ -27,16 +27,17 @@ //! this guarantees that every instrumented contract code in cache cannot have the version equal to the current one. //! Thus, before executing a contract it should be reinstrument with new schedule. +#[cfg(feature = "runtime-benchmarks")] +pub use self::private::reinstrument; use crate::{ - CodeHash, CodeStorage, PristineCode, Schedule, Config, Error, Weight, - wasm::{prepare, PrefabWasmModule}, Pallet as Contracts, Event, gas::{GasMeter, Token}, + wasm::{prepare, PrefabWasmModule}, weights::WeightInfo, + CodeHash, CodeStorage, Config, Error, Event, Pallet as Contracts, PristineCode, Schedule, + Weight, }; -use sp_core::crypto::UncheckedFrom; use frame_support::dispatch::DispatchError; -#[cfg(feature = "runtime-benchmarks")] -pub use self::private::reinstrument as reinstrument; +use sp_core::crypto::UncheckedFrom; /// Put the instrumented module in storage. 
/// @@ -44,7 +45,7 @@ pub use self::private::reinstrument as reinstrument; /// under the specified `code_hash`. pub fn store(mut prefab_module: PrefabWasmModule) where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { let code_hash = sp_std::mem::take(&mut prefab_module.code_hash); @@ -53,14 +54,12 @@ where if let Some(code) = prefab_module.original_code.take() { >::insert(&code_hash, code); } - >::mutate(&code_hash, |existing| { - match existing { - Some(module) => increment_64(&mut module.refcount), - None => { - *existing = Some(prefab_module); - Contracts::::deposit_event(Event::CodeStored(code_hash)) - } - } + >::mutate(&code_hash, |existing| match existing { + Some(module) => increment_64(&mut module.refcount), + None => { + *existing = Some(prefab_module); + Contracts::::deposit_event(Event::CodeStored(code_hash)) + }, }); } @@ -69,7 +68,7 @@ where /// Removes the code instead of storing it when the refcount drops to zero. pub fn store_decremented(mut prefab_module: PrefabWasmModule) where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { prefab_module.refcount = prefab_module.refcount.saturating_sub(1); if prefab_module.refcount > 0 { @@ -81,10 +80,12 @@ where } /// Increment the refcount of a code in-storage by one. -pub fn increment_refcount(code_hash: CodeHash, gas_meter: &mut GasMeter) - -> Result<(), DispatchError> +pub fn increment_refcount( + code_hash: CodeHash, + gas_meter: &mut GasMeter, +) -> Result<(), DispatchError> where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { gas_meter.charge(CodeToken::UpdateRefcount(estimate_code_size::(&code_hash)?))?; >::mutate(code_hash, |existing| { @@ -98,10 +99,12 @@ where } /// Decrement the refcount of a code in-storage by one and remove the code when it drops to zero. 
-pub fn decrement_refcount(code_hash: CodeHash, gas_meter: &mut GasMeter) - -> Result<(), DispatchError> +pub fn decrement_refcount( + code_hash: CodeHash, + gas_meter: &mut GasMeter, +) -> Result<(), DispatchError> where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { if let Ok(len) = estimate_code_size::(&code_hash) { gas_meter.charge(CodeToken::UpdateRefcount(len))?; @@ -133,7 +136,7 @@ pub fn load( mut reinstrument: Option<(&Schedule, &mut GasMeter)>, ) -> Result, DispatchError> where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { // The reinstrument case coincides with the cases where we need to charge extra // based upon the code size: On-chain execution. @@ -141,8 +144,8 @@ where gas_meter.charge(CodeToken::Load(estimate_code_size::(&code_hash)?))?; } - let mut prefab_module = >::get(code_hash) - .ok_or_else(|| Error::::CodeNotFound)?; + let mut prefab_module = + >::get(code_hash).ok_or_else(|| Error::::CodeNotFound)?; prefab_module.code_hash = code_hash; if let Some((schedule, gas_meter)) = reinstrument { @@ -165,7 +168,7 @@ mod private { schedule: &Schedule, ) -> Result<(), DispatchError> where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { let original_code = >::get(&prefab_module.code_hash) .ok_or_else(|| Error::::CodeNotFound)?; @@ -179,7 +182,7 @@ mod private { /// Finish removal of a code by deleting the pristine code and emitting an event. fn finish_removal(code_hash: CodeHash) where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { >::remove(code_hash); Contracts::::deposit_event(Event::CodeRemoved(code_hash)) @@ -190,13 +193,15 @@ where /// We try hard to be infallible here because otherwise more storage transactions would be /// necessary to account for failures in storing code for an already instantiated contract. 
fn increment_64(refcount: &mut u64) { - *refcount = refcount.checked_add(1).expect(" + *refcount = refcount.checked_add(1).expect( + " refcount is 64bit. Generating this overflow would require to store _at least_ 18 exabyte of data assuming that a contract consumes only one byte of data. Any node would run out of storage space before hitting this overflow. qed - "); + ", + ); } /// Get the size of the instrumented code stored at `code_hash` without loading it. @@ -206,7 +211,7 @@ fn increment_64(refcount: &mut u64) { /// compared to the code size. Additionally, charging too much weight is completely safe. fn estimate_code_size(code_hash: &CodeHash) -> Result where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { let key = >::hashed_key_for(code_hash); let mut data = [0u8; 0]; @@ -229,7 +234,7 @@ enum CodeToken { impl Token for CodeToken where T: Config, - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { fn weight(&self) -> Weight { use self::CodeToken::*; @@ -240,9 +245,10 @@ where // the contract. match *self { Instrument(len) => T::WeightInfo::instrument(len / 1024), - Load(len) => T::WeightInfo::code_load(len / 1024).saturating_sub(T::WeightInfo::code_load(0)), - UpdateRefcount(len) => - T::WeightInfo::code_refcount(len / 1024).saturating_sub(T::WeightInfo::code_refcount(0)), + Load(len) => + T::WeightInfo::code_load(len / 1024).saturating_sub(T::WeightInfo::code_load(0)), + UpdateRefcount(len) => T::WeightInfo::code_refcount(len / 1024) + .saturating_sub(T::WeightInfo::code_refcount(0)), } } } diff --git a/frame/contracts/src/wasm/env_def/macros.rs b/frame/contracts/src/wasm/env_def/macros.rs index b7358f6aa2345..8d316794c6396 100644 --- a/frame/contracts/src/wasm/env_def/macros.rs +++ b/frame/contracts/src/wasm/env_def/macros.rs @@ -255,14 +255,14 @@ macro_rules! 
define_env { #[cfg(test)] mod tests { - use pwasm_utils::parity_wasm::elements::{FunctionType, ValueType}; - use sp_runtime::traits::Zero; - use sp_sandbox::{ReturnValue, Value}; use crate::{ - Weight, - wasm::{Runtime, runtime::TrapReason, tests::MockExt}, exec::Ext, + wasm::{runtime::TrapReason, tests::MockExt, Runtime}, + Weight, }; + use pwasm_utils::parity_wasm::elements::{FunctionType, ValueType}; + use sp_runtime::traits::Zero; + use sp_sandbox::{ReturnValue, Value}; struct TestRuntime { value: u32, @@ -333,16 +333,15 @@ mod tests { Err(TrapReason::Termination) } }); - let _f: fn(&mut Runtime, &[sp_sandbox::Value]) - -> Result = seal_gas::; + let _f: fn( + &mut Runtime, + &[sp_sandbox::Value], + ) -> Result = seal_gas::; } #[test] fn macro_gen_signature() { - assert_eq!( - gen_signature!((i32)), - FunctionType::new(vec![ValueType::I32], vec![]), - ); + assert_eq!(gen_signature!((i32)), FunctionType::new(vec![ValueType::I32], vec![]),); assert_eq!( gen_signature!( (i32, u32) -> u32 ), @@ -387,11 +386,11 @@ mod tests { }, ); - assert!( - Env::can_satisfy(b"seal0", b"seal_gas",&FunctionType::new(vec![ValueType::I32], vec![])) - ); - assert!( - !Env::can_satisfy(b"seal0", b"not_exists", &FunctionType::new(vec![], vec![])) - ); + assert!(Env::can_satisfy( + b"seal0", + b"seal_gas", + &FunctionType::new(vec![ValueType::I32], vec![]) + )); + assert!(!Env::can_satisfy(b"seal0", b"not_exists", &FunctionType::new(vec![], vec![]))); } } diff --git a/frame/contracts/src/wasm/env_def/mod.rs b/frame/contracts/src/wasm/env_def/mod.rs index 5855befd34cb2..6a55677f69a01 100644 --- a/frame/contracts/src/wasm/env_def/mod.rs +++ b/frame/contracts/src/wasm/env_def/mod.rs @@ -18,8 +18,8 @@ use super::Runtime; use crate::exec::Ext; -use sp_sandbox::Value; use pwasm_utils::parity_wasm::elements::{FunctionType, ValueType}; +use sp_sandbox::Value; #[macro_use] pub mod macros; @@ -67,11 +67,10 @@ impl ConvertibleToWasm for u64 { } } -pub type HostFunc = - fn( - &mut Runtime, - 
&[sp_sandbox::Value] - ) -> Result; +pub type HostFunc = fn( + &mut Runtime, + &[sp_sandbox::Value], +) -> Result; pub trait FunctionImplProvider { fn impls)>(f: &mut F); diff --git a/frame/contracts/src/wasm/mod.rs b/frame/contracts/src/wasm/mod.rs index 16cecf0c1d13f..f31265c5587f8 100644 --- a/frame/contracts/src/wasm/mod.rs +++ b/frame/contracts/src/wasm/mod.rs @@ -24,19 +24,19 @@ mod code_cache; mod prepare; mod runtime; +#[cfg(feature = "runtime-benchmarks")] +pub use self::code_cache::reinstrument; +pub use self::runtime::{ReturnCode, Runtime, RuntimeCosts}; use crate::{ - CodeHash, Schedule, Config, - wasm::env_def::FunctionImplProvider, - exec::{Ext, Executable, ExportedFunction, ExecResult}, + exec::{ExecResult, Executable, ExportedFunction, Ext}, gas::GasMeter, + wasm::env_def::FunctionImplProvider, + CodeHash, Config, Schedule, }; -use sp_std::prelude::*; -use sp_core::crypto::UncheckedFrom; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use frame_support::dispatch::DispatchError; -pub use self::runtime::{ReturnCode, Runtime, RuntimeCosts}; -#[cfg(feature = "runtime-benchmarks")] -pub use self::code_cache::reinstrument; +use sp_core::crypto::UncheckedFrom; +use sp_std::prelude::*; #[cfg(test)] pub use tests::MockExt; @@ -109,12 +109,12 @@ impl ExportedFunction { impl PrefabWasmModule where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { /// Create the module by checking and instrumenting `original_code`. 
pub fn from_code( original_code: Vec, - schedule: &Schedule + schedule: &Schedule, ) -> Result { prepare::prepare_contract(original_code, schedule).map_err(Into::into) } @@ -128,7 +128,7 @@ where #[cfg(feature = "runtime-benchmarks")] pub fn store_code_unchecked( original_code: Vec, - schedule: &Schedule + schedule: &Schedule, ) -> Result<(), DispatchError> { let executable = prepare::benchmarking::prepare_contract(original_code, schedule) .map_err::(Into::into)?; @@ -151,7 +151,7 @@ where impl Executable for PrefabWasmModule where - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { fn from_storage( code_hash: CodeHash, @@ -169,15 +169,14 @@ where code_cache::store_decremented(self); } - fn add_user(code_hash: CodeHash, gas_meter: &mut GasMeter) - -> Result<(), DispatchError> - { + fn add_user(code_hash: CodeHash, gas_meter: &mut GasMeter) -> Result<(), DispatchError> { code_cache::increment_refcount::(code_hash, gas_meter) } - fn remove_user(code_hash: CodeHash, gas_meter: &mut GasMeter) - -> Result<(), DispatchError> - { + fn remove_user( + code_hash: CodeHash, + gas_meter: &mut GasMeter, + ) -> Result<(), DispatchError> { code_cache::decrement_refcount::(code_hash, gas_meter) } @@ -188,16 +187,15 @@ where input_data: Vec, ) -> ExecResult { let memory = - sp_sandbox::Memory::new(self.initial, Some(self.maximum)) - .unwrap_or_else(|_| { + sp_sandbox::Memory::new(self.initial, Some(self.maximum)).unwrap_or_else(|_| { // unlike `.expect`, explicit panic preserves the source location. // Needed as we can't use `RUST_BACKTRACE` in here. 
- panic!( - "exec.prefab_module.initial can't be greater than exec.prefab_module.maximum; + panic!( + "exec.prefab_module.initial can't be greater than exec.prefab_module.maximum; thus Memory::new must not fail; qed" - ) - }); + ) + }); let mut imports = sp_sandbox::EnvironmentDefinitionBuilder::new(); imports.add_memory(self::prepare::IMPORT_MODULE_MEMORY, "memory", memory.clone()); @@ -205,11 +203,7 @@ where imports.add_host_func(module, name, func_ptr); }); - let mut runtime = Runtime::new( - ext, - input_data, - memory, - ); + let mut runtime = Runtime::new(ext, input_data, memory); // We store before executing so that the code hash is available in the constructor. let code = self.code.clone(); @@ -246,31 +240,27 @@ where mod tests { use super::*; use crate::{ - CodeHash, BalanceOf, Error, Pallet as Contracts, exec::{ - Ext, StorageKey, AccountIdOf, Executable, SeedOf, BlockNumberOf, - RentParams, ExecError, ErrorOrigin, + AccountIdOf, BlockNumberOf, ErrorOrigin, ExecError, Executable, Ext, RentParams, + SeedOf, StorageKey, }, gas::GasMeter, rent::RentStatus, - tests::{Test, Call, ALICE, BOB}, + tests::{Call, Test, ALICE, BOB}, + BalanceOf, CodeHash, Error, Pallet as Contracts, }; - use std::{ - borrow::BorrowMut, - cell::RefCell, - collections::HashMap, - }; - use sp_core::{Bytes, H256}; - use hex_literal::hex; - use sp_runtime::DispatchError; + use assert_matches::assert_matches; use frame_support::{ assert_ok, dispatch::{DispatchResult, DispatchResultWithPostInfo}, weights::Weight, }; - use assert_matches::assert_matches; + use hex_literal::hex; use pallet_contracts_primitives::{ExecReturnValue, ReturnFlags}; use pretty_assertions::assert_eq; + use sp_core::{Bytes, H256}; + use sp_runtime::DispatchError; + use std::{borrow::BorrowMut, cell::RefCell, collections::HashMap}; #[derive(Debug, PartialEq, Eq)] struct RestoreEntry { @@ -361,12 +351,7 @@ mod tests { data: Vec, allows_reentry: bool, ) -> Result { - self.calls.push(CallEntry { - to, - value, - data, - 
allows_reentry, - }); + self.calls.push(CallEntry { to, value, data, allows_reentry }); Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: call_return_data() }) } fn instantiate( @@ -386,30 +371,15 @@ mod tests { }); Ok(( Contracts::::contract_address(&ALICE, &code_hash, salt), - ExecReturnValue { - flags: ReturnFlags::empty(), - data: Bytes(Vec::new()), - }, + ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes(Vec::new()) }, )) } - fn transfer( - &mut self, - to: &AccountIdOf, - value: u64, - ) -> Result<(), DispatchError> { - self.transfers.push(TransferEntry { - to: to.clone(), - value, - }); + fn transfer(&mut self, to: &AccountIdOf, value: u64) -> Result<(), DispatchError> { + self.transfers.push(TransferEntry { to: to.clone(), value }); Ok(()) } - fn terminate( - &mut self, - beneficiary: &AccountIdOf, - ) -> Result<(), DispatchError> { - self.terminations.push(TerminationEntry { - beneficiary: beneficiary.clone(), - }); + fn terminate(&mut self, beneficiary: &AccountIdOf) -> Result<(), DispatchError> { + self.terminations.push(TerminationEntry { beneficiary: beneficiary.clone() }); Ok(()) } fn restore_to( @@ -419,12 +389,7 @@ mod tests { rent_allowance: u64, delta: Vec, ) -> Result<(), DispatchError> { - self.restores.push(RestoreEntry { - dest, - code_hash, - rent_allowance, - delta, - }); + self.restores.push(RestoreEntry { dest, code_hash, rent_allowance, delta }); Ok(()) } fn get_storage(&mut self, key: &StorageKey) -> Option> { @@ -467,8 +432,12 @@ mod tests { fn rent_allowance(&mut self) -> u64 { self.rent_allowance } - fn block_number(&self) -> u64 { 121 } - fn max_value_size(&self) -> u32 { 16_384 } + fn block_number(&self) -> u64 { + 121 + } + fn max_value_size(&self) -> u32 { + 16_384 + } fn get_weight_price(&self, weight: Weight) -> BalanceOf { BalanceOf::::from(1312_u32).saturating_mul(weight.into()) } @@ -494,16 +463,11 @@ mod tests { } } - fn execute>( - wat: &str, - input_data: Vec, - mut ext: E, - ) -> ExecResult - { + fn 
execute>(wat: &str, input_data: Vec, mut ext: E) -> ExecResult { let wasm = wat::parse_str(wat).unwrap(); let schedule = crate::Schedule::default(); - let executable = PrefabWasmModule::<::T>::from_code(wasm, &schedule) - .unwrap(); + let executable = + PrefabWasmModule::<::T>::from_code(wasm, &schedule).unwrap(); executable.execute(ext.borrow_mut(), &ExportedFunction::Call, input_data) } @@ -544,19 +508,9 @@ mod tests { #[test] fn contract_transfer() { let mut mock_ext = MockExt::default(); - assert_ok!(execute( - CODE_TRANSFER, - vec![], - &mut mock_ext, - )); + assert_ok!(execute(CODE_TRANSFER, vec![], &mut mock_ext,)); - assert_eq!( - &mock_ext.transfers, - &[TransferEntry { - to: ALICE, - value: 153, - }] - ); + assert_eq!(&mock_ext.transfers, &[TransferEntry { to: ALICE, value: 153 }]); } const CODE_CALL: &str = r#" @@ -608,20 +562,11 @@ mod tests { #[test] fn contract_call() { let mut mock_ext = MockExt::default(); - assert_ok!(execute( - CODE_CALL, - vec![], - &mut mock_ext, - )); + assert_ok!(execute(CODE_CALL, vec![], &mut mock_ext,)); assert_eq!( &mock_ext.calls, - &[CallEntry { - to: ALICE, - value: 6, - data: vec![1, 2, 3, 4], - allows_reentry: true, - }] + &[CallEntry { to: ALICE, value: 6, data: vec![1, 2, 3, 4], allows_reentry: true }] ); } @@ -676,12 +621,7 @@ mod tests { assert_eq!( &mock_ext.calls, - &[CallEntry { - to: ALICE, - value: 0x2a, - data: input, - allows_reentry: false, - }] + &[CallEntry { to: ALICE, value: 0x2a, data: input, allows_reentry: false }] ); } @@ -737,12 +677,7 @@ mod tests { assert_eq!(result.data.0, input); assert_eq!( &mock_ext.calls, - &[CallEntry { - to: ALICE, - value: 0x2a, - data: input, - allows_reentry: true, - }] + &[CallEntry { to: ALICE, value: 0x2a, data: input, allows_reentry: true }] ); } @@ -790,12 +725,7 @@ mod tests { assert_eq!(result.data, call_return_data()); assert_eq!( &mock_ext.calls, - &[CallEntry { - to: ALICE, - value: 0x2a, - data: input, - allows_reentry: false, - }] + &[CallEntry { to: ALICE, 
value: 0x2a, data: input, allows_reentry: false }] ); } @@ -858,11 +788,7 @@ mod tests { #[test] fn contract_instantiate() { let mut mock_ext = MockExt::default(); - assert_ok!(execute( - CODE_INSTANTIATE, - vec![], - &mut mock_ext, - )); + assert_ok!(execute(CODE_INSTANTIATE, vec![], &mut mock_ext,)); assert_matches!( &mock_ext.instantiates[..], @@ -906,18 +832,9 @@ mod tests { #[test] fn contract_terminate() { let mut mock_ext = MockExt::default(); - execute( - CODE_TERMINATE, - vec![], - &mut mock_ext, - ).unwrap(); + execute(CODE_TERMINATE, vec![], &mut mock_ext).unwrap(); - assert_eq!( - &mock_ext.terminations, - &[TerminationEntry { - beneficiary: ALICE, - }] - ); + assert_eq!(&mock_ext.terminations, &[TerminationEntry { beneficiary: ALICE }]); } const CODE_TRANSFER_LIMITED_GAS: &str = r#" @@ -968,20 +885,11 @@ mod tests { #[test] fn contract_call_limited_gas() { let mut mock_ext = MockExt::default(); - assert_ok!(execute( - &CODE_TRANSFER_LIMITED_GAS, - vec![], - &mut mock_ext, - )); + assert_ok!(execute(&CODE_TRANSFER_LIMITED_GAS, vec![], &mut mock_ext,)); assert_eq!( &mock_ext.calls, - &[CallEntry { - to: ALICE, - value: 6, - data: vec![1, 2, 3, 4], - allows_reentry: true, - }] + &[CallEntry { to: ALICE, value: 6, data: vec![1, 2, 3, 4], allows_reentry: true }] ); } @@ -1052,20 +960,14 @@ mod tests { #[test] fn get_storage_puts_data_into_buf() { let mut mock_ext = MockExt::default(); - mock_ext - .storage - .insert([0x11; 32], [0x22; 32].to_vec()); + mock_ext.storage.insert([0x11; 32], [0x22; 32].to_vec()); - let output = execute( - CODE_GET_STORAGE, - vec![], - mock_ext, - ).unwrap(); - - assert_eq!(output, ExecReturnValue { - flags: ReturnFlags::empty(), - data: Bytes([0x22; 32].to_vec()) - }); + let output = execute(CODE_GET_STORAGE, vec![], mock_ext).unwrap(); + + assert_eq!( + output, + ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes([0x22; 32].to_vec()) } + ); } /// calls `seal_caller` and compares the result with the constant 42. 
@@ -1113,11 +1015,7 @@ mod tests { #[test] fn caller() { - assert_ok!(execute( - CODE_CALLER, - vec![], - MockExt::default(), - )); + assert_ok!(execute(CODE_CALLER, vec![], MockExt::default(),)); } /// calls `seal_address` and compares the result with the constant 69. @@ -1165,11 +1063,7 @@ mod tests { #[test] fn address() { - assert_ok!(execute( - CODE_ADDRESS, - vec![], - MockExt::default(), - )); + assert_ok!(execute(CODE_ADDRESS, vec![], MockExt::default(),)); } const CODE_BALANCE: &str = r#" @@ -1215,11 +1109,7 @@ mod tests { #[test] fn balance() { - assert_ok!(execute( - CODE_BALANCE, - vec![], - MockExt::default(), - )); + assert_ok!(execute(CODE_BALANCE, vec![], MockExt::default(),)); } const CODE_GAS_PRICE: &str = r#" @@ -1265,11 +1155,7 @@ mod tests { #[test] fn gas_price() { - assert_ok!(execute( - CODE_GAS_PRICE, - vec![], - MockExt::default(), - )); + assert_ok!(execute(CODE_GAS_PRICE, vec![], MockExt::default(),)); } const CODE_GAS_LEFT: &str = r#" @@ -1316,11 +1202,7 @@ mod tests { let mut ext = MockExt::default(); let gas_limit = ext.gas_meter.gas_left(); - let output = execute( - CODE_GAS_LEFT, - vec![], - &mut ext, - ).unwrap(); + let output = execute(CODE_GAS_LEFT, vec![], &mut ext).unwrap(); let gas_left = Weight::decode(&mut &*output.data).unwrap(); let actual_left = ext.gas_meter.gas_left(); @@ -1371,11 +1253,7 @@ mod tests { #[test] fn value_transferred() { - assert_ok!(execute( - CODE_VALUE_TRANSFERRED, - vec![], - MockExt::default(), - )); + assert_ok!(execute(CODE_VALUE_TRANSFERRED, vec![], MockExt::default(),)); } const CODE_RETURN_FROM_START_FN: &str = r#" @@ -1404,18 +1282,11 @@ mod tests { #[test] fn return_from_start_fn() { - let output = execute( - CODE_RETURN_FROM_START_FN, - vec![], - MockExt::default(), - ).unwrap(); + let output = execute(CODE_RETURN_FROM_START_FN, vec![], MockExt::default()).unwrap(); assert_eq!( output, - ExecReturnValue { - flags: ReturnFlags::empty(), - data: Bytes(vec![1, 2, 3, 4]) - } + ExecReturnValue { 
flags: ReturnFlags::empty(), data: Bytes(vec![1, 2, 3, 4]) } ); } @@ -1462,11 +1333,7 @@ mod tests { #[test] fn now() { - assert_ok!(execute( - CODE_TIMESTAMP_NOW, - vec![], - MockExt::default(), - )); + assert_ok!(execute(CODE_TIMESTAMP_NOW, vec![], MockExt::default(),)); } const CODE_MINIMUM_BALANCE: &str = r#" @@ -1511,11 +1378,7 @@ mod tests { #[test] fn minimum_balance() { - assert_ok!(execute( - CODE_MINIMUM_BALANCE, - vec![], - MockExt::default(), - )); + assert_ok!(execute(CODE_MINIMUM_BALANCE, vec![], MockExt::default(),)); } const CODE_TOMBSTONE_DEPOSIT: &str = r#" @@ -1560,11 +1423,7 @@ mod tests { #[test] fn tombstone_deposit() { - assert_ok!(execute( - CODE_TOMBSTONE_DEPOSIT, - vec![], - MockExt::default(), - )); + assert_ok!(execute(CODE_TOMBSTONE_DEPOSIT, vec![], MockExt::default(),)); } const CODE_RANDOM: &str = r#" @@ -1623,11 +1482,7 @@ mod tests { #[test] fn random() { - let output = execute( - CODE_RANDOM, - vec![], - MockExt::default(), - ).unwrap(); + let output = execute(CODE_RANDOM, vec![], MockExt::default()).unwrap(); // The mock ext just returns the same data that was passed as the subject. assert_eq!( @@ -1698,26 +1553,24 @@ mod tests { #[test] fn random_v1() { - let output = execute( - CODE_RANDOM_V1, - vec![], - MockExt::default(), - ).unwrap(); + let output = execute(CODE_RANDOM_V1, vec![], MockExt::default()).unwrap(); // The mock ext just returns the same data that was passed as the subject. 
assert_eq!( output, ExecReturnValue { flags: ReturnFlags::empty(), - data: Bytes(( + data: Bytes( + ( hex!("000102030405060708090A0B0C0D0E0F000102030405060708090A0B0C0D0E0F"), 42u64, - ).encode()), + ) + .encode() + ), }, ); } - const CODE_DEPOSIT_EVENT: &str = r#" (module (import "seal0" "seal_deposit_event" (func $seal_deposit_event (param i32 i32 i32 i32))) @@ -1744,16 +1597,15 @@ mod tests { #[test] fn deposit_event() { let mut mock_ext = MockExt::default(); - assert_ok!(execute( - CODE_DEPOSIT_EVENT, - vec![], - &mut mock_ext, - )); + assert_ok!(execute(CODE_DEPOSIT_EVENT, vec![], &mut mock_ext,)); - assert_eq!(mock_ext.events, vec![ - (vec![H256::repeat_byte(0x33)], - vec![0x00, 0x01, 0x2a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe5, 0x14, 0x00]) - ]); + assert_eq!( + mock_ext.events, + vec![( + vec![H256::repeat_byte(0x33)], + vec![0x00, 0x01, 0x2a, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe5, 0x14, 0x00] + )] + ); assert!(mock_ext.gas_meter.gas_left() > 0); } @@ -1789,11 +1641,7 @@ mod tests { #[test] fn deposit_event_max_topics() { assert_eq!( - execute( - CODE_DEPOSIT_EVENT_MAX_TOPICS, - vec![], - MockExt::default(), - ), + execute(CODE_DEPOSIT_EVENT_MAX_TOPICS, vec![], MockExt::default(),), Err(ExecError { error: Error::::TooManyTopics.into(), origin: ErrorOrigin::Caller, @@ -1831,11 +1679,7 @@ mod tests { #[test] fn deposit_event_duplicates() { assert_eq!( - execute( - CODE_DEPOSIT_EVENT_DUPLICATES, - vec![], - MockExt::default(), - ), + execute(CODE_DEPOSIT_EVENT_DUPLICATES, vec![], MockExt::default(),), Err(ExecError { error: Error::::DuplicateTopics.into(), origin: ErrorOrigin::Caller, @@ -1888,11 +1732,7 @@ mod tests { #[test] fn block_number() { - let _ = execute( - CODE_BLOCK_NUMBER, - vec![], - MockExt::default(), - ).unwrap(); + let _ = execute(CODE_BLOCK_NUMBER, vec![], MockExt::default()).unwrap(); } const CODE_RETURN_WITH_DATA: &str = r#" @@ -1933,27 +1773,32 @@ mod tests { CODE_RETURN_WITH_DATA, hex!("00000000445566778899").to_vec(), 
MockExt::default(), - ).unwrap(); + ) + .unwrap(); - assert_eq!(output, ExecReturnValue { - flags: ReturnFlags::empty(), - data: Bytes(hex!("445566778899").to_vec()), - }); + assert_eq!( + output, + ExecReturnValue { + flags: ReturnFlags::empty(), + data: Bytes(hex!("445566778899").to_vec()), + } + ); assert!(output.is_success()); } #[test] fn return_with_revert_status() { - let output = execute( - CODE_RETURN_WITH_DATA, - hex!("010000005566778899").to_vec(), - MockExt::default(), - ).unwrap(); + let output = + execute(CODE_RETURN_WITH_DATA, hex!("010000005566778899").to_vec(), MockExt::default()) + .unwrap(); - assert_eq!(output, ExecReturnValue { - flags: ReturnFlags::REVERT, - data: Bytes(hex!("5566778899").to_vec()), - }); + assert_eq!( + output, + ExecReturnValue { + flags: ReturnFlags::REVERT, + data: Bytes(hex!("5566778899").to_vec()), + } + ); assert!(!output.is_success()); } @@ -1976,11 +1821,7 @@ mod tests { #[test] fn contract_out_of_bounds_access() { let mut mock_ext = MockExt::default(); - let result = execute( - CODE_OUT_OF_BOUNDS_ACCESS, - vec![], - &mut mock_ext, - ); + let result = execute(CODE_OUT_OF_BOUNDS_ACCESS, vec![], &mut mock_ext); assert_eq!( result, @@ -2010,11 +1851,7 @@ mod tests { #[test] fn contract_decode_length_ignored() { let mut mock_ext = MockExt::default(); - let result = execute( - CODE_DECODE_FAILURE, - vec![], - &mut mock_ext, - ); + let result = execute(CODE_DECODE_FAILURE, vec![], &mut mock_ext); // AccountID implements `MaxEncodeLen` and therefore the supplied length is // no longer needed nor used to determine how much is read from contract memory. 
assert_ok!(result); @@ -2052,17 +1889,11 @@ mod tests { (func (export "deploy")) ) "#; - let output = execute( - CODE_RENT_PARAMS, - vec![], - MockExt::default(), - ).unwrap(); + let output = execute(CODE_RENT_PARAMS, vec![], MockExt::default()).unwrap(); let rent_params = Bytes(>::default().encode()); assert_eq!(output, ExecReturnValue { flags: ReturnFlags::empty(), data: rent_params }); } - - #[test] #[cfg(feature = "unstable-interface")] fn rent_status_works() { @@ -2096,11 +1927,7 @@ mod tests { (func (export "deploy")) ) "#; - let output = execute( - CODE_RENT_STATUS, - vec![], - MockExt::default(), - ).unwrap(); + let output = execute(CODE_RENT_STATUS, vec![], MockExt::default()).unwrap(); let rent_status = Bytes(>::default().encode()); assert_eq!(output, ExecReturnValue { flags: ReturnFlags::empty(), data: rent_status }); } @@ -2127,11 +1954,7 @@ mod tests { ) "#; let mut ext = MockExt::default(); - execute( - CODE_DEBUG_MESSAGE, - vec![], - &mut ext, - ).unwrap(); + execute(CODE_DEBUG_MESSAGE, vec![], &mut ext).unwrap(); assert_eq!(std::str::from_utf8(&ext.debug_buffer).unwrap(), "Hello World!"); } @@ -2158,11 +1981,7 @@ mod tests { ) "#; let mut ext = MockExt::default(); - let result = execute( - CODE_DEBUG_MESSAGE_FAIL, - vec![], - &mut ext, - ); + let result = execute(CODE_DEBUG_MESSAGE_FAIL, vec![], &mut ext); assert_eq!( result, Err(ExecError { @@ -2214,15 +2033,8 @@ mod tests { use std::convert::TryInto; let call = Call::System(frame_system::Call::remark(b"Hello World".to_vec())); let mut ext = MockExt::default(); - let result = execute( - CODE_CALL_RUNTIME, - call.encode(), - &mut ext, - ).unwrap(); - assert_eq!( - *ext.runtime_calls.borrow(), - vec![call], - ); + let result = execute(CODE_CALL_RUNTIME, call.encode(), &mut ext).unwrap(); + assert_eq!(*ext.runtime_calls.borrow(), vec![call],); // 0 = ReturnCode::Success assert_eq!(u32::from_le_bytes(result.data.0.try_into().unwrap()), 0); } @@ -2231,11 +2043,7 @@ mod tests { #[cfg(feature = 
"unstable-interface")] fn call_runtime_panics_on_invalid_call() { let mut ext = MockExt::default(); - let result = execute( - CODE_CALL_RUNTIME, - vec![0x42], - &mut ext, - ); + let result = execute(CODE_CALL_RUNTIME, vec![0x42], &mut ext); assert_eq!( result, Err(ExecError { @@ -2243,9 +2051,6 @@ mod tests { origin: ErrorOrigin::Caller, }) ); - assert_eq!( - *ext.runtime_calls.borrow(), - vec![], - ); + assert_eq!(*ext.runtime_calls.borrow(), vec![],); } } diff --git a/frame/contracts/src/wasm/prepare.rs b/frame/contracts/src/wasm/prepare.rs index 2b52d9438904d..ca4a5f2dfd6f9 100644 --- a/frame/contracts/src/wasm/prepare.rs +++ b/frame/contracts/src/wasm/prepare.rs @@ -20,11 +20,11 @@ //! from a module. use crate::{ - Schedule, Config, chain_extension::ChainExtension, - wasm::{PrefabWasmModule, env_def::ImportSatisfyCheck}, + wasm::{env_def::ImportSatisfyCheck, PrefabWasmModule}, + Config, Schedule, }; -use pwasm_utils::parity_wasm::elements::{self, Internal, External, MemoryType, Type, ValueType}; +use pwasm_utils::parity_wasm::elements::{self, External, Internal, MemoryType, Type, ValueType}; use sp_runtime::traits::Hash; use sp_std::prelude::*; @@ -43,10 +43,7 @@ impl<'a, T: Config> ContractModule<'a, T> { /// /// Returns `Err` if the `original_code` couldn't be decoded or /// if it contains an invalid module. - fn new( - original_code: &[u8], - schedule: &'a Schedule, - ) -> Result { + fn new(original_code: &[u8], schedule: &'a Schedule) -> Result { use wasmi_validation::{validate_module, PlainValidator}; let module = @@ -57,10 +54,7 @@ impl<'a, T: Config> ContractModule<'a, T> { // Return a `ContractModule` instance with // __valid__ module. - Ok(ContractModule { - module, - schedule, - }) + Ok(ContractModule { module, schedule }) } /// Ensures that module doesn't declare internal memories. 
@@ -69,11 +63,8 @@ impl<'a, T: Config> ContractModule<'a, T> { /// Memory section contains declarations of internal linear memories, so if we find one /// we reject such a module. fn ensure_no_internal_memory(&self) -> Result<(), &'static str> { - if self.module - .memory_section() - .map_or(false, |ms| ms.entries().len() > 0) - { - return Err("module declares internal memory"); + if self.module.memory_section().map_or(false, |ms| ms.entries().len() > 0) { + return Err("module declares internal memory") } Ok(()) } @@ -84,7 +75,7 @@ impl<'a, T: Config> ContractModule<'a, T> { // In Wasm MVP spec, there may be at most one table declared. Double check this // explicitly just in case the Wasm version changes. if table_section.entries().len() > 1 { - return Err("multiple tables declared"); + return Err("multiple tables declared") } if let Some(table_type) = table_section.entries().first() { // Check the table's initial size as there is no instruction or environment function @@ -102,7 +93,7 @@ impl<'a, T: Config> ContractModule<'a, T> { let code_section = if let Some(type_section) = self.module.code_section() { type_section } else { - return Ok(()); + return Ok(()) }; for instr in code_section.bodies().iter().flat_map(|body| body.code().elements()) { use self::elements::Instruction::BrTable; @@ -131,7 +122,7 @@ impl<'a, T: Config> ContractModule<'a, T> { match global.global_type().content_type() { ValueType::F32 | ValueType::F64 => return Err("use of floating point type in globals is forbidden"), - _ => {} + _ => {}, } } } @@ -142,7 +133,7 @@ impl<'a, T: Config> ContractModule<'a, T> { match local.value_type() { ValueType::F32 | ValueType::F64 => return Err("use of floating point type in locals is forbidden"), - _ => {} + _ => {}, } } } @@ -156,11 +147,13 @@ impl<'a, T: Config> ContractModule<'a, T> { for value_type in func_type.params().iter().chain(return_type) { match value_type { ValueType::F32 | ValueType::F64 => - return Err("use of floating point type in function 
types is forbidden"), - _ => {} + return Err( + "use of floating point type in function types is forbidden", + ), + _ => {}, } } - } + }, } } } @@ -173,12 +166,12 @@ impl<'a, T: Config> ContractModule<'a, T> { let type_section = if let Some(type_section) = self.module.type_section() { type_section } else { - return Ok(()); + return Ok(()) }; for Type::Function(func) in type_section.types() { if func.params().len() > limit as usize { - return Err("Use of a function type with too many parameters."); + return Err("Use of a function type with too many parameters.") } } @@ -187,26 +180,18 @@ impl<'a, T: Config> ContractModule<'a, T> { fn inject_gas_metering(self) -> Result { let gas_rules = self.schedule.rules(&self.module); - let contract_module = pwasm_utils::inject_gas_counter( - self.module, - &gas_rules, - "seal0", - ).map_err(|_| "gas instrumentation failed")?; - Ok(ContractModule { - module: contract_module, - schedule: self.schedule, - }) + let contract_module = pwasm_utils::inject_gas_counter(self.module, &gas_rules, "seal0") + .map_err(|_| "gas instrumentation failed")?; + Ok(ContractModule { module: contract_module, schedule: self.schedule }) } fn inject_stack_height_metering(self) -> Result { - let contract_module = - pwasm_utils::stack_height - ::inject_limiter(self.module, self.schedule.limits.stack_height) - .map_err(|_| "stack height instrumentation failed")?; - Ok(ContractModule { - module: contract_module, - schedule: self.schedule, - }) + let contract_module = pwasm_utils::stack_height::inject_limiter( + self.module, + self.schedule.limits.stack_height, + ) + .map_err(|_| "stack height instrumentation failed")?; + Ok(ContractModule { module: contract_module, schedule: self.schedule }) } /// Check that the module has required exported functions. 
For now @@ -223,14 +208,8 @@ impl<'a, T: Config> ContractModule<'a, T> { let module = &self.module; let types = module.type_section().map(|ts| ts.types()).unwrap_or(&[]); - let export_entries = module - .export_section() - .map(|is| is.entries()) - .unwrap_or(&[]); - let func_entries = module - .function_section() - .map(|fs| fs.entries()) - .unwrap_or(&[]); + let export_entries = module.export_section().map(|is| is.entries()).unwrap_or(&[]); + let func_entries = module.function_section().map(|fs| fs.entries()).unwrap_or(&[]); // Function index space consists of imported function following by // declared functions. Calculate the total number of imported functions so @@ -240,11 +219,9 @@ impl<'a, T: Config> ContractModule<'a, T> { .map(|is| is.entries()) .unwrap_or(&[]) .iter() - .filter(|entry| { - match *entry.external() { - External::Function(_) => true, - _ => false, - } + .filter(|entry| match *entry.external() { + External::Function(_) => true, + _ => false, }) .count(); @@ -267,32 +244,32 @@ impl<'a, T: Config> ContractModule<'a, T> { Some(fn_idx) => fn_idx, None => { // Underflow here means fn_idx points to imported function which we don't allow! - return Err("entry point points to an imported function"); - } + return Err("entry point points to an imported function") + }, }; // Then check the signature. // Both "call" and "deploy" has a () -> () function type. // We still support () -> (i32) for backwards compatibility. - let func_ty_idx = func_entries.get(fn_idx as usize) + let func_ty_idx = func_entries + .get(fn_idx as usize) .ok_or_else(|| "export refers to non-existent function")? 
.type_ref(); let Type::Function(ref func_ty) = types .get(func_ty_idx as usize) .ok_or_else(|| "function has a non-existent type")?; - if !( - func_ty.params().is_empty() && - (func_ty.results().is_empty() || func_ty.results() == [ValueType::I32]) - ) { - return Err("entry point has wrong signature"); + if !(func_ty.params().is_empty() && + (func_ty.results().is_empty() || func_ty.results() == [ValueType::I32])) + { + return Err("entry point has wrong signature") } } if !deploy_found { - return Err("deploy function isn't exported"); + return Err("deploy function isn't exported") } if !call_found { - return Err("call function isn't exported"); + return Err("call function isn't exported") } Ok(()) @@ -306,16 +283,14 @@ impl<'a, T: Config> ContractModule<'a, T> { /// their signatures. /// - if there is a memory import, returns it's descriptor /// `import_fn_banlist`: list of function names that are disallowed to be imported - fn scan_imports(&self, import_fn_banlist: &[&[u8]]) - -> Result, &'static str> - { + fn scan_imports( + &self, + import_fn_banlist: &[&[u8]], + ) -> Result, &'static str> { let module = &self.module; let types = module.type_section().map(|ts| ts.types()).unwrap_or(&[]); - let import_entries = module - .import_section() - .map(|is| is.entries()) - .unwrap_or(&[]); + let import_entries = module.import_section().map(|is| is.entries()).unwrap_or(&[]); let mut imported_mem_type = None; @@ -326,7 +301,7 @@ impl<'a, T: Config> ContractModule<'a, T> { &External::Function(ref type_idx) => type_idx, &External::Memory(ref memory_type) => { if import.module() != IMPORT_MODULE_MEMORY { - return Err("Invalid module for imported memory"); + return Err("Invalid module for imported memory") } if import.field() != "memory" { return Err("Memory import must have the field name 'memory'") @@ -335,8 +310,8 @@ impl<'a, T: Config> ContractModule<'a, T> { return Err("Multiple memory imports defined") } imported_mem_type = Some(memory_type); - continue; - } + continue + 
}, }; let Type::Function(ref func_ty) = types @@ -346,29 +321,27 @@ impl<'a, T: Config> ContractModule<'a, T> { if !T::ChainExtension::enabled() && import.field().as_bytes() == b"seal_call_chain_extension" { - return Err("module uses chain extensions but chain extensions are disabled"); + return Err("module uses chain extensions but chain extensions are disabled") } - if import_fn_banlist.iter().any(|f| import.field().as_bytes() == *f) - || !C::can_satisfy( - import.module().as_bytes(), import.field().as_bytes(), func_ty, - ) + if import_fn_banlist.iter().any(|f| import.field().as_bytes() == *f) || + !C::can_satisfy(import.module().as_bytes(), import.field().as_bytes(), func_ty) { - return Err("module imports a non-existent function"); + return Err("module imports a non-existent function") } } Ok(imported_mem_type) } fn into_wasm_code(self) -> Result, &'static str> { - elements::serialize(self.module) - .map_err(|_| "error serializing instrumented module") + elements::serialize(self.module).map_err(|_| "error serializing instrumented module") } } -fn get_memory_limits(module: Option<&MemoryType>, schedule: &Schedule) - -> Result<(u32, u32), &'static str> -{ +fn get_memory_limits( + module: Option<&MemoryType>, + schedule: &Schedule, +) -> Result<(u32, u32), &'static str> { if let Some(memory_type) = module { // Inspect the module to extract the initial and maximum page count. 
let limits = memory_type.limits(); @@ -376,18 +349,18 @@ fn get_memory_limits(module: Option<&MemoryType>, schedule: &Schedule (initial, Some(maximum)) if initial > maximum => { return Err( "Requested initial number of pages should not exceed the requested maximum", - ); - } + ) + }, (_, Some(maximum)) if maximum > schedule.limits.memory_pages => { - return Err("Maximum number of pages should not exceed the configured maximum."); - } + return Err("Maximum number of pages should not exceed the configured maximum.") + }, (initial, Some(maximum)) => Ok((initial, maximum)), (_, None) => { // Maximum number of pages should be always declared. // This isn't a hard requirement and can be treated as a maximum set // to configured maximum. - return Err("Maximum number of pages should be always declared."); - } + return Err("Maximum number of pages should be always declared.") + }, } } else { // If none memory imported then just crate an empty placeholder. @@ -411,10 +384,8 @@ fn check_and_instrument( // We disallow importing `gas` function here since it is treated as implementation detail. let disallowed_imports = [b"gas".as_ref()]; - let memory_limits = get_memory_limits( - contract_module.scan_imports::(&disallowed_imports)?, - schedule - )?; + let memory_limits = + get_memory_limits(contract_module.scan_imports::(&disallowed_imports)?, schedule)?; let code = contract_module .inject_gas_metering()? @@ -428,10 +399,8 @@ fn do_preparation( original_code: Vec, schedule: &Schedule, ) -> Result, &'static str> { - let (code, (initial, maximum)) = check_and_instrument::( - original_code.as_ref(), - schedule, - )?; + let (code, (initial, maximum)) = + check_and_instrument::(original_code.as_ref(), schedule)?; Ok(PrefabWasmModule { instruction_weights_version: schedule.instruction_weights.version, initial, @@ -483,8 +452,7 @@ pub fn reinstrument_contract( /// in production code. 
#[cfg(feature = "runtime-benchmarks")] pub mod benchmarking { - use super::*; - use super::elements::FunctionType; + use super::{elements::FunctionType, *}; impl ImportSatisfyCheck for () { fn can_satisfy(_module: &[u8], _name: &[u8], _func_type: &FunctionType) -> bool { @@ -493,9 +461,10 @@ pub mod benchmarking { } /// Prepare function that neither checks nor instruments the passed in code. - pub fn prepare_contract(original_code: Vec, schedule: &Schedule) - -> Result, &'static str> - { + pub fn prepare_contract( + original_code: Vec, + schedule: &Schedule, + ) -> Result, &'static str> { let contract_module = ContractModule::new(&original_code, schedule)?; let memory_limits = get_memory_limits(contract_module.scan_imports::<()>(&[])?, schedule)?; Ok(PrefabWasmModule { @@ -566,7 +535,8 @@ mod tests { }; } - prepare_test!(no_floats, + prepare_test!( + no_floats, r#" (module (func (export "call") @@ -585,7 +555,8 @@ mod tests { mod functions { use super::*; - prepare_test!(param_number_valid, + prepare_test!( + param_number_valid, r#" (module (func (export "call")) @@ -596,7 +567,8 @@ mod tests { Ok(_) ); - prepare_test!(param_number_invalid, + prepare_test!( + param_number_invalid, r#" (module (func (export "call")) @@ -612,7 +584,8 @@ mod tests { mod globals { use super::*; - prepare_test!(global_number_valid, + prepare_test!( + global_number_valid, r#" (module (global i64 (i64.const 0)) @@ -625,7 +598,8 @@ mod tests { Ok(_) ); - prepare_test!(global_number_too_high, + prepare_test!( + global_number_too_high, r#" (module (global i64 (i64.const 0)) @@ -643,7 +617,8 @@ mod tests { mod memories { use super::*; - prepare_test!(memory_with_one_page, + prepare_test!( + memory_with_one_page, r#" (module (import "env" "memory" (memory 1 1)) @@ -655,7 +630,8 @@ mod tests { Ok(_) ); - prepare_test!(internal_memory_declaration, + prepare_test!( + internal_memory_declaration, r#" (module (memory 1 1) @@ -667,7 +643,8 @@ mod tests { Err("module declares internal memory") ); - 
prepare_test!(no_memory_import, + prepare_test!( + no_memory_import, r#" (module ;; no memory imported @@ -678,7 +655,8 @@ mod tests { Ok(_) ); - prepare_test!(initial_exceeds_maximum, + prepare_test!( + initial_exceeds_maximum, r#" (module (import "env" "memory" (memory 16 1)) @@ -690,7 +668,8 @@ mod tests { Err("Module is not valid") ); - prepare_test!(no_maximum, + prepare_test!( + no_maximum, r#" (module (import "env" "memory" (memory 1)) @@ -702,7 +681,8 @@ mod tests { Err("Maximum number of pages should be always declared.") ); - prepare_test!(requested_maximum_valid, + prepare_test!( + requested_maximum_valid, r#" (module (import "env" "memory" (memory 1 16)) @@ -714,7 +694,8 @@ mod tests { Ok(_) ); - prepare_test!(requested_maximum_exceeds_configured_maximum, + prepare_test!( + requested_maximum_exceeds_configured_maximum, r#" (module (import "env" "memory" (memory 1 17)) @@ -726,7 +707,8 @@ mod tests { Err("Maximum number of pages should not exceed the configured maximum.") ); - prepare_test!(field_name_not_memory, + prepare_test!( + field_name_not_memory, r#" (module (import "env" "forgetit" (memory 1 1)) @@ -738,7 +720,8 @@ mod tests { Err("Memory import must have the field name 'memory'") ); - prepare_test!(multiple_memory_imports, + prepare_test!( + multiple_memory_imports, r#" (module (import "env" "memory" (memory 1 1)) @@ -751,7 +734,8 @@ mod tests { Err("Module is not valid") ); - prepare_test!(table_import, + prepare_test!( + table_import, r#" (module (import "seal0" "table" (table 1 anyfunc)) @@ -763,7 +747,8 @@ mod tests { Err("Cannot import tables") ); - prepare_test!(global_import, + prepare_test!( + global_import, r#" (module (global $g (import "seal0" "global") i32) @@ -778,7 +763,8 @@ mod tests { mod tables { use super::*; - prepare_test!(no_tables, + prepare_test!( + no_tables, r#" (module (func (export "call")) @@ -788,7 +774,8 @@ mod tests { Ok(_) ); - prepare_test!(table_valid_size, + prepare_test!( + table_valid_size, r#" (module 
(table 3 funcref) @@ -800,7 +787,8 @@ mod tests { Ok(_) ); - prepare_test!(table_too_big, + prepare_test!( + table_too_big, r#" (module (table 4 funcref) @@ -811,7 +799,8 @@ mod tests { Err("table exceeds maximum size allowed") ); - prepare_test!(br_table_valid_size, + prepare_test!( + br_table_valid_size, r#" (module (func (export "call")) @@ -825,7 +814,8 @@ mod tests { Ok(_) ); - prepare_test!(br_table_too_big, + prepare_test!( + br_table_too_big, r#" (module (func (export "call")) @@ -842,7 +832,8 @@ mod tests { mod imports { use super::*; - prepare_test!(can_import_legit_function, + prepare_test!( + can_import_legit_function, r#" (module (import "seal0" "nop" (func (param i64))) @@ -856,7 +847,8 @@ mod tests { // even though gas is defined the contract can't import it since // it is an implementation defined. - prepare_test!(can_not_import_gas_function, + prepare_test!( + can_not_import_gas_function, r#" (module (import "seal0" "gas" (func (param i32))) @@ -869,7 +861,8 @@ mod tests { ); // memory is in "env" and not in "seal0" - prepare_test!(memory_not_in_seal0, + prepare_test!( + memory_not_in_seal0, r#" (module (import "seal0" "memory" (memory 1 1)) @@ -882,7 +875,8 @@ mod tests { ); // memory is in "env" and not in some arbitrary module - prepare_test!(memory_not_in_arbitrary_module, + prepare_test!( + memory_not_in_arbitrary_module, r#" (module (import "any_module" "memory" (memory 1 1)) @@ -894,7 +888,8 @@ mod tests { Err("Invalid module for imported memory") ); - prepare_test!(function_in_other_module_works, + prepare_test!( + function_in_other_module_works, r#" (module (import "seal1" "nop" (func (param i32))) @@ -907,7 +902,8 @@ mod tests { ); // wrong signature - prepare_test!(wrong_signature, + prepare_test!( + wrong_signature, r#" (module (import "seal0" "gas" (func (param i64))) @@ -919,7 +915,8 @@ mod tests { Err("module imports a non-existent function") ); - prepare_test!(unknown_func_name, + prepare_test!( + unknown_func_name, r#" (module 
(import "seal0" "unknown_func" (func)) @@ -935,7 +932,8 @@ mod tests { mod entrypoints { use super::*; - prepare_test!(it_works, + prepare_test!( + it_works, r#" (module (func (export "call")) @@ -945,7 +943,8 @@ mod tests { Ok(_) ); - prepare_test!(omit_deploy, + prepare_test!( + omit_deploy, r#" (module (func (export "call")) @@ -954,7 +953,8 @@ mod tests { Err("deploy function isn't exported") ); - prepare_test!(omit_call, + prepare_test!( + omit_call, r#" (module (func (export "deploy")) @@ -964,7 +964,8 @@ mod tests { ); // Try to use imported function as an entry point. - prepare_test!(try_sneak_export_as_entrypoint, + prepare_test!( + try_sneak_export_as_entrypoint, r#" (module (import "seal0" "panic" (func)) @@ -978,7 +979,8 @@ mod tests { ); // Try to use imported function as an entry point. - prepare_test!(try_sneak_export_as_global, + prepare_test!( + try_sneak_export_as_global, r#" (module (func (export "deploy")) @@ -988,7 +990,8 @@ mod tests { Err("expected a function") ); - prepare_test!(wrong_signature, + prepare_test!( + wrong_signature, r#" (module (func (export "deploy")) @@ -998,7 +1001,8 @@ mod tests { Err("entry point has wrong signature") ); - prepare_test!(unknown_exports, + prepare_test!( + unknown_exports, r#" (module (func (export "call")) @@ -1009,7 +1013,8 @@ mod tests { Err("unknown export: expecting only deploy and call functions") ); - prepare_test!(global_float, + prepare_test!( + global_float, r#" (module (global $x f32 (f32.const 0)) @@ -1020,7 +1025,8 @@ mod tests { Err("use of floating point type in globals is forbidden") ); - prepare_test!(local_float, + prepare_test!( + local_float, r#" (module (func $foo (local f32)) @@ -1031,7 +1037,8 @@ mod tests { Err("use of floating point type in locals is forbidden") ); - prepare_test!(param_float, + prepare_test!( + param_float, r#" (module (func $foo (param f32)) @@ -1042,7 +1049,8 @@ mod tests { Err("use of floating point type in function types is forbidden") ); - 
prepare_test!(result_float, + prepare_test!( + result_float, r#" (module (func $foo (result f32) (f32.const 0)) diff --git a/frame/contracts/src/wasm/runtime.rs b/frame/contracts/src/wasm/runtime.rs index 7b6004a84f06b..c04f25766dc71 100644 --- a/frame/contracts/src/wasm/runtime.rs +++ b/frame/contracts/src/wasm/runtime.rs @@ -18,25 +18,20 @@ //! Environment definition of the wasm smart-contract runtime. use crate::{ - Config, CodeHash, BalanceOf, Error, - exec::{Ext, StorageKey, TopicOf, ExecResult, ExecError}, - gas::{Token, ChargedAmount}, - wasm::env_def::ConvertibleToWasm, + exec::{ExecError, ExecResult, Ext, StorageKey, TopicOf}, + gas::{ChargedAmount, Token}, schedule::HostFnWeights, + wasm::env_def::ConvertibleToWasm, + BalanceOf, CodeHash, Config, Error, }; use bitflags::bitflags; -use pwasm_utils::parity_wasm::elements::ValueType; -use frame_support::{dispatch::DispatchError, ensure, weights::Weight}; -use sp_std::prelude::*; use codec::{Decode, DecodeAll, Encode, MaxEncodedLen}; -use sp_core::{Bytes, crypto::UncheckedFrom}; -use sp_io::hashing::{ - keccak_256, - blake2_256, - blake2_128, - sha2_256, -}; +use frame_support::{dispatch::DispatchError, ensure, weights::Weight}; use pallet_contracts_primitives::{ExecReturnValue, ReturnFlags}; +use pwasm_utils::parity_wasm::elements::ValueType; +use sp_core::{crypto::UncheckedFrom, Bytes}; +use sp_io::hashing::{blake2_128, blake2_256, keccak_256, sha2_256}; +use sp_std::prelude::*; /// Every error that can be returned to a contract when it calls any of the host functions. /// @@ -178,7 +173,7 @@ pub enum RuntimeCosts { /// Weight of calling `seal_random`. It includes the weight for copying the subject. Random, /// Weight of calling `seal_deposit_event` with the given number of topics and event size. - DepositEvent{num_topic: u32, len: u32}, + DepositEvent { num_topic: u32, len: u32 }, /// Weight of calling `seal_debug_message`. 
#[cfg(feature = "unstable-interface")] DebugMessage, @@ -203,7 +198,7 @@ pub enum RuntimeCosts { /// Weight of calling `seal_instantiate` for the given input and salt without output weight. /// This includes the transfer as an instantiate without a value will always be below /// the existential deposit and is disregarded as corner case. - InstantiateBase{input_data_len: u32, salt_len: u32}, + InstantiateBase { input_data_len: u32, salt_len: u32 }, /// Weight of output received through `seal_instantiate` for the given size. InstantiateCopyOut(u32), /// Weight of calling `seal_hash_sha_256` for the given input size. @@ -228,7 +223,7 @@ impl RuntimeCosts { fn token(&self, s: &HostFnWeights) -> RuntimeToken where T: Config, - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { use self::RuntimeCosts::*; let weight = match *self { @@ -246,40 +241,44 @@ impl RuntimeCosts { WeightToFee => s.weight_to_fee, InputBase => s.input, InputCopyOut(len) => s.input_per_byte.saturating_mul(len.into()), - Return(len) => s.r#return - .saturating_add(s.return_per_byte.saturating_mul(len.into())), + Return(len) => s.r#return.saturating_add(s.return_per_byte.saturating_mul(len.into())), Terminate => s.terminate, - RestoreTo(delta) => s.restore_to - .saturating_add(s.restore_to_per_delta.saturating_mul(delta.into())), + RestoreTo(delta) => + s.restore_to.saturating_add(s.restore_to_per_delta.saturating_mul(delta.into())), Random => s.random, - DepositEvent{num_topic, len} => s.deposit_event + DepositEvent { num_topic, len } => s + .deposit_event .saturating_add(s.deposit_event_per_topic.saturating_mul(num_topic.into())) .saturating_add(s.deposit_event_per_byte.saturating_mul(len.into())), #[cfg(feature = "unstable-interface")] DebugMessage => s.debug_message, SetRentAllowance => s.set_rent_allowance, - SetStorage(len) => s.set_storage - .saturating_add(s.set_storage_per_byte.saturating_mul(len.into())), + SetStorage(len) => + 
s.set_storage.saturating_add(s.set_storage_per_byte.saturating_mul(len.into())), ClearStorage => s.clear_storage, GetStorageBase => s.get_storage, GetStorageCopyOut(len) => s.get_storage_per_byte.saturating_mul(len.into()), Transfer => s.transfer, - CallBase(len) => s.call - .saturating_add(s.call_per_input_byte.saturating_mul(len.into())), + CallBase(len) => + s.call.saturating_add(s.call_per_input_byte.saturating_mul(len.into())), CallSurchargeTransfer => s.call_transfer_surcharge, CallCopyOut(len) => s.call_per_output_byte.saturating_mul(len.into()), - InstantiateBase{input_data_len, salt_len} => s.instantiate + InstantiateBase { input_data_len, salt_len } => s + .instantiate .saturating_add(s.instantiate_per_input_byte.saturating_mul(input_data_len.into())) .saturating_add(s.instantiate_per_salt_byte.saturating_mul(salt_len.into())), - InstantiateCopyOut(len) => s.instantiate_per_output_byte - .saturating_mul(len.into()), - HashSha256(len) => s.hash_sha2_256 + InstantiateCopyOut(len) => s.instantiate_per_output_byte.saturating_mul(len.into()), + HashSha256(len) => s + .hash_sha2_256 .saturating_add(s.hash_sha2_256_per_byte.saturating_mul(len.into())), - HashKeccak256(len) => s.hash_keccak_256 + HashKeccak256(len) => s + .hash_keccak_256 .saturating_add(s.hash_keccak_256_per_byte.saturating_mul(len.into())), - HashBlake256(len) => s.hash_blake2_256 + HashBlake256(len) => s + .hash_blake2_256 .saturating_add(s.hash_blake2_256_per_byte.saturating_mul(len.into())), - HashBlake128(len) => s.hash_blake2_128 + HashBlake128(len) => s + .hash_blake2_128 .saturating_add(s.hash_blake2_128_per_byte.saturating_mul(len.into())), ChainExtension(amount) => amount, #[cfg(feature = "unstable-interface")] @@ -306,7 +305,7 @@ struct RuntimeToken { impl Token for RuntimeToken where T: Config, - T::AccountId: UncheckedFrom + AsRef<[u8]> + T::AccountId: UncheckedFrom + AsRef<[u8]>, { fn weight(&self) -> Weight { self.weight @@ -373,19 +372,10 @@ impl<'a, E> Runtime<'a, E> where E: 
Ext + 'a, ::AccountId: - UncheckedFrom<::Hash> + AsRef<[u8]> + UncheckedFrom<::Hash> + AsRef<[u8]>, { - pub fn new( - ext: &'a mut E, - input_data: Vec, - memory: sp_sandbox::Memory, - ) -> Self { - Runtime { - ext, - input_data: Some(input_data), - memory, - trap_reason: None, - } + pub fn new(ext: &'a mut E, input_data: Vec, memory: sp_sandbox::Memory) -> Self { + Runtime { ext, input_data: Some(input_data), memory, trap_reason: None } } /// Converts the sandbox result and the runtime state into the execution outcome. @@ -401,27 +391,15 @@ where if let Some(trap_reason) = self.trap_reason { return match trap_reason { // The trap was the result of the execution `return` host function. - TrapReason::Return(ReturnData{ flags, data }) => { - let flags = ReturnFlags::from_bits(flags).ok_or_else(|| - "used reserved bit in return flags" - )?; - Ok(ExecReturnValue { - flags, - data: Bytes(data), - }) - }, - TrapReason::Termination => { - Ok(ExecReturnValue { - flags: ReturnFlags::empty(), - data: Bytes(Vec::new()), - }) - }, - TrapReason::Restoration => { - Ok(ExecReturnValue { - flags: ReturnFlags::empty(), - data: Bytes(Vec::new()), - }) + TrapReason::Return(ReturnData { flags, data }) => { + let flags = ReturnFlags::from_bits(flags) + .ok_or_else(|| "used reserved bit in return flags")?; + Ok(ExecReturnValue { flags, data: Bytes(data) }) }, + TrapReason::Termination => + Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes(Vec::new()) }), + TrapReason::Restoration => + Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes(Vec::new()) }), TrapReason::SupervisorError(error) => Err(error)?, } } @@ -429,9 +407,7 @@ where // Check the exact type of the error. match sandbox_result { // No traps were generated. Proceed normally. 
- Ok(_) => { - Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes(Vec::new()) }) - } + Ok(_) => Ok(ExecReturnValue { flags: ReturnFlags::empty(), data: Bytes(Vec::new()) }), // `Error::Module` is returned only if instantiation or linking failed (i.e. // wasm binary tried to import a function that is not provided by the host). // This shouldn't happen because validation process ought to reject such binaries. @@ -441,7 +417,7 @@ where Err(sp_sandbox::Error::Module) => Err("validation error")?, // Any other kind of a trap should result in a failure. Err(sp_sandbox::Error::Execution) | Err(sp_sandbox::Error::OutOfBounds) => - Err(Error::::ContractTrapped)? + Err(Error::::ContractTrapped)?, } } @@ -484,12 +460,11 @@ where /// Returns `Err` if one of the following conditions occurs: /// /// - requested buffer is not within the bounds of the sandbox memory. - pub fn read_sandbox_memory(&self, ptr: u32, len: u32) - -> Result, DispatchError> - { + pub fn read_sandbox_memory(&self, ptr: u32, len: u32) -> Result, DispatchError> { ensure!(len <= self.ext.schedule().limits.max_memory_size(), Error::::OutOfBounds); let mut buf = vec![0u8; len as usize]; - self.memory.get(ptr, buf.as_mut_slice()) + self.memory + .get(ptr, buf.as_mut_slice()) .map_err(|_| Error::::OutOfBounds)?; Ok(buf) } @@ -499,9 +474,11 @@ where /// Returns `Err` if one of the following conditions occurs: /// /// - requested buffer is not within the bounds of the sandbox memory. - pub fn read_sandbox_memory_into_buf(&self, ptr: u32, buf: &mut [u8]) - -> Result<(), DispatchError> - { + pub fn read_sandbox_memory_into_buf( + &self, + ptr: u32, + buf: &mut [u8], + ) -> Result<(), DispatchError> { self.memory.get(ptr, buf).map_err(|_| Error::::OutOfBounds.into()) } @@ -511,9 +488,10 @@ where /// /// The weight of reading a fixed value is included in the overall weight of any /// contract callable function. 
- pub fn read_sandbox_memory_as(&self, ptr: u32) - -> Result - { + pub fn read_sandbox_memory_as( + &self, + ptr: u32, + ) -> Result { let buf = self.read_sandbox_memory(ptr, D::max_encoded_len() as u32)?; let decoded = D::decode_all(&mut &buf[..]) .map_err(|_| DispatchError::from(Error::::DecodingFailed))?; @@ -531,9 +509,11 @@ where /// /// There must be an extra benchmark for determining the influence of `len` with /// regard to the overall weight. - pub fn read_sandbox_memory_as_unbounded(&self, ptr: u32, len: u32) - -> Result - { + pub fn read_sandbox_memory_as_unbounded( + &self, + ptr: u32, + len: u32, + ) -> Result { let buf = self.read_sandbox_memory(ptr, len)?; let decoded = D::decode_all(&mut &buf[..]) .map_err(|_| DispatchError::from(Error::::DecodingFailed))?; @@ -566,10 +546,9 @@ where buf: &[u8], allow_skip: bool, create_token: impl FnOnce(u32) -> Option, - ) -> Result<(), DispatchError> - { + ) -> Result<(), DispatchError> { if allow_skip && out_ptr == u32::MAX { - return Ok(()); + return Ok(()) } let buf_len = buf.len() as u32; @@ -583,10 +562,10 @@ where self.charge_gas(costs)?; } - self.memory.set(out_ptr, buf).and_then(|_| { - self.memory.set(out_len_ptr, &buf_len.encode()) - }) - .map_err(|_| Error::::OutOfBounds)?; + self.memory + .set(out_ptr, buf) + .and_then(|_| self.memory.set(out_len_ptr, &buf_len.encode())) + .map_err(|_| Error::::OutOfBounds)?; Ok(()) } @@ -650,7 +629,7 @@ where x if x == not_funded => Ok(NewContractNotFunded), x if x == no_code => Ok(CodeNotFound), x if (x == not_found || x == is_tombstone || x == rent_not_paid) => Ok(NotCallable), - err => Err(err) + err => Err(err), } } @@ -665,7 +644,7 @@ where match (error, origin) { (_, Callee) => Ok(ReturnCode::CalleeTrapped), - (err, _) => Self::err_into_return_code(err) + (err, _) => Self::err_into_return_code(err), } } @@ -678,9 +657,8 @@ where input_data_ptr: u32, input_data_len: u32, output_ptr: u32, - output_len_ptr: u32 - ) -> Result - { + output_len_ptr: u32, + ) -> 
Result { self.charge_gas(RuntimeCosts::CallBase(input_data_len))?; let callee: <::T as frame_system::Config>::AccountId = self.read_sandbox_memory_as(callee_ptr)?; @@ -696,9 +674,8 @@ where self.charge_gas(RuntimeCosts::CallSurchargeTransfer)?; } let ext = &mut self.ext; - let call_outcome = ext.call( - gas, callee, value, input_data, flags.contains(CallFlags::ALLOW_REENTRY), - ); + let call_outcome = + ext.call(gas, callee, value, input_data, flags.contains(CallFlags::ALLOW_REENTRY)); // `TAIL_CALL` only matters on an `OK` result. Otherwise the call stack comes to // a halt anyways without anymore code being executed. @@ -707,7 +684,7 @@ where return Err(TrapReason::Return(ReturnData { flags: return_value.flags.bits(), data: return_value.data.0, - })); + })) } } @@ -731,10 +708,9 @@ where output_ptr: u32, output_len_ptr: u32, salt_ptr: u32, - salt_len: u32 - ) -> Result - { - self.charge_gas(RuntimeCosts::InstantiateBase {input_data_len, salt_len})?; + salt_len: u32, + ) -> Result { + self.charge_gas(RuntimeCosts::InstantiateBase { input_data_len, salt_len })?; let code_hash: CodeHash<::T> = self.read_sandbox_memory_as(code_hash_ptr)?; let value: BalanceOf<::T> = self.read_sandbox_memory_as(value_ptr)?; let input_data = self.read_sandbox_memory(input_data_ptr, input_data_len)?; @@ -743,7 +719,11 @@ where if let Ok((address, output)) = &instantiate_outcome { if !output.flags.contains(ReturnFlags::REVERT) { self.write_sandbox_output( - address_ptr, address_len_ptr, &address.encode(), true, already_charged, + address_ptr, + address_len_ptr, + &address.encode(), + true, + already_charged, )?; } self.write_sandbox_output(output_ptr, output_len_ptr, &output.data, true, |len| { @@ -767,13 +747,12 @@ where code_hash_ptr: u32, rent_allowance_ptr: u32, delta_ptr: u32, - delta_count: u32 + delta_count: u32, ) -> Result<(), TrapReason> { self.charge_gas(RuntimeCosts::RestoreTo(delta_count))?; let dest: <::T as frame_system::Config>::AccountId = 
self.read_sandbox_memory_as(dest_ptr)?; - let code_hash: CodeHash<::T> = - self.read_sandbox_memory_as(code_hash_ptr)?; + let code_hash: CodeHash<::T> = self.read_sandbox_memory_as(code_hash_ptr)?; let rent_allowance: BalanceOf<::T> = self.read_sandbox_memory_as(rent_allowance_ptr)?; let delta = { diff --git a/frame/contracts/src/weights.rs b/frame/contracts/src/weights.rs index 503d952b110ed..294f06df5b9c4 100644 --- a/frame/contracts/src/weights.rs +++ b/frame/contracts/src/weights.rs @@ -35,128 +35,129 @@ // --output=./frame/contracts/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_contracts. pub trait WeightInfo { fn on_initialize() -> Weight; - fn on_initialize_per_trie_key(k: u32, ) -> Weight; - fn on_initialize_per_queue_item(q: u32, ) -> Weight; - fn instrument(c: u32, ) -> Weight; - fn code_load(c: u32, ) -> Weight; - fn code_refcount(c: u32, ) -> Weight; - fn instantiate_with_code(c: u32, s: u32, ) -> Weight; - fn instantiate(s: u32, ) -> Weight; + fn on_initialize_per_trie_key(k: u32) -> Weight; + fn on_initialize_per_queue_item(q: u32) -> Weight; + fn instrument(c: u32) -> Weight; + fn code_load(c: u32) -> Weight; + fn code_refcount(c: u32) -> Weight; + fn instantiate_with_code(c: u32, s: u32) -> Weight; + fn instantiate(s: u32) -> Weight; fn call() -> Weight; - fn claim_surcharge(c: u32, ) -> Weight; - fn seal_caller(r: u32, ) -> Weight; - fn seal_address(r: u32, ) -> Weight; - fn seal_gas_left(r: u32, ) -> Weight; - fn seal_balance(r: u32, ) -> Weight; - fn seal_value_transferred(r: u32, ) -> Weight; - fn seal_minimum_balance(r: u32, ) -> Weight; - fn seal_tombstone_deposit(r: u32, ) -> Weight; - fn seal_rent_allowance(r: u32, ) -> Weight; - fn 
seal_block_number(r: u32, ) -> Weight; - fn seal_now(r: u32, ) -> Weight; - fn seal_weight_to_fee(r: u32, ) -> Weight; - fn seal_gas(r: u32, ) -> Weight; - fn seal_input(r: u32, ) -> Weight; - fn seal_input_per_kb(n: u32, ) -> Weight; - fn seal_return(r: u32, ) -> Weight; - fn seal_return_per_kb(n: u32, ) -> Weight; - fn seal_terminate(r: u32, ) -> Weight; - fn seal_restore_to(r: u32, ) -> Weight; - fn seal_restore_to_per_delta(d: u32, ) -> Weight; - fn seal_random(r: u32, ) -> Weight; - fn seal_deposit_event(r: u32, ) -> Weight; - fn seal_deposit_event_per_topic_and_kb(t: u32, n: u32, ) -> Weight; - fn seal_set_rent_allowance(r: u32, ) -> Weight; - fn seal_debug_message(r: u32, ) -> Weight; - fn seal_set_storage(r: u32, ) -> Weight; - fn seal_set_storage_per_kb(n: u32, ) -> Weight; - fn seal_clear_storage(r: u32, ) -> Weight; - fn seal_get_storage(r: u32, ) -> Weight; - fn seal_get_storage_per_kb(n: u32, ) -> Weight; - fn seal_transfer(r: u32, ) -> Weight; - fn seal_call(r: u32, ) -> Weight; - fn seal_call_per_transfer_input_output_kb(t: u32, i: u32, o: u32, ) -> Weight; - fn seal_instantiate(r: u32, ) -> Weight; - fn seal_instantiate_per_input_output_salt_kb(i: u32, o: u32, s: u32, ) -> Weight; - fn seal_hash_sha2_256(r: u32, ) -> Weight; - fn seal_hash_sha2_256_per_kb(n: u32, ) -> Weight; - fn seal_hash_keccak_256(r: u32, ) -> Weight; - fn seal_hash_keccak_256_per_kb(n: u32, ) -> Weight; - fn seal_hash_blake2_256(r: u32, ) -> Weight; - fn seal_hash_blake2_256_per_kb(n: u32, ) -> Weight; - fn seal_hash_blake2_128(r: u32, ) -> Weight; - fn seal_hash_blake2_128_per_kb(n: u32, ) -> Weight; - fn instr_i64const(r: u32, ) -> Weight; - fn instr_i64load(r: u32, ) -> Weight; - fn instr_i64store(r: u32, ) -> Weight; - fn instr_select(r: u32, ) -> Weight; - fn instr_if(r: u32, ) -> Weight; - fn instr_br(r: u32, ) -> Weight; - fn instr_br_if(r: u32, ) -> Weight; - fn instr_br_table(r: u32, ) -> Weight; - fn instr_br_table_per_entry(e: u32, ) -> Weight; - fn instr_call(r: 
u32, ) -> Weight; - fn instr_call_indirect(r: u32, ) -> Weight; - fn instr_call_indirect_per_param(p: u32, ) -> Weight; - fn instr_local_get(r: u32, ) -> Weight; - fn instr_local_set(r: u32, ) -> Weight; - fn instr_local_tee(r: u32, ) -> Weight; - fn instr_global_get(r: u32, ) -> Weight; - fn instr_global_set(r: u32, ) -> Weight; - fn instr_memory_current(r: u32, ) -> Weight; - fn instr_memory_grow(r: u32, ) -> Weight; - fn instr_i64clz(r: u32, ) -> Weight; - fn instr_i64ctz(r: u32, ) -> Weight; - fn instr_i64popcnt(r: u32, ) -> Weight; - fn instr_i64eqz(r: u32, ) -> Weight; - fn instr_i64extendsi32(r: u32, ) -> Weight; - fn instr_i64extendui32(r: u32, ) -> Weight; - fn instr_i32wrapi64(r: u32, ) -> Weight; - fn instr_i64eq(r: u32, ) -> Weight; - fn instr_i64ne(r: u32, ) -> Weight; - fn instr_i64lts(r: u32, ) -> Weight; - fn instr_i64ltu(r: u32, ) -> Weight; - fn instr_i64gts(r: u32, ) -> Weight; - fn instr_i64gtu(r: u32, ) -> Weight; - fn instr_i64les(r: u32, ) -> Weight; - fn instr_i64leu(r: u32, ) -> Weight; - fn instr_i64ges(r: u32, ) -> Weight; - fn instr_i64geu(r: u32, ) -> Weight; - fn instr_i64add(r: u32, ) -> Weight; - fn instr_i64sub(r: u32, ) -> Weight; - fn instr_i64mul(r: u32, ) -> Weight; - fn instr_i64divs(r: u32, ) -> Weight; - fn instr_i64divu(r: u32, ) -> Weight; - fn instr_i64rems(r: u32, ) -> Weight; - fn instr_i64remu(r: u32, ) -> Weight; - fn instr_i64and(r: u32, ) -> Weight; - fn instr_i64or(r: u32, ) -> Weight; - fn instr_i64xor(r: u32, ) -> Weight; - fn instr_i64shl(r: u32, ) -> Weight; - fn instr_i64shrs(r: u32, ) -> Weight; - fn instr_i64shru(r: u32, ) -> Weight; - fn instr_i64rotl(r: u32, ) -> Weight; - fn instr_i64rotr(r: u32, ) -> Weight; + fn claim_surcharge(c: u32) -> Weight; + fn seal_caller(r: u32) -> Weight; + fn seal_address(r: u32) -> Weight; + fn seal_gas_left(r: u32) -> Weight; + fn seal_balance(r: u32) -> Weight; + fn seal_value_transferred(r: u32) -> Weight; + fn seal_minimum_balance(r: u32) -> Weight; + fn 
seal_tombstone_deposit(r: u32) -> Weight; + fn seal_rent_allowance(r: u32) -> Weight; + fn seal_block_number(r: u32) -> Weight; + fn seal_now(r: u32) -> Weight; + fn seal_weight_to_fee(r: u32) -> Weight; + fn seal_gas(r: u32) -> Weight; + fn seal_input(r: u32) -> Weight; + fn seal_input_per_kb(n: u32) -> Weight; + fn seal_return(r: u32) -> Weight; + fn seal_return_per_kb(n: u32) -> Weight; + fn seal_terminate(r: u32) -> Weight; + fn seal_restore_to(r: u32) -> Weight; + fn seal_restore_to_per_delta(d: u32) -> Weight; + fn seal_random(r: u32) -> Weight; + fn seal_deposit_event(r: u32) -> Weight; + fn seal_deposit_event_per_topic_and_kb(t: u32, n: u32) -> Weight; + fn seal_set_rent_allowance(r: u32) -> Weight; + fn seal_debug_message(r: u32) -> Weight; + fn seal_set_storage(r: u32) -> Weight; + fn seal_set_storage_per_kb(n: u32) -> Weight; + fn seal_clear_storage(r: u32) -> Weight; + fn seal_get_storage(r: u32) -> Weight; + fn seal_get_storage_per_kb(n: u32) -> Weight; + fn seal_transfer(r: u32) -> Weight; + fn seal_call(r: u32) -> Weight; + fn seal_call_per_transfer_input_output_kb(t: u32, i: u32, o: u32) -> Weight; + fn seal_instantiate(r: u32) -> Weight; + fn seal_instantiate_per_input_output_salt_kb(i: u32, o: u32, s: u32) -> Weight; + fn seal_hash_sha2_256(r: u32) -> Weight; + fn seal_hash_sha2_256_per_kb(n: u32) -> Weight; + fn seal_hash_keccak_256(r: u32) -> Weight; + fn seal_hash_keccak_256_per_kb(n: u32) -> Weight; + fn seal_hash_blake2_256(r: u32) -> Weight; + fn seal_hash_blake2_256_per_kb(n: u32) -> Weight; + fn seal_hash_blake2_128(r: u32) -> Weight; + fn seal_hash_blake2_128_per_kb(n: u32) -> Weight; + fn instr_i64const(r: u32) -> Weight; + fn instr_i64load(r: u32) -> Weight; + fn instr_i64store(r: u32) -> Weight; + fn instr_select(r: u32) -> Weight; + fn instr_if(r: u32) -> Weight; + fn instr_br(r: u32) -> Weight; + fn instr_br_if(r: u32) -> Weight; + fn instr_br_table(r: u32) -> Weight; + fn instr_br_table_per_entry(e: u32) -> Weight; + fn 
instr_call(r: u32) -> Weight; + fn instr_call_indirect(r: u32) -> Weight; + fn instr_call_indirect_per_param(p: u32) -> Weight; + fn instr_local_get(r: u32) -> Weight; + fn instr_local_set(r: u32) -> Weight; + fn instr_local_tee(r: u32) -> Weight; + fn instr_global_get(r: u32) -> Weight; + fn instr_global_set(r: u32) -> Weight; + fn instr_memory_current(r: u32) -> Weight; + fn instr_memory_grow(r: u32) -> Weight; + fn instr_i64clz(r: u32) -> Weight; + fn instr_i64ctz(r: u32) -> Weight; + fn instr_i64popcnt(r: u32) -> Weight; + fn instr_i64eqz(r: u32) -> Weight; + fn instr_i64extendsi32(r: u32) -> Weight; + fn instr_i64extendui32(r: u32) -> Weight; + fn instr_i32wrapi64(r: u32) -> Weight; + fn instr_i64eq(r: u32) -> Weight; + fn instr_i64ne(r: u32) -> Weight; + fn instr_i64lts(r: u32) -> Weight; + fn instr_i64ltu(r: u32) -> Weight; + fn instr_i64gts(r: u32) -> Weight; + fn instr_i64gtu(r: u32) -> Weight; + fn instr_i64les(r: u32) -> Weight; + fn instr_i64leu(r: u32) -> Weight; + fn instr_i64ges(r: u32) -> Weight; + fn instr_i64geu(r: u32) -> Weight; + fn instr_i64add(r: u32) -> Weight; + fn instr_i64sub(r: u32) -> Weight; + fn instr_i64mul(r: u32) -> Weight; + fn instr_i64divs(r: u32) -> Weight; + fn instr_i64divu(r: u32) -> Weight; + fn instr_i64rems(r: u32) -> Weight; + fn instr_i64remu(r: u32) -> Weight; + fn instr_i64and(r: u32) -> Weight; + fn instr_i64or(r: u32) -> Weight; + fn instr_i64xor(r: u32) -> Weight; + fn instr_i64shl(r: u32) -> Weight; + fn instr_i64shrs(r: u32) -> Weight; + fn instr_i64shru(r: u32) -> Weight; + fn instr_i64rotl(r: u32) -> Weight; + fn instr_i64rotr(r: u32) -> Weight; } /// Weights for pallet_contracts using the Substrate node and recommended hardware. 
pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { fn on_initialize() -> Weight { - (4_636_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) + (4_636_000 as Weight).saturating_add(T::DbWeight::get().reads(1 as Weight)) } - fn on_initialize_per_trie_key(k: u32, ) -> Weight { + fn on_initialize_per_trie_key(k: u32) -> Weight { (0 as Weight) // Standard Error: 3_000 .saturating_add((2_851_000 as Weight).saturating_mul(k as Weight)) @@ -164,34 +165,34 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(k as Weight))) } - fn on_initialize_per_queue_item(q: u32, ) -> Weight { + fn on_initialize_per_queue_item(q: u32) -> Weight { (0 as Weight) // Standard Error: 11_000 .saturating_add((38_093_000 as Weight).saturating_mul(q as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn instrument(c: u32, ) -> Weight { + fn instrument(c: u32) -> Weight { (60_027_000 as Weight) // Standard Error: 109_000 .saturating_add((169_008_000 as Weight).saturating_mul(c as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn code_load(c: u32, ) -> Weight { + fn code_load(c: u32) -> Weight { (7_881_000 as Weight) // Standard Error: 0 .saturating_add((2_007_000 as Weight).saturating_mul(c as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) } - fn code_refcount(c: u32, ) -> Weight { + fn code_refcount(c: u32) -> Weight { (12_861_000 as Weight) // Standard Error: 0 .saturating_add((3_028_000 as Weight).saturating_mul(c as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn instantiate_with_code(c: u32, s: u32, ) -> Weight { + fn instantiate_with_code(c: u32, s: u32) -> Weight { (189_624_000 as 
Weight) // Standard Error: 120_000 .saturating_add((244_984_000 as Weight).saturating_mul(c as Weight)) @@ -200,7 +201,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(4 as Weight)) } - fn instantiate(s: u32, ) -> Weight { + fn instantiate(s: u32) -> Weight { (224_867_000 as Weight) // Standard Error: 0 .saturating_add((1_476_000 as Weight).saturating_mul(s as Weight)) @@ -212,126 +213,126 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn claim_surcharge(c: u32, ) -> Weight { + fn claim_surcharge(c: u32) -> Weight { (147_775_000 as Weight) // Standard Error: 5_000 .saturating_add((3_094_000 as Weight).saturating_mul(c as Weight)) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(4 as Weight)) } - fn seal_caller(r: u32, ) -> Weight { + fn seal_caller(r: u32) -> Weight { (150_159_000 as Weight) // Standard Error: 90_000 .saturating_add((274_529_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_address(r: u32, ) -> Weight { + fn seal_address(r: u32) -> Weight { (140_207_000 as Weight) // Standard Error: 116_000 .saturating_add((276_569_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_gas_left(r: u32, ) -> Weight { + fn seal_gas_left(r: u32) -> Weight { (156_581_000 as Weight) // Standard Error: 107_000 .saturating_add((270_368_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_balance(r: u32, ) -> Weight { + fn seal_balance(r: u32) -> Weight { (141_778_000 as Weight) // Standard 
Error: 305_000 .saturating_add((615_927_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_value_transferred(r: u32, ) -> Weight { + fn seal_value_transferred(r: u32) -> Weight { (138_752_000 as Weight) // Standard Error: 91_000 .saturating_add((280_176_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_minimum_balance(r: u32, ) -> Weight { + fn seal_minimum_balance(r: u32) -> Weight { (141_089_000 as Weight) // Standard Error: 82_000 .saturating_add((274_199_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_tombstone_deposit(r: u32, ) -> Weight { + fn seal_tombstone_deposit(r: u32) -> Weight { (140_447_000 as Weight) // Standard Error: 119_000 .saturating_add((270_823_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_rent_allowance(r: u32, ) -> Weight { + fn seal_rent_allowance(r: u32) -> Weight { (138_394_000 as Weight) // Standard Error: 105_000 .saturating_add((275_261_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_block_number(r: u32, ) -> Weight { + fn seal_block_number(r: u32) -> Weight { (151_633_000 as Weight) // Standard Error: 109_000 .saturating_add((269_666_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_now(r: u32, ) -> Weight { + fn seal_now(r: u32) -> Weight { (129_087_000 as Weight) // Standard Error: 252_000 .saturating_add((277_368_000 as 
Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_weight_to_fee(r: u32, ) -> Weight { + fn seal_weight_to_fee(r: u32) -> Weight { (176_205_000 as Weight) // Standard Error: 304_000 .saturating_add((555_094_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_gas(r: u32, ) -> Weight { + fn seal_gas(r: u32) -> Weight { (129_942_000 as Weight) // Standard Error: 92_000 .saturating_add((144_914_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_input(r: u32, ) -> Weight { + fn seal_input(r: u32) -> Weight { (141_540_000 as Weight) // Standard Error: 68_000 .saturating_add((6_576_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_input_per_kb(n: u32, ) -> Weight { + fn seal_input_per_kb(n: u32) -> Weight { (150_832_000 as Weight) // Standard Error: 0 .saturating_add((263_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_return(r: u32, ) -> Weight { + fn seal_return(r: u32) -> Weight { (135_920_000 as Weight) // Standard Error: 61_000 .saturating_add((3_733_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_return_per_kb(n: u32, ) -> Weight { + fn seal_return_per_kb(n: u32) -> Weight { (144_104_000 as Weight) // Standard Error: 0 .saturating_add((640_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as 
Weight)) } - fn seal_terminate(r: u32, ) -> Weight { + fn seal_terminate(r: u32) -> Weight { (141_631_000 as Weight) // Standard Error: 70_000 .saturating_add((112_747_000 as Weight).saturating_mul(r as Weight)) @@ -340,7 +341,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) .saturating_add(T::DbWeight::get().writes((4 as Weight).saturating_mul(r as Weight))) } - fn seal_restore_to(r: u32, ) -> Weight { + fn seal_restore_to(r: u32) -> Weight { (168_955_000 as Weight) // Standard Error: 211_000 .saturating_add((119_247_000 as Weight).saturating_mul(r as Weight)) @@ -349,7 +350,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) .saturating_add(T::DbWeight::get().writes((5 as Weight).saturating_mul(r as Weight))) } - fn seal_restore_to_per_delta(d: u32, ) -> Weight { + fn seal_restore_to_per_delta(d: u32) -> Weight { (0 as Weight) // Standard Error: 3_299_000 .saturating_add((3_257_862_000 as Weight).saturating_mul(d as Weight)) @@ -358,21 +359,21 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(7 as Weight)) .saturating_add(T::DbWeight::get().writes((100 as Weight).saturating_mul(d as Weight))) } - fn seal_random(r: u32, ) -> Weight { + fn seal_random(r: u32) -> Weight { (124_927_000 as Weight) // Standard Error: 407_000 .saturating_add((730_247_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_deposit_event(r: u32, ) -> Weight { + fn seal_deposit_event(r: u32) -> Weight { (135_014_000 as Weight) // Standard Error: 892_000 .saturating_add((1_131_992_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_deposit_event_per_topic_and_kb(t: u32, n: u32, ) -> Weight { + fn seal_deposit_event_per_topic_and_kb(t: u32, 
n: u32) -> Weight { (1_401_344_000 as Weight) // Standard Error: 2_961_000 .saturating_add((701_918_000 as Weight).saturating_mul(t as Weight)) @@ -383,21 +384,21 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) .saturating_add(T::DbWeight::get().writes((100 as Weight).saturating_mul(t as Weight))) } - fn seal_set_rent_allowance(r: u32, ) -> Weight { + fn seal_set_rent_allowance(r: u32) -> Weight { (146_753_000 as Weight) // Standard Error: 117_000 .saturating_add((194_150_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_debug_message(r: u32, ) -> Weight { + fn seal_debug_message(r: u32) -> Weight { (141_972_000 as Weight) // Standard Error: 114_000 .saturating_add((164_981_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_set_storage(r: u32, ) -> Weight { + fn seal_set_storage(r: u32) -> Weight { (549_424_000 as Weight) // Standard Error: 7_901_000 .saturating_add((4_159_879_000 as Weight).saturating_mul(r as Weight)) @@ -406,14 +407,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) .saturating_add(T::DbWeight::get().writes((100 as Weight).saturating_mul(r as Weight))) } - fn seal_set_storage_per_kb(n: u32, ) -> Weight { + fn seal_set_storage_per_kb(n: u32) -> Weight { (682_814_000 as Weight) // Standard Error: 229_000 .saturating_add((59_572_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn seal_clear_storage(r: u32, ) -> Weight { + fn seal_clear_storage(r: u32) -> Weight { (0 as Weight) // Standard Error: 2_889_000 .saturating_add((1_563_117_000 as Weight).saturating_mul(r as Weight)) @@ -422,7 +423,7 @@ impl WeightInfo for 
SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) .saturating_add(T::DbWeight::get().writes((100 as Weight).saturating_mul(r as Weight))) } - fn seal_get_storage(r: u32, ) -> Weight { + fn seal_get_storage(r: u32) -> Weight { (0 as Weight) // Standard Error: 2_414_000 .saturating_add((1_178_803_000 as Weight).saturating_mul(r as Weight)) @@ -430,14 +431,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads((100 as Weight).saturating_mul(r as Weight))) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_get_storage_per_kb(n: u32, ) -> Weight { + fn seal_get_storage_per_kb(n: u32) -> Weight { (696_056_000 as Weight) // Standard Error: 266_000 .saturating_add((108_870_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_transfer(r: u32, ) -> Weight { + fn seal_transfer(r: u32) -> Weight { (0 as Weight) // Standard Error: 2_764_000 .saturating_add((6_397_838_000 as Weight).saturating_mul(r as Weight)) @@ -446,7 +447,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(2 as Weight)) .saturating_add(T::DbWeight::get().writes((100 as Weight).saturating_mul(r as Weight))) } - fn seal_call(r: u32, ) -> Weight { + fn seal_call(r: u32) -> Weight { (0 as Weight) // Standard Error: 8_279_000 .saturating_add((13_318_274_000 as Weight).saturating_mul(r as Weight)) @@ -455,7 +456,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) .saturating_add(T::DbWeight::get().writes((100 as Weight).saturating_mul(r as Weight))) } - fn seal_call_per_transfer_input_output_kb(t: u32, i: u32, o: u32, ) -> Weight { + fn seal_call_per_transfer_input_output_kb(t: u32, i: u32, o: u32) -> Weight { (13_411_599_000 as Weight) // Standard Error: 40_931_000 .saturating_add((4_291_567_000 as Weight).saturating_mul(t as Weight)) @@ -467,7 +468,7 
@@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(101 as Weight)) .saturating_add(T::DbWeight::get().writes((101 as Weight).saturating_mul(t as Weight))) } - fn seal_instantiate(r: u32, ) -> Weight { + fn seal_instantiate(r: u32) -> Weight { (0 as Weight) // Standard Error: 31_671_000 .saturating_add((24_164_540_000 as Weight).saturating_mul(r as Weight)) @@ -476,7 +477,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(3 as Weight)) .saturating_add(T::DbWeight::get().writes((300 as Weight).saturating_mul(r as Weight))) } - fn seal_instantiate_per_input_output_salt_kb(i: u32, o: u32, s: u32, ) -> Weight { + fn seal_instantiate_per_input_output_salt_kb(i: u32, o: u32, s: u32) -> Weight { (17_228_488_000 as Weight) // Standard Error: 26_000 .saturating_add((50_822_000 as Weight).saturating_mul(i as Weight)) @@ -487,313 +488,313 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(206 as Weight)) .saturating_add(T::DbWeight::get().writes(204 as Weight)) } - fn seal_hash_sha2_256(r: u32, ) -> Weight { + fn seal_hash_sha2_256(r: u32) -> Weight { (149_183_000 as Weight) // Standard Error: 99_000 .saturating_add((279_233_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_hash_sha2_256_per_kb(n: u32, ) -> Weight { + fn seal_hash_sha2_256_per_kb(n: u32) -> Weight { (457_629_000 as Weight) // Standard Error: 14_000 .saturating_add((480_686_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_hash_keccak_256(r: u32, ) -> Weight { + fn seal_hash_keccak_256(r: u32) -> Weight { (141_603_000 as Weight) // Standard Error: 120_000 .saturating_add((283_527_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) 
.saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_hash_keccak_256_per_kb(n: u32, ) -> Weight { + fn seal_hash_keccak_256_per_kb(n: u32) -> Weight { (463_644_000 as Weight) // Standard Error: 18_000 .saturating_add((332_183_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_hash_blake2_256(r: u32, ) -> Weight { + fn seal_hash_blake2_256(r: u32) -> Weight { (144_145_000 as Weight) // Standard Error: 113_000 .saturating_add((252_640_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_hash_blake2_256_per_kb(n: u32, ) -> Weight { + fn seal_hash_blake2_256_per_kb(n: u32) -> Weight { (455_101_000 as Weight) // Standard Error: 23_000 .saturating_add((149_174_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_hash_blake2_128(r: u32, ) -> Weight { + fn seal_hash_blake2_128(r: u32) -> Weight { (147_166_000 as Weight) // Standard Error: 233_000 .saturating_add((254_430_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn seal_hash_blake2_128_per_kb(n: u32, ) -> Weight { + fn seal_hash_blake2_128_per_kb(n: u32) -> Weight { (445_667_000 as Weight) // Standard Error: 24_000 .saturating_add((149_178_000 as Weight).saturating_mul(n as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn instr_i64const(r: u32, ) -> Weight { + fn instr_i64const(r: u32) -> Weight { (21_505_000 as Weight) // Standard Error: 10_000 .saturating_add((7_963_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64load(r: u32, ) -> Weight { + fn instr_i64load(r: u32) 
-> Weight { (24_775_000 as Weight) // Standard Error: 37_000 .saturating_add((157_130_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64store(r: u32, ) -> Weight { + fn instr_i64store(r: u32) -> Weight { (24_722_000 as Weight) // Standard Error: 69_000 .saturating_add((240_564_000 as Weight).saturating_mul(r as Weight)) } - fn instr_select(r: u32, ) -> Weight { + fn instr_select(r: u32) -> Weight { (21_506_000 as Weight) // Standard Error: 21_000 .saturating_add((45_277_000 as Weight).saturating_mul(r as Weight)) } - fn instr_if(r: u32, ) -> Weight { + fn instr_if(r: u32) -> Weight { (21_587_000 as Weight) // Standard Error: 18_000 .saturating_add((42_269_000 as Weight).saturating_mul(r as Weight)) } - fn instr_br(r: u32, ) -> Weight { + fn instr_br(r: u32) -> Weight { (21_538_000 as Weight) // Standard Error: 807_000 .saturating_add((22_392_000 as Weight).saturating_mul(r as Weight)) } - fn instr_br_if(r: u32, ) -> Weight { + fn instr_br_if(r: u32) -> Weight { (21_634_000 as Weight) // Standard Error: 57_000 .saturating_add((44_203_000 as Weight).saturating_mul(r as Weight)) } - fn instr_br_table(r: u32, ) -> Weight { + fn instr_br_table(r: u32) -> Weight { (21_531_000 as Weight) // Standard Error: 19_000 .saturating_add((33_198_000 as Weight).saturating_mul(r as Weight)) } - fn instr_br_table_per_entry(e: u32, ) -> Weight { + fn instr_br_table_per_entry(e: u32) -> Weight { (60_960_000 as Weight) // Standard Error: 1_000 .saturating_add((151_000 as Weight).saturating_mul(e as Weight)) } - fn instr_call(r: u32, ) -> Weight { + fn instr_call(r: u32) -> Weight { (21_777_000 as Weight) // Standard Error: 141_000 .saturating_add((245_105_000 as Weight).saturating_mul(r as Weight)) } - fn instr_call_indirect(r: u32, ) -> Weight { + fn instr_call_indirect(r: u32) -> Weight { (34_307_000 as Weight) // Standard Error: 365_000 .saturating_add((344_623_000 as Weight).saturating_mul(r as Weight)) } - fn instr_call_indirect_per_param(p: u32, ) -> Weight { + fn 
instr_call_indirect_per_param(p: u32) -> Weight { (398_310_000 as Weight) // Standard Error: 6_000 .saturating_add((4_163_000 as Weight).saturating_mul(p as Weight)) } - fn instr_local_get(r: u32, ) -> Weight { + fn instr_local_get(r: u32) -> Weight { (40_478_000 as Weight) // Standard Error: 19_000 .saturating_add((9_991_000 as Weight).saturating_mul(r as Weight)) } - fn instr_local_set(r: u32, ) -> Weight { + fn instr_local_set(r: u32) -> Weight { (40_427_000 as Weight) // Standard Error: 26_000 .saturating_add((8_526_000 as Weight).saturating_mul(r as Weight)) } - fn instr_local_tee(r: u32, ) -> Weight { + fn instr_local_tee(r: u32) -> Weight { (40_463_000 as Weight) // Standard Error: 19_000 .saturating_add((16_497_000 as Weight).saturating_mul(r as Weight)) } - fn instr_global_get(r: u32, ) -> Weight { + fn instr_global_get(r: u32) -> Weight { (25_998_000 as Weight) // Standard Error: 21_000 .saturating_add((18_214_000 as Weight).saturating_mul(r as Weight)) } - fn instr_global_set(r: u32, ) -> Weight { + fn instr_global_set(r: u32) -> Weight { (25_972_000 as Weight) // Standard Error: 42_000 .saturating_add((18_901_000 as Weight).saturating_mul(r as Weight)) } - fn instr_memory_current(r: u32, ) -> Weight { + fn instr_memory_current(r: u32) -> Weight { (24_949_000 as Weight) // Standard Error: 17_000 .saturating_add((8_541_000 as Weight).saturating_mul(r as Weight)) } - fn instr_memory_grow(r: u32, ) -> Weight { + fn instr_memory_grow(r: u32) -> Weight { (22_204_000 as Weight) // Standard Error: 4_776_000 .saturating_add((2_198_462_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64clz(r: u32, ) -> Weight { + fn instr_i64clz(r: u32) -> Weight { (21_506_000 as Weight) // Standard Error: 18_000 .saturating_add((25_302_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64ctz(r: u32, ) -> Weight { + fn instr_i64ctz(r: u32) -> Weight { (21_523_000 as Weight) // Standard Error: 29_000 .saturating_add((25_206_000 as Weight).saturating_mul(r as 
Weight)) } - fn instr_i64popcnt(r: u32, ) -> Weight { + fn instr_i64popcnt(r: u32) -> Weight { (21_567_000 as Weight) // Standard Error: 466_000 .saturating_add((19_925_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64eqz(r: u32, ) -> Weight { + fn instr_i64eqz(r: u32) -> Weight { (21_569_000 as Weight) // Standard Error: 30_000 .saturating_add((25_027_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64extendsi32(r: u32, ) -> Weight { + fn instr_i64extendsi32(r: u32) -> Weight { (21_536_000 as Weight) // Standard Error: 193_000 .saturating_add((17_690_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64extendui32(r: u32, ) -> Weight { + fn instr_i64extendui32(r: u32) -> Weight { (21_555_000 as Weight) // Standard Error: 356_000 .saturating_add((17_105_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i32wrapi64(r: u32, ) -> Weight { + fn instr_i32wrapi64(r: u32) -> Weight { (21_561_000 as Weight) // Standard Error: 1_038_000 .saturating_add((22_198_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64eq(r: u32, ) -> Weight { + fn instr_i64eq(r: u32) -> Weight { (21_513_000 as Weight) // Standard Error: 21_000 .saturating_add((33_620_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64ne(r: u32, ) -> Weight { + fn instr_i64ne(r: u32) -> Weight { (21_556_000 as Weight) // Standard Error: 17_000 .saturating_add((33_669_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64lts(r: u32, ) -> Weight { + fn instr_i64lts(r: u32) -> Weight { (21_571_000 as Weight) // Standard Error: 19_000 .saturating_add((33_649_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64ltu(r: u32, ) -> Weight { + fn instr_i64ltu(r: u32) -> Weight { (21_533_000 as Weight) // Standard Error: 23_000 .saturating_add((33_450_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64gts(r: u32, ) -> Weight { + fn instr_i64gts(r: u32) -> Weight { (21_525_000 as Weight) // Standard Error: 24_000 .saturating_add((33_727_000 as 
Weight).saturating_mul(r as Weight)) } - fn instr_i64gtu(r: u32, ) -> Weight { + fn instr_i64gtu(r: u32) -> Weight { (21_546_000 as Weight) // Standard Error: 16_000 .saturating_add((33_420_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64les(r: u32, ) -> Weight { + fn instr_i64les(r: u32) -> Weight { (21_546_000 as Weight) // Standard Error: 22_000 .saturating_add((33_720_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64leu(r: u32, ) -> Weight { + fn instr_i64leu(r: u32) -> Weight { (21_546_000 as Weight) // Standard Error: 20_000 .saturating_add((33_383_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64ges(r: u32, ) -> Weight { + fn instr_i64ges(r: u32) -> Weight { (21_577_000 as Weight) // Standard Error: 27_000 .saturating_add((33_454_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64geu(r: u32, ) -> Weight { + fn instr_i64geu(r: u32) -> Weight { (21_566_000 as Weight) // Standard Error: 25_000 .saturating_add((33_665_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64add(r: u32, ) -> Weight { + fn instr_i64add(r: u32) -> Weight { (21_524_000 as Weight) // Standard Error: 22_000 .saturating_add((33_351_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64sub(r: u32, ) -> Weight { + fn instr_i64sub(r: u32) -> Weight { (21_558_000 as Weight) // Standard Error: 18_000 .saturating_add((33_423_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64mul(r: u32, ) -> Weight { + fn instr_i64mul(r: u32) -> Weight { (21_554_000 as Weight) // Standard Error: 17_000 .saturating_add((33_588_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64divs(r: u32, ) -> Weight { + fn instr_i64divs(r: u32) -> Weight { (21_568_000 as Weight) // Standard Error: 29_000 .saturating_add((38_897_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64divu(r: u32, ) -> Weight { + fn instr_i64divu(r: u32) -> Weight { (21_567_000 as Weight) // Standard Error: 31_000 .saturating_add((38_756_000 as Weight).saturating_mul(r 
as Weight)) } - fn instr_i64rems(r: u32, ) -> Weight { + fn instr_i64rems(r: u32) -> Weight { (21_540_000 as Weight) // Standard Error: 20_000 .saturating_add((39_244_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64remu(r: u32, ) -> Weight { + fn instr_i64remu(r: u32) -> Weight { (21_581_000 as Weight) // Standard Error: 24_000 .saturating_add((38_461_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64and(r: u32, ) -> Weight { + fn instr_i64and(r: u32) -> Weight { (21_555_000 as Weight) // Standard Error: 24_000 .saturating_add((33_367_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64or(r: u32, ) -> Weight { + fn instr_i64or(r: u32) -> Weight { (21_523_000 as Weight) // Standard Error: 18_000 .saturating_add((33_466_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64xor(r: u32, ) -> Weight { + fn instr_i64xor(r: u32) -> Weight { (21_536_000 as Weight) // Standard Error: 34_000 .saturating_add((33_452_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64shl(r: u32, ) -> Weight { + fn instr_i64shl(r: u32) -> Weight { (21_567_000 as Weight) // Standard Error: 24_000 .saturating_add((33_809_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64shrs(r: u32, ) -> Weight { + fn instr_i64shrs(r: u32) -> Weight { (21_580_000 as Weight) // Standard Error: 32_000 .saturating_add((33_849_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64shru(r: u32, ) -> Weight { + fn instr_i64shru(r: u32) -> Weight { (21_571_000 as Weight) // Standard Error: 18_000 .saturating_add((33_799_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64rotl(r: u32, ) -> Weight { + fn instr_i64rotl(r: u32) -> Weight { (21_559_000 as Weight) // Standard Error: 22_000 .saturating_add((33_947_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64rotr(r: u32, ) -> Weight { + fn instr_i64rotr(r: u32) -> Weight { (21_565_000 as Weight) // Standard Error: 20_000 .saturating_add((33_754_000 as Weight).saturating_mul(r as Weight)) @@ 
-803,10 +804,9 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { fn on_initialize() -> Weight { - (4_636_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) + (4_636_000 as Weight).saturating_add(RocksDbWeight::get().reads(1 as Weight)) } - fn on_initialize_per_trie_key(k: u32, ) -> Weight { + fn on_initialize_per_trie_key(k: u32) -> Weight { (0 as Weight) // Standard Error: 3_000 .saturating_add((2_851_000 as Weight).saturating_mul(k as Weight)) @@ -814,34 +814,34 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(1 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(k as Weight))) } - fn on_initialize_per_queue_item(q: u32, ) -> Weight { + fn on_initialize_per_queue_item(q: u32) -> Weight { (0 as Weight) // Standard Error: 11_000 .saturating_add((38_093_000 as Weight).saturating_mul(q as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn instrument(c: u32, ) -> Weight { + fn instrument(c: u32) -> Weight { (60_027_000 as Weight) // Standard Error: 109_000 .saturating_add((169_008_000 as Weight).saturating_mul(c as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn code_load(c: u32, ) -> Weight { + fn code_load(c: u32) -> Weight { (7_881_000 as Weight) // Standard Error: 0 .saturating_add((2_007_000 as Weight).saturating_mul(c as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) } - fn code_refcount(c: u32, ) -> Weight { + fn code_refcount(c: u32) -> Weight { (12_861_000 as Weight) // Standard Error: 0 .saturating_add((3_028_000 as Weight).saturating_mul(c as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn instantiate_with_code(c: u32, s: u32, ) -> Weight { + fn 
instantiate_with_code(c: u32, s: u32) -> Weight { (189_624_000 as Weight) // Standard Error: 120_000 .saturating_add((244_984_000 as Weight).saturating_mul(c as Weight)) @@ -850,7 +850,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(4 as Weight)) } - fn instantiate(s: u32, ) -> Weight { + fn instantiate(s: u32) -> Weight { (224_867_000 as Weight) // Standard Error: 0 .saturating_add((1_476_000 as Weight).saturating_mul(s as Weight)) @@ -862,126 +862,126 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn claim_surcharge(c: u32, ) -> Weight { + fn claim_surcharge(c: u32) -> Weight { (147_775_000 as Weight) // Standard Error: 5_000 .saturating_add((3_094_000 as Weight).saturating_mul(c as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(4 as Weight)) } - fn seal_caller(r: u32, ) -> Weight { + fn seal_caller(r: u32) -> Weight { (150_159_000 as Weight) // Standard Error: 90_000 .saturating_add((274_529_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_address(r: u32, ) -> Weight { + fn seal_address(r: u32) -> Weight { (140_207_000 as Weight) // Standard Error: 116_000 .saturating_add((276_569_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_gas_left(r: u32, ) -> Weight { + fn seal_gas_left(r: u32) -> Weight { (156_581_000 as Weight) // Standard Error: 107_000 .saturating_add((270_368_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_balance(r: u32, ) -> Weight { + fn 
seal_balance(r: u32) -> Weight { (141_778_000 as Weight) // Standard Error: 305_000 .saturating_add((615_927_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_value_transferred(r: u32, ) -> Weight { + fn seal_value_transferred(r: u32) -> Weight { (138_752_000 as Weight) // Standard Error: 91_000 .saturating_add((280_176_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_minimum_balance(r: u32, ) -> Weight { + fn seal_minimum_balance(r: u32) -> Weight { (141_089_000 as Weight) // Standard Error: 82_000 .saturating_add((274_199_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_tombstone_deposit(r: u32, ) -> Weight { + fn seal_tombstone_deposit(r: u32) -> Weight { (140_447_000 as Weight) // Standard Error: 119_000 .saturating_add((270_823_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_rent_allowance(r: u32, ) -> Weight { + fn seal_rent_allowance(r: u32) -> Weight { (138_394_000 as Weight) // Standard Error: 105_000 .saturating_add((275_261_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_block_number(r: u32, ) -> Weight { + fn seal_block_number(r: u32) -> Weight { (151_633_000 as Weight) // Standard Error: 109_000 .saturating_add((269_666_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_now(r: u32, ) -> Weight { + fn seal_now(r: u32) -> Weight { 
(129_087_000 as Weight) // Standard Error: 252_000 .saturating_add((277_368_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_weight_to_fee(r: u32, ) -> Weight { + fn seal_weight_to_fee(r: u32) -> Weight { (176_205_000 as Weight) // Standard Error: 304_000 .saturating_add((555_094_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_gas(r: u32, ) -> Weight { + fn seal_gas(r: u32) -> Weight { (129_942_000 as Weight) // Standard Error: 92_000 .saturating_add((144_914_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_input(r: u32, ) -> Weight { + fn seal_input(r: u32) -> Weight { (141_540_000 as Weight) // Standard Error: 68_000 .saturating_add((6_576_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_input_per_kb(n: u32, ) -> Weight { + fn seal_input_per_kb(n: u32) -> Weight { (150_832_000 as Weight) // Standard Error: 0 .saturating_add((263_000 as Weight).saturating_mul(n as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_return(r: u32, ) -> Weight { + fn seal_return(r: u32) -> Weight { (135_920_000 as Weight) // Standard Error: 61_000 .saturating_add((3_733_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_return_per_kb(n: u32, ) -> Weight { + fn seal_return_per_kb(n: u32) -> Weight { (144_104_000 as Weight) // Standard Error: 0 .saturating_add((640_000 as Weight).saturating_mul(n as 
Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_terminate(r: u32, ) -> Weight { + fn seal_terminate(r: u32) -> Weight { (141_631_000 as Weight) // Standard Error: 70_000 .saturating_add((112_747_000 as Weight).saturating_mul(r as Weight)) @@ -990,7 +990,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(1 as Weight)) .saturating_add(RocksDbWeight::get().writes((4 as Weight).saturating_mul(r as Weight))) } - fn seal_restore_to(r: u32, ) -> Weight { + fn seal_restore_to(r: u32) -> Weight { (168_955_000 as Weight) // Standard Error: 211_000 .saturating_add((119_247_000 as Weight).saturating_mul(r as Weight)) @@ -999,30 +999,32 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(1 as Weight)) .saturating_add(RocksDbWeight::get().writes((5 as Weight).saturating_mul(r as Weight))) } - fn seal_restore_to_per_delta(d: u32, ) -> Weight { + fn seal_restore_to_per_delta(d: u32) -> Weight { (0 as Weight) // Standard Error: 3_299_000 .saturating_add((3_257_862_000 as Weight).saturating_mul(d as Weight)) .saturating_add(RocksDbWeight::get().reads(7 as Weight)) .saturating_add(RocksDbWeight::get().reads((100 as Weight).saturating_mul(d as Weight))) .saturating_add(RocksDbWeight::get().writes(7 as Weight)) - .saturating_add(RocksDbWeight::get().writes((100 as Weight).saturating_mul(d as Weight))) + .saturating_add( + RocksDbWeight::get().writes((100 as Weight).saturating_mul(d as Weight)), + ) } - fn seal_random(r: u32, ) -> Weight { + fn seal_random(r: u32) -> Weight { (124_927_000 as Weight) // Standard Error: 407_000 .saturating_add((730_247_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_deposit_event(r: u32, ) -> Weight { + fn seal_deposit_event(r: u32) -> Weight { (135_014_000 as Weight) // Standard Error: 892_000 
.saturating_add((1_131_992_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_deposit_event_per_topic_and_kb(t: u32, n: u32, ) -> Weight { + fn seal_deposit_event_per_topic_and_kb(t: u32, n: u32) -> Weight { (1_401_344_000 as Weight) // Standard Error: 2_961_000 .saturating_add((701_918_000 as Weight).saturating_mul(t as Weight)) @@ -1031,48 +1033,54 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().reads((100 as Weight).saturating_mul(t as Weight))) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes((100 as Weight).saturating_mul(t as Weight))) + .saturating_add( + RocksDbWeight::get().writes((100 as Weight).saturating_mul(t as Weight)), + ) } - fn seal_set_rent_allowance(r: u32, ) -> Weight { + fn seal_set_rent_allowance(r: u32) -> Weight { (146_753_000 as Weight) // Standard Error: 117_000 .saturating_add((194_150_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_debug_message(r: u32, ) -> Weight { + fn seal_debug_message(r: u32) -> Weight { (141_972_000 as Weight) // Standard Error: 114_000 .saturating_add((164_981_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_set_storage(r: u32, ) -> Weight { + fn seal_set_storage(r: u32) -> Weight { (549_424_000 as Weight) // Standard Error: 7_901_000 .saturating_add((4_159_879_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().reads((100 as Weight).saturating_mul(r as Weight))) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) - 
.saturating_add(RocksDbWeight::get().writes((100 as Weight).saturating_mul(r as Weight))) + .saturating_add( + RocksDbWeight::get().writes((100 as Weight).saturating_mul(r as Weight)), + ) } - fn seal_set_storage_per_kb(n: u32, ) -> Weight { + fn seal_set_storage_per_kb(n: u32) -> Weight { (682_814_000 as Weight) // Standard Error: 229_000 .saturating_add((59_572_000 as Weight).saturating_mul(n as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn seal_clear_storage(r: u32, ) -> Weight { + fn seal_clear_storage(r: u32) -> Weight { (0 as Weight) // Standard Error: 2_889_000 .saturating_add((1_563_117_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().reads((100 as Weight).saturating_mul(r as Weight))) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes((100 as Weight).saturating_mul(r as Weight))) + .saturating_add( + RocksDbWeight::get().writes((100 as Weight).saturating_mul(r as Weight)), + ) } - fn seal_get_storage(r: u32, ) -> Weight { + fn seal_get_storage(r: u32) -> Weight { (0 as Weight) // Standard Error: 2_414_000 .saturating_add((1_178_803_000 as Weight).saturating_mul(r as Weight)) @@ -1080,32 +1088,36 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads((100 as Weight).saturating_mul(r as Weight))) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_get_storage_per_kb(n: u32, ) -> Weight { + fn seal_get_storage_per_kb(n: u32) -> Weight { (696_056_000 as Weight) // Standard Error: 266_000 .saturating_add((108_870_000 as Weight).saturating_mul(n as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_transfer(r: u32, ) -> Weight { + fn seal_transfer(r: u32) -> Weight { (0 as Weight) // Standard Error: 2_764_000 
.saturating_add((6_397_838_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().reads((100 as Weight).saturating_mul(r as Weight))) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) - .saturating_add(RocksDbWeight::get().writes((100 as Weight).saturating_mul(r as Weight))) + .saturating_add( + RocksDbWeight::get().writes((100 as Weight).saturating_mul(r as Weight)), + ) } - fn seal_call(r: u32, ) -> Weight { + fn seal_call(r: u32) -> Weight { (0 as Weight) // Standard Error: 8_279_000 .saturating_add((13_318_274_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().reads((200 as Weight).saturating_mul(r as Weight))) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) - .saturating_add(RocksDbWeight::get().writes((100 as Weight).saturating_mul(r as Weight))) + .saturating_add( + RocksDbWeight::get().writes((100 as Weight).saturating_mul(r as Weight)), + ) } - fn seal_call_per_transfer_input_output_kb(t: u32, i: u32, o: u32, ) -> Weight { + fn seal_call_per_transfer_input_output_kb(t: u32, i: u32, o: u32) -> Weight { (13_411_599_000 as Weight) // Standard Error: 40_931_000 .saturating_add((4_291_567_000 as Weight).saturating_mul(t as Weight)) @@ -1115,18 +1127,22 @@ impl WeightInfo for () { .saturating_add((68_502_000 as Weight).saturating_mul(o as Weight)) .saturating_add(RocksDbWeight::get().reads(205 as Weight)) .saturating_add(RocksDbWeight::get().writes(101 as Weight)) - .saturating_add(RocksDbWeight::get().writes((101 as Weight).saturating_mul(t as Weight))) + .saturating_add( + RocksDbWeight::get().writes((101 as Weight).saturating_mul(t as Weight)), + ) } - fn seal_instantiate(r: u32, ) -> Weight { + fn seal_instantiate(r: u32) -> Weight { (0 as Weight) // Standard Error: 31_671_000 .saturating_add((24_164_540_000 as Weight).saturating_mul(r as Weight)) 
.saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().reads((300 as Weight).saturating_mul(r as Weight))) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) - .saturating_add(RocksDbWeight::get().writes((300 as Weight).saturating_mul(r as Weight))) + .saturating_add( + RocksDbWeight::get().writes((300 as Weight).saturating_mul(r as Weight)), + ) } - fn seal_instantiate_per_input_output_salt_kb(i: u32, o: u32, s: u32, ) -> Weight { + fn seal_instantiate_per_input_output_salt_kb(i: u32, o: u32, s: u32) -> Weight { (17_228_488_000 as Weight) // Standard Error: 26_000 .saturating_add((50_822_000 as Weight).saturating_mul(i as Weight)) @@ -1137,313 +1153,313 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(206 as Weight)) .saturating_add(RocksDbWeight::get().writes(204 as Weight)) } - fn seal_hash_sha2_256(r: u32, ) -> Weight { + fn seal_hash_sha2_256(r: u32) -> Weight { (149_183_000 as Weight) // Standard Error: 99_000 .saturating_add((279_233_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_hash_sha2_256_per_kb(n: u32, ) -> Weight { + fn seal_hash_sha2_256_per_kb(n: u32) -> Weight { (457_629_000 as Weight) // Standard Error: 14_000 .saturating_add((480_686_000 as Weight).saturating_mul(n as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_hash_keccak_256(r: u32, ) -> Weight { + fn seal_hash_keccak_256(r: u32) -> Weight { (141_603_000 as Weight) // Standard Error: 120_000 .saturating_add((283_527_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_hash_keccak_256_per_kb(n: u32, ) -> Weight { + fn seal_hash_keccak_256_per_kb(n: u32) -> Weight { (463_644_000 as Weight) // Standard 
Error: 18_000 .saturating_add((332_183_000 as Weight).saturating_mul(n as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_hash_blake2_256(r: u32, ) -> Weight { + fn seal_hash_blake2_256(r: u32) -> Weight { (144_145_000 as Weight) // Standard Error: 113_000 .saturating_add((252_640_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_hash_blake2_256_per_kb(n: u32, ) -> Weight { + fn seal_hash_blake2_256_per_kb(n: u32) -> Weight { (455_101_000 as Weight) // Standard Error: 23_000 .saturating_add((149_174_000 as Weight).saturating_mul(n as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_hash_blake2_128(r: u32, ) -> Weight { + fn seal_hash_blake2_128(r: u32) -> Weight { (147_166_000 as Weight) // Standard Error: 233_000 .saturating_add((254_430_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn seal_hash_blake2_128_per_kb(n: u32, ) -> Weight { + fn seal_hash_blake2_128_per_kb(n: u32) -> Weight { (445_667_000 as Weight) // Standard Error: 24_000 .saturating_add((149_178_000 as Weight).saturating_mul(n as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn instr_i64const(r: u32, ) -> Weight { + fn instr_i64const(r: u32) -> Weight { (21_505_000 as Weight) // Standard Error: 10_000 .saturating_add((7_963_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64load(r: u32, ) -> Weight { + fn instr_i64load(r: u32) -> Weight { (24_775_000 as Weight) // Standard Error: 37_000 .saturating_add((157_130_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64store(r: u32, ) -> Weight { + fn 
instr_i64store(r: u32) -> Weight { (24_722_000 as Weight) // Standard Error: 69_000 .saturating_add((240_564_000 as Weight).saturating_mul(r as Weight)) } - fn instr_select(r: u32, ) -> Weight { + fn instr_select(r: u32) -> Weight { (21_506_000 as Weight) // Standard Error: 21_000 .saturating_add((45_277_000 as Weight).saturating_mul(r as Weight)) } - fn instr_if(r: u32, ) -> Weight { + fn instr_if(r: u32) -> Weight { (21_587_000 as Weight) // Standard Error: 18_000 .saturating_add((42_269_000 as Weight).saturating_mul(r as Weight)) } - fn instr_br(r: u32, ) -> Weight { + fn instr_br(r: u32) -> Weight { (21_538_000 as Weight) // Standard Error: 807_000 .saturating_add((22_392_000 as Weight).saturating_mul(r as Weight)) } - fn instr_br_if(r: u32, ) -> Weight { + fn instr_br_if(r: u32) -> Weight { (21_634_000 as Weight) // Standard Error: 57_000 .saturating_add((44_203_000 as Weight).saturating_mul(r as Weight)) } - fn instr_br_table(r: u32, ) -> Weight { + fn instr_br_table(r: u32) -> Weight { (21_531_000 as Weight) // Standard Error: 19_000 .saturating_add((33_198_000 as Weight).saturating_mul(r as Weight)) } - fn instr_br_table_per_entry(e: u32, ) -> Weight { + fn instr_br_table_per_entry(e: u32) -> Weight { (60_960_000 as Weight) // Standard Error: 1_000 .saturating_add((151_000 as Weight).saturating_mul(e as Weight)) } - fn instr_call(r: u32, ) -> Weight { + fn instr_call(r: u32) -> Weight { (21_777_000 as Weight) // Standard Error: 141_000 .saturating_add((245_105_000 as Weight).saturating_mul(r as Weight)) } - fn instr_call_indirect(r: u32, ) -> Weight { + fn instr_call_indirect(r: u32) -> Weight { (34_307_000 as Weight) // Standard Error: 365_000 .saturating_add((344_623_000 as Weight).saturating_mul(r as Weight)) } - fn instr_call_indirect_per_param(p: u32, ) -> Weight { + fn instr_call_indirect_per_param(p: u32) -> Weight { (398_310_000 as Weight) // Standard Error: 6_000 .saturating_add((4_163_000 as Weight).saturating_mul(p as Weight)) } - fn 
instr_local_get(r: u32, ) -> Weight { + fn instr_local_get(r: u32) -> Weight { (40_478_000 as Weight) // Standard Error: 19_000 .saturating_add((9_991_000 as Weight).saturating_mul(r as Weight)) } - fn instr_local_set(r: u32, ) -> Weight { + fn instr_local_set(r: u32) -> Weight { (40_427_000 as Weight) // Standard Error: 26_000 .saturating_add((8_526_000 as Weight).saturating_mul(r as Weight)) } - fn instr_local_tee(r: u32, ) -> Weight { + fn instr_local_tee(r: u32) -> Weight { (40_463_000 as Weight) // Standard Error: 19_000 .saturating_add((16_497_000 as Weight).saturating_mul(r as Weight)) } - fn instr_global_get(r: u32, ) -> Weight { + fn instr_global_get(r: u32) -> Weight { (25_998_000 as Weight) // Standard Error: 21_000 .saturating_add((18_214_000 as Weight).saturating_mul(r as Weight)) } - fn instr_global_set(r: u32, ) -> Weight { + fn instr_global_set(r: u32) -> Weight { (25_972_000 as Weight) // Standard Error: 42_000 .saturating_add((18_901_000 as Weight).saturating_mul(r as Weight)) } - fn instr_memory_current(r: u32, ) -> Weight { + fn instr_memory_current(r: u32) -> Weight { (24_949_000 as Weight) // Standard Error: 17_000 .saturating_add((8_541_000 as Weight).saturating_mul(r as Weight)) } - fn instr_memory_grow(r: u32, ) -> Weight { + fn instr_memory_grow(r: u32) -> Weight { (22_204_000 as Weight) // Standard Error: 4_776_000 .saturating_add((2_198_462_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64clz(r: u32, ) -> Weight { + fn instr_i64clz(r: u32) -> Weight { (21_506_000 as Weight) // Standard Error: 18_000 .saturating_add((25_302_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64ctz(r: u32, ) -> Weight { + fn instr_i64ctz(r: u32) -> Weight { (21_523_000 as Weight) // Standard Error: 29_000 .saturating_add((25_206_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64popcnt(r: u32, ) -> Weight { + fn instr_i64popcnt(r: u32) -> Weight { (21_567_000 as Weight) // Standard Error: 466_000 .saturating_add((19_925_000 as 
Weight).saturating_mul(r as Weight)) } - fn instr_i64eqz(r: u32, ) -> Weight { + fn instr_i64eqz(r: u32) -> Weight { (21_569_000 as Weight) // Standard Error: 30_000 .saturating_add((25_027_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64extendsi32(r: u32, ) -> Weight { + fn instr_i64extendsi32(r: u32) -> Weight { (21_536_000 as Weight) // Standard Error: 193_000 .saturating_add((17_690_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64extendui32(r: u32, ) -> Weight { + fn instr_i64extendui32(r: u32) -> Weight { (21_555_000 as Weight) // Standard Error: 356_000 .saturating_add((17_105_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i32wrapi64(r: u32, ) -> Weight { + fn instr_i32wrapi64(r: u32) -> Weight { (21_561_000 as Weight) // Standard Error: 1_038_000 .saturating_add((22_198_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64eq(r: u32, ) -> Weight { + fn instr_i64eq(r: u32) -> Weight { (21_513_000 as Weight) // Standard Error: 21_000 .saturating_add((33_620_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64ne(r: u32, ) -> Weight { + fn instr_i64ne(r: u32) -> Weight { (21_556_000 as Weight) // Standard Error: 17_000 .saturating_add((33_669_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64lts(r: u32, ) -> Weight { + fn instr_i64lts(r: u32) -> Weight { (21_571_000 as Weight) // Standard Error: 19_000 .saturating_add((33_649_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64ltu(r: u32, ) -> Weight { + fn instr_i64ltu(r: u32) -> Weight { (21_533_000 as Weight) // Standard Error: 23_000 .saturating_add((33_450_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64gts(r: u32, ) -> Weight { + fn instr_i64gts(r: u32) -> Weight { (21_525_000 as Weight) // Standard Error: 24_000 .saturating_add((33_727_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64gtu(r: u32, ) -> Weight { + fn instr_i64gtu(r: u32) -> Weight { (21_546_000 as Weight) // Standard Error: 16_000 
.saturating_add((33_420_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64les(r: u32, ) -> Weight { + fn instr_i64les(r: u32) -> Weight { (21_546_000 as Weight) // Standard Error: 22_000 .saturating_add((33_720_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64leu(r: u32, ) -> Weight { + fn instr_i64leu(r: u32) -> Weight { (21_546_000 as Weight) // Standard Error: 20_000 .saturating_add((33_383_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64ges(r: u32, ) -> Weight { + fn instr_i64ges(r: u32) -> Weight { (21_577_000 as Weight) // Standard Error: 27_000 .saturating_add((33_454_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64geu(r: u32, ) -> Weight { + fn instr_i64geu(r: u32) -> Weight { (21_566_000 as Weight) // Standard Error: 25_000 .saturating_add((33_665_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64add(r: u32, ) -> Weight { + fn instr_i64add(r: u32) -> Weight { (21_524_000 as Weight) // Standard Error: 22_000 .saturating_add((33_351_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64sub(r: u32, ) -> Weight { + fn instr_i64sub(r: u32) -> Weight { (21_558_000 as Weight) // Standard Error: 18_000 .saturating_add((33_423_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64mul(r: u32, ) -> Weight { + fn instr_i64mul(r: u32) -> Weight { (21_554_000 as Weight) // Standard Error: 17_000 .saturating_add((33_588_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64divs(r: u32, ) -> Weight { + fn instr_i64divs(r: u32) -> Weight { (21_568_000 as Weight) // Standard Error: 29_000 .saturating_add((38_897_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64divu(r: u32, ) -> Weight { + fn instr_i64divu(r: u32) -> Weight { (21_567_000 as Weight) // Standard Error: 31_000 .saturating_add((38_756_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64rems(r: u32, ) -> Weight { + fn instr_i64rems(r: u32) -> Weight { (21_540_000 as Weight) // Standard Error: 20_000 
.saturating_add((39_244_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64remu(r: u32, ) -> Weight { + fn instr_i64remu(r: u32) -> Weight { (21_581_000 as Weight) // Standard Error: 24_000 .saturating_add((38_461_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64and(r: u32, ) -> Weight { + fn instr_i64and(r: u32) -> Weight { (21_555_000 as Weight) // Standard Error: 24_000 .saturating_add((33_367_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64or(r: u32, ) -> Weight { + fn instr_i64or(r: u32) -> Weight { (21_523_000 as Weight) // Standard Error: 18_000 .saturating_add((33_466_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64xor(r: u32, ) -> Weight { + fn instr_i64xor(r: u32) -> Weight { (21_536_000 as Weight) // Standard Error: 34_000 .saturating_add((33_452_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64shl(r: u32, ) -> Weight { + fn instr_i64shl(r: u32) -> Weight { (21_567_000 as Weight) // Standard Error: 24_000 .saturating_add((33_809_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64shrs(r: u32, ) -> Weight { + fn instr_i64shrs(r: u32) -> Weight { (21_580_000 as Weight) // Standard Error: 32_000 .saturating_add((33_849_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64shru(r: u32, ) -> Weight { + fn instr_i64shru(r: u32) -> Weight { (21_571_000 as Weight) // Standard Error: 18_000 .saturating_add((33_799_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64rotl(r: u32, ) -> Weight { + fn instr_i64rotl(r: u32) -> Weight { (21_559_000 as Weight) // Standard Error: 22_000 .saturating_add((33_947_000 as Weight).saturating_mul(r as Weight)) } - fn instr_i64rotr(r: u32, ) -> Weight { + fn instr_i64rotr(r: u32) -> Weight { (21_565_000 as Weight) // Standard Error: 20_000 .saturating_add((33_754_000 as Weight).saturating_mul(r as Weight)) diff --git a/frame/democracy/src/benchmarking.rs b/frame/democracy/src/benchmarking.rs index ef2c7de27ba59..2e23502a59fb3 100644 --- 
a/frame/democracy/src/benchmarking.rs +++ b/frame/democracy/src/benchmarking.rs @@ -19,13 +19,15 @@ use super::*; -use frame_benchmarking::{benchmarks, account, whitelist_account, impl_benchmark_test_suite}; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelist_account}; use frame_support::{ assert_noop, assert_ok, - traits::{Currency, Get, EnsureOrigin, OnInitialize, UnfilteredDispatchable, schedule::DispatchTime}, + traits::{ + schedule::DispatchTime, Currency, EnsureOrigin, Get, OnInitialize, UnfilteredDispatchable, + }, }; -use frame_system::{RawOrigin, Pallet as System, self}; -use sp_runtime::traits::{Bounded, One, BadOrigin}; +use frame_system::{self, Pallet as System, RawOrigin}; +use sp_runtime::traits::{BadOrigin, Bounded, One}; use crate::Pallet as Democracy; @@ -49,11 +51,7 @@ fn add_proposal(n: u32) -> Result { let value = T::MinimumDeposit::get(); let proposal_hash: T::Hash = T::Hashing::hash_of(&n); - Democracy::::propose( - RawOrigin::Signed(other).into(), - proposal_hash, - value.into(), - )?; + Democracy::::propose(RawOrigin::Signed(other).into(), proposal_hash, value.into())?; Ok(proposal_hash) } @@ -76,20 +74,15 @@ fn add_referendum(n: u32) -> Result { 63, frame_system::RawOrigin::Root.into(), Call::enact_proposal(proposal_hash, referendum_index).into(), - ).map_err(|_| "failed to schedule named")?; + ) + .map_err(|_| "failed to schedule named")?; Ok(referendum_index) } fn account_vote(b: BalanceOf) -> AccountVote> { - let v = Vote { - aye: true, - conviction: Conviction::Locked1x, - }; - - AccountVote::Standard { - vote: v, - balance: b, - } + let v = Vote { aye: true, conviction: Conviction::Locked1x }; + + AccountVote::Standard { vote: v, balance: b } } benchmarks! { @@ -224,8 +217,8 @@ benchmarks! { // Place our proposal in the external queue, too. 
let hash = T::Hashing::hash_of(&0); assert_ok!( - Democracy::::external_propose(T::ExternalOrigin::successful_origin(), hash.clone()) - ); + Democracy::::external_propose(T::ExternalOrigin::successful_origin(), hash.clone()) + ); // Add a referendum of our proposal. let referendum_index = add_referendum::(0)?; @@ -237,9 +230,9 @@ benchmarks! { verify { // Referendum has been canceled assert_noop!( - Democracy::::referendum_status(referendum_index), - Error::::ReferendumInvalid - ); + Democracy::::referendum_status(referendum_index), + Error::::ReferendumInvalid + ); } // Worst case scenario, we external propose a previously blacklisted proposal @@ -785,9 +778,4 @@ benchmarks! { } } - -impl_benchmark_test_suite!( - Democracy, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Democracy, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/democracy/src/conviction.rs b/frame/democracy/src/conviction.rs index 7f9714a7d5ce4..b4f24c93bb40f 100644 --- a/frame/democracy/src/conviction.rs +++ b/frame/democracy/src/conviction.rs @@ -17,11 +17,14 @@ //! The conviction datatype. -use sp_std::{result::Result, convert::TryFrom}; -use sp_runtime::{RuntimeDebug, traits::{Zero, Bounded, CheckedMul, CheckedDiv}}; -use codec::{Encode, Decode}; -use scale_info::TypeInfo; use crate::types::Delegations; +use codec::{Decode, Encode}; +use scale_info::TypeInfo; +use sp_runtime::{ + traits::{Bounded, CheckedDiv, CheckedMul, Zero}, + RuntimeDebug, +}; +use sp_std::{convert::TryFrom, result::Result}; /// A value denoting the strength of conviction of a vote. #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, TypeInfo)] @@ -94,9 +97,10 @@ impl Conviction { } /// The votes of a voter of the given `balance` with our conviction. 
- pub fn votes< - B: From + Zero + Copy + CheckedMul + CheckedDiv + Bounded - >(self, capital: B) -> Delegations { + pub fn votes + Zero + Copy + CheckedMul + CheckedDiv + Bounded>( + self, + capital: B, + ) -> Delegations { let votes = match self { Conviction::None => capital.checked_div(&10u8.into()).unwrap_or_else(Zero::zero), x => capital.checked_mul(&u8::from(x).into()).unwrap_or_else(B::max_value), diff --git a/frame/democracy/src/lib.rs b/frame/democracy/src/lib.rs index 99920e98fb1b1..52f5e5d33319d 100644 --- a/frame/democracy/src/lib.rs +++ b/frame/democracy/src/lib.rs @@ -149,35 +149,37 @@ //! - `cancel_queued` - Cancels a proposal that is queued for enactment. //! - `clear_public_proposal` - Removes all public proposals. -#![recursion_limit="128"] +#![recursion_limit = "128"] #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::prelude::*; -use sp_runtime::{ - DispatchResult, DispatchError, ArithmeticError, RuntimeDebug, - traits::{Zero, Hash, Dispatchable, Saturating, Bounded}, -}; -use codec::{Encode, Decode, Input}; -use scale_info::TypeInfo; +use codec::{Decode, Encode, Input}; use frame_support::{ - ensure, weights::Weight, + ensure, traits::{ - Currency, ReservableCurrency, LockableCurrency, WithdrawReasons, LockIdentifier, Get, - OnUnbalanced, BalanceStatus, schedule::{Named as ScheduleNamed, DispatchTime}, + schedule::{DispatchTime, Named as ScheduleNamed}, + BalanceStatus, Currency, Get, LockIdentifier, LockableCurrency, OnUnbalanced, + ReservableCurrency, WithdrawReasons, }, + weights::Weight, +}; +use scale_info::TypeInfo; +use sp_runtime::{ + traits::{Bounded, Dispatchable, Hash, Saturating, Zero}, + ArithmeticError, DispatchError, DispatchResult, RuntimeDebug, }; +use sp_std::prelude::*; -mod vote_threshold; -mod vote; mod conviction; mod types; +mod vote; +mod vote_threshold; pub mod weights; -pub use weights::WeightInfo; -pub use vote_threshold::{Approved, VoteThreshold}; -pub use vote::{Vote, AccountVote, Voting}; pub use 
conviction::Conviction; -pub use types::{ReferendumInfo, ReferendumStatus, Tally, UnvoteScope, Delegations}; pub use pallet::*; +pub use types::{Delegations, ReferendumInfo, ReferendumStatus, Tally, UnvoteScope}; +pub use vote::{AccountVote, Vote, Voting}; +pub use vote_threshold::{Approved, VoteThreshold}; +pub use weights::WeightInfo; #[cfg(test)] mod tests; @@ -198,9 +200,11 @@ pub type PropIndex = u32; /// A referendum index. pub type ReferendumIndex = u32; -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; -type NegativeImbalanceOf = - <::Currency as Currency<::AccountId>>::NegativeImbalance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; +type NegativeImbalanceOf = <::Currency as Currency< + ::AccountId, +>>::NegativeImbalance; #[derive(Clone, Encode, Decode, RuntimeDebug, TypeInfo)] pub enum PreimageStatus { @@ -236,13 +240,16 @@ enum Releases { #[frame_support::pallet] pub mod pallet { - use sp_runtime::DispatchResult; + use super::*; use frame_support::{ - pallet_prelude::*, Parameter, - weights::{DispatchClass, Pays}, traits::EnsureOrigin, dispatch::DispatchResultWithPostInfo, + dispatch::DispatchResultWithPostInfo, + pallet_prelude::*, + traits::EnsureOrigin, + weights::{DispatchClass, Pays}, + Parameter, }; - use frame_system::{pallet_prelude::*, ensure_signed, ensure_root}; - use super::*; + use frame_system::{ensure_root, ensure_signed, pallet_prelude::*}; + use sp_runtime::DispatchResult; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -250,12 +257,12 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config + Sized { - type Proposal: Parameter + Dispatchable + From>; + type Proposal: Parameter + Dispatchable + From>; type Event: From> + IsType<::Event>; /// Currency type for this pallet. type Currency: ReservableCurrency - + LockableCurrency; + + LockableCurrency; /// The minimum period of locking and the period between a proposal being approved and enacted. 
/// @@ -324,7 +331,7 @@ pub mod pallet { /// /// The number of Vetoers for a proposal must be small, extrinsics are weighted according to /// [MAX_VETOERS](./const.MAX_VETOERS.html) - type VetoOrigin: EnsureOrigin; + type VetoOrigin: EnsureOrigin; /// Period in blocks where an external proposal may not be re-submitted after being vetoed. #[pallet::constant] @@ -335,7 +342,7 @@ pub mod pallet { type PreimageByteDeposit: Get>; /// An origin that can provide a preimage using operational extrinsics. - type OperationalPreimageOrigin: EnsureOrigin; + type OperationalPreimageOrigin: EnsureOrigin; /// Handler for the unbalanced reduction when slashing a preimage deposit. type Slash: OnUnbalanced>; @@ -371,18 +378,16 @@ pub mod pallet { /// The public proposals. Unsorted. The second item is the proposal's hash. #[pallet::storage] #[pallet::getter(fn public_props)] - pub type PublicProps = StorageValue<_, Vec<(PropIndex, T::Hash, T::AccountId)>, ValueQuery>; + pub type PublicProps = + StorageValue<_, Vec<(PropIndex, T::Hash, T::AccountId)>, ValueQuery>; /// Those who have locked a deposit. /// /// TWOX-NOTE: Safe, as increasing integer keys are safe. #[pallet::storage] #[pallet::getter(fn deposit_of)] - pub type DepositOf = StorageMap< - _, - Twox64Concat, PropIndex, - (Vec, BalanceOf), - >; + pub type DepositOf = + StorageMap<_, Twox64Concat, PropIndex, (Vec, BalanceOf)>; /// Map of hashes to the proposal preimage, along with who registered it and their deposit. /// The block number is the block at which it was deposited. 
@@ -391,7 +396,8 @@ pub mod pallet { #[pallet::storage] pub type Preimages = StorageMap< _, - Identity, T::Hash, + Identity, + T::Hash, PreimageStatus, T::BlockNumber>, >; @@ -413,7 +419,8 @@ pub mod pallet { #[pallet::getter(fn referendum_info)] pub type ReferendumInfoOf = StorageMap< _, - Twox64Concat, ReferendumIndex, + Twox64Concat, + ReferendumIndex, ReferendumInfo>, >; @@ -423,7 +430,9 @@ pub mod pallet { /// TWOX-NOTE: SAFE as `AccountId`s are crypto hashes anyway. #[pallet::storage] pub type VotingOf = StorageMap< - _, Twox64Concat, T::AccountId, + _, + Twox64Concat, + T::AccountId, Voting, T::AccountId, T::BlockNumber>, ValueQuery, >; @@ -453,7 +462,8 @@ pub mod pallet { /// A record of who vetoed what. Maps proposal hash to a possible existent block number /// (until when it may not be resubmitted) and who vetoed it. #[pallet::storage] - pub type Blacklist = StorageMap<_, Identity, T::Hash, (T::BlockNumber, Vec)>; + pub type Blacklist = + StorageMap<_, Identity, T::Hash, (T::BlockNumber, Vec)>; /// Record of all proposals that have been subject to emergency cancellation. 
#[pallet::storage] @@ -473,9 +483,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - GenesisConfig { - _phantom: Default::default(), - } + GenesisConfig { _phantom: Default::default() } } } @@ -678,11 +686,10 @@ pub mod pallet { ) -> DispatchResult { let who = ensure_signed(origin)?; - let seconds = Self::len_of_deposit_of(proposal) - .ok_or_else(|| Error::::ProposalMissing)?; + let seconds = + Self::len_of_deposit_of(proposal).ok_or_else(|| Error::::ProposalMissing)?; ensure!(seconds <= seconds_upper_bound, Error::::WrongUpperBound); - let mut deposit = Self::deposit_of(proposal) - .ok_or(Error::::ProposalMissing)?; + let mut deposit = Self::deposit_of(proposal).ok_or(Error::::ProposalMissing)?; T::Currency::reserve(&who, deposit.1)?; deposit.0.push(who); >::insert(proposal, deposit); @@ -720,7 +727,10 @@ pub mod pallet { /// /// Weight: `O(1)`. #[pallet::weight((T::WeightInfo::emergency_cancel(), DispatchClass::Operational))] - pub fn emergency_cancel(origin: OriginFor, ref_index: ReferendumIndex) -> DispatchResult { + pub fn emergency_cancel( + origin: OriginFor, + ref_index: ReferendumIndex, + ) -> DispatchResult { T::CancellationOrigin::ensure_origin(origin)?; let status = Self::referendum_status(ref_index)?; @@ -836,8 +846,8 @@ pub mod pallet { ensure!(T::InstantAllowed::get(), Error::::InstantNotAllowed); } - let (e_proposal_hash, threshold) = >::get() - .ok_or(Error::::ProposalMissing)?; + let (e_proposal_hash, threshold) = + >::get().ok_or(Error::::ProposalMissing)?; ensure!( threshold != VoteThreshold::SuperMajorityApprove, Error::::NotSimpleMajority, @@ -869,11 +879,10 @@ pub mod pallet { Err(Error::::NoProposal)?; } - let mut existing_vetoers = >::get(&proposal_hash) - .map(|pair| pair.1) - .unwrap_or_else(Vec::new); - let insert_position = existing_vetoers.binary_search(&who) - .err().ok_or(Error::::AlreadyVetoed)?; + let mut existing_vetoers = + >::get(&proposal_hash).map(|pair| 
pair.1).unwrap_or_else(Vec::new); + let insert_position = + existing_vetoers.binary_search(&who).err().ok_or(Error::::AlreadyVetoed)?; existing_vetoers.insert(insert_position, who.clone()); let until = >::block_number() + T::CooloffPeriod::get(); @@ -943,7 +952,7 @@ pub mod pallet { origin: OriginFor, to: T::AccountId, conviction: Conviction, - balance: BalanceOf + balance: BalanceOf, ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; let votes = Self::try_delegate(who, to, conviction, balance)?; @@ -1083,10 +1092,11 @@ pub mod pallet { let (provider, deposit, since, expiry) = >::get(&proposal_hash) .and_then(|m| match m { - PreimageStatus::Available { provider, deposit, since, expiry, .. } - => Some((provider, deposit, since, expiry)), + PreimageStatus::Available { provider, deposit, since, expiry, .. } => + Some((provider, deposit, since, expiry)), _ => None, - }).ok_or(Error::::PreimageMissing)?; + }) + .ok_or(Error::::PreimageMissing)?; let now = >::block_number(); let (voting, enactment) = (T::VotingPeriod::get(), T::EnactmentPeriod::get()); @@ -1094,7 +1104,8 @@ pub mod pallet { ensure!(now >= since + voting + additional, Error::::TooEarly); ensure!(expiry.map_or(true, |e| now > e), Error::::Imminent); - let res = T::Currency::repatriate_reserved(&provider, &who, deposit, BalanceStatus::Free); + let res = + T::Currency::repatriate_reserved(&provider, &who, deposit, BalanceStatus::Free); debug_assert!(res.is_ok()); >::remove(&proposal_hash); Self::deposit_event(Event::::PreimageReaped(proposal_hash, provider, deposit, who)); @@ -1205,7 +1216,8 @@ pub mod pallet { /// Weight: `O(p)` (though as this is an high-privilege dispatch, we assume it has a /// reasonable value). 
#[pallet::weight((T::WeightInfo::blacklist(T::MaxProposals::get()), DispatchClass::Operational))] - pub fn blacklist(origin: OriginFor, + pub fn blacklist( + origin: OriginFor, proposal_hash: T::Hash, maybe_ref_index: Option, ) -> DispatchResult { @@ -1282,7 +1294,7 @@ impl Pallet { /// Get all referenda ready for tally at block `n`. pub fn maturing_referenda_at( - n: T::BlockNumber + n: T::BlockNumber, ) -> Vec<(ReferendumIndex, ReferendumStatus>)> { let next = Self::lowest_unbaked(); let last = Self::referendum_count(); @@ -1293,7 +1305,8 @@ impl Pallet { n: T::BlockNumber, range: core::ops::Range, ) -> Vec<(ReferendumIndex, ReferendumStatus>)> { - range.into_iter() + range + .into_iter() .map(|i| (i, Self::referendum_info(i))) .filter_map(|(i, maybe_info)| match maybe_info { Some(ReferendumInfo::Ongoing(status)) => Some((i, status)), @@ -1309,13 +1322,13 @@ impl Pallet { pub fn internal_start_referendum( proposal_hash: T::Hash, threshold: VoteThreshold, - delay: T::BlockNumber + delay: T::BlockNumber, ) -> ReferendumIndex { >::inject_referendum( >::block_number() + T::VotingPeriod::get(), proposal_hash, threshold, - delay + delay, ) } @@ -1328,25 +1341,28 @@ impl Pallet { // private. /// Ok if the given referendum is active, Err otherwise - fn ensure_ongoing(r: ReferendumInfo>) - -> Result>, DispatchError> - { + fn ensure_ongoing( + r: ReferendumInfo>, + ) -> Result>, DispatchError> { match r { ReferendumInfo::Ongoing(s) => Ok(s), _ => Err(Error::::ReferendumInvalid.into()), } } - fn referendum_status(ref_index: ReferendumIndex) - -> Result>, DispatchError> - { - let info = ReferendumInfoOf::::get(ref_index) - .ok_or(Error::::ReferendumInvalid)?; + fn referendum_status( + ref_index: ReferendumIndex, + ) -> Result>, DispatchError> { + let info = ReferendumInfoOf::::get(ref_index).ok_or(Error::::ReferendumInvalid)?; Self::ensure_ongoing(info) } /// Actually enact a vote, if legit. 
- fn try_vote(who: &T::AccountId, ref_index: ReferendumIndex, vote: AccountVote>) -> DispatchResult { + fn try_vote( + who: &T::AccountId, + ref_index: ReferendumIndex, + vote: AccountVote>, + ) -> DispatchResult { let mut status = Self::referendum_status(ref_index)?; ensure!(vote.balance() <= T::Currency::free_balance(who), Error::::InsufficientFunds); VotingOf::::try_mutate(who, |voting| -> DispatchResult { @@ -1359,11 +1375,14 @@ impl Pallet { status.tally.reduce(approve, *delegations); } votes[i].1 = vote; - } + }, Err(i) => { - ensure!(votes.len() as u32 <= T::MaxVotes::get(), Error::::MaxVotesReached); + ensure!( + votes.len() as u32 <= T::MaxVotes::get(), + Error::::MaxVotesReached + ); votes.insert(i, (ref_index, vote)); - } + }, } // Shouldn't be possible to fail, but we handle it gracefully. status.tally.add(vote).ok_or(ArithmeticError::Overflow)?; @@ -1377,12 +1396,7 @@ impl Pallet { })?; // Extend the lock to `balance` (rather than setting it) since we don't know what other // votes are in place. - T::Currency::extend_lock( - DEMOCRACY_ID, - who, - vote.balance(), - WithdrawReasons::TRANSFER - ); + T::Currency::extend_lock(DEMOCRACY_ID, who, vote.balance(), WithdrawReasons::TRANSFER); ReferendumInfoOf::::insert(ref_index, ReferendumInfo::Ongoing(status)); Ok(()) } @@ -1393,11 +1407,17 @@ impl Pallet { /// - The referendum has finished and the voter's lock period is up. /// /// This will generally be combined with a call to `unlock`. 
- fn try_remove_vote(who: &T::AccountId, ref_index: ReferendumIndex, scope: UnvoteScope) -> DispatchResult { + fn try_remove_vote( + who: &T::AccountId, + ref_index: ReferendumIndex, + scope: UnvoteScope, + ) -> DispatchResult { let info = ReferendumInfoOf::::get(ref_index); VotingOf::::try_mutate(who, |voting| -> DispatchResult { if let Voting::Direct { ref mut votes, delegations, ref mut prior } = voting { - let i = votes.binary_search_by_key(&ref_index, |i| i.0).map_err(|_| Error::::NotVoter)?; + let i = votes + .binary_search_by_key(&ref_index, |i| i.0) + .map_err(|_| Error::::NotVoter)?; match info { Some(ReferendumInfo::Ongoing(mut status)) => { ensure!(matches!(scope, UnvoteScope::Any), Error::::NoPermission); @@ -1407,17 +1427,20 @@ impl Pallet { status.tally.reduce(approve, *delegations); } ReferendumInfoOf::::insert(ref_index, ReferendumInfo::Ongoing(status)); - } - Some(ReferendumInfo::Finished{end, approved}) => + }, + Some(ReferendumInfo::Finished { end, approved }) => if let Some((lock_periods, balance)) = votes[i].1.locked_if(approved) { let unlock_at = end + T::EnactmentPeriod::get() * lock_periods.into(); let now = frame_system::Pallet::::block_number(); if now < unlock_at { - ensure!(matches!(scope, UnvoteScope::Any), Error::::NoPermission); + ensure!( + matches!(scope, UnvoteScope::Any), + Error::::NoPermission + ); prior.accumulate(unlock_at, balance) } }, - None => {} // Referendum was cancelled. + None => {}, // Referendum was cancelled. } votes.remove(i); } @@ -1438,15 +1461,15 @@ impl Pallet { *delegations = delegations.saturating_add(amount); for &(ref_index, account_vote) in votes.iter() { if let AccountVote::Standard { vote, .. 
} = account_vote { - ReferendumInfoOf::::mutate(ref_index, |maybe_info| + ReferendumInfoOf::::mutate(ref_index, |maybe_info| { if let Some(ReferendumInfo::Ongoing(ref mut status)) = maybe_info { status.tally.increase(vote.aye, amount); } - ); + }); } } votes.len() as u32 - } + }, }) } @@ -1457,20 +1480,20 @@ impl Pallet { // We don't support second level delegating, so we don't need to do anything more. *delegations = delegations.saturating_sub(amount); 1 - } + }, Voting::Direct { votes, delegations, .. } => { *delegations = delegations.saturating_sub(amount); for &(ref_index, account_vote) in votes.iter() { if let AccountVote::Standard { vote, .. } = account_vote { - ReferendumInfoOf::::mutate(ref_index, |maybe_info| + ReferendumInfoOf::::mutate(ref_index, |maybe_info| { if let Some(ReferendumInfo::Ongoing(ref mut status)) = maybe_info { status.tally.reduce(vote.aye, amount); } - ); + }); } } votes.len() as u32 - } + }, }) } @@ -1499,22 +1522,17 @@ impl Pallet { // remove any delegation votes to our current target. Self::reduce_upstream_delegation(&target, conviction.votes(balance)); voting.set_common(delegations, prior); - } + }, Voting::Direct { votes, delegations, prior } => { // here we just ensure that we're currently idling with no votes recorded. ensure!(votes.is_empty(), Error::::VotesExist); voting.set_common(delegations, prior); - } + }, } let votes = Self::increase_upstream_delegation(&target, conviction.votes(balance)); // Extend the lock to `balance` (rather than setting it) since we don't know what other // votes are in place. 
- T::Currency::extend_lock( - DEMOCRACY_ID, - &who, - balance, - WithdrawReasons::TRANSFER - ); + T::Currency::extend_lock(DEMOCRACY_ID, &who, balance, WithdrawReasons::TRANSFER); Ok(votes) })?; Self::deposit_event(Event::::Delegated(who, target)); @@ -1529,25 +1547,18 @@ impl Pallet { let mut old = Voting::default(); sp_std::mem::swap(&mut old, voting); match old { - Voting::Delegating { - balance, - target, - conviction, - delegations, - mut prior, - } => { + Voting::Delegating { balance, target, conviction, delegations, mut prior } => { // remove any delegation votes to our current target. - let votes = Self::reduce_upstream_delegation(&target, conviction.votes(balance)); + let votes = + Self::reduce_upstream_delegation(&target, conviction.votes(balance)); let now = frame_system::Pallet::::block_number(); let lock_periods = conviction.lock_periods().into(); prior.accumulate(now + T::EnactmentPeriod::get() * lock_periods, balance); voting.set_common(delegations, prior); Ok(votes) - } - Voting::Direct { .. } => { - Err(Error::::NotDelegating.into()) - } + }, + Voting::Direct { .. 
} => Err(Error::::NotDelegating.into()), } })?; Self::deposit_event(Event::::Undelegated(who)); @@ -1577,7 +1588,8 @@ impl Pallet { ) -> ReferendumIndex { let ref_index = Self::referendum_count(); ReferendumCount::::put(ref_index + 1); - let status = ReferendumStatus { end, proposal_hash, threshold, delay, tally: Default::default() }; + let status = + ReferendumStatus { end, proposal_hash, threshold, delay, tally: Default::default() }; let item = ReferendumInfo::Ongoing(status); >::insert(ref_index, item); Self::deposit_event(Event::::Started(ref_index, threshold)); @@ -1590,7 +1602,8 @@ impl Pallet { Self::launch_public(now).or_else(|_| Self::launch_external(now)) } else { Self::launch_external(now).or_else(|_| Self::launch_public(now)) - }.map_err(|_| Error::::NoneWaiting.into()) + } + .map_err(|_| Error::::NoneWaiting.into()) } /// Table the waiting external proposal for a vote, if there is one. @@ -1648,8 +1661,10 @@ impl Pallet { debug_assert!(err_amount.is_zero()); Self::deposit_event(Event::::PreimageUsed(proposal_hash, provider, deposit)); - let res = proposal.dispatch(frame_system::RawOrigin::Root.into()) - .map(|_| ()).map_err(|e| e.error); + let res = proposal + .dispatch(frame_system::RawOrigin::Root.into()) + .map(|_| ()) + .map_err(|e| e.error); Self::deposit_event(Event::::Executed(index, res)); Ok(()) @@ -1679,10 +1694,14 @@ impl Pallet { } else { let when = now + status.delay; // Note that we need the preimage now. - Preimages::::mutate_exists(&status.proposal_hash, |maybe_pre| match *maybe_pre { - Some(PreimageStatus::Available { ref mut expiry, .. }) => *expiry = Some(when), - ref mut a => *a = Some(PreimageStatus::Missing(when)), - }); + Preimages::::mutate_exists( + &status.proposal_hash, + |maybe_pre| match *maybe_pre { + Some(PreimageStatus::Available { ref mut expiry, .. 
}) => + *expiry = Some(when), + ref mut a => *a = Some(PreimageStatus::Missing(when)), + }, + ); if T::Scheduler::schedule_named( (DEMOCRACY_ID, index).encode(), @@ -1691,7 +1710,9 @@ impl Pallet { 63, frame_system::RawOrigin::Root.into(), Call::enact_proposal { proposal_hash: status.proposal_hash, index }.into(), - ).is_err() { + ) + .is_err() + { frame_support::print("LOGIC ERROR: bake_referendum/schedule_named failed"); } } @@ -1756,7 +1777,8 @@ impl Pallet { // To decode the enum variant we only need the first byte. let mut buf = [0u8; 1]; let key = >::hashed_key_for(proposal_hash); - let bytes = sp_io::storage::read(&key, &mut buf, 0).ok_or_else(|| Error::::NotImminent)?; + let bytes = + sp_io::storage::read(&key, &mut buf, 0).ok_or_else(|| Error::::NotImminent)?; // The value may be smaller that 1 byte. let mut input = &buf[0..buf.len().min(bytes as usize)]; @@ -1766,7 +1788,7 @@ impl Pallet { _ => { sp_runtime::print("Failed to decode `PreimageStatus` variant"); Err(Error::::NotImminent.into()) - } + }, } } @@ -1784,7 +1806,8 @@ impl Pallet { // * at most 5 bytes to decode a `Compact` let mut buf = [0u8; 6]; let key = >::hashed_key_for(proposal_hash); - let bytes = sp_io::storage::read(&key, &mut buf, 0).ok_or_else(|| Error::::PreimageMissing)?; + let bytes = + sp_io::storage::read(&key, &mut buf, 0).ok_or_else(|| Error::::PreimageMissing)?; // The value may be smaller that 6 bytes. let mut input = &buf[0..buf.len().min(bytes as usize)]; @@ -1793,15 +1816,17 @@ impl Pallet { Ok(0) => return Err(Error::::PreimageMissing.into()), _ => { sp_runtime::print("Failed to decode `PreimageStatus` variant"); - return Err(Error::::PreimageMissing.into()); - } + return Err(Error::::PreimageMissing.into()) + }, } // Decode the length of the vector. 
- let len = codec::Compact::::decode(&mut input).map_err(|_| { - sp_runtime::print("Failed to decode `PreimageStatus` variant"); - DispatchError::from(Error::::PreimageMissing) - })?.0; + let len = codec::Compact::::decode(&mut input) + .map_err(|_| { + sp_runtime::print("Failed to decode `PreimageStatus` variant"); + DispatchError::from(Error::::PreimageMissing) + })? + .0; Ok(len) } @@ -1831,7 +1856,10 @@ impl Pallet { } // See `note_imminent_preimage` - fn note_imminent_preimage_inner(who: T::AccountId, encoded_proposal: Vec) -> DispatchResult { + fn note_imminent_preimage_inner( + who: T::AccountId, + encoded_proposal: Vec, + ) -> DispatchResult { let proposal_hash = T::Hashing::hash(&encoded_proposal[..]); Self::check_pre_image_is_missing(proposal_hash)?; let status = Preimages::::get(&proposal_hash).ok_or(Error::::NotImminent)?; @@ -1867,6 +1895,6 @@ fn decode_compact_u32_at(key: &[u8]) -> Option { sp_runtime::print("Failed to decode compact u32 at:"); sp_runtime::print(key); None - } + }, } } diff --git a/frame/democracy/src/tests.rs b/frame/democracy/src/tests.rs index 1c68715d49e3e..64444304db673 100644 --- a/frame/democracy/src/tests.rs +++ b/frame/democracy/src/tests.rs @@ -17,23 +17,25 @@ //! The crate's tests. 
-use crate as pallet_democracy; use super::*; +use crate as pallet_democracy; use codec::Encode; use frame_support::{ - assert_noop, assert_ok, parameter_types, ord_parameter_types, - traits::{SortedMembers, OnInitialize, Filter, GenesisBuild}, + assert_noop, assert_ok, ord_parameter_types, parameter_types, + traits::{Filter, GenesisBuild, OnInitialize, SortedMembers}, weights::Weight, }; +use frame_system::{EnsureRoot, EnsureSignedBy}; +use pallet_balances::{BalanceLock, Error as BalancesError}; use sp_core::H256; use sp_runtime::{ - traits::{BlakeTwo256, IdentityLookup, BadOrigin}, - testing::Header, Perbill, + testing::Header, + traits::{BadOrigin, BlakeTwo256, IdentityLookup}, + Perbill, }; -use pallet_balances::{BalanceLock, Error as BalancesError}; -use frame_system::{EnsureSignedBy, EnsureRoot}; mod cancellation; +mod decoders; mod delegation; mod external_proposing; mod fast_tracking; @@ -42,7 +44,6 @@ mod preimage; mod public_proposals; mod scheduling; mod voting; -mod decoders; const AYE: Vote = Vote { aye: true, conviction: Conviction::None }; const NAY: Vote = Vote { aye: false, conviction: Conviction::None }; @@ -194,10 +195,14 @@ impl Config for Test { pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig::{ + pallet_balances::GenesisConfig:: { balances: vec![(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)], - }.assimilate_storage(&mut t).unwrap(); - pallet_democracy::GenesisConfig::::default().assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); + pallet_democracy::GenesisConfig::::default() + .assimilate_storage(&mut t) + .unwrap(); let mut ext = sp_io::TestExternalities::new(t); ext.execute_with(|| System::set_block_number(1)); ext @@ -246,19 +251,11 @@ fn set_balance_proposal_hash_and_note(value: u64) -> H256 { } fn propose_set_balance(who: u64, value: u64, delay: u64) -> DispatchResult { - 
Democracy::propose( - Origin::signed(who), - set_balance_proposal_hash(value), - delay, - ) + Democracy::propose(Origin::signed(who), set_balance_proposal_hash(value), delay) } fn propose_set_balance_and_note(who: u64, value: u64, delay: u64) -> DispatchResult { - Democracy::propose( - Origin::signed(who), - set_balance_proposal_hash_and_note(value), - delay, - ) + Democracy::propose(Origin::signed(who), set_balance_proposal_hash_and_note(value), delay) } fn next_block() { diff --git a/frame/democracy/src/tests/cancellation.rs b/frame/democracy/src/tests/cancellation.rs index d48173a39d832..c2bd725ce934a 100644 --- a/frame/democracy/src/tests/cancellation.rs +++ b/frame/democracy/src/tests/cancellation.rs @@ -26,7 +26,7 @@ fn cancel_referendum_should_work() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(1), r, aye(1))); assert_ok!(Democracy::cancel_referendum(Origin::root(), r.into())); @@ -67,7 +67,7 @@ fn emergency_cancel_should_work() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 2 + 2, ); assert!(Democracy::referendum_status(r).is_ok()); @@ -81,7 +81,7 @@ fn emergency_cancel_should_work() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 2 + 2, ); assert!(Democracy::referendum_status(r).is_ok()); assert_noop!( diff --git a/frame/democracy/src/tests/decoders.rs b/frame/democracy/src/tests/decoders.rs index c3eb9ca7e3322..3c1729c4355c0 100644 --- a/frame/democracy/src/tests/decoders.rs +++ b/frame/democracy/src/tests/decoders.rs @@ -66,7 +66,7 @@ fn pre_image() { assert_noop!(Democracy::check_pre_image_is_missing(key), Error::::NotImminent); for l in vec![0, 10, 100, 1000u32] { - let available = PreimageStatus::Available{ + let available = PreimageStatus::Available { data: (0..l).map(|i| i as u8).collect(), provider: 0, deposit: 0, @@ -76,8 +76,10 @@ fn pre_image() { Preimages::::insert(key, available); 
assert_eq!(Democracy::pre_image_data_len(key), Ok(l)); - assert_noop!(Democracy::check_pre_image_is_missing(key), - Error::::DuplicatePreimage); + assert_noop!( + Democracy::check_pre_image_is_missing(key), + Error::::DuplicatePreimage + ); } }) } diff --git a/frame/democracy/src/tests/external_proposing.rs b/frame/democracy/src/tests/external_proposing.rs index 37654a5e91462..7442964584fa9 100644 --- a/frame/democracy/src/tests/external_proposing.rs +++ b/frame/democracy/src/tests/external_proposing.rs @@ -34,17 +34,17 @@ fn veto_external_works() { // cancelled. assert!(!>::exists()); // fails - same proposal can't be resubmitted. - assert_noop!(Democracy::external_propose( - Origin::signed(2), - set_balance_proposal_hash(2), - ), Error::::ProposalBlacklisted); + assert_noop!( + Democracy::external_propose(Origin::signed(2), set_balance_proposal_hash(2),), + Error::::ProposalBlacklisted + ); fast_forward_to(1); // fails as we're still in cooloff period. - assert_noop!(Democracy::external_propose( - Origin::signed(2), - set_balance_proposal_hash(2), - ), Error::::ProposalBlacklisted); + assert_noop!( + Democracy::external_propose(Origin::signed(2), set_balance_proposal_hash(2),), + Error::::ProposalBlacklisted + ); fast_forward_to(2); // works; as we're out of the cooloff period. @@ -67,10 +67,10 @@ fn veto_external_works() { fast_forward_to(3); // same proposal fails as we're still in cooloff - assert_noop!(Democracy::external_propose( - Origin::signed(2), - set_balance_proposal_hash(2), - ), Error::::ProposalBlacklisted); + assert_noop!( + Democracy::external_propose(Origin::signed(2), set_balance_proposal_hash(2),), + Error::::ProposalBlacklisted + ); // different proposal works fine. 
assert_ok!(Democracy::external_propose( Origin::signed(2), @@ -96,10 +96,7 @@ fn external_blacklisting_should_work() { assert_noop!(Democracy::referendum_status(0), Error::::ReferendumInvalid); assert_noop!( - Democracy::external_propose( - Origin::signed(2), - set_balance_proposal_hash_and_note(2), - ), + Democracy::external_propose(Origin::signed(2), set_balance_proposal_hash_and_note(2),), Error::::ProposalBlacklisted, ); }); @@ -110,20 +107,17 @@ fn external_referendum_works() { new_test_ext().execute_with(|| { System::set_block_number(0); assert_noop!( - Democracy::external_propose( - Origin::signed(1), - set_balance_proposal_hash(2), - ), + Democracy::external_propose(Origin::signed(1), set_balance_proposal_hash(2),), BadOrigin, ); assert_ok!(Democracy::external_propose( Origin::signed(2), set_balance_proposal_hash_and_note(2), )); - assert_noop!(Democracy::external_propose( - Origin::signed(2), - set_balance_proposal_hash(1), - ), Error::::DuplicateProposal); + assert_noop!( + Democracy::external_propose(Origin::signed(2), set_balance_proposal_hash(1),), + Error::::DuplicateProposal + ); fast_forward_to(2); assert_eq!( Democracy::referendum_status(0), @@ -143,10 +137,7 @@ fn external_majority_referendum_works() { new_test_ext().execute_with(|| { System::set_block_number(0); assert_noop!( - Democracy::external_propose_majority( - Origin::signed(1), - set_balance_proposal_hash(2) - ), + Democracy::external_propose_majority(Origin::signed(1), set_balance_proposal_hash(2)), BadOrigin, ); assert_ok!(Democracy::external_propose_majority( @@ -172,10 +163,7 @@ fn external_default_referendum_works() { new_test_ext().execute_with(|| { System::set_block_number(0); assert_noop!( - Democracy::external_propose_default( - Origin::signed(3), - set_balance_proposal_hash(2) - ), + Democracy::external_propose_default(Origin::signed(3), set_balance_proposal_hash(2)), BadOrigin, ); assert_ok!(Democracy::external_propose_default( @@ -196,7 +184,6 @@ fn 
external_default_referendum_works() { }); } - #[test] fn external_and_public_interleaving_works() { new_test_ext().execute_with(|| { @@ -222,9 +209,9 @@ fn external_and_public_interleaving_works() { ); // replenish external assert_ok!(Democracy::external_propose( - Origin::signed(2), - set_balance_proposal_hash_and_note(3), - )); + Origin::signed(2), + set_balance_proposal_hash_and_note(3), + )); fast_forward_to(4); @@ -256,9 +243,9 @@ fn external_and_public_interleaving_works() { ); // replenish external assert_ok!(Democracy::external_propose( - Origin::signed(2), - set_balance_proposal_hash_and_note(5), - )); + Origin::signed(2), + set_balance_proposal_hash_and_note(5), + )); fast_forward_to(8); diff --git a/frame/democracy/src/tests/fast_tracking.rs b/frame/democracy/src/tests/fast_tracking.rs index d01dafaa762ba..9b2f2760bde1c 100644 --- a/frame/democracy/src/tests/fast_tracking.rs +++ b/frame/democracy/src/tests/fast_tracking.rs @@ -24,7 +24,10 @@ fn fast_track_referendum_works() { new_test_ext().execute_with(|| { System::set_block_number(0); let h = set_balance_proposal_hash_and_note(2); - assert_noop!(Democracy::fast_track(Origin::signed(5), h, 3, 2), Error::::ProposalMissing); + assert_noop!( + Democracy::fast_track(Origin::signed(5), h, 3, 2), + Error::::ProposalMissing + ); assert_ok!(Democracy::external_propose_majority( Origin::signed(3), set_balance_proposal_hash_and_note(2) @@ -49,14 +52,20 @@ fn instant_referendum_works() { new_test_ext().execute_with(|| { System::set_block_number(0); let h = set_balance_proposal_hash_and_note(2); - assert_noop!(Democracy::fast_track(Origin::signed(5), h, 3, 2), Error::::ProposalMissing); + assert_noop!( + Democracy::fast_track(Origin::signed(5), h, 3, 2), + Error::::ProposalMissing + ); assert_ok!(Democracy::external_propose_majority( Origin::signed(3), set_balance_proposal_hash_and_note(2) )); assert_noop!(Democracy::fast_track(Origin::signed(1), h, 3, 2), BadOrigin); 
assert_noop!(Democracy::fast_track(Origin::signed(5), h, 1, 0), BadOrigin); - assert_noop!(Democracy::fast_track(Origin::signed(6), h, 1, 0), Error::::InstantNotAllowed); + assert_noop!( + Democracy::fast_track(Origin::signed(6), h, 1, 0), + Error::::InstantNotAllowed + ); INSTANT_ALLOWED.with(|v| *v.borrow_mut() = true); assert_ok!(Democracy::fast_track(Origin::signed(6), h, 1, 0)); assert_eq!( diff --git a/frame/democracy/src/tests/lock_voting.rs b/frame/democracy/src/tests/lock_voting.rs index 29cd24e1de60a..c1a27400fe557 100644 --- a/frame/democracy/src/tests/lock_voting.rs +++ b/frame/democracy/src/tests/lock_voting.rs @@ -23,23 +23,19 @@ use std::convert::TryFrom; fn aye(x: u8, balance: u64) -> AccountVote { AccountVote::Standard { vote: Vote { aye: true, conviction: Conviction::try_from(x).unwrap() }, - balance + balance, } } fn nay(x: u8, balance: u64) -> AccountVote { AccountVote::Standard { vote: Vote { aye: false, conviction: Conviction::try_from(x).unwrap() }, - balance + balance, } } fn the_lock(amount: u64) -> BalanceLock { - BalanceLock { - id: DEMOCRACY_ID, - amount, - reasons: pallet_balances::Reasons::Misc, - } + BalanceLock { id: DEMOCRACY_ID, amount, reasons: pallet_balances::Reasons::Misc } } #[test] @@ -50,7 +46,7 @@ fn lock_voting_should_work() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(1), r, nay(5, 10))); assert_ok!(Democracy::vote(Origin::signed(2), r, aye(4, 20))); @@ -74,7 +70,10 @@ fn lock_voting_should_work() { assert_ok!(Democracy::unlock(Origin::signed(2), 5)); // 2, 3, 4 got their way with the vote, so they cannot be reaped by others. - assert_noop!(Democracy::remove_other_vote(Origin::signed(1), 2, r), Error::::NoPermission); + assert_noop!( + Democracy::remove_other_vote(Origin::signed(1), 2, r), + Error::::NoPermission + ); // However, they can be unvoted by the owner, though it will make no difference to the lock. 
assert_ok!(Democracy::remove_vote(Origin::signed(2), r)); assert_ok!(Democracy::unlock(Origin::signed(2), 2)); @@ -86,10 +85,12 @@ fn lock_voting_should_work() { assert_eq!(Balances::locks(5), vec![]); assert_eq!(Balances::free_balance(42), 2); - fast_forward_to(5); // No change yet... - assert_noop!(Democracy::remove_other_vote(Origin::signed(1), 4, r), Error::::NoPermission); + assert_noop!( + Democracy::remove_other_vote(Origin::signed(1), 4, r), + Error::::NoPermission + ); assert_ok!(Democracy::unlock(Origin::signed(1), 4)); assert_eq!(Balances::locks(4), vec![the_lock(40)]); fast_forward_to(6); @@ -99,7 +100,10 @@ fn lock_voting_should_work() { assert_eq!(Balances::locks(4), vec![]); fast_forward_to(9); - assert_noop!(Democracy::remove_other_vote(Origin::signed(1), 3, r), Error::::NoPermission); + assert_noop!( + Democracy::remove_other_vote(Origin::signed(1), 3, r), + Error::::NoPermission + ); assert_ok!(Democracy::unlock(Origin::signed(1), 3)); assert_eq!(Balances::locks(3), vec![the_lock(30)]); fast_forward_to(10); @@ -145,7 +149,7 @@ fn lock_voting_should_work_with_delegation() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(1), r, nay(5, 10))); assert_ok!(Democracy::vote(Origin::signed(2), r, aye(4, 20))); @@ -168,7 +172,7 @@ fn setup_three_referenda() -> (u32, u32, u32) { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SimpleMajority, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(5), r1, aye(4, 10))); @@ -176,7 +180,7 @@ fn setup_three_referenda() -> (u32, u32, u32) { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SimpleMajority, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(5), r2, aye(3, 20))); @@ -184,7 +188,7 @@ fn setup_three_referenda() -> (u32, u32, u32) { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SimpleMajority, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(5), r3, aye(2, 50))); @@ -202,7 +206,10 @@ fn 
prior_lockvotes_should_be_enforced() { // r.2 locked 50 until #6. fast_forward_to(5); - assert_noop!(Democracy::remove_other_vote(Origin::signed(1), 5, r.2), Error::::NoPermission); + assert_noop!( + Democracy::remove_other_vote(Origin::signed(1), 5, r.2), + Error::::NoPermission + ); assert_ok!(Democracy::unlock(Origin::signed(5), 5)); assert_eq!(Balances::locks(5), vec![the_lock(50)]); fast_forward_to(6); @@ -210,7 +217,10 @@ fn prior_lockvotes_should_be_enforced() { assert_ok!(Democracy::unlock(Origin::signed(5), 5)); assert_eq!(Balances::locks(5), vec![the_lock(20)]); fast_forward_to(9); - assert_noop!(Democracy::remove_other_vote(Origin::signed(1), 5, r.1), Error::::NoPermission); + assert_noop!( + Democracy::remove_other_vote(Origin::signed(1), 5, r.1), + Error::::NoPermission + ); assert_ok!(Democracy::unlock(Origin::signed(5), 5)); assert_eq!(Balances::locks(5), vec![the_lock(20)]); fast_forward_to(10); @@ -218,7 +228,10 @@ fn prior_lockvotes_should_be_enforced() { assert_ok!(Democracy::unlock(Origin::signed(5), 5)); assert_eq!(Balances::locks(5), vec![the_lock(10)]); fast_forward_to(17); - assert_noop!(Democracy::remove_other_vote(Origin::signed(1), 5, r.0), Error::::NoPermission); + assert_noop!( + Democracy::remove_other_vote(Origin::signed(1), 5, r.0), + Error::::NoPermission + ); assert_ok!(Democracy::unlock(Origin::signed(5), 5)); assert_eq!(Balances::locks(5), vec![the_lock(10)]); fast_forward_to(18); @@ -296,7 +309,7 @@ fn locks_should_persist_from_voting_to_delegation() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SimpleMajority, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(5), r, aye(4, 10))); fast_forward_to(2); diff --git a/frame/democracy/src/tests/preimage.rs b/frame/democracy/src/tests/preimage.rs index a412343299d9f..6d478fcaa68c7 100644 --- a/frame/democracy/src/tests/preimage.rs +++ b/frame/democracy/src/tests/preimage.rs @@ -26,7 +26,7 @@ fn missing_preimage_should_fail() { 2, set_balance_proposal_hash(2), 
VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(1), r, aye(1))); @@ -43,8 +43,11 @@ fn preimage_deposit_should_be_required_and_returned() { // fee of 100 is too much. PREIMAGE_BYTE_DEPOSIT.with(|v| *v.borrow_mut() = 100); assert_noop!( - if operational { Democracy::note_preimage_operational(Origin::signed(6), vec![0; 500]) } - else { Democracy::note_preimage(Origin::signed(6), vec![0; 500]) }, + if operational { + Democracy::note_preimage_operational(Origin::signed(6), vec![0; 500]) + } else { + Democracy::note_preimage(Origin::signed(6), vec![0; 500]) + }, BalancesError::::InsufficientBalance, ); // fee of 1 is reasonable. @@ -53,7 +56,7 @@ fn preimage_deposit_should_be_required_and_returned() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(1), r, aye(1))); @@ -72,10 +75,11 @@ fn preimage_deposit_should_be_required_and_returned() { fn preimage_deposit_should_be_reapable_earlier_by_owner() { new_test_ext_execute_with_cond(|operational| { PREIMAGE_BYTE_DEPOSIT.with(|v| *v.borrow_mut() = 1); - assert_ok!( - if operational { Democracy::note_preimage_operational(Origin::signed(6), set_balance_proposal(2)) } - else { Democracy::note_preimage(Origin::signed(6), set_balance_proposal(2)) } - ); + assert_ok!(if operational { + Democracy::note_preimage_operational(Origin::signed(6), set_balance_proposal(2)) + } else { + Democracy::note_preimage(Origin::signed(6), set_balance_proposal(2)) + }); assert_eq!(Balances::reserved_balance(6), 12); @@ -85,7 +89,11 @@ fn preimage_deposit_should_be_reapable_earlier_by_owner() { Error::::TooEarly ); next_block(); - assert_ok!(Democracy::reap_preimage(Origin::signed(6), set_balance_proposal_hash(2), u32::MAX)); + assert_ok!(Democracy::reap_preimage( + Origin::signed(6), + set_balance_proposal_hash(2), + u32::MAX + )); assert_eq!(Balances::free_balance(6), 60); assert_eq!(Balances::reserved_balance(6), 0); @@ 
-96,27 +104,32 @@ fn preimage_deposit_should_be_reapable_earlier_by_owner() { fn preimage_deposit_should_be_reapable() { new_test_ext_execute_with_cond(|operational| { assert_noop!( - Democracy::reap_preimage(Origin::signed(5), set_balance_proposal_hash(2), u32::MAX), - Error::::PreimageMissing - ); + Democracy::reap_preimage(Origin::signed(5), set_balance_proposal_hash(2), u32::MAX), + Error::::PreimageMissing + ); PREIMAGE_BYTE_DEPOSIT.with(|v| *v.borrow_mut() = 1); - assert_ok!( - if operational { Democracy::note_preimage_operational(Origin::signed(6), set_balance_proposal(2)) } - else { Democracy::note_preimage(Origin::signed(6), set_balance_proposal(2)) } - ); + assert_ok!(if operational { + Democracy::note_preimage_operational(Origin::signed(6), set_balance_proposal(2)) + } else { + Democracy::note_preimage(Origin::signed(6), set_balance_proposal(2)) + }); assert_eq!(Balances::reserved_balance(6), 12); next_block(); next_block(); next_block(); assert_noop!( - Democracy::reap_preimage(Origin::signed(5), set_balance_proposal_hash(2), u32::MAX), - Error::::TooEarly - ); + Democracy::reap_preimage(Origin::signed(5), set_balance_proposal_hash(2), u32::MAX), + Error::::TooEarly + ); next_block(); - assert_ok!(Democracy::reap_preimage(Origin::signed(5), set_balance_proposal_hash(2), u32::MAX)); + assert_ok!(Democracy::reap_preimage( + Origin::signed(5), + set_balance_proposal_hash(2), + u32::MAX + )); assert_eq!(Balances::reserved_balance(6), 0); assert_eq!(Balances::free_balance(6), 48); assert_eq!(Balances::free_balance(5), 62); @@ -132,13 +145,19 @@ fn noting_imminent_preimage_for_free_should_work() { 2, set_balance_proposal_hash(2), VoteThreshold::SuperMajorityApprove, - 1 + 1, ); assert_ok!(Democracy::vote(Origin::signed(1), r, aye(1))); assert_noop!( - if operational { Democracy::note_imminent_preimage_operational(Origin::signed(6), set_balance_proposal(2)) } - else { Democracy::note_imminent_preimage(Origin::signed(6), set_balance_proposal(2)) }, + if 
operational { + Democracy::note_imminent_preimage_operational( + Origin::signed(6), + set_balance_proposal(2), + ) + } else { + Democracy::note_imminent_preimage(Origin::signed(6), set_balance_proposal(2)) + }, Error::::NotImminent ); @@ -161,7 +180,10 @@ fn reaping_imminent_preimage_should_fail() { assert_ok!(Democracy::vote(Origin::signed(1), r, aye(1))); next_block(); next_block(); - assert_noop!(Democracy::reap_preimage(Origin::signed(6), h, u32::MAX), Error::::Imminent); + assert_noop!( + Democracy::reap_preimage(Origin::signed(6), h, u32::MAX), + Error::::Imminent + ); }); } @@ -174,7 +196,7 @@ fn note_imminent_preimage_can_only_be_successful_once() { 2, set_balance_proposal_hash(2), VoteThreshold::SuperMajorityApprove, - 1 + 1, ); assert_ok!(Democracy::vote(Origin::signed(1), r, aye(1))); next_block(); diff --git a/frame/democracy/src/tests/public_proposals.rs b/frame/democracy/src/tests/public_proposals.rs index 1d323d684d7f2..34713c3e15725 100644 --- a/frame/democracy/src/tests/public_proposals.rs +++ b/frame/democracy/src/tests/public_proposals.rs @@ -89,10 +89,7 @@ fn poor_seconder_should_not_work() { fn invalid_seconds_upper_bound_should_not_work() { new_test_ext().execute_with(|| { assert_ok!(propose_set_balance_and_note(1, 2, 5)); - assert_noop!( - Democracy::second(Origin::signed(2), 0, 0), - Error::::WrongUpperBound - ); + assert_noop!(Democracy::second(Origin::signed(2), 0, 0), Error::::WrongUpperBound); }); } diff --git a/frame/democracy/src/tests/scheduling.rs b/frame/democracy/src/tests/scheduling.rs index e178ff0fc1a25..06b492bc6093c 100644 --- a/frame/democracy/src/tests/scheduling.rs +++ b/frame/democracy/src/tests/scheduling.rs @@ -26,7 +26,7 @@ fn simple_passing_should_work() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(1), r, aye(1))); assert_eq!(tally(r), Tally { ayes: 1, nays: 0, turnout: 10 }); @@ -43,7 +43,7 @@ fn simple_failing_should_work() { 2, 
set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(1), r, nay(1))); assert_eq!(tally(r), Tally { ayes: 0, nays: 1, turnout: 10 }); @@ -62,13 +62,13 @@ fn ooo_inject_referendums_should_work() { 3, set_balance_proposal_hash_and_note(3), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); let r2 = Democracy::inject_referendum( 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(1), r2, aye(1))); @@ -92,7 +92,7 @@ fn delayed_enactment_should_work() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 1 + 1, ); assert_ok!(Democracy::vote(Origin::signed(1), r, aye(1))); assert_ok!(Democracy::vote(Origin::signed(2), r, aye(2))); diff --git a/frame/democracy/src/tests/voting.rs b/frame/democracy/src/tests/voting.rs index 13072ebf87b11..e035c2d46c1b6 100644 --- a/frame/democracy/src/tests/voting.rs +++ b/frame/democracy/src/tests/voting.rs @@ -23,7 +23,10 @@ use super::*; fn overvoting_should_fail() { new_test_ext().execute_with(|| { let r = begin_referendum(); - assert_noop!(Democracy::vote(Origin::signed(1), r, aye(2)), Error::::InsufficientFunds); + assert_noop!( + Democracy::vote(Origin::signed(1), r, aye(2)), + Error::::InsufficientFunds + ); }); } @@ -102,7 +105,7 @@ fn controversial_voting_should_work() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(1), r, big_aye(1))); @@ -128,7 +131,7 @@ fn controversial_low_turnout_voting_should_work() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(5), r, big_nay(5))); assert_ok!(Democracy::vote(Origin::signed(6), r, big_aye(6))); @@ -152,7 +155,7 @@ fn passing_low_turnout_voting_should_work() { 2, set_balance_proposal_hash_and_note(2), VoteThreshold::SuperMajorityApprove, - 
0 + 0, ); assert_ok!(Democracy::vote(Origin::signed(4), r, big_aye(4))); assert_ok!(Democracy::vote(Origin::signed(5), r, big_nay(5))); diff --git a/frame/democracy/src/types.rs b/frame/democracy/src/types.rs index 7cc748a0a0e28..ffe0ce43fc20e 100644 --- a/frame/democracy/src/types.rs +++ b/frame/democracy/src/types.rs @@ -17,30 +17,32 @@ //! Miscellaneous additional datatypes. -use codec::{Encode, Decode}; +use crate::{AccountVote, Conviction, Vote, VoteThreshold}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; -use sp_runtime::RuntimeDebug; -use sp_runtime::traits::{Zero, Bounded, CheckedAdd, CheckedSub, CheckedMul, CheckedDiv, Saturating}; -use crate::{Vote, VoteThreshold, AccountVote, Conviction}; +use sp_runtime::{ + traits::{Bounded, CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, Saturating, Zero}, + RuntimeDebug, +}; /// Info regarding an ongoing referendum. #[derive(Encode, Decode, Default, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Tally { /// The number of aye votes, expressed in terms of post-conviction lock-vote. - pub (crate) ayes: Balance, + pub(crate) ayes: Balance, /// The number of nay votes, expressed in terms of post-conviction lock-vote. - pub (crate) nays: Balance, + pub(crate) nays: Balance, /// The amount of funds currently expressing its opinion. Pre-conviction. - pub (crate) turnout: Balance, + pub(crate) turnout: Balance, } /// Amount of votes and capital placed in delegation for an account. #[derive(Encode, Decode, Default, Copy, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Delegations { /// The number of votes (this is post-conviction). - pub (crate) votes: Balance, + pub(crate) votes: Balance, /// The amount of raw capital, used for the turnout. 
- pub (crate) capital: Balance, + pub(crate) capital: Balance, } impl Saturating for Delegations { @@ -66,22 +68,24 @@ impl Saturating for Delegations { } fn saturating_pow(self, exp: usize) -> Self { - Self { - votes: self.votes.saturating_pow(exp), - capital: self.capital.saturating_pow(exp), - } + Self { votes: self.votes.saturating_pow(exp), capital: self.capital.saturating_pow(exp) } } } impl< - Balance: From + Zero + Copy + CheckedAdd + CheckedSub + CheckedMul + CheckedDiv + Bounded + - Saturating -> Tally { + Balance: From + + Zero + + Copy + + CheckedAdd + + CheckedSub + + CheckedMul + + CheckedDiv + + Bounded + + Saturating, + > Tally +{ /// Create a new tally. - pub fn new( - vote: Vote, - balance: Balance, - ) -> Self { + pub fn new(vote: Vote, balance: Balance) -> Self { let Delegations { votes, capital } = vote.conviction.votes(balance); Self { ayes: if vote.aye { votes } else { Zero::zero() }, @@ -91,10 +95,7 @@ impl< } /// Add an account's vote into the tally. - pub fn add( - &mut self, - vote: AccountVote, - ) -> Option<()> { + pub fn add(&mut self, vote: AccountVote) -> Option<()> { match vote { AccountVote::Standard { vote, balance } => { let Delegations { votes, capital } = vote.conviction.votes(balance); @@ -103,23 +104,20 @@ impl< true => self.ayes = self.ayes.checked_add(&votes)?, false => self.nays = self.nays.checked_add(&votes)?, } - } + }, AccountVote::Split { aye, nay } => { let aye = Conviction::None.votes(aye); let nay = Conviction::None.votes(nay); self.turnout = self.turnout.checked_add(&aye.capital)?.checked_add(&nay.capital)?; self.ayes = self.ayes.checked_add(&aye.votes)?; self.nays = self.nays.checked_add(&nay.votes)?; - } + }, } Some(()) } /// Remove an account's vote from the tally. 
- pub fn remove( - &mut self, - vote: AccountVote, - ) -> Option<()> { + pub fn remove(&mut self, vote: AccountVote) -> Option<()> { match vote { AccountVote::Standard { vote, balance } => { let Delegations { votes, capital } = vote.conviction.votes(balance); @@ -128,14 +126,14 @@ impl< true => self.ayes = self.ayes.checked_sub(&votes)?, false => self.nays = self.nays.checked_sub(&votes)?, } - } + }, AccountVote::Split { aye, nay } => { let aye = Conviction::None.votes(aye); let nay = Conviction::None.votes(nay); self.turnout = self.turnout.checked_sub(&aye.capital)?.checked_sub(&nay.capital)?; self.ayes = self.ayes.checked_sub(&aye.votes)?; self.nays = self.nays.checked_sub(&nay.votes)?; - } + }, } Some(()) } @@ -165,15 +163,15 @@ impl< #[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct ReferendumStatus { /// When voting on this referendum will end. - pub (crate) end: BlockNumber, + pub(crate) end: BlockNumber, /// The hash of the proposal being voted on. - pub (crate) proposal_hash: Hash, + pub(crate) proposal_hash: Hash, /// The thresholding mechanism to determine whether it passed. - pub (crate) threshold: VoteThreshold, + pub(crate) threshold: VoteThreshold, /// The delay (in blocks) to wait after a successful referendum before deploying. - pub (crate) delay: BlockNumber, + pub(crate) delay: BlockNumber, /// The current tally of votes in this referendum. - pub (crate) tally: Tally, + pub(crate) tally: Tally, } /// Info regarding a referendum, present or past. @@ -182,7 +180,7 @@ pub enum ReferendumInfo { /// Referendum is happening, the arg is the block number at which it will end. Ongoing(ReferendumStatus), /// Referendum finished at `end`, and has been `approved` or rejected. 
- Finished{approved: bool, end: BlockNumber}, + Finished { approved: bool, end: BlockNumber }, } impl ReferendumInfo { @@ -193,7 +191,7 @@ impl ReferendumInfo Self { - let s = ReferendumStatus{ end, proposal_hash, threshold, delay, tally: Tally::default() }; + let s = ReferendumStatus { end, proposal_hash, threshold, delay, tally: Tally::default() }; ReferendumInfo::Ongoing(s) } } diff --git a/frame/democracy/src/vote.rs b/frame/democracy/src/vote.rs index 1f2baa81833a8..03ca020ca0949 100644 --- a/frame/democracy/src/vote.rs +++ b/frame/democracy/src/vote.rs @@ -17,11 +17,14 @@ //! The vote datatype. -use sp_std::{prelude::*, result::Result, convert::TryFrom}; -use codec::{Encode, EncodeLike, Decode, Output, Input}; +use crate::{Conviction, Delegations, ReferendumIndex}; +use codec::{Decode, Encode, EncodeLike, Input, Output}; use scale_info::TypeInfo; -use sp_runtime::{RuntimeDebug, traits::{Saturating, Zero}}; -use crate::{Conviction, ReferendumIndex, Delegations}; +use sp_runtime::{ + traits::{Saturating, Zero}, + RuntimeDebug, +}; +use sp_std::{convert::TryFrom, prelude::*, result::Result}; /// A number of lock periods, plus a vote, one way or the other. #[derive(Copy, Clone, Eq, PartialEq, Default, RuntimeDebug)] @@ -55,8 +58,9 @@ impl TypeInfo for Vote { fn type_info() -> scale_info::Type { scale_info::Type::builder() .path(scale_info::Path::new("Vote", module_path!())) - .composite(scale_info::build::Fields::unnamed() - .field(|f| f.ty::().docs(&["Raw vote byte, encodes aye + conviction"])) + .composite( + scale_info::build::Fields::unnamed() + .field(|f| f.ty::().docs(&["Raw vote byte, encodes aye + conviction"])), ) } } @@ -103,7 +107,7 @@ impl AccountVote { /// A "prior" lock, i.e. a lock for some now-forgotten reason. 
#[derive( - Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, TypeInfo + Encode, Decode, Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, TypeInfo, )] pub struct PriorLock(BlockNumber, Balance); @@ -151,7 +155,9 @@ pub enum Voting { }, } -impl Default for Voting { +impl Default + for Voting +{ fn default() -> Self { Voting::Direct { votes: Vec::new(), @@ -161,31 +167,30 @@ impl Default for Voting Voting { +impl + Voting +{ pub fn rejig(&mut self, now: BlockNumber) { match self { Voting::Direct { prior, .. } => prior, Voting::Delegating { prior, .. } => prior, - }.rejig(now); + } + .rejig(now); } /// The amount of this account's balance that much currently be locked due to voting. pub fn locked_balance(&self) -> Balance { match self { - Voting::Direct { votes, prior, .. } => votes.iter() - .map(|i| i.1.balance()) - .fold(prior.locked(), |a, i| a.max(i)), + Voting::Direct { votes, prior, .. } => + votes.iter().map(|i| i.1.balance()).fold(prior.locked(), |a, i| a.max(i)), Voting::Delegating { balance, .. } => *balance, } } - pub fn set_common(&mut self, + pub fn set_common( + &mut self, delegations: Delegations, - prior: PriorLock + prior: PriorLock, ) { let (d, p) = match self { Voting::Direct { ref mut delegations, ref mut prior, .. } => (delegations, prior), diff --git a/frame/democracy/src/vote_threshold.rs b/frame/democracy/src/vote_threshold.rs index 87e6ded0dd554..ad8bce290ed4f 100644 --- a/frame/democracy/src/vote_threshold.rs +++ b/frame/democracy/src/vote_threshold.rs @@ -17,13 +17,13 @@ //! Voting thresholds. 
-#[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; -use codec::{Encode, Decode}; -use scale_info::TypeInfo; -use sp_runtime::traits::{Zero, IntegerSquareRoot}; -use sp_std::ops::{Add, Mul, Div, Rem}; use crate::Tally; +use codec::{Decode, Encode}; +use scale_info::TypeInfo; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; +use sp_runtime::traits::{IntegerSquareRoot, Zero}; +use sp_std::ops::{Add, Div, Mul, Rem}; /// A means of determining if a vote is past pass threshold. #[derive(Clone, Copy, PartialEq, Eq, Encode, Decode, sp_runtime::RuntimeDebug, TypeInfo)] @@ -44,25 +44,32 @@ pub trait Approved { } /// Return `true` iff `n1 / d1 < n2 / d2`. `d1` and `d2` may not be zero. -fn compare_rationals + Div + Rem + Ord + Copy>(mut n1: T, mut d1: T, mut n2: T, mut d2: T) -> bool { +fn compare_rationals< + T: Zero + Mul + Div + Rem + Ord + Copy, +>( + mut n1: T, + mut d1: T, + mut n2: T, + mut d2: T, +) -> bool { // Uses a continued fractional representation for a non-overflowing compare. // Detailed at https://janmr.com/blog/2014/05/comparing-rational-numbers-without-overflow/. 
loop { let q1 = n1 / d1; let q2 = n2 / d2; if q1 < q2 { - return true; + return true } if q2 < q1 { - return false; + return false } let r1 = n1 % d1; let r2 = n2 % d2; if r2.is_zero() { - return false; + return false } if r1.is_zero() { - return true; + return true } n1 = d2; n2 = d1; @@ -72,14 +79,22 @@ fn compare_rationals + Div + Rem - + Mul + Div - + Rem + Copy, -> Approved for VoteThreshold { + Balance: IntegerSquareRoot + + Zero + + Ord + + Add + + Mul + + Div + + Rem + + Copy, + > Approved for VoteThreshold +{ fn approved(&self, tally: Tally, electorate: Balance) -> bool { let sqrt_voters = tally.turnout.integer_sqrt(); let sqrt_electorate = electorate.integer_sqrt(); - if sqrt_voters.is_zero() { return false; } + if sqrt_voters.is_zero() { + return false + } match *self { VoteThreshold::SuperMajorityApprove => compare_rationals(tally.nays, sqrt_voters, tally.ayes, sqrt_electorate), @@ -96,7 +111,9 @@ mod tests { #[test] fn should_work() { - assert!(!VoteThreshold::SuperMajorityApprove.approved(Tally{ayes: 60, nays: 50, turnout: 110}, 210)); - assert!(VoteThreshold::SuperMajorityApprove.approved(Tally{ayes: 100, nays: 50, turnout: 150}, 210)); + assert!(!VoteThreshold::SuperMajorityApprove + .approved(Tally { ayes: 60, nays: 50, turnout: 110 }, 210)); + assert!(VoteThreshold::SuperMajorityApprove + .approved(Tally { ayes: 100, nays: 50, turnout: 150 }, 210)); } } diff --git a/frame/democracy/src/weights.rs b/frame/democracy/src/weights.rs index 1462e65c409b1..682e762c838ba 100644 --- a/frame/democracy/src/weights.rs +++ b/frame/democracy/src/weights.rs @@ -35,40 +35,42 @@ // --output=./frame/democracy/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for 
pallet_democracy. pub trait WeightInfo { fn propose() -> Weight; - fn second(s: u32, ) -> Weight; - fn vote_new(r: u32, ) -> Weight; - fn vote_existing(r: u32, ) -> Weight; + fn second(s: u32) -> Weight; + fn vote_new(r: u32) -> Weight; + fn vote_existing(r: u32) -> Weight; fn emergency_cancel() -> Weight; - fn blacklist(p: u32, ) -> Weight; - fn external_propose(v: u32, ) -> Weight; + fn blacklist(p: u32) -> Weight; + fn external_propose(v: u32) -> Weight; fn external_propose_majority() -> Weight; fn external_propose_default() -> Weight; fn fast_track() -> Weight; - fn veto_external(v: u32, ) -> Weight; - fn cancel_proposal(p: u32, ) -> Weight; + fn veto_external(v: u32) -> Weight; + fn cancel_proposal(p: u32) -> Weight; fn cancel_referendum() -> Weight; - fn cancel_queued(r: u32, ) -> Weight; - fn on_initialize_base(r: u32, ) -> Weight; - fn delegate(r: u32, ) -> Weight; - fn undelegate(r: u32, ) -> Weight; + fn cancel_queued(r: u32) -> Weight; + fn on_initialize_base(r: u32) -> Weight; + fn delegate(r: u32) -> Weight; + fn undelegate(r: u32) -> Weight; fn clear_public_proposals() -> Weight; - fn note_preimage(b: u32, ) -> Weight; - fn note_imminent_preimage(b: u32, ) -> Weight; - fn reap_preimage(b: u32, ) -> Weight; - fn unlock_remove(r: u32, ) -> Weight; - fn unlock_set(r: u32, ) -> Weight; - fn remove_vote(r: u32, ) -> Weight; - fn remove_other_vote(r: u32, ) -> Weight; + fn note_preimage(b: u32) -> Weight; + fn note_imminent_preimage(b: u32) -> Weight; + fn reap_preimage(b: u32) -> Weight; + fn unlock_remove(r: u32) -> Weight; + fn unlock_set(r: u32) -> Weight; + fn remove_vote(r: u32) -> Weight; + fn remove_other_vote(r: u32) -> Weight; } /// Weights for pallet_democracy using the Substrate node and recommended hardware. 
@@ -79,21 +81,21 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn second(s: u32, ) -> Weight { + fn second(s: u32) -> Weight { (41_071_000 as Weight) // Standard Error: 1_000 .saturating_add((211_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn vote_new(r: u32, ) -> Weight { + fn vote_new(r: u32) -> Weight { (46_179_000 as Weight) // Standard Error: 0 .saturating_add((283_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn vote_existing(r: u32, ) -> Weight { + fn vote_existing(r: u32) -> Weight { (46_169_000 as Weight) // Standard Error: 0 .saturating_add((284_000 as Weight).saturating_mul(r as Weight)) @@ -105,14 +107,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn blacklist(p: u32, ) -> Weight { + fn blacklist(p: u32) -> Weight { (80_711_000 as Weight) // Standard Error: 4_000 .saturating_add((590_000 as Weight).saturating_mul(p as Weight)) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(6 as Weight)) } - fn external_propose(v: u32, ) -> Weight { + fn external_propose(v: u32) -> Weight { (13_197_000 as Weight) // Standard Error: 0 .saturating_add((90_000 as Weight).saturating_mul(v as Weight)) @@ -120,26 +122,24 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) } fn external_propose_majority() -> Weight { - (2_712_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (2_712_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight)) } fn external_propose_default() -> Weight { - (2_680_000 as Weight) 
- .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (2_680_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight)) } fn fast_track() -> Weight { (28_340_000 as Weight) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn veto_external(v: u32, ) -> Weight { + fn veto_external(v: u32) -> Weight { (28_894_000 as Weight) // Standard Error: 0 .saturating_add((133_000 as Weight).saturating_mul(v as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn cancel_proposal(p: u32, ) -> Weight { + fn cancel_proposal(p: u32) -> Weight { (54_339_000 as Weight) // Standard Error: 1_000 .saturating_add((561_000 as Weight).saturating_mul(p as Weight)) @@ -147,24 +147,23 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(3 as Weight)) } fn cancel_referendum() -> Weight { - (17_183_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (17_183_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn cancel_queued(r: u32, ) -> Weight { + fn cancel_queued(r: u32) -> Weight { (30_500_000 as Weight) // Standard Error: 1_000 .saturating_add((1_730_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn on_initialize_base(r: u32, ) -> Weight { + fn on_initialize_base(r: u32) -> Weight { (7_788_000 as Weight) // Standard Error: 4_000 .saturating_add((5_422_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(r as Weight))) } - fn delegate(r: u32, ) -> Weight { + fn delegate(r: u32) -> Weight { (55_676_000 as Weight) // Standard Error: 5_000 .saturating_add((7_553_000 as Weight).saturating_mul(r as Weight)) @@ -173,7 +172,7 @@ impl WeightInfo for 
SubstrateWeight { .saturating_add(T::DbWeight::get().writes(4 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(r as Weight))) } - fn undelegate(r: u32, ) -> Weight { + fn undelegate(r: u32) -> Weight { (23_908_000 as Weight) // Standard Error: 5_000 .saturating_add((7_551_000 as Weight).saturating_mul(r as Weight)) @@ -183,52 +182,51 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(r as Weight))) } fn clear_public_proposals() -> Weight { - (3_023_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (3_023_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn note_preimage(b: u32, ) -> Weight { + fn note_preimage(b: u32) -> Weight { (44_069_000 as Weight) // Standard Error: 0 .saturating_add((3_000 as Weight).saturating_mul(b as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn note_imminent_preimage(b: u32, ) -> Weight { + fn note_imminent_preimage(b: u32) -> Weight { (28_457_000 as Weight) // Standard Error: 0 .saturating_add((2_000 as Weight).saturating_mul(b as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn reap_preimage(b: u32, ) -> Weight { + fn reap_preimage(b: u32) -> Weight { (39_646_000 as Weight) // Standard Error: 0 .saturating_add((2_000 as Weight).saturating_mul(b as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn unlock_remove(r: u32, ) -> Weight { + fn unlock_remove(r: u32) -> Weight { (39_499_000 as Weight) // Standard Error: 0 .saturating_add((148_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn unlock_set(r: u32, ) -> Weight { + fn unlock_set(r: u32) -> Weight { 
(37_340_000 as Weight) // Standard Error: 0 .saturating_add((266_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn remove_vote(r: u32, ) -> Weight { + fn remove_vote(r: u32) -> Weight { (20_397_000 as Weight) // Standard Error: 0 .saturating_add((259_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn remove_other_vote(r: u32, ) -> Weight { + fn remove_other_vote(r: u32) -> Weight { (20_425_000 as Weight) // Standard Error: 0 .saturating_add((156_000 as Weight).saturating_mul(r as Weight)) @@ -244,21 +242,21 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn second(s: u32, ) -> Weight { + fn second(s: u32) -> Weight { (41_071_000 as Weight) // Standard Error: 1_000 .saturating_add((211_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn vote_new(r: u32, ) -> Weight { + fn vote_new(r: u32) -> Weight { (46_179_000 as Weight) // Standard Error: 0 .saturating_add((283_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn vote_existing(r: u32, ) -> Weight { + fn vote_existing(r: u32) -> Weight { (46_169_000 as Weight) // Standard Error: 0 .saturating_add((284_000 as Weight).saturating_mul(r as Weight)) @@ -270,14 +268,14 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn blacklist(p: u32, ) -> Weight { + fn blacklist(p: u32) -> Weight { (80_711_000 as Weight) // Standard Error: 4_000 .saturating_add((590_000 as Weight).saturating_mul(p 
as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(6 as Weight)) } - fn external_propose(v: u32, ) -> Weight { + fn external_propose(v: u32) -> Weight { (13_197_000 as Weight) // Standard Error: 0 .saturating_add((90_000 as Weight).saturating_mul(v as Weight)) @@ -285,26 +283,24 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } fn external_propose_majority() -> Weight { - (2_712_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (2_712_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight)) } fn external_propose_default() -> Weight { - (2_680_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (2_680_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight)) } fn fast_track() -> Weight { (28_340_000 as Weight) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn veto_external(v: u32, ) -> Weight { + fn veto_external(v: u32) -> Weight { (28_894_000 as Weight) // Standard Error: 0 .saturating_add((133_000 as Weight).saturating_mul(v as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn cancel_proposal(p: u32, ) -> Weight { + fn cancel_proposal(p: u32) -> Weight { (54_339_000 as Weight) // Standard Error: 1_000 .saturating_add((561_000 as Weight).saturating_mul(p as Weight)) @@ -312,24 +308,23 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } fn cancel_referendum() -> Weight { - (17_183_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (17_183_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn cancel_queued(r: u32, ) -> Weight { + fn cancel_queued(r: u32) -> Weight { (30_500_000 as Weight) // Standard Error: 1_000 .saturating_add((1_730_000 as 
Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn on_initialize_base(r: u32, ) -> Weight { + fn on_initialize_base(r: u32) -> Weight { (7_788_000 as Weight) // Standard Error: 4_000 .saturating_add((5_422_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(r as Weight))) } - fn delegate(r: u32, ) -> Weight { + fn delegate(r: u32) -> Weight { (55_676_000 as Weight) // Standard Error: 5_000 .saturating_add((7_553_000 as Weight).saturating_mul(r as Weight)) @@ -338,7 +333,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(4 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(r as Weight))) } - fn undelegate(r: u32, ) -> Weight { + fn undelegate(r: u32) -> Weight { (23_908_000 as Weight) // Standard Error: 5_000 .saturating_add((7_551_000 as Weight).saturating_mul(r as Weight)) @@ -348,52 +343,51 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(r as Weight))) } fn clear_public_proposals() -> Weight { - (3_023_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (3_023_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn note_preimage(b: u32, ) -> Weight { + fn note_preimage(b: u32) -> Weight { (44_069_000 as Weight) // Standard Error: 0 .saturating_add((3_000 as Weight).saturating_mul(b as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn note_imminent_preimage(b: u32, ) -> Weight { + fn note_imminent_preimage(b: u32) -> Weight { (28_457_000 as Weight) // Standard Error: 0 .saturating_add((2_000 as Weight).saturating_mul(b as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) 
.saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn reap_preimage(b: u32, ) -> Weight { + fn reap_preimage(b: u32) -> Weight { (39_646_000 as Weight) // Standard Error: 0 .saturating_add((2_000 as Weight).saturating_mul(b as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn unlock_remove(r: u32, ) -> Weight { + fn unlock_remove(r: u32) -> Weight { (39_499_000 as Weight) // Standard Error: 0 .saturating_add((148_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn unlock_set(r: u32, ) -> Weight { + fn unlock_set(r: u32) -> Weight { (37_340_000 as Weight) // Standard Error: 0 .saturating_add((266_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn remove_vote(r: u32, ) -> Weight { + fn remove_vote(r: u32) -> Weight { (20_397_000 as Weight) // Standard Error: 0 .saturating_add((259_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn remove_other_vote(r: u32, ) -> Weight { + fn remove_other_vote(r: u32) -> Weight { (20_425_000 as Weight) // Standard Error: 0 .saturating_add((156_000 as Weight).saturating_mul(r as Weight)) diff --git a/frame/election-provider-multi-phase/src/benchmarking.rs b/frame/election-provider-multi-phase/src/benchmarking.rs index 6cf581135f144..5e89db7537d07 100644 --- a/frame/election-provider-multi-phase/src/benchmarking.rs +++ b/frame/election-provider-multi-phase/src/benchmarking.rs @@ -18,7 +18,7 @@ //! Two phase election pallet benchmarking. 
use super::*; -use crate::{Pallet as MultiPhase, unsigned::IndexAssignmentOf}; +use crate::{unsigned::IndexAssignmentOf, Pallet as MultiPhase}; use frame_benchmarking::{account, impl_benchmark_test_suite}; use frame_support::{assert_ok, traits::Hooks}; use frame_system::RawOrigin; @@ -53,8 +53,9 @@ fn solution_with_size( let stake: VoteWeight = ed.max(One::one()).saturating_mul(100); // first generates random targets. - let targets: Vec = - (0..size.targets).map(|i| frame_benchmarking::account("Targets", i, SEED)).collect(); + let targets: Vec = (0..size.targets) + .map(|i| frame_benchmarking::account("Targets", i, SEED)) + .collect(); let mut rng = SmallRng::seed_from_u64(SEED.into()); @@ -80,8 +81,11 @@ fn solution_with_size( .collect::>(); // rest of the voters. They can only vote for non-winners. - let non_winners = - targets.iter().filter(|t| !winners.contains(t)).cloned().collect::>(); + let non_winners = targets + .iter() + .filter(|t| !winners.contains(t)) + .cloned() + .collect::>(); let rest_voters = (active_voters_count..size.voters) .map(|i| { let votes = (&non_winners) @@ -147,14 +151,22 @@ fn set_up_data_provider(v: u32, t: u32) { // number of votes in snapshot. T::DataProvider::clear(); - log!(info, "setting up with voters = {} [degree = {}], targets = {}", v, T::DataProvider::MAXIMUM_VOTES_PER_VOTER, t); + log!( + info, + "setting up with voters = {} [degree = {}], targets = {}", + v, + T::DataProvider::MAXIMUM_VOTES_PER_VOTER, + t + ); // fill targets. - let mut targets = (0..t).map(|i| { - let target = frame_benchmarking::account::("Target", i, SEED); - T::DataProvider::add_target(target.clone()); - target - }).collect::>(); + let mut targets = (0..t) + .map(|i| { + let target = frame_benchmarking::account::("Target", i, SEED); + T::DataProvider::add_target(target.clone()); + target + }) + .collect::>(); // we should always have enough voters to fill. 
assert!(targets.len() > T::DataProvider::MAXIMUM_VOTES_PER_VOTER as usize); targets.truncate(T::DataProvider::MAXIMUM_VOTES_PER_VOTER as usize); diff --git a/frame/election-provider-multi-phase/src/helpers.rs b/frame/election-provider-multi-phase/src/helpers.rs index 46eeef0a6bf73..0abf448a4567b 100644 --- a/frame/election-provider-multi-phase/src/helpers.rs +++ b/frame/election-provider-multi-phase/src/helpers.rs @@ -17,7 +17,7 @@ //! Some helper functions/macros for this crate. -use super::{Config, VoteWeight, CompactVoterIndexOf, CompactTargetIndexOf}; +use super::{CompactTargetIndexOf, CompactVoterIndexOf, Config, VoteWeight}; use sp_std::{collections::btree_map::BTreeMap, convert::TryInto, prelude::*}; #[macro_export] @@ -58,7 +58,9 @@ pub fn voter_index_fn( cache: &BTreeMap, ) -> impl Fn(&T::AccountId) -> Option> + '_ { move |who| { - cache.get(who).and_then(|i| >>::try_into(*i).ok()) + cache + .get(who) + .and_then(|i| >>::try_into(*i).ok()) } } @@ -70,7 +72,9 @@ pub fn voter_index_fn_owned( cache: BTreeMap, ) -> impl Fn(&T::AccountId) -> Option> { move |who| { - cache.get(who).and_then(|i| >>::try_into(*i).ok()) + cache + .get(who) + .and_then(|i| >>::try_into(*i).ok()) } } @@ -173,7 +177,11 @@ pub fn stake_of_fn_linear( snapshot: &Vec<(T::AccountId, VoteWeight, Vec)>, ) -> impl Fn(&T::AccountId) -> VoteWeight + '_ { move |who| { - snapshot.iter().find(|(x, _, _)| x == who).map(|(_, x, _)| *x).unwrap_or_default() + snapshot + .iter() + .find(|(x, _, _)| x == who) + .map(|(_, x, _)| *x) + .unwrap_or_default() } } diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 5555ad4ac6560..c04b1aa5309fe 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -228,35 +228,32 @@ #![cfg_attr(not(feature = "std"), no_std)] use codec::{Decode, Encode}; -use scale_info::TypeInfo; +use frame_election_provider_support::{onchain, ElectionDataProvider, 
ElectionProvider}; use frame_support::{ dispatch::DispatchResultWithPostInfo, ensure, - traits::{Currency, Get, ReservableCurrency, OnUnbalanced}, + traits::{Currency, Get, OnUnbalanced, ReservableCurrency}, weights::Weight, }; use frame_system::{ensure_none, offchain::SendTransactionTypes}; -use frame_election_provider_support::{ElectionDataProvider, ElectionProvider, onchain}; +use scale_info::TypeInfo; +use sp_arithmetic::{ + traits::{CheckedAdd, Zero}, + UpperOf, +}; use sp_npos_elections::{ - assignment_ratio_to_staked_normalized, CompactSolution, ElectionScore, - EvaluateSupport, PerThing128, Supports, VoteWeight, + assignment_ratio_to_staked_normalized, CompactSolution, ElectionScore, EvaluateSupport, + PerThing128, Supports, VoteWeight, }; use sp_runtime::{ + traits::Bounded, transaction_validity::{ InvalidTransaction, TransactionPriority, TransactionSource, TransactionValidity, TransactionValidityError, ValidTransaction, }, DispatchError, PerThing, Perbill, RuntimeDebug, SaturatedConversion, - traits::Bounded, -}; -use sp_std::{ - convert::TryInto, - prelude::*, -}; -use sp_arithmetic::{ - UpperOf, - traits::{Zero, CheckedAdd}, }; +use sp_std::{convert::TryInto, prelude::*}; #[cfg(any(feature = "runtime-benchmarks", test))] mod benchmarking; @@ -564,7 +561,9 @@ pub mod pallet { #[pallet::config] pub trait Config: frame_system::Config + SendTransactionTypes> { - type Event: From> + IsType<::Event> + TryInto>; + type Event: From> + + IsType<::Event> + + TryInto>; /// Currency type. type Currency: ReservableCurrency + Currency; @@ -704,21 +703,22 @@ pub mod pallet { Ok(snap_weight) => { log!(info, "Starting signed phase round {}.", Self::round()); T::WeightInfo::on_initialize_open_signed().saturating_add(snap_weight) - } + }, Err(why) => { // Not much we can do about this at this point. 
log!(warn, "failed to open signed phase due to {:?}", why); T::WeightInfo::on_initialize_nothing() // NOTE: ^^ The trait specifies that this is a noop in terms of weight // in case of error. - } + }, } - } + }, Phase::Signed | Phase::Off if remaining <= unsigned_deadline && remaining > Zero::zero() => { // our needs vary according to whether or not the unsigned phase follows a signed phase - let (need_snapshot, enabled, signed_weight) = if current_phase == Phase::Signed { + let (need_snapshot, enabled, signed_weight) = if current_phase == Phase::Signed + { // there was previously a signed phase: close the signed phase, no need for snapshot. // // Notes: @@ -747,14 +747,14 @@ pub mod pallet { }; base_weight.saturating_add(snap_weight).saturating_add(signed_weight) - } + }, Err(why) => { // Not much we can do about this at this point. log!(warn, "failed to open unsigned phase due to {:?}", why); T::WeightInfo::on_initialize_nothing() // NOTE: ^^ The trait specifies that this is a noop in terms of weight // in case of error. - } + }, } } _ => T::WeightInfo::on_initialize_nothing(), @@ -762,15 +762,16 @@ pub mod pallet { } fn offchain_worker(now: T::BlockNumber) { - use sp_runtime::offchain::storage_lock::{StorageLock, BlockAndTime}; + use sp_runtime::offchain::storage_lock::{BlockAndTime, StorageLock}; // Create a lock with the maximum deadline of number of blocks in the unsigned phase. // This should only come useful in an **abrupt** termination of execution, otherwise the // guard will be dropped upon successful execution. 
- let mut lock = StorageLock::>>::with_block_deadline( - unsigned::OFFCHAIN_LOCK, - T::UnsignedPhase::get().saturated_into(), - ); + let mut lock = + StorageLock::>>::with_block_deadline( + unsigned::OFFCHAIN_LOCK, + T::UnsignedPhase::get().saturated_into(), + ); match lock.try_lock() { Ok(_guard) => { @@ -778,7 +779,7 @@ pub mod pallet { }, Err(deadline) => { log!(debug, "offchain worker lock not released, deadline is {:?}", deadline); - } + }, }; } @@ -860,8 +861,7 @@ pub mod pallet { witness: SolutionOrSnapshotSize, ) -> DispatchResultWithPostInfo { ensure_none(origin)?; - let error_message = - "Invalid unsigned submission must produce invalid block and \ + let error_message = "Invalid unsigned submission must produce invalid block and \ deprive validator from their authoring reward."; // Check score being an improvement, phase, and desired targets. @@ -924,11 +924,8 @@ pub mod pallet { // Note: we don't `rotate_round` at this point; the next call to // `ElectionProvider::elect` will succeed and take care of that. - let solution = ReadySolution { - supports, - score: [0, 0, 0], - compute: ElectionCompute::Emergency, - }; + let solution = + ReadySolution { supports, score: [0, 0, 0], compute: ElectionCompute::Emergency }; >::put(solution); Ok(()) @@ -957,7 +954,8 @@ pub mod pallet { // ensure witness data is correct. ensure!( - num_signed_submissions >= >::decode_len().unwrap_or_default() as u32, + num_signed_submissions >= + >::decode_len().unwrap_or_default() as u32, Error::::SignedInvalidWitness, ); @@ -992,8 +990,7 @@ pub mod pallet { }; // collect deposit. Thereafter, the function cannot fail. 
- T::Currency::reserve(&who, deposit) - .map_err(|_| Error::::SignedCannotPayDeposit)?; + T::Currency::reserve(&who, deposit).map_err(|_| Error::::SignedCannotPayDeposit)?; let ejected_a_solution = maybe_removed.is_some(); // if we had to remove the weakest solution, unreserve its deposit @@ -1067,10 +1064,10 @@ pub mod pallet { if let Call::submit_unsigned { solution, .. } = call { // Discard solution not coming from the local OCW. match source { - TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ } + TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ }, _ => { - return InvalidTransaction::Call.into(); - } + return InvalidTransaction::Call.into() + }, } let _ = Self::unsigned_pre_dispatch_checks(solution) @@ -1083,9 +1080,8 @@ pub mod pallet { ValidTransaction::with_tag_prefix("OffchainElection") // The higher the score[0], the better a solution is. .priority( - T::MinerTxPriority::get().saturating_add( - solution.score[0].saturated_into() - ), + T::MinerTxPriority::get() + .saturating_add(solution.score[0].saturated_into()), ) // Used to deduplicate unsigned solutions: each validator should produce one // solution per round at most, and solutions are not propagate. @@ -1218,20 +1214,18 @@ impl Pallet { match current_phase { Phase::Unsigned((true, opened)) if opened == now => { // Mine a new solution, cache it, and attempt to submit it - let initial_output = Self::ensure_offchain_repeat_frequency(now).and_then(|_| { - Self::mine_check_save_submit() - }); + let initial_output = Self::ensure_offchain_repeat_frequency(now) + .and_then(|_| Self::mine_check_save_submit()); log!(debug, "initial offchain thread output: {:?}", initial_output); - } + }, Phase::Unsigned((true, opened)) if opened < now => { // Try and resubmit the cached solution, and recompute ONLY if it is not // feasible. 
- let resubmit_output = Self::ensure_offchain_repeat_frequency(now).and_then(|_| { - Self::restore_or_compute_then_maybe_submit() - }); + let resubmit_output = Self::ensure_offchain_repeat_frequency(now) + .and_then(|_| Self::restore_or_compute_then_maybe_submit()); log!(debug, "resubmit offchain thread output: {:?}", resubmit_output); - } - _ => {} + }, + _ => {}, } // After election finalization, clear OCW solution storage. @@ -1241,9 +1235,7 @@ impl Pallet { let local_event = ::Event::from(event_record.event); local_event.try_into().ok() }) - .any(|event| { - matches!(event, Event::ElectionFinalized(_)) - }) + .any(|event| matches!(event, Event::ElectionFinalized(_))) { unsigned::kill_ocw_solution::(); } @@ -1307,14 +1299,12 @@ impl Pallet { // Defensive-only. if targets.len() > target_limit || voters.len() > voter_limit { debug_assert!(false, "Snapshot limit has not been respected."); - return Err(ElectionError::DataProvider("Snapshot too big for submission.")); + return Err(ElectionError::DataProvider("Snapshot too big for submission.")) } // Only write snapshot if all existed. - let metadata = SolutionOrSnapshotSize { - voters: voters.len() as u32, - targets: targets.len() as u32, - }; + let metadata = + SolutionOrSnapshotSize { voters: voters.len() as u32, targets: targets.len() as u32 }; log!(debug, "creating a snapshot with metadata {:?}", metadata); >::put(metadata); @@ -1334,7 +1324,10 @@ impl Pallet { debug_assert!(buffer.len() == size && size == buffer.capacity()); sp_io::storage::set(&>::hashed_key(), &buffer); - Ok(w1.saturating_add(w2).saturating_add(w3).saturating_add(T::DbWeight::get().writes(3))) + Ok(w1 + .saturating_add(w2) + .saturating_add(w3) + .saturating_add(T::DbWeight::get().writes(3))) } /// Kill everything created by [`Pallet::create_snapshot`]. @@ -1368,9 +1361,9 @@ impl Pallet { // Ensure that the solution's score can pass absolute min-score. 
let submitted_score = solution.score.clone(); ensure!( - Self::minimum_untrusted_score().map_or(true, |min_score| + Self::minimum_untrusted_score().map_or(true, |min_score| { sp_npos_elections::is_score_better(submitted_score, min_score, Perbill::zero()) - ), + }), FeasibilityError::UntrustedScoreTooLow ); @@ -1417,7 +1410,7 @@ impl Pallet { // Check that all of the targets are valid based on the snapshot. if assignment.distribution.iter().any(|(d, _)| !targets.contains(d)) { - return Err(FeasibilityError::InvalidVote); + return Err(FeasibilityError::InvalidVote) } Ok(()) }) @@ -1493,8 +1486,13 @@ impl Pallet { .fold(Zero::zero(), |acc, next| acc + next.voters.len() as u32); Ok(( supports, - T::WeightInfo::elect_queued(metadata.voters, metadata.targets, active_voters, desired), - compute + T::WeightInfo::elect_queued( + metadata.voters, + metadata.targets, + active_voters, + desired, + ), + compute, )) }, ) @@ -1525,12 +1523,12 @@ impl ElectionProvider for Pallet { // All went okay, put sign to be Off, clean snapshot, etc. Self::rotate_round(); Ok((supports, weight)) - } + }, Err(why) => { log!(error, "Entering emergency mode: {:?}", why); >::put(Phase::Emergency); Err(why) - } + }, } } } @@ -1552,11 +1550,9 @@ mod feasibility_check { //! that is invalid, but gets through the system as valid. 
use super::*; - use crate::{ - mock::{ - MultiPhase, Runtime, roll_to, TargetIndex, raw_solution, EpochLength, UnsignedPhase, - SignedPhase, VoterIndex, ExtBuilder, - }, + use crate::mock::{ + raw_solution, roll_to, EpochLength, ExtBuilder, MultiPhase, Runtime, SignedPhase, + TargetIndex, UnsignedPhase, VoterIndex, }; use frame_support::assert_noop; @@ -1727,11 +1723,11 @@ mod feasibility_check { mod tests { use super::*; use crate::{ - Phase, mock::{ - ExtBuilder, MultiPhase, Runtime, roll_to, MockWeightInfo, AccountId, TargetIndex, - Targets, multi_phase_events, System, SignedMaxSubmissions, + multi_phase_events, roll_to, AccountId, ExtBuilder, MockWeightInfo, MultiPhase, + Runtime, SignedMaxSubmissions, System, TargetIndex, Targets, }, + Phase, }; use frame_election_provider_support::ElectionProvider; use frame_support::{assert_noop, assert_ok}; @@ -2001,7 +1997,6 @@ mod tests { roll_to(15); assert_eq!(MultiPhase::current_phase(), Phase::Signed); - let (solution, _) = MultiPhase::mine_solution(2).unwrap(); // Default solution has a score of [50, 100, 5000]. 
assert_eq!(solution.score, [50, 100, 5000]); @@ -2011,10 +2006,7 @@ mod tests { >::put([51, 0, 0]); assert_noop!( - MultiPhase::feasibility_check( - solution, - ElectionCompute::Signed - ), + MultiPhase::feasibility_check(solution, ElectionCompute::Signed), FeasibilityError::UntrustedScoreTooLow, ); }) @@ -2038,9 +2030,9 @@ mod tests { }; let mut active = 1; - while weight_with(active) - <= ::BlockWeights::get().max_block - || active == all_voters + while weight_with(active) <= + ::BlockWeights::get().max_block || + active == all_voters { active += 1; } diff --git a/frame/election-provider-multi-phase/src/mock.rs b/frame/election-provider-multi-phase/src/mock.rs index 55fa58590ce71..c5007733c1e33 100644 --- a/frame/election-provider-multi-phase/src/mock.rs +++ b/frame/election-provider-multi-phase/src/mock.rs @@ -17,13 +17,10 @@ use super::*; use crate as multi_phase; -use multi_phase::unsigned::{IndexAssignmentOf, Voter}; +use frame_election_provider_support::{data_provider, ElectionDataProvider}; pub use frame_support::{assert_noop, assert_ok}; -use frame_support::{ - parameter_types, - traits::{Hooks}, - weights::Weight, -}; +use frame_support::{parameter_types, traits::Hooks, weights::Weight}; +use multi_phase::unsigned::{IndexAssignmentOf, Voter}; use parking_lot::RwLock; use sp_core::{ offchain::{ @@ -32,7 +29,6 @@ use sp_core::{ }, H256, }; -use frame_election_provider_support::{ElectionDataProvider, data_provider}; use sp_npos_elections::{ assignment_ratio_to_staked_normalized, seq_phragmen, to_supports, to_without_backing, CompactSolution, ElectionResult, EvaluateSupport, @@ -405,7 +401,7 @@ impl ElectionDataProvider for StakingMock { let targets = Targets::get(); if maybe_max_len.map_or(false, |max_len| targets.len() > max_len) { - return Err("Targets too big"); + return Err("Targets too big") } Ok((targets, 0)) @@ -416,7 +412,7 @@ impl ElectionDataProvider for StakingMock { ) -> data_provider::Result<(Vec<(AccountId, VoteWeight, Vec)>, Weight)> { let 
voters = Voters::get(); if maybe_max_len.map_or(false, |max_len| voters.len() > max_len) { - return Err("Voters too big"); + return Err("Voters too big") } Ok((voters, 0)) diff --git a/frame/election-provider-multi-phase/src/signed.rs b/frame/election-provider-multi-phase/src/signed.rs index b89000b887a61..af4e1b293536f 100644 --- a/frame/election-provider-multi-phase/src/signed.rs +++ b/frame/election-provider-multi-phase/src/signed.rs @@ -18,11 +18,11 @@ //! The signed phase implementation. use crate::{ - CompactOf, Config, ElectionCompute, Pallet, RawSolution, ReadySolution, SolutionOrSnapshotSize, - Weight, WeightInfo, QueuedSolution, SignedSubmissionsMap, SignedSubmissionIndices, - SignedSubmissionNextIndex, + CompactOf, Config, ElectionCompute, Pallet, QueuedSolution, RawSolution, ReadySolution, + SignedSubmissionIndices, SignedSubmissionNextIndex, SignedSubmissionsMap, + SolutionOrSnapshotSize, Weight, WeightInfo, }; -use codec::{Encode, Decode, HasCompact}; +use codec::{Decode, Encode, HasCompact}; use frame_support::{ storage::bounded_btree_map::BoundedBTreeMap, traits::{Currency, Get, OnUnbalanced, ReservableCurrency}, @@ -31,8 +31,8 @@ use frame_support::{ use sp_arithmetic::traits::SaturatedConversion; use sp_npos_elections::{is_score_better, CompactSolution, ElectionScore}; use sp_runtime::{ - RuntimeDebug, traits::{Saturating, Zero}, + RuntimeDebug, }; use sp_std::{ cmp::Ordering, @@ -131,24 +131,30 @@ impl SignedSubmissions { deletion_overlay: BTreeSet::new(), }; // validate that the stored state is sane - debug_assert!(submissions.indices.values().copied().max().map_or( - true, - |max_idx| submissions.next_idx > max_idx, - )); + debug_assert!(submissions + .indices + .values() + .copied() + .max() + .map_or(true, |max_idx| submissions.next_idx > max_idx,)); submissions } /// Put the signed submissions back into storage. 
pub fn put(mut self) { // validate that we're going to write only sane things to storage - debug_assert!(self.insertion_overlay.keys().copied().max().map_or( - true, - |max_idx| self.next_idx > max_idx, - )); - debug_assert!(self.indices.values().copied().max().map_or( - true, - |max_idx| self.next_idx > max_idx, - )); + debug_assert!(self + .insertion_overlay + .keys() + .copied() + .max() + .map_or(true, |max_idx| self.next_idx > max_idx,)); + debug_assert!(self + .indices + .values() + .copied() + .max() + .map_or(true, |max_idx| self.next_idx > max_idx,)); SignedSubmissionIndices::::put(self.indices); SignedSubmissionNextIndex::::put(self.next_idx); @@ -203,10 +209,12 @@ impl SignedSubmissions { } self.insertion_overlay.remove(&remove_idx).or_else(|| { - (!self.deletion_overlay.contains(&remove_idx)).then(|| { - self.deletion_overlay.insert(remove_idx); - SignedSubmissionsMap::::try_get(remove_idx).ok() - }).flatten() + (!self.deletion_overlay.contains(&remove_idx)) + .then(|| { + self.deletion_overlay.insert(remove_idx); + SignedSubmissionsMap::::try_get(remove_idx).ok() + }) + .flatten() }) } @@ -256,10 +264,7 @@ impl SignedSubmissions { /// /// In the event that the new submission is not better than the current weakest according /// to `is_score_better`, we do not change anything. 
- pub fn insert( - &mut self, - submission: SignedSubmissionOf, - ) -> InsertResult { + pub fn insert(&mut self, submission: SignedSubmissionOf) -> InsertResult { // verify the expectation that we never reuse an index debug_assert!(!self.indices.values().any(|&idx| idx == self.next_idx)); @@ -271,12 +276,12 @@ impl SignedSubmissions { self.indices .try_insert(submission.solution.score, prev_idx) .expect("didn't change the map size; qed"); - return InsertResult::NotInserted; - } + return InsertResult::NotInserted + }, Ok(None) => { // successfully inserted into the set; no need to take out weakest member None - } + }, Err((insert_score, insert_idx)) => { // could not insert into the set because it is full. // note that we short-circuit return here in case the iteration produces `None`. @@ -290,11 +295,11 @@ impl SignedSubmissions { // if we haven't improved on the weakest score, don't change anything. if !is_score_better(insert_score, weakest_score, threshold) { - return InsertResult::NotInserted; + return InsertResult::NotInserted } self.swap_out_submission(weakest_score, Some((insert_score, insert_idx))) - } + }, }; // we've taken out the weakest, so update the storage map and the next index @@ -349,17 +354,12 @@ impl Pallet { let reward = T::SignedRewardBase::get(); while let Some(best) = all_submissions.pop_last() { - let SignedSubmission { solution, who, deposit} = best; + let SignedSubmission { solution, who, deposit } = best; let active_voters = solution.compact.voter_count() as u32; let feasibility_weight = { // defensive only: at the end of signed phase, snapshot will exits. 
let desired_targets = Self::desired_targets().unwrap_or_default(); - T::WeightInfo::feasibility_check( - voters, - targets, - active_voters, - desired_targets, - ) + T::WeightInfo::feasibility_check(voters, targets, active_voters, desired_targets) }; // the feasibility check itself has some weight weight = weight.saturating_add(feasibility_weight); @@ -375,13 +375,13 @@ impl Pallet { weight = weight .saturating_add(T::WeightInfo::finalize_signed_phase_accept_solution()); - break; - } + break + }, Err(_) => { Self::finalize_signed_phase_reject_solution(&who, deposit); weight = weight .saturating_add(T::WeightInfo::finalize_signed_phase_reject_solution()); - } + }, } } @@ -398,7 +398,12 @@ impl Pallet { debug_assert!(!SignedSubmissionNextIndex::::exists()); debug_assert!(SignedSubmissionsMap::::iter().next().is_none()); - log!(debug, "closed signed phase, found solution? {}, discarded {}", found_solution, discarded); + log!( + debug, + "closed signed phase, found solution? {}, discarded {}", + found_solution, + discarded + ); (found_solution, weight) } @@ -469,9 +474,12 @@ impl Pallet { let feasibility_weight = Self::feasibility_weight_of(solution, size); let len_deposit = T::SignedDepositByte::get().saturating_mul(encoded_len); - let weight_deposit = T::SignedDepositWeight::get().saturating_mul(feasibility_weight.saturated_into()); + let weight_deposit = + T::SignedDepositWeight::get().saturating_mul(feasibility_weight.saturated_into()); - T::SignedDepositBase::get().saturating_add(len_deposit).saturating_add(weight_deposit) + T::SignedDepositBase::get() + .saturating_add(len_deposit) + .saturating_add(weight_deposit) } } @@ -479,13 +487,13 @@ impl Pallet { mod tests { use super::*; use crate::{ - Phase, Error, mock::{ - balances, ExtBuilder, MultiPhase, Origin, raw_solution, roll_to, Runtime, + balances, raw_solution, roll_to, ExtBuilder, MultiPhase, Origin, Runtime, SignedMaxSubmissions, SignedMaxWeight, }, + Error, Phase, }; - use 
frame_support::{dispatch::DispatchResult, assert_noop, assert_storage_noop, assert_ok}; + use frame_support::{assert_noop, assert_ok, assert_storage_noop, dispatch::DispatchResult}; fn submit_with_witness( origin: Origin, @@ -626,7 +634,6 @@ mod tests { assert_ok!(submit_with_witness(Origin::signed(99), solution)); } - // weaker. let solution = RawSolution { score: [4, 0, 0], ..Default::default() }; @@ -810,33 +817,36 @@ mod tests { #[test] fn cannot_consume_too_much_future_weight() { - ExtBuilder::default().signed_weight(40).mock_weight_info(true).build_and_execute(|| { - roll_to(15); - assert!(MultiPhase::current_phase().is_signed()); - - let (solution, witness) = MultiPhase::mine_solution(2).unwrap(); - let solution_weight = ::WeightInfo::feasibility_check( - witness.voters, - witness.targets, - solution.compact.voter_count() as u32, - solution.compact.unique_targets().len() as u32, - ); - // default solution will have 5 edges (5 * 5 + 10) - assert_eq!(solution_weight, 35); - assert_eq!(solution.compact.voter_count(), 5); - assert_eq!(::SignedMaxWeight::get(), 40); - - assert_ok!(submit_with_witness(Origin::signed(99), solution.clone())); - - ::set(30); - - // note: resubmitting the same solution is technically okay as long as the queue has - // space. 
- assert_noop!( - submit_with_witness(Origin::signed(99), solution), - Error::::SignedTooMuchWeight, - ); - }) + ExtBuilder::default() + .signed_weight(40) + .mock_weight_info(true) + .build_and_execute(|| { + roll_to(15); + assert!(MultiPhase::current_phase().is_signed()); + + let (solution, witness) = MultiPhase::mine_solution(2).unwrap(); + let solution_weight = ::WeightInfo::feasibility_check( + witness.voters, + witness.targets, + solution.compact.voter_count() as u32, + solution.compact.unique_targets().len() as u32, + ); + // default solution will have 5 edges (5 * 5 + 10) + assert_eq!(solution_weight, 35); + assert_eq!(solution.compact.voter_count(), 5); + assert_eq!(::SignedMaxWeight::get(), 40); + + assert_ok!(submit_with_witness(Origin::signed(99), solution.clone())); + + ::set(30); + + // note: resubmitting the same solution is technically okay as long as the queue has + // space. + assert_noop!( + submit_with_witness(Origin::signed(99), solution), + Error::::SignedTooMuchWeight, + ); + }) } #[test] diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index 6b1c0368fc575..b5ae48e653797 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -21,19 +21,18 @@ use crate::{ helpers, Call, CompactAccuracyOf, CompactOf, Config, ElectionCompute, Error, FeasibilityError, Pallet, RawSolution, ReadySolution, RoundSnapshot, SolutionOrSnapshotSize, Weight, WeightInfo, }; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use frame_support::{dispatch::DispatchResult, ensure, traits::Get}; use frame_system::offchain::SubmitTransaction; use sp_arithmetic::Perbill; use sp_npos_elections::{ - CompactSolution, ElectionResult, assignment_ratio_to_staked_normalized, - assignment_staked_to_ratio_normalized, is_score_better, seq_phragmen, + assignment_ratio_to_staked_normalized, assignment_staked_to_ratio_normalized, 
is_score_better, + seq_phragmen, CompactSolution, ElectionResult, }; use sp_runtime::{ - DispatchError, - SaturatedConversion, offchain::storage::{MutateStorageError, StorageValueRef}, traits::TrailingZeroInput, + DispatchError, SaturatedConversion, }; use sp_std::{cmp::Ordering, convert::TryFrom, vec::Vec}; @@ -54,10 +53,8 @@ pub type Voter = ( ); /// The relative distribution of a voter's stake among the winning targets. -pub type Assignment = sp_npos_elections::Assignment< - ::AccountId, - CompactAccuracyOf, ->; +pub type Assignment = + sp_npos_elections::Assignment<::AccountId, CompactAccuracyOf>; /// The [`IndexAssignment`][sp_npos_elections::IndexAssignment] type specialized for a particular /// runtime `T`. @@ -105,7 +102,8 @@ fn save_solution(call: &Call) -> Result<(), MinerError> { let storage = StorageValueRef::persistent(&OFFCHAIN_CACHED_CALL); match storage.mutate::<_, (), _>(|_| Ok(call.clone())) { Ok(_) => Ok(()), - Err(MutateStorageError::ConcurrentModification(_)) => Err(MinerError::FailedToStoreSolution), + Err(MutateStorageError::ConcurrentModification(_)) => + Err(MinerError::FailedToStoreSolution), Err(MutateStorageError::ValueFunctionFailed(_)) => { // this branch should be unreachable according to the definition of // `StorageValueRef::mutate`: that function should only ever `Err` if the closure we @@ -151,44 +149,45 @@ impl Pallet { /// Attempt to restore a solution from cache. Otherwise, compute it fresh. Either way, submit /// if our call's score is greater than that of the cached solution. pub fn restore_or_compute_then_maybe_submit() -> Result<(), MinerError> { - log!(debug,"miner attempting to restore or compute an unsigned solution."); + log!(debug, "miner attempting to restore or compute an unsigned solution."); let call = restore_solution::() - .and_then(|call| { - // ensure the cached call is still current before submitting - if let Call::submit_unsigned { solution, .. 
} = &call { - // prevent errors arising from state changes in a forkful chain - Self::basic_checks(solution, "restored")?; - Ok(call) - } else { - Err(MinerError::SolutionCallInvalid) - } - }).or_else::(|error| { - log!(debug, "restoring solution failed due to {:?}", error); - match error { - MinerError::NoStoredSolution => { - log!(trace, "mining a new solution."); - // if not present or cache invalidated due to feasibility, regenerate. - // note that failing `Feasibility` can only mean that the solution was - // computed over a snapshot that has changed due to a fork. - let call = Self::mine_checked_call()?; - save_solution(&call)?; + .and_then(|call| { + // ensure the cached call is still current before submitting + if let Call::submit_unsigned { solution, .. } = &call { + // prevent errors arising from state changes in a forkful chain + Self::basic_checks(solution, "restored")?; Ok(call) + } else { + Err(MinerError::SolutionCallInvalid) } - MinerError::Feasibility(_) => { - log!(trace, "wiping infeasible solution."); - // kill the infeasible solution, hopefully in the next runs (whenever they - // may be) we mine a new one. - kill_ocw_solution::(); - clear_offchain_repeat_frequency(); - Err(error) - }, - _ => { - // nothing to do. Return the error as-is. - Err(error) + }) + .or_else::(|error| { + log!(debug, "restoring solution failed due to {:?}", error); + match error { + MinerError::NoStoredSolution => { + log!(trace, "mining a new solution."); + // if not present or cache invalidated due to feasibility, regenerate. + // note that failing `Feasibility` can only mean that the solution was + // computed over a snapshot that has changed due to a fork. + let call = Self::mine_checked_call()?; + save_solution(&call)?; + Ok(call) + }, + MinerError::Feasibility(_) => { + log!(trace, "wiping infeasible solution."); + // kill the infeasible solution, hopefully in the next runs (whenever they + // may be) we mine a new one. 
+ kill_ocw_solution::(); + clear_offchain_repeat_frequency(); + Err(error) + }, + _ => { + // nothing to do. Return the error as-is. + Err(error) + }, } - } - })?; + })?; Self::submit_call(call) } @@ -240,10 +239,12 @@ impl Pallet { MinerError::PreDispatchChecksFailed(err) })?; - Self::feasibility_check(raw_solution.clone(), ElectionCompute::Unsigned).map_err(|err| { - log!(debug, "feasibility check failed for {} solution: {:?}", solution_type, err); - err - })?; + Self::feasibility_check(raw_solution.clone(), ElectionCompute::Unsigned).map_err( + |err| { + log!(debug, "feasibility check failed for {} solution: {:?}", solution_type, err); + err + }, + )?; Ok(()) } @@ -347,11 +348,7 @@ impl Pallet { // converting to `Compact`. let mut index_assignments = sorted_assignments .into_iter() - .map(|assignment| IndexAssignmentOf::::new( - &assignment, - &voter_index, - &target_index, - )) + .map(|assignment| IndexAssignmentOf::::new(&assignment, &voter_index, &target_index)) .collect::, _>>()?; // trim assignments list for weight and length. 
@@ -390,10 +387,10 @@ impl Pallet { max @ _ => { let seed = sp_io::offchain::random_seed(); let random = ::decode(&mut TrailingZeroInput::new(seed.as_ref())) - .expect("input is padded with zeroes; qed") - % max.saturating_add(1); + .expect("input is padded with zeroes; qed") % + max.saturating_add(1); random as usize - } + }, } } @@ -418,18 +415,16 @@ impl Pallet { max_weight: Weight, assignments: &mut Vec>, ) { - let maximum_allowed_voters = Self::maximum_voter_for_weight::( - desired_targets, - size, - max_weight, - ); - let removing: usize = assignments.len().saturating_sub( - maximum_allowed_voters.saturated_into(), - ); + let maximum_allowed_voters = + Self::maximum_voter_for_weight::(desired_targets, size, max_weight); + let removing: usize = + assignments.len().saturating_sub(maximum_allowed_voters.saturated_into()); log!( debug, "from {} assignments, truncating to {} for weight, removing {}", - assignments.len(), maximum_allowed_voters, removing, + assignments.len(), + maximum_allowed_voters, + removing, ); assignments.truncate(maximum_allowed_voters as usize); } @@ -461,7 +456,7 @@ impl Pallet { // not much we can do if assignments are already empty. if high == low { - return Ok(()); + return Ok(()) } while high - low > 1 { @@ -472,22 +467,21 @@ impl Pallet { high = test; } } - let maximum_allowed_voters = - if low < assignments.len() && - encoded_size_of(&assignments[..low + 1])? <= max_allowed_length - { - low + 1 - } else { - low - }; + let maximum_allowed_voters = if low < assignments.len() && + encoded_size_of(&assignments[..low + 1])? 
<= max_allowed_length + { + low + 1 + } else { + low + }; // ensure our post-conditions are correct debug_assert!( encoded_size_of(&assignments[..maximum_allowed_voters]).unwrap() <= max_allowed_length ); debug_assert!(if maximum_allowed_voters < assignments.len() { - encoded_size_of(&assignments[..maximum_allowed_voters + 1]).unwrap() - > max_allowed_length + encoded_size_of(&assignments[..maximum_allowed_voters + 1]).unwrap() > + max_allowed_length } else { true }); @@ -517,7 +511,7 @@ impl Pallet { max_weight: Weight, ) -> u32 { if size.voters < 1 { - return size.voters; + return size.voters } let max_voters = size.voters.max(1); @@ -536,7 +530,7 @@ impl Pallet { Some(voters) if voters < max_voters => Ok(voters), _ => Err(()), } - } + }, Ordering::Greater => voters.checked_sub(step).ok_or(()), Ordering::Equal => Ok(voters), } @@ -551,11 +545,11 @@ impl Pallet { // proceed with the binary search Ok(next) if next != voters => { voters = next; - } + }, // we are out of bounds, break out of the loop. Err(()) => { - break; - } + break + }, // we found the right value - early exit the function. Ok(next) => return next, } @@ -599,17 +593,16 @@ impl Pallet { |maybe_head: Result, _>| { match maybe_head { Ok(Some(head)) if now < head => Err("fork."), - Ok(Some(head)) if now >= head && now <= head + threshold => { - Err("recently executed.") - } + Ok(Some(head)) if now >= head && now <= head + threshold => + Err("recently executed."), Ok(Some(head)) if now > head + threshold => { // we can run again now. Write the new head. Ok(now) - } + }, _ => { // value doesn't exists. Probably this node just booted up. Write, and run Ok(now) - } + }, } }, ); @@ -632,9 +625,7 @@ impl Pallet { /// /// NOTE: Ideally, these tests should move more and more outside of this and more to the miner's /// code, so that we do less and less storage reads here. 
- pub fn unsigned_pre_dispatch_checks( - solution: &RawSolution>, - ) -> DispatchResult { + pub fn unsigned_pre_dispatch_checks(solution: &RawSolution>) -> DispatchResult { // ensure solution is timely. Don't panic yet. This is a cheap check. ensure!(Self::current_phase().is_unsigned_open(), Error::::PreDispatchEarlySubmission); @@ -643,8 +634,8 @@ impl Pallet { // ensure correct number of winners. ensure!( - Self::desired_targets().unwrap_or_default() - == solution.compact.unique_targets().len() as u32, + Self::desired_targets().unwrap_or_default() == + solution.compact.unique_targets().len() as u32, Error::::PreDispatchWrongWinnerCount, ); @@ -761,19 +752,22 @@ mod max_weight { mod tests { use super::*; use crate::{ - CurrentPhase, InvalidTransaction, Phase, QueuedSolution, TransactionSource, - TransactionValidityError, mock::{ - Call as OuterCall, ExtBuilder, Extrinsic, MinerMaxWeight, MultiPhase, Origin, Runtime, - TestCompact, TrimHelpers, roll_to, roll_to_with_ocw, trim_helpers, witness, - UnsignedPhase, BlockNumber, System, + roll_to, roll_to_with_ocw, trim_helpers, witness, BlockNumber, Call as OuterCall, + ExtBuilder, Extrinsic, MinerMaxWeight, MultiPhase, Origin, Runtime, System, + TestCompact, TrimHelpers, UnsignedPhase, }, + CurrentPhase, InvalidTransaction, Phase, QueuedSolution, TransactionSource, + TransactionValidityError, }; use frame_benchmarking::Zero; use frame_support::{assert_noop, assert_ok, dispatch::Dispatchable, traits::OffchainWorker}; use sp_npos_elections::IndexAssignment; - use sp_runtime::offchain::storage_lock::{StorageLock, BlockAndTime}; - use sp_runtime::{traits::ValidateUnsigned, PerU16}; + use sp_runtime::{ + offchain::storage_lock::{BlockAndTime, StorageLock}, + traits::ValidateUnsigned, + PerU16, + }; type Assignment = crate::unsigned::Assignment; @@ -786,8 +780,11 @@ mod tests { // initial assert_eq!(MultiPhase::current_phase(), Phase::Off); assert!(matches!( - ::validate_unsigned(TransactionSource::Local, &call) - 
.unwrap_err(), + ::validate_unsigned( + TransactionSource::Local, + &call + ) + .unwrap_err(), TransactionValidityError::Invalid(InvalidTransaction::Custom(0)) )); assert!(matches!( @@ -799,8 +796,11 @@ mod tests { roll_to(15); assert_eq!(MultiPhase::current_phase(), Phase::Signed); assert!(matches!( - ::validate_unsigned(TransactionSource::Local, &call) - .unwrap_err(), + ::validate_unsigned( + TransactionSource::Local, + &call + ) + .unwrap_err(), TransactionValidityError::Invalid(InvalidTransaction::Custom(0)) )); assert!(matches!( @@ -823,8 +823,11 @@ mod tests { >::put(Phase::Unsigned((false, 25))); assert!(MultiPhase::current_phase().is_unsigned()); assert!(matches!( - ::validate_unsigned(TransactionSource::Local, &call) - .unwrap_err(), + ::validate_unsigned( + TransactionSource::Local, + &call + ) + .unwrap_err(), TransactionValidityError::Invalid(InvalidTransaction::Custom(0)) )); assert!(matches!( @@ -895,23 +898,27 @@ mod tests { #[test] fn priority_is_set() { - ExtBuilder::default().miner_tx_priority(20).desired_targets(0).build_and_execute(|| { - roll_to(25); - assert!(MultiPhase::current_phase().is_unsigned()); - - let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(solution.clone(), witness()); + ExtBuilder::default() + .miner_tx_priority(20) + .desired_targets(0) + .build_and_execute(|| { + roll_to(25); + assert!(MultiPhase::current_phase().is_unsigned()); - assert_eq!( - ::validate_unsigned( - TransactionSource::Local, - &call - ) - .unwrap() - .priority, - 25 - ); - }) + let solution = + RawSolution:: { score: [5, 0, 0], ..Default::default() }; + let call = Call::submit_unsigned(solution.clone(), witness()); + + assert_eq!( + ::validate_unsigned( + TransactionSource::Local, + &call + ) + .unwrap() + .priority, + 25 + ); + }) } #[test] @@ -974,35 +981,38 @@ mod tests { #[test] fn miner_trims_weight() { - ExtBuilder::default().miner_weight(100).mock_weight_info(true).build_and_execute(|| { - 
roll_to(25); - assert!(MultiPhase::current_phase().is_unsigned()); - - let (solution, witness) = MultiPhase::mine_solution(2).unwrap(); - let solution_weight = ::WeightInfo::submit_unsigned( - witness.voters, - witness.targets, - solution.compact.voter_count() as u32, - solution.compact.unique_targets().len() as u32, - ); - // default solution will have 5 edges (5 * 5 + 10) - assert_eq!(solution_weight, 35); - assert_eq!(solution.compact.voter_count(), 5); - - // now reduce the max weight - ::set(25); + ExtBuilder::default() + .miner_weight(100) + .mock_weight_info(true) + .build_and_execute(|| { + roll_to(25); + assert!(MultiPhase::current_phase().is_unsigned()); - let (solution, witness) = MultiPhase::mine_solution(2).unwrap(); - let solution_weight = ::WeightInfo::submit_unsigned( - witness.voters, - witness.targets, - solution.compact.voter_count() as u32, - solution.compact.unique_targets().len() as u32, - ); - // default solution will have 5 edges (5 * 5 + 10) - assert_eq!(solution_weight, 25); - assert_eq!(solution.compact.voter_count(), 3); - }) + let (solution, witness) = MultiPhase::mine_solution(2).unwrap(); + let solution_weight = ::WeightInfo::submit_unsigned( + witness.voters, + witness.targets, + solution.compact.voter_count() as u32, + solution.compact.unique_targets().len() as u32, + ); + // default solution will have 5 edges (5 * 5 + 10) + assert_eq!(solution_weight, 35); + assert_eq!(solution.compact.voter_count(), 5); + + // now reduce the max weight + ::set(25); + + let (solution, witness) = MultiPhase::mine_solution(2).unwrap(); + let solution_weight = ::WeightInfo::submit_unsigned( + witness.voters, + witness.targets, + solution.compact.voter_count() as u32, + solution.compact.unique_targets().len() as u32, + ); + // default solution will have 5 edges (5 * 5 + 10) + assert_eq!(solution_weight, 25); + assert_eq!(solution.compact.voter_count(), 3); + }) } #[test] @@ -1014,7 +1024,7 @@ mod tests { assert_eq!( 
MultiPhase::mine_check_save_submit().unwrap_err(), - MinerError::PreDispatchChecksFailed(DispatchError::Module{ + MinerError::PreDispatchChecksFailed(DispatchError::Module { index: 2, error: 1, message: Some("PreDispatchWrongWinnerCount"), @@ -1360,15 +1370,14 @@ mod tests { }; // Custom(7) maps to PreDispatchChecksFailed - let pre_dispatch_check_error = TransactionValidityError::Invalid( - InvalidTransaction::Custom(7), - ); + let pre_dispatch_check_error = + TransactionValidityError::Invalid(InvalidTransaction::Custom(7)); assert_eq!( ::validate_unsigned( TransactionSource::Local, &call, ) - .unwrap_err(), + .unwrap_err(), pre_dispatch_check_error, ); assert_eq!( @@ -1384,21 +1393,14 @@ mod tests { roll_to(25); // given - let TrimHelpers { - mut assignments, - encoded_size_of, - .. - } = trim_helpers(); + let TrimHelpers { mut assignments, encoded_size_of, .. } = trim_helpers(); let compact = CompactOf::::try_from(assignments.as_slice()).unwrap(); let encoded_len = compact.encoded_size() as u32; let compact_clone = compact.clone(); // when - MultiPhase::trim_assignments_length( - encoded_len, - &mut assignments, - encoded_size_of, - ).unwrap(); + MultiPhase::trim_assignments_length(encoded_len, &mut assignments, encoded_size_of) + .unwrap(); // then let compact = CompactOf::::try_from(assignments.as_slice()).unwrap(); @@ -1412,11 +1414,7 @@ mod tests { roll_to(25); // given - let TrimHelpers { - mut assignments, - encoded_size_of, - .. - } = trim_helpers(); + let TrimHelpers { mut assignments, encoded_size_of, .. 
} = trim_helpers(); let compact = CompactOf::::try_from(assignments.as_slice()).unwrap(); let encoded_len = compact.encoded_size(); let compact_clone = compact.clone(); @@ -1426,7 +1424,8 @@ mod tests { encoded_len as u32 - 1, &mut assignments, encoded_size_of, - ).unwrap(); + ) + .unwrap(); // then let compact = CompactOf::::try_from(assignments.as_slice()).unwrap(); @@ -1441,33 +1440,26 @@ mod tests { roll_to(25); // given - let TrimHelpers { - voters, - mut assignments, - encoded_size_of, - voter_index, - } = trim_helpers(); + let TrimHelpers { voters, mut assignments, encoded_size_of, voter_index } = + trim_helpers(); let compact = CompactOf::::try_from(assignments.as_slice()).unwrap(); let encoded_len = compact.encoded_size() as u32; let count = assignments.len(); - let min_stake_voter = voters.iter() + let min_stake_voter = voters + .iter() .map(|(id, weight, _)| (weight, id)) .min() .and_then(|(_, id)| voter_index(id)) .unwrap(); // when - MultiPhase::trim_assignments_length( - encoded_len - 1, - &mut assignments, - encoded_size_of, - ).unwrap(); + MultiPhase::trim_assignments_length(encoded_len - 1, &mut assignments, encoded_size_of) + .unwrap(); // then assert_eq!(assignments.len(), count - 1, "we must have removed exactly one assignment"); assert!( - assignments.iter() - .all(|IndexAssignment{ who, ..}| *who != min_stake_voter), + assignments.iter().all(|IndexAssignment { who, .. 
}| *who != min_stake_voter), "min_stake_voter must no longer be in the set of voters", ); }); diff --git a/frame/election-provider-multi-phase/src/weights.rs b/frame/election-provider-multi-phase/src/weights.rs index 0f732784c62c4..64898578184a7 100644 --- a/frame/election-provider-multi-phase/src/weights.rs +++ b/frame/election-provider-multi-phase/src/weights.rs @@ -35,11 +35,13 @@ // --output=./frame/election-provider-multi-phase/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_election_provider_multi_phase. @@ -50,18 +52,17 @@ pub trait WeightInfo { fn finalize_signed_phase_accept_solution() -> Weight; fn finalize_signed_phase_reject_solution() -> Weight; fn on_initialize_open_unsigned_without_snapshot() -> Weight; - fn elect_queued(v: u32, t: u32, a: u32, d: u32, ) -> Weight; - fn submit(c: u32, ) -> Weight; - fn submit_unsigned(v: u32, t: u32, a: u32, d: u32, ) -> Weight; - fn feasibility_check(v: u32, t: u32, a: u32, d: u32, ) -> Weight; + fn elect_queued(v: u32, t: u32, a: u32, d: u32) -> Weight; + fn submit(c: u32) -> Weight; + fn submit_unsigned(v: u32, t: u32, a: u32, d: u32) -> Weight; + fn feasibility_check(v: u32, t: u32, a: u32, d: u32) -> Weight; } /// Weights for pallet_election_provider_multi_phase using the Substrate node and recommended hardware. 
pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { fn on_initialize_nothing() -> Weight { - (33_170_000 as Weight) - .saturating_add(T::DbWeight::get().reads(8 as Weight)) + (33_170_000 as Weight).saturating_add(T::DbWeight::get().reads(8 as Weight)) } fn on_initialize_open_signed() -> Weight { (113_680_000 as Weight) @@ -88,7 +89,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn elect_queued(v: u32, _t: u32, a: u32, d: u32, ) -> Weight { + fn elect_queued(v: u32, _t: u32, a: u32, d: u32) -> Weight { (51_573_000 as Weight) // Standard Error: 1_000 .saturating_add((9_000 as Weight).saturating_mul(v as Weight)) @@ -99,14 +100,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(6 as Weight)) .saturating_add(T::DbWeight::get().writes(8 as Weight)) } - fn submit(c: u32, ) -> Weight { + fn submit(c: u32) -> Weight { (77_469_000 as Weight) // Standard Error: 17_000 .saturating_add((281_000 as Weight).saturating_mul(c as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn submit_unsigned(v: u32, t: u32, a: u32, d: u32, ) -> Weight { + fn submit_unsigned(v: u32, t: u32, a: u32, d: u32) -> Weight { (0 as Weight) // Standard Error: 5_000 .saturating_add((3_667_000 as Weight).saturating_mul(v as Weight)) @@ -119,7 +120,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(7 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn feasibility_check(v: u32, t: u32, a: u32, d: u32, ) -> Weight { + fn feasibility_check(v: u32, t: u32, a: u32, d: u32) -> Weight { (0 as Weight) // Standard Error: 4_000 .saturating_add((3_613_000 as Weight).saturating_mul(v as Weight)) @@ -136,8 +137,7 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { fn 
on_initialize_nothing() -> Weight { - (33_564_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(8 as Weight)) + (33_564_000 as Weight).saturating_add(RocksDbWeight::get().reads(8 as Weight)) } fn on_initialize_open_signed() -> Weight { (114_561_000 as Weight) @@ -164,7 +164,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn elect_queued(v: u32, _t: u32, a: u32, d: u32, ) -> Weight { + fn elect_queued(v: u32, _t: u32, a: u32, d: u32) -> Weight { (0 as Weight) // Standard Error: 1_000 .saturating_add((19_000 as Weight).saturating_mul(v as Weight)) @@ -175,14 +175,14 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(6 as Weight)) .saturating_add(RocksDbWeight::get().writes(8 as Weight)) } - fn submit(c: u32, ) -> Weight { + fn submit(c: u32) -> Weight { (77_616_000 as Weight) // Standard Error: 18_000 .saturating_add((213_000 as Weight).saturating_mul(c as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn submit_unsigned(v: u32, t: u32, a: u32, d: u32, ) -> Weight { + fn submit_unsigned(v: u32, t: u32, a: u32, d: u32) -> Weight { (0 as Weight) // Standard Error: 8_000 .saturating_add((3_701_000 as Weight).saturating_mul(v as Weight)) @@ -195,7 +195,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(7 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn feasibility_check(v: u32, _t: u32, a: u32, d: u32, ) -> Weight { + fn feasibility_check(v: u32, _t: u32, a: u32, d: u32) -> Weight { (0 as Weight) // Standard Error: 7_000 .saturating_add((3_632_000 as Weight).saturating_mul(v as Weight)) diff --git a/frame/election-provider-support/src/lib.rs b/frame/election-provider-support/src/lib.rs index 1d1ebf02a2635..72896e5599138 100644 --- a/frame/election-provider-support/src/lib.rs +++ 
b/frame/election-provider-support/src/lib.rs @@ -164,13 +164,13 @@ #![cfg_attr(not(feature = "std"), no_std)] pub mod onchain; -use sp_std::{prelude::*, fmt::Debug}; use frame_support::weights::Weight; +use sp_std::{fmt::Debug, prelude::*}; /// Re-export some type as they are used in the interface. pub use sp_arithmetic::PerThing; pub use sp_npos_elections::{ - Assignment, ExtendedBalance, PerThing128, Supports, Support, VoteWeight + Assignment, ExtendedBalance, PerThing128, Support, Supports, VoteWeight, }; /// Types that are used by the data provider trait. @@ -224,7 +224,8 @@ pub trait ElectionDataProvider { _voters: Vec<(AccountId, VoteWeight, Vec)>, _targets: Vec, _target_stake: Option, - ) {} + ) { + } /// Utility function only to be used in benchmarking scenarios, to be implemented optionally, /// else a noop. diff --git a/frame/election-provider-support/src/onchain.rs b/frame/election-provider-support/src/onchain.rs index e034a9c36a8ac..2e2c286dc6422 100644 --- a/frame/election-provider-support/src/onchain.rs +++ b/frame/election-provider-support/src/onchain.rs @@ -18,9 +18,9 @@ //! An implementation of [`ElectionProvider`] that does an on-chain sequential phragmen. use crate::{ElectionDataProvider, ElectionProvider}; +use frame_support::{traits::Get, weights::Weight}; use sp_npos_elections::*; use sp_std::{collections::btree_map::BTreeMap, marker::PhantomData, prelude::*}; -use frame_support::{traits::Get, weights::Weight}; /// Errors of the on-chain election. 
#[derive(Eq, PartialEq, Debug)] @@ -83,9 +83,8 @@ impl ElectionProvider for OnChainSequen stake_map.insert(v.clone(), *s); }); - let stake_of = |w: &T::AccountId| -> VoteWeight { - stake_map.get(w).cloned().unwrap_or_default() - }; + let stake_of = + |w: &T::AccountId| -> VoteWeight { stake_map.get(w).cloned().unwrap_or_default() }; let ElectionResult { winners, assignments } = seq_phragmen::<_, T::Accuracy>(desired_targets as usize, targets, voters, None) @@ -94,16 +93,18 @@ impl ElectionProvider for OnChainSequen let staked = assignment_ratio_to_staked_normalized(assignments, &stake_of)?; let winners = to_without_backing(winners); - to_supports(&winners, &staked).map_err(Error::from).map(|s| (s, T::BlockWeights::get().max_block)) + to_supports(&winners, &staked) + .map_err(Error::from) + .map(|s| (s, T::BlockWeights::get().max_block)) } } #[cfg(test)] mod tests { use super::*; + use frame_support::weights::Weight; use sp_npos_elections::Support; use sp_runtime::Perbill; - use frame_support::weights::Weight; type AccountId = u64; type BlockNumber = u32; @@ -151,20 +152,8 @@ mod tests { assert_eq!( OnChainPhragmen::elect().unwrap().0, vec![ - ( - 10, - Support { - total: 25, - voters: vec![(1, 10), (3, 15)] - } - ), - ( - 30, - Support { - total: 35, - voters: vec![(2, 20), (3, 15)] - } - ) + (10, Support { total: 25, voters: vec![(1, 10), (3, 15)] }), + (30, Support { total: 35, voters: vec![(2, 20), (3, 15)] }) ] ); } diff --git a/frame/elections-phragmen/src/benchmarking.rs b/frame/elections-phragmen/src/benchmarking.rs index 86a0116978067..4e19b64ef7a5f 100644 --- a/frame/elections-phragmen/src/benchmarking.rs +++ b/frame/elections-phragmen/src/benchmarking.rs @@ -21,9 +21,9 @@ use super::*; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelist}; +use frame_support::{dispatch::DispatchResultWithPostInfo, traits::OnInitialize}; use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, account, whitelist, 
impl_benchmark_test_suite}; -use frame_support::{traits::OnInitialize, dispatch::DispatchResultWithPostInfo}; use crate::Pallet as Elections; @@ -62,28 +62,34 @@ fn candidate_count() -> u32 { } /// Add `c` new candidates. -fn submit_candidates(c: u32, prefix: &'static str) - -> Result, &'static str> -{ - (0..c).map(|i| { - let account = endowed_account::(prefix, i); - >::submit_candidacy( - RawOrigin::Signed(account.clone()).into(), - candidate_count::(), - ).map_err(|_| "failed to submit candidacy")?; - Ok(account) - }).collect::>() +fn submit_candidates( + c: u32, + prefix: &'static str, +) -> Result, &'static str> { + (0..c) + .map(|i| { + let account = endowed_account::(prefix, i); + >::submit_candidacy( + RawOrigin::Signed(account.clone()).into(), + candidate_count::(), + ) + .map_err(|_| "failed to submit candidacy")?; + Ok(account) + }) + .collect::>() } /// Add `c` new candidates with self vote. -fn submit_candidates_with_self_vote(c: u32, prefix: &'static str) - -> Result, &'static str> -{ +fn submit_candidates_with_self_vote( + c: u32, + prefix: &'static str, +) -> Result, &'static str> { let candidates = submit_candidates::(c, prefix)?; let stake = default_stake::(BALANCE_FACTOR); - let _ = candidates.iter().map(|c| - submit_voter::(c.clone(), vec![c.clone()], stake).map(|_| ()) - ).collect::>()?; + let _ = candidates + .iter() + .map(|c| submit_voter::(c.clone(), vec![c.clone()], stake).map(|_| ())) + .collect::>()?; Ok(candidates) } @@ -98,18 +104,16 @@ fn submit_voter( /// create `num_voter` voters who randomly vote for at most `votes` of `all_candidates` if /// available. 
-fn distribute_voters(mut all_candidates: Vec, num_voters: u32, votes: usize) - -> Result<(), &'static str> -{ +fn distribute_voters( + mut all_candidates: Vec, + num_voters: u32, + votes: usize, +) -> Result<(), &'static str> { let stake = default_stake::(BALANCE_FACTOR); for i in 0..num_voters { // to ensure that votes are different all_candidates.rotate_left(1); - let votes = all_candidates - .iter() - .cloned() - .take(votes) - .collect::>(); + let votes = all_candidates.iter().cloned().take(votes).collect::>(); let voter = endowed_account::("voter", i); submit_voter::(voter, votes, stake)?; } @@ -128,13 +132,11 @@ fn fill_seats_up_to(m: u32) -> Result, &'static str m as usize, "wrong number of members and runners-up", ); - Ok( - >::members() - .into_iter() - .map(|m| m.who) - .chain(>::runners_up().into_iter().map(|r| r.who)) - .collect() - ) + Ok(>::members() + .into_iter() + .map(|m| m.who) + .chain(>::runners_up().into_iter().map(|r| r.who)) + .collect()) } /// removes all the storage items to reverse any genesis state. 
diff --git a/frame/elections-phragmen/src/lib.rs b/frame/elections-phragmen/src/lib.rs index 7cf5ae39b7263..2c3bc3de8836c 100644 --- a/frame/elections-phragmen/src/lib.rs +++ b/frame/elections-phragmen/src/lib.rs @@ -100,11 +100,11 @@ use codec::{Decode, Encode}; use frame_support::{ - dispatch::{WithPostDispatchInfo}, + dispatch::WithPostDispatchInfo, traits::{ ChangeMembers, Contains, ContainsLengthBound, Currency, CurrencyToVote, Get, InitializeMembers, LockIdentifier, LockableCurrency, OnUnbalanced, ReservableCurrency, - WithdrawReasons, SortedMembers, + SortedMembers, WithdrawReasons, }, weights::Weight, }; @@ -114,7 +114,7 @@ use sp_runtime::{ traits::{Saturating, StaticLookup, Zero}, DispatchError, Perbill, RuntimeDebug, }; -use sp_std::{prelude::*, cmp::Ordering}; +use sp_std::{cmp::Ordering, prelude::*}; mod benchmarking; pub mod weights; @@ -128,8 +128,9 @@ pub const MAXIMUM_VOTE: usize = 16; type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; -type NegativeImbalanceOf = - <::Currency as Currency<::AccountId>>::NegativeImbalance; +type NegativeImbalanceOf = <::Currency as Currency< + ::AccountId, +>>::NegativeImbalance; /// An indication that the renouncing account currently has which of the below roles. #[derive(Encode, Decode, Clone, PartialEq, RuntimeDebug, TypeInfo)] @@ -172,14 +173,13 @@ pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::config] pub trait Config: frame_system::Config { - type Event: From> - + IsType<::Event>; + type Event: From> + IsType<::Event>; /// Identifier for the elections-phragmen pallet's lock #[pallet::constant] @@ -321,24 +321,19 @@ pub mod pallet { let to_reserve = new_deposit - old_deposit; T::Currency::reserve(&who, to_reserve) .map_err(|_| Error::::UnableToPayBond)?; - } - Ordering::Equal => {} + }, + Ordering::Equal => {}, Ordering::Less => { // Must unreserve a bit. 
let to_unreserve = old_deposit - new_deposit; let _remainder = T::Currency::unreserve(&who, to_unreserve); debug_assert!(_remainder.is_zero()); - } + }, }; // Amount to be locked up. let locked_stake = value.min(T::Currency::total_balance(&who)); - T::Currency::set_lock( - T::PalletId::get(), - &who, - locked_stake, - WithdrawReasons::all(), - ); + T::Currency::set_lock(T::PalletId::get(), &who, locked_stake, WithdrawReasons::all()); Voting::::insert(&who, Voter { votes, deposit: new_deposit, stake: locked_stake }); Ok(None.into()) @@ -427,7 +422,7 @@ pub mod pallet { let _ = Self::remove_and_replace_member(&who, false) .map_err(|_| Error::::InvalidRenouncing)?; Self::deposit_event(Event::Renounced(who)); - } + }, Renouncing::RunnerUp => { >::try_mutate::<_, Error, _>(|runners_up| { let index = runners_up @@ -441,7 +436,7 @@ pub mod pallet { Self::deposit_event(Event::Renounced(who)); Ok(()) })?; - } + }, Renouncing::Candidate(count) => { >::try_mutate::<_, Error, _>(|candidates| { ensure!(count >= candidates.len() as u32, Error::::InvalidWitnessData); @@ -454,7 +449,7 @@ pub mod pallet { Self::deposit_event(Event::Renounced(who)); Ok(()) })?; - } + }, }; Ok(None.into()) } @@ -492,7 +487,7 @@ pub mod pallet { return Err(Error::::InvalidReplacement.with_weight( // refund. The weight value comes from a benchmark which is special to this. T::WeightInfo::remove_member_wrong_refund(), - )); + )) } let had_replacement = Self::remove_and_replace_member(&who, true)?; @@ -660,37 +655,46 @@ pub mod pallet { self.members.len() as u32 <= T::DesiredMembers::get(), "Cannot accept more than DesiredMembers genesis member", ); - let members = self.members.iter().map(|(ref member, ref stake)| { - // make sure they have enough stake. - assert!( - T::Currency::free_balance(member) >= *stake, - "Genesis member does not have enough stake.", - ); + let members = self + .members + .iter() + .map(|(ref member, ref stake)| { + // make sure they have enough stake. 
+ assert!( + T::Currency::free_balance(member) >= *stake, + "Genesis member does not have enough stake.", + ); - // Note: all members will only vote for themselves, hence they must be given exactly - // their own stake as total backing. Any sane election should behave as such. - // Nonetheless, stakes will be updated for term 1 onwards according to the election. - Members::::mutate(|members| { - match members.binary_search_by(|m| m.who.cmp(member)) { - Ok(_) => panic!("Duplicate member in elections-phragmen genesis: {}", member), - Err(pos) => members.insert( - pos, - SeatHolder { who: member.clone(), stake: *stake, deposit: Zero::zero() }, - ), - } - }); - - // set self-votes to make persistent. Genesis voters don't have any bond, nor do - // they have any lock. NOTE: this means that we will still try to remove a lock once - // this genesis voter is removed, and for now it is okay because remove_lock is noop - // if lock is not there. - >::insert( - &member, - Voter { votes: vec![member.clone()], stake: *stake, deposit: Zero::zero() }, - ); + // Note: all members will only vote for themselves, hence they must be given exactly + // their own stake as total backing. Any sane election should behave as such. + // Nonetheless, stakes will be updated for term 1 onwards according to the election. + Members::::mutate(|members| { + match members.binary_search_by(|m| m.who.cmp(member)) { + Ok(_) => + panic!("Duplicate member in elections-phragmen genesis: {}", member), + Err(pos) => members.insert( + pos, + SeatHolder { + who: member.clone(), + stake: *stake, + deposit: Zero::zero(), + }, + ), + } + }); + + // set self-votes to make persistent. Genesis voters don't have any bond, nor do + // they have any lock. NOTE: this means that we will still try to remove a lock once + // this genesis voter is removed, and for now it is okay because remove_lock is noop + // if lock is not there. 
+ >::insert( + &member, + Voter { votes: vec![member.clone()], stake: *stake, deposit: Zero::zero() }, + ); - member.clone() - }).collect::>(); + member.clone() + }) + .collect::>(); // report genesis members to upstream, if any. T::InitializeMembers::initialize_members(&members); @@ -727,8 +731,9 @@ impl Pallet { // - `Ok(None)` if member was removed but no replacement was found // - `Err(_)` if who is not a member. let maybe_replacement = >::try_mutate::<_, Error, _>(|members| { - let remove_index = - members.binary_search_by(|m| m.who.cmp(who)).map_err(|_| Error::::NotMember)?; + let remove_index = members + .binary_search_by(|m| m.who.cmp(who)) + .map_err(|_| Error::::NotMember)?; // we remove the member anyhow, regardless of having a runner-up or not. let removed = members.remove(remove_index); @@ -760,10 +765,8 @@ impl Pallet { Ok(maybe_next_best) })?; - let remaining_member_ids_sorted = Self::members() - .into_iter() - .map(|x| x.who.clone()) - .collect::>(); + let remaining_member_ids_sorted = + Self::members().into_iter().map(|x| x.who.clone()).collect::>(); let outgoing = &[who.clone()]; let maybe_current_prime = T::ChangeMembers::get_prime(); let return_value = match maybe_replacement { @@ -772,18 +775,18 @@ impl Pallet { T::ChangeMembers::change_members_sorted( &[incoming.who], outgoing, - &remaining_member_ids_sorted[..] + &remaining_member_ids_sorted[..], ); true - } + }, None => { T::ChangeMembers::change_members_sorted( &[], outgoing, - &remaining_member_ids_sorted[..] + &remaining_member_ids_sorted[..], ); false - } + }, }; // if there was a prime before and they are not the one being removed, then set them @@ -841,11 +844,9 @@ impl Pallet { /// O(NLogM) with M candidates and `who` having voted for `N` of them. /// Reads Members, RunnersUp, Candidates and Voting(who) from database. 
fn is_defunct_voter(votes: &[T::AccountId]) -> bool { - votes.iter().all(|v| - !Self::is_member(v) && - !Self::is_runner_up(v) && - !Self::is_candidate(v).is_ok() - ) + votes.iter().all(|v| { + !Self::is_member(v) && !Self::is_runner_up(v) && !Self::is_candidate(v).is_ok() + }) } /// Remove a certain someone as a voter. @@ -876,15 +877,12 @@ impl Pallet { if candidates_and_deposit.len().is_zero() { Self::deposit_event(Event::EmptyTerm); - return T::DbWeight::get().reads(5); + return T::DbWeight::get().reads(5) } // All of the new winners that come out of phragmen will thus have a deposit recorded. - let candidate_ids = candidates_and_deposit - .iter() - .map(|(x, _)| x) - .cloned() - .collect::>(); + let candidate_ids = + candidates_and_deposit.iter().map(|(x, _)| x).cloned().collect::>(); // helper closures to deal with balance/stake. let total_issuance = T::Currency::total_issuance(); @@ -894,10 +892,11 @@ impl Pallet { let mut num_edges: u32 = 0; // used for prime election. let voters_and_stakes = Voting::::iter() - .map(|(voter, Voter { stake, votes, .. })| { (voter, stake, votes) }) + .map(|(voter, Voter { stake, votes, .. })| (voter, stake, votes)) .collect::>(); // used for phragmen. - let voters_and_votes = voters_and_stakes.iter() + let voters_and_votes = voters_and_stakes + .iter() .cloned() .map(|(voter, stake, votes)| { num_edges = num_edges.saturating_add(votes.len() as u32); @@ -913,15 +912,14 @@ impl Pallet { candidate_ids, voters_and_votes.clone(), None, - ).map(|ElectionResult { winners, assignments: _, }| { + ) + .map(|ElectionResult { winners, assignments: _ }| { // this is already sorted by id. - let old_members_ids_sorted = >::take().into_iter() - .map(|m| m.who) - .collect::>(); + let old_members_ids_sorted = + >::take().into_iter().map(|m| m.who).collect::>(); // this one needs a sort by id. 
- let mut old_runners_up_ids_sorted = >::take().into_iter() - .map(|r| r.who) - .collect::>(); + let mut old_runners_up_ids_sorted = + >::take().into_iter().map(|r| r.who).collect::>(); old_runners_up_ids_sorted.sort(); // filter out those who end up with no backing stake. @@ -937,16 +935,15 @@ impl Pallet { // split new set into winners and runners up. let split_point = desired_seats.min(new_set_with_stake.len()); - let mut new_members_sorted_by_id = new_set_with_stake.drain(..split_point).collect::>(); + let mut new_members_sorted_by_id = + new_set_with_stake.drain(..split_point).collect::>(); new_members_sorted_by_id.sort_by(|i, j| i.0.cmp(&j.0)); // all the rest will be runners-up new_set_with_stake.reverse(); let new_runners_up_sorted_by_rank = new_set_with_stake; - let mut new_runners_up_ids_sorted = new_runners_up_sorted_by_rank - .iter() - .map(|(r, _)| r.clone()) - .collect::>(); + let mut new_runners_up_ids_sorted = + new_runners_up_sorted_by_rank.iter().map(|(r, _)| r.clone()).collect::>(); new_runners_up_ids_sorted.sort(); // Now we select a prime member using a [Borda @@ -959,14 +956,15 @@ impl Pallet { .map(|c| (&c.0, BalanceOf::::zero())) .collect::>(); for (_, stake, votes) in voters_and_stakes.into_iter() { - for (vote_multiplier, who) in votes.iter() + for (vote_multiplier, who) in votes + .iter() .enumerate() .map(|(vote_position, who)| ((MAXIMUM_VOTE - vote_position) as u32, who)) { if let Ok(i) = prime_votes.binary_search_by_key(&who, |k| k.0) { - prime_votes[i].1 = prime_votes[i].1.saturating_add( - stake.saturating_mul(vote_multiplier.into()) - ); + prime_votes[i].1 = prime_votes[i] + .1 + .saturating_add(stake.saturating_mul(vote_multiplier.into())); } } } @@ -986,18 +984,13 @@ impl Pallet { &new_members_ids_sorted, &old_members_ids_sorted, ); - T::ChangeMembers::change_members_sorted( - &incoming, - &outgoing, - &new_members_ids_sorted, - ); + T::ChangeMembers::change_members_sorted(&incoming, &outgoing, &new_members_ids_sorted); 
T::ChangeMembers::set_prime(prime); // All candidates/members/runners-up who are no longer retaining a position as a // seat holder will lose their bond. candidates_and_deposit.iter().for_each(|(c, d)| { - if - new_members_ids_sorted.binary_search(c).is_err() && + if new_members_ids_sorted.binary_search(c).is_err() && new_runners_up_ids_sorted.binary_search(c).is_err() { let (imbalance, _) = T::Currency::slash_reserved(c, *d); @@ -1044,7 +1037,8 @@ impl Pallet { Self::deposit_event(Event::NewTerm(new_members_sorted_by_id)); >::mutate(|v| *v += 1); - }).map_err(|e| { + }) + .map_err(|e| { log::error!( target: "runtime::elections-phragmen", "Failed to run election [{:?}].", @@ -1076,11 +1070,9 @@ impl SortedMembers for Pallet { // checks in runtime benchmarking. #[cfg(feature = "runtime-benchmarks")] fn add(who: &T::AccountId) { - Members::::mutate(|members| { - match members.binary_search_by(|m| m.who.cmp(who)) { - Ok(_) => (), - Err(pos) => members.insert(pos, SeatHolder { who: who.clone(), ..Default::default() }), - } + Members::::mutate(|members| match members.binary_search_by(|m| m.who.cmp(who)) { + Ok(_) => (), + Err(pos) => members.insert(pos, SeatHolder { who: who.clone(), ..Default::default() }), }) } } @@ -1099,19 +1091,19 @@ impl ContainsLengthBound for Pallet { #[cfg(test)] mod tests { use super::*; + use crate as elections_phragmen; use frame_support::{ - assert_ok, assert_noop, parameter_types, traits::OnInitialize, - dispatch::DispatchResultWithPostInfo, + assert_noop, assert_ok, dispatch::DispatchResultWithPostInfo, parameter_types, + traits::OnInitialize, }; - use substrate_test_utils::assert_eq_uvec; + use frame_system::ensure_signed; use sp_core::H256; use sp_runtime::{ - BuildStorage, testing::Header, traits::{BlakeTwo256, IdentityLookup}, + BuildStorage, }; - use frame_system::ensure_signed; - use crate as elections_phragmen; + use substrate_test_utils::assert_eq_uvec; parameter_types! 
{ pub const BlockHashCount: u64 = 250; @@ -1260,10 +1252,7 @@ mod tests { impl Default for ExtBuilder { fn default() -> Self { - Self { - balance_factor: 1, - genesis_members: vec![], - } + Self { balance_factor: 1, genesis_members: vec![] } } } @@ -1286,10 +1275,7 @@ mod tests { } pub fn genesis_members(mut self, members: Vec<(u64, u64)>) -> Self { MEMBERS.with(|m| { - *m.borrow_mut() = members - .iter() - .map(|(m, _)| m.clone()) - .collect::>() + *m.borrow_mut() = members.iter().map(|(m, _)| m.clone()).collect::>() }); self.genesis_members = members; self @@ -1303,22 +1289,28 @@ mod tests { self } pub fn build_and_execute(self, test: impl FnOnce() -> ()) { - MEMBERS.with(|m| *m.borrow_mut() = self.genesis_members.iter().map(|(m, _)| m.clone()).collect::>()); + MEMBERS.with(|m| { + *m.borrow_mut() = + self.genesis_members.iter().map(|(m, _)| m.clone()).collect::>() + }); let mut ext: sp_io::TestExternalities = GenesisConfig { - balances: pallet_balances::GenesisConfig::{ + balances: pallet_balances::GenesisConfig:: { balances: vec![ (1, 10 * self.balance_factor), (2, 20 * self.balance_factor), (3, 30 * self.balance_factor), (4, 40 * self.balance_factor), (5, 50 * self.balance_factor), - (6, 60 * self.balance_factor) + (6, 60 * self.balance_factor), ], }, elections: elections_phragmen::GenesisConfig:: { - members: self.genesis_members + members: self.genesis_members, }, - }.build_storage().unwrap().into(); + } + .build_storage() + .unwrap() + .into(); ext.execute_with(pre_conditions); ext.execute_with(test); ext.execute_with(post_conditions) @@ -1326,10 +1318,7 @@ mod tests { } fn candidate_ids() -> Vec { - Elections::candidates() - .into_iter() - .map(|(c, _)| c) - .collect::>() + Elections::candidates().into_iter().map(|(c, _)| c).collect::>() } fn candidate_deposit(who: &u64) -> u64 { @@ -1356,7 +1345,10 @@ mod tests { } fn runners_up_and_stake() -> Vec<(u64, u64)> { - Elections::runners_up().into_iter().map(|r| (r.who, r.stake)).collect::>() + 
Elections::runners_up() + .into_iter() + .map(|r| (r.who, r.stake)) + .collect::>() } fn all_voters() -> Vec { @@ -1469,64 +1461,88 @@ mod tests { #[test] fn genesis_members_should_work() { - ExtBuilder::default().genesis_members(vec![(1, 10), (2, 20)]).build_and_execute(|| { - System::set_block_number(1); - assert_eq!( - Elections::members(), - vec![ - SeatHolder { who: 1, stake: 10, deposit: 0 }, - SeatHolder { who: 2, stake: 20, deposit: 0 } - ] - ); + ExtBuilder::default() + .genesis_members(vec![(1, 10), (2, 20)]) + .build_and_execute(|| { + System::set_block_number(1); + assert_eq!( + Elections::members(), + vec![ + SeatHolder { who: 1, stake: 10, deposit: 0 }, + SeatHolder { who: 2, stake: 20, deposit: 0 } + ] + ); - assert_eq!(Elections::voting(1), Voter { stake: 10u64, votes: vec![1], deposit: 0 }); - assert_eq!(Elections::voting(2), Voter { stake: 20u64, votes: vec![2], deposit: 0 }); + assert_eq!( + Elections::voting(1), + Voter { stake: 10u64, votes: vec![1], deposit: 0 } + ); + assert_eq!( + Elections::voting(2), + Voter { stake: 20u64, votes: vec![2], deposit: 0 } + ); - // they will persist since they have self vote. - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + // they will persist since they have self vote. 
+ System::set_block_number(5); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![1, 2]); - }) + assert_eq!(members_ids(), vec![1, 2]); + }) } #[test] fn genesis_voters_can_remove_lock() { - ExtBuilder::default().genesis_members(vec![(1, 10), (2, 20)]).build_and_execute(|| { - System::set_block_number(1); + ExtBuilder::default() + .genesis_members(vec![(1, 10), (2, 20)]) + .build_and_execute(|| { + System::set_block_number(1); - assert_eq!(Elections::voting(1), Voter { stake: 10u64, votes: vec![1], deposit: 0 }); - assert_eq!(Elections::voting(2), Voter { stake: 20u64, votes: vec![2], deposit: 0 }); + assert_eq!( + Elections::voting(1), + Voter { stake: 10u64, votes: vec![1], deposit: 0 } + ); + assert_eq!( + Elections::voting(2), + Voter { stake: 20u64, votes: vec![2], deposit: 0 } + ); - assert_ok!(Elections::remove_voter(Origin::signed(1))); - assert_ok!(Elections::remove_voter(Origin::signed(2))); + assert_ok!(Elections::remove_voter(Origin::signed(1))); + assert_ok!(Elections::remove_voter(Origin::signed(2))); - assert_eq!(Elections::voting(1), Default::default()); - assert_eq!(Elections::voting(2), Default::default()); - }) + assert_eq!(Elections::voting(1), Default::default()); + assert_eq!(Elections::voting(2), Default::default()); + }) } #[test] fn genesis_members_unsorted_should_work() { - ExtBuilder::default().genesis_members(vec![(2, 20), (1, 10)]).build_and_execute(|| { - System::set_block_number(1); - assert_eq!( - Elections::members(), - vec![ - SeatHolder { who: 1, stake: 10, deposit: 0 }, - SeatHolder { who: 2, stake: 20, deposit: 0 }, - ] - ); + ExtBuilder::default() + .genesis_members(vec![(2, 20), (1, 10)]) + .build_and_execute(|| { + System::set_block_number(1); + assert_eq!( + Elections::members(), + vec![ + SeatHolder { who: 1, stake: 10, deposit: 0 }, + SeatHolder { who: 2, stake: 20, deposit: 0 }, + ] + ); - assert_eq!(Elections::voting(1), Voter { stake: 10u64, votes: vec![1], deposit: 0 }); - 
assert_eq!(Elections::voting(2), Voter { stake: 20u64, votes: vec![2], deposit: 0 }); + assert_eq!( + Elections::voting(1), + Voter { stake: 10u64, votes: vec![1], deposit: 0 } + ); + assert_eq!( + Elections::voting(2), + Voter { stake: 20u64, votes: vec![2], deposit: 0 } + ); - // they will persist since they have self vote. - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + // they will persist since they have self vote. + System::set_block_number(5); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![1, 2]); - }) + assert_eq!(members_ids(), vec![1, 2]); + }) } #[test] @@ -1558,10 +1574,7 @@ mod tests { #[test] fn term_duration_zero_is_passive() { - ExtBuilder::default() - .term_duration(0) - .build_and_execute(|| - { + ExtBuilder::default().term_duration(0).build_and_execute(|| { assert_eq!(::TermDuration::get(), 0); assert_eq!(::DesiredMembers::get(), 2); assert_eq!(Elections::election_rounds(), 0); @@ -1660,10 +1673,7 @@ mod tests { assert_eq!(candidate_ids(), Vec::::new()); assert_ok!(submit_candidacy(Origin::signed(1))); assert_eq!(candidate_ids(), vec![1]); - assert_noop!( - submit_candidacy(Origin::signed(1)), - Error::::DuplicatedCandidate, - ); + assert_noop!(submit_candidacy(Origin::signed(1)), Error::::DuplicatedCandidate,); }); } @@ -1681,10 +1691,7 @@ mod tests { assert!(Elections::runners_up().is_empty()); assert!(candidate_ids().is_empty()); - assert_noop!( - submit_candidacy(Origin::signed(5)), - Error::::MemberSubmit, - ); + assert_noop!(submit_candidacy(Origin::signed(5)), Error::::MemberSubmit,); }); } @@ -1704,10 +1711,7 @@ mod tests { assert_eq!(members_ids(), vec![4, 5]); assert_eq!(runners_up_ids(), vec![3]); - assert_noop!( - submit_candidacy(Origin::signed(3)), - Error::::RunnerUpSubmit, - ); + assert_noop!(submit_candidacy(Origin::signed(3)), Error::::RunnerUpSubmit,); }); } @@ -1842,10 +1846,7 @@ mod tests { #[test] fn cannot_vote_for_no_candidate() { 
ExtBuilder::default().build_and_execute(|| { - assert_noop!( - vote(Origin::signed(2), vec![], 20), - Error::::NoVotes, - ); + assert_noop!(vote(Origin::signed(2), vec![], 20), Error::::NoVotes,); }); } @@ -1930,10 +1931,7 @@ mod tests { assert_eq!(members_ids(), vec![4, 5]); assert_eq!(PRIME.with(|p| *p.borrow()), Some(5)); - assert_ok!(Elections::renounce_candidacy( - Origin::signed(4), - Renouncing::Member - )); + assert_ok!(Elections::renounce_candidacy(Origin::signed(4), Renouncing::Member)); assert_eq!(members_ids(), vec![5]); assert_eq!(PRIME.with(|p| *p.borrow()), Some(5)); @@ -1966,35 +1964,34 @@ mod tests { ExtBuilder::default() .desired_runners_up(1) .balance_factor(10) - .build_and_execute( - || { - // when we have only candidates - assert_ok!(submit_candidacy(Origin::signed(5))); - assert_ok!(submit_candidacy(Origin::signed(4))); - assert_ok!(submit_candidacy(Origin::signed(3))); + .build_and_execute(|| { + // when we have only candidates + assert_ok!(submit_candidacy(Origin::signed(5))); + assert_ok!(submit_candidacy(Origin::signed(4))); + assert_ok!(submit_candidacy(Origin::signed(3))); - assert_noop!( - // content of the vote is irrelevant. - vote(Origin::signed(1), vec![9, 99, 999, 9999], 5), - Error::::TooManyVotes, - ); + assert_noop!( + // content of the vote is irrelevant. 
+ vote(Origin::signed(1), vec![9, 99, 999, 9999], 5), + Error::::TooManyVotes, + ); - assert_ok!(vote(Origin::signed(3), vec![3], 30)); - assert_ok!(vote(Origin::signed(4), vec![4], 40)); - assert_ok!(vote(Origin::signed(5), vec![5], 50)); + assert_ok!(vote(Origin::signed(3), vec![3], 30)); + assert_ok!(vote(Origin::signed(4), vec![4], 40)); + assert_ok!(vote(Origin::signed(5), vec![5], 50)); - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + System::set_block_number(5); + Elections::on_initialize(System::block_number()); - // now we have 2 members, 1 runner-up, and 1 new candidate - assert_ok!(submit_candidacy(Origin::signed(2))); + // now we have 2 members, 1 runner-up, and 1 new candidate + assert_ok!(submit_candidacy(Origin::signed(2))); - assert_ok!(vote(Origin::signed(1), vec![9, 99, 999, 9999], 5)); - assert_noop!( - vote(Origin::signed(1), vec![9, 99, 999, 9_999, 99_999], 5), - Error::::TooManyVotes, - ); - }); + assert_ok!(vote(Origin::signed(1), vec![9, 99, 999, 9999], 5)); + assert_noop!( + vote(Origin::signed(1), vec![9, 99, 999, 9_999, 99_999], 5), + Error::::TooManyVotes, + ); + }); } #[test] @@ -2003,10 +2000,7 @@ mod tests { assert_ok!(submit_candidacy(Origin::signed(5))); assert_ok!(submit_candidacy(Origin::signed(4))); - assert_noop!( - vote(Origin::signed(2), vec![4], 1), - Error::::LowBalance, - ); + assert_noop!(vote(Origin::signed(2), vec![4], 1), Error::::LowBalance,); }) } @@ -2147,7 +2141,10 @@ mod tests { System::set_block_number(5); Elections::on_initialize(System::block_number()); - System::assert_last_event(Event::Elections(super::Event::NewTerm(vec![(4, 40), (5, 50)]))); + System::assert_last_event(Event::Elections(super::Event::NewTerm(vec![ + (4, 40), + (5, 50), + ]))); assert_eq!(members_and_stake(), vec![(4, 40), (5, 50)]); assert_eq!(runners_up_and_stake(), vec![]); @@ -2476,10 +2473,7 @@ mod tests { let unwrapped_error = Elections::remove_member(Origin::root(), 4, true).unwrap_err(); 
assert!(matches!( unwrapped_error.error, - DispatchError::Module { - message: Some("InvalidReplacement"), - .. - } + DispatchError::Module { message: Some("InvalidReplacement"), .. } )); assert!(unwrapped_error.post_info.actual_weight.is_some()); }); @@ -2502,10 +2496,7 @@ mod tests { let unwrapped_error = Elections::remove_member(Origin::root(), 4, false).unwrap_err(); assert!(matches!( unwrapped_error.error, - DispatchError::Module { - message: Some("InvalidReplacement"), - .. - } + DispatchError::Module { message: Some("InvalidReplacement"), .. } )); assert!(unwrapped_error.post_info.actual_weight.is_some()); }); @@ -2586,7 +2577,10 @@ mod tests { // 5 is an outgoing loser. will also get slashed. assert_eq!(balances(&5), (45, 2)); - System::assert_has_event(Event::Elections(super::Event::NewTerm(vec![(4, 40), (5, 50)]))); + System::assert_has_event(Event::Elections(super::Event::NewTerm(vec![ + (4, 40), + (5, 50), + ]))); }) } @@ -2632,24 +2626,22 @@ mod tests { #[test] fn runner_up_replacement_maintains_members_order() { - ExtBuilder::default() - .desired_runners_up(2) - .build_and_execute(|| { - assert_ok!(submit_candidacy(Origin::signed(5))); - assert_ok!(submit_candidacy(Origin::signed(4))); + ExtBuilder::default().desired_runners_up(2).build_and_execute(|| { + assert_ok!(submit_candidacy(Origin::signed(5))); + assert_ok!(submit_candidacy(Origin::signed(4))); assert_ok!(submit_candidacy(Origin::signed(2))); assert_ok!(vote(Origin::signed(2), vec![5], 20)); assert_ok!(vote(Origin::signed(4), vec![4], 40)); assert_ok!(vote(Origin::signed(5), vec![2], 50)); - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + System::set_block_number(5); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![2, 4]); - assert_ok!(Elections::remove_member(Origin::root(), 2, true)); - assert_eq!(members_ids(), vec![4, 5]); - }); + assert_eq!(members_ids(), vec![2, 4]); + 
assert_ok!(Elections::remove_member(Origin::root(), 2, true)); + assert_eq!(members_ids(), vec![4, 5]); + }); } #[test] @@ -2705,12 +2697,10 @@ mod tests { #[test] fn can_renounce_candidacy_runner_up() { - ExtBuilder::default() - .desired_runners_up(2) - .build_and_execute(|| { - assert_ok!(submit_candidacy(Origin::signed(5))); - assert_ok!(submit_candidacy(Origin::signed(4))); - assert_ok!(submit_candidacy(Origin::signed(3))); + ExtBuilder::default().desired_runners_up(2).build_and_execute(|| { + assert_ok!(submit_candidacy(Origin::signed(5))); + assert_ok!(submit_candidacy(Origin::signed(4))); + assert_ok!(submit_candidacy(Origin::signed(3))); assert_ok!(submit_candidacy(Origin::signed(2))); assert_ok!(vote(Origin::signed(5), vec![4], 50)); @@ -2718,21 +2708,18 @@ mod tests { assert_ok!(vote(Origin::signed(3), vec![3], 30)); assert_ok!(vote(Origin::signed(2), vec![2], 20)); - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + System::set_block_number(5); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![4, 5]); - assert_eq!(runners_up_ids(), vec![2, 3]); + assert_eq!(members_ids(), vec![4, 5]); + assert_eq!(runners_up_ids(), vec![2, 3]); - assert_ok!(Elections::renounce_candidacy( - Origin::signed(3), - Renouncing::RunnerUp - )); - assert_eq!(balances(&3), (28, 2)); // 2 is voting bond. + assert_ok!(Elections::renounce_candidacy(Origin::signed(3), Renouncing::RunnerUp)); + assert_eq!(balances(&3), (28, 2)); // 2 is voting bond. 
- assert_eq!(members_ids(), vec![4, 5]); - assert_eq!(runners_up_ids(), vec![2]); - }) + assert_eq!(members_ids(), vec![4, 5]); + assert_eq!(runners_up_ids(), vec![2]); + }) } #[test] @@ -2867,117 +2854,124 @@ mod tests { #[test] fn unsorted_runners_up_are_detected() { - ExtBuilder::default().desired_runners_up(2).desired_members(1).build_and_execute(|| { - assert_ok!(submit_candidacy(Origin::signed(5))); - assert_ok!(submit_candidacy(Origin::signed(4))); - assert_ok!(submit_candidacy(Origin::signed(3))); + ExtBuilder::default() + .desired_runners_up(2) + .desired_members(1) + .build_and_execute(|| { + assert_ok!(submit_candidacy(Origin::signed(5))); + assert_ok!(submit_candidacy(Origin::signed(4))); + assert_ok!(submit_candidacy(Origin::signed(3))); - assert_ok!(vote(Origin::signed(5), vec![5], 50)); - assert_ok!(vote(Origin::signed(4), vec![4], 5)); - assert_ok!(vote(Origin::signed(3), vec![3], 15)); + assert_ok!(vote(Origin::signed(5), vec![5], 50)); + assert_ok!(vote(Origin::signed(4), vec![4], 5)); + assert_ok!(vote(Origin::signed(3), vec![3], 15)); - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + System::set_block_number(5); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![5]); - assert_eq!(runners_up_ids(), vec![4, 3]); + assert_eq!(members_ids(), vec![5]); + assert_eq!(runners_up_ids(), vec![4, 3]); - assert_ok!(submit_candidacy(Origin::signed(2))); - assert_ok!(vote(Origin::signed(2), vec![2], 10)); + assert_ok!(submit_candidacy(Origin::signed(2))); + assert_ok!(vote(Origin::signed(2), vec![2], 10)); - System::set_block_number(10); - Elections::on_initialize(System::block_number()); + System::set_block_number(10); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![5]); - assert_eq!(runners_up_ids(), vec![2, 3]); + assert_eq!(members_ids(), vec![5]); + assert_eq!(runners_up_ids(), vec![2, 3]); - // 4 is outgoing runner-up. Slash candidacy bond. 
- assert_eq!(balances(&4), (35, 2)); - // 3 stays. - assert_eq!(balances(&3), (25, 5)); - }) + // 4 is outgoing runner-up. Slash candidacy bond. + assert_eq!(balances(&4), (35, 2)); + // 3 stays. + assert_eq!(balances(&3), (25, 5)); + }) } #[test] fn member_to_runner_up_wont_slash() { - ExtBuilder::default().desired_runners_up(2).desired_members(1).build_and_execute(|| { - assert_ok!(submit_candidacy(Origin::signed(4))); - assert_ok!(submit_candidacy(Origin::signed(3))); - assert_ok!(submit_candidacy(Origin::signed(2))); - + ExtBuilder::default() + .desired_runners_up(2) + .desired_members(1) + .build_and_execute(|| { + assert_ok!(submit_candidacy(Origin::signed(4))); + assert_ok!(submit_candidacy(Origin::signed(3))); + assert_ok!(submit_candidacy(Origin::signed(2))); - assert_ok!(vote(Origin::signed(4), vec![4], 40)); - assert_ok!(vote(Origin::signed(3), vec![3], 30)); - assert_ok!(vote(Origin::signed(2), vec![2], 20)); + assert_ok!(vote(Origin::signed(4), vec![4], 40)); + assert_ok!(vote(Origin::signed(3), vec![3], 30)); + assert_ok!(vote(Origin::signed(2), vec![2], 20)); - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + System::set_block_number(5); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![4]); - assert_eq!(runners_up_ids(), vec![2, 3]); + assert_eq!(members_ids(), vec![4]); + assert_eq!(runners_up_ids(), vec![2, 3]); - assert_eq!(balances(&4), (35, 5)); - assert_eq!(balances(&3), (25, 5)); - assert_eq!(balances(&2), (15, 5)); + assert_eq!(balances(&4), (35, 5)); + assert_eq!(balances(&3), (25, 5)); + assert_eq!(balances(&2), (15, 5)); - // this guy will shift everyone down. - assert_ok!(submit_candidacy(Origin::signed(5))); - assert_ok!(vote(Origin::signed(5), vec![5], 50)); + // this guy will shift everyone down. 
+ assert_ok!(submit_candidacy(Origin::signed(5))); + assert_ok!(vote(Origin::signed(5), vec![5], 50)); - System::set_block_number(10); - Elections::on_initialize(System::block_number()); + System::set_block_number(10); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![5]); - assert_eq!(runners_up_ids(), vec![3, 4]); + assert_eq!(members_ids(), vec![5]); + assert_eq!(runners_up_ids(), vec![3, 4]); - // 4 went from member to runner-up -- don't slash. - assert_eq!(balances(&4), (35, 5)); - // 3 stayed runner-up -- don't slash. - assert_eq!(balances(&3), (25, 5)); - // 2 was removed -- slash. - assert_eq!(balances(&2), (15, 2)); - }); + // 4 went from member to runner-up -- don't slash. + assert_eq!(balances(&4), (35, 5)); + // 3 stayed runner-up -- don't slash. + assert_eq!(balances(&3), (25, 5)); + // 2 was removed -- slash. + assert_eq!(balances(&2), (15, 2)); + }); } #[test] fn runner_up_to_member_wont_slash() { - ExtBuilder::default().desired_runners_up(2).desired_members(1).build_and_execute(|| { - assert_ok!(submit_candidacy(Origin::signed(4))); - assert_ok!(submit_candidacy(Origin::signed(3))); - assert_ok!(submit_candidacy(Origin::signed(2))); - + ExtBuilder::default() + .desired_runners_up(2) + .desired_members(1) + .build_and_execute(|| { + assert_ok!(submit_candidacy(Origin::signed(4))); + assert_ok!(submit_candidacy(Origin::signed(3))); + assert_ok!(submit_candidacy(Origin::signed(2))); - assert_ok!(vote(Origin::signed(4), vec![4], 40)); - assert_ok!(vote(Origin::signed(3), vec![3], 30)); - assert_ok!(vote(Origin::signed(2), vec![2], 20)); + assert_ok!(vote(Origin::signed(4), vec![4], 40)); + assert_ok!(vote(Origin::signed(3), vec![3], 30)); + assert_ok!(vote(Origin::signed(2), vec![2], 20)); - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + System::set_block_number(5); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![4]); - 
assert_eq!(runners_up_ids(), vec![2, 3]); + assert_eq!(members_ids(), vec![4]); + assert_eq!(runners_up_ids(), vec![2, 3]); - assert_eq!(balances(&4), (35, 5)); - assert_eq!(balances(&3), (25, 5)); - assert_eq!(balances(&2), (15, 5)); + assert_eq!(balances(&4), (35, 5)); + assert_eq!(balances(&3), (25, 5)); + assert_eq!(balances(&2), (15, 5)); - // swap some votes. - assert_ok!(vote(Origin::signed(4), vec![2], 40)); - assert_ok!(vote(Origin::signed(2), vec![4], 20)); + // swap some votes. + assert_ok!(vote(Origin::signed(4), vec![2], 40)); + assert_ok!(vote(Origin::signed(2), vec![4], 20)); - System::set_block_number(10); - Elections::on_initialize(System::block_number()); + System::set_block_number(10); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![2]); - assert_eq!(runners_up_ids(), vec![4, 3]); + assert_eq!(members_ids(), vec![2]); + assert_eq!(runners_up_ids(), vec![4, 3]); - // 2 went from runner to member, don't slash - assert_eq!(balances(&2), (15, 5)); - // 4 went from member to runner, don't slash - assert_eq!(balances(&4), (35, 5)); - // 3 stayed the same - assert_eq!(balances(&3), (25, 5)); - }); + // 2 went from runner to member, don't slash + assert_eq!(balances(&2), (15, 5)); + // 4 went from member to runner, don't slash + assert_eq!(balances(&4), (35, 5)); + // 3 stayed the same + assert_eq!(balances(&3), (25, 5)); + }); } #[test] @@ -3027,14 +3021,17 @@ mod tests { #[test] fn no_desired_members() { // not interested in anything - ExtBuilder::default().desired_members(0).desired_runners_up(0).build_and_execute(|| { - assert_eq!(Elections::candidates().len(), 0); + ExtBuilder::default() + .desired_members(0) + .desired_runners_up(0) + .build_and_execute(|| { + assert_eq!(Elections::candidates().len(), 0); - assert_ok!(submit_candidacy(Origin::signed(4))); - assert_ok!(submit_candidacy(Origin::signed(3))); - assert_ok!(submit_candidacy(Origin::signed(2))); + assert_ok!(submit_candidacy(Origin::signed(4))); + 
assert_ok!(submit_candidacy(Origin::signed(3))); + assert_ok!(submit_candidacy(Origin::signed(2))); - assert_eq!(Elections::candidates().len(), 3); + assert_eq!(Elections::candidates().len(), 3); assert_ok!(vote(Origin::signed(4), vec![4], 40)); assert_ok!(vote(Origin::signed(3), vec![3], 30)); @@ -3044,56 +3041,62 @@ mod tests { Elections::on_initialize(System::block_number()); assert_eq!(members_ids().len(), 0); - assert_eq!(runners_up_ids().len(), 0); - assert_eq!(all_voters().len(), 3); - assert_eq!(Elections::candidates().len(), 0); - }); + assert_eq!(runners_up_ids().len(), 0); + assert_eq!(all_voters().len(), 3); + assert_eq!(Elections::candidates().len(), 0); + }); // not interested in members - ExtBuilder::default().desired_members(0).desired_runners_up(2).build_and_execute(|| { - assert_eq!(Elections::candidates().len(), 0); + ExtBuilder::default() + .desired_members(0) + .desired_runners_up(2) + .build_and_execute(|| { + assert_eq!(Elections::candidates().len(), 0); - assert_ok!(submit_candidacy(Origin::signed(4))); - assert_ok!(submit_candidacy(Origin::signed(3))); - assert_ok!(submit_candidacy(Origin::signed(2))); + assert_ok!(submit_candidacy(Origin::signed(4))); + assert_ok!(submit_candidacy(Origin::signed(3))); + assert_ok!(submit_candidacy(Origin::signed(2))); - assert_eq!(Elections::candidates().len(), 3); + assert_eq!(Elections::candidates().len(), 3); - assert_ok!(vote(Origin::signed(4), vec![4], 40)); - assert_ok!(vote(Origin::signed(3), vec![3], 30)); - assert_ok!(vote(Origin::signed(2), vec![2], 20)); + assert_ok!(vote(Origin::signed(4), vec![4], 40)); + assert_ok!(vote(Origin::signed(3), vec![3], 30)); + assert_ok!(vote(Origin::signed(2), vec![2], 20)); - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + System::set_block_number(5); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids().len(), 0); - assert_eq!(runners_up_ids(), vec![3, 4]); - assert_eq!(all_voters().len(), 3); - 
assert_eq!(Elections::candidates().len(), 0); - }); + assert_eq!(members_ids().len(), 0); + assert_eq!(runners_up_ids(), vec![3, 4]); + assert_eq!(all_voters().len(), 3); + assert_eq!(Elections::candidates().len(), 0); + }); // not interested in runners-up - ExtBuilder::default().desired_members(2).desired_runners_up(0).build_and_execute(|| { - assert_eq!(Elections::candidates().len(), 0); + ExtBuilder::default() + .desired_members(2) + .desired_runners_up(0) + .build_and_execute(|| { + assert_eq!(Elections::candidates().len(), 0); - assert_ok!(submit_candidacy(Origin::signed(4))); - assert_ok!(submit_candidacy(Origin::signed(3))); - assert_ok!(submit_candidacy(Origin::signed(2))); + assert_ok!(submit_candidacy(Origin::signed(4))); + assert_ok!(submit_candidacy(Origin::signed(3))); + assert_ok!(submit_candidacy(Origin::signed(2))); - assert_eq!(Elections::candidates().len(), 3); + assert_eq!(Elections::candidates().len(), 3); - assert_ok!(vote(Origin::signed(4), vec![4], 40)); - assert_ok!(vote(Origin::signed(3), vec![3], 30)); - assert_ok!(vote(Origin::signed(2), vec![2], 20)); + assert_ok!(vote(Origin::signed(4), vec![4], 40)); + assert_ok!(vote(Origin::signed(3), vec![3], 30)); + assert_ok!(vote(Origin::signed(2), vec![2], 20)); - System::set_block_number(5); - Elections::on_initialize(System::block_number()); + System::set_block_number(5); + Elections::on_initialize(System::block_number()); - assert_eq!(members_ids(), vec![3, 4]); - assert_eq!(runners_up_ids().len(), 0); - assert_eq!(all_voters().len(), 3); - assert_eq!(Elections::candidates().len(), 0); - }); + assert_eq!(members_ids(), vec![3, 4]); + assert_eq!(runners_up_ids().len(), 0); + assert_eq!(all_voters().len(), 3); + assert_eq!(Elections::candidates().len(), 0); + }); } #[test] diff --git a/frame/elections-phragmen/src/migrations/v3.rs b/frame/elections-phragmen/src/migrations/v3.rs index 8afc9ed66920b..b19146a9e28e5 100644 --- a/frame/elections-phragmen/src/migrations/v3.rs +++ 
b/frame/elections-phragmen/src/migrations/v3.rs @@ -17,12 +17,13 @@ //! Migrations to version [`3.0.0`], as denoted by the changelog. -use codec::{Encode, Decode, FullCodec}; -use sp_std::prelude::*; +use codec::{Decode, Encode, FullCodec}; use frame_support::{ - RuntimeDebug, weights::Weight, Twox64Concat, traits::{GetPalletVersion, PalletVersion}, + weights::Weight, + RuntimeDebug, Twox64Concat, }; +use sp_std::prelude::*; #[derive(Encode, Decode, Clone, Default, RuntimeDebug, PartialEq)] struct SeatHolder { @@ -89,7 +90,7 @@ pub fn apply(old_voter_bond: T::Balance, old_candidacy_bond: T::Balan migrate_runners_up_to_recorded_deposit::(old_candidacy_bond); migrate_members_to_recorded_deposit::(old_candidacy_bond); Weight::max_value() - } + }, _ => { log::warn!( target: "runtime::elections-phragmen", @@ -103,15 +104,9 @@ pub fn apply(old_voter_bond: T::Balance, old_candidacy_bond: T::Balan /// Migrate from the old legacy voting bond (fixed) to the new one (per-vote dynamic). pub fn migrate_voters_to_recorded_deposit(old_deposit: T::Balance) { - >::translate::<(T::Balance, Vec), _>( - |_who, (stake, votes)| { - Some(Voter { - votes, - stake, - deposit: old_deposit, - }) - }, - ); + >::translate::<(T::Balance, Vec), _>(|_who, (stake, votes)| { + Some(Voter { votes, stake, deposit: old_deposit }) + }); log::info!( target: "runtime::elections-phragmen", @@ -122,50 +117,39 @@ pub fn migrate_voters_to_recorded_deposit(old_deposit: T::Balance) { /// Migrate all candidates to recorded deposit. 
pub fn migrate_candidates_to_recorded_deposit(old_deposit: T::Balance) { - let _ = >::translate::, _>( - |maybe_old_candidates| { - maybe_old_candidates.map(|old_candidates| { - log::info!( - target: "runtime::elections-phragmen", - "migrated {} candidate accounts.", - old_candidates.len(), - ); - old_candidates - .into_iter() - .map(|c| (c, old_deposit)) - .collect::>() - }) - }, - ); + let _ = >::translate::, _>(|maybe_old_candidates| { + maybe_old_candidates.map(|old_candidates| { + log::info!( + target: "runtime::elections-phragmen", + "migrated {} candidate accounts.", + old_candidates.len(), + ); + old_candidates.into_iter().map(|c| (c, old_deposit)).collect::>() + }) + }); } /// Migrate all members to recorded deposit. pub fn migrate_members_to_recorded_deposit(old_deposit: T::Balance) { - let _ = >::translate::, _>( - |maybe_old_members| { - maybe_old_members.map(|old_members| { - log::info!( - target: "runtime::elections-phragmen", - "migrated {} member accounts.", - old_members.len(), - ); - old_members - .into_iter() - .map(|(who, stake)| SeatHolder { - who, - stake, - deposit: old_deposit, - }) - .collect::>() - }) - }, - ); + let _ = >::translate::, _>(|maybe_old_members| { + maybe_old_members.map(|old_members| { + log::info!( + target: "runtime::elections-phragmen", + "migrated {} member accounts.", + old_members.len(), + ); + old_members + .into_iter() + .map(|(who, stake)| SeatHolder { who, stake, deposit: old_deposit }) + .collect::>() + }) + }); } /// Migrate all runners-up to recorded deposit. 
pub fn migrate_runners_up_to_recorded_deposit(old_deposit: T::Balance) { - let _ = >::translate::, _>( - |maybe_old_runners_up| { + let _ = + >::translate::, _>(|maybe_old_runners_up| { maybe_old_runners_up.map(|old_runners_up| { log::info!( target: "runtime::elections-phragmen", @@ -174,13 +158,8 @@ pub fn migrate_runners_up_to_recorded_deposit(old_deposit: T::Balance ); old_runners_up .into_iter() - .map(|(who, stake)| SeatHolder { - who, - stake, - deposit: old_deposit, - }) + .map(|(who, stake)| SeatHolder { who, stake, deposit: old_deposit }) .collect::>() }) - }, - ); + }); } diff --git a/frame/elections-phragmen/src/migrations/v4.rs b/frame/elections-phragmen/src/migrations/v4.rs index f704b203d34cf..fde9a768f335e 100644 --- a/frame/elections-phragmen/src/migrations/v4.rs +++ b/frame/elections-phragmen/src/migrations/v4.rs @@ -18,8 +18,8 @@ //! Migrations to version [`4.0.0`], as denoted by the changelog. use frame_support::{ + traits::{Get, GetPalletVersion, PalletVersion}, weights::Weight, - traits::{GetPalletVersion, PalletVersion, Get}, }; /// The old prefix. @@ -32,17 +32,15 @@ pub const OLD_PREFIX: &[u8] = b"PhragmenElection"; /// `::PalletInfo::name::`. /// /// The old storage prefix, `PhragmenElection` is hardcoded in the migration code. -pub fn migrate< - T: frame_system::Config, - P: GetPalletVersion, - N: AsRef, ->(new_pallet_name: N) -> Weight { +pub fn migrate>( + new_pallet_name: N, +) -> Weight { if new_pallet_name.as_ref().as_bytes() == OLD_PREFIX { log::info!( target: "runtime::elections-phragmen", "New pallet name is equal to the old prefix. No migration needs to be done.", ); - return 0; + return 0 } let maybe_storage_version =

::storage_version(); log::info!( @@ -59,7 +57,7 @@ pub fn migrate< new_pallet_name.as_ref().as_bytes(), ); ::BlockWeights::get().max_block - } + }, _ => { log::warn!( target: "runtime::elections-phragmen", @@ -103,7 +101,7 @@ pub fn pre_migration>(new: N) { /// [`frame_support::traits::OnRuntimeUpgrade::post_upgrade`] for further testing. /// /// Panics if anything goes wrong. -pub fn post_migration

() { +pub fn post_migration() { log::info!("post-migration elections-phragmen"); // ensure we've been updated to v4 by the automatic write of crate version -> storage version. assert!(

::storage_version().unwrap().major == 4); diff --git a/frame/elections-phragmen/src/weights.rs b/frame/elections-phragmen/src/weights.rs index 12a3a433401bb..f44d763452ad1 100644 --- a/frame/elections-phragmen/src/weights.rs +++ b/frame/elections-phragmen/src/weights.rs @@ -35,47 +35,49 @@ // --output=./frame/elections-phragmen/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_elections_phragmen. pub trait WeightInfo { - fn vote_equal(v: u32, ) -> Weight; - fn vote_more(v: u32, ) -> Weight; - fn vote_less(v: u32, ) -> Weight; + fn vote_equal(v: u32) -> Weight; + fn vote_more(v: u32) -> Weight; + fn vote_less(v: u32) -> Weight; fn remove_voter() -> Weight; - fn submit_candidacy(c: u32, ) -> Weight; - fn renounce_candidacy_candidate(c: u32, ) -> Weight; + fn submit_candidacy(c: u32) -> Weight; + fn renounce_candidacy_candidate(c: u32) -> Weight; fn renounce_candidacy_members() -> Weight; fn renounce_candidacy_runners_up() -> Weight; fn remove_member_with_replacement() -> Weight; fn remove_member_wrong_refund() -> Weight; - fn clean_defunct_voters(v: u32, d: u32, ) -> Weight; - fn election_phragmen(c: u32, v: u32, e: u32, ) -> Weight; + fn clean_defunct_voters(v: u32, d: u32) -> Weight; + fn election_phragmen(c: u32, v: u32, e: u32) -> Weight; } /// Weights for pallet_elections_phragmen using the Substrate node and recommended hardware. 
pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn vote_equal(v: u32, ) -> Weight { + fn vote_equal(v: u32) -> Weight { (43_911_000 as Weight) // Standard Error: 7_000 .saturating_add((324_000 as Weight).saturating_mul(v as Weight)) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn vote_more(v: u32, ) -> Weight { + fn vote_more(v: u32) -> Weight { (68_236_000 as Weight) // Standard Error: 10_000 .saturating_add((359_000 as Weight).saturating_mul(v as Weight)) .saturating_add(T::DbWeight::get().reads(5 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn vote_less(v: u32, ) -> Weight { + fn vote_less(v: u32) -> Weight { (68_162_000 as Weight) // Standard Error: 9_000 .saturating_add((350_000 as Weight).saturating_mul(v as Weight)) @@ -87,14 +89,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn submit_candidacy(c: u32, ) -> Weight { + fn submit_candidacy(c: u32) -> Weight { (58_498_000 as Weight) // Standard Error: 1_000 .saturating_add((305_000 as Weight).saturating_mul(c as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn renounce_candidacy_candidate(c: u32, ) -> Weight { + fn renounce_candidacy_candidate(c: u32) -> Weight { (52_062_000 as Weight) // Standard Error: 0 .saturating_add((173_000 as Weight).saturating_mul(c as Weight)) @@ -117,10 +119,9 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(5 as Weight)) } fn remove_member_wrong_refund() -> Weight { - (6_877_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) + (6_877_000 as Weight).saturating_add(T::DbWeight::get().reads(1 as Weight)) } - fn clean_defunct_voters(v: u32, _d: u32, ) -> Weight { + fn clean_defunct_voters(v: u32, _d: u32) -> Weight { 
(0 as Weight) // Standard Error: 39_000 .saturating_add((112_381_000 as Weight).saturating_mul(v as Weight)) @@ -128,7 +129,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(v as Weight))) .saturating_add(T::DbWeight::get().writes((3 as Weight).saturating_mul(v as Weight))) } - fn election_phragmen(c: u32, v: u32, e: u32, ) -> Weight { + fn election_phragmen(c: u32, v: u32, e: u32) -> Weight { (0 as Weight) // Standard Error: 1_789_000 .saturating_add((42_600_000 as Weight).saturating_mul(c as Weight)) @@ -144,21 +145,21 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn vote_equal(v: u32, ) -> Weight { + fn vote_equal(v: u32) -> Weight { (43_911_000 as Weight) // Standard Error: 7_000 .saturating_add((324_000 as Weight).saturating_mul(v as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn vote_more(v: u32, ) -> Weight { + fn vote_more(v: u32) -> Weight { (68_236_000 as Weight) // Standard Error: 10_000 .saturating_add((359_000 as Weight).saturating_mul(v as Weight)) .saturating_add(RocksDbWeight::get().reads(5 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn vote_less(v: u32, ) -> Weight { + fn vote_less(v: u32) -> Weight { (68_162_000 as Weight) // Standard Error: 9_000 .saturating_add((350_000 as Weight).saturating_mul(v as Weight)) @@ -170,14 +171,14 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn submit_candidacy(c: u32, ) -> Weight { + fn submit_candidacy(c: u32) -> Weight { (58_498_000 as Weight) // Standard Error: 1_000 .saturating_add((305_000 as Weight).saturating_mul(c as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn 
renounce_candidacy_candidate(c: u32, ) -> Weight { + fn renounce_candidacy_candidate(c: u32) -> Weight { (52_062_000 as Weight) // Standard Error: 0 .saturating_add((173_000 as Weight).saturating_mul(c as Weight)) @@ -200,10 +201,9 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(5 as Weight)) } fn remove_member_wrong_refund() -> Weight { - (6_877_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) + (6_877_000 as Weight).saturating_add(RocksDbWeight::get().reads(1 as Weight)) } - fn clean_defunct_voters(v: u32, _d: u32, ) -> Weight { + fn clean_defunct_voters(v: u32, _d: u32) -> Weight { (0 as Weight) // Standard Error: 39_000 .saturating_add((112_381_000 as Weight).saturating_mul(v as Weight)) @@ -211,7 +211,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads((3 as Weight).saturating_mul(v as Weight))) .saturating_add(RocksDbWeight::get().writes((3 as Weight).saturating_mul(v as Weight))) } - fn election_phragmen(c: u32, v: u32, e: u32, ) -> Weight { + fn election_phragmen(c: u32, v: u32, e: u32) -> Weight { (0 as Weight) // Standard Error: 1_789_000 .saturating_add((42_600_000 as Weight).saturating_mul(c as Weight)) diff --git a/frame/elections/src/lib.rs b/frame/elections/src/lib.rs index 61699062ccad6..fdf750d297069 100644 --- a/frame/elections/src/lib.rs +++ b/frame/elections/src/lib.rs @@ -29,24 +29,26 @@ //! whose voting is serially unsuccessful. 
#![cfg_attr(not(feature = "std"), no_std)] -#![recursion_limit="128"] +#![recursion_limit = "128"] -use sp_std::prelude::*; -use sp_runtime::{ - RuntimeDebug, print, - traits::{Zero, One, StaticLookup, Saturating}, -}; +use codec::{Decode, Encode}; use frame_support::{ - pallet_prelude::*, ensure, - weights::{Weight, DispatchClass}, + ensure, + pallet_prelude::*, traits::{ - Currency, ExistenceRequirement, LockableCurrency, LockIdentifier, BalanceStatus, - OnUnbalanced, ReservableCurrency, WithdrawReasons, ChangeMembers, - } + BalanceStatus, ChangeMembers, Currency, ExistenceRequirement, LockIdentifier, + LockableCurrency, OnUnbalanced, ReservableCurrency, WithdrawReasons, + }, + weights::{DispatchClass, Weight}, }; -use codec::{Encode, Decode}; use frame_system::pallet_prelude::*; pub use pallet::*; +use sp_runtime::{ + print, + traits::{One, Saturating, StaticLookup, Zero}, + RuntimeDebug, +}; +use sp_std::prelude::*; mod mock; mod tests; @@ -109,7 +111,9 @@ mod tests; // entries before they increase the capacity. /// The activity status of a voter. -#[derive(PartialEq, Eq, Copy, Clone, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo)] +#[derive( + PartialEq, Eq, Copy, Clone, Encode, Decode, Default, RuntimeDebug, scale_info::TypeInfo, +)] pub struct VoterInfo { /// Last VoteIndex in which this voter assigned (or initialized) approvals. last_active: VoteIndex, @@ -140,9 +144,11 @@ pub const VOTER_SET_SIZE: usize = 64; /// NUmber of approvals grouped in one chunk. pub const APPROVAL_SET_SIZE: usize = 8; -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; -type NegativeImbalanceOf = - <::Currency as Currency<::AccountId>>::NegativeImbalance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; +type NegativeImbalanceOf = <::Currency as Currency< + ::AccountId, +>>::NegativeImbalance; /// Index used to access chunks. 
type SetIndex = u32; @@ -170,8 +176,7 @@ pub mod pallet { type PalletId: Get; /// The currency that people are electing with. - type Currency: - LockableCurrency + type Currency: LockableCurrency + ReservableCurrency; /// Handler for the unbalanced reduction when slashing a validator. @@ -297,12 +302,8 @@ pub mod pallet { /// attacker-controlled. #[pallet::storage] #[pallet::getter(fn approvals_of)] - pub type ApprovalsOf = StorageMap< - _, - Twox64Concat, (T::AccountId, SetIndex), - Vec, - ValueQuery, - >; + pub type ApprovalsOf = + StorageMap<_, Twox64Concat, (T::AccountId, SetIndex), Vec, ValueQuery>; /// The vote index and list slot that the candidate `who` was registered or `None` if they /// are not currently registered. @@ -310,26 +311,24 @@ pub mod pallet { /// TWOX-NOTE: SAFE as `AccountId` is a crypto hash. #[pallet::storage] #[pallet::getter(fn candidate_reg_info)] - pub type RegisterInfoOf = StorageMap<_, Twox64Concat, T::AccountId, (VoteIndex, u32)>; + pub type RegisterInfoOf = + StorageMap<_, Twox64Concat, T::AccountId, (VoteIndex, u32)>; /// Basic information about a voter. /// /// TWOX-NOTE: SAFE as `AccountId` is a crypto hash. #[pallet::storage] #[pallet::getter(fn voter_info)] - pub type VoterInfoOf = StorageMap<_, Twox64Concat, T::AccountId, VoterInfo>>; + pub type VoterInfoOf = + StorageMap<_, Twox64Concat, T::AccountId, VoterInfo>>; /// The present voter list (chunked and capped at [`VOTER_SET_SIZE`]). /// /// TWOX-NOTE: OKAY ― `SetIndex` is not user-controlled data. #[pallet::storage] #[pallet::getter(fn voters)] - pub type Voters = StorageMap< - _, - Twox64Concat, SetIndex, - Vec>, - ValueQuery, - >; + pub type Voters = + StorageMap<_, Twox64Concat, SetIndex, Vec>, ValueQuery>; /// the next free set to store a voter in. This will keep growing. 
#[pallet::storage] @@ -558,7 +557,8 @@ pub mod pallet { let reporter_index = reporter_index as usize; let who_index = who_index as usize; - let assumed_reporter = Self::voter_at(reporter_index).ok_or(Error::::InvalidReporterIndex)?; + let assumed_reporter = + Self::voter_at(reporter_index).ok_or(Error::::InvalidReporterIndex)?; let assumed_who = Self::voter_at(who_index).ok_or(Error::::InvalidTargetIndex)?; ensure!(assumed_reporter == reporter, Error::::InvalidReporterIndex); @@ -566,29 +566,31 @@ pub mod pallet { // will definitely kill one of reporter or who now. - let valid = !Self::all_approvals_of(&who).iter() - .zip(Self::candidates().iter()) - .any(|(&appr, addr)| - appr && + let valid = !Self::all_approvals_of(&who).iter().zip(Self::candidates().iter()).any( + |(&appr, addr)| { + appr && *addr != T::AccountId::default() && // defensive only: all items in candidates list are registered Self::candidate_reg_info(addr).map_or(false, |x| x.0 <= last_active) - ); + }, + ); Self::remove_voter( if valid { &who } else { &reporter }, - if valid { who_index } else { reporter_index } + if valid { who_index } else { reporter_index }, ); - T::Currency::remove_lock( - T::PalletId::get(), - if valid { &who } else { &reporter } - ); + T::Currency::remove_lock(T::PalletId::get(), if valid { &who } else { &reporter }); if valid { // This only fails if `reporter` doesn't exist, which it clearly must do since its // the origin. Still, it's no more harmful to propagate any error at this point. - T::Currency::repatriate_reserved(&who, &reporter, T::VotingBond::get(), BalanceStatus::Free)?; + T::Currency::repatriate_reserved( + &who, + &reporter, + T::VotingBond::get(), + BalanceStatus::Free, + )?; Self::deposit_event(Event::::VoterReaped(who, reporter)); } else { let imbalance = T::Currency::slash_reserved(&reporter, T::VotingBond::get()).0; @@ -613,7 +615,10 @@ pub mod pallet { /// - Two fewer DB entries, one DB change. 
/// # #[pallet::weight(1_250_000_000)] - pub fn retract_voter(origin: OriginFor, #[pallet::compact] index: u32) -> DispatchResult { + pub fn retract_voter( + origin: OriginFor, + #[pallet::compact] index: u32, + ) -> DispatchResult { let who = ensure_signed(origin)?; ensure!(!Self::presentation_active(), Error::::CannotRetractPresenting); @@ -643,7 +648,10 @@ pub mod pallet { /// - Three DB changes. /// # #[pallet::weight(2_500_000_000)] - pub fn submit_candidacy(origin: OriginFor, #[pallet::compact] slot: u32) -> DispatchResult { + pub fn submit_candidacy( + origin: OriginFor, + #[pallet::compact] slot: u32, + ) -> DispatchResult { let who = ensure_signed(origin)?; ensure!(!Self::is_a_candidate(&who), Error::::DuplicatedCandidate); @@ -688,38 +696,31 @@ pub mod pallet { #[pallet::compact] index: VoteIndex, ) -> DispatchResult { let who = ensure_signed(origin)?; - ensure!( - !total.is_zero(), - Error::::ZeroDeposit, - ); + ensure!(!total.is_zero(), Error::::ZeroDeposit,); let candidate = T::Lookup::lookup(candidate)?; ensure!(index == Self::vote_index(), Error::::InvalidVoteIndex); - let (_, _, expiring) = Self::next_finalize() - .ok_or(Error::::NotPresentationPeriod)?; + let (_, _, expiring) = + Self::next_finalize().ok_or(Error::::NotPresentationPeriod)?; let bad_presentation_punishment = - T::PresentSlashPerVoter::get() - * BalanceOf::::from(Self::voter_count() as u32); + T::PresentSlashPerVoter::get() * BalanceOf::::from(Self::voter_count() as u32); ensure!( T::Currency::can_slash(&who, bad_presentation_punishment), Error::::InsufficientPresenterFunds, ); - let mut leaderboard = Self::leaderboard() - .ok_or(Error::::LeaderboardMustExist)?; + let mut leaderboard = Self::leaderboard().ok_or(Error::::LeaderboardMustExist)?; ensure!(total > leaderboard[0].0, Error::::UnworthyCandidate); if let Some(p) = Self::members().iter().position(|&(ref c, _)| c == &candidate) { - ensure!( - p < expiring.len(), - Error::::DuplicatedCandidate, - ); + ensure!(p < expiring.len(), 
Error::::DuplicatedCandidate,); } let voters = Self::all_voters(); let (registered_since, candidate_index): (VoteIndex, u32) = Self::candidate_reg_info(&candidate).ok_or(Error::::InvalidCandidate)?; - let actual_total = voters.iter() + let actual_total = voters + .iter() .filter_map(|maybe_voter| maybe_voter.as_ref()) .filter_map(|voter| match Self::voter_info(voter) { Some(b) if b.last_active >= registered_since => { @@ -730,7 +731,9 @@ pub mod pallet { let weight = stake + offset + b.pot; if Self::approvals_of_at(voter, candidate_index as usize) { Some(weight) - } else { None } + } else { + None + } }, _ => None, }) @@ -747,7 +750,11 @@ pub mod pallet { // better safe than sorry. let imbalance = T::Currency::slash(&who, bad_presentation_punishment).0; T::BadPresentation::on_unbalanced(imbalance); - Err(if dupe { Error::::DuplicatedPresentation } else { Error::::IncorrectTotal })? + Err(if dupe { + Error::::DuplicatedPresentation + } else { + Error::::IncorrectTotal + })? } } @@ -755,7 +762,10 @@ pub mod pallet { /// election when they expire. If more, then a new vote will be started if one is not /// already in progress. #[pallet::weight((0, DispatchClass::Operational))] - pub fn set_desired_seats(origin: OriginFor, #[pallet::compact] count: u32) -> DispatchResult { + pub fn set_desired_seats( + origin: OriginFor, + #[pallet::compact] count: u32, + ) -> DispatchResult { ensure_root(origin)?; DesiredSeats::::put(count); Ok(()) @@ -766,13 +776,14 @@ pub mod pallet { /// Note: A tally should happen instantly (if not already in a presentation /// period) to fill the seat if removal means that the desired members are not met. 
#[pallet::weight((0, DispatchClass::Operational))] - pub fn remove_member(origin: OriginFor, who: ::Source) -> DispatchResult { + pub fn remove_member( + origin: OriginFor, + who: ::Source, + ) -> DispatchResult { ensure_root(origin)?; let who = T::Lookup::lookup(who)?; - let new_set: Vec<(T::AccountId, T::BlockNumber)> = Self::members() - .into_iter() - .filter(|i| i.0 != who) - .collect(); + let new_set: Vec<(T::AccountId, T::BlockNumber)> = + Self::members().into_iter().filter(|i| i.0 != who).collect(); >::put(&new_set); let new_set = new_set.into_iter().map(|x| x.0).collect::>(); T::ChangeMembers::change_members(&[], &[who], new_set); @@ -820,7 +831,8 @@ impl Pallet { /// Iff the member `who` still has a seat at blocknumber `n` returns `true`. pub fn will_still_be_member_at(who: &T::AccountId, n: T::BlockNumber) -> bool { - Self::members().iter() + Self::members() + .iter() .find(|&&(ref a, _)| a == who) .map(|&(_, expires)| expires > n) .unwrap_or(false) @@ -858,7 +870,8 @@ impl Pallet { } else { Some(c[c.len() - (desired_seats - coming) as usize].1) } - }.map(Self::next_vote_from) + } + .map(Self::next_vote_from) } } @@ -905,18 +918,12 @@ impl Pallet { ensure!(!Self::presentation_active(), Error::::ApprovalPresentation); ensure!(index == Self::vote_index(), Error::::InvalidVoteIndex); - ensure!( - !candidates_len.is_zero(), - Error::::ZeroCandidates, - ); + ensure!(!candidates_len.is_zero(), Error::::ZeroCandidates,); // Prevent a vote from voters that provide a list of votes that exceeds the candidates // length since otherwise an attacker may be able to submit a very long list of `votes` that // far exceeds the amount of candidates and waste more computation than a reasonable voting // bond would cover. - ensure!( - candidates_len >= votes.len(), - Error::::TooManyVotes, - ); + ensure!(candidates_len >= votes.len(), Error::::TooManyVotes,); ensure!(value >= T::MinimumVotingLock::get(), Error::::InsufficientLockedValue); // Amount to be locked up. 
@@ -968,19 +975,14 @@ impl Pallet { NextVoterSet::::put(next + 1); } >::append(next, Some(who.clone())); - } + }, } T::Currency::reserve(&who, T::VotingBond::get())?; VoterCount::::mutate(|c| *c = *c + 1); } - T::Currency::set_lock( - T::PalletId::get(), - &who, - locked_balance, - WithdrawReasons::all(), - ); + T::Currency::set_lock(T::PalletId::get(), &who, locked_balance, WithdrawReasons::all()); >::insert( &who, @@ -989,7 +991,7 @@ impl Pallet { last_win: index, stake: locked_balance, pot: pot_to_set, - } + }, ); Self::set_approvals_chunked(&who, votes); @@ -1001,18 +1003,26 @@ impl Pallet { let members = Self::members(); let desired_seats = Self::desired_seats() as usize; let number = >::block_number(); - let expiring = - members.iter().take_while(|i| i.1 <= number).map(|i| i.0.clone()).collect::>(); + let expiring = members + .iter() + .take_while(|i| i.1 <= number) + .map(|i| i.0.clone()) + .collect::>(); let retaining_seats = members.len() - expiring.len(); if retaining_seats < desired_seats { let empty_seats = desired_seats - retaining_seats; - >::put( - (number + Self::presentation_duration(), empty_seats as u32, expiring) - ); + >::put(( + number + Self::presentation_duration(), + empty_seats as u32, + expiring, + )); // initialize leaderboard. 
let leaderboard_size = empty_seats + T::CarryCount::get() as usize; - >::put(vec![(BalanceOf::::zero(), T::AccountId::default()); leaderboard_size]); + >::put(vec![ + (BalanceOf::::zero(), T::AccountId::default()); + leaderboard_size + ]); Self::deposit_event(Event::::TallyStarted(empty_seats as u32)); } @@ -1026,19 +1036,22 @@ impl Pallet { let (_, coming, expiring): (T::BlockNumber, u32, Vec) = >::take() .ok_or("finalize can only be called after a tally is started.")?; - let leaderboard: Vec<(BalanceOf, T::AccountId)> = >::take() - .unwrap_or_default(); + let leaderboard: Vec<(BalanceOf, T::AccountId)> = + >::take().unwrap_or_default(); let new_expiry = >::block_number() + Self::term_duration(); // return bond to winners. let candidacy_bond = T::CandidacyBond::get(); - let incoming: Vec<_> = leaderboard.iter() + let incoming: Vec<_> = leaderboard + .iter() .rev() .take_while(|&&(b, _)| !b.is_zero()) .take(coming as usize) .map(|(_, a)| a) .cloned() - .inspect(|a| { T::Currency::unreserve(a, candidacy_bond); }) + .inspect(|a| { + T::Currency::unreserve(a, candidacy_bond); + }) .collect(); // Update last win index for anyone voted for any of the incomings. @@ -1048,14 +1061,16 @@ impl Pallet { .iter() .filter_map(|mv| mv.as_ref()) .filter(|v| Self::approvals_of_at(*v, index)) - .for_each(|v| >::mutate(v, |a| { - if let Some(activity) = a { activity.last_win = Self::vote_index() + 1; } - })); + .for_each(|v| { + >::mutate(v, |a| { + if let Some(activity) = a { + activity.last_win = Self::vote_index() + 1; + } + }) + }); }); let members = Self::members(); - let outgoing: Vec<_> = members.iter() - .take(expiring.len()) - .map(|a| a.0.clone()).collect(); + let outgoing: Vec<_> = members.iter().take(expiring.len()).map(|a| a.0.clone()).collect(); // set the new membership set. let mut new_set: Vec<_> = members @@ -1071,8 +1086,9 @@ impl Pallet { // clear all except runners-up from candidate list. 
let candidates = Self::candidates(); - let mut new_candidates = vec![T::AccountId::default(); candidates.len()]; // shrink later. - let runners_up = leaderboard.into_iter() + let mut new_candidates = vec![T::AccountId::default(); candidates.len()]; // shrink later. + let runners_up = leaderboard + .into_iter() .rev() .take_while(|&(b, _)| !b.is_zero()) .skip(coming as usize) @@ -1097,11 +1113,10 @@ impl Pallet { } } // discard any superfluous slots. - if let Some(last_index) = new_candidates - .iter() - .rposition(|c| *c != T::AccountId::default()) { - new_candidates.truncate(last_index + 1); - } + if let Some(last_index) = new_candidates.iter().rposition(|c| *c != T::AccountId::default()) + { + new_candidates.truncate(last_index + 1); + } Self::deposit_event(Event::::TallyFinalized(incoming, outgoing)); @@ -1130,7 +1145,7 @@ impl Pallet { loop { let next_set = >::get(index); if next_set.is_empty() { - break; + break } else { index += 1; all.extend(next_set); @@ -1176,9 +1191,7 @@ impl Pallet { approvals_flag_vec .chunks(APPROVAL_SET_SIZE) .enumerate() - .for_each(|(index, slice)| >::insert( - (&who, index as SetIndex), slice) - ); + .for_each(|(index, slice)| >::insert((&who, index as SetIndex), slice)); } /// shorthand for fetching a specific approval of a voter at a specific (global) index. @@ -1203,7 +1216,7 @@ impl Pallet { /// Return true of the bit `n` of scalar `x` is set to `1` and false otherwise. 
fn bit_at(x: ApprovalFlag, n: usize) -> bool { if n < APPROVAL_FLAG_LEN { - x & ( 1 << n ) != 0 + x & (1 << n) != 0 } else { false } @@ -1214,7 +1227,7 @@ impl Pallet { pub fn bool_to_flag(x: Vec) -> Vec { let mut result: Vec = Vec::with_capacity(x.len() / APPROVAL_FLAG_LEN); if x.is_empty() { - return result; + return result } result.push(0); let mut index = 0; @@ -1223,7 +1236,9 @@ impl Pallet { let shl_index = counter % APPROVAL_FLAG_LEN; result[index] += (if x[counter] { 1 } else { 0 }) << shl_index; counter += 1; - if counter > x.len() - 1 { break; } + if counter > x.len() - 1 { + break + } if counter % APPROVAL_FLAG_LEN == 0 { result.push(0); index += 1; @@ -1235,15 +1250,18 @@ impl Pallet { /// Convert a vec of flags (u32) to boolean. pub fn flag_to_bool(chunk: Vec) -> Vec { let mut result = Vec::with_capacity(chunk.len()); - if chunk.is_empty() { return vec![] } - chunk.into_iter() - .map(|num| + if chunk.is_empty() { + return vec![] + } + chunk + .into_iter() + .map(|num| { (0..APPROVAL_FLAG_LEN).map(|bit| Self::bit_at(num, bit)).collect::>() - ) + }) .for_each(|c| { let last_approve = match c.iter().rposition(|n| *n) { Some(index) => index + 1, - None => 0 + None => 0, }; result.extend(c.into_iter().take(last_approve)); }); @@ -1257,7 +1275,9 @@ impl Pallet { let mut index = 0_u32; loop { let chunk = Self::approvals_of((who.clone(), index)); - if chunk.is_empty() { break; } + if chunk.is_empty() { + break + } all.extend(Self::flag_to_bool(chunk)); index += 1; } @@ -1290,7 +1310,9 @@ impl Pallet { /// returned if `t` is zero. 
fn get_offset(stake: BalanceOf, t: VoteIndex) -> BalanceOf { let decay_ratio: BalanceOf = T::DecayRatio::get().into(); - if t > 150 { return stake * decay_ratio } + if t > 150 { + return stake * decay_ratio + } let mut offset = stake; let mut r = Zero::zero(); let decay = decay_ratio + One::one(); diff --git a/frame/elections/src/mock.rs b/frame/elections/src/mock.rs index 4df6da829a18e..78982f7af3988 100644 --- a/frame/elections/src/mock.rs +++ b/frame/elections/src/mock.rs @@ -19,16 +19,17 @@ #![cfg(test)] +use crate as elections; use frame_support::{ - parameter_types, assert_ok, + assert_ok, parameter_types, traits::{ChangeMembers, Currency, LockIdentifier}, }; use sp_core::H256; use sp_runtime::{ - BuildStorage, testing::Header, traits::{BlakeTwo256, IdentityLookup}, + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, + BuildStorage, }; -use crate as elections; - parameter_types! { pub const BlockHashCount: u64 = 250; @@ -104,7 +105,7 @@ impl ChangeMembers for TestChangeMembers { } } -parameter_types!{ +parameter_types! 
{ pub const ElectionPalletId: LockIdentifier = *b"py/elect"; } @@ -197,56 +198,55 @@ impl ExtBuilder { PRESENT_SLASH_PER_VOTER.with(|v| *v.borrow_mut() = self.bad_presentation_punishment); DECAY_RATIO.with(|v| *v.borrow_mut() = self.decay_ratio); let mut ext: sp_io::TestExternalities = GenesisConfig { - balances: pallet_balances::GenesisConfig::{ + balances: pallet_balances::GenesisConfig:: { balances: vec![ (1, 10 * self.balance_factor), (2, 20 * self.balance_factor), (3, 30 * self.balance_factor), (4, 40 * self.balance_factor), (5, 50 * self.balance_factor), - (6, 60 * self.balance_factor) + (6, 60 * self.balance_factor), ], }, - elections: elections::GenesisConfig::{ + elections: elections::GenesisConfig:: { members: vec![], desired_seats: self.desired_seats, presentation_duration: 2, term_duration: 5, }, - }.build_storage().unwrap().into(); + } + .build_storage() + .unwrap() + .into(); ext.execute_with(|| System::set_block_number(1)); ext } } pub(crate) fn voter_ids() -> Vec { - Elections::all_voters().iter().map(|v| v.unwrap_or(0) ).collect::>() + Elections::all_voters().iter().map(|v| v.unwrap_or(0)).collect::>() } pub(crate) fn vote(i: u64, l: usize) { let _ = Balances::make_free_balance_be(&i, 20); - assert_ok!( - Elections::set_approvals( - Origin::signed(i), - (0..l).map(|_| true).collect::>(), - 0, - 0, - 20, - ) - ); + assert_ok!(Elections::set_approvals( + Origin::signed(i), + (0..l).map(|_| true).collect::>(), + 0, + 0, + 20, + )); } pub(crate) fn vote_at(i: u64, l: usize, index: elections::VoteIndex) { let _ = Balances::make_free_balance_be(&i, 20); - assert_ok!( - Elections::set_approvals( - Origin::signed(i), - (0..l).map(|_| true).collect::>(), - 0, - index, - 20, - ) - ); + assert_ok!(Elections::set_approvals( + Origin::signed(i), + (0..l).map(|_| true).collect::>(), + 0, + index, + 20, + )); } pub(crate) fn create_candidate(i: u64, index: u32) { diff --git a/frame/elections/src/tests.rs b/frame/elections/src/tests.rs index 
62e28eb6da082..0df84c6d79baf 100644 --- a/frame/elections/src/tests.rs +++ b/frame/elections/src/tests.rs @@ -19,10 +19,9 @@ #![cfg(test)] -use crate::mock::*; -use crate::*; +use crate::{mock::*, *}; -use frame_support::{assert_ok, assert_err, assert_noop}; +use frame_support::{assert_err, assert_noop, assert_ok}; #[test] fn params_should_work() { @@ -60,38 +59,23 @@ fn chunking_bool_to_flag_should_work() { assert_eq!(Elections::bool_to_flag(vec![true, true, true, true, true]), vec![15 + 16]); let set_1 = vec![ - true, false, false, false, // 0x1 - false, true, true, true, // 0xE + true, false, false, false, // 0x1 + false, true, true, true, // 0xE ]; - assert_eq!( - Elections::bool_to_flag(set_1.clone()), - vec![0x00_00_00_E1_u32] - ); - assert_eq!( - Elections::flag_to_bool(vec![0x00_00_00_E1_u32]), - set_1 - ); + assert_eq!(Elections::bool_to_flag(set_1.clone()), vec![0x00_00_00_E1_u32]); + assert_eq!(Elections::flag_to_bool(vec![0x00_00_00_E1_u32]), set_1); let set_2 = vec![ - false, false, false, true, // 0x8 - false, true, false, true, // 0xA + false, false, false, true, // 0x8 + false, true, false, true, // 0xA ]; - assert_eq!( - Elections::bool_to_flag(set_2.clone()), - vec![0x00_00_00_A8_u32] - ); - assert_eq!( - Elections::flag_to_bool(vec![0x00_00_00_A8_u32]), - set_2 - ); + assert_eq!(Elections::bool_to_flag(set_2.clone()), vec![0x00_00_00_A8_u32]); + assert_eq!(Elections::flag_to_bool(vec![0x00_00_00_A8_u32]), set_2); - let mut rhs = (0..100/APPROVAL_FLAG_LEN).map(|_| 0xFFFFFFFF_u32).collect::>(); + let mut rhs = (0..100 / APPROVAL_FLAG_LEN).map(|_| 0xFFFFFFFF_u32).collect::>(); // NOTE: this might be need change based on `APPROVAL_FLAG_LEN`. 
rhs.extend(vec![0x00_00_00_0F]); - assert_eq!( - Elections::bool_to_flag((0..100).map(|_| true).collect()), - rhs - ) + assert_eq!(Elections::bool_to_flag((0..100).map(|_| true).collect()), rhs) }) } @@ -160,7 +144,7 @@ fn chunking_voter_set_reclaim_should_work() { fn chunking_approvals_set_growth_should_work() { ExtBuilder::default().build().execute_with(|| { // create candidates and voters. - (1..=250).for_each(|i| create_candidate(i, (i-1) as u32)); + (1..=250).for_each(|i| create_candidate(i, (i - 1) as u32)); (1..=250).for_each(|i| vote(i, i as usize)); // all approvals of should return the exact expected vector. @@ -168,26 +152,11 @@ fn chunking_approvals_set_growth_should_work() { Elections::all_approvals_of(&180), (0..180).map(|_| true).collect::>() ); - assert_eq!( - Elections::all_approvals_of(&32), - (0..32).map(|_| true).collect::>() - ); - assert_eq!( - Elections::all_approvals_of(&8), - (0..8).map(|_| true).collect::>() - ); - assert_eq!( - Elections::all_approvals_of(&64), - (0..64).map(|_| true).collect::>() - ); - assert_eq!( - Elections::all_approvals_of(&65), - (0..65).map(|_| true).collect::>() - ); - assert_eq!( - Elections::all_approvals_of(&63), - (0..63).map(|_| true).collect::>() - ); + assert_eq!(Elections::all_approvals_of(&32), (0..32).map(|_| true).collect::>()); + assert_eq!(Elections::all_approvals_of(&8), (0..8).map(|_| true).collect::>()); + assert_eq!(Elections::all_approvals_of(&64), (0..64).map(|_| true).collect::>()); + assert_eq!(Elections::all_approvals_of(&65), (0..65).map(|_| true).collect::>()); + assert_eq!(Elections::all_approvals_of(&63), (0..63).map(|_| true).collect::>()); // NOTE: assuming that APPROVAL_SET_SIZE is more or less small-ish. Might fail otherwise. let full_sets = (180 / APPROVAL_FLAG_LEN) / APPROVAL_SET_SIZE; @@ -197,10 +166,9 @@ fn chunking_approvals_set_growth_should_work() { // grab and check the last full set, if it exists. 
if full_sets > 0 { assert_eq!( - Elections::approvals_of((180, (full_sets-1) as SetIndex )), + Elections::approvals_of((180, (full_sets - 1) as SetIndex)), Elections::bool_to_flag( - (0..APPROVAL_SET_SIZE * APPROVAL_FLAG_LEN) - .map(|_| true).collect::>() + (0..APPROVAL_SET_SIZE * APPROVAL_FLAG_LEN).map(|_| true).collect::>() ) ); } @@ -210,8 +178,7 @@ fn chunking_approvals_set_growth_should_work() { assert_eq!( Elections::approvals_of((180, full_sets as SetIndex)), Elections::bool_to_flag( - (0..left_over * APPROVAL_FLAG_LEN + rem) - .map(|_| true).collect::>() + (0..left_over * APPROVAL_FLAG_LEN + rem).map(|_| true).collect::>() ) ); } @@ -311,7 +278,7 @@ fn voting_bad_approval_index_slashes_voters_and_bond_reduces_stake() { assert_eq!(balances(&64), (18, 2)); assert_eq!( Elections::voter_info(&64).unwrap(), - VoterInfo { last_win: 0, last_active: 0, stake: 20, pot:0 } + VoterInfo { last_win: 0, last_active: 0, stake: 20, pot: 0 } ); assert_eq!(Elections::next_nonfull_voter_set(), 1); @@ -321,7 +288,7 @@ fn voting_bad_approval_index_slashes_voters_and_bond_reduces_stake() { assert_eq!(balances(&65), (13, 2)); assert_eq!( Elections::voter_info(&65).unwrap(), - VoterInfo { last_win: 0, last_active: 0, stake: 15, pot:0 } + VoterInfo { last_win: 0, last_active: 0, stake: 15, pot: 0 } ); }); } @@ -374,7 +341,7 @@ fn voting_locking_more_than_total_balance_is_moot() { assert_eq!(balances(&3), (28, 2)); assert_eq!( Elections::voter_info(&3).unwrap(), - VoterInfo { last_win: 0, last_active: 0, stake: 30, pot:0 } + VoterInfo { last_win: 0, last_active: 0, stake: 30, pot: 0 } ); }); } @@ -424,7 +391,7 @@ fn voting_setting_an_approval_vote_count_more_than_candidate_count_should_not_wo assert_eq!(Elections::candidates().len(), 1); assert_noop!( - Elections::set_approvals(Origin::signed(4),vec![true, true], 0, 0, 40), + Elections::set_approvals(Origin::signed(4), vec![true, true], 0, 0, 40), Error::::TooManyVotes, ); }); @@ -498,7 +465,10 @@ fn 
voting_invalid_retraction_index_should_not_work() { assert_ok!(Elections::set_approvals(Origin::signed(1), vec![true], 0, 0, 10)); assert_ok!(Elections::set_approvals(Origin::signed(2), vec![true], 0, 0, 20)); assert_eq!(voter_ids(), vec![1, 2]); - assert_noop!(Elections::retract_voter(Origin::signed(1), 1), Error::::InvalidRetractionIndex); + assert_noop!( + Elections::retract_voter(Origin::signed(1), 1), + Error::::InvalidRetractionIndex + ); }); } @@ -508,7 +478,10 @@ fn voting_overflow_retraction_index_should_not_work() { assert_ok!(Elections::submit_candidacy(Origin::signed(3), 0)); assert_ok!(Elections::set_approvals(Origin::signed(1), vec![true], 0, 0, 10)); - assert_noop!(Elections::retract_voter(Origin::signed(1), 1), Error::::InvalidRetractionIndex); + assert_noop!( + Elections::retract_voter(Origin::signed(1), 1), + Error::::InvalidRetractionIndex + ); }); } @@ -518,7 +491,10 @@ fn voting_non_voter_retraction_should_not_work() { assert_ok!(Elections::submit_candidacy(Origin::signed(3), 0)); assert_ok!(Elections::set_approvals(Origin::signed(1), vec![true], 0, 0, 10)); - assert_noop!(Elections::retract_voter(Origin::signed(2), 0), Error::::RetractNonVoter); + assert_noop!( + Elections::retract_voter(Origin::signed(2), 0), + Error::::RetractNonVoter + ); }); } @@ -543,9 +519,11 @@ fn retracting_inactive_voter_should_work() { assert_ok!(Elections::present_winner(Origin::signed(4), 5, 50, 1)); assert_ok!(Elections::end_block(System::block_number())); - assert_ok!(Elections::reap_inactive_voter(Origin::signed(5), + assert_ok!(Elections::reap_inactive_voter( + Origin::signed(5), (voter_ids().iter().position(|&i| i == 5).unwrap() as u32).into(), - 2, (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), + 2, + (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), 2 )); @@ -580,9 +558,11 @@ fn retracting_inactive_voter_with_other_candidates_in_slots_should_work() { System::set_block_number(11); 
assert_ok!(Elections::submit_candidacy(Origin::signed(1), 0)); - assert_ok!(Elections::reap_inactive_voter(Origin::signed(5), + assert_ok!(Elections::reap_inactive_voter( + Origin::signed(5), (voter_ids().iter().position(|&i| i == 5).unwrap() as u32).into(), - 2, (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), + 2, + (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), 2 )); @@ -612,11 +592,16 @@ fn retracting_inactive_voter_with_bad_reporter_index_should_not_work() { assert_ok!(Elections::present_winner(Origin::signed(4), 5, 50, 1)); assert_ok!(Elections::end_block(System::block_number())); - assert_noop!(Elections::reap_inactive_voter(Origin::signed(2), - 42, - 2, (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), - 2 - ), Error::::InvalidReporterIndex); + assert_noop!( + Elections::reap_inactive_voter( + Origin::signed(2), + 42, + 2, + (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), + 2 + ), + Error::::InvalidReporterIndex + ); }); } @@ -641,11 +626,16 @@ fn retracting_inactive_voter_with_bad_target_index_should_not_work() { assert_ok!(Elections::present_winner(Origin::signed(4), 5, 50, 1)); assert_ok!(Elections::end_block(System::block_number())); - assert_noop!(Elections::reap_inactive_voter(Origin::signed(2), - (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), - 2, 42, - 2 - ), Error::::InvalidTargetIndex); + assert_noop!( + Elections::reap_inactive_voter( + Origin::signed(2), + (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), + 2, + 42, + 2 + ), + Error::::InvalidTargetIndex + ); }); } @@ -657,10 +647,34 @@ fn retracting_active_voter_should_slash_reporter() { assert_ok!(Elections::submit_candidacy(Origin::signed(3), 1)); assert_ok!(Elections::submit_candidacy(Origin::signed(4), 2)); assert_ok!(Elections::submit_candidacy(Origin::signed(5), 3)); - assert_ok!(Elections::set_approvals(Origin::signed(2), vec![true, false, false, false], 0, 0, 20)); - 
assert_ok!(Elections::set_approvals(Origin::signed(3), vec![false, true, false, false], 0, 0, 30)); - assert_ok!(Elections::set_approvals(Origin::signed(4), vec![false, false, true, false], 0, 0, 40)); - assert_ok!(Elections::set_approvals(Origin::signed(5), vec![false, false, false, true], 0, 0, 50)); + assert_ok!(Elections::set_approvals( + Origin::signed(2), + vec![true, false, false, false], + 0, + 0, + 20 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(3), + vec![false, true, false, false], + 0, + 0, + 30 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(4), + vec![false, false, true, false], + 0, + 0, + 40 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, false, false, true], + 0, + 0, + 50 + )); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(6); @@ -675,16 +689,30 @@ fn retracting_active_voter_should_slash_reporter() { assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(10); - assert_ok!(Elections::present_winner(Origin::signed(4), 2, 20 + Elections::get_offset(20, 1), 1)); - assert_ok!(Elections::present_winner(Origin::signed(4), 3, 30 + Elections::get_offset(30, 1), 1)); + assert_ok!(Elections::present_winner( + Origin::signed(4), + 2, + 20 + Elections::get_offset(20, 1), + 1 + )); + assert_ok!(Elections::present_winner( + Origin::signed(4), + 3, + 30 + Elections::get_offset(30, 1), + 1 + )); assert_ok!(Elections::end_block(System::block_number())); assert_eq!(Elections::vote_index(), 2); assert_eq!(::InactiveGracePeriod::get(), 1); assert_eq!(::VotingPeriod::get(), 4); - assert_eq!(Elections::voter_info(4), Some(VoterInfo { last_win: 1, last_active: 0, stake: 40, pot: 0 })); + assert_eq!( + Elections::voter_info(4), + Some(VoterInfo { last_win: 1, last_active: 0, stake: 40, pot: 0 }) + ); - assert_ok!(Elections::reap_inactive_voter(Origin::signed(4), + assert_ok!(Elections::reap_inactive_voter( + Origin::signed(4), 
(voter_ids().iter().position(|&i| i == 4).unwrap() as u32).into(), 2, (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), @@ -718,11 +746,16 @@ fn retracting_inactive_voter_by_nonvoter_should_not_work() { assert_ok!(Elections::present_winner(Origin::signed(4), 5, 50, 1)); assert_ok!(Elections::end_block(System::block_number())); - assert_noop!(Elections::reap_inactive_voter(Origin::signed(4), - 0, - 2, (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), - 2 - ), Error::::NotVoter); + assert_noop!( + Elections::reap_inactive_voter( + Origin::signed(4), + 0, + 2, + (voter_ids().iter().position(|&i| i == 2).unwrap() as u32).into(), + 2 + ), + Error::::NotVoter + ); }); } @@ -933,7 +966,7 @@ fn election_seats_should_be_released() { assert_ok!(Elections::end_block(System::block_number())); if Elections::members().len() == 0 { free_block = current; - break; + break } } // 11 + 2 which is the next voting period. @@ -1021,9 +1054,21 @@ fn election_presenting_loser_should_not_work() { assert_ok!(Elections::submit_candidacy(Origin::signed(3), 2)); assert_ok!(Elections::set_approvals(Origin::signed(3), vec![false, false, true], 0, 0, 30)); assert_ok!(Elections::submit_candidacy(Origin::signed(4), 3)); - assert_ok!(Elections::set_approvals(Origin::signed(4), vec![false, false, false, true], 0, 0, 40)); + assert_ok!(Elections::set_approvals( + Origin::signed(4), + vec![false, false, false, true], + 0, + 0, + 40 + )); assert_ok!(Elections::submit_candidacy(Origin::signed(5), 4)); - assert_ok!(Elections::set_approvals(Origin::signed(5), vec![false, false, false, false, true], 0, 0, 50)); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, false, false, false, true], + 0, + 0, + 50 + )); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(6); @@ -1032,14 +1077,12 @@ fn election_presenting_loser_should_not_work() { assert_ok!(Elections::present_winner(Origin::signed(4), 4, 40, 0)); 
assert_ok!(Elections::present_winner(Origin::signed(4), 5, 50, 0)); - assert_eq!(Elections::leaderboard(), Some(vec![ - (30, 3), - (40, 4), - (50, 5), - (60, 1) - ])); + assert_eq!(Elections::leaderboard(), Some(vec![(30, 3), (40, 4), (50, 5), (60, 1)])); - assert_noop!(Elections::present_winner(Origin::signed(4), 2, 20, 0), Error::::UnworthyCandidate); + assert_noop!( + Elections::present_winner(Origin::signed(4), 2, 20, 0), + Error::::UnworthyCandidate + ); }); } @@ -1054,9 +1097,21 @@ fn election_presenting_loser_first_should_not_matter() { assert_ok!(Elections::submit_candidacy(Origin::signed(3), 2)); assert_ok!(Elections::set_approvals(Origin::signed(3), vec![false, false, true], 0, 0, 30)); assert_ok!(Elections::submit_candidacy(Origin::signed(4), 3)); - assert_ok!(Elections::set_approvals(Origin::signed(4), vec![false, false, false, true], 0, 0, 40)); + assert_ok!(Elections::set_approvals( + Origin::signed(4), + vec![false, false, false, true], + 0, + 0, + 40 + )); assert_ok!(Elections::submit_candidacy(Origin::signed(5), 4)); - assert_ok!(Elections::set_approvals(Origin::signed(5), vec![false, false, false, false, true], 0, 0, 50)); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, false, false, false, true], + 0, + 0, + 50 + )); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(6); @@ -1066,12 +1121,7 @@ fn election_presenting_loser_first_should_not_matter() { assert_ok!(Elections::present_winner(Origin::signed(4), 4, 40, 0)); assert_ok!(Elections::present_winner(Origin::signed(4), 5, 50, 0)); - assert_eq!(Elections::leaderboard(), Some(vec![ - (30, 3), - (40, 4), - (50, 5), - (60, 1) - ])); + assert_eq!(Elections::leaderboard(), Some(vec![(30, 3), (40, 4), (50, 5), (60, 1)])); }); } @@ -1098,7 +1148,10 @@ fn election_present_with_invalid_vote_index_should_not_work() { assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(6); - 
assert_noop!(Elections::present_winner(Origin::signed(4), 2, 20, 1), Error::::InvalidVoteIndex); + assert_noop!( + Elections::present_winner(Origin::signed(4), 2, 20, 1), + Error::::InvalidVoteIndex + ); }); } @@ -1115,10 +1168,10 @@ fn election_present_when_presenter_is_poor_should_not_work() { let _ = Balances::make_free_balance_be(&1, 15); assert!(!Elections::presentation_active()); - // -3 + // -3 assert_ok!(Elections::submit_candidacy(Origin::signed(1), 0)); assert_eq!(Balances::free_balance(1), 12); - // -2 -5 + // -2 -5 assert_ok!(Elections::set_approvals(Origin::signed(1), vec![true], 0, 0, 15)); assert_ok!(Elections::end_block(System::block_number())); @@ -1126,8 +1179,8 @@ fn election_present_when_presenter_is_poor_should_not_work() { assert_eq!(Balances::free_balance(1), 5); assert_eq!(Balances::reserved_balance(1), 5); if p > 5 { - assert_noop!(Elections::present_winner( - Origin::signed(1), 1, 10, 0), + assert_noop!( + Elections::present_winner(Origin::signed(1), 1, 10, 0), Error::::InsufficientPresenterFunds, ); } else { @@ -1153,7 +1206,10 @@ fn election_invalid_present_tally_should_slash() { assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(6); - assert_err!(Elections::present_winner(Origin::signed(4), 2, 80, 0), Error::::IncorrectTotal); + assert_err!( + Elections::present_winner(Origin::signed(4), 2, 80, 0), + Error::::IncorrectTotal + ); assert_eq!(Balances::total_balance(&4), 38); }); @@ -1172,9 +1228,21 @@ fn election_runners_up_should_be_kept() { assert_ok!(Elections::submit_candidacy(Origin::signed(3), 2)); assert_ok!(Elections::set_approvals(Origin::signed(3), vec![false, false, true], 0, 0, 30)); assert_ok!(Elections::submit_candidacy(Origin::signed(4), 3)); - assert_ok!(Elections::set_approvals(Origin::signed(4), vec![false, false, false, true], 0, 0, 40)); + assert_ok!(Elections::set_approvals( + Origin::signed(4), + vec![false, false, false, true], + 0, + 0, + 40 + )); 
assert_ok!(Elections::submit_candidacy(Origin::signed(5), 4)); - assert_ok!(Elections::set_approvals(Origin::signed(5), vec![false, false, false, false, true], 0, 0, 50)); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, false, false, false, true], + 0, + 0, + 50 + )); assert_ok!(Elections::end_block(System::block_number())); @@ -1183,21 +1251,11 @@ fn election_runners_up_should_be_kept() { assert_ok!(Elections::present_winner(Origin::signed(4), 1, 60, 0)); // leaderboard length is the empty seats plus the carry count (i.e. 5 + 2), where those // to be carried are the lowest and stored in lowest indices - assert_eq!(Elections::leaderboard(), Some(vec![ - (0, 0), - (0, 0), - (0, 0), - (60, 1) - ])); + assert_eq!(Elections::leaderboard(), Some(vec![(0, 0), (0, 0), (0, 0), (60, 1)])); assert_ok!(Elections::present_winner(Origin::signed(4), 3, 30, 0)); assert_ok!(Elections::present_winner(Origin::signed(4), 4, 40, 0)); assert_ok!(Elections::present_winner(Origin::signed(4), 5, 50, 0)); - assert_eq!(Elections::leaderboard(), Some(vec![ - (30, 3), - (40, 4), - (50, 5), - (60, 1) - ])); + assert_eq!(Elections::leaderboard(), Some(vec![(30, 3), (40, 4), (50, 5), (60, 1)])); assert_ok!(Elections::end_block(System::block_number())); @@ -1210,11 +1268,26 @@ fn election_runners_up_should_be_kept() { assert!(Elections::is_a_candidate(&3)); assert!(Elections::is_a_candidate(&4)); assert_eq!(Elections::vote_index(), 1); - assert_eq!(Elections::voter_info(2), Some(VoterInfo { last_win: 0, last_active: 0, stake: 20, pot: 0 })); - assert_eq!(Elections::voter_info(3), Some(VoterInfo { last_win: 0, last_active: 0, stake: 30, pot: 0 })); - assert_eq!(Elections::voter_info(4), Some(VoterInfo { last_win: 0, last_active: 0, stake: 40, pot: 0 })); - assert_eq!(Elections::voter_info(5), Some(VoterInfo { last_win: 1, last_active: 0, stake: 50, pot: 0 })); - assert_eq!(Elections::voter_info(6), Some(VoterInfo { last_win: 1, last_active: 0, stake: 60, pot: 0 })); + 
assert_eq!( + Elections::voter_info(2), + Some(VoterInfo { last_win: 0, last_active: 0, stake: 20, pot: 0 }) + ); + assert_eq!( + Elections::voter_info(3), + Some(VoterInfo { last_win: 0, last_active: 0, stake: 30, pot: 0 }) + ); + assert_eq!( + Elections::voter_info(4), + Some(VoterInfo { last_win: 0, last_active: 0, stake: 40, pot: 0 }) + ); + assert_eq!( + Elections::voter_info(5), + Some(VoterInfo { last_win: 1, last_active: 0, stake: 50, pot: 0 }) + ); + assert_eq!( + Elections::voter_info(6), + Some(VoterInfo { last_win: 1, last_active: 0, stake: 60, pot: 0 }) + ); assert_eq!(Elections::candidate_reg_info(3), Some((0, 2))); assert_eq!(Elections::candidate_reg_info(4), Some((0, 3))); }); @@ -1231,9 +1304,21 @@ fn election_second_tally_should_use_runners_up() { assert_ok!(Elections::submit_candidacy(Origin::signed(3), 2)); assert_ok!(Elections::set_approvals(Origin::signed(3), vec![false, false, true], 0, 0, 30)); assert_ok!(Elections::submit_candidacy(Origin::signed(4), 3)); - assert_ok!(Elections::set_approvals(Origin::signed(4), vec![false, false, false, true], 0, 0, 40)); + assert_ok!(Elections::set_approvals( + Origin::signed(4), + vec![false, false, false, true], + 0, + 0, + 40 + )); assert_ok!(Elections::submit_candidacy(Origin::signed(5), 4)); - assert_ok!(Elections::set_approvals(Origin::signed(5), vec![false, false, false, false, true], 0, 0, 50)); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, false, false, false, true], + 0, + 0, + 50 + )); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(6); @@ -1244,13 +1329,29 @@ fn election_second_tally_should_use_runners_up() { assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(8); - assert_ok!(Elections::set_approvals(Origin::signed(6), vec![false, false, true, false], 1, 0, 60)); + assert_ok!(Elections::set_approvals( + Origin::signed(6), + vec![false, false, true, false], + 1, + 0, + 60 + )); 
assert_ok!(Elections::set_desired_seats(Origin::root(), 3)); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(10); - assert_ok!(Elections::present_winner(Origin::signed(4), 3, 30 + Elections::get_offset(30, 1) + 60, 1)); - assert_ok!(Elections::present_winner(Origin::signed(4), 4, 40 + Elections::get_offset(40, 1), 1)); + assert_ok!(Elections::present_winner( + Origin::signed(4), + 3, + 30 + Elections::get_offset(30, 1) + 60, + 1 + )); + assert_ok!(Elections::present_winner( + Origin::signed(4), + 4, + 40 + Elections::get_offset(40, 1), + 1 + )); assert_ok!(Elections::end_block(System::block_number())); assert!(!Elections::presentation_active()); @@ -1262,13 +1363,25 @@ fn election_second_tally_should_use_runners_up() { assert!(!Elections::is_a_candidate(&5)); assert!(Elections::is_a_candidate(&4)); assert_eq!(Elections::vote_index(), 2); - assert_eq!(Elections::voter_info(2), Some( VoterInfo { last_win: 0, last_active: 0, stake: 20, pot: 0})); - assert_eq!(Elections::voter_info(3), Some( VoterInfo { last_win: 2, last_active: 0, stake: 30, pot: 0})); - assert_eq!(Elections::voter_info(4), Some( VoterInfo { last_win: 0, last_active: 0, stake: 40, pot: 0})); - assert_eq!(Elections::voter_info(5), Some( VoterInfo { last_win: 1, last_active: 0, stake: 50, pot: 0})); + assert_eq!( + Elections::voter_info(2), + Some(VoterInfo { last_win: 0, last_active: 0, stake: 20, pot: 0 }) + ); + assert_eq!( + Elections::voter_info(3), + Some(VoterInfo { last_win: 2, last_active: 0, stake: 30, pot: 0 }) + ); + assert_eq!( + Elections::voter_info(4), + Some(VoterInfo { last_win: 0, last_active: 0, stake: 40, pot: 0 }) + ); + assert_eq!( + Elections::voter_info(5), + Some(VoterInfo { last_win: 1, last_active: 0, stake: 50, pot: 0 }) + ); assert_eq!( Elections::voter_info(6), - Some(VoterInfo { last_win: 2, last_active: 1, stake: 60, pot: 0}) + Some(VoterInfo { last_win: 2, last_active: 1, stake: 60, pot: 0 }) ); 
assert_eq!(Elections::candidate_reg_info(4), Some((0, 3))); @@ -1289,9 +1402,13 @@ fn election_loser_candidates_bond_gets_slashed() { assert_eq!(balances(&2), (17, 3)); assert_ok!(Elections::set_approvals(Origin::signed(5), vec![true], 0, 0, 50)); - assert_ok!( - Elections::set_approvals(Origin::signed(1), vec![false, true, true, true], 0, 0, 10) - ); + assert_ok!(Elections::set_approvals( + Origin::signed(1), + vec![false, true, true, true], + 0, + 0, + 10 + )); assert_ok!(Elections::end_block(System::block_number())); @@ -1302,7 +1419,6 @@ fn election_loser_candidates_bond_gets_slashed() { assert_eq!(Elections::present_winner(Origin::signed(2), 2, 10, 0), Ok(())); assert_eq!(Elections::present_winner(Origin::signed(1), 1, 50, 0), Ok(())); - // winner + carry assert_eq!(Elections::leaderboard(), Some(vec![(10, 3), (10, 4), (50, 1)])); assert_ok!(Elections::end_block(System::block_number())); @@ -1324,15 +1440,27 @@ fn pot_accumulating_weight_and_decaying_should_work() { assert_ok!(Elections::submit_candidacy(Origin::signed(5), 1)); assert_ok!(Elections::submit_candidacy(Origin::signed(1), 2)); - assert_ok!( - Elections::set_approvals(Origin::signed(6), vec![true, false, false], 0, 0, 600) - ); - assert_ok!( - Elections::set_approvals(Origin::signed(5), vec![false, true, false], 0, 0, 500) - ); - assert_ok!( - Elections::set_approvals(Origin::signed(1), vec![false, false, true], 0, 0, 100) - ); + assert_ok!(Elections::set_approvals( + Origin::signed(6), + vec![true, false, false], + 0, + 0, + 600 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, true, false], + 0, + 0, + 500 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(1), + vec![false, false, true], + 0, + 0, + 100 + )); assert_ok!(Elections::end_block(System::block_number())); @@ -1348,15 +1476,15 @@ fn pot_accumulating_weight_and_decaying_should_work() { assert_eq!(Elections::members(), vec![(6, 11), (5, 11)]); assert_eq!( Elections::voter_info(6).unwrap(), - 
VoterInfo { last_win: 1, last_active: 0, stake: 600, pot: 0}, + VoterInfo { last_win: 1, last_active: 0, stake: 600, pot: 0 }, ); assert_eq!( Elections::voter_info(5).unwrap(), - VoterInfo { last_win: 1, last_active: 0, stake: 500, pot: 0}, + VoterInfo { last_win: 1, last_active: 0, stake: 500, pot: 0 }, ); assert_eq!( Elections::voter_info(1).unwrap(), - VoterInfo { last_win: 0, last_active: 0, stake: 100, pot: 0}, + VoterInfo { last_win: 0, last_active: 0, stake: 100, pot: 0 }, ); System::set_block_number(12); @@ -1365,80 +1493,144 @@ fn pot_accumulating_weight_and_decaying_should_work() { assert_ok!(Elections::retract_voter(Origin::signed(5), 1)); assert_ok!(Elections::submit_candidacy(Origin::signed(6), 0)); assert_ok!(Elections::submit_candidacy(Origin::signed(5), 1)); - assert_ok!( - Elections::set_approvals(Origin::signed(6), vec![true, false, false], 1, 0, 600) - ); - assert_ok!( - Elections::set_approvals(Origin::signed(5), vec![false, true, false], 1, 1, 500) - ); + assert_ok!(Elections::set_approvals( + Origin::signed(6), + vec![true, false, false], + 1, + 0, + 600 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, true, false], + 1, + 1, + 500 + )); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(14); assert!(Elections::presentation_active()); assert_eq!(Elections::present_winner(Origin::signed(6), 6, 600, 1), Ok(())); assert_eq!(Elections::present_winner(Origin::signed(5), 5, 500, 1), Ok(())); - assert_eq!(Elections::present_winner(Origin::signed(1), 1, 100 + Elections::get_offset(100, 1), 1), Ok(())); + assert_eq!( + Elections::present_winner(Origin::signed(1), 1, 100 + Elections::get_offset(100, 1), 1), + Ok(()) + ); assert_eq!(Elections::leaderboard(), Some(vec![(0, 0), (100 + 96, 1), (500, 5), (600, 6)])); assert_ok!(Elections::end_block(System::block_number())); assert_eq!(Elections::members(), vec![(6, 19), (5, 19)]); assert_eq!( Elections::voter_info(6).unwrap(), - VoterInfo { 
last_win: 2, last_active: 1, stake: 600, pot:0 } + VoterInfo { last_win: 2, last_active: 1, stake: 600, pot: 0 } + ); + assert_eq!( + Elections::voter_info(5).unwrap(), + VoterInfo { last_win: 2, last_active: 1, stake: 500, pot: 0 } + ); + assert_eq!( + Elections::voter_info(1).unwrap(), + VoterInfo { last_win: 0, last_active: 0, stake: 100, pot: 0 } ); - assert_eq!(Elections::voter_info(5).unwrap(), VoterInfo { last_win: 2, last_active: 1, stake: 500, pot:0 }); - assert_eq!(Elections::voter_info(1).unwrap(), VoterInfo { last_win: 0, last_active: 0, stake: 100, pot:0 }); System::set_block_number(20); assert_ok!(Elections::retract_voter(Origin::signed(6), 0)); assert_ok!(Elections::retract_voter(Origin::signed(5), 1)); assert_ok!(Elections::submit_candidacy(Origin::signed(6), 0)); assert_ok!(Elections::submit_candidacy(Origin::signed(5), 1)); - assert_ok!(Elections::set_approvals(Origin::signed(6), vec![true, false, false], 2, 0, 600)); - assert_ok!(Elections::set_approvals(Origin::signed(5), vec![false, true, false], 2, 1, 500)); + assert_ok!(Elections::set_approvals( + Origin::signed(6), + vec![true, false, false], + 2, + 0, + 600 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, true, false], + 2, + 1, + 500 + )); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(22); assert!(Elections::presentation_active()); assert_eq!(Elections::present_winner(Origin::signed(6), 6, 600, 2), Ok(())); assert_eq!(Elections::present_winner(Origin::signed(5), 5, 500, 2), Ok(())); - assert_eq!(Elections::present_winner(Origin::signed(1), 1, 100 + Elections::get_offset(100, 2), 2), Ok(())); - assert_eq!(Elections::leaderboard(), Some(vec![(0, 0), (100 + 96 + 93, 1), (500, 5), (600, 6)])); + assert_eq!( + Elections::present_winner(Origin::signed(1), 1, 100 + Elections::get_offset(100, 2), 2), + Ok(()) + ); + assert_eq!( + Elections::leaderboard(), + Some(vec![(0, 0), (100 + 96 + 93, 1), (500, 5), (600, 6)]) + ); 
assert_ok!(Elections::end_block(System::block_number())); assert_eq!(Elections::members(), vec![(6, 27), (5, 27)]); assert_eq!( Elections::voter_info(6).unwrap(), - VoterInfo { last_win: 3, last_active: 2, stake: 600, pot: 0} + VoterInfo { last_win: 3, last_active: 2, stake: 600, pot: 0 } + ); + assert_eq!( + Elections::voter_info(5).unwrap(), + VoterInfo { last_win: 3, last_active: 2, stake: 500, pot: 0 } + ); + assert_eq!( + Elections::voter_info(1).unwrap(), + VoterInfo { last_win: 0, last_active: 0, stake: 100, pot: 0 } ); - assert_eq!(Elections::voter_info(5).unwrap(), VoterInfo { last_win: 3, last_active: 2, stake: 500, pot: 0}); - assert_eq!(Elections::voter_info(1).unwrap(), VoterInfo { last_win: 0, last_active: 0, stake: 100, pot: 0}); - System::set_block_number(28); assert_ok!(Elections::retract_voter(Origin::signed(6), 0)); assert_ok!(Elections::retract_voter(Origin::signed(5), 1)); assert_ok!(Elections::submit_candidacy(Origin::signed(6), 0)); assert_ok!(Elections::submit_candidacy(Origin::signed(5), 1)); - assert_ok!(Elections::set_approvals(Origin::signed(6), vec![true, false, false], 3, 0, 600)); - assert_ok!(Elections::set_approvals(Origin::signed(5), vec![false, true, false], 3, 1, 500)); + assert_ok!(Elections::set_approvals( + Origin::signed(6), + vec![true, false, false], + 3, + 0, + 600 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, true, false], + 3, + 1, + 500 + )); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(30); assert!(Elections::presentation_active()); assert_eq!(Elections::present_winner(Origin::signed(6), 6, 600, 3), Ok(())); assert_eq!(Elections::present_winner(Origin::signed(5), 5, 500, 3), Ok(())); - assert_eq!(Elections::present_winner(Origin::signed(1), 1, 100 + Elections::get_offset(100, 3), 3), Ok(())); - assert_eq!(Elections::leaderboard(), Some(vec![(0, 0), (100 + 96 + 93 + 90, 1), (500, 5), (600, 6)])); + assert_eq!( + 
Elections::present_winner(Origin::signed(1), 1, 100 + Elections::get_offset(100, 3), 3), + Ok(()) + ); + assert_eq!( + Elections::leaderboard(), + Some(vec![(0, 0), (100 + 96 + 93 + 90, 1), (500, 5), (600, 6)]) + ); assert_ok!(Elections::end_block(System::block_number())); assert_eq!(Elections::members(), vec![(6, 35), (5, 35)]); assert_eq!( Elections::voter_info(6).unwrap(), - VoterInfo { last_win: 4, last_active: 3, stake: 600, pot: 0} + VoterInfo { last_win: 4, last_active: 3, stake: 600, pot: 0 } + ); + assert_eq!( + Elections::voter_info(5).unwrap(), + VoterInfo { last_win: 4, last_active: 3, stake: 500, pot: 0 } + ); + assert_eq!( + Elections::voter_info(1).unwrap(), + VoterInfo { last_win: 0, last_active: 0, stake: 100, pot: 0 } ); - assert_eq!(Elections::voter_info(5).unwrap(), VoterInfo { last_win: 4, last_active: 3, stake: 500, pot: 0}); - assert_eq!(Elections::voter_info(1).unwrap(), VoterInfo { last_win: 0, last_active: 0, stake: 100, pot: 0}); }) } @@ -1453,9 +1645,27 @@ fn pot_winning_resets_accumulated_pot() { assert_ok!(Elections::submit_candidacy(Origin::signed(3), 2)); assert_ok!(Elections::submit_candidacy(Origin::signed(2), 3)); - assert_ok!(Elections::set_approvals(Origin::signed(6), vec![true, false, false, false], 0, 0, 600)); - assert_ok!(Elections::set_approvals(Origin::signed(4), vec![false, true, false, false], 0, 1, 400)); - assert_ok!(Elections::set_approvals(Origin::signed(3), vec![false, false, true, true], 0, 2, 300)); + assert_ok!(Elections::set_approvals( + Origin::signed(6), + vec![true, false, false, false], + 0, + 0, + 600 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(4), + vec![false, true, false, false], + 0, + 1, + 400 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(3), + vec![false, false, true, true], + 0, + 2, + 300 + )); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(6); @@ -1474,16 +1684,34 @@ fn pot_winning_resets_accumulated_pot() { 
assert_ok!(Elections::retract_voter(Origin::signed(4), 1)); assert_ok!(Elections::submit_candidacy(Origin::signed(6), 0)); assert_ok!(Elections::submit_candidacy(Origin::signed(4), 1)); - assert_ok!(Elections::set_approvals(Origin::signed(6), vec![true, false, false, false], 1, 0, 600)); - assert_ok!(Elections::set_approvals(Origin::signed(4), vec![false, true, false, false], 1, 1, 400)); + assert_ok!(Elections::set_approvals( + Origin::signed(6), + vec![true, false, false, false], + 1, + 0, + 600 + )); + assert_ok!(Elections::set_approvals( + Origin::signed(4), + vec![false, true, false, false], + 1, + 1, + 400 + )); assert_ok!(Elections::end_block(System::block_number())); System::set_block_number(14); assert!(Elections::presentation_active()); assert_eq!(Elections::present_winner(Origin::signed(6), 6, 600, 1), Ok(())); assert_eq!(Elections::present_winner(Origin::signed(4), 4, 400, 1), Ok(())); - assert_eq!(Elections::present_winner(Origin::signed(3), 3, 300 + Elections::get_offset(300, 1), 1), Ok(())); - assert_eq!(Elections::present_winner(Origin::signed(2), 2, 300 + Elections::get_offset(300, 1), 1), Ok(())); + assert_eq!( + Elections::present_winner(Origin::signed(3), 3, 300 + Elections::get_offset(300, 1), 1), + Ok(()) + ); + assert_eq!( + Elections::present_winner(Origin::signed(2), 2, 300 + Elections::get_offset(300, 1), 1), + Ok(()) + ); assert_eq!(Elections::leaderboard(), Some(vec![(400, 4), (588, 2), (588, 3), (600, 6)])); assert_ok!(Elections::end_block(System::block_number())); @@ -1497,7 +1725,10 @@ fn pot_winning_resets_accumulated_pot() { // because one of 3's candidates (3) won in previous round // 4 on the other hand will get extra weight since it was unlucky. 
assert_eq!(Elections::present_winner(Origin::signed(3), 2, 300, 2), Ok(())); - assert_eq!(Elections::present_winner(Origin::signed(4), 4, 400 + Elections::get_offset(400, 1), 2), Ok(())); + assert_eq!( + Elections::present_winner(Origin::signed(4), 4, 400 + Elections::get_offset(400, 1), 2), + Ok(()) + ); assert_ok!(Elections::end_block(System::block_number())); assert_eq!(Elections::members(), vec![(4, 27), (2, 27)]); @@ -1519,15 +1750,27 @@ fn pot_resubmitting_approvals_stores_pot() { assert_ok!(Elections::submit_candidacy(Origin::signed(5), 1)); assert_ok!(Elections::submit_candidacy(Origin::signed(1), 2)); - assert_ok!( - Elections::set_approvals(Origin::signed(6), vec![true, false, false], 0, 0, 600), - ); - assert_ok!( - Elections::set_approvals(Origin::signed(5), vec![false, true, false], 0, 1, 500), - ); - assert_ok!( - Elections::set_approvals(Origin::signed(1), vec![false, false, true], 0, 2, 100), - ); + assert_ok!(Elections::set_approvals( + Origin::signed(6), + vec![true, false, false], + 0, + 0, + 600 + ),); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, true, false], + 0, + 1, + 500 + ),); + assert_ok!(Elections::set_approvals( + Origin::signed(1), + vec![false, false, true], + 0, + 2, + 100 + ),); assert_ok!(Elections::end_block(System::block_number())); @@ -1547,18 +1790,31 @@ fn pot_resubmitting_approvals_stores_pot() { assert_ok!(Elections::retract_voter(Origin::signed(5), 1)); assert_ok!(Elections::submit_candidacy(Origin::signed(6), 0)); assert_ok!(Elections::submit_candidacy(Origin::signed(5), 1)); - assert_ok!( - Elections::set_approvals(Origin::signed(6), vec![true, false, false], 1, 0, 600), - ); - assert_ok!( - Elections::set_approvals(Origin::signed(5), vec![false, true, false], 1, 1, 500), - ); + assert_ok!(Elections::set_approvals( + Origin::signed(6), + vec![true, false, false], + 1, + 0, + 600 + ),); + assert_ok!(Elections::set_approvals( + Origin::signed(5), + vec![false, true, false], + 1, + 1, + 500 + 
),); // give 1 some new high balance let _ = Balances::make_free_balance_be(&1, 997); - assert_ok!( - Elections::set_approvals(Origin::signed(1), vec![false, false, true], 1, 2, 1000), - ); - assert_eq!(Elections::voter_info(1).unwrap(), + assert_ok!(Elections::set_approvals( + Origin::signed(1), + vec![false, false, true], + 1, + 2, + 1000 + ),); + assert_eq!( + Elections::voter_info(1).unwrap(), VoterInfo { stake: 1000, // 997 + 3 which is candidacy bond. pot: Elections::get_offset(100, 1), @@ -1599,7 +1855,10 @@ fn pot_get_offset_should_work() { assert_eq!(Elections::get_offset(50_000_000_000, 0), 0); assert_eq!(Elections::get_offset(50_000_000_000, 1), 48_000_000_000); assert_eq!(Elections::get_offset(50_000_000_000, 2), 48_000_000_000 + 46_080_000_000); - assert_eq!(Elections::get_offset(50_000_000_000, 3), 48_000_000_000 + 46_080_000_000 + 44_236_800_000); + assert_eq!( + Elections::get_offset(50_000_000_000, 3), + 48_000_000_000 + 46_080_000_000 + 44_236_800_000 + ); assert_eq!( Elections::get_offset(50_000_000_000, 4), 48_000_000_000 + 46_080_000_000 + 44_236_800_000 + 42_467_328_000 diff --git a/frame/example-offchain-worker/src/lib.rs b/frame/example-offchain-worker/src/lib.rs index a705b5f9b3b73..35e8dffd4edac 100644 --- a/frame/example-offchain-worker/src/lib.rs +++ b/frame/example-offchain-worker/src/lib.rs @@ -42,24 +42,28 @@ //! one unsigned transaction floating in the network. 
#![cfg_attr(not(feature = "std"), no_std)] +use codec::{Decode, Encode}; +use frame_support::traits::Get; use frame_system::{ self as system, offchain::{ - AppCrypto, CreateSignedTransaction, SendUnsignedTransaction, SendSignedTransaction, - SignedPayload, SigningTypes, Signer, SubmitTransaction, - } + AppCrypto, CreateSignedTransaction, SendSignedTransaction, SendUnsignedTransaction, + SignedPayload, Signer, SigningTypes, SubmitTransaction, + }, }; -use frame_support::traits::Get; +use lite_json::json::JsonValue; use sp_core::crypto::KeyTypeId; use sp_runtime::{ - RuntimeDebug, - offchain::{http, Duration, storage::{MutateStorageError, StorageRetrievalError, StorageValueRef}}, + offchain::{ + http, + storage::{MutateStorageError, StorageRetrievalError, StorageValueRef}, + Duration, + }, traits::Zero, - transaction_validity::{InvalidTransaction, ValidTransaction, TransactionValidity}, + transaction_validity::{InvalidTransaction, TransactionValidity, ValidTransaction}, + RuntimeDebug, }; -use codec::{Encode, Decode}; use sp_std::vec::Vec; -use lite_json::json::JsonValue; #[cfg(test)] mod tests; @@ -78,15 +82,17 @@ pub const KEY_TYPE: KeyTypeId = KeyTypeId(*b"btc!"); /// the types with this pallet-specific identifier. 
pub mod crypto { use super::KEY_TYPE; + use sp_core::sr25519::Signature as Sr25519Signature; use sp_runtime::{ app_crypto::{app_crypto, sr25519}, traits::Verify, }; - use sp_core::sr25519::Signature as Sr25519Signature; app_crypto!(sr25519, KEY_TYPE); pub struct TestAuthId; - impl frame_system::offchain::AppCrypto<::Signer, Sr25519Signature> for TestAuthId { + impl frame_system::offchain::AppCrypto<::Signer, Sr25519Signature> + for TestAuthId + { type RuntimeAppPublic = Public; type GenericSignature = sp_core::sr25519::Signature; type GenericPublic = sp_core::sr25519::Public; @@ -97,9 +103,9 @@ pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; /// This pallet's configuration trait #[pallet::config] @@ -179,8 +185,10 @@ pub mod pallet { let should_send = Self::choose_transaction_type(block_number); let res = match should_send { TransactionType::Signed => Self::fetch_price_and_send_signed(), - TransactionType::UnsignedForAny => Self::fetch_price_and_send_unsigned_for_any_account(block_number), - TransactionType::UnsignedForAll => Self::fetch_price_and_send_unsigned_for_all_accounts(block_number), + TransactionType::UnsignedForAny => + Self::fetch_price_and_send_unsigned_for_any_account(block_number), + TransactionType::UnsignedForAll => + Self::fetch_price_and_send_unsigned_for_all_accounts(block_number), TransactionType::Raw => Self::fetch_price_and_send_raw_unsigned(block_number), TransactionType::None => Ok(()), }; @@ -236,7 +244,7 @@ pub mod pallet { pub fn submit_price_unsigned( origin: OriginFor, _block_number: T::BlockNumber, - price: u32 + price: u32, ) -> DispatchResultWithPostInfo { // This ensures that the function can only be called via unsigned transaction. 
ensure_none(origin)?; @@ -283,17 +291,17 @@ pub mod pallet { /// By default unsigned transactions are disallowed, but implementing the validator /// here we make sure that some particular calls (the ones produced by offchain worker) /// are being whitelisted and marked as valid. - fn validate_unsigned( - _source: TransactionSource, - call: &Self::Call, - ) -> TransactionValidity { + fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { // Firstly let's check that we call the right function. if let Call::submit_price_unsigned_with_signed_payload { - price_payload: ref payload, _signature: ref signature, - } = call { - let signature_valid = SignedPayload::::verify::(payload, signature.clone()); + price_payload: ref payload, + _signature: ref signature, + } = call + { + let signature_valid = + SignedPayload::::verify::(payload, signature.clone()); if !signature_valid { - return InvalidTransaction::BadProof.into(); + return InvalidTransaction::BadProof.into() } Self::validate_transaction_parameters(&payload.block_number, &payload.price) } else if let Call::submit_price_unsigned { block_number, new_price } = call { @@ -370,11 +378,10 @@ impl Pallet { match last_send { // If we already have a value in storage and the block number is recent enough // we avoid sending another transaction at this time. - Ok(Some(block)) if block_number < block + T::GracePeriod::get() => { - Err(RECENTLY_SENT) - }, + Ok(Some(block)) if block_number < block + T::GracePeriod::get() => + Err(RECENTLY_SENT), // In every other case we attempt to acquire the lock and send a transaction. - _ => Ok(block_number) + _ => Ok(block_number), } }); @@ -396,10 +403,15 @@ impl Pallet { // the storage entry for that. (for instance store both block number and a flag // indicating the type of next transaction to send). 
let transaction_type = block_number % 3u32.into(); - if transaction_type == Zero::zero() { TransactionType::Signed } - else if transaction_type == T::BlockNumber::from(1u32) { TransactionType::UnsignedForAny } - else if transaction_type == T::BlockNumber::from(2u32) { TransactionType::UnsignedForAll } - else { TransactionType::Raw } + if transaction_type == Zero::zero() { + TransactionType::Signed + } else if transaction_type == T::BlockNumber::from(1u32) { + TransactionType::UnsignedForAny + } else if transaction_type == T::BlockNumber::from(2u32) { + TransactionType::UnsignedForAll + } else { + TransactionType::Raw + } }, // We are in the grace period, we should not send a transaction this time. Err(MutateStorageError::ValueFunctionFailed(RECENTLY_SENT)) => TransactionType::None, @@ -417,7 +429,7 @@ impl Pallet { let signer = Signer::::all_accounts(); if !signer.can_sign() { return Err( - "No local accounts available. Consider adding one via `author_insertKey` RPC." + "No local accounts available. Consider adding one via `author_insertKey` RPC.", )? } // Make an external HTTP request to fetch the current price. @@ -428,14 +440,12 @@ impl Pallet { // representing the call, we've just created. // Submit signed will return a vector of results for all accounts that were found in the // local keystore with expected `KEY_TYPE`. - let results = signer.send_signed_transaction( - |_account| { - // Received price is wrapped into a call to `submit_price` public function of this pallet. - // This means that the transaction, when executed, will simply call that function passing - // `price` as an argument. - Call::submit_price { price } - } - ); + let results = signer.send_signed_transaction(|_account| { + // Received price is wrapped into a call to `submit_price` public function of this pallet. + // This means that the transaction, when executed, will simply call that function passing + // `price` as an argument. 
+ Call::submit_price { price } + }); for (acc, res) in &results { match res { @@ -480,7 +490,9 @@ impl Pallet { } /// A helper function to fetch the price, sign payload and send an unsigned transaction - fn fetch_price_and_send_unsigned_for_any_account(block_number: T::BlockNumber) -> Result<(), &'static str> { + fn fetch_price_and_send_unsigned_for_any_account( + block_number: T::BlockNumber, + ) -> Result<(), &'static str> { // Make sure we don't fetch the price if unsigned transaction is going to be rejected // anyway. let next_unsigned_at = >::get(); @@ -493,23 +505,24 @@ impl Pallet { let price = Self::fetch_price().map_err(|_| "Failed to fetch price")?; // -- Sign using any account - let (_, result) = Signer::::any_account().send_unsigned_transaction( - |account| PricePayload { - price, - block_number, - public: account.public.clone() - }, - |payload, signature| { - Call::submit_price_unsigned_with_signed_payload { payload, signature } - } - ).ok_or("No local accounts accounts available.")?; + let (_, result) = Signer::::any_account() + .send_unsigned_transaction( + |account| PricePayload { price, block_number, public: account.public.clone() }, + |payload, signature| Call::submit_price_unsigned_with_signed_payload { + payload, + signature, + }, + ) + .ok_or("No local accounts accounts available.")?; result.map_err(|()| "Unable to submit transaction")?; Ok(()) } /// A helper function to fetch the price, sign payload and send an unsigned transaction - fn fetch_price_and_send_unsigned_for_all_accounts(block_number: T::BlockNumber) -> Result<(), &'static str> { + fn fetch_price_and_send_unsigned_for_all_accounts( + block_number: T::BlockNumber, + ) -> Result<(), &'static str> { // Make sure we don't fetch the price if unsigned transaction is going to be rejected // anyway. 
let next_unsigned_at = >::get(); @@ -524,18 +537,15 @@ impl Pallet { // -- Sign using all accounts let transaction_results = Signer::::all_accounts() .send_unsigned_transaction( - |account| PricePayload { - price, - block_number, - public: account.public.clone() + |account| PricePayload { price, block_number, public: account.public.clone() }, + |payload, signature| Call::submit_price_unsigned_with_signed_payload { + payload, + signature, }, - |payload, signature| { - Call::submit_price_unsigned_with_signed_payload { payload, signature } - } ); for (_account_id, result) in transaction_results.into_iter() { if result.is_err() { - return Err("Unable to submit transaction"); + return Err("Unable to submit transaction") } } @@ -554,16 +564,12 @@ impl Pallet { // you can find in `sp_io`. The API is trying to be similar to `reqwest`, but // since we are running in a custom WASM execution environment we can't simply // import the library here. - let request = http::Request::get( - "https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD" - ); + let request = + http::Request::get("https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD"); // We set the deadline for sending of the request, note that awaiting response can // have a separate deadline. Next we send the request, before that it's also possible // to alter request headers or stream body content in case of non-GET requests. - let pending = request - .deadline(deadline) - .send() - .map_err(|_| http::Error::IoError)?; + let pending = request.deadline(deadline).send().map_err(|_| http::Error::IoError)?; // The request is already being processed by the host, we are free to do anything // else in the worker (we can send multiple concurrent requests too). @@ -571,12 +577,11 @@ impl Pallet { // so we can block current thread and wait for it to finish. 
// Note that since the request is being driven by the host, we don't have to wait // for the request to have it complete, we will just not read the response. - let response = pending.try_wait(deadline) - .map_err(|_| http::Error::DeadlineReached)??; + let response = pending.try_wait(deadline).map_err(|_| http::Error::DeadlineReached)??; // Let's check the status code before we proceed to reading the response. if response.code != 200 { log::warn!("Unexpected status code: {}", response.code); - return Err(http::Error::Unknown); + return Err(http::Error::Unknown) } // Next we want to fully read the response body and collect it to a vector of bytes. @@ -595,7 +600,7 @@ impl Pallet { None => { log::warn!("Unable to extract price from the response: {:?}", body_str); Err(http::Error::Unknown) - } + }, }?; log::warn!("Got price: {} cents", price); @@ -610,8 +615,7 @@ impl Pallet { let val = lite_json::parse_json(price_str); let price = match val.ok()? { JsonValue::Object(obj) => { - let (_, v) = obj.into_iter() - .find(|(k, _)| k.iter().copied().eq("USD".chars()))?; + let (_, v) = obj.into_iter().find(|(k, _)| k.iter().copied().eq("USD".chars()))?; match v { JsonValue::Number(number) => number, _ => return None, @@ -661,12 +665,12 @@ impl Pallet { // Now let's check if the transaction has any chance to succeed. let next_unsigned_at = >::get(); if &next_unsigned_at > block_number { - return InvalidTransaction::Stale.into(); + return InvalidTransaction::Stale.into() } // Let's make sure to reject transactions from the future. let current_block = >::block_number(); if ¤t_block < block_number { - return InvalidTransaction::Future.into(); + return InvalidTransaction::Future.into() } // We prioritize transactions that are more far away from current average. 
diff --git a/frame/example-offchain-worker/src/tests.rs b/frame/example-offchain-worker/src/tests.rs index e2ad3d8561170..36f69a2d76c82 100644 --- a/frame/example-offchain-worker/src/tests.rs +++ b/frame/example-offchain-worker/src/tests.rs @@ -15,28 +15,22 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::*; use crate as example_offchain_worker; -use std::sync::Arc; +use crate::*; use codec::Decode; use frame_support::{assert_ok, parameter_types}; use sp_core::{ - H256, - offchain::{OffchainWorkerExt, TransactionPoolExt, testing}, + offchain::{testing, OffchainWorkerExt, TransactionPoolExt}, sr25519::Signature, + H256, }; +use std::sync::Arc; -use sp_keystore::{ - {KeystoreExt, SyncCryptoStore}, - testing::KeyStore, -}; +use sp_keystore::{testing::KeyStore, KeystoreExt, SyncCryptoStore}; use sp_runtime::{ - RuntimeAppPublic, testing::{Header, TestXt}, - traits::{ - BlakeTwo256, IdentityLookup, Extrinsic as ExtrinsicT, - IdentifyAccount, Verify, - }, + traits::{BlakeTwo256, Extrinsic as ExtrinsicT, IdentifyAccount, IdentityLookup, Verify}, + RuntimeAppPublic, }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; @@ -93,14 +87,16 @@ impl frame_system::offchain::SigningTypes for Test { type Signature = Signature; } -impl frame_system::offchain::SendTransactionTypes for Test where +impl frame_system::offchain::SendTransactionTypes for Test +where Call: From, { type OverarchingCall = Call; type Extrinsic = Extrinsic; } -impl frame_system::offchain::CreateSignedTransaction for Test where +impl frame_system::offchain::CreateSignedTransaction for Test +where Call: From, { fn create_transaction>( @@ -190,7 +186,6 @@ fn knows_how_to_mock_several_http_calls() { }); } - t.execute_with(|| { let price1 = Example::fetch_price().unwrap(); let price2 = Example::fetch_price().unwrap(); @@ -200,12 +195,12 @@ fn knows_how_to_mock_several_http_calls() { assert_eq!(price2, 200); 
assert_eq!(price3, 300); }) - } #[test] fn should_submit_signed_transaction_on_chain() { - const PHRASE: &str = "news slush supreme milk chapter athlete soap sausage put clutch what kitten"; + const PHRASE: &str = + "news slush supreme milk chapter athlete soap sausage put clutch what kitten"; let (offchain, offchain_state) = testing::TestOffchainExt::new(); let (pool, pool_state) = testing::TestTransactionPoolExt::new(); @@ -213,9 +208,9 @@ fn should_submit_signed_transaction_on_chain() { SyncCryptoStore::sr25519_generate_new( &keystore, crate::crypto::Public::ID, - Some(&format!("{}/hunter1", PHRASE)) - ).unwrap(); - + Some(&format!("{}/hunter1", PHRASE)), + ) + .unwrap(); let mut t = sp_io::TestExternalities::default(); t.register_extension(OffchainWorkerExt::new(offchain)); @@ -238,7 +233,8 @@ fn should_submit_signed_transaction_on_chain() { #[test] fn should_submit_unsigned_transaction_on_chain_for_any_account() { - const PHRASE: &str = "news slush supreme milk chapter athlete soap sausage put clutch what kitten"; + const PHRASE: &str = + "news slush supreme milk chapter athlete soap sausage put clutch what kitten"; let (offchain, offchain_state) = testing::TestOffchainExt::new(); let (pool, pool_state) = testing::TestTransactionPoolExt::new(); @@ -247,8 +243,9 @@ fn should_submit_unsigned_transaction_on_chain_for_any_account() { SyncCryptoStore::sr25519_generate_new( &keystore, crate::crypto::Public::ID, - Some(&format!("{}/hunter1", PHRASE)) - ).unwrap(); + Some(&format!("{}/hunter1", PHRASE)), + ) + .unwrap(); let public_key = SyncCryptoStore::sr25519_public_keys(&keystore, crate::crypto::Public::ID) .get(0) @@ -276,13 +273,18 @@ fn should_submit_unsigned_transaction_on_chain_for_any_account() { let tx = pool_state.write().transactions.pop().unwrap(); let tx = Extrinsic::decode(&mut &*tx).unwrap(); assert_eq!(tx.signature, None); - if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload { price_payload: body, _signature: signature }) = 
tx.call { + if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload { + price_payload: body, + _signature: signature, + }) = tx.call + { assert_eq!(body, price_payload); - let signature_valid = ::Public, - ::BlockNumber - > as SignedPayload>::verify::(&price_payload, signature); + let signature_valid = + ::Public, + ::BlockNumber, + > as SignedPayload>::verify::(&price_payload, signature); assert!(signature_valid); } @@ -291,7 +293,8 @@ fn should_submit_unsigned_transaction_on_chain_for_any_account() { #[test] fn should_submit_unsigned_transaction_on_chain_for_all_accounts() { - const PHRASE: &str = "news slush supreme milk chapter athlete soap sausage put clutch what kitten"; + const PHRASE: &str = + "news slush supreme milk chapter athlete soap sausage put clutch what kitten"; let (offchain, offchain_state) = testing::TestOffchainExt::new(); let (pool, pool_state) = testing::TestTransactionPoolExt::new(); @@ -300,8 +303,9 @@ fn should_submit_unsigned_transaction_on_chain_for_all_accounts() { SyncCryptoStore::sr25519_generate_new( &keystore, crate::crypto::Public::ID, - Some(&format!("{}/hunter1", PHRASE)) - ).unwrap(); + Some(&format!("{}/hunter1", PHRASE)), + ) + .unwrap(); let public_key = SyncCryptoStore::sr25519_public_keys(&keystore, crate::crypto::Public::ID) .get(0) @@ -329,13 +333,18 @@ fn should_submit_unsigned_transaction_on_chain_for_all_accounts() { let tx = pool_state.write().transactions.pop().unwrap(); let tx = Extrinsic::decode(&mut &*tx).unwrap(); assert_eq!(tx.signature, None); - if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload { price_payload: body, _signature: signature }) = tx.call { + if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload { + price_payload: body, + _signature: signature, + }) = tx.call + { assert_eq!(body, price_payload); - let signature_valid = ::Public, - ::BlockNumber - > as SignedPayload>::verify::(&price_payload, signature); + let signature_valid = + 
::Public, + ::BlockNumber, + > as SignedPayload>::verify::(&price_payload, signature); assert!(signature_valid); } @@ -364,7 +373,10 @@ fn should_submit_raw_unsigned_transaction_on_chain() { assert!(pool_state.read().transactions.is_empty()); let tx = Extrinsic::decode(&mut &*tx).unwrap(); assert_eq!(tx.signature, None); - assert_eq!(tx.call, Call::Example(crate::Call::submit_price_unsigned { _block_number: 1, price: 15523 })); + assert_eq!( + tx.call, + Call::Example(crate::Call::submit_price_unsigned { _block_number: 1, price: 15523 }) + ); }); } diff --git a/frame/example-parallel/src/lib.rs b/frame/example-parallel/src/lib.rs index 610298d960e2a..c86cac4295684 100644 --- a/frame/example-parallel/src/lib.rs +++ b/frame/example-parallel/src/lib.rs @@ -24,7 +24,7 @@ use sp_runtime::RuntimeDebug; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use sp_std::vec::Vec; #[cfg(test)] @@ -34,9 +34,9 @@ pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::config] pub trait Config: frame_system::Config { @@ -65,9 +65,10 @@ pub mod pallet { /// The example utilizes parallel execution by checking half of the /// signatures in spawned task. 
#[pallet::weight(0)] - pub fn enlist_participants(origin: OriginFor, participants: Vec) - -> DispatchResultWithPostInfo - { + pub fn enlist_participants( + origin: OriginFor, + participants: Vec, + ) -> DispatchResultWithPostInfo { let _ = ensure_signed(origin)?; if validate_participants_parallel(&>::get(), &participants[..]) { @@ -103,21 +104,20 @@ pub struct EnlistedParticipant { impl EnlistedParticipant { fn verify(&self, event_id: &[u8]) -> bool { use sp_core::Public; - use std::convert::TryFrom; use sp_runtime::traits::Verify; + use std::convert::TryFrom; match sp_core::sr25519::Signature::try_from(&self.signature[..]) { Ok(signature) => { let public = sp_core::sr25519::Public::from_slice(self.account.as_ref()); signature.verify(event_id, &public) - } - _ => false + }, + _ => false, } } } fn validate_participants_parallel(event_id: &[u8], participants: &[EnlistedParticipant]) -> bool { - fn spawn_verify(data: Vec) -> Vec { let stream = &mut &data[..]; let event_id = Vec::::decode(stream).expect("Failed to decode"); @@ -138,10 +138,10 @@ fn validate_participants_parallel(event_id: &[u8], participants: &[EnlistedParti let handle = sp_tasks::spawn(spawn_verify, async_payload); let mut result = true; - for participant in &participants[participants.len()/2+1..] { + for participant in &participants[participants.len() / 2 + 1..] 
{ if !participant.verify(event_id) { result = false; - break; + break } } diff --git a/frame/example-parallel/src/tests.rs b/frame/example-parallel/src/tests.rs index 395290c0bf6e7..f67c5ae51b504 100644 --- a/frame/example-parallel/src/tests.rs +++ b/frame/example-parallel/src/tests.rs @@ -20,8 +20,9 @@ use crate::{self as pallet_example_parallel, *}; use frame_support::parameter_types; use sp_core::H256; use sp_runtime::{ - Perbill, testing::Header, + testing::Header, traits::{BlakeTwo256, IdentityLookup}, + Perbill, }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; @@ -108,7 +109,6 @@ fn it_can_enlist() { assert_eq!(Example::participants().len(), 2); }); - } #[test] @@ -146,5 +146,4 @@ fn one_wrong_will_not_enlist_anyone() { assert_eq!(Example::participants().len(), 0); }); - } diff --git a/frame/example/src/benchmarking.rs b/frame/example/src/benchmarking.rs index 64602ca41cee9..cdf6c152a4880 100644 --- a/frame/example/src/benchmarking.rs +++ b/frame/example/src/benchmarking.rs @@ -20,7 +20,7 @@ #![cfg(feature = "runtime-benchmarks")] use crate::*; -use frame_benchmarking::{benchmarks, whitelisted_caller, impl_benchmark_test_suite}; +use frame_benchmarking::{benchmarks, impl_benchmark_test_suite, whitelisted_caller}; use frame_system::RawOrigin; // To actually run this benchmark on pallet-example, we need to put this pallet into the @@ -33,7 +33,7 @@ use frame_system::RawOrigin; // Details on using the benchmarks macro can be seen at: // https://substrate.dev/rustdocs/v3.0.0/frame_benchmarking/macro.benchmarks.html -benchmarks!{ +benchmarks! { // This will measure the execution time of `set_dummy` for b in [1..1000] range. set_dummy_benchmark { // This is the benchmark setup phase diff --git a/frame/example/src/lib.rs b/frame/example/src/lib.rs index 6f046d3009cab..5df2e894e1a9c 100644 --- a/frame/example/src/lib.rs +++ b/frame/example/src/lib.rs @@ -255,26 +255,22 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] -use sp_std::{ - prelude::*, - marker::PhantomData -}; +use codec::{Decode, Encode}; use frame_support::{ - dispatch::DispatchResult, traits::IsSubType, - weights::{DispatchClass, ClassifyDispatch, WeighData, Weight, PaysFee, Pays}, + dispatch::DispatchResult, + traits::IsSubType, + weights::{ClassifyDispatch, DispatchClass, Pays, PaysFee, WeighData, Weight}, }; -use frame_system::{ensure_signed}; -use codec::{Encode, Decode}; +use frame_system::ensure_signed; +use log::info; use scale_info::TypeInfo; use sp_runtime::{ - traits::{ - SignedExtension, Bounded, SaturatedConversion, DispatchInfoOf, Saturating - }, + traits::{Bounded, DispatchInfoOf, SaturatedConversion, Saturating, SignedExtension}, transaction_validity::{ - ValidTransaction, TransactionValidityError, InvalidTransaction, TransactionValidity, + InvalidTransaction, TransactionValidity, TransactionValidityError, ValidTransaction, }, }; -use log::info; +use sp_std::{marker::PhantomData, prelude::*}; // Re-export pallet items so that they can be accessed from the crate namespace. pub use pallet::*; @@ -313,8 +309,7 @@ const MILLICENTS: u32 = 1_000_000_000; // fulfilled by running the benchmarking toolchain. Refer to `benchmarking.rs` file. struct WeightForSetDummy(BalanceOf); -impl WeighData<(&BalanceOf,)> for WeightForSetDummy -{ +impl WeighData<(&BalanceOf,)> for WeightForSetDummy { fn weigh_data(&self, target: (&BalanceOf,)) -> Weight { let multiplier = self.0; // *target.0 is the amount passed into the extrinsic @@ -344,9 +339,9 @@ impl PaysFee<(&BalanceOf,)> for WeightForSetDummy #[frame_support::pallet] pub mod pallet { // Import various types used to declare pallet in scope. + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; /// Our pallet's configuration trait. All our types and constants go in here. 
If the /// pallet is dependent on specific other pallets, then their configuration traits @@ -398,7 +393,7 @@ pub mod pallet { // but we could dispatch extrinsic (transaction/unsigned/inherent) using // sp_io::submit_extrinsic. // To see example on offchain worker, please refer to example-offchain-worker pallet - // accompanied in this repository. + // accompanied in this repository. } } @@ -489,10 +484,7 @@ pub mod pallet { #[pallet::weight( ::WeightInfo::accumulate_dummy((*increase_by).saturated_into()) )] - pub fn accumulate_dummy( - origin: OriginFor, - increase_by: T::Balance - ) -> DispatchResult { + pub fn accumulate_dummy(origin: OriginFor, increase_by: T::Balance) -> DispatchResult { // This is a public call, so we ensure that the origin is some signed account. let _sender = ensure_signed(origin)?; @@ -611,11 +603,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - Self { - dummy: Default::default(), - bar: Default::default(), - foo: Default::default(), - } + Self { dummy: Default::default(), bar: Default::default(), foo: Default::default() } } } @@ -711,7 +699,9 @@ where type AdditionalSigned = (); type Pre = (); - fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { Ok(()) } + fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { + Ok(()) + } fn validate( &self, @@ -733,7 +723,7 @@ where let mut valid_tx = ValidTransaction::default(); valid_tx.priority = Bounded::max_value(); Ok(valid_tx) - } + }, _ => Ok(Default::default()), } } diff --git a/frame/example/src/tests.rs b/frame/example/src/tests.rs index 68a9237921805..18089888dba1a 100644 --- a/frame/example/src/tests.rs +++ b/frame/example/src/tests.rs @@ -20,14 +20,16 @@ use crate::*; use frame_support::{ assert_ok, parameter_types, - weights::{DispatchInfo, GetDispatchInfo}, traits::OnInitialize + traits::OnInitialize, + weights::{DispatchInfo, GetDispatchInfo}, }; use sp_core::H256; // 
The testing primitives are very useful for avoiding having to work with signatures // or public keys. `u64` is used as the `AccountId` and no `Signature`s are required. use sp_runtime::{ - testing::Header, BuildStorage, + testing::Header, traits::{BlakeTwo256, IdentityLookup}, + BuildStorage, }; // Reexport crate as its pallet name for construct_runtime. use crate as pallet_example; @@ -115,7 +117,9 @@ pub fn new_test_ext() -> sp_io::TestExternalities { bar: vec![(1, 2), (2, 3)], foo: 24, }, - }.build_storage().unwrap(); + } + .build_storage() + .unwrap(); t.into() } @@ -163,7 +167,8 @@ fn signed_ext_watch_dummy_works() { let info = DispatchInfo::default(); assert_eq!( - WatchDummy::(PhantomData).validate(&1, &call, &info, 150) + WatchDummy::(PhantomData) + .validate(&1, &call, &info, 150) .unwrap() .priority, u64::MAX, @@ -183,7 +188,6 @@ fn weights_work() { // aka. `let info = as GetDispatchInfo>::get_dispatch_info(&default_call);` assert!(info1.weight > 0); - // `set_dummy` is simpler than `accumulate_dummy`, and the weight // should be less. let custom_call = >::set_dummy(20); diff --git a/frame/example/src/weights.rs b/frame/example/src/weights.rs index db6491335c76f..16258d2c4a96d 100644 --- a/frame/example/src/weights.rs +++ b/frame/example/src/weights.rs @@ -44,35 +44,37 @@ // --template // ./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_example. 
pub trait WeightInfo { - fn set_dummy_benchmark(b: u32, ) -> Weight; - fn accumulate_dummy(b: u32, ) -> Weight; - fn sort_vector(x: u32, ) -> Weight; + fn set_dummy_benchmark(b: u32) -> Weight; + fn accumulate_dummy(b: u32) -> Weight; + fn sort_vector(x: u32) -> Weight; } /// Weights for pallet_example using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn set_dummy_benchmark(b: u32, ) -> Weight { + fn set_dummy_benchmark(b: u32) -> Weight { (5_834_000 as Weight) .saturating_add((24_000 as Weight).saturating_mul(b as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn accumulate_dummy(b: u32, ) -> Weight { + fn accumulate_dummy(b: u32) -> Weight { (51_353_000 as Weight) .saturating_add((14_000 as Weight).saturating_mul(b as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn sort_vector(x: u32, ) -> Weight { + fn sort_vector(x: u32) -> Weight { (2_569_000 as Weight) // Standard Error: 0 .saturating_add((4_000 as Weight).saturating_mul(x as Weight)) @@ -81,18 +83,18 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn set_dummy_benchmark(b: u32, ) -> Weight { + fn set_dummy_benchmark(b: u32) -> Weight { (5_834_000 as Weight) .saturating_add((24_000 as Weight).saturating_mul(b as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn accumulate_dummy(b: u32, ) -> Weight { + fn accumulate_dummy(b: u32) -> Weight { (51_353_000 as Weight) .saturating_add((14_000 as Weight).saturating_mul(b as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn sort_vector(x: u32, ) -> Weight { + fn sort_vector(x: u32) -> Weight { (2_569_000 as Weight) // Standard Error: 0 .saturating_add((4_000 as Weight).saturating_mul(x as Weight)) diff --git 
a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index a11a5172dc95e..3e2cdd241f6df 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -116,25 +116,26 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::{prelude::*, marker::PhantomData}; +use codec::{Codec, Encode}; use frame_support::{ - weights::{GetDispatchInfo, DispatchInfo, DispatchClass}, + dispatch::PostDispatchInfo, traits::{ - OnInitialize, OnIdle, OnFinalize, OnRuntimeUpgrade, OffchainWorker, ExecuteBlock, - EnsureInherentsAreFirst, + EnsureInherentsAreFirst, ExecuteBlock, OffchainWorker, OnFinalize, OnIdle, OnInitialize, + OnRuntimeUpgrade, }, - dispatch::PostDispatchInfo, + weights::{DispatchClass, DispatchInfo, GetDispatchInfo}, }; +use frame_system::DigestOf; use sp_runtime::{ - generic::Digest, ApplyExtrinsicResult, + generic::Digest, traits::{ - self, Header, Zero, One, Checkable, Applyable, CheckEqual, ValidateUnsigned, NumberFor, - Dispatchable, Saturating, + self, Applyable, CheckEqual, Checkable, Dispatchable, Header, NumberFor, One, Saturating, + ValidateUnsigned, Zero, }, - transaction_validity::{TransactionValidity, TransactionSource}, + transaction_validity::{TransactionSource, TransactionValidity}, + ApplyExtrinsicResult, }; -use codec::{Codec, Encode}; -use frame_system::DigestOf; +use sp_std::{marker::PhantomData, prelude::*}; pub type CheckedOf = >::Checked; pub type CallOf = as Applyable>::Call; @@ -152,31 +153,29 @@ pub type OriginOf = as Dispatchable>::Origin; /// already called by `AllPallets`. It will be called before all modules will /// be called. 
pub struct Executive( - PhantomData<(System, Block, Context, UnsignedValidator, AllPallets, OnRuntimeUpgrade)> + PhantomData<(System, Block, Context, UnsignedValidator, AllPallets, OnRuntimeUpgrade)>, ); impl< - System: frame_system::Config + EnsureInherentsAreFirst, - Block: traits::Block, - Context: Default, - UnsignedValidator, - AllPallets: - OnRuntimeUpgrade + - OnInitialize + - OnIdle + - OnFinalize + - OffchainWorker, - COnRuntimeUpgrade: OnRuntimeUpgrade, -> ExecuteBlock for - Executive + System: frame_system::Config + EnsureInherentsAreFirst, + Block: traits::Block

, + Context: Default, + UnsignedValidator, + AllPallets: OnRuntimeUpgrade + + OnInitialize + + OnIdle + + OnFinalize + + OffchainWorker, + COnRuntimeUpgrade: OnRuntimeUpgrade, + > ExecuteBlock + for Executive where Block::Extrinsic: Checkable + Codec, - CheckedOf: - Applyable + - GetDispatchInfo, - CallOf: Dispatchable, + CheckedOf: Applyable + GetDispatchInfo, + CallOf: + Dispatchable, OriginOf: From>, - UnsignedValidator: ValidateUnsigned>, + UnsignedValidator: ValidateUnsigned>, { fn execute_block(block: Block) { Executive::< @@ -249,20 +248,16 @@ where sp_io::init_tracing(); sp_tracing::enter_span!(sp_tracing::Level::TRACE, "init_block"); let digests = Self::extract_pre_digest(&header); - Self::initialize_block_impl( - header.number(), - header.parent_hash(), - &digests - ); + Self::initialize_block_impl(header.number(), header.parent_hash(), &digests); } fn extract_pre_digest(header: &System::Header) -> DigestOf { let mut digest = >::default(); - header.digest().logs() - .iter() - .for_each(|d| if d.as_pre_runtime().is_some() { + header.digest().logs().iter().for_each(|d| { + if d.as_pre_runtime().is_some() { digest.push(d.clone()) - }); + } + }); digest } @@ -281,16 +276,19 @@ where digest, frame_system::InitKind::Full, ); + weight = weight.saturating_add( as OnInitialize< + System::BlockNumber, + >>::on_initialize(*block_number)); weight = weight.saturating_add( - as OnInitialize>::on_initialize(*block_number) + >::on_initialize(*block_number), ); weight = weight.saturating_add( - >::on_initialize(*block_number) + >::get().base_block, ); - weight = weight.saturating_add( - >::get().base_block + >::register_extra_weight_unchecked( + weight, + DispatchClass::Mandatory, ); - >::register_extra_weight_unchecked(weight, DispatchClass::Mandatory); frame_system::Pallet::::note_finished_initialize(); } @@ -317,8 +315,9 @@ where // Check that `parent_hash` is correct. 
let n = header.number().clone(); assert!( - n > System::BlockNumber::zero() - && >::block_hash(n - System::BlockNumber::one()) == *header.parent_hash(), + n > System::BlockNumber::zero() && + >::block_hash(n - System::BlockNumber::one()) == + *header.parent_hash(), "Parent hash should be valid.", ); @@ -358,9 +357,11 @@ where extrinsics: Vec, block_number: NumberFor, ) { - extrinsics.into_iter().for_each(|e| if let Err(e) = Self::apply_extrinsic(e) { - let err: &'static str = e.into(); - panic!("{}", err) + extrinsics.into_iter().for_each(|e| { + if let Err(e) = Self::apply_extrinsic(e) { + let err: &'static str = e.into(); + panic!("{}", err) + } }); // post-extrinsics book-keeping @@ -373,7 +374,7 @@ where /// except state-root. pub fn finalize_block() -> System::Header { sp_io::init_tracing(); - sp_tracing::enter_span!( sp_tracing::Level::TRACE, "finalize_block" ); + sp_tracing::enter_span!(sp_tracing::Level::TRACE, "finalize_block"); >::note_finished_extrinsics(); let block_number = >::block_number(); @@ -383,26 +384,31 @@ where } fn idle_and_finalize_hook(block_number: NumberFor) { - let weight = >::block_weight(); - let max_weight = >::get().max_block; + let weight = >::block_weight(); + let max_weight = >::get().max_block; let mut remaining_weight = max_weight.saturating_sub(weight.total()); if remaining_weight > 0 { let mut used_weight = as OnIdle>::on_idle( block_number, - remaining_weight + remaining_weight, ); remaining_weight = remaining_weight.saturating_sub(used_weight); used_weight = >::on_idle( block_number, - remaining_weight + remaining_weight, ) .saturating_add(used_weight); - >::register_extra_weight_unchecked(used_weight, DispatchClass::Mandatory); + >::register_extra_weight_unchecked( + used_weight, + DispatchClass::Mandatory, + ); } - as OnFinalize>::on_finalize(block_number); + as OnFinalize>::on_finalize( + block_number, + ); >::on_finalize(block_number); } @@ -423,10 +429,8 @@ where encoded_len: usize, to_note: Vec, ) -> 
ApplyExtrinsicResult { - sp_tracing::enter_span!( - sp_tracing::info_span!("apply_extrinsic", - ext=?sp_core::hexdisplay::HexDisplay::from(&uxt.encode())) - ); + sp_tracing::enter_span!(sp_tracing::info_span!("apply_extrinsic", + ext=?sp_core::hexdisplay::HexDisplay::from(&uxt.encode()))); // Verify that the signature is good. let xt = uxt.check(&Default::default())?; @@ -493,17 +497,17 @@ where frame_system::InitKind::Inspection, ); - enter_span!{ sp_tracing::Level::TRACE, "validate_transaction" }; + enter_span! { sp_tracing::Level::TRACE, "validate_transaction" }; - let encoded_len = within_span!{ sp_tracing::Level::TRACE, "using_encoded"; + let encoded_len = within_span! { sp_tracing::Level::TRACE, "using_encoded"; uxt.using_encoded(|d| d.len()) }; - let xt = within_span!{ sp_tracing::Level::TRACE, "check"; + let xt = within_span! { sp_tracing::Level::TRACE, "check"; uxt.check(&Default::default()) }?; - let dispatch_info = within_span!{ sp_tracing::Level::TRACE, "dispatch_info"; + let dispatch_info = within_span! 
{ sp_tracing::Level::TRACE, "dispatch_info"; xt.get_dispatch_info() }; @@ -537,35 +541,34 @@ where } } - #[cfg(test)] mod tests { use super::*; - use sp_core::H256; - use sp_runtime::{ - generic::{Era, DigestItem}, DispatchError, testing::{Digest, Header, Block}, - traits::{Header as HeaderT, BlakeTwo256, IdentityLookup, Block as BlockT}, - transaction_validity::{ - InvalidTransaction, ValidTransaction, TransactionValidityError, UnknownTransaction - }, - }; use frame_support::{ assert_err, parameter_types, - weights::{Weight, RuntimeDbWeight, IdentityFee, WeightToFeePolynomial}, traits::{Currency, LockIdentifier, LockableCurrency, WithdrawReasons}, + weights::{IdentityFee, RuntimeDbWeight, Weight, WeightToFeePolynomial}, }; - use frame_system::{ - Call as SystemCall, ChainContext, LastRuntimeUpgradeInfo, - }; - use pallet_transaction_payment::CurrencyAdapter; - use pallet_balances::Call as BalancesCall; + use frame_system::{Call as SystemCall, ChainContext, LastRuntimeUpgradeInfo}; use hex_literal::hex; + use pallet_balances::Call as BalancesCall; + use pallet_transaction_payment::CurrencyAdapter; + use sp_core::H256; + use sp_runtime::{ + generic::{DigestItem, Era}, + testing::{Block, Digest, Header}, + traits::{BlakeTwo256, Block as BlockT, Header as HeaderT, IdentityLookup}, + transaction_validity::{ + InvalidTransaction, TransactionValidityError, UnknownTransaction, ValidTransaction, + }, + DispatchError, + }; const TEST_KEY: &[u8] = &*b":test:key:"; mod custom { - use frame_support::weights::{Weight, DispatchClass}; + use frame_support::weights::{DispatchClass, Weight}; use sp_runtime::transaction_validity::{ - UnknownTransaction, TransactionSource, TransactionValidity, TransactionValidityError, + TransactionSource, TransactionValidity, TransactionValidityError, UnknownTransaction, }; pub trait Config: frame_system::Config {} @@ -658,13 +661,10 @@ mod tests { Call::allowed_unsigned(..) 
=> Ok(Default::default()), _ => UnknownTransaction::NoUnsignedValidator.into(), } - } // Inherent call is accepted for being dispatched - fn pre_dispatch( - call: &Self::Call, - ) -> Result<(), TransactionValidityError> { + fn pre_dispatch(call: &Self::Call) -> Result<(), TransactionValidityError> { match call { Call::allowed_unsigned(..) => Ok(()), Call::inherent_call(..) => Ok(()), @@ -793,7 +793,7 @@ mod tests { ChainContext, Runtime, AllPallets, - CustomOnRuntimeUpgrade + CustomOnRuntimeUpgrade, >; fn extra(nonce: u64, fee: Balance) -> SignedExtra { @@ -801,7 +801,7 @@ mod tests { frame_system::CheckEra::from(Era::Immortal), frame_system::CheckNonce::from(nonce), frame_system::CheckWeight::new(), - pallet_transaction_payment::ChargeTransactionPayment::from(fee) + pallet_transaction_payment::ChargeTransactionPayment::from(fee), ) } @@ -812,14 +812,16 @@ mod tests { #[test] fn balance_transfer_dispatch_works() { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig:: { - balances: vec![(1, 211)], - }.assimilate_storage(&mut t).unwrap(); + pallet_balances::GenesisConfig:: { balances: vec![(1, 211)] } + .assimilate_storage(&mut t) + .unwrap(); let xt = TestXt::new(Call::Balances(BalancesCall::transfer(2, 69)), sign_extra(1, 0, 0)); let weight = xt.get_dispatch_info().weight + - ::BlockWeights::get().get(DispatchClass::Normal).base_extrinsic; - let fee: Balance - = ::WeightToFee::calc(&weight); + ::BlockWeights::get() + .get(DispatchClass::Normal) + .base_extrinsic; + let fee: Balance = + ::WeightToFee::calc(&weight); let mut t = sp_io::TestExternalities::new(t); t.execute_with(|| { Executive::initialize_block(&Header::new( @@ -838,9 +840,9 @@ mod tests { fn new_test_ext(balance_factor: Balance) -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig:: { - balances: vec![(1, 111 * balance_factor)], - 
}.assimilate_storage(&mut t).unwrap(); + pallet_balances::GenesisConfig:: { balances: vec![(1, 111 * balance_factor)] } + .assimilate_storage(&mut t) + .unwrap(); t.into() } @@ -851,9 +853,15 @@ mod tests { header: Header { parent_hash: [69u8; 32].into(), number: 1, - state_root: hex!("1039e1a4bd0cf5deefe65f313577e70169c41c7773d6acf31ca8d671397559f5").into(), - extrinsics_root: hex!("03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314").into(), - digest: Digest { logs: vec![], }, + state_root: hex!( + "1039e1a4bd0cf5deefe65f313577e70169c41c7773d6acf31ca8d671397559f5" + ) + .into(), + extrinsics_root: hex!( + "03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314" + ) + .into(), + digest: Digest { logs: vec![] }, }, extrinsics: vec![], }); @@ -869,8 +877,11 @@ mod tests { parent_hash: [69u8; 32].into(), number: 1, state_root: [0u8; 32].into(), - extrinsics_root: hex!("03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314").into(), - digest: Digest { logs: vec![], }, + extrinsics_root: hex!( + "03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314" + ) + .into(), + digest: Digest { logs: vec![] }, }, extrinsics: vec![], }); @@ -885,9 +896,12 @@ mod tests { header: Header { parent_hash: [69u8; 32].into(), number: 1, - state_root: hex!("49cd58a254ccf6abc4a023d9a22dcfc421e385527a250faec69f8ad0d8ed3e48").into(), + state_root: hex!( + "49cd58a254ccf6abc4a023d9a22dcfc421e385527a250faec69f8ad0d8ed3e48" + ) + .into(), extrinsics_root: [0u8; 32].into(), - digest: Digest { logs: vec![], }, + digest: Digest { logs: vec![] }, }, extrinsics: vec![], }); @@ -907,7 +921,8 @@ mod tests { [69u8; 32].into(), Digest::default(), )); - assert_err!(Executive::apply_extrinsic(xt), + assert_err!( + Executive::apply_extrinsic(xt), TransactionValidityError::Invalid(InvalidTransaction::Future) ); assert_eq!(>::extrinsic_index(), Some(0)); @@ -924,8 +939,7 @@ mod tests { // on_initialize weight + base block execution weight let block_weights = 
::BlockWeights::get(); let base_block_weight = 175 + block_weights.base_block; - let limit = block_weights.get(DispatchClass::Normal).max_total.unwrap() - - base_block_weight; + let limit = block_weights.get(DispatchClass::Normal).max_total.unwrap() - base_block_weight; let num_to_exhaust_block = limit / (encoded_len + 5); t.execute_with(|| { Executive::initialize_block(&Header::new( @@ -940,7 +954,8 @@ mod tests { for nonce in 0..=num_to_exhaust_block { let xt = TestXt::new( - Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, nonce.into(), 0), + Call::Balances(BalancesCall::transfer(33, 0)), + sign_extra(1, nonce.into(), 0), ); let res = Executive::apply_extrinsic(xt); if nonce != num_to_exhaust_block { @@ -950,7 +965,10 @@ mod tests { //--------------------- on_initialize + block_execution + extrinsic_base weight (encoded_len + 5) * (nonce + 1) + base_block_weight, ); - assert_eq!(>::extrinsic_index(), Some(nonce as u32 + 1)); + assert_eq!( + >::extrinsic_index(), + Some(nonce as u32 + 1) + ); } else { assert_eq!(res, Err(InvalidTransaction::ExhaustsResources.into())); } @@ -967,7 +985,8 @@ mod tests { let mut t = new_test_ext(1); t.execute_with(|| { // Block execution weight + on_initialize weight from custom module - let base_block_weight = 175 + ::BlockWeights::get().base_block; + let base_block_weight = + 175 + ::BlockWeights::get().base_block; Executive::initialize_block(&Header::new( 1, @@ -985,8 +1004,10 @@ mod tests { assert!(Executive::apply_extrinsic(x2.clone()).unwrap().is_ok()); // default weight for `TestXt` == encoded length. 
- let extrinsic_weight = len as Weight + ::BlockWeights - ::get().get(DispatchClass::Normal).base_extrinsic; + let extrinsic_weight = len as Weight + + ::BlockWeights::get() + .get(DispatchClass::Normal) + .base_extrinsic; assert_eq!( >::block_weight().total(), base_block_weight + 3 * extrinsic_weight, @@ -1051,20 +1072,14 @@ mod tests { let mut t = new_test_ext(1); t.execute_with(|| { as LockableCurrency>::set_lock( - id, - &1, - 110, - lock, + id, &1, 110, lock, ); - let xt = TestXt::new( - Call::System(SystemCall::remark(vec![1u8])), - sign_extra(1, 0, 0), - ); - let weight = xt.get_dispatch_info().weight + - ::BlockWeights - ::get() - .get(DispatchClass::Normal) - .base_extrinsic; + let xt = + TestXt::new(Call::System(SystemCall::remark(vec![1u8])), sign_extra(1, 0, 0)); + let weight = xt.get_dispatch_info().weight + + ::BlockWeights::get() + .get(DispatchClass::Normal) + .base_extrinsic; let fee: Balance = ::WeightToFee::calc(&weight); Executive::initialize_block(&Header::new( @@ -1096,13 +1111,12 @@ mod tests { #[test] fn block_hooks_weight_is_stored() { new_test_ext(1).execute_with(|| { - Executive::initialize_block(&Header::new_from_number(1)); Executive::finalize_block(); // NOTE: might need updates over time if new weights are introduced. // For now it only accounts for the base block execution weight and // the `on_initialize` weight defined in the custom test module. 
- assert_eq!(>::block_weight().total(), 175 + 175 + 10); + assert_eq!(>::block_weight().total(), 175 + 175 + 10); }) } @@ -1114,9 +1128,9 @@ mod tests { assert!(frame_system::LastRuntimeUpgrade::::exists()); assert!(!Executive::runtime_upgraded()); - RUNTIME_VERSION.with(|v| *v.borrow_mut() = sp_version::RuntimeVersion { - spec_version: 1, - ..Default::default() + RUNTIME_VERSION.with(|v| { + *v.borrow_mut() = + sp_version::RuntimeVersion { spec_version: 1, ..Default::default() } }); assert!(Executive::runtime_upgraded()); assert_eq!( @@ -1124,10 +1138,12 @@ mod tests { frame_system::LastRuntimeUpgrade::::get(), ); - RUNTIME_VERSION.with(|v| *v.borrow_mut() = sp_version::RuntimeVersion { - spec_version: 1, - spec_name: "test".into(), - ..Default::default() + RUNTIME_VERSION.with(|v| { + *v.borrow_mut() = sp_version::RuntimeVersion { + spec_version: 1, + spec_name: "test".into(), + ..Default::default() + } }); assert!(Executive::runtime_upgraded()); assert_eq!( @@ -1135,11 +1151,13 @@ mod tests { frame_system::LastRuntimeUpgrade::::get(), ); - RUNTIME_VERSION.with(|v| *v.borrow_mut() = sp_version::RuntimeVersion { - spec_version: 1, - spec_name: "test".into(), - impl_version: 2, - ..Default::default() + RUNTIME_VERSION.with(|v| { + *v.borrow_mut() = sp_version::RuntimeVersion { + spec_version: 1, + spec_name: "test".into(), + impl_version: 2, + ..Default::default() + } }); assert!(!Executive::runtime_upgraded()); @@ -1182,9 +1200,9 @@ mod tests { fn custom_runtime_upgrade_is_called_before_modules() { new_test_ext(1).execute_with(|| { // Make sure `on_runtime_upgrade` is called. 
- RUNTIME_VERSION.with(|v| *v.borrow_mut() = sp_version::RuntimeVersion { - spec_version: 1, - ..Default::default() + RUNTIME_VERSION.with(|v| { + *v.borrow_mut() = + sp_version::RuntimeVersion { spec_version: 1, ..Default::default() } }); Executive::initialize_block(&Header::new( @@ -1208,9 +1226,9 @@ mod tests { let header = new_test_ext(1).execute_with(|| { // Make sure `on_runtime_upgrade` is called. - RUNTIME_VERSION.with(|v| *v.borrow_mut() = sp_version::RuntimeVersion { - spec_version: 1, - ..Default::default() + RUNTIME_VERSION.with(|v| { + *v.borrow_mut() = + sp_version::RuntimeVersion { spec_version: 1, ..Default::default() } }); // Let's build some fake block. @@ -1228,16 +1246,15 @@ mod tests { }); // Reset to get the correct new genesis below. - RUNTIME_VERSION.with(|v| *v.borrow_mut() = sp_version::RuntimeVersion { - spec_version: 0, - ..Default::default() + RUNTIME_VERSION.with(|v| { + *v.borrow_mut() = sp_version::RuntimeVersion { spec_version: 0, ..Default::default() } }); new_test_ext(1).execute_with(|| { // Make sure `on_runtime_upgrade` is called. 
- RUNTIME_VERSION.with(|v| *v.borrow_mut() = sp_version::RuntimeVersion { - spec_version: 1, - ..Default::default() + RUNTIME_VERSION.with(|v| { + *v.borrow_mut() = + sp_version::RuntimeVersion { spec_version: 1, ..Default::default() } }); >>::execute_block(Block::new(header, vec![xt])); @@ -1251,9 +1268,9 @@ mod tests { fn all_weights_are_recorded_correctly() { new_test_ext(1).execute_with(|| { // Make sure `on_runtime_upgrade` is called for maximum complexity - RUNTIME_VERSION.with(|v| *v.borrow_mut() = sp_version::RuntimeVersion { - spec_version: 1, - ..Default::default() + RUNTIME_VERSION.with(|v| { + *v.borrow_mut() = + sp_version::RuntimeVersion { spec_version: 1, ..Default::default() } }); let block_number = 1; @@ -1270,19 +1287,21 @@ mod tests { let frame_system_upgrade_weight = frame_system::Pallet::::on_runtime_upgrade(); let custom_runtime_upgrade_weight = CustomOnRuntimeUpgrade::on_runtime_upgrade(); let runtime_upgrade_weight = ::on_runtime_upgrade(); - let frame_system_on_initialize_weight = frame_system::Pallet::::on_initialize(block_number); - let on_initialize_weight = >::on_initialize(block_number); - let base_block_weight = ::BlockWeights::get().base_block; + let frame_system_on_initialize_weight = + frame_system::Pallet::::on_initialize(block_number); + let on_initialize_weight = + >::on_initialize(block_number); + let base_block_weight = + ::BlockWeights::get().base_block; // Weights are recorded correctly assert_eq!( frame_system::Pallet::::block_weight().total(), frame_system_upgrade_weight + - custom_runtime_upgrade_weight + - runtime_upgrade_weight + - frame_system_on_initialize_weight + - on_initialize_weight + - base_block_weight, + custom_runtime_upgrade_weight + + runtime_upgrade_weight + + frame_system_on_initialize_weight + + on_initialize_weight + base_block_weight, ); }); } @@ -1294,13 +1313,8 @@ mod tests { let mut digest = Digest::default(); digest.push(DigestItem::Seal([1, 2, 3, 4], vec![5, 6, 7, 8])); - let header = Header::new( 
- 1, - H256::default(), - H256::default(), - parent_hash, - digest.clone(), - ); + let header = + Header::new(1, H256::default(), H256::default(), parent_hash, digest.clone()); Executive::offchain_worker(&header); diff --git a/frame/gilt/src/benchmarking.rs b/frame/gilt/src/benchmarking.rs index 2ee7bffd9410e..73e1c9a901cb9 100644 --- a/frame/gilt/src/benchmarking.rs +++ b/frame/gilt/src/benchmarking.rs @@ -19,17 +19,21 @@ #![cfg(feature = "runtime-benchmarks")] -use sp_std::prelude::*; use super::*; -use sp_runtime::traits::{Zero, Bounded}; -use sp_arithmetic::Perquintill; +use frame_benchmarking::{benchmarks, impl_benchmark_test_suite, whitelisted_caller}; +use frame_support::{ + dispatch::UnfilteredDispatchable, + traits::{Currency, EnsureOrigin, Get}, +}; use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, whitelisted_caller, impl_benchmark_test_suite}; -use frame_support::{traits::{Currency, Get, EnsureOrigin}, dispatch::UnfilteredDispatchable}; +use sp_arithmetic::Perquintill; +use sp_runtime::traits::{Bounded, Zero}; +use sp_std::prelude::*; use crate::Pallet as Gilt; -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; benchmarks! { place_bid { @@ -129,8 +133,4 @@ benchmarks! 
{ }: { Gilt::::pursue_target(q) } } -impl_benchmark_test_suite!( - Gilt, - crate::mock::new_test_ext(), - crate::mock::Test, -); +impl_benchmark_test_suite!(Gilt, crate::mock::new_test_ext(), crate::mock::Test,); diff --git a/frame/gilt/src/lib.rs b/frame/gilt/src/lib.rs index dbb93180e8c01..93f3d5f530618 100644 --- a/frame/gilt/src/lib.rs +++ b/frame/gilt/src/lib.rs @@ -67,29 +67,34 @@ pub use pallet::*; +mod benchmarking; #[cfg(test)] mod mock; #[cfg(test)] mod tests; -mod benchmarking; pub mod weights; #[frame_support::pallet] pub mod pallet { + pub use crate::weights::WeightInfo; + use frame_support::{ + pallet_prelude::*, + traits::{Currency, OnUnbalanced, ReservableCurrency}, + }; + use frame_system::pallet_prelude::*; use scale_info::TypeInfo; + use sp_arithmetic::{PerThing, Perquintill}; + use sp_runtime::traits::{Saturating, Zero}; use sp_std::prelude::*; - use sp_arithmetic::{Perquintill, PerThing}; - use sp_runtime::traits::{Zero, Saturating}; - use frame_support::traits::{Currency, OnUnbalanced, ReservableCurrency}; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - pub use crate::weights::WeightInfo; - type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; - type PositiveImbalanceOf = - <::Currency as Currency<::AccountId>>::PositiveImbalance; - type NegativeImbalanceOf = - <::Currency as Currency<::AccountId>>::NegativeImbalance; + type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; + type PositiveImbalanceOf = <::Currency as Currency< + ::AccountId, + >>::PositiveImbalance; + type NegativeImbalanceOf = <::Currency as Currency< + ::AccountId, + >>::NegativeImbalance; #[pallet::config] pub trait Config: frame_system::Config { @@ -97,14 +102,18 @@ pub mod pallet { type Event: From> + IsType<::Event>; /// Currency type that this works on. 
- type Currency: ReservableCurrency; + type Currency: ReservableCurrency; /// Just the `Currency::Balance` type; we have this item to allow us to constrain it to /// `From`. - type CurrencyBalance: - sp_runtime::traits::AtLeast32BitUnsigned + codec::FullCodec + Copy - + MaybeSerializeDeserialize + sp_std::fmt::Debug + Default + From - + TypeInfo; + type CurrencyBalance: sp_runtime::traits::AtLeast32BitUnsigned + + codec::FullCodec + + Copy + + MaybeSerializeDeserialize + + sp_std::fmt::Debug + + Default + + From + + TypeInfo; /// Origin required for setting the target proportion to be under gilt. type AdminOrigin: EnsureOrigin; @@ -229,13 +238,8 @@ pub mod pallet { /// The queues of bids ready to become gilts. Indexed by duration (in `Period`s). #[pallet::storage] - pub type Queues = StorageMap< - _, - Blake2_128Concat, - u32, - Vec, T::AccountId>>, - ValueQuery, - >; + pub type Queues = + StorageMap<_, Blake2_128Concat, u32, Vec, T::AccountId>>, ValueQuery>; /// Information relating to the gilts currently active. #[pallet::storage] @@ -247,7 +251,11 @@ pub mod pallet { _, Blake2_128Concat, ActiveIndex, - ActiveGilt, ::AccountId, ::BlockNumber>, + ActiveGilt< + BalanceOf, + ::AccountId, + ::BlockNumber, + >, OptionQuery, >; @@ -257,7 +265,7 @@ pub mod pallet { #[pallet::genesis_build] impl GenesisBuild for GenesisConfig { - fn build(&self) { + fn build(&self) { QueueTotals::::put(vec![(0, BalanceOf::::zero()); T::QueueCount::get() as usize]); } } @@ -312,7 +320,7 @@ pub mod pallet { } #[pallet::call] - impl Pallet { + impl Pallet { /// Place a bid for a gilt to be issued. /// /// Origin must be Signed, and account must have at least `amount` in free balance. @@ -336,35 +344,35 @@ pub mod pallet { ensure!(amount >= T::MinFreeze::get(), Error::::AmountTooSmall); let queue_count = T::QueueCount::get() as usize; - let queue_index = duration.checked_sub(1) - .ok_or(Error::::DurationTooSmall)? 
as usize; + let queue_index = duration.checked_sub(1).ok_or(Error::::DurationTooSmall)? as usize; ensure!(queue_index < queue_count, Error::::DurationTooBig); - let net = Queues::::try_mutate(duration, |q| - -> Result<(u32, BalanceOf::), DispatchError> - { - let queue_full = q.len() == T::MaxQueueLen::get() as usize; - ensure!(!queue_full || q[0].amount < amount, Error::::BidTooLow); - T::Currency::reserve(&who, amount)?; - - // queue is - let mut bid = GiltBid { amount, who: who.clone() }; - let net = if queue_full { - sp_std::mem::swap(&mut q[0], &mut bid); - T::Currency::unreserve(&bid.who, bid.amount); - (0, amount - bid.amount) - } else { - q.insert(0, bid); - (1, amount) - }; - - let sorted_item_count = q.len().saturating_sub(T::FifoQueueLen::get() as usize); - if sorted_item_count > 1 { - q[0..sorted_item_count].sort_by_key(|x| x.amount); - } + let net = Queues::::try_mutate( + duration, + |q| -> Result<(u32, BalanceOf), DispatchError> { + let queue_full = q.len() == T::MaxQueueLen::get() as usize; + ensure!(!queue_full || q[0].amount < amount, Error::::BidTooLow); + T::Currency::reserve(&who, amount)?; + + // queue is + let mut bid = GiltBid { amount, who: who.clone() }; + let net = if queue_full { + sp_std::mem::swap(&mut q[0], &mut bid); + T::Currency::unreserve(&bid.who, bid.amount); + (0, amount - bid.amount) + } else { + q.insert(0, bid); + (1, amount) + }; + + let sorted_item_count = q.len().saturating_sub(T::FifoQueueLen::get() as usize); + if sorted_item_count > 1 { + q[0..sorted_item_count].sort_by_key(|x| x.amount); + } - Ok(net) - })?; + Ok(net) + }, + )?; QueueTotals::::mutate(|qs| { qs.resize(queue_count, (0, Zero::zero())); qs[queue_index].0 += net.0; @@ -391,8 +399,7 @@ pub mod pallet { let who = ensure_signed(origin)?; let queue_count = T::QueueCount::get() as usize; - let queue_index = duration.checked_sub(1) - .ok_or(Error::::DurationTooSmall)? as usize; + let queue_index = duration.checked_sub(1).ok_or(Error::::DurationTooSmall)? 
as usize; ensure!(queue_index < queue_count, Error::::DurationTooBig); let bid = GiltBid { amount, who }; @@ -454,11 +461,12 @@ pub mod pallet { Active::::remove(index); // Multiply the proportion it is by the total issued. - let total_issuance = T::Currency::total_issuance().saturating_sub(T::IgnoredIssuance::get()); + let total_issuance = + T::Currency::total_issuance().saturating_sub(T::IgnoredIssuance::get()); ActiveTotal::::mutate(|totals| { let nongilt_issuance = total_issuance.saturating_sub(totals.frozen); - let effective_issuance = totals.proportion.left_from_one() - .saturating_reciprocal_mul(nongilt_issuance); + let effective_issuance = + totals.proportion.left_from_one().saturating_reciprocal_mul(nongilt_issuance); let gilt_value = gilt.proportion * effective_issuance; totals.frozen = totals.frozen.saturating_sub(gilt.amount); @@ -519,14 +527,9 @@ pub mod pallet { let total_issuance = T::Currency::total_issuance(); let non_gilt = total_issuance.saturating_sub(totals.frozen); - let effective = totals.proportion.left_from_one() - .saturating_reciprocal_mul(non_gilt); + let effective = totals.proportion.left_from_one().saturating_reciprocal_mul(non_gilt); - IssuanceInfo { - reserved: totals.frozen, - non_gilt, - effective, - } + IssuanceInfo { reserved: totals.frozen, non_gilt, effective } } /// Attempt to enlarge our gilt-set from bids in order to satisfy our desired target amount @@ -536,16 +539,17 @@ pub mod pallet { if totals.proportion < totals.target { let missing = totals.target.saturating_sub(totals.proportion); - let total_issuance = T::Currency::total_issuance().saturating_sub(T::IgnoredIssuance::get()); + let total_issuance = + T::Currency::total_issuance().saturating_sub(T::IgnoredIssuance::get()); let nongilt_issuance = total_issuance.saturating_sub(totals.frozen); - let effective_issuance = totals.proportion.left_from_one() - .saturating_reciprocal_mul(nongilt_issuance); + let effective_issuance = + 
totals.proportion.left_from_one().saturating_reciprocal_mul(nongilt_issuance); let intake = missing * effective_issuance; let (bids_taken, queues_hit) = Self::enlarge(intake, max_bids); let first_from_each_queue = T::WeightInfo::pursue_target_per_queue(queues_hit); let rest_from_each_queue = T::WeightInfo::pursue_target_per_item(bids_taken) - .saturating_sub(T::WeightInfo::pursue_target_per_item(queues_hit)); + .saturating_sub(T::WeightInfo::pursue_target_per_item(queues_hit)); first_from_each_queue + rest_from_each_queue } else { T::WeightInfo::pursue_target_noop() @@ -556,11 +560,9 @@ pub mod pallet { /// from the queue. /// /// Return the number of bids taken and the number of distinct queues taken from. - pub fn enlarge( - amount: BalanceOf, - max_bids: u32, - ) -> (u32, u32) { - let total_issuance = T::Currency::total_issuance().saturating_sub(T::IgnoredIssuance::get()); + pub fn enlarge(amount: BalanceOf, max_bids: u32) -> (u32, u32) { + let total_issuance = + T::Currency::total_issuance().saturating_sub(T::IgnoredIssuance::get()); let mut remaining = amount; let mut bids_taken = 0; let mut queues_hit = 0; @@ -573,7 +575,8 @@ pub mod pallet { continue } let queue_index = duration as usize - 1; - let expiry = now.saturating_add(T::Period::get().saturating_mul(duration.into())); + let expiry = + now.saturating_add(T::Period::get().saturating_mul(duration.into())); Queues::::mutate(duration, |q| { while let Some(mut bid) = q.pop() { if remaining < bid.amount { @@ -590,7 +593,9 @@ pub mod pallet { // Now to activate the bid... 
let nongilt_issuance = total_issuance.saturating_sub(totals.frozen); - let effective_issuance = totals.proportion.left_from_one() + let effective_issuance = totals + .proportion + .left_from_one() .saturating_reciprocal_mul(nongilt_issuance); let n = amount; let d = effective_issuance; @@ -608,7 +613,7 @@ pub mod pallet { bids_taken += 1; if remaining.is_zero() || bids_taken == max_bids { - break; + break } } queues_hit += 1; diff --git a/frame/gilt/src/mock.rs b/frame/gilt/src/mock.rs index aeff70610d4bb..91606f1852313 100644 --- a/frame/gilt/src/mock.rs +++ b/frame/gilt/src/mock.rs @@ -20,11 +20,14 @@ use crate as pallet_gilt; use frame_support::{ - parameter_types, ord_parameter_types, - traits::{OnInitialize, OnFinalize, GenesisBuild, Currency}, + ord_parameter_types, parameter_types, + traits::{Currency, GenesisBuild, OnFinalize, OnInitialize}, }; use sp_core::H256; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header}; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, +}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -125,9 +128,11 @@ impl pallet_gilt::Config for Test { // our desired mockup. pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig::{ + pallet_balances::GenesisConfig:: { balances: vec![(1, 100), (2, 100), (3, 100), (4, 100)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); GenesisBuild::::assimilate_storage(&crate::GenesisConfig, &mut t).unwrap(); t.into() } diff --git a/frame/gilt/src/tests.rs b/frame/gilt/src/tests.rs index 2f328ba904bbe..80315141e2325 100644 --- a/frame/gilt/src/tests.rs +++ b/frame/gilt/src/tests.rs @@ -18,10 +18,10 @@ //! Tests for Gilt pallet. 
use super::*; -use crate::{Error, mock::*}; -use frame_support::{assert_ok, assert_noop, dispatch::DispatchError, traits::Currency}; -use sp_arithmetic::Perquintill; +use crate::{mock::*, Error}; +use frame_support::{assert_noop, assert_ok, dispatch::DispatchError, traits::Currency}; use pallet_balances::Error as BalancesError; +use sp_arithmetic::Perquintill; #[test] fn basic_setup_works() { @@ -31,12 +31,15 @@ fn basic_setup_works() { for q in 0..3 { assert!(Queues::::get(q).is_empty()); } - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 0, - proportion: Perquintill::zero(), - index: 0, - target: Perquintill::zero(), - }); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 0, + proportion: Perquintill::zero(), + index: 0, + target: Perquintill::zero(), + } + ); assert_eq!(QueueTotals::::get(), vec![(0, 0); 3]); }); } @@ -49,12 +52,15 @@ fn set_target_works() { assert_noop!(Gilt::set_target(Origin::signed(2), Perquintill::from_percent(50)), e); assert_ok!(Gilt::set_target(Origin::signed(1), Perquintill::from_percent(50))); - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 0, - proportion: Perquintill::zero(), - index: 0, - target: Perquintill::from_percent(50), - }); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 0, + proportion: Perquintill::zero(), + index: 0, + target: Perquintill::from_percent(50), + } + ); }); } @@ -63,7 +69,10 @@ fn place_bid_works() { new_test_ext().execute_with(|| { run_to_block(1); assert_noop!(Gilt::place_bid(Origin::signed(1), 1, 2), Error::::AmountTooSmall); - assert_noop!(Gilt::place_bid(Origin::signed(1), 101, 2), BalancesError::::InsufficientBalance); + assert_noop!( + Gilt::place_bid(Origin::signed(1), 101, 2), + BalancesError::::InsufficientBalance + ); assert_noop!(Gilt::place_bid(Origin::signed(1), 10, 4), Error::::DurationTooBig); assert_ok!(Gilt::place_bid(Origin::signed(1), 10, 2)); assert_eq!(Balances::reserved_balance(1), 10); @@ -86,11 +95,14 @@ fn 
place_bid_queuing_works() { assert_ok!(Gilt::place_bid(Origin::signed(1), 25, 2)); assert_eq!(Balances::reserved_balance(1), 60); assert_noop!(Gilt::place_bid(Origin::signed(1), 10, 2), Error::::BidTooLow); - assert_eq!(Queues::::get(2), vec![ - GiltBid { amount: 15, who: 1 }, - GiltBid { amount: 25, who: 1 }, - GiltBid { amount: 20, who: 1 }, - ]); + assert_eq!( + Queues::::get(2), + vec![ + GiltBid { amount: 15, who: 1 }, + GiltBid { amount: 25, who: 1 }, + GiltBid { amount: 20, who: 1 }, + ] + ); assert_eq!(QueueTotals::::get(), vec![(0, 0), (3, 60), (0, 0)]); }); } @@ -119,17 +131,16 @@ fn multiple_place_bids_works() { assert_eq!(Balances::reserved_balance(1), 40); assert_eq!(Balances::reserved_balance(2), 10); - assert_eq!(Queues::::get(1), vec![ - GiltBid { amount: 10, who: 1 }, - ]); - assert_eq!(Queues::::get(2), vec![ - GiltBid { amount: 10, who: 2 }, - GiltBid { amount: 10, who: 1 }, - GiltBid { amount: 10, who: 1 }, - ]); - assert_eq!(Queues::::get(3), vec![ - GiltBid { amount: 10, who: 1 }, - ]); + assert_eq!(Queues::::get(1), vec![GiltBid { amount: 10, who: 1 },]); + assert_eq!( + Queues::::get(2), + vec![ + GiltBid { amount: 10, who: 2 }, + GiltBid { amount: 10, who: 1 }, + GiltBid { amount: 10, who: 1 }, + ] + ); + assert_eq!(Queues::::get(3), vec![GiltBid { amount: 10, who: 1 },]); assert_eq!(QueueTotals::::get(), vec![(1, 10), (3, 30), (1, 10)]); }); } @@ -144,7 +155,7 @@ fn retract_single_item_queue_works() { assert_eq!(Balances::reserved_balance(1), 10); assert_eq!(Queues::::get(1), vec![]); - assert_eq!(Queues::::get(2), vec![ GiltBid { amount: 10, who: 1 } ]); + assert_eq!(Queues::::get(2), vec![GiltBid { amount: 10, who: 1 }]); assert_eq!(QueueTotals::::get(), vec![(0, 0), (1, 10), (0, 0)]); }); } @@ -161,13 +172,11 @@ fn retract_with_other_and_duplicate_works() { assert_ok!(Gilt::retract_bid(Origin::signed(1), 10, 2)); assert_eq!(Balances::reserved_balance(1), 20); assert_eq!(Balances::reserved_balance(2), 10); - assert_eq!(Queues::::get(1), 
vec![ - GiltBid { amount: 10, who: 1 }, - ]); - assert_eq!(Queues::::get(2), vec![ - GiltBid { amount: 10, who: 2 }, - GiltBid { amount: 10, who: 1 }, - ]); + assert_eq!(Queues::::get(1), vec![GiltBid { amount: 10, who: 1 },]); + assert_eq!( + Queues::::get(2), + vec![GiltBid { amount: 10, who: 2 }, GiltBid { amount: 10, who: 1 },] + ); assert_eq!(QueueTotals::::get(), vec![(1, 10), (2, 20), (0, 0)]); }); } @@ -195,22 +204,23 @@ fn basic_enlarge_works() { // Takes 2/2, then stopped because it reaches its max amount assert_eq!(Balances::reserved_balance(1), 40); assert_eq!(Balances::reserved_balance(2), 40); - assert_eq!(Queues::::get(1), vec![ GiltBid { amount: 40, who: 1 } ]); + assert_eq!(Queues::::get(1), vec![GiltBid { amount: 40, who: 1 }]); assert_eq!(Queues::::get(2), vec![]); assert_eq!(QueueTotals::::get(), vec![(1, 40), (0, 0), (0, 0)]); - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 40, - proportion: Perquintill::from_percent(10), - index: 1, - target: Perquintill::zero(), - }); - assert_eq!(Active::::get(0).unwrap(), ActiveGilt { - proportion: Perquintill::from_percent(10), - amount: 40, - who: 2, - expiry: 7, - }); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 40, + proportion: Perquintill::from_percent(10), + index: 1, + target: Perquintill::zero(), + } + ); + assert_eq!( + Active::::get(0).unwrap(), + ActiveGilt { proportion: Perquintill::from_percent(10), amount: 40, who: 2, expiry: 7 } + ); }); } @@ -225,29 +235,33 @@ fn enlarge_respects_bids_limit() { Gilt::enlarge(100, 2); // Should have taken 4/3 and 2/2, then stopped because it's only allowed 2. 
- assert_eq!(Queues::::get(1), vec![ GiltBid { amount: 40, who: 1 } ]); - assert_eq!(Queues::::get(2), vec![ GiltBid { amount: 40, who: 3 } ]); + assert_eq!(Queues::::get(1), vec![GiltBid { amount: 40, who: 1 }]); + assert_eq!(Queues::::get(2), vec![GiltBid { amount: 40, who: 3 }]); assert_eq!(Queues::::get(3), vec![]); assert_eq!(QueueTotals::::get(), vec![(1, 40), (1, 40), (0, 0)]); - assert_eq!(Active::::get(0).unwrap(), ActiveGilt { - proportion: Perquintill::from_percent(10), - amount: 40, - who: 4, - expiry: 10, - }); - assert_eq!(Active::::get(1).unwrap(), ActiveGilt { - proportion: Perquintill::from_percent(10), - amount: 40, - who: 2, - expiry: 7, - }); - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 80, - proportion: Perquintill::from_percent(20), - index: 2, - target: Perquintill::zero(), - }); + assert_eq!( + Active::::get(0).unwrap(), + ActiveGilt { + proportion: Perquintill::from_percent(10), + amount: 40, + who: 4, + expiry: 10, + } + ); + assert_eq!( + Active::::get(1).unwrap(), + ActiveGilt { proportion: Perquintill::from_percent(10), amount: 40, who: 2, expiry: 7 } + ); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 80, + proportion: Perquintill::from_percent(20), + index: 2, + target: Perquintill::zero(), + } + ); }); } @@ -259,21 +273,22 @@ fn enlarge_respects_amount_limit_and_will_split() { Gilt::enlarge(40, 2); // Takes 2/2, then stopped because it reaches its max amount - assert_eq!(Queues::::get(1), vec![ GiltBid { amount: 40, who: 1 } ]); + assert_eq!(Queues::::get(1), vec![GiltBid { amount: 40, who: 1 }]); assert_eq!(QueueTotals::::get(), vec![(1, 40), (0, 0), (0, 0)]); - assert_eq!(Active::::get(0).unwrap(), ActiveGilt { - proportion: Perquintill::from_percent(10), - amount: 40, - who: 1, - expiry: 4, - }); - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 40, - proportion: Perquintill::from_percent(10), - index: 1, - target: Perquintill::zero(), - }); + assert_eq!( + 
Active::::get(0).unwrap(), + ActiveGilt { proportion: Perquintill::from_percent(10), amount: 40, who: 1, expiry: 4 } + ); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 40, + proportion: Perquintill::from_percent(10), + index: 1, + target: Perquintill::zero(), + } + ); }); } @@ -290,12 +305,15 @@ fn basic_thaw_works() { assert_noop!(Gilt::thaw(Origin::signed(2), 0), Error::::NotOwner); assert_ok!(Gilt::thaw(Origin::signed(1), 0)); - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 0, - proportion: Perquintill::zero(), - index: 1, - target: Perquintill::zero(), - }); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 0, + proportion: Perquintill::zero(), + index: 1, + target: Perquintill::zero(), + } + ); assert_eq!(Active::::get(0), None); assert_eq!(Balances::free_balance(1), 100); assert_eq!(Balances::reserved_balance(1), 0); @@ -426,98 +444,124 @@ fn enlargement_to_target_works() { assert_ok!(Gilt::set_target(Origin::signed(1), Perquintill::from_percent(40))); run_to_block(3); - assert_eq!(Queues::::get(1), vec![ - GiltBid { amount: 40, who: 1 }, - ]); - assert_eq!(Queues::::get(2), vec![ - GiltBid { amount: 40, who: 2 }, - GiltBid { amount: 40, who: 1 }, - ]); - assert_eq!(Queues::::get(3), vec![ - GiltBid { amount: 40, who: 3 }, - GiltBid { amount: 40, who: 2 }, - ]); + assert_eq!(Queues::::get(1), vec![GiltBid { amount: 40, who: 1 },]); + assert_eq!( + Queues::::get(2), + vec![GiltBid { amount: 40, who: 2 }, GiltBid { amount: 40, who: 1 },] + ); + assert_eq!( + Queues::::get(3), + vec![GiltBid { amount: 40, who: 3 }, GiltBid { amount: 40, who: 2 },] + ); assert_eq!(QueueTotals::::get(), vec![(1, 40), (2, 80), (2, 80)]); run_to_block(4); // Two new gilts should have been issued to 2 & 3 for 40 each & duration of 3. 
- assert_eq!(Active::::get(0).unwrap(), ActiveGilt { - proportion: Perquintill::from_percent(10), - amount: 40, - who: 2, - expiry: 13, - }); - assert_eq!(Active::::get(1).unwrap(), ActiveGilt { - proportion: Perquintill::from_percent(10), - amount: 40, - who: 3, - expiry: 13, - - }); - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 80, - proportion: Perquintill::from_percent(20), - index: 2, - target: Perquintill::from_percent(40), - }); + assert_eq!( + Active::::get(0).unwrap(), + ActiveGilt { + proportion: Perquintill::from_percent(10), + amount: 40, + who: 2, + expiry: 13, + } + ); + assert_eq!( + Active::::get(1).unwrap(), + ActiveGilt { + proportion: Perquintill::from_percent(10), + amount: 40, + who: 3, + expiry: 13, + } + ); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 80, + proportion: Perquintill::from_percent(20), + index: 2, + target: Perquintill::from_percent(40), + } + ); run_to_block(5); // No change - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 80, - proportion: Perquintill::from_percent(20), - index: 2, - target: Perquintill::from_percent(40), - }); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 80, + proportion: Perquintill::from_percent(20), + index: 2, + target: Perquintill::from_percent(40), + } + ); run_to_block(6); // Two new gilts should have been issued to 1 & 2 for 40 each & duration of 2. 
- assert_eq!(Active::::get(2).unwrap(), ActiveGilt { - proportion: Perquintill::from_percent(10), - amount: 40, - who: 1, - expiry: 12, - }); - assert_eq!(Active::::get(3).unwrap(), ActiveGilt { - proportion: Perquintill::from_percent(10), - amount: 40, - who: 2, - expiry: 12, - - }); - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 160, - proportion: Perquintill::from_percent(40), - index: 4, - target: Perquintill::from_percent(40), - }); + assert_eq!( + Active::::get(2).unwrap(), + ActiveGilt { + proportion: Perquintill::from_percent(10), + amount: 40, + who: 1, + expiry: 12, + } + ); + assert_eq!( + Active::::get(3).unwrap(), + ActiveGilt { + proportion: Perquintill::from_percent(10), + amount: 40, + who: 2, + expiry: 12, + } + ); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 160, + proportion: Perquintill::from_percent(40), + index: 4, + target: Perquintill::from_percent(40), + } + ); run_to_block(8); // No change now. - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 160, - proportion: Perquintill::from_percent(40), - index: 4, - target: Perquintill::from_percent(40), - }); + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 160, + proportion: Perquintill::from_percent(40), + index: 4, + target: Perquintill::from_percent(40), + } + ); // Set target a bit higher to use up the remaining bid. assert_ok!(Gilt::set_target(Origin::signed(1), Perquintill::from_percent(60))); run_to_block(10); // Two new gilts should have been issued to 1 & 2 for 40 each & duration of 2. 
- assert_eq!(Active::::get(4).unwrap(), ActiveGilt { - proportion: Perquintill::from_percent(10), - amount: 40, - who: 1, - expiry: 13, - }); - - assert_eq!(ActiveTotal::::get(), ActiveGiltsTotal { - frozen: 200, - proportion: Perquintill::from_percent(50), - index: 5, - target: Perquintill::from_percent(60), - }); + assert_eq!( + Active::::get(4).unwrap(), + ActiveGilt { + proportion: Perquintill::from_percent(10), + amount: 40, + who: 1, + expiry: 13, + } + ); + + assert_eq!( + ActiveTotal::::get(), + ActiveGiltsTotal { + frozen: 200, + proportion: Perquintill::from_percent(50), + index: 5, + target: Perquintill::from_percent(60), + } + ); }); } diff --git a/frame/gilt/src/weights.rs b/frame/gilt/src/weights.rs index c9e16c041874c..cbe881d6b6da1 100644 --- a/frame/gilt/src/weights.rs +++ b/frame/gilt/src/weights.rs @@ -35,29 +35,31 @@ // --output=./frame/gilt/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_gilt. pub trait WeightInfo { - fn place_bid(l: u32, ) -> Weight; + fn place_bid(l: u32) -> Weight; fn place_bid_max() -> Weight; - fn retract_bid(l: u32, ) -> Weight; + fn retract_bid(l: u32) -> Weight; fn set_target() -> Weight; fn thaw() -> Weight; fn pursue_target_noop() -> Weight; - fn pursue_target_per_item(b: u32, ) -> Weight; - fn pursue_target_per_queue(q: u32, ) -> Weight; + fn pursue_target_per_item(b: u32) -> Weight; + fn pursue_target_per_queue(q: u32) -> Weight; } /// Weights for pallet_gilt using the Substrate node and recommended hardware. 
pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn place_bid(l: u32, ) -> Weight { + fn place_bid(l: u32) -> Weight { (60_401_000 as Weight) // Standard Error: 0 .saturating_add((146_000 as Weight).saturating_mul(l as Weight)) @@ -69,7 +71,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn retract_bid(l: u32, ) -> Weight { + fn retract_bid(l: u32) -> Weight { (61_026_000 as Weight) // Standard Error: 0 .saturating_add((119_000 as Weight).saturating_mul(l as Weight)) @@ -87,10 +89,9 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(2 as Weight)) } fn pursue_target_noop() -> Weight { - (3_449_000 as Weight) - .saturating_add(T::DbWeight::get().reads(1 as Weight)) + (3_449_000 as Weight).saturating_add(T::DbWeight::get().reads(1 as Weight)) } - fn pursue_target_per_item(b: u32, ) -> Weight { + fn pursue_target_per_item(b: u32) -> Weight { (58_182_000 as Weight) // Standard Error: 1_000 .saturating_add((10_005_000 as Weight).saturating_mul(b as Weight)) @@ -98,7 +99,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(3 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(b as Weight))) } - fn pursue_target_per_queue(q: u32, ) -> Weight { + fn pursue_target_per_queue(q: u32) -> Weight { (21_740_000 as Weight) // Standard Error: 7_000 .saturating_add((16_849_000 as Weight).saturating_mul(q as Weight)) @@ -111,7 +112,7 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn place_bid(l: u32, ) -> Weight { + fn place_bid(l: u32) -> Weight { (60_401_000 as Weight) // Standard Error: 0 .saturating_add((146_000 as Weight).saturating_mul(l as Weight)) @@ -123,7 +124,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) 
.saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn retract_bid(l: u32, ) -> Weight { + fn retract_bid(l: u32) -> Weight { (61_026_000 as Weight) // Standard Error: 0 .saturating_add((119_000 as Weight).saturating_mul(l as Weight)) @@ -141,10 +142,9 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } fn pursue_target_noop() -> Weight { - (3_449_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(1 as Weight)) + (3_449_000 as Weight).saturating_add(RocksDbWeight::get().reads(1 as Weight)) } - fn pursue_target_per_item(b: u32, ) -> Weight { + fn pursue_target_per_item(b: u32) -> Weight { (58_182_000 as Weight) // Standard Error: 1_000 .saturating_add((10_005_000 as Weight).saturating_mul(b as Weight)) @@ -152,7 +152,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(3 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(b as Weight))) } - fn pursue_target_per_queue(q: u32, ) -> Weight { + fn pursue_target_per_queue(q: u32) -> Weight { (21_740_000 as Weight) // Standard Error: 7_000 .saturating_add((16_849_000 as Weight).saturating_mul(q as Weight)) diff --git a/frame/grandpa/src/benchmarking.rs b/frame/grandpa/src/benchmarking.rs index 1bd65944f0a35..d5372c5687a4b 100644 --- a/frame/grandpa/src/benchmarking.rs +++ b/frame/grandpa/src/benchmarking.rs @@ -19,7 +19,7 @@ #![cfg_attr(not(feature = "std"), no_std)] -use super::{*, Pallet as Grandpa}; +use super::{Pallet as Grandpa, *}; use frame_benchmarking::benchmarks; use frame_system::RawOrigin; use sp_core::H256; @@ -106,10 +106,7 @@ mod tests { ); println!("equivocation_proof: {:?}", equivocation_proof); - println!( - "equivocation_proof.encode(): {:?}", - equivocation_proof.encode() - ); + println!("equivocation_proof.encode(): {:?}", equivocation_proof.encode()); }); } } diff --git a/frame/grandpa/src/default_weights.rs b/frame/grandpa/src/default_weights.rs index 63122fcf4b538..edc18a7ff8c93 100644 --- 
a/frame/grandpa/src/default_weights.rs +++ b/frame/grandpa/src/default_weights.rs @@ -19,7 +19,8 @@ //! This file was not auto-generated. use frame_support::weights::{ - Weight, constants::{WEIGHT_PER_MICROS, WEIGHT_PER_NANOS, RocksDbWeight as DbWeight}, + constants::{RocksDbWeight as DbWeight, WEIGHT_PER_MICROS, WEIGHT_PER_NANOS}, + Weight, }; impl crate::WeightInfo for () { @@ -48,7 +49,6 @@ impl crate::WeightInfo for () { } fn note_stalled() -> Weight { - (3 * WEIGHT_PER_MICROS) - .saturating_add(DbWeight::get().writes(1)) + (3 * WEIGHT_PER_MICROS).saturating_add(DbWeight::get().writes(1)) } } diff --git a/frame/grandpa/src/equivocation.rs b/frame/grandpa/src/equivocation.rs index f1d4363645356..04198df192e71 100644 --- a/frame/grandpa/src/equivocation.rs +++ b/frame/grandpa/src/equivocation.rs @@ -54,7 +54,7 @@ use sp_staking::{ SessionIndex, }; -use super::{Call, Pallet, Config}; +use super::{Call, Config, Pallet}; /// A trait with utility methods for handling equivocation reports in GRANDPA. /// The offence type is generic, and the trait provides , reporting an offence @@ -130,9 +130,7 @@ pub struct EquivocationHandler> { impl Default for EquivocationHandler { fn default() -> Self { - Self { - _phantom: Default::default(), - } + Self { _phantom: Default::default() } } } @@ -209,21 +207,22 @@ impl Pallet { if let Call::report_equivocation_unsigned { equivocation_proof, key_owner_proof } = call { // discard equivocation report not coming from the local node match source { - TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ } + TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ }, _ => { log::warn!( target: "runtime::afg", "rejecting unsigned report equivocation transaction because it is not local/in-block." 
); - return InvalidTransaction::Call.into(); - } + return InvalidTransaction::Call.into() + }, } // check report staleness is_known_offence::(equivocation_proof, key_owner_proof)?; - let longevity = >::ReportLongevity::get(); + let longevity = + >::ReportLongevity::get(); ValidTransaction::with_tag_prefix("GrandpaEquivocation") // We assign the maximum priority for any equivocation report. @@ -257,10 +256,7 @@ fn is_known_offence( key_owner_proof: &T::KeyOwnerProof, ) -> Result<(), TransactionValidityError> { // check the membership proof to extract the offender's id - let key = ( - sp_finality_grandpa::KEY_TYPE, - equivocation_proof.offender().clone(), - ); + let key = (sp_finality_grandpa::KEY_TYPE, equivocation_proof.offender().clone()); let offender = T::KeyOwnerProofSystem::check_proof(key, key_owner_proof.clone()) .ok_or(InvalidTransaction::BadProof)?; diff --git a/frame/grandpa/src/lib.rs b/frame/grandpa/src/lib.rs index 72c80a8773631..02422c040e64f 100644 --- a/frame/grandpa/src/lib.rs +++ b/frame/grandpa/src/lib.rs @@ -41,18 +41,16 @@ use fg_primitives::{ }; use frame_support::{ dispatch::DispatchResultWithPostInfo, - storage, traits::{OneSessionHandler, KeyOwnerProofSystem}, weights::{Pays, Weight}, -}; -use sp_runtime::{ - generic::DigestItem, - traits::Zero, - DispatchResult, KeyTypeId, + storage, + traits::{KeyOwnerProofSystem, OneSessionHandler}, + weights::{Pays, Weight}, }; +use sp_runtime::{generic::DigestItem, traits::Zero, DispatchResult, KeyTypeId}; use sp_session::{GetSessionNumber, GetValidatorCount}; use sp_staking::SessionIndex; -mod equivocation; mod default_weights; +mod equivocation; pub mod migrations; #[cfg(any(feature = "runtime-benchmarks", test))] @@ -73,9 +71,9 @@ use scale_info::TypeInfo; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -132,24 +130,20 @@ pub mod 
pallet { ScheduledChange { delay: pending_change.delay, next_authorities: pending_change.next_authorities.clone(), - } + }, )) } else { - Self::deposit_log(ConsensusLog::ScheduledChange( - ScheduledChange { - delay: pending_change.delay, - next_authorities: pending_change.next_authorities.clone(), - } - )); + Self::deposit_log(ConsensusLog::ScheduledChange(ScheduledChange { + delay: pending_change.delay, + next_authorities: pending_change.next_authorities.clone(), + })); } } // enact the change if we've reached the enacting block if block_number == pending_change.scheduled_at + pending_change.delay { Self::set_grandpa_authorities(&pending_change.next_authorities); - Self::deposit_event( - Event::NewAuthorities(pending_change.next_authorities) - ); + Self::deposit_event(Event::NewAuthorities(pending_change.next_authorities)); >::kill(); } } @@ -199,11 +193,7 @@ pub mod pallet { ) -> DispatchResultWithPostInfo { let reporter = ensure_signed(origin)?; - Self::do_report_equivocation( - Some(reporter), - equivocation_proof, - key_owner_proof, - ) + Self::do_report_equivocation(Some(reporter), equivocation_proof, key_owner_proof) } /// Report voter equivocation/misbehavior. This method will verify the @@ -291,7 +281,8 @@ pub mod pallet { /// State of the current authority set. #[pallet::storage] #[pallet::getter(fn state)] - pub(super) type State = StorageValue<_, StoredState, ValueQuery, DefaultForState>; + pub(super) type State = + StorageValue<_, StoredState, ValueQuery, DefaultForState>; /// Pending change: (signaled at, scheduled change). #[pallet::storage] @@ -330,9 +321,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - Self { - authorities: Default::default(), - } + Self { authorities: Default::default() } } } @@ -390,7 +379,7 @@ pub enum StoredState { /// Block at which the intention to pause was scheduled. scheduled_at: N, /// Number of blocks after which the change will be enacted. 
- delay: N + delay: N, }, /// The current GRANDPA authority set is paused. Paused, @@ -412,10 +401,7 @@ impl Pallet { /// Set the current set of authorities, along with their respective weights. fn set_grandpa_authorities(authorities: &AuthorityList) { - storage::unhashed::put( - GRANDPA_AUTHORITIES_KEY, - &VersionedAuthorityList::from(authorities), - ); + storage::unhashed::put(GRANDPA_AUTHORITIES_KEY, &VersionedAuthorityList::from(authorities)); } /// Schedule GRANDPA to pause starting in the given number of blocks. @@ -423,10 +409,7 @@ impl Pallet { pub fn schedule_pause(in_blocks: T::BlockNumber) -> DispatchResult { if let StoredState::Live = >::get() { let scheduled_at = >::block_number(); - >::put(StoredState::PendingPause { - delay: in_blocks, - scheduled_at, - }); + >::put(StoredState::PendingPause { delay: in_blocks, scheduled_at }); Ok(()) } else { @@ -438,10 +421,7 @@ impl Pallet { pub fn schedule_resume(in_blocks: T::BlockNumber) -> DispatchResult { if let StoredState::Paused = >::get() { let scheduled_at = >::block_number(); - >::put(StoredState::PendingResume { - delay: in_blocks, - scheduled_at, - }); + >::put(StoredState::PendingResume { delay: in_blocks, scheduled_at }); Ok(()) } else { @@ -504,10 +484,7 @@ impl Pallet { // config builder or through `on_genesis_session`. fn initialize(authorities: &AuthorityList) { if !authorities.is_empty() { - assert!( - Self::grandpa_authorities().is_empty(), - "Authorities are already initialized!" - ); + assert!(Self::grandpa_authorities().is_empty(), "Authorities are already initialized!"); Self::set_grandpa_authorities(authorities); } @@ -532,16 +509,16 @@ impl Pallet { let validator_count = key_owner_proof.validator_count(); // validate the key ownership proof extracting the id of the offender. 
- let offender = - T::KeyOwnerProofSystem::check_proof( - (fg_primitives::KEY_TYPE, equivocation_proof.offender().clone()), - key_owner_proof, - ).ok_or(Error::::InvalidKeyOwnershipProof)?; + let offender = T::KeyOwnerProofSystem::check_proof( + (fg_primitives::KEY_TYPE, equivocation_proof.offender().clone()), + key_owner_proof, + ) + .ok_or(Error::::InvalidKeyOwnershipProof)?; // validate equivocation proof (check votes are different and // signatures are valid). if !sp_finality_grandpa::check_equivocation_proof(equivocation_proof) { - return Err(Error::::InvalidEquivocationProof.into()); + return Err(Error::::InvalidEquivocationProof.into()) } // fetch the current and previous sets last session index. on the @@ -549,8 +526,8 @@ impl Pallet { let previous_set_id_session_index = if set_id == 0 { None } else { - let session_index = - Self::session_for_set(set_id - 1).ok_or_else(|| Error::::InvalidEquivocationProof)?; + let session_index = Self::session_for_set(set_id - 1) + .ok_or_else(|| Error::::InvalidEquivocationProof)?; Some(session_index) }; @@ -562,10 +539,10 @@ impl Pallet { // bounds of the set id reported in the equivocation. if session_index > set_id_session_index || previous_set_id_session_index - .map(|previous_index| session_index <= previous_index) - .unwrap_or(false) + .map(|previous_index| session_index <= previous_index) + .unwrap_or(false) { - return Err(Error::::InvalidEquivocationProof.into()); + return Err(Error::::InvalidEquivocationProof.into()) } // report to the offences module rewarding the sender. 
@@ -578,7 +555,8 @@ impl Pallet { set_id, round, ), - ).map_err(|_| Error::::DuplicateOffenceReport)?; + ) + .map_err(|_| Error::::DuplicateOffenceReport)?; // waive the fee since the report is valid and beneficial Ok(Pays::No.into()) @@ -612,19 +590,22 @@ impl sp_runtime::BoundToRuntimeAppPublic for Pallet { } impl OneSessionHandler for Pallet - where T: pallet_session::Config +where + T: pallet_session::Config, { type Key = AuthorityId; fn on_genesis_session<'a, I: 'a>(validators: I) - where I: Iterator + where + I: Iterator, { let authorities = validators.map(|(_, k)| (k, 1)).collect::>(); Self::initialize(&authorities); } fn on_new_session<'a, I: 'a>(changed: bool, validators: I, _queued_validators: I) - where I: Iterator + where + I: Iterator, { // Always issue a change if `session` says that the validators have changed. // Even if their session keys are the same as before, the underlying economic diff --git a/frame/grandpa/src/migrations/v3_1.rs b/frame/grandpa/src/migrations/v3_1.rs index fc626578098da..c2ab9d3b7f665 100644 --- a/frame/grandpa/src/migrations/v3_1.rs +++ b/frame/grandpa/src/migrations/v3_1.rs @@ -16,8 +16,8 @@ // limitations under the License. use frame_support::{ + traits::{Get, GetPalletVersion, PalletVersion}, weights::Weight, - traits::{GetPalletVersion, PalletVersion, Get}, }; use sp_io::hashing::twox_128; @@ -31,18 +31,15 @@ pub const OLD_PREFIX: &[u8] = b"GrandpaFinality"; /// `::PalletInfo::name::`. /// /// The old storage prefix, `GrandpaFinality` is hardcoded in the migration code. -pub fn migrate< - T: frame_system::Config, - P: GetPalletVersion, - N: AsRef, ->(new_pallet_name: N) -> Weight { - +pub fn migrate>( + new_pallet_name: N, +) -> Weight { if new_pallet_name.as_ref().as_bytes() == OLD_PREFIX { log::info!( target: "runtime::afg", "New pallet name is equal to the old prefix. No migration needs to be done.", ); - return 0; + return 0 } let maybe_storage_version =

{ pub token: syn::token::$tok, @@ -46,7 +49,7 @@ macro_rules! groups_impl { fn parse(input: ParseStream) -> Result { let syn::group::$name { token, content } = syn::group::$parse(input)?; let content = content.parse()?; - Ok($name { token, content, }) + Ok($name { token, content }) } } @@ -60,12 +63,12 @@ macro_rules! groups_impl { } } - impl Clone for $name

{ + impl Clone for $name

{ fn clone(&self) -> Self { Self { token: self.token.clone(), content: self.content.clone() } } } - } + }; } groups_impl!(Braces, Brace, Brace, parse_braces); @@ -73,23 +76,22 @@ groups_impl!(Brackets, Bracket, Bracket, parse_brackets); groups_impl!(Parens, Paren, Parenthesis, parse_parens); #[derive(Debug)] -pub struct PunctuatedInner { - pub inner: syn::punctuated::Punctuated, +pub struct PunctuatedInner { + pub inner: syn::punctuated::Punctuated, pub variant: V, } #[derive(Debug, Clone)] pub struct NoTrailing; - #[derive(Debug, Clone)] pub struct Trailing; -pub type Punctuated = PunctuatedInner; +pub type Punctuated = PunctuatedInner; -pub type PunctuatedTrailing = PunctuatedInner; +pub type PunctuatedTrailing = PunctuatedInner; -impl Parse for PunctuatedInner { +impl Parse for PunctuatedInner { fn parse(input: ParseStream) -> Result { Ok(PunctuatedInner { inner: syn::punctuated::Punctuated::parse_separated_nonempty(input)?, @@ -98,7 +100,7 @@ impl Parse for PunctuatedInner Parse for PunctuatedInner { +impl Parse for PunctuatedInner { fn parse(input: ParseStream) -> Result { Ok(PunctuatedInner { inner: syn::punctuated::Punctuated::parse_terminated(input)?, @@ -107,13 +109,13 @@ impl Parse for PunctuatedInner { } } -impl ToTokens for PunctuatedInner { +impl ToTokens for PunctuatedInner { fn to_tokens(&self, tokens: &mut TokenStream) { self.inner.to_tokens(tokens) } } -impl Clone for PunctuatedInner { +impl Clone for PunctuatedInner { fn clone(&self) -> Self { Self { inner: self.inner.clone(), variant: self.variant.clone() } } @@ -127,9 +129,7 @@ pub struct Meta { impl Parse for Meta { fn parse(input: ParseStream) -> Result { - Ok(Meta { - inner: syn::Meta::parse(input)?, - }) + Ok(Meta { inner: syn::Meta::parse(input)? 
}) } } @@ -151,9 +151,7 @@ pub struct OuterAttributes { impl Parse for OuterAttributes { fn parse(input: ParseStream) -> Result { let inner = syn::Attribute::parse_outer(input)?; - Ok(OuterAttributes { - inner, - }) + Ok(OuterAttributes { inner }) } } @@ -189,13 +187,11 @@ struct ContainsIdent<'a> { impl<'ast> ContainsIdent<'ast> { fn visit_tokenstream(&mut self, stream: TokenStream) { - stream.into_iter().for_each(|tt| - match tt { - TokenTree::Ident(id) => self.visit_ident(&id), - TokenTree::Group(ref group) => self.visit_tokenstream(group.stream()), - _ => {} - } - ) + stream.into_iter().for_each(|tt| match tt { + TokenTree::Ident(id) => self.visit_ident(&id), + TokenTree::Group(ref group) => self.visit_tokenstream(group.stream()), + _ => {}, + }) } fn visit_ident(&mut self, ident: &Ident) { @@ -218,10 +214,7 @@ impl<'ast> Visit<'ast> for ContainsIdent<'ast> { /// Check if a `Type` contains the given `Ident`. pub fn type_contains_ident(typ: &syn::Type, ident: &Ident) -> bool { - let mut visit = ContainsIdent { - result: false, - ident, - }; + let mut visit = ContainsIdent { result: false, ident }; visit::visit_type(&mut visit, typ); visit.result @@ -229,10 +222,7 @@ pub fn type_contains_ident(typ: &syn::Type, ident: &Ident) -> bool { /// Check if a `Expr` contains the given `Ident`. pub fn expr_contains_ident(expr: &syn::Expr, ident: &Ident) -> bool { - let mut visit = ContainsIdent { - result: false, - ident, - }; + let mut visit = ContainsIdent { result: false, ident }; visit::visit_expr(&mut visit, expr); visit.result diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 6a67c5f0148b6..12a7753c465d0 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -18,16 +18,22 @@ //! Dispatch system. Contains a macro for defining runtime modules and //! generating values representing lazy module function calls. 
-pub use crate::sp_std::{result, fmt, prelude::{Vec, Clone, Eq, PartialEq}, marker}; -pub use crate::codec::{Codec, EncodeLike, Decode, Encode, Input, Output, HasCompact, EncodeAsRef}; -pub use crate::weights::{ - GetDispatchInfo, DispatchInfo, WeighData, ClassifyDispatch, TransactionPriority, Weight, - PaysFee, PostDispatchInfo, WithPostDispatchInfo, +pub use crate::{ + codec::{Codec, Decode, Encode, EncodeAsRef, EncodeLike, HasCompact, Input, Output}, + sp_std::{ + fmt, marker, + prelude::{Clone, Eq, PartialEq, Vec}, + result, + }, + traits::{ + CallMetadata, GetCallMetadata, GetCallName, GetPalletVersion, UnfilteredDispatchable, + }, + weights::{ + ClassifyDispatch, DispatchInfo, GetDispatchInfo, PaysFee, PostDispatchInfo, + TransactionPriority, WeighData, Weight, WithPostDispatchInfo, + }, }; pub use sp_runtime::{traits::Dispatchable, DispatchError}; -pub use crate::traits::{ - CallMetadata, GetCallMetadata, GetCallName, UnfilteredDispatchable, GetPalletVersion, -}; /// The return typ of a `Dispatchable` in frame. When returned explicitly from /// a dispatchable function it allows overriding the default `PostDispatchInfo` @@ -2397,14 +2403,20 @@ macro_rules! 
__check_reserved_fn_name { #[allow(dead_code)] mod tests { use super::*; - use crate::weights::{DispatchInfo, DispatchClass, Pays, RuntimeDbWeight}; - use crate::traits::{ - GetCallName, OnInitialize, OnFinalize, OnIdle, OnRuntimeUpgrade, - IntegrityTest, Get, PalletInfo, - }; - use crate::metadata::*; - - pub trait Config: system::Config + Sized where Self::AccountId: From { } + use crate::{ + metadata::*, + traits::{ + Get, GetCallName, IntegrityTest, OnFinalize, OnIdle, OnInitialize, OnRuntimeUpgrade, + PalletInfo, + }, + weights::{DispatchClass, DispatchInfo, Pays, RuntimeDbWeight}, + }; + + pub trait Config: system::Config + Sized + where + Self::AccountId: From, + { + } pub mod system { use super::*; @@ -2477,77 +2489,53 @@ mod tests { fn expected_calls() -> Vec { vec![ - FunctionMetadata { - name: "aux_0", - args: vec![], - docs: vec![ - " Hi, this is a comment." - ] - }, + FunctionMetadata { name: "aux_0", args: vec![], docs: vec![" Hi, this is a comment."] }, FunctionMetadata { name: "aux_1", - args: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::>(), - } - ], + args: vec![FunctionArgumentMetadata { + name: "_data", + ty: scale_info::meta_type::>(), + }], docs: vec![], }, FunctionMetadata { name: "aux_2", args: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::(), - }, + FunctionArgumentMetadata { name: "_data", ty: scale_info::meta_type::() }, FunctionArgumentMetadata { name: "_data2", ty: scale_info::meta_type::(), - } + }, ], docs: vec![], }, - FunctionMetadata { - name: "aux_3", - args: vec![], - docs: vec![], - }, + FunctionMetadata { name: "aux_3", args: vec![], docs: vec![] }, FunctionMetadata { name: "aux_4", - args: vec![ - FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::(), - } - ], + args: vec![FunctionArgumentMetadata { + name: "_data", + ty: scale_info::meta_type::(), + }], docs: vec![], }, FunctionMetadata { name: "aux_5", args: vec![ - 
FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::(), - }, + FunctionArgumentMetadata { name: "_data", ty: scale_info::meta_type::() }, FunctionArgumentMetadata { name: "_data2", - ty: scale_info::meta_type::>() - } + ty: scale_info::meta_type::>(), + }, ], docs: vec![], }, - FunctionMetadata { - name: "operational", - args: vec![], - docs: vec![], - }, + FunctionMetadata { name: "operational", args: vec![], docs: vec![] }, ] } #[derive(scale_info::TypeInfo)] pub struct TraitImpl {} - impl Config for TraitImpl { } + impl Config for TraitImpl {} type Test = Module; @@ -2615,7 +2603,6 @@ mod tests { } } - impl system::Config for TraitImpl { type Origin = OuterOrigin; type AccountId = u32; @@ -2700,9 +2687,9 @@ mod tests { #[test] fn on_runtime_upgrade_should_work() { - sp_io::TestExternalities::default().execute_with(|| + sp_io::TestExternalities::default().execute_with(|| { assert_eq!( as OnRuntimeUpgrade>::on_runtime_upgrade(), 10) - ); + }); } #[test] @@ -2728,7 +2715,10 @@ mod tests { #[test] fn get_call_names() { let call_names = Call::::get_call_names(); - assert_eq!(["aux_0", "aux_1", "aux_2", "aux_3", "aux_4", "aux_5", "operational"], call_names); + assert_eq!( + ["aux_0", "aux_1", "aux_2", "aux_3", "aux_4", "aux_5", "operational"], + call_names + ); } #[test] diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index d51189057eb56..b6a46d7633e26 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -18,7 +18,7 @@ //! Macro for declaring a module error. #[doc(hidden)] -pub use sp_runtime::traits::{LookupError, BadOrigin}; +pub use sp_runtime::traits::{BadOrigin, LookupError}; /// Declare an error type for a runtime module. /// diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index ac881fbe301b7..be11e82e6cd21 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -287,4 +287,4 @@ macro_rules! 
__decl_generic_event { (@cannot_parse $ty:ty) => { compile_error!(concat!("The type `", stringify!($ty), "` can't be parsed as an unnamed one, please name it `Name = ", stringify!($ty), "`")); } -} \ No newline at end of file +} diff --git a/frame/support/src/hash.rs b/frame/support/src/hash.rs index a7932811bbdd1..f943bcf323090 100644 --- a/frame/support/src/hash.rs +++ b/frame/support/src/hash.rs @@ -19,8 +19,8 @@ use crate::metadata; use codec::{Codec, MaxEncodedLen}; +use sp_io::hashing::{blake2_128, blake2_256, twox_128, twox_256, twox_64}; use sp_std::prelude::Vec; -use sp_io::hashing::{blake2_128, blake2_256, twox_64, twox_128, twox_256}; // This trait must be kept coherent with frame-support-procedural HasherKind usage pub trait Hashable: Sized { @@ -52,7 +52,9 @@ impl Hashable for T { fn twox_64_concat(&self) -> Vec { self.using_encoded(Twox64Concat::hash) } - fn identity(&self) -> Vec { self.encode() } + fn identity(&self) -> Vec { + self.encode() + } } /// Hasher to use to hash keys to insert to storage. 
@@ -99,11 +101,7 @@ impl StorageHasher for Twox64Concat { const METADATA: metadata::StorageHasher = metadata::StorageHasher::Twox64Concat; type Output = Vec; fn hash(x: &[u8]) -> Vec { - twox_64(x) - .iter() - .chain(x.into_iter()) - .cloned() - .collect::>() + twox_64(x).iter().chain(x.into_iter()).cloned().collect::>() } fn max_len() -> usize { K::max_encoded_len().saturating_add(8) @@ -125,11 +123,7 @@ impl StorageHasher for Blake2_128Concat { const METADATA: metadata::StorageHasher = metadata::StorageHasher::Blake2_128Concat; type Output = Vec; fn hash(x: &[u8]) -> Vec { - blake2_128(x) - .iter() - .chain(x.into_iter()) - .cloned() - .collect::>() + blake2_128(x).iter().chain(x.into_iter()).cloned().collect::>() } fn max_len() -> usize { K::max_encoded_len().saturating_add(16) diff --git a/frame/support/src/inherent.rs b/frame/support/src/inherent.rs index cccbbbaa517ce..2125f3e7f50a7 100644 --- a/frame/support/src/inherent.rs +++ b/frame/support/src/inherent.rs @@ -15,13 +15,13 @@ // See the License for the specific language governing permissions and // limitations under the License. -#[doc(hidden)] -pub use crate::sp_std::vec::Vec; #[doc(hidden)] pub use crate::sp_runtime::traits::{Block as BlockT, Extrinsic}; +#[doc(hidden)] +pub use crate::sp_std::vec::Vec; pub use sp_inherents::{ - InherentData, CheckInherentsResult, IsFatalError, InherentIdentifier, MakeFatalError, + CheckInherentsResult, InherentData, InherentIdentifier, IsFatalError, MakeFatalError, }; /// A pallet that provides or verifies an inherent extrinsic. @@ -53,7 +53,9 @@ pub trait ProvideInherent { /// one inherent for which: /// * type is [`Self::Call`], /// * [`Self::is_inherent`] returns true. - fn is_inherent_required(_: &InherentData) -> Result, Self::Error> { Ok(None) } + fn is_inherent_required(_: &InherentData) -> Result, Self::Error> { + Ok(None) + } /// Check whether the given inherent is valid. 
Checking the inherent is optional and can be /// omitted by using the default implementation. diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 1fe3c94179d7c..a5e6418e8d354 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -25,44 +25,44 @@ extern crate self as frame_support; #[doc(hidden)] pub use sp_tracing; -#[cfg(feature = "std")] -pub use serde; #[doc(hidden)] -pub use scale_info; -pub use sp_core::Void; +pub use codec; #[doc(hidden)] -pub use sp_std; +pub use frame_metadata as metadata; #[doc(hidden)] -pub use codec; +pub use log; #[cfg(feature = "std")] #[doc(hidden)] pub use once_cell; #[doc(hidden)] pub use paste; -#[cfg(feature = "std")] #[doc(hidden)] -pub use sp_state_machine::BasicExternalities; +pub use scale_info; +#[cfg(feature = "std")] +pub use serde; +pub use sp_core::Void; #[doc(hidden)] -pub use sp_io::{storage::root as storage_root, self}; +pub use sp_io::{self, storage::root as storage_root}; #[doc(hidden)] pub use sp_runtime::RuntimeDebug; +#[cfg(feature = "std")] #[doc(hidden)] -pub use log; +pub use sp_state_machine::BasicExternalities; #[doc(hidden)] -pub use frame_metadata as metadata; +pub use sp_std; #[macro_use] pub mod dispatch; -pub mod storage; mod hash; +pub mod storage; #[macro_use] pub mod event; pub mod inherent; #[macro_use] pub mod error; +pub mod instances; pub mod traits; pub mod weights; -pub mod instances; #[doc(hidden)] pub mod unsigned { @@ -70,23 +70,27 @@ pub mod unsigned { pub use crate::sp_runtime::traits::ValidateUnsigned; #[doc(hidden)] pub use crate::sp_runtime::transaction_validity::{ - TransactionValidity, UnknownTransaction, TransactionValidityError, TransactionSource, + TransactionSource, TransactionValidity, TransactionValidityError, UnknownTransaction, }; } -pub use self::hash::{ - Twox256, Twox128, Blake2_256, Blake2_128, Identity, Twox64Concat, Blake2_128Concat, Hashable, - StorageHasher, ReversibleStorageHasher -}; -pub use self::storage::{ - StorageValue, 
StorageMap, StorageDoubleMap, StorageNMap, StoragePrefixedMap, - IterableStorageMap, IterableStorageDoubleMap, IterableStorageNMap, migration, - bounded_vec::{BoundedVec, BoundedSlice}, weak_bounded_vec::WeakBoundedVec, +pub use self::{ + dispatch::{Callable, Parameter}, + hash::{ + Blake2_128, Blake2_128Concat, Blake2_256, Hashable, Identity, ReversibleStorageHasher, + StorageHasher, Twox128, Twox256, Twox64Concat, + }, + storage::{ + bounded_vec::{BoundedSlice, BoundedVec}, + migration, + weak_bounded_vec::WeakBoundedVec, + IterableStorageDoubleMap, IterableStorageMap, IterableStorageNMap, StorageDoubleMap, + StorageMap, StorageNMap, StoragePrefixedMap, StorageValue, + }, }; -pub use self::dispatch::{Parameter, Callable}; -pub use sp_runtime::{self, ConsensusEngineId, print, traits::Printable}; +pub use sp_runtime::{self, print, traits::Printable, ConsensusEngineId}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; use sp_runtime::TypeId; @@ -118,7 +122,7 @@ impl TypeId for PalletId { /// // generate a storage value with type u32. /// generate_storage_alias!(Prefix, StorageName => Value); /// -/// // generate a double map from `(u32, u32)` (with hashers `Twox64Concat` for each key) +/// // generate a double map from `(u32, u32)` (with hashers `Twox64Concat` for each key) /// // to `Vec` /// generate_storage_alias!( /// OtherPrefix, OtherStorageName => DoubleMap< @@ -537,7 +541,7 @@ pub fn debug(data: &impl sp_std::fmt::Debug) { #[doc(inline)] pub use frame_support_procedural::{ - decl_storage, construct_runtime, transactional, RuntimeDebugNoBound, + construct_runtime, decl_storage, transactional, RuntimeDebugNoBound, }; #[doc(hidden)] @@ -687,8 +691,8 @@ pub use frame_support_procedural::crate_to_pallet_version; #[macro_export] macro_rules! fail { ( $y:expr ) => {{ - return Err($y.into()); - }} + return Err($y.into()) + }}; } /// Evaluate `$x:expr` and if not true return `Err($y:expr)`. @@ -700,7 +704,7 @@ macro_rules! 
ensure { if !$x { $crate::fail!($y); } - }} + }}; } /// Evaluate an expression, assert it returns an expected `Err` value and that @@ -716,7 +720,7 @@ macro_rules! assert_noop { let h = $crate::storage_root(); $crate::assert_err!($x, $y); assert_eq!(h, $crate::storage_root()); - } + }; } /// Evaluate any expression and assert that runtime storage has not been mutated @@ -731,7 +735,7 @@ macro_rules! assert_storage_noop { let h = $crate::storage_root(); $x; assert_eq!(h, $crate::storage_root()); - } + }; } /// Assert an expression returns an error specified. @@ -741,7 +745,7 @@ macro_rules! assert_storage_noop { macro_rules! assert_err { ( $x:expr , $y:expr $(,)? ) => { assert_eq!($x, Err($y.into())); - } + }; } /// Assert an expression returns an error specified. @@ -752,7 +756,7 @@ macro_rules! assert_err { macro_rules! assert_err_ignore_postinfo { ( $x:expr , $y:expr $(,)? ) => { $crate::assert_err!($x.map(|_| ()).map_err(|e| e.error), $y); - } + }; } /// Assert an expression returns error with the given weight. @@ -765,7 +769,7 @@ macro_rules! assert_err_with_weight { } else { panic!("expected Err(_), got Ok(_).") } - } + }; } /// Panic if an expression doesn't evaluate to `Ok`. @@ -783,23 +787,23 @@ macro_rules! assert_ok { }; ( $x:expr, $y:expr $(,)? ) => { assert_eq!($x, Ok($y)); - } + }; } #[cfg(feature = "std")] #[doc(hidden)] -pub use serde::{Serialize, Deserialize}; +pub use serde::{Deserialize, Serialize}; #[cfg(test)] pub mod tests { use super::*; - use codec::{Codec, EncodeLike}; use crate::metadata::{ - StorageEntryMetadata, PalletStorageMetadata, StorageEntryType, StorageEntryModifier, + PalletStorageMetadata, StorageEntryMetadata, StorageEntryModifier, StorageEntryType, StorageHasher, }; - use sp_std::result; + use codec::{Codec, EncodeLike}; use sp_io::TestExternalities; + use sp_std::result; /// A PalletInfo implementation which just panics. 
pub struct PanicPalletInfo; @@ -867,7 +871,9 @@ pub mod tests { type Map = Data; - trait Sorted { fn sorted(self) -> Self; } + trait Sorted { + fn sorted(self) -> Self; + } impl Sorted for Vec { fn sorted(mut self) -> Self { self.sort(); @@ -921,13 +927,15 @@ pub mod tests { DataDM::insert(1, 0, 2); DataDM::insert(1, 1, 3); - let get_all = || vec![ - DataDM::get(0, 1), - DataDM::get(1, 0), - DataDM::get(1, 1), - DataDM::get(2, 0), - DataDM::get(2, 1), - ]; + let get_all = || { + vec![ + DataDM::get(0, 1), + DataDM::get(1, 0), + DataDM::get(1, 1), + DataDM::get(2, 0), + DataDM::get(2, 1), + ] + }; assert_eq!(get_all(), vec![1, 2, 3, 0, 0]); // Two existing @@ -993,15 +1001,24 @@ pub mod tests { Map::mutate(&key, |val| { *val = 15; }); - assert_eq!(Map::iter().collect::>().sorted(), vec![(key - 2, 42), (key - 1, 43), (key, 15)]); + assert_eq!( + Map::iter().collect::>().sorted(), + vec![(key - 2, 42), (key - 1, 43), (key, 15)] + ); Map::mutate(&key, |val| { *val = 17; }); - assert_eq!(Map::iter().collect::>().sorted(), vec![(key - 2, 42), (key - 1, 43), (key, 17)]); + assert_eq!( + Map::iter().collect::>().sorted(), + vec![(key - 2, 42), (key - 1, 43), (key, 17)] + ); // remove first Map::remove(&key); - assert_eq!(Map::iter().collect::>().sorted(), vec![(key - 2, 42), (key - 1, 43)]); + assert_eq!( + Map::iter().collect::>().sorted(), + vec![(key - 2, 42), (key - 1, 43)] + ); // remove last from the list Map::remove(&(key - 2)); @@ -1052,7 +1069,6 @@ pub mod tests { assert_eq!(DoubleMap::get(&key1, &(key2 + 1)), 0u64); assert_eq!(DoubleMap::get(&(key1 + 1), &key2), 4u64); assert_eq!(DoubleMap::get(&(key1 + 1), &(key2 + 1)), 4u64); - }); } @@ -1103,10 +1119,13 @@ pub mod tests { assert_eq!(DoubleMap::get(&key1, key2), 1); // no-op if `Err` - assert_noop!(DoubleMap::try_mutate_exists(key1, key2, |v| -> TestResult { - *v = Some(2); - Err("nah") - }), "nah"); + assert_noop!( + DoubleMap::try_mutate_exists(key1, key2, |v| -> TestResult { + *v = Some(2); + Err("nah") + 
}), + "nah" + ); // removed if mutated to`None` assert_ok!(DoubleMap::try_mutate_exists(key1, key2, |v| -> TestResult { @@ -1252,35 +1271,38 @@ pub mod tests { /// Prelude to be used alongside pallet macro, for ease of use. pub mod pallet_prelude { - pub use sp_std::marker::PhantomData; #[cfg(feature = "std")] pub use crate::traits::GenesisBuild; pub use crate::{ - EqNoBound, PartialEqNoBound, RuntimeDebugNoBound, DebugNoBound, CloneNoBound, Twox256, - Twox128, Blake2_256, Blake2_128, Identity, Twox64Concat, Blake2_128Concat, ensure, - RuntimeDebug, storage, + dispatch::{DispatchError, DispatchResult, DispatchResultWithPostInfo, Parameter}, + ensure, + inherent::{InherentData, InherentIdentifier, ProvideInherent}, + storage, + storage::{ + bounded_vec::BoundedVec, + types::{ + Key as NMapKey, OptionQuery, StorageDoubleMap, StorageMap, StorageNMap, + StorageValue, ValueQuery, + }, + }, traits::{ - Get, Hooks, IsType, GetPalletVersion, EnsureOrigin, PalletInfoAccess, StorageInfoTrait, - ConstU32, GetDefault, + ConstU32, EnsureOrigin, Get, GetDefault, GetPalletVersion, Hooks, IsType, + PalletInfoAccess, StorageInfoTrait, }, - dispatch::{DispatchResultWithPostInfo, Parameter, DispatchError, DispatchResult}, weights::{DispatchClass, Pays, Weight}, - storage::types::{ - Key as NMapKey, StorageDoubleMap, StorageMap, StorageNMap, StorageValue, ValueQuery, - OptionQuery, - }, - storage::bounded_vec::BoundedVec, + Blake2_128, Blake2_128Concat, Blake2_256, CloneNoBound, DebugNoBound, EqNoBound, Identity, + PartialEqNoBound, RuntimeDebug, RuntimeDebugNoBound, Twox128, Twox256, Twox64Concat, }; - pub use codec::{Encode, Decode, MaxEncodedLen}; - pub use crate::inherent::{InherentData, InherentIdentifier, ProvideInherent}; + pub use codec::{Decode, Encode, MaxEncodedLen}; pub use sp_runtime::{ traits::{MaybeSerializeDeserialize, Member, ValidateUnsigned}, transaction_validity::{ - TransactionSource, TransactionValidity, ValidTransaction, TransactionPriority, - TransactionTag, 
TransactionLongevity, TransactionValidityError, InvalidTransaction, - UnknownTransaction, + InvalidTransaction, TransactionLongevity, TransactionPriority, TransactionSource, + TransactionTag, TransactionValidity, TransactionValidityError, UnknownTransaction, + ValidTransaction, }, }; + pub use sp_std::marker::PhantomData; } /// `pallet` attribute macro allows to define a pallet to be used in `construct_runtime!`. diff --git a/frame/support/src/storage/bounded_btree_map.rs b/frame/support/src/storage/bounded_btree_map.rs index 936c2779eecb7..331b306fa0a93 100644 --- a/frame/support/src/storage/bounded_btree_map.rs +++ b/frame/support/src/storage/bounded_btree_map.rs @@ -17,15 +17,12 @@ //! Traits, types and structs to support a bounded BTreeMap. +use crate::{storage::StorageDecodeLength, traits::Get}; +use codec::{Decode, Encode, MaxEncodedLen}; use sp_std::{ borrow::Borrow, collections::btree_map::BTreeMap, convert::TryFrom, fmt, marker::PhantomData, ops::Deref, }; -use crate::{ - storage::StorageDecodeLength, - traits::Get, -}; -use codec::{Encode, Decode, MaxEncodedLen}; /// A bounded map based on a B-Tree. /// @@ -47,7 +44,7 @@ where fn decode(input: &mut I) -> Result { let inner = BTreeMap::::decode(input)?; if inner.len() > S::get() as usize { - return Err("BoundedBTreeMap exceeds its limit".into()); + return Err("BoundedBTreeMap exceeds its limit".into()) } Ok(Self(inner, PhantomData)) } @@ -281,7 +278,9 @@ where type Error = (); fn try_from(value: BTreeMap) -> Result { - (value.len() <= Self::bound()).then(move || BoundedBTreeMap(value, PhantomData)).ok_or(()) + (value.len() <= Self::bound()) + .then(move || BoundedBTreeMap(value, PhantomData)) + .ok_or(()) } } @@ -304,9 +303,9 @@ impl codec::EncodeLike> for BoundedBTreeMap whe #[cfg(test)] pub mod test { use super::*; + use crate::Twox128; use sp_io::TestExternalities; use sp_std::convert::TryInto; - use crate::Twox128; crate::parameter_types! 
{ pub const Seven: u32 = 7; diff --git a/frame/support/src/storage/bounded_btree_set.rs b/frame/support/src/storage/bounded_btree_set.rs index 461b1de58ec81..182884e655dd2 100644 --- a/frame/support/src/storage/bounded_btree_set.rs +++ b/frame/support/src/storage/bounded_btree_set.rs @@ -17,15 +17,12 @@ //! Traits, types and structs to support a bounded `BTreeSet`. +use crate::{storage::StorageDecodeLength, traits::Get}; +use codec::{Decode, Encode, MaxEncodedLen}; use sp_std::{ borrow::Borrow, collections::btree_set::BTreeSet, convert::TryFrom, fmt, marker::PhantomData, ops::Deref, }; -use crate::{ - storage::StorageDecodeLength, - traits::Get, -}; -use codec::{Encode, Decode, MaxEncodedLen}; /// A bounded set based on a B-Tree. /// @@ -45,7 +42,7 @@ where fn decode(input: &mut I) -> Result { let inner = BTreeSet::::decode(input)?; if inner.len() > S::get() as usize { - return Err("BoundedBTreeSet exceeds its limit".into()); + return Err("BoundedBTreeSet exceeds its limit".into()) } Ok(Self(inner, PhantomData)) } @@ -266,7 +263,9 @@ where type Error = (); fn try_from(value: BTreeSet) -> Result { - (value.len() <= Self::bound()).then(move || BoundedBTreeSet(value, PhantomData)).ok_or(()) + (value.len() <= Self::bound()) + .then(move || BoundedBTreeSet(value, PhantomData)) + .ok_or(()) } } @@ -281,16 +280,14 @@ impl codec::DecodeLength for BoundedBTreeSet { impl StorageDecodeLength for BoundedBTreeSet {} -impl codec::EncodeLike> for BoundedBTreeSet where - BTreeSet: Encode -{} +impl codec::EncodeLike> for BoundedBTreeSet where BTreeSet: Encode {} #[cfg(test)] pub mod test { use super::*; + use crate::Twox128; use sp_io::TestExternalities; use sp_std::convert::TryInto; - use crate::Twox128; crate::parameter_types! 
{ pub const Seven: u32 = 7; diff --git a/frame/support/src/storage/bounded_vec.rs b/frame/support/src/storage/bounded_vec.rs index 80509b9b10d13..2a4a642a13c99 100644 --- a/frame/support/src/storage/bounded_vec.rs +++ b/frame/support/src/storage/bounded_vec.rs @@ -18,17 +18,16 @@ //! Traits, types and structs to support putting a bounded vector into storage, as a raw value, map //! or a double map. -use sp_std::prelude::*; -use sp_std::{convert::TryFrom, fmt, marker::PhantomData}; -use codec::{Encode, Decode, EncodeLike, MaxEncodedLen}; +use crate::{ + storage::{StorageDecodeLength, StorageTryAppend}, + traits::Get, +}; +use codec::{Decode, Encode, EncodeLike, MaxEncodedLen}; use core::{ ops::{Deref, Index, IndexMut}, slice::SliceIndex, }; -use crate::{ - traits::Get, - storage::{StorageDecodeLength, StorageTryAppend}, -}; +use sp_std::{convert::TryFrom, fmt, marker::PhantomData, prelude::*}; /// A bounded vector. /// @@ -72,7 +71,7 @@ impl> Decode for BoundedVec { fn decode(input: &mut I) -> Result { let inner = Vec::::decode(input)?; if inner.len() > S::get() as usize { - return Err("BoundedVec exceeds its limit".into()); + return Err("BoundedVec exceeds its limit".into()) } Ok(Self(inner, PhantomData)) } @@ -342,9 +341,9 @@ where #[cfg(test)] pub mod test { use super::*; + use crate::Twox128; use sp_io::TestExternalities; use sp_std::convert::TryInto; - use crate::Twox128; crate::parameter_types! { pub const Seven: u32 = 7; diff --git a/frame/support/src/storage/child.rs b/frame/support/src/storage/child.rs index 52830c8ac5dc8..4b237aaa561fd 100644 --- a/frame/support/src/storage/child.rs +++ b/frame/support/src/storage/child.rs @@ -21,23 +21,17 @@ // NOTE: could replace unhashed by having only one kind of storage (top trie being the child info // of null length parent storage key). 
+pub use crate::sp_io::KillStorageResult; use crate::sp_std::prelude::*; -use codec::{Codec, Encode, Decode}; +use codec::{Codec, Decode, Encode}; pub use sp_core::storage::{ChildInfo, ChildType}; -pub use crate::sp_io::KillStorageResult; /// Return the value of the item in storage under `key`, or `None` if there is no explicit entry. -pub fn get( - child_info: &ChildInfo, - key: &[u8], -) -> Option { +pub fn get(child_info: &ChildInfo, key: &[u8]) -> Option { match child_info.child_type() { ChildType::ParentKeyId => { let storage_key = child_info.storage_key(); - sp_io::default_child_storage::get( - storage_key, - key, - ).and_then(|v| { + sp_io::default_child_storage::get(storage_key, key).and_then(|v| { Decode::decode(&mut &v[..]).map(Some).unwrap_or_else(|_| { // TODO #3700: error should be handleable. crate::runtime_print!( @@ -54,20 +48,13 @@ pub fn get( /// Return the value of the item in storage under `key`, or the type's default if there is no /// explicit entry. -pub fn get_or_default( - child_info: &ChildInfo, - key: &[u8], -) -> T { +pub fn get_or_default(child_info: &ChildInfo, key: &[u8]) -> T { get(child_info, key).unwrap_or_else(Default::default) } /// Return the value of the item in storage under `key`, or `default_value` if there is no /// explicit entry. -pub fn get_or( - child_info: &ChildInfo, - key: &[u8], - default_value: T, -) -> T { +pub fn get_or(child_info: &ChildInfo, key: &[u8], default_value: T) -> T { get(child_info, key).unwrap_or(default_value) } @@ -82,27 +69,16 @@ pub fn get_or_else T>( } /// Put `value` in storage under `key`. 
-pub fn put( - child_info: &ChildInfo, - key: &[u8], - value: &T, -) { +pub fn put(child_info: &ChildInfo, key: &[u8], value: &T) { match child_info.child_type() { - ChildType::ParentKeyId => value.using_encoded(|slice| - sp_io::default_child_storage::set( - child_info.storage_key(), - key, - slice, - ) - ), + ChildType::ParentKeyId => value.using_encoded(|slice| { + sp_io::default_child_storage::set(child_info.storage_key(), key, slice) + }), } } /// Remove `key` from storage, returning its value if it had an explicit entry or `None` otherwise. -pub fn take( - child_info: &ChildInfo, - key: &[u8], -) -> Option { +pub fn take(child_info: &ChildInfo, key: &[u8]) -> Option { let r = get(child_info, key); if r.is_some() { kill(child_info, key); @@ -112,20 +88,13 @@ pub fn take( /// Remove `key` from storage, returning its value, or, if there was no explicit entry in storage, /// the default for its type. -pub fn take_or_default( - child_info: &ChildInfo, - key: &[u8], -) -> T { +pub fn take_or_default(child_info: &ChildInfo, key: &[u8]) -> T { take(child_info, key).unwrap_or_else(Default::default) } /// Return the value of the item in storage under `key`, or `default_value` if there is no /// explicit entry. Ensure there is no explicit entry on return. -pub fn take_or( - child_info: &ChildInfo, - key: &[u8], - default_value: T, -) -> T { +pub fn take_or(child_info: &ChildInfo, key: &[u8], default_value: T) -> T { take(child_info, key).unwrap_or(default_value) } @@ -140,15 +109,11 @@ pub fn take_or_else T>( } /// Check to see if `key` has an explicit entry in storage. 
-pub fn exists( - child_info: &ChildInfo, - key: &[u8], -) -> bool { +pub fn exists(child_info: &ChildInfo, key: &[u8]) -> bool { match child_info.child_type() { - ChildType::ParentKeyId => sp_io::default_child_storage::read( - child_info.storage_key(), - key, &mut [0;0][..], 0, - ).is_some(), + ChildType::ParentKeyId => + sp_io::default_child_storage::read(child_info.storage_key(), key, &mut [0; 0][..], 0) + .is_some(), } } @@ -171,86 +136,50 @@ pub fn exists( /// not make much sense because it is not cumulative when called inside the same block. /// Use this function to distribute the deletion of a single child trie across multiple /// blocks. -pub fn kill_storage( - child_info: &ChildInfo, - limit: Option, -) -> KillStorageResult { +pub fn kill_storage(child_info: &ChildInfo, limit: Option) -> KillStorageResult { match child_info.child_type() { - ChildType::ParentKeyId => sp_io::default_child_storage::storage_kill( - child_info.storage_key(), - limit - ), + ChildType::ParentKeyId => + sp_io::default_child_storage::storage_kill(child_info.storage_key(), limit), } } /// Ensure `key` has no explicit entry in storage. -pub fn kill( - child_info: &ChildInfo, - key: &[u8], -) { +pub fn kill(child_info: &ChildInfo, key: &[u8]) { match child_info.child_type() { ChildType::ParentKeyId => { - sp_io::default_child_storage::clear( - child_info.storage_key(), - key, - ); + sp_io::default_child_storage::clear(child_info.storage_key(), key); }, } } /// Get a Vec of bytes from storage. -pub fn get_raw( - child_info: &ChildInfo, - key: &[u8], -) -> Option> { +pub fn get_raw(child_info: &ChildInfo, key: &[u8]) -> Option> { match child_info.child_type() { - ChildType::ParentKeyId => sp_io::default_child_storage::get( - child_info.storage_key(), - key, - ), + ChildType::ParentKeyId => sp_io::default_child_storage::get(child_info.storage_key(), key), } } /// Put a raw byte slice into storage. 
-pub fn put_raw( - child_info: &ChildInfo, - key: &[u8], - value: &[u8], -) { +pub fn put_raw(child_info: &ChildInfo, key: &[u8], value: &[u8]) { match child_info.child_type() { - ChildType::ParentKeyId => sp_io::default_child_storage::set( - child_info.storage_key(), - key, - value, - ), + ChildType::ParentKeyId => + sp_io::default_child_storage::set(child_info.storage_key(), key, value), } } /// Calculate current child root value. -pub fn root( - child_info: &ChildInfo, -) -> Vec { +pub fn root(child_info: &ChildInfo) -> Vec { match child_info.child_type() { - ChildType::ParentKeyId => sp_io::default_child_storage::root( - child_info.storage_key(), - ), + ChildType::ParentKeyId => sp_io::default_child_storage::root(child_info.storage_key()), } } /// Return the length in bytes of the value without reading it. `None` if it does not exist. -pub fn len( - child_info: &ChildInfo, - key: &[u8], -) -> Option { +pub fn len(child_info: &ChildInfo, key: &[u8]) -> Option { match child_info.child_type() { ChildType::ParentKeyId => { let mut buffer = [0; 0]; - sp_io::default_child_storage::read( - child_info.storage_key(), - key, - &mut buffer, - 0, - ) - } + sp_io::default_child_storage::read(child_info.storage_key(), key, &mut buffer, 0) + }, } } diff --git a/frame/support/src/storage/generator/double_map.rs b/frame/support/src/storage/generator/double_map.rs index 71d8ca3c043a6..3a68fe740ab08 100644 --- a/frame/support/src/storage/generator/double_map.rs +++ b/frame/support/src/storage/generator/double_map.rs @@ -15,11 +15,13 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use sp_std::prelude::*; -use sp_std::borrow::Borrow; -use codec::{FullCodec, FullEncode, Decode, Encode, EncodeLike}; -use crate::{storage::{self, unhashed, KeyPrefixIterator, StorageAppend, PrefixIterator}, Never}; -use crate::hash::{StorageHasher, Twox128, ReversibleStorageHasher}; +use crate::{ + hash::{ReversibleStorageHasher, StorageHasher, Twox128}, + storage::{self, unhashed, KeyPrefixIterator, PrefixIterator, StorageAppend}, + Never, +}; +use codec::{Decode, Encode, EncodeLike, FullCodec, FullEncode}; +use sp_std::{borrow::Borrow, prelude::*}; /// Generator for `StorageDoubleMap` used by `decl_storage`. /// @@ -63,9 +65,8 @@ pub trait StorageDoubleMap { let module_prefix_hashed = Twox128::hash(Self::module_prefix()); let storage_prefix_hashed = Twox128::hash(Self::storage_prefix()); - let mut result = Vec::with_capacity( - module_prefix_hashed.len() + storage_prefix_hashed.len() - ); + let mut result = + Vec::with_capacity(module_prefix_hashed.len() + storage_prefix_hashed.len()); result.extend_from_slice(&module_prefix_hashed[..]); result.extend_from_slice(&storage_prefix_hashed[..]); @@ -80,7 +81,8 @@ pub trait StorageDoubleMap { fn from_query_to_optional_value(v: Self::Query) -> Option; /// Generate the first part of the key used in top storage. - fn storage_double_map_final_key1(k1: KArg1) -> Vec where + fn storage_double_map_final_key1(k1: KArg1) -> Vec + where KArg1: EncodeLike, { let module_prefix_hashed = Twox128::hash(Self::module_prefix()); @@ -88,7 +90,7 @@ pub trait StorageDoubleMap { let key_hashed = k1.borrow().using_encoded(Self::Hasher1::hash); let mut final_key = Vec::with_capacity( - module_prefix_hashed.len() + storage_prefix_hashed.len() + key_hashed.as_ref().len() + module_prefix_hashed.len() + storage_prefix_hashed.len() + key_hashed.as_ref().len(), ); final_key.extend_from_slice(&module_prefix_hashed[..]); @@ -99,7 +101,8 @@ pub trait StorageDoubleMap { } /// Generate the full key used in top storage. 
- fn storage_double_map_final_key(k1: KArg1, k2: KArg2) -> Vec where + fn storage_double_map_final_key(k1: KArg1, k2: KArg2) -> Vec + where KArg1: EncodeLike, KArg2: EncodeLike, { @@ -109,10 +112,10 @@ pub trait StorageDoubleMap { let key2_hashed = k2.borrow().using_encoded(Self::Hasher2::hash); let mut final_key = Vec::with_capacity( - module_prefix_hashed.len() - + storage_prefix_hashed.len() - + key1_hashed.as_ref().len() - + key2_hashed.as_ref().len() + module_prefix_hashed.len() + + storage_prefix_hashed.len() + + key1_hashed.as_ref().len() + + key2_hashed.as_ref().len(), ); final_key.extend_from_slice(&module_prefix_hashed[..]); @@ -124,7 +127,8 @@ pub trait StorageDoubleMap { } } -impl storage::StorageDoubleMap for G where +impl storage::StorageDoubleMap for G +where K1: FullEncode, K2: FullEncode, V: FullCodec, @@ -132,21 +136,24 @@ impl storage::StorageDoubleMap for G where { type Query = G::Query; - fn hashed_key_for(k1: KArg1, k2: KArg2) -> Vec where + fn hashed_key_for(k1: KArg1, k2: KArg2) -> Vec + where KArg1: EncodeLike, KArg2: EncodeLike, { Self::storage_double_map_final_key(k1, k2) } - fn contains_key(k1: KArg1, k2: KArg2) -> bool where + fn contains_key(k1: KArg1, k2: KArg2) -> bool + where KArg1: EncodeLike, KArg2: EncodeLike, { unhashed::exists(&Self::storage_double_map_final_key(k1, k2)) } - fn get(k1: KArg1, k2: KArg2) -> Self::Query where + fn get(k1: KArg1, k2: KArg2) -> Self::Query + where KArg1: EncodeLike, KArg2: EncodeLike, { @@ -156,11 +163,13 @@ impl storage::StorageDoubleMap for G where fn try_get(k1: KArg1, k2: KArg2) -> Result where KArg1: EncodeLike, - KArg2: EncodeLike { + KArg2: EncodeLike, + { unhashed::get(&Self::storage_double_map_final_key(k1, k2)).ok_or(()) } - fn take(k1: KArg1, k2: KArg2) -> Self::Query where + fn take(k1: KArg1, k2: KArg2) -> Self::Query + where KArg1: EncodeLike, KArg2: EncodeLike, { @@ -170,16 +179,12 @@ impl storage::StorageDoubleMap for G where G::from_optional_value_to_query(value) } - fn swap( - 
x_k1: XKArg1, - x_k2: XKArg2, - y_k1: YKArg1, - y_k2: YKArg2 - ) where + fn swap(x_k1: XKArg1, x_k2: XKArg2, y_k1: YKArg1, y_k2: YKArg2) + where XKArg1: EncodeLike, XKArg2: EncodeLike, YKArg1: EncodeLike, - YKArg2: EncodeLike + YKArg2: EncodeLike, { let final_x_key = Self::storage_double_map_final_key(x_k1, x_k2); let final_y_key = Self::storage_double_map_final_key(y_k1, y_k2); @@ -197,7 +202,8 @@ impl storage::StorageDoubleMap for G where } } - fn insert(k1: KArg1, k2: KArg2, val: VArg) where + fn insert(k1: KArg1, k2: KArg2, val: VArg) + where KArg1: EncodeLike, KArg2: EncodeLike, VArg: EncodeLike, @@ -205,7 +211,8 @@ impl storage::StorageDoubleMap for G where unhashed::put(&Self::storage_double_map_final_key(k1, k2), &val.borrow()) } - fn remove(k1: KArg1, k2: KArg2) where + fn remove(k1: KArg1, k2: KArg2) + where KArg1: EncodeLike, KArg2: EncodeLike, { @@ -213,12 +220,15 @@ impl storage::StorageDoubleMap for G where } fn remove_prefix(k1: KArg1, limit: Option) -> sp_io::KillStorageResult - where KArg1: EncodeLike { + where + KArg1: EncodeLike, + { unhashed::kill_prefix(Self::storage_double_map_final_key1(k1).as_ref(), limit) } - fn iter_prefix_values(k1: KArg1) -> storage::PrefixIterator where - KArg1: ?Sized + EncodeLike + fn iter_prefix_values(k1: KArg1) -> storage::PrefixIterator + where + KArg1: ?Sized + EncodeLike, { let prefix = Self::storage_double_map_final_key1(k1); storage::PrefixIterator { @@ -229,12 +239,14 @@ impl storage::StorageDoubleMap for G where } } - fn mutate(k1: KArg1, k2: KArg2, f: F) -> R where + fn mutate(k1: KArg1, k2: KArg2, f: F) -> R + where KArg1: EncodeLike, KArg2: EncodeLike, F: FnOnce(&mut Self::Query) -> R, { - Self::try_mutate(k1, k2, |v| Ok::(f(v))).expect("`Never` can not be constructed; qed") + Self::try_mutate(k1, k2, |v| Ok::(f(v))) + .expect("`Never` can not be constructed; qed") } fn mutate_exists(k1: KArg1, k2: KArg2, f: F) -> R @@ -243,10 +255,12 @@ impl storage::StorageDoubleMap for G where KArg2: EncodeLike, F: 
FnOnce(&mut Option) -> R, { - Self::try_mutate_exists(k1, k2, |v| Ok::(f(v))).expect("`Never` can not be constructed; qed") + Self::try_mutate_exists(k1, k2, |v| Ok::(f(v))) + .expect("`Never` can not be constructed; qed") } - fn try_mutate(k1: KArg1, k2: KArg2, f: F) -> Result where + fn try_mutate(k1: KArg1, k2: KArg2, f: F) -> Result + where KArg1: EncodeLike, KArg2: EncodeLike, F: FnOnce(&mut Self::Query) -> Result, @@ -283,11 +297,8 @@ impl storage::StorageDoubleMap for G where ret } - fn append( - k1: KArg1, - k2: KArg2, - item: EncodeLikeItem, - ) where + fn append(k1: KArg1, k2: KArg2, item: EncodeLikeItem) + where KArg1: EncodeLike, KArg2: EncodeLike, Item: Encode, @@ -303,7 +314,10 @@ impl storage::StorageDoubleMap for G where OldHasher2: StorageHasher, KeyArg1: EncodeLike, KeyArg2: EncodeLike, - >(key1: KeyArg1, key2: KeyArg2) -> Option { + >( + key1: KeyArg1, + key2: KeyArg2, + ) -> Option { let old_key = { let module_prefix_hashed = Twox128::hash(Self::module_prefix()); let storage_prefix_hashed = Twox128::hash(Self::storage_prefix()); @@ -311,10 +325,10 @@ impl storage::StorageDoubleMap for G where let key2_hashed = key2.borrow().using_encoded(OldHasher2::hash); let mut final_key = Vec::with_capacity( - module_prefix_hashed.len() - + storage_prefix_hashed.len() - + key1_hashed.as_ref().len() - + key2_hashed.as_ref().len() + module_prefix_hashed.len() + + storage_prefix_hashed.len() + + key1_hashed.as_ref().len() + + key2_hashed.as_ref().len(), ); final_key.extend_from_slice(&module_prefix_hashed[..]); @@ -331,14 +345,11 @@ impl storage::StorageDoubleMap for G where } } -impl< - K1: FullCodec, - K2: FullCodec, - V: FullCodec, - G: StorageDoubleMap, -> storage::IterableStorageDoubleMap for G where +impl> + storage::IterableStorageDoubleMap for G +where G::Hasher1: ReversibleStorageHasher, - G::Hasher2: ReversibleStorageHasher + G::Hasher2: ReversibleStorageHasher, { type PartialKeyIterator = KeyPrefixIterator; type PrefixIterator = PrefixIterator<(K2, 
V)>; @@ -367,7 +378,7 @@ impl< closure: |raw_key_without_prefix| { let mut key_material = G::Hasher2::reverse(raw_key_without_prefix); K2::decode(&mut key_material) - } + }, } } @@ -405,7 +416,7 @@ impl< let mut k2_material = G::Hasher2::reverse(k1_k2_material); let k2 = K2::decode(&mut k2_material)?; Ok((k1, k2)) - } + }, } } @@ -418,8 +429,8 @@ impl< fn translate Option>(mut f: F) { let prefix = G::prefix_hash(); let mut previous_key = prefix.clone(); - while let Some(next) = sp_io::storage::next_key(&previous_key) - .filter(|n| n.starts_with(&prefix)) + while let Some(next) = + sp_io::storage::next_key(&previous_key).filter(|n| n.starts_with(&prefix)) { previous_key = next; let value = match unhashed::get::(&previous_key) { @@ -458,11 +469,11 @@ impl< /// Test iterators for StorageDoubleMap #[cfg(test)] mod test_iterators { - use codec::{Encode, Decode}; use crate::{ hash::StorageHasher, - storage::{generator::StorageDoubleMap, IterableStorageDoubleMap, unhashed}, + storage::{generator::StorageDoubleMap, unhashed, IterableStorageDoubleMap}, }; + use codec::{Decode, Encode}; pub trait Config: 'static { type Origin; @@ -521,10 +532,7 @@ mod test_iterators { vec![(3, 3), (0, 0), (2, 2), (1, 1)], ); - assert_eq!( - DoubleMap::iter_values().collect::>(), - vec![3, 0, 2, 1], - ); + assert_eq!(DoubleMap::iter_values().collect::>(), vec![3, 0, 2, 1],); assert_eq!( DoubleMap::drain().collect::>(), @@ -551,15 +559,9 @@ mod test_iterators { vec![(1, 1), (2, 2), (0, 0), (3, 3)], ); - assert_eq!( - DoubleMap::iter_key_prefix(k1).collect::>(), - vec![1, 2, 0, 3], - ); + assert_eq!(DoubleMap::iter_key_prefix(k1).collect::>(), vec![1, 2, 0, 3],); - assert_eq!( - DoubleMap::iter_prefix_values(k1).collect::>(), - vec![1, 2, 0, 3], - ); + assert_eq!(DoubleMap::iter_prefix_values(k1).collect::>(), vec![1, 2, 0, 3],); assert_eq!( DoubleMap::drain_prefix(k1).collect::>(), @@ -580,15 +582,12 @@ mod test_iterators { } // Wrong key1 - unhashed::put( - &[prefix.clone(), vec![1, 2, 
3]].concat(), - &3u64.encode() - ); + unhashed::put(&[prefix.clone(), vec![1, 2, 3]].concat(), &3u64.encode()); // Wrong key2 unhashed::put( &[prefix.clone(), crate::Blake2_128Concat::hash(&1u16.encode())].concat(), - &3u64.encode() + &3u64.encode(), ); // Wrong value @@ -597,11 +596,12 @@ mod test_iterators { prefix.clone(), crate::Blake2_128Concat::hash(&1u16.encode()), crate::Twox64Concat::hash(&2u32.encode()), - ].concat(), + ] + .concat(), &vec![1], ); - DoubleMap::translate(|_k1, _k2, v: u64| Some(v*2)); + DoubleMap::translate(|_k1, _k2, v: u64| Some(v * 2)); assert_eq!( DoubleMap::iter().collect::>(), vec![(3, 3, 6), (0, 0, 0), (2, 2, 4), (1, 1, 2)], diff --git a/frame/support/src/storage/generator/map.rs b/frame/support/src/storage/generator/map.rs index e58a001c679fd..48593dba17bd2 100644 --- a/frame/support/src/storage/generator/map.rs +++ b/frame/support/src/storage/generator/map.rs @@ -15,14 +15,15 @@ // See the License for the specific language governing permissions and // limitations under the License. -#[cfg(not(feature = "std"))] -use sp_std::prelude::*; -use sp_std::borrow::Borrow; -use codec::{FullCodec, FullEncode, Decode, Encode, EncodeLike}; use crate::{ - storage::{self, unhashed, KeyPrefixIterator, StorageAppend, PrefixIterator}, - Never, hash::{StorageHasher, Twox128, ReversibleStorageHasher}, + hash::{ReversibleStorageHasher, StorageHasher, Twox128}, + storage::{self, unhashed, KeyPrefixIterator, PrefixIterator, StorageAppend}, + Never, }; +use codec::{Decode, Encode, EncodeLike, FullCodec, FullEncode}; +use sp_std::borrow::Borrow; +#[cfg(not(feature = "std"))] +use sp_std::prelude::*; /// Generator for `StorageMap` used by `decl_storage`. 
/// @@ -54,9 +55,8 @@ pub trait StorageMap { let module_prefix_hashed = Twox128::hash(Self::module_prefix()); let storage_prefix_hashed = Twox128::hash(Self::storage_prefix()); - let mut result = Vec::with_capacity( - module_prefix_hashed.len() + storage_prefix_hashed.len() - ); + let mut result = + Vec::with_capacity(module_prefix_hashed.len() + storage_prefix_hashed.len()); result.extend_from_slice(&module_prefix_hashed[..]); result.extend_from_slice(&storage_prefix_hashed[..]); @@ -71,7 +71,8 @@ pub trait StorageMap { fn from_query_to_optional_value(v: Self::Query) -> Option; /// Generate the full key used in top storage. - fn storage_map_final_key(key: KeyArg) -> Vec where + fn storage_map_final_key(key: KeyArg) -> Vec + where KeyArg: EncodeLike, { let module_prefix_hashed = Twox128::hash(Self::module_prefix()); @@ -79,7 +80,7 @@ pub trait StorageMap { let key_hashed = key.borrow().using_encoded(Self::Hasher::hash); let mut final_key = Vec::with_capacity( - module_prefix_hashed.len() + storage_prefix_hashed.len() + key_hashed.as_ref().len() + module_prefix_hashed.len() + storage_prefix_hashed.len() + key_hashed.as_ref().len(), ); final_key.extend_from_slice(&module_prefix_hashed[..]); @@ -98,11 +99,9 @@ pub struct StorageMapIterator { _phantom: ::sp_std::marker::PhantomData<(K, V, Hasher)>, } -impl< - K: Decode + Sized, - V: Decode + Sized, - Hasher: ReversibleStorageHasher -> Iterator for StorageMapIterator { +impl Iterator + for StorageMapIterator +{ type Item = (K, V); fn next(&mut self) -> Option<(K, V)> { @@ -117,27 +116,25 @@ impl< if self.drain { unhashed::kill(&self.previous_key) } - let mut key_material = Hasher::reverse(&self.previous_key[self.prefix.len()..]); + let mut key_material = + Hasher::reverse(&self.previous_key[self.prefix.len()..]); match K::decode(&mut key_material) { Ok(key) => Some((key, value)), Err(_) => continue, } - } + }, None => continue, } - } + }, None => None, } } } } -impl< - K: FullCodec, - V: FullCodec, - G: StorageMap, -> 
storage::IterableStorageMap for G where - G::Hasher: ReversibleStorageHasher +impl> storage::IterableStorageMap for G +where + G::Hasher: ReversibleStorageHasher, { type Iterator = PrefixIterator<(K, V)>; type KeyIterator = KeyPrefixIterator; @@ -166,7 +163,7 @@ impl< closure: |raw_key_without_prefix| { let mut key_material = G::Hasher::reverse(raw_key_without_prefix); K::decode(&mut key_material) - } + }, } } @@ -180,8 +177,8 @@ impl< fn translate Option>(mut f: F) { let prefix = G::prefix_hash(); let mut previous_key = prefix.clone(); - while let Some(next) = sp_io::storage::next_key(&previous_key) - .filter(|n| n.starts_with(&prefix)) + while let Some(next) = + sp_io::storage::next_key(&previous_key).filter(|n| n.starts_with(&prefix)) { previous_key = next; let value = match unhashed::get::(&previous_key) { @@ -254,16 +251,21 @@ impl> storage::StorageMap } fn mutate, R, F: FnOnce(&mut Self::Query) -> R>(key: KeyArg, f: F) -> R { - Self::try_mutate(key, |v| Ok::(f(v))).expect("`Never` can not be constructed; qed") + Self::try_mutate(key, |v| Ok::(f(v))) + .expect("`Never` can not be constructed; qed") } - fn mutate_exists, R, F: FnOnce(&mut Option) -> R>(key: KeyArg, f: F) -> R { - Self::try_mutate_exists(key, |v| Ok::(f(v))).expect("`Never` can not be constructed; qed") + fn mutate_exists, R, F: FnOnce(&mut Option) -> R>( + key: KeyArg, + f: F, + ) -> R { + Self::try_mutate_exists(key, |v| Ok::(f(v))) + .expect("`Never` can not be constructed; qed") } fn try_mutate, R, E, F: FnOnce(&mut Self::Query) -> Result>( key: KeyArg, - f: F + f: F, ) -> Result { let final_key = Self::storage_map_final_key(key); let mut val = G::from_optional_value_to_query(unhashed::get(final_key.as_ref())); @@ -280,7 +282,7 @@ impl> storage::StorageMap fn try_mutate_exists, R, E, F: FnOnce(&mut Option) -> Result>( key: KeyArg, - f: F + f: F, ) -> Result { let final_key = Self::storage_map_final_key(key); let mut val = unhashed::get(final_key.as_ref()); @@ -319,7 +321,9 @@ impl> 
storage::StorageMap let key_hashed = key.borrow().using_encoded(OldHasher::hash); let mut final_key = Vec::with_capacity( - module_prefix_hashed.len() + storage_prefix_hashed.len() + key_hashed.as_ref().len() + module_prefix_hashed.len() + + storage_prefix_hashed.len() + + key_hashed.as_ref().len(), ); final_key.extend_from_slice(&module_prefix_hashed[..]); @@ -338,11 +342,11 @@ impl> storage::StorageMap /// Test iterators for StorageMap #[cfg(test)] mod test_iterators { - use codec::{Encode, Decode}; use crate::{ hash::StorageHasher, - storage::{generator::StorageMap, IterableStorageMap, unhashed}, + storage::{generator::StorageMap, unhashed, IterableStorageMap}, }; + use codec::{Decode, Encode}; pub trait Config: 'static { type Origin; @@ -421,7 +425,7 @@ mod test_iterators { &vec![1], ); - Map::translate(|_k1, v: u64| Some(v*2)); + Map::translate(|_k1, v: u64| Some(v * 2)); assert_eq!(Map::iter().collect::>(), vec![(3, 6), (0, 0), (2, 4), (1, 2)]); }) } diff --git a/frame/support/src/storage/generator/mod.rs b/frame/support/src/storage/generator/mod.rs index 578831314c1f6..86129091b7ef2 100644 --- a/frame/support/src/storage/generator/mod.rs +++ b/frame/support/src/storage/generator/mod.rs @@ -24,23 +24,25 @@ //! //! This is internal api and is subject to change. 
+mod double_map; mod map; mod nmap; -mod double_map; mod value; +pub use double_map::StorageDoubleMap; pub use map::StorageMap; pub use nmap::StorageNMap; -pub use double_map::StorageDoubleMap; pub use value::StorageValue; #[cfg(test)] #[allow(dead_code)] mod tests { - use sp_io::TestExternalities; + use crate::{ + assert_noop, assert_ok, + storage::{generator::StorageValue, unhashed, IterableStorageMap}, + }; use codec::Encode; - use crate::storage::{unhashed, generator::StorageValue, IterableStorageMap}; - use crate::{assert_noop, assert_ok}; + use sp_io::TestExternalities; struct Runtime; @@ -80,7 +82,7 @@ mod tests { // translate let translate_fn = |old: Option| -> Option<(u64, u64)> { - old.map(|o| (o.into(), (o*2).into())) + old.map(|o| (o.into(), (o * 2).into())) }; let res = Value::translate(translate_fn); debug_assert!(res.is_ok()); @@ -105,11 +107,16 @@ mod tests { ); // do translation. - NumberMap::translate(|k: u32, v: u64| if k % 2 == 0 { Some((k as u64) << 32 | v) } else { None }); + NumberMap::translate( + |k: u32, v: u64| if k % 2 == 0 { Some((k as u64) << 32 | v) } else { None }, + ); assert_eq!( NumberMap::iter().collect::>(), - (0..50u32).map(|x| x * 2).map(|x| (x, (x as u64) << 32 | x as u64)).collect::>(), + (0..50u32) + .map(|x| x * 2) + .map(|x| (x, (x as u64) << 32 | x as u64)) + .collect::>(), ); }) } @@ -123,20 +130,29 @@ mod tests { assert_eq!(DoubleMap::get(0, 0), 0); // `assert_noop` ensures that the state does not change - assert_noop!(Value::try_mutate(|value| -> Result<(), &'static str> { - *value = (2, 2); - Err("don't change value") - }), "don't change value"); + assert_noop!( + Value::try_mutate(|value| -> Result<(), &'static str> { + *value = (2, 2); + Err("don't change value") + }), + "don't change value" + ); - assert_noop!(NumberMap::try_mutate(0, |value| -> Result<(), &'static str> { - *value = 4; - Err("don't change value") - }), "don't change value"); + assert_noop!( + NumberMap::try_mutate(0, |value| -> Result<(), &'static 
str> { + *value = 4; + Err("don't change value") + }), + "don't change value" + ); - assert_noop!(DoubleMap::try_mutate(0, 0, |value| -> Result<(), &'static str> { - *value = 6; - Err("don't change value") - }), "don't change value"); + assert_noop!( + DoubleMap::try_mutate(0, 0, |value| -> Result<(), &'static str> { + *value = 6; + Err("don't change value") + }), + "don't change value" + ); // Showing this explicitly for clarity assert_eq!(Value::get(), (0, 0)); diff --git a/frame/support/src/storage/generator/nmap.rs b/frame/support/src/storage/generator/nmap.rs index 49c8c94ea7a94..54824c62048cd 100755 --- a/frame/support/src/storage/generator/nmap.rs +++ b/frame/support/src/storage/generator/nmap.rs @@ -228,7 +228,7 @@ where fn try_mutate(key: KArg, f: F) -> Result where KArg: EncodeLikeTuple + TupleToEncodedIter, - F: FnOnce(&mut Self::Query) -> Result + F: FnOnce(&mut Self::Query) -> Result, { let final_key = Self::storage_n_map_final_key::(key); let mut val = G::from_optional_value_to_query(unhashed::get(final_key.as_ref())); @@ -373,7 +373,7 @@ impl> closure: |raw_key_without_prefix| { let (final_key, _) = K::decode_final_key(raw_key_without_prefix)?; Ok(final_key) - } + }, } } @@ -394,16 +394,16 @@ impl> Some(value) => value, None => { log::error!("Invalid translate: fail to decode old value"); - continue; - } + continue + }, }; let final_key = match K::decode_final_key(&previous_key[prefix.len()..]) { Ok((final_key, _)) => final_key, Err(_) => { log::error!("Invalid translate: fail to decode key"); - continue; - } + continue + }, }; match f(final_key, value) { @@ -452,10 +452,7 @@ mod test_iterators { fn key_after_prefix(mut prefix: Vec) -> Vec { let last = prefix.iter_mut().last().unwrap(); - assert!( - *last != 255, - "mock function not implemented for this prefix" - ); + assert!(*last != 255, "mock function not implemented for this prefix"); *last += 1; prefix } @@ -498,10 +495,7 @@ mod test_iterators { vec![((3, 3), 3), ((0, 0), 0), ((2, 2), 2), ((1, 
1), 1)], ); - assert_eq!( - NMap::iter_keys().collect::>(), - vec![(3, 3), (0, 0), (2, 2), (1, 1)], - ); + assert_eq!(NMap::iter_keys().collect::>(), vec![(3, 3), (0, 0), (2, 2), (1, 1)],); assert_eq!(NMap::iter_values().collect::>(), vec![3, 0, 2, 1],); @@ -511,10 +505,7 @@ mod test_iterators { ); assert_eq!(NMap::iter().collect::>(), vec![]); - assert_eq!( - unhashed::get(&key_before_prefix(prefix.clone())), - Some(1u64) - ); + assert_eq!(unhashed::get(&key_before_prefix(prefix.clone())), Some(1u64)); assert_eq!(unhashed::get(&key_after_prefix(prefix.clone())), Some(1u64)); // Prefix iterator @@ -533,15 +524,9 @@ mod test_iterators { vec![(1, 1), (2, 2), (0, 0), (3, 3)], ); - assert_eq!( - NMap::iter_key_prefix((k1,)).collect::>(), - vec![1, 2, 0, 3], - ); + assert_eq!(NMap::iter_key_prefix((k1,)).collect::>(), vec![1, 2, 0, 3],); - assert_eq!( - NMap::iter_prefix_values((k1,)).collect::>(), - vec![1, 2, 0, 3], - ); + assert_eq!(NMap::iter_prefix_values((k1,)).collect::>(), vec![1, 2, 0, 3],); assert_eq!( NMap::drain_prefix((k1,)).collect::>(), @@ -549,10 +534,7 @@ mod test_iterators { ); assert_eq!(NMap::iter_prefix((k1,)).collect::>(), vec![]); - assert_eq!( - unhashed::get(&key_before_prefix(prefix.clone())), - Some(1u64) - ); + assert_eq!(unhashed::get(&key_before_prefix(prefix.clone())), Some(1u64)); assert_eq!(unhashed::get(&key_after_prefix(prefix.clone())), Some(1u64)); // Translate @@ -569,11 +551,7 @@ mod test_iterators { // Wrong key2 unhashed::put( - &[ - prefix.clone(), - crate::Blake2_128Concat::hash(&1u16.encode()), - ] - .concat(), + &[prefix.clone(), crate::Blake2_128Concat::hash(&1u16.encode())].concat(), &3u64.encode(), ); diff --git a/frame/support/src/storage/generator/value.rs b/frame/support/src/storage/generator/value.rs index e07c952320aa5..c765e059ec149 100644 --- a/frame/support/src/storage/generator/value.rs +++ b/frame/support/src/storage/generator/value.rs @@ -15,12 +15,12 @@ // See the License for the specific language governing 
permissions and // limitations under the License. -use codec::{FullCodec, Encode, EncodeLike, Decode}; use crate::{ - Never, + hash::{StorageHasher, Twox128}, storage::{self, unhashed, StorageAppend}, - hash::{Twox128, StorageHasher}, + Never, }; +use codec::{Decode, Encode, EncodeLike, FullCodec}; /// Generator for `StorageValue` used by `decl_storage`. /// @@ -78,7 +78,8 @@ impl> storage::StorageValue for G { // attempt to get the length directly. let maybe_old = unhashed::get_raw(&key) - .map(|old_data| O::decode(&mut &old_data[..]).map_err(|_| ())).transpose()?; + .map(|old_data| O::decode(&mut &old_data[..]).map_err(|_| ())) + .transpose()?; let maybe_new = f(maybe_old); if let Some(new) = maybe_new.as_ref() { new.using_encoded(|d| unhashed::put_raw(&key, d)); diff --git a/frame/support/src/storage/hashed.rs b/frame/support/src/storage/hashed.rs index a0c9ab6708e7f..241caff809b3d 100644 --- a/frame/support/src/storage/hashed.rs +++ b/frame/support/src/storage/hashed.rs @@ -18,8 +18,8 @@ //! Operation on runtime storage using hashed keys. use super::unhashed; +use codec::{Decode, Encode}; use sp_std::prelude::*; -use codec::{Encode, Decode}; /// Return the value of the item in storage under `key`, or `None` if there is no explicit entry. pub fn get(hash: &HashFn, key: &[u8]) -> Option diff --git a/frame/support/src/storage/migration.rs b/frame/support/src/storage/migration.rs index 62db2eff839fb..701b2627f31c4 100644 --- a/frame/support/src/storage/migration.rs +++ b/frame/support/src/storage/migration.rs @@ -17,10 +17,9 @@ //! Some utilities for helping access storage with arbitrary key types. 
+use crate::{hash::ReversibleStorageHasher, storage::unhashed, StorageHasher, Twox128}; +use codec::{Decode, Encode}; use sp_std::prelude::*; -use codec::{Encode, Decode}; -use crate::{StorageHasher, Twox128, storage::unhashed}; -use crate::hash::ReversibleStorageHasher; use super::PrefixIterator; @@ -34,14 +33,18 @@ pub struct StorageIterator { impl StorageIterator { /// Construct iterator to iterate over map items in `module` for the map called `item`. - #[deprecated(note="Please use the storage_iter or storage_iter_with_suffix functions instead")] + #[deprecated( + note = "Please use the storage_iter or storage_iter_with_suffix functions instead" + )] pub fn new(module: &[u8], item: &[u8]) -> Self { #[allow(deprecated)] Self::with_suffix(module, item, &[][..]) } /// Construct iterator to iterate over map items in `module` for the map called `item`. - #[deprecated(note="Please use the storage_iter or storage_iter_with_suffix functions instead")] + #[deprecated( + note = "Please use the storage_iter or storage_iter_with_suffix functions instead" + )] pub fn with_suffix(module: &[u8], item: &[u8], suffix: &[u8]) -> Self { let mut prefix = Vec::new(); prefix.extend_from_slice(&Twox128::hash(module)); @@ -75,10 +78,10 @@ impl Iterator for StorageIterator { frame_support::storage::unhashed::kill(&next); } Some((self.previous_key[self.prefix.len()..].to_vec(), value)) - } + }, None => continue, } - } + }, None => None, } } @@ -95,14 +98,18 @@ pub struct StorageKeyIterator { impl StorageKeyIterator { /// Construct iterator to iterate over map items in `module` for the map called `item`. 
- #[deprecated(note="Please use the storage_key_iter or storage_key_iter_with_suffix functions instead")] + #[deprecated( + note = "Please use the storage_key_iter or storage_key_iter_with_suffix functions instead" + )] pub fn new(module: &[u8], item: &[u8]) -> Self { #[allow(deprecated)] Self::with_suffix(module, item, &[][..]) } /// Construct iterator to iterate over map items in `module` for the map called `item`. - #[deprecated(note="Please use the storage_key_iter or storage_key_iter_with_suffix functions instead")] + #[deprecated( + note = "Please use the storage_key_iter or storage_key_iter_with_suffix functions instead" + )] pub fn with_suffix(module: &[u8], item: &[u8], suffix: &[u8]) -> Self { let mut prefix = Vec::new(); prefix.extend_from_slice(&Twox128::hash(module)); @@ -141,13 +148,13 @@ impl Iterator frame_support::storage::unhashed::kill(&next); } Some((key, value)) - } + }, None => continue, } - } + }, Err(_) => continue, } - } + }, None => None, } } @@ -187,7 +194,11 @@ pub fn storage_key_iter( +pub fn storage_key_iter_with_suffix< + K: Decode + Sized, + T: Decode + Sized, + H: ReversibleStorageHasher, +>( module: &[u8], item: &[u8], suffix: &[u8], @@ -279,7 +290,7 @@ pub fn take_storage_item pub fn move_storage_from_pallet( storage_name: &[u8], old_pallet_name: &[u8], - new_pallet_name: &[u8] + new_pallet_name: &[u8], ) { let mut new_prefix = Vec::new(); new_prefix.extend_from_slice(&Twox128::hash(new_pallet_name)); @@ -347,18 +358,14 @@ pub fn move_prefix(from_prefix: &[u8], to_prefix: &[u8]) { #[cfg(test)] mod tests { + use super::{ + move_pallet, move_prefix, move_storage_from_pallet, storage_iter, storage_key_iter, + }; use crate::{ - pallet_prelude::{StorageValue, StorageMap, Twox64Concat, Twox128}, hash::StorageHasher, + pallet_prelude::{StorageMap, StorageValue, Twox128, Twox64Concat}, }; use sp_io::TestExternalities; - use super::{ - move_prefix, - move_pallet, - move_storage_from_pallet, - storage_iter, - storage_key_iter, - }; struct 
OldPalletStorageValuePrefix; impl frame_support::traits::StorageInstance for OldPalletStorageValuePrefix { @@ -459,21 +466,22 @@ mod tests { OldStorageMap::insert(3, 4); assert_eq!( - storage_key_iter::(b"my_old_pallet", b"foo_map").collect::>(), + storage_key_iter::(b"my_old_pallet", b"foo_map") + .collect::>(), vec![(1, 2), (3, 4)], ); assert_eq!( - storage_iter(b"my_old_pallet", b"foo_map").drain().map(|t| t.1).collect::>(), + storage_iter(b"my_old_pallet", b"foo_map") + .drain() + .map(|t| t.1) + .collect::>(), vec![2, 4], ); assert_eq!(OldStorageMap::iter().collect::>(), vec![]); // Empty because storage iterator skips over the entry under the first key - assert_eq!( - storage_iter::(b"my_old_pallet", b"foo_value").drain().next(), - None - ); + assert_eq!(storage_iter::(b"my_old_pallet", b"foo_value").drain().next(), None); assert_eq!(OldStorageValue::get(), Some(3)); }); } diff --git a/frame/support/src/storage/mod.rs b/frame/support/src/storage/mod.rs index 34fa804261d9e..f7e3f578de706 100644 --- a/frame/support/src/storage/mod.rs +++ b/frame/support/src/storage/mod.rs @@ -17,32 +17,32 @@ //! Stuff to do with the runtime's storage. 
-use sp_core::storage::ChildInfo; -use sp_std::prelude::*; -use codec::{FullCodec, FullEncode, Encode, EncodeLike, Decode}; +pub use self::types::StorageEntryMetadata; use crate::{ - hash::{Twox128, StorageHasher, ReversibleStorageHasher}, + hash::{ReversibleStorageHasher, StorageHasher, Twox128}, storage::types::{ EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, KeyGenerator, ReversibleKeyGenerator, TupleToEncodedIter, }, }; +use codec::{Decode, Encode, EncodeLike, FullCodec, FullEncode}; +use sp_core::storage::ChildInfo; use sp_runtime::generic::{Digest, DigestItem}; pub use sp_runtime::TransactionOutcome; -pub use self::types::StorageEntryMetadata; +use sp_std::prelude::*; pub use types::Key; -pub mod unhashed; -pub mod hashed; pub mod bounded_btree_map; pub mod bounded_btree_set; pub mod bounded_vec; -pub mod weak_bounded_vec; pub mod child; #[doc(hidden)] pub mod generator; +pub mod hashed; pub mod migration; pub mod types; +pub mod unhashed; +pub mod weak_bounded_vec; #[cfg(all(feature = "std", any(test, debug_assertions)))] mod debug_helper { @@ -102,9 +102,7 @@ pub fn require_transaction() { /// /// Transactions can be nested to any depth. Commits happen to the parent transaction. pub fn with_transaction(f: impl FnOnce() -> TransactionOutcome) -> R { - use sp_io::storage::{ - start_transaction, commit_transaction, rollback_transaction, - }; + use sp_io::storage::{commit_transaction, rollback_transaction, start_transaction}; use TransactionOutcome::*; start_transaction(); @@ -113,8 +111,14 @@ pub fn with_transaction(f: impl FnOnce() -> TransactionOutcome) -> R { let _guard = debug_helper::inc_transaction_level(); match f() { - Commit(res) => { commit_transaction(); res }, - Rollback(res) => { rollback_transaction(); res }, + Commit(res) => { + commit_transaction(); + res + }, + Rollback(res) => { + rollback_transaction(); + res + }, } } @@ -206,7 +210,10 @@ pub trait StorageValue { /// /// `None` does not mean that `get()` does not return a value. 
The default value is completly /// ignored by this function. - fn decode_len() -> Option where T: StorageDecodeLength { + fn decode_len() -> Option + where + T: StorageDecodeLength, + { T::decode_len(&Self::hashed_key()) } } @@ -253,7 +260,10 @@ pub trait StorageMap { /// Mutate the value under a key. /// /// Deletes the item if mutated to a `None`. - fn mutate_exists, R, F: FnOnce(&mut Option) -> R>(key: KeyArg, f: F) -> R; + fn mutate_exists, R, F: FnOnce(&mut Option) -> R>( + key: KeyArg, + f: F, + ) -> R; /// Mutate the item, only if an `Ok` value is returned. Deletes the item if mutated to a `None`. fn try_mutate_exists, R, E, F: FnOnce(&mut Option) -> Result>( @@ -293,7 +303,8 @@ pub trait StorageMap { /// `None` does not mean that `get()` does not return a value. The default value is completly /// ignored by this function. fn decode_len>(key: KeyArg) -> Option - where V: StorageDecodeLength, + where + V: StorageDecodeLength, { V::decode_len(&Self::hashed_key_for(key)) } @@ -338,11 +349,9 @@ pub trait IterableStorageMap: StorageMap { } /// A strongly-typed double map in storage whose secondary keys and values can be iterated over. -pub trait IterableStorageDoubleMap< - K1: FullCodec, - K2: FullCodec, - V: FullCodec ->: StorageDoubleMap { +pub trait IterableStorageDoubleMap: + StorageDoubleMap +{ /// The type that iterates over all `key2`. type PartialKeyIterator: Iterator; @@ -402,19 +411,22 @@ pub trait IterableStorageNMap: StorageN /// remove values whose prefix is `kp` to the map while doing this, you'll get undefined /// results. fn iter_prefix(kp: KP) -> PrefixIterator<(>::Suffix, V)> - where K: HasReversibleKeyPrefix; + where + K: HasReversibleKeyPrefix; /// Enumerate all suffix keys in the map with prefix key `kp` in no particular order. If you /// add or remove values whose prefix is `kp` to the map while doing this, you'll get undefined /// results. 
fn iter_key_prefix(kp: KP) -> KeyPrefixIterator<>::Suffix> - where K: HasReversibleKeyPrefix; + where + K: HasReversibleKeyPrefix; /// Remove all elements from the map with prefix key `kp` and iterate through them in no /// particular order. If you add elements with prefix key `kp` to the map while doing this, /// you'll get undefined results. fn drain_prefix(kp: KP) -> PrefixIterator<(>::Suffix, V)> - where K: HasReversibleKeyPrefix; + where + K: HasReversibleKeyPrefix; /// Enumerate all elements in the map in no particular order. If you add or remove values to /// the map while doing this, you'll get undefined results. @@ -500,11 +512,13 @@ pub trait StorageDoubleMap { /// Remove all values under the first key. fn remove_prefix(k1: KArg1, limit: Option) -> sp_io::KillStorageResult - where KArg1: ?Sized + EncodeLike; + where + KArg1: ?Sized + EncodeLike; /// Iterate over values that share the first key. fn iter_prefix_values(k1: KArg1) -> PrefixIterator - where KArg1: ?Sized + EncodeLike; + where + KArg1: ?Sized + EncodeLike; /// Mutate the value under the given keys. fn mutate(k1: KArg1, k2: KArg2, f: F) -> R @@ -543,11 +557,8 @@ pub trait StorageDoubleMap { /// If the storage item is not encoded properly, the storage will be overwritten /// and set to `[item]`. Any default value set for the storage item will be ignored /// on overwrite. - fn append( - k1: KArg1, - k2: KArg2, - item: EncodeLikeItem, - ) where + fn append(k1: KArg1, k2: KArg2, item: EncodeLikeItem) + where KArg1: EncodeLike, KArg2: EncodeLike, Item: Encode, @@ -567,10 +578,10 @@ pub trait StorageDoubleMap { /// `None` does not mean that `get()` does not return a value. The default value is completly /// ignored by this function. 
fn decode_len(key1: KArg1, key2: KArg2) -> Option - where - KArg1: EncodeLike, - KArg2: EncodeLike, - V: StorageDecodeLength, + where + KArg1: EncodeLike, + KArg2: EncodeLike, + V: StorageDecodeLength, { V::decode_len(&Self::hashed_key_for(key1, key2)) } @@ -584,7 +595,10 @@ pub trait StorageDoubleMap { OldHasher2: StorageHasher, KeyArg1: EncodeLike, KeyArg2: EncodeLike, - >(key1: KeyArg1, key2: KeyArg2) -> Option; + >( + key1: KeyArg1, + key2: KeyArg2, + ) -> Option; } /// An implementation of a map with an arbitrary number of keys. @@ -626,10 +640,13 @@ pub trait StorageNMap { /// Remove all values under the partial prefix key. fn remove_prefix(partial_key: KP, limit: Option) -> sp_io::KillStorageResult - where K: HasKeyPrefix; + where + K: HasKeyPrefix; /// Iterate over values that share the partial prefix key. - fn iter_prefix_values(partial_key: KP) -> PrefixIterator where K: HasKeyPrefix; + fn iter_prefix_values(partial_key: KP) -> PrefixIterator + where + K: HasKeyPrefix; /// Mutate the value under a key. fn mutate(key: KArg, f: F) -> R @@ -742,7 +759,7 @@ impl Iterator for PrefixIterator { self.previous_key, ); continue - } + }, }; if self.drain { unhashed::kill(&self.previous_key) @@ -757,11 +774,11 @@ impl Iterator for PrefixIterator { e, ); continue - } + }, }; Some(item) - } + }, None => None, } } @@ -808,12 +825,12 @@ impl Iterator for KeyPrefixIterator { Ok(item) => return Some(item), Err(e) => { log::error!("key failed to decode at {:?}: {:?}", self.previous_key, e); - continue; - } + continue + }, } } - return None; + return None } } } @@ -872,7 +889,10 @@ impl ChildTriePrefixIterator<(K, T)> { /// Construct iterator to iterate over child trie items in `child_info` with the prefix `prefix`. 
/// /// NOTE: Iterator with [`Self::drain`] will remove any key or value who failed to decode - pub fn with_prefix_over_key(child_info: &ChildInfo, prefix: &[u8]) -> Self { + pub fn with_prefix_over_key( + child_info: &ChildInfo, + prefix: &[u8], + ) -> Self { let prefix = prefix.to_vec(); let previous_key = prefix.clone(); let closure = |raw_key_without_prefix: &[u8], raw_value: &[u8]| { @@ -889,7 +909,7 @@ impl ChildTriePrefixIterator<(K, T)> { drain: false, fetch_previous_key: true, closure, - } + } } } @@ -906,7 +926,7 @@ impl Iterator for ChildTriePrefixIterator { &self.child_info.storage_key(), &self.previous_key, ) - .filter(|n| n.starts_with(&self.prefix)) + .filter(|n| n.starts_with(&self.prefix)) }; break match maybe_next { Some(next) => { @@ -919,7 +939,7 @@ impl Iterator for ChildTriePrefixIterator { self.previous_key, ); continue - } + }, }; if self.drain { child::kill(&self.child_info, &self.previous_key) @@ -934,11 +954,11 @@ impl Iterator for ChildTriePrefixIterator { e, ); continue - } + }, }; Some(item) - } + }, None => None, } } @@ -1000,8 +1020,8 @@ pub trait StoragePrefixedMap { fn translate_values Option>(mut f: F) { let prefix = Self::final_prefix(); let mut previous_key = prefix.clone().to_vec(); - while let Some(next) = sp_io::storage::next_key(&previous_key) - .filter(|n| n.starts_with(&prefix)) + while let Some(next) = + sp_io::storage::next_key(&previous_key).filter(|n| n.starts_with(&prefix)) { previous_key = next; let maybe_value = unhashed::get::(&previous_key); @@ -1011,10 +1031,7 @@ pub trait StoragePrefixedMap { None => unhashed::kill(&previous_key), }, None => { - log::error!( - "old key failed to decode at {:?}", - previous_key, - ); + log::error!("old key failed to decode at {:?}", previous_key,); continue }, } @@ -1219,13 +1236,13 @@ where #[cfg(test)] mod test { use super::*; + use crate::{assert_ok, hash::Identity}; + use bounded_vec::BoundedVec; + use core::convert::{TryFrom, TryInto}; + use generator::StorageValue as _; use 
sp_core::hashing::twox_128; - use crate::{hash::Identity, assert_ok}; use sp_io::TestExternalities; - use generator::StorageValue as _; - use bounded_vec::BoundedVec; use weak_bounded_vec::WeakBoundedVec; - use core::convert::{TryFrom, TryInto}; #[test] fn prefixed_map_works() { @@ -1364,8 +1381,7 @@ mod test { #[test] fn key_prefix_iterator_works() { TestExternalities::default().execute_with(|| { - use crate::storage::generator::StorageMap; - use crate::hash::Twox64Concat; + use crate::{hash::Twox64Concat, storage::generator::StorageMap}; struct MyStorageMap; impl StorageMap for MyStorageMap { type Query = u64; @@ -1427,30 +1443,21 @@ mod test { assert_eq!( ChildTriePrefixIterator::with_prefix(&child_info_a, &[2]) .collect::, u16)>>(), - vec![ - (vec![], 8), - (vec![2, 3], 8), - ], + vec![(vec![], 8), (vec![2, 3], 8),], ); assert_eq!( ChildTriePrefixIterator::with_prefix(&child_info_a, &[2]) .drain() .collect::, u16)>>(), - vec![ - (vec![], 8), - (vec![2, 3], 8), - ], + vec![(vec![], 8), (vec![2, 3], 8),], ); // The only remaining is the ones outside prefix assert_eq!( ChildTriePrefixIterator::with_prefix(&child_info_a, &[]) .collect::, u8)>>(), - vec![ - (vec![1, 2, 3], 8), - (vec![3], 8), - ], + vec![(vec![1, 2, 3], 8), (vec![3], 8),], ); child::put(&child_info_a, &[1, 2, 3], &8u16); @@ -1462,28 +1469,21 @@ mod test { assert_eq!( ChildTriePrefixIterator::with_prefix_over_key::(&child_info_a, &[2]) .collect::>(), - vec![ - (u16::decode(&mut &[2, 3][..]).unwrap(), 8), - ], + vec![(u16::decode(&mut &[2, 3][..]).unwrap(), 8),], ); assert_eq!( ChildTriePrefixIterator::with_prefix_over_key::(&child_info_a, &[2]) .drain() .collect::>(), - vec![ - (u16::decode(&mut &[2, 3][..]).unwrap(), 8), - ], + vec![(u16::decode(&mut &[2, 3][..]).unwrap(), 8),], ); // The only remaining is the ones outside prefix assert_eq!( ChildTriePrefixIterator::with_prefix(&child_info_a, &[]) .collect::, u8)>>(), - vec![ - (vec![1, 2, 3], 8), - (vec![3], 8), - ], + vec![(vec![1, 2, 3], 8), 
(vec![3], 8),], ); }); } diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index b5ec8e0d4cf55..4784ef6bea001 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -18,15 +18,15 @@ //! Storage map type. Implements StorageDoubleMap, StorageIterableDoubleMap, //! StoragePrefixedDoubleMap traits and their methods directly. -use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; use crate::{ metadata::{StorageEntryModifier, StorageEntryType}, storage::{ - StorageAppend, StorageTryAppend, StorageDecodeLength, StoragePrefixedMap, - types::{OptionQuery, StorageEntryMetadata, QueryKindTrait}, + types::{OptionQuery, QueryKindTrait, StorageEntryMetadata}, + StorageAppend, StorageDecodeLength, StoragePrefixedMap, StorageTryAppend, }, - traits::{GetDefault, StorageInstance, Get, StorageInfo}, + traits::{Get, GetDefault, StorageInfo, StorageInstance}, }; +use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; use sp_arithmetic::traits::SaturatedConversion; use sp_std::prelude::*; @@ -53,18 +53,26 @@ pub struct StorageDoubleMap< Hasher2, Key2, Value, - QueryKind=OptionQuery, - OnEmpty=GetDefault, - MaxValues=GetDefault, + QueryKind = OptionQuery, + OnEmpty = GetDefault, + MaxValues = GetDefault, >( - core::marker::PhantomData< - (Prefix, Hasher1, Key1, Hasher2, Key2, Value, QueryKind, OnEmpty, MaxValues) - > + core::marker::PhantomData<( + Prefix, + Hasher1, + Key1, + Hasher2, + Key2, + Value, + QueryKind, + OnEmpty, + MaxValues, + )>, ); impl - crate::storage::generator::StorageDoubleMap for - StorageDoubleMap + crate::storage::generator::StorageDoubleMap + for StorageDoubleMap where Prefix: StorageInstance, Hasher1: crate::hash::StorageHasher, @@ -94,8 +102,8 @@ where } impl - StoragePrefixedMap for - StorageDoubleMap + StoragePrefixedMap + for StorageDoubleMap where Prefix: StorageInstance, Hasher1: crate::hash::StorageHasher, @@ -161,7 
+169,8 @@ where pub fn try_get(k1: KArg1, k2: KArg2) -> Result where KArg1: EncodeLike, - KArg2: EncodeLike { + KArg2: EncodeLike, + { >::try_get(k1, k2) } @@ -175,8 +184,12 @@ where } /// Swap the values of two key-pairs. - pub fn swap(x_k1: XKArg1, x_k2: XKArg2, y_k1: YKArg1, y_k2: YKArg2) - where + pub fn swap( + x_k1: XKArg1, + x_k2: XKArg2, + y_k1: YKArg1, + y_k2: YKArg2, + ) where XKArg1: EncodeLike, XKArg2: EncodeLike, YKArg1: EncodeLike, @@ -206,13 +219,16 @@ where /// Remove all values under the first key. pub fn remove_prefix(k1: KArg1, limit: Option) -> sp_io::KillStorageResult - where KArg1: ?Sized + EncodeLike { + where + KArg1: ?Sized + EncodeLike, + { >::remove_prefix(k1, limit) } /// Iterate over values that share the first key. pub fn iter_prefix_values(k1: KArg1) -> crate::storage::PrefixIterator - where KArg1: ?Sized + EncodeLike + where + KArg1: ?Sized + EncodeLike, { >::iter_prefix_values(k1) } @@ -266,11 +282,8 @@ where /// If the storage item is not encoded properly, the storage will be overwritten /// and set to `[item]`. Any default value set for the storage item will be ignored /// on overwrite. - pub fn append( - k1: KArg1, - k2: KArg2, - item: EncodeLikeItem, - ) where + pub fn append(k1: KArg1, k2: KArg2, item: EncodeLikeItem) + where KArg1: EncodeLike, KArg2: EncodeLike, Item: Encode, @@ -310,10 +323,16 @@ where OldHasher2: crate::StorageHasher, KeyArg1: EncodeLike, KeyArg2: EncodeLike, - >(key1: KeyArg1, key2: KeyArg2) -> Option { - < - Self as crate::storage::StorageDoubleMap - >::migrate_keys::(key1, key2) + >( + key1: KeyArg1, + key2: KeyArg2, + ) -> Option { + >::migrate_keys::< + OldHasher1, + OldHasher2, + _, + _, + >(key1, key2) } /// Remove all value of the storage. 
@@ -360,9 +379,9 @@ where EncodeLikeItem: EncodeLike, Value: StorageTryAppend, { - < - Self as crate::storage::TryAppendDoubleMap - >::try_append(key1, key2, item) + >::try_append( + key1, key2, item, + ) } } @@ -401,7 +420,9 @@ where /// /// If you add elements with first key `k1` to the map while doing this, you'll get undefined /// results. - pub fn drain_prefix(k1: impl EncodeLike) -> crate::storage::PrefixIterator<(Key2, Value)> { + pub fn drain_prefix( + k1: impl EncodeLike, + ) -> crate::storage::PrefixIterator<(Key2, Value)> { >::drain_prefix(k1) } @@ -437,8 +458,8 @@ where } impl - StorageEntryMetadata for - StorageDoubleMap + StorageEntryMetadata + for StorageDoubleMap where Prefix: StorageInstance, Hasher1: crate::hash::StorageHasher, @@ -469,8 +490,8 @@ where } impl - crate::traits::StorageInfoTrait for - StorageDoubleMap + crate::traits::StorageInfoTrait + for StorageDoubleMap where Prefix: StorageInstance, Hasher1: crate::hash::StorageHasher, @@ -483,27 +504,25 @@ where MaxValues: Get>, { fn storage_info() -> Vec { - vec![ - StorageInfo { - pallet_name: Self::module_prefix().to_vec(), - storage_name: Self::storage_prefix().to_vec(), - prefix: Self::final_prefix().to_vec(), - max_values: MaxValues::get(), - max_size: Some( - Hasher1::max_len::() - .saturating_add(Hasher2::max_len::()) - .saturating_add(Value::max_encoded_len()) - .saturated_into(), - ), - } - ] + vec![StorageInfo { + pallet_name: Self::module_prefix().to_vec(), + storage_name: Self::storage_prefix().to_vec(), + prefix: Self::final_prefix().to_vec(), + max_values: MaxValues::get(), + max_size: Some( + Hasher1::max_len::() + .saturating_add(Hasher2::max_len::()) + .saturating_add(Value::max_encoded_len()) + .saturated_into(), + ), + }] } } /// It doesn't require to implement `MaxEncodedLen` and give no information for `max_size`. 
impl - crate::traits::PartialStorageInfoTrait for - StorageDoubleMap + crate::traits::PartialStorageInfoTrait + for StorageDoubleMap where Prefix: StorageInstance, Hasher1: crate::hash::StorageHasher, @@ -516,32 +535,32 @@ where MaxValues: Get>, { fn partial_storage_info() -> Vec { - vec![ - StorageInfo { - pallet_name: Self::module_prefix().to_vec(), - storage_name: Self::storage_prefix().to_vec(), - prefix: Self::final_prefix().to_vec(), - max_values: MaxValues::get(), - max_size: None - } - ] + vec![StorageInfo { + pallet_name: Self::module_prefix().to_vec(), + storage_name: Self::storage_prefix().to_vec(), + prefix: Self::final_prefix().to_vec(), + max_values: MaxValues::get(), + max_size: None, + }] } } #[cfg(test)] mod test { use super::*; - use assert_matches::assert_matches; - use sp_io::{TestExternalities, hashing::twox_128}; - use crate::hash::*; use crate::{ + hash::*, metadata::{StorageEntryModifier, StorageEntryType, StorageHasher}, storage::types::ValueQuery, }; + use assert_matches::assert_matches; + use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; impl StorageInstance for Prefix { - fn pallet_prefix() -> &'static str { "test" } + fn pallet_prefix() -> &'static str { + "test" + } const STORAGE_PREFIX: &'static str = "foo"; } @@ -554,11 +573,17 @@ mod test { #[test] fn test() { - type A = StorageDoubleMap< - Prefix, Blake2_128Concat, u16, Twox64Concat, u8, u32, OptionQuery - >; + type A = + StorageDoubleMap; type AValueQueryWithAnOnEmpty = StorageDoubleMap< - Prefix, Blake2_128Concat, u16, Twox64Concat, u8, u32, ValueQuery, ADefault + Prefix, + Blake2_128Concat, + u16, + Twox64Concat, + u8, + u32, + ValueQuery, + ADefault, >; type B = StorageDoubleMap; type C = StorageDoubleMap; @@ -600,17 +625,20 @@ mod test { A::remove(2, 20); let _: Result<(), ()> = AValueQueryWithAnOnEmpty::try_mutate(2, 20, |v| { - *v = *v * 2; Ok(()) + *v = *v * 2; + Ok(()) }); let _: Result<(), ()> = AValueQueryWithAnOnEmpty::try_mutate(2, 20, |v| { - *v = *v * 
2; Ok(()) + *v = *v * 2; + Ok(()) }); assert_eq!(A::contains_key(2, 20), true); assert_eq!(A::get(2, 20), Some(97 * 4)); A::remove(2, 20); let _: Result<(), ()> = AValueQueryWithAnOnEmpty::try_mutate(2, 20, |v| { - *v = *v * 2; Err(()) + *v = *v * 2; + Err(()) }); assert_eq!(A::contains_key(2, 20), false); @@ -649,7 +677,6 @@ mod test { assert_eq!(A::contains_key(2, 20), true); assert_eq!(A::get(2, 20), Some(100)); - A::insert(2, 20, 10); assert_eq!(A::take(2, 20), Some(10)); assert_eq!(A::contains_key(2, 20), false); @@ -674,7 +701,7 @@ mod test { C::insert(3, 30, 10); C::insert(4, 40, 10); - A::translate_values::(|v| Some((v * 2).into())); + A::translate_values::(|v| Some((v * 2).into())); assert_eq!(A::iter().collect::>(), vec![(4, 40, 20), (3, 30, 20)]); A::insert(3, 30, 10); @@ -685,21 +712,27 @@ mod test { C::insert(3, 30, 10); C::insert(4, 40, 10); - A::translate::(|k1, k2, v| Some((k1 * k2 as u16 * v as u16).into())); + A::translate::(|k1, k2, v| Some((k1 * k2 as u16 * v as u16).into())); assert_eq!(A::iter().collect::>(), vec![(4, 40, 1600), (3, 30, 900)]); assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_matches!(A::ty(), StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, - .. - }); - assert_matches!(AValueQueryWithAnOnEmpty::ty(), StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, - .. - }); + assert_matches!( + A::ty(), + StorageEntryType::DoubleMap { + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + .. + } + ); + assert_matches!( + AValueQueryWithAnOnEmpty::ty(), + StorageEntryType::DoubleMap { + hasher: StorageHasher::Blake2_128Concat, + key2_hasher: StorageHasher::Twox64Concat, + .. 
+ } + ); assert_eq!(A::NAME, "foo"); assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); diff --git a/frame/support/src/storage/types/key.rs b/frame/support/src/storage/types/key.rs index 39fe6bb42194d..0161c8198e1c6 100755 --- a/frame/support/src/storage/types/key.rs +++ b/frame/support/src/storage/types/key.rs @@ -76,29 +76,22 @@ impl KeyGenerator for Key const HASHER_METADATA: &'static [crate::metadata::StorageHasher] = &[H::METADATA]; fn final_key + TupleToEncodedIter>(key: KArg) -> Vec { - H::hash( - &key.to_encoded_iter() - .next() - .expect("should have at least one element!"), - ) - .as_ref() - .to_vec() + H::hash(&key.to_encoded_iter().next().expect("should have at least one element!")) + .as_ref() + .to_vec() } fn migrate_key + TupleToEncodedIter>( key: &KArg, hash_fns: Self::HArg, ) -> Vec { - (hash_fns.0)( - &key.to_encoded_iter() - .next() - .expect("should have at least one element!"), - ) + (hash_fns.0)(&key.to_encoded_iter().next().expect("should have at least one element!")) } } impl KeyGeneratorMaxEncodedLen -for Key { + for Key +{ fn key_max_encoded_len() -> usize { H::max_len::() } @@ -120,9 +113,8 @@ impl KeyGenerator for Tuple { for_tuples!( type HArg = ( #(Tuple::HashFn),* ); ); type HashFn = Box Vec>; - const HASHER_METADATA: &'static [crate::metadata::StorageHasher] = &[ - for_tuples!( #(Tuple::Hasher::METADATA),* ) - ]; + const HASHER_METADATA: &'static [crate::metadata::StorageHasher] = + &[for_tuples!( #(Tuple::Hasher::METADATA),* )]; fn final_key + TupleToEncodedIter>(key: KArg) -> Vec { let mut final_key = Vec::new(); @@ -212,9 +204,7 @@ pub trait TupleToEncodedIter { #[tuple_types_custom_trait_bound(Encode)] impl TupleToEncodedIter for Tuple { fn to_encoded_iter(&self) -> sp_std::vec::IntoIter> { - [for_tuples!( #(self.Tuple.encode()),* )] - .to_vec() - .into_iter() + [for_tuples!( #(self.Tuple.encode()),* )].to_vec().into_iter() } } @@ -230,7 +220,9 @@ pub trait ReversibleKeyGenerator: KeyGenerator { fn 
decode_final_key(key_material: &[u8]) -> Result<(Self::Key, &[u8]), codec::Error>; } -impl ReversibleKeyGenerator for Key { +impl ReversibleKeyGenerator + for Key +{ type ReversibleHasher = H; fn decode_final_key(key_material: &[u8]) -> Result<(Self::Key, &[u8]), codec::Error> { @@ -248,7 +240,7 @@ impl ReversibleKeyGenerator for Tuple { fn decode_final_key(key_material: &[u8]) -> Result<(Self::Key, &[u8]), codec::Error> { let mut current_key_material = key_material; Ok(( - (for_tuples!{ + (for_tuples! { #({ let (key, material) = Tuple::decode_final_key(current_key_material)?; current_key_material = material; diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index e15cc1b1550ae..c97a2d80ffb6b 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -18,15 +18,15 @@ //! Storage map type. Implements StorageMap, StorageIterableMap, StoragePrefixedMap traits and their //! methods directly. -use codec::{FullCodec, Decode, EncodeLike, Encode, MaxEncodedLen}; use crate::{ metadata::{StorageEntryModifier, StorageEntryType}, storage::{ - StorageAppend, StorageTryAppend, StorageDecodeLength, StoragePrefixedMap, - types::{OptionQuery, StorageEntryMetadata, QueryKindTrait}, + types::{OptionQuery, QueryKindTrait, StorageEntryMetadata}, + StorageAppend, StorageDecodeLength, StoragePrefixedMap, StorageTryAppend, }, - traits::{GetDefault, StorageInstance, Get, StorageInfo}, + traits::{Get, GetDefault, StorageInfo, StorageInstance}, }; +use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; use sp_arithmetic::traits::SaturatedConversion; use sp_std::prelude::*; @@ -44,10 +44,14 @@ use sp_std::prelude::*; /// If the keys are not trusted (e.g. can be set by a user), a cryptographic `hasher` such as /// `blake2_128_concat` must be used. Otherwise, other values in storage can be compromised. 
pub struct StorageMap< - Prefix, Hasher, Key, Value, QueryKind=OptionQuery, OnEmpty=GetDefault, MaxValues=GetDefault, ->( - core::marker::PhantomData<(Prefix, Hasher, Key, Value, QueryKind, OnEmpty, MaxValues)> -); + Prefix, + Hasher, + Key, + Value, + QueryKind = OptionQuery, + OnEmpty = GetDefault, + MaxValues = GetDefault, +>(core::marker::PhantomData<(Prefix, Hasher, Key, Value, QueryKind, OnEmpty, MaxValues)>); impl crate::storage::generator::StorageMap @@ -77,9 +81,8 @@ where } } -impl - StoragePrefixedMap for - StorageMap +impl StoragePrefixedMap + for StorageMap where Prefix: StorageInstance, Hasher: crate::hash::StorageHasher, @@ -148,7 +151,7 @@ where /// Mutate the value under a key. pub fn mutate, R, F: FnOnce(&mut QueryKind::Query) -> R>( key: KeyArg, - f: F + f: F, ) -> R { >::mutate(key, f) } @@ -165,7 +168,7 @@ where /// Mutate the value under a key. Deletes the item if mutated to a `None`. pub fn mutate_exists, R, F: FnOnce(&mut Option) -> R>( key: KeyArg, - f: F + f: F, ) -> R { >::mutate_exists(key, f) } @@ -198,7 +201,7 @@ where EncodeLikeKey: EncodeLike, Item: Encode, EncodeLikeItem: EncodeLike, - Value: StorageAppend + Value: StorageAppend, { >::append(key, item) } @@ -216,7 +219,8 @@ where /// `None` does not mean that `get()` does not return a value. The default value is completly /// ignored by this function. pub fn decode_len>(key: KeyArg) -> Option - where Value: StorageDecodeLength, + where + Value: StorageDecodeLength, { >::decode_len(key) } @@ -225,7 +229,7 @@ where /// /// If the key doesn't exist, then it's a no-op. If it does, then it returns its value. pub fn migrate_key>( - key: KeyArg + key: KeyArg, ) -> Option { >::migrate_key::(key) } @@ -263,19 +267,14 @@ where /// Try and append the given item to the value in the storage. /// /// Is only available if `Value` of the storage implements [`StorageTryAppend`]. 
- pub fn try_append( - key: KArg, - item: EncodeLikeItem, - ) -> Result<(), ()> + pub fn try_append(key: KArg, item: EncodeLikeItem) -> Result<(), ()> where KArg: EncodeLike + Clone, Item: Encode, EncodeLikeItem: EncodeLike, Value: StorageTryAppend, { - < - Self as crate::storage::TryAppendMap - >::try_append(key, item) + >::try_append(key, item) } } @@ -322,7 +321,8 @@ where } impl StorageEntryMetadata - for StorageMap where + for StorageMap +where Prefix: StorageInstance, Hasher: crate::hash::StorageHasher, Key: FullCodec + scale_info::StaticTypeInfo, @@ -347,9 +347,8 @@ impl StorageEntryMeta } } -impl - crate::traits::StorageInfoTrait for - StorageMap +impl crate::traits::StorageInfoTrait + for StorageMap where Prefix: StorageInstance, Hasher: crate::hash::StorageHasher, @@ -360,26 +359,24 @@ where MaxValues: Get>, { fn storage_info() -> Vec { - vec![ - StorageInfo { - pallet_name: Self::module_prefix().to_vec(), - storage_name: Self::storage_prefix().to_vec(), - prefix: Self::final_prefix().to_vec(), - max_values: MaxValues::get(), - max_size: Some( - Hasher::max_len::() - .saturating_add(Value::max_encoded_len()) - .saturated_into(), - ), - } - ] + vec![StorageInfo { + pallet_name: Self::module_prefix().to_vec(), + storage_name: Self::storage_prefix().to_vec(), + prefix: Self::final_prefix().to_vec(), + max_values: MaxValues::get(), + max_size: Some( + Hasher::max_len::() + .saturating_add(Value::max_encoded_len()) + .saturated_into(), + ), + }] } } /// It doesn't require to implement `MaxEncodedLen` and give no information for `max_size`. 
impl - crate::traits::PartialStorageInfoTrait for - StorageMap + crate::traits::PartialStorageInfoTrait + for StorageMap where Prefix: StorageInstance, Hasher: crate::hash::StorageHasher, @@ -390,32 +387,32 @@ where MaxValues: Get>, { fn partial_storage_info() -> Vec { - vec![ - StorageInfo { - pallet_name: Self::module_prefix().to_vec(), - storage_name: Self::storage_prefix().to_vec(), - prefix: Self::final_prefix().to_vec(), - max_values: MaxValues::get(), - max_size: None, - } - ] + vec![StorageInfo { + pallet_name: Self::module_prefix().to_vec(), + storage_name: Self::storage_prefix().to_vec(), + prefix: Self::final_prefix().to_vec(), + max_values: MaxValues::get(), + max_size: None, + }] } } #[cfg(test)] mod test { use super::*; - use assert_matches::assert_matches; - use sp_io::{TestExternalities, hashing::twox_128}; - use crate::hash::*; use crate::{ - metadata::{StorageEntryModifier, StorageHasher, StorageEntryType}, - storage::types::ValueQuery + hash::*, + metadata::{StorageEntryModifier, StorageEntryType, StorageHasher}, + storage::types::ValueQuery, }; + use assert_matches::assert_matches; + use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; impl StorageInstance for Prefix { - fn pallet_prefix() -> &'static str { "test" } + fn pallet_prefix() -> &'static str { + "test" + } const STORAGE_PREFIX: &'static str = "foo"; } @@ -429,9 +426,8 @@ mod test { #[test] fn test() { type A = StorageMap; - type AValueQueryWithAnOnEmpty = StorageMap< - Prefix, Blake2_128Concat, u16, u32, ValueQuery, ADefault - >; + type AValueQueryWithAnOnEmpty = + StorageMap; type B = StorageMap; type C = StorageMap; type WithLen = StorageMap>; @@ -473,17 +469,20 @@ mod test { A::remove(2); let _: Result<(), ()> = AValueQueryWithAnOnEmpty::try_mutate(2, |v| { - *v = *v * 2; Ok(()) + *v = *v * 2; + Ok(()) }); let _: Result<(), ()> = AValueQueryWithAnOnEmpty::try_mutate(2, |v| { - *v = *v * 2; Ok(()) + *v = *v * 2; + Ok(()) }); assert_eq!(A::contains_key(2), true); 
assert_eq!(A::get(2), Some(97 * 4)); A::remove(2); let _: Result<(), ()> = AValueQueryWithAnOnEmpty::try_mutate(2, |v| { - *v = *v * 2; Err(()) + *v = *v * 2; + Err(()) }); assert_eq!(A::contains_key(2), false); @@ -521,7 +520,6 @@ mod test { assert_eq!(A::contains_key(2), true); assert_eq!(A::get(2), Some(100)); - A::insert(2, 10); assert_eq!(A::take(2), Some(10)); assert_eq!(A::contains_key(2), false); @@ -545,7 +543,7 @@ mod test { C::insert(3, 10); C::insert(4, 10); - A::translate_values::(|v| Some((v * 2).into())); + A::translate_values::(|v| Some((v * 2).into())); assert_eq!(A::iter().collect::>(), vec![(4, 20), (3, 20)]); A::insert(3, 10); @@ -556,19 +554,19 @@ mod test { C::insert(3, 10); C::insert(4, 10); - A::translate::(|k, v| Some((k * v as u16).into())); + A::translate::(|k, v| Some((k * v as u16).into())); assert_eq!(A::iter().collect::>(), vec![(4, 40), (3, 30)]); assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_matches!(A::ty(), StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - .. - }); - assert_matches!(AValueQueryWithAnOnEmpty::ty(), StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, - .. - }); + assert_matches!( + A::ty(), + StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, .. } + ); + assert_matches!( + AValueQueryWithAnOnEmpty::ty(), + StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, .. } + ); assert_eq!(A::NAME, "foo"); assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); assert_eq!(A::default(), Option::::None.encode()); diff --git a/frame/support/src/storage/types/mod.rs b/frame/support/src/storage/types/mod.rs index 46824c26ded3e..26644ad82db92 100644 --- a/frame/support/src/storage/types/mod.rs +++ b/frame/support/src/storage/types/mod.rs @@ -18,7 +18,7 @@ //! Storage types to build abstraction on storage, they implements storage traits such as //! 
StorageMap and others. -use crate::metadata::{StorageEntryType, StorageEntryModifier}; +use crate::metadata::{StorageEntryModifier, StorageEntryType}; use codec::FullCodec; use sp_std::prelude::*; @@ -31,7 +31,7 @@ mod value; pub use double_map::StorageDoubleMap; pub use key::{ EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, Key, KeyGenerator, - ReversibleKeyGenerator, TupleToEncodedIter, KeyGeneratorMaxEncodedLen, + KeyGeneratorMaxEncodedLen, ReversibleKeyGenerator, TupleToEncodedIter, }; pub use map::StorageMap; pub use nmap::StorageNMap; diff --git a/frame/support/src/storage/types/nmap.rs b/frame/support/src/storage/types/nmap.rs index 0336b0837a3b5..bf8a9ed38ae65 100755 --- a/frame/support/src/storage/types/nmap.rs +++ b/frame/support/src/storage/types/nmap.rs @@ -22,12 +22,12 @@ use crate::{ metadata::{StorageEntryModifier, StorageEntryType}, storage::{ types::{ - EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, - OptionQuery, QueryKindTrait, StorageEntryMetadata, TupleToEncodedIter, + EncodeLikeTuple, HasKeyPrefix, HasReversibleKeyPrefix, OptionQuery, QueryKindTrait, + StorageEntryMetadata, TupleToEncodedIter, }, KeyGenerator, PrefixIterator, StorageAppend, StorageDecodeLength, StoragePrefixedMap, }, - traits::{Get, GetDefault, StorageInstance, StorageInfo}, + traits::{Get, GetDefault, StorageInfo, StorageInstance}, }; use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; use sp_runtime::SaturatedConversion; @@ -52,10 +52,13 @@ use sp_std::prelude::*; /// such as `blake2_128_concat` must be used for the key hashers. Otherwise, other values /// in storage can be compromised. 
pub struct StorageNMap< - Prefix, Key, Value, QueryKind = OptionQuery, OnEmpty = GetDefault, MaxValues=GetDefault, ->( - core::marker::PhantomData<(Prefix, Key, Value, QueryKind, OnEmpty, MaxValues)>, -); + Prefix, + Key, + Value, + QueryKind = OptionQuery, + OnEmpty = GetDefault, + MaxValues = GetDefault, +>(core::marker::PhantomData<(Prefix, Key, Value, QueryKind, OnEmpty, MaxValues)>); impl crate::storage::generator::StorageNMap @@ -83,8 +86,7 @@ where } } -impl - crate::storage::StoragePrefixedMap +impl crate::storage::StoragePrefixedMap for StorageNMap where Prefix: StorageInstance, @@ -113,7 +115,9 @@ where MaxValues: Get>, { /// Get the storage key used to fetch a value corresponding to a specific key. - pub fn hashed_key_for + TupleToEncodedIter>(key: KArg) -> Vec { + pub fn hashed_key_for + TupleToEncodedIter>( + key: KArg, + ) -> Vec { >::hashed_key_for(key) } @@ -123,7 +127,9 @@ where } /// Load the value associated with the given key from the map. - pub fn get + TupleToEncodedIter>(key: KArg) -> QueryKind::Query { + pub fn get + TupleToEncodedIter>( + key: KArg, + ) -> QueryKind::Query { >::get(key) } @@ -137,7 +143,9 @@ where } /// Take a value from storage, removing it afterwards. - pub fn take + TupleToEncodedIter>(key: KArg) -> QueryKind::Query { + pub fn take + TupleToEncodedIter>( + key: KArg, + ) -> QueryKind::Query { >::take(key) } @@ -248,7 +256,9 @@ where /// /// `None` does not mean that `get()` does not return a value. The default value is completly /// ignored by this function. - pub fn decode_len + TupleToEncodedIter>(key: KArg) -> Option + pub fn decode_len + TupleToEncodedIter>( + key: KArg, + ) -> Option where Value: StorageDecodeLength, { @@ -260,7 +270,7 @@ where /// If the key doesn't exist, then it's a no-op. If it does, then it returns its value. 
pub fn migrate_keys(key: KArg, hash_fns: Key::HArg) -> Option where - KArg: EncodeLikeTuple + TupleToEncodedIter + KArg: EncodeLikeTuple + TupleToEncodedIter, { >::migrate_keys::<_>(key, hash_fns) } @@ -402,9 +412,8 @@ where } } -impl - crate::traits::StorageInfoTrait for - StorageNMap +impl crate::traits::StorageInfoTrait + for StorageNMap where Prefix: StorageInstance, Key: super::key::KeyGenerator + super::key::KeyGeneratorMaxEncodedLen, @@ -414,26 +423,23 @@ where MaxValues: Get>, { fn storage_info() -> Vec { - vec![ - StorageInfo { - pallet_name: Self::module_prefix().to_vec(), - storage_name: Self::storage_prefix().to_vec(), - prefix: Self::final_prefix().to_vec(), - max_values: MaxValues::get(), - max_size: Some( - Key::key_max_encoded_len() - .saturating_add(Value::max_encoded_len()) - .saturated_into(), - ), - } - ] + vec![StorageInfo { + pallet_name: Self::module_prefix().to_vec(), + storage_name: Self::storage_prefix().to_vec(), + prefix: Self::final_prefix().to_vec(), + max_values: MaxValues::get(), + max_size: Some( + Key::key_max_encoded_len() + .saturating_add(Value::max_encoded_len()) + .saturated_into(), + ), + }] } } /// It doesn't require to implement `MaxEncodedLen` and give no information for `max_size`. 
-impl - crate::traits::PartialStorageInfoTrait for - StorageNMap +impl crate::traits::PartialStorageInfoTrait + for StorageNMap where Prefix: StorageInstance, Key: super::key::KeyGenerator, @@ -443,23 +449,23 @@ where MaxValues: Get>, { fn partial_storage_info() -> Vec { - vec![ - StorageInfo { - pallet_name: Self::module_prefix().to_vec(), - storage_name: Self::storage_prefix().to_vec(), - prefix: Self::final_prefix().to_vec(), - max_values: MaxValues::get(), - max_size: None, - } - ] + vec![StorageInfo { + pallet_name: Self::module_prefix().to_vec(), + storage_name: Self::storage_prefix().to_vec(), + prefix: Self::final_prefix().to_vec(), + max_values: MaxValues::get(), + max_size: None, + }] } } #[cfg(test)] mod test { use super::*; - use crate::hash::*; - use crate::storage::types::{Key, ValueQuery}; - use crate::metadata::StorageEntryModifier; + use crate::{ + hash::*, + metadata::StorageEntryModifier, + storage::types::{Key, ValueQuery}, + }; use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; @@ -625,15 +631,9 @@ mod test { assert_eq!(A::iter().collect::>(), vec![(4, 40), (3, 30)]); assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); - assert_eq!( - AValueQueryWithAnOnEmpty::MODIFIER, - StorageEntryModifier::Default - ); + assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); assert_eq!(A::NAME, "Foo"); - assert_eq!( - AValueQueryWithAnOnEmpty::default(), - 98u32.encode() - ); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 98u32.encode()); assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(None); @@ -785,41 +785,23 @@ mod test { C::insert((3, 30), 10); C::insert((4, 40), 10); A::translate_values::(|v| Some((v * 2).into())); - assert_eq!( - A::iter().collect::>(), - vec![((4, 40), 20), ((3, 30), 20)] - ); + assert_eq!(A::iter().collect::>(), vec![((4, 40), 20), ((3, 30), 20)]); A::insert((3, 30), 10); A::insert((4, 40), 10); - assert_eq!( - A::iter().collect::>(), - vec![((4, 40), 10), ((3, 
30), 10)] - ); - assert_eq!( - A::drain().collect::>(), - vec![((4, 40), 10), ((3, 30), 10)] - ); + assert_eq!(A::iter().collect::>(), vec![((4, 40), 10), ((3, 30), 10)]); + assert_eq!(A::drain().collect::>(), vec![((4, 40), 10), ((3, 30), 10)]); assert_eq!(A::iter().collect::>(), vec![]); C::insert((3, 30), 10); C::insert((4, 40), 10); A::translate::(|(k1, k2), v| Some((k1 * k2 as u16 * v as u16).into())); - assert_eq!( - A::iter().collect::>(), - vec![((4, 40), 1600), ((3, 30), 900)] - ); + assert_eq!(A::iter().collect::>(), vec![((4, 40), 1600), ((3, 30), 900)]); assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); - assert_eq!( - AValueQueryWithAnOnEmpty::MODIFIER, - StorageEntryModifier::Default - ); + assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); assert_eq!(A::NAME, "Foo"); - assert_eq!( - AValueQueryWithAnOnEmpty::default(), - 98u32.encode() - ); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 98u32.encode()); assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(None); @@ -831,14 +813,8 @@ mod test { A::insert((3, 31), 12); A::insert((4, 40), 13); A::insert((4, 41), 14); - assert_eq!( - A::iter_prefix_values((3,)).collect::>(), - vec![12, 11] - ); - assert_eq!( - A::iter_prefix_values((4,)).collect::>(), - vec![13, 14] - ); + assert_eq!(A::iter_prefix_values((3,)).collect::>(), vec![12, 11]); + assert_eq!(A::iter_prefix_values((4,)).collect::>(), vec![13, 14]); }); } @@ -846,52 +822,32 @@ mod test { fn test_3_keys() { type A = StorageNMap< Prefix, - ( - Key, - Key, - Key, - ), + (Key, Key, Key), u32, OptionQuery, >; type AValueQueryWithAnOnEmpty = StorageNMap< Prefix, - ( - Key, - Key, - Key, - ), + (Key, Key, Key), u32, ValueQuery, ADefault, >; type B = StorageNMap< Prefix, - ( - Key, - Key, - Key, - ), + (Key, Key, Key), u32, ValueQuery, >; type C = StorageNMap< Prefix, - ( - Key, - Key, - Key, - ), + (Key, Key, Key), u8, ValueQuery, >; type WithLen = StorageNMap< Prefix, - ( - Key, - Key, - Key, - 
), + (Key, Key, Key), Vec, >; @@ -914,11 +870,7 @@ mod test { assert_eq!(AValueQueryWithAnOnEmpty::get((1, 10, 100)), 30); A::swap::< - ( - Key, - Key, - Key, - ), + (Key, Key, Key), _, _, >((1, 10, 100), (2, 20, 200)); @@ -1018,17 +970,11 @@ mod test { C::insert((3, 30, 300), 10); C::insert((4, 40, 400), 10); A::translate_values::(|v| Some((v * 2).into())); - assert_eq!( - A::iter().collect::>(), - vec![((4, 40, 400), 20), ((3, 30, 300), 20)] - ); + assert_eq!(A::iter().collect::>(), vec![((4, 40, 400), 20), ((3, 30, 300), 20)]); A::insert((3, 30, 300), 10); A::insert((4, 40, 400), 10); - assert_eq!( - A::iter().collect::>(), - vec![((4, 40, 400), 10), ((3, 30, 300), 10)] - ); + assert_eq!(A::iter().collect::>(), vec![((4, 40, 400), 10), ((3, 30, 300), 10)]); assert_eq!( A::drain().collect::>(), vec![((4, 40, 400), 10), ((3, 30, 300), 10)] @@ -1040,21 +986,12 @@ mod test { A::translate::(|(k1, k2, k3), v| { Some((k1 * k2 as u16 * v as u16 / k3 as u16).into()) }); - assert_eq!( - A::iter().collect::>(), - vec![((4, 40, 400), 4), ((3, 30, 300), 3)] - ); + assert_eq!(A::iter().collect::>(), vec![((4, 40, 400), 4), ((3, 30, 300), 3)]); assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); - assert_eq!( - AValueQueryWithAnOnEmpty::MODIFIER, - StorageEntryModifier::Default - ); + assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); assert_eq!(A::NAME, "Foo"); - assert_eq!( - AValueQueryWithAnOnEmpty::default(), - 98u32.encode() - ); + assert_eq!(AValueQueryWithAnOnEmpty::default(), 98u32.encode()); assert_eq!(A::default(), Option::::None.encode()); WithLen::remove_all(None); @@ -1066,22 +1003,10 @@ mod test { A::insert((3, 30, 301), 12); A::insert((4, 40, 400), 13); A::insert((4, 40, 401), 14); - assert_eq!( - A::iter_prefix_values((3,)).collect::>(), - vec![11, 12] - ); - assert_eq!( - A::iter_prefix_values((4,)).collect::>(), - vec![14, 13] - ); - assert_eq!( - A::iter_prefix_values((3, 30)).collect::>(), - vec![11, 12] - ); - assert_eq!( 
- A::iter_prefix_values((4, 40)).collect::>(), - vec![14, 13] - ); + assert_eq!(A::iter_prefix_values((3,)).collect::>(), vec![11, 12]); + assert_eq!(A::iter_prefix_values((4,)).collect::>(), vec![14, 13]); + assert_eq!(A::iter_prefix_values((3, 30)).collect::>(), vec![11, 12]); + assert_eq!(A::iter_prefix_values((4, 40)).collect::>(), vec![14, 13]); }); } } diff --git a/frame/support/src/storage/types/value.rs b/frame/support/src/storage/types/value.rs index 36c0873ce00ab..d7f15487592b1 100644 --- a/frame/support/src/storage/types/value.rs +++ b/frame/support/src/storage/types/value.rs @@ -17,16 +17,16 @@ //! Storage value type. Implements StorageValue trait and its method directly. -use codec::{FullCodec, Decode, EncodeLike, Encode, MaxEncodedLen}; use crate::{ metadata::{StorageEntryModifier, StorageEntryType}, storage::{ - StorageAppend, StorageTryAppend, StorageDecodeLength, - types::{OptionQuery, StorageEntryMetadata, QueryKindTrait}, - generator::{StorageValue as StorageValueT}, + generator::StorageValue as StorageValueT, + types::{OptionQuery, QueryKindTrait, StorageEntryMetadata}, + StorageAppend, StorageDecodeLength, StorageTryAppend, }, - traits::{GetDefault, StorageInstance, StorageInfo}, + traits::{GetDefault, StorageInfo, StorageInstance}, }; +use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; use sp_arithmetic::traits::SaturatedConversion; use sp_std::prelude::*; @@ -36,12 +36,12 @@ use sp_std::prelude::*; /// ```nocompile /// Twox128(Prefix::pallet_prefix()) ++ Twox128(Prefix::STORAGE_PREFIX) /// ``` -pub struct StorageValue( - core::marker::PhantomData<(Prefix, Value, QueryKind, OnEmpty)> +pub struct StorageValue( + core::marker::PhantomData<(Prefix, Value, QueryKind, OnEmpty)>, ); -impl crate::storage::generator::StorageValue for - StorageValue +impl crate::storage::generator::StorageValue + for StorageValue where Prefix: StorageInstance, Value: FullCodec, @@ -71,13 +71,19 @@ where OnEmpty: crate::traits::Get + 'static, { /// Get 
the storage key. - pub fn hashed_key() -> [u8; 32] { >::hashed_key() } + pub fn hashed_key() -> [u8; 32] { + >::hashed_key() + } /// Does the value (explicitly) exist in storage? - pub fn exists() -> bool { >::exists() } + pub fn exists() -> bool { + >::exists() + } /// Load the value from the provided storage instance. - pub fn get() -> QueryKind::Query { >::get() } + pub fn get() -> QueryKind::Query { + >::get() + } /// Try to get the underlying value from the provided storage instance; `Ok` if it exists, /// `Err` if not. @@ -120,7 +126,9 @@ where /// Store a value under this key into the provided storage instance. /// /// this uses the query type rather than the underlying value. - pub fn set(val: QueryKind::Query) { >::set(val) } + pub fn set(val: QueryKind::Query) { + >::set(val) + } /// Mutate the value pub fn mutate R>(f: F) -> R { @@ -135,10 +143,14 @@ where } /// Clear the storage value. - pub fn kill() { >::kill() } + pub fn kill() { + >::kill() + } /// Take a value from storage, removing it afterwards. - pub fn take() -> QueryKind::Query { >::take() } + pub fn take() -> QueryKind::Query { + >::take() + } /// Append the given item to the value in the storage. /// @@ -153,7 +165,7 @@ where where Item: Encode, EncodeLikeItem: EncodeLike, - Value: StorageAppend + Value: StorageAppend, { >::append(item) } @@ -169,7 +181,10 @@ where /// /// `None` does not mean that `get()` does not return a value. The default value is completly /// ignored by this function. 
- pub fn decode_len() -> Option where Value: StorageDecodeLength { + pub fn decode_len() -> Option + where + Value: StorageDecodeLength, + { >::decode_len() } @@ -187,7 +202,8 @@ where } impl StorageEntryMetadata - for StorageValue where + for StorageValue +where Prefix: StorageInstance, Value: FullCodec + scale_info::StaticTypeInfo, QueryKind: QueryKindTrait, @@ -205,66 +221,56 @@ impl StorageEntryMetadata } } -impl - crate::traits::StorageInfoTrait for - StorageValue +impl crate::traits::StorageInfoTrait + for StorageValue where Prefix: StorageInstance, Value: FullCodec + MaxEncodedLen, QueryKind: QueryKindTrait, - OnEmpty: crate::traits::Get + 'static + OnEmpty: crate::traits::Get + 'static, { fn storage_info() -> Vec { - vec![ - StorageInfo { - pallet_name: Self::module_prefix().to_vec(), - storage_name: Self::storage_prefix().to_vec(), - prefix: Self::hashed_key().to_vec(), - max_values: Some(1), - max_size: Some( - Value::max_encoded_len() - .saturated_into(), - ), - } - ] + vec![StorageInfo { + pallet_name: Self::module_prefix().to_vec(), + storage_name: Self::storage_prefix().to_vec(), + prefix: Self::hashed_key().to_vec(), + max_values: Some(1), + max_size: Some(Value::max_encoded_len().saturated_into()), + }] } } /// It doesn't require to implement `MaxEncodedLen` and give no information for `max_size`. 
-impl - crate::traits::PartialStorageInfoTrait for - StorageValue +impl crate::traits::PartialStorageInfoTrait + for StorageValue where Prefix: StorageInstance, Value: FullCodec, QueryKind: QueryKindTrait, - OnEmpty: crate::traits::Get + 'static + OnEmpty: crate::traits::Get + 'static, { fn partial_storage_info() -> Vec { - vec![ - StorageInfo { - pallet_name: Self::module_prefix().to_vec(), - storage_name: Self::storage_prefix().to_vec(), - prefix: Self::hashed_key().to_vec(), - max_values: Some(1), - max_size: None, - } - ] + vec![StorageInfo { + pallet_name: Self::module_prefix().to_vec(), + storage_name: Self::storage_prefix().to_vec(), + prefix: Self::hashed_key().to_vec(), + max_values: Some(1), + max_size: None, + }] } } #[cfg(test)] mod test { use super::*; - use sp_io::{TestExternalities, hashing::twox_128}; - use crate::{ - metadata::StorageEntryModifier, - storage::types::ValueQuery, - }; + use crate::{metadata::StorageEntryModifier, storage::types::ValueQuery}; + use sp_io::{hashing::twox_128, TestExternalities}; struct Prefix; impl StorageInstance for Prefix { - fn pallet_prefix() -> &'static str { "test" } + fn pallet_prefix() -> &'static str { + "test" + } const STORAGE_PREFIX: &'static str = "foo"; } @@ -310,10 +316,16 @@ mod test { assert_eq!(A::try_get(), Ok(4)); A::set(Some(4)); - let _: Result<(), ()> = A::try_mutate(|v| { *v = Some(v.unwrap() * 2); Ok(()) }); + let _: Result<(), ()> = A::try_mutate(|v| { + *v = Some(v.unwrap() * 2); + Ok(()) + }); assert_eq!(A::try_get(), Ok(8)); - let _: Result<(), ()> = A::try_mutate(|v| { *v = Some(v.unwrap() * 2); Err(()) }); + let _: Result<(), ()> = A::try_mutate(|v| { + *v = Some(v.unwrap() * 2); + Err(()) + }); assert_eq!(A::try_get(), Ok(8)); A::kill(); @@ -322,7 +334,8 @@ mod test { AValueQueryWithAnOnEmpty::kill(); let _: Result<(), ()> = AValueQueryWithAnOnEmpty::try_mutate(|v| { - *v = *v * 2; Ok(()) + *v = *v * 2; + Ok(()) }); assert_eq!(AValueQueryWithAnOnEmpty::try_get(), Ok(97 * 2)); diff --git 
a/frame/support/src/storage/unhashed.rs b/frame/support/src/storage/unhashed.rs index 134b3debcd31b..f700771b2d5cc 100644 --- a/frame/support/src/storage/unhashed.rs +++ b/frame/support/src/storage/unhashed.rs @@ -17,8 +17,8 @@ //! Operation on unhashed runtime storage. +use codec::{Decode, Encode}; use sp_std::prelude::*; -use codec::{Encode, Decode}; /// Return the value of the item in storage under `key`, or `None` if there is no explicit entry. pub fn get(key: &[u8]) -> Option { diff --git a/frame/support/src/storage/weak_bounded_vec.rs b/frame/support/src/storage/weak_bounded_vec.rs index b3420c637713a..e8e0f1a9d2462 100644 --- a/frame/support/src/storage/weak_bounded_vec.rs +++ b/frame/support/src/storage/weak_bounded_vec.rs @@ -18,17 +18,16 @@ //! Traits, types and structs to support putting a bounded vector into storage, as a raw value, map //! or a double map. -use sp_std::prelude::*; -use sp_std::{convert::TryFrom, fmt, marker::PhantomData}; -use codec::{Encode, Decode, MaxEncodedLen}; +use crate::{ + storage::{StorageDecodeLength, StorageTryAppend}, + traits::Get, +}; +use codec::{Decode, Encode, MaxEncodedLen}; use core::{ ops::{Deref, Index, IndexMut}, slice::SliceIndex, }; -use crate::{ - traits::Get, - storage::{StorageDecodeLength, StorageTryAppend}, -}; +use sp_std::{convert::TryFrom, fmt, marker::PhantomData, prelude::*}; /// A weakly bounded vector. /// @@ -318,9 +317,9 @@ where #[cfg(test)] pub mod test { use super::*; + use crate::Twox128; use sp_io::TestExternalities; use sp_std::convert::TryInto; - use crate::Twox128; crate::parameter_types! { pub const Seven: u32 = 7; diff --git a/frame/support/src/traits.rs b/frame/support/src/traits.rs index ec47331285ef8..fcc3305c409c1 100644 --- a/frame/support/src/traits.rs +++ b/frame/support/src/traits.rs @@ -20,62 +20,67 @@ //! NOTE: If you're looking for `parameter_types`, it has moved in to the top-level module. 
pub mod tokens; -pub use tokens::fungible; -pub use tokens::fungibles; -pub use tokens::currency::{ - Currency, LockIdentifier, LockableCurrency, ReservableCurrency, NamedReservableCurrency, - VestingSchedule, +pub use tokens::{ + currency::{ + Currency, LockIdentifier, LockableCurrency, NamedReservableCurrency, ReservableCurrency, + VestingSchedule, + }, + fungible, fungibles, + imbalance::{Imbalance, OnUnbalanced, SignedImbalance}, + BalanceStatus, ExistenceRequirement, WithdrawReasons, }; -pub use tokens::imbalance::{Imbalance, OnUnbalanced, SignedImbalance}; -pub use tokens::{ExistenceRequirement, WithdrawReasons, BalanceStatus}; mod members; pub use members::{ - Contains, ContainsLengthBound, SortedMembers, InitializeMembers, ChangeMembers, All, IsInVec, - AsContains, + All, AsContains, ChangeMembers, Contains, ContainsLengthBound, InitializeMembers, IsInVec, + SortedMembers, }; mod validation; pub use validation::{ - ValidatorSet, ValidatorSetWithIdentification, OneSessionHandler, FindAuthor, VerifySeal, - EstimateNextNewSession, EstimateNextSessionRotation, KeyOwnerProofSystem, ValidatorRegistration, - Lateness, + EstimateNextNewSession, EstimateNextSessionRotation, FindAuthor, KeyOwnerProofSystem, Lateness, + OneSessionHandler, ValidatorRegistration, ValidatorSet, ValidatorSetWithIdentification, + VerifySeal, }; mod filter; pub use filter::{ - Filter, FilterStack, FilterStackGuard, ClearFilterGuard, InstanceFilter, IntegrityTest, - AllowAll, DenyAll, + AllowAll, ClearFilterGuard, DenyAll, Filter, FilterStack, FilterStackGuard, InstanceFilter, + IntegrityTest, }; mod misc; pub use misc::{ - Len, Get, GetDefault, HandleLifetime, TryDrop, Time, UnixTime, IsType, IsSubType, ExecuteBlock, - SameOrOther, OnNewAccount, OnKilledAccount, OffchainWorker, GetBacking, Backing, ExtrinsicCall, - EnsureInherentsAreFirst, ConstU32, + Backing, ConstU32, EnsureInherentsAreFirst, ExecuteBlock, ExtrinsicCall, Get, GetBacking, + GetDefault, HandleLifetime, IsSubType, IsType, 
Len, OffchainWorker, OnKilledAccount, + OnNewAccount, SameOrOther, Time, TryDrop, UnixTime, }; mod stored_map; -pub use stored_map::{StoredMap, StorageMapShim}; +pub use stored_map::{StorageMapShim, StoredMap}; mod randomness; pub use randomness::Randomness; mod metadata; pub use metadata::{ - CallMetadata, GetCallMetadata, GetCallName, PalletInfo, PalletVersion, GetPalletVersion, - PALLET_VERSION_STORAGE_KEY_POSTFIX, PalletInfoAccess, + CallMetadata, GetCallMetadata, GetCallName, GetPalletVersion, PalletInfo, PalletInfoAccess, + PalletVersion, PALLET_VERSION_STORAGE_KEY_POSTFIX, }; mod hooks; -pub use hooks::{Hooks, OnGenesis, OnInitialize, OnFinalize, OnIdle, OnRuntimeUpgrade, OnTimestampSet}; -#[cfg(feature = "try-runtime")] -pub use hooks::{OnRuntimeUpgradeHelpersExt, ON_RUNTIME_UPGRADE_PREFIX}; #[cfg(feature = "std")] pub use hooks::GenesisBuild; +pub use hooks::{ + Hooks, OnFinalize, OnGenesis, OnIdle, OnInitialize, OnRuntimeUpgrade, OnTimestampSet, +}; +#[cfg(feature = "try-runtime")] +pub use hooks::{OnRuntimeUpgradeHelpersExt, ON_RUNTIME_UPGRADE_PREFIX}; pub mod schedule; mod storage; -pub use storage::{Instance, PartialStorageInfoTrait, StorageInstance, StorageInfo, StorageInfoTrait}; +pub use storage::{ + Instance, PartialStorageInfoTrait, StorageInfo, StorageInfoTrait, StorageInstance, +}; mod dispatch; pub use dispatch::{EnsureOrigin, OriginTrait, UnfilteredDispatchable}; diff --git a/frame/support/src/traits/filter.rs b/frame/support/src/traits/filter.rs index 4b70fa177e5ca..b9f5037abc66e 100644 --- a/frame/support/src/traits/filter.rs +++ b/frame/support/src/traits/filter.rs @@ -32,11 +32,15 @@ pub enum AllowAll {} pub enum DenyAll {} impl Filter for AllowAll { - fn filter(_: &T) -> bool { true } + fn filter(_: &T) -> bool { + true + } } impl Filter for DenyAll { - fn filter(_: &T) -> bool { false } + fn filter(_: &T) -> bool { + false + } } /// Trait to add a constraint onto the filter. 
@@ -101,17 +105,28 @@ pub trait InstanceFilter: Sized + Send + Sync { fn filter(&self, _: &T) -> bool; /// Determines whether `self` matches at least everything that `_o` does. - fn is_superset(&self, _o: &Self) -> bool { false } + fn is_superset(&self, _o: &Self) -> bool { + false + } } impl InstanceFilter for () { - fn filter(&self, _: &T) -> bool { true } - fn is_superset(&self, _o: &Self) -> bool { true } + fn filter(&self, _: &T) -> bool { + true + } + fn is_superset(&self, _o: &Self) -> bool { + true + } } /// Re-expected for the macro. #[doc(hidden)] -pub use sp_std::{mem::{swap, take}, cell::RefCell, vec::Vec, boxed::Box}; +pub use sp_std::{ + boxed::Box, + cell::RefCell, + mem::{swap, take}, + vec::Vec, +}; #[macro_export] macro_rules! impl_filter_stack { @@ -206,7 +221,9 @@ pub mod test_impl_filter_stack { pub struct IsCallable; pub struct BaseFilter; impl Filter for BaseFilter { - fn filter(x: &u32) -> bool { x % 2 == 0 } + fn filter(x: &u32) -> bool { + x % 2 == 0 + } } impl_filter_stack!( crate::traits::filter::test_impl_filter_stack::IsCallable, diff --git a/frame/support/src/traits/hooks.rs b/frame/support/src/traits/hooks.rs index 5f7b35a9ad25c..37b07c3113018 100644 --- a/frame/support/src/traits/hooks.rs +++ b/frame/support/src/traits/hooks.rs @@ -17,9 +17,9 @@ //! Traits for hooking tasks to events in a blockchain's lifecycle. +use impl_trait_for_tuples::impl_for_tuples; use sp_arithmetic::traits::Saturating; use sp_runtime::traits::MaybeSerializeDeserialize; -use impl_trait_for_tuples::impl_for_tuples; /// The block initialization trait. /// @@ -33,7 +33,9 @@ pub trait OnInitialize { /// NOTE: This function is called BEFORE ANY extrinsic in a block is applied, /// including inherent extrinsics. Hence for instance, if you runtime includes /// `pallet_timestamp`, the `timestamp` is not yet up to date at this point. 
- fn on_initialize(_n: BlockNumber) -> crate::weights::Weight { 0 } + fn on_initialize(_n: BlockNumber) -> crate::weights::Weight { + 0 + } } #[impl_for_tuples(30)] @@ -71,7 +73,7 @@ pub trait OnIdle { /// in a block are applied but before `on_finalize` is executed. fn on_idle( _n: BlockNumber, - _remaining_weight: crate::weights::Weight + _remaining_weight: crate::weights::Weight, ) -> crate::weights::Weight { 0 } @@ -79,7 +81,7 @@ pub trait OnIdle { #[impl_for_tuples(30)] impl OnIdle for Tuple { - fn on_idle(n: BlockNumber, remaining_weight: crate::weights::Weight) -> crate::weights::Weight { + fn on_idle(n: BlockNumber, remaining_weight: crate::weights::Weight) -> crate::weights::Weight { let mut weight = 0; for_tuples!( #( let adjusted_remaining_weight = remaining_weight.saturating_sub(weight); @@ -170,13 +172,17 @@ pub trait OnRuntimeUpgrade { /// /// This hook is never meant to be executed on-chain but is meant to be used by testing tools. #[cfg(feature = "try-runtime")] - fn pre_upgrade() -> Result<(), &'static str> { Ok(()) } + fn pre_upgrade() -> Result<(), &'static str> { + Ok(()) + } /// Execute some post-checks after a runtime upgrade. /// /// This hook is never meant to be executed on-chain but is meant to be used by testing tools. #[cfg(feature = "try-runtime")] - fn post_upgrade() -> Result<(), &'static str> { Ok(()) } + fn post_upgrade() -> Result<(), &'static str> { + Ok(()) + } } #[impl_for_tuples(30)] @@ -214,7 +220,7 @@ pub trait Hooks { /// and pass the result to the next `on_idle` hook if it exists. fn on_idle( _n: BlockNumber, - _remaining_weight: crate::weights::Weight + _remaining_weight: crate::weights::Weight, ) -> crate::weights::Weight { 0 } @@ -222,7 +228,9 @@ pub trait Hooks { /// The block is being initialized. Implement to have something happen. /// /// Return the non-negotiable weight consumed in the block. 
- fn on_initialize(_n: BlockNumber) -> crate::weights::Weight { 0 } + fn on_initialize(_n: BlockNumber) -> crate::weights::Weight { + 0 + } /// Perform a module upgrade. /// @@ -238,7 +246,9 @@ pub trait Hooks { /// block local data are not accessible. /// /// Return the non-negotiable weight consumed for runtime upgrade. - fn on_runtime_upgrade() -> crate::weights::Weight { 0 } + fn on_runtime_upgrade() -> crate::weights::Weight { + 0 + } /// Execute some pre-checks prior to a runtime upgrade. /// @@ -282,7 +292,7 @@ pub trait Hooks { /// A trait to define the build function of a genesis config, T and I are placeholder for pallet /// trait and pallet instance. #[cfg(feature = "std")] -pub trait GenesisBuild: Default + MaybeSerializeDeserialize { +pub trait GenesisBuild: Default + MaybeSerializeDeserialize { /// The build function is called within an externalities allowing storage APIs. /// Thus one can write to storage using regular pallet storages. fn build(&self); diff --git a/frame/support/src/traits/members.rs b/frame/support/src/traits/members.rs index 8b9c2c90f541d..dbfc2e0120e4e 100644 --- a/frame/support/src/traits/members.rs +++ b/frame/support/src/traits/members.rs @@ -17,7 +17,7 @@ //! Traits for dealing with the idea of membership. -use sp_std::{prelude::*, marker::PhantomData}; +use sp_std::{marker::PhantomData, prelude::*}; /// A trait for querying whether a type can be said to "contain" a value. pub trait Contains { @@ -28,7 +28,9 @@ pub trait Contains { /// A `Contains` implementation which always returns `true`. pub struct All(PhantomData); impl Contains for All { - fn contains(_: &T) -> bool { true } + fn contains(_: &T) -> bool { + true + } } #[impl_trait_for_tuples::impl_for_tuples(30)] @@ -77,32 +79,46 @@ pub trait SortedMembers { fn sorted_members() -> Vec; /// Return `true` if this "contains" the given value `t`. 
- fn contains(t: &T) -> bool { Self::sorted_members().binary_search(t).is_ok() } + fn contains(t: &T) -> bool { + Self::sorted_members().binary_search(t).is_ok() + } /// Get the number of items in the set. - fn count() -> usize { Self::sorted_members().len() } + fn count() -> usize { + Self::sorted_members().len() + } /// Add an item that would satisfy `contains`. It does not make sure any other /// state is correctly maintained or generated. /// /// **Should be used for benchmarking only!!!** #[cfg(feature = "runtime-benchmarks")] - fn add(_t: &T) { unimplemented!() } + fn add(_t: &T) { + unimplemented!() + } } /// Adapter struct for turning an `OrderedMembership` impl into a `Contains` impl. pub struct AsContains(PhantomData<(OM,)>); impl> Contains for AsContains { - fn contains(t: &T) -> bool { OM::contains(t) } + fn contains(t: &T) -> bool { + OM::contains(t) + } } /// Trivial utility for implementing `Contains`/`OrderedMembership` with a `Vec`. pub struct IsInVec(PhantomData); impl>> Contains for IsInVec { - fn contains(t: &X) -> bool { T::get().contains(t) } + fn contains(t: &X) -> bool { + T::get().contains(t) + } } impl>> SortedMembers for IsInVec { - fn sorted_members() -> Vec { let mut r = T::get(); r.sort(); r } + fn sorted_members() -> Vec { + let mut r = T::get(); + r.sort(); + r + } } /// A trait for querying bound for the length of an implementation of `Contains` @@ -174,19 +190,19 @@ pub trait ChangeMembers { (Some(old), Some(new)) if old == new => { old_i = old_iter.next(); new_i = new_iter.next(); - } + }, (Some(old), Some(new)) if old < new => { outgoing.push(old.clone()); old_i = old_iter.next(); - } + }, (Some(old), None) => { outgoing.push(old.clone()); old_i = old_iter.next(); - } + }, (_, Some(new)) => { incoming.push(new.clone()); new_i = new_iter.next(); - } + }, } } (incoming, outgoing) diff --git a/frame/support/src/traits/metadata.rs b/frame/support/src/traits/metadata.rs index b13a0464b30c0..ba26305638448 100644 --- 
a/frame/support/src/traits/metadata.rs +++ b/frame/support/src/traits/metadata.rs @@ -17,7 +17,7 @@ //! Traits for managing information attached to pallets and their constituents. -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use sp_runtime::RuntimeDebug; /// Provides information about the pallet setup in the runtime. @@ -91,11 +91,7 @@ pub struct PalletVersion { impl PalletVersion { /// Creates a new instance of `Self`. pub fn new(major: u16, minor: u8, patch: u8) -> Self { - Self { - major, - minor, - patch, - } + Self { major, minor, patch } } /// Returns the storage key for a pallet version. @@ -139,13 +135,10 @@ impl PalletVersion { impl sp_std::cmp::PartialOrd for PalletVersion { fn partial_cmp(&self, other: &Self) -> Option { - let res = self.major + let res = self + .major .cmp(&other.major) - .then_with(|| - self.minor - .cmp(&other.minor) - .then_with(|| self.patch.cmp(&other.patch) - )); + .then_with(|| self.minor.cmp(&other.minor).then_with(|| self.patch.cmp(&other.patch))); Some(res) } diff --git a/frame/support/src/traits/misc.rs b/frame/support/src/traits/misc.rs index 9cab2626cd6cb..d6eb8331cdb5d 100644 --- a/frame/support/src/traits/misc.rs +++ b/frame/support/src/traits/misc.rs @@ -17,9 +17,9 @@ //! Smaller traits used in FRAME which don't need their own file. -use sp_runtime::{traits::Block as BlockT, DispatchError}; -use sp_arithmetic::traits::AtLeast32Bit; use crate::dispatch::Parameter; +use sp_arithmetic::traits::AtLeast32Bit; +use sp_runtime::{traits::Block as BlockT, DispatchError}; /// Anything that can have a `::len()` method. 
pub trait Len { @@ -27,7 +27,10 @@ pub trait Len { fn len(&self) -> usize; } -impl Len for T where ::IntoIter: ExactSizeIterator { +impl Len for T +where + ::IntoIter: ExactSizeIterator, +{ fn len(&self) -> usize { self.clone().into_iter().len() } @@ -42,7 +45,9 @@ pub trait Get { } impl Get for () { - fn get() -> T { T::default() } + fn get() -> T { + T::default() + } } /// Implement Get by returning Default for any type that implements Default. @@ -123,7 +128,10 @@ impl SameOrOther { } } - pub fn same(self) -> Result where A: Default { + pub fn same(self) -> Result + where + A: Default, + { match self { SameOrOther::Same(a) => Ok(a), SameOrOther::None => Ok(A::default()), @@ -131,7 +139,10 @@ impl SameOrOther { } } - pub fn other(self) -> Result where B: Default { + pub fn other(self) -> Result + where + B: Default, + { match self { SameOrOther::Same(a) => Err(a), SameOrOther::None => Ok(B::default()), @@ -157,10 +168,14 @@ pub trait OnKilledAccount { /// A simple, generic one-parameter event notifier/handler. pub trait HandleLifetime { /// An account was created. - fn created(_t: &T) -> Result<(), DispatchError> { Ok(()) } + fn created(_t: &T) -> Result<(), DispatchError> { + Ok(()) + } /// An account was killed. - fn killed(_t: &T) -> Result<(), DispatchError> { Ok(()) } + fn killed(_t: &T) -> Result<(), DispatchError> { + Ok(()) + } } impl HandleLifetime for () {} @@ -195,10 +210,18 @@ pub trait IsType: Into + From { } impl IsType for T { - fn from_ref(t: &T) -> &Self { t } - fn into_ref(&self) -> &T { self } - fn from_mut(t: &mut T) -> &mut Self { t } - fn into_mut(&mut self) -> &mut T { self } + fn from_ref(t: &T) -> &Self { + t + } + fn into_ref(&self) -> &T { + self + } + fn from_mut(t: &mut T) -> &mut Self { + t + } + fn into_mut(&mut self) -> &mut T { + self + } } /// Something that can be checked to be a of sub type `T`. 
@@ -300,8 +323,6 @@ pub trait GetBacking { fn get_backing(&self) -> Option; } - - /// A trait to ensure the inherent are before non-inherent in a block. /// /// This is typically implemented on runtime, through `construct_runtime!`. @@ -319,7 +340,8 @@ pub trait ExtrinsicCall: sp_runtime::traits::Extrinsic { } #[cfg(feature = "std")] -impl ExtrinsicCall for sp_runtime::testing::TestXt where +impl ExtrinsicCall for sp_runtime::testing::TestXt +where Call: codec::Codec + Sync + Send, { fn call(&self) -> &Self::Call { @@ -328,7 +350,7 @@ impl ExtrinsicCall for sp_runtime::testing::TestXt whe } impl ExtrinsicCall -for sp_runtime::generic::UncheckedExtrinsic + for sp_runtime::generic::UncheckedExtrinsic where Extra: sp_runtime::traits::SignedExtension, { diff --git a/frame/support/src/traits/schedule.rs b/frame/support/src/traits/schedule.rs index 58e4c419f2813..10a973a993df8 100644 --- a/frame/support/src/traits/schedule.rs +++ b/frame/support/src/traits/schedule.rs @@ -17,9 +17,9 @@ //! Traits and associated utilities for scheduling dispatchables in FRAME. -use sp_std::{prelude::*, fmt::Debug}; -use codec::{Encode, Decode, Codec, EncodeLike}; -use sp_runtime::{RuntimeDebug, DispatchError}; +use codec::{Codec, Decode, Encode, EncodeLike}; +use sp_runtime::{DispatchError, RuntimeDebug}; +use sp_std::{fmt::Debug, prelude::*}; /// Information relating to the period of a scheduled task. First item is the length of the /// period and the second is the number of times it should be executed in total before the task @@ -61,7 +61,7 @@ pub trait Anon { maybe_periodic: Option>, priority: Priority, origin: Origin, - call: Call + call: Call, ) -> Result; /// Cancel a scheduled task. If periodic, then it will cancel all further instances of that, @@ -107,7 +107,7 @@ pub trait Named { maybe_periodic: Option>, priority: Priority, origin: Origin, - call: Call + call: Call, ) -> Result; /// Cancel a scheduled, named task. 
If periodic, then it will cancel all further instances diff --git a/frame/support/src/traits/stored_map.rs b/frame/support/src/traits/stored_map.rs index 0e1660df546f4..715a5211be430 100644 --- a/frame/support/src/traits/stored_map.rs +++ b/frame/support/src/traits/stored_map.rs @@ -17,10 +17,9 @@ //! Traits and associated datatypes for managing abstract stored values. +use crate::{storage::StorageMap, traits::misc::HandleLifetime}; use codec::FullCodec; use sp_runtime::DispatchError; -use crate::storage::StorageMap; -use crate::traits::misc::HandleLifetime; /// An abstraction of a value stored within storage, but possibly as part of a larger composite /// item. @@ -47,25 +46,26 @@ pub trait StoredMap { let r = f(&mut account); *x = Some(account); r - } + }, }) } /// Mutate the item, removing or resetting to default value if it has been mutated to `None`. /// /// This is infallible as long as the value does not get destroyed. - fn mutate_exists( - k: &K, - f: impl FnOnce(&mut Option) -> R, - ) -> Result { + fn mutate_exists(k: &K, f: impl FnOnce(&mut Option) -> R) -> Result { Self::try_mutate_exists(k, |x| -> Result { Ok(f(x)) }) } /// Set the item to something new. - fn insert(k: &K, t: T) -> Result<(), DispatchError> { Self::mutate(k, |i| *i = t) } + fn insert(k: &K, t: T) -> Result<(), DispatchError> { + Self::mutate(k, |i| *i = t) + } /// Remove the item or otherwise replace it with its default value; we don't care which. - fn remove(k: &K) -> Result<(), DispatchError> { Self::mutate_exists(k, |x| *x = None) } + fn remove(k: &K) -> Result<(), DispatchError> { + Self::mutate_exists(k, |x| *x = None) + } } /// A shim for placing around a storage item in order to use it as a `StoredValue`. Ideally this @@ -81,12 +81,15 @@ pub trait StoredMap { /// system module's `CallOnCreatedAccount` and `CallKillAccount`. 
pub struct StorageMapShim(sp_std::marker::PhantomData<(S, L, K, T)>); impl< - S: StorageMap, - L: HandleLifetime, - K: FullCodec, - T: FullCodec + Default, -> StoredMap for StorageMapShim { - fn get(k: &K) -> T { S::get(k) } + S: StorageMap, + L: HandleLifetime, + K: FullCodec, + T: FullCodec + Default, + > StoredMap for StorageMapShim +{ + fn get(k: &K) -> T { + S::get(k) + } fn insert(k: &K, t: T) -> Result<(), DispatchError> { if !S::contains_key(&k) { L::created(k)?; diff --git a/frame/support/src/traits/tokens.rs b/frame/support/src/traits/tokens.rs index faf8ebfd306ce..aca62bcad65c7 100644 --- a/frame/support/src/traits/tokens.rs +++ b/frame/support/src/traits/tokens.rs @@ -17,15 +17,15 @@ //! Traits for working with tokens and their associated datastructures. +pub mod currency; pub mod fungible; pub mod fungibles; -pub mod currency; pub mod imbalance; +mod misc; pub mod nonfungible; pub mod nonfungibles; -mod misc; +pub use imbalance::Imbalance; pub use misc::{ - BalanceConversion, BalanceStatus, DepositConsequence, - ExistenceRequirement, WithdrawConsequence, WithdrawReasons, + BalanceConversion, BalanceStatus, DepositConsequence, ExistenceRequirement, + WithdrawConsequence, WithdrawReasons, }; -pub use imbalance::Imbalance; diff --git a/frame/support/src/traits/tokens/currency.rs b/frame/support/src/traits/tokens/currency.rs index 7882d04c035bf..6c73a1527b481 100644 --- a/frame/support/src/traits/tokens/currency.rs +++ b/frame/support/src/traits/tokens/currency.rs @@ -17,17 +17,19 @@ //! The Currency trait and associated types. 
-use sp_std::fmt::Debug; -use sp_runtime::traits::MaybeSerializeDeserialize; -use crate::dispatch::{DispatchResult, DispatchError}; -use super::misc::{Balance, WithdrawReasons, ExistenceRequirement}; -use super::imbalance::{Imbalance, SignedImbalance}; +use super::{ + imbalance::{Imbalance, SignedImbalance}, + misc::{Balance, ExistenceRequirement, WithdrawReasons}, +}; +use crate::dispatch::{DispatchError, DispatchResult}; use codec::MaxEncodedLen; +use sp_runtime::traits::MaybeSerializeDeserialize; +use sp_std::fmt::Debug; mod reservable; -pub use reservable::{ReservableCurrency, NamedReservableCurrency}; +pub use reservable::{NamedReservableCurrency, ReservableCurrency}; mod lockable; -pub use lockable::{LockableCurrency, VestingSchedule, LockIdentifier}; +pub use lockable::{LockIdentifier, LockableCurrency, VestingSchedule}; /// Abstraction over a fungible assets system. pub trait Currency { @@ -36,11 +38,11 @@ pub trait Currency { /// The opaque token type for an imbalance. This is returned by unbalanced operations /// and must be dealt with. It may be dropped but cannot be cloned. - type PositiveImbalance: Imbalance; + type PositiveImbalance: Imbalance; /// The opaque token type for an imbalance. This is returned by unbalanced operations /// and must be dealt with. It may be dropped but cannot be cloned. - type NegativeImbalance: Imbalance; + type NegativeImbalance: Imbalance; // PUBLIC IMMUTABLES @@ -123,17 +125,14 @@ pub trait Currency { /// /// As much funds up to `value` will be deducted as possible. If this is less than `value`, /// then a non-zero second item will be returned. - fn slash( - who: &AccountId, - value: Self::Balance - ) -> (Self::NegativeImbalance, Self::Balance); + fn slash(who: &AccountId, value: Self::Balance) -> (Self::NegativeImbalance, Self::Balance); /// Mints `value` to the free balance of `who`. /// /// If `who` doesn't exist, nothing is done and an Err returned. 
fn deposit_into_existing( who: &AccountId, - value: Self::Balance + value: Self::Balance, ) -> Result; /// Similar to deposit_creating, only accepts a `NegativeImbalance` and returns nothing on @@ -152,17 +151,11 @@ pub trait Currency { /// Adds up to `value` to the free balance of `who`. If `who` doesn't exist, it is created. /// /// Infallible. - fn deposit_creating( - who: &AccountId, - value: Self::Balance, - ) -> Self::PositiveImbalance; + fn deposit_creating(who: &AccountId, value: Self::Balance) -> Self::PositiveImbalance; /// Similar to deposit_creating, only accepts a `NegativeImbalance` and returns nothing on /// success. - fn resolve_creating( - who: &AccountId, - value: Self::NegativeImbalance, - ) { + fn resolve_creating(who: &AccountId, value: Self::NegativeImbalance) { let v = value.peek(); drop(value.offset(Self::deposit_creating(who, v))); } diff --git a/frame/support/src/traits/tokens/currency/lockable.rs b/frame/support/src/traits/tokens/currency/lockable.rs index ed3d1cf46362b..94bce216dcbcb 100644 --- a/frame/support/src/traits/tokens/currency/lockable.rs +++ b/frame/support/src/traits/tokens/currency/lockable.rs @@ -17,10 +17,8 @@ //! The lockable currency trait and some associated types. -use crate::dispatch::DispatchResult; -use crate::traits::misc::Get; -use super::Currency; -use super::super::misc::WithdrawReasons; +use super::{super::misc::WithdrawReasons, Currency}; +use crate::{dispatch::DispatchResult, traits::misc::Get}; /// An identifier for a lock. Used for disambiguating different locks so that /// they can be individually replaced or removed. @@ -63,10 +61,7 @@ pub trait LockableCurrency: Currency { ); /// Remove an existing lock. - fn remove_lock( - id: LockIdentifier, - who: &AccountId, - ); + fn remove_lock(id: LockIdentifier, who: &AccountId); } /// A vesting schedule over a currency. 
This allows a particular currency to have vesting limits @@ -80,7 +75,8 @@ pub trait VestingSchedule { /// Get the amount that is currently being vested and cannot be transferred out of this account. /// Returns `None` if the account has no vesting schedule. - fn vesting_balance(who: &AccountId) -> Option<>::Balance>; + fn vesting_balance(who: &AccountId) + -> Option<>::Balance>; /// Adds a vesting schedule to a given account. /// diff --git a/frame/support/src/traits/tokens/currency/reservable.rs b/frame/support/src/traits/tokens/currency/reservable.rs index 69017357cfa84..41220ca81cacd 100644 --- a/frame/support/src/traits/tokens/currency/reservable.rs +++ b/frame/support/src/traits/tokens/currency/reservable.rs @@ -17,9 +17,8 @@ //! The reservable currency trait. -use super::Currency; -use super::super::misc::BalanceStatus; -use crate::dispatch::{DispatchResult, DispatchError}; +use super::{super::misc::BalanceStatus, Currency}; +use crate::dispatch::{DispatchError, DispatchResult}; /// A currency where funds can be reserved from the user. pub trait ReservableCurrency: Currency { @@ -33,7 +32,7 @@ pub trait ReservableCurrency: Currency { /// is less than `value`, then a non-zero second item will be returned. 
fn slash_reserved( who: &AccountId, - value: Self::Balance + value: Self::Balance, ) -> (Self::NegativeImbalance, Self::Balance); /// The amount of the balance of a given account that is externally reserved; this can still get @@ -94,7 +93,7 @@ pub trait NamedReservableCurrency: ReservableCurrency { fn slash_reserved_named( id: &Self::ReserveIdentifier, who: &AccountId, - value: Self::Balance + value: Self::Balance, ) -> (Self::NegativeImbalance, Self::Balance); /// The amount of the balance of a given account that is externally reserved; this can still get @@ -114,7 +113,11 @@ pub trait NamedReservableCurrency: ReservableCurrency { /// /// If the free balance is lower than `value`, then no funds will be moved and an `Err` will /// be returned to notify of this. This is different behavior than `unreserve`. - fn reserve_named(id: &Self::ReserveIdentifier, who: &AccountId, value: Self::Balance) -> DispatchResult; + fn reserve_named( + id: &Self::ReserveIdentifier, + who: &AccountId, + value: Self::Balance, + ) -> DispatchResult; /// Moves up to `value` from reserved balance to free balance. This function cannot fail. /// @@ -126,7 +129,11 @@ pub trait NamedReservableCurrency: ReservableCurrency { /// - This is different from `reserve`. /// - If the remaining reserved balance is less than `ExistentialDeposit`, it will /// invoke `on_reserved_too_low` and could reap the account. - fn unreserve_named(id: &Self::ReserveIdentifier, who: &AccountId, value: Self::Balance) -> Self::Balance; + fn unreserve_named( + id: &Self::ReserveIdentifier, + who: &AccountId, + value: Self::Balance, + ) -> Self::Balance; /// Moves up to `value` from reserved balance of account `slashed` to balance of account /// `beneficiary`. `beneficiary` must exist for this to succeed. If it does not, `Err` will be @@ -147,16 +154,21 @@ pub trait NamedReservableCurrency: ReservableCurrency { /// /// This will reserve extra amount of current reserved balance is less than `value`. 
/// And unreserve if current reserved balance is greater than `value`. - fn ensure_reserved_named(id: &Self::ReserveIdentifier, who: &AccountId, value: Self::Balance) -> DispatchResult { + fn ensure_reserved_named( + id: &Self::ReserveIdentifier, + who: &AccountId, + value: Self::Balance, + ) -> DispatchResult { let current = Self::reserved_balance_named(id, who); - if current > value { + if current > value { // we always have enough balance to unreserve here Self::unreserve_named(id, who, current - value); Ok(()) } else if value > current { // we checked value > current Self::reserve_named(id, who, value - current) - } else { // current == value + } else { + // current == value Ok(()) } } @@ -173,7 +185,10 @@ pub trait NamedReservableCurrency: ReservableCurrency { /// Slash all the reserved balance, returning the negative imbalance created. /// /// Is a no-op if the value to be slashed is zero. - fn slash_all_reserved_named(id: &Self::ReserveIdentifier, who: &AccountId) -> Self::NegativeImbalance { + fn slash_all_reserved_named( + id: &Self::ReserveIdentifier, + who: &AccountId, + ) -> Self::NegativeImbalance { let value = Self::reserved_balance_named(id, who); Self::slash_reserved_named(id, who, value).0 } diff --git a/frame/support/src/traits/tokens/fungible.rs b/frame/support/src/traits/tokens/fungible.rs index 5472212aaa65e..b033236d447bb 100644 --- a/frame/support/src/traits/tokens/fungible.rs +++ b/frame/support/src/traits/tokens/fungible.rs @@ -17,16 +17,20 @@ //! The traits for dealing with a single fungible token class and any associated types. 
-use super::*; +use super::{ + misc::{Balance, DepositConsequence, WithdrawConsequence}, + *, +}; +use crate::{ + dispatch::{DispatchError, DispatchResult}, + traits::misc::Get, +}; use sp_runtime::traits::Saturating; -use crate::traits::misc::Get; -use crate::dispatch::{DispatchResult, DispatchError}; -use super::misc::{DepositConsequence, WithdrawConsequence, Balance}; mod balanced; mod imbalance; pub use balanced::{Balanced, Unbalanced}; -pub use imbalance::{Imbalance, HandleImbalanceDrop, DebtOf, CreditOf}; +pub use imbalance::{CreditOf, DebtOf, HandleImbalanceDrop, Imbalance}; /// Trait for providing balance-inspection access to a fungible asset. pub trait Inspect { @@ -84,7 +88,10 @@ pub trait Mutate: Inspect { let extra = Self::can_withdraw(&source, amount).into_result()?; Self::can_deposit(&dest, amount.saturating_add(extra)).into_result()?; let actual = Self::burn_from(source, amount)?; - debug_assert!(actual == amount.saturating_add(extra), "can_withdraw must agree with withdraw; qed"); + debug_assert!( + actual == amount.saturating_add(extra), + "can_withdraw must agree with withdraw; qed" + ); match Self::mint_into(dest, actual) { Ok(_) => Ok(actual), Err(err) => { @@ -93,7 +100,7 @@ pub trait Mutate: Inspect { let revert = Self::mint_into(source, actual); debug_assert!(revert.is_ok(), "withdrew funds previously; qed"); Err(err) - } + }, } } } @@ -129,8 +136,11 @@ pub trait MutateHold: InspectHold + Transfer { /// /// If `best_effort` is `true`, then the amount actually unreserved and returned as the inner /// value of `Ok` may be smaller than the `amount` passed. - fn release(who: &AccountId, amount: Self::Balance, best_effort: bool) - -> Result; + fn release( + who: &AccountId, + amount: Self::Balance, + best_effort: bool, + ) -> Result; /// Transfer held funds into a destination account. /// @@ -160,17 +170,17 @@ pub trait BalancedHold: Balanced + MutateHold { /// /// As much funds that are on hold up to `amount` will be deducted as possible. 
If this is less /// than `amount`, then a non-zero second item will be returned. - fn slash_held(who: &AccountId, amount: Self::Balance) - -> (CreditOf, Self::Balance); + fn slash_held( + who: &AccountId, + amount: Self::Balance, + ) -> (CreditOf, Self::Balance); } -impl< - AccountId, - T: Balanced + MutateHold, -> BalancedHold for T { - fn slash_held(who: &AccountId, amount: Self::Balance) - -> (CreditOf, Self::Balance) - { +impl + MutateHold> BalancedHold for T { + fn slash_held( + who: &AccountId, + amount: Self::Balance, + ) -> (CreditOf, Self::Balance) { let actual = match Self::release(who, amount, true) { Ok(x) => x, Err(_) => return (Imbalance::default(), amount), @@ -185,15 +195,14 @@ pub struct ItemOf< F: fungibles::Inspect, A: Get<>::AssetId>, AccountId, ->( - sp_std::marker::PhantomData<(F, A, AccountId)> -); +>(sp_std::marker::PhantomData<(F, A, AccountId)>); impl< - F: fungibles::Inspect, - A: Get<>::AssetId>, - AccountId, -> Inspect for ItemOf { + F: fungibles::Inspect, + A: Get<>::AssetId>, + AccountId, + > Inspect for ItemOf +{ type Balance = >::Balance; fn total_issuance() -> Self::Balance { >::total_issuance(A::get()) @@ -216,10 +225,11 @@ impl< } impl< - F: fungibles::Mutate, - A: Get<>::AssetId>, - AccountId, -> Mutate for ItemOf { + F: fungibles::Mutate, + A: Get<>::AssetId>, + AccountId, + > Mutate for ItemOf +{ fn mint_into(who: &AccountId, amount: Self::Balance) -> DispatchResult { >::mint_into(A::get(), who, amount) } @@ -229,22 +239,27 @@ impl< } impl< - F: fungibles::Transfer, - A: Get<>::AssetId>, - AccountId, -> Transfer for ItemOf { - fn transfer(source: &AccountId, dest: &AccountId, amount: Self::Balance, keep_alive: bool) - -> Result - { + F: fungibles::Transfer, + A: Get<>::AssetId>, + AccountId, + > Transfer for ItemOf +{ + fn transfer( + source: &AccountId, + dest: &AccountId, + amount: Self::Balance, + keep_alive: bool, + ) -> Result { >::transfer(A::get(), source, dest, amount, keep_alive) } } impl< - F: fungibles::InspectHold, 
- A: Get<>::AssetId>, - AccountId, -> InspectHold for ItemOf { + F: fungibles::InspectHold, + A: Get<>::AssetId>, + AccountId, + > InspectHold for ItemOf +{ fn balance_on_hold(who: &AccountId) -> Self::Balance { >::balance_on_hold(A::get(), who) } @@ -254,16 +269,19 @@ impl< } impl< - F: fungibles::MutateHold, - A: Get<>::AssetId>, - AccountId, -> MutateHold for ItemOf { + F: fungibles::MutateHold, + A: Get<>::AssetId>, + AccountId, + > MutateHold for ItemOf +{ fn hold(who: &AccountId, amount: Self::Balance) -> DispatchResult { >::hold(A::get(), who, amount) } - fn release(who: &AccountId, amount: Self::Balance, best_effort: bool) - -> Result - { + fn release( + who: &AccountId, + amount: Self::Balance, + best_effort: bool, + ) -> Result { >::release(A::get(), who, amount, best_effort) } fn transfer_held( @@ -285,23 +303,30 @@ impl< } impl< - F: fungibles::Unbalanced, - A: Get<>::AssetId>, - AccountId, -> Unbalanced for ItemOf { + F: fungibles::Unbalanced, + A: Get<>::AssetId>, + AccountId, + > Unbalanced for ItemOf +{ fn set_balance(who: &AccountId, amount: Self::Balance) -> DispatchResult { >::set_balance(A::get(), who, amount) } fn set_total_issuance(amount: Self::Balance) -> () { >::set_total_issuance(A::get(), amount) } - fn decrease_balance(who: &AccountId, amount: Self::Balance) -> Result { + fn decrease_balance( + who: &AccountId, + amount: Self::Balance, + ) -> Result { >::decrease_balance(A::get(), who, amount) } fn decrease_balance_at_most(who: &AccountId, amount: Self::Balance) -> Self::Balance { >::decrease_balance_at_most(A::get(), who, amount) } - fn increase_balance(who: &AccountId, amount: Self::Balance) -> Result { + fn increase_balance( + who: &AccountId, + amount: Self::Balance, + ) -> Result { >::increase_balance(A::get(), who, amount) } fn increase_balance_at_most(who: &AccountId, amount: Self::Balance) -> Self::Balance { diff --git a/frame/support/src/traits/tokens/fungible/balanced.rs b/frame/support/src/traits/tokens/fungible/balanced.rs 
index 1cd0fcf0ca414..8054cc415ba32 100644 --- a/frame/support/src/traits/tokens/fungible/balanced.rs +++ b/frame/support/src/traits/tokens/fungible/balanced.rs @@ -18,12 +18,16 @@ //! The trait and associated types for sets of fungible tokens that manage total issuance without //! requiring atomic balanced operations. -use super::*; +use super::{super::Imbalance as ImbalanceT, *}; +use crate::{ + dispatch::{DispatchError, DispatchResult}, + traits::misc::{SameOrOther, TryDrop}, +}; +use sp_runtime::{ + traits::{CheckedAdd, Zero}, + ArithmeticError, TokenError, +}; use sp_std::marker::PhantomData; -use sp_runtime::{TokenError, ArithmeticError, traits::{CheckedAdd, Zero}}; -use super::super::Imbalance as ImbalanceT; -use crate::traits::misc::{SameOrOther, TryDrop}; -use crate::dispatch::{DispatchResult, DispatchError}; /// A fungible token class where any creation and deletion of tokens is semi-explicit and where the /// total supply is maintained automatically. @@ -65,10 +69,7 @@ pub trait Balanced: Inspect { /// /// As much funds up to `value` will be deducted as possible. If this is less than `value`, /// then a non-zero second item will be returned. - fn slash( - who: &AccountId, - amount: Self::Balance, - ) -> (CreditOf, Self::Balance); + fn slash(who: &AccountId, amount: Self::Balance) -> (CreditOf, Self::Balance); /// Mints exactly `value` into the account of `who`. /// @@ -132,7 +133,7 @@ pub trait Balanced: Inspect { SameOrOther::Other(rest) => { debug_assert!(false, "ok withdraw return must be at least debt value; qed"); Err(rest) - } + }, } } } @@ -158,9 +159,10 @@ pub trait Unbalanced: Inspect { /// /// Minimum balance will be respected and the returned imbalance may be up to /// `Self::minimum_balance() - 1` greater than `amount`. 
- fn decrease_balance(who: &AccountId, amount: Self::Balance) - -> Result - { + fn decrease_balance( + who: &AccountId, + amount: Self::Balance, + ) -> Result { let old_balance = Self::balance(who); let (mut new_balance, mut amount) = if old_balance < amount { Err(TokenError::NoFunds)? @@ -182,9 +184,7 @@ pub trait Unbalanced: Inspect { /// `Self::minimum_balance() - 1` greater than `amount`. /// /// Return the imbalance by which the account was reduced. - fn decrease_balance_at_most(who: &AccountId, amount: Self::Balance) - -> Self::Balance - { + fn decrease_balance_at_most(who: &AccountId, amount: Self::Balance) -> Self::Balance { let old_balance = Self::balance(who); let (mut new_balance, mut amount) = if old_balance < amount { (Zero::zero(), old_balance) @@ -217,9 +217,10 @@ pub trait Unbalanced: Inspect { /// /// Minimum balance will be respected and an error will be returned if /// `amount < Self::minimum_balance()` when the account of `who` is zero. - fn increase_balance(who: &AccountId, amount: Self::Balance) - -> Result - { + fn increase_balance( + who: &AccountId, + amount: Self::Balance, + ) -> Result { let old_balance = Self::balance(who); let new_balance = old_balance.checked_add(&amount).ok_or(ArithmeticError::Overflow)?; if new_balance < Self::minimum_balance() { @@ -237,9 +238,7 @@ pub trait Unbalanced: Inspect { /// `amount < Self::minimum_balance()`. /// /// Return the imbalance by which the account was increased. - fn increase_balance_at_most(who: &AccountId, amount: Self::Balance) - -> Self::Balance - { + fn increase_balance_at_most(who: &AccountId, amount: Self::Balance) -> Self::Balance { let old_balance = Self::balance(who); let mut new_balance = old_balance.saturating_add(amount); let mut amount = new_balance - old_balance; @@ -300,16 +299,12 @@ type Debt = Imbalance< >; /// Create some `Credit` item. Only for internal use. 
-fn credit>( - amount: U::Balance, -) -> Credit { +fn credit>(amount: U::Balance) -> Credit { Imbalance::new(amount) } /// Create some `Debt` item. Only for internal use. -fn debt>( - amount: U::Balance, -) -> Debt { +fn debt>(amount: U::Balance) -> Debt { Imbalance::new(amount) } @@ -328,10 +323,7 @@ impl> Balanced for U { U::set_total_issuance(new); credit(new - old) } - fn slash( - who: &AccountId, - amount: Self::Balance, - ) -> (Credit, Self::Balance) { + fn slash(who: &AccountId, amount: Self::Balance) -> (Credit, Self::Balance) { let slashed = U::decrease_balance_at_most(who, amount); // `slashed` could be less than, greater than or equal to `amount`. // If slashed == amount, it means the account had at least amount in it and it could all be @@ -344,7 +336,7 @@ impl> Balanced for U { } fn deposit( who: &AccountId, - amount: Self::Balance + amount: Self::Balance, ) -> Result, DispatchError> { let increase = U::increase_balance(who, amount)?; Ok(debt(increase)) diff --git a/frame/support/src/traits/tokens/fungible/imbalance.rs b/frame/support/src/traits/tokens/fungible/imbalance.rs index ab3694359ce9c..e6d3b5bed66aa 100644 --- a/frame/support/src/traits/tokens/fungible/imbalance.rs +++ b/frame/support/src/traits/tokens/fungible/imbalance.rs @@ -18,13 +18,10 @@ //! The imbalance type and its associates, which handles keeps everything adding up properly with //! unbalanced operations. -use super::*; +use super::{super::Imbalance as ImbalanceT, balanced::Balanced, misc::Balance, *}; +use crate::traits::misc::{SameOrOther, TryDrop}; +use sp_runtime::{traits::Zero, RuntimeDebug}; use sp_std::marker::PhantomData; -use sp_runtime::{RuntimeDebug, traits::Zero}; -use super::misc::Balance; -use super::balanced::Balanced; -use crate::traits::misc::{TryDrop, SameOrOther}; -use super::super::Imbalance as ImbalanceT; /// Handler for when an imbalance gets dropped. This could handle either a credit (negative) or /// debt (positive) imbalance. 
@@ -49,11 +46,9 @@ pub struct Imbalance< _phantom: PhantomData<(OnDrop, OppositeOnDrop)>, } -impl< - B: Balance, - OnDrop: HandleImbalanceDrop, - OppositeOnDrop: HandleImbalanceDrop -> Drop for Imbalance { +impl, OppositeOnDrop: HandleImbalanceDrop> Drop + for Imbalance +{ fn drop(&mut self) { if !self.amount.is_zero() { OnDrop::handle(self.amount) @@ -61,42 +56,34 @@ impl< } } -impl< - B: Balance, - OnDrop: HandleImbalanceDrop, - OppositeOnDrop: HandleImbalanceDrop, -> TryDrop for Imbalance { +impl, OppositeOnDrop: HandleImbalanceDrop> TryDrop + for Imbalance +{ /// Drop an instance cleanly. Only works if its value represents "no-operation". fn try_drop(self) -> Result<(), Self> { self.drop_zero() } } -impl< - B: Balance, - OnDrop: HandleImbalanceDrop, - OppositeOnDrop: HandleImbalanceDrop, -> Default for Imbalance { +impl, OppositeOnDrop: HandleImbalanceDrop> Default + for Imbalance +{ fn default() -> Self { Self::zero() } } -impl< - B: Balance, - OnDrop: HandleImbalanceDrop, - OppositeOnDrop: HandleImbalanceDrop, -> Imbalance { +impl, OppositeOnDrop: HandleImbalanceDrop> + Imbalance +{ pub(crate) fn new(amount: B) -> Self { Self { amount, _phantom: PhantomData } } } -impl< - B: Balance, - OnDrop: HandleImbalanceDrop, - OppositeOnDrop: HandleImbalanceDrop, -> ImbalanceT for Imbalance { +impl, OppositeOnDrop: HandleImbalanceDrop> + ImbalanceT for Imbalance +{ type Opposite = Imbalance; fn zero() -> Self { @@ -127,9 +114,10 @@ impl< self.amount = self.amount.saturating_add(other.amount); sp_std::mem::forget(other); } - fn offset(self, other: Imbalance) - -> SameOrOther> - { + fn offset( + self, + other: Imbalance, + ) -> SameOrOther> { let (a, b) = (self.amount, other.amount); sp_std::mem::forget((self, other)); diff --git a/frame/support/src/traits/tokens/fungibles.rs b/frame/support/src/traits/tokens/fungibles.rs index 490f28dfb453a..3f5a1c75860c2 100644 --- a/frame/support/src/traits/tokens/fungibles.rs +++ b/frame/support/src/traits/tokens/fungibles.rs @@ 
-17,15 +17,17 @@ //! The traits for sets of fungible tokens and any associated types. -use super::*; +use super::{ + misc::{AssetId, Balance}, + *, +}; use crate::dispatch::{DispatchError, DispatchResult}; -use super::misc::{AssetId, Balance}; use sp_runtime::traits::Saturating; mod balanced; pub use balanced::{Balanced, Unbalanced}; mod imbalance; -pub use imbalance::{Imbalance, HandleImbalanceDrop, DebtOf, CreditOf}; +pub use imbalance::{CreditOf, DebtOf, HandleImbalanceDrop, Imbalance}; /// Trait for providing balance-inspection access to a set of named fungible assets. pub trait Inspect { @@ -48,8 +50,11 @@ pub trait Inspect { fn reducible_balance(asset: Self::AssetId, who: &AccountId, keep_alive: bool) -> Self::Balance; /// Returns `true` if the `asset` balance of `who` may be increased by `amount`. - fn can_deposit(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) - -> DepositConsequence; + fn can_deposit( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> DepositConsequence; /// Returns `Failed` if the `asset` balance of `who` may not be decreased by `amount`, otherwise /// the consequence. @@ -87,8 +92,11 @@ pub trait Mutate: Inspect { /// Due to minimum balance requirements, it's possible that the amount withdrawn could be up to /// `Self::minimum_balance() - 1` more than the `amount`. The total amount withdrawn is returned /// in an `Ok` result. This may be safely ignored if you don't mind the overall supply reducing. - fn burn_from(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) - -> Result; + fn burn_from( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result; /// Attempt to reduce the `asset` balance of `who` by as much as possible up to `amount`, and /// possibly slightly more due to minimum_balance requirements. 
If no decrease is possible then @@ -97,9 +105,11 @@ pub trait Mutate: Inspect { /// /// The default implementation just uses `withdraw` along with `reducible_balance` to ensure /// that is doesn't fail. - fn slash(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) - -> Result - { + fn slash( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result { Self::burn_from(asset, who, Self::reducible_balance(asset, who, false).min(amount)) } @@ -114,7 +124,10 @@ pub trait Mutate: Inspect { let extra = Self::can_withdraw(asset, &source, amount).into_result()?; Self::can_deposit(asset, &dest, amount.saturating_add(extra)).into_result()?; let actual = Self::burn_from(asset, source, amount)?; - debug_assert!(actual == amount.saturating_add(extra), "can_withdraw must agree with withdraw; qed"); + debug_assert!( + actual == amount.saturating_add(extra), + "can_withdraw must agree with withdraw; qed" + ); match Self::mint_into(asset, dest, actual) { Ok(_) => Ok(actual), Err(err) => { @@ -123,7 +136,7 @@ pub trait Mutate: Inspect { let revert = Self::mint_into(asset, source, actual); debug_assert!(revert.is_ok(), "withdrew funds previously; qed"); Err(err) - } + }, } } } @@ -158,8 +171,12 @@ pub trait MutateHold: InspectHold + Transfer { /// /// If `best_effort` is `true`, then the amount actually released and returned as the inner /// value of `Ok` may be smaller than the `amount` passed. - fn release(asset: Self::AssetId, who: &AccountId, amount: Self::Balance, best_effort: bool) - -> Result; + fn release( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + best_effort: bool, + ) -> Result; /// Transfer held funds into a destination account. /// @@ -190,17 +207,19 @@ pub trait BalancedHold: Balanced + MutateHold { /// /// As much funds up to `amount` will be deducted as possible. If this is less than `amount`, /// then a non-zero second item will be returned. 
- fn slash_held(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) - -> (CreditOf, Self::Balance); + fn slash_held( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> (CreditOf, Self::Balance); } -impl< - AccountId, - T: Balanced + MutateHold, -> BalancedHold for T { - fn slash_held(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) - -> (CreditOf, Self::Balance) - { +impl + MutateHold> BalancedHold for T { + fn slash_held( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> (CreditOf, Self::Balance) { let actual = match Self::release(asset, who, amount, true) { Ok(x) => x, Err(_) => return (Imbalance::zero(asset), amount), diff --git a/frame/support/src/traits/tokens/fungibles/balanced.rs b/frame/support/src/traits/tokens/fungibles/balanced.rs index a1016f8c11955..626925fb4c1ee 100644 --- a/frame/support/src/traits/tokens/fungibles/balanced.rs +++ b/frame/support/src/traits/tokens/fungibles/balanced.rs @@ -19,11 +19,16 @@ //! requiring atomic balanced operations. use super::*; -use sp_std::marker::PhantomData; -use sp_runtime::{ArithmeticError, TokenError, traits::{Zero, CheckedAdd}}; +use crate::{ + dispatch::{DispatchError, DispatchResult}, + traits::misc::{SameOrOther, TryDrop}, +}; use sp_arithmetic::traits::Saturating; -use crate::dispatch::{DispatchError, DispatchResult}; -use crate::traits::misc::{SameOrOther, TryDrop}; +use sp_runtime::{ + traits::{CheckedAdd, Zero}, + ArithmeticError, TokenError, +}; +use sp_std::marker::PhantomData; /// A fungible token class where any creation and deletion of tokens is semi-explicit and where the /// total supply is maintained automatically. @@ -55,9 +60,10 @@ pub trait Balanced: Inspect { /// /// This is just the same as burning and issuing the same amount and has no effect on the /// total issuance. 
- fn pair(asset: Self::AssetId, amount: Self::Balance) - -> (DebtOf, CreditOf) - { + fn pair( + asset: Self::AssetId, + amount: Self::Balance, + ) -> (DebtOf, CreditOf) { (Self::rescind(asset, amount), Self::issue(asset, amount)) } @@ -143,11 +149,11 @@ pub trait Balanced: Inspect { Ok(SameOrOther::Other(rest)) => { debug_assert!(false, "ok withdraw return must be at least debt value; qed"); Err(rest) - } + }, Err(_) => { debug_assert!(false, "debt.asset is credit.asset; qed"); Ok(CreditOf::::zero(asset)) - } + }, } } } @@ -173,9 +179,11 @@ pub trait Unbalanced: Inspect { /// /// Minimum balance will be respected and the returned imbalance may be up to /// `Self::minimum_balance() - 1` greater than `amount`. - fn decrease_balance(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) - -> Result - { + fn decrease_balance( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result { let old_balance = Self::balance(asset, who); let (mut new_balance, mut amount) = if old_balance < amount { Err(TokenError::NoFunds)? @@ -197,9 +205,11 @@ pub trait Unbalanced: Inspect { /// `Self::minimum_balance() - 1` greater than `amount`. /// /// Return the imbalance by which the account was reduced. - fn decrease_balance_at_most(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) - -> Self::Balance - { + fn decrease_balance_at_most( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Self::Balance { let old_balance = Self::balance(asset, who); let (mut new_balance, mut amount) = if old_balance < amount { (Zero::zero(), old_balance) @@ -232,9 +242,11 @@ pub trait Unbalanced: Inspect { /// /// Minimum balance will be respected and an error will be returned if /// `amount < Self::minimum_balance()` when the account of `who` is zero. 
- fn increase_balance(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) - -> Result - { + fn increase_balance( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result { let old_balance = Self::balance(asset, who); let new_balance = old_balance.checked_add(&amount).ok_or(ArithmeticError::Overflow)?; if new_balance < Self::minimum_balance(asset) { @@ -252,9 +264,11 @@ pub trait Unbalanced: Inspect { /// `amount < Self::minimum_balance()`. /// /// Return the imbalance by which the account was increased. - fn increase_balance_at_most(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) - -> Self::Balance - { + fn increase_balance_at_most( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Self::Balance { let old_balance = Self::balance(asset, who); let mut new_balance = old_balance.saturating_add(amount); let mut amount = new_balance - old_balance; @@ -361,7 +375,7 @@ impl> Balanced for U { fn deposit( asset: Self::AssetId, who: &AccountId, - amount: Self::Balance + amount: Self::Balance, ) -> Result, DispatchError> { let increase = U::increase_balance(asset, who, amount)?; Ok(debt(asset, increase)) diff --git a/frame/support/src/traits/tokens/fungibles/imbalance.rs b/frame/support/src/traits/tokens/fungibles/imbalance.rs index 9ecdeac1d4f06..2195cacc42822 100644 --- a/frame/support/src/traits/tokens/fungibles/imbalance.rs +++ b/frame/support/src/traits/tokens/fungibles/imbalance.rs @@ -18,12 +18,14 @@ //! The imbalance type and its associates, which handles keeps everything adding up properly with //! unbalanced operations. 
-use super::*; +use super::{ + balanced::Balanced, + fungibles::{AssetId, Balance}, + *, +}; +use crate::traits::misc::{SameOrOther, TryDrop}; +use sp_runtime::{traits::Zero, RuntimeDebug}; use sp_std::marker::PhantomData; -use sp_runtime::{RuntimeDebug, traits::Zero}; -use super::fungibles::{AssetId, Balance}; -use super::balanced::Balanced; -use crate::traits::misc::{TryDrop, SameOrOther}; /// Handler for when an imbalance gets dropped. This could handle either a credit (negative) or /// debt (positive) imbalance. @@ -50,11 +52,12 @@ pub struct Imbalance< } impl< - A: AssetId, - B: Balance, - OnDrop: HandleImbalanceDrop, - OppositeOnDrop: HandleImbalanceDrop -> Drop for Imbalance { + A: AssetId, + B: Balance, + OnDrop: HandleImbalanceDrop, + OppositeOnDrop: HandleImbalanceDrop, + > Drop for Imbalance +{ fn drop(&mut self) { if !self.amount.is_zero() { OnDrop::handle(self.asset, self.amount) @@ -63,11 +66,12 @@ impl< } impl< - A: AssetId, - B: Balance, - OnDrop: HandleImbalanceDrop, - OppositeOnDrop: HandleImbalanceDrop, -> TryDrop for Imbalance { + A: AssetId, + B: Balance, + OnDrop: HandleImbalanceDrop, + OppositeOnDrop: HandleImbalanceDrop, + > TryDrop for Imbalance +{ /// Drop an instance cleanly. Only works if its value represents "no-operation". 
fn try_drop(self) -> Result<(), Self> { self.drop_zero() @@ -75,11 +79,12 @@ impl< } impl< - A: AssetId, - B: Balance, - OnDrop: HandleImbalanceDrop, - OppositeOnDrop: HandleImbalanceDrop, -> Imbalance { + A: AssetId, + B: Balance, + OnDrop: HandleImbalanceDrop, + OppositeOnDrop: HandleImbalanceDrop, + > Imbalance +{ pub fn zero(asset: A) -> Self { Self { asset, amount: Zero::zero(), _phantom: PhantomData } } @@ -122,7 +127,10 @@ impl< Err(other) } } - pub fn offset(self, other: Imbalance) -> Result< + pub fn offset( + self, + other: Imbalance, + ) -> Result< SameOrOther>, (Self, Imbalance), > { diff --git a/frame/support/src/traits/tokens/imbalance.rs b/frame/support/src/traits/tokens/imbalance.rs index 9652b9a0275a1..0f7b38a65efc8 100644 --- a/frame/support/src/traits/tokens/imbalance.rs +++ b/frame/support/src/traits/tokens/imbalance.rs @@ -18,16 +18,16 @@ //! The imbalance trait type and its associates, which handles keeps everything adding up properly //! with unbalanced operations. -use sp_std::ops::Div; +use crate::traits::misc::{SameOrOther, TryDrop}; use sp_runtime::traits::Saturating; -use crate::traits::misc::{TryDrop, SameOrOther}; +use sp_std::ops::Div; -mod split_two_ways; -mod signed_imbalance; mod on_unbalanced; -pub use split_two_ways::SplitTwoWays; -pub use signed_imbalance::SignedImbalance; +mod signed_imbalance; +mod split_two_ways; pub use on_unbalanced::OnUnbalanced; +pub use signed_imbalance::SignedImbalance; +pub use split_two_ways::SplitTwoWays; /// A trait for a not-quite Linear Type that tracks an imbalance. /// @@ -78,10 +78,13 @@ pub trait Imbalance: Sized + TryDrop + Default { /// NOTE: This requires up to `first + second` room for a multiply, and `first + second` should /// fit into a `u32`. Overflow will safely saturate in both cases. 
fn ration(self, first: u32, second: u32) -> (Self, Self) - where Balance: From + Saturating + Div + where + Balance: From + Saturating + Div, { let total: u32 = first.saturating_add(second); - if total == 0 { return (Self::zero(), Self::zero()) } + if total == 0 { + return (Self::zero(), Self::zero()) + } let amount1 = self.peek().saturating_mul(first.into()) / total.into(); self.split(amount1) } @@ -100,7 +103,8 @@ pub trait Imbalance: Sized + TryDrop + Default { /// /// A convenient replacement for `split` and `merge`. fn ration_merge(self, first: u32, second: u32, others: (Self, Self)) -> (Self, Self) - where Balance: From + Saturating + Div + where + Balance: From + Saturating + Div, { let (a, b) = self.ration(first, second); (a.merge(others.0), b.merge(others.1)) @@ -121,7 +125,8 @@ pub trait Imbalance: Sized + TryDrop + Default { /// /// A convenient replacement for `split` and `merge`. fn ration_merge_into(self, first: u32, second: u32, others: &mut (Self, Self)) - where Balance: From + Saturating + Div + where + Balance: From + Saturating + Div, { let (a, b) = self.ration(first, second); others.0.subsume(a); @@ -167,7 +172,7 @@ pub trait Imbalance: Sized + TryDrop + Default { /// greater value than the `other`. Otherwise returns `Err` with an instance of /// the `Opposite`. In both cases the value represents the combination of `self` /// and `other`. - fn offset(self, other: Self::Opposite)-> SameOrOther; + fn offset(self, other: Self::Opposite) -> SameOrOther; /// The raw value of self. fn peek(&self) -> Balance; diff --git a/frame/support/src/traits/tokens/imbalance/on_unbalanced.rs b/frame/support/src/traits/tokens/imbalance/on_unbalanced.rs index f3ecc14308e74..bc7df0e2acf33 100644 --- a/frame/support/src/traits/tokens/imbalance/on_unbalanced.rs +++ b/frame/support/src/traits/tokens/imbalance/on_unbalanced.rs @@ -33,7 +33,10 @@ pub trait OnUnbalanced { /// Handler for some imbalances. 
The different imbalances might have different origins or /// meanings, dependent on the context. Will default to simply calling on_unbalanced for all /// of them. Infallible. - fn on_unbalanceds(amounts: impl Iterator) where Imbalance: crate::traits::Imbalance { + fn on_unbalanceds(amounts: impl Iterator) + where + Imbalance: crate::traits::Imbalance, + { Self::on_unbalanced(amounts.fold(Imbalance::zero(), |i, x| x.merge(i))) } @@ -44,7 +47,9 @@ pub trait OnUnbalanced { /// Actually handle a non-zero imbalance. You probably want to implement this rather than /// `on_unbalanced`. - fn on_nonzero_unbalanced(amount: Imbalance) { drop(amount); } + fn on_nonzero_unbalanced(amount: Imbalance) { + drop(amount); + } } impl OnUnbalanced for () {} diff --git a/frame/support/src/traits/tokens/imbalance/signed_imbalance.rs b/frame/support/src/traits/tokens/imbalance/signed_imbalance.rs index e3523f86804fd..59302b975854f 100644 --- a/frame/support/src/traits/tokens/imbalance/signed_imbalance.rs +++ b/frame/support/src/traits/tokens/imbalance/signed_imbalance.rs @@ -17,14 +17,14 @@ //! Convenience type for managing an imbalance whose sign is unknown. +use super::super::imbalance::Imbalance; +use crate::traits::misc::SameOrOther; use codec::FullCodec; -use sp_std::fmt::Debug; use sp_runtime::traits::{AtLeast32BitUnsigned, MaybeSerializeDeserialize}; -use crate::traits::misc::SameOrOther; -use super::super::imbalance::Imbalance; +use sp_std::fmt::Debug; /// Either a positive or a negative imbalance. -pub enum SignedImbalance>{ +pub enum SignedImbalance> { /// A positive imbalance (funds have been created but none destroyed). Positive(PositiveImbalance), /// A negative imbalance (funds have been destroyed but none created). 
@@ -32,10 +32,11 @@ pub enum SignedImbalance>{ } impl< - P: Imbalance, - N: Imbalance, - B: AtLeast32BitUnsigned + FullCodec + Copy + MaybeSerializeDeserialize + Debug + Default, -> SignedImbalance { + P: Imbalance, + N: Imbalance, + B: AtLeast32BitUnsigned + FullCodec + Copy + MaybeSerializeDeserialize + Debug + Default, + > SignedImbalance +{ /// Create a `Positive` instance of `Self` whose value is zero. pub fn zero() -> Self { SignedImbalance::Positive(P::zero()) diff --git a/frame/support/src/traits/tokens/imbalance/split_two_ways.rs b/frame/support/src/traits/tokens/imbalance/split_two_ways.rs index f3f9870b62cd2..882b43c2e914c 100644 --- a/frame/support/src/traits/tokens/imbalance/split_two_ways.rs +++ b/frame/support/src/traits/tokens/imbalance/split_two_ways.rs @@ -17,29 +17,24 @@ //! Means for splitting an imbalance into two and hanlding them differently. -use sp_std::{ops::Div, marker::PhantomData}; +use super::super::imbalance::{Imbalance, OnUnbalanced}; use sp_core::u32_trait::Value as U32; use sp_runtime::traits::Saturating; -use super::super::imbalance::{Imbalance, OnUnbalanced}; +use sp_std::{marker::PhantomData, ops::Div}; /// Split an unbalanced amount two ways between a common divisor. 
-pub struct SplitTwoWays< - Balance, - Imbalance, - Part1, - Target1, - Part2, - Target2, ->(PhantomData<(Balance, Imbalance, Part1, Target1, Part2, Target2)>); +pub struct SplitTwoWays( + PhantomData<(Balance, Imbalance, Part1, Target1, Part2, Target2)>, +); impl< - Balance: From + Saturating + Div, - I: Imbalance, - Part1: U32, - Target1: OnUnbalanced, - Part2: U32, - Target2: OnUnbalanced, -> OnUnbalanced for SplitTwoWays + Balance: From + Saturating + Div, + I: Imbalance, + Part1: U32, + Target1: OnUnbalanced, + Part2: U32, + Target2: OnUnbalanced, + > OnUnbalanced for SplitTwoWays { fn on_nonzero_unbalanced(amount: I) { let total: u32 = Part1::VALUE + Part2::VALUE; diff --git a/frame/support/src/traits/tokens/misc.rs b/frame/support/src/traits/tokens/misc.rs index 208cbe2fce863..8e44b20f2d1b4 100644 --- a/frame/support/src/traits/tokens/misc.rs +++ b/frame/support/src/traits/tokens/misc.rs @@ -17,11 +17,11 @@ //! Miscellaneous types. -use sp_std::fmt::Debug; -use codec::{Encode, Decode, FullCodec}; +use codec::{Decode, Encode, FullCodec}; +use sp_arithmetic::traits::{AtLeast32BitUnsigned, Zero}; use sp_core::RuntimeDebug; -use sp_arithmetic::traits::{Zero, AtLeast32BitUnsigned}; -use sp_runtime::{DispatchError, ArithmeticError, TokenError}; +use sp_runtime::{ArithmeticError, DispatchError, TokenError}; +use sp_std::fmt::Debug; /// One of a number of consequences of withdrawing a fungible from an account. #[derive(Copy, Clone, Eq, PartialEq)] @@ -161,12 +161,18 @@ impl WithdrawReasons { } /// Simple amalgamation trait to collect together properties for an AssetId under one roof. -pub trait AssetId: FullCodec + Copy + Eq + PartialEq + Debug {} +pub trait AssetId: FullCodec + Copy + Eq + PartialEq + Debug {} impl AssetId for T {} /// Simple amalgamation trait to collect together properties for a Balance under one roof. 
-pub trait Balance: AtLeast32BitUnsigned + FullCodec + Copy + Default + Debug + scale_info::TypeInfo {} -impl Balance for T {} +pub trait Balance: + AtLeast32BitUnsigned + FullCodec + Copy + Default + Debug + scale_info::TypeInfo +{ +} +impl Balance + for T +{ +} /// Converts a balance value into an asset balance. pub trait BalanceConversion { diff --git a/frame/support/src/traits/tokens/nonfungible.rs b/frame/support/src/traits/tokens/nonfungible.rs index 27e6cf8126a8e..821884f6e3905 100644 --- a/frame/support/src/traits/tokens/nonfungible.rs +++ b/frame/support/src/traits/tokens/nonfungible.rs @@ -24,12 +24,11 @@ //! For an NFT API which has dual-level namespacing, the traits in `nonfungibles` are better to //! use. -use codec::{Encode, Decode}; -use sp_std::prelude::*; -use sp_runtime::TokenError; -use crate::dispatch::DispatchResult; -use crate::traits::Get; use super::nonfungibles; +use crate::{dispatch::DispatchResult, traits::Get}; +use codec::{Decode, Encode}; +use sp_runtime::TokenError; +use sp_std::prelude::*; /// Trait for providing an interface to a read-only NFT-like set of asset instances. pub trait Inspect { @@ -43,7 +42,9 @@ pub trait Inspect { /// Returns the attribute value of `instance` corresponding to `key`. /// /// By default this is `None`; no attributes are defined. - fn attribute(_instance: &Self::InstanceId, _key: &[u8]) -> Option> { None } + fn attribute(_instance: &Self::InstanceId, _key: &[u8]) -> Option> { + None + } /// Returns the strongly-typed attribute value of `instance` corresponding to `key`. /// @@ -56,7 +57,9 @@ pub trait Inspect { /// Returns `true` if the asset `instance` may be transferred. /// /// Default implementation is that all assets are transferable. 
- fn can_transfer(_instance: &Self::InstanceId) -> bool { true } + fn can_transfer(_instance: &Self::InstanceId) -> bool { + true + } } /// Interface for enumerating assets in existence or owned by a given account over a collection @@ -117,15 +120,14 @@ pub struct ItemOf< F: nonfungibles::Inspect, A: Get<>::ClassId>, AccountId, ->( - sp_std::marker::PhantomData<(F, A, AccountId)> -); +>(sp_std::marker::PhantomData<(F, A, AccountId)>); impl< - F: nonfungibles::Inspect, - A: Get<>::ClassId>, - AccountId, -> Inspect for ItemOf { + F: nonfungibles::Inspect, + A: Get<>::ClassId>, + AccountId, + > Inspect for ItemOf +{ type InstanceId = >::InstanceId; fn owner(instance: &Self::InstanceId) -> Option { >::owner(&A::get(), instance) @@ -142,10 +144,11 @@ impl< } impl< - F: nonfungibles::InspectEnumerable, - A: Get<>::ClassId>, - AccountId, -> InspectEnumerable for ItemOf { + F: nonfungibles::InspectEnumerable, + A: Get<>::ClassId>, + AccountId, + > InspectEnumerable for ItemOf +{ fn instances() -> Box> { >::instances(&A::get()) } @@ -155,10 +158,11 @@ impl< } impl< - F: nonfungibles::Mutate, - A: Get<>::ClassId>, - AccountId, -> Mutate for ItemOf { + F: nonfungibles::Mutate, + A: Get<>::ClassId>, + AccountId, + > Mutate for ItemOf +{ fn mint_into(instance: &Self::InstanceId, who: &AccountId) -> DispatchResult { >::mint_into(&A::get(), instance, who) } @@ -178,10 +182,11 @@ impl< } impl< - F: nonfungibles::Transfer, - A: Get<>::ClassId>, - AccountId, -> Transfer for ItemOf { + F: nonfungibles::Transfer, + A: Get<>::ClassId>, + AccountId, + > Transfer for ItemOf +{ fn transfer(instance: &Self::InstanceId, destination: &AccountId) -> DispatchResult { >::transfer(&A::get(), instance, destination) } diff --git a/frame/support/src/traits/tokens/nonfungibles.rs b/frame/support/src/traits/tokens/nonfungibles.rs index b50c5f4d9814c..64bbf3a8edf7a 100644 --- a/frame/support/src/traits/tokens/nonfungibles.rs +++ b/frame/support/src/traits/tokens/nonfungibles.rs @@ -27,10 +27,10 @@ //! 
Implementations of these traits may be converted to implementations of corresponding //! `nonfungible` traits by using the `nonfungible::ItemOf` type adapter. -use sp_std::prelude::*; -use codec::{Encode, Decode}; -use sp_runtime::TokenError; use crate::dispatch::DispatchResult; +use codec::{Decode, Encode}; +use sp_runtime::TokenError; +use sp_std::prelude::*; /// Trait for providing an interface to many read-only NFT-like sets of asset instances. pub trait Inspect { @@ -48,14 +48,18 @@ pub trait Inspect { /// Returns the owner of the asset `class`, if there is one. For many NFTs this may not make /// any sense, so users of this API should not be surprised to find an asset class results in /// `None` here. - fn class_owner(_class: &Self::ClassId) -> Option { None } + fn class_owner(_class: &Self::ClassId) -> Option { + None + } /// Returns the attribute value of `instance` of `class` corresponding to `key`. /// /// By default this is `None`; no attributes are defined. - fn attribute(_class: &Self::ClassId, _instance: &Self::InstanceId, _key: &[u8]) - -> Option> - { + fn attribute( + _class: &Self::ClassId, + _instance: &Self::InstanceId, + _key: &[u8], + ) -> Option> { None } @@ -74,15 +78,14 @@ pub trait Inspect { /// Returns the attribute value of `class` corresponding to `key`. /// /// By default this is `None`; no attributes are defined. - fn class_attribute(_class: &Self::ClassId, _key: &[u8]) -> Option> { None } + fn class_attribute(_class: &Self::ClassId, _key: &[u8]) -> Option> { + None + } /// Returns the strongly-typed attribute value of `class` corresponding to `key`. /// /// By default this just attempts to use `class_attribute`. 
- fn typed_class_attribute( - class: &Self::ClassId, - key: &K, - ) -> Option { + fn typed_class_attribute(class: &Self::ClassId, key: &K) -> Option { key.using_encoded(|d| Self::class_attribute(class, d)) .and_then(|v| V::decode(&mut &v[..]).ok()) } @@ -90,7 +93,9 @@ pub trait Inspect { /// Returns `true` if the asset `instance` of `class` may be transferred. /// /// Default implementation is that all assets are transferable. - fn can_transfer(_class: &Self::ClassId, _instance: &Self::InstanceId) -> bool { true } + fn can_transfer(_class: &Self::ClassId, _instance: &Self::InstanceId) -> bool { + true + } } /// Interface for enumerating assets in existence or owned by a given account over many collections @@ -106,7 +111,10 @@ pub trait InspectEnumerable: Inspect { fn owned(who: &AccountId) -> Box>; /// Returns an iterator of the asset instances of `class` owned by `who`. - fn owned_in_class(class: &Self::ClassId, who: &AccountId) -> Box>; + fn owned_in_class( + class: &Self::ClassId, + who: &AccountId, + ) -> Box>; } /// Trait for providing an interface for multiple classes of NFT-like assets which may be minted, @@ -151,19 +159,13 @@ pub trait Mutate: Inspect { key: &K, value: &V, ) -> DispatchResult { - key.using_encoded(|k| value.using_encoded(|v| - Self::set_attribute(class, instance, k, v) - )) + key.using_encoded(|k| value.using_encoded(|v| Self::set_attribute(class, instance, k, v))) } /// Set attribute `value` of asset `class`'s `key`. /// /// By default, this is not a supported operation. 
- fn set_class_attribute( - _class: &Self::ClassId, - _key: &[u8], - _value: &[u8], - ) -> DispatchResult { + fn set_class_attribute(_class: &Self::ClassId, _key: &[u8], _value: &[u8]) -> DispatchResult { Err(TokenError::Unsupported.into()) } @@ -175,9 +177,7 @@ pub trait Mutate: Inspect { key: &K, value: &V, ) -> DispatchResult { - key.using_encoded(|k| value.using_encoded(|v| - Self::set_class_attribute(class, k, v) - )) + key.using_encoded(|k| value.using_encoded(|v| Self::set_class_attribute(class, k, v))) } } diff --git a/frame/support/src/traits/validation.rs b/frame/support/src/traits/validation.rs index d0583d6991fe6..5a68f289df48f 100644 --- a/frame/support/src/traits/validation.rs +++ b/frame/support/src/traits/validation.rs @@ -17,13 +17,14 @@ //! Traits for dealing with validation and validators. -use sp_std::prelude::*; +use crate::{dispatch::Parameter, weights::Weight}; use codec::{Codec, Decode}; -use sp_runtime::traits::{Convert, Zero}; -use sp_runtime::{BoundToRuntimeAppPublic, ConsensusEngineId, Permill, RuntimeAppPublic}; +use sp_runtime::{ + traits::{Convert, Zero}, + BoundToRuntimeAppPublic, ConsensusEngineId, Permill, RuntimeAppPublic, +}; use sp_staking::SessionIndex; -use crate::dispatch::Parameter; -use crate::weights::Weight; +use sp_std::prelude::*; /// A trait for online node inspection in a session. /// @@ -54,12 +55,14 @@ pub trait ValidatorSetWithIdentification: ValidatorSet { pub trait FindAuthor { /// Find the author of a block based on the pre-runtime digests. fn find_author<'a, I>(digests: I) -> Option - where I: 'a + IntoIterator; + where + I: 'a + IntoIterator; } impl FindAuthor for () { fn find_author<'a, I>(_: I) -> Option - where I: 'a + IntoIterator + where + I: 'a + IntoIterator, { None } @@ -81,7 +84,9 @@ pub trait OneSessionHandler: BoundToRuntimeAppPublic { /// for the second session, therefore the first call to `on_new_session` /// should provide the same validator set. 
fn on_genesis_session<'a, I: 'a>(validators: I) - where I: Iterator, ValidatorId: 'a; + where + I: Iterator, + ValidatorId: 'a; /// Session set has changed; act appropriately. Note that this can be called /// before initialization of your module. @@ -92,11 +97,10 @@ pub trait OneSessionHandler: BoundToRuntimeAppPublic { /// /// The `validators` are the validators of the incoming session, and `queued_validators` /// will follow. - fn on_new_session<'a, I: 'a>( - changed: bool, - validators: I, - queued_validators: I, - ) where I: Iterator, ValidatorId: 'a; + fn on_new_session<'a, I: 'a>(changed: bool, validators: I, queued_validators: I) + where + I: Iterator, + ValidatorId: 'a; /// A notification for end of the session. /// diff --git a/frame/support/src/traits/voting.rs b/frame/support/src/traits/voting.rs index f5afbac129555..62c6217ad59bc 100644 --- a/frame/support/src/traits/voting.rs +++ b/frame/support/src/traits/voting.rs @@ -18,7 +18,7 @@ //! Traits and associated data structures concerned with voting, and moving between tokens and //! votes. -use sp_arithmetic::traits::{UniqueSaturatedInto, UniqueSaturatedFrom, SaturatedConversion}; +use sp_arithmetic::traits::{SaturatedConversion, UniqueSaturatedFrom, UniqueSaturatedInto}; /// A trait similar to `Convert` to convert values from `B` an abstract balance type /// into u64 and back from u128. (This conversion is used in election and other places where complex @@ -69,7 +69,6 @@ impl CurrencyToVote for U128CurrencyToVote { } } - /// A naive implementation of `CurrencyConvert` that simply saturates all conversions. /// /// # Warning @@ -77,7 +76,9 @@ impl CurrencyToVote for U128CurrencyToVote { /// This is designed to be used mostly for testing. Use with care, and think about the consequences. 
pub struct SaturatingCurrencyToVote; -impl + UniqueSaturatedFrom> CurrencyToVote for SaturatingCurrencyToVote { +impl + UniqueSaturatedFrom> CurrencyToVote + for SaturatingCurrencyToVote +{ fn to_vote(value: B, _: B) -> u64 { value.unique_saturated_into() } diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index ce9008302deb8..3cdc9840f2ffa 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -127,16 +127,21 @@ //! - Ubuntu 19.10 (GNU/Linux 5.3.0-18-generic x86_64) //! - rustc 1.42.0 (b8cedc004 2020-03-09) -#[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; -use codec::{Encode, Decode}; +use crate::dispatch::{DispatchError, DispatchErrorWithPostInfo, DispatchResultWithPostInfo}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; -use sp_runtime::{RuntimeDebug, traits::SignedExtension}; -use sp_runtime::generic::{CheckedExtrinsic, UncheckedExtrinsic}; -use crate::dispatch::{DispatchErrorWithPostInfo, DispatchResultWithPostInfo, DispatchError}; -use sp_runtime::traits::SaturatedConversion; -use sp_arithmetic::{Perbill, traits::{BaseArithmetic, Saturating, Unsigned}}; +#[cfg(feature = "std")] +use serde::{Deserialize, Serialize}; use smallvec::{smallvec, SmallVec}; +use sp_arithmetic::{ + traits::{BaseArithmetic, Saturating, Unsigned}, + Perbill, +}; +use sp_runtime::{ + generic::{CheckedExtrinsic, UncheckedExtrinsic}, + traits::{SaturatedConversion, SignedExtension}, + RuntimeDebug, +}; /// Re-export priority as type pub use sp_runtime::transaction_validity::TransactionPriority; @@ -153,7 +158,7 @@ pub mod constants { pub const WEIGHT_PER_SECOND: Weight = 1_000_000_000_000; pub const WEIGHT_PER_MILLIS: Weight = WEIGHT_PER_SECOND / 1000; // 1_000_000_000 pub const WEIGHT_PER_MICROS: Weight = WEIGHT_PER_MILLIS / 1000; // 1_000_000 - pub const WEIGHT_PER_NANOS: Weight = WEIGHT_PER_MICROS / 1000; // 1_000 + pub const WEIGHT_PER_NANOS: Weight = WEIGHT_PER_MICROS / 1000; // 1_000 parameter_types! 
{ /// Importing a block with 0 txs takes ~5 ms @@ -268,13 +273,17 @@ pub trait OneOrMany { } impl OneOrMany for DispatchClass { - type Iter = sp_std::iter::Once; - fn into_iter(self) -> Self::Iter { sp_std::iter::once(self) } + type Iter = sp_std::iter::Once; + fn into_iter(self) -> Self::Iter { + sp_std::iter::once(self) + } } impl<'a> OneOrMany for &'a [DispatchClass] { - type Iter = sp_std::iter::Cloned>; - fn into_iter(self) -> Self::Iter { self.iter().cloned() } + type Iter = sp_std::iter::Cloned>; + fn into_iter(self) -> Self::Iter { + self.iter().cloned() + } } /// Primitives related to priority management of Frame. @@ -366,43 +375,32 @@ pub fn extract_actual_weight(result: &DispatchResultWithPostInfo, info: &Dispatc match result { Ok(post_info) => &post_info, Err(err) => &err.post_info, - }.calc_actual_weight(info) + } + .calc_actual_weight(info) } impl From<(Option, Pays)> for PostDispatchInfo { fn from(post_weight_info: (Option, Pays)) -> Self { let (actual_weight, pays_fee) = post_weight_info; - Self { - actual_weight, - pays_fee, - } + Self { actual_weight, pays_fee } } } impl From for PostDispatchInfo { fn from(pays_fee: Pays) -> Self { - Self { - actual_weight: None, - pays_fee, - } + Self { actual_weight: None, pays_fee } } } impl From> for PostDispatchInfo { fn from(actual_weight: Option) -> Self { - Self { - actual_weight, - pays_fee: Default::default(), - } + Self { actual_weight, pays_fee: Default::default() } } } impl From<()> for PostDispatchInfo { fn from(_: ()) -> Self { - Self { - actual_weight: None, - pays_fee: Default::default(), - } + Self { actual_weight: None, pays_fee: Default::default() } } } @@ -435,8 +433,9 @@ pub trait WithPostDispatchInfo { fn with_weight(self, actual_weight: Weight) -> DispatchErrorWithPostInfo; } -impl WithPostDispatchInfo for T where - T: Into +impl WithPostDispatchInfo for T +where + T: Into, { fn with_weight(self, actual_weight: Weight) -> DispatchErrorWithPostInfo { DispatchErrorWithPostInfo { @@ -543,8 
+542,9 @@ impl WeighData for FunctionOf { // `WeighData` as a closure #[allow(deprecated)] -impl WeighData for FunctionOf where - WD : Fn(Args) -> Weight +impl WeighData for FunctionOf +where + WD: Fn(Args) -> Weight, { fn weigh_data(&self, args: Args) -> Weight { (self.0)(args) @@ -561,8 +561,9 @@ impl ClassifyDispatch for FunctionOf // `ClassifyDispatch` as a raw value #[allow(deprecated)] -impl ClassifyDispatch for FunctionOf where - CD : Fn(Args) -> DispatchClass +impl ClassifyDispatch for FunctionOf +where + CD: Fn(Args) -> DispatchClass, { fn classify_dispatch(&self, args: Args) -> DispatchClass { (self.1)(args) @@ -579,8 +580,9 @@ impl PaysFee for FunctionOf { // `PaysFee` as a closure #[allow(deprecated)] -impl PaysFee for FunctionOf where - PF : Fn(Args) -> Pays +impl PaysFee for FunctionOf +where + PF: Fn(Args) -> Pays, { fn pays_fee(&self, args: Args) -> Pays { (self.2)(args) @@ -600,8 +602,7 @@ where } /// Implementation for checked extrinsic. -impl GetDispatchInfo - for CheckedExtrinsic +impl GetDispatchInfo for CheckedExtrinsic where Call: GetDispatchInfo, { @@ -615,11 +616,7 @@ where impl GetDispatchInfo for sp_runtime::testing::TestXt { fn get_dispatch_info(&self) -> DispatchInfo { // for testing: weight == size. - DispatchInfo { - weight: self.encode().len() as _, - pays_fee: Pays::Yes, - ..Default::default() - } + DispatchInfo { weight: self.encode().len() as _, pays_fee: Pays::Yes, ..Default::default() } } } @@ -691,32 +688,35 @@ pub trait WeightToFeePolynomial { /// This should not be overriden in most circumstances. Calculation is done in the /// `Balance` type and never overflows. All evaluation is saturating. fn calc(weight: &Weight) -> Self::Balance { - Self::polynomial().iter().fold(Self::Balance::saturated_from(0u32), |mut acc, args| { - let w = Self::Balance::saturated_from(*weight).saturating_pow(args.degree.into()); - - // The sum could get negative. Therefore we only sum with the accumulator. 
- // The Perbill Mul implementation is non overflowing. - let frac = args.coeff_frac * w; - let integer = args.coeff_integer.saturating_mul(w); - - if args.negative { - acc = acc.saturating_sub(frac); - acc = acc.saturating_sub(integer); - } else { - acc = acc.saturating_add(frac); - acc = acc.saturating_add(integer); - } + Self::polynomial() + .iter() + .fold(Self::Balance::saturated_from(0u32), |mut acc, args| { + let w = Self::Balance::saturated_from(*weight).saturating_pow(args.degree.into()); - acc - }) + // The sum could get negative. Therefore we only sum with the accumulator. + // The Perbill Mul implementation is non overflowing. + let frac = args.coeff_frac * w; + let integer = args.coeff_integer.saturating_mul(w); + + if args.negative { + acc = acc.saturating_sub(frac); + acc = acc.saturating_sub(integer); + } else { + acc = acc.saturating_add(frac); + acc = acc.saturating_add(integer); + } + + acc + }) } } /// Implementor of `WeightToFeePolynomial` that maps one unit of weight to one unit of fee. pub struct IdentityFee(sp_std::marker::PhantomData); -impl WeightToFeePolynomial for IdentityFee where - T: BaseArithmetic + From + Copy + Unsigned +impl WeightToFeePolynomial for IdentityFee +where + T: BaseArithmetic + From + Copy + Unsigned, { type Balance = T; @@ -814,8 +814,8 @@ impl PerDispatchClass { #[cfg(test)] #[allow(dead_code)] mod tests { - use crate::{decl_module, parameter_types, traits::Get}; use super::*; + use crate::{decl_module, parameter_types, traits::Get}; pub trait Config: 'static { type Origin; @@ -926,24 +926,15 @@ mod tests { #[test] fn extract_actual_weight_works() { - let pre = DispatchInfo { - weight: 1000, - .. 
Default::default() - }; + let pre = DispatchInfo { weight: 1000, ..Default::default() }; assert_eq!(extract_actual_weight(&Ok(Some(7).into()), &pre), 7); assert_eq!(extract_actual_weight(&Ok(Some(1000).into()), &pre), 1000); - assert_eq!( - extract_actual_weight(&Err(DispatchError::BadOrigin.with_weight(9)), &pre), - 9 - ); + assert_eq!(extract_actual_weight(&Err(DispatchError::BadOrigin.with_weight(9)), &pre), 9); } #[test] fn extract_actual_weight_caps_at_pre_weight() { - let pre = DispatchInfo { - weight: 1000, - .. Default::default() - }; + let pre = DispatchInfo { weight: 1000, ..Default::default() }; assert_eq!(extract_actual_weight(&Ok(Some(1250).into()), &pre), 1000); assert_eq!( extract_actual_weight(&Err(DispatchError::BadOrigin.with_weight(1300)), &pre), diff --git a/frame/support/test/src/pallet_version.rs b/frame/support/test/src/pallet_version.rs index aaa46c3ef2c60..882c0b78b7338 100644 --- a/frame/support/test/src/pallet_version.rs +++ b/frame/support/test/src/pallet_version.rs @@ -25,8 +25,5 @@ fn ensure_that_current_pallet_version_is_correct() { patch: env!("CARGO_PKG_VERSION_PATCH").parse().unwrap(), }; - assert_eq!( - expected, - crate_to_pallet_version!(), - ) + assert_eq!(expected, crate_to_pallet_version!(),) } diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 35b8096c29bee..335eaa6591610 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -19,13 +19,17 @@ //! * error declareed with decl_error works //! 
* integrity test is generated -#![recursion_limit="128"] +#![recursion_limit = "128"] +use frame_support::traits::PalletInfo as _; use scale_info::TypeInfo; -use sp_runtime::{generic, traits::{BlakeTwo256, Verify}, DispatchError}; -use sp_core::{H256, sr25519}; +use sp_core::{sr25519, H256}; +use sp_runtime::{ + generic, + traits::{BlakeTwo256, Verify}, + DispatchError, +}; use sp_std::cell::RefCell; -use frame_support::traits::PalletInfo as _; mod system; @@ -52,7 +56,7 @@ mod module1 { } #[derive(Clone, PartialEq, Eq, Debug, codec::Encode, codec::Decode, TypeInfo)] - pub struct Origin(pub core::marker::PhantomData::<(T, I)>); + pub struct Origin(pub core::marker::PhantomData<(T, I)>); frame_support::decl_event! { pub enum Event where @@ -264,8 +268,8 @@ pub type Block = generic::Block; pub type UncheckedExtrinsic = generic::UncheckedExtrinsic; mod origin_test { - use frame_support::traits::{Filter, OriginTrait}; use super::{module3, nested, system, Block, UncheckedExtrinsic}; + use frame_support::traits::{Filter, OriginTrait}; impl nested::module3::Config for RuntimeOriginTest {} impl module3::Config for RuntimeOriginTest {} @@ -557,10 +561,22 @@ fn get_call_names() { fn get_module_names() { use frame_support::dispatch::GetCallMetadata; let module_names = Call::get_module_names(); - assert_eq!([ - "System", "Module1_1", "Module2", "Module1_2", "NestedModule3", "Module3", - "Module1_4", "Module1_6", "Module1_7", "Module1_8", "Module1_9", - ], module_names); + assert_eq!( + [ + "System", + "Module1_1", + "Module2", + "Module1_2", + "NestedModule3", + "Module3", + "Module1_4", + "Module1_6", + "Module1_7", + "Module1_8", + "Module1_9", + ], + module_names + ); } #[test] @@ -578,8 +594,8 @@ fn call_subtype_conversion() { #[test] fn test_metadata() { - use scale_info::meta_type; use frame_support::metadata::*; + use scale_info::meta_type; let pallets = vec![ PalletMetadata { @@ -593,10 +609,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_1", - storage: 
Some(PalletStorageMetadata { - prefix: "Instance1Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Instance1Module", entries: vec![] }), calls: Some(meta_type::>().into()), event: Some(meta_type::>().into()), constants: vec![], @@ -605,10 +618,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module2", - storage: Some(PalletStorageMetadata { - prefix: "Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Module", entries: vec![] }), calls: Some(meta_type::>().into()), event: Some(meta_type::().into()), constants: vec![], @@ -617,10 +627,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_2", - storage: Some(PalletStorageMetadata { - prefix: "Instance2Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Instance2Module", entries: vec![] }), calls: Some(meta_type::>().into()), event: Some(meta_type::>().into()), constants: vec![], @@ -629,10 +636,7 @@ fn test_metadata() { }, PalletMetadata { name: "NestedModule3", - storage: Some(PalletStorageMetadata { - prefix: "Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Module", entries: vec![] }), calls: Some(meta_type::>().into()), event: Some(meta_type::().into()), constants: vec![], @@ -641,10 +645,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module3", - storage: Some(PalletStorageMetadata { - prefix: "Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Module", entries: vec![] }), calls: Some(meta_type::>().into()), event: Some(meta_type::().into()), constants: vec![], @@ -653,10 +654,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_3", - storage: Some(PalletStorageMetadata { - prefix: "Instance3Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Instance3Module", entries: vec![] }), calls: None, event: None, constants: vec![], @@ -683,10 +681,7 @@ fn test_metadata() { }, PalletMetadata { name: 
"Module1_6", - storage: Some(PalletStorageMetadata { - prefix: "Instance6Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Instance6Module", entries: vec![] }), calls: Some(meta_type::>().into()), event: Some(meta_type::>().into()), constants: vec![], @@ -695,10 +690,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_7", - storage: Some(PalletStorageMetadata { - prefix: "Instance7Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Instance7Module", entries: vec![] }), calls: Some(meta_type::>().into()), event: Some(PalletEventMetadata { ty: meta_type::>(), @@ -709,10 +701,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_8", - storage: Some(PalletStorageMetadata { - prefix: "Instance8Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Instance8Module", entries: vec![] }), calls: Some(meta_type::>().into()), event: Some(meta_type::>().into()), constants: vec![], @@ -721,10 +710,7 @@ fn test_metadata() { }, PalletMetadata { name: "Module1_9", - storage: Some(PalletStorageMetadata { - prefix: "Instance9Module", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Instance9Module", entries: vec![] }), calls: Some(meta_type::>().into()), event: Some(meta_type::>().into()), constants: vec![], @@ -736,13 +722,11 @@ fn test_metadata() { let extrinsic = ExtrinsicMetadata { ty: meta_type::(), version: 4, - signed_extensions: vec![ - SignedExtensionMetadata { - identifier: "UnitSignedExtension", - ty: meta_type::<()>(), - additional_signed: meta_type::<()>(), - } - ] + signed_extensions: vec![SignedExtensionMetadata { + identifier: "UnitSignedExtension", + ty: meta_type::<()>(), + additional_signed: meta_type::<()>(), + }], }; let expected_metadata: RuntimeMetadataPrefixed = diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index ae7e6f07f7923..ec8bcda6d8ba4 100644 --- 
a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -27,8 +27,11 @@ mod tests { } pub trait Config: frame_support_test::Config { - type Origin2: codec::Codec + codec::EncodeLike + Default - + codec::MaxEncodedLen + scale_info::TypeInfo; + type Origin2: codec::Codec + + codec::EncodeLike + + Default + + codec::MaxEncodedLen + + scale_info::TypeInfo; } frame_support::decl_storage! { @@ -187,35 +190,35 @@ mod tests { modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![1, 0, 0, 0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "PUBGETU32WITHCONFIGMYDEFOPT", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "GetU32WithBuilder", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0, 0, 0, 0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "GetOptU32WithBuilderSome", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "GetOptU32WithBuilderNone", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "MAPU32", @@ -226,7 +229,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "PUBMAPU32", @@ -237,7 +240,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![0], - docs:vec![], + docs: vec![], }, // StorageEntryMetadata { // name: "MAPU32MYDEF", @@ -270,7 +273,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![0, 0, 0, 0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata 
{ name: "PUBGETMAPU32", @@ -281,7 +284,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![0, 0, 0, 0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "GETMAPU32MYDEF", @@ -292,7 +295,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![109, 97, 112, 100], // "map" - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "PUBGETMAPU32MYDEF", @@ -303,7 +306,7 @@ mod tests { value: scale_info::meta_type::<[u8; 4]>(), }, default: vec![112, 117, 98, 109], // "pubmap" - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "DOUBLEMAP", @@ -316,7 +319,7 @@ mod tests { key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "DOUBLEMAP2", @@ -329,28 +332,31 @@ mod tests { key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE1", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::<(Option,)>()), default: vec![0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE2", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::Plain(scale_info::meta_type::<([[(u16, Option<()>); 32]; 12], u32)>()), + ty: StorageEntryType::Plain(scale_info::meta_type::<( + [[(u16, Option<()>); 32]; 12], + u32, + )>()), default: [0u8; 1156].to_vec(), - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "COMPLEXTYPE3", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::<[u32; 25]>()), default: [0u8; 100].to_vec(), - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "NMAP", @@ -361,7 +367,7 @@ mod tests { value: scale_info::meta_type::(), }, default: vec![0], - docs:vec![], + docs: vec![], }, StorageEntryMetadata { name: "NMAP2", @@ -372,7 +378,7 @@ mod tests { value: scale_info::meta_type::(), }, default: vec![0], - docs:vec![], + 
docs: vec![], }, ], } @@ -381,9 +387,9 @@ mod tests { #[test] fn storage_info() { use frame_support::{ - StorageHasher, - traits::{StorageInfoTrait, StorageInfo}, pallet_prelude::*, + traits::{StorageInfo, StorageInfoTrait}, + StorageHasher, }; let prefix = |pallet_name, storage_name| { let mut res = [0u8; 32]; @@ -670,9 +676,9 @@ mod test2 { #[test] fn storage_info() { use frame_support::{ - StorageHasher, - traits::{StorageInfoTrait, StorageInfo}, pallet_prelude::*, + traits::{StorageInfo, StorageInfoTrait}, + StorageHasher, }; let prefix = |pallet_name, storage_name| { let mut res = [0u8; 32]; @@ -714,7 +720,6 @@ mod test2 { ], ); } - } #[cfg(test)] @@ -748,8 +753,8 @@ mod test3 { #[cfg(test)] #[allow(dead_code)] mod test_append_and_len { + use codec::{Decode, Encode}; use sp_io::TestExternalities; - use codec::{Encode, Decode}; pub trait Config: frame_support_test::Config {} diff --git a/frame/support/test/tests/derive_no_bound.rs b/frame/support/test/tests/derive_no_bound.rs index 3081a332b72c1..457ece8b85901 100644 --- a/frame/support/test/tests/derive_no_bound.rs +++ b/frame/support/test/tests/derive_no_bound.rs @@ -19,7 +19,7 @@ //! 
RuntimeDebugNoBound use frame_support::{ - DebugNoBound, CloneNoBound, EqNoBound, PartialEqNoBound, RuntimeDebugNoBound, DefaultNoBound, + CloneNoBound, DebugNoBound, DefaultNoBound, EqNoBound, PartialEqNoBound, RuntimeDebugNoBound, }; #[derive(RuntimeDebugNoBound)] @@ -59,7 +59,7 @@ fn test_struct_named() { phantom: Default::default(), }; - let a_default: StructNamed:: = Default::default(); + let a_default: StructNamed = Default::default(); assert_eq!(a_default.a, 0); assert_eq!(a_default.b, 0); assert_eq!(a_default.c, 0); @@ -90,14 +90,9 @@ struct StructUnnamed(u32, u64, T::C, core::marker::PhantomData< #[test] fn test_struct_unnamed() { - let a_1 = StructUnnamed::( - 1, - 2, - 3, - Default::default(), - ); + let a_1 = StructUnnamed::(1, 2, 3, Default::default()); - let a_default: StructUnnamed:: = Default::default(); + let a_default: StructUnnamed = Default::default(); assert_eq!(a_default.0, 0); assert_eq!(a_default.1, 0); assert_eq!(a_default.2, 0); @@ -108,17 +103,9 @@ fn test_struct_unnamed() { assert_eq!(a_2.1, 2); assert_eq!(a_2.2, 3); assert_eq!(a_2, a_1); - assert_eq!( - format!("{:?}", a_1), - String::from("StructUnnamed(1, 2, 3, PhantomData)") - ); + assert_eq!(format!("{:?}", a_1), String::from("StructUnnamed(1, 2, 3, PhantomData)")); - let b = StructUnnamed::( - 1, - 2, - 4, - Default::default(), - ); + let b = StructUnnamed::(1, 2, 4, Default::default()); assert!(b != a_1); } @@ -126,12 +113,7 @@ fn test_struct_unnamed() { #[derive(DebugNoBound, CloneNoBound, EqNoBound, PartialEqNoBound, DefaultNoBound)] enum Enum { VariantUnnamed(u32, u64, T::C, core::marker::PhantomData<(U, V)>), - VariantNamed { - a: u32, - b: u64, - c: T::C, - phantom: core::marker::PhantomData<(U, V)>, - }, + VariantNamed { a: u32, b: u64, c: T::C, phantom: core::marker::PhantomData<(U, V)> }, VariantUnit, VariantUnit2, } @@ -139,11 +121,7 @@ enum Enum { // enum that will have a named default. 
#[derive(DebugNoBound, CloneNoBound, EqNoBound, PartialEqNoBound, DefaultNoBound)] enum Enum2 { - VariantNamed { - a: u32, - b: u64, - c: T::C, - }, + VariantNamed { a: u32, b: u64, c: T::C }, VariantUnnamed(u32, u64, T::C), VariantUnit, VariantUnit2, @@ -153,18 +131,14 @@ enum Enum2 { #[derive(DebugNoBound, CloneNoBound, EqNoBound, PartialEqNoBound, DefaultNoBound)] enum Enum3 { VariantUnit, - VariantNamed { - a: u32, - b: u64, - c: T::C, - }, + VariantNamed { a: u32, b: u64, c: T::C }, VariantUnnamed(u32, u64, T::C), VariantUnit2, } #[test] fn test_enum() { - type TestEnum = Enum::; + type TestEnum = Enum; let variant_0 = TestEnum::VariantUnnamed(1, 2, 3, Default::default()); let variant_0_bis = TestEnum::VariantUnnamed(1, 2, 4, Default::default()); let variant_1 = TestEnum::VariantNamed { a: 1, b: 2, c: 3, phantom: Default::default() }; @@ -179,14 +153,8 @@ fn test_enum() { TestEnum::VariantUnnamed(0, 0, 0, Default::default()) ); - assert_eq!( - Enum2::::default(), - Enum2::::VariantNamed { a: 0, b: 0, c: 0}, - ); - assert_eq!( - Enum3::::default(), - Enum3::::VariantUnit, - ); + assert_eq!(Enum2::::default(), Enum2::::VariantNamed { a: 0, b: 0, c: 0 },); + assert_eq!(Enum3::::default(), Enum3::::VariantUnit,); assert!(variant_0 != variant_0_bis); assert!(variant_1 != variant_1_bis); @@ -216,12 +184,6 @@ fn test_enum() { format!("{:?}", variant_1), String::from("Enum::VariantNamed { a: 1, b: 2, c: 3, phantom: PhantomData }"), ); - assert_eq!( - format!("{:?}", variant_2), - String::from("Enum::VariantUnit"), - ); - assert_eq!( - format!("{:?}", variant_3), - String::from("Enum::VariantUnit2"), - ); + assert_eq!(format!("{:?}", variant_2), String::from("Enum::VariantUnit"),); + assert_eq!(format!("{:?}", variant_3), String::from("Enum::VariantUnit2"),); } diff --git a/frame/support/test/tests/final_keys.rs b/frame/support/test/tests/final_keys.rs index 9839a3d3b2d94..e89f961d893f5 100644 --- a/frame/support/test/tests/final_keys.rs +++ 
b/frame/support/test/tests/final_keys.rs @@ -15,10 +15,14 @@ // See the License for the specific language governing permissions and // limitations under the License. -use frame_support::storage::unhashed; use codec::Encode; -use frame_support::{StorageDoubleMap, StorageMap, StorageValue, StoragePrefixedMap}; -use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; +use frame_support::{ + storage::unhashed, StorageDoubleMap, StorageMap, StoragePrefixedMap, StorageValue, +}; +use sp_io::{ + hashing::{blake2_128, twox_128, twox_64}, + TestExternalities, +}; mod no_instance { pub trait Config: frame_support_test::Config {} @@ -27,7 +31,7 @@ mod no_instance { pub struct Module for enum Call where origin: T::Origin, system=frame_support_test {} } - frame_support::decl_storage!{ + frame_support::decl_storage! { trait Store for Module as FinalKeysNone { pub Value config(value): u32; @@ -52,7 +56,7 @@ mod instance { for enum Call where origin: T::Origin, system=frame_support_test {} } - frame_support::decl_storage!{ + frame_support::decl_storage! { trait Store for Module, I: Instance = DefaultInstance> as FinalKeysSome { diff --git a/frame/support/test/tests/genesisconfig.rs b/frame/support/test/tests/genesisconfig.rs index a30b021d13e51..d488e8bfbfaff 100644 --- a/frame/support/test/tests/genesisconfig.rs +++ b/frame/support/test/tests/genesisconfig.rs @@ -40,7 +40,5 @@ impl Config for Test {} #[test] fn init_genesis_config() { - GenesisConfig:: { - t: Default::default(), - }; + GenesisConfig:: { t: Default::default() }; } diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index 3cb4a7ee8a95e..8ede4df48b32e 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -15,21 +15,26 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#![recursion_limit="128"] +#![recursion_limit = "128"] -use codec::{Codec, EncodeLike, Encode, Decode}; -use scale_info::TypeInfo; -use sp_runtime::{generic, BuildStorage, traits::{BlakeTwo256, Verify}}; +use codec::{Codec, Decode, Encode, EncodeLike}; use frame_support::{ - Parameter, traits::Get, parameter_types, + inherent::{InherentData, InherentIdentifier, MakeFatalError, ProvideInherent}, metadata::{ - PalletStorageMetadata, StorageEntryModifier, StorageEntryType, - StorageEntryMetadata, StorageHasher, + PalletStorageMetadata, StorageEntryMetadata, StorageEntryModifier, StorageEntryType, + StorageHasher, }, - StorageValue, StorageMap, StorageDoubleMap, - inherent::{ProvideInherent, InherentData, InherentIdentifier, MakeFatalError}, + parameter_types, + traits::Get, + Parameter, StorageDoubleMap, StorageMap, StorageValue, +}; +use scale_info::TypeInfo; +use sp_core::{sr25519, H256}; +use sp_runtime::{ + generic, + traits::{BlakeTwo256, Verify}, + BuildStorage, }; -use sp_core::{H256, sr25519}; mod system; @@ -42,7 +47,10 @@ mod module1 { use super::*; use sp_std::ops::Add; - pub trait Config: system::Config where ::BlockNumber: From { + pub trait Config: system::Config + where + ::BlockNumber: From, + { type Event: From> + Into<::Event>; type Origin: From>; type SomeParameter: Get; @@ -102,15 +110,19 @@ mod module1 { } #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, TypeInfo)] - pub enum Origin, I> where T::BlockNumber: From { + pub enum Origin, I> + where + T::BlockNumber: From, + { Members(u32), _Phantom(std::marker::PhantomData<(T, I)>), } pub const INHERENT_IDENTIFIER: InherentIdentifier = *b"12345678"; - impl, I: Instance> ProvideInherent for Module where - T::BlockNumber: From + impl, I: Instance> ProvideInherent for Module + where + T::BlockNumber: From, { type Call = Call; type Error = MakeFatalError<()>; @@ -120,7 +132,10 @@ mod module1 { unimplemented!(); } - fn check_inherent(_: &Self::Call, _: &InherentData) -> 
std::result::Result<(), Self::Error> { + fn check_inherent( + _: &Self::Call, + _: &InherentData, + ) -> std::result::Result<(), Self::Error> { unimplemented!(); } @@ -136,7 +151,7 @@ mod module1 { mod module2 { use super::*; - pub trait Config: system::Config { + pub trait Config: system::Config { type Amount: Parameter + Default; type Event: From> + Into<::Event>; type Origin: From>; @@ -168,7 +183,7 @@ mod module2 { } #[derive(PartialEq, Eq, Clone, sp_runtime::RuntimeDebug, Encode, Decode, TypeInfo)] - pub enum Origin, I=DefaultInstance> { + pub enum Origin, I = DefaultInstance> { Members(u32), _Phantom(std::marker::PhantomData<(T, I)>), } @@ -184,7 +199,10 @@ mod module2 { unimplemented!(); } - fn check_inherent(_call: &Self::Call, _data: &InherentData) -> std::result::Result<(), Self::Error> { + fn check_inherent( + _call: &Self::Call, + _data: &InherentData, + ) -> std::result::Result<(), Self::Error> { unimplemented!(); } @@ -199,7 +217,9 @@ mod module2 { mod module3 { use super::*; - pub trait Config: module2::Config + module2::Config + system::Config { + pub trait Config: + module2::Config + module2::Config + system::Config + { type Currency: Currency; type Currency2: Currency; } @@ -256,7 +276,7 @@ pub type BlockNumber = u64; pub type Index = u64; impl system::Config for Runtime { - type BaseCallFilter= frame_support::traits::AllowAll; + type BaseCallFilter = frame_support::traits::AllowAll; type Hash = H256; type Origin = Origin; type BlockNumber = BlockNumber; @@ -299,15 +319,9 @@ pub type Block = generic::Block; pub type UncheckedExtrinsic = generic::UncheckedExtrinsic; fn new_test_ext() -> sp_io::TestExternalities { - GenesisConfig{ - module_1_1: module1::GenesisConfig { - value: 3, - test: 2, - }, - module_1_2: module1::GenesisConfig { - value: 4, - test: 5, - }, + GenesisConfig { + module_1_1: module1::GenesisConfig { value: 3, test: 2 }, + module_1_2: module1::GenesisConfig { value: 4, test: 5 }, module_2: module2::GenesisConfig { value: 4, map: 
vec![(0, 0)], @@ -320,14 +334,17 @@ fn new_test_ext() -> sp_io::TestExternalities { }, module_2_2: Default::default(), module_2_3: Default::default(), - }.build_storage().unwrap().into() + } + .build_storage() + .unwrap() + .into() } #[test] fn storage_instance_independence() { let mut storage = sp_core::storage::Storage { top: std::collections::BTreeMap::new(), - children_default: std::collections::HashMap::new() + children_default: std::collections::HashMap::new(), }; sp_state_machine::BasicExternalities::execute_with_storage(&mut storage, || { module2::Value::::put(0); @@ -360,7 +377,7 @@ fn storage_with_instance_basic_operation() { assert_eq!(Value::get(), 1); assert_eq!(Value::take(), 1); assert_eq!(Value::get(), 0); - Value::mutate(|a| *a=2); + Value::mutate(|a| *a = 2); assert_eq!(Value::get(), 2); Value::kill(); assert_eq!(Value::exists(), false); @@ -373,7 +390,7 @@ fn storage_with_instance_basic_operation() { assert_eq!(Map::get(key), 1); assert_eq!(Map::take(key), 1); assert_eq!(Map::get(key), 0); - Map::mutate(key, |a| *a=2); + Map::mutate(key, |a| *a = 2); assert_eq!(Map::get(key), 2); Map::remove(key); assert_eq!(Map::contains_key(key), false); @@ -387,7 +404,7 @@ fn storage_with_instance_basic_operation() { assert_eq!(DoubleMap::get(&key1, &key2), 1); assert_eq!(DoubleMap::take(&key1, &key2), 1); assert_eq!(DoubleMap::get(&key1, &key2), 0); - DoubleMap::mutate(&key1, &key2, |a| *a=2); + DoubleMap::mutate(&key1, &key2, |a| *a = 2); assert_eq!(DoubleMap::get(&key1, &key2), 2); DoubleMap::remove(&key1, &key2); assert_eq!(DoubleMap::get(&key1, &key2), 0); @@ -398,8 +415,7 @@ fn expected_metadata() -> PalletStorageMetadata { PalletStorageMetadata { prefix: "Instance2Module2", entries: vec![ - StorageEntryMetadata - { + StorageEntryMetadata { name: "Value", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Plain(scale_info::meta_type::()), @@ -430,11 +446,10 @@ fn expected_metadata() -> PalletStorageMetadata { default: [0u8; 8].to_vec(), docs: 
vec![], }, - ] + ], } } - #[test] fn test_instance_storage_metadata() { let metadata = Module2_2::storage_metadata(); diff --git a/frame/support/test/tests/issue2219.rs b/frame/support/test/tests/issue2219.rs index dacddccedafc2..f83dd2fc3ceed 100644 --- a/frame/support/test/tests/issue2219.rs +++ b/frame/support/test/tests/issue2219.rs @@ -15,23 +15,24 @@ // See the License for the specific language governing permissions and // limitations under the License. -use frame_support::sp_runtime::generic; -use frame_support::sp_runtime::traits::{BlakeTwo256, Verify}; -use frame_support::codec::{Encode, Decode}; -use frame_support::scale_info::TypeInfo; -use sp_core::{H256, sr25519}; -use serde::{Serialize, Deserialize}; +use frame_support::{ + codec::{Decode, Encode}, + scale_info::TypeInfo, + sp_runtime::{ + generic, + traits::{BlakeTwo256, Verify}, + }, +}; +use serde::{Deserialize, Serialize}; +use sp_core::{sr25519, H256}; mod system; mod module { use super::*; - pub type Request = ( - ::AccountId, - Role, - ::BlockNumber, - ); + pub type Request = + (::AccountId, Role, ::BlockNumber); pub type Requests = Vec>; #[derive(Encode, Decode, Copy, Clone, Eq, PartialEq, Debug, TypeInfo)] @@ -90,14 +91,12 @@ mod module { #[derive(Encode, Decode, Copy, Clone, Serialize, Deserialize)] pub struct Data { - pub data: T::BlockNumber, + pub data: T::BlockNumber, } impl Default for Data { fn default() -> Self { - Self { - data: T::BlockNumber::default(), - } + Self { data: T::BlockNumber::default() } } } @@ -186,9 +185,6 @@ frame_support::construct_runtime!( #[test] fn create_genesis_config() { GenesisConfig { - module: module::GenesisConfig { - request_life_time: 0, - enable_storage_role: true, - } + module: module::GenesisConfig { request_life_time: 0, enable_storage_role: true }, }; } diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 9e7a03f825854..9a34619a290fc 100644 --- a/frame/support/test/tests/pallet.rs +++ 
b/frame/support/test/tests/pallet.rs @@ -16,50 +16,90 @@ // limitations under the License. use frame_support::{ - weights::{DispatchInfo, DispatchClass, Pays, GetDispatchInfo}, - traits::{GetCallName, OnInitialize, OnFinalize, OnRuntimeUpgrade, GetPalletVersion, OnGenesis}, - dispatch::{UnfilteredDispatchable, Parameter}, - storage::unhashed, + dispatch::{Parameter, UnfilteredDispatchable}, scale_info, + storage::unhashed, + traits::{ + GetCallName, GetPalletVersion, OnFinalize, OnGenesis, OnInitialize, OnRuntimeUpgrade, + }, + weights::{DispatchClass, DispatchInfo, GetDispatchInfo, Pays}, +}; +use sp_io::{ + hashing::{blake2_128, twox_128, twox_64}, + TestExternalities, }; use sp_runtime::DispatchError; -use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; pub struct SomeType1; -impl From for u64 { fn from(_t: SomeType1) -> Self { 0u64 } } +impl From for u64 { + fn from(_t: SomeType1) -> Self { + 0u64 + } +} pub struct SomeType2; -impl From for u64 { fn from(_t: SomeType2) -> Self { 100u64 } } +impl From for u64 { + fn from(_t: SomeType2) -> Self { + 100u64 + } +} pub struct SomeType3; -impl From for u64 { fn from(_t: SomeType3) -> Self { 0u64 } } +impl From for u64 { + fn from(_t: SomeType3) -> Self { + 0u64 + } +} pub struct SomeType4; -impl From for u64 { fn from(_t: SomeType4) -> Self { 0u64 } } +impl From for u64 { + fn from(_t: SomeType4) -> Self { + 0u64 + } +} pub struct SomeType5; -impl From for u64 { fn from(_t: SomeType5) -> Self { 0u64 } } +impl From for u64 { + fn from(_t: SomeType5) -> Self { + 0u64 + } +} pub struct SomeType6; -impl From for u64 { fn from(_t: SomeType6) -> Self { 0u64 } } +impl From for u64 { + fn from(_t: SomeType6) -> Self { + 0u64 + } +} pub struct SomeType7; -impl From for u64 { fn from(_t: SomeType7) -> Self { 0u64 } } +impl From for u64 { + fn from(_t: SomeType7) -> Self { + 0u64 + } +} -pub trait SomeAssociation1 { type _1: Parameter + codec::MaxEncodedLen + scale_info::TypeInfo; } -impl SomeAssociation1 
for u64 { type _1 = u64; } +pub trait SomeAssociation1 { + type _1: Parameter + codec::MaxEncodedLen + scale_info::TypeInfo; +} +impl SomeAssociation1 for u64 { + type _1 = u64; +} -pub trait SomeAssociation2 { type _2: Parameter + codec::MaxEncodedLen + scale_info::TypeInfo; } -impl SomeAssociation2 for u64 { type _2 = u64; } +pub trait SomeAssociation2 { + type _2: Parameter + codec::MaxEncodedLen + scale_info::TypeInfo; +} +impl SomeAssociation2 for u64 { + type _2 = u64; +} #[frame_support::pallet] pub mod pallet { use super::{ - SomeType1, SomeType2, SomeType3, SomeType4, SomeType5, SomeType6, SomeType7, - SomeAssociation1, SomeAssociation2, + SomeAssociation1, SomeAssociation2, SomeType1, SomeType2, SomeType3, SomeType4, SomeType5, + SomeType6, SomeType7, }; - use frame_support::scale_info; - use frame_support::pallet_prelude::*; + use frame_support::{pallet_prelude::*, scale_info}; use frame_system::pallet_prelude::*; type BalanceOf = ::Balance; @@ -90,14 +130,19 @@ pub mod pallet { #[pallet::extra_constants] impl Pallet - where T::AccountId: From + SomeAssociation1 + From, + where + T::AccountId: From + SomeAssociation1 + From, { /// Some doc /// Some doc - fn some_extra() -> T::AccountId { SomeType2.into() } + fn some_extra() -> T::AccountId { + SomeType2.into() + } /// Some doc - fn some_extra_extra() -> T::AccountId { SomeType1.into() } + fn some_extra_extra() -> T::AccountId { + SomeType1.into() + } } #[pallet::pallet] @@ -107,7 +152,8 @@ pub mod pallet { #[pallet::hooks] impl Hooks> for Pallet - where T::AccountId: From + From + SomeAssociation1, + where + T::AccountId: From + From + SomeAssociation1, { fn on_initialize(_: BlockNumberFor) -> Weight { T::AccountId::from(SomeType1); // Test for where clause @@ -134,7 +180,8 @@ pub mod pallet { #[pallet::call] impl Pallet - where T::AccountId: From + From + SomeAssociation1 + where + T::AccountId: From + From + SomeAssociation1, { /// Doc comment put in metadata #[pallet::weight(Weight::from(*_foo))] @@ 
-167,9 +214,7 @@ pub mod pallet { // Test for DispatchResult return type #[pallet::weight(1)] - pub fn foo_no_post_info( - _origin: OriginFor, - ) -> DispatchResult { + pub fn foo_no_post_info(_origin: OriginFor) -> DispatchResult { Ok(()) } } @@ -182,7 +227,10 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(fn deposit_event)] - pub enum Event where T::AccountId: SomeAssociation1 + From{ + pub enum Event + where + T::AccountId: SomeAssociation1 + From, + { /// doc comment put in metadata Proposed(::AccountId), /// doc @@ -192,8 +240,10 @@ pub mod pallet { } #[pallet::storage] - pub type ValueWhereClause where T::AccountId: SomeAssociation2 = - StorageValue<_, ::_2>; + pub type ValueWhereClause + where + T::AccountId: SomeAssociation2, + = StorageValue<_, ::_2>; #[pallet::storage] pub type Value = StorageValue; @@ -204,28 +254,32 @@ pub mod pallet { #[pallet::type_value] pub fn MyDefault() -> u16 - where T::AccountId: From + From + SomeAssociation1 + where + T::AccountId: From + From + SomeAssociation1, { T::AccountId::from(SomeType7); // Test where clause works 4u16 } #[pallet::storage] - pub type Map where T::AccountId: From = - StorageMap<_, Blake2_128Concat, u8, u16, ValueQuery, MyDefault>; + pub type Map + where + T::AccountId: From, + = StorageMap<_, Blake2_128Concat, u8, u16, ValueQuery, MyDefault>; #[pallet::storage] - pub type Map2 = StorageMap< - Hasher = Twox64Concat, Key = u16, Value = u32, MaxValues = ConstU32<3> - >; + pub type Map2 = + StorageMap>; #[pallet::storage] pub type DoubleMap = StorageDoubleMap<_, Blake2_128Concat, u8, Twox64Concat, u16, u32>; #[pallet::storage] pub type DoubleMap2 = StorageDoubleMap< - Hasher1 = Twox64Concat, Key1 = u16, - Hasher2 = Blake2_128Concat, Key2 = u32, + Hasher1 = Twox64Concat, + Key1 = u16, + Hasher2 = Blake2_128Concat, + Key2 = u32, Value = u64, MaxValues = ConstU32<5>, >; @@ -256,26 +310,14 @@ pub mod pallet { #[cfg(feature = "conditional-storage")] #[pallet::storage] #[pallet::getter(fn 
conditional_double_map)] - pub type ConditionalDoubleMap = StorageDoubleMap< - _, - Blake2_128Concat, - u8, - Twox64Concat, - u16, - u32, - >; + pub type ConditionalDoubleMap = + StorageDoubleMap<_, Blake2_128Concat, u8, Twox64Concat, u16, u32>; #[cfg(feature = "conditional-storage")] #[pallet::storage] #[pallet::getter(fn conditional_nmap)] - pub type ConditionalNMap = StorageNMap< - _, - ( - storage::Key, - storage::Key, - ), - u32, - >; + pub type ConditionalNMap = + StorageNMap<_, (storage::Key, storage::Key), u32>; #[pallet::genesis_config] #[derive(Default)] @@ -285,7 +327,8 @@ pub mod pallet { #[pallet::genesis_build] impl GenesisBuild for GenesisConfig - where T::AccountId: From + SomeAssociation1 + From + where + T::AccountId: From + SomeAssociation1 + From, { fn build(&self) { T::AccountId::from(SomeType1); // Test for where clause @@ -294,22 +337,28 @@ pub mod pallet { } #[pallet::origin] - #[derive(EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, scale_info::TypeInfo)] + #[derive( + EqNoBound, + RuntimeDebugNoBound, + CloneNoBound, + PartialEqNoBound, + Encode, + Decode, + scale_info::TypeInfo, + )] pub struct Origin(PhantomData); #[pallet::validate_unsigned] impl ValidateUnsigned for Pallet - where T::AccountId: From + SomeAssociation1 + From + From + where + T::AccountId: From + SomeAssociation1 + From + From, { type Call = Call; - fn validate_unsigned( - _source: TransactionSource, - call: &Self::Call - ) -> TransactionValidity { + fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { T::AccountId::from(SomeType1); // Test for where clause T::AccountId::from(SomeType5); // Test for where clause if matches!(call, Call::foo_transactional(_)) { - return Ok(ValidTransaction::default()); + return Ok(ValidTransaction::default()) } Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) } @@ -317,7 +366,8 @@ pub mod pallet { #[pallet::inherent] impl ProvideInherent for Pallet - 
where T::AccountId: From + SomeAssociation1 + From + From + where + T::AccountId: From + SomeAssociation1 + From + From, { type Call = Call; type Error = InherentError; @@ -370,13 +420,14 @@ pub mod pallet { // Test that a pallet with non generic event and generic genesis_config is correctly handled #[frame_support::pallet] pub mod pallet2 { - use super::{SomeType1, SomeAssociation1}; + use super::{SomeAssociation1, SomeType1}; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; #[pallet::config] pub trait Config: frame_system::Config - where ::AccountId: From + SomeAssociation1, + where + ::AccountId: From + SomeAssociation1, { type Event: From + IsType<::Event>; } @@ -386,16 +437,13 @@ pub mod pallet2 { pub struct Pallet(_); #[pallet::hooks] - impl Hooks> for Pallet - where T::AccountId: From + SomeAssociation1, + impl Hooks> for Pallet where + T::AccountId: From + SomeAssociation1 { } #[pallet::call] - impl Pallet - where T::AccountId: From + SomeAssociation1, - { - } + impl Pallet where T::AccountId: From + SomeAssociation1 {} #[pallet::storage] pub type SomeValue = StorageValue<_, Vec>; @@ -408,24 +456,25 @@ pub mod pallet2 { #[pallet::genesis_config] pub struct GenesisConfig - where T::AccountId: From + SomeAssociation1, + where + T::AccountId: From + SomeAssociation1, { phantom: PhantomData, } impl Default for GenesisConfig - where T::AccountId: From + SomeAssociation1, + where + T::AccountId: From + SomeAssociation1, { fn default() -> Self { - GenesisConfig { - phantom: Default::default(), - } + GenesisConfig { phantom: Default::default() } } } #[pallet::genesis_build] impl GenesisBuild for GenesisConfig - where T::AccountId: From + SomeAssociation1, + where + T::AccountId: From + SomeAssociation1, { fn build(&self) {} } @@ -442,9 +491,9 @@ pub mod pallet3 { } frame_support::parameter_types!( - pub const MyGetParam: u32= 10; - pub const MyGetParam2: u32= 11; - pub const MyGetParam3: u32= 12; + pub const MyGetParam: u32 = 10; + pub 
const MyGetParam2: u32 = 11; + pub const MyGetParam3: u32 = 12; pub const BlockHashCount: u32 = 250; ); @@ -506,13 +555,20 @@ fn transactional_works() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo_transactional(0).dispatch_bypass_filter(None.into()) - .err().unwrap(); + pallet::Call::::foo_transactional(0) + .dispatch_bypass_filter(None.into()) + .err() + .unwrap(); assert!(frame_system::Pallet::::events().is_empty()); - pallet::Call::::foo_transactional(1).dispatch_bypass_filter(None.into()).unwrap(); + pallet::Call::::foo_transactional(1) + .dispatch_bypass_filter(None.into()) + .unwrap(); assert_eq!( - frame_system::Pallet::::events().iter().map(|e| &e.event).collect::>(), + frame_system::Pallet::::events() + .iter() + .map(|e| &e.event) + .collect::>(), vec![&Event::Example(pallet::Event::Something(0))], ); }) @@ -523,11 +579,7 @@ fn call_expand() { let call_foo = pallet::Call::::foo(3, 0); assert_eq!( call_foo.get_dispatch_info(), - DispatchInfo { - weight: 3, - class: DispatchClass::Normal, - pays_fee: Pays::Yes, - } + DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } ); assert_eq!(call_foo.get_call_name(), "foo"); assert_eq!( @@ -548,11 +600,7 @@ fn error_expand() { ); assert_eq!( DispatchError::from(pallet::Error::::InsufficientProposersBalance), - DispatchError::Module { - index: 1, - error: 0, - message: Some("InsufficientProposersBalance"), - }, + DispatchError::Module { index: 1, error: 0, message: Some("InsufficientProposersBalance") }, ); } @@ -569,13 +617,17 @@ fn inherent_expand() { traits::EnsureInherentsAreFirst, }; use sp_core::Hasher; - use sp_runtime::{traits::{BlakeTwo256, Header}, Digest}; + use sp_runtime::{ + traits::{BlakeTwo256, Header}, + Digest, + }; let inherents = InherentData::new().create_extrinsics(); - let expected = vec![ - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo_no_post_info()), signature: None }, - ]; + let 
expected = vec![UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo_no_post_info()), + signature: None, + }]; assert_eq!(expected, inherents); let block = Block::new( @@ -587,8 +639,14 @@ fn inherent_expand() { Digest::default(), ), vec![ - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo_no_post_info()), signature: None }, - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo(1, 0)), signature: None }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo_no_post_info()), + signature: None, + }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo(1, 0)), + signature: None, + }, ], ); @@ -603,8 +661,14 @@ fn inherent_expand() { Digest::default(), ), vec![ - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo_no_post_info()), signature: None }, - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo(0, 0)), signature: None }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo_no_post_info()), + signature: None, + }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo(0, 0)), + signature: None, + }, ], ); @@ -618,9 +682,10 @@ fn inherent_expand() { BlakeTwo256::hash(b"test"), Digest::default(), ), - vec![ - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo_transactional(0)), signature: None }, - ], + vec![UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo_transactional(0)), + signature: None, + }], ); let mut inherent = InherentData::new(); @@ -635,9 +700,10 @@ fn inherent_expand() { BlakeTwo256::hash(b"test"), Digest::default(), ), - vec![ - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo_no_post_info()), signature: Some((1, (), ())) }, - ], + vec![UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo_no_post_info()), + signature: Some((1, (), ())), + }], ); let mut inherent = InherentData::new(); @@ -653,8 +719,14 @@ fn inherent_expand() { Digest::default(), ), vec![ - UncheckedExtrinsic { 
function: Call::Example(pallet::Call::foo(1, 1)), signature: None }, - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo_transactional(0)), signature: None }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo(1, 1)), + signature: None, + }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo_transactional(0)), + signature: None, + }, ], ); @@ -669,9 +741,18 @@ fn inherent_expand() { Digest::default(), ), vec![ - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo(1, 1)), signature: None }, - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo_transactional(0)), signature: None }, - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo_no_post_info()), signature: None }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo(1, 1)), + signature: None, + }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo_transactional(0)), + signature: None, + }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo_no_post_info()), + signature: None, + }, ], ); @@ -686,9 +767,18 @@ fn inherent_expand() { Digest::default(), ), vec![ - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo(1, 1)), signature: None }, - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo(1, 0)), signature: Some((1, (), ())) }, - UncheckedExtrinsic { function: Call::Example(pallet::Call::foo_no_post_info()), signature: None }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo(1, 1)), + signature: None, + }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo(1, 0)), + signature: Some((1, (), ())), + }, + UncheckedExtrinsic { + function: Call::Example(pallet::Call::foo_no_post_info()), + signature: None, + }, ], ); @@ -698,7 +788,8 @@ fn inherent_expand() { #[test] fn validate_unsigned_expand() { use frame_support::pallet_prelude::{ - InvalidTransaction, TransactionSource, TransactionValidityError, ValidTransaction, ValidateUnsigned, + 
InvalidTransaction, TransactionSource, TransactionValidityError, ValidTransaction, + ValidateUnsigned, }; let call = pallet::Call::::foo_no_post_info(); @@ -739,8 +830,7 @@ fn pallet_new_call_variant() { #[test] fn storage_expand() { - use frame_support::pallet_prelude::*; - use frame_support::storage::StoragePrefixedMap; + use frame_support::{pallet_prelude::*, storage::StoragePrefixedMap}; fn twox_64_concat(d: &[u8]) -> Vec { let mut v = twox_64(d).to_vec(); @@ -870,7 +960,7 @@ fn metadata() { name: "BlockWeights", ty: scale_info::meta_type::(), value: vec![], - docs: vec![] + docs: vec![], }, PalletConstantMetadata { name: "BlockLength", @@ -900,8 +990,8 @@ fn metadata() { name: "SS58Prefix", ty: scale_info::meta_type::(), value: vec![], - docs: vec![] - } + docs: vec![], + }, ], error: Some(scale_info::meta_type::>().into()), }, @@ -996,20 +1086,25 @@ fn metadata() { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::NMap { keys: scale_info::meta_type::<(u16, u32)>(), - hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], + hashers: vec![ + StorageHasher::Twox64Concat, + StorageHasher::Blake2_128Concat, + ], value: scale_info::meta_type::(), }, default: vec![0], docs: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { name: "ConditionalValue", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], docs: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { name: "ConditionalMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { @@ -1020,7 +1115,8 @@ fn metadata() { default: vec![0], docs: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { name: "ConditionalDoubleMap", 
modifier: StorageEntryModifier::Optional, ty: StorageEntryType::DoubleMap { @@ -1033,12 +1129,16 @@ fn metadata() { default: vec![0], docs: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { name: "ConditionalNMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::NMap { keys: scale_info::meta_type::<(u8, u16)>(), - hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], value: scale_info::meta_type::(), }, default: vec![0], @@ -1053,19 +1153,13 @@ fn metadata() { name: "MyGetParam", ty: scale_info::meta_type::(), value: vec![10, 0, 0, 0], - docs: vec![ - " Some comment", - " Some comment", - ], + docs: vec![" Some comment", " Some comment"], }, PalletConstantMetadata { name: "MyGetParam2", ty: scale_info::meta_type::(), value: vec![11, 0, 0, 0], - docs: vec![ - " Some comment", - " Some comment", - ], + docs: vec![" Some comment", " Some comment"], }, PalletConstantMetadata { name: "MyGetParam3", @@ -1077,21 +1171,18 @@ fn metadata() { name: "some_extra", ty: scale_info::meta_type::(), value: vec![100, 0, 0, 0, 0, 0, 0, 0], - docs: vec![ - " Some doc", - " Some doc", - ], + docs: vec![" Some doc", " Some doc"], }, PalletConstantMetadata { name: "some_extra_extra", ty: scale_info::meta_type::(), value: vec![0, 0, 0, 0, 0, 0, 0, 0], - docs: vec![ - " Some doc", - ], + docs: vec![" Some doc"], }, ], - error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), + error: Some(PalletErrorMetadata { + ty: scale_info::meta_type::>(), + }), }, PalletMetadata { index: 1, @@ -1184,20 +1275,25 @@ fn metadata() { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::NMap { keys: scale_info::meta_type::<(u16, u32)>(), - hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], + hashers: vec![ + StorageHasher::Twox64Concat, + 
StorageHasher::Blake2_128Concat, + ], value: scale_info::meta_type::(), }, default: vec![0], docs: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { name: "ConditionalValue", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Plain(scale_info::meta_type::()), default: vec![0], docs: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { name: "ConditionalMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { @@ -1208,7 +1304,8 @@ fn metadata() { default: vec![0], docs: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { name: "ConditionalDoubleMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::DoubleMap { @@ -1221,12 +1318,16 @@ fn metadata() { default: vec![0], docs: vec![], }, - #[cfg(feature = "conditional-storage")] StorageEntryMetadata { + #[cfg(feature = "conditional-storage")] + StorageEntryMetadata { name: "ConditionalNMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::NMap { keys: scale_info::meta_type::<(u8, u16)>(), - hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], value: scale_info::meta_type::(), }, default: vec![0], @@ -1241,19 +1342,13 @@ fn metadata() { name: "MyGetParam", ty: scale_info::meta_type::(), value: vec![10, 0, 0, 0], - docs: vec![ - " Some comment", - " Some comment", - ], + docs: vec![" Some comment", " Some comment"], }, PalletConstantMetadata { name: "MyGetParam2", ty: scale_info::meta_type::(), value: vec![11, 0, 0, 0], - docs: vec![ - " Some comment", - " Some comment", - ], + docs: vec![" Some comment", " Some comment"], }, PalletConstantMetadata { name: "MyGetParam3", @@ 
-1265,60 +1360,49 @@ fn metadata() { name: "some_extra", ty: scale_info::meta_type::(), value: vec![100, 0, 0, 0, 0, 0, 0, 0], - docs: vec![ - " Some doc", - " Some doc", - ], + docs: vec![" Some doc", " Some doc"], }, PalletConstantMetadata { name: "some_extra_extra", ty: scale_info::meta_type::(), value: vec![0, 0, 0, 0, 0, 0, 0, 0], - docs: vec![ - " Some doc", - ], + docs: vec![" Some doc"], }, ], - error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), + error: Some(PalletErrorMetadata { + ty: scale_info::meta_type::>(), + }), }, PalletMetadata { index: 2, name: "Example2", - storage: Some(PalletStorageMetadata { - prefix: "Example2", - entries: vec![], - }), + storage: Some(PalletStorageMetadata { prefix: "Example2", entries: vec![] }), calls: Some(scale_info::meta_type::>().into()), event: Some(PalletEventMetadata { ty: scale_info::meta_type::() }), constants: vec![], error: None, - } + }, ]; let extrinsic = ExtrinsicMetadata { ty: scale_info::meta_type::(), version: 4, - signed_extensions: vec![ - SignedExtensionMetadata { - identifier: "UnitSignedExtension", - ty: scale_info::meta_type::<()>(), - additional_signed: scale_info::meta_type::<()>(), - } - ] + signed_extensions: vec![SignedExtensionMetadata { + identifier: "UnitSignedExtension", + ty: scale_info::meta_type::<()>(), + additional_signed: scale_info::meta_type::<()>(), + }], }; - let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); + let expected_metadata: RuntimeMetadataPrefixed = + RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); let expected_metadata = match expected_metadata.1 { - RuntimeMetadata::V14(metadata) => { - metadata - }, + RuntimeMetadata::V14(metadata) => metadata, _ => panic!("metadata has been bumped, test needs to be updated"), }; let actual_metadata = match Runtime::metadata().1 { - RuntimeMetadata::V14(metadata) => { - metadata - }, + RuntimeMetadata::V14(metadata) => metadata, _ => 
panic!("metadata has been bumped, test needs to be updated"), }; @@ -1339,9 +1423,9 @@ fn test_pallet_info_access() { #[test] fn test_storage_info() { use frame_support::{ - StorageHasher, - traits::{StorageInfoTrait, StorageInfo}, pallet_prelude::*, + traits::{StorageInfo, StorageInfoTrait}, + StorageHasher, }; let prefix = |pallet_name, storage_name| { @@ -1462,14 +1546,12 @@ fn test_storage_info() { assert_eq!( Example2::storage_info(), - vec![ - StorageInfo { - pallet_name: b"Example2".to_vec(), - storage_name: b"SomeValue".to_vec(), - prefix: prefix(b"Example2", b"SomeValue").to_vec(), - max_values: Some(1), - max_size: None, - }, - ], + vec![StorageInfo { + pallet_name: b"Example2".to_vec(), + storage_name: b"SomeValue".to_vec(), + prefix: prefix(b"Example2", b"SomeValue").to_vec(), + max_values: Some(1), + max_size: None, + },], ); } diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index 27e6aaba9e47f..def3e130b41ed 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -23,15 +23,19 @@ impl SomeAssociation for u64 { } mod pallet_old { + use super::SomeAssociation; use frame_support::{ - decl_storage, decl_error, decl_event, decl_module, weights::Weight, traits::Get, Parameter + decl_error, decl_event, decl_module, decl_storage, traits::Get, weights::Weight, Parameter, }; use frame_system::ensure_root; - use super::SomeAssociation; pub trait Config: frame_system::Config { type SomeConst: Get; - type Balance: Parameter + codec::HasCompact + From + Into + Default + type Balance: Parameter + + codec::HasCompact + + From + + Into + + Default + SomeAssociation; type Event: From> + Into<::Event>; } @@ -50,7 +54,10 @@ mod pallet_old { } decl_event!( - pub enum Event where Balance = ::Balance { + pub enum Event + where + Balance = ::Balance, + { /// Dummy event, just here so there's a generic type that's used. 
Dummy(Balance), } @@ -92,15 +99,19 @@ mod pallet_old { #[frame_support::pallet] pub mod pallet { use super::SomeAssociation; - use frame_support::scale_info; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - use frame_system::ensure_root; + use frame_support::{pallet_prelude::*, scale_info}; + use frame_system::{ensure_root, pallet_prelude::*}; #[pallet::config] pub trait Config: frame_system::Config { - type Balance: Parameter + codec::HasCompact + From + Into + Default - + MaybeSerializeDeserialize + SomeAssociation + scale_info::StaticTypeInfo; + type Balance: Parameter + + codec::HasCompact + + From + + Into + + Default + + MaybeSerializeDeserialize + + SomeAssociation + + scale_info::StaticTypeInfo; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; @@ -126,7 +137,7 @@ pub mod pallet { #[pallet::weight(>::into(new_value.clone()))] pub fn set_dummy( origin: OriginFor, - #[pallet::compact] new_value: T::Balance + #[pallet::compact] new_value: T::Balance, ) -> DispatchResultWithPostInfo { ensure_root(origin)?; @@ -157,13 +168,22 @@ pub mod pallet { #[pallet::storage] type Bar = StorageMap<_, Blake2_128Concat, T::AccountId, T::Balance, ValueQuery>; - #[pallet::type_value] pub fn OnFooEmpty() -> T::Balance { 3.into() } + #[pallet::type_value] + pub fn OnFooEmpty() -> T::Balance { + 3.into() + } #[pallet::storage] type Foo = StorageValue<_, T::Balance, ValueQuery, OnFooEmpty>; #[pallet::storage] type Double = StorageDoubleMap< - _, Blake2_128Concat, u32, Twox64Concat, u64, ::A, ValueQuery + _, + Blake2_128Concat, + u32, + Twox64Concat, + u64, + ::A, + ValueQuery, >; #[pallet::genesis_config] @@ -257,14 +277,9 @@ frame_support::construct_runtime!( #[cfg(test)] mod test { - use super::Runtime; - use super::pallet; - use super::pallet_old; + use super::{pallet, pallet_old, Runtime}; use codec::{Decode, Encode}; - use scale_info::{ - form::PortableForm, - Variant, - }; + use scale_info::{form::PortableForm, 
Variant}; #[test] fn metadata() { @@ -281,21 +296,16 @@ mod test { pretty_assertions::assert_eq!(ty1, ty2); }; - let get_enum_variants = |ty_id| { - match types.resolve(ty_id).map(|ty| ty.type_def()) { - Some(ty) => { - match ty { - scale_info::TypeDef::Variant(var) => { - var.variants() - } - _ => panic!("Expected variant type") - } - } - _ => panic!("No type found") - } + let get_enum_variants = |ty_id| match types.resolve(ty_id).map(|ty| ty.type_def()) { + Some(ty) => match ty { + scale_info::TypeDef::Variant(var) => var.variants(), + _ => panic!("Expected variant type"), + }, + _ => panic!("No type found"), }; - let assert_enum_variants = |vs1: &[Variant], vs2: &[Variant]| { + let assert_enum_variants = |vs1: &[Variant], + vs2: &[Variant]| { assert_eq!(vs1.len(), vs2.len()); for i in 0..vs1.len() { let v1 = &vs2[i]; @@ -341,14 +351,16 @@ mod test { assert_eq!( pallet_old::Event::::decode( &mut &pallet::Event::::Dummy(10).encode()[..] - ).unwrap(), + ) + .unwrap(), pallet_old::Event::::Dummy(10), ); assert_eq!( pallet_old::Call::::decode( &mut &pallet::Call::::set_dummy(10).encode()[..] - ).unwrap(), + ) + .unwrap(), pallet_old::Call::::set_dummy(10), ); } diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index 3fbc6e49192b6..af07ed83aae7d 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -17,7 +17,7 @@ mod pallet_old { use frame_support::{ - decl_storage, decl_error, decl_event, decl_module, weights::Weight, traits::Get, Parameter + decl_error, decl_event, decl_module, decl_storage, traits::Get, weights::Weight, Parameter, }; use frame_system::ensure_root; @@ -39,7 +39,10 @@ mod pallet_old { } decl_event!( - pub enum Event where Balance = >::Balance { + pub enum Event + where + Balance = >::Balance, + { /// Dummy event, just here so there's a generic type that's used. 
Dummy(Balance), } @@ -82,15 +85,18 @@ mod pallet_old { #[frame_support::pallet] pub mod pallet { - use frame_support::scale_info; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - use frame_system::ensure_root; + use frame_support::{pallet_prelude::*, scale_info}; + use frame_system::{ensure_root, pallet_prelude::*}; #[pallet::config] pub trait Config: frame_system::Config { - type Balance: Parameter + codec::HasCompact + From + Into + Default - + MaybeSerializeDeserialize + scale_info::StaticTypeInfo; + type Balance: Parameter + + codec::HasCompact + + From + + Into + + Default + + MaybeSerializeDeserialize + + scale_info::StaticTypeInfo; #[pallet::constant] type SomeConst: Get; type Event: From> + IsType<::Event>; @@ -116,7 +122,7 @@ pub mod pallet { #[pallet::weight(>::into(new_value.clone()))] pub fn set_dummy( origin: OriginFor, - #[pallet::compact] new_value: T::Balance + #[pallet::compact] new_value: T::Balance, ) -> DispatchResultWithPostInfo { ensure_root(origin)?; @@ -151,12 +157,14 @@ pub mod pallet { #[pallet::storage] type Foo, I: 'static = ()> = StorageValue<_, T::Balance, ValueQuery, OnFooEmpty>; - #[pallet::type_value] pub fn OnFooEmpty, I: 'static>() -> T::Balance { 3.into() } + #[pallet::type_value] + pub fn OnFooEmpty, I: 'static>() -> T::Balance { + 3.into() + } #[pallet::storage] - type Double = StorageDoubleMap< - _, Blake2_128Concat, u32, Twox64Concat, u64, u16, ValueQuery - >; + type Double = + StorageDoubleMap<_, Blake2_128Concat, u32, Twox64Concat, u64, u16, ValueQuery>; #[pallet::genesis_config] pub struct GenesisConfig, I: 'static = ()> { @@ -272,14 +280,9 @@ frame_support::construct_runtime!( #[cfg(test)] mod test { - use super::Runtime; - use super::pallet; - use super::pallet_old; + use super::{pallet, pallet_old, Runtime}; use codec::{Decode, Encode}; - use scale_info::{ - form::PortableForm, - Variant, - }; + use scale_info::{form::PortableForm, Variant}; #[test] fn metadata() { @@ -290,21 +293,16 @@ 
mod test { _ => unreachable!(), }; - let get_enum_variants = |ty_id| { - match types.resolve(ty_id).map(|ty| ty.type_def()) { - Some(ty) => { - match ty { - scale_info::TypeDef::Variant(var) => { - var.variants() - } - _ => panic!("Expected variant type") - } - } - _ => panic!("No type found") - } + let get_enum_variants = |ty_id| match types.resolve(ty_id).map(|ty| ty.type_def()) { + Some(ty) => match ty { + scale_info::TypeDef::Variant(var) => var.variants(), + _ => panic!("Expected variant type"), + }, + _ => panic!("No type found"), }; - let assert_enum_variants = |vs1: &[Variant], vs2: &[Variant]| { + let assert_enum_variants = |vs1: &[Variant], + vs2: &[Variant]| { assert_eq!(vs1.len(), vs2.len()); for i in 0..vs1.len() { let v1 = &vs2[i]; @@ -352,14 +350,16 @@ mod test { assert_eq!( pallet_old::Event::::decode( &mut &pallet::Event::::Dummy(10).encode()[..] - ).unwrap(), + ) + .unwrap(), pallet_old::Event::::Dummy(10), ); assert_eq!( pallet_old::Call::::decode( &mut &pallet::Call::::set_dummy(10).encode()[..] - ).unwrap(), + ) + .unwrap(), pallet_old::Call::::set_dummy(10), ); } diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 7a8356bb73ffb..e47a8879455d4 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -16,22 +16,24 @@ // limitations under the License. 
use frame_support::{ - weights::{DispatchInfo, DispatchClass, Pays, GetDispatchInfo}, - traits::{ - GetCallName, GetPalletVersion, OnInitialize, OnFinalize, OnRuntimeUpgrade, OnGenesis, - }, dispatch::UnfilteredDispatchable, storage::unhashed, + traits::{ + GetCallName, GetPalletVersion, OnFinalize, OnGenesis, OnInitialize, OnRuntimeUpgrade, + }, + weights::{DispatchClass, DispatchInfo, GetDispatchInfo, Pays}, +}; +use sp_io::{ + hashing::{blake2_128, twox_128, twox_64}, + TestExternalities, }; use sp_runtime::DispatchError; -use sp_io::{TestExternalities, hashing::{twox_64, twox_128, blake2_128}}; #[frame_support::pallet] pub mod pallet { - use sp_std::any::TypeId; - use frame_support::scale_info; - use frame_support::pallet_prelude::*; + use frame_support::{pallet_prelude::*, scale_info}; use frame_system::pallet_prelude::*; + use sp_std::any::TypeId; type BalanceOf = >::Balance; @@ -74,15 +76,17 @@ pub mod pallet { 31 } } - fn integrity_test() { - } + fn integrity_test() {} } #[pallet::call] impl, I: 'static> Pallet { /// Doc comment put in metadata #[pallet::weight(Weight::from(*_foo))] - pub fn foo(origin: OriginFor, #[pallet::compact] _foo: u32) -> DispatchResultWithPostInfo { + pub fn foo( + origin: OriginFor, + #[pallet::compact] _foo: u32, + ) -> DispatchResultWithPostInfo { let _ = origin; Self::deposit_event(Event::Something(3)); Ok(().into()) @@ -93,14 +97,13 @@ pub mod pallet { #[frame_support::transactional] pub fn foo_transactional( origin: OriginFor, - #[pallet::compact] _foo: u32 + #[pallet::compact] _foo: u32, ) -> DispatchResultWithPostInfo { let _ = origin; Ok(().into()) } } - #[pallet::error] pub enum Error { /// doc comment put into metadata @@ -140,14 +143,8 @@ pub mod pallet { #[pallet::storage] #[pallet::getter(fn nmap2)] - pub type NMap2 = StorageNMap< - _, - ( - storage::Key, - storage::Key, - ), - u64, - >; + pub type NMap2 = + StorageNMap<_, (storage::Key, storage::Key), u64>; #[pallet::genesis_config] #[derive(Default)] @@ -156,14 
+153,19 @@ pub mod pallet { } #[pallet::genesis_build] - impl, I:'static> GenesisBuild for GenesisConfig { + impl, I: 'static> GenesisBuild for GenesisConfig { fn build(&self) {} } #[pallet::origin] #[derive( - EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, - scale_info::TypeInfo + EqNoBound, + RuntimeDebugNoBound, + CloneNoBound, + PartialEqNoBound, + Encode, + Decode, + scale_info::TypeInfo, )] #[scale_info(skip_type_params(T, I))] pub struct Origin(PhantomData<(T, I)>); @@ -173,7 +175,7 @@ pub mod pallet { type Call = Call; fn validate_unsigned( _source: TransactionSource, - _call: &Self::Call + _call: &Self::Call, ) -> TransactionValidity { Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) } @@ -197,8 +199,7 @@ pub mod pallet { #[derive(codec::Encode, sp_runtime::RuntimeDebug)] #[cfg_attr(feature = "std", derive(codec::Decode))] - pub enum InherentError { - } + pub enum InherentError {} impl frame_support::inherent::IsFatalError for InherentError { fn is_fatal_error(&self) -> bool { @@ -236,9 +237,7 @@ pub mod pallet2 { impl, I: 'static> Default for GenesisConfig { fn default() -> Self { - GenesisConfig { - phantom: Default::default(), - } + GenesisConfig { phantom: Default::default() } } } @@ -249,7 +248,7 @@ pub mod pallet2 { } frame_support::parameter_types!( - pub const MyGetParam: u32= 10; + pub const MyGetParam: u32 = 10; pub const BlockHashCount: u32 = 250; ); @@ -280,12 +279,12 @@ impl frame_system::Config for Runtime { } impl pallet::Config for Runtime { type Event = Event; - type MyGetParam= MyGetParam; + type MyGetParam = MyGetParam; type Balance = u64; } impl pallet::Config for Runtime { type Event = Event; - type MyGetParam= MyGetParam; + type MyGetParam = MyGetParam; type Balance = u64; } impl pallet2::Config for Runtime { @@ -320,26 +319,15 @@ fn call_expand() { let call_foo = pallet::Call::::foo { _foo: 3 }; assert_eq!( call_foo.get_dispatch_info(), - DispatchInfo { - weight: 3, - class: 
DispatchClass::Normal, - pays_fee: Pays::Yes, - } + DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } ); assert_eq!(call_foo.get_call_name(), "foo"); - assert_eq!( - pallet::Call::::get_call_names(), - &["foo", "foo_transactional"], - ); + assert_eq!(pallet::Call::::get_call_names(), &["foo", "foo_transactional"],); let call_foo = pallet::Call::::foo { _foo: 3 }; assert_eq!( call_foo.get_dispatch_info(), - DispatchInfo { - weight: 3, - class: DispatchClass::Normal, - pays_fee: Pays::Yes, - } + DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } ); assert_eq!(call_foo.get_call_name(), "foo"); assert_eq!( @@ -360,11 +348,7 @@ fn error_expand() { ); assert_eq!( DispatchError::from(pallet::Error::::InsufficientProposersBalance), - DispatchError::Module { - index: 1, - error: 0, - message: Some("InsufficientProposersBalance"), - }, + DispatchError::Module { index: 1, error: 0, message: Some("InsufficientProposersBalance") }, ); assert_eq!( @@ -372,16 +356,16 @@ fn error_expand() { String::from("InsufficientProposersBalance"), ); assert_eq!( - <&'static str>::from(pallet::Error::::InsufficientProposersBalance), + <&'static str>::from( + pallet::Error::::InsufficientProposersBalance + ), "InsufficientProposersBalance", ); assert_eq!( - DispatchError::from(pallet::Error::::InsufficientProposersBalance), - DispatchError::Module { - index: 2, - error: 0, - message: Some("InsufficientProposersBalance"), - }, + DispatchError::from( + pallet::Error::::InsufficientProposersBalance + ), + DispatchError::Module { index: 2, error: 0, message: Some("InsufficientProposersBalance") }, ); } @@ -395,7 +379,9 @@ fn instance_expand() { fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo { _foo: 3 }.dispatch_bypass_filter(None.into()).unwrap(); + pallet::Call::::foo { _foo: 3 } + .dispatch_bypass_filter(None.into()) + .unwrap(); assert_eq!( 
frame_system::Pallet::::events()[0].event, Event::Example(pallet::Event::Something(3)), @@ -405,7 +391,8 @@ fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); pallet::Call::::foo { _foo: 3 } - .dispatch_bypass_filter(None.into()).unwrap(); + .dispatch_bypass_filter(None.into()) + .unwrap(); assert_eq!( frame_system::Pallet::::events()[0].event, Event::Instance1Example(pallet::Event::Something(3)), @@ -415,8 +402,7 @@ fn pallet_expand_deposit_event() { #[test] fn storage_expand() { - use frame_support::pallet_prelude::*; - use frame_support::storage::StoragePrefixedMap; + use frame_support::{pallet_prelude::*, storage::StoragePrefixedMap}; fn twox_64_concat(d: &[u8]) -> Vec { let mut v = twox_64(d).to_vec(); @@ -597,13 +583,15 @@ fn metadata() { name: "System", storage: None, calls: Some(scale_info::meta_type::>().into()), - event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), + event: Some(PalletEventMetadata { + ty: scale_info::meta_type::>(), + }), constants: vec![ PalletConstantMetadata { name: "BlockWeights", ty: scale_info::meta_type::(), value: vec![], - docs: vec![] + docs: vec![], }, PalletConstantMetadata { name: "BlockLength", @@ -633,10 +621,12 @@ fn metadata() { name: "SS58Prefix", ty: scale_info::meta_type::(), value: vec![], - docs: vec![] - } + docs: vec![], + }, ], - error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), + error: Some(PalletErrorMetadata { + ty: scale_info::meta_type::>(), + }), }; let example_pallet_metadata = PalletMetadata { @@ -726,14 +716,12 @@ fn metadata() { }), calls: Some(scale_info::meta_type::>().into()), event: Some(PalletEventMetadata { ty: scale_info::meta_type::>() }), - constants: vec![ - PalletConstantMetadata { - name: "MyGetParam", - ty: scale_info::meta_type::(), - value: vec![10, 0, 0, 0], - docs: vec![], - }, - ], + constants: vec![PalletConstantMetadata { + name: "MyGetParam", + ty: 
scale_info::meta_type::(), + value: vec![10, 0, 0, 0], + docs: vec![], + }], error: Some(PalletErrorMetadata { ty: scale_info::meta_type::>() }), }; @@ -765,37 +753,28 @@ fn metadata() { _ => unreachable!(), } - let pallets = vec![ - system_pallet_metadata, - example_pallet_metadata, - example_pallet_instance1_metadata, - ]; + let pallets = + vec![system_pallet_metadata, example_pallet_metadata, example_pallet_instance1_metadata]; let extrinsic = ExtrinsicMetadata { ty: scale_info::meta_type::(), version: 4, - signed_extensions: vec![ - SignedExtensionMetadata { - identifier: "UnitSignedExtension", - ty: scale_info::meta_type::<()>(), - additional_signed: scale_info::meta_type::<()>(), - } - ] + signed_extensions: vec![SignedExtensionMetadata { + identifier: "UnitSignedExtension", + ty: scale_info::meta_type::<()>(), + additional_signed: scale_info::meta_type::<()>(), + }], }; let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); let expected_metadata = match expected_metadata.1 { - RuntimeMetadata::V14(metadata) => { - metadata - }, + RuntimeMetadata::V14(metadata) => metadata, _ => panic!("metadata has been bumped, test needs to be updated"), }; let actual_metadata = match Runtime::metadata().1 { - RuntimeMetadata::V14(metadata) => { - metadata - }, + RuntimeMetadata::V14(metadata) => metadata, _ => panic!("metadata has been bumped, test needs to be updated"), }; @@ -807,9 +786,15 @@ fn metadata() { fn test_pallet_info_access() { assert_eq!(::name(), "System"); assert_eq!(::name(), "Example"); - assert_eq!(::name(), "Instance1Example"); + assert_eq!( + ::name(), + "Instance1Example" + ); assert_eq!(::name(), "Example2"); - assert_eq!(::name(), "Instance1Example2"); + assert_eq!( + ::name(), + "Instance1Example2" + ); assert_eq!(::index(), 0); assert_eq!(::index(), 1); diff --git a/frame/support/test/tests/pallet_version.rs b/frame/support/test/tests/pallet_version.rs index ed0bf52a0346f..5048f47f67522 100644 
--- a/frame/support/test/tests/pallet_version.rs +++ b/frame/support/test/tests/pallet_version.rs @@ -17,15 +17,22 @@ //! Tests related to the pallet version. -#![recursion_limit="128"] +#![recursion_limit = "128"] use codec::{Decode, Encode}; -use sp_runtime::{generic, traits::{BlakeTwo256, Verify}, BuildStorage}; use frame_support::{ - traits::{PALLET_VERSION_STORAGE_KEY_POSTFIX, PalletVersion, OnRuntimeUpgrade, GetPalletVersion}, - crate_to_pallet_version, weights::Weight, + crate_to_pallet_version, + traits::{ + GetPalletVersion, OnRuntimeUpgrade, PalletVersion, PALLET_VERSION_STORAGE_KEY_POSTFIX, + }, + weights::Weight, +}; +use sp_core::{sr25519, H256}; +use sp_runtime::{ + generic, + traits::{BlakeTwo256, Verify}, + BuildStorage, }; -use sp_core::{H256, sr25519}; /// A version that we will check for in the tests const SOME_TEST_VERSION: PalletVersion = PalletVersion { major: 3000, minor: 30, patch: 13 }; @@ -47,7 +54,7 @@ mod module1 { mod module2 { use super::*; - pub trait Config: frame_system::Config {} + pub trait Config: frame_system::Config {} frame_support::decl_module! 
{ pub struct Module, I: Instance=DefaultInstance> for enum Call where @@ -82,8 +89,7 @@ mod pallet3 { use frame_system::pallet_prelude::*; #[pallet::config] - pub trait Config: frame_system::Config { - } + pub trait Config: frame_system::Config {} #[pallet::pallet] pub struct Pallet(_); @@ -91,13 +97,12 @@ mod pallet3 { #[pallet::hooks] impl Hooks> for Pallet { fn on_runtime_upgrade() -> Weight { - return 3; + return 3 } } #[pallet::call] - impl Pallet { - } + impl Pallet {} } #[frame_support::pallet] @@ -106,22 +111,20 @@ mod pallet4 { use frame_system::pallet_prelude::*; #[pallet::config] - pub trait Config: frame_system::Config { - } + pub trait Config: frame_system::Config {} #[pallet::pallet] - pub struct Pallet(PhantomData<(T, I)>); + pub struct Pallet(PhantomData<(T, I)>); #[pallet::hooks] impl, I: 'static> Hooks> for Pallet { fn on_runtime_upgrade() -> Weight { - return 3; + return 3 } } #[pallet::call] - impl, I: 'static> Pallet { - } + impl, I: 'static> Pallet {} } impl module1::Config for Runtime {} @@ -210,8 +213,8 @@ fn get_pallet_version_storage_key_for_pallet(pallet: &str) -> [u8; 32] { fn check_pallet_version(pallet: &str) { let key = get_pallet_version_storage_key_for_pallet(pallet); let value = sp_io::storage::get(&key).expect("Pallet version exists"); - let version = PalletVersion::decode(&mut &value[..]) - .expect("Pallet version is encoded correctly"); + let version = + PalletVersion::decode(&mut &value[..]).expect("Pallet version is encoded correctly"); assert_eq!(crate_to_pallet_version!(), version); } diff --git a/frame/support/test/tests/pallet_with_name_trait_is_valid.rs b/frame/support/test/tests/pallet_with_name_trait_is_valid.rs index 665bbc2b5c513..867d952741011 100644 --- a/frame/support/test/tests/pallet_with_name_trait_is_valid.rs +++ b/frame/support/test/tests/pallet_with_name_trait_is_valid.rs @@ -28,7 +28,10 @@ frame_support::decl_storage! 
{ } frame_support::decl_event!( - pub enum Event where B = ::Balance { + pub enum Event + where + B = ::Balance, + { Dummy(B), } ); diff --git a/frame/support/test/tests/storage_transaction.rs b/frame/support/test/tests/storage_transaction.rs index b518c60e957c6..4e97a87377b17 100644 --- a/frame/support/test/tests/storage_transaction.rs +++ b/frame/support/test/tests/storage_transaction.rs @@ -16,8 +16,10 @@ // limitations under the License. use frame_support::{ - assert_ok, assert_noop, transactional, StorageMap, StorageValue, - dispatch::{DispatchError, DispatchResult}, storage::{with_transaction, TransactionOutcome::*}, + assert_noop, assert_ok, + dispatch::{DispatchError, DispatchResult}, + storage::{with_transaction, TransactionOutcome::*}, + transactional, StorageMap, StorageValue, }; use sp_io::TestExternalities; use sp_std::result; @@ -41,7 +43,7 @@ frame_support::decl_module! { } } -frame_support::decl_storage!{ +frame_support::decl_storage! { trait Store for Module as StorageTransactions { pub Value: u32; pub Map: map hasher(twox_64_concat) String => u32; @@ -62,7 +64,6 @@ impl Config for Runtime {} #[test] fn storage_transaction_basic_commit() { TestExternalities::default().execute_with(|| { - assert_eq!(Value::get(), 0); assert!(!Map::contains_key("val0")); @@ -82,7 +83,6 @@ fn storage_transaction_basic_commit() { #[test] fn storage_transaction_basic_rollback() { TestExternalities::default().execute_with(|| { - assert_eq!(Value::get(), 0); assert_eq!(Map::get("val0"), 0); diff --git a/frame/support/test/tests/system.rs b/frame/support/test/tests/system.rs index d611264f3a048..a702e913e457e 100644 --- a/frame/support/test/tests/system.rs +++ b/frame/support/test/tests/system.rs @@ -16,7 +16,9 @@ // limitations under the License. 
use frame_support::{ - codec::{Encode, Decode, EncodeLike}, traits::Get, weights::RuntimeDbWeight, + codec::{Decode, Encode, EncodeLike}, + traits::Get, + weights::RuntimeDbWeight, }; pub trait Config: 'static + Eq + Clone { @@ -45,7 +47,10 @@ impl Module { } frame_support::decl_event!( - pub enum Event where BlockNumber = ::BlockNumber { + pub enum Event + where + BlockNumber = ::BlockNumber, + { ExtrinsicSuccess, ExtrinsicFailed, Ignore(BlockNumber), @@ -83,7 +88,8 @@ pub type Origin = RawOrigin<::AccountId>; #[allow(dead_code)] pub fn ensure_root(o: OuterOrigin) -> Result<(), &'static str> - where OuterOrigin: Into, OuterOrigin>> +where + OuterOrigin: Into, OuterOrigin>>, { o.into().map(|_| ()).map_err(|_| "bad origin: expected to be a root origin") } diff --git a/frame/system/benches/bench.rs b/frame/system/benches/bench.rs index 02ea48bdde032..e3f60733a6237 100644 --- a/frame/system/benches/bench.rs +++ b/frame/system/benches/bench.rs @@ -15,11 +15,15 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use criterion::{Criterion, criterion_group, criterion_main, black_box}; +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use frame_support::{decl_event, decl_module}; use frame_system as system; -use frame_support::{decl_module, decl_event}; use sp_core::H256; -use sp_runtime::{Perbill, traits::{BlakeTwo256, IdentityLookup}, testing::Header}; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, + Perbill, +}; mod module { use super::*; @@ -104,17 +108,18 @@ fn deposit_events(n: usize) { let mut t = new_test_ext(); t.execute_with(|| { for _ in 0..n { - module::Module::::deposit_event( - module::Event::Complex(vec![1, 2, 3], 2, 3, 899) - ); + module::Module::::deposit_event(module::Event::Complex( + vec![1, 2, 3], + 2, + 3, + 899, + )); } }); } fn sr_system_benchmark(c: &mut Criterion) { - c.bench_function("deposit 100 events", |b| { - b.iter(|| deposit_events(black_box(100))) - }); + c.bench_function("deposit 100 events", |b| b.iter(|| deposit_events(black_box(100)))); } criterion_group!(benches, sr_system_benchmark); diff --git a/frame/system/benchmarking/src/lib.rs b/frame/system/benchmarking/src/lib.rs index 7146bcd60645b..4b25dcd06a636 100644 --- a/frame/system/benchmarking/src/lib.rs +++ b/frame/system/benchmarking/src/lib.rs @@ -20,17 +20,12 @@ #![cfg_attr(not(feature = "std"), no_std)] use codec::Encode; -use sp_std::vec; -use sp_std::prelude::*; -use sp_core::{ChangesTrieConfiguration, storage::well_known_keys}; +use frame_benchmarking::{benchmarks, impl_benchmark_test_suite, whitelisted_caller}; +use frame_support::{storage, traits::Get, weights::DispatchClass}; +use frame_system::{Call, DigestItemOf, Pallet as System, RawOrigin}; +use sp_core::{storage::well_known_keys, ChangesTrieConfiguration}; use sp_runtime::traits::Hash; -use frame_benchmarking::{benchmarks, whitelisted_caller, impl_benchmark_test_suite}; -use frame_support::{ - storage, - traits::Get, - weights::DispatchClass, -}; -use 
frame_system::{Pallet as System, Call, RawOrigin, DigestItemOf}; +use sp_std::{prelude::*, vec}; mod mock; @@ -144,8 +139,4 @@ benchmarks! { } } -impl_benchmark_test_suite!( - Pallet, - crate::mock::new_test_ext(), - crate::mock::Test, -); +impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::Test,); diff --git a/frame/system/src/extensions/check_genesis.rs b/frame/system/src/extensions/check_genesis.rs index d5b3a49c9f326..6f409d5d3d4ad 100644 --- a/frame/system/src/extensions/check_genesis.rs +++ b/frame/system/src/extensions/check_genesis.rs @@ -15,8 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use codec::{Encode, Decode}; use crate::{Config, Pallet}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; use sp_runtime::{ traits::{SignedExtension, Zero}, diff --git a/frame/system/src/extensions/check_mortality.rs b/frame/system/src/extensions/check_mortality.rs index d3d27f09606a4..69cca765efea9 100644 --- a/frame/system/src/extensions/check_mortality.rs +++ b/frame/system/src/extensions/check_mortality.rs @@ -15,14 +15,14 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use codec::{Encode, Decode}; -use crate::{Config, Pallet, BlockHash}; +use crate::{BlockHash, Config, Pallet}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; use sp_runtime::{ generic::Era, - traits::{SignedExtension, DispatchInfoOf, SaturatedConversion}, + traits::{DispatchInfoOf, SaturatedConversion, SignedExtension}, transaction_validity::{ - ValidTransaction, TransactionValidityError, InvalidTransaction, TransactionValidity, + InvalidTransaction, TransactionValidity, TransactionValidityError, ValidTransaction, }, }; @@ -86,7 +86,7 @@ impl SignedExtension for CheckMortality { #[cfg(test)] mod tests { use super::*; - use crate::mock::{Test, new_test_ext, System, CALL}; + use crate::mock::{new_test_ext, System, Test, CALL}; use frame_support::weights::{DispatchClass, DispatchInfo, Pays}; use sp_core::H256; @@ -95,7 +95,10 @@ mod tests { new_test_ext().execute_with(|| { // future assert_eq!( - CheckMortality::::from(Era::mortal(4, 2)).additional_signed().err().unwrap(), + CheckMortality::::from(Era::mortal(4, 2)) + .additional_signed() + .err() + .unwrap(), InvalidTransaction::AncientBirthBlock.into(), ); @@ -109,7 +112,8 @@ mod tests { #[test] fn signed_ext_check_era_should_change_longevity() { new_test_ext().execute_with(|| { - let normal = DispatchInfo { weight: 100, class: DispatchClass::Normal, pays_fee: Pays::Yes }; + let normal = + DispatchInfo { weight: 100, class: DispatchClass::Normal, pays_fee: Pays::Yes }; let len = 0_usize; let ext = ( crate::CheckWeight::::new(), diff --git a/frame/system/src/extensions/check_nonce.rs b/frame/system/src/extensions/check_nonce.rs index 9012ad58aeb4a..081a0efa3db71 100644 --- a/frame/system/src/extensions/check_nonce.rs +++ b/frame/system/src/extensions/check_nonce.rs @@ -15,15 +15,15 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use codec::{Encode, Decode}; use crate::Config; +use codec::{Decode, Encode}; use frame_support::weights::DispatchInfo; use scale_info::TypeInfo; use sp_runtime::{ - traits::{SignedExtension, DispatchInfoOf, Dispatchable, One}, + traits::{DispatchInfoOf, Dispatchable, One, SignedExtension}, transaction_validity::{ - ValidTransaction, TransactionValidityError, InvalidTransaction, TransactionValidity, - TransactionLongevity, + InvalidTransaction, TransactionLongevity, TransactionValidity, TransactionValidityError, + ValidTransaction, }, }; use sp_std::vec; @@ -55,8 +55,9 @@ impl sp_std::fmt::Debug for CheckNonce { } } -impl SignedExtension for CheckNonce where - T::Call: Dispatchable +impl SignedExtension for CheckNonce +where + T::Call: Dispatchable, { type AccountId = T::AccountId; type Call = T::Call; @@ -64,7 +65,9 @@ impl SignedExtension for CheckNonce where type Pre = (); const IDENTIFIER: &'static str = "CheckNonce"; - fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { Ok(()) } + fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { + Ok(()) + } fn pre_dispatch( self, @@ -75,13 +78,12 @@ impl SignedExtension for CheckNonce where ) -> Result<(), TransactionValidityError> { let mut account = crate::Account::::get(who); if self.0 != account.nonce { - return Err( - if self.0 < account.nonce { - InvalidTransaction::Stale - } else { - InvalidTransaction::Future - }.into() - ) + return Err(if self.0 < account.nonce { + InvalidTransaction::Stale + } else { + InvalidTransaction::Future + } + .into()) } account.nonce += T::Index::one(); crate::Account::::insert(who, account); @@ -121,19 +123,22 @@ impl SignedExtension for CheckNonce where #[cfg(test)] mod tests { use super::*; - use crate::mock::{Test, new_test_ext, CALL}; + use crate::mock::{new_test_ext, Test, CALL}; use frame_support::{assert_noop, assert_ok}; #[test] fn signed_ext_check_nonce_works() { new_test_ext().execute_with(|| { - 
crate::Account::::insert(1, crate::AccountInfo { - nonce: 1, - consumers: 0, - providers: 0, - sufficients: 0, - data: 0, - }); + crate::Account::::insert( + 1, + crate::AccountInfo { + nonce: 1, + consumers: 0, + providers: 0, + sufficients: 0, + data: 0, + }, + ); let info = DispatchInfo::default(); let len = 0_usize; // stale diff --git a/frame/system/src/extensions/check_spec_version.rs b/frame/system/src/extensions/check_spec_version.rs index ad95b57147dc9..0217aefae6b9d 100644 --- a/frame/system/src/extensions/check_spec_version.rs +++ b/frame/system/src/extensions/check_spec_version.rs @@ -16,12 +16,9 @@ // limitations under the License. use crate::{Config, Pallet}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; -use sp_runtime::{ - traits::SignedExtension, - transaction_validity::TransactionValidityError, -}; +use sp_runtime::{traits::SignedExtension, transaction_validity::TransactionValidityError}; /// Ensure the runtime version registered in the transaction is the same as at present. #[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] diff --git a/frame/system/src/extensions/check_tx_version.rs b/frame/system/src/extensions/check_tx_version.rs index 13043cf6d33a7..9418d3ff5d937 100644 --- a/frame/system/src/extensions/check_tx_version.rs +++ b/frame/system/src/extensions/check_tx_version.rs @@ -16,12 +16,9 @@ // limitations under the License. use crate::{Config, Pallet}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; -use sp_runtime::{ - traits::SignedExtension, - transaction_validity::TransactionValidityError, -}; +use sp_runtime::{traits::SignedExtension, transaction_validity::TransactionValidityError}; /// Ensure the transaction version registered in the transaction is the same as at present. 
#[derive(Encode, Decode, Clone, Eq, PartialEq, TypeInfo)] diff --git a/frame/system/src/extensions/check_weight.rs b/frame/system/src/extensions/check_weight.rs index 3569eea51dbb7..4e920b61753a6 100644 --- a/frame/system/src/extensions/check_weight.rs +++ b/frame/system/src/extensions/check_weight.rs @@ -16,28 +16,29 @@ // limitations under the License. use crate::{limits::BlockWeights, Config, Pallet}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; +use frame_support::{ + traits::Get, + weights::{priority::FrameTransactionPriority, DispatchClass, DispatchInfo, PostDispatchInfo}, +}; use scale_info::TypeInfo; use sp_runtime::{ - traits::{SignedExtension, DispatchInfoOf, Dispatchable, PostDispatchInfoOf}, + traits::{DispatchInfoOf, Dispatchable, PostDispatchInfoOf, SignedExtension}, transaction_validity::{ - ValidTransaction, TransactionValidityError, InvalidTransaction, TransactionValidity, - TransactionPriority, + InvalidTransaction, TransactionPriority, TransactionValidity, TransactionValidityError, + ValidTransaction, }, DispatchResult, }; -use frame_support::{ - traits::Get, - weights::{PostDispatchInfo, DispatchInfo, DispatchClass, priority::FrameTransactionPriority}, -}; /// Block resource (weight) limit check. #[derive(Encode, Decode, Clone, Eq, PartialEq, Default, TypeInfo)] #[scale_info(skip_type_params(T))] pub struct CheckWeight(sp_std::marker::PhantomData); -impl CheckWeight where - T::Call: Dispatchable, +impl CheckWeight +where + T::Call: Dispatchable, { /// Checks if the current extrinsic does not exceed the maximum weight a single extrinsic /// with given `DispatchClass` can have. 
@@ -46,9 +47,7 @@ impl CheckWeight where ) -> Result<(), TransactionValidityError> { let max = T::BlockWeights::get().get(info.class).max_extrinsic; match max { - Some(max) if info.weight > max => { - Err(InvalidTransaction::ExhaustsResources.into()) - }, + Some(max) if info.weight > max => Err(InvalidTransaction::ExhaustsResources.into()), _ => Ok(()), } } @@ -89,8 +88,7 @@ impl CheckWeight where fn get_priority(info: &DispatchInfoOf) -> TransactionPriority { match info.class { // Normal transaction. - DispatchClass::Normal => - FrameTransactionPriority::Normal(info.weight.into()).into(), + DispatchClass::Normal => FrameTransactionPriority::Normal(info.weight.into()).into(), // Don't use up the whole priority space, to allow things like `tip` to be taken into // account as well. DispatchClass::Operational => @@ -124,10 +122,7 @@ impl CheckWeight where /// Do the validate checks. This can be applied to both signed and unsigned. /// /// It only checks that the block weight and length limit will not exceed. - pub fn do_validate( - info: &DispatchInfoOf, - len: usize, - ) -> TransactionValidity { + pub fn do_validate(info: &DispatchInfoOf, len: usize) -> TransactionValidity { // ignore the next length. If they return `Ok`, then it is below the limit. let _ = Self::check_block_length(info, len)?; // during validation we skip block limit check. Since the `validate_transaction` @@ -143,17 +138,20 @@ pub fn calculate_consumed_weight( maximum_weight: BlockWeights, mut all_weight: crate::ConsumedWeight, info: &DispatchInfoOf, -) -> Result where - Call: Dispatchable, +) -> Result +where + Call: Dispatchable, { - let extrinsic_weight = info.weight.saturating_add(maximum_weight.get(info.class).base_extrinsic); + let extrinsic_weight = + info.weight.saturating_add(maximum_weight.get(info.class).base_extrinsic); let limit_per_class = maximum_weight.get(info.class); // add the weight. If class is unlimited, use saturating add instead of checked one. 
if limit_per_class.max_total.is_none() && limit_per_class.reserved.is_none() { all_weight.add(extrinsic_weight, info.class) } else { - all_weight.checked_add(extrinsic_weight, info.class) + all_weight + .checked_add(extrinsic_weight, info.class) .map_err(|_| InvalidTransaction::ExhaustsResources)?; } @@ -162,7 +160,7 @@ pub fn calculate_consumed_weight( // Check if we don't exceed per-class allowance match limit_per_class.max_total { Some(max) if per_class > max => { - return Err(InvalidTransaction::ExhaustsResources.into()); + return Err(InvalidTransaction::ExhaustsResources.into()) }, // There is no `max_total` limit (`None`), // or we are below the limit. @@ -175,8 +173,8 @@ pub fn calculate_consumed_weight( match limit_per_class.reserved { // We are over the limit in reserved pool. Some(reserved) if per_class > reserved => { - return Err(InvalidTransaction::ExhaustsResources.into()); - } + return Err(InvalidTransaction::ExhaustsResources.into()) + }, // There is either no limit in reserved pool (`None`), // or we are below the limit. 
_ => {}, @@ -186,8 +184,9 @@ pub fn calculate_consumed_weight( Ok(all_weight) } -impl SignedExtension for CheckWeight where - T::Call: Dispatchable +impl SignedExtension for CheckWeight +where + T::Call: Dispatchable, { type AccountId = T::AccountId; type Call = T::Call; @@ -195,7 +194,9 @@ impl SignedExtension for CheckWeight where type Pre = (); const IDENTIFIER: &'static str = "CheckWeight"; - fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { Ok(()) } + fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { + Ok(()) + } fn pre_dispatch( self, @@ -280,17 +281,24 @@ impl sp_std::fmt::Debug for CheckWeight { #[cfg(test)] mod tests { use super::*; - use crate::{BlockWeight, AllExtrinsicsLen}; - use crate::mock::{Test, CALL, new_test_ext, System}; + use crate::{ + mock::{new_test_ext, System, Test, CALL}, + AllExtrinsicsLen, BlockWeight, + }; + use frame_support::{ + assert_err, assert_ok, + weights::{Pays, Weight}, + }; use sp_std::marker::PhantomData; - use frame_support::{assert_err, assert_ok, weights::{Weight, Pays}}; fn block_weights() -> crate::limits::BlockWeights { ::BlockWeights::get() } fn normal_weight_limit() -> Weight { - block_weights().get(DispatchClass::Normal).max_total + block_weights() + .get(DispatchClass::Normal) + .max_total .unwrap_or_else(|| block_weights().max_block) } @@ -336,7 +344,10 @@ mod tests { ..Default::default() }; let len = 0_usize; - assert_err!(CheckWeight::::do_validate(&max, len), InvalidTransaction::ExhaustsResources); + assert_err!( + CheckWeight::::do_validate(&max, len), + InvalidTransaction::ExhaustsResources + ); }); } @@ -344,16 +355,15 @@ mod tests { fn operational_extrinsic_limited_by_operational_space_limit() { new_test_ext().execute_with(|| { let weights = block_weights(); - let operational_limit = weights.get(DispatchClass::Operational).max_total + let operational_limit = weights + .get(DispatchClass::Operational) + .max_total .unwrap_or_else(|| 
weights.max_block); let base_weight = weights.get(DispatchClass::Normal).base_extrinsic; let weight = operational_limit - base_weight; - let okay = DispatchInfo { - weight, - class: DispatchClass::Operational, - ..Default::default() - }; + let okay = + DispatchInfo { weight, class: DispatchClass::Operational, ..Default::default() }; let max = DispatchInfo { weight: weight + 1, class: DispatchClass::Operational, @@ -368,7 +378,10 @@ mod tests { ..Default::default() }) ); - assert_err!(CheckWeight::::do_validate(&max, len), InvalidTransaction::ExhaustsResources); + assert_err!( + CheckWeight::::do_validate(&max, len), + InvalidTransaction::ExhaustsResources + ); }); } @@ -390,7 +403,11 @@ mod tests { // So normal extrinsic can be 758 weight (-5 for base extrinsic weight) // And Operational can be 256 to produce a full block (-5 for base) let max_normal = DispatchInfo { weight: 753, ..Default::default() }; - let rest_operational = DispatchInfo { weight: 251, class: DispatchClass::Operational, ..Default::default() }; + let rest_operational = DispatchInfo { + weight: 251, + class: DispatchClass::Operational, + ..Default::default() + }; let len = 0_usize; @@ -409,7 +426,11 @@ mod tests { new_test_ext().execute_with(|| { // We switch the order of `full_block_with_normal_and_operational` let max_normal = DispatchInfo { weight: 753, ..Default::default() }; - let rest_operational = DispatchInfo { weight: 251, class: DispatchClass::Operational, ..Default::default() }; + let rest_operational = DispatchInfo { + weight: 251, + class: DispatchClass::Operational, + ..Default::default() + }; let len = 0_usize; @@ -427,17 +448,24 @@ mod tests { new_test_ext().execute_with(|| { // An on_initialize takes up the whole block! (Every time!) 
System::register_extra_weight_unchecked(Weight::max_value(), DispatchClass::Mandatory); - let dispatch_normal = DispatchInfo { weight: 251, class: DispatchClass::Normal, ..Default::default() }; - let dispatch_operational = DispatchInfo { weight: 251, class: DispatchClass::Operational, ..Default::default() }; + let dispatch_normal = + DispatchInfo { weight: 251, class: DispatchClass::Normal, ..Default::default() }; + let dispatch_operational = DispatchInfo { + weight: 251, + class: DispatchClass::Operational, + ..Default::default() + }; let len = 0_usize; - assert_err!( CheckWeight::::do_pre_dispatch(&dispatch_normal, len), + assert_err!( + CheckWeight::::do_pre_dispatch(&dispatch_normal, len), InvalidTransaction::ExhaustsResources ); // Thank goodness we can still do an operational transaction to possibly save the blockchain. assert_ok!(CheckWeight::::do_pre_dispatch(&dispatch_operational, len)); // Not too much though - assert_err!(CheckWeight::::do_pre_dispatch(&dispatch_operational, len), + assert_err!( + CheckWeight::::do_pre_dispatch(&dispatch_operational, len), InvalidTransaction::ExhaustsResources ); // Even with full block, validity of single transaction should be correct. @@ -449,7 +477,11 @@ mod tests { fn signed_ext_check_weight_works_operational_tx() { new_test_ext().execute_with(|| { let normal = DispatchInfo { weight: 100, ..Default::default() }; - let op = DispatchInfo { weight: 100, class: DispatchClass::Operational, pays_fee: Pays::Yes }; + let op = DispatchInfo { + weight: 100, + class: DispatchClass::Operational, + pays_fee: Pays::Yes, + }; let len = 0_usize; let normal_limit = normal_weight_limit(); @@ -458,7 +490,8 @@ mod tests { current_weight.set(normal_limit, DispatchClass::Normal) }); // will not fit. - assert_err!(CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &normal, len), + assert_err!( + CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &normal, len), InvalidTransaction::ExhaustsResources ); // will fit. 
@@ -467,7 +500,8 @@ mod tests { // likewise for length limit. let len = 100_usize; AllExtrinsicsLen::::put(normal_length_limit()); - assert_err!(CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &normal, len), + assert_err!( + CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &normal, len), InvalidTransaction::ExhaustsResources ); assert_ok!(CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &op, len)); @@ -477,8 +511,13 @@ mod tests { #[test] fn signed_ext_check_weight_works() { new_test_ext().execute_with(|| { - let normal = DispatchInfo { weight: 100, class: DispatchClass::Normal, pays_fee: Pays::Yes }; - let op = DispatchInfo { weight: 100, class: DispatchClass::Operational, pays_fee: Pays::Yes }; + let normal = + DispatchInfo { weight: 100, class: DispatchClass::Normal, pays_fee: Pays::Yes }; + let op = DispatchInfo { + weight: 100, + class: DispatchClass::Operational, + pays_fee: Pays::Yes, + }; let len = 0_usize; let priority = CheckWeight::(PhantomData) @@ -487,10 +526,8 @@ mod tests { .priority; assert_eq!(priority, 100); - let priority = CheckWeight::(PhantomData) - .validate(&1, CALL, &op, len) - .unwrap() - .priority; + let priority = + CheckWeight::(PhantomData).validate(&1, CALL, &op, len).unwrap().priority; assert_eq!(priority, frame_support::weights::priority::LIMIT + 100); }) } @@ -503,7 +540,11 @@ mod tests { let reset_check_weight = |tx, s, f| { AllExtrinsicsLen::::put(0); let r = CheckWeight::(PhantomData).pre_dispatch(&1, CALL, tx, s); - if f { assert!(r.is_err()) } else { assert!(r.is_ok()) } + if f { + assert!(r.is_err()) + } else { + assert!(r.is_ok()) + } }; reset_check_weight(&normal, normal_limit - 1, false); @@ -511,7 +552,8 @@ mod tests { reset_check_weight(&normal, normal_limit + 1, true); // Operational ones don't have this limit. 
- let op = DispatchInfo { weight: 0, class: DispatchClass::Operational, pays_fee: Pays::Yes }; + let op = + DispatchInfo { weight: 0, class: DispatchClass::Operational, pays_fee: Pays::Yes }; reset_check_weight(&op, normal_limit, false); reset_check_weight(&op, normal_limit + 100, false); reset_check_weight(&op, 1024, false); @@ -519,21 +561,16 @@ mod tests { }) } - #[test] fn signed_ext_check_weight_works_normal_tx() { new_test_ext().execute_with(|| { let normal_limit = normal_weight_limit(); let small = DispatchInfo { weight: 100, ..Default::default() }; let base_extrinsic = block_weights().get(DispatchClass::Normal).base_extrinsic; - let medium = DispatchInfo { - weight: normal_limit - base_extrinsic, - ..Default::default() - }; - let big = DispatchInfo { - weight: normal_limit - base_extrinsic + 1, - ..Default::default() - }; + let medium = + DispatchInfo { weight: normal_limit - base_extrinsic, ..Default::default() }; + let big = + DispatchInfo { weight: normal_limit - base_extrinsic + 1, ..Default::default() }; let len = 0_usize; let reset_check_weight = |i, f, s| { @@ -541,7 +578,11 @@ mod tests { current_weight.set(s, DispatchClass::Normal) }); let r = CheckWeight::(PhantomData).pre_dispatch(&1, CALL, i, len); - if f { assert!(r.is_err()) } else { assert!(r.is_ok()) } + if f { + assert!(r.is_err()) + } else { + assert!(r.is_ok()) + } }; reset_check_weight(&small, false, 0); @@ -555,10 +596,8 @@ mod tests { new_test_ext().execute_with(|| { // This is half of the max block weight let info = DispatchInfo { weight: 512, ..Default::default() }; - let post_info = PostDispatchInfo { - actual_weight: Some(128), - pays_fee: Default::default(), - }; + let post_info = + PostDispatchInfo { actual_weight: Some(128), pays_fee: Default::default() }; let len = 0_usize; let base_extrinsic = block_weights().get(DispatchClass::Normal).base_extrinsic; @@ -571,11 +610,8 @@ mod tests { let pre = CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &info, len).unwrap(); 
assert_eq!(BlockWeight::::get().total(), info.weight + 256); - assert_ok!( CheckWeight::::post_dispatch(pre, &info, &post_info, len, &Ok(()))); - assert_eq!( - BlockWeight::::get().total(), - post_info.actual_weight.unwrap() + 256, - ); + assert_ok!(CheckWeight::::post_dispatch(pre, &info, &post_info, len, &Ok(()))); + assert_eq!(BlockWeight::::get().total(), post_info.actual_weight.unwrap() + 256,); }) } @@ -583,10 +619,8 @@ mod tests { fn signed_ext_check_weight_actual_weight_higher_than_max_is_capped() { new_test_ext().execute_with(|| { let info = DispatchInfo { weight: 512, ..Default::default() }; - let post_info = PostDispatchInfo { - actual_weight: Some(700), - pays_fee: Default::default(), - }; + let post_info = + PostDispatchInfo { actual_weight: Some(700), pays_fee: Default::default() }; let len = 0_usize; BlockWeight::::mutate(|current_weight| { @@ -616,10 +650,7 @@ mod tests { let len = 0_usize; // Initial weight from `weights.base_block` - assert_eq!( - System::block_weight().total(), - weights.base_block - ); + assert_eq!(System::block_weight().total(), weights.base_block); assert_ok!(CheckWeight::(PhantomData).pre_dispatch(&1, CALL, &free, len)); assert_eq!( System::block_weight().total(), @@ -635,7 +666,11 @@ mod tests { // Max normal is 768 (75%) // Max mandatory is unlimited let max_normal = DispatchInfo { weight: 753, ..Default::default() }; - let mandatory = DispatchInfo { weight: 1019, class: DispatchClass::Mandatory, ..Default::default() }; + let mandatory = DispatchInfo { + weight: 1019, + class: DispatchClass::Mandatory, + ..Default::default() + }; let len = 0_usize; @@ -671,18 +706,24 @@ mod tests { assert_eq!(maximum_weight.max_block, all_weight.total()); // fits into reserved - let mandatory1 = DispatchInfo { weight: 5, class: DispatchClass::Mandatory, ..Default::default() }; + let mandatory1 = + DispatchInfo { weight: 5, class: DispatchClass::Mandatory, ..Default::default() }; // does not fit into reserved and the block is full. 
- let mandatory2 = DispatchInfo { weight: 6, class: DispatchClass::Mandatory, ..Default::default() }; + let mandatory2 = + DispatchInfo { weight: 6, class: DispatchClass::Mandatory, ..Default::default() }; // when - assert_ok!( - calculate_consumed_weight::<::Call>( - maximum_weight.clone(), all_weight.clone(), &mandatory1 - ) - ); + assert_ok!(calculate_consumed_weight::<::Call>( + maximum_weight.clone(), + all_weight.clone(), + &mandatory1 + )); assert_err!( - calculate_consumed_weight::<::Call>( maximum_weight, all_weight, &mandatory2), + calculate_consumed_weight::<::Call>( + maximum_weight, + all_weight, + &mandatory2 + ), InvalidTransaction::ExhaustsResources ); } diff --git a/frame/system/src/extensions/mod.rs b/frame/system/src/extensions/mod.rs index 8b6c9b49e4d6b..0af9722e475d1 100644 --- a/frame/system/src/extensions/mod.rs +++ b/frame/system/src/extensions/mod.rs @@ -21,4 +21,3 @@ pub mod check_nonce; pub mod check_spec_version; pub mod check_tx_version; pub mod check_weight; - diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index 62536a2a88e3d..da28eac180952 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -66,58 +66,56 @@ #[cfg(feature = "std")] use serde::Serialize; -use sp_std::prelude::*; -#[cfg(any(feature = "std", test))] -use sp_std::map; -use sp_std::marker::PhantomData; -use sp_std::fmt::Debug; -use sp_version::RuntimeVersion; use sp_runtime::{ - RuntimeDebug, Perbill, DispatchError, Either, generic, + generic, traits::{ - self, CheckEqual, AtLeast32Bit, Zero, Lookup, LookupError, - SimpleBitOps, Hash, Member, MaybeDisplay, BadOrigin, - MaybeSerializeDeserialize, MaybeMallocSizeOf, StaticLookup, One, Bounded, - Dispatchable, AtLeast32BitUnsigned, Saturating, BlockNumberProvider, + self, AtLeast32Bit, AtLeast32BitUnsigned, BadOrigin, BlockNumberProvider, Bounded, + CheckEqual, Dispatchable, Hash, Lookup, LookupError, MaybeDisplay, MaybeMallocSizeOf, + MaybeSerializeDeserialize, Member, One, Saturating, 
SimpleBitOps, StaticLookup, Zero, }, + DispatchError, Either, Perbill, RuntimeDebug, }; +#[cfg(any(feature = "std", test))] +use sp_std::map; +use sp_std::{fmt::Debug, marker::PhantomData, prelude::*}; +use sp_version::RuntimeVersion; -use sp_core::{ChangesTrieConfiguration, storage::well_known_keys}; +use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen}; use frame_support::{ - Parameter, storage, + dispatch::{DispatchResult, DispatchResultWithPostInfo}, + storage, traits::{ - SortedMembers, Get, PalletInfo, OnNewAccount, OnKilledAccount, HandleLifetime, - StoredMap, EnsureOrigin, OriginTrait, Filter, + EnsureOrigin, Filter, Get, HandleLifetime, OnKilledAccount, OnNewAccount, OriginTrait, + PalletInfo, SortedMembers, StoredMap, }, weights::{ - Weight, RuntimeDbWeight, DispatchInfo, DispatchClass, - extract_actual_weight, PerDispatchClass, + extract_actual_weight, DispatchClass, DispatchInfo, PerDispatchClass, RuntimeDbWeight, + Weight, }, - dispatch::{DispatchResultWithPostInfo, DispatchResult}, + Parameter, }; -use codec::{Encode, Decode, FullCodec, EncodeLike, MaxEncodedLen}; use scale_info::TypeInfo; +use sp_core::{storage::well_known_keys, ChangesTrieConfiguration}; #[cfg(feature = "std")] use frame_support::traits::GenesisBuild; #[cfg(any(feature = "std", test))] use sp_io::TestExternalities; -pub mod offchain; pub mod limits; #[cfg(test)] pub(crate) mod mock; +pub mod offchain; mod extensions; -pub mod weights; -#[cfg(test)] -mod tests; #[cfg(feature = "std")] pub mod mocking; - +#[cfg(test)] +mod tests; +pub mod weights; pub use extensions::{ - check_mortality::CheckMortality, check_genesis::CheckGenesis, check_nonce::CheckNonce, + check_genesis::CheckGenesis, check_mortality::CheckMortality, check_nonce::CheckNonce, check_spec_version::CheckSpecVersion, check_tx_version::CheckTxVersion, check_weight::CheckWeight, }; @@ -155,7 +153,7 @@ impl SetCode for () { #[frame_support::pallet] pub mod pallet { - use crate::{*, pallet_prelude::*, self as 
frame_system}; + use crate::{self as frame_system, pallet_prelude::*, *}; use frame_support::pallet_prelude::*; /// System configuration trait. Implemented by runtime. @@ -175,39 +173,69 @@ pub mod pallet { type BlockLength: Get; /// The `Origin` type used by dispatchable calls. - type Origin: - Into, Self::Origin>> + type Origin: Into, Self::Origin>> + From> + Clone - + OriginTrait; + + OriginTrait; /// The aggregated `Call` type. type Call: Dispatchable + Debug; /// Account index (aka nonce) type. This stores the number of previous transactions associated /// with a sender account. - type Index: - Parameter + Member + MaybeSerializeDeserialize + Debug + Default + MaybeDisplay + AtLeast32Bit + type Index: Parameter + + Member + + MaybeSerializeDeserialize + + Debug + + Default + + MaybeDisplay + + AtLeast32Bit + Copy; /// The block number type used by the runtime. - type BlockNumber: - Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + - AtLeast32BitUnsigned + Default + Bounded + Copy + sp_std::hash::Hash + - sp_std::str::FromStr + MaybeMallocSizeOf + MaxEncodedLen; + type BlockNumber: Parameter + + Member + + MaybeSerializeDeserialize + + Debug + + MaybeDisplay + + AtLeast32BitUnsigned + + Default + + Bounded + + Copy + + sp_std::hash::Hash + + sp_std::str::FromStr + + MaybeMallocSizeOf + + MaxEncodedLen; /// The output of the `Hashing` function. - type Hash: - Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + SimpleBitOps + Ord - + Default + Copy + CheckEqual + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> - + MaybeMallocSizeOf + MaxEncodedLen; + type Hash: Parameter + + Member + + MaybeSerializeDeserialize + + Debug + + MaybeDisplay + + SimpleBitOps + + Ord + + Default + + Copy + + CheckEqual + + sp_std::hash::Hash + + AsRef<[u8]> + + AsMut<[u8]> + + MaybeMallocSizeOf + + MaxEncodedLen; /// The hashing system (algorithm) being used in the runtime (e.g. Blake2). 
- type Hashing: Hash + TypeInfo; + type Hashing: Hash + TypeInfo; /// The user account identifier type for the runtime. - type AccountId: Parameter + Member + MaybeSerializeDeserialize + Debug + MaybeDisplay + Ord - + Default + MaxEncodedLen; + type AccountId: Parameter + + Member + + MaybeSerializeDeserialize + + Debug + + MaybeDisplay + + Ord + + Default + + MaxEncodedLen; /// Converting trait to take a source type and convert to `AccountId`. /// @@ -215,16 +243,17 @@ pub mod pallet { /// It's perfectly reasonable for this to be an identity conversion (with the source type being /// `AccountId`), but other pallets (e.g. Indices pallet) may provide more functional/efficient /// alternatives. - type Lookup: StaticLookup; + type Lookup: StaticLookup; /// The block header. - type Header: Parameter + traits::Header< - Number=Self::BlockNumber, - Hash=Self::Hash, - >; + type Header: Parameter + traits::Header; /// The aggregated event type of the runtime. - type Event: Parameter + Member + From> + Debug + IsType<::Event>; + type Event: Parameter + + Member + + From> + + Debug + + IsType<::Event>; /// Maximum number of block number to block hash mappings to keep (oldest pruned first). #[pallet::constant] @@ -289,9 +318,7 @@ pub mod pallet { } fn integrity_test() { - T::BlockWeights::get() - .validate() - .expect("The weights are invalid."); + T::BlockWeights::get().validate().expect("The weights are invalid."); } } @@ -414,7 +441,10 @@ pub mod pallet { T::SystemWeightInfo::set_storage(items.len() as u32), DispatchClass::Operational, ))] - pub fn set_storage(origin: OriginFor, items: Vec) -> DispatchResultWithPostInfo { + pub fn set_storage( + origin: OriginFor, + items: Vec, + ) -> DispatchResultWithPostInfo { ensure_root(origin)?; for i in &items { storage::unhashed::put_raw(&i.0, &i.1); @@ -474,7 +504,10 @@ pub mod pallet { /// - 1 event. 
/// # #[pallet::weight(T::SystemWeightInfo::remark_with_event(remark.len() as u32))] - pub fn remark_with_event(origin: OriginFor, remark: Vec) -> DispatchResultWithPostInfo { + pub fn remark_with_event( + origin: OriginFor, + remark: Vec, + ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; let hash = T::Hashing::hash(&remark[..]); Self::deposit_event(Event::Remarked(who, hash)); @@ -580,8 +613,7 @@ pub mod pallet { /// Events deposited for the current block. #[pallet::storage] #[pallet::getter(fn events)] - pub type Events = - StorageValue<_, Vec>, ValueQuery>; + pub type Events = StorageValue<_, Vec>, ValueQuery>; /// The number of events in the `Events` list. #[pallet::storage] @@ -630,10 +662,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - Self { - changes_trie_config: Default::default(), - code: Default::default(), - } + Self { changes_trie_config: Default::default(), code: Default::default() } } } @@ -649,7 +678,10 @@ pub mod pallet { sp_io::storage::set(well_known_keys::CODE, &self.code); sp_io::storage::set(well_known_keys::EXTRINSIC_INDEX, &0u32.encode()); if let Some(ref changes_trie_config) = self.changes_trie_config { - sp_io::storage::set(well_known_keys::CHANGES_TRIE_CONFIG, &changes_trie_config.encode()); + sp_io::storage::set( + well_known_keys::CHANGES_TRIE_CONFIG, + &changes_trie_config.encode(), + ); } } } @@ -661,17 +693,25 @@ pub mod migrations { #[allow(dead_code)] /// Migrate from unique `u8` reference counting to triple `u32` reference counting. 
pub fn migrate_all() -> frame_support::weights::Weight { - Account::::translate::<(T::Index, u8, T::AccountData), _>(|_key, (nonce, rc, data)| - Some(AccountInfo { nonce, consumers: rc as RefCount, providers: 1, sufficients: 0, data }) - ); + Account::::translate::<(T::Index, u8, T::AccountData), _>(|_key, (nonce, rc, data)| { + Some(AccountInfo { + nonce, + consumers: rc as RefCount, + providers: 1, + sufficients: 0, + data, + }) + }); T::BlockWeights::get().max_block } #[allow(dead_code)] /// Migrate from unique `u32` reference counting to triple `u32` reference counting. pub fn migrate_to_dual_ref_count() -> frame_support::weights::Weight { - Account::::translate::<(T::Index, RefCount, T::AccountData), _>(|_key, (nonce, consumers, data)| - Some(AccountInfo { nonce, consumers, providers: 1, sufficients: 0, data }) + Account::::translate::<(T::Index, RefCount, T::AccountData), _>( + |_key, (nonce, consumers, data)| { + Some(AccountInfo { nonce, consumers, providers: 1, sufficients: 0, data }) + }, ); T::BlockWeights::get().max_block } @@ -681,7 +721,7 @@ pub mod migrations { Account::::translate::<(T::Index, RefCount, RefCount, T::AccountData), _>( |_key, (nonce, consumers, providers, data)| { Some(AccountInfo { nonce, consumers, providers, sufficients: 0, data }) - } + }, ); T::BlockWeights::get().max_block } @@ -701,7 +741,7 @@ impl GenesisConfig { /// Kept in order not to break dependency. 
pub fn assimilate_storage( &self, - storage: &mut sp_runtime::Storage + storage: &mut sp_runtime::Storage, ) -> Result<(), String> { >::assimilate_storage(self, storage) } @@ -822,18 +862,14 @@ impl LastRuntimeUpgradeInfo { impl From for LastRuntimeUpgradeInfo { fn from(version: sp_version::RuntimeVersion) -> Self { - Self { - spec_version: version.spec_version.into(), - spec_name: version.spec_name, - } + Self { spec_version: version.spec_version.into(), spec_name: version.spec_name } } } pub struct EnsureRoot(sp_std::marker::PhantomData); -impl< - O: Into, O>> + From>, - AccountId, -> EnsureOrigin for EnsureRoot { +impl, O>> + From>, AccountId> + EnsureOrigin for EnsureRoot +{ type Success = (); fn try_origin(o: O) -> Result { o.into().and_then(|o| match o { @@ -849,10 +885,9 @@ impl< } pub struct EnsureSigned(sp_std::marker::PhantomData); -impl< - O: Into, O>> + From>, - AccountId: Default, -> EnsureOrigin for EnsureSigned { +impl, O>> + From>, AccountId: Default> + EnsureOrigin for EnsureSigned +{ type Success = AccountId; fn try_origin(o: O) -> Result { o.into().and_then(|o| match o { @@ -869,10 +904,11 @@ impl< pub struct EnsureSignedBy(sp_std::marker::PhantomData<(Who, AccountId)>); impl< - O: Into, O>> + From>, - Who: SortedMembers, - AccountId: PartialEq + Clone + Ord + Default, -> EnsureOrigin for EnsureSignedBy { + O: Into, O>> + From>, + Who: SortedMembers, + AccountId: PartialEq + Clone + Ord + Default, + > EnsureOrigin for EnsureSignedBy +{ type Success = AccountId; fn try_origin(o: O) -> Result { o.into().and_then(|o| match o { @@ -893,10 +929,9 @@ impl< } pub struct EnsureNone(sp_std::marker::PhantomData); -impl< - O: Into, O>> + From>, - AccountId, -> EnsureOrigin for EnsureNone { +impl, O>> + From>, AccountId> + EnsureOrigin for EnsureNone +{ type Success = (); fn try_origin(o: O) -> Result { o.into().and_then(|o| match o { @@ -929,17 +964,16 @@ impl EnsureOrigin for EnsureNever { /// Origin check will pass if `L` or `R` origin check passes. 
`L` is tested first. pub struct EnsureOneOf(sp_std::marker::PhantomData<(AccountId, L, R)>); impl< - AccountId, - O: Into, O>> + From>, - L: EnsureOrigin, - R: EnsureOrigin, -> EnsureOrigin for EnsureOneOf { + AccountId, + O: Into, O>> + From>, + L: EnsureOrigin, + R: EnsureOrigin, + > EnsureOrigin for EnsureOneOf +{ type Success = Either; fn try_origin(o: O) -> Result { - L::try_origin(o).map_or_else( - |o| R::try_origin(o).map(|o| Either::Right(o)), - |o| Ok(Either::Left(o)), - ) + L::try_origin(o) + .map_or_else(|o| R::try_origin(o).map(|o| Either::Right(o)), |o| Ok(Either::Left(o))) } #[cfg(feature = "runtime-benchmarks")] @@ -951,7 +985,8 @@ impl< /// Ensure that the origin `o` represents a signed extrinsic (i.e. transaction). /// Returns `Ok` with the account that signed the extrinsic or an `Err` otherwise. pub fn ensure_signed(o: OuterOrigin) -> Result - where OuterOrigin: Into, OuterOrigin>> +where + OuterOrigin: Into, OuterOrigin>>, { match o.into() { Ok(RawOrigin::Signed(t)) => Ok(t), @@ -961,7 +996,8 @@ pub fn ensure_signed(o: OuterOrigin) -> Result(o: OuterOrigin) -> Result<(), BadOrigin> - where OuterOrigin: Into, OuterOrigin>> +where + OuterOrigin: Into, OuterOrigin>>, { match o.into() { Ok(RawOrigin::Root) => Ok(()), @@ -971,7 +1007,8 @@ pub fn ensure_root(o: OuterOrigin) -> Result<(), BadOrig /// Ensure that the origin `o` represents an unsigned extrinsic. Returns `Ok` or an `Err` otherwise. pub fn ensure_none(o: OuterOrigin) -> Result<(), BadOrigin> - where OuterOrigin: Into, OuterOrigin>> +where + OuterOrigin: Into, OuterOrigin>>, { match o.into() { Ok(RawOrigin::None) => Ok(()), @@ -1057,14 +1094,16 @@ impl Pallet { /// Increment the provider reference counter on an account. pub fn inc_providers(who: &T::AccountId) -> IncRefStatus { - Account::::mutate(who, |a| if a.providers == 0 && a.sufficients == 0 { - // Account is being created. 
- a.providers = 1; - Self::on_created_account(who.clone(), a); - IncRefStatus::Created - } else { - a.providers = a.providers.saturating_add(1); - IncRefStatus::Existed + Account::::mutate(who, |a| { + if a.providers == 0 && a.sufficients == 0 { + // Account is being created. + a.providers = 1; + Self::on_created_account(who.clone(), a); + IncRefStatus::Created + } else { + a.providers = a.providers.saturating_add(1); + IncRefStatus::Existed + } }) } @@ -1088,18 +1127,18 @@ impl Pallet { Pallet::::on_killed_account(who.clone()); Ok(DecRefStatus::Reaped) - } + }, (1, c, _) if c > 0 => { // Cannot remove last provider if there are consumers. Err(DispatchError::ConsumerRemaining) - } + }, (x, _, _) => { // Account will continue to exist as there is either > 1 provider or // > 0 sufficients. account.providers = x - 1; *maybe_account = Some(account); Ok(DecRefStatus::Exists) - } + }, } } else { log::error!( @@ -1113,14 +1152,16 @@ impl Pallet { /// Increment the self-sufficient reference counter on an account. pub fn inc_sufficients(who: &T::AccountId) -> IncRefStatus { - Account::::mutate(who, |a| if a.providers + a.sufficients == 0 { - // Account is being created. - a.sufficients = 1; - Self::on_created_account(who.clone(), a); - IncRefStatus::Created - } else { - a.sufficients = a.sufficients.saturating_add(1); - IncRefStatus::Existed + Account::::mutate(who, |a| { + if a.providers + a.sufficients == 0 { + // Account is being created. 
+ a.sufficients = 1; + Self::on_created_account(who.clone(), a); + IncRefStatus::Created + } else { + a.sufficients = a.sufficients.saturating_add(1); + IncRefStatus::Existed + } }) } @@ -1141,12 +1182,12 @@ impl Pallet { (0, 0) | (1, 0) => { Pallet::::on_killed_account(who.clone()); DecRefStatus::Reaped - } + }, (x, _) => { account.sufficients = x - 1; *maybe_account = Some(account); DecRefStatus::Exists - } + }, } } else { log::error!( @@ -1178,24 +1219,28 @@ impl Pallet { /// /// The account `who`'s `providers` must be non-zero or this will return an error. pub fn inc_consumers(who: &T::AccountId) -> Result<(), DispatchError> { - Account::::try_mutate(who, |a| if a.providers > 0 { - a.consumers = a.consumers.saturating_add(1); - Ok(()) - } else { - Err(DispatchError::NoProviders) + Account::::try_mutate(who, |a| { + if a.providers > 0 { + a.consumers = a.consumers.saturating_add(1); + Ok(()) + } else { + Err(DispatchError::NoProviders) + } }) } /// Decrement the reference counter on an account. This *MUST* only be done once for every time /// you called `inc_consumers` on `who`. pub fn dec_consumers(who: &T::AccountId) { - Account::::mutate(who, |a| if a.consumers > 0 { - a.consumers -= 1; - } else { - log::error!( - target: "runtime::system", - "Logic error: Unexpected underflow in reducing consumer", - ); + Account::::mutate(who, |a| { + if a.consumers > 0 { + a.consumers -= 1; + } else { + log::error!( + target: "runtime::system", + "Logic error: Unexpected underflow in reducing consumer", + ); + } }) } @@ -1233,14 +1278,13 @@ impl Pallet { pub fn deposit_event_indexed(topics: &[T::Hash], event: T::Event) { let block_number = Self::block_number(); // Don't populate events on genesis. 
- if block_number.is_zero() { return } + if block_number.is_zero() { + return + } let phase = ExecutionPhase::::get().unwrap_or_default(); - let event = EventRecord { - phase, - event, - topics: topics.iter().cloned().collect::>(), - }; + let event = + EventRecord { phase, event, topics: topics.iter().cloned().collect::>() }; // Index of the to be added event. let event_idx = { @@ -1366,12 +1410,18 @@ impl Pallet { if let Some(storage_changes_root) = storage_changes_root { let item = generic::DigestItem::ChangesTrieRoot( T::Hash::decode(&mut &storage_changes_root[..]) - .expect("Node is configured to use the same hash; qed") + .expect("Node is configured to use the same hash; qed"), ); digest.push(item); } - ::new(number, extrinsics_root, storage_root, parent_hash, digest) + ::new( + number, + extrinsics_root, + storage_root, + parent_hash, + digest, + ) } /// Deposits a log and ensures it matches the block's log data. @@ -1448,7 +1498,9 @@ impl Pallet { } /// Return the chain's current runtime version. - pub fn runtime_version() -> RuntimeVersion { T::Version::get() } + pub fn runtime_version() -> RuntimeVersion { + T::Version::get() + } /// Retrieve the account transaction counter from storage. pub fn account_nonce(who: impl EncodeLike) -> T::Index { @@ -1471,20 +1523,18 @@ impl Pallet { /// To be called immediately after an extrinsic has been applied. 
pub fn note_applied_extrinsic(r: &DispatchResultWithPostInfo, mut info: DispatchInfo) { info.weight = extract_actual_weight(r, &info); - Self::deposit_event( - match r { - Ok(_) => Event::ExtrinsicSuccess(info), - Err(err) => { - log::trace!( - target: "runtime::system", - "Extrinsic failed at block({:?}): {:?}", - Self::block_number(), - err, - ); - Event::ExtrinsicFailed(err.error, info) - }, - } - ); + Self::deposit_event(match r { + Ok(_) => Event::ExtrinsicSuccess(info), + Err(err) => { + log::trace!( + target: "runtime::system", + "Extrinsic failed at block({:?}): {:?}", + Self::block_number(), + err, + ); + Event::ExtrinsicFailed(err.error, info) + }, + }); let next_extrinsic_index = Self::extrinsic_index().unwrap_or_default() + 1u32; @@ -1495,8 +1545,8 @@ impl Pallet { /// To be called immediately after `note_applied_extrinsic` of the last extrinsic of the block /// has been called. pub fn note_finished_extrinsics() { - let extrinsic_index: u32 = storage::unhashed::take(well_known_keys::EXTRINSIC_INDEX) - .unwrap_or_default(); + let extrinsic_index: u32 = + storage::unhashed::take(well_known_keys::EXTRINSIC_INDEX).unwrap_or_default(); ExtrinsicCount::::put(extrinsic_index); ExecutionPhase::::put(Phase::Finalization); } @@ -1579,8 +1629,7 @@ impl HandleLifetime for Consumer { } } -impl BlockNumberProvider for Pallet -{ +impl BlockNumberProvider for Pallet { type BlockNumber = ::BlockNumber; fn current_block_number() -> Self::BlockNumber { @@ -1618,7 +1667,7 @@ impl StoredMap for Pallet { DecRefStatus::Reaped => return Ok(result), DecRefStatus::Exists => { // Update value as normal... - } + }, } } else if !was_providing && !is_providing { return Ok(result) @@ -1629,14 +1678,15 @@ impl StoredMap for Pallet { } /// Split an `option` into two constituent options, as defined by a `splitter` function. 
-pub fn split_inner(option: Option, splitter: impl FnOnce(T) -> (R, S)) - -> (Option, Option) -{ +pub fn split_inner( + option: Option, + splitter: impl FnOnce(T) -> (R, S), +) -> (Option, Option) { match option { Some(inner) => { let (r, s) = splitter(inner); (Some(r), Some(s)) - } + }, None => (None, None), } } @@ -1659,7 +1709,7 @@ impl Lookup for ChainContext { /// Prelude to be used alongside pallet macro, for ease of use. pub mod pallet_prelude { - pub use crate::{ensure_signed, ensure_none, ensure_root}; + pub use crate::{ensure_none, ensure_root, ensure_signed}; /// Type alias for the `Origin` associated type of system config. pub type OriginFor = ::Origin; diff --git a/frame/system/src/limits.rs b/frame/system/src/limits.rs index 3d1b26fe3df42..687fb6f3dd367 100644 --- a/frame/system/src/limits.rs +++ b/frame/system/src/limits.rs @@ -25,9 +25,9 @@ //! `DispatchClass`. This module contains configuration object for both resources, //! which should be passed to `frame_system` configuration when runtime is being set up. -use frame_support::weights::{Weight, DispatchClass, constants, PerDispatchClass, OneOrMany}; +use frame_support::weights::{constants, DispatchClass, OneOrMany, PerDispatchClass, Weight}; use scale_info::TypeInfo; -use sp_runtime::{RuntimeDebug, Perbill}; +use sp_runtime::{Perbill, RuntimeDebug}; /// Block length limit configuration. #[derive(RuntimeDebug, Clone, codec::Encode, codec::Decode, TypeInfo)] @@ -41,29 +41,26 @@ pub struct BlockLength { impl Default for BlockLength { fn default() -> Self { - BlockLength::max_with_normal_ratio( - 5 * 1024 * 1024, - DEFAULT_NORMAL_RATIO, - ) + BlockLength::max_with_normal_ratio(5 * 1024 * 1024, DEFAULT_NORMAL_RATIO) } } impl BlockLength { /// Create new `BlockLength` with `max` for every class. 
pub fn max(max: u32) -> Self { - Self { - max: PerDispatchClass::new(|_| max), - } + Self { max: PerDispatchClass::new(|_| max) } } /// Create new `BlockLength` with `max` for `Operational` & `Mandatory` /// and `normal * max` for `Normal`. pub fn max_with_normal_ratio(max: u32, normal: Perbill) -> Self { Self { - max: PerDispatchClass::new(|class| if class == DispatchClass::Normal { - normal * max - } else { - max + max: PerDispatchClass::new(|class| { + if class == DispatchClass::Normal { + normal * max + } else { + max + } }), } } @@ -207,10 +204,7 @@ pub struct BlockWeights { impl Default for BlockWeights { fn default() -> Self { - Self::with_sensible_defaults( - 1 * constants::WEIGHT_PER_SECOND, - DEFAULT_NORMAL_RATIO, - ) + Self::with_sensible_defaults(1 * constants::WEIGHT_PER_SECOND, DEFAULT_NORMAL_RATIO) } } @@ -246,7 +240,8 @@ impl BlockWeights { weights.max_extrinsic.unwrap_or(0) <= max_for_class.saturating_sub(base_for_class), &mut error, "[{:?}] {:?} (max_extrinsic) can't be greater than {:?} (max for class)", - class, weights.max_extrinsic, + class, + weights.max_extrinsic, max_for_class.saturating_sub(base_for_class), ); // Max extrinsic should not be 0 @@ -261,21 +256,27 @@ impl BlockWeights { reserved > base_for_class || reserved == 0, &mut error, "[{:?}] {:?} (reserved) has to be greater than {:?} (base extrinsic) if set", - class, reserved, base_for_class, + class, + reserved, + base_for_class, ); // Make sure max block is greater than max_total if it's set. error_assert!( self.max_block >= weights.max_total.unwrap_or(0), &mut error, "[{:?}] {:?} (max block) has to be greater than {:?} (max for class)", - class, self.max_block, weights.max_total, + class, + self.max_block, + weights.max_total, ); // Make sure we can fit at least one extrinsic. 
error_assert!( self.max_block > base_for_class + self.base_block, &mut error, "[{:?}] {:?} (max block) must fit at least one extrinsic {:?} (base weight)", - class, self.max_block, base_for_class + self.base_block, + class, + self.max_block, + base_for_class + self.base_block, ); } @@ -310,10 +311,7 @@ impl BlockWeights { /// Assumptions: /// - Average block initialization is assumed to be `10%`. /// - `Operational` transactions have reserved allowance (`1.0 - normal_ratio`) - pub fn with_sensible_defaults( - expected_block_weight: Weight, - normal_ratio: Perbill, - ) -> Self { + pub fn with_sensible_defaults(expected_block_weight: Weight, normal_ratio: Perbill) -> Self { let normal_weight = normal_ratio * expected_block_weight; Self::builder() .for_class(DispatchClass::Normal, |weights| { @@ -389,7 +387,7 @@ impl BlockWeightsBuilder { for class in class.into_iter() { action(self.weights.per_class.get_mut(class)); } - self + self } /// Construct the `BlockWeights` object. @@ -409,7 +407,8 @@ impl BlockWeightsBuilder { for class in DispatchClass::all() { let per_class = weights.per_class.get_mut(*class); if per_class.max_extrinsic.is_none() && init_cost.is_some() { - per_class.max_extrinsic = per_class.max_total + per_class.max_extrinsic = per_class + .max_total .map(|x| x.saturating_sub(init_weight)) .map(|x| x.saturating_sub(per_class.base_extrinsic)); } @@ -436,8 +435,6 @@ mod tests { #[test] fn default_weights_are_valid() { - BlockWeights::default() - .validate() - .unwrap(); + BlockWeights::default().validate().unwrap(); } } diff --git a/frame/system/src/mock.rs b/frame/system/src/mock.rs index e9b6fb7d968ec..480e8b1a26bae 100644 --- a/frame/system/src/mock.rs +++ b/frame/system/src/mock.rs @@ -16,13 +16,14 @@ // limitations under the License. 
use crate::{self as frame_system, *}; -use sp_std::cell::RefCell; +use frame_support::parameter_types; use sp_core::H256; use sp_runtime::{ + testing::Header, traits::{BlakeTwo256, IdentityLookup}, - testing::Header, BuildStorage, + BuildStorage, }; -use frame_support::parameter_types; +use sp_std::cell::RefCell; type UncheckedExtrinsic = mocking::MockUncheckedExtrinsic; type Block = mocking::MockBlock; @@ -75,13 +76,15 @@ parameter_types! { limits::BlockLength::max_with_normal_ratio(1024, NORMAL_DISPATCH_RATIO); } -thread_local!{ +thread_local! { pub static KILLED: RefCell> = RefCell::new(vec![]); } pub struct RecordKilled; impl OnKilledAccount for RecordKilled { - fn on_killed_account(who: &u64) { KILLED.with(|r| r.borrow_mut().push(*who)) } + fn on_killed_account(who: &u64) { + KILLED.with(|r| r.borrow_mut().push(*who)) + } } impl Config for Test { @@ -117,12 +120,14 @@ pub const CALL: &::Call = &Call::System(frame_system::Call::set_ /// Create new externalities for `System` module tests. pub fn new_test_ext() -> sp_io::TestExternalities { - let mut ext: sp_io::TestExternalities = GenesisConfig::default() - .build_storage().unwrap().into(); + let mut ext: sp_io::TestExternalities = + GenesisConfig::default().build_storage().unwrap().into(); // Add to each test the initial weight of a block - ext.execute_with(|| System::register_extra_weight_unchecked( - ::BlockWeights::get().base_block, - DispatchClass::Mandatory - )); + ext.execute_with(|| { + System::register_extra_weight_unchecked( + ::BlockWeights::get().base_block, + DispatchClass::Mandatory, + ) + }); ext } diff --git a/frame/system/src/mocking.rs b/frame/system/src/mocking.rs index 9f80c59a9c4d2..7e6026b726186 100644 --- a/frame/system/src/mocking.rs +++ b/frame/system/src/mocking.rs @@ -21,7 +21,10 @@ use sp_runtime::generic; /// An unchecked extrinsic type to be used in tests. 
pub type MockUncheckedExtrinsic = generic::UncheckedExtrinsic< - ::AccountId, ::Call, Signature, Extra, + ::AccountId, + ::Call, + Signature, + Extra, >; /// An implementation of `sp_runtime::traits::Block` to be used in tests. diff --git a/frame/system/src/offchain.rs b/frame/system/src/offchain.rs index 0502df22493e1..b0801b61759c1 100644 --- a/frame/system/src/offchain.rs +++ b/frame/system/src/offchain.rs @@ -57,12 +57,16 @@ #![warn(missing_docs)] use codec::Encode; -use sp_std::collections::btree_set::BTreeSet; -use sp_std::convert::{TryInto, TryFrom}; -use sp_std::prelude::{Box, Vec}; -use sp_runtime::app_crypto::RuntimeAppPublic; -use sp_runtime::traits::{Extrinsic as ExtrinsicT, IdentifyAccount, One}; use frame_support::RuntimeDebug; +use sp_runtime::{ + app_crypto::RuntimeAppPublic, + traits::{Extrinsic as ExtrinsicT, IdentifyAccount, One}, +}; +use sp_std::{ + collections::btree_set::BTreeSet, + convert::{TryFrom, TryInto}, + prelude::{Box, Vec}, +}; /// Marker struct used to flag using all supported keys to sign a payload. pub struct ForAll {} @@ -76,7 +80,7 @@ pub struct ForAny {} /// utility function can be used. However, this struct is used by `Signer` /// to submit a signed transactions providing the signature along with the call. 
pub struct SubmitTransaction, OverarchingCall> { - _phantom: sp_std::marker::PhantomData<(T, OverarchingCall)> + _phantom: sp_std::marker::PhantomData<(T, OverarchingCall)>, } impl SubmitTransaction @@ -120,10 +124,7 @@ pub struct Signer, X = Fo impl, X> Default for Signer { fn default() -> Self { - Self { - accounts: Default::default(), - _phantom: Default::default(), - } + Self { accounts: Default::default(), _phantom: Default::default() } } } @@ -161,72 +162,73 @@ impl, X> Signer let keystore_accounts = self.keystore_accounts(); match self.accounts { None => Box::new(keystore_accounts), - Some(ref keys) => { - let keystore_lookup: BTreeSet<::Public> = keystore_accounts - .map(|account| account.public).collect(); - - Box::new(keys.into_iter() - .enumerate() - .map(|(index, key)| { - let account_id = key.clone().into_account(); - Account::new(index, account_id, key.clone()) - }) - .filter(move |account| keystore_lookup.contains(&account.public))) - } + Some(ref keys) => { + let keystore_lookup: BTreeSet<::Public> = + keystore_accounts.map(|account| account.public).collect(); + + Box::new( + keys.into_iter() + .enumerate() + .map(|(index, key)| { + let account_id = key.clone().into_account(); + Account::new(index, account_id, key.clone()) + }) + .filter(move |account| keystore_lookup.contains(&account.public)), + ) + }, } } fn keystore_accounts(&self) -> impl Iterator> { - C::RuntimeAppPublic::all() - .into_iter() - .enumerate() - .map(|(index, key)| { - let generic_public = C::GenericPublic::from(key); - let public: T::Public = generic_public.into(); - let account_id = public.clone().into_account(); - Account::new(index, account_id, public) - }) + C::RuntimeAppPublic::all().into_iter().enumerate().map(|(index, key)| { + let generic_public = C::GenericPublic::from(key); + let public: T::Public = generic_public.into(); + let account_id = public.clone().into_account(); + Account::new(index, account_id, public) + }) } } - impl> Signer { - fn for_all(&self, f: F) -> 
Vec<(Account, R)> where + fn for_all(&self, f: F) -> Vec<(Account, R)> + where F: Fn(&Account) -> Option, { let accounts = self.accounts_from_keys(); accounts .into_iter() - .filter_map(|account| { - f(&account).map(|res| (account, res)) - }) + .filter_map(|account| f(&account).map(|res| (account, res))) .collect() } } impl> Signer { - fn for_any(&self, f: F) -> Option<(Account, R)> where + fn for_any(&self, f: F) -> Option<(Account, R)> + where F: Fn(&Account) -> Option, { let accounts = self.accounts_from_keys(); for account in accounts.into_iter() { let res = f(&account); if let Some(res) = res { - return Some((account, res)); + return Some((account, res)) } } None } } -impl> SignMessage for Signer { +impl> SignMessage + for Signer +{ type SignatureData = Vec<(Account, T::Signature)>; fn sign_message(&self, message: &[u8]) -> Self::SignatureData { self.for_all(|account| C::sign(message, account.public.clone())) } - fn sign(&self, f: F) -> Self::SignatureData where + fn sign(&self, f: F) -> Self::SignatureData + where F: Fn(&Account) -> TPayload, TPayload: SignedPayload, { @@ -234,14 +236,17 @@ impl> SignMessage for } } -impl> SignMessage for Signer { +impl> SignMessage + for Signer +{ type SignatureData = Option<(Account, T::Signature)>; fn sign_message(&self, message: &[u8]) -> Self::SignatureData { self.for_any(|account| C::sign(message, account.public.clone())) } - fn sign(&self, f: F) -> Self::SignatureData where + fn sign(&self, f: F) -> Self::SignatureData + where F: Fn(&Account) -> TPayload, TPayload: SignedPayload, { @@ -250,16 +255,14 @@ impl> SignMessage for } impl< - T: CreateSignedTransaction + SigningTypes, - C: AppCrypto, - LocalCall, -> SendSignedTransaction for Signer { + T: CreateSignedTransaction + SigningTypes, + C: AppCrypto, + LocalCall, + > SendSignedTransaction for Signer +{ type Result = Option<(Account, Result<(), ()>)>; - fn send_signed_transaction( - &self, - f: impl Fn(&Account) -> LocalCall, - ) -> Self::Result { + fn 
send_signed_transaction(&self, f: impl Fn(&Account) -> LocalCall) -> Self::Result { self.for_any(|account| { let call = f(account); self.send_single_signed_transaction(account, call) @@ -268,16 +271,14 @@ impl< } impl< - T: SigningTypes + CreateSignedTransaction, - C: AppCrypto, - LocalCall, -> SendSignedTransaction for Signer { + T: SigningTypes + CreateSignedTransaction, + C: AppCrypto, + LocalCall, + > SendSignedTransaction for Signer +{ type Result = Vec<(Account, Result<(), ()>)>; - fn send_signed_transaction( - &self, - f: impl Fn(&Account) -> LocalCall, - ) -> Self::Result { + fn send_signed_transaction(&self, f: impl Fn(&Account) -> LocalCall) -> Self::Result { self.for_all(|account| { let call = f(account); self.send_single_signed_transaction(account, call) @@ -286,10 +287,11 @@ impl< } impl< - T: SigningTypes + SendTransactionTypes, - C: AppCrypto, - LocalCall, -> SendUnsignedTransaction for Signer { + T: SigningTypes + SendTransactionTypes, + C: AppCrypto, + LocalCall, + > SendUnsignedTransaction for Signer +{ type Result = Option<(Account, Result<(), ()>)>; fn send_unsigned_transaction( @@ -303,7 +305,7 @@ impl< { self.for_any(|account| { let payload = f(account); - let signature= payload.sign::()?; + let signature = payload.sign::()?; let call = f2(payload, signature); self.submit_unsigned_transaction(call) }) @@ -311,10 +313,11 @@ impl< } impl< - T: SigningTypes + SendTransactionTypes, - C: AppCrypto, - LocalCall, -> SendUnsignedTransaction for Signer { + T: SigningTypes + SendTransactionTypes, + C: AppCrypto, + LocalCall, + > SendUnsignedTransaction for Signer +{ type Result = Vec<(Account, Result<(), ()>)>; fn send_unsigned_transaction( @@ -324,7 +327,8 @@ impl< ) -> Self::Result where F: Fn(&Account) -> TPayload, - TPayload: SignedPayload { + TPayload: SignedPayload, + { self.for_all(|account| { let payload = f(account); let signature = payload.sign::()?; @@ -352,16 +356,13 @@ impl Account { } } -impl Clone for Account where +impl Clone for Account 
+where T::AccountId: Clone, T::Public: Clone, { fn clone(&self) -> Self { - Self { - index: self.index, - id: self.id.clone(), - public: self.public.clone(), - } + Self { index: self.index, id: self.id.clone(), public: self.public.clone() } } } @@ -395,15 +396,13 @@ pub trait AppCrypto { type RuntimeAppPublic: RuntimeAppPublic; /// A raw crypto public key wrapped by `RuntimeAppPublic`. - type GenericPublic: - From + type GenericPublic: From + Into + TryFrom + Into; /// A matching raw crypto `Signature` type. - type GenericSignature: - From<::Signature> + type GenericSignature: From<::Signature> + Into<::Signature> + TryFrom + Into; @@ -424,16 +423,15 @@ pub trait AppCrypto { fn verify(payload: &[u8], public: Public, signature: Signature) -> bool { let p: Self::GenericPublic = match public.try_into() { Ok(a) => a, - _ => return false + _ => return false, }; let x = Into::::into(p); let signature: Self::GenericSignature = match signature.try_into() { Ok(a) => a, - _ => return false + _ => return false, }; - let signature = Into::<< - Self::RuntimeAppPublic as RuntimeAppPublic - >::Signature>::into(signature); + let signature = + Into::<::Signature>::into(signature); x.verify(&payload, &signature) } @@ -460,17 +458,13 @@ pub trait SigningTypes: crate::Config { + scale_info::TypeInfo; /// A matching `Signature` type. - type Signature: Clone - + PartialEq - + core::fmt::Debug - + codec::Codec - + scale_info::TypeInfo; + type Signature: Clone + PartialEq + core::fmt::Debug + codec::Codec + scale_info::TypeInfo; } /// A definition of types required to submit transactions from within the runtime. pub trait SendTransactionTypes { /// The extrinsic type expected by the runtime. - type Extrinsic: ExtrinsicT + codec::Encode; + type Extrinsic: ExtrinsicT + codec::Encode; /// The runtime's call type. /// /// This has additional bound to be able to be created from pallet-local `Call` types. 
@@ -484,7 +478,9 @@ pub trait SendTransactionTypes { /// This will most likely include creation of `SignedExtra` (a set of `SignedExtensions`). /// Note that the result can be altered by inspecting the `Call` (for instance adjusting /// fees, or mortality depending on the `pallet` being called). -pub trait CreateSignedTransaction: SendTransactionTypes + SigningTypes { +pub trait CreateSignedTransaction: + SendTransactionTypes + SigningTypes +{ /// Attempt to create signed extrinsic data that encodes call from given account. /// /// Runtime implementation is free to construct the payload to sign and the signature @@ -516,18 +512,19 @@ pub trait SignMessage { /// /// This method expects `f` to return a `SignedPayload` /// object which is then used for signing. - fn sign(&self, f: F) -> Self::SignatureData where + fn sign(&self, f: F) -> Self::SignatureData + where F: Fn(&Account) -> TPayload, - TPayload: SignedPayload, - ; + TPayload: SignedPayload; } /// Submit a signed transaction to the transaction pool. pub trait SendSignedTransaction< T: SigningTypes + CreateSignedTransaction, C: AppCrypto, - LocalCall -> { + LocalCall, +> +{ /// A submission result. /// /// This should contain an indication of success and the account that was used for signing. @@ -539,10 +536,7 @@ pub trait SendSignedTransaction< /// to be returned. /// The call is then wrapped into a transaction (see `#CreateSignedTransaction`), signed and /// submitted to the pool. - fn send_signed_transaction( - &self, - f: impl Fn(&Account) -> LocalCall, - ) -> Self::Result; + fn send_signed_transaction(&self, f: impl Fn(&Account) -> LocalCall) -> Self::Result; /// Wraps the call into transaction, signs using given account and submits to the pool. 
fn send_single_signed_transaction( @@ -561,10 +555,9 @@ pub trait SendSignedTransaction< call.into(), account.public.clone(), account.id.clone(), - account_data.nonce + account_data.nonce, )?; - let res = SubmitTransaction:: - ::submit_transaction(call, Some(signature)); + let res = SubmitTransaction::::submit_transaction(call, Some(signature)); if res.is_ok() { // increment the nonce. This is fine, since the code should always @@ -578,10 +571,7 @@ pub trait SendSignedTransaction< } /// Submit an unsigned transaction onchain with a signed payload -pub trait SendUnsignedTransaction< - T: SigningTypes + SendTransactionTypes, - LocalCall, -> { +pub trait SendUnsignedTransaction, LocalCall> { /// A submission result. /// /// Should contain the submission result and the account(s) that signed the payload. @@ -603,12 +593,8 @@ pub trait SendUnsignedTransaction< TPayload: SignedPayload; /// Submits an unsigned call to the transaction pool. - fn submit_unsigned_transaction( - &self, - call: LocalCall - ) -> Option> { - Some(SubmitTransaction:: - ::submit_unsigned_transaction(call.into())) + fn submit_unsigned_transaction(&self, call: LocalCall) -> Option> { + Some(SubmitTransaction::::submit_unsigned_transaction(call.into())) } } @@ -633,14 +619,13 @@ pub trait SignedPayload: Encode { } } - #[cfg(test)] mod tests { use super::*; + use crate::mock::{Call, Test as TestRuntime, CALL}; use codec::Decode; - use crate::mock::{Test as TestRuntime, Call, CALL}; use sp_core::offchain::{testing, TransactionPoolExt}; - use sp_runtime::testing::{UintAuthorityId, TestSignature, TestXt}; + use sp_runtime::testing::{TestSignature, TestXt, UintAuthorityId}; impl SigningTypes for TestRuntime { type Public = UintAuthorityId; @@ -677,16 +662,8 @@ mod tests { type GenericSignature = TestSignature; } - fn assert_account( - next: Option<(Account, Result<(), ()>)>, - index: usize, - id: u64, - ) { - assert_eq!(next, Some((Account { - index, - id, - public: id.into(), - }, Ok(())))); + fn 
assert_account(next: Option<(Account, Result<(), ()>)>, index: usize, id: u64) { + assert_eq!(next, Some((Account { index, id, public: id.into() }, Ok(())))); } #[test] @@ -701,16 +678,10 @@ mod tests { t.execute_with(|| { // when - let result = Signer:: - ::all_accounts() + let result = Signer::::all_accounts() .send_unsigned_transaction( - |account| SimplePayload { - data: vec![1, 2, 3], - public: account.public.clone() - }, - |_payload, _signature| { - CALL.clone() - } + |account| SimplePayload { data: vec![1, 2, 3], public: account.public.clone() }, + |_payload, _signature| CALL.clone(), ); // then @@ -742,16 +713,10 @@ mod tests { t.execute_with(|| { // when - let result = Signer:: - ::any_account() + let result = Signer::::any_account() .send_unsigned_transaction( - |account| SimplePayload { - data: vec![1, 2, 3], - public: account.public.clone() - }, - |_payload, _signature| { - CALL.clone() - } + |account| SimplePayload { data: vec![1, 2, 3], public: account.public.clone() }, + |_payload, _signature| CALL.clone(), ); // then @@ -779,17 +744,11 @@ mod tests { t.execute_with(|| { // when - let result = Signer:: - ::all_accounts() + let result = Signer::::all_accounts() .with_filter(vec![0xf2.into(), 0xf1.into()]) .send_unsigned_transaction( - |account| SimplePayload { - data: vec![1, 2, 3], - public: account.public.clone() - }, - |_payload, _signature| { - CALL.clone() - } + |account| SimplePayload { data: vec![1, 2, 3], public: account.public.clone() }, + |_payload, _signature| CALL.clone(), ); // then @@ -819,17 +778,11 @@ mod tests { t.execute_with(|| { // when - let result = Signer:: - ::any_account() + let result = Signer::::any_account() .with_filter(vec![0xf2.into(), 0xf1.into()]) .send_unsigned_transaction( - |account| SimplePayload { - data: vec![1, 2, 3], - public: account.public.clone() - }, - |_payload, _signature| { - CALL.clone() - } + |account| SimplePayload { data: vec![1, 2, 3], public: account.public.clone() }, + |_payload, _signature| 
CALL.clone(), ); // then @@ -844,5 +797,4 @@ mod tests { assert_eq!(tx1.signature, None); }); } - } diff --git a/frame/system/src/tests.rs b/frame/system/src/tests.rs index 77d4baee88ac1..f171fe661f693 100644 --- a/frame/system/src/tests.rs +++ b/frame/system/src/tests.rs @@ -16,11 +16,14 @@ // limitations under the License. use crate::*; -use mock::{*, Origin}; -use sp_core::H256; -use sp_runtime::{DispatchError, DispatchErrorWithPostInfo, traits::{Header, BlakeTwo256}}; use frame_support::{ - assert_noop, assert_ok, weights::WithPostDispatchInfo, dispatch::PostDispatchInfo + assert_noop, assert_ok, dispatch::PostDispatchInfo, weights::WithPostDispatchInfo, +}; +use mock::{Origin, *}; +use sp_core::H256; +use sp_runtime::{ + traits::{BlakeTwo256, Header}, + DispatchError, DispatchErrorWithPostInfo, }; #[test] @@ -36,13 +39,10 @@ fn stored_map_works() { assert_ok!(System::insert(&0, 42)); assert!(!System::is_provider_required(&0)); - assert_eq!(Account::::get(0), AccountInfo { - nonce: 0, - providers: 1, - consumers: 0, - sufficients: 0, - data: 42, - }); + assert_eq!( + Account::::get(0), + AccountInfo { nonce: 0, providers: 1, consumers: 0, sufficients: 0, data: 42 } + ); assert_ok!(System::inc_consumers(&0)); assert!(System::is_provider_required(&0)); @@ -154,40 +154,25 @@ fn provider_required_to_support_consumer() { #[test] fn deposit_event_should_work() { new_test_ext().execute_with(|| { - System::initialize( - &1, - &[0u8; 32].into(), - &Default::default(), - InitKind::Full, - ); + System::initialize(&1, &[0u8; 32].into(), &Default::default(), InitKind::Full); System::note_finished_extrinsics(); System::deposit_event(SysEvent::CodeUpdated); System::finalize(); assert_eq!( System::events(), - vec![ - EventRecord { - phase: Phase::Finalization, - event: SysEvent::CodeUpdated.into(), - topics: vec![], - } - ] + vec![EventRecord { + phase: Phase::Finalization, + event: SysEvent::CodeUpdated.into(), + topics: vec![], + }] ); - System::initialize( - &2, - &[0u8; 
32].into(), - &Default::default(), - InitKind::Full, - ); + System::initialize(&2, &[0u8; 32].into(), &Default::default(), InitKind::Full); System::deposit_event(SysEvent::NewAccount(32)); System::note_finished_initialize(); System::deposit_event(SysEvent::KilledAccount(42)); System::note_applied_extrinsic(&Ok(().into()), Default::default()); - System::note_applied_extrinsic( - &Err(DispatchError::BadOrigin.into()), - Default::default() - ); + System::note_applied_extrinsic(&Err(DispatchError::BadOrigin.into()), Default::default()); System::note_finished_extrinsics(); System::deposit_event(SysEvent::NewAccount(3)); System::finalize(); @@ -214,7 +199,8 @@ fn deposit_event_should_work() { event: SysEvent::ExtrinsicFailed( DispatchError::BadOrigin.into(), Default::default() - ).into(), + ) + .into(), topics: vec![] }, EventRecord { @@ -230,78 +216,56 @@ fn deposit_event_should_work() { #[test] fn deposit_event_uses_actual_weight() { new_test_ext().execute_with(|| { - System::initialize( - &1, - &[0u8; 32].into(), - &Default::default(), - InitKind::Full, - ); + System::initialize(&1, &[0u8; 32].into(), &Default::default(), InitKind::Full); System::note_finished_initialize(); - let pre_info = DispatchInfo { - weight: 1000, - .. 
Default::default() - }; - System::note_applied_extrinsic( - &Ok(Some(300).into()), - pre_info, - ); - System::note_applied_extrinsic( - &Ok(Some(1000).into()), - pre_info, - ); + let pre_info = DispatchInfo { weight: 1000, ..Default::default() }; + System::note_applied_extrinsic(&Ok(Some(300).into()), pre_info); + System::note_applied_extrinsic(&Ok(Some(1000).into()), pre_info); System::note_applied_extrinsic( // values over the pre info should be capped at pre dispatch value &Ok(Some(1200).into()), pre_info, ); - System::note_applied_extrinsic( - &Err(DispatchError::BadOrigin.with_weight(999)), - pre_info, - ); + System::note_applied_extrinsic(&Err(DispatchError::BadOrigin.with_weight(999)), pre_info); assert_eq!( System::events(), vec![ EventRecord { phase: Phase::ApplyExtrinsic(0), - event: SysEvent::ExtrinsicSuccess( - DispatchInfo { - weight: 300, - .. Default::default() - }, - ).into(), + event: SysEvent::ExtrinsicSuccess(DispatchInfo { + weight: 300, + ..Default::default() + },) + .into(), topics: vec![] }, EventRecord { phase: Phase::ApplyExtrinsic(1), - event: SysEvent::ExtrinsicSuccess( - DispatchInfo { - weight: 1000, - .. Default::default() - }, - ).into(), + event: SysEvent::ExtrinsicSuccess(DispatchInfo { + weight: 1000, + ..Default::default() + },) + .into(), topics: vec![] }, EventRecord { phase: Phase::ApplyExtrinsic(2), - event: SysEvent::ExtrinsicSuccess( - DispatchInfo { - weight: 1000, - .. Default::default() - }, - ).into(), + event: SysEvent::ExtrinsicSuccess(DispatchInfo { + weight: 1000, + ..Default::default() + },) + .into(), topics: vec![] }, EventRecord { phase: Phase::ApplyExtrinsic(3), event: SysEvent::ExtrinsicFailed( DispatchError::BadOrigin.into(), - DispatchInfo { - weight: 999, - .. 
Default::default() - }, - ).into(), + DispatchInfo { weight: 999, ..Default::default() }, + ) + .into(), topics: vec![] }, ] @@ -314,19 +278,10 @@ fn deposit_event_topics() { new_test_ext().execute_with(|| { const BLOCK_NUMBER: u64 = 1; - System::initialize( - &BLOCK_NUMBER, - &[0u8; 32].into(), - &Default::default(), - InitKind::Full, - ); + System::initialize(&BLOCK_NUMBER, &[0u8; 32].into(), &Default::default(), InitKind::Full); System::note_finished_extrinsics(); - let topics = vec![ - H256::repeat_byte(1), - H256::repeat_byte(2), - H256::repeat_byte(3), - ]; + let topics = vec![H256::repeat_byte(1), H256::repeat_byte(2), H256::repeat_byte(3)]; // We deposit a few events with different sets of topics. System::deposit_event_indexed(&topics[0..3], SysEvent::NewAccount(1).into()); @@ -359,18 +314,9 @@ fn deposit_event_topics() { // Check that the topic-events mapping reflects the deposited topics. // Note that these are indexes of the events. - assert_eq!( - System::event_topics(&topics[0]), - vec![(BLOCK_NUMBER, 0), (BLOCK_NUMBER, 1)], - ); - assert_eq!( - System::event_topics(&topics[1]), - vec![(BLOCK_NUMBER, 0), (BLOCK_NUMBER, 2)], - ); - assert_eq!( - System::event_topics(&topics[2]), - vec![(BLOCK_NUMBER, 0)], - ); + assert_eq!(System::event_topics(&topics[0]), vec![(BLOCK_NUMBER, 0), (BLOCK_NUMBER, 1)],); + assert_eq!(System::event_topics(&topics[1]), vec![(BLOCK_NUMBER, 0), (BLOCK_NUMBER, 2)],); + assert_eq!(System::event_topics(&topics[2]), vec![(BLOCK_NUMBER, 0)],); }); } @@ -390,30 +336,19 @@ fn prunes_block_hash_mappings() { new_test_ext().execute_with(|| { // simulate import of 15 blocks for n in 1..=15 { - System::initialize( - &n, - &[n as u8 - 1; 32].into(), - &Default::default(), - InitKind::Full, - ); + System::initialize(&n, &[n as u8 - 1; 32].into(), &Default::default(), InitKind::Full); System::finalize(); } // first 5 block hashes are pruned for n in 0..5 { - assert_eq!( - System::block_hash(n), - H256::zero(), - ); + 
assert_eq!(System::block_hash(n), H256::zero(),); } // the remaining 10 are kept for n in 5..15 { - assert_eq!( - System::block_hash(n), - [n as u8; 32].into(), - ); + assert_eq!(System::block_hash(n), [n as u8; 32].into(),); } }) } @@ -453,10 +388,7 @@ fn set_code_checks_works() { let mut ext = new_test_ext(); ext.register_extension(sp_core::traits::ReadRuntimeVersionExt::new(read_runtime_version)); ext.execute_with(|| { - let res = System::set_code( - RawOrigin::Root.into(), - vec![1, 2, 3, 4], - ); + let res = System::set_code(RawOrigin::Root.into(), vec![1, 2, 3, 4]); assert_eq!(expected.map_err(DispatchErrorWithPostInfo::from), res); }); @@ -473,7 +405,8 @@ fn set_code_with_real_wasm_blob() { System::set_code( RawOrigin::Root.into(), substrate_test_runtime_client::runtime::wasm_binary_unwrap().to_vec(), - ).unwrap(); + ) + .unwrap(); assert_eq!( System::events(), @@ -496,9 +429,10 @@ fn runtime_upgraded_with_set_storage() { RawOrigin::Root.into(), vec![( well_known_keys::CODE.to_vec(), - substrate_test_runtime_client::runtime::wasm_binary_unwrap().to_vec() + substrate_test_runtime_client::runtime::wasm_binary_unwrap().to_vec(), )], - ).unwrap(); + ) + .unwrap(); }); } @@ -531,20 +465,12 @@ fn ensure_one_of_works() { #[test] fn extrinsics_root_is_calculated_correctly() { new_test_ext().execute_with(|| { - System::initialize( - &1, - &[0u8; 32].into(), - &Default::default(), - InitKind::Full, - ); + System::initialize(&1, &[0u8; 32].into(), &Default::default(), InitKind::Full); System::note_finished_initialize(); System::note_extrinsic(vec![1]); System::note_applied_extrinsic(&Ok(().into()), Default::default()); System::note_extrinsic(vec![2]); - System::note_applied_extrinsic( - &Err(DispatchError::BadOrigin.into()), - Default::default() - ); + System::note_applied_extrinsic(&Err(DispatchError::BadOrigin.into()), Default::default()); System::note_finished_extrinsics(); let header = System::finalize(); diff --git a/frame/system/src/weights.rs 
b/frame/system/src/weights.rs index c6284ba17d63f..e49b745c68c59 100644 --- a/frame/system/src/weights.rs +++ b/frame/system/src/weights.rs @@ -35,57 +35,58 @@ // --output=./frame/system/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for frame_system. pub trait WeightInfo { - fn remark(b: u32, ) -> Weight; - fn remark_with_event(b: u32, ) -> Weight; + fn remark(b: u32) -> Weight; + fn remark_with_event(b: u32) -> Weight; fn set_heap_pages() -> Weight; fn set_changes_trie_config() -> Weight; - fn set_storage(i: u32, ) -> Weight; - fn kill_storage(i: u32, ) -> Weight; - fn kill_prefix(p: u32, ) -> Weight; + fn set_storage(i: u32) -> Weight; + fn kill_storage(i: u32) -> Weight; + fn kill_prefix(p: u32) -> Weight; } /// Weights for frame_system using the Substrate node and recommended hardware. 
pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn remark(_b: u32, ) -> Weight { + fn remark(_b: u32) -> Weight { (1_038_000 as Weight) } - fn remark_with_event(b: u32, ) -> Weight { + fn remark_with_event(b: u32) -> Weight { (5_246_000 as Weight) // Standard Error: 0 .saturating_add((1_000 as Weight).saturating_mul(b as Weight)) } fn set_heap_pages() -> Weight { - (1_586_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (1_586_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight)) } fn set_changes_trie_config() -> Weight { (7_181_000 as Weight) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn set_storage(i: u32, ) -> Weight { + fn set_storage(i: u32) -> Weight { (0 as Weight) // Standard Error: 0 .saturating_add((568_000 as Weight).saturating_mul(i as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(i as Weight))) } - fn kill_storage(i: u32, ) -> Weight { + fn kill_storage(i: u32) -> Weight { (2_278_000 as Weight) // Standard Error: 0 .saturating_add((423_000 as Weight).saturating_mul(i as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(i as Weight))) } - fn kill_prefix(p: u32, ) -> Weight { + fn kill_prefix(p: u32) -> Weight { (8_243_000 as Weight) // Standard Error: 1_000 .saturating_add((795_000 as Weight).saturating_mul(p as Weight)) @@ -95,36 +96,35 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn remark(_b: u32, ) -> Weight { + fn remark(_b: u32) -> Weight { (1_038_000 as Weight) } - fn remark_with_event(b: u32, ) -> Weight { + fn remark_with_event(b: u32) -> Weight { (5_246_000 as Weight) // Standard Error: 0 .saturating_add((1_000 as Weight).saturating_mul(b as Weight)) } fn set_heap_pages() -> Weight { - (1_586_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + 
(1_586_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight)) } fn set_changes_trie_config() -> Weight { (7_181_000 as Weight) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn set_storage(i: u32, ) -> Weight { + fn set_storage(i: u32) -> Weight { (0 as Weight) // Standard Error: 0 .saturating_add((568_000 as Weight).saturating_mul(i as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(i as Weight))) } - fn kill_storage(i: u32, ) -> Weight { + fn kill_storage(i: u32) -> Weight { (2_278_000 as Weight) // Standard Error: 0 .saturating_add((423_000 as Weight).saturating_mul(i as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(i as Weight))) } - fn kill_prefix(p: u32, ) -> Weight { + fn kill_prefix(p: u32) -> Weight { (8_243_000 as Weight) // Standard Error: 1_000 .saturating_add((795_000 as Weight).saturating_mul(p as Weight)) diff --git a/frame/timestamp/src/benchmarking.rs b/frame/timestamp/src/benchmarking.rs index 5d0178dc14846..84391380da832 100644 --- a/frame/timestamp/src/benchmarking.rs +++ b/frame/timestamp/src/benchmarking.rs @@ -20,9 +20,9 @@ #![cfg(feature = "runtime-benchmarks")] use super::*; -use frame_system::RawOrigin; +use frame_benchmarking::{benchmarks, impl_benchmark_test_suite, TrackedStorageKey}; use frame_support::{ensure, traits::OnFinalize}; -use frame_benchmarking::{benchmarks, TrackedStorageKey, impl_benchmark_test_suite}; +use frame_system::RawOrigin; use crate::Pallet as Timestamp; @@ -57,8 +57,4 @@ benchmarks! 
{ } } -impl_benchmark_test_suite!( - Timestamp, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Timestamp, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/timestamp/src/lib.rs b/frame/timestamp/src/lib.rs index 9d8c0d6c9b801..8772c2c3c3ba4 100644 --- a/frame/timestamp/src/lib.rs +++ b/frame/timestamp/src/lib.rs @@ -95,28 +95,30 @@ mod benchmarking; pub mod weights; -use sp_std::{result, cmp}; -use frame_support::traits::{Time, UnixTime, OnTimestampSet}; -use sp_runtime::traits::{AtLeast32Bit, Zero, SaturatedConversion, Scale}; -use sp_timestamp::{ - InherentError, INHERENT_IDENTIFIER, InherentType, -}; +use frame_support::traits::{OnTimestampSet, Time, UnixTime}; +use sp_runtime::traits::{AtLeast32Bit, SaturatedConversion, Scale, Zero}; +use sp_std::{cmp, result}; +use sp_timestamp::{InherentError, InherentType, INHERENT_IDENTIFIER}; pub use weights::WeightInfo; pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; /// The pallet configuration trait #[pallet::config] pub trait Config: frame_system::Config { /// Type used for expressing timestamp. - type Moment: Parameter + Default + AtLeast32Bit - + Scale + Copy + MaxEncodedLen + type Moment: Parameter + + Default + + AtLeast32Bit + + Scale + + Copy + + MaxEncodedLen + scale_info::StaticTypeInfo; /// Something which can be notified when the timestamp is set. Set this to `()` if not needed. 
@@ -209,7 +211,8 @@ pub mod pallet { const INHERENT_IDENTIFIER: InherentIdentifier = INHERENT_IDENTIFIER; fn create_inherent(data: &InherentData) -> Option { - let inherent_data = data.get_data::(&INHERENT_IDENTIFIER) + let inherent_data = data + .get_data::(&INHERENT_IDENTIFIER) .expect("Timestamp inherent data not correctly encoded") .expect("Timestamp inherent data must be provided"); let data = (*inherent_data).saturated_into::(); @@ -218,7 +221,10 @@ pub mod pallet { Some(Call::set { now: next_time.into() }) } - fn check_inherent(call: &Self::Call, data: &InherentData) -> result::Result<(), Self::Error> { + fn check_inherent( + call: &Self::Call, + data: &InherentData, + ) -> result::Result<(), Self::Error> { const MAX_TIMESTAMP_DRIFT_MILLIS: sp_timestamp::Timestamp = sp_timestamp::Timestamp::new(30 * 1000); @@ -227,7 +233,8 @@ pub mod pallet { _ => return Ok(()), }; - let data = data.get_data::(&INHERENT_IDENTIFIER) + let data = data + .get_data::(&INHERENT_IDENTIFIER) .expect("Timestamp inherent data not correctly encoded") .expect("Timestamp inherent data must be provided"); @@ -294,13 +301,16 @@ impl UnixTime for Pallet { #[cfg(test)] mod tests { - use crate as pallet_timestamp; use super::*; + use crate as pallet_timestamp; use frame_support::{assert_ok, parameter_types}; - use sp_io::TestExternalities; use sp_core::H256; - use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header}; + use sp_io::TestExternalities; + use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, + }; pub fn new_test_ext() -> TestExternalities { let t = frame_system::GenesisConfig::default().build_storage::().unwrap(); @@ -381,7 +391,9 @@ mod tests { } #[test] - #[should_panic(expected = "Timestamp must increment by at least between sequential blocks")] + #[should_panic( + expected = "Timestamp must increment by at least between sequential blocks" + )] fn block_period_minimum_enforced() { new_test_ext().execute_with(|| { 
Timestamp::set_timestamp(42); diff --git a/frame/timestamp/src/weights.rs b/frame/timestamp/src/weights.rs index cf4fa6ea3d639..35b0116cf4704 100644 --- a/frame/timestamp/src/weights.rs +++ b/frame/timestamp/src/weights.rs @@ -35,11 +35,13 @@ // --output=./frame/timestamp/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_timestamp. diff --git a/frame/tips/src/benchmarking.rs b/frame/tips/src/benchmarking.rs index 6c304fabb5a25..794a6815b3a3c 100644 --- a/frame/tips/src/benchmarking.rs +++ b/frame/tips/src/benchmarking.rs @@ -21,8 +21,8 @@ use super::*; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; use sp_runtime::traits::Saturating; use crate::Module as TipsMod; @@ -32,9 +32,9 @@ const SEED: u32 = 0; // Create the pre-requisite information needed to create a `report_awesome`. fn setup_awesome(length: u32) -> (T::AccountId, Vec, T::AccountId) { let caller = whitelisted_caller(); - let value = T::TipReportDepositBase::get() - + T::DataDepositPerByte::get() * length.into() - + T::Currency::minimum_balance(); + let value = T::TipReportDepositBase::get() + + T::DataDepositPerByte::get() * length.into() + + T::Currency::minimum_balance(); let _ = T::Currency::make_free_balance_be(&caller, value); let reason = vec![0; length as usize]; let awesome_person = account("awesome", 0, SEED); @@ -42,12 +42,13 @@ fn setup_awesome(length: u32) -> (T::AccountId, Vec, T::AccountId } // Create the pre-requisite information needed to call `tip_new`. 
-fn setup_tip(r: u32, t: u32) -> - Result<(T::AccountId, Vec, T::AccountId, BalanceOf), &'static str> -{ +fn setup_tip( + r: u32, + t: u32, +) -> Result<(T::AccountId, Vec, T::AccountId, BalanceOf), &'static str> { let tippers_count = T::Tippers::count(); - for i in 0 .. t { + for i in 0..t { let member = account("member", i, SEED); T::Tippers::add(&member); ensure!(T::Tippers::contains(&member), "failed to add tipper"); @@ -63,10 +64,8 @@ fn setup_tip(r: u32, t: u32) -> // Create `t` new tips for the tip proposal with `hash`. // This function automatically makes the tip able to close. -fn create_tips(t: u32, hash: T::Hash, value: BalanceOf) -> - Result<(), &'static str> -{ - for i in 0 .. t { +fn create_tips(t: u32, hash: T::Hash, value: BalanceOf) -> Result<(), &'static str> { + for i in 0..t { let caller = account("member", i, SEED); ensure!(T::Tippers::contains(&caller), "caller is not a tipper"); TipsMod::::tip(RawOrigin::Signed(caller).into(), hash, value)?; @@ -193,8 +192,4 @@ benchmarks! 
{ }: _(RawOrigin::Root, hash) } -impl_benchmark_test_suite!( - TipsMod, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(TipsMod, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/tips/src/lib.rs b/frame/tips/src/lib.rs index b8d63d8b9343d..8805172e5aea9 100644 --- a/frame/tips/src/lib.rs +++ b/frame/tips/src/lib.rs @@ -54,23 +54,24 @@ #![cfg_attr(not(feature = "std"), no_std)] -mod tests; mod benchmarking; +mod tests; pub mod weights; -use sp_std::prelude::*; -use frame_support::{decl_module, decl_storage, decl_event, ensure, decl_error, Parameter}; -use frame_support::traits::{ - Currency, Get, ExistenceRequirement::{KeepAlive}, - ReservableCurrency +use frame_support::{ + decl_error, decl_event, decl_module, decl_storage, ensure, + traits::{Currency, ExistenceRequirement::KeepAlive, Get, ReservableCurrency}, + Parameter, }; +use sp_std::prelude::*; -use sp_runtime::{ Percent, RuntimeDebug, traits::{ - Zero, AccountIdConversion, Hash, BadOrigin -}}; -use frame_support::traits::{SortedMembers, ContainsLengthBound, OnUnbalanced, EnsureOrigin}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; +use frame_support::traits::{ContainsLengthBound, EnsureOrigin, OnUnbalanced, SortedMembers}; use frame_system::{self as system, ensure_signed}; +use sp_runtime::{ + traits::{AccountIdConversion, BadOrigin, Hash, Zero}, + Percent, RuntimeDebug, +}; pub use weights::WeightInfo; pub type BalanceOf = pallet_treasury::BalanceOf; @@ -484,9 +485,9 @@ impl Module { if m < a { continue } else { - break true; + break true } - } + }, } }); } @@ -495,7 +496,10 @@ impl Module { /// /// Up to three balance operations. /// Plus `O(T)` (`T` is Tippers length). 
- fn payout_tip(hash: T::Hash, tip: OpenTip, T::BlockNumber, T::Hash>) { + fn payout_tip( + hash: T::Hash, + tip: OpenTip, T::BlockNumber, T::Hash>, + ) { let mut tips = tip.tips; Self::retain_active_tips(&mut tips); tips.sort_by_key(|i| i.1); @@ -549,22 +553,18 @@ impl Module { tips: Vec<(AccountId, Balance)>, } - use frame_support::{Twox64Concat, migration::storage_key_iter}; + use frame_support::{migration::storage_key_iter, Twox64Concat}; for (hash, old_tip) in storage_key_iter::< T::Hash, OldOpenTip, T::BlockNumber, T::Hash>, Twox64Concat, - >(b"Treasury", b"Tips").drain() + >(b"Treasury", b"Tips") + .drain() { - let (finder, deposit, finders_fee) = match old_tip.finder { - Some((finder, deposit)) => { - (finder, deposit, true) - }, - None => { - (T::AccountId::default(), Zero::zero(), false) - }, + Some((finder, deposit)) => (finder, deposit, true), + None => (T::AccountId::default(), Zero::zero(), false), }; let new_tip = OpenTip { reason: old_tip.reason, @@ -573,7 +573,7 @@ impl Module { deposit, closes: old_tip.closes, tips: old_tip.tips, - finders_fee + finders_fee, }; Tips::::insert(hash, new_tip) } diff --git a/frame/tips/src/tests.rs b/frame/tips/src/tests.rs index 7cf4c31a6495c..eb52acf8026b8 100644 --- a/frame/tips/src/tests.rs +++ b/frame/tips/src/tests.rs @@ -19,21 +19,19 @@ #![cfg(test)] -use crate as tips; use super::*; -use std::cell::RefCell; +use crate as tips; use frame_support::{ - assert_noop, assert_ok, parameter_types, - weights::Weight, traits::SortedMembers, - PalletId, pallet_prelude::GenesisBuild, + assert_noop, assert_ok, pallet_prelude::GenesisBuild, parameter_types, traits::SortedMembers, + weights::Weight, PalletId, }; -use sp_runtime::Permill; use sp_core::H256; use sp_runtime::{ - Perbill, testing::Header, - traits::{BlakeTwo256, IdentityLookup, BadOrigin}, + traits::{BadOrigin, BlakeTwo256, IdentityLookup}, + Perbill, Permill, }; +use std::cell::RefCell; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; 
type Block = frame_system::mocking::MockBlock; @@ -102,9 +100,7 @@ thread_local! { pub struct TenToFourteen; impl SortedMembers for TenToFourteen { fn sorted_members() -> Vec { - TEN_TO_FOURTEEN.with(|v| { - v.borrow().clone() - }) + TEN_TO_FOURTEEN.with(|v| v.borrow().clone()) } #[cfg(feature = "runtime-benchmarks")] fn add(new: &u128) { @@ -119,7 +115,9 @@ impl ContainsLengthBound for TenToFourteen { fn max_len() -> usize { TEN_TO_FOURTEEN.with(|v| v.borrow().len()) } - fn min_len() -> usize { 0 } + fn min_len() -> usize { + 0 + } } parameter_types! { pub const ProposalBond: Permill = Permill::from_percent(5); @@ -142,7 +140,7 @@ impl pallet_treasury::Config for Test { type ProposalBondMinimum = ProposalBondMinimum; type SpendPeriod = SpendPeriod; type Burn = Burn; - type BurnDestination = (); // Just gets burned. + type BurnDestination = (); // Just gets burned. type WeightInfo = (); type SpendFunds = (); type MaxApprovals = MaxApprovals; @@ -165,19 +163,21 @@ impl Config for Test { pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig::{ + pallet_balances::GenesisConfig:: { // Total issuance will be 200 with treasury account initialized at ED. 
balances: vec![(0, 100), (1, 98), (2, 1)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); GenesisBuild::::assimilate_storage(&pallet_treasury::GenesisConfig, &mut t).unwrap(); t.into() } fn last_event() -> RawEvent { - System::events().into_iter().map(|r| r.event) - .filter_map(|e| { - if let Event::TipsModTestInst(inner) = e { Some(inner) } else { None } - }) + System::events() + .into_iter() + .map(|r| r.event) + .filter_map(|e| if let Event::TipsModTestInst(inner) = e { Some(inner) } else { None }) .last() .unwrap() } @@ -267,13 +267,19 @@ fn close_tip_works() { assert_ok!(TipsModTestInst::tip(Origin::signed(11), h.clone(), 10)); - assert_noop!(TipsModTestInst::close_tip(Origin::signed(0), h.into()), Error::::StillOpen); + assert_noop!( + TipsModTestInst::close_tip(Origin::signed(0), h.into()), + Error::::StillOpen + ); assert_ok!(TipsModTestInst::tip(Origin::signed(12), h.clone(), 10)); assert_eq!(last_event(), RawEvent::TipClosing(h)); - assert_noop!(TipsModTestInst::close_tip(Origin::signed(0), h.into()), Error::::Premature); + assert_noop!( + TipsModTestInst::close_tip(Origin::signed(0), h.into()), + Error::::Premature + ); System::set_block_number(2); assert_noop!(TipsModTestInst::close_tip(Origin::none(), h.into()), BadOrigin); @@ -282,7 +288,10 @@ fn close_tip_works() { assert_eq!(last_event(), RawEvent::TipClosed(h, 3, 10)); - assert_noop!(TipsModTestInst::close_tip(Origin::signed(100), h.into()), Error::::UnknownTip); + assert_noop!( + TipsModTestInst::close_tip(Origin::signed(100), h.into()), + Error::::UnknownTip + ); }); } @@ -305,10 +314,7 @@ fn slash_tip_works() { assert_eq!(last_event(), RawEvent::NewTip(h)); // can't remove from any origin - assert_noop!( - TipsModTestInst::slash_tip(Origin::signed(0), h.clone()), - BadOrigin, - ); + assert_noop!(TipsModTestInst::slash_tip(Origin::signed(0), h.clone()), BadOrigin,); // can remove from root. 
assert_ok!(TipsModTestInst::slash_tip(Origin::root(), h.clone())); @@ -330,10 +336,16 @@ fn retract_tip_works() { assert_ok!(TipsModTestInst::tip(Origin::signed(10), h.clone(), 10)); assert_ok!(TipsModTestInst::tip(Origin::signed(11), h.clone(), 10)); assert_ok!(TipsModTestInst::tip(Origin::signed(12), h.clone(), 10)); - assert_noop!(TipsModTestInst::retract_tip(Origin::signed(10), h.clone()), Error::::NotFinder); + assert_noop!( + TipsModTestInst::retract_tip(Origin::signed(10), h.clone()), + Error::::NotFinder + ); assert_ok!(TipsModTestInst::retract_tip(Origin::signed(0), h.clone())); System::set_block_number(2); - assert_noop!(TipsModTestInst::close_tip(Origin::signed(0), h.into()), Error::::UnknownTip); + assert_noop!( + TipsModTestInst::close_tip(Origin::signed(0), h.into()), + Error::::UnknownTip + ); // with tip new Balances::make_free_balance_be(&Treasury::account_id(), 101); @@ -341,10 +353,16 @@ fn retract_tip_works() { let h = tip_hash(); assert_ok!(TipsModTestInst::tip(Origin::signed(11), h.clone(), 10)); assert_ok!(TipsModTestInst::tip(Origin::signed(12), h.clone(), 10)); - assert_noop!(TipsModTestInst::retract_tip(Origin::signed(0), h.clone()), Error::::NotFinder); + assert_noop!( + TipsModTestInst::retract_tip(Origin::signed(0), h.clone()), + Error::::NotFinder + ); assert_ok!(TipsModTestInst::retract_tip(Origin::signed(10), h.clone())); System::set_block_number(2); - assert_noop!(TipsModTestInst::close_tip(Origin::signed(10), h.into()), Error::::UnknownTip); + assert_noop!( + TipsModTestInst::close_tip(Origin::signed(10), h.into()), + Error::::UnknownTip + ); }); } @@ -416,7 +434,7 @@ fn test_last_reward_migration() { who: 10, finder: Some((20, 30)), closes: Some(13), - tips: vec![(40, 50), (60, 70)] + tips: vec![(40, 50), (60, 70)], }; let reason2 = BlakeTwo256::hash(b"reason2"); @@ -427,24 +445,17 @@ fn test_last_reward_migration() { who: 20, finder: None, closes: Some(13), - tips: vec![(40, 50), (60, 70)] + tips: vec![(40, 50), (60, 70)], }; let 
data = vec![ - ( - Tips::::hashed_key_for(hash1), - old_tip_finder.encode().to_vec() - ), - ( - Tips::::hashed_key_for(hash2), - old_tip_no_finder.encode().to_vec() - ), + (Tips::::hashed_key_for(hash1), old_tip_finder.encode().to_vec()), + (Tips::::hashed_key_for(hash2), old_tip_no_finder.encode().to_vec()), ]; s.top = data.into_iter().collect(); sp_io::TestExternalities::new(s).execute_with(|| { - TipsModTestInst::migrate_retract_tip_for_tip_new(); // Test w/ finder @@ -481,10 +492,12 @@ fn test_last_reward_migration() { fn genesis_funding_works() { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); let initial_funding = 100; - pallet_balances::GenesisConfig::{ + pallet_balances::GenesisConfig:: { // Total issuance will be 200 with treasury account initialized with 100. balances: vec![(0, 100), (Treasury::account_id(), initial_funding)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); GenesisBuild::::assimilate_storage(&pallet_treasury::GenesisConfig, &mut t).unwrap(); let mut t: sp_io::TestExternalities = t.into(); diff --git a/frame/tips/src/weights.rs b/frame/tips/src/weights.rs index ceee79bd6f07e..70fa966381bf3 100644 --- a/frame/tips/src/weights.rs +++ b/frame/tips/src/weights.rs @@ -35,27 +35,29 @@ // --output=./frame/tips/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_tips. 
pub trait WeightInfo { - fn report_awesome(r: u32, ) -> Weight; + fn report_awesome(r: u32) -> Weight; fn retract_tip() -> Weight; - fn tip_new(r: u32, t: u32, ) -> Weight; - fn tip(t: u32, ) -> Weight; - fn close_tip(t: u32, ) -> Weight; - fn slash_tip(t: u32, ) -> Weight; + fn tip_new(r: u32, t: u32) -> Weight; + fn tip(t: u32) -> Weight; + fn close_tip(t: u32) -> Weight; + fn slash_tip(t: u32) -> Weight; } /// Weights for pallet_tips using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn report_awesome(r: u32, ) -> Weight { + fn report_awesome(r: u32) -> Weight { (49_844_000 as Weight) // Standard Error: 0 .saturating_add((2_000 as Weight).saturating_mul(r as Weight)) @@ -67,7 +69,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn tip_new(r: u32, t: u32, ) -> Weight { + fn tip_new(r: u32, t: u32) -> Weight { (31_777_000 as Weight) // Standard Error: 0 .saturating_add((2_000 as Weight).saturating_mul(r as Weight)) @@ -76,21 +78,21 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn tip(t: u32, ) -> Weight { + fn tip(t: u32) -> Weight { (22_361_000 as Weight) // Standard Error: 0 .saturating_add((584_000 as Weight).saturating_mul(t as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn close_tip(t: u32, ) -> Weight { + fn close_tip(t: u32) -> Weight { (84_470_000 as Weight) // Standard Error: 0 .saturating_add((326_000 as Weight).saturating_mul(t as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn slash_tip(t: u32, ) -> Weight { + fn slash_tip(t: u32) -> Weight { (25_214_000 as Weight) // Standard Error: 0 
.saturating_add((8_000 as Weight).saturating_mul(t as Weight)) @@ -101,7 +103,7 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn report_awesome(r: u32, ) -> Weight { + fn report_awesome(r: u32) -> Weight { (49_844_000 as Weight) // Standard Error: 0 .saturating_add((2_000 as Weight).saturating_mul(r as Weight)) @@ -113,7 +115,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn tip_new(r: u32, t: u32, ) -> Weight { + fn tip_new(r: u32, t: u32) -> Weight { (31_777_000 as Weight) // Standard Error: 0 .saturating_add((2_000 as Weight).saturating_mul(r as Weight)) @@ -122,21 +124,21 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn tip(t: u32, ) -> Weight { + fn tip(t: u32) -> Weight { (22_361_000 as Weight) // Standard Error: 0 .saturating_add((584_000 as Weight).saturating_mul(t as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn close_tip(t: u32, ) -> Weight { + fn close_tip(t: u32) -> Weight { (84_470_000 as Weight) // Standard Error: 0 .saturating_add((326_000 as Weight).saturating_mul(t as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn slash_tip(t: u32, ) -> Weight { + fn slash_tip(t: u32) -> Weight { (25_214_000 as Weight) // Standard Error: 0 .saturating_add((8_000 as Weight).saturating_mul(t as Weight)) diff --git a/frame/transaction-payment/rpc/src/lib.rs b/frame/transaction-payment/rpc/src/lib.rs index efe9f010d139b..945156d12a6a4 100644 --- a/frame/transaction-payment/rpc/src/lib.rs +++ b/frame/transaction-payment/rpc/src/lib.rs @@ -17,33 +17,31 @@ //! RPC interface for the transaction payment pallet. 
-use std::sync::Arc; -use std::convert::TryInto; +pub use self::gen_client::Client as TransactionPaymentClient; use codec::{Codec, Decode}; -use sp_blockchain::HeaderBackend; use jsonrpc_core::{Error as RpcError, ErrorCode, Result}; use jsonrpc_derive::rpc; -use sp_runtime::{generic::BlockId, traits::{Block as BlockT, MaybeDisplay}}; +pub use pallet_transaction_payment_rpc_runtime_api::TransactionPaymentApi as TransactionPaymentRuntimeApi; +use pallet_transaction_payment_rpc_runtime_api::{FeeDetails, InclusionFee, RuntimeDispatchInfo}; use sp_api::ProvideRuntimeApi; +use sp_blockchain::HeaderBackend; use sp_core::Bytes; use sp_rpc::number::NumberOrHex; -use pallet_transaction_payment_rpc_runtime_api::{FeeDetails, InclusionFee, RuntimeDispatchInfo}; -pub use pallet_transaction_payment_rpc_runtime_api::TransactionPaymentApi as TransactionPaymentRuntimeApi; -pub use self::gen_client::Client as TransactionPaymentClient; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, MaybeDisplay}, +}; +use std::{convert::TryInto, sync::Arc}; #[rpc] pub trait TransactionPaymentApi { #[rpc(name = "payment_queryInfo")] - fn query_info( - &self, - encoded_xt: Bytes, - at: Option - ) -> Result; + fn query_info(&self, encoded_xt: Bytes, at: Option) -> Result; #[rpc(name = "payment_queryFeeDetails")] fn query_fee_details( &self, encoded_xt: Bytes, - at: Option + at: Option, ) -> Result>; } @@ -77,10 +75,8 @@ impl From for i64 { } } -impl TransactionPaymentApi< - ::Hash, - RuntimeDispatchInfo, -> for TransactionPayment +impl TransactionPaymentApi<::Hash, RuntimeDispatchInfo> + for TransactionPayment where Block: BlockT, C: 'static + ProvideRuntimeApi + HeaderBackend, @@ -90,13 +86,12 @@ where fn query_info( &self, encoded_xt: Bytes, - at: Option<::Hash> + at: Option<::Hash>, ) -> Result> { let api = self.client.runtime_api(); let at = BlockId::hash(at.unwrap_or_else(|| // If the block hash is not supplied assume the best block. 
- self.client.info().best_hash - )); + self.client.info().best_hash)); let encoded_len = encoded_xt.len() as u32; @@ -120,8 +115,7 @@ where let api = self.client.runtime_api(); let at = BlockId::hash(at.unwrap_or_else(|| // If the block hash is not supplied assume the best block. - self.client.info().best_hash - )); + self.client.info().best_hash)); let encoded_len = encoded_xt.len() as u32; @@ -136,11 +130,13 @@ where data: Some(format!("{:?}", e).into()), })?; - let try_into_rpc_balance = |value: Balance| value.try_into().map_err(|_| RpcError { - code: ErrorCode::InvalidParams, - message: format!("{} doesn't fit in NumberOrHex representation", value), - data: None, - }); + let try_into_rpc_balance = |value: Balance| { + value.try_into().map_err(|_| RpcError { + code: ErrorCode::InvalidParams, + message: format!("{} doesn't fit in NumberOrHex representation", value), + data: None, + }) + }; Ok(FeeDetails { inclusion_fee: if let Some(inclusion_fee) = fee_details.inclusion_fee { diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index abc9dce460ce3..afef8c800cdfa 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -47,28 +47,28 @@ #![cfg_attr(not(feature = "std"), no_std)] -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; use sp_runtime::{ - FixedU128, FixedPointNumber, FixedPointOperand, Perquintill, RuntimeDebug, - transaction_validity::{ - TransactionPriority, ValidTransaction, TransactionValidityError, TransactionValidity, - }, traits::{ - Saturating, SignedExtension, SaturatedConversion, Convert, Dispatchable, - DispatchInfoOf, PostDispatchInfoOf, + Convert, DispatchInfoOf, Dispatchable, PostDispatchInfoOf, SaturatedConversion, Saturating, + SignedExtension, }, + transaction_validity::{ + TransactionPriority, TransactionValidity, TransactionValidityError, ValidTransaction, + }, + FixedPointNumber, FixedPointOperand, FixedU128, Perquintill, 
RuntimeDebug, }; use sp_std::prelude::*; use frame_support::{ + dispatch::DispatchResult, traits::Get, weights::{ - Weight, DispatchInfo, PostDispatchInfo, GetDispatchInfo, Pays, WeightToFeePolynomial, - WeightToFeeCoefficient, DispatchClass, + DispatchClass, DispatchInfo, GetDispatchInfo, Pays, PostDispatchInfo, Weight, + WeightToFeeCoefficient, WeightToFeePolynomial, }, - dispatch::DispatchResult, }; mod payment; @@ -76,7 +76,7 @@ mod types; pub use pallet::*; pub use payment::*; -pub use types::{InclusionFee, FeeDetails, RuntimeDispatchInfo}; +pub use types::{FeeDetails, InclusionFee, RuntimeDispatchInfo}; /// Fee multiplier. pub type Multiplier = FixedU128; @@ -153,7 +153,11 @@ impl MultiplierUpdate for () { } impl MultiplierUpdate for TargetedFeeAdjustment - where T: frame_system::Config, S: Get, V: Get, M: Get, +where + T: frame_system::Config, + S: Get, + V: Get, + M: Get, { fn min() -> Multiplier { M::get() @@ -167,7 +171,11 @@ impl MultiplierUpdate for TargetedFeeAdjustment } impl Convert for TargetedFeeAdjustment - where T: frame_system::Config, S: Get, V: Get, M: Get, +where + T: frame_system::Config, + S: Get, + V: Get, + M: Get, { fn convert(previous: Multiplier) -> Multiplier { // Defensive only. The multiplier in storage should always be at most positive. 
Nonetheless @@ -178,12 +186,13 @@ impl Convert for TargetedFeeAdjustment>::block_weight(); - let normal_block_weight = *current_block_weight - .get(DispatchClass::Normal) - .min(&normal_max_weight); + let normal_block_weight = + *current_block_weight.get(DispatchClass::Normal).min(&normal_max_weight); let s = S::get(); let v = V::get(); @@ -233,9 +242,9 @@ impl Default for Releases { #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -273,16 +282,14 @@ pub mod pallet { } #[pallet::type_value] - pub fn NextFeeMultiplierOnEmpty() -> Multiplier { Multiplier::saturating_from_integer(1) } + pub fn NextFeeMultiplierOnEmpty() -> Multiplier { + Multiplier::saturating_from_integer(1) + } #[pallet::storage] #[pallet::getter(fn next_fee_multiplier)] - pub type NextFeeMultiplier = StorageValue< - _, - Multiplier, - ValueQuery, - NextFeeMultiplierOnEmpty - >; + pub type NextFeeMultiplier = + StorageValue<_, Multiplier, ValueQuery, NextFeeMultiplierOnEmpty>; #[pallet::storage] pub(super) type StorageVersion = StorageValue<_, Releases, ValueQuery>; @@ -319,9 +326,10 @@ pub mod pallet { use sp_std::convert::TryInto; assert!( ::max_value() >= - Multiplier::checked_from_integer( - T::BlockWeights::get().max_block.try_into().unwrap() - ).unwrap(), + Multiplier::checked_from_integer( + T::BlockWeights::get().max_block.try_into().unwrap() + ) + .unwrap(), ); // This is the minimum value of the multiplier. Make sure that if we collapse to this @@ -332,13 +340,13 @@ pub mod pallet { let mut target = T::FeeMultiplierUpdate::target() * T::BlockWeights::get().get(DispatchClass::Normal).max_total.expect( "Setting `max_total` for `Normal` dispatch class is not compatible with \ - `transaction-payment` pallet." 
+ `transaction-payment` pallet.", ); // add 1 percent; let addition = target / 100; if addition == 0 { // this is most likely because in a test setup we set everything to (). - return; + return } target += addition; @@ -346,7 +354,9 @@ pub mod pallet { sp_io::TestExternalities::new_empty().execute_with(|| { >::set_block_consumed_resources(target, 0); let next = T::FeeMultiplierUpdate::convert(min_value); - assert!(next > min_value, "The minimum bound of the multiplier is too low. When \ + assert!( + next > min_value, + "The minimum bound of the multiplier is too low. When \ block saturation is more than target by 1% and multiplier is minimal then \ the multiplier doesn't increase." ); @@ -355,8 +365,9 @@ pub mod pallet { } } -impl Pallet where - BalanceOf: FixedPointOperand +impl Pallet +where + BalanceOf: FixedPointOperand, { /// Query the data that we know about the fee of a given `call`. /// @@ -399,11 +410,8 @@ impl Pallet where } /// Compute the final fee value for a particular transaction. 
- pub fn compute_fee( - len: u32, - info: &DispatchInfoOf, - tip: BalanceOf, - ) -> BalanceOf where + pub fn compute_fee(len: u32, info: &DispatchInfoOf, tip: BalanceOf) -> BalanceOf + where T::Call: Dispatchable, { Self::compute_fee_details(len, info, tip).final_fee() @@ -414,7 +422,8 @@ impl Pallet where len: u32, info: &DispatchInfoOf, tip: BalanceOf, - ) -> FeeDetails> where + ) -> FeeDetails> + where T::Call: Dispatchable, { Self::compute_fee_raw(len, info.weight, tip, info.pays_fee, info.class) @@ -429,7 +438,8 @@ impl Pallet where info: &DispatchInfoOf, post_info: &PostDispatchInfoOf, tip: BalanceOf, - ) -> BalanceOf where + ) -> BalanceOf + where T::Call: Dispatchable, { Self::compute_actual_fee_details(len, info, post_info, tip).final_fee() @@ -441,7 +451,8 @@ impl Pallet where info: &DispatchInfoOf, post_info: &PostDispatchInfoOf, tip: BalanceOf, - ) -> FeeDetails> where + ) -> FeeDetails> + where T::Call: Dispatchable, { Self::compute_fee_raw( @@ -478,15 +489,12 @@ impl Pallet where inclusion_fee: Some(InclusionFee { base_fee, len_fee: fixed_len_fee, - adjusted_weight_fee + adjusted_weight_fee, }), - tip + tip, } } else { - FeeDetails { - inclusion_fee: None, - tip - } + FeeDetails { inclusion_fee: None, tip } } } @@ -498,7 +506,8 @@ impl Pallet where } } -impl Convert> for Pallet where +impl Convert> for Pallet +where T: Config, BalanceOf: FixedPointOperand, { @@ -518,7 +527,8 @@ impl Convert> for Pallet where #[scale_info(skip_type_params(T))] pub struct ChargeTransactionPayment(#[codec(compact)] BalanceOf); -impl ChargeTransactionPayment where +impl ChargeTransactionPayment +where T::Call: Dispatchable, BalanceOf: Send + Sync + FixedPointOperand, { @@ -548,8 +558,10 @@ impl ChargeTransactionPayment where let tip = self.0; let fee = Pallet::::compute_fee(len as u32, info, tip); - <::OnChargeTransaction as OnChargeTransaction>::withdraw_fee(who, call, info, fee, tip) - .map(|i| (fee, i)) + <::OnChargeTransaction as OnChargeTransaction>::withdraw_fee( + 
who, call, info, fee, tip, + ) + .map(|i| (fee, i)) } /// Get an appropriate priority for a transaction with the given length and info. @@ -562,11 +574,16 @@ impl ChargeTransactionPayment where /// and the entire block weight `(1/1)`, its priority is `fee * min(1, 4) = fee * 1`. This means /// that the transaction which consumes more resources (either length or weight) with the same /// `fee` ends up having lower priority. - fn get_priority(len: usize, info: &DispatchInfoOf, final_fee: BalanceOf) -> TransactionPriority { + fn get_priority( + len: usize, + info: &DispatchInfoOf, + final_fee: BalanceOf, + ) -> TransactionPriority { let weight_saturation = T::BlockWeights::get().max_block / info.weight.max(1); let max_block_length = *T::BlockLength::get().max.get(DispatchClass::Normal); let len_saturation = max_block_length as u64 / (len as u64).max(1); - let coefficient: BalanceOf = weight_saturation.min(len_saturation).saturated_into::>(); + let coefficient: BalanceOf = + weight_saturation.min(len_saturation).saturated_into::>(); final_fee.saturating_mul(coefficient).saturated_into::() } } @@ -582,7 +599,8 @@ impl sp_std::fmt::Debug for ChargeTransactionPayment { } } -impl SignedExtension for ChargeTransactionPayment where +impl SignedExtension for ChargeTransactionPayment +where BalanceOf: Send + Sync + From + FixedPointOperand, T::Call: Dispatchable, { @@ -598,7 +616,9 @@ impl SignedExtension for ChargeTransactionPayment where // imbalance resulting from withdrawing the fee <::OnChargeTransaction as OnChargeTransaction>::LiquidityInfo, ); - fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { Ok(()) } + fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { + Ok(()) + } fn validate( &self, @@ -608,10 +628,7 @@ impl SignedExtension for ChargeTransactionPayment where len: usize, ) -> TransactionValidity { let (fee, _) = self.withdraw_fee(who, call, info, len)?; - Ok(ValidTransaction { - priority: 
Self::get_priority(len, info, fee), - ..Default::default() - }) + Ok(ValidTransaction { priority: Self::get_priority(len, info, fee), ..Default::default() }) } fn pre_dispatch( @@ -619,7 +636,7 @@ impl SignedExtension for ChargeTransactionPayment where who: &Self::AccountId, call: &Self::Call, info: &DispatchInfoOf, - len: usize + len: usize, ) -> Result { let (_fee, imbalance) = self.withdraw_fee(who, call, info, len)?; Ok((self.0, who.clone(), imbalance)) @@ -633,13 +650,10 @@ impl SignedExtension for ChargeTransactionPayment where _result: &DispatchResult, ) -> Result<(), TransactionValidityError> { let (tip, who, imbalance) = pre; - let actual_fee = Pallet::::compute_actual_fee( - len as u32, - info, - post_info, - tip, - ); - T::OnChargeTransaction::correct_and_deposit_fee(&who, info, post_info, actual_fee, tip, imbalance)?; + let actual_fee = Pallet::::compute_actual_fee(len as u32, info, post_info, tip); + T::OnChargeTransaction::correct_and_deposit_fee( + &who, info, post_info, actual_fee, tip, imbalance, + )?; Ok(()) } } @@ -664,11 +678,11 @@ mod tests { use frame_support::{ assert_noop, assert_ok, parameter_types, + traits::{Currency, Imbalance, OnUnbalanced}, weights::{ - DispatchClass, DispatchInfo, PostDispatchInfo, GetDispatchInfo, Weight, - WeightToFeePolynomial, WeightToFeeCoefficients, WeightToFeeCoefficient, + DispatchClass, DispatchInfo, GetDispatchInfo, PostDispatchInfo, Weight, + WeightToFeeCoefficient, WeightToFeeCoefficients, WeightToFeePolynomial, }, - traits::{Currency, OnUnbalanced, Imbalance}, }; use frame_system as system; use pallet_balances::Call as BalancesCall; @@ -779,7 +793,7 @@ mod tests { pub struct DealWithFees; impl OnUnbalanced> for DealWithFees { fn on_unbalanceds( - mut fees_then_tips: impl Iterator> + mut fees_then_tips: impl Iterator>, ) { if let Some(fees) = fees_then_tips.next() { FEE_UNBALANCED_AMOUNT.with(|a| *a.borrow_mut() += fees.peek()); @@ -801,17 +815,12 @@ mod tests { balance_factor: u64, base_weight: u64, 
byte_fee: u64, - weight_to_fee: u64 + weight_to_fee: u64, } impl Default for ExtBuilder { fn default() -> Self { - Self { - balance_factor: 1, - base_weight: 0, - byte_fee: 1, - weight_to_fee: 1, - } + Self { balance_factor: 1, base_weight: 0, byte_fee: 1, weight_to_fee: 1 } } } @@ -848,12 +857,14 @@ mod tests { (3, 30 * self.balance_factor), (4, 40 * self.balance_factor), (5, 50 * self.balance_factor), - (6, 60 * self.balance_factor) + (6, 60 * self.balance_factor), ] } else { vec![] }, - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); t.into() } } @@ -865,24 +876,15 @@ mod tests { } fn post_info_from_weight(w: Weight) -> PostDispatchInfo { - PostDispatchInfo { - actual_weight: Some(w), - pays_fee: Default::default(), - } + PostDispatchInfo { actual_weight: Some(w), pays_fee: Default::default() } } fn post_info_from_pays(p: Pays) -> PostDispatchInfo { - PostDispatchInfo { - actual_weight: None, - pays_fee: p, - } + PostDispatchInfo { actual_weight: None, pays_fee: p } } fn default_post_info() -> PostDispatchInfo { - PostDispatchInfo { - actual_weight: None, - pays_fee: Default::default(), - } + PostDispatchInfo { actual_weight: None, pays_fee: Default::default() } } #[test] @@ -891,37 +893,42 @@ mod tests { .balance_factor(10) .base_weight(5) .build() - .execute_with(|| - { - let len = 10; - let pre = ChargeTransactionPayment::::from(0) - .pre_dispatch(&1, CALL, &info_from_weight(5), len) - .unwrap(); - assert_eq!(Balances::free_balance(1), 100 - 5 - 5 - 10); - - assert_ok!( - ChargeTransactionPayment:: - ::post_dispatch(pre, &info_from_weight(5), &default_post_info(), len, &Ok(())) - ); - assert_eq!(Balances::free_balance(1), 100 - 5 - 5 - 10); - assert_eq!(FEE_UNBALANCED_AMOUNT.with(|a| a.borrow().clone()), 5 + 5 + 10); - assert_eq!(TIP_UNBALANCED_AMOUNT.with(|a| a.borrow().clone()), 0); - - FEE_UNBALANCED_AMOUNT.with(|a| *a.borrow_mut() = 0); - - let pre = ChargeTransactionPayment::::from(5 /* tipped */) - .pre_dispatch(&2, 
CALL, &info_from_weight(100), len) - .unwrap(); - assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5); - - assert_ok!( - ChargeTransactionPayment:: - ::post_dispatch(pre, &info_from_weight(100), &post_info_from_weight(50), len, &Ok(())) - ); - assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 50 - 5); - assert_eq!(FEE_UNBALANCED_AMOUNT.with(|a| a.borrow().clone()), 5 + 10 + 50); - assert_eq!(TIP_UNBALANCED_AMOUNT.with(|a| a.borrow().clone()), 5); - }); + .execute_with(|| { + let len = 10; + let pre = ChargeTransactionPayment::::from(0) + .pre_dispatch(&1, CALL, &info_from_weight(5), len) + .unwrap(); + assert_eq!(Balances::free_balance(1), 100 - 5 - 5 - 10); + + assert_ok!(ChargeTransactionPayment::::post_dispatch( + pre, + &info_from_weight(5), + &default_post_info(), + len, + &Ok(()) + )); + assert_eq!(Balances::free_balance(1), 100 - 5 - 5 - 10); + assert_eq!(FEE_UNBALANCED_AMOUNT.with(|a| a.borrow().clone()), 5 + 5 + 10); + assert_eq!(TIP_UNBALANCED_AMOUNT.with(|a| a.borrow().clone()), 0); + + FEE_UNBALANCED_AMOUNT.with(|a| *a.borrow_mut() = 0); + + let pre = ChargeTransactionPayment::::from(5 /* tipped */) + .pre_dispatch(&2, CALL, &info_from_weight(100), len) + .unwrap(); + assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5); + + assert_ok!(ChargeTransactionPayment::::post_dispatch( + pre, + &info_from_weight(100), + &post_info_from_weight(50), + len, + &Ok(()) + )); + assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 50 - 5); + assert_eq!(FEE_UNBALANCED_AMOUNT.with(|a| a.borrow().clone()), 5 + 10 + 50); + assert_eq!(TIP_UNBALANCED_AMOUNT.with(|a| a.borrow().clone()), 5); + }); } #[test] @@ -930,39 +937,38 @@ mod tests { .balance_factor(10) .base_weight(5) .build() - .execute_with(|| - { - let len = 10; - >::put(Multiplier::saturating_from_rational(3, 2)); - - let pre = ChargeTransactionPayment::::from(5 /* tipped */) - .pre_dispatch(&2, CALL, &info_from_weight(100), len) - .unwrap(); - // 5 base fee, 10 byte fee, 3/2 * 100 weight 
fee, 5 tip - assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 150 - 5); - - assert_ok!( - ChargeTransactionPayment:: - ::post_dispatch(pre, &info_from_weight(100), &post_info_from_weight(50), len, &Ok(())) - ); - // 75 (3/2 of the returned 50 units of weight) is refunded - assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 75 - 5); - }); + .execute_with(|| { + let len = 10; + >::put(Multiplier::saturating_from_rational(3, 2)); + + let pre = ChargeTransactionPayment::::from(5 /* tipped */) + .pre_dispatch(&2, CALL, &info_from_weight(100), len) + .unwrap(); + // 5 base fee, 10 byte fee, 3/2 * 100 weight fee, 5 tip + assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 150 - 5); + + assert_ok!(ChargeTransactionPayment::::post_dispatch( + pre, + &info_from_weight(100), + &post_info_from_weight(50), + len, + &Ok(()) + )); + // 75 (3/2 of the returned 50 units of weight) is refunded + assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 75 - 5); + }); } #[test] fn signed_extension_transaction_payment_is_bounded() { - ExtBuilder::default() - .balance_factor(1000) - .byte_fee(0) - .build() - .execute_with(|| - { + ExtBuilder::default().balance_factor(1000).byte_fee(0).build().execute_with(|| { // maximum weight possible - assert_ok!( - ChargeTransactionPayment::::from(0) - .pre_dispatch(&1, CALL, &info_from_weight(Weight::max_value()), 10) - ); + assert_ok!(ChargeTransactionPayment::::from(0).pre_dispatch( + &1, + CALL, + &info_from_weight(Weight::max_value()), + 10 + )); // fee will be proportional to what is the actual maximum weight in the runtime. assert_eq!( Balances::free_balance(&1), @@ -977,36 +983,38 @@ mod tests { .base_weight(100) .balance_factor(0) .build() - .execute_with(|| - { - // 1 ain't have a penny. - assert_eq!(Balances::free_balance(1), 0); - - let len = 100; - - // This is a completely free (and thus wholly insecure/DoS-ridden) transaction. 
- let operational_transaction = DispatchInfo { - weight: 0, - class: DispatchClass::Operational, - pays_fee: Pays::No, - }; - assert_ok!( - ChargeTransactionPayment::::from(0) - .validate(&1, CALL, &operational_transaction , len) - ); - - // like a InsecureFreeNormal - let free_transaction = DispatchInfo { - weight: 0, - class: DispatchClass::Normal, - pays_fee: Pays::Yes, - }; - assert_noop!( - ChargeTransactionPayment::::from(0) - .validate(&1, CALL, &free_transaction , len), - TransactionValidityError::Invalid(InvalidTransaction::Payment), - ); - }); + .execute_with(|| { + // 1 ain't have a penny. + assert_eq!(Balances::free_balance(1), 0); + + let len = 100; + + // This is a completely free (and thus wholly insecure/DoS-ridden) transaction. + let operational_transaction = DispatchInfo { + weight: 0, + class: DispatchClass::Operational, + pays_fee: Pays::No, + }; + assert_ok!(ChargeTransactionPayment::::from(0).validate( + &1, + CALL, + &operational_transaction, + len + )); + + // like a InsecureFreeNormal + let free_transaction = + DispatchInfo { weight: 0, class: DispatchClass::Normal, pays_fee: Pays::Yes }; + assert_noop!( + ChargeTransactionPayment::::from(0).validate( + &1, + CALL, + &free_transaction, + len + ), + TransactionValidityError::Invalid(InvalidTransaction::Payment), + ); + }); } #[test] @@ -1015,25 +1023,22 @@ mod tests { .base_weight(5) .balance_factor(10) .build() - .execute_with(|| - { - // all fees should be x1.5 - >::put(Multiplier::saturating_from_rational(3, 2)); - let len = 10; - - assert_ok!( - ChargeTransactionPayment::::from(10) // tipped - .pre_dispatch(&1, CALL, &info_from_weight(3), len) - ); - assert_eq!( - Balances::free_balance(1), - 100 // original + .execute_with(|| { + // all fees should be x1.5 + >::put(Multiplier::saturating_from_rational(3, 2)); + let len = 10; + + assert_ok!(ChargeTransactionPayment::::from(10) // tipped + .pre_dispatch(&1, CALL, &info_from_weight(3), len)); + assert_eq!( + Balances::free_balance(1), + 
100 // original - 10 // tip - 5 // base - 10 // len - (3 * 3 / 2) // adjusted weight - ); - }) + ); + }) } #[test] @@ -1042,15 +1047,10 @@ mod tests { let origin = 111111; let extra = (); let xt = TestXt::new(call, Some((origin, extra))); - let info = xt.get_dispatch_info(); + let info = xt.get_dispatch_info(); let ext = xt.encode(); let len = ext.len() as u32; - ExtBuilder::default() - .base_weight(5) - .weight_fee(2) - .build() - .execute_with(|| - { + ExtBuilder::default().base_weight(5).weight_fee(2).build().execute_with(|| { // all fees should be x1.5 >::put(Multiplier::saturating_from_rational(3, 2)); @@ -1059,13 +1059,11 @@ mod tests { RuntimeDispatchInfo { weight: info.weight, class: info.class, - partial_fee: - 5 * 2 /* base * weight_fee */ + partial_fee: 5 * 2 /* base * weight_fee */ + len as u64 /* len * 1 */ + info.weight.min(BlockWeights::get().max_block) as u64 * 2 * 3 / 2 /* weight */ }, ); - }); } @@ -1076,37 +1074,36 @@ mod tests { .byte_fee(10) .balance_factor(0) .build() - .execute_with(|| - { - // Next fee multiplier is zero - assert_eq!(>::get(), Multiplier::one()); - - // Tip only, no fees works - let dispatch_info = DispatchInfo { - weight: 0, - class: DispatchClass::Operational, - pays_fee: Pays::No, - }; - assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 10), 10); - // No tip, only base fee works - let dispatch_info = DispatchInfo { - weight: 0, - class: DispatchClass::Operational, - pays_fee: Pays::Yes, - }; - assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 0), 100); - // Tip + base fee works - assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 69), 169); - // Len (byte fee) + base fee works - assert_eq!(Pallet::::compute_fee(42, &dispatch_info, 0), 520); - // Weight fee + base fee works - let dispatch_info = DispatchInfo { - weight: 1000, - class: DispatchClass::Operational, - pays_fee: Pays::Yes, - }; - assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 0), 1100); - }); + .execute_with(|| { + // Next fee multiplier is zero + 
assert_eq!(>::get(), Multiplier::one()); + + // Tip only, no fees works + let dispatch_info = DispatchInfo { + weight: 0, + class: DispatchClass::Operational, + pays_fee: Pays::No, + }; + assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 10), 10); + // No tip, only base fee works + let dispatch_info = DispatchInfo { + weight: 0, + class: DispatchClass::Operational, + pays_fee: Pays::Yes, + }; + assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 0), 100); + // Tip + base fee works + assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 69), 169); + // Len (byte fee) + base fee works + assert_eq!(Pallet::::compute_fee(42, &dispatch_info, 0), 520); + // Weight fee + base fee works + let dispatch_info = DispatchInfo { + weight: 1000, + class: DispatchClass::Operational, + pays_fee: Pays::Yes, + }; + assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 0), 1100); + }); } #[test] @@ -1116,30 +1113,29 @@ mod tests { .byte_fee(10) .balance_factor(0) .build() - .execute_with(|| - { - // Add a next fee multiplier. Fees will be x3/2. - >::put(Multiplier::saturating_from_rational(3, 2)); - // Base fee is unaffected by multiplier - let dispatch_info = DispatchInfo { - weight: 0, - class: DispatchClass::Operational, - pays_fee: Pays::Yes, - }; - assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 0), 100); - - // Everything works together :) - let dispatch_info = DispatchInfo { - weight: 123, - class: DispatchClass::Operational, - pays_fee: Pays::Yes, - }; - // 123 weight, 456 length, 100 base - assert_eq!( - Pallet::::compute_fee(456, &dispatch_info, 789), - 100 + (3 * 123 / 2) + 4560 + 789, - ); - }); + .execute_with(|| { + // Add a next fee multiplier. Fees will be x3/2. 
+ >::put(Multiplier::saturating_from_rational(3, 2)); + // Base fee is unaffected by multiplier + let dispatch_info = DispatchInfo { + weight: 0, + class: DispatchClass::Operational, + pays_fee: Pays::Yes, + }; + assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 0), 100); + + // Everything works together :) + let dispatch_info = DispatchInfo { + weight: 123, + class: DispatchClass::Operational, + pays_fee: Pays::Yes, + }; + // 123 weight, 456 length, 100 base + assert_eq!( + Pallet::::compute_fee(456, &dispatch_info, 789), + 100 + (3 * 123 / 2) + 4560 + 789, + ); + }); } #[test] @@ -1149,31 +1145,30 @@ mod tests { .byte_fee(10) .balance_factor(0) .build() - .execute_with(|| - { - // Add a next fee multiplier. All fees will be x1/2. - >::put(Multiplier::saturating_from_rational(1, 2)); - - // Base fee is unaffected by multiplier. - let dispatch_info = DispatchInfo { - weight: 0, - class: DispatchClass::Operational, - pays_fee: Pays::Yes, - }; - assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 0), 100); - - // Everything works together. - let dispatch_info = DispatchInfo { - weight: 123, - class: DispatchClass::Operational, - pays_fee: Pays::Yes, - }; - // 123 weight, 456 length, 100 base - assert_eq!( - Pallet::::compute_fee(456, &dispatch_info, 789), - 100 + (123 / 2) + 4560 + 789, - ); - }); + .execute_with(|| { + // Add a next fee multiplier. All fees will be x1/2. + >::put(Multiplier::saturating_from_rational(1, 2)); + + // Base fee is unaffected by multiplier. + let dispatch_info = DispatchInfo { + weight: 0, + class: DispatchClass::Operational, + pays_fee: Pays::Yes, + }; + assert_eq!(Pallet::::compute_fee(0, &dispatch_info, 0), 100); + + // Everything works together. 
+ let dispatch_info = DispatchInfo { + weight: 123, + class: DispatchClass::Operational, + pays_fee: Pays::Yes, + }; + // 123 weight, 456 length, 100 base + assert_eq!( + Pallet::::compute_fee(456, &dispatch_info, 789), + 100 + (123 / 2) + 4560 + 789, + ); + }); } #[test] @@ -1183,23 +1178,18 @@ mod tests { .byte_fee(10) .balance_factor(0) .build() - .execute_with(|| - { - // Overflow is handled - let dispatch_info = DispatchInfo { - weight: Weight::max_value(), - class: DispatchClass::Operational, - pays_fee: Pays::Yes, - }; - assert_eq!( - Pallet::::compute_fee( - u32::MAX, - &dispatch_info, + .execute_with(|| { + // Overflow is handled + let dispatch_info = DispatchInfo { + weight: Weight::max_value(), + class: DispatchClass::Operational, + pays_fee: Pays::Yes, + }; + assert_eq!( + Pallet::::compute_fee(u32::MAX, &dispatch_info, u64::MAX), u64::MAX - ), - u64::MAX - ); - }); + ); + }); } #[test] @@ -1208,30 +1198,34 @@ mod tests { .balance_factor(10) .base_weight(5) .build() - .execute_with(|| - { - // So events are emitted - System::set_block_number(10); - let len = 10; - let pre = ChargeTransactionPayment::::from(5 /* tipped */) - .pre_dispatch(&2, CALL, &info_from_weight(100), len) - .unwrap(); - assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5); - - // kill the account between pre and post dispatch - assert_ok!(Balances::transfer(Some(2).into(), 3, Balances::free_balance(2))); - assert_eq!(Balances::free_balance(2), 0); - - assert_ok!( - ChargeTransactionPayment:: - ::post_dispatch(pre, &info_from_weight(100), &post_info_from_weight(50), len, &Ok(())) - ); - assert_eq!(Balances::free_balance(2), 0); - // Transfer Event - System::assert_has_event(Event::Balances(pallet_balances::Event::Transfer(2, 3, 80))); - // Killed Event - System::assert_has_event(Event::System(system::Event::KilledAccount(2))); - }); + .execute_with(|| { + // So events are emitted + System::set_block_number(10); + let len = 10; + let pre = ChargeTransactionPayment::::from(5 
/* tipped */) + .pre_dispatch(&2, CALL, &info_from_weight(100), len) + .unwrap(); + assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5); + + // kill the account between pre and post dispatch + assert_ok!(Balances::transfer(Some(2).into(), 3, Balances::free_balance(2))); + assert_eq!(Balances::free_balance(2), 0); + + assert_ok!(ChargeTransactionPayment::::post_dispatch( + pre, + &info_from_weight(100), + &post_info_from_weight(50), + len, + &Ok(()) + )); + assert_eq!(Balances::free_balance(2), 0); + // Transfer Event + System::assert_has_event(Event::Balances(pallet_balances::Event::Transfer( + 2, 3, 80, + ))); + // Killed Event + System::assert_has_event(Event::System(system::Event::KilledAccount(2))); + }); } #[test] @@ -1240,20 +1234,22 @@ mod tests { .balance_factor(10) .base_weight(5) .build() - .execute_with(|| - { - let len = 10; - let pre = ChargeTransactionPayment::::from(5 /* tipped */) - .pre_dispatch(&2, CALL, &info_from_weight(100), len) - .unwrap(); - assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5); - - assert_ok!( - ChargeTransactionPayment:: - ::post_dispatch(pre, &info_from_weight(100), &post_info_from_weight(101), len, &Ok(())) - ); - assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5); - }); + .execute_with(|| { + let len = 10; + let pre = ChargeTransactionPayment::::from(5 /* tipped */) + .pre_dispatch(&2, CALL, &info_from_weight(100), len) + .unwrap(); + assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5); + + assert_ok!(ChargeTransactionPayment::::post_dispatch( + pre, + &info_from_weight(100), + &post_info_from_weight(101), + len, + &Ok(()) + )); + assert_eq!(Balances::free_balance(2), 200 - 5 - 10 - 100 - 5); + }); } #[test] @@ -1262,29 +1258,28 @@ mod tests { .balance_factor(10) .base_weight(5) .build() - .execute_with(|| - { - // So events are emitted - System::set_block_number(10); - let len = 10; - let dispatch_info = DispatchInfo { - weight: 100, - pays_fee: Pays::No, - class: 
DispatchClass::Normal, - }; - let user = 69; - let pre = ChargeTransactionPayment::::from(0) - .pre_dispatch(&user, CALL, &dispatch_info, len) - .unwrap(); - assert_eq!(Balances::total_balance(&user), 0); - assert_ok!( - ChargeTransactionPayment:: - ::post_dispatch(pre, &dispatch_info, &default_post_info(), len, &Ok(())) - ); - assert_eq!(Balances::total_balance(&user), 0); - // No events for such a scenario - assert_eq!(System::events().len(), 0); - }); + .execute_with(|| { + // So events are emitted + System::set_block_number(10); + let len = 10; + let dispatch_info = + DispatchInfo { weight: 100, pays_fee: Pays::No, class: DispatchClass::Normal }; + let user = 69; + let pre = ChargeTransactionPayment::::from(0) + .pre_dispatch(&user, CALL, &dispatch_info, len) + .unwrap(); + assert_eq!(Balances::total_balance(&user), 0); + assert_ok!(ChargeTransactionPayment::::post_dispatch( + pre, + &dispatch_info, + &default_post_info(), + len, + &Ok(()) + )); + assert_eq!(Balances::total_balance(&user), 0); + // No events for such a scenario + assert_eq!(System::events().len(), 0); + }); } #[test] @@ -1293,32 +1288,36 @@ mod tests { .balance_factor(10) .base_weight(7) .build() - .execute_with(|| - { - let info = info_from_weight(100); - let post_info = post_info_from_weight(33); - let prev_balance = Balances::free_balance(2); - let len = 10; - let tip = 5; - - >::put(Multiplier::saturating_from_rational(5, 4)); - - let pre = ChargeTransactionPayment::::from(tip) - .pre_dispatch(&2, CALL, &info, len) - .unwrap(); - - ChargeTransactionPayment:: - ::post_dispatch(pre, &info, &post_info, len, &Ok(())) + .execute_with(|| { + let info = info_from_weight(100); + let post_info = post_info_from_weight(33); + let prev_balance = Balances::free_balance(2); + let len = 10; + let tip = 5; + + >::put(Multiplier::saturating_from_rational(5, 4)); + + let pre = ChargeTransactionPayment::::from(tip) + .pre_dispatch(&2, CALL, &info, len) + .unwrap(); + + 
ChargeTransactionPayment::::post_dispatch( + pre, + &info, + &post_info, + len, + &Ok(()), + ) .unwrap(); - let refund_based_fee = prev_balance - Balances::free_balance(2); - let actual_fee = Pallet:: - ::compute_actual_fee(len as u32, &info, &post_info, tip); + let refund_based_fee = prev_balance - Balances::free_balance(2); + let actual_fee = + Pallet::::compute_actual_fee(len as u32, &info, &post_info, tip); - // 33 weight, 10 length, 7 base, 5 tip - assert_eq!(actual_fee, 7 + 10 + (33 * 5 / 4) + 5); - assert_eq!(refund_based_fee, actual_fee); - }); + // 33 weight, 10 length, 7 base, 5 tip + assert_eq!(actual_fee, 7 + 10 + (33 * 5 / 4) + 5); + assert_eq!(refund_based_fee, actual_fee); + }); } #[test] @@ -1327,31 +1326,35 @@ mod tests { .balance_factor(10) .base_weight(7) .build() - .execute_with(|| - { - let info = info_from_weight(100); - let post_info = post_info_from_pays(Pays::No); - let prev_balance = Balances::free_balance(2); - let len = 10; - let tip = 5; - - >::put(Multiplier::saturating_from_rational(5, 4)); - - let pre = ChargeTransactionPayment::::from(tip) - .pre_dispatch(&2, CALL, &info, len) + .execute_with(|| { + let info = info_from_weight(100); + let post_info = post_info_from_pays(Pays::No); + let prev_balance = Balances::free_balance(2); + let len = 10; + let tip = 5; + + >::put(Multiplier::saturating_from_rational(5, 4)); + + let pre = ChargeTransactionPayment::::from(tip) + .pre_dispatch(&2, CALL, &info, len) + .unwrap(); + + ChargeTransactionPayment::::post_dispatch( + pre, + &info, + &post_info, + len, + &Ok(()), + ) .unwrap(); - ChargeTransactionPayment:: - ::post_dispatch(pre, &info, &post_info, len, &Ok(())) - .unwrap(); - - let refund_based_fee = prev_balance - Balances::free_balance(2); - let actual_fee = Pallet:: - ::compute_actual_fee(len as u32, &info, &post_info, tip); + let refund_based_fee = prev_balance - Balances::free_balance(2); + let actual_fee = + Pallet::::compute_actual_fee(len as u32, &info, &post_info, tip); - // Only 
5 tip is paid - assert_eq!(actual_fee, 5); - assert_eq!(refund_based_fee, actual_fee); - }); + // Only 5 tip is paid + assert_eq!(actual_fee, 5); + assert_eq!(refund_based_fee, actual_fee); + }); } } diff --git a/frame/transaction-payment/src/payment.rs b/frame/transaction-payment/src/payment.rs index a574dd2a5626c..c350c2bb4698c 100644 --- a/frame/transaction-payment/src/payment.rs +++ b/frame/transaction-payment/src/payment.rs @@ -1,10 +1,12 @@ ///! Traits and default implementation for paying transaction fees. - use crate::Config; use codec::FullCodec; use sp_runtime::{ - traits::{AtLeast32BitUnsigned, DispatchInfoOf, MaybeSerializeDeserialize, PostDispatchInfoOf, Saturating, Zero}, + traits::{ + AtLeast32BitUnsigned, DispatchInfoOf, MaybeSerializeDeserialize, PostDispatchInfoOf, + Saturating, Zero, + }, transaction_validity::InvalidTransaction, }; use sp_std::{fmt::Debug, marker::PhantomData}; @@ -20,7 +22,12 @@ type NegativeImbalanceOf = /// Handle withdrawing, refunding and depositing of transaction fees. pub trait OnChargeTransaction { /// The underlying integer type in which fees are calculated. 
- type Balance: AtLeast32BitUnsigned + FullCodec + Copy + MaybeSerializeDeserialize + Debug + Default + type Balance: AtLeast32BitUnsigned + + FullCodec + + Copy + + MaybeSerializeDeserialize + + Debug + + Default + scale_info::TypeInfo; type LiquidityInfo: Default; @@ -68,10 +75,14 @@ where T: Config, T::TransactionByteFee: Get<::AccountId>>::Balance>, C: Currency<::AccountId>, - C::PositiveImbalance: - Imbalance<::AccountId>>::Balance, Opposite = C::NegativeImbalance>, - C::NegativeImbalance: - Imbalance<::AccountId>>::Balance, Opposite = C::PositiveImbalance>, + C::PositiveImbalance: Imbalance< + ::AccountId>>::Balance, + Opposite = C::NegativeImbalance, + >, + C::NegativeImbalance: Imbalance< + ::AccountId>>::Balance, + Opposite = C::PositiveImbalance, + >, OU: OnUnbalanced>, { type LiquidityInfo = Option>; @@ -88,7 +99,7 @@ where tip: Self::Balance, ) -> Result { if fee.is_zero() { - return Ok(None); + return Ok(None) } let withdraw_reason = if tip.is_zero() { @@ -122,8 +133,8 @@ where // refund to the the account that paid the fees. If this fails, the // account might have dropped below the existential balance. In // that case we don't refund anything. - let refund_imbalance = - C::deposit_into_existing(&who, refund_amount).unwrap_or_else(|_| C::PositiveImbalance::zero()); + let refund_imbalance = C::deposit_into_existing(&who, refund_amount) + .unwrap_or_else(|_| C::PositiveImbalance::zero()); // merge the imbalance caused by paying the fees and refunding parts of it again. let adjusted_paid = paid .offset(refund_imbalance) diff --git a/frame/transaction-payment/src/types.rs b/frame/transaction-payment/src/types.rs index b5d46a9167a75..345bd39718a73 100644 --- a/frame/transaction-payment/src/types.rs +++ b/frame/transaction-payment/src/types.rs @@ -17,14 +17,14 @@ //! Types for transaction-payment RPC. 
-use codec::{Encode, Decode}; +use codec::{Decode, Encode}; #[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; use sp_runtime::traits::{AtLeast32BitUnsigned, Zero}; use sp_std::prelude::*; -use frame_support::weights::{Weight, DispatchClass}; +use frame_support::weights::{DispatchClass, Weight}; /// The base fee and adjusted weight and length fees constitute the _inclusion fee_. #[derive(Encode, Decode, Clone, Eq, PartialEq)] @@ -80,7 +80,11 @@ impl FeeDetails { /// final_fee = inclusion_fee + tip; /// ``` pub fn final_fee(&self) -> Balance { - self.inclusion_fee.as_ref().map(|i| i.inclusion_fee()).unwrap_or_else(|| Zero::zero()).saturating_add(self.tip) + self.inclusion_fee + .as_ref() + .map(|i| i.inclusion_fee()) + .unwrap_or_else(|| Zero::zero()) + .saturating_add(self.tip) } } @@ -105,13 +109,18 @@ pub struct RuntimeDispatchInfo { #[cfg(feature = "std")] mod serde_balance { - use serde::{Deserialize, Serializer, Deserializer}; + use serde::{Deserialize, Deserializer, Serializer}; - pub fn serialize(t: &T, serializer: S) -> Result { + pub fn serialize( + t: &T, + serializer: S, + ) -> Result { serializer.serialize_str(&t.to_string()) } - pub fn deserialize<'de, D: Deserializer<'de>, T: std::str::FromStr>(deserializer: D) -> Result { + pub fn deserialize<'de, D: Deserializer<'de>, T: std::str::FromStr>( + deserializer: D, + ) -> Result { let s = String::deserialize(deserializer)?; s.parse::().map_err(|_| serde::de::Error::custom("Parse from string failed")) } diff --git a/frame/transaction-storage/src/benchmarking.rs b/frame/transaction-storage/src/benchmarking.rs index ffb4d23de119f..64081c3202c05 100644 --- a/frame/transaction-storage/src/benchmarking.rs +++ b/frame/transaction-storage/src/benchmarking.rs @@ -19,17 +19,18 @@ #![cfg(feature = "runtime-benchmarks")] -use sp_std::*; use super::*; -use sp_runtime::traits::{Zero, One, Bounded}; +use frame_benchmarking::{benchmarks, impl_benchmark_test_suite, 
whitelisted_caller}; +use frame_support::traits::{Currency, OnFinalize, OnInitialize}; +use frame_system::{EventRecord, Pallet as System, RawOrigin}; +use sp_runtime::traits::{Bounded, One, Zero}; +use sp_std::*; use sp_transaction_storage_proof::TransactionStorageProof; -use frame_system::{RawOrigin, Pallet as System, EventRecord}; -use frame_benchmarking::{benchmarks, whitelisted_caller, impl_benchmark_test_suite}; -use frame_support::{traits::{Currency, OnFinalize, OnInitialize}}; use crate::Pallet as TransactionStorage; -const PROOF: &[u8] = &hex_literal::hex!(" +const PROOF: &[u8] = &hex_literal::hex!( + " 0104000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 @@ -75,9 +76,11 @@ const PROOF: &[u8] = &hex_literal::hex!(" 0c8e67d9b280f2b31a5707d52b892977acaac84d530bd188544c5f9b80b4f23ac50c8e67d9b280f2b31a5707d52b89297 7acaac84d530bd188544c5f9b80b4f23ac50c8e67d9b280f2b31a5707d52b892977acaac84d530bd188544c5f9b104401 0000 -"); +" +); -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; fn assert_last_event(generic_event: ::Event) { let events = System::::events(); @@ -90,7 +93,9 @@ pub fn run_to_block(n: T::BlockNumber) { while frame_system::Pallet::::block_number() < n { crate::Pallet::::on_finalize(frame_system::Pallet::::block_number()); frame_system::Pallet::::on_finalize(frame_system::Pallet::::block_number()); - frame_system::Pallet::::set_block_number(frame_system::Pallet::::block_number() + One::one()); + frame_system::Pallet::::set_block_number( + frame_system::Pallet::::block_number() + One::one(), + ); frame_system::Pallet::::on_initialize(frame_system::Pallet::::block_number()); 
crate::Pallet::::on_initialize(frame_system::Pallet::::block_number()); } @@ -140,8 +145,4 @@ benchmarks! { } } -impl_benchmark_test_suite!( - TransactionStorage, - crate::mock::new_test_ext(), - crate::mock::Test, -); +impl_benchmark_test_suite!(TransactionStorage, crate::mock::new_test_ext(), crate::mock::Test,); diff --git a/frame/transaction-storage/src/lib.rs b/frame/transaction-storage/src/lib.rs index 6661637d57edc..35ec04a893abb 100644 --- a/frame/transaction-storage/src/lib.rs +++ b/frame/transaction-storage/src/lib.rs @@ -28,24 +28,24 @@ mod mock; #[cfg(test)] mod tests; +use codec::{Decode, Encode}; use frame_support::{ - traits::{ReservableCurrency, Currency, OnUnbalanced}, dispatch::{Dispatchable, GetDispatchInfo}, + traits::{Currency, OnUnbalanced, ReservableCurrency}, }; -use sp_std::prelude::*; -use sp_std::{result}; -use codec::{Encode, Decode}; -use sp_runtime::traits::{Saturating, BlakeTwo256, Hash, Zero, One}; +use sp_runtime::traits::{BlakeTwo256, Hash, One, Saturating, Zero}; +use sp_std::{prelude::*, result}; use sp_transaction_storage_proof::{ - TransactionStorageProof, InherentError, - random_chunk, encode_index, - CHUNK_SIZE, INHERENT_IDENTIFIER, DEFAULT_STORAGE_PERIOD, + encode_index, random_chunk, InherentError, TransactionStorageProof, CHUNK_SIZE, + DEFAULT_STORAGE_PERIOD, INHERENT_IDENTIFIER, }; /// A type alias for the balance type from this pallet's point of view. -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; -type NegativeImbalanceOf = <::Currency as Currency<::AccountId>> - ::NegativeImbalance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; +type NegativeImbalanceOf = <::Currency as Currency< + ::AccountId, +>>::NegativeImbalance; // Re-export pallet items so that they can be accessed from the crate namespace. 
pub use pallet::*; @@ -76,16 +76,19 @@ fn num_chunks(bytes: u32) -> u32 { #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::config] pub trait Config: frame_system::Config { /// The overarching event type. type Event: From> + IsType<::Event>; /// A dispatchable call. - type Call: Parameter + Dispatchable + GetDispatchInfo + From>; + type Call: Parameter + + Dispatchable + + GetDispatchInfo + + From>; /// The currency trait. type Currency: ReservableCurrency; /// Handler for the unbalanced decrease when fees are burned. @@ -145,8 +148,7 @@ pub mod pallet { fn on_finalize(n: T::BlockNumber) { assert!( - >::take() - || { + >::take() || { // Proof is not required for early or empty blocks. let number = >::block_number(); let period = >::get(); @@ -174,12 +176,12 @@ pub mod pallet { /// Additionally contains a DB write. /// # #[pallet::weight(T::WeightInfo::store(data.len() as u32))] - pub fn store( - origin: OriginFor, - data: Vec, - ) -> DispatchResult { + pub fn store(origin: OriginFor, data: Vec) -> DispatchResult { ensure!(data.len() > 0, Error::::EmptyTransaction); - ensure!(data.len() <= MaxTransactionSize::::get() as usize, Error::::TransactionTooLarge); + ensure!( + data.len() <= MaxTransactionSize::::get() as usize, + Error::::TransactionTooLarge + ); let sender = ensure_signed(origin)?; Self::apply_fee(sender, data.len() as u32)?; @@ -189,8 +191,8 @@ pub mod pallet { let root = sp_io::trie::blake2_256_ordered_root(chunks); let content_hash = sp_io::hashing::blake2_256(&data); - let extrinsic_index = >::extrinsic_index().ok_or_else( - || Error::::BadContext)?; + let extrinsic_index = >::extrinsic_index() + .ok_or_else(|| Error::::BadContext)?; sp_io::transaction_index::index(extrinsic_index, data.len() as u32, content_hash); let mut index = 0; @@ -277,11 +279,14 @@ pub mod pallet { let selected_chunk_index = random_chunk(parent_hash.as_ref(), 
total_chunks); let (info, chunk_index) = match >::get(target_number) { Some(infos) => { - let index = match infos.binary_search_by_key(&selected_chunk_index, |info| info.block_chunks) { + let index = match infos + .binary_search_by_key(&selected_chunk_index, |info| info.block_chunks) + { Ok(index) => index, Err(index) => index, }; - let info = infos.get(index).ok_or_else(|| Error::::MissingStateData)?.clone(); + let info = + infos.get(index).ok_or_else(|| Error::::MissingStateData)?.clone(); let chunks = num_chunks(info.size); let prev_chunks = info.block_chunks - chunks; (info, selected_chunk_index - prev_chunks) @@ -317,23 +322,13 @@ pub mod pallet { /// Collection of transaction metadata by block number. #[pallet::storage] #[pallet::getter(fn transaction_roots)] - pub(super) type Transactions = StorageMap< - _, - Blake2_128Concat, - T::BlockNumber, - Vec, - OptionQuery, - >; + pub(super) type Transactions = + StorageMap<_, Blake2_128Concat, T::BlockNumber, Vec, OptionQuery>; /// Count indexed chunks for each block. #[pallet::storage] - pub(super) type ChunkCount = StorageMap< - _, - Blake2_128Concat, - T::BlockNumber, - u32, - ValueQuery, - >; + pub(super) type ChunkCount = + StorageMap<_, Blake2_128Concat, T::BlockNumber, u32, ValueQuery>; #[pallet::storage] #[pallet::getter(fn byte_fee)] @@ -362,13 +357,13 @@ pub mod pallet { // Intermediates #[pallet::storage] - pub(super) type BlockTransactions = StorageValue<_, Vec, ValueQuery>; + pub(super) type BlockTransactions = + StorageValue<_, Vec, ValueQuery>; /// Was the proof checked in this block? 
#[pallet::storage] pub(super) type ProofChecked = StorageValue<_, bool, ValueQuery>; - #[pallet::genesis_config] pub struct GenesisConfig { pub byte_fee: BalanceOf, @@ -409,11 +404,16 @@ pub mod pallet { const INHERENT_IDENTIFIER: InherentIdentifier = INHERENT_IDENTIFIER; fn create_inherent(data: &InherentData) -> Option { - let proof = data.get_data::(&Self::INHERENT_IDENTIFIER).unwrap_or(None); + let proof = data + .get_data::(&Self::INHERENT_IDENTIFIER) + .unwrap_or(None); proof.map(|proof| Call::check_proof { proof }) } - fn check_inherent(_call: &Self::Call, _data: &InherentData) -> result::Result<(), Self::Error> { + fn check_inherent( + _call: &Self::Call, + _data: &InherentData, + ) -> result::Result<(), Self::Error> { Ok(()) } diff --git a/frame/transaction-storage/src/mock.rs b/frame/transaction-storage/src/mock.rs index 344d7b7369533..17a5d8097b671 100644 --- a/frame/transaction-storage/src/mock.rs +++ b/frame/transaction-storage/src/mock.rs @@ -19,13 +19,16 @@ use crate as pallet_transaction_storage; use crate::TransactionStorageProof; -use sp_core::H256; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header, BuildStorage}; use frame_support::{ parameter_types, - traits::{OnInitialize, OnFinalize}, + traits::{OnFinalize, OnInitialize}, +}; +use sp_core::H256; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, + BuildStorage, }; - type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; pub type Block = frame_system::mocking::MockBlock; @@ -104,7 +107,7 @@ pub fn new_test_ext() -> sp_io::TestExternalities { let t = GenesisConfig { system: Default::default(), balances: pallet_balances::GenesisConfig:: { - balances: vec![(1, 1000000000), (2, 100), (3, 100), (4, 100)] + balances: vec![(1, 1000000000), (2, 100), (3, 100), (4, 100)], }, transaction_storage: pallet_transaction_storage::GenesisConfig:: { storage_period: 10, @@ -113,7 +116,9 @@ pub fn new_test_ext() -> sp_io::TestExternalities { 
max_block_transactions: crate::DEFAULT_MAX_BLOCK_TRANSACTIONS, max_transaction_size: crate::DEFAULT_MAX_TRANSACTION_SIZE, }, - }.build_storage().unwrap(); + } + .build_storage() + .unwrap(); t.into() } diff --git a/frame/transaction-storage/src/tests.rs b/frame/transaction-storage/src/tests.rs index 50594f1bce9dc..c443f51ffb50f 100644 --- a/frame/transaction-storage/src/tests.rs +++ b/frame/transaction-storage/src/tests.rs @@ -17,10 +17,9 @@ //! Tests for transction-storage pallet. -use super::*; +use super::{Pallet as TransactionStorage, *}; use crate::mock::*; -use super::Pallet as TransactionStorage; -use frame_support::{assert_ok, assert_noop}; +use frame_support::{assert_noop, assert_ok}; use frame_system::RawOrigin; use sp_transaction_storage_proof::registration::build_proof; @@ -41,9 +40,12 @@ fn discards_data() { )); let proof_provider = || { let block_num = >::block_number(); - if block_num == 11 { + if block_num == 11 { let parent_hash = >::parent_hash(); - Some(build_proof(parent_hash.as_ref(), vec![vec![0u8; 2000], vec![0u8; 2000]]).unwrap()) + Some( + build_proof(parent_hash.as_ref(), vec![vec![0u8; 2000], vec![0u8; 2000]]) + .unwrap(), + ) } else { None } @@ -64,15 +66,16 @@ fn burns_fee() { new_test_ext().execute_with(|| { run_to_block(1, || None); let caller = 1; - assert_noop!(TransactionStorage::::store( + assert_noop!( + TransactionStorage::::store( RawOrigin::Signed(5).into(), vec![0u8; 2000 as usize] ), Error::::InsufficientFunds, ); assert_ok!(TransactionStorage::::store( - RawOrigin::Signed(caller.clone()).into(), - vec![0u8; 2000 as usize] + RawOrigin::Signed(caller.clone()).into(), + vec![0u8; 2000 as usize] )); assert_eq!(Balances::free_balance(1), 1_000_000_000 - 2000 * 2 - 200); }); @@ -89,34 +92,23 @@ fn checks_proof() { )); run_to_block(10, || None); let parent_hash = >::parent_hash(); - let proof = build_proof( - parent_hash.as_ref(), - vec![vec![0u8; MAX_DATA_SIZE as usize]] - ).unwrap(); - 
assert_noop!(TransactionStorage::::check_proof( - Origin::none(), - proof, - ), + let proof = + build_proof(parent_hash.as_ref(), vec![vec![0u8; MAX_DATA_SIZE as usize]]).unwrap(); + assert_noop!( + TransactionStorage::::check_proof(Origin::none(), proof,), Error::::UnexpectedProof, ); run_to_block(11, || None); let parent_hash = >::parent_hash(); - let invalid_proof = build_proof( - parent_hash.as_ref(), - vec![vec![0u8; 1000]] - ).unwrap(); - assert_noop!(TransactionStorage::::check_proof( - Origin::none(), - invalid_proof, - ), - Error::::InvalidProof, + let invalid_proof = build_proof(parent_hash.as_ref(), vec![vec![0u8; 1000]]).unwrap(); + assert_noop!( + TransactionStorage::::check_proof(Origin::none(), invalid_proof,), + Error::::InvalidProof, ); - let proof = build_proof( - parent_hash.as_ref(), - vec![vec![0u8; MAX_DATA_SIZE as usize]] - ).unwrap(); + let proof = + build_proof(parent_hash.as_ref(), vec![vec![0u8; MAX_DATA_SIZE as usize]]).unwrap(); assert_ok!(TransactionStorage::::check_proof(Origin::none(), proof)); }); } @@ -127,20 +119,20 @@ fn renews_data() { run_to_block(1, || None); let caller = 1; assert_ok!(TransactionStorage::::store( - RawOrigin::Signed(caller.clone()).into(), - vec![0u8; 2000] + RawOrigin::Signed(caller.clone()).into(), + vec![0u8; 2000] )); let info = BlockTransactions::::get().last().unwrap().clone(); run_to_block(6, || None); assert_ok!(TransactionStorage::::renew( - RawOrigin::Signed(caller.clone()).into(), - 1, // block - 0, // transaction + RawOrigin::Signed(caller.clone()).into(), + 1, // block + 0, // transaction )); assert_eq!(Balances::free_balance(1), 1_000_000_000 - 4000 * 2 - 200 * 2); let proof_provider = || { let block_num = >::block_number(); - if block_num == 11 || block_num == 16 { + if block_num == 11 || block_num == 16 { let parent_hash = >::parent_hash(); Some(build_proof(parent_hash.as_ref(), vec![vec![0u8; 2000]]).unwrap()) } else { @@ -154,4 +146,3 @@ fn renews_data() { 
assert!(Transactions::::get(6).is_none()); }); } - diff --git a/frame/transaction-storage/src/weights.rs b/frame/transaction-storage/src/weights.rs index 46fc664d977c6..fb1b7f4f95128 100644 --- a/frame/transaction-storage/src/weights.rs +++ b/frame/transaction-storage/src/weights.rs @@ -35,16 +35,18 @@ // --output=./frame/transaction-storage/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_transaction_storage. pub trait WeightInfo { - fn store(l: u32, ) -> Weight; + fn store(l: u32) -> Weight; fn renew() -> Weight; fn check_proof_max() -> Weight; } @@ -52,7 +54,7 @@ pub trait WeightInfo { /// Weights for pallet_transaction_storage using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn store(l: u32, ) -> Weight { + fn store(l: u32) -> Weight { (0 as Weight) // Standard Error: 0 .saturating_add((8_000 as Weight).saturating_mul(l as Weight)) @@ -73,7 +75,7 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn store(l: u32, ) -> Weight { + fn store(l: u32) -> Weight { (0 as Weight) // Standard Error: 0 .saturating_add((8_000 as Weight).saturating_mul(l as Weight)) diff --git a/frame/treasury/src/benchmarking.rs b/frame/treasury/src/benchmarking.rs index cc5db8ce94c72..98fed2c6a536a 100644 --- a/frame/treasury/src/benchmarking.rs +++ b/frame/treasury/src/benchmarking.rs @@ -19,20 +19,18 @@ #![cfg(feature = "runtime-benchmarks")] -use super::{*, Pallet as Treasury}; +use super::{Pallet as Treasury, *}; -use frame_benchmarking::{benchmarks_instance_pallet, account, impl_benchmark_test_suite}; -use 
frame_support::{traits::OnInitialize, ensure}; +use frame_benchmarking::{account, benchmarks_instance_pallet, impl_benchmark_test_suite}; +use frame_support::{ensure, traits::OnInitialize}; use frame_system::RawOrigin; const SEED: u32 = 0; // Create the pre-requisite information needed to create a treasury `propose_spend`. -fn setup_proposal, I: 'static>(u: u32) -> ( - T::AccountId, - BalanceOf, - ::Source, -) { +fn setup_proposal, I: 'static>( + u: u32, +) -> (T::AccountId, BalanceOf, ::Source) { let caller = account("caller", u, SEED); let value: BalanceOf = T::ProposalBondMinimum::get().saturating_mul(100u32.into()); let _ = T::Currency::make_free_balance_be(&caller, value); @@ -43,13 +41,9 @@ fn setup_proposal, I: 'static>(u: u32) -> ( // Create proposals that are approved for use in `on_initialize`. fn create_approved_proposals, I: 'static>(n: u32) -> Result<(), &'static str> { - for i in 0 .. n { + for i in 0..n { let (caller, value, lookup) = setup_proposal::(i); - Treasury::::propose_spend( - RawOrigin::Signed(caller).into(), - value, - lookup - )?; + Treasury::::propose_spend(RawOrigin::Signed(caller).into(), value, lookup)?; let proposal_id = >::get() - 1; Treasury::::approve_proposal(RawOrigin::Root.into(), proposal_id)?; } @@ -102,8 +96,4 @@ benchmarks_instance_pallet! 
{ } } -impl_benchmark_test_suite!( - Treasury, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Treasury, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/treasury/src/lib.rs b/frame/treasury/src/lib.rs index e648a313fa10f..3d2975baddc8a 100644 --- a/frame/treasury/src/lib.rs +++ b/frame/treasury/src/lib.rs @@ -57,38 +57,41 @@ #![cfg_attr(not(feature = "std"), no_std)] +mod benchmarking; #[cfg(test)] mod tests; -mod benchmarking; pub mod weights; -use codec::{Encode, Decode, MaxEncodedLen}; +use codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; -use sp_std::prelude::*; use sp_runtime::{ + traits::{AccountIdConversion, Saturating, StaticLookup, Zero}, Permill, RuntimeDebug, - traits::{ - Zero, StaticLookup, AccountIdConversion, Saturating - } }; +use sp_std::prelude::*; -use frame_support::{print, PalletId}; -use frame_support::traits::{ - Currency, Get, Imbalance, OnUnbalanced, ExistenceRequirement::KeepAlive, - ReservableCurrency, WithdrawReasons +use frame_support::{ + print, + traits::{ + Currency, ExistenceRequirement::KeepAlive, Get, Imbalance, OnUnbalanced, + ReservableCurrency, WithdrawReasons, + }, + weights::Weight, + PalletId, }; -use frame_support::weights::Weight; -pub use weights::WeightInfo; pub use pallet::*; +pub use weights::WeightInfo; pub type BalanceOf = <>::Currency as Currency<::AccountId>>::Balance; -pub type PositiveImbalanceOf = - <>::Currency as Currency<::AccountId>>::PositiveImbalance; -pub type NegativeImbalanceOf = - <>::Currency as Currency<::AccountId>>::NegativeImbalance; +pub type PositiveImbalanceOf = <>::Currency as Currency< + ::AccountId, +>>::PositiveImbalance; +pub type NegativeImbalanceOf = <>::Currency as Currency< + ::AccountId, +>>::NegativeImbalance; /// A trait to allow the Treasury Pallet to spend it's funds for other purposes. 
/// There is an expectation that the implementer of this trait will correctly manage @@ -131,9 +134,9 @@ pub struct Proposal { #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -205,17 +208,14 @@ pub mod pallet { Twox64Concat, ProposalIndex, Proposal>, - OptionQuery + OptionQuery, >; /// Proposal indices that have been approved but not yet awarded. #[pallet::storage] #[pallet::getter(fn approvals)] - pub type Approvals, I: 'static = ()> = StorageValue< - _, - BoundedVec, - ValueQuery - >; + pub type Approvals, I: 'static = ()> = + StorageValue<_, BoundedVec, ValueQuery>; #[pallet::genesis_config] pub struct GenesisConfig; @@ -230,10 +230,12 @@ pub mod pallet { #[cfg(feature = "std")] impl GenesisConfig { /// Direct implementation of `GenesisBuild::assimilate_storage`. - #[deprecated(note = "use ` as GenesisBuild>::assimilate_storage` instead")] + #[deprecated( + note = "use ` as GenesisBuild>::assimilate_storage` instead" + )] pub fn assimilate_storage, I: 'static>( &self, - storage: &mut sp_runtime::Storage + storage: &mut sp_runtime::Storage, ) -> Result<(), String> { >::assimilate_storage(self, storage) } @@ -272,8 +274,8 @@ pub mod pallet { } /// Old name generated by `decl_event`. - #[deprecated(note = "use `Event` instead")] - pub type RawEvent = Event; + #[deprecated(note = "use `Event` instead")] + pub type RawEvent = Event; /// Error for the treasury pallet. 
#[pallet::error] @@ -320,7 +322,7 @@ pub mod pallet { pub fn propose_spend( origin: OriginFor, #[pallet::compact] value: BalanceOf, - beneficiary: ::Source + beneficiary: ::Source, ) -> DispatchResult { let proposer = ensure_signed(origin)?; let beneficiary = T::Lookup::lookup(beneficiary)?; @@ -349,11 +351,12 @@ pub mod pallet { #[pallet::weight((T::WeightInfo::reject_proposal(), DispatchClass::Operational))] pub fn reject_proposal( origin: OriginFor, - #[pallet::compact] proposal_id: ProposalIndex + #[pallet::compact] proposal_id: ProposalIndex, ) -> DispatchResult { T::RejectOrigin::ensure_origin(origin)?; - let proposal = >::take(&proposal_id).ok_or(Error::::InvalidIndex)?; + let proposal = + >::take(&proposal_id).ok_or(Error::::InvalidIndex)?; let value = proposal.bond; let imbalance = T::Currency::slash_reserved(&proposal.proposer, value).0; T::OnSlash::on_unbalanced(imbalance); @@ -375,12 +378,13 @@ pub mod pallet { #[pallet::weight((T::WeightInfo::approve_proposal(T::MaxApprovals::get()), DispatchClass::Operational))] pub fn approve_proposal( origin: OriginFor, - #[pallet::compact] proposal_id: ProposalIndex + #[pallet::compact] proposal_id: ProposalIndex, ) -> DispatchResult { T::ApproveOrigin::ensure_origin(origin)?; ensure!(>::contains_key(proposal_id), Error::::InvalidIndex); - Approvals::::try_append(proposal_id).map_err(|_| Error::::TooManyApprovals)?; + Approvals::::try_append(proposal_id) + .map_err(|_| Error::::TooManyApprovals)?; Ok(()) } } @@ -444,7 +448,12 @@ impl, I: 'static> Pallet { total_weight += T::WeightInfo::on_initialize_proposals(proposals_len); // Call Runtime hooks to external pallet using treasury to compute spend funds. - T::SpendFunds::spend_funds( &mut budget_remaining, &mut imbalance, &mut total_weight, &mut missed_any); + T::SpendFunds::spend_funds( + &mut budget_remaining, + &mut imbalance, + &mut total_weight, + &mut missed_any, + ); if !missed_any { // burn some proportion of the remaining budget if we run a surplus. 
@@ -461,12 +470,9 @@ impl, I: 'static> Pallet { // proof: budget_remaining is account free balance minus ED; // Thus we can't spend more than account free balance minus ED; // Thus account is kept alive; qed; - if let Err(problem) = T::Currency::settle( - &account_id, - imbalance, - WithdrawReasons::TRANSFER, - KeepAlive - ) { + if let Err(problem) = + T::Currency::settle(&account_id, imbalance, WithdrawReasons::TRANSFER, KeepAlive) + { print("Inconsistent state - couldn't settle imbalance for funds spent by treasury"); // Nothing else to do here. drop(problem); diff --git a/frame/treasury/src/tests.rs b/frame/treasury/src/tests.rs index a59491e1f6e9d..cf341d5ad80f5 100644 --- a/frame/treasury/src/tests.rs +++ b/frame/treasury/src/tests.rs @@ -28,12 +28,12 @@ use sp_runtime::{ }; use frame_support::{ - assert_noop, assert_ok, parameter_types, - traits::OnInitialize, PalletId, pallet_prelude::GenesisBuild, + assert_noop, assert_ok, pallet_prelude::GenesisBuild, parameter_types, traits::OnInitialize, + PalletId, }; -use crate as treasury; use super::*; +use crate as treasury; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -119,7 +119,7 @@ impl Config for Test { type ProposalBondMinimum = ProposalBondMinimum; type SpendPeriod = SpendPeriod; type Burn = Burn; - type BurnDestination = (); // Just gets burned. + type BurnDestination = (); // Just gets burned. type WeightInfo = (); type SpendFunds = (); type MaxApprovals = MaxApprovals; @@ -127,10 +127,12 @@ impl Config for Test { pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig::{ + pallet_balances::GenesisConfig:: { // Total issuance will be 200 with treasury account initialized at ED. 
balances: vec![(0, 100), (1, 98), (2, 1)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); GenesisBuild::::assimilate_storage(&crate::GenesisConfig, &mut t).unwrap(); t.into() } @@ -320,9 +322,9 @@ fn treasury_account_doesnt_get_deleted() { #[test] fn inexistent_account_works() { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig::{ - balances: vec![(0, 100), (1, 99), (2, 1)], - }.assimilate_storage(&mut t).unwrap(); + pallet_balances::GenesisConfig:: { balances: vec![(0, 100), (1, 99), (2, 1)] } + .assimilate_storage(&mut t) + .unwrap(); // Treasury genesis config is not build thus treasury account does not exist let mut t: sp_io::TestExternalities = t.into(); @@ -353,10 +355,12 @@ fn inexistent_account_works() { fn genesis_funding_works() { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); let initial_funding = 100; - pallet_balances::GenesisConfig::{ + pallet_balances::GenesisConfig:: { // Total issuance will be 200 with treasury account initialized with 100. balances: vec![(0, 100), (Treasury::account_id(), initial_funding)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); GenesisBuild::::assimilate_storage(&crate::GenesisConfig, &mut t).unwrap(); let mut t: sp_io::TestExternalities = t.into(); @@ -372,13 +376,16 @@ fn max_approvals_limited() { Balances::make_free_balance_be(&Treasury::account_id(), u64::MAX); Balances::make_free_balance_be(&0, u64::MAX); - for _ in 0 .. 
MaxApprovals::get() { + for _ in 0..MaxApprovals::get() { assert_ok!(Treasury::propose_spend(Origin::signed(0), 100, 3)); assert_ok!(Treasury::approve_proposal(Origin::root(), 0)); } // One too many will fail assert_ok!(Treasury::propose_spend(Origin::signed(0), 100, 3)); - assert_noop!(Treasury::approve_proposal(Origin::root(), 0), Error::::TooManyApprovals); + assert_noop!( + Treasury::approve_proposal(Origin::root(), 0), + Error::::TooManyApprovals + ); }); } diff --git a/frame/treasury/src/weights.rs b/frame/treasury/src/weights.rs index d293399e7b480..329bdd34d0c20 100644 --- a/frame/treasury/src/weights.rs +++ b/frame/treasury/src/weights.rs @@ -35,19 +35,21 @@ // --output=./frame/treasury/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_treasury. pub trait WeightInfo { fn propose_spend() -> Weight; fn reject_proposal() -> Weight; - fn approve_proposal(p: u32, ) -> Weight; - fn on_initialize_proposals(p: u32, ) -> Weight; + fn approve_proposal(p: u32) -> Weight; + fn on_initialize_proposals(p: u32) -> Weight; } /// Weights for pallet_treasury using the Substrate node and recommended hardware. 
@@ -63,14 +65,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn approve_proposal(p: u32, ) -> Weight { + fn approve_proposal(p: u32) -> Weight { (14_337_000 as Weight) // Standard Error: 2_000 .saturating_add((116_000 as Weight).saturating_mul(p as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn on_initialize_proposals(p: u32, ) -> Weight { + fn on_initialize_proposals(p: u32) -> Weight { (50_379_000 as Weight) // Standard Error: 18_000 .saturating_add((59_595_000 as Weight).saturating_mul(p as Weight)) @@ -93,14 +95,14 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn approve_proposal(p: u32, ) -> Weight { + fn approve_proposal(p: u32) -> Weight { (14_337_000 as Weight) // Standard Error: 2_000 .saturating_add((116_000 as Weight).saturating_mul(p as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn on_initialize_proposals(p: u32, ) -> Weight { + fn on_initialize_proposals(p: u32) -> Weight { (50_379_000 as Weight) // Standard Error: 18_000 .saturating_add((59_595_000 as Weight).saturating_mul(p as Weight)) diff --git a/frame/try-runtime/src/lib.rs b/frame/try-runtime/src/lib.rs index dcd3a47878237..b2dfdfac6429e 100644 --- a/frame/try-runtime/src/lib.rs +++ b/frame/try-runtime/src/lib.rs @@ -19,8 +19,8 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::prelude::*; use frame_support::weights::Weight; +use sp_std::prelude::*; sp_api::decl_runtime_apis! { /// Runtime api for testing the execution of a runtime upgrade. 
diff --git a/frame/uniques/src/benchmarking.rs b/frame/uniques/src/benchmarking.rs index ca6d656bd5005..20ddbb15d5360 100644 --- a/frame/uniques/src/benchmarking.rs +++ b/frame/uniques/src/benchmarking.rs @@ -19,22 +19,26 @@ #![cfg(feature = "runtime-benchmarks")] -use sp_std::{prelude::*, convert::TryInto}; use super::*; -use sp_runtime::traits::Bounded; -use frame_system::RawOrigin as SystemOrigin; use frame_benchmarking::{ - benchmarks_instance_pallet, account, whitelisted_caller, whitelist_account, impl_benchmark_test_suite + account, benchmarks_instance_pallet, impl_benchmark_test_suite, whitelist_account, + whitelisted_caller, +}; +use frame_support::{ + dispatch::UnfilteredDispatchable, + traits::{EnsureOrigin, Get}, + BoundedVec, }; -use frame_support::{traits::{Get, EnsureOrigin}, dispatch::UnfilteredDispatchable, BoundedVec}; +use frame_system::RawOrigin as SystemOrigin; +use sp_runtime::traits::Bounded; +use sp_std::{convert::TryInto, prelude::*}; use crate::Pallet as Uniques; const SEED: u32 = 0; -fn create_class, I: 'static>() - -> (T::ClassId, T::AccountId, ::Source) -{ +fn create_class, I: 'static>( +) -> (T::ClassId, T::AccountId, ::Source) { let caller: T::AccountId = whitelisted_caller(); let caller_lookup = T::Lookup::unlookup(caller.clone()); let class = Default::default(); @@ -43,13 +47,13 @@ fn create_class, I: 'static>() SystemOrigin::Signed(caller.clone()).into(), class, caller_lookup.clone(), - ).is_ok()); + ) + .is_ok()); (class, caller, caller_lookup) } -fn add_class_metadata, I: 'static>() - -> (T::AccountId, ::Source) -{ +fn add_class_metadata, I: 'static>( +) -> (T::AccountId, ::Source) { let caller = Class::::get(T::ClassId::default()).unwrap().owner; if caller != whitelisted_caller() { whitelist_account!(caller); @@ -60,13 +64,14 @@ fn add_class_metadata, I: 'static>() Default::default(), vec![0; T::StringLimit::get() as usize].try_into().unwrap(), false, - ).is_ok()); + ) + .is_ok()); (caller, caller_lookup) } -fn mint_instance, I: 
'static>(index: u16) - -> (T::InstanceId, T::AccountId, ::Source) -{ +fn mint_instance, I: 'static>( + index: u16, +) -> (T::InstanceId, T::AccountId, ::Source) { let caller = Class::::get(T::ClassId::default()).unwrap().admin; if caller != whitelisted_caller() { whitelist_account!(caller); @@ -78,13 +83,14 @@ fn mint_instance, I: 'static>(index: u16) Default::default(), instance, caller_lookup.clone(), - ).is_ok()); + ) + .is_ok()); (instance, caller, caller_lookup) } -fn add_instance_metadata, I: 'static>(instance: T::InstanceId) - -> (T::AccountId, ::Source) -{ +fn add_instance_metadata, I: 'static>( + instance: T::InstanceId, +) -> (T::AccountId, ::Source) { let caller = Class::::get(T::ClassId::default()).unwrap().owner; if caller != whitelisted_caller() { whitelist_account!(caller); @@ -96,13 +102,14 @@ fn add_instance_metadata, I: 'static>(instance: T::InstanceId) instance, vec![0; T::StringLimit::get() as usize].try_into().unwrap(), false, - ).is_ok()); + ) + .is_ok()); (caller, caller_lookup) } -fn add_instance_attribute, I: 'static>(instance: T::InstanceId) - -> (BoundedVec, T::AccountId, ::Source) -{ +fn add_instance_attribute, I: 'static>( + instance: T::InstanceId, +) -> (BoundedVec, T::AccountId, ::Source) { let caller = Class::::get(T::ClassId::default()).unwrap().owner; if caller != whitelisted_caller() { whitelist_account!(caller); @@ -115,7 +122,8 @@ fn add_instance_attribute, I: 'static>(instance: T::InstanceId) Some(instance), key.clone(), vec![0; T::ValueLimit::get() as usize].try_into().unwrap(), - ).is_ok()); + ) + .is_ok()); (key, caller, caller_lookup) } diff --git a/frame/uniques/src/functions.rs b/frame/uniques/src/functions.rs index 28ff5ac6a7033..5d1e75735752b 100644 --- a/frame/uniques/src/functions.rs +++ b/frame/uniques/src/functions.rs @@ -19,7 +19,7 @@ use super::*; use frame_support::{ensure, traits::Get}; -use sp_runtime::{DispatchResult, DispatchError}; +use sp_runtime::{DispatchError, DispatchResult}; impl, I: 'static> Pallet { 
pub(crate) fn do_transfer( @@ -52,9 +52,7 @@ impl, I: 'static> Pallet { class: T::ClassId, instance: T::InstanceId, owner: T::AccountId, - with_details: impl FnOnce( - &ClassDetailsFor, - ) -> DispatchResult, + with_details: impl FnOnce(&ClassDetailsFor) -> DispatchResult, ) -> DispatchResult { ensure!(!Asset::::contains_key(class, instance), Error::::AlreadyExists); @@ -63,8 +61,8 @@ impl, I: 'static> Pallet { with_details(&class_details)?; - let instances = class_details.instances.checked_add(1) - .ok_or(ArithmeticError::Overflow)?; + let instances = + class_details.instances.checked_add(1).ok_or(ArithmeticError::Overflow)?; class_details.instances = instances; let deposit = match class_details.free_holding { @@ -76,7 +74,7 @@ impl, I: 'static> Pallet { let owner = owner.clone(); Account::::insert((&owner, &class, &instance), ()); - let details = InstanceDetails { owner, approved: None, is_frozen: false, deposit}; + let details = InstanceDetails { owner, approved: None, is_frozen: false, deposit }; Asset::::insert(&class, &instance, details); Ok(()) })?; @@ -88,23 +86,23 @@ impl, I: 'static> Pallet { pub(super) fn do_burn( class: T::ClassId, instance: T::InstanceId, - with_details: impl FnOnce( - &ClassDetailsFor, - &InstanceDetailsFor, - ) -> DispatchResult, + with_details: impl FnOnce(&ClassDetailsFor, &InstanceDetailsFor) -> DispatchResult, ) -> DispatchResult { - let owner = Class::::try_mutate(&class, |maybe_class_details| -> Result { - let class_details = maybe_class_details.as_mut().ok_or(Error::::Unknown)?; - let details = Asset::::get(&class, &instance) - .ok_or(Error::::Unknown)?; - with_details(&class_details, &details)?; - - // Return the deposit. 
- T::Currency::unreserve(&class_details.owner, details.deposit); - class_details.total_deposit.saturating_reduce(details.deposit); - class_details.instances.saturating_dec(); - Ok(details.owner) - })?; + let owner = Class::::try_mutate( + &class, + |maybe_class_details| -> Result { + let class_details = maybe_class_details.as_mut().ok_or(Error::::Unknown)?; + let details = + Asset::::get(&class, &instance).ok_or(Error::::Unknown)?; + with_details(&class_details, &details)?; + + // Return the deposit. + T::Currency::unreserve(&class_details.owner, details.deposit); + class_details.total_deposit.saturating_reduce(details.deposit); + class_details.instances.saturating_dec(); + Ok(details.owner) + }, + )?; Asset::::remove(&class, &instance); Account::::remove((&owner, &class, &instance)); diff --git a/frame/uniques/src/impl_nonfungibles.rs b/frame/uniques/src/impl_nonfungibles.rs index 7113f314697a1..fb1e28d4c77bf 100644 --- a/frame/uniques/src/impl_nonfungibles.rs +++ b/frame/uniques/src/impl_nonfungibles.rs @@ -18,10 +18,12 @@ //! Implementations for `nonfungibles` traits. use super::*; -use sp_std::convert::TryFrom; -use frame_support::traits::tokens::nonfungibles::{Inspect, InspectEnumerable, Mutate, Transfer}; -use frame_support::BoundedSlice; +use frame_support::{ + traits::tokens::nonfungibles::{Inspect, InspectEnumerable, Mutate, Transfer}, + BoundedSlice, +}; use sp_runtime::DispatchResult; +use sp_std::convert::TryFrom; impl, I: 'static> Inspect<::AccountId> for Pallet { type InstanceId = T::InstanceId; @@ -43,9 +45,11 @@ impl, I: 'static> Inspect<::AccountId> for Palle /// When `key` is empty, we return the instance metadata value. /// /// By default this is `None`; no attributes are defined. 
- fn attribute(class: &Self::ClassId, instance: &Self::InstanceId, key: &[u8]) - -> Option> - { + fn attribute( + class: &Self::ClassId, + instance: &Self::InstanceId, + key: &[u8], + ) -> Option> { if key.is_empty() { // We make the empty key map to the instance metadata value. InstanceMetadataOf::::get(class, instance).map(|m| m.data.into()) @@ -60,9 +64,7 @@ impl, I: 'static> Inspect<::AccountId> for Palle /// When `key` is empty, we return the instance metadata value. /// /// By default this is `None`; no attributes are defined. - fn class_attribute(class: &Self::ClassId, key: &[u8]) - -> Option> - { + fn class_attribute(class: &Self::ClassId, key: &[u8]) -> Option> { if key.is_empty() { // We make the empty key map to the instance metadata value. ClassMetadataOf::::get(class).map(|m| m.data.into()) @@ -132,7 +134,10 @@ impl, I: 'static> InspectEnumerable for Pallet /// Returns an iterator of the asset instances of `class` owned by `who`. /// /// NOTE: iterating this list invokes a storage read per item. - fn owned_in_class(class: &Self::ClassId, who: &T::AccountId) -> Box> { + fn owned_in_class( + class: &Self::ClassId, + who: &T::AccountId, + ) -> Box> { Box::new(Account::::iter_key_prefix((who, class))) } } diff --git a/frame/uniques/src/lib.rs b/frame/uniques/src/lib.rs index bf3598692a2e6..10b0fc5c66458 100644 --- a/frame/uniques/src/lib.rs +++ b/frame/uniques/src/lib.rs @@ -27,33 +27,36 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] -pub mod weights; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; #[cfg(test)] pub mod mock; #[cfg(test)] mod tests; +pub mod weights; -mod types; mod functions; mod impl_nonfungibles; +mod types; pub use types::*; -use sp_std::prelude::*; -use sp_runtime::{RuntimeDebug, ArithmeticError, traits::{Zero, StaticLookup, Saturating}}; -use codec::{Encode, Decode, HasCompact}; -use frame_support::traits::{Currency, ReservableCurrency, BalanceStatus::Reserved}; +use codec::{Decode, Encode, HasCompact}; +use frame_support::traits::{BalanceStatus::Reserved, Currency, ReservableCurrency}; use frame_system::Config as SystemConfig; +use sp_runtime::{ + traits::{Saturating, StaticLookup, Zero}, + ArithmeticError, RuntimeDebug, +}; +use sp_std::prelude::*; -pub use weights::WeightInfo; pub use pallet::*; +pub use weights::WeightInfo; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -182,7 +185,7 @@ pub mod pallet { NMapKey>, ), (BoundedVec, DepositBalanceOf), - OptionQuery + OptionQuery, >; #[pallet::event] @@ -414,7 +417,10 @@ pub mod pallet { ensure!(class_details.owner == check_owner, Error::::NoPermission); } ensure!(class_details.instances == witness.instances, Error::::BadWitness); - ensure!(class_details.instance_metadatas == witness.instance_metadatas, Error::::BadWitness); + ensure!( + class_details.instance_metadatas == witness.instance_metadatas, + Error::::BadWitness + ); ensure!(class_details.attributes == witness.attributes, Error::::BadWitness); for (instance, details) in Asset::::drain_prefix(&class) { @@ -485,7 +491,10 @@ pub mod pallet { Self::do_burn(class, instance, |class_details, details| { let is_permitted = class_details.admin == origin || details.owner == origin; ensure!(is_permitted, Error::::NoPermission); - 
ensure!(check_owner.map_or(true, |o| o == details.owner), Error::::WrongOwner); + ensure!( + check_owner.map_or(true, |o| o == details.owner), + Error::::WrongOwner + ); Ok(()) }) } @@ -605,8 +614,8 @@ pub mod pallet { ) -> DispatchResult { let origin = ensure_signed(origin)?; - let mut details = Asset::::get(&class, &instance) - .ok_or(Error::::Unknown)?; + let mut details = + Asset::::get(&class, &instance).ok_or(Error::::Unknown)?; let class_details = Class::::get(&class).ok_or(Error::::Unknown)?; ensure!(class_details.freezer == origin, Error::::NoPermission); @@ -635,8 +644,8 @@ pub mod pallet { ) -> DispatchResult { let origin = ensure_signed(origin)?; - let mut details = Asset::::get(&class, &instance) - .ok_or(Error::::Unknown)?; + let mut details = + Asset::::get(&class, &instance).ok_or(Error::::Unknown)?; let class_details = Class::::get(&class).ok_or(Error::::Unknown)?; ensure!(class_details.admin == origin, Error::::NoPermission); @@ -659,7 +668,7 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::freeze_class())] pub fn freeze_class( origin: OriginFor, - #[pallet::compact] class: T::ClassId + #[pallet::compact] class: T::ClassId, ) -> DispatchResult { let origin = ensure_signed(origin)?; @@ -686,7 +695,7 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::thaw_class())] pub fn thaw_class( origin: OriginFor, - #[pallet::compact] class: T::ClassId + #[pallet::compact] class: T::ClassId, ) -> DispatchResult { let origin = ensure_signed(origin)?; @@ -724,7 +733,7 @@ pub mod pallet { let details = maybe_details.as_mut().ok_or(Error::::Unknown)?; ensure!(&origin == &details.owner, Error::::NoPermission); if details.owner == owner { - return Ok(()); + return Ok(()) } // Move the deposit to the new owner. 
@@ -804,8 +813,8 @@ pub mod pallet { let delegate = T::Lookup::lookup(delegate)?; let class_details = Class::::get(&class).ok_or(Error::::Unknown)?; - let mut details = Asset::::get(&class, &instance) - .ok_or(Error::::Unknown)?; + let mut details = + Asset::::get(&class, &instance).ok_or(Error::::Unknown)?; if let Some(check) = maybe_check { let permitted = &check == &class_details.admin || &check == &details.owner; @@ -849,8 +858,8 @@ pub mod pallet { .or_else(|origin| ensure_signed(origin).map(Some).map_err(DispatchError::from))?; let class_details = Class::::get(&class).ok_or(Error::::Unknown)?; - let mut details = Asset::::get(&class, &instance) - .ok_or(Error::::Unknown)?; + let mut details = + Asset::::get(&class, &instance).ok_or(Error::::Unknown)?; if let Some(check) = maybe_check { let permitted = &check == &class_details.admin || &check == &details.owner; ensure!(permitted, Error::::NoPermission); @@ -1055,8 +1064,7 @@ pub mod pallet { .map(|_| None) .or_else(|origin| ensure_signed(origin).map(Some))?; - let mut class_details = Class::::get(&class) - .ok_or(Error::::Unknown)?; + let mut class_details = Class::::get(&class).ok_or(Error::::Unknown)?; if let Some(check_owner) = &maybe_check_owner { ensure!(check_owner == &class_details.owner, Error::::NoPermission); @@ -1084,11 +1092,7 @@ pub mod pallet { } class_details.total_deposit.saturating_accrue(deposit); - *metadata = Some(InstanceMetadata { - deposit, - data: data.clone(), - is_frozen, - }); + *metadata = Some(InstanceMetadata { deposit, data: data.clone(), is_frozen }); Class::::insert(&class, &class_details); Self::deposit_event(Event::MetadataSet(class, instance, data, is_frozen)); @@ -1119,8 +1123,7 @@ pub mod pallet { .map(|_| None) .or_else(|origin| ensure_signed(origin).map(Some))?; - let mut class_details = Class::::get(&class) - .ok_or(Error::::Unknown)?; + let mut class_details = Class::::get(&class).ok_or(Error::::Unknown)?; if let Some(check_owner) = &maybe_check_owner { 
ensure!(check_owner == &class_details.owner, Error::::NoPermission); } @@ -1195,11 +1198,7 @@ pub mod pallet { Class::::insert(&class, details); - *metadata = Some(ClassMetadata { - deposit, - data: data.clone(), - is_frozen, - }); + *metadata = Some(ClassMetadata { deposit, data: data.clone(), is_frozen }); Self::deposit_event(Event::ClassMetadataSet(class, data, is_frozen)); Ok(()) diff --git a/frame/uniques/src/mock.rs b/frame/uniques/src/mock.rs index 254acd6c419cf..4b80aa73030cf 100644 --- a/frame/uniques/src/mock.rs +++ b/frame/uniques/src/mock.rs @@ -20,9 +20,12 @@ use super::*; use crate as pallet_uniques; +use frame_support::{construct_runtime, parameter_types}; use sp_core::H256; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header}; -use frame_support::{parameter_types, construct_runtime}; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, +}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; diff --git a/frame/uniques/src/tests.rs b/frame/uniques/src/tests.rs index 4673ff71f8ed9..a182fef2eb72c 100644 --- a/frame/uniques/src/tests.rs +++ b/frame/uniques/src/tests.rs @@ -19,9 +19,9 @@ use super::*; use crate::mock::*; -use sp_std::convert::TryInto; -use frame_support::{assert_ok, assert_noop, traits::Currency}; +use frame_support::{assert_noop, assert_ok, traits::Currency}; use pallet_balances::Error as BalancesError; +use sp_std::convert::TryInto; fn assets() -> Vec<(u64, u32, u32)> { let mut r: Vec<_> = Account::::iter().map(|x| x.0).collect(); @@ -31,13 +31,15 @@ fn assets() -> Vec<(u64, u32, u32)> { assert_eq!(r, s); for class in Asset::::iter() .map(|x| x.0) - .scan(None, |s, item| if s.map_or(false, |last| last == item) { + .scan(None, |s, item| { + if s.map_or(false, |last| last == item) { *s = Some(item); Some(None) } else { Some(Some(item)) } - ).filter_map(|item| item) + }) + .filter_map(|item| item) { let details = 
Class::::get(class).unwrap(); let instances = Asset::::iter_prefix(class).count() as u32; @@ -181,7 +183,10 @@ fn origin_guards_should_work() { new_test_ext().execute_with(|| { assert_ok!(Uniques::force_create(Origin::root(), 0, 1, true)); assert_ok!(Uniques::mint(Origin::signed(1), 0, 42, 1)); - assert_noop!(Uniques::transfer_ownership(Origin::signed(2), 0, 2), Error::::NoPermission); + assert_noop!( + Uniques::transfer_ownership(Origin::signed(2), 0, 2), + Error::::NoPermission + ); assert_noop!(Uniques::set_team(Origin::signed(2), 0, 2, 2, 2), Error::::NoPermission); assert_noop!(Uniques::freeze(Origin::signed(2), 0, 42), Error::::NoPermission); assert_noop!(Uniques::thaw(Origin::signed(2), 0, 42), Error::::NoPermission); @@ -205,7 +210,10 @@ fn transfer_owner_should_work() { assert_eq!(Balances::reserved_balance(&1), 0); assert_eq!(Balances::reserved_balance(&2), 2); - assert_noop!(Uniques::transfer_ownership(Origin::signed(1), 0, 1), Error::::NoPermission); + assert_noop!( + Uniques::transfer_ownership(Origin::signed(1), 0, 1), + Error::::NoPermission + ); // Mint and set metadata now and make sure that deposit gets transferred back. 
assert_ok!(Uniques::set_class_metadata(Origin::signed(2), 0, bvec![0u8; 20], false)); @@ -279,7 +287,10 @@ fn set_class_metadata_should_work() { // Clear Metadata assert_ok!(Uniques::set_class_metadata(Origin::root(), 0, bvec![0u8; 15], false)); - assert_noop!(Uniques::clear_class_metadata(Origin::signed(2), 0), Error::::NoPermission); + assert_noop!( + Uniques::clear_class_metadata(Origin::signed(2), 0), + Error::::NoPermission + ); assert_noop!(Uniques::clear_class_metadata(Origin::signed(1), 1), Error::::Unknown); assert_ok!(Uniques::clear_class_metadata(Origin::signed(1), 0)); assert!(!ClassMetadataOf::::contains_key(0)); @@ -330,7 +341,10 @@ fn set_instance_metadata_should_work() { // Clear Metadata assert_ok!(Uniques::set_metadata(Origin::root(), 0, 42, bvec![0u8; 15], false)); - assert_noop!(Uniques::clear_metadata(Origin::signed(2), 0, 42), Error::::NoPermission); + assert_noop!( + Uniques::clear_metadata(Origin::signed(2), 0, 42), + Error::::NoPermission + ); assert_noop!(Uniques::clear_metadata(Origin::signed(1), 1, 42), Error::::Unknown); assert_ok!(Uniques::clear_metadata(Origin::signed(1), 0, 42)); assert!(!InstanceMetadataOf::::contains_key(0, 42)); @@ -347,26 +361,32 @@ fn set_attribute_should_work() { assert_ok!(Uniques::set_attribute(Origin::signed(1), 0, None, bvec![0], bvec![0])); assert_ok!(Uniques::set_attribute(Origin::signed(1), 0, Some(0), bvec![0], bvec![0])); assert_ok!(Uniques::set_attribute(Origin::signed(1), 0, Some(0), bvec![1], bvec![0])); - assert_eq!(attributes(0), vec![ - (None, bvec![0], bvec![0]), - (Some(0), bvec![0], bvec![0]), - (Some(0), bvec![1], bvec![0]), - ]); + assert_eq!( + attributes(0), + vec![ + (None, bvec![0], bvec![0]), + (Some(0), bvec![0], bvec![0]), + (Some(0), bvec![1], bvec![0]), + ] + ); assert_eq!(Balances::reserved_balance(1), 9); assert_ok!(Uniques::set_attribute(Origin::signed(1), 0, None, bvec![0], bvec![0; 10])); - assert_eq!(attributes(0), vec![ - (None, bvec![0], bvec![0; 10]), - (Some(0), bvec![0], 
bvec![0]), - (Some(0), bvec![1], bvec![0]), - ]); + assert_eq!( + attributes(0), + vec![ + (None, bvec![0], bvec![0; 10]), + (Some(0), bvec![0], bvec![0]), + (Some(0), bvec![1], bvec![0]), + ] + ); assert_eq!(Balances::reserved_balance(1), 18); assert_ok!(Uniques::clear_attribute(Origin::signed(1), 0, Some(0), bvec![1])); - assert_eq!(attributes(0), vec![ - (None, bvec![0], bvec![0; 10]), - (Some(0), bvec![0], bvec![0]), - ]); + assert_eq!( + attributes(0), + vec![(None, bvec![0], bvec![0; 10]), (Some(0), bvec![0], bvec![0]),] + ); assert_eq!(Balances::reserved_balance(1), 15); let w = Class::::get(0).unwrap().destroy_witness(); @@ -386,11 +406,14 @@ fn set_attribute_should_respect_freeze() { assert_ok!(Uniques::set_attribute(Origin::signed(1), 0, None, bvec![0], bvec![0])); assert_ok!(Uniques::set_attribute(Origin::signed(1), 0, Some(0), bvec![0], bvec![0])); assert_ok!(Uniques::set_attribute(Origin::signed(1), 0, Some(1), bvec![0], bvec![0])); - assert_eq!(attributes(0), vec![ - (None, bvec![0], bvec![0]), - (Some(0), bvec![0], bvec![0]), - (Some(1), bvec![0], bvec![0]), - ]); + assert_eq!( + attributes(0), + vec![ + (None, bvec![0], bvec![0]), + (Some(0), bvec![0], bvec![0]), + (Some(1), bvec![0], bvec![0]), + ] + ); assert_eq!(Balances::reserved_balance(1), 9); assert_ok!(Uniques::set_class_metadata(Origin::signed(1), 0, bvec![], true)); @@ -406,7 +429,7 @@ fn set_attribute_should_respect_freeze() { } #[test] -fn force_asset_status_should_work(){ +fn force_asset_status_should_work() { new_test_ext().execute_with(|| { Balances::make_free_balance_be(&1, 100); @@ -484,13 +507,28 @@ fn cancel_approval_works() { assert_ok!(Uniques::mint(Origin::signed(1), 0, 42, 2)); assert_ok!(Uniques::approve_transfer(Origin::signed(2), 0, 42, 3)); - assert_noop!(Uniques::cancel_approval(Origin::signed(2), 1, 42, None), Error::::Unknown); - assert_noop!(Uniques::cancel_approval(Origin::signed(2), 0, 43, None), Error::::Unknown); - 
assert_noop!(Uniques::cancel_approval(Origin::signed(3), 0, 42, None), Error::::NoPermission); - assert_noop!(Uniques::cancel_approval(Origin::signed(2), 0, 42, Some(4)), Error::::WrongDelegate); + assert_noop!( + Uniques::cancel_approval(Origin::signed(2), 1, 42, None), + Error::::Unknown + ); + assert_noop!( + Uniques::cancel_approval(Origin::signed(2), 0, 43, None), + Error::::Unknown + ); + assert_noop!( + Uniques::cancel_approval(Origin::signed(3), 0, 42, None), + Error::::NoPermission + ); + assert_noop!( + Uniques::cancel_approval(Origin::signed(2), 0, 42, Some(4)), + Error::::WrongDelegate + ); assert_ok!(Uniques::cancel_approval(Origin::signed(2), 0, 42, Some(3))); - assert_noop!(Uniques::cancel_approval(Origin::signed(2), 0, 42, None), Error::::NoDelegate); + assert_noop!( + Uniques::cancel_approval(Origin::signed(2), 0, 42, None), + Error::::NoDelegate + ); }); } @@ -501,12 +539,24 @@ fn cancel_approval_works_with_admin() { assert_ok!(Uniques::mint(Origin::signed(1), 0, 42, 2)); assert_ok!(Uniques::approve_transfer(Origin::signed(2), 0, 42, 3)); - assert_noop!(Uniques::cancel_approval(Origin::signed(1), 1, 42, None), Error::::Unknown); - assert_noop!(Uniques::cancel_approval(Origin::signed(1), 0, 43, None), Error::::Unknown); - assert_noop!(Uniques::cancel_approval(Origin::signed(1), 0, 42, Some(4)), Error::::WrongDelegate); + assert_noop!( + Uniques::cancel_approval(Origin::signed(1), 1, 42, None), + Error::::Unknown + ); + assert_noop!( + Uniques::cancel_approval(Origin::signed(1), 0, 43, None), + Error::::Unknown + ); + assert_noop!( + Uniques::cancel_approval(Origin::signed(1), 0, 42, Some(4)), + Error::::WrongDelegate + ); assert_ok!(Uniques::cancel_approval(Origin::signed(1), 0, 42, Some(3))); - assert_noop!(Uniques::cancel_approval(Origin::signed(1), 0, 42, None), Error::::NoDelegate); + assert_noop!( + Uniques::cancel_approval(Origin::signed(1), 0, 42, None), + Error::::NoDelegate + ); }); } @@ -519,9 +569,15 @@ fn 
cancel_approval_works_with_force() { assert_ok!(Uniques::approve_transfer(Origin::signed(2), 0, 42, 3)); assert_noop!(Uniques::cancel_approval(Origin::root(), 1, 42, None), Error::::Unknown); assert_noop!(Uniques::cancel_approval(Origin::root(), 0, 43, None), Error::::Unknown); - assert_noop!(Uniques::cancel_approval(Origin::root(), 0, 42, Some(4)), Error::::WrongDelegate); + assert_noop!( + Uniques::cancel_approval(Origin::root(), 0, 42, Some(4)), + Error::::WrongDelegate + ); assert_ok!(Uniques::cancel_approval(Origin::root(), 0, 42, Some(3))); - assert_noop!(Uniques::cancel_approval(Origin::root(), 0, 42, None), Error::::NoDelegate); + assert_noop!( + Uniques::cancel_approval(Origin::root(), 0, 42, None), + Error::::NoDelegate + ); }); } diff --git a/frame/uniques/src/types.rs b/frame/uniques/src/types.rs index 55f206f117d0e..1e4405aa09c84 100644 --- a/frame/uniques/src/types.rs +++ b/frame/uniques/src/types.rs @@ -18,8 +18,8 @@ //! Various basic types for use in the assets pallet. use super::*; -use scale_info::TypeInfo; use frame_support::{traits::Get, BoundedVec}; +use scale_info::TypeInfo; pub(super) type DepositBalanceOf = <>::Currency as Currency<::AccountId>>::Balance; @@ -28,12 +28,8 @@ pub(super) type ClassDetailsFor = pub(super) type InstanceDetailsFor = InstanceDetails<::AccountId, DepositBalanceOf>; - #[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, TypeInfo)] -pub struct ClassDetails< - AccountId, - DepositBalance, -> { +pub struct ClassDetails { /// Can change `owner`, `issuer`, `freezer` and `admin` accounts. pub(super) owner: AccountId, /// Can mint tokens. 
diff --git a/frame/uniques/src/weights.rs b/frame/uniques/src/weights.rs index a2263d6cd3486..029944826b6d9 100644 --- a/frame/uniques/src/weights.rs +++ b/frame/uniques/src/weights.rs @@ -35,22 +35,24 @@ // --output=./frame/uniques/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_uniques. pub trait WeightInfo { fn create() -> Weight; fn force_create() -> Weight; - fn destroy(n: u32, m: u32, a: u32, ) -> Weight; + fn destroy(n: u32, m: u32, a: u32) -> Weight; fn mint() -> Weight; fn burn() -> Weight; fn transfer() -> Weight; - fn redeposit(i: u32, ) -> Weight; + fn redeposit(i: u32) -> Weight; fn freeze() -> Weight; fn thaw() -> Weight; fn freeze_class() -> Weight; @@ -81,7 +83,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn destroy(n: u32, m: u32, a: u32, ) -> Weight { + fn destroy(n: u32, m: u32, a: u32) -> Weight { (0 as Weight) // Standard Error: 13_000 .saturating_add((16_619_000 as Weight).saturating_mul(n as Weight)) @@ -111,7 +113,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn redeposit(i: u32, ) -> Weight { + fn redeposit(i: u32) -> Weight { (0 as Weight) // Standard Error: 13_000 .saturating_add((26_322_000 as Weight).saturating_mul(i as Weight)) @@ -209,7 +211,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn destroy(n: u32, m: u32, a: u32, ) -> Weight { + fn destroy(n: u32, m: u32, a: u32) -> Weight { (0 as Weight) // 
Standard Error: 13_000 .saturating_add((16_619_000 as Weight).saturating_mul(n as Weight)) @@ -239,7 +241,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn redeposit(i: u32, ) -> Weight { + fn redeposit(i: u32) -> Weight { (0 as Weight) // Standard Error: 13_000 .saturating_add((26_322_000 as Weight).saturating_mul(i as Weight)) diff --git a/frame/utility/src/benchmarking.rs b/frame/utility/src/benchmarking.rs index 44019e48c1eb4..ae4eb68661ea7 100644 --- a/frame/utility/src/benchmarking.rs +++ b/frame/utility/src/benchmarking.rs @@ -20,8 +20,8 @@ #![cfg(feature = "runtime-benchmarks")] use super::*; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; const SEED: u32 = 0; @@ -65,8 +65,4 @@ benchmarks! { } } -impl_benchmark_test_suite!( - Pallet, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Pallet, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/utility/src/lib.rs b/frame/utility/src/lib.rs index 96e83a20a6420..2d06649502560 100644 --- a/frame/utility/src/lib.rs +++ b/frame/utility/src/lib.rs @@ -52,36 +52,35 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] -mod tests; mod benchmarking; +mod tests; pub mod weights; -use sp_std::prelude::*; -use codec::{Encode, Decode}; -use sp_core::TypeId; -use sp_io::hashing::blake2_256; +use codec::{Decode, Encode}; use frame_support::{ - transactional, - traits::{OriginTrait, UnfilteredDispatchable, IsSubType}, - weights::{GetDispatchInfo, extract_actual_weight}, dispatch::PostDispatchInfo, + traits::{IsSubType, OriginTrait, UnfilteredDispatchable}, + transactional, + weights::{extract_actual_weight, GetDispatchInfo}, }; +use sp_core::TypeId; +use sp_io::hashing::blake2_256; use sp_runtime::traits::Dispatchable; +use sp_std::prelude::*; pub use weights::WeightInfo; pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet(_); - /// Configuration trait. #[pallet::config] pub trait Config: frame_system::Config { @@ -89,9 +88,11 @@ pub mod pallet { type Event: From + IsType<::Event>; /// The overarching call type. - type Call: Parameter + Dispatchable - + GetDispatchInfo + From> - + UnfilteredDispatchable + type Call: Parameter + + Dispatchable + + GetDispatchInfo + + From> + + UnfilteredDispatchable + IsSubType> + IsType<::Call>; @@ -170,7 +171,7 @@ pub mod pallet { // Take the weight of this function itself into account. let base_weight = T::WeightInfo::batch(index.saturating_add(1) as u32); // Return the actual used weight + base_weight of this call. - return Ok(Some(base_weight + weight).into()); + return Ok(Some(base_weight + weight).into()) } } Self::deposit_event(Event::BatchCompleted); @@ -213,13 +214,16 @@ pub mod pallet { let info = call.get_dispatch_info(); let result = call.dispatch(origin); // Always take into account the base weight of this call. 
- let mut weight = T::WeightInfo::as_derivative().saturating_add(T::DbWeight::get().reads_writes(1, 1)); + let mut weight = T::WeightInfo::as_derivative() + .saturating_add(T::DbWeight::get().reads_writes(1, 1)); // Add the real weight of the dispatch. weight = weight.saturating_add(extract_actual_weight(&result, &info)); - result.map_err(|mut err| { - err.post_info = Some(weight).into(); - err - }).map(|_| Some(weight).into()) + result + .map_err(|mut err| { + err.post_info = Some(weight).into(); + err + }) + .map(|_| Some(weight).into()) } /// Send a batch of dispatch calls and atomically execute them. @@ -291,7 +295,6 @@ pub mod pallet { Ok(Some(base_weight + weight).into()) } } - } /// A pallet identifier. These are per pallet and should be stored in a registry somewhere. diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index aa6bea8a27d36..61890972d3a03 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -21,23 +21,26 @@ use super::*; +use crate as utility; use frame_support::{ - assert_ok, assert_noop, parameter_types, assert_err_ignore_postinfo, decl_module, - weights::{Weight, Pays}, + assert_err_ignore_postinfo, assert_noop, assert_ok, decl_module, dispatch::{DispatchError, DispatchErrorWithPostInfo, Dispatchable}, + parameter_types, storage, traits::Filter, - storage, + weights::{Pays, Weight}, }; use sp_core::H256; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header}; -use crate as utility; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, +}; // example module to test behaviors. pub mod example { use super::*; - use frame_system::ensure_signed; use frame_support::dispatch::{DispatchResultWithPostInfo, WithPostDispatchInfo}; - pub trait Config: frame_system::Config { } + use frame_system::ensure_signed; + pub trait Config: frame_system::Config {} decl_module! 
{ pub struct Module for enum Call where origin: ::Origin { @@ -160,14 +163,15 @@ type ExampleCall = example::Call; type UtilityCall = crate::Call; use frame_system::Call as SystemCall; -use pallet_balances::Call as BalancesCall; -use pallet_balances::Error as BalancesError; +use pallet_balances::{Call as BalancesCall, Error as BalancesError}; pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_balances::GenesisConfig:: { balances: vec![(1, 10), (2, 10), (3, 10), (4, 10), (5, 2)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); let mut ext = sp_io::TestExternalities::new(t); ext.execute_with(|| System::set_block_number(1)); ext @@ -178,11 +182,14 @@ fn as_derivative_works() { new_test_ext().execute_with(|| { let sub_1_0 = Utility::derivative_account_id(1, 0); assert_ok!(Balances::transfer(Origin::signed(1), sub_1_0, 5)); - assert_err_ignore_postinfo!(Utility::as_derivative( - Origin::signed(1), - 1, - Box::new(Call::Balances(BalancesCall::transfer(6, 3))), - ), BalancesError::::InsufficientBalance); + assert_err_ignore_postinfo!( + Utility::as_derivative( + Origin::signed(1), + 1, + Box::new(Call::Balances(BalancesCall::transfer(6, 3))), + ), + BalancesError::::InsufficientBalance + ); assert_ok!(Utility::as_derivative( Origin::signed(1), 0, @@ -256,11 +263,14 @@ fn as_derivative_handles_weight_refund() { #[test] fn as_derivative_filters() { new_test_ext().execute_with(|| { - assert_err_ignore_postinfo!(Utility::as_derivative( - Origin::signed(1), - 1, - Box::new(Call::Balances(pallet_balances::Call::transfer_keep_alive(2, 1))), - ), DispatchError::BadOrigin); + assert_err_ignore_postinfo!( + Utility::as_derivative( + Origin::signed(1), + 1, + Box::new(Call::Balances(pallet_balances::Call::transfer_keep_alive(2, 1))), + ), + DispatchError::BadOrigin + ); }); } @@ -272,11 +282,14 @@ fn batch_with_root_works() { 
assert!(!TestBaseCallFilter::filter(&call)); assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::free_balance(2), 10); - assert_ok!(Utility::batch(Origin::root(), vec![ - Call::Balances(BalancesCall::force_transfer(1, 2, 5)), - Call::Balances(BalancesCall::force_transfer(1, 2, 5)), - call, // Check filters are correctly bypassed - ])); + assert_ok!(Utility::batch( + Origin::root(), + vec![ + Call::Balances(BalancesCall::force_transfer(1, 2, 5)), + Call::Balances(BalancesCall::force_transfer(1, 2, 5)), + call, // Check filters are correctly bypassed + ] + )); assert_eq!(Balances::free_balance(1), 0); assert_eq!(Balances::free_balance(2), 20); assert_eq!(storage::unhashed::get_raw(&k), Some(k)); @@ -288,12 +301,13 @@ fn batch_with_signed_works() { new_test_ext().execute_with(|| { assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::free_balance(2), 10); - assert_ok!( - Utility::batch(Origin::signed(1), vec![ + assert_ok!(Utility::batch( + Origin::signed(1), + vec![ Call::Balances(BalancesCall::transfer(2, 5)), Call::Balances(BalancesCall::transfer(2, 5)) - ]), - ); + ] + ),); assert_eq!(Balances::free_balance(1), 0); assert_eq!(Balances::free_balance(2), 20); }); @@ -302,12 +316,13 @@ fn batch_with_signed_works() { #[test] fn batch_with_signed_filters() { new_test_ext().execute_with(|| { - assert_ok!( - Utility::batch(Origin::signed(1), vec![ - Call::Balances(pallet_balances::Call::transfer_keep_alive(2, 1)) - ]), + assert_ok!(Utility::batch( + Origin::signed(1), + vec![Call::Balances(pallet_balances::Call::transfer_keep_alive(2, 1))] + ),); + System::assert_last_event( + utility::Event::BatchInterrupted(0, DispatchError::BadOrigin).into(), ); - System::assert_last_event(utility::Event::BatchInterrupted(0, DispatchError::BadOrigin).into()); }); } @@ -316,13 +331,14 @@ fn batch_early_exit_works() { new_test_ext().execute_with(|| { assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::free_balance(2), 10); - assert_ok!( - 
Utility::batch(Origin::signed(1), vec![ + assert_ok!(Utility::batch( + Origin::signed(1), + vec![ Call::Balances(BalancesCall::transfer(2, 5)), Call::Balances(BalancesCall::transfer(2, 10)), Call::Balances(BalancesCall::transfer(2, 5)), - ]), - ); + ] + ),); assert_eq!(Balances::free_balance(1), 5); assert_eq!(Balances::free_balance(2), 15); }); @@ -381,7 +397,9 @@ fn batch_handles_weight_refund() { let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); - System::assert_last_event(utility::Event::BatchInterrupted(1, DispatchError::Other("")).into()); + System::assert_last_event( + utility::Event::BatchInterrupted(1, DispatchError::Other("")).into(), + ); // No weight is refunded assert_eq!(extract_actual_weight(&result, &info), info.weight); @@ -394,7 +412,9 @@ fn batch_handles_weight_refund() { let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); - System::assert_last_event(utility::Event::BatchInterrupted(1, DispatchError::Other("")).into()); + System::assert_last_event( + utility::Event::BatchInterrupted(1, DispatchError::Other("")).into(), + ); assert_eq!(extract_actual_weight(&result, &info), info.weight - diff * batch_len); // Partial batch completion @@ -405,7 +425,9 @@ fn batch_handles_weight_refund() { let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); - System::assert_last_event(utility::Event::BatchInterrupted(1, DispatchError::Other("")).into()); + System::assert_last_event( + utility::Event::BatchInterrupted(1, DispatchError::Other("")).into(), + ); assert_eq!( extract_actual_weight(&result, &info), // Real weight is 2 calls at end_weight @@ -419,12 +441,13 @@ fn batch_all_works() { new_test_ext().execute_with(|| { assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::free_balance(2), 10); - assert_ok!( - Utility::batch_all(Origin::signed(1), vec![ + assert_ok!(Utility::batch_all( + 
Origin::signed(1), + vec![ Call::Balances(BalancesCall::transfer(2, 5)), Call::Balances(BalancesCall::transfer(2, 5)) - ]), - ); + ] + ),); assert_eq!(Balances::free_balance(1), 0); assert_eq!(Balances::free_balance(2), 20); }); @@ -439,14 +462,19 @@ fn batch_all_revert() { assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::free_balance(2), 10); assert_noop!( - Utility::batch_all(Origin::signed(1), vec![ - Call::Balances(BalancesCall::transfer(2, 5)), - Call::Balances(BalancesCall::transfer(2, 10)), - Call::Balances(BalancesCall::transfer(2, 5)), - ]), + Utility::batch_all( + Origin::signed(1), + vec![ + Call::Balances(BalancesCall::transfer(2, 5)), + Call::Balances(BalancesCall::transfer(2, 10)), + Call::Balances(BalancesCall::transfer(2, 5)), + ] + ), DispatchErrorWithPostInfo { post_info: PostDispatchInfo { - actual_weight: Some(::WeightInfo::batch_all(2) + info.weight * 2), + actual_weight: Some( + ::WeightInfo::batch_all(2) + info.weight * 2 + ), pays_fee: Pays::Yes }, error: pallet_balances::Error::::InsufficientBalance.into() @@ -525,15 +553,11 @@ fn batch_all_handles_weight_refund() { #[test] fn batch_all_does_not_nest() { new_test_ext().execute_with(|| { - let batch_all = Call::Utility( - UtilityCall::batch_all( - vec![ - Call::Balances(BalancesCall::transfer(2, 1)), - Call::Balances(BalancesCall::transfer(2, 1)), - Call::Balances(BalancesCall::transfer(2, 1)), - ] - ) - ); + let batch_all = Call::Utility(UtilityCall::batch_all(vec![ + Call::Balances(BalancesCall::transfer(2, 1)), + Call::Balances(BalancesCall::transfer(2, 1)), + Call::Balances(BalancesCall::transfer(2, 1)), + ])); let info = batch_all.get_dispatch_info(); @@ -557,7 +581,9 @@ fn batch_all_does_not_nest() { // Batch will end with `Ok`, but does not actually execute as we can see from the event // and balances. 
assert_ok!(Utility::batch_all(Origin::signed(1), vec![batch_nested])); - System::assert_has_event(utility::Event::BatchInterrupted(0, DispatchError::BadOrigin).into()); + System::assert_has_event( + utility::Event::BatchInterrupted(0, DispatchError::BadOrigin).into(), + ); assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::free_balance(2), 10); }); diff --git a/frame/utility/src/weights.rs b/frame/utility/src/weights.rs index 0bab97201008c..0d5fd61cd7c85 100644 --- a/frame/utility/src/weights.rs +++ b/frame/utility/src/weights.rs @@ -35,24 +35,26 @@ // --output=./frame/utility/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_utility. pub trait WeightInfo { - fn batch(c: u32, ) -> Weight; + fn batch(c: u32) -> Weight; fn as_derivative() -> Weight; - fn batch_all(c: u32, ) -> Weight; + fn batch_all(c: u32) -> Weight; } /// Weights for pallet_utility using the Substrate node and recommended hardware. 
pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn batch(c: u32, ) -> Weight { + fn batch(c: u32) -> Weight { (14_618_000 as Weight) // Standard Error: 0 .saturating_add((610_000 as Weight).saturating_mul(c as Weight)) @@ -60,7 +62,7 @@ impl WeightInfo for SubstrateWeight { fn as_derivative() -> Weight { (3_175_000 as Weight) } - fn batch_all(c: u32, ) -> Weight { + fn batch_all(c: u32) -> Weight { (14_561_000 as Weight) // Standard Error: 0 .saturating_add((1_013_000 as Weight).saturating_mul(c as Weight)) @@ -69,7 +71,7 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn batch(c: u32, ) -> Weight { + fn batch(c: u32) -> Weight { (14_618_000 as Weight) // Standard Error: 0 .saturating_add((610_000 as Weight).saturating_mul(c as Weight)) @@ -77,7 +79,7 @@ impl WeightInfo for () { fn as_derivative() -> Weight { (3_175_000 as Weight) } - fn batch_all(c: u32, ) -> Weight { + fn batch_all(c: u32) -> Weight { (14_561_000 as Weight) // Standard Error: 0 .saturating_add((1_013_000 as Weight).saturating_mul(c as Weight)) diff --git a/frame/vesting/src/benchmarking.rs b/frame/vesting/src/benchmarking.rs index 6fd27e1877229..fba4369dba9d3 100644 --- a/frame/vesting/src/benchmarking.rs +++ b/frame/vesting/src/benchmarking.rs @@ -21,15 +21,16 @@ use super::*; -use frame_system::{RawOrigin, Pallet as System}; -use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; +use frame_system::{Pallet as System, RawOrigin}; use sp_runtime::traits::Bounded; use crate::Pallet as Vesting; const SEED: u32 = 0; -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; fn add_locks(who: &T::AccountId, n: u8) { for id in 0..n { diff --git a/frame/vesting/src/lib.rs b/frame/vesting/src/lib.rs index 
25d4d79da5bab..c8e88abf2977d 100644 --- a/frame/vesting/src/lib.rs +++ b/frame/vesting/src/lib.rs @@ -88,14 +88,14 @@ pub struct VestingInfo { pub starting_block: BlockNumber, } -impl< - Balance: AtLeast32BitUnsigned + Copy, - BlockNumber: AtLeast32BitUnsigned + Copy, -> VestingInfo { +impl + VestingInfo +{ /// Amount locked at block `n`. - pub fn locked_at< - BlockNumberToBalance: Convert - >(&self, n: BlockNumber) -> Balance { + pub fn locked_at>( + &self, + n: BlockNumber, + ) -> Balance { // Number of blocks that count toward vesting // Saturating to 0 when n < starting_block let vested_block_count = n.saturating_sub(self.starting_block); @@ -136,12 +136,8 @@ pub mod pallet { /// Information regarding the vesting of a given account. #[pallet::storage] #[pallet::getter(fn vesting)] - pub type Vesting = StorageMap< - _, - Blake2_128Concat, - T::AccountId, - VestingInfo, T::BlockNumber>, - >; + pub type Vesting = + StorageMap<_, Blake2_128Concat, T::AccountId, VestingInfo, T::BlockNumber>>; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -155,9 +151,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - GenesisConfig { - vesting: Default::default(), - } + GenesisConfig { vesting: Default::default() } } } @@ -179,11 +173,7 @@ pub mod pallet { let length_as_balance = T::BlockNumberToBalance::convert(length); let per_block = locked / length_as_balance.max(sp_runtime::traits::One::one()); - Vesting::::insert(who, VestingInfo { - locked: locked, - per_block: per_block, - starting_block: begin - }); + Vesting::::insert(who, VestingInfo { locked, per_block, starting_block: begin }); let reasons = WithdrawReasons::TRANSFER | WithdrawReasons::RESERVE; T::Currency::set_lock(VESTING_ID, who, locked, reasons); } @@ -253,7 +243,10 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::vest_other_locked(MaxLocksOf::::get()) .max(T::WeightInfo::vest_other_unlocked(MaxLocksOf::::get())) )] - pub fn 
vest_other(origin: OriginFor, target: ::Source) -> DispatchResult { + pub fn vest_other( + origin: OriginFor, + target: ::Source, + ) -> DispatchResult { ensure_signed(origin)?; Self::update_lock(T::Lookup::lookup(target)?) } @@ -286,10 +279,20 @@ pub mod pallet { let who = T::Lookup::lookup(target)?; ensure!(!Vesting::::contains_key(&who), Error::::ExistingVestingSchedule); - T::Currency::transfer(&transactor, &who, schedule.locked, ExistenceRequirement::AllowDeath)?; - - Self::add_vesting_schedule(&who, schedule.locked, schedule.per_block, schedule.starting_block) - .expect("user does not have an existing vesting schedule; q.e.d."); + T::Currency::transfer( + &transactor, + &who, + schedule.locked, + ExistenceRequirement::AllowDeath, + )?; + + Self::add_vesting_schedule( + &who, + schedule.locked, + schedule.per_block, + schedule.starting_block, + ) + .expect("user does not have an existing vesting schedule; q.e.d."); Ok(()) } @@ -325,10 +328,20 @@ pub mod pallet { let source = T::Lookup::lookup(source)?; ensure!(!Vesting::::contains_key(&target), Error::::ExistingVestingSchedule); - T::Currency::transfer(&source, &target, schedule.locked, ExistenceRequirement::AllowDeath)?; - - Self::add_vesting_schedule(&target, schedule.locked, schedule.per_block, schedule.starting_block) - .expect("user does not have an existing vesting schedule; q.e.d."); + T::Currency::transfer( + &source, + &target, + schedule.locked, + ExistenceRequirement::AllowDeath, + )?; + + Self::add_vesting_schedule( + &target, + schedule.locked, + schedule.per_block, + schedule.starting_block, + ) + .expect("user does not have an existing vesting schedule; q.e.d."); Ok(()) } @@ -356,8 +369,9 @@ impl Pallet { } } -impl VestingSchedule for Pallet where - BalanceOf: MaybeSerializeDeserialize + Debug +impl VestingSchedule for Pallet +where + BalanceOf: MaybeSerializeDeserialize + Debug, { type Moment = T::BlockNumber; type Currency = T::Currency; @@ -387,17 +401,15 @@ impl VestingSchedule for Pallet 
where who: &T::AccountId, locked: BalanceOf, per_block: BalanceOf, - starting_block: T::BlockNumber + starting_block: T::BlockNumber, ) -> DispatchResult { - if locked.is_zero() { return Ok(()) } + if locked.is_zero() { + return Ok(()) + } if Vesting::::contains_key(who) { Err(Error::::ExistingVestingSchedule)? } - let vesting_schedule = VestingInfo { - locked, - per_block, - starting_block - }; + let vesting_schedule = VestingInfo { locked, per_block, starting_block }; Vesting::::insert(who, vesting_schedule); // it can't fail, but even if somehow it did, we don't really care. let res = Self::update_lock(who.clone()); diff --git a/frame/vesting/src/tests.rs b/frame/vesting/src/tests.rs index 7c59a61081d3b..2ee0e83933cb6 100644 --- a/frame/vesting/src/tests.rs +++ b/frame/vesting/src/tests.rs @@ -24,336 +24,312 @@ use crate::mock::{Balances, ExtBuilder, System, Test, Vesting}; #[test] fn check_vesting_status() { - ExtBuilder::default() - .existential_deposit(256) - .build() - .execute_with(|| { - let user1_free_balance = Balances::free_balance(&1); - let user2_free_balance = Balances::free_balance(&2); - let user12_free_balance = Balances::free_balance(&12); - assert_eq!(user1_free_balance, 256 * 10); // Account 1 has free balance - assert_eq!(user2_free_balance, 256 * 20); // Account 2 has free balance - assert_eq!(user12_free_balance, 256 * 10); // Account 12 has free balance - let user1_vesting_schedule = VestingInfo { - locked: 256 * 5, - per_block: 128, // Vesting over 10 blocks - starting_block: 0, - }; - let user2_vesting_schedule = VestingInfo { - locked: 256 * 20, - per_block: 256, // Vesting over 20 blocks - starting_block: 10, - }; - let user12_vesting_schedule = VestingInfo { - locked: 256 * 5, - per_block: 64, // Vesting over 20 blocks - starting_block: 10, - }; - assert_eq!(Vesting::vesting(&1), Some(user1_vesting_schedule)); // Account 1 has a vesting schedule - assert_eq!(Vesting::vesting(&2), Some(user2_vesting_schedule)); // Account 2 has a 
vesting schedule - assert_eq!(Vesting::vesting(&12), Some(user12_vesting_schedule)); // Account 12 has a vesting schedule - - // Account 1 has only 128 units vested from their illiquid 256 * 5 units at block 1 - assert_eq!(Vesting::vesting_balance(&1), Some(128 * 9)); - // Account 2 has their full balance locked - assert_eq!(Vesting::vesting_balance(&2), Some(user2_free_balance)); - // Account 12 has only their illiquid funds locked - assert_eq!(Vesting::vesting_balance(&12), Some(user12_free_balance - 256 * 5)); - - System::set_block_number(10); - assert_eq!(System::block_number(), 10); - - // Account 1 has fully vested by block 10 - assert_eq!(Vesting::vesting_balance(&1), Some(0)); - // Account 2 has started vesting by block 10 - assert_eq!(Vesting::vesting_balance(&2), Some(user2_free_balance)); - // Account 12 has started vesting by block 10 - assert_eq!(Vesting::vesting_balance(&12), Some(user12_free_balance - 256 * 5)); - - System::set_block_number(30); - assert_eq!(System::block_number(), 30); - - assert_eq!(Vesting::vesting_balance(&1), Some(0)); // Account 1 is still fully vested, and not negative - assert_eq!(Vesting::vesting_balance(&2), Some(0)); // Account 2 has fully vested by block 30 - assert_eq!(Vesting::vesting_balance(&12), Some(0)); // Account 2 has fully vested by block 30 - - }); + ExtBuilder::default().existential_deposit(256).build().execute_with(|| { + let user1_free_balance = Balances::free_balance(&1); + let user2_free_balance = Balances::free_balance(&2); + let user12_free_balance = Balances::free_balance(&12); + assert_eq!(user1_free_balance, 256 * 10); // Account 1 has free balance + assert_eq!(user2_free_balance, 256 * 20); // Account 2 has free balance + assert_eq!(user12_free_balance, 256 * 10); // Account 12 has free balance + let user1_vesting_schedule = VestingInfo { + locked: 256 * 5, + per_block: 128, // Vesting over 10 blocks + starting_block: 0, + }; + let user2_vesting_schedule = VestingInfo { + locked: 256 * 20, + 
per_block: 256, // Vesting over 20 blocks + starting_block: 10, + }; + let user12_vesting_schedule = VestingInfo { + locked: 256 * 5, + per_block: 64, // Vesting over 20 blocks + starting_block: 10, + }; + assert_eq!(Vesting::vesting(&1), Some(user1_vesting_schedule)); // Account 1 has a vesting schedule + assert_eq!(Vesting::vesting(&2), Some(user2_vesting_schedule)); // Account 2 has a vesting schedule + assert_eq!(Vesting::vesting(&12), Some(user12_vesting_schedule)); // Account 12 has a vesting schedule + + // Account 1 has only 128 units vested from their illiquid 256 * 5 units at block 1 + assert_eq!(Vesting::vesting_balance(&1), Some(128 * 9)); + // Account 2 has their full balance locked + assert_eq!(Vesting::vesting_balance(&2), Some(user2_free_balance)); + // Account 12 has only their illiquid funds locked + assert_eq!(Vesting::vesting_balance(&12), Some(user12_free_balance - 256 * 5)); + + System::set_block_number(10); + assert_eq!(System::block_number(), 10); + + // Account 1 has fully vested by block 10 + assert_eq!(Vesting::vesting_balance(&1), Some(0)); + // Account 2 has started vesting by block 10 + assert_eq!(Vesting::vesting_balance(&2), Some(user2_free_balance)); + // Account 12 has started vesting by block 10 + assert_eq!(Vesting::vesting_balance(&12), Some(user12_free_balance - 256 * 5)); + + System::set_block_number(30); + assert_eq!(System::block_number(), 30); + + assert_eq!(Vesting::vesting_balance(&1), Some(0)); // Account 1 is still fully vested, and not negative + assert_eq!(Vesting::vesting_balance(&2), Some(0)); // Account 2 has fully vested by block 30 + assert_eq!(Vesting::vesting_balance(&12), Some(0)); // Account 2 has fully vested by block 30 + }); } #[test] fn unvested_balance_should_not_transfer() { - ExtBuilder::default() - .existential_deposit(10) - .build() - .execute_with(|| { - let user1_free_balance = Balances::free_balance(&1); - assert_eq!(user1_free_balance, 100); // Account 1 has free balance - // Account 1 has only 5 
units vested at block 1 (plus 50 unvested) - assert_eq!(Vesting::vesting_balance(&1), Some(45)); - assert_noop!( - Balances::transfer(Some(1).into(), 2, 56), - pallet_balances::Error::::LiquidityRestrictions, - ); // Account 1 cannot send more than vested amount - }); + ExtBuilder::default().existential_deposit(10).build().execute_with(|| { + let user1_free_balance = Balances::free_balance(&1); + assert_eq!(user1_free_balance, 100); // Account 1 has free balance + // Account 1 has only 5 units vested at block 1 (plus 50 unvested) + assert_eq!(Vesting::vesting_balance(&1), Some(45)); + assert_noop!( + Balances::transfer(Some(1).into(), 2, 56), + pallet_balances::Error::::LiquidityRestrictions, + ); // Account 1 cannot send more than vested amount + }); } #[test] fn vested_balance_should_transfer() { - ExtBuilder::default() - .existential_deposit(10) - .build() - .execute_with(|| { - let user1_free_balance = Balances::free_balance(&1); - assert_eq!(user1_free_balance, 100); // Account 1 has free balance - // Account 1 has only 5 units vested at block 1 (plus 50 unvested) - assert_eq!(Vesting::vesting_balance(&1), Some(45)); - assert_ok!(Vesting::vest(Some(1).into())); - assert_ok!(Balances::transfer(Some(1).into(), 2, 55)); - }); + ExtBuilder::default().existential_deposit(10).build().execute_with(|| { + let user1_free_balance = Balances::free_balance(&1); + assert_eq!(user1_free_balance, 100); // Account 1 has free balance + // Account 1 has only 5 units vested at block 1 (plus 50 unvested) + assert_eq!(Vesting::vesting_balance(&1), Some(45)); + assert_ok!(Vesting::vest(Some(1).into())); + assert_ok!(Balances::transfer(Some(1).into(), 2, 55)); + }); } #[test] fn vested_balance_should_transfer_using_vest_other() { - ExtBuilder::default() - .existential_deposit(10) - .build() - .execute_with(|| { - let user1_free_balance = Balances::free_balance(&1); - assert_eq!(user1_free_balance, 100); // Account 1 has free balance - // Account 1 has only 5 units vested at block 1 
(plus 50 unvested) - assert_eq!(Vesting::vesting_balance(&1), Some(45)); - assert_ok!(Vesting::vest_other(Some(2).into(), 1)); - assert_ok!(Balances::transfer(Some(1).into(), 2, 55)); - }); + ExtBuilder::default().existential_deposit(10).build().execute_with(|| { + let user1_free_balance = Balances::free_balance(&1); + assert_eq!(user1_free_balance, 100); // Account 1 has free balance + // Account 1 has only 5 units vested at block 1 (plus 50 unvested) + assert_eq!(Vesting::vesting_balance(&1), Some(45)); + assert_ok!(Vesting::vest_other(Some(2).into(), 1)); + assert_ok!(Balances::transfer(Some(1).into(), 2, 55)); + }); } #[test] fn extra_balance_should_transfer() { - ExtBuilder::default() - .existential_deposit(10) - .build() - .execute_with(|| { - assert_ok!(Balances::transfer(Some(3).into(), 1, 100)); - assert_ok!(Balances::transfer(Some(3).into(), 2, 100)); - - let user1_free_balance = Balances::free_balance(&1); - assert_eq!(user1_free_balance, 200); // Account 1 has 100 more free balance than normal - - let user2_free_balance = Balances::free_balance(&2); - assert_eq!(user2_free_balance, 300); // Account 2 has 100 more free balance than normal - - // Account 1 has only 5 units vested at block 1 (plus 150 unvested) - assert_eq!(Vesting::vesting_balance(&1), Some(45)); - assert_ok!(Vesting::vest(Some(1).into())); - assert_ok!(Balances::transfer(Some(1).into(), 3, 155)); // Account 1 can send extra units gained - - // Account 2 has no units vested at block 1, but gained 100 - assert_eq!(Vesting::vesting_balance(&2), Some(200)); - assert_ok!(Vesting::vest(Some(2).into())); - assert_ok!(Balances::transfer(Some(2).into(), 3, 100)); // Account 2 can send extra units gained - }); + ExtBuilder::default().existential_deposit(10).build().execute_with(|| { + assert_ok!(Balances::transfer(Some(3).into(), 1, 100)); + assert_ok!(Balances::transfer(Some(3).into(), 2, 100)); + + let user1_free_balance = Balances::free_balance(&1); + assert_eq!(user1_free_balance, 200); // 
Account 1 has 100 more free balance than normal + + let user2_free_balance = Balances::free_balance(&2); + assert_eq!(user2_free_balance, 300); // Account 2 has 100 more free balance than normal + + // Account 1 has only 5 units vested at block 1 (plus 150 unvested) + assert_eq!(Vesting::vesting_balance(&1), Some(45)); + assert_ok!(Vesting::vest(Some(1).into())); + assert_ok!(Balances::transfer(Some(1).into(), 3, 155)); // Account 1 can send extra units gained + + // Account 2 has no units vested at block 1, but gained 100 + assert_eq!(Vesting::vesting_balance(&2), Some(200)); + assert_ok!(Vesting::vest(Some(2).into())); + assert_ok!(Balances::transfer(Some(2).into(), 3, 100)); // Account 2 can send extra units gained + }); } #[test] fn liquid_funds_should_transfer_with_delayed_vesting() { - ExtBuilder::default() - .existential_deposit(256) - .build() - .execute_with(|| { - let user12_free_balance = Balances::free_balance(&12); - - assert_eq!(user12_free_balance, 2560); // Account 12 has free balance - // Account 12 has liquid funds - assert_eq!(Vesting::vesting_balance(&12), Some(user12_free_balance - 256 * 5)); - - // Account 12 has delayed vesting - let user12_vesting_schedule = VestingInfo { - locked: 256 * 5, - per_block: 64, // Vesting over 20 blocks - starting_block: 10, - }; - assert_eq!(Vesting::vesting(&12), Some(user12_vesting_schedule)); - - // Account 12 can still send liquid funds - assert_ok!(Balances::transfer(Some(12).into(), 3, 256 * 5)); - }); + ExtBuilder::default().existential_deposit(256).build().execute_with(|| { + let user12_free_balance = Balances::free_balance(&12); + + assert_eq!(user12_free_balance, 2560); // Account 12 has free balance + // Account 12 has liquid funds + assert_eq!(Vesting::vesting_balance(&12), Some(user12_free_balance - 256 * 5)); + + // Account 12 has delayed vesting + let user12_vesting_schedule = VestingInfo { + locked: 256 * 5, + per_block: 64, // Vesting over 20 blocks + starting_block: 10, + }; + 
assert_eq!(Vesting::vesting(&12), Some(user12_vesting_schedule)); + + // Account 12 can still send liquid funds + assert_ok!(Balances::transfer(Some(12).into(), 3, 256 * 5)); + }); } #[test] fn vested_transfer_works() { - ExtBuilder::default() - .existential_deposit(256) - .build() - .execute_with(|| { - let user3_free_balance = Balances::free_balance(&3); - let user4_free_balance = Balances::free_balance(&4); - assert_eq!(user3_free_balance, 256 * 30); - assert_eq!(user4_free_balance, 256 * 40); - // Account 4 should not have any vesting yet. - assert_eq!(Vesting::vesting(&4), None); - // Make the schedule for the new transfer. - let new_vesting_schedule = VestingInfo { - locked: 256 * 5, - per_block: 64, // Vesting over 20 blocks - starting_block: 10, - }; - assert_ok!(Vesting::vested_transfer(Some(3).into(), 4, new_vesting_schedule)); - // Now account 4 should have vesting. - assert_eq!(Vesting::vesting(&4), Some(new_vesting_schedule)); - // Ensure the transfer happened correctly. - let user3_free_balance_updated = Balances::free_balance(&3); - assert_eq!(user3_free_balance_updated, 256 * 25); - let user4_free_balance_updated = Balances::free_balance(&4); - assert_eq!(user4_free_balance_updated, 256 * 45); - // Account 4 has 5 * 256 locked. - assert_eq!(Vesting::vesting_balance(&4), Some(256 * 5)); - - System::set_block_number(20); - assert_eq!(System::block_number(), 20); - - // Account 4 has 5 * 64 units vested by block 20. - assert_eq!(Vesting::vesting_balance(&4), Some(10 * 64)); - - System::set_block_number(30); - assert_eq!(System::block_number(), 30); - - // Account 4 has fully vested. 
- assert_eq!(Vesting::vesting_balance(&4), Some(0)); - }); + ExtBuilder::default().existential_deposit(256).build().execute_with(|| { + let user3_free_balance = Balances::free_balance(&3); + let user4_free_balance = Balances::free_balance(&4); + assert_eq!(user3_free_balance, 256 * 30); + assert_eq!(user4_free_balance, 256 * 40); + // Account 4 should not have any vesting yet. + assert_eq!(Vesting::vesting(&4), None); + // Make the schedule for the new transfer. + let new_vesting_schedule = VestingInfo { + locked: 256 * 5, + per_block: 64, // Vesting over 20 blocks + starting_block: 10, + }; + assert_ok!(Vesting::vested_transfer(Some(3).into(), 4, new_vesting_schedule)); + // Now account 4 should have vesting. + assert_eq!(Vesting::vesting(&4), Some(new_vesting_schedule)); + // Ensure the transfer happened correctly. + let user3_free_balance_updated = Balances::free_balance(&3); + assert_eq!(user3_free_balance_updated, 256 * 25); + let user4_free_balance_updated = Balances::free_balance(&4); + assert_eq!(user4_free_balance_updated, 256 * 45); + // Account 4 has 5 * 256 locked. + assert_eq!(Vesting::vesting_balance(&4), Some(256 * 5)); + + System::set_block_number(20); + assert_eq!(System::block_number(), 20); + + // Account 4 has 5 * 64 units vested by block 20. + assert_eq!(Vesting::vesting_balance(&4), Some(10 * 64)); + + System::set_block_number(30); + assert_eq!(System::block_number(), 30); + + // Account 4 has fully vested. + assert_eq!(Vesting::vesting_balance(&4), Some(0)); + }); } #[test] fn vested_transfer_correctly_fails() { - ExtBuilder::default() - .existential_deposit(256) - .build() - .execute_with(|| { - let user2_free_balance = Balances::free_balance(&2); - let user4_free_balance = Balances::free_balance(&4); - assert_eq!(user2_free_balance, 256 * 20); - assert_eq!(user4_free_balance, 256 * 40); - // Account 2 should already have a vesting schedule. 
- let user2_vesting_schedule = VestingInfo { - locked: 256 * 20, - per_block: 256, // Vesting over 20 blocks - starting_block: 10, - }; - assert_eq!(Vesting::vesting(&2), Some(user2_vesting_schedule)); - - // The vesting schedule we will try to create, fails due to pre-existence of schedule. - let new_vesting_schedule = VestingInfo { - locked: 256 * 5, - per_block: 64, // Vesting over 20 blocks - starting_block: 10, - }; - assert_noop!( - Vesting::vested_transfer(Some(4).into(), 2, new_vesting_schedule), - Error::::ExistingVestingSchedule, - ); - - // Fails due to too low transfer amount. - let new_vesting_schedule_too_low = VestingInfo { - locked: 256 * 1, - per_block: 64, - starting_block: 10, - }; - assert_noop!( - Vesting::vested_transfer(Some(3).into(), 4, new_vesting_schedule_too_low), - Error::::AmountLow, - ); - - // Verify no currency transfer happened. - assert_eq!(user2_free_balance, 256 * 20); - assert_eq!(user4_free_balance, 256 * 40); - }); + ExtBuilder::default().existential_deposit(256).build().execute_with(|| { + let user2_free_balance = Balances::free_balance(&2); + let user4_free_balance = Balances::free_balance(&4); + assert_eq!(user2_free_balance, 256 * 20); + assert_eq!(user4_free_balance, 256 * 40); + // Account 2 should already have a vesting schedule. + let user2_vesting_schedule = VestingInfo { + locked: 256 * 20, + per_block: 256, // Vesting over 20 blocks + starting_block: 10, + }; + assert_eq!(Vesting::vesting(&2), Some(user2_vesting_schedule)); + + // The vesting schedule we will try to create, fails due to pre-existence of schedule. + let new_vesting_schedule = VestingInfo { + locked: 256 * 5, + per_block: 64, // Vesting over 20 blocks + starting_block: 10, + }; + assert_noop!( + Vesting::vested_transfer(Some(4).into(), 2, new_vesting_schedule), + Error::::ExistingVestingSchedule, + ); + + // Fails due to too low transfer amount. 
+ let new_vesting_schedule_too_low = + VestingInfo { locked: 256 * 1, per_block: 64, starting_block: 10 }; + assert_noop!( + Vesting::vested_transfer(Some(3).into(), 4, new_vesting_schedule_too_low), + Error::::AmountLow, + ); + + // Verify no currency transfer happened. + assert_eq!(user2_free_balance, 256 * 20); + assert_eq!(user4_free_balance, 256 * 40); + }); } #[test] fn force_vested_transfer_works() { - ExtBuilder::default() - .existential_deposit(256) - .build() - .execute_with(|| { - let user3_free_balance = Balances::free_balance(&3); - let user4_free_balance = Balances::free_balance(&4); - assert_eq!(user3_free_balance, 256 * 30); - assert_eq!(user4_free_balance, 256 * 40); - // Account 4 should not have any vesting yet. - assert_eq!(Vesting::vesting(&4), None); - // Make the schedule for the new transfer. - let new_vesting_schedule = VestingInfo { - locked: 256 * 5, - per_block: 64, // Vesting over 20 blocks - starting_block: 10, - }; - assert_noop!(Vesting::force_vested_transfer(Some(4).into(), 3, 4, new_vesting_schedule), BadOrigin); - assert_ok!(Vesting::force_vested_transfer(RawOrigin::Root.into(), 3, 4, new_vesting_schedule)); - // Now account 4 should have vesting. - assert_eq!(Vesting::vesting(&4), Some(new_vesting_schedule)); - // Ensure the transfer happened correctly. - let user3_free_balance_updated = Balances::free_balance(&3); - assert_eq!(user3_free_balance_updated, 256 * 25); - let user4_free_balance_updated = Balances::free_balance(&4); - assert_eq!(user4_free_balance_updated, 256 * 45); - // Account 4 has 5 * 256 locked. - assert_eq!(Vesting::vesting_balance(&4), Some(256 * 5)); - - System::set_block_number(20); - assert_eq!(System::block_number(), 20); - - // Account 4 has 5 * 64 units vested by block 20. - assert_eq!(Vesting::vesting_balance(&4), Some(10 * 64)); - - System::set_block_number(30); - assert_eq!(System::block_number(), 30); - - // Account 4 has fully vested. 
- assert_eq!(Vesting::vesting_balance(&4), Some(0)); - }); + ExtBuilder::default().existential_deposit(256).build().execute_with(|| { + let user3_free_balance = Balances::free_balance(&3); + let user4_free_balance = Balances::free_balance(&4); + assert_eq!(user3_free_balance, 256 * 30); + assert_eq!(user4_free_balance, 256 * 40); + // Account 4 should not have any vesting yet. + assert_eq!(Vesting::vesting(&4), None); + // Make the schedule for the new transfer. + let new_vesting_schedule = VestingInfo { + locked: 256 * 5, + per_block: 64, // Vesting over 20 blocks + starting_block: 10, + }; + assert_noop!( + Vesting::force_vested_transfer(Some(4).into(), 3, 4, new_vesting_schedule), + BadOrigin + ); + assert_ok!(Vesting::force_vested_transfer( + RawOrigin::Root.into(), + 3, + 4, + new_vesting_schedule + )); + // Now account 4 should have vesting. + assert_eq!(Vesting::vesting(&4), Some(new_vesting_schedule)); + // Ensure the transfer happened correctly. + let user3_free_balance_updated = Balances::free_balance(&3); + assert_eq!(user3_free_balance_updated, 256 * 25); + let user4_free_balance_updated = Balances::free_balance(&4); + assert_eq!(user4_free_balance_updated, 256 * 45); + // Account 4 has 5 * 256 locked. + assert_eq!(Vesting::vesting_balance(&4), Some(256 * 5)); + + System::set_block_number(20); + assert_eq!(System::block_number(), 20); + + // Account 4 has 5 * 64 units vested by block 20. + assert_eq!(Vesting::vesting_balance(&4), Some(10 * 64)); + + System::set_block_number(30); + assert_eq!(System::block_number(), 30); + + // Account 4 has fully vested. 
+ assert_eq!(Vesting::vesting_balance(&4), Some(0)); + }); } #[test] fn force_vested_transfer_correctly_fails() { - ExtBuilder::default() - .existential_deposit(256) - .build() - .execute_with(|| { - let user2_free_balance = Balances::free_balance(&2); - let user4_free_balance = Balances::free_balance(&4); - assert_eq!(user2_free_balance, 256 * 20); - assert_eq!(user4_free_balance, 256 * 40); - // Account 2 should already have a vesting schedule. - let user2_vesting_schedule = VestingInfo { - locked: 256 * 20, - per_block: 256, // Vesting over 20 blocks - starting_block: 10, - }; - assert_eq!(Vesting::vesting(&2), Some(user2_vesting_schedule)); - - // The vesting schedule we will try to create, fails due to pre-existence of schedule. - let new_vesting_schedule = VestingInfo { - locked: 256 * 5, - per_block: 64, // Vesting over 20 blocks - starting_block: 10, - }; - assert_noop!( - Vesting::force_vested_transfer(RawOrigin::Root.into(), 4, 2, new_vesting_schedule), - Error::::ExistingVestingSchedule, - ); - - // Fails due to too low transfer amount. - let new_vesting_schedule_too_low = VestingInfo { - locked: 256 * 1, - per_block: 64, - starting_block: 10, - }; - assert_noop!( - Vesting::force_vested_transfer(RawOrigin::Root.into(), 3, 4, new_vesting_schedule_too_low), - Error::::AmountLow, - ); - - // Verify no currency transfer happened. - assert_eq!(user2_free_balance, 256 * 20); - assert_eq!(user4_free_balance, 256 * 40); - }); + ExtBuilder::default().existential_deposit(256).build().execute_with(|| { + let user2_free_balance = Balances::free_balance(&2); + let user4_free_balance = Balances::free_balance(&4); + assert_eq!(user2_free_balance, 256 * 20); + assert_eq!(user4_free_balance, 256 * 40); + // Account 2 should already have a vesting schedule. 
+ let user2_vesting_schedule = VestingInfo { + locked: 256 * 20, + per_block: 256, // Vesting over 20 blocks + starting_block: 10, + }; + assert_eq!(Vesting::vesting(&2), Some(user2_vesting_schedule)); + + // The vesting schedule we will try to create, fails due to pre-existence of schedule. + let new_vesting_schedule = VestingInfo { + locked: 256 * 5, + per_block: 64, // Vesting over 20 blocks + starting_block: 10, + }; + assert_noop!( + Vesting::force_vested_transfer(RawOrigin::Root.into(), 4, 2, new_vesting_schedule), + Error::::ExistingVestingSchedule, + ); + + // Fails due to too low transfer amount. + let new_vesting_schedule_too_low = + VestingInfo { locked: 256 * 1, per_block: 64, starting_block: 10 }; + assert_noop!( + Vesting::force_vested_transfer( + RawOrigin::Root.into(), + 3, + 4, + new_vesting_schedule_too_low + ), + Error::::AmountLow, + ); + + // Verify no currency transfer happened. + assert_eq!(user2_free_balance, 256 * 20); + assert_eq!(user4_free_balance, 256 * 40); + }); } diff --git a/frame/vesting/src/weights.rs b/frame/vesting/src/weights.rs index 053453d757f38..55057e0009b35 100644 --- a/frame/vesting/src/weights.rs +++ b/frame/vesting/src/weights.rs @@ -35,62 +35,64 @@ // --output=./frame/vesting/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_vesting. 
pub trait WeightInfo { - fn vest_locked(l: u32, ) -> Weight; - fn vest_unlocked(l: u32, ) -> Weight; - fn vest_other_locked(l: u32, ) -> Weight; - fn vest_other_unlocked(l: u32, ) -> Weight; - fn vested_transfer(l: u32, ) -> Weight; - fn force_vested_transfer(l: u32, ) -> Weight; + fn vest_locked(l: u32) -> Weight; + fn vest_unlocked(l: u32) -> Weight; + fn vest_other_locked(l: u32) -> Weight; + fn vest_other_unlocked(l: u32) -> Weight; + fn vested_transfer(l: u32) -> Weight; + fn force_vested_transfer(l: u32) -> Weight; } /// Weights for pallet_vesting using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn vest_locked(l: u32, ) -> Weight { + fn vest_locked(l: u32) -> Weight { (42_905_000 as Weight) // Standard Error: 13_000 .saturating_add((232_000 as Weight).saturating_mul(l as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn vest_unlocked(l: u32, ) -> Weight { + fn vest_unlocked(l: u32) -> Weight { (45_650_000 as Weight) // Standard Error: 12_000 .saturating_add((215_000 as Weight).saturating_mul(l as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn vest_other_locked(l: u32, ) -> Weight { + fn vest_other_locked(l: u32) -> Weight { (42_273_000 as Weight) // Standard Error: 15_000 .saturating_add((246_000 as Weight).saturating_mul(l as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn vest_other_unlocked(l: u32, ) -> Weight { + fn vest_other_unlocked(l: u32) -> Weight { (45_324_000 as Weight) // Standard Error: 12_000 .saturating_add((214_000 as Weight).saturating_mul(l as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn vested_transfer(l: u32, ) -> Weight { + fn 
vested_transfer(l: u32) -> Weight { (96_661_000 as Weight) // Standard Error: 10_000 .saturating_add((211_000 as Weight).saturating_mul(l as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn force_vested_transfer(l: u32, ) -> Weight { + fn force_vested_transfer(l: u32) -> Weight { (98_812_000 as Weight) // Standard Error: 13_000 .saturating_add((139_000 as Weight).saturating_mul(l as Weight)) @@ -101,42 +103,42 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn vest_locked(l: u32, ) -> Weight { + fn vest_locked(l: u32) -> Weight { (42_905_000 as Weight) // Standard Error: 13_000 .saturating_add((232_000 as Weight).saturating_mul(l as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn vest_unlocked(l: u32, ) -> Weight { + fn vest_unlocked(l: u32) -> Weight { (45_650_000 as Weight) // Standard Error: 12_000 .saturating_add((215_000 as Weight).saturating_mul(l as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn vest_other_locked(l: u32, ) -> Weight { + fn vest_other_locked(l: u32) -> Weight { (42_273_000 as Weight) // Standard Error: 15_000 .saturating_add((246_000 as Weight).saturating_mul(l as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn vest_other_unlocked(l: u32, ) -> Weight { + fn vest_other_unlocked(l: u32) -> Weight { (45_324_000 as Weight) // Standard Error: 12_000 .saturating_add((214_000 as Weight).saturating_mul(l as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn vested_transfer(l: u32, ) -> Weight { + fn vested_transfer(l: u32) -> Weight { (96_661_000 as Weight) // Standard Error: 10_000 
.saturating_add((211_000 as Weight).saturating_mul(l as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn force_vested_transfer(l: u32, ) -> Weight { + fn force_vested_transfer(l: u32) -> Weight { (98_812_000 as Weight) // Standard Error: 13_000 .saturating_add((139_000 as Weight).saturating_mul(l as Weight)) diff --git a/primitives/api/proc-macro/src/decl_runtime_apis.rs b/primitives/api/proc-macro/src/decl_runtime_apis.rs index 4a8b49049e760..bae7a40f86390 100644 --- a/primitives/api/proc-macro/src/decl_runtime_apis.rs +++ b/primitives/api/proc-macro/src/decl_runtime_apis.rs @@ -16,21 +16,25 @@ // limitations under the License. use crate::utils::{ - generate_crate_access, generate_hidden_includes, generate_runtime_mod_name_for_trait, - fold_fn_decl_for_client_side, extract_parameter_names_types_and_borrows, - generate_native_call_generator_fn_name, return_type_extract_type, - generate_method_runtime_api_impl_name, generate_call_api_at_fn_name, prefix_function_with_trait, - replace_wild_card_parameter_names, AllowSelfRefInParameters, + extract_parameter_names_types_and_borrows, fold_fn_decl_for_client_side, + generate_call_api_at_fn_name, generate_crate_access, generate_hidden_includes, + generate_method_runtime_api_impl_name, generate_native_call_generator_fn_name, + generate_runtime_mod_name_for_trait, prefix_function_with_trait, + replace_wild_card_parameter_names, return_type_extract_type, AllowSelfRefInParameters, }; -use proc_macro2::{TokenStream, Span}; +use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{ - spanned::Spanned, parse_macro_input, parse::{Parse, ParseStream, Result, Error}, ReturnType, - fold::{self, Fold}, parse_quote, ItemTrait, Generics, GenericParam, Attribute, FnArg, Type, - visit::{Visit, self}, TraitBound, Meta, NestedMeta, Lit, TraitItem, Ident, TraitItemMethod, + fold::{self, Fold}, + parse::{Error, Parse, ParseStream, Result}, + 
parse_macro_input, parse_quote, + spanned::Spanned, + visit::{self, Visit}, + Attribute, FnArg, GenericParam, Generics, Ident, ItemTrait, Lit, Meta, NestedMeta, ReturnType, + TraitBound, TraitItem, TraitItemMethod, Type, }; use std::collections::HashMap; @@ -59,9 +63,8 @@ const CHANGED_IN_ATTRIBUTE: &str = "changed_in"; /// Is used when a trait method was renamed. const RENAMED_ATTRIBUTE: &str = "renamed"; /// All attributes that we support in the declaration of a runtime api trait. -const SUPPORTED_ATTRIBUTE_NAMES: &[&str] = &[ - CORE_TRAIT_ATTRIBUTE, API_VERSION_ATTRIBUTE, CHANGED_IN_ATTRIBUTE, RENAMED_ATTRIBUTE, -]; +const SUPPORTED_ATTRIBUTE_NAMES: &[&str] = + &[CORE_TRAIT_ATTRIBUTE, API_VERSION_ATTRIBUTE, CHANGED_IN_ATTRIBUTE, RENAMED_ATTRIBUTE]; /// The structure used for parsing the runtime api declarations. struct RuntimeApiDecls { @@ -94,14 +97,12 @@ fn extend_generics_with_block(generics: &mut Generics) { /// attribute body as `TokenStream`. fn remove_supported_attributes(attrs: &mut Vec) -> HashMap<&'static str, Attribute> { let mut result = HashMap::new(); - attrs.retain(|v| { - match SUPPORTED_ATTRIBUTE_NAMES.iter().find(|a| v.path.is_ident(a)) { - Some(attribute) => { - result.insert(*attribute, v.clone()); - false - }, - None => true, - } + attrs.retain(|v| match SUPPORTED_ATTRIBUTE_NAMES.iter().find(|a| v.path.is_ident(a)) { + Some(attribute) => { + result.insert(*attribute, v.clone()); + false + }, + None => true, }); result @@ -226,16 +227,17 @@ fn generate_native_call_generators(decl: &ItemTrait) -> Result { ) ) } else { - quote!( Ok(res) ) + quote!(Ok(res)) }; let input_names = params.iter().map(|v| &v.0); // If the type is using the block generic type, we will encode/decode it to make it // compatible. To ensure that we forward it by ref/value, we use the value given by the // the user. Otherwise if it is not using the block, we don't need to add anything. 
- let input_borrows = params - .iter() - .map(|v| if type_is_using_block(&v.1) { v.2.clone() } else { None }); + let input_borrows = + params + .iter() + .map(|v| if type_is_using_block(&v.1) { v.2.clone() } else { None }); // Replace all `Block` with `NodeBlock`, add `'a` lifetime to references and collect // all the function inputs. @@ -304,28 +306,23 @@ fn parse_renamed_attribute(renamed: &Attribute) -> Result<(String, u32)> { ); match meta { - Meta::List(list) => { + Meta::List(list) => if list.nested.len() > 2 && list.nested.is_empty() { err } else { let mut itr = list.nested.iter(); let old_name = match itr.next() { - Some(NestedMeta::Lit(Lit::Str(i))) => { - i.value() - }, + Some(NestedMeta::Lit(Lit::Str(i))) => i.value(), _ => return err, }; let version = match itr.next() { - Some(NestedMeta::Lit(Lit::Int(i))) => { - i.base10_parse()? - }, + Some(NestedMeta::Lit(Lit::Int(i))) => i.base10_parse()?, _ => return err, }; Ok((old_name, version)) - } - }, + }, _ => err, } } @@ -353,23 +350,19 @@ fn generate_call_api_at_calls(decl: &ItemTrait) -> Result { fn_.span(), format!( "`{}` and `{}` are not supported at once.", - RENAMED_ATTRIBUTE, - CHANGED_IN_ATTRIBUTE - ) - )); + RENAMED_ATTRIBUTE, CHANGED_IN_ATTRIBUTE + ), + )) } // We do not need to generate this function for a method that signature was changed. if attrs.contains_key(CHANGED_IN_ATTRIBUTE) { - continue; + continue } // Parse the renamed attributes. 
let mut renames = Vec::new(); - if let Some((_, a)) = attrs - .iter() - .find(|a| a.0 == &RENAMED_ATTRIBUTE) - { + if let Some((_, a)) = attrs.iter().find(|a| a.0 == &RENAMED_ATTRIBUTE) { let (old_name, version) = parse_renamed_attribute(a)?; renames.push((version, prefix_function_with_trait(&trait_name, &old_name))); } @@ -381,7 +374,7 @@ fn generate_call_api_at_calls(decl: &ItemTrait) -> Result { versions.push(version); old_names.push(old_name); (versions, old_names) - } + }, ); // Generate the generator function @@ -456,27 +449,32 @@ fn generate_runtime_decls(decls: &[ItemTrait]) -> Result { extend_generics_with_block(&mut decl.generics); let mod_name = generate_runtime_mod_name_for_trait(&decl.ident); let found_attributes = remove_supported_attributes(&mut decl.attrs); - let api_version = get_api_version(&found_attributes).map(|v| { - generate_runtime_api_version(v as u32) - })?; + let api_version = + get_api_version(&found_attributes).map(|v| generate_runtime_api_version(v as u32))?; let id = generate_runtime_api_id(&decl.ident.to_string()); let call_api_at_calls = generate_call_api_at_calls(&decl)?; // Remove methods that have the `changed_in` attribute as they are not required for the // runtime anymore. - decl.items = decl.items.iter_mut().filter_map(|i| match i { - TraitItem::Method(ref mut method) => { - if remove_supported_attributes(&mut method.attrs).contains_key(CHANGED_IN_ATTRIBUTE) { - None - } else { - // Make sure we replace all the wild card parameter names. - replace_wild_card_parameter_names(&mut method.sig); - Some(TraitItem::Method(method.clone())) - } - } - r => Some(r.clone()), - }).collect(); + decl.items = decl + .items + .iter_mut() + .filter_map(|i| match i { + TraitItem::Method(ref mut method) => { + if remove_supported_attributes(&mut method.attrs) + .contains_key(CHANGED_IN_ATTRIBUTE) + { + None + } else { + // Make sure we replace all the wild card parameter names. 
+ replace_wild_card_parameter_names(&mut method.sig); + Some(TraitItem::Method(method.clone())) + } + }, + r => Some(r.clone()), + }) + .collect(); let native_call_generators = generate_native_call_generators(&decl)?; @@ -533,8 +531,10 @@ impl<'a> ToClientSideDecl<'a> { result } - fn fold_trait_item_method(&mut self, method: TraitItemMethod) - -> (TraitItemMethod, Option, TraitItemMethod) { + fn fold_trait_item_method( + &mut self, + method: TraitItemMethod, + ) -> (TraitItemMethod, Option, TraitItemMethod) { let crate_ = self.crate_; let context = quote!( #crate_::ExecutionContext::OffchainCall(None) ); let fn_impl = self.create_method_runtime_api_impl(method.clone()); @@ -547,8 +547,9 @@ impl<'a> ToClientSideDecl<'a> { fn create_method_decl_with_context(&mut self, method: TraitItemMethod) -> TraitItemMethod { let crate_ = self.crate_; let context_arg: syn::FnArg = parse_quote!( context: #crate_::ExecutionContext ); - let mut fn_decl_ctx = self.create_method_decl(method, quote!( context )); - fn_decl_ctx.sig.ident = Ident::new(&format!("{}_with_context", &fn_decl_ctx.sig.ident), Span::call_site()); + let mut fn_decl_ctx = self.create_method_decl(method, quote!(context)); + fn_decl_ctx.sig.ident = + Ident::new(&format!("{}_with_context", &fn_decl_ctx.sig.ident), Span::call_site()); fn_decl_ctx.sig.inputs.insert(2, context_arg); fn_decl_ctx @@ -556,9 +557,12 @@ impl<'a> ToClientSideDecl<'a> { /// Takes the given method and creates a `method_runtime_api_impl` method that will be /// implemented in the runtime for the client side. - fn create_method_runtime_api_impl(&mut self, mut method: TraitItemMethod) -> Option { + fn create_method_runtime_api_impl( + &mut self, + mut method: TraitItemMethod, + ) -> Option { if remove_supported_attributes(&mut method.attrs).contains_key(CHANGED_IN_ATTRIBUTE) { - return None; + return None } let fn_sig = &method.sig; @@ -566,36 +570,35 @@ impl<'a> ToClientSideDecl<'a> { // Get types and if the value is borrowed from all parameters. 
// If there is an error, we push it as the block to the user. - let param_types = match extract_parameter_names_types_and_borrows( - fn_sig, - AllowSelfRefInParameters::No, - ) { - Ok(res) => res.into_iter().map(|v| { - let ty = v.1; - let borrow = v.2; - quote!( #borrow #ty ) - }).collect::>(), - Err(e) => { - self.errors.push(e.to_compile_error()); - Vec::new() - } - }; + let param_types = + match extract_parameter_names_types_and_borrows(fn_sig, AllowSelfRefInParameters::No) { + Ok(res) => res + .into_iter() + .map(|v| { + let ty = v.1; + let borrow = v.2; + quote!( #borrow #ty ) + }) + .collect::>(), + Err(e) => { + self.errors.push(e.to_compile_error()); + Vec::new() + }, + }; let name = generate_method_runtime_api_impl_name(&self.trait_, &method.sig.ident); let block_id = self.block_id; let crate_ = self.crate_; - Some( - parse_quote!{ - #[doc(hidden)] - fn #name( - &self, - at: &#block_id, - context: #crate_::ExecutionContext, - params: Option<( #( #param_types ),* )>, - params_encoded: Vec, - ) -> std::result::Result<#crate_::NativeOrEncoded<#ret_type>, #crate_::ApiError>; - } - ) + Some(parse_quote! 
{ + #[doc(hidden)] + fn #name( + &self, + at: &#block_id, + context: #crate_::ExecutionContext, + params: Option<( #( #param_types ),* )>, + params_encoded: Vec, + ) -> std::result::Result<#crate_::NativeOrEncoded<#ret_type>, #crate_::ApiError>; + }) } /// Takes the method declared by the user and creates the declaration we require for the runtime @@ -614,7 +617,7 @@ impl<'a> ToClientSideDecl<'a> { Err(e) => { self.errors.push(e.to_compile_error()); Vec::new() - } + }, }; let params2 = params.clone(); let ret_type = return_type_extract_type(&method.sig.output); @@ -635,7 +638,8 @@ impl<'a> ToClientSideDecl<'a> { Error::new( method.span(), "`changed_in` version can not be greater than the `api_version`", - ).to_compile_error() + ) + .to_compile_error(), ); } @@ -646,49 +650,48 @@ impl<'a> ToClientSideDecl<'a> { method.sig.ident = ident; method.attrs.push(parse_quote!( #[deprecated] )); - let panic = format!("Calling `{}` should not return a native value!", method.sig.ident); - (quote!( panic!(#panic) ), quote!( None )) + let panic = + format!("Calling `{}` should not return a native value!", method.sig.ident); + (quote!(panic!(#panic)), quote!(None)) }, - Ok(None) => (quote!( Ok(n) ), quote!( Some(( #( #params2 ),* )) )), + Ok(None) => (quote!(Ok(n)), quote!( Some(( #( #params2 ),* )) )), Err(e) => { self.errors.push(e.to_compile_error()); - (quote!( unimplemented!() ), quote!( None )) - } + (quote!(unimplemented!()), quote!(None)) + }, }; let function_name = method.sig.ident.to_string(); // Generate the default implementation that calls the `method_runtime_api_impl` method. - method.default = Some( - parse_quote! 
{ - { - let runtime_api_impl_params_encoded = - #crate_::Encode::encode(&( #( &#params ),* )); - - self.#name_impl( - __runtime_api_at_param__, - #context, - #param_tuple, - runtime_api_impl_params_encoded, - ).and_then(|r| - match r { - #crate_::NativeOrEncoded::Native(n) => { - #native_handling - }, - #crate_::NativeOrEncoded::Encoded(r) => { - <#ret_type as #crate_::Decode>::decode(&mut &r[..]) - .map_err(|err| - #crate_::ApiError::FailedToDecodeReturnValue { - function: #function_name, - error: err, - } - ) - } + method.default = Some(parse_quote! { + { + let runtime_api_impl_params_encoded = + #crate_::Encode::encode(&( #( &#params ),* )); + + self.#name_impl( + __runtime_api_at_param__, + #context, + #param_tuple, + runtime_api_impl_params_encoded, + ).and_then(|r| + match r { + #crate_::NativeOrEncoded::Native(n) => { + #native_handling + }, + #crate_::NativeOrEncoded::Encoded(r) => { + <#ret_type as #crate_::Decode>::decode(&mut &r[..]) + .map_err(|err| + #crate_::ApiError::FailedToDecodeReturnValue { + function: #function_name, + error: err, + } + ) } - ) - } + } + ) } - ); + }); method } @@ -705,11 +708,7 @@ impl<'a> Fold for ToClientSideDecl<'a> { if is_core_trait { // Add all the supertraits we want to have for `Core`. - input.supertraits = parse_quote!( - 'static - + Send - + Sync - ); + input.supertraits = parse_quote!('static + Send + Sync); } else { // Add the `Core` runtime api as super trait. let crate_ = &self.crate_; @@ -729,24 +728,22 @@ fn parse_runtime_api_version(version: &Attribute) -> Result { let meta = version.parse_meta()?; let err = Err(Error::new( - meta.span(), - &format!( - "Unexpected `{api_version}` attribute. The supported format is `{api_version}(1)`", - api_version = API_VERSION_ATTRIBUTE - ) - ) - ); + meta.span(), + &format!( + "Unexpected `{api_version}` attribute. 
The supported format is `{api_version}(1)`", + api_version = API_VERSION_ATTRIBUTE + ), + )); match meta { - Meta::List(list) => { + Meta::List(list) => if list.nested.len() != 1 { err } else if let Some(NestedMeta::Lit(Lit::Int(i))) = list.nested.first() { i.base10_parse() } else { err - } - }, + }, _ => err, } } @@ -798,14 +795,18 @@ fn generate_runtime_info_impl(trait_: &ItemTrait, version: u64) -> TokenStream { /// Get changed in version from the user given attribute or `Ok(None)`, if no attribute was given. fn get_changed_in(found_attributes: &HashMap<&'static str, Attribute>) -> Result> { - found_attributes.get(&CHANGED_IN_ATTRIBUTE) + found_attributes + .get(&CHANGED_IN_ATTRIBUTE) .map(|v| parse_runtime_api_version(v).map(Some)) .unwrap_or(Ok(None)) } /// Get the api version from the user given attribute or `Ok(1)`, if no attribute was given. fn get_api_version(found_attributes: &HashMap<&'static str, Attribute>) -> Result { - found_attributes.get(&API_VERSION_ATTRIBUTE).map(parse_runtime_api_version).unwrap_or(Ok(1)) + found_attributes + .get(&API_VERSION_ATTRIBUTE) + .map(parse_runtime_api_version) + .unwrap_or(Ok(1)) } /// Generate the declaration of the trait for the client side. @@ -863,7 +864,10 @@ impl CheckTraitDecl { /// Check that the given method declarations are correct. /// /// Any error is stored in `self.errors`. 
- fn check_method_declarations<'a>(&mut self, methods: impl Iterator) { + fn check_method_declarations<'a>( + &mut self, + methods: impl Iterator, + ) { let mut method_to_signature_changed = HashMap::>>::new(); methods.into_iter().for_each(|method| { @@ -871,7 +875,10 @@ impl CheckTraitDecl { let changed_in = match get_changed_in(&attributes) { Ok(r) => r, - Err(e) => { self.errors.push(e); return; }, + Err(e) => { + self.errors.push(e); + return + }, }; method_to_signature_changed @@ -912,16 +919,13 @@ impl<'ast> Visit<'ast> for CheckTraitDecl { fn visit_generic_param(&mut self, input: &'ast GenericParam) { match input { - GenericParam::Type(ty) if ty.ident == BLOCK_GENERIC_IDENT => { - self.errors.push( - Error::new( - input.span(), - "`Block: BlockT` generic parameter will be added automatically by the \ - `decl_runtime_apis!` macro!" - ) - ) - }, - _ => {} + GenericParam::Type(ty) if ty.ident == BLOCK_GENERIC_IDENT => + self.errors.push(Error::new( + input.span(), + "`Block: BlockT` generic parameter will be added automatically by the \ + `decl_runtime_apis!` macro!", + )), + _ => {}, } visit::visit_generic_param(self, input); @@ -930,14 +934,12 @@ impl<'ast> Visit<'ast> for CheckTraitDecl { fn visit_trait_bound(&mut self, input: &'ast TraitBound) { if let Some(last_ident) = input.path.segments.last().map(|v| &v.ident) { if last_ident == "BlockT" || last_ident == BLOCK_GENERIC_IDENT { - self.errors.push( - Error::new( - input.span(), - "`Block: BlockT` generic parameter will be added automatically by the \ + self.errors.push(Error::new( + input.span(), + "`Block: BlockT` generic parameter will be added automatically by the \ `decl_runtime_apis!` macro! If you try to use a different trait than the \ - substrate `Block` trait, please rename it locally." 
- ) - ) + substrate `Block` trait, please rename it locally.", + )) } } @@ -965,7 +967,9 @@ pub fn decl_runtime_apis_impl(input: proc_macro::TokenStream) -> proc_macro::Tok // Parse all trait declarations let RuntimeApiDecls { decls: api_decls } = parse_macro_input!(input as RuntimeApiDecls); - decl_runtime_apis_impl_inner(&api_decls).unwrap_or_else(|e| e.to_compile_error()).into() + decl_runtime_apis_impl_inner(&api_decls) + .unwrap_or_else(|e| e.to_compile_error()) + .into() } fn decl_runtime_apis_impl_inner(api_decls: &[ItemTrait]) -> Result { @@ -975,13 +979,11 @@ fn decl_runtime_apis_impl_inner(api_decls: &[ItemTrait]) -> Result let runtime_decls = generate_runtime_decls(api_decls)?; let client_side_decls = generate_client_side_decls(api_decls)?; - Ok( - quote!( - #hidden_includes + Ok(quote!( + #hidden_includes - #runtime_decls + #runtime_decls - #client_side_decls - ) - ) + #client_side_decls + )) } diff --git a/primitives/api/proc-macro/src/impl_runtime_apis.rs b/primitives/api/proc-macro/src/impl_runtime_apis.rs index e81c52bbb0b18..bc0f027e1efaa 100644 --- a/primitives/api/proc-macro/src/impl_runtime_apis.rs +++ b/primitives/api/proc-macro/src/impl_runtime_apis.rs @@ -16,12 +16,12 @@ // limitations under the License. 
use crate::utils::{ - generate_crate_access, generate_hidden_includes, - generate_runtime_mod_name_for_trait, generate_method_runtime_api_impl_name, - extract_parameter_names_types_and_borrows, generate_native_call_generator_fn_name, - return_type_extract_type, generate_call_api_at_fn_name, prefix_function_with_trait, extract_all_signature_types, extract_block_type_from_trait_path, extract_impl_trait, - AllowSelfRefInParameters, RequireQualifiedTraitPath, + extract_parameter_names_types_and_borrows, generate_call_api_at_fn_name, generate_crate_access, + generate_hidden_includes, generate_method_runtime_api_impl_name, + generate_native_call_generator_fn_name, generate_runtime_mod_name_for_trait, + prefix_function_with_trait, return_type_extract_type, AllowSelfRefInParameters, + RequireQualifiedTraitPath, }; use proc_macro2::{Span, TokenStream}; @@ -29,9 +29,12 @@ use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{ - spanned::Spanned, parse_macro_input, Ident, Type, ItemImpl, Path, Signature, Attribute, - ImplItem, parse::{Parse, ParseStream, Result, Error}, PathArguments, GenericArgument, TypePath, - fold::{self, Fold}, parse_quote, + fold::{self, Fold}, + parse::{Error, Parse, ParseStream, Result}, + parse_macro_input, parse_quote, + spanned::Spanned, + Attribute, GenericArgument, Ident, ImplItem, ItemImpl, Path, PathArguments, Signature, Type, + TypePath, }; use std::collections::HashSet; @@ -66,9 +69,10 @@ fn generate_impl_call( signature: &Signature, runtime: &Type, input: &Ident, - impl_trait: &Path + impl_trait: &Path, ) -> Result { - let params = extract_parameter_names_types_and_borrows(signature, AllowSelfRefInParameters::No)?; + let params = + extract_parameter_names_types_and_borrows(signature, AllowSelfRefInParameters::No)?; let c = generate_crate_access(HIDDEN_INCLUDES_ID); let fn_name = &signature.ident; @@ -78,27 +82,25 @@ fn generate_impl_call( let ptypes = params.iter().map(|v| &v.1); let pborrow = params.iter().map(|v| &v.2); - Ok( - 
quote!( - let (#( #pnames ),*) : ( #( #ptypes ),* ) = - match #c::DecodeLimit::decode_all_with_depth_limit( - #c::MAX_EXTRINSIC_DEPTH, - &#input, - ) { - Ok(res) => res, - Err(e) => panic!("Bad input data provided to {}: {}", #fn_name_str, e), - }; - - #[allow(deprecated)] - <#runtime as #impl_trait>::#fn_name(#( #pborrow #pnames2 ),*) - ) - ) + Ok(quote!( + let (#( #pnames ),*) : ( #( #ptypes ),* ) = + match #c::DecodeLimit::decode_all_with_depth_limit( + #c::MAX_EXTRINSIC_DEPTH, + &#input, + ) { + Ok(res) => res, + Err(e) => panic!("Bad input data provided to {}: {}", #fn_name_str, e), + }; + + #[allow(deprecated)] + <#runtime as #impl_trait>::#fn_name(#( #pborrow #pnames2 ),*) + )) } /// Generate all the implementation calls for the given functions. fn generate_impl_calls( impls: &[ItemImpl], - input: &Ident + input: &Ident, ) -> Result)>> { let mut impl_calls = Vec::new(); @@ -113,12 +115,8 @@ fn generate_impl_calls( for item in &impl_.items { if let ImplItem::Method(method) = item { - let impl_call = generate_impl_call( - &method.sig, - &impl_.self_ty, - input, - &impl_trait - )?; + let impl_call = + generate_impl_call(&method.sig, &impl_.self_ty, input, &impl_trait)?; impl_calls.push(( impl_trait_ident.clone(), @@ -137,15 +135,16 @@ fn generate_impl_calls( fn generate_dispatch_function(impls: &[ItemImpl]) -> Result { let data = Ident::new("__sp_api__input_data", Span::call_site()); let c = generate_crate_access(HIDDEN_INCLUDES_ID); - let impl_calls = generate_impl_calls(impls, &data)? - .into_iter() - .map(|(trait_, fn_name, impl_, attrs)| { - let name = prefix_function_with_trait(&trait_, &fn_name); - quote!( - #( #attrs )* - #name => Some(#c::Encode::encode(&{ #impl_ })), - ) - }); + let impl_calls = + generate_impl_calls(impls, &data)? 
+ .into_iter() + .map(|(trait_, fn_name, impl_, attrs)| { + let name = prefix_function_with_trait(&trait_, &fn_name); + quote!( + #( #attrs )* + #name => Some(#c::Encode::encode(&{ #impl_ })), + ) + }); Ok(quote!( #[cfg(feature = "std")] @@ -163,34 +162,33 @@ fn generate_wasm_interface(impls: &[ItemImpl]) -> Result { let input = Ident::new("input", Span::call_site()); let c = generate_crate_access(HIDDEN_INCLUDES_ID); - let impl_calls = generate_impl_calls(impls, &input)? - .into_iter() - .map(|(trait_, fn_name, impl_, attrs)| { - let fn_name = Ident::new( - &prefix_function_with_trait(&trait_, &fn_name), - Span::call_site() - ); - - quote!( - #( #attrs )* - #[cfg(not(feature = "std"))] - #[no_mangle] - pub unsafe fn #fn_name(input_data: *mut u8, input_len: usize) -> u64 { - let mut #input = if input_len == 0 { - &[0u8; 0] - } else { - unsafe { - #c::slice::from_raw_parts(input_data, input_len) - } - }; - - #c::init_runtime_logger(); - - let output = (move || { #impl_ })(); - #c::to_substrate_wasm_fn_return_value(&output) - } - ) - }); + let impl_calls = + generate_impl_calls(impls, &input)? + .into_iter() + .map(|(trait_, fn_name, impl_, attrs)| { + let fn_name = + Ident::new(&prefix_function_with_trait(&trait_, &fn_name), Span::call_site()); + + quote!( + #( #attrs )* + #[cfg(not(feature = "std"))] + #[no_mangle] + pub unsafe fn #fn_name(input_data: *mut u8, input_len: usize) -> u64 { + let mut #input = if input_len == 0 { + &[0u8; 0] + } else { + unsafe { + #c::slice::from_raw_parts(input_data, input_len) + } + }; + + #c::init_runtime_logger(); + + let output = (move || { #impl_ })(); + #c::to_substrate_wasm_fn_return_value(&output) + } + ) + }); Ok(quote!( #( #impl_calls )* )) } @@ -414,7 +412,6 @@ fn generate_api_impl_for_runtime(impls: &[ItemImpl]) -> Result { Ok(quote!( #( #impls_prepared )* )) } - /// Auxiliary data structure that is used to convert `impl Api for Runtime` to /// `impl Api for RuntimeApi`. 
/// This requires us to replace the runtime `Block` with the node `Block`, @@ -430,11 +427,8 @@ struct ApiRuntimeImplToApiRuntimeApiImpl<'a> { impl<'a> Fold for ApiRuntimeImplToApiRuntimeApiImpl<'a> { fn fold_type_path(&mut self, input: TypePath) -> TypePath { - let new_ty_path = if input == *self.runtime_block { - parse_quote!( __SR_API_BLOCK__ ) - } else { - input - }; + let new_ty_path = + if input == *self.runtime_block { parse_quote!(__SR_API_BLOCK__) } else { input }; fold::fold_type_path(self, new_ty_path) } @@ -451,12 +445,18 @@ impl<'a> Fold for ApiRuntimeImplToApiRuntimeApiImpl<'a> { // Generate the access to the native parameters let param_tuple_access = if input.sig.inputs.len() == 1 { - vec![ quote!( p ) ] + vec![quote!(p)] } else { - input.sig.inputs.iter().enumerate().map(|(i, _)| { - let i = syn::Index::from(i); - quote!( p.#i ) - }).collect::>() + input + .sig + .inputs + .iter() + .enumerate() + .map(|(i, _)| { + let i = syn::Index::from(i); + quote!( p.#i ) + }) + .collect::>() }; let (param_types, error) = match extract_parameter_names_types_and_borrows( @@ -464,12 +464,14 @@ impl<'a> Fold for ApiRuntimeImplToApiRuntimeApiImpl<'a> { AllowSelfRefInParameters::No, ) { Ok(res) => ( - res.into_iter().map(|v| { - let ty = v.1; - let borrow = v.2; - quote!( #borrow #ty ) - }).collect::>(), - None + res.into_iter() + .map(|v| { + let ty = v.1; + let borrow = v.2; + quote!( #borrow #ty ) + }) + .collect::>(), + None, ), Err(e) => (Vec::new(), Some(e.to_compile_error())), }; @@ -483,10 +485,8 @@ impl<'a> Fold for ApiRuntimeImplToApiRuntimeApiImpl<'a> { params_encoded: Vec, }; - input.sig.ident = generate_method_runtime_api_impl_name( - &self.impl_trait, - &input.sig.ident, - ); + input.sig.ident = + generate_method_runtime_api_impl_name(&self.impl_trait, &input.sig.ident); let ret_type = return_type_extract_type(&input.sig.output); // Generate the correct return type. 
@@ -544,43 +544,34 @@ impl<'a> Fold for ApiRuntimeImplToApiRuntimeApiImpl<'a> { let crate_ = generate_crate_access(HIDDEN_INCLUDES_ID); // Implement the trait for the `RuntimeApiImpl` - input.self_ty = Box::new( - parse_quote!( RuntimeApiImpl<__SR_API_BLOCK__, RuntimeApiImplCall> ) - ); + input.self_ty = + Box::new(parse_quote!( RuntimeApiImpl<__SR_API_BLOCK__, RuntimeApiImplCall> )); + input.generics.params.push(parse_quote!( + __SR_API_BLOCK__: #crate_::BlockT + std::panic::UnwindSafe + + std::panic::RefUnwindSafe + )); input.generics.params.push( - parse_quote!( - __SR_API_BLOCK__: #crate_::BlockT + std::panic::UnwindSafe + - std::panic::RefUnwindSafe - ) - ); - input.generics.params.push( - parse_quote!( RuntimeApiImplCall: #crate_::CallApiAt<__SR_API_BLOCK__> + 'static ) + parse_quote!( RuntimeApiImplCall: #crate_::CallApiAt<__SR_API_BLOCK__> + 'static ), ); let where_clause = input.generics.make_where_clause(); - where_clause.predicates.push( - parse_quote! { - RuntimeApiImplCall::StateBackend: - #crate_::StateBackend<#crate_::HashFor<__SR_API_BLOCK__>> - } - ); + where_clause.predicates.push(parse_quote! { + RuntimeApiImplCall::StateBackend: + #crate_::StateBackend<#crate_::HashFor<__SR_API_BLOCK__>> + }); // Require that all types used in the function signatures are unwind safe. extract_all_signature_types(&input.items).iter().for_each(|i| { - where_clause.predicates.push( - parse_quote! { - #i: std::panic::UnwindSafe + std::panic::RefUnwindSafe - } - ); + where_clause.predicates.push(parse_quote! { + #i: std::panic::UnwindSafe + std::panic::RefUnwindSafe + }); }); - where_clause.predicates.push( - parse_quote! { - __SR_API_BLOCK__::Header: std::panic::UnwindSafe + std::panic::RefUnwindSafe - } - ); + where_clause.predicates.push(parse_quote! 
{ + __SR_API_BLOCK__::Header: std::panic::UnwindSafe + std::panic::RefUnwindSafe + }); input.attrs = filter_cfg_attrs(&input.attrs); @@ -650,14 +641,12 @@ fn generate_runtime_api_versions(impls: &[ItemImpl]) -> Result { let span = trait_.span(); if !processed_traits.insert(trait_) { - return Err( - Error::new( - span, - "Two traits with the same name detected! \ + return Err(Error::new( + span, + "Two traits with the same name detected! \ The trait name is used to generate its ID. \ - Please rename one trait at the declaration!" - ) - ) + Please rename one trait at the declaration!", + )) } let id: Path = parse_quote!( #path ID ); @@ -692,7 +681,9 @@ pub fn impl_runtime_apis_impl(input: proc_macro::TokenStream) -> proc_macro::Tok // Parse all impl blocks let RuntimeApiImpls { impls: api_impls } = parse_macro_input!(input as RuntimeApiImpls); - impl_runtime_apis_impl_inner(&api_impls).unwrap_or_else(|e| e.to_compile_error()).into() + impl_runtime_apis_impl_inner(&api_impls) + .unwrap_or_else(|e| e.to_compile_error()) + .into() } fn impl_runtime_apis_impl_inner(api_impls: &[ItemImpl]) -> Result { @@ -704,27 +695,25 @@ fn impl_runtime_apis_impl_inner(api_impls: &[ItemImpl]) -> Result { let wasm_interface = generate_wasm_interface(api_impls)?; let api_impls_for_runtime_api = generate_api_impl_for_runtime_api(api_impls)?; - Ok( - quote!( - #hidden_includes + Ok(quote!( + #hidden_includes - #base_runtime_api + #base_runtime_api - #api_impls_for_runtime + #api_impls_for_runtime - #api_impls_for_runtime_api + #api_impls_for_runtime_api - #runtime_api_versions + #runtime_api_versions - pub mod api { - use super::*; + pub mod api { + use super::*; - #dispatch_impl + #dispatch_impl - #wasm_interface - } - ) - ) + #wasm_interface + } + )) } // Filters all attributes except the cfg ones. 
diff --git a/primitives/api/proc-macro/src/lib.rs b/primitives/api/proc-macro/src/lib.rs index 30767efd41c11..b8731d70ca3cf 100644 --- a/primitives/api/proc-macro/src/lib.rs +++ b/primitives/api/proc-macro/src/lib.rs @@ -21,9 +21,9 @@ use proc_macro::TokenStream; +mod decl_runtime_apis; mod impl_runtime_apis; mod mock_impl_runtime_apis; -mod decl_runtime_apis; mod utils; #[proc_macro] diff --git a/primitives/api/proc-macro/src/mock_impl_runtime_apis.rs b/primitives/api/proc-macro/src/mock_impl_runtime_apis.rs index 738420615b622..77f8a07f85c48 100644 --- a/primitives/api/proc-macro/src/mock_impl_runtime_apis.rs +++ b/primitives/api/proc-macro/src/mock_impl_runtime_apis.rs @@ -16,10 +16,10 @@ // limitations under the License. use crate::utils::{ - generate_crate_access, generate_hidden_includes, - generate_method_runtime_api_impl_name, extract_parameter_names_types_and_borrows, - return_type_extract_type, extract_block_type_from_trait_path, extract_impl_trait, - AllowSelfRefInParameters, RequireQualifiedTraitPath, + extract_block_type_from_trait_path, extract_impl_trait, + extract_parameter_names_types_and_borrows, generate_crate_access, generate_hidden_includes, + generate_method_runtime_api_impl_name, return_type_extract_type, AllowSelfRefInParameters, + RequireQualifiedTraitPath, }; use proc_macro2::{Span, TokenStream}; @@ -27,8 +27,11 @@ use proc_macro2::{Span, TokenStream}; use quote::{quote, quote_spanned}; use syn::{ - spanned::Spanned, parse_macro_input, Ident, Type, ItemImpl, TypePath, parse_quote, - parse::{Parse, ParseStream, Result, Error}, fold::{self, Fold}, Attribute, Pat, + fold::{self, Fold}, + parse::{Error, Parse, ParseStream, Result}, + parse_macro_input, parse_quote, + spanned::Spanned, + Attribute, Ident, ItemImpl, Pat, Type, TypePath, }; /// Unique identifier used to make the hidden includes unique for this macro. @@ -62,10 +65,7 @@ impl Parse for RuntimeApiImpls { } /// Implement the `ApiExt` trait and the `Core` runtime api. 
-fn implement_common_api_traits( - block_type: TypePath, - self_ty: Type, -) -> Result { +fn implement_common_api_traits(block_type: TypePath, self_ty: Type) -> Result { let crate_ = generate_crate_access(HIDDEN_INCLUDES_ID); Ok(quote!( @@ -168,11 +168,13 @@ fn implement_common_api_traits( /// If the attribute was found, it will be automatically removed from the vec. fn has_advanced_attribute(attributes: &mut Vec) -> bool { let mut found = false; - attributes.retain(|attr| if attr.path.is_ident(ADVANCED_ATTRIBUTE) { - found = true; - false - } else { - true + attributes.retain(|attr| { + if attr.path.is_ident(ADVANCED_ATTRIBUTE) { + found = true; + false + } else { + true + } }); found @@ -214,7 +216,7 @@ fn get_at_param_name( let name = param_names.remove(0); Ok((quote!( #name ), ptype_and_borrows.0)) } else { - Ok((quote!( _ ), default_block_id_type.clone())) + Ok((quote!(_), default_block_id_type.clone())) } } @@ -235,24 +237,27 @@ impl<'a> Fold for FoldRuntimeApiImpl<'a> { let is_advanced = has_advanced_attribute(&mut input.attrs); let mut errors = Vec::new(); - let (mut param_names, mut param_types_and_borrows) = match extract_parameter_names_types_and_borrows( - &input.sig, - AllowSelfRefInParameters::YesButIgnore, - ) { - Ok(res) => ( - res.iter().map(|v| v.0.clone()).collect::>(), - res.iter().map(|v| { - let ty = &v.1; - let borrow = &v.2; - (quote_spanned!(ty.span() => #borrow #ty ), v.2.is_some()) - }).collect::>(), - ), - Err(e) => { - errors.push(e.to_compile_error()); - - (Default::default(), Default::default()) - } - }; + let (mut param_names, mut param_types_and_borrows) = + match extract_parameter_names_types_and_borrows( + &input.sig, + AllowSelfRefInParameters::YesButIgnore, + ) { + Ok(res) => ( + res.iter().map(|v| v.0.clone()).collect::>(), + res.iter() + .map(|v| { + let ty = &v.1; + let borrow = &v.2; + (quote_spanned!(ty.span() => #borrow #ty ), v.2.is_some()) + }) + .collect::>(), + ), + Err(e) => { + errors.push(e.to_compile_error()); + + 
(Default::default(), Default::default()) + }, + }; let block_type = &self.block_type; let block_id_type = quote!( &#crate_::BlockId<#block_type> ); @@ -267,8 +272,8 @@ impl<'a> Fold for FoldRuntimeApiImpl<'a> { Ok(res) => res, Err(e) => { errors.push(e.to_compile_error()); - (quote!( _ ), block_id_type) - } + (quote!(_), block_id_type) + }, }; let param_types = param_types_and_borrows.iter().map(|v| &v.0); @@ -281,10 +286,8 @@ impl<'a> Fold for FoldRuntimeApiImpl<'a> { _: Vec, }; - input.sig.ident = generate_method_runtime_api_impl_name( - &self.impl_trait, - &input.sig.ident, - ); + input.sig.ident = + generate_method_runtime_api_impl_name(&self.impl_trait, &input.sig.ident); // When using advanced, the user needs to declare the correct return type on its own, // otherwise do it for the user. @@ -360,28 +363,24 @@ fn generate_runtime_api_impls(impls: &[ItemImpl]) -> Result { + Some(self_ty) => if self_ty == impl_.self_ty { Some(self_ty) } else { - let mut error =Error::new( + let mut error = Error::new( impl_.self_ty.span(), "Self type should not change between runtime apis", ); - error.combine(Error::new( - self_ty.span(), - "First self type found here", - )); + error.combine(Error::new(self_ty.span(), "First self type found here")); return Err(error) - } - }, + }, None => Some(impl_.self_ty.clone()), }; global_block_type = match global_block_type.take() { - Some(global_block_type) => { + Some(global_block_type) => if global_block_type == *block_type { Some(global_block_type) } else { @@ -396,15 +395,11 @@ fn generate_runtime_api_impls(impls: &[ItemImpl]) -> Result Some(block_type.clone()), }; - let mut visitor = FoldRuntimeApiImpl { - block_type, - impl_trait: &impl_trait.ident, - }; + let mut visitor = FoldRuntimeApiImpl { block_type, impl_trait: &impl_trait.ident }; result.push(visitor.fold_item_impl(impl_.clone())); } @@ -421,7 +416,9 @@ pub fn mock_impl_runtime_apis_impl(input: proc_macro::TokenStream) -> proc_macro // Parse all impl blocks let 
RuntimeApiImpls { impls: api_impls } = parse_macro_input!(input as RuntimeApiImpls); - mock_impl_runtime_apis_impl_inner(&api_impls).unwrap_or_else(|e| e.to_compile_error()).into() + mock_impl_runtime_apis_impl_inner(&api_impls) + .unwrap_or_else(|e| e.to_compile_error()) + .into() } fn mock_impl_runtime_apis_impl_inner(api_impls: &[ItemImpl]) -> Result { diff --git a/primitives/api/proc-macro/src/utils.rs b/primitives/api/proc-macro/src/utils.rs index aa3c69d46a29d..a3f21638751e9 100644 --- a/primitives/api/proc-macro/src/utils.rs +++ b/primitives/api/proc-macro/src/utils.rs @@ -15,11 +15,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -use proc_macro2::{TokenStream, Span}; +use proc_macro2::{Span, TokenStream}; use syn::{ - Result, Ident, Signature, parse_quote, Type, Pat, spanned::Spanned, FnArg, Error, token::And, - ImplItem, ReturnType, PathArguments, Path, GenericArgument, TypePath, ItemImpl, + parse_quote, spanned::Spanned, token::And, Error, FnArg, GenericArgument, Ident, ImplItem, + ItemImpl, Pat, Path, PathArguments, Result, ReturnType, Signature, Type, TypePath, }; use quote::quote; @@ -49,18 +49,19 @@ pub fn generate_hidden_includes(unique_id: &'static str) -> TokenStream { Err(e) => { let err = Error::new(Span::call_site(), e).to_compile_error(); quote!( #err ) - } + }, } } /// Generates the access to the `sc_client` crate. pub fn generate_crate_access(unique_id: &'static str) -> TokenStream { if env::var("CARGO_PKG_NAME").unwrap() == "sp-api" { - quote!( sp_api ) + quote!(sp_api) } else { let mod_name = generate_hidden_includes_mod_name(unique_id); quote!( self::#mod_name::sp_api ) - }.into() + } + .into() } /// Generates the name of the module that contains the trait declaration for the runtime. @@ -76,7 +77,7 @@ pub fn generate_method_runtime_api_impl_name(trait_: &Ident, method: &Ident) -> /// Get the type of a `syn::ReturnType`. 
pub fn return_type_extract_type(rt: &ReturnType) -> Type { match rt { - ReturnType::Default => parse_quote!( () ), + ReturnType::Default => parse_quote!(()), ReturnType::Type(_, ref ty) => *ty.clone(), } } @@ -84,10 +85,13 @@ pub fn return_type_extract_type(rt: &ReturnType) -> Type { /// Replace the `_` (wild card) parameter names in the given signature with unique identifiers. pub fn replace_wild_card_parameter_names(input: &mut Signature) { let mut generated_pattern_counter = 0; - input.inputs.iter_mut().for_each(|arg| if let FnArg::Typed(arg) = arg { - arg.pat = Box::new( - generate_unique_pattern((*arg.pat).clone(), &mut generated_pattern_counter), - ); + input.inputs.iter_mut().for_each(|arg| { + if let FnArg::Typed(arg) = arg { + arg.pat = Box::new(generate_unique_pattern( + (*arg.pat).clone(), + &mut generated_pattern_counter, + )); + } }); } @@ -101,7 +105,7 @@ pub fn fold_fn_decl_for_client_side( // Add `&self, at:& BlockId` as parameters to each function at the beginning. input.inputs.insert(0, parse_quote!( __runtime_api_at_param__: &#block_id )); - input.inputs.insert(0, parse_quote!( &self )); + input.inputs.insert(0, parse_quote!(&self)); // Wrap the output in a `Result` input.output = { @@ -114,10 +118,8 @@ pub fn fold_fn_decl_for_client_side( pub fn generate_unique_pattern(pat: Pat, counter: &mut u32) -> Pat { match pat { Pat::Wild(_) => { - let generated_name = Ident::new( - &format!("__runtime_api_generated_name_{}__", counter), - pat.span(), - ); + let generated_name = + Ident::new(&format!("__runtime_api_generated_name_{}__", counter), pat.span()); *counter += 1; parse_quote!( #generated_name ) @@ -145,26 +147,20 @@ pub fn extract_parameter_names_types_and_borrows( match input { FnArg::Typed(arg) => { let (ty, borrow) = match &*arg.ty { - Type::Reference(t) => { - ((*t.elem).clone(), Some(t.and_token)) - }, - t => { (t.clone(), None) }, + Type::Reference(t) => ((*t.elem).clone(), Some(t.and_token)), + t => (t.clone(), None), }; - let name = 
generate_unique_pattern( - (*arg.pat).clone(), - &mut generated_pattern_counter, - ); + let name = + generate_unique_pattern((*arg.pat).clone(), &mut generated_pattern_counter); result.push((name, ty, borrow)); }, - FnArg::Receiver(_) if matches!(allow_self, AllowSelfRefInParameters::No) => { - return Err(Error::new(input.span(), "`self` parameter not supported!")) - }, - FnArg::Receiver(recv) => { + FnArg::Receiver(_) if matches!(allow_self, AllowSelfRefInParameters::No) => + return Err(Error::new(input.span(), "`self` parameter not supported!")), + FnArg::Receiver(recv) => if recv.mutability.is_some() || recv.reference.is_none() { return Err(Error::new(recv.span(), "Only `&self` is supported!")) - } - }, + }, } } @@ -190,7 +186,8 @@ pub fn prefix_function_with_trait(trait_: &Ident, function: &F) -> /// /// If a type is a reference, the inner type is extracted (without the reference). pub fn extract_all_signature_types(items: &[ImplItem]) -> Vec { - items.iter() + items + .iter() .filter_map(|i| match i { ImplItem::Method(method) => Some(&method.sig), _ => None, @@ -201,13 +198,17 @@ pub fn extract_all_signature_types(items: &[ImplItem]) -> Vec { ReturnType::Type(_, ty) => Some((**ty).clone()), }; - sig.inputs.iter().filter_map(|i| match i { - FnArg::Typed(arg) => Some(&arg.ty), - _ => None, - }).map(|ty| match &**ty { - Type::Reference(t) => (*t.elem).clone(), - _ => (**ty).clone(), - }).chain(ret_ty) + sig.inputs + .iter() + .filter_map(|i| match i { + FnArg::Typed(arg) => Some(&arg.ty), + _ => None, + }) + .map(|ty| match &**ty { + Type::Reference(t) => (*t.elem).clone(), + _ => (**ty).clone(), + }) + .chain(ret_ty) }) .collect() } @@ -223,19 +224,20 @@ pub fn extract_block_type_from_trait_path(trait_: &Path) -> Result<&TypePath> { .ok_or_else(|| Error::new(span, "Empty path not supported"))?; match &generics.arguments { - PathArguments::AngleBracketed(ref args) => { - args.args.first().and_then(|v| match v { + PathArguments::AngleBracketed(ref args) => args + 
.args + .first() + .and_then(|v| match v { GenericArgument::Type(Type::Path(ref block)) => Some(block), - _ => None - }).ok_or_else(|| Error::new(args.span(), "Missing `Block` generic parameter.")) - }, + _ => None, + }) + .ok_or_else(|| Error::new(args.span(), "Missing `Block` generic parameter.")), PathArguments::None => { let span = trait_.segments.last().as_ref().unwrap().span(); Err(Error::new(span, "Missing `Block` generic parameter.")) }, - PathArguments::Parenthesized(_) => { - Err(Error::new(generics.arguments.span(), "Unexpected parentheses in path!")) - }, + PathArguments::Parenthesized(_) => + Err(Error::new(generics.arguments.span(), "Unexpected parentheses in path!")), } } @@ -252,19 +254,20 @@ pub fn extract_impl_trait<'a>( impl_: &'a ItemImpl, require: RequireQualifiedTraitPath, ) -> Result<&'a Path> { - impl_.trait_.as_ref().map(|v| &v.1).ok_or_else( - || Error::new(impl_.span(), "Only implementation of traits are supported!") - ).and_then(|p| { - if p.segments.len() > 1 || matches!(require, RequireQualifiedTraitPath::No) { - Ok(p) - } else { - Err( - Error::new( + impl_ + .trait_ + .as_ref() + .map(|v| &v.1) + .ok_or_else(|| Error::new(impl_.span(), "Only implementation of traits are supported!")) + .and_then(|p| { + if p.segments.len() > 1 || matches!(require, RequireQualifiedTraitPath::No) { + Ok(p) + } else { + Err(Error::new( p.span(), "The implemented trait has to be referenced with a path, \ - e.g. `impl client::Core for Runtime`." - ) - ) - } - }) + e.g. 
`impl client::Core for Runtime`.", + )) + } + }) } diff --git a/primitives/api/src/lib.rs b/primitives/api/src/lib.rs index ea023677adf34..0ec1c5aeadbbe 100644 --- a/primitives/api/src/lib.rs +++ b/primitives/api/src/lib.rs @@ -70,13 +70,7 @@ extern crate self as sp_api; #[doc(hidden)] -#[cfg(feature = "std")] -pub use sp_state_machine::{ - OverlayedChanges, StorageProof, Backend as StateBackend, ChangesTrieState, InMemoryBackend, -}; -#[doc(hidden)] -#[cfg(feature = "std")] -pub use sp_core::NativeOrEncoded; +pub use codec::{self, Decode, DecodeLimit, Encode}; #[doc(hidden)] #[cfg(feature = "std")] pub use hash_db::Hasher; @@ -84,27 +78,34 @@ pub use hash_db::Hasher; #[cfg(not(feature = "std"))] pub use sp_core::to_substrate_wasm_fn_return_value; #[doc(hidden)] +#[cfg(feature = "std")] +pub use sp_core::NativeOrEncoded; +use sp_core::OpaqueMetadata; +#[doc(hidden)] +pub use sp_core::{offchain, ExecutionContext}; +#[doc(hidden)] pub use sp_runtime::{ + generic::BlockId, traits::{ - Block as BlockT, GetNodeBlockType, GetRuntimeBlockType, HashFor, NumberFor, - Header as HeaderT, Hash as HashT, + Block as BlockT, GetNodeBlockType, GetRuntimeBlockType, Hash as HashT, HashFor, + Header as HeaderT, NumberFor, }, - generic::BlockId, transaction_validity::TransactionValidity, RuntimeString, TransactionOutcome, + transaction_validity::TransactionValidity, + RuntimeString, TransactionOutcome, }; #[doc(hidden)] -pub use sp_core::{offchain, ExecutionContext}; -#[doc(hidden)] -pub use sp_version::{ApiId, RuntimeVersion, ApisVec, create_apis_vec}; -#[doc(hidden)] -pub use sp_std::{slice, mem}; +#[cfg(feature = "std")] +pub use sp_state_machine::{ + Backend as StateBackend, ChangesTrieState, InMemoryBackend, OverlayedChanges, StorageProof, +}; #[cfg(feature = "std")] use sp_std::result; #[doc(hidden)] -pub use codec::{Encode, Decode, DecodeLimit, self}; -use sp_core::OpaqueMetadata; +pub use sp_std::{mem, slice}; +#[doc(hidden)] +pub use sp_version::{create_apis_vec, ApiId, 
ApisVec, RuntimeVersion}; #[cfg(feature = "std")] -use std::{panic::UnwindSafe, cell::RefCell}; - +use std::{cell::RefCell, panic::UnwindSafe}; /// Maximum nesting level for extrinsics. pub const MAX_EXTRINSIC_DEPTH: u32 = 256; @@ -386,18 +387,18 @@ pub type ProofRecorder = sp_state_machine::ProofRecorder<::Hash> /// A type that is used as cache for the storage transactions. #[cfg(feature = "std")] -pub type StorageTransactionCache = - sp_state_machine::StorageTransactionCache< - >>::Transaction, HashFor, NumberFor - >; +pub type StorageTransactionCache = sp_state_machine::StorageTransactionCache< + >>::Transaction, + HashFor, + NumberFor, +>; #[cfg(feature = "std")] -pub type StorageChanges = - sp_state_machine::StorageChanges< - >>::Transaction, - HashFor, - NumberFor - >; +pub type StorageChanges = sp_state_machine::StorageChanges< + >>::Transaction, + HashFor, + NumberFor, +>; /// Extract the state backend type for a type that implements `ProvideRuntimeApi`. #[cfg(feature = "std")] @@ -463,29 +464,31 @@ pub trait ApiExt { /// Depending on the outcome of the closure, the transaction is committed or rolled-back. /// /// The internal result of the closure is returned afterwards. - fn execute_in_transaction TransactionOutcome, R>( - &self, - call: F, - ) -> R where Self: Sized; + fn execute_in_transaction TransactionOutcome, R>(&self, call: F) -> R + where + Self: Sized; /// Checks if the given api is implemented and versions match. - fn has_api( - &self, - at: &BlockId, - ) -> Result where Self: Sized; + fn has_api(&self, at: &BlockId) -> Result + where + Self: Sized; /// Check if the given api is implemented and the version passes a predicate. fn has_api_with bool>( &self, at: &BlockId, pred: P, - ) -> Result where Self: Sized; + ) -> Result + where + Self: Sized; /// Returns the version of the given api. 
fn api_version( &self, at: &BlockId, - ) -> Result, ApiError> where Self: Sized; + ) -> Result, ApiError> + where + Self: Sized; /// Start recording all accessed trie nodes for generating proofs. fn record_proof(&mut self); @@ -509,10 +512,9 @@ pub trait ApiExt { backend: &Self::StateBackend, changes_trie_state: Option<&ChangesTrieState, NumberFor>>, parent_hash: Block::Hash, - ) -> Result< - StorageChanges, - String - > where Self: Sized; + ) -> Result, String> + where + Self: Sized; } /// Parameters for [`CallApiAt::call_api_at`]. @@ -557,10 +559,7 @@ pub trait CallApiAt { ) -> Result, ApiError>; /// Returns the runtime version at the given block. - fn runtime_version_at( - &self, - at: &BlockId, - ) -> Result; + fn runtime_version_at(&self, at: &BlockId) -> Result; } /// Auxiliary wrapper that holds an api instance and binds it to the given lifetime. diff --git a/primitives/api/test/benches/bench.rs b/primitives/api/test/benches/bench.rs index 20ddbbe7116dc..b3d96a2db6a56 100644 --- a/primitives/api/test/benches/bench.rs +++ b/primitives/api/test/benches/bench.rs @@ -15,14 +15,13 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use criterion::{Criterion, criterion_group, criterion_main}; -use substrate_test_runtime_client::{ - DefaultTestClientBuilderExt, TestClientBuilder, - TestClientBuilderExt, runtime::TestAPI, -}; +use criterion::{criterion_group, criterion_main, Criterion}; +use sp_api::ProvideRuntimeApi; use sp_runtime::generic::BlockId; use sp_state_machine::ExecutionStrategy; -use sp_api::ProvideRuntimeApi; +use substrate_test_runtime_client::{ + runtime::TestAPI, DefaultTestClientBuilderExt, TestClientBuilder, TestClientBuilderExt, +}; fn sp_api_benchmark(c: &mut Criterion) { c.bench_function("add one with same runtime api", |b| { @@ -58,13 +57,17 @@ fn sp_api_benchmark(c: &mut Criterion) { }); c.bench_function("calling function by function pointer in wasm", |b| { - let client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::AlwaysWasm).build(); + let client = TestClientBuilder::new() + .set_execution_strategy(ExecutionStrategy::AlwaysWasm) + .build(); let block_id = BlockId::Number(client.chain_info().best_number); b.iter(|| client.runtime_api().benchmark_indirect_call(&block_id).unwrap()) }); c.bench_function("calling function in wasm", |b| { - let client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::AlwaysWasm).build(); + let client = TestClientBuilder::new() + .set_execution_strategy(ExecutionStrategy::AlwaysWasm) + .build(); let block_id = BlockId::Number(client.chain_info().best_number); b.iter(|| client.runtime_api().benchmark_direct_call(&block_id).unwrap()) }); diff --git a/primitives/api/test/tests/decl_and_impl.rs b/primitives/api/test/tests/decl_and_impl.rs index 54fb37133f468..5eeb2a6a771ed 100644 --- a/primitives/api/test/tests/decl_and_impl.rs +++ b/primitives/api/test/tests/decl_and_impl.rs @@ -16,12 +16,13 @@ // limitations under the License. 
use sp_api::{ - RuntimeApiInfo, decl_runtime_apis, impl_runtime_apis, mock_impl_runtime_apis, - ApiError, - ApiExt, + decl_runtime_apis, impl_runtime_apis, mock_impl_runtime_apis, ApiError, ApiExt, RuntimeApiInfo, }; -use sp_runtime::{traits::{GetNodeBlockType, Block as BlockT}, generic::BlockId}; use sp_core::NativeOrEncoded; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, GetNodeBlockType}, +}; use substrate_test_runtime_client::runtime::Block; /// The declaration of the `Runtime` type and the implementation of the `GetNodeBlockType` @@ -142,16 +143,22 @@ type TestClient = substrate_test_runtime_client::client::Client< #[test] fn test_client_side_function_signature() { - let _test: fn(&RuntimeApiImpl, &BlockId, u64) -> Result<(), ApiError> = - RuntimeApiImpl::::test; - let _something_with_block: - fn(&RuntimeApiImpl, &BlockId, Block) -> Result = - RuntimeApiImpl::::something_with_block; + let _test: fn( + &RuntimeApiImpl, + &BlockId, + u64, + ) -> Result<(), ApiError> = RuntimeApiImpl::::test; + let _something_with_block: fn( + &RuntimeApiImpl, + &BlockId, + Block, + ) -> Result = RuntimeApiImpl::::something_with_block; #[allow(deprecated)] - let _same_name_before_version_2: - fn(&RuntimeApiImpl, &BlockId) -> Result = - RuntimeApiImpl::::same_name_before_version_2; + let _same_name_before_version_2: fn( + &RuntimeApiImpl, + &BlockId, + ) -> Result = RuntimeApiImpl::::same_name_before_version_2; } #[test] @@ -186,9 +193,7 @@ fn check_runtime_api_versions() { fn mock_runtime_api_has_api() { let mock = MockApi { block: None }; - assert!( - mock.has_api::>(&BlockId::Number(0)).unwrap(), - ); + assert!(mock.has_api::>(&BlockId::Number(0)).unwrap(),); assert!(mock.has_api::>(&BlockId::Number(0)).unwrap()); } diff --git a/primitives/api/test/tests/runtime_calls.rs b/primitives/api/test/tests/runtime_calls.rs index b60c7a09cb616..b0b14ec1e944e 100644 --- a/primitives/api/test/tests/runtime_calls.rs +++ b/primitives/api/test/tests/runtime_calls.rs @@ 
-15,21 +15,23 @@ // See the License for the specific language governing permissions and // limitations under the License. -use sp_api::{ProvideRuntimeApi, Core}; +use sp_api::{Core, ProvideRuntimeApi}; +use sp_runtime::{ + generic::BlockId, + traits::{HashFor, Header as HeaderT}, +}; +use sp_state_machine::{ + create_proof_check_backend, execution_proof_check_on_trie_backend, ExecutionStrategy, +}; use substrate_test_runtime_client::{ prelude::*, + runtime::{Block, DecodeFails, Header, TestAPI, Transfer}, DefaultTestClientBuilderExt, TestClientBuilder, - runtime::{TestAPI, DecodeFails, Transfer, Block, Header}, -}; -use sp_runtime::{generic::BlockId, traits::{Header as HeaderT, HashFor}}; -use sp_state_machine::{ - ExecutionStrategy, create_proof_check_backend, - execution_proof_check_on_trie_backend, }; -use sp_consensus::SelectChain; use codec::Encode; use sc_block_builder::BlockBuilderProvider; +use sp_consensus::SelectChain; fn calling_function_with_strat(strat: ExecutionStrategy) { let client = TestClientBuilder::new().set_execution_strategy(strat).build(); @@ -52,7 +54,9 @@ fn calling_wasm_runtime_function() { #[test] #[should_panic(expected = "FailedToConvertParameter { function: \"fail_convert_parameter\"")] fn calling_native_runtime_function_with_non_decodable_parameter() { - let client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::NativeWhenPossible).build(); + let client = TestClientBuilder::new() + .set_execution_strategy(ExecutionStrategy::NativeWhenPossible) + .build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); runtime_api.fail_convert_parameter(&block_id, DecodeFails::new()).unwrap(); @@ -61,7 +65,9 @@ fn calling_native_runtime_function_with_non_decodable_parameter() { #[test] #[should_panic(expected = "FailedToConvertReturnValue { function: \"fail_convert_return_value\"")] fn calling_native_runtime_function_with_non_decodable_return_value() { - let client = 
TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::NativeWhenPossible).build(); + let client = TestClientBuilder::new() + .set_execution_strategy(ExecutionStrategy::NativeWhenPossible) + .build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); runtime_api.fail_convert_return_value(&block_id).unwrap(); @@ -69,7 +75,9 @@ fn calling_native_runtime_function_with_non_decodable_return_value() { #[test] fn calling_native_runtime_signature_changed_function() { - let client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::NativeWhenPossible).build(); + let client = TestClientBuilder::new() + .set_execution_strategy(ExecutionStrategy::NativeWhenPossible) + .build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); @@ -78,7 +86,9 @@ fn calling_native_runtime_signature_changed_function() { #[test] fn calling_wasm_runtime_signature_changed_old_function() { - let client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::AlwaysWasm).build(); + let client = TestClientBuilder::new() + .set_execution_strategy(ExecutionStrategy::AlwaysWasm) + .build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); @@ -103,10 +113,11 @@ fn calling_with_both_strategy_and_fail_on_native_should_work() { assert_eq!(runtime_api.fail_on_native(&block_id).unwrap(), 1); } - #[test] fn calling_with_native_else_wasm_and_fail_on_wasm_should_work() { - let client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::NativeElseWasm).build(); + let client = TestClientBuilder::new() + .set_execution_strategy(ExecutionStrategy::NativeElseWasm) + .build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); assert_eq!(runtime_api.fail_on_wasm(&block_id).unwrap(), 1); @@ -114,7 +125,9 @@ fn 
calling_with_native_else_wasm_and_fail_on_wasm_should_work() { #[test] fn calling_with_native_else_wasm_and_fail_on_native_should_work() { - let client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::NativeElseWasm).build(); + let client = TestClientBuilder::new() + .set_execution_strategy(ExecutionStrategy::NativeElseWasm) + .build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); assert_eq!(runtime_api.fail_on_native(&block_id).unwrap(), 1); @@ -122,7 +135,9 @@ fn calling_with_native_else_wasm_and_fail_on_native_should_work() { #[test] fn use_trie_function() { - let client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::AlwaysWasm).build(); + let client = TestClientBuilder::new() + .set_execution_strategy(ExecutionStrategy::AlwaysWasm) + .build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); assert_eq!(runtime_api.use_trie(&block_id).unwrap(), 2); @@ -133,10 +148,18 @@ fn initialize_block_works() { let client = TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::Both).build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); - runtime_api.initialize_block( - &block_id, - &Header::new(1, Default::default(), Default::default(), Default::default(), Default::default()), - ).unwrap(); + runtime_api + .initialize_block( + &block_id, + &Header::new( + 1, + Default::default(), + Default::default(), + Default::default(), + Default::default(), + ), + ) + .unwrap(); assert_eq!(runtime_api.get_block_number(&block_id).unwrap(), 1); } @@ -165,7 +188,8 @@ fn record_proof_works() { nonce: 0, from: AccountKeyring::Alice.into(), to: Default::default(), - }.into_signed_tx(); + } + .into_signed_tx(); // Build the block and record proof let mut builder = client @@ -177,15 +201,12 @@ fn record_proof_works() { let backend = create_proof_check_backend::>( 
storage_root, proof.expect("Proof was generated"), - ).expect("Creates proof backend."); + ) + .expect("Creates proof backend."); // Use the proof backend to execute `execute_block`. let mut overlay = Default::default(); - let executor = NativeExecutor::::new( - WasmExecutionMethod::Interpreted, - None, - 8, - ); + let executor = NativeExecutor::::new(WasmExecutionMethod::Interpreted, None, 8); execution_proof_check_on_trie_backend::<_, u64, _, _>( &backend, &mut overlay, @@ -194,7 +215,8 @@ fn record_proof_works() { "Core_execute_block", &block.encode(), &runtime_code, - ).expect("Executes block while using the proof backend"); + ) + .expect("Executes block while using the proof backend"); } #[test] @@ -203,7 +225,8 @@ fn call_runtime_api_with_multiple_arguments() { let data = vec![1, 2, 4, 5, 6, 7, 8, 8, 10, 12]; let block_id = BlockId::Number(client.chain_info().best_number); - client.runtime_api() + client + .runtime_api() .test_multiple_arguments(&block_id, data.clone(), data.clone(), data.len() as u32) .unwrap(); } @@ -213,8 +236,8 @@ fn disable_logging_works() { if std::env::var("RUN_TEST").is_ok() { sp_tracing::try_init_simple(); - let mut builder = TestClientBuilder::new() - .set_execution_strategy(ExecutionStrategy::AlwaysWasm); + let mut builder = + TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::AlwaysWasm); builder.genesis_init_mut().set_wasm_code( substrate_test_runtime_client::runtime::wasm_binary_logging_disabled_unwrap().to_vec(), ); diff --git a/primitives/application-crypto/src/ecdsa.rs b/primitives/application-crypto/src/ecdsa.rs index fe54dab39eef8..915e16ba3b1a2 100644 --- a/primitives/application-crypto/src/ecdsa.rs +++ b/primitives/application-crypto/src/ecdsa.rs @@ -17,7 +17,7 @@ //! Ecdsa crypto types. 
-use crate::{RuntimePublic, KeyTypeId}; +use crate::{KeyTypeId, RuntimePublic}; use sp_std::vec::Vec; @@ -33,9 +33,9 @@ mod app { } } -pub use app::{Public as AppPublic, Signature as AppSignature}; #[cfg(feature = "full_crypto")] pub use app::Pair as AppPair; +pub use app::{Public as AppPublic, Signature as AppSignature}; impl RuntimePublic for Public { type Signature = Signature; diff --git a/primitives/application-crypto/src/ed25519.rs b/primitives/application-crypto/src/ed25519.rs index 98eb4727df63e..09ce48fcb274c 100644 --- a/primitives/application-crypto/src/ed25519.rs +++ b/primitives/application-crypto/src/ed25519.rs @@ -17,7 +17,7 @@ //! Ed25519 crypto types. -use crate::{RuntimePublic, KeyTypeId}; +use crate::{KeyTypeId, RuntimePublic}; use sp_std::vec::Vec; @@ -33,9 +33,9 @@ mod app { } } -pub use app::{Public as AppPublic, Signature as AppSignature}; #[cfg(feature = "full_crypto")] pub use app::Pair as AppPair; +pub use app::{Public as AppPublic, Signature as AppSignature}; impl RuntimePublic for Public { type Signature = Signature; diff --git a/primitives/application-crypto/src/lib.rs b/primitives/application-crypto/src/lib.rs index 24d12e3a31e6a..4c326150e9244 100644 --- a/primitives/application-crypto/src/lib.rs +++ b/primitives/application-crypto/src/lib.rs @@ -18,15 +18,18 @@ //! Traits and macros for constructing application specific strongly typed crypto wrappers. 
#![warn(missing_docs)] - #![cfg_attr(not(feature = "std"), no_std)] -#[doc(hidden)] -pub use sp_core::{self, crypto::{CryptoType, CryptoTypePublicPair, Public, Derive, IsWrappedBy, Wraps}, RuntimeDebug}; +pub use sp_core::crypto::{key_types, CryptoTypeId, KeyTypeId}; #[doc(hidden)] #[cfg(feature = "full_crypto")] -pub use sp_core::crypto::{SecretStringError, DeriveJunction, Ss58Codec, Pair}; -pub use sp_core::crypto::{KeyTypeId, CryptoTypeId, key_types}; +pub use sp_core::crypto::{DeriveJunction, Pair, SecretStringError, Ss58Codec}; +#[doc(hidden)] +pub use sp_core::{ + self, + crypto::{CryptoType, CryptoTypePublicPair, Derive, IsWrappedBy, Public, Wraps}, + RuntimeDebug, +}; #[doc(hidden)] pub use codec; @@ -36,15 +39,11 @@ pub use scale_info; #[cfg(feature = "std")] pub use serde; #[doc(hidden)] -pub use sp_std::{ - convert::TryFrom, - ops::Deref, - vec::Vec, -}; +pub use sp_std::{convert::TryFrom, ops::Deref, vec::Vec}; +pub mod ecdsa; pub mod ed25519; pub mod sr25519; -pub mod ecdsa; mod traits; pub use traits::*; @@ -63,8 +62,17 @@ pub use traits::*; macro_rules! app_crypto { ($module:ident, $key_type:expr) => { $crate::app_crypto_public_full_crypto!($module::Public, $key_type, $module::CRYPTO_ID); - $crate::app_crypto_public_common!($module::Public, $module::Signature, $key_type, $module::CRYPTO_ID); - $crate::app_crypto_signature_full_crypto!($module::Signature, $key_type, $module::CRYPTO_ID); + $crate::app_crypto_public_common!( + $module::Public, + $module::Signature, + $key_type, + $module::CRYPTO_ID + ); + $crate::app_crypto_signature_full_crypto!( + $module::Signature, + $key_type, + $module::CRYPTO_ID + ); $crate::app_crypto_signature_common!($module::Signature, $key_type); $crate::app_crypto_pair!($module::Pair, $key_type, $module::CRYPTO_ID); }; @@ -84,8 +92,17 @@ macro_rules! app_crypto { macro_rules! 
app_crypto { ($module:ident, $key_type:expr) => { $crate::app_crypto_public_not_full_crypto!($module::Public, $key_type, $module::CRYPTO_ID); - $crate::app_crypto_public_common!($module::Public, $module::Signature, $key_type, $module::CRYPTO_ID); - $crate::app_crypto_signature_not_full_crypto!($module::Signature, $key_type, $module::CRYPTO_ID); + $crate::app_crypto_public_common!( + $module::Public, + $module::Signature, + $key_type, + $module::CRYPTO_ID + ); + $crate::app_crypto_signature_not_full_crypto!( + $module::Signature, + $key_type, + $module::CRYPTO_ID + ); $crate::app_crypto_signature_common!($module::Signature, $key_type); }; } @@ -95,7 +112,7 @@ macro_rules! app_crypto { #[macro_export] macro_rules! app_crypto_pair { ($pair:ty, $key_type:expr, $crypto_type:expr) => { - $crate::wrap!{ + $crate::wrap! { /// A generic `AppPublic` wrapper type over $pair crypto; this has no specific App. #[derive(Clone)] pub struct Pair($pair); @@ -113,12 +130,16 @@ macro_rules! app_crypto_pair { $crate::app_crypto_pair_functions_if_std!($pair); - fn derive< - Iter: Iterator - >(&self, path: Iter, seed: Option) -> Result<(Self, Option), Self::DeriveError> { + fn derive>( + &self, + path: Iter, + seed: Option, + ) -> Result<(Self, Option), Self::DeriveError> { self.0.derive(path, seed).map(|x| (Self(x.0), x.1)) } - fn from_seed(seed: &Self::Seed) -> Self { Self(<$pair>::from_seed(seed)) } + fn from_seed(seed: &Self::Seed) -> Self { + Self(<$pair>::from_seed(seed)) + } fn from_seed_slice(seed: &[u8]) -> Result { <$pair>::from_seed_slice(seed).map(Self) } @@ -139,8 +160,12 @@ macro_rules! 
app_crypto_pair { ) -> bool { <$pair>::verify_weak(sig, message, pubkey) } - fn public(&self) -> Self::Public { Public(self.0.public()) } - fn to_raw_vec(&self) -> $crate::Vec { self.0.to_raw_vec() } + fn public(&self) -> Self::Public { + Public(self.0.public()) + } + fn to_raw_vec(&self) -> $crate::Vec { + self.0.to_raw_vec() + } } impl $crate::AppKey for Pair { @@ -169,22 +194,22 @@ macro_rules! app_crypto_pair_functions_if_std { (Self(r.0), r.1, r.2) } - fn from_phrase(phrase: &str, password: Option<&str>) - -> Result<(Self, Self::Seed), $crate::SecretStringError> - { + fn from_phrase( + phrase: &str, + password: Option<&str>, + ) -> Result<(Self, Self::Seed), $crate::SecretStringError> { <$pair>::from_phrase(phrase, password).map(|r| (Self(r.0), r.1)) } - } + }; } #[doc(hidden)] #[cfg(not(feature = "std"))] #[macro_export] macro_rules! app_crypto_pair_functions_if_std { - ($pair:ty) => {} + ($pair:ty) => {}; } - /// Declares Public type which is functionally equivalent to `$public`, but is new /// Application-specific type whose identifier is `$key_type`. /// can only be used together with `full_crypto` feature @@ -193,7 +218,7 @@ macro_rules! app_crypto_pair_functions_if_std { #[macro_export] macro_rules! app_crypto_public_full_crypto { ($public:ty, $key_type:expr, $crypto_type:expr) => { - $crate::wrap!{ + $crate::wrap! { /// A generic `AppPublic` wrapper type over $public crypto; this has no specific App. #[derive( Clone, Default, Eq, Hash, PartialEq, PartialOrd, Ord, @@ -219,7 +244,7 @@ macro_rules! app_crypto_public_full_crypto { const ID: $crate::KeyTypeId = $key_type; const CRYPTO_ID: $crate::CryptoTypeId = $crypto_type; } - } + }; } /// Declares Public type which is functionally equivalent to `$public`, but is new @@ -230,7 +255,7 @@ macro_rules! app_crypto_public_full_crypto { #[macro_export] macro_rules! app_crypto_public_not_full_crypto { ($public:ty, $key_type:expr, $crypto_type:expr) => { - $crate::wrap!{ + $crate::wrap! 
{ /// A generic `AppPublic` wrapper type over $public crypto; this has no specific App. #[derive( Clone, Default, Eq, PartialEq, Ord, PartialOrd, @@ -251,7 +276,7 @@ macro_rules! app_crypto_public_not_full_crypto { const ID: $crate::KeyTypeId = $key_type; const CRYPTO_ID: $crate::CryptoTypeId = $crypto_type; } - } + }; } /// Declares Public type which is functionally equivalent to `$public`, but is new @@ -264,15 +289,21 @@ macro_rules! app_crypto_public_common { $crate::app_crypto_public_common_if_std!(); impl AsRef<[u8]> for Public { - fn as_ref(&self) -> &[u8] { self.0.as_ref() } + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } } impl AsMut<[u8]> for Public { - fn as_mut(&mut self) -> &mut [u8] { self.0.as_mut() } + fn as_mut(&mut self) -> &mut [u8] { + self.0.as_mut() + } } impl $crate::Public for Public { - fn from_slice(x: &[u8]) -> Self { Self(<$public>::from_slice(x)) } + fn from_slice(x: &[u8]) -> Self { + Self(<$public>::from_slice(x)) + } fn to_public_crypto_pair(&self) -> $crate::CryptoTypePublicPair { $crate::CryptoTypePublicPair($crypto_type, self.to_raw_vec()) @@ -283,14 +314,20 @@ macro_rules! app_crypto_public_common { type Generic = $public; } - impl $crate::RuntimeAppPublic for Public where $public: $crate::RuntimePublic { + impl $crate::RuntimeAppPublic for Public + where + $public: $crate::RuntimePublic, + { const ID: $crate::KeyTypeId = $key_type; const CRYPTO_ID: $crate::CryptoTypeId = $crypto_type; type Signature = Signature; fn all() -> $crate::Vec { - <$public as $crate::RuntimePublic>::all($key_type).into_iter().map(Self).collect() + <$public as $crate::RuntimePublic>::all($key_type) + .into_iter() + .map(Self) + .collect() } fn generate_pair(seed: Option<$crate::Vec>) -> Self { @@ -298,11 +335,8 @@ macro_rules! 
app_crypto_public_common { } fn sign>(&self, msg: &M) -> Option { - <$public as $crate::RuntimePublic>::sign( - self.as_ref(), - $key_type, - msg, - ).map(Signature) + <$public as $crate::RuntimePublic>::sign(self.as_ref(), $key_type, msg) + .map(Signature) } fn verify>(&self, msg: &M, signature: &Self::Signature) -> bool { @@ -322,10 +356,7 @@ macro_rules! app_crypto_public_common { impl From<&Public> for $crate::CryptoTypePublicPair { fn from(key: &Public) -> Self { - $crate::CryptoTypePublicPair( - $crypto_type, - $crate::Public::to_raw_vec(key), - ) + $crate::CryptoTypePublicPair($crypto_type, $crate::Public::to_raw_vec(key)) } } @@ -336,7 +367,7 @@ macro_rules! app_crypto_public_common { <$public>::try_from(data).map(Into::into) } } - } + }; } /// Implements traits for the public key type if `feature = "std"` is enabled. @@ -346,8 +377,9 @@ macro_rules! app_crypto_public_common { macro_rules! app_crypto_public_common_if_std { () => { impl $crate::Derive for Public { - fn derive>(&self, - path: Iter + fn derive>( + &self, + path: Iter, ) -> Option { self.0.derive(path).map(Self) } @@ -361,8 +393,9 @@ macro_rules! app_crypto_public_common_if_std { } impl $crate::serde::Serialize for Public { - fn serialize(&self, serializer: S) -> std::result::Result where - S: $crate::serde::Serializer + fn serialize(&self, serializer: S) -> std::result::Result + where + S: $crate::serde::Serializer, { use $crate::Ss58Codec; serializer.serialize_str(&self.to_ss58check()) @@ -370,15 +403,16 @@ macro_rules! app_crypto_public_common_if_std { } impl<'de> $crate::serde::Deserialize<'de> for Public { - fn deserialize(deserializer: D) -> std::result::Result where - D: $crate::serde::Deserializer<'de> + fn deserialize(deserializer: D) -> std::result::Result + where + D: $crate::serde::Deserializer<'de>, { use $crate::Ss58Codec; Public::from_ss58check(&String::deserialize(deserializer)?) 
.map_err(|e| $crate::serde::de::Error::custom(format!("{:?}", e))) } } - } + }; } #[cfg(not(feature = "std"))] @@ -387,10 +421,9 @@ macro_rules! app_crypto_public_common_if_std { macro_rules! app_crypto_public_common_if_std { () => { impl $crate::Derive for Public {} - } + }; } - /// Declares Signature type which is functionally equivalent to `$sig`, but is new /// Application-specific type whose identifier is `$key_type`. /// can only be used together with `full_crypto` feature @@ -423,7 +456,7 @@ macro_rules! app_crypto_signature_full_crypto { const ID: $crate::KeyTypeId = $key_type; const CRYPTO_ID: $crate::CryptoTypeId = $crypto_type; } - } + }; } /// Declares Signature type which is functionally equivalent to `$sig`, but is new @@ -454,7 +487,7 @@ macro_rules! app_crypto_signature_not_full_crypto { const ID: $crate::KeyTypeId = $key_type; const CRYPTO_ID: $crate::CryptoTypeId = $crypto_type; } - } + }; } /// Declares Signature type which is functionally equivalent to `$sig`, but is new @@ -467,11 +500,15 @@ macro_rules! app_crypto_signature_common { impl $crate::Deref for Signature { type Target = [u8]; - fn deref(&self) -> &Self::Target { self.0.as_ref() } + fn deref(&self) -> &Self::Target { + self.0.as_ref() + } } impl AsRef<[u8]> for Signature { - fn as_ref(&self) -> &[u8] { self.0.as_ref() } + fn as_ref(&self) -> &[u8] { + self.0.as_ref() + } } impl $crate::AppSignature for Signature { @@ -485,7 +522,7 @@ macro_rules! app_crypto_signature_common { Ok(<$sig>::try_from(data.as_slice())?.into()) } } - } + }; } /// Implement bidirectional `From` and on-way `AsRef`/`AsMut` for two types, `$inner` and `$outer`. @@ -553,10 +590,9 @@ macro_rules! with_pair { } } - #[doc(hidden)] #[macro_export] #[cfg(all(not(feature = "std"), not(feature = "full_crypto")))] macro_rules! 
with_pair { - ( $( $def:tt )* ) => {} + ( $( $def:tt )* ) => {}; } diff --git a/primitives/application-crypto/src/sr25519.rs b/primitives/application-crypto/src/sr25519.rs index f3ce867858339..f51236f2ab384 100644 --- a/primitives/application-crypto/src/sr25519.rs +++ b/primitives/application-crypto/src/sr25519.rs @@ -17,7 +17,7 @@ //! Sr25519 crypto types. -use crate::{RuntimePublic, KeyTypeId}; +use crate::{KeyTypeId, RuntimePublic}; use sp_std::vec::Vec; @@ -33,9 +33,9 @@ mod app { } } -pub use app::{Public as AppPublic, Signature as AppSignature}; #[cfg(feature = "full_crypto")] pub use app::Pair as AppPair; +pub use app::{Public as AppPublic, Signature as AppSignature}; impl RuntimePublic for Public { type Signature = Signature; diff --git a/primitives/application-crypto/src/traits.rs b/primitives/application-crypto/src/traits.rs index e64b976b65b68..376d12f0c7a3e 100644 --- a/primitives/application-crypto/src/traits.rs +++ b/primitives/application-crypto/src/traits.rs @@ -19,7 +19,7 @@ use sp_core::crypto::Pair; use codec::Codec; -use sp_core::crypto::{KeyTypeId, CryptoType, CryptoTypeId, IsWrappedBy, Public}; +use sp_core::crypto::{CryptoType, CryptoTypeId, IsWrappedBy, KeyTypeId, Public}; use sp_std::{fmt::Debug, vec::Vec}; /// An application-specific key. @@ -57,7 +57,7 @@ impl MaybeHash for T {} /// Type which implements Debug and Hash in std, not when no-std (no-std variant with crypto). #[cfg(all(not(feature = "std"), feature = "full_crypto"))] -pub trait MaybeDebugHash: sp_std::hash::Hash {} +pub trait MaybeDebugHash: sp_std::hash::Hash {} #[cfg(all(not(feature = "std"), feature = "full_crypto"))] impl MaybeDebugHash for T {} @@ -66,15 +66,23 @@ pub trait AppPublic: AppKey + Public + Ord + PartialOrd + Eq + PartialEq + Debug + MaybeHash + codec::Codec { /// The wrapped type which is just a plain instance of `Public`. 
- type Generic: - IsWrappedBy + Public + Ord + PartialOrd + Eq + PartialEq + Debug + MaybeHash + codec::Codec; + type Generic: IsWrappedBy + + Public + + Ord + + PartialOrd + + Eq + + PartialEq + + Debug + + MaybeHash + + codec::Codec; } /// A application's key pair. #[cfg(feature = "full_crypto")] -pub trait AppPair: AppKey + Pair::Public> { +pub trait AppPair: AppKey + Pair::Public> { /// The wrapped type which is just a plain instance of `Pair`. - type Generic: IsWrappedBy + Pair::Public as AppPublic>::Generic>; + type Generic: IsWrappedBy + + Pair::Public as AppPublic>::Generic>; } /// A application's signature. diff --git a/primitives/application-crypto/test/src/ecdsa.rs b/primitives/application-crypto/test/src/ecdsa.rs index 5ad10e79ef96f..c4aa6a2afbd61 100644 --- a/primitives/application-crypto/test/src/ecdsa.rs +++ b/primitives/application-crypto/test/src/ecdsa.rs @@ -16,28 +16,22 @@ // limitations under the License. //! Integration tests for ecdsa -use std::sync::Arc; +use sp_api::ProvideRuntimeApi; +use sp_application_crypto::ecdsa::{AppPair, AppPublic}; +use sp_core::{crypto::Pair, testing::ECDSA}; +use sp_keystore::{testing::KeyStore, SyncCryptoStore}; use sp_runtime::generic::BlockId; -use sp_core::{ - crypto::Pair, - testing::ECDSA, -}; -use sp_keystore::{ - SyncCryptoStore, - testing::KeyStore, -}; +use std::sync::Arc; use substrate_test_runtime_client::{ - TestClientBuilder, DefaultTestClientBuilderExt, TestClientBuilderExt, - runtime::TestAPI, + runtime::TestAPI, DefaultTestClientBuilderExt, TestClientBuilder, TestClientBuilderExt, }; -use sp_api::ProvideRuntimeApi; -use sp_application_crypto::ecdsa::{AppPair, AppPublic}; #[test] fn ecdsa_works_in_runtime() { let keystore = Arc::new(KeyStore::new()); let test_client = TestClientBuilder::new().set_keystore(keystore.clone()).build(); - let (signature, public) = test_client.runtime_api() + let (signature, public) = test_client + .runtime_api() .test_ecdsa_crypto(&BlockId::Number(0)) .expect("Tests 
`ecdsa` crypto."); diff --git a/primitives/application-crypto/test/src/ed25519.rs b/primitives/application-crypto/test/src/ed25519.rs index 06b962f1902bc..7cfd801388c78 100644 --- a/primitives/application-crypto/test/src/ed25519.rs +++ b/primitives/application-crypto/test/src/ed25519.rs @@ -17,28 +17,22 @@ //! Integration tests for ed25519 -use std::sync::Arc; +use sp_api::ProvideRuntimeApi; +use sp_application_crypto::ed25519::{AppPair, AppPublic}; +use sp_core::{crypto::Pair, testing::ED25519}; +use sp_keystore::{testing::KeyStore, SyncCryptoStore}; use sp_runtime::generic::BlockId; -use sp_core::{ - crypto::Pair, - testing::ED25519, -}; -use sp_keystore::{ - SyncCryptoStore, - testing::KeyStore, -}; +use std::sync::Arc; use substrate_test_runtime_client::{ - TestClientBuilder, DefaultTestClientBuilderExt, TestClientBuilderExt, - runtime::TestAPI, + runtime::TestAPI, DefaultTestClientBuilderExt, TestClientBuilder, TestClientBuilderExt, }; -use sp_api::ProvideRuntimeApi; -use sp_application_crypto::ed25519::{AppPair, AppPublic}; #[test] fn ed25519_works_in_runtime() { let keystore = Arc::new(KeyStore::new()); let test_client = TestClientBuilder::new().set_keystore(keystore.clone()).build(); - let (signature, public) = test_client.runtime_api() + let (signature, public) = test_client + .runtime_api() .test_ed25519_crypto(&BlockId::Number(0)) .expect("Tests `ed25519` crypto."); diff --git a/primitives/application-crypto/test/src/lib.rs b/primitives/application-crypto/test/src/lib.rs index bee926f8dd8c1..6b7734764e793 100644 --- a/primitives/application-crypto/test/src/lib.rs +++ b/primitives/application-crypto/test/src/lib.rs @@ -17,9 +17,9 @@ //! 
Integration tests for application crypto +#[cfg(test)] +mod ecdsa; #[cfg(test)] mod ed25519; #[cfg(test)] mod sr25519; -#[cfg(test)] -mod ecdsa; diff --git a/primitives/application-crypto/test/src/sr25519.rs b/primitives/application-crypto/test/src/sr25519.rs index 889f662b68140..12dfbc609fb01 100644 --- a/primitives/application-crypto/test/src/sr25519.rs +++ b/primitives/application-crypto/test/src/sr25519.rs @@ -17,28 +17,22 @@ //! Integration tests for sr25519 -use std::sync::Arc; +use sp_api::ProvideRuntimeApi; +use sp_application_crypto::sr25519::{AppPair, AppPublic}; +use sp_core::{crypto::Pair, testing::SR25519}; +use sp_keystore::{testing::KeyStore, SyncCryptoStore}; use sp_runtime::generic::BlockId; -use sp_core::{ - crypto::Pair, - testing::SR25519, -}; -use sp_keystore::{ - SyncCryptoStore, - testing::KeyStore, -}; +use std::sync::Arc; use substrate_test_runtime_client::{ - TestClientBuilder, DefaultTestClientBuilderExt, TestClientBuilderExt, - runtime::TestAPI, + runtime::TestAPI, DefaultTestClientBuilderExt, TestClientBuilder, TestClientBuilderExt, }; -use sp_api::ProvideRuntimeApi; -use sp_application_crypto::sr25519::{AppPair, AppPublic}; #[test] fn sr25519_works_in_runtime() { let keystore = Arc::new(KeyStore::new()); let test_client = TestClientBuilder::new().set_keystore(keystore.clone()).build(); - let (signature, public) = test_client.runtime_api() + let (signature, public) = test_client + .runtime_api() .test_sr25519_crypto(&BlockId::Number(0)) .expect("Tests `sr25519` crypto."); diff --git a/primitives/arithmetic/benches/bench.rs b/primitives/arithmetic/benches/bench.rs index fd535c1d2d0ff..02db00aa0bf82 100644 --- a/primitives/arithmetic/benches/bench.rs +++ b/primitives/arithmetic/benches/bench.rs @@ -15,9 +15,9 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use criterion::{Criterion, Throughput, BenchmarkId, criterion_group, criterion_main}; -use sp_arithmetic::biguint::{BigUint, Single}; +use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput}; use rand::Rng; +use sp_arithmetic::biguint::{BigUint, Single}; fn random_big_uint(size: usize) -> BigUint { let mut rng = rand::thread_rng(); @@ -73,7 +73,7 @@ fn bench_division(c: &mut Criterion) { } } -criterion_group!{ +criterion_group! { name = benches; config = Criterion::default(); targets = bench_addition, bench_subtraction, bench_multiplication, bench_division diff --git a/primitives/arithmetic/fuzzer/src/biguint.rs b/primitives/arithmetic/fuzzer/src/biguint.rs index 57be7f5342043..ca5b8379afff5 100644 --- a/primitives/arithmetic/fuzzer/src/biguint.rs +++ b/primitives/arithmetic/fuzzer/src/biguint.rs @@ -60,8 +60,13 @@ fn main() { let expected = ue.unwrap() + ve.unwrap(); let t = u.clone().add(&v); assert_eq!( - u128::try_from(t.clone()).unwrap(), expected, - "{:?} + {:?} ===> {:?} != {:?}", u, v, t, expected, + u128::try_from(t.clone()).unwrap(), + expected, + "{:?} + {:?} ===> {:?} != {:?}", + u, + v, + t, + expected, ); } @@ -74,8 +79,13 @@ fn main() { let t = t.unwrap(); let expected = expected.unwrap(); assert_eq!( - u128::try_from(t.clone()).unwrap(), expected, - "{:?} - {:?} ===> {:?} != {:?}", u, v, t, expected, + u128::try_from(t.clone()).unwrap(), + expected, + "{:?} - {:?} ===> {:?} != {:?}", + u, + v, + t, + expected, ); } } @@ -84,31 +94,51 @@ fn main() { let expected = ue.unwrap() * ve.unwrap(); let t = u.clone().mul(&v); assert_eq!( - u128::try_from(t.clone()).unwrap(), expected, - "{:?} * {:?} ===> {:?} != {:?}", u, v, t, expected, + u128::try_from(t.clone()).unwrap(), + expected, + "{:?} * {:?} ===> {:?} != {:?}", + u, + v, + t, + expected, ); } if check_digit_lengths(&u, &v, 4) { let (ue, ve) = (ue.unwrap(), ve.unwrap()); if ve == 0 { - return; + return } let (q, r) = (ue / ve, ue % ve); if let Some((qq, rr)) = 
u.clone().div(&v, true) { assert_eq!( - u128::try_from(qq.clone()).unwrap(), q, - "{:?} / {:?} ===> {:?} != {:?}", u, v, qq, q, + u128::try_from(qq.clone()).unwrap(), + q, + "{:?} / {:?} ===> {:?} != {:?}", + u, + v, + qq, + q, ); assert_eq!( - u128::try_from(rr.clone()).unwrap(), r, - "{:?} % {:?} ===> {:?} != {:?}", u, v, rr, r, + u128::try_from(rr.clone()).unwrap(), + r, + "{:?} % {:?} ===> {:?} != {:?}", + u, + v, + rr, + r, ); } else if v.len() == 1 { let qq = u.clone().div_unit(ve as Single); assert_eq!( - u128::try_from(qq.clone()).unwrap(), q, - "[single] {:?} / {:?} ===> {:?} != {:?}", u, v, qq, q, + u128::try_from(qq.clone()).unwrap(), + q, + "[single] {:?} / {:?} ===> {:?} != {:?}", + u, + v, + qq, + q, ); } else if v.msb() != 0 && u.msb() != 0 && u.len() > v.len() { panic!("div returned none for an unexpected reason"); @@ -175,7 +205,7 @@ fn assert_biguints_eq(a: &BigUint, b: &num_bigint::BigUint) { // `num_bigint::BigUint` doesn't expose it's internals, so we need to convert into that to // compare. - let limbs = (0 .. a.len()).map(|i| a.get(i)).collect(); + let limbs = (0..a.len()).map(|i| a.get(i)).collect(); let num_a = num_bigint::BigUint::new(limbs); assert!(&num_a == b, "\narithmetic: {:?}\nnum-bigint: {:?}", a, b); diff --git a/primitives/arithmetic/fuzzer/src/fixed_point.rs b/primitives/arithmetic/fuzzer/src/fixed_point.rs index db415ecb84c75..d8f058ae51e2c 100644 --- a/primitives/arithmetic/fuzzer/src/fixed_point.rs +++ b/primitives/arithmetic/fuzzer/src/fixed_point.rs @@ -28,7 +28,7 @@ //! [here](https://docs.rs/honggfuzz/). use honggfuzz::fuzz; -use sp_arithmetic::{FixedPointNumber, FixedI64, traits::Saturating}; +use sp_arithmetic::{traits::Saturating, FixedI64, FixedPointNumber}; fn main() { loop { @@ -38,7 +38,8 @@ fn main() { // Check `from_rational` and division are consistent. 
if y != 0 { - let f1 = FixedI64::saturating_from_integer(x) / FixedI64::saturating_from_integer(y); + let f1 = + FixedI64::saturating_from_integer(x) / FixedI64::saturating_from_integer(y); let f2 = FixedI64::saturating_from_rational(x, y); assert_eq!(f1.into_inner(), f2.into_inner()); } @@ -75,7 +76,8 @@ fn main() { let a = FixedI64::saturating_from_rational(2, 5); let b = a.saturating_mul_acc_int(x); let xx = FixedI64::saturating_from_integer(x); - let d = a.saturating_mul(xx).saturating_add(xx).into_inner() as i128 / FixedI64::accuracy() as i128; + let d = a.saturating_mul(xx).saturating_add(xx).into_inner() as i128 / + FixedI64::accuracy() as i128; assert_eq!(b, d); }); } diff --git a/primitives/arithmetic/fuzzer/src/multiply_by_rational.rs b/primitives/arithmetic/fuzzer/src/multiply_by_rational.rs index a1689716b56c6..d829a93ad4bb2 100644 --- a/primitives/arithmetic/fuzzer/src/multiply_by_rational.rs +++ b/primitives/arithmetic/fuzzer/src/multiply_by_rational.rs @@ -60,7 +60,7 @@ fn main() { fn mul_div(a: u128, b: u128, c: u128) -> u128 { use primitive_types::U256; if a.is_zero() { - return Zero::zero(); + return Zero::zero() } let c = c.max(1); diff --git a/primitives/arithmetic/fuzzer/src/normalize.rs b/primitives/arithmetic/fuzzer/src/normalize.rs index 48d52ba71bab6..7f9f8cb3c79e0 100644 --- a/primitives/arithmetic/fuzzer/src/normalize.rs +++ b/primitives/arithmetic/fuzzer/src/normalize.rs @@ -15,7 +15,6 @@ // See the License for the specific language governing permissions and // limitations under the License. - //! # Running //! Running this fuzzer can be done with `cargo hfuzz run normalize`. `honggfuzz` CLI options can //! be used by setting `HFUZZ_RUN_ARGS`, such as `-n 4` to use 4 threads. 
@@ -37,7 +36,9 @@ fn main() { loop { fuzz!(|data: (Vec, Ty)| { let (data, norm) = data; - if data.len() == 0 { return; } + if data.len() == 0 { + return + } let pre_sum: u128 = data.iter().map(|x| *x as u128).sum(); let normalized = data.normalize(norm); @@ -50,13 +51,7 @@ fn main() { let sum: u128 = normalized.iter().map(|x| *x as u128).sum(); // if this function returns Ok(), then it will ALWAYS be accurate. - assert_eq!( - sum, - norm as u128, - "sums don't match {:?}, {}", - normalized, - norm, - ); + assert_eq!(sum, norm as u128, "sums don't match {:?}, {}", normalized, norm,); } else { panic!("Should have returned Ok for input = {:?}, target = {:?}", data, norm); } diff --git a/primitives/arithmetic/fuzzer/src/per_thing_rational.rs b/primitives/arithmetic/fuzzer/src/per_thing_rational.rs index 47ba5a4803056..c7f6a14c5f79c 100644 --- a/primitives/arithmetic/fuzzer/src/per_thing_rational.rs +++ b/primitives/arithmetic/fuzzer/src/per_thing_rational.rs @@ -24,16 +24,11 @@ //! `cargo hfuzz run-debug per_thing_rational hfuzz_workspace/per_thing_rational/*.fuzz`. use honggfuzz::fuzz; -use sp_arithmetic::{ - PerThing, PerU16, Percent, Perbill, Perquintill, traits::SaturatedConversion, -}; +use sp_arithmetic::{traits::SaturatedConversion, PerThing, PerU16, Perbill, Percent, Perquintill}; fn main() { loop { - fuzz!(| - data: ((u16, u16), (u32, u32), (u64, u64)) - | { - + fuzz!(|data: ((u16, u16), (u32, u32), (u64, u64))| { let (u16_pair, u32_pair, u64_pair) = data; // peru16 @@ -109,7 +104,6 @@ fn main() { Perquintill::from_float(smaller as f64 / bigger.max(1) as f64), 1000, ); - }) } } diff --git a/primitives/arithmetic/src/biguint.rs b/primitives/arithmetic/src/biguint.rs index 859cf829246f1..5bb90a9b93258 100644 --- a/primitives/arithmetic/src/biguint.rs +++ b/primitives/arithmetic/src/biguint.rs @@ -17,9 +17,9 @@ //! Infinite precision unsigned integer for substrate runtime. 
-use num_traits::{Zero, One}; -use sp_std::{cmp::Ordering, ops, prelude::*, vec, cell::RefCell, convert::TryFrom}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; +use num_traits::{One, Zero}; +use sp_std::{cell::RefCell, cmp::Ordering, convert::TryFrom, ops, prelude::*, vec}; // A sensible value for this would be half of the dword size of the host machine. Since the // runtime is compiled to 32bit webassembly, using 32 and 64 for single and double respectively @@ -105,7 +105,9 @@ impl BigUint { } /// Number of limbs. - pub fn len(&self) -> usize { self.digits.len() } + pub fn len(&self) -> usize { + self.digits.len() + } /// A naive getter for limb at `index`. Note that the order is lsb -> msb. /// @@ -156,7 +158,9 @@ impl BigUint { // by definition, a big-int number should never have leading zero limbs. This function // has the ability to cause this. There is nothing to do if the number already has 1 // limb only. call it a day and return. - if self.len().is_zero() { return; } + if self.len().is_zero() { + return + } let index = self.digits.iter().position(|&elem| elem != 0).unwrap_or(self.len() - 1); if index > 0 { @@ -168,7 +172,9 @@ impl BigUint { /// is already bigger than `size` limbs. pub fn lpad(&mut self, size: usize) { let n = self.len(); - if n >= size { return; } + if n >= size { + return + } let pad = size - n; let mut new_digits = (0..pad).map(|_| 0).collect::>(); new_digits.extend(self.digits.iter()); @@ -260,15 +266,15 @@ impl BigUint { if self.get(j) == 0 { // Note: `with_capacity` allocates with 0. Explicitly set j + m to zero if // otherwise. - continue; + continue } let mut k = 0; for i in 0..m { // PROOF: (B−1) × (B−1) + (B−1) + (B−1) = B^2 −1 < B^2. addition is safe. - let t = mul_single(self.get(j), other.get(i)) - + Double::from(w.get(i + j)) - + Double::from(k); + let t = mul_single(self.get(j), other.get(i)) + + Double::from(w.get(i + j)) + + Double::from(k); w.set(i + j, (t % B) as Single); // PROOF: (B^2 - 1) / B < B. 
conversion is safe. k = (t / B) as Single; @@ -288,9 +294,9 @@ impl BigUint { let mut out = Self::with_capacity(n); let mut r: Single = 0; // PROOF: (B-1) * B + (B-1) still fits in double - let with_r = |x: Single, r: Single| { Double::from(r) * B + Double::from(x) }; + let with_r = |x: Single, r: Single| Double::from(r) * B + Double::from(x); for d in (0..n).rev() { - let (q, rr) = div_single(with_r(self.get(d), r), other) ; + let (q, rr) = div_single(with_r(self.get(d), r), other); out.set(d, q as Single); r = rr; } @@ -311,11 +317,7 @@ impl BigUint { /// /// Taken from "The Art of Computer Programming" by D.E. Knuth, vol 2, chapter 4. pub fn div(self, other: &Self, rem: bool) -> Option<(Self, Self)> { - if other.len() <= 1 - || other.msb() == 0 - || self.msb() == 0 - || self.len() <= other.len() - { + if other.len() <= 1 || other.msb() == 0 || self.msb() == 0 || self.len() <= other.len() { return None } let n = other.len(); @@ -344,9 +346,7 @@ impl BigUint { // PROOF: this always fits into `Double`. In the context of Single = u8, and // Double = u16, think of 255 * 256 + 255 which is just u16::MAX. let dividend = - Double::from(self_norm.get(j + n)) - * B - + Double::from(self_norm.get(j + n - 1)); + Double::from(self_norm.get(j + n)) * B + Double::from(self_norm.get(j + n - 1)); let divisor = other_norm.get(n - 1); div_single(dividend, divisor) }; @@ -377,23 +377,30 @@ impl BigUint { test(); while (*rhat.borrow() as Double) < B { - if !test() { break; } + if !test() { + break + } } let qhat = qhat.into_inner(); // we don't need rhat anymore. just let it go out of scope when it does. 
// step D4 - let lhs = Self { digits: (j..=j+n).rev().map(|d| self_norm.get(d)).collect() }; + let lhs = Self { digits: (j..=j + n).rev().map(|d| self_norm.get(d)).collect() }; let rhs = other_norm.clone().mul(&Self::from(qhat)); let maybe_sub = lhs.sub(&rhs); let mut negative = false; let sub = match maybe_sub { Ok(t) => t, - Err(t) => { negative = true; t } + Err(t) => { + negative = true; + t + }, }; - (j..=j+n).for_each(|d| { self_norm.set(d, sub.get(d - j)); }); + (j..=j + n).for_each(|d| { + self_norm.set(d, sub.get(d - j)); + }); // step D5 // PROOF: the `test()` specifically decreases qhat until it is below `B`. conversion @@ -403,9 +410,11 @@ impl BigUint { // step D6: add back if negative happened. if negative { q.set(j, q.get(j) - 1); - let u = Self { digits: (j..=j+n).rev().map(|d| self_norm.get(d)).collect() }; + let u = Self { digits: (j..=j + n).rev().map(|d| self_norm.get(d)).collect() }; let r = other_norm.clone().add(&u); - (j..=j+n).rev().for_each(|d| { self_norm.set(d, r.get(d - j)); }) + (j..=j + n).rev().for_each(|d| { + self_norm.set(d, r.get(d - j)); + }) } } @@ -415,9 +424,8 @@ impl BigUint { if normalizer_bits > 0 { let s = SHIFT as u32; let nb = normalizer_bits; - for d in 0..n-1 { - let v = self_norm.get(d) >> nb - | self_norm.get(d + 1).overflowing_shl(s - nb).0; + for d in 0..n - 1 { + let v = self_norm.get(d) >> nb | self_norm.get(d + 1).overflowing_shl(s - nb).0; r.set(d, v); } r.set(n - 1, self_norm.get(n - 1) >> normalizer_bits); @@ -445,7 +453,6 @@ impl sp_std::fmt::Debug for BigUint { fn fmt(&self, _: &mut sp_std::fmt::Formatter<'_>) -> sp_std::fmt::Result { Ok(()) } - } impl PartialEq for BigUint { @@ -475,7 +482,7 @@ impl Ord for BigUint { Ordering::Equal => lhs.cmp(rhs), _ => len_cmp, } - } + }, } } } @@ -632,18 +639,9 @@ pub mod tests { #[test] fn equality_works() { - assert_eq!( - BigUint { digits: vec![1, 2, 3] } == BigUint { digits: vec![1, 2, 3] }, - true, - ); - assert_eq!( - BigUint { digits: vec![3, 2, 3] } == BigUint 
{ digits: vec![1, 2, 3] }, - false, - ); - assert_eq!( - BigUint { digits: vec![0, 1, 2, 3] } == BigUint { digits: vec![1, 2, 3] }, - true, - ); + assert_eq!(BigUint { digits: vec![1, 2, 3] } == BigUint { digits: vec![1, 2, 3] }, true,); + assert_eq!(BigUint { digits: vec![3, 2, 3] } == BigUint { digits: vec![1, 2, 3] }, false,); + assert_eq!(BigUint { digits: vec![0, 1, 2, 3] } == BigUint { digits: vec![1, 2, 3] }, true,); } #[test] @@ -669,14 +667,8 @@ pub mod tests { use sp_std::convert::TryFrom; assert_eq!(u64::try_from(with_limbs(1)).unwrap(), 1); assert_eq!(u64::try_from(with_limbs(2)).unwrap(), u32::MAX as u64 + 2); - assert_eq!( - u64::try_from(with_limbs(3)).unwrap_err(), - "cannot fit a number into u64", - ); - assert_eq!( - u128::try_from(with_limbs(3)).unwrap(), - u32::MAX as u128 + u64::MAX as u128 + 3 - ); + assert_eq!(u64::try_from(with_limbs(3)).unwrap_err(), "cannot fit a number into u64",); + assert_eq!(u128::try_from(with_limbs(3)).unwrap(), u32::MAX as u128 + u64::MAX as u128 + 3); } #[test] diff --git a/primitives/arithmetic/src/fixed_point.rs b/primitives/arithmetic/src/fixed_point.rs index 52609a3830771..423ce1ae3d79b 100644 --- a/primitives/arithmetic/src/fixed_point.rs +++ b/primitives/arithmetic/src/fixed_point.rs @@ -17,22 +17,38 @@ //! Decimal Fixed Point implementations for Substrate runtime. 
-use sp_std::{ops::{self, Add, Sub, Mul, Div}, fmt::Debug, prelude::*, convert::{TryInto, TryFrom}}; -use codec::{Encode, Decode, CompactAs}; use crate::{ - helpers_128bit::multiply_by_rational, PerThing, + helpers_128bit::multiply_by_rational, traits::{ - SaturatedConversion, CheckedSub, CheckedAdd, CheckedMul, CheckedDiv, CheckedNeg, - Bounded, Saturating, UniqueSaturatedInto, Zero, One + Bounded, CheckedAdd, CheckedDiv, CheckedMul, CheckedNeg, CheckedSub, One, + SaturatedConversion, Saturating, UniqueSaturatedInto, Zero, }, + PerThing, +}; +use codec::{CompactAs, Decode, Encode}; +use sp_std::{ + convert::{TryFrom, TryInto}, + fmt::Debug, + ops::{self, Add, Div, Mul, Sub}, + prelude::*, }; #[cfg(feature = "std")] use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; /// Integer types that can be used to interact with `FixedPointNumber` implementations. -pub trait FixedPointOperand: Copy + Clone + Bounded + Zero + Saturating - + PartialOrd + UniqueSaturatedInto + TryFrom + CheckedNeg {} +pub trait FixedPointOperand: + Copy + + Clone + + Bounded + + Zero + + Saturating + + PartialOrd + + UniqueSaturatedInto + + TryFrom + + CheckedNeg +{ +} impl FixedPointOperand for i128 {} impl FixedPointOperand for u128 {} @@ -53,11 +69,26 @@ impl FixedPointOperand for u8 {} /// to `Self::Inner::max_value() / Self::DIV`. /// This is also referred to as the _accuracy_ of the type in the documentation. pub trait FixedPointNumber: - Sized + Copy + Default + Debug - + Saturating + Bounded - + Eq + PartialEq + Ord + PartialOrd - + CheckedSub + CheckedAdd + CheckedMul + CheckedDiv - + Add + Sub + Div + Mul + Zero + One + Sized + + Copy + + Default + + Debug + + Saturating + + Bounded + + Eq + + PartialEq + + Ord + + PartialOrd + + CheckedSub + + CheckedAdd + + CheckedMul + + CheckedDiv + + Add + + Sub + + Div + + Mul + + Zero + + One { /// The underlying data type used for this fixed point number. 
type Inner: Debug + One + CheckedMul + CheckedDiv + FixedPointOperand; @@ -108,7 +139,10 @@ pub trait FixedPointNumber: /// Creates `self` from a rational number. Equal to `n / d`. /// /// Returns `None` if `d == 0` or `n / d` exceeds accuracy. - fn checked_from_rational(n: N, d: D) -> Option { + fn checked_from_rational( + n: N, + d: D, + ) -> Option { if d == D::zero() { return None } @@ -117,7 +151,8 @@ pub trait FixedPointNumber: let d: I129 = d.into(); let negative = n.negative != d.negative; - multiply_by_rational(n.value, Self::DIV.unique_saturated_into(), d.value).ok() + multiply_by_rational(n.value, Self::DIV.unique_saturated_into(), d.value) + .ok() .and_then(|value| from_i129(I129 { value, negative })) .map(Self::from_inner) } @@ -130,7 +165,8 @@ pub trait FixedPointNumber: let rhs: I129 = n.into(); let negative = lhs.negative != rhs.negative; - multiply_by_rational(lhs.value, rhs.value, Self::DIV.unique_saturated_into()).ok() + multiply_by_rational(lhs.value, rhs.value, Self::DIV.unique_saturated_into()) + .ok() .and_then(|value| from_i129(I129 { value, negative })) } @@ -149,7 +185,8 @@ pub trait FixedPointNumber: let rhs: I129 = d.into(); let negative = lhs.negative != rhs.negative; - lhs.value.checked_div(rhs.value) + lhs.value + .checked_div(rhs.value) .and_then(|n| n.checked_div(Self::DIV.unique_saturated_into())) .and_then(|value| from_i129(I129 { value, negative })) } @@ -212,7 +249,8 @@ pub trait FixedPointNumber: /// Returns the integer part. 
fn trunc(self) -> Self { - self.into_inner().checked_div(&Self::DIV) + self.into_inner() + .checked_div(&Self::DIV) .expect("panics only if DIV is zero, DIV is not zero; qed") .checked_mul(&Self::DIV) .map(Self::from_inner) @@ -281,7 +319,8 @@ struct I129 { impl From for I129 { fn from(n: N) -> I129 { if n < N::zero() { - let value: u128 = n.checked_neg() + let value: u128 = n + .checked_neg() .map(|n| n.unique_saturated_into()) .unwrap_or_else(|| N::max_value().unique_saturated_into().saturating_add(1)); I129 { value, negative: true } @@ -325,7 +364,16 @@ macro_rules! implement_fixed { /// #[doc = $title] #[derive( - Encode, Decode, CompactAs, Default, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, + Encode, + Decode, + CompactAs, + Default, + Copy, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, scale_info::TypeInfo, )] pub struct $name($inner_type); @@ -389,7 +437,7 @@ macro_rules! implement_fixed { fn saturating_pow(self, exp: usize) -> Self { if exp == 0 { - return Self::saturating_from_integer(1); + return Self::saturating_from_integer(1) } let exp = exp as u32; @@ -474,7 +522,8 @@ macro_rules! implement_fixed { let rhs: I129 = other.0.into(); let negative = lhs.negative != rhs.negative; - multiply_by_rational(lhs.value, Self::DIV as u128, rhs.value).ok() + multiply_by_rational(lhs.value, Self::DIV as u128, rhs.value) + .ok() .and_then(|value| from_i129(I129 { value, negative })) .map(Self) } @@ -486,7 +535,8 @@ macro_rules! implement_fixed { let rhs: I129 = other.0.into(); let negative = lhs.negative != rhs.negative; - multiply_by_rational(lhs.value, rhs.value, Self::DIV as u128).ok() + multiply_by_rational(lhs.value, rhs.value, Self::DIV as u128) + .ok() .and_then(|value| from_i129(I129 { value, negative })) .map(Self) } @@ -527,7 +577,11 @@ macro_rules! 
implement_fixed { format!("{}{}", signum_for_zero, int) }; let precision = (Self::accuracy() as f64).log10() as usize; - let fractional = format!("{:0>weight$}", ((self.0 % Self::accuracy()) as i128).abs(), weight=precision); + let fractional = format!( + "{:0>weight$}", + ((self.0 % Self::accuracy()) as i128).abs(), + weight = precision + ); write!(f, "{}({}.{})", stringify!($name), integral, fractional) } @@ -537,7 +591,10 @@ macro_rules! implement_fixed { } } - impl From

for $name where P::Inner: FixedPointOperand { + impl From

for $name + where + P::Inner: FixedPointOperand, + { fn from(p: P) -> Self { let accuracy = P::ACCURACY; let value = p.deconstruct(); @@ -557,8 +614,8 @@ macro_rules! implement_fixed { type Err = &'static str; fn from_str(s: &str) -> Result { - let inner: ::Inner = s.parse() - .map_err(|_| "invalid string input for fixed point number")?; + let inner: ::Inner = + s.parse().map_err(|_| "invalid string input for fixed point number")?; Ok(Self::from_inner(inner)) } } @@ -613,50 +670,32 @@ macro_rules! implement_fixed { #[test] fn from_i129_works() { - let a = I129 { - value: 1, - negative: true, - }; + let a = I129 { value: 1, negative: true }; // Can't convert negative number to unsigned. assert_eq!(from_i129::(a), None); - let a = I129 { - value: u128::MAX - 1, - negative: false, - }; + let a = I129 { value: u128::MAX - 1, negative: false }; // Max - 1 value fits. assert_eq!(from_i129::(a), Some(u128::MAX - 1)); - let a = I129 { - value: u128::MAX, - negative: false, - }; + let a = I129 { value: u128::MAX, negative: false }; // Max value fits. assert_eq!(from_i129::(a), Some(u128::MAX)); - let a = I129 { - value: i128::MAX as u128 + 1, - negative: true, - }; + let a = I129 { value: i128::MAX as u128 + 1, negative: true }; // Min value fits. assert_eq!(from_i129::(a), Some(i128::MIN)); - let a = I129 { - value: i128::MAX as u128 + 1, - negative: false, - }; + let a = I129 { value: i128::MAX as u128 + 1, negative: false }; // Max + 1 does not fit. assert_eq!(from_i129::(a), None); - let a = I129 { - value: i128::MAX as u128, - negative: false, - }; + let a = I129 { value: i128::MAX as u128, negative: false }; // Max value fits. assert_eq!(from_i129::(a), Some(i128::MAX)); @@ -727,7 +766,6 @@ macro_rules! implement_fixed { // Min. assert_eq!($name::max_value(), b); - } } @@ -852,8 +890,7 @@ macro_rules! implement_fixed { let accuracy = $name::accuracy(); // Case where integer fits. 
- let a = $name::checked_from_integer(42) - .expect("42 * accuracy <= inner_max; qed"); + let a = $name::checked_from_integer(42).expect("42 * accuracy <= inner_max; qed"); assert_eq!(a.into_inner(), 42 * accuracy); // Max integer that fit. @@ -931,7 +968,7 @@ macro_rules! implement_fixed { if $name::SIGNED { // Negative case: -2.5 let a = $name::saturating_from_rational(-5, 2); - assert_eq!(a.into_inner(), 0 - 25 * accuracy / 10); + assert_eq!(a.into_inner(), 0 - 25 * accuracy / 10); // Other negative case: -2.5 let a = $name::saturating_from_rational(5, -2); @@ -1051,7 +1088,10 @@ macro_rules! implement_fixed { if $name::SIGNED { // Min - 1 => Underflow => None. - let a = $name::checked_from_rational(inner_max as u128 + 2, 0.saturating_sub(accuracy)); + let a = $name::checked_from_rational( + inner_max as u128 + 2, + 0.saturating_sub(accuracy), + ); assert_eq!(a, None); let a = $name::checked_from_rational(inner_max, 0 - 3 * accuracy).unwrap(); @@ -1166,15 +1206,15 @@ macro_rules! implement_fixed { // Max - 1. let b = $name::from_inner(inner_max - 1); - assert_eq!(a.checked_mul(&(b/2.into())), Some(b)); + assert_eq!(a.checked_mul(&(b / 2.into())), Some(b)); // Max. let c = $name::from_inner(inner_max); - assert_eq!(a.checked_mul(&(c/2.into())), Some(b)); + assert_eq!(a.checked_mul(&(c / 2.into())), Some(b)); // Max + 1 => None. let e = $name::from_inner(1); - assert_eq!(a.checked_mul(&(c/2.into()+e)), None); + assert_eq!(a.checked_mul(&(c / 2.into() + e)), None); if $name::SIGNED { // Min + 1. @@ -1195,8 +1235,14 @@ macro_rules! 
implement_fixed { let b = $name::saturating_from_rational(1, -2); assert_eq!(b.checked_mul(&42.into()), Some(0.saturating_sub(21).into())); - assert_eq!(b.checked_mul(&$name::max_value()), $name::max_value().checked_div(&0.saturating_sub(2).into())); - assert_eq!(b.checked_mul(&$name::min_value()), $name::min_value().checked_div(&0.saturating_sub(2).into())); + assert_eq!( + b.checked_mul(&$name::max_value()), + $name::max_value().checked_div(&0.saturating_sub(2).into()) + ); + assert_eq!( + b.checked_mul(&$name::min_value()), + $name::min_value().checked_div(&0.saturating_sub(2).into()) + ); assert_eq!(c.checked_mul(&$name::min_value()), None); } @@ -1206,8 +1252,14 @@ macro_rules! implement_fixed { assert_eq!(a.checked_mul(&42.into()), Some(21.into())); assert_eq!(c.checked_mul(&2.into()), Some(510.into())); assert_eq!(c.checked_mul(&$name::max_value()), None); - assert_eq!(a.checked_mul(&$name::max_value()), $name::max_value().checked_div(&2.into())); - assert_eq!(a.checked_mul(&$name::min_value()), $name::min_value().checked_div(&2.into())); + assert_eq!( + a.checked_mul(&$name::max_value()), + $name::max_value().checked_div(&2.into()) + ); + assert_eq!( + a.checked_mul(&$name::min_value()), + $name::min_value().checked_div(&2.into()) + ); } #[test] @@ -1233,13 +1285,25 @@ macro_rules! implement_fixed { if b < c { // Not executed by unsigned inners. 
- assert_eq!(a.checked_div_int(0.saturating_sub(2)), Some(0.saturating_sub(inner_max / (2 * accuracy)))); - assert_eq!(a.checked_div_int(0.saturating_sub(inner_max / accuracy)), Some(0.saturating_sub(1))); + assert_eq!( + a.checked_div_int(0.saturating_sub(2)), + Some(0.saturating_sub(inner_max / (2 * accuracy))) + ); + assert_eq!( + a.checked_div_int(0.saturating_sub(inner_max / accuracy)), + Some(0.saturating_sub(1)) + ); assert_eq!(b.checked_div_int(i128::MIN), Some(0)); assert_eq!(b.checked_div_int(inner_min / accuracy), Some(1)); assert_eq!(b.checked_div_int(1i8), None); - assert_eq!(b.checked_div_int(0.saturating_sub(2)), Some(0.saturating_sub(inner_min / (2 * accuracy)))); - assert_eq!(b.checked_div_int(0.saturating_sub(inner_min / accuracy)), Some(0.saturating_sub(1))); + assert_eq!( + b.checked_div_int(0.saturating_sub(2)), + Some(0.saturating_sub(inner_min / (2 * accuracy))) + ); + assert_eq!( + b.checked_div_int(0.saturating_sub(inner_min / accuracy)), + Some(0.saturating_sub(1)) + ); assert_eq!(c.checked_div_int(i128::MIN), Some(0)); assert_eq!(d.checked_div_int(i32::MIN), Some(0)); } @@ -1297,7 +1361,10 @@ macro_rules! implement_fixed { if $name::SIGNED { assert_eq!($name::from_inner(inner_min).saturating_abs(), $name::max_value()); - assert_eq!($name::saturating_from_rational(-1, 2).saturating_abs(), (1, 2).into()); + assert_eq!( + $name::saturating_from_rational(-1, 2).saturating_abs(), + (1, 2).into() + ); } } @@ -1322,31 +1389,72 @@ macro_rules! 
implement_fixed { #[test] fn saturating_pow_should_work() { - assert_eq!($name::saturating_from_integer(2).saturating_pow(0), $name::saturating_from_integer(1)); - assert_eq!($name::saturating_from_integer(2).saturating_pow(1), $name::saturating_from_integer(2)); - assert_eq!($name::saturating_from_integer(2).saturating_pow(2), $name::saturating_from_integer(4)); - assert_eq!($name::saturating_from_integer(2).saturating_pow(3), $name::saturating_from_integer(8)); - assert_eq!($name::saturating_from_integer(2).saturating_pow(50), - $name::saturating_from_integer(1125899906842624i64)); + assert_eq!( + $name::saturating_from_integer(2).saturating_pow(0), + $name::saturating_from_integer(1) + ); + assert_eq!( + $name::saturating_from_integer(2).saturating_pow(1), + $name::saturating_from_integer(2) + ); + assert_eq!( + $name::saturating_from_integer(2).saturating_pow(2), + $name::saturating_from_integer(4) + ); + assert_eq!( + $name::saturating_from_integer(2).saturating_pow(3), + $name::saturating_from_integer(8) + ); + assert_eq!( + $name::saturating_from_integer(2).saturating_pow(50), + $name::saturating_from_integer(1125899906842624i64) + ); assert_eq!($name::saturating_from_integer(1).saturating_pow(1000), (1).into()); - assert_eq!($name::saturating_from_integer(1).saturating_pow(usize::MAX), (1).into()); + assert_eq!( + $name::saturating_from_integer(1).saturating_pow(usize::MAX), + (1).into() + ); if $name::SIGNED { // Saturating. 
- assert_eq!($name::saturating_from_integer(2).saturating_pow(68), $name::max_value()); + assert_eq!( + $name::saturating_from_integer(2).saturating_pow(68), + $name::max_value() + ); assert_eq!($name::saturating_from_integer(-1).saturating_pow(1000), (1).into()); - assert_eq!($name::saturating_from_integer(-1).saturating_pow(1001), 0.saturating_sub(1).into()); - assert_eq!($name::saturating_from_integer(-1).saturating_pow(usize::MAX), 0.saturating_sub(1).into()); - assert_eq!($name::saturating_from_integer(-1).saturating_pow(usize::MAX - 1), (1).into()); + assert_eq!( + $name::saturating_from_integer(-1).saturating_pow(1001), + 0.saturating_sub(1).into() + ); + assert_eq!( + $name::saturating_from_integer(-1).saturating_pow(usize::MAX), + 0.saturating_sub(1).into() + ); + assert_eq!( + $name::saturating_from_integer(-1).saturating_pow(usize::MAX - 1), + (1).into() + ); } - assert_eq!($name::saturating_from_integer(114209).saturating_pow(5), $name::max_value()); - - assert_eq!($name::saturating_from_integer(1).saturating_pow(usize::MAX), (1).into()); - assert_eq!($name::saturating_from_integer(0).saturating_pow(usize::MAX), (0).into()); - assert_eq!($name::saturating_from_integer(2).saturating_pow(usize::MAX), $name::max_value()); + assert_eq!( + $name::saturating_from_integer(114209).saturating_pow(5), + $name::max_value() + ); + + assert_eq!( + $name::saturating_from_integer(1).saturating_pow(usize::MAX), + (1).into() + ); + assert_eq!( + $name::saturating_from_integer(0).saturating_pow(usize::MAX), + (0).into() + ); + assert_eq!( + $name::saturating_from_integer(2).saturating_pow(usize::MAX), + $name::max_value() + ); } #[test] @@ -1371,9 +1479,18 @@ macro_rules! implement_fixed { if b < c { // Not executed by unsigned inners. 
- assert_eq!(a.checked_div(&0.saturating_sub(2).into()), Some($name::from_inner(0.saturating_sub(inner_max / 2)))); - assert_eq!(a.checked_div(&-$name::max_value()), Some(0.saturating_sub(1).into())); - assert_eq!(b.checked_div(&0.saturating_sub(2).into()), Some($name::from_inner(0.saturating_sub(inner_min / 2)))); + assert_eq!( + a.checked_div(&0.saturating_sub(2).into()), + Some($name::from_inner(0.saturating_sub(inner_max / 2))) + ); + assert_eq!( + a.checked_div(&-$name::max_value()), + Some(0.saturating_sub(1).into()) + ); + assert_eq!( + b.checked_div(&0.saturating_sub(2).into()), + Some($name::from_inner(0.saturating_sub(inner_min / 2))) + ); assert_eq!(c.checked_div(&$name::max_value()), Some(0.into())); assert_eq!(b.checked_div(&b), Some($name::one())); } @@ -1430,14 +1547,10 @@ macro_rules! implement_fixed { assert_eq!(n, i + f); - let n = $name::saturating_from_rational(5, 2) - .frac() - .saturating_mul(10.into()); + let n = $name::saturating_from_rational(5, 2).frac().saturating_mul(10.into()); assert_eq!(n, 5.into()); - let n = $name::saturating_from_rational(1, 2) - .frac() - .saturating_mul(10.into()); + let n = $name::saturating_from_rational(1, 2).frac().saturating_mul(10.into()); assert_eq!(n, 5.into()); if $name::SIGNED { @@ -1447,14 +1560,10 @@ macro_rules! implement_fixed { assert_eq!(n, i - f); // The sign is attached to the integer part unless it is zero. - let n = $name::saturating_from_rational(-5, 2) - .frac() - .saturating_mul(10.into()); + let n = $name::saturating_from_rational(-5, 2).frac().saturating_mul(10.into()); assert_eq!(n, 5.into()); - let n = $name::saturating_from_rational(-1, 2) - .frac() - .saturating_mul(10.into()); + let n = $name::saturating_from_rational(-1, 2).frac().saturating_mul(10.into()); assert_eq!(n, 0.saturating_sub(5).into()); } } @@ -1567,30 +1676,51 @@ macro_rules! 
implement_fixed { #[test] fn fmt_should_work() { let zero = $name::zero(); - assert_eq!(format!("{:?}", zero), format!("{}(0.{:0>weight$})", stringify!($name), 0, weight=precision())); + assert_eq!( + format!("{:?}", zero), + format!("{}(0.{:0>weight$})", stringify!($name), 0, weight = precision()) + ); let one = $name::one(); - assert_eq!(format!("{:?}", one), format!("{}(1.{:0>weight$})", stringify!($name), 0, weight=precision())); + assert_eq!( + format!("{:?}", one), + format!("{}(1.{:0>weight$})", stringify!($name), 0, weight = precision()) + ); let frac = $name::saturating_from_rational(1, 2); - assert_eq!(format!("{:?}", frac), format!("{}(0.{:0weight$})", stringify!($name), 0, weight=precision())); + assert_eq!( + format!("{:?}", neg), + format!("{}(-1.{:0>weight$})", stringify!($name), 0, weight = precision()) + ); let frac = $name::saturating_from_rational(-314, 100); - assert_eq!(format!("{:?}", frac), format!("{}(-3.{:0 u128 { @@ -63,7 +67,9 @@ pub fn to_big_uint(x: u128) -> biguint::BigUint { /// /// Invariant: c must be greater than or equal to 1. pub fn multiply_by_rational(mut a: u128, mut b: u128, mut c: u128) -> Result { - if a.is_zero() || b.is_zero() { return Ok(Zero::zero()); } + if a.is_zero() || b.is_zero() { + return Ok(Zero::zero()) + } c = c.max(1); // a and b are interchangeable by definition in this function. It always helps to assume the @@ -102,9 +108,10 @@ pub fn multiply_by_rational(mut a: u128, mut b: u128, mut c: u128) -> Result (c / 2) { q = q.add(&to_big_uint(1)); } + let r: u128 = r.try_into().expect("reminder of div by c is always less than c; qed"); + if r > (c / 2) { + q = q.add(&to_big_uint(1)); + } q }; q.lstrip(); diff --git a/primitives/arithmetic/src/lib.rs b/primitives/arithmetic/src/lib.rs index 110e5c0728037..cf2e8a1a60640 100644 --- a/primitives/arithmetic/src/lib.rs +++ b/primitives/arithmetic/src/lib.rs @@ -34,18 +34,18 @@ macro_rules! 
assert_eq_error_rate { } pub mod biguint; +pub mod fixed_point; pub mod helpers_128bit; -pub mod traits; pub mod per_things; -pub mod fixed_point; pub mod rational; +pub mod traits; -pub use fixed_point::{FixedPointNumber, FixedPointOperand, FixedI64, FixedI128, FixedU128}; -pub use per_things::{PerThing, InnerOf, UpperOf, Percent, PerU16, Permill, Perbill, Perquintill}; +pub use fixed_point::{FixedI128, FixedI64, FixedPointNumber, FixedPointOperand, FixedU128}; +pub use per_things::{InnerOf, PerThing, PerU16, Perbill, Percent, Permill, Perquintill, UpperOf}; pub use rational::{Rational128, RationalInfinite}; -use sp_std::{prelude::*, cmp::Ordering, fmt::Debug, convert::TryInto}; -use traits::{BaseArithmetic, One, Zero, SaturatedConversion, Unsigned}; +use sp_std::{cmp::Ordering, convert::TryInto, fmt::Debug, prelude::*}; +use traits::{BaseArithmetic, One, SaturatedConversion, Unsigned, Zero}; /// Trait for comparing two numbers with an threshold. /// @@ -82,7 +82,6 @@ where _ => Ordering::Equal, } } - } } @@ -114,8 +113,10 @@ impl_normalize_for_numeric!(u8, u16, u32, u64, u128); impl Normalizable

for Vec

{ fn normalize(&self, targeted_sum: P) -> Result, &'static str> { - let uppers = - self.iter().map(|p| >::from(p.clone().deconstruct())).collect::>(); + let uppers = self + .iter() + .map(|p| >::from(p.clone().deconstruct())) + .collect::>(); let normalized = normalize(uppers.as_ref(), >::from(targeted_sum.deconstruct()))?; @@ -157,7 +158,8 @@ impl Normalizable

for Vec

{ /// /// * This proof is used in the implementation as well. pub fn normalize(input: &[T], targeted_sum: T) -> Result, &'static str> - where T: Clone + Copy + Ord + BaseArithmetic + Unsigned + Debug, +where + T: Clone + Copy + Ord + BaseArithmetic + Unsigned + Debug, { // compute sum and return error if failed. let mut sum = T::zero(); @@ -171,12 +173,12 @@ pub fn normalize(input: &[T], targeted_sum: T) -> Result, &'static str // Nothing to do here. if count.is_zero() { - return Ok(Vec::::new()); + return Ok(Vec::::new()) } let diff = targeted_sum.max(sum) - targeted_sum.min(sum); if diff.is_zero() { - return Ok(input.to_vec()); + return Ok(input.to_vec()) } let needs_bump = targeted_sum > sum; @@ -198,7 +200,8 @@ pub fn normalize(input: &[T], targeted_sum: T) -> Result, &'static str if !per_round.is_zero() { for _ in 0..count { - output_with_idx[min_index].1 = output_with_idx[min_index].1 + output_with_idx[min_index].1 = output_with_idx[min_index] + .1 .checked_add(&per_round) .expect("Proof provided in the module doc; qed."); if output_with_idx[min_index].1 >= threshold { @@ -210,7 +213,8 @@ pub fn normalize(input: &[T], targeted_sum: T) -> Result, &'static str // continue with the previous min_index while !leftover.is_zero() { - output_with_idx[min_index].1 = output_with_idx[min_index].1 + output_with_idx[min_index].1 = output_with_idx[min_index] + .1 .checked_add(&T::one()) .expect("Proof provided in the module doc; qed."); if output_with_idx[min_index].1 >= threshold { @@ -232,9 +236,8 @@ pub fn normalize(input: &[T], targeted_sum: T) -> Result, &'static str if !per_round.is_zero() { for _ in 0..count { - output_with_idx[max_index].1 = output_with_idx[max_index].1 - .checked_sub(&per_round) - .unwrap_or_else(|| { + output_with_idx[max_index].1 = + output_with_idx[max_index].1.checked_sub(&per_round).unwrap_or_else(|| { let remainder = per_round - output_with_idx[max_index].1; leftover += remainder; output_with_idx[max_index].1.saturating_sub(per_round) @@ 
-284,7 +287,7 @@ mod normalize_tests { normalize(vec![8 as $type, 9, 7, 10].as_ref(), 40).unwrap(), vec![10, 10, 10, 10], ); - } + }; } // it should work for all types as long as the length of vector can be converted to T. test_for!(u128); @@ -297,22 +300,13 @@ mod normalize_tests { #[test] fn fails_on_if_input_sum_large() { assert!(normalize(vec![1u8; 255].as_ref(), 10).is_ok()); - assert_eq!( - normalize(vec![1u8; 256].as_ref(), 10), - Err("sum of input cannot fit in `T`"), - ); + assert_eq!(normalize(vec![1u8; 256].as_ref(), 10), Err("sum of input cannot fit in `T`"),); } #[test] fn does_not_fail_on_subtraction_overflow() { - assert_eq!( - normalize(vec![1u8, 100, 100].as_ref(), 10).unwrap(), - vec![1, 9, 0], - ); - assert_eq!( - normalize(vec![1u8, 8, 9].as_ref(), 1).unwrap(), - vec![0, 1, 0], - ); + assert_eq!(normalize(vec![1u8, 100, 100].as_ref(), 10).unwrap(), vec![1, 9, 0],); + assert_eq!(normalize(vec![1u8, 8, 9].as_ref(), 1).unwrap(), vec![0, 1, 0],); } #[test] @@ -323,11 +317,9 @@ mod normalize_tests { #[test] fn works_for_per_thing() { assert_eq!( - vec![ - Perbill::from_percent(33), - Perbill::from_percent(33), - Perbill::from_percent(33) - ].normalize(Perbill::one()).unwrap(), + vec![Perbill::from_percent(33), Perbill::from_percent(33), Perbill::from_percent(33)] + .normalize(Perbill::one()) + .unwrap(), vec![ Perbill::from_parts(333333334), Perbill::from_parts(333333333), @@ -336,11 +328,9 @@ mod normalize_tests { ); assert_eq!( - vec![ - Perbill::from_percent(20), - Perbill::from_percent(15), - Perbill::from_percent(30) - ].normalize(Perbill::one()).unwrap(), + vec![Perbill::from_percent(20), Perbill::from_percent(15), Perbill::from_percent(30)] + .normalize(Perbill::one()) + .unwrap(), vec![ Perbill::from_parts(316666668), Perbill::from_parts(383333332), @@ -355,11 +345,9 @@ mod normalize_tests { // could have a situation where the sum cannot be calculated in the inner type. 
Calculating // using the upper type of the per_thing should assure this to be okay. assert_eq!( - vec![ - PerU16::from_percent(40), - PerU16::from_percent(40), - PerU16::from_percent(40), - ].normalize(PerU16::one()).unwrap(), + vec![PerU16::from_percent(40), PerU16::from_percent(40), PerU16::from_percent(40),] + .normalize(PerU16::one()) + .unwrap(), vec![ PerU16::from_parts(21845), // 33% PerU16::from_parts(21845), // 33% @@ -370,82 +358,40 @@ mod normalize_tests { #[test] fn normalize_works_all_le() { - assert_eq!( - normalize(vec![8u32, 9, 7, 10].as_ref(), 40).unwrap(), - vec![10, 10, 10, 10], - ); + assert_eq!(normalize(vec![8u32, 9, 7, 10].as_ref(), 40).unwrap(), vec![10, 10, 10, 10],); - assert_eq!( - normalize(vec![7u32, 7, 7, 7].as_ref(), 40).unwrap(), - vec![10, 10, 10, 10], - ); + assert_eq!(normalize(vec![7u32, 7, 7, 7].as_ref(), 40).unwrap(), vec![10, 10, 10, 10],); - assert_eq!( - normalize(vec![7u32, 7, 7, 10].as_ref(), 40).unwrap(), - vec![11, 11, 8, 10], - ); + assert_eq!(normalize(vec![7u32, 7, 7, 10].as_ref(), 40).unwrap(), vec![11, 11, 8, 10],); - assert_eq!( - normalize(vec![7u32, 8, 7, 10].as_ref(), 40).unwrap(), - vec![11, 8, 11, 10], - ); + assert_eq!(normalize(vec![7u32, 8, 7, 10].as_ref(), 40).unwrap(), vec![11, 8, 11, 10],); - assert_eq!( - normalize(vec![7u32, 7, 8, 10].as_ref(), 40).unwrap(), - vec![11, 11, 8, 10], - ); + assert_eq!(normalize(vec![7u32, 7, 8, 10].as_ref(), 40).unwrap(), vec![11, 11, 8, 10],); } #[test] fn normalize_works_some_ge() { - assert_eq!( - normalize(vec![8u32, 11, 9, 10].as_ref(), 40).unwrap(), - vec![10, 11, 9, 10], - ); + assert_eq!(normalize(vec![8u32, 11, 9, 10].as_ref(), 40).unwrap(), vec![10, 11, 9, 10],); } #[test] fn always_inc_min() { - assert_eq!( - normalize(vec![10u32, 7, 10, 10].as_ref(), 40).unwrap(), - vec![10, 10, 10, 10], - ); - assert_eq!( - normalize(vec![10u32, 10, 7, 10].as_ref(), 40).unwrap(), - vec![10, 10, 10, 10], - ); - assert_eq!( - normalize(vec![10u32, 10, 10, 7].as_ref(), 
40).unwrap(), - vec![10, 10, 10, 10], - ); + assert_eq!(normalize(vec![10u32, 7, 10, 10].as_ref(), 40).unwrap(), vec![10, 10, 10, 10],); + assert_eq!(normalize(vec![10u32, 10, 7, 10].as_ref(), 40).unwrap(), vec![10, 10, 10, 10],); + assert_eq!(normalize(vec![10u32, 10, 10, 7].as_ref(), 40).unwrap(), vec![10, 10, 10, 10],); } #[test] fn normalize_works_all_ge() { - assert_eq!( - normalize(vec![12u32, 11, 13, 10].as_ref(), 40).unwrap(), - vec![10, 10, 10, 10], - ); + assert_eq!(normalize(vec![12u32, 11, 13, 10].as_ref(), 40).unwrap(), vec![10, 10, 10, 10],); - assert_eq!( - normalize(vec![13u32, 13, 13, 13].as_ref(), 40).unwrap(), - vec![10, 10, 10, 10], - ); + assert_eq!(normalize(vec![13u32, 13, 13, 13].as_ref(), 40).unwrap(), vec![10, 10, 10, 10],); - assert_eq!( - normalize(vec![13u32, 13, 13, 10].as_ref(), 40).unwrap(), - vec![12, 9, 9, 10], - ); + assert_eq!(normalize(vec![13u32, 13, 13, 10].as_ref(), 40).unwrap(), vec![12, 9, 9, 10],); - assert_eq!( - normalize(vec![13u32, 12, 13, 10].as_ref(), 40).unwrap(), - vec![9, 12, 9, 10], - ); + assert_eq!(normalize(vec![13u32, 12, 13, 10].as_ref(), 40).unwrap(), vec![9, 12, 9, 10],); - assert_eq!( - normalize(vec![13u32, 13, 12, 10].as_ref(), 40).unwrap(), - vec![9, 9, 12, 10], - ); + assert_eq!(normalize(vec![13u32, 13, 12, 10].as_ref(), 40).unwrap(), vec![9, 9, 12, 10],); } } diff --git a/primitives/arithmetic/src/per_things.rs b/primitives/arithmetic/src/per_things.rs index f9f8460cefd34..a89783276eabe 100644 --- a/primitives/arithmetic/src/per_things.rs +++ b/primitives/arithmetic/src/per_things.rs @@ -16,16 +16,20 @@ // limitations under the License. 
#[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; -use sp_std::{ops, fmt, prelude::*, convert::{TryFrom, TryInto}}; -use codec::{Encode, CompactAs}; -use num_traits::Pow; use crate::traits::{ - SaturatedConversion, UniqueSaturatedInto, Saturating, BaseArithmetic, Bounded, Zero, Unsigned, - One, + BaseArithmetic, Bounded, One, SaturatedConversion, Saturating, UniqueSaturatedInto, Unsigned, + Zero, }; +use codec::{CompactAs, Encode}; +use num_traits::Pow; use sp_debug_derive::RuntimeDebug; +use sp_std::{ + convert::{TryFrom, TryInto}, + fmt, ops, + prelude::*, +}; /// Get the inner type of a `PerThing`. pub type InnerOf

=

::Inner; @@ -36,8 +40,19 @@ pub type UpperOf

=

::Upper; /// Something that implements a fixed point ration with an arbitrary granularity `X`, as _parts per /// `X`_. pub trait PerThing: - Sized + Saturating + Copy + Default + Eq + PartialEq + Ord + PartialOrd + Bounded + fmt::Debug - + ops::Div + ops::Mul + Pow + Sized + + Saturating + + Copy + + Default + + Eq + + PartialEq + + Ord + + PartialOrd + + Bounded + + fmt::Debug + + ops::Div + + ops::Mul + + Pow { /// The data type used to build this per-thingy. type Inner: BaseArithmetic + Unsigned + Copy + Into + fmt::Debug; @@ -56,16 +71,24 @@ pub trait PerThing: const ACCURACY: Self::Inner; /// Equivalent to `Self::from_parts(0)`. - fn zero() -> Self { Self::from_parts(Self::Inner::zero()) } + fn zero() -> Self { + Self::from_parts(Self::Inner::zero()) + } /// Return `true` if this is nothing. - fn is_zero(&self) -> bool { self.deconstruct() == Self::Inner::zero() } + fn is_zero(&self) -> bool { + self.deconstruct() == Self::Inner::zero() + } /// Equivalent to `Self::from_parts(Self::ACCURACY)`. - fn one() -> Self { Self::from_parts(Self::ACCURACY) } + fn one() -> Self { + Self::from_parts(Self::ACCURACY) + } /// Return `true` if this is one. - fn is_one(&self) -> bool { self.deconstruct() == Self::ACCURACY } + fn is_one(&self) -> bool { + self.deconstruct() == Self::ACCURACY + } /// Build this type from a percent. Equivalent to `Self::from_parts(x * Self::ACCURACY / 100)` /// but more accurate and can cope with potential type overflows. 
@@ -104,8 +127,13 @@ pub trait PerThing: /// ``` fn mul_floor(self, b: N) -> N where - N: Clone + UniqueSaturatedInto + ops::Rem + - ops::Div + ops::Mul + ops::Add + Unsigned, + N: Clone + + UniqueSaturatedInto + + ops::Rem + + ops::Div + + ops::Mul + + ops::Add + + Unsigned, Self::Inner: Into, { overflow_prune_mul::(b, self.deconstruct(), Rounding::Down) @@ -128,9 +156,14 @@ pub trait PerThing: /// ``` fn mul_ceil(self, b: N) -> N where - N: Clone + UniqueSaturatedInto + ops::Rem + - ops::Div + ops::Mul + ops::Add + Unsigned, - Self::Inner: Into + N: Clone + + UniqueSaturatedInto + + ops::Rem + + ops::Div + + ops::Mul + + ops::Add + + Unsigned, + Self::Inner: Into, { overflow_prune_mul::(b, self.deconstruct(), Rounding::Up) } @@ -146,9 +179,14 @@ pub trait PerThing: /// ``` fn saturating_reciprocal_mul(self, b: N) -> N where - N: Clone + UniqueSaturatedInto + ops::Rem + - ops::Div + ops::Mul + ops::Add + Saturating + - Unsigned, + N: Clone + + UniqueSaturatedInto + + ops::Rem + + ops::Div + + ops::Mul + + ops::Add + + Saturating + + Unsigned, Self::Inner: Into, { saturating_reciprocal_mul::(b, self.deconstruct(), Rounding::Nearest) @@ -168,9 +206,14 @@ pub trait PerThing: /// ``` fn saturating_reciprocal_mul_floor(self, b: N) -> N where - N: Clone + UniqueSaturatedInto + ops::Rem + - ops::Div + ops::Mul + ops::Add + Saturating + - Unsigned, + N: Clone + + UniqueSaturatedInto + + ops::Rem + + ops::Div + + ops::Mul + + ops::Add + + Saturating + + Unsigned, Self::Inner: Into, { saturating_reciprocal_mul::(b, self.deconstruct(), Rounding::Down) @@ -190,9 +233,14 @@ pub trait PerThing: /// ``` fn saturating_reciprocal_mul_ceil(self, b: N) -> N where - N: Clone + UniqueSaturatedInto + ops::Rem + - ops::Div + ops::Mul + ops::Add + Saturating + - Unsigned, + N: Clone + + UniqueSaturatedInto + + ops::Rem + + ops::Div + + ops::Mul + + ops::Add + + Saturating + + Unsigned, Self::Inner: Into, { saturating_reciprocal_mul::(b, self.deconstruct(), Rounding::Up) @@ -211,7 +259,9 
@@ pub trait PerThing: /// Same as `Self::from_float`. #[deprecated = "Use from_float instead"] #[cfg(feature = "std")] - fn from_fraction(x: f64) -> Self { Self::from_float(x) } + fn from_fraction(x: f64) -> Self { + Self::from_float(x) + } /// Approximate the fraction `p/q` into a per-thing fraction. This will never overflow. /// @@ -233,18 +283,31 @@ pub trait PerThing: /// ``` fn from_rational(p: N, q: N) -> Self where - N: Clone + Ord + TryInto + TryInto + - ops::Div + ops::Rem + ops::Add + Unsigned, + N: Clone + + Ord + + TryInto + + TryInto + + ops::Div + + ops::Rem + + ops::Add + + Unsigned, Self::Inner: Into; /// Same as `Self::from_rational`. #[deprecated = "Use from_rational instead"] fn from_rational_approximation(p: N, q: N) -> Self - where - N: Clone + Ord + TryInto + TryInto - + ops::Div + ops::Rem + ops::Add + Unsigned - + Zero + One, - Self::Inner: Into, + where + N: Clone + + Ord + + TryInto + + TryInto + + ops::Div + + ops::Rem + + ops::Add + + Unsigned + + Zero + + One, + Self::Inner: Into, { Self::from_rational(p, q) } @@ -264,37 +327,38 @@ enum Rounding { /// bounds instead of overflowing. fn saturating_reciprocal_mul(x: N, part: P::Inner, rounding: Rounding) -> N where - N: Clone + UniqueSaturatedInto + ops::Div + ops::Mul + ops::Add + ops::Rem + Saturating + Unsigned, + N: Clone + + UniqueSaturatedInto + + ops::Div + + ops::Mul + + ops::Add + + ops::Rem + + Saturating + + Unsigned, P: PerThing, P::Inner: Into, { let maximum: N = P::ACCURACY.into(); - let c = rational_mul_correction::( - x.clone(), - P::ACCURACY, - part, - rounding, - ); + let c = rational_mul_correction::(x.clone(), P::ACCURACY, part, rounding); (x / part.into()).saturating_mul(maximum).saturating_add(c) } /// Overflow-prune multiplication. Accurately multiply a value by `self` without overflowing. 
fn overflow_prune_mul(x: N, part: P::Inner, rounding: Rounding) -> N where - N: Clone + UniqueSaturatedInto + ops::Div + ops::Mul + ops::Add + ops::Rem + Unsigned, + N: Clone + + UniqueSaturatedInto + + ops::Div + + ops::Mul + + ops::Add + + ops::Rem + + Unsigned, P: PerThing, P::Inner: Into, { let maximum: N = P::ACCURACY.into(); let part_n: N = part.into(); - let c = rational_mul_correction::( - x.clone(), - part, - P::ACCURACY, - rounding, - ); + let c = rational_mul_correction::(x.clone(), part, P::ACCURACY, rounding); (x / maximum) * part_n + c } @@ -304,10 +368,14 @@ where /// to `x / denom * numer` for an accurate result. fn rational_mul_correction(x: N, numer: P::Inner, denom: P::Inner, rounding: Rounding) -> N where - N: UniqueSaturatedInto + ops::Div + ops::Mul + ops::Add + ops::Rem + Unsigned, + N: UniqueSaturatedInto + + ops::Div + + ops::Mul + + ops::Add + + ops::Rem + + Unsigned, P: PerThing, - P::Inner: Into + P::Inner: Into, { let numer_upper = P::Upper::from(numer); let denom_n: N = denom.into(); @@ -324,16 +392,18 @@ where // Already rounded down Rounding::Down => {}, // Round up if the fractional part of the result is non-zero. - Rounding::Up => if rem_mul_upper % denom_upper > 0.into() { - // `rem * numer / denom` is less than `numer`, so this will not overflow. - rem_mul_div_inner += 1.into(); - }, + Rounding::Up => + if rem_mul_upper % denom_upper > 0.into() { + // `rem * numer / denom` is less than `numer`, so this will not overflow. + rem_mul_div_inner += 1.into(); + }, // Round up if the fractional part of the result is greater than a half. An exact half is // rounded down. - Rounding::Nearest => if rem_mul_upper % denom_upper > denom_upper / 2.into() { - // `rem * numer / denom` is less than `numer`, so this will not overflow. - rem_mul_div_inner += 1.into(); - }, + Rounding::Nearest => + if rem_mul_upper % denom_upper > denom_upper / 2.into() { + // `rem * numer / denom` is less than `numer`, so this will not overflow. 
+ rem_mul_div_inner += 1.into(); + }, } rem_mul_div_inner.into() } @@ -1331,15 +1401,7 @@ macro_rules! implement_per_thing_with_perthousand { } } -implement_per_thing!( - Percent, - test_per_cent, - [u32, u64, u128], - 100u8, - u8, - u16, - "_Percent_", -); +implement_per_thing!(Percent, test_per_cent, [u32, u64, u128], 100u8, u8, u16, "_Percent_",); implement_per_thing_with_perthousand!( PerU16, test_peru16, diff --git a/primitives/arithmetic/src/rational.rs b/primitives/arithmetic/src/rational.rs index feb81eb572068..a15f5ac8c1650 100644 --- a/primitives/arithmetic/src/rational.rs +++ b/primitives/arithmetic/src/rational.rs @@ -15,10 +15,9 @@ // See the License for the specific language governing permissions and // limitations under the License. +use crate::{biguint::BigUint, helpers_128bit}; +use num_traits::{Bounded, One, Zero}; use sp_std::{cmp::Ordering, prelude::*}; -use crate::helpers_128bit; -use num_traits::{Zero, One, Bounded}; -use crate::biguint::BigUint; /// A wrapper for any rational number with infinitely large numerator and denominator. /// @@ -160,9 +159,11 @@ impl Rational128 { /// accurately calculated. pub fn lcm(&self, other: &Self) -> Result { // this should be tested better: two large numbers that are almost the same. - if self.1 == other.1 { return Ok(self.1) } + if self.1 == other.1 { + return Ok(self.1) + } let g = helpers_128bit::gcd(self.1, other.1); - helpers_128bit::multiply_by_rational(self.1 , other.1, g) + helpers_128bit::multiply_by_rational(self.1, other.1, g) } /// A saturating add that assumes `self` and `other` have the same denominator. 
@@ -170,7 +171,7 @@ impl Rational128 { if other.is_zero() { self } else { - Self(self.0.saturating_add(other.0) ,self.1) + Self(self.0.saturating_add(other.0), self.1) } } @@ -179,7 +180,7 @@ impl Rational128 { if other.is_zero() { self } else { - Self(self.0.saturating_sub(other.0) ,self.1) + Self(self.0.saturating_sub(other.0), self.1) } } @@ -190,7 +191,9 @@ impl Rational128 { let lcm = self.lcm(&other).map_err(|_| "failed to scale to denominator")?; let self_scaled = self.to_den(lcm).map_err(|_| "failed to scale to denominator")?; let other_scaled = other.to_den(lcm).map_err(|_| "failed to scale to denominator")?; - let n = self_scaled.0.checked_add(other_scaled.0) + let n = self_scaled + .0 + .checked_add(other_scaled.0) .ok_or("overflow while adding numerators")?; Ok(Self(n, self_scaled.1)) } @@ -203,7 +206,9 @@ impl Rational128 { let self_scaled = self.to_den(lcm).map_err(|_| "failed to scale to denominator")?; let other_scaled = other.to_den(lcm).map_err(|_| "failed to scale to denominator")?; - let n = self_scaled.0.checked_sub(other_scaled.0) + let n = self_scaled + .0 + .checked_sub(other_scaled.0) .ok_or("overflow while subtracting numerators")?; Ok(Self(n, self_scaled.1)) } @@ -243,7 +248,8 @@ impl Ord for Rational128 { } else { // Don't even compute gcd. 
let self_n = helpers_128bit::to_big_uint(self.0) * helpers_128bit::to_big_uint(other.1); - let other_n = helpers_128bit::to_big_uint(other.0) * helpers_128bit::to_big_uint(self.1); + let other_n = + helpers_128bit::to_big_uint(other.0) * helpers_128bit::to_big_uint(self.1); self_n.cmp(&other_n) } } @@ -256,7 +262,8 @@ impl PartialEq for Rational128 { self.0.eq(&other.0) } else { let self_n = helpers_128bit::to_big_uint(self.0) * helpers_128bit::to_big_uint(other.1); - let other_n = helpers_128bit::to_big_uint(other.0) * helpers_128bit::to_big_uint(self.1); + let other_n = + helpers_128bit::to_big_uint(other.0) * helpers_128bit::to_big_uint(self.1); self_n.eq(&other_n) } } @@ -264,8 +271,7 @@ impl PartialEq for Rational128 { #[cfg(test)] mod tests { - use super::*; - use super::helpers_128bit::*; + use super::{helpers_128bit::*, *}; const MAX128: u128 = u128::MAX; const MAX64: u128 = u64::MAX as u128; @@ -277,7 +283,9 @@ mod tests { fn mul_div(a: u128, b: u128, c: u128) -> u128 { use primitive_types::U256; - if a.is_zero() { return Zero::zero(); } + if a.is_zero() { + return Zero::zero() + } let c = c.max(1); // e for extended @@ -295,14 +303,8 @@ mod tests { #[test] fn truth_value_function_works() { - assert_eq!( - mul_div(2u128.pow(100), 8, 4), - 2u128.pow(101) - ); - assert_eq!( - mul_div(2u128.pow(100), 4, 8), - 2u128.pow(99) - ); + assert_eq!(mul_div(2u128.pow(100), 8, 4), 2u128.pow(101)); + assert_eq!(mul_div(2u128.pow(100), 4, 8), 2u128.pow(99)); // and it returns a if result cannot fit assert_eq!(mul_div(MAX128 - 10, 2, 1), MAX128 - 10); @@ -319,13 +321,10 @@ mod tests { assert_eq!(r(MAX128 / 2, MAX128).to_den(10), Ok(r(5, 10))); // large to perbill. This is very well needed for npos-elections. 
- assert_eq!( - r(MAX128 / 2, MAX128).to_den(1000_000_000), - Ok(r(500_000_000, 1000_000_000)) - ); + assert_eq!(r(MAX128 / 2, MAX128).to_den(1000_000_000), Ok(r(500_000_000, 1000_000_000))); // large to large - assert_eq!(r(MAX128 / 2, MAX128).to_den(MAX128/2), Ok(r(MAX128/4, MAX128/2))); + assert_eq!(r(MAX128 / 2, MAX128).to_den(MAX128 / 2), Ok(r(MAX128 / 4, MAX128 / 2))); } #[test] @@ -343,11 +342,11 @@ mod tests { // large numbers assert_eq!( - r(1_000_000_000, MAX128).lcm(&r(7_000_000_000, MAX128-1)), + r(1_000_000_000, MAX128).lcm(&r(7_000_000_000, MAX128 - 1)), Err("result cannot fit in u128"), ); assert_eq!( - r(1_000_000_000, MAX64).lcm(&r(7_000_000_000, MAX64-1)), + r(1_000_000_000, MAX64).lcm(&r(7_000_000_000, MAX64 - 1)), Ok(340282366920938463408034375210639556610), ); assert!(340282366920938463408034375210639556610 < MAX128); @@ -362,7 +361,7 @@ mod tests { // errors assert_eq!( - r(1, MAX128).checked_add(r(1, MAX128-1)), + r(1, MAX128).checked_add(r(1, MAX128 - 1)), Err("failed to scale to denominator"), ); assert_eq!( @@ -383,17 +382,14 @@ mod tests { // errors assert_eq!( - r(2, MAX128).checked_sub(r(1, MAX128-1)), + r(2, MAX128).checked_sub(r(1, MAX128 - 1)), Err("failed to scale to denominator"), ); assert_eq!( r(7, MAX128).checked_sub(r(MAX128, MAX128)), Err("overflow while subtracting numerators"), ); - assert_eq!( - r(1, 10).checked_sub(r(2,10)), - Err("overflow while subtracting numerators"), - ); + assert_eq!(r(1, 10).checked_sub(r(2, 10)), Err("overflow while subtracting numerators"),); } #[test] @@ -428,7 +424,7 @@ mod tests { ); assert_eq!( // MAX128 % 7 == 3 - multiply_by_rational(MAX128, 11 , 13).unwrap(), + multiply_by_rational(MAX128, 11, 13).unwrap(), (MAX128 / 13 * 11) + (8 * 11 / 13), ); assert_eq!( @@ -437,14 +433,8 @@ mod tests { (MAX128 / 1000 * 555) + (455 * 555 / 1000), ); - assert_eq!( - multiply_by_rational(2 * MAX64 - 1, MAX64, MAX64).unwrap(), - 2 * MAX64 - 1, - ); - assert_eq!( - multiply_by_rational(2 * MAX64 - 1, MAX64 - 
1, MAX64).unwrap(), - 2 * MAX64 - 3, - ); + assert_eq!(multiply_by_rational(2 * MAX64 - 1, MAX64, MAX64).unwrap(), 2 * MAX64 - 1,); + assert_eq!(multiply_by_rational(2 * MAX64 - 1, MAX64 - 1, MAX64).unwrap(), 2 * MAX64 - 3,); assert_eq!( multiply_by_rational(MAX64 + 100, MAX64_2, MAX64_2 / 2).unwrap(), @@ -459,31 +449,23 @@ mod tests { multiply_by_rational(2u128.pow(66) - 1, 2u128.pow(65) - 1, 2u128.pow(65)).unwrap(), 73786976294838206461, ); - assert_eq!( - multiply_by_rational(1_000_000_000, MAX128 / 8, MAX128 / 2).unwrap(), - 250000000, - ); + assert_eq!(multiply_by_rational(1_000_000_000, MAX128 / 8, MAX128 / 2).unwrap(), 250000000,); assert_eq!( multiply_by_rational( 29459999999999999988000u128, 1000000000000000000u128, 10000000000000000000u128 - ).unwrap(), + ) + .unwrap(), 2945999999999999998800u128 ); } #[test] fn multiply_by_rational_a_b_are_interchangeable() { - assert_eq!( - multiply_by_rational(10, MAX128, MAX128 / 2), - Ok(20), - ); - assert_eq!( - multiply_by_rational(MAX128, 10, MAX128 / 2), - Ok(20), - ); + assert_eq!(multiply_by_rational(10, MAX128, MAX128 / 2), Ok(20),); + assert_eq!(multiply_by_rational(MAX128, 10, MAX128 / 2), Ok(20),); } #[test] diff --git a/primitives/arithmetic/src/traits.rs b/primitives/arithmetic/src/traits.rs index d0ce921d9d342..a441a0dcbc08d 100644 --- a/primitives/arithmetic/src/traits.rs +++ b/primitives/arithmetic/src/traits.rs @@ -17,58 +17,129 @@ //! Primitive traits for the runtime arithmetic. 
-use sp_std::{self, convert::{TryFrom, TryInto}}; use codec::HasCompact; pub use integer_sqrt::IntegerSquareRoot; pub use num_traits::{ - Zero, One, Bounded, CheckedAdd, CheckedSub, CheckedMul, CheckedDiv, CheckedNeg, - CheckedShl, CheckedShr, checked_pow, Signed, Unsigned, + checked_pow, Bounded, CheckedAdd, CheckedDiv, CheckedMul, CheckedNeg, CheckedShl, CheckedShr, + CheckedSub, One, Signed, Unsigned, Zero, }; -use sp_std::ops::{ - Add, Sub, Mul, Div, Rem, AddAssign, SubAssign, MulAssign, DivAssign, - RemAssign, Shl, Shr +use sp_std::{ + self, + convert::{TryFrom, TryInto}, + ops::{ + Add, AddAssign, Div, DivAssign, Mul, MulAssign, Rem, RemAssign, Shl, Shr, Sub, SubAssign, + }, }; /// A meta trait for arithmetic type operations, regardless of any limitation on size. pub trait BaseArithmetic: - From + - Zero + One + IntegerSquareRoot + - Add + AddAssign + - Sub + SubAssign + - Mul + MulAssign + - Div + DivAssign + - Rem + RemAssign + - Shl + Shr + - CheckedShl + CheckedShr + CheckedAdd + CheckedSub + CheckedMul + CheckedDiv + Saturating + - PartialOrd + Ord + Bounded + HasCompact + Sized + - TryFrom + TryInto + TryFrom + TryInto + TryFrom + TryInto + - TryFrom + TryInto + TryFrom + TryInto + TryFrom + TryInto + - UniqueSaturatedFrom + UniqueSaturatedInto + - UniqueSaturatedFrom + UniqueSaturatedInto + - UniqueSaturatedFrom + UniqueSaturatedInto + - UniqueSaturatedFrom + UniqueSaturatedInto + - UniqueSaturatedFrom + UniqueSaturatedInto -{} - -impl + - Zero + One + IntegerSquareRoot + - Add + AddAssign + - Sub + SubAssign + - Mul + MulAssign + - Div + DivAssign + - Rem + RemAssign + - Shl + Shr + - CheckedShl + CheckedShr + CheckedAdd + CheckedSub + CheckedMul + CheckedDiv + Saturating + - PartialOrd + Ord + Bounded + HasCompact + Sized + - TryFrom + TryInto + TryFrom + TryInto + TryFrom + TryInto + - TryFrom + TryInto + TryFrom + TryInto + TryFrom + TryInto + - UniqueSaturatedFrom + UniqueSaturatedInto + - UniqueSaturatedFrom + UniqueSaturatedInto + - 
UniqueSaturatedFrom + UniqueSaturatedInto + - UniqueSaturatedFrom + UniqueSaturatedInto + - UniqueSaturatedFrom + UniqueSaturatedInto -> BaseArithmetic for T {} + From + + Zero + + One + + IntegerSquareRoot + + Add + + AddAssign + + Sub + + SubAssign + + Mul + + MulAssign + + Div + + DivAssign + + Rem + + RemAssign + + Shl + + Shr + + CheckedShl + + CheckedShr + + CheckedAdd + + CheckedSub + + CheckedMul + + CheckedDiv + + Saturating + + PartialOrd + + Ord + + Bounded + + HasCompact + + Sized + + TryFrom + + TryInto + + TryFrom + + TryInto + + TryFrom + + TryInto + + TryFrom + + TryInto + + TryFrom + + TryInto + + TryFrom + + TryInto + + UniqueSaturatedFrom + + UniqueSaturatedInto + + UniqueSaturatedFrom + + UniqueSaturatedInto + + UniqueSaturatedFrom + + UniqueSaturatedInto + + UniqueSaturatedFrom + + UniqueSaturatedInto + + UniqueSaturatedFrom + + UniqueSaturatedInto +{ +} + +impl< + T: From + + Zero + + One + + IntegerSquareRoot + + Add + + AddAssign + + Sub + + SubAssign + + Mul + + MulAssign + + Div + + DivAssign + + Rem + + RemAssign + + Shl + + Shr + + CheckedShl + + CheckedShr + + CheckedAdd + + CheckedSub + + CheckedMul + + CheckedDiv + + Saturating + + PartialOrd + + Ord + + Bounded + + HasCompact + + Sized + + TryFrom + + TryInto + + TryFrom + + TryInto + + TryFrom + + TryInto + + TryFrom + + TryInto + + TryFrom + + TryInto + + TryFrom + + TryInto + + UniqueSaturatedFrom + + UniqueSaturatedInto + + UniqueSaturatedFrom + + UniqueSaturatedInto + + UniqueSaturatedFrom + + UniqueSaturatedInto + + UniqueSaturatedFrom + + UniqueSaturatedInto + + UniqueSaturatedFrom + + UniqueSaturatedInto, + > BaseArithmetic for T +{ +} /// A meta trait for arithmetic. /// @@ -129,35 +200,49 @@ pub trait Saturating { fn saturating_pow(self, exp: usize) -> Self; /// Increment self by one, saturating. 
- fn saturating_inc(&mut self) where Self: One { + fn saturating_inc(&mut self) + where + Self: One, + { let mut o = Self::one(); sp_std::mem::swap(&mut o, self); *self = o.saturating_add(One::one()); } /// Decrement self by one, saturating at zero. - fn saturating_dec(&mut self) where Self: One { + fn saturating_dec(&mut self) + where + Self: One, + { let mut o = Self::one(); sp_std::mem::swap(&mut o, self); *self = o.saturating_sub(One::one()); } /// Increment self by some `amount`, saturating. - fn saturating_accrue(&mut self, amount: Self) where Self: One { + fn saturating_accrue(&mut self, amount: Self) + where + Self: One, + { let mut o = Self::one(); sp_std::mem::swap(&mut o, self); *self = o.saturating_add(amount); } /// Decrement self by some `amount`, saturating at zero. - fn saturating_reduce(&mut self, amount: Self) where Self: One { + fn saturating_reduce(&mut self, amount: Self) + where + Self: One, + { let mut o = Self::one(); sp_std::mem::swap(&mut o, self); *self = o.saturating_sub(amount); } } -impl Saturating for T { +impl Saturating + for T +{ fn saturating_add(self, o: Self) -> Self { ::saturating_add(self, o) } @@ -167,26 +252,24 @@ impl Self { - self.checked_mul(&o) - .unwrap_or_else(|| - if (self < T::zero()) != (o < T::zero()) { - Bounded::min_value() - } else { - Bounded::max_value() - } - ) + self.checked_mul(&o).unwrap_or_else(|| { + if (self < T::zero()) != (o < T::zero()) { + Bounded::min_value() + } else { + Bounded::max_value() + } + }) } fn saturating_pow(self, exp: usize) -> Self { let neg = self < T::zero() && exp % 2 != 0; - checked_pow(self, exp) - .unwrap_or_else(|| - if neg { - Bounded::min_value() - } else { - Bounded::max_value() - } - ) + checked_pow(self, exp).unwrap_or_else(|| { + if neg { + Bounded::min_value() + } else { + Bounded::max_value() + } + }) } } @@ -199,7 +282,10 @@ pub trait SaturatedConversion { /// This just uses `UniqueSaturatedFrom` internally but with this /// variant you can provide the destination 
type using turbofish syntax /// in case Rust happens not to assume the correct type. - fn saturated_from(t: T) -> Self where Self: UniqueSaturatedFrom { + fn saturated_from(t: T) -> Self + where + Self: UniqueSaturatedFrom, + { >::unique_saturated_from(t) } @@ -208,7 +294,10 @@ pub trait SaturatedConversion { /// This just uses `UniqueSaturatedInto` internally but with this /// variant you can provide the destination type using turbofish syntax /// in case Rust happens not to assume the correct type. - fn saturated_into(self) -> T where Self: UniqueSaturatedInto { + fn saturated_into(self) -> T + where + Self: UniqueSaturatedInto, + { >::unique_saturated_into(self) } } diff --git a/primitives/authority-discovery/src/lib.rs b/primitives/authority-discovery/src/lib.rs index b04ce43a2c747..871a35e6bf487 100644 --- a/primitives/authority-discovery/src/lib.rs +++ b/primitives/authority-discovery/src/lib.rs @@ -22,11 +22,7 @@ use sp_std::vec::Vec; mod app { - use sp_application_crypto::{ - key_types::AUTHORITY_DISCOVERY, - app_crypto, - sr25519, - }; + use sp_application_crypto::{app_crypto, key_types::AUTHORITY_DISCOVERY, sr25519}; app_crypto!(sr25519, AUTHORITY_DISCOVERY); } diff --git a/primitives/authorship/src/lib.rs b/primitives/authorship/src/lib.rs index 1350fa17ff301..254078b8445ae 100644 --- a/primitives/authorship/src/lib.rs +++ b/primitives/authorship/src/lib.rs @@ -19,11 +19,11 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::{result::Result, prelude::*}; +use sp_std::{prelude::*, result::Result}; -use codec::{Encode, Decode}; -use sp_inherents::{Error, InherentIdentifier, InherentData, IsFatalError}; -use sp_runtime::{RuntimeString, traits::Header as HeaderT}; +use codec::{Decode, Encode}; +use sp_inherents::{Error, InherentData, InherentIdentifier, IsFatalError}; +use sp_runtime::{traits::Header as HeaderT, RuntimeString}; /// The identifier for the `uncles` inherent. 
pub const INHERENT_IDENTIFIER: InherentIdentifier = *b"uncles00"; diff --git a/primitives/blockchain/src/backend.rs b/primitives/blockchain/src/backend.rs index dbce364ce7987..d3e722d10e57e 100644 --- a/primitives/blockchain/src/backend.rs +++ b/primitives/blockchain/src/backend.rs @@ -19,11 +19,13 @@ use std::sync::Arc; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor}; -use sp_runtime::generic::BlockId; -use sp_runtime::Justifications; use log::warn; use parking_lot::RwLock; +use sp_runtime::{ + generic::BlockId, + traits::{Block as BlockT, Header as HeaderT, NumberFor}, + Justifications, +}; use crate::header_metadata::HeaderMetadata; @@ -38,7 +40,10 @@ pub trait HeaderBackend: Send + Sync { /// Get block status. fn status(&self, id: BlockId) -> Result; /// Get block number by hash. Returns `None` if the header is not in the chain. - fn number(&self, hash: Block::Hash) -> Result::Header as HeaderT>::Number>>; + fn number( + &self, + hash: Block::Hash, + ) -> Result::Header as HeaderT>::Number>>; /// Get block hash by number. Returns `None` if the header is not in the chain. fn hash(&self, number: NumberFor) -> Result>; @@ -60,28 +65,29 @@ pub trait HeaderBackend: Send + Sync { /// Get block header. Returns `UnknownBlock` error if block is not found. fn expect_header(&self, id: BlockId) -> Result { - self.header(id)?.ok_or_else(|| Error::UnknownBlock(format!("Expect header: {}", id))) + self.header(id)? + .ok_or_else(|| Error::UnknownBlock(format!("Expect header: {}", id))) } /// Convert an arbitrary block ID into a block number. Returns `UnknownBlock` error if block is not found. 
fn expect_block_number_from_id(&self, id: &BlockId) -> Result> { - self.block_number_from_id(id) - .and_then(|n| n.ok_or_else(|| - Error::UnknownBlock(format!("Expect block number from id: {}", id)) - )) + self.block_number_from_id(id).and_then(|n| { + n.ok_or_else(|| Error::UnknownBlock(format!("Expect block number from id: {}", id))) + }) } /// Convert an arbitrary block ID into a block hash. Returns `UnknownBlock` error if block is not found. fn expect_block_hash_from_id(&self, id: &BlockId) -> Result { - self.block_hash_from_id(id) - .and_then(|n| n.ok_or_else(|| - Error::UnknownBlock(format!("Expect block hash from id: {}", id)) - )) + self.block_hash_from_id(id).and_then(|n| { + n.ok_or_else(|| Error::UnknownBlock(format!("Expect block hash from id: {}", id))) + }) } } /// Blockchain database backend. Does not perform any validation. -pub trait Backend: HeaderBackend + HeaderMetadata { +pub trait Backend: + HeaderBackend + HeaderMetadata +{ /// Get block body. Returns `None` if block is not found. fn body(&self, id: BlockId) -> Result::Extrinsic>>>; /// Get block justifications. Returns `None` if no justification exists. @@ -120,14 +126,16 @@ pub trait Backend: HeaderBackend + HeaderMetadata x, // target not in blockchain - None => { return Ok(None); }, + None => { + return Ok(None) + }, } }; if let Some(max_number) = maybe_max_number { // target outside search range if target_header.number() > &max_number { - return Ok(None); + return Ok(None) } } @@ -148,12 +156,12 @@ pub trait Backend: HeaderBackend + HeaderMetadata= *target_header.number() { // header is on a dead fork. - return Ok(None); + return Ok(None) } self.leaves()? @@ -171,12 +179,13 @@ pub trait Backend: HeaderBackend + HeaderMetadata: HeaderBackend + HeaderMetadata: HeaderBackend + HeaderMetadata: HeaderBackend + HeaderMetadata Result>>; + fn indexed_transaction(&self, hash: &Block::Hash) -> Result>>; /// Check if indexed transaction exists. 
fn has_indexed_transaction(&self, hash: &Block::Hash) -> Result { @@ -253,7 +259,9 @@ pub trait Cache: Send + Sync { &self, key: &well_known_cache_keys::Id, block: &BlockId, - ) -> Result, Block::Hash), Option<(NumberFor, Block::Hash)>, Vec)>>; + ) -> Result< + Option<((NumberFor, Block::Hash), Option<(NumberFor, Block::Hash)>, Vec)>, + >; } /// Blockchain info @@ -272,7 +280,7 @@ pub struct Info { /// Last finalized state. pub finalized_state: Option<(Block::Hash, <::Header as HeaderT>::Number)>, /// Number of concurrent leave forks. - pub number_leaves: usize + pub number_leaves: usize, } /// Block status. diff --git a/primitives/blockchain/src/error.rs b/primitives/blockchain/src/error.rs index 0d6ac10a8800e..bc27c36401e89 100644 --- a/primitives/blockchain/src/error.rs +++ b/primitives/blockchain/src/error.rs @@ -17,12 +17,12 @@ //! Substrate client possible errors. -use std::{self, result}; -use sp_state_machine; -use sp_runtime::transaction_validity::TransactionValidityError; -use sp_consensus; use codec::Error as CodecError; use sp_api::ApiError; +use sp_consensus; +use sp_runtime::transaction_validity::TransactionValidityError; +use sp_state_machine; +use std::{self, result}; /// Client Result type alias pub type Result = result::Result; @@ -205,7 +205,10 @@ impl Error { /// Construct from a state db error. // Can not be done directly, since that would make cargo run out of stack if // `sc-state-db` is lib is added as dependency. - pub fn from_state_db(e: E) -> Self where E: std::fmt::Debug { + pub fn from_state_db(e: E) -> Self + where + E: std::fmt::Debug, + { Error::StateDatabase(format!("{:?}", e)) } } diff --git a/primitives/blockchain/src/header_metadata.rs b/primitives/blockchain/src/header_metadata.rs index 87d0057f32c24..928409963bcd4 100644 --- a/primitives/blockchain/src/header_metadata.rs +++ b/primitives/blockchain/src/header_metadata.rs @@ -18,9 +18,9 @@ //! Implements tree backend, cached header metadata and algorithms //! 
to compute routes efficiently over the tree of headers. -use sp_runtime::traits::{Block as BlockT, NumberFor, Header}; -use parking_lot::RwLock; use lru::LruCache; +use parking_lot::RwLock; +use sp_runtime::traits::{Block as BlockT, Header, NumberFor}; /// Set to the expected max difference between `best` and `finalized` blocks at sync. const LRU_CACHE_SIZE: usize = 5_000; @@ -86,10 +86,7 @@ pub fn lowest_common_ancestor + ?Sized>( backend.insert_header_metadata(orig_header_two.hash, orig_header_two); } - Ok(HashAndNumber { - hash: header_one.hash, - number: header_one.number, - }) + Ok(HashAndNumber { hash: header_one.hash, number: header_one.number }) } /// Compute a tree-route between two blocks. See tree-route docs for more details. @@ -105,51 +102,33 @@ pub fn tree_route>( let mut to_branch = Vec::new(); while to.number > from.number { - to_branch.push(HashAndNumber { - number: to.number, - hash: to.hash, - }); + to_branch.push(HashAndNumber { number: to.number, hash: to.hash }); to = backend.header_metadata(to.parent)?; } while from.number > to.number { - from_branch.push(HashAndNumber { - number: from.number, - hash: from.hash, - }); + from_branch.push(HashAndNumber { number: from.number, hash: from.hash }); from = backend.header_metadata(from.parent)?; } // numbers are equal now. walk backwards until the block is the same while to.hash != from.hash { - to_branch.push(HashAndNumber { - number: to.number, - hash: to.hash, - }); + to_branch.push(HashAndNumber { number: to.number, hash: to.hash }); to = backend.header_metadata(to.parent)?; - from_branch.push(HashAndNumber { - number: from.number, - hash: from.hash, - }); + from_branch.push(HashAndNumber { number: from.number, hash: from.hash }); from = backend.header_metadata(from.parent)?; } // add the pivot block. 
and append the reversed to-branch // (note that it's reverse order originals) let pivot = from_branch.len(); - from_branch.push(HashAndNumber { - number: to.number, - hash: to.hash, - }); + from_branch.push(HashAndNumber { number: to.number, hash: to.hash }); from_branch.extend(to_branch.into_iter().rev()); - Ok(TreeRoute { - route: from_branch, - pivot, - }) + Ok(TreeRoute { route: from_branch, pivot }) } /// Hash and number of a block. @@ -204,14 +183,16 @@ impl TreeRoute { /// Get the common ancestor block. This might be one of the two blocks of the /// route. pub fn common_block(&self) -> &HashAndNumber { - self.route.get(self.pivot).expect("tree-routes are computed between blocks; \ + self.route.get(self.pivot).expect( + "tree-routes are computed between blocks; \ which are included in the route; \ - thus it is never empty; qed") + thus it is never empty; qed", + ) } /// Get a slice of enacted blocks (descendents of the common ancestor) pub fn enacted(&self) -> &[HashAndNumber] { - &self.route[self.pivot + 1 ..] + &self.route[self.pivot + 1..] } } @@ -240,17 +221,13 @@ pub struct HeaderMetadataCache { impl HeaderMetadataCache { /// Creates a new LRU header metadata cache with `capacity`. pub fn new(capacity: usize) -> Self { - HeaderMetadataCache { - cache: RwLock::new(LruCache::new(capacity)), - } + HeaderMetadataCache { cache: RwLock::new(LruCache::new(capacity)) } } } impl Default for HeaderMetadataCache { fn default() -> Self { - HeaderMetadataCache { - cache: RwLock::new(LruCache::new(LRU_CACHE_SIZE)), - } + HeaderMetadataCache { cache: RwLock::new(LruCache::new(LRU_CACHE_SIZE)) } } } diff --git a/primitives/blockchain/src/lib.rs b/primitives/blockchain/src/lib.rs index 696050f57ac89..cd36cabe15517 100644 --- a/primitives/blockchain/src/lib.rs +++ b/primitives/blockchain/src/lib.rs @@ -18,9 +18,9 @@ //! Substrate blockchain traits and primitives. 
mod backend; -mod header_metadata; mod error; +mod header_metadata; -pub use error::*; pub use backend::*; +pub use error::*; pub use header_metadata::*; diff --git a/primitives/consensus/aura/src/digests.rs b/primitives/consensus/aura/src/digests.rs index e93214eeb4bac..eaa29036d98a1 100644 --- a/primitives/consensus/aura/src/digests.rs +++ b/primitives/consensus/aura/src/digests.rs @@ -22,9 +22,9 @@ //! `CompatibleDigestItem` trait to appear in public interfaces. use crate::AURA_ENGINE_ID; -use sp_runtime::generic::DigestItem; +use codec::{Codec, Encode}; use sp_consensus_slots::Slot; -use codec::{Encode, Codec}; +use sp_runtime::generic::DigestItem; use sp_std::fmt::Debug; /// A digest item which is usable with aura consensus. @@ -42,9 +42,10 @@ pub trait CompatibleDigestItem: Sized { fn as_aura_pre_digest(&self) -> Option; } -impl CompatibleDigestItem for DigestItem where +impl CompatibleDigestItem for DigestItem +where Signature: Codec, - Hash: Debug + Send + Sync + Eq + Clone + Codec + 'static + Hash: Debug + Send + Sync + Eq + Clone + Codec + 'static, { fn aura_seal(signature: Signature) -> Self { DigestItem::Seal(AURA_ENGINE_ID, signature.encode()) diff --git a/primitives/consensus/aura/src/inherents.rs b/primitives/consensus/aura/src/inherents.rs index 294f544f6725a..2a797b5d3f393 100644 --- a/primitives/consensus/aura/src/inherents.rs +++ b/primitives/consensus/aura/src/inherents.rs @@ -16,8 +16,7 @@ // limitations under the License. /// Contains the inherents for the AURA module - -use sp_inherents::{InherentIdentifier, InherentData, Error}; +use sp_inherents::{Error, InherentData, InherentIdentifier}; /// The Aura inherent identifier. pub const INHERENT_IDENTIFIER: InherentIdentifier = *b"auraslot"; @@ -28,13 +27,13 @@ pub type InherentType = sp_consensus_slots::Slot; /// Auxiliary trait to extract Aura inherent data. pub trait AuraInherentData { /// Get aura inherent data. 
- fn aura_inherent_data(&self) ->Result, Error>; + fn aura_inherent_data(&self) -> Result, Error>; /// Replace aura inherent data. fn aura_replace_inherent_data(&mut self, new: InherentType); } impl AuraInherentData for InherentData { - fn aura_inherent_data(&self) ->Result, Error> { + fn aura_inherent_data(&self) -> Result, Error> { self.get_data(&INHERENT_IDENTIFIER) } @@ -54,9 +53,7 @@ pub struct InherentDataProvider { impl InherentDataProvider { /// Create a new instance with the given slot. pub fn new(slot: InherentType) -> Self { - Self { - slot, - } + Self { slot } } /// Creates the inherent data provider by calculating the slot from the given @@ -65,13 +62,10 @@ impl InherentDataProvider { timestamp: sp_timestamp::Timestamp, duration: std::time::Duration, ) -> Self { - let slot = InherentType::from( - (timestamp.as_duration().as_millis() / duration.as_millis()) as u64 - ); + let slot = + InherentType::from((timestamp.as_duration().as_millis() / duration.as_millis()) as u64); - Self { - slot, - } + Self { slot } } } @@ -87,10 +81,7 @@ impl sp_std::ops::Deref for InherentDataProvider { #[cfg(feature = "std")] #[async_trait::async_trait] impl sp_inherents::InherentDataProvider for InherentDataProvider { - fn provide_inherent_data( - &self, - inherent_data: &mut InherentData, - ) ->Result<(), Error> { + fn provide_inherent_data(&self, inherent_data: &mut InherentData) -> Result<(), Error> { inherent_data.put_data(INHERENT_IDENTIFIER, &self.slot) } diff --git a/primitives/consensus/aura/src/lib.rs b/primitives/consensus/aura/src/lib.rs index a28e681fda27f..e6a319c1d1590 100644 --- a/primitives/consensus/aura/src/lib.rs +++ b/primitives/consensus/aura/src/lib.rs @@ -19,9 +19,9 @@ #![cfg_attr(not(feature = "std"), no_std)] -use codec::{Encode, Decode, Codec}; -use sp_std::vec::Vec; +use codec::{Codec, Decode, Encode}; use sp_runtime::ConsensusEngineId; +use sp_std::vec::Vec; pub mod digests; pub mod inherents; @@ -46,7 +46,7 @@ pub mod sr25519 { pub mod ed25519 { 
mod app_ed25519 { - use sp_application_crypto::{app_crypto, key_types::AURA, ed25519}; + use sp_application_crypto::{app_crypto, ed25519, key_types::AURA}; app_crypto!(ed25519, AURA); } diff --git a/primitives/consensus/babe/src/digests.rs b/primitives/consensus/babe/src/digests.rs index 21d8cf392b602..470a028021ca1 100644 --- a/primitives/consensus/babe/src/digests.rs +++ b/primitives/consensus/babe/src/digests.rs @@ -22,8 +22,8 @@ use super::{ BabeEpochConfiguration, Slot, BABE_ENGINE_ID, }; use codec::{Codec, Decode, Encode}; -use sp_std::vec::Vec; use sp_runtime::{DigestItem, RuntimeDebug}; +use sp_std::vec::Vec; use sp_consensus_vrf::schnorrkel::{Randomness, VRFOutput, VRFProof}; @@ -143,14 +143,13 @@ pub enum NextConfigDescriptor { c: (u64, u64), /// Value of `allowed_slots` in `BabeEpochConfiguration`. allowed_slots: AllowedSlots, - } + }, } impl From for BabeEpochConfiguration { fn from(desc: NextConfigDescriptor) -> Self { match desc { - NextConfigDescriptor::V1 { c, allowed_slots } => - Self { c, allowed_slots }, + NextConfigDescriptor::V1 { c, allowed_slots } => Self { c, allowed_slots }, } } } @@ -176,8 +175,9 @@ pub trait CompatibleDigestItem: Sized { fn as_next_config_descriptor(&self) -> Option; } -impl CompatibleDigestItem for DigestItem where - Hash: Send + Sync + Eq + Clone + Codec + 'static +impl CompatibleDigestItem for DigestItem +where + Hash: Send + Sync + Eq + Clone + Codec + 'static, { fn babe_pre_digest(digest: PreDigest) -> Self { DigestItem::PreRuntime(BABE_ENGINE_ID, digest.encode()) diff --git a/primitives/consensus/babe/src/inherents.rs b/primitives/consensus/babe/src/inherents.rs index e160ca8644bc2..cecd61998a4db 100644 --- a/primitives/consensus/babe/src/inherents.rs +++ b/primitives/consensus/babe/src/inherents.rs @@ -17,7 +17,7 @@ //! 
Inherents for BABE -use sp_inherents::{InherentData, InherentIdentifier, Error}; +use sp_inherents::{Error, InherentData, InherentIdentifier}; use sp_std::result::Result; @@ -64,13 +64,10 @@ impl InherentDataProvider { timestamp: sp_timestamp::Timestamp, duration: std::time::Duration, ) -> Self { - let slot = InherentType::from( - (timestamp.as_duration().as_millis() / duration.as_millis()) as u64 - ); + let slot = + InherentType::from((timestamp.as_duration().as_millis() / duration.as_millis()) as u64); - Self { - slot, - } + Self { slot } } /// Returns the `slot` of this inherent data provider. diff --git a/primitives/consensus/babe/src/lib.rs b/primitives/consensus/babe/src/lib.rs index 67871db4fee5d..4417670f4144b 100644 --- a/primitives/consensus/babe/src/lib.rs +++ b/primitives/consensus/babe/src/lib.rs @@ -31,7 +31,7 @@ pub use sp_consensus_vrf::schnorrkel::{ use codec::{Decode, Encode}; use scale_info::TypeInfo; #[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; #[cfg(feature = "std")] use sp_keystore::vrf::{VRFTranscriptData, VRFTranscriptValue}; use sp_runtime::{traits::Header, ConsensusEngineId, RuntimeDebug}; @@ -97,11 +97,7 @@ pub type BabeAuthorityWeight = u64; pub type BabeBlockWeight = u32; /// Make a VRF transcript from given randomness, slot number and epoch. 
-pub fn make_transcript( - randomness: &Randomness, - slot: Slot, - epoch: u64, -) -> Transcript { +pub fn make_transcript(randomness: &Randomness, slot: Slot, epoch: u64) -> Transcript { let mut transcript = Transcript::new(&BABE_ENGINE_ID); transcript.append_u64(b"slot number", *slot); transcript.append_u64(b"current epoch", epoch); @@ -111,18 +107,14 @@ pub fn make_transcript( /// Make a VRF transcript data container #[cfg(feature = "std")] -pub fn make_transcript_data( - randomness: &Randomness, - slot: Slot, - epoch: u64, -) -> VRFTranscriptData { +pub fn make_transcript_data(randomness: &Randomness, slot: Slot, epoch: u64) -> VRFTranscriptData { VRFTranscriptData { label: &BABE_ENGINE_ID, items: vec![ ("slot number", VRFTranscriptValue::U64(*slot)), ("current epoch", VRFTranscriptValue::U64(epoch)), ("chain randomness", VRFTranscriptValue::Bytes(randomness.to_vec())), - ] + ], } } @@ -281,20 +273,15 @@ where use digests::*; use sp_application_crypto::RuntimeAppPublic; - let find_pre_digest = |header: &H| { - header - .digest() - .logs() - .iter() - .find_map(|log| log.as_babe_pre_digest()) - }; + let find_pre_digest = + |header: &H| header.digest().logs().iter().find_map(|log| log.as_babe_pre_digest()); let verify_seal_signature = |mut header: H, offender: &AuthorityId| { let seal = header.digest_mut().pop()?.as_babe_seal()?; let pre_hash = header.hash(); if !offender.verify(&pre_hash.as_ref(), &seal) { - return None; + return None } Some(()) @@ -303,7 +290,7 @@ where let verify_proof = || { // we must have different headers for the equivocation to be valid if proof.first_header.hash() == proof.second_header.hash() { - return None; + return None } let first_pre_digest = find_pre_digest(&proof.first_header)?; @@ -314,12 +301,12 @@ where if proof.slot != first_pre_digest.slot() || first_pre_digest.slot() != second_pre_digest.slot() { - return None; + return None } // both headers must have been authored by the same authority if 
first_pre_digest.authority_index() != second_pre_digest.authority_index() { - return None; + return None } // we finally verify that the expected authority has signed both headers and diff --git a/primitives/consensus/common/src/block_import.rs b/primitives/consensus/common/src/block_import.rs index a444e15095ef6..c742e24a0cc01 100644 --- a/primitives/consensus/common/src/block_import.rs +++ b/primitives/consensus/common/src/block_import.rs @@ -17,16 +17,14 @@ //! Block import helpers. -use sp_runtime::traits::{Block as BlockT, DigestItemFor, Header as HeaderT, NumberFor, HashFor}; -use sp_runtime::{Justification, Justifications}; -use serde::{Serialize, Deserialize}; -use std::borrow::Cow; -use std::collections::HashMap; -use std::sync::Arc; -use std::any::Any; +use serde::{Deserialize, Serialize}; +use sp_runtime::{ + traits::{Block as BlockT, DigestItemFor, HashFor, Header as HeaderT, NumberFor}, + Justification, Justifications, +}; +use std::{any::Any, borrow::Cow, collections::HashMap, sync::Arc}; -use crate::Error; -use crate::import_queue::CacheKeyId; +use crate::{import_queue::CacheKeyId, Error}; /// Block import result. #[derive(Debug, PartialEq, Eq)] @@ -88,8 +86,8 @@ impl ImportResult { if aux.needs_justification { justification_sync_link.request_justification(hash, number); } - } - _ => {} + }, + _ => {}, } } } @@ -154,9 +152,7 @@ pub struct ImportedState { impl std::fmt::Debug for ImportedState { fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { - fmt.debug_struct("ImportedState") - .field("block", &self.block) - .finish() + fmt.debug_struct("ImportedState").field("block", &self.block).finish() } } @@ -226,12 +222,10 @@ pub struct BlockImportParams { impl BlockImportParams { /// Create a new block import params. 
- pub fn new( - origin: BlockOrigin, - header: Block::Header, - ) -> Self { + pub fn new(origin: BlockOrigin, header: Block::Header) -> Self { Self { - origin, header, + origin, + header, justifications: None, post_digests: Vec::new(), body: None, @@ -273,7 +267,9 @@ impl BlockImportParams { /// /// Actually this just sets `StorageChanges::Changes` to `None` and makes rustc think that `Self` now /// uses a different transaction type. - pub fn clear_storage_changes_and_mutate(self) -> BlockImportParams { + pub fn clear_storage_changes_and_mutate( + self, + ) -> BlockImportParams { // Preserve imported state. let state_action = match self.state_action { StateAction::ApplyChanges(StorageChanges::Import(state)) => @@ -305,14 +301,15 @@ impl BlockImportParams { let (k, v) = self.intermediates.remove_entry(key).ok_or(Error::NoIntermediate)?; v.downcast::().or_else(|v| { - self.intermediates.insert(k, v); - Err(Error::InvalidIntermediate) + self.intermediates.insert(k, v); + Err(Error::InvalidIntermediate) }) } /// Get a reference to a given intermediate. pub fn intermediate(&self, key: &[u8]) -> Result<&T, Error> { - self.intermediates.get(key) + self.intermediates + .get(key) .ok_or(Error::NoIntermediate)? .downcast_ref::() .ok_or(Error::InvalidIntermediate) @@ -320,7 +317,8 @@ impl BlockImportParams { /// Get a mutable reference to a given intermediate. pub fn intermediate_mut(&mut self, key: &[u8]) -> Result<&mut T, Error> { - self.intermediates.get_mut(key) + self.intermediates + .get_mut(key) .ok_or(Error::NoIntermediate)? 
.downcast_mut::() .ok_or(Error::InvalidIntermediate) @@ -353,8 +351,8 @@ pub trait BlockImport { #[async_trait::async_trait] impl BlockImport for crate::import_queue::BoxBlockImport - where - Transaction: Send + 'static, +where + Transaction: Send + 'static, { type Error = crate::error::Error; type Transaction = Transaction; @@ -381,10 +379,10 @@ impl BlockImport for crate::import_queue::BoxBlockImp #[async_trait::async_trait] impl BlockImport for Arc - where - for<'r> &'r T: BlockImport, - T: Send + Sync, - Transaction: Send + 'static, +where + for<'r> &'r T: BlockImport, + T: Send + Sync, + Transaction: Send + 'static, { type Error = E; type Transaction = Transaction; diff --git a/primitives/consensus/common/src/block_validation.rs b/primitives/consensus/common/src/block_validation.rs index fb0846fe9901a..9a9f21394f9ab 100644 --- a/primitives/consensus/common/src/block_validation.rs +++ b/primitives/consensus/common/src/block_validation.rs @@ -18,9 +18,9 @@ //! Block announcement validation. use crate::BlockStatus; +use futures::FutureExt as _; use sp_runtime::{generic::BlockId, traits::Block}; use std::{error::Error, future::Future, pin::Pin, sync::Arc}; -use futures::FutureExt as _; /// A type which provides access to chain information. pub trait Chain { @@ -92,6 +92,7 @@ impl BlockAnnounceValidator for DefaultBlockAnnounceValidator { } else { Ok(Validation::Success { is_new_best: false }) } - }.boxed() + } + .boxed() } } diff --git a/primitives/consensus/common/src/error.rs b/primitives/consensus/common/src/error.rs index d7461fe92032e..546f30d3e8202 100644 --- a/primitives/consensus/common/src/error.rs +++ b/primitives/consensus/common/src/error.rs @@ -16,8 +16,8 @@ // limitations under the License. //! Error types in Consensus -use sp_version::RuntimeVersion; use sp_core::ed25519::Public; +use sp_version::RuntimeVersion; use std::error; /// Result type alias. 
@@ -58,8 +58,10 @@ pub enum Error { #[error("Message sender {0:?} is not a valid authority")] InvalidAuthority(Public), /// Authoring interface does not match the runtime. - #[error("Authoring for current \ - runtime is not supported. Native ({native}) cannot author for on-chain ({on_chain}).")] + #[error( + "Authoring for current \ + runtime is not supported. Native ({native}) cannot author for on-chain ({on_chain})." + )] IncompatibleAuthoringRuntime { native: RuntimeVersion, on_chain: RuntimeVersion }, /// Authoring interface does not match the runtime. #[error("Authoring for current runtime is not supported since it has no version.")] @@ -81,7 +83,7 @@ pub enum Error { ChainLookup(String), /// Signing failed #[error("Failed to sign using key: {0:?}. Reason: {1}")] - CannotSign(Vec, String) + CannotSign(Vec, String), } impl core::convert::From for Error { diff --git a/primitives/consensus/common/src/evaluation.rs b/primitives/consensus/common/src/evaluation.rs index c18c8b127f991..19be5e5526349 100644 --- a/primitives/consensus/common/src/evaluation.rs +++ b/primitives/consensus/common/src/evaluation.rs @@ -18,7 +18,7 @@ //! Block evaluation and evaluation errors. use codec::Encode; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, One, CheckedConversion}; +use sp_runtime::traits::{Block as BlockT, CheckedConversion, Header as HeaderT, One}; // This is just a best effort to encode the number. None indicated that it's too big to encode // in a u128. 
@@ -48,15 +48,13 @@ pub fn evaluate_initial( parent_hash: &::Hash, parent_number: <::Header as HeaderT>::Number, ) -> Result<()> { - let encoded = Encode::encode(proposal); - let proposal = Block::decode(&mut &encoded[..]) - .map_err(|e| Error::BadProposalFormat(e))?; + let proposal = Block::decode(&mut &encoded[..]).map_err(|e| Error::BadProposalFormat(e))?; if *parent_hash != *proposal.header().parent_hash() { return Err(Error::WrongParentHash { expected: format!("{:?}", *parent_hash), - got: format!("{:?}", proposal.header().parent_hash()) + got: format!("{:?}", proposal.header().parent_hash()), }) } diff --git a/primitives/consensus/common/src/import_queue.rs b/primitives/consensus/common/src/import_queue.rs index 6cac6b1ff9201..6eb8d0a750a24 100644 --- a/primitives/consensus/common/src/import_queue.rs +++ b/primitives/consensus/common/src/import_queue.rs @@ -28,14 +28,17 @@ use std::collections::HashMap; -use sp_runtime::{Justifications, traits::{Block as BlockT, Header as _, NumberFor}}; +use sp_runtime::{ + traits::{Block as BlockT, Header as _, NumberFor}, + Justifications, +}; use crate::{ - error::Error as ConsensusError, block_import::{ - BlockImport, BlockOrigin, BlockImportParams, ImportedAux, JustificationImport, ImportResult, - BlockCheckParams, ImportedState, StateAction, + BlockCheckParams, BlockImport, BlockImportParams, BlockOrigin, ImportResult, ImportedAux, + ImportedState, JustificationImport, StateAction, }, + error::Error as ConsensusError, metrics::Metrics, }; pub use basic_queue::BasicQueue; @@ -43,18 +46,19 @@ pub use basic_queue::BasicQueue; /// A commonly-used Import Queue type. /// /// This defines the transaction type of the `BasicQueue` to be the transaction type for a client. -pub type DefaultImportQueue = BasicQueue>; +pub type DefaultImportQueue = + BasicQueue>; mod basic_queue; pub mod buffered_link; /// Shared block import struct used by the queue. 
-pub type BoxBlockImport = Box< - dyn BlockImport + Send + Sync ->; +pub type BoxBlockImport = + Box + Send + Sync>; /// Shared justification import struct used by the queue. -pub type BoxJustificationImport = Box + Send + Sync>; +pub type BoxJustificationImport = + Box + Send + Sync>; /// Maps to the Origin used by the network. pub type Origin = libp2p::PeerId; @@ -115,7 +119,7 @@ pub trait ImportQueue: Send { who: Origin, hash: B::Hash, number: NumberFor, - justifications: Justifications + justifications: Justifications, ); /// Polls for actions to perform on the network. /// @@ -133,10 +137,18 @@ pub trait Link: Send { &mut self, _imported: usize, _count: usize, - _results: Vec<(Result>, BlockImportError>, B::Hash)> - ) {} + _results: Vec<(Result>, BlockImportError>, B::Hash)>, + ) { + } /// Justification import result. - fn justification_imported(&mut self, _who: Origin, _hash: &B::Hash, _number: NumberFor, _success: bool) {} + fn justification_imported( + &mut self, + _who: Origin, + _hash: &B::Hash, + _number: NumberFor, + _success: bool, + ) { + } /// Request a justification for the given block. fn request_justification(&mut self, _hash: &B::Hash, _number: NumberFor) {} } @@ -180,7 +192,11 @@ pub async fn import_single_block, Transaction: Send + } /// Single block import function with metering. 
-pub(crate) async fn import_single_block_metered, Transaction: Send + 'static>( +pub(crate) async fn import_single_block_metered< + B: BlockT, + V: Verifier, + Transaction: Send + 'static, +>( import_handle: &mut impl BlockImport, block_origin: BlockOrigin, block: IncomingBlock, @@ -207,60 +223,61 @@ pub(crate) async fn import_single_block_metered, Trans let hash = header.hash(); let parent_hash = header.parent_hash().clone(); - let import_handler = |import| { - match import { - Ok(ImportResult::AlreadyInChain) => { - trace!(target: "sync", "Block already in chain {}: {:?}", number, hash); - Ok(BlockImportResult::ImportedKnown(number, peer.clone())) - }, - Ok(ImportResult::Imported(aux)) => Ok(BlockImportResult::ImportedUnknown(number, aux, peer.clone())), - Ok(ImportResult::MissingState) => { - debug!(target: "sync", "Parent state is missing for {}: {:?}, parent: {:?}", number, hash, parent_hash); - Err(BlockImportError::MissingState) - }, - Ok(ImportResult::UnknownParent) => { - debug!(target: "sync", "Block with unknown parent {}: {:?}, parent: {:?}", number, hash, parent_hash); - Err(BlockImportError::UnknownParent) - }, - Ok(ImportResult::KnownBad) => { - debug!(target: "sync", "Peer gave us a bad block {}: {:?}", number, hash); - Err(BlockImportError::BadBlock(peer.clone())) - }, - Err(e) => { - debug!(target: "sync", "Error importing block {}: {:?}: {:?}", number, hash, e); - Err(BlockImportError::Other(e)) - } - } + let import_handler = |import| match import { + Ok(ImportResult::AlreadyInChain) => { + trace!(target: "sync", "Block already in chain {}: {:?}", number, hash); + Ok(BlockImportResult::ImportedKnown(number, peer.clone())) + }, + Ok(ImportResult::Imported(aux)) => + Ok(BlockImportResult::ImportedUnknown(number, aux, peer.clone())), + Ok(ImportResult::MissingState) => { + debug!(target: "sync", "Parent state is missing for {}: {:?}, parent: {:?}", number, hash, parent_hash); + Err(BlockImportError::MissingState) + }, + 
Ok(ImportResult::UnknownParent) => { + debug!(target: "sync", "Block with unknown parent {}: {:?}, parent: {:?}", number, hash, parent_hash); + Err(BlockImportError::UnknownParent) + }, + Ok(ImportResult::KnownBad) => { + debug!(target: "sync", "Peer gave us a bad block {}: {:?}", number, hash); + Err(BlockImportError::BadBlock(peer.clone())) + }, + Err(e) => { + debug!(target: "sync", "Error importing block {}: {:?}: {:?}", number, hash, e); + Err(BlockImportError::Other(e)) + }, }; - match import_handler(import_handle.check_block(BlockCheckParams { - hash, - number, - parent_hash, - allow_missing_state: block.allow_missing_state, - import_existing: block.import_existing, - }).await)? { + match import_handler( + import_handle + .check_block(BlockCheckParams { + hash, + number, + parent_hash, + allow_missing_state: block.allow_missing_state, + import_existing: block.import_existing, + }) + .await, + )? { BlockImportResult::ImportedUnknown { .. } => (), r => return Ok(r), // Any other successful result means that the block is already imported. 
} let started = wasm_timer::Instant::now(); - let (mut import_block, maybe_keys) = verifier.verify( - block_origin, - header, - justifications, - block.body - ).await.map_err(|msg| { - if let Some(ref peer) = peer { - trace!(target: "sync", "Verifying {}({}) from {} failed: {}", number, hash, peer, msg); - } else { - trace!(target: "sync", "Verifying {}({}) failed: {}", number, hash, msg); - } - if let Some(metrics) = metrics.as_ref() { - metrics.report_verification(false, started.elapsed()); - } - BlockImportError::VerificationFailed(peer.clone(), msg) - })?; + let (mut import_block, maybe_keys) = verifier + .verify(block_origin, header, justifications, block.body) + .await + .map_err(|msg| { + if let Some(ref peer) = peer { + trace!(target: "sync", "Verifying {}({}) from {} failed: {}", number, hash, peer, msg); + } else { + trace!(target: "sync", "Verifying {}({}) failed: {}", number, hash, msg); + } + if let Some(metrics) = metrics.as_ref() { + metrics.report_verification(false, started.elapsed()); + } + BlockImportError::VerificationFailed(peer.clone(), msg) + })?; if let Some(metrics) = metrics.as_ref() { metrics.report_verification(true, started.elapsed()); diff --git a/primitives/consensus/common/src/import_queue/basic_queue.rs b/primitives/consensus/common/src/import_queue/basic_queue.rs index 8dd40d84df305..2610a92ad83e4 100644 --- a/primitives/consensus/common/src/import_queue/basic_queue.rs +++ b/primitives/consensus/common/src/import_queue/basic_queue.rs @@ -15,20 +15,25 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use std::{pin::Pin, time::Duration, marker::PhantomData}; -use futures::{prelude::*, task::Context, task::Poll}; +use futures::{ + prelude::*, + task::{Context, Poll}, +}; use futures_timer::Delay; -use sp_runtime::{Justification, Justifications, traits::{Block as BlockT, Header as HeaderT, NumberFor}}; -use sp_utils::mpsc::{TracingUnboundedSender, tracing_unbounded, TracingUnboundedReceiver}; use prometheus_endpoint::Registry; +use sp_runtime::{ + traits::{Block as BlockT, Header as HeaderT, NumberFor}, + Justification, Justifications, +}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use std::{marker::PhantomData, pin::Pin, time::Duration}; use crate::{ block_import::BlockOrigin, import_queue::{ - BlockImportResult, BlockImportError, Verifier, BoxBlockImport, - BoxJustificationImport, ImportQueue, Link, Origin, - IncomingBlock, import_single_block_metered, - buffered_link::{self, BufferedLinkSender, BufferedLinkReceiver}, + buffered_link::{self, BufferedLinkReceiver, BufferedLinkSender}, + import_single_block_metered, BlockImportError, BlockImportResult, BoxBlockImport, + BoxJustificationImport, ImportQueue, IncomingBlock, Link, Origin, Verifier, }, metrics::Metrics, }; @@ -85,24 +90,20 @@ impl BasicQueue { spawner.spawn_essential_blocking("basic-block-import-worker", future.boxed()); - Self { - justification_sender, - block_import_sender, - result_port, - _phantom: PhantomData, - } + Self { justification_sender, block_import_sender, result_port, _phantom: PhantomData } } } impl ImportQueue for BasicQueue { fn import_blocks(&mut self, origin: BlockOrigin, blocks: Vec>) { if blocks.is_empty() { - return; + return } trace!(target: "sync", "Scheduling {} blocks for import", blocks.len()); - let res = - self.block_import_sender.unbounded_send(worker_messages::ImportBlocks(origin, blocks)); + let res = self + .block_import_sender + .unbounded_send(worker_messages::ImportBlocks(origin, blocks)); if res.is_err() { 
log::error!( @@ -145,7 +146,12 @@ mod worker_messages { use super::*; pub struct ImportBlocks(pub BlockOrigin, pub Vec>); - pub struct ImportJustification(pub Origin, pub B::Hash, pub NumberFor, pub Justification); + pub struct ImportJustification( + pub Origin, + pub B::Hash, + pub NumberFor, + pub Justification, + ); } /// The process of importing blocks. @@ -164,7 +170,8 @@ async fn block_import_process( delay_between_blocks: Duration, ) { loop { - let worker_messages::ImportBlocks(origin, blocks) = match block_import_receiver.next().await { + let worker_messages::ImportBlocks(origin, blocks) = match block_import_receiver.next().await + { Some(blocks) => blocks, None => { log::debug!( @@ -182,7 +189,8 @@ async fn block_import_process( &mut verifier, delay_between_blocks, metrics.clone(), - ).await; + ) + .await; result_sender.blocks_processed(res.imported, res.block_count, res.results); } @@ -214,11 +222,7 @@ impl BlockImportWorker { let (block_import_sender, block_import_port) = tracing_unbounded("mpsc_import_queue_worker_blocks"); - let mut worker = BlockImportWorker { - result_sender, - justification_import, - metrics, - }; + let mut worker = BlockImportWorker { result_sender, justification_import, metrics }; let delay_between_blocks = Duration::default(); @@ -248,29 +252,26 @@ impl BlockImportWorker { target: "block-import", "Stopping block import because result channel was closed!", ); - return; + return } // Make sure to first process all justifications while let Poll::Ready(justification) = futures::poll!(justification_port.next()) { match justification { - Some(ImportJustification(who, hash, number, justification)) => { - worker - .import_justification(who, hash, number, justification) - .await - } + Some(ImportJustification(who, hash, number, justification)) => + worker.import_justification(who, hash, number, justification).await, None => { log::debug!( target: "block-import", "Stopping block import because justification channel was closed!", ); - 
return; - } + return + }, } } if let Poll::Ready(()) = futures::poll!(&mut block_import_process) { - return; + return } // All futures that we polled are now pending. @@ -310,13 +311,10 @@ impl BlockImportWorker { }; if let Some(metrics) = self.metrics.as_ref() { - metrics - .justification_import_time - .observe(started.elapsed().as_secs_f64()); + metrics.justification_import_time.observe(started.elapsed().as_secs_f64()); } - self.result_sender - .justification_imported(who, &hash, number, success); + self.result_sender.justification_imported(who, &hash, number, success); } } @@ -382,7 +380,8 @@ async fn import_many_blocks, Transaction: Send + 'stat block, verifier, metrics.clone(), - ).await + ) + .await }; if let Some(metrics) = metrics.as_ref() { @@ -604,7 +603,7 @@ mod tests { block_on(futures::future::poll_fn(|cx| { while link.events.len() < 9 { match Future::poll(Pin::new(&mut worker), cx) { - Poll::Pending => {} + Poll::Pending => {}, Poll::Ready(()) => panic!("import queue worker should not conclude."), } diff --git a/primitives/consensus/common/src/import_queue/buffered_link.rs b/primitives/consensus/common/src/import_queue/buffered_link.rs index 0295f704c4efc..4c5864a1ad9f3 100644 --- a/primitives/consensus/common/src/import_queue/buffered_link.rs +++ b/primitives/consensus/common/src/import_queue/buffered_link.rs @@ -38,11 +38,14 @@ //! ``` //! +use crate::import_queue::{BlockImportError, BlockImportResult, Link, Origin}; use futures::prelude::*; use sp_runtime::traits::{Block as BlockT, NumberFor}; -use sp_utils::mpsc::{TracingUnboundedSender, TracingUnboundedReceiver, tracing_unbounded}; -use std::{pin::Pin, task::Context, task::Poll}; -use crate::import_queue::{Origin, Link, BlockImportResult, BlockImportError}; +use sp_utils::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; +use std::{ + pin::Pin, + task::{Context, Poll}, +}; /// Wraps around an unbounded channel from the `futures` crate. 
The sender implements `Link` and /// can be used to buffer commands, and the receiver can be used to poll said commands and transfer @@ -70,15 +73,17 @@ impl BufferedLinkSender { impl Clone for BufferedLinkSender { fn clone(&self) -> Self { - BufferedLinkSender { - tx: self.tx.clone(), - } + BufferedLinkSender { tx: self.tx.clone() } } } /// Internal buffered message. enum BlockImportWorkerMsg { - BlocksProcessed(usize, usize, Vec<(Result>, BlockImportError>, B::Hash)>), + BlocksProcessed( + usize, + usize, + Vec<(Result>, BlockImportError>, B::Hash)>, + ), JustificationImported(Origin, B::Hash, NumberFor, bool), RequestJustification(B::Hash, NumberFor), } @@ -88,9 +93,11 @@ impl Link for BufferedLinkSender { &mut self, imported: usize, count: usize, - results: Vec<(Result>, BlockImportError>, B::Hash)> + results: Vec<(Result>, BlockImportError>, B::Hash)>, ) { - let _ = self.tx.unbounded_send(BlockImportWorkerMsg::BlocksProcessed(imported, count, results)); + let _ = self + .tx + .unbounded_send(BlockImportWorkerMsg::BlocksProcessed(imported, count, results)); } fn justification_imported( @@ -98,14 +105,16 @@ impl Link for BufferedLinkSender { who: Origin, hash: &B::Hash, number: NumberFor, - success: bool + success: bool, ) { let msg = BlockImportWorkerMsg::JustificationImported(who, hash.clone(), number, success); let _ = self.tx.unbounded_send(msg); } fn request_justification(&mut self, hash: &B::Hash, number: NumberFor) { - let _ = self.tx.unbounded_send(BlockImportWorkerMsg::RequestJustification(hash.clone(), number)); + let _ = self + .tx + .unbounded_send(BlockImportWorkerMsg::RequestJustification(hash.clone(), number)); } } diff --git a/primitives/consensus/common/src/lib.rs b/primitives/consensus/common/src/lib.rs index 51b2a96e17758..eb524422a6e25 100644 --- a/primitives/consensus/common/src/lib.rs +++ b/primitives/consensus/common/src/lib.rs @@ -23,28 +23,28 @@ // This provides "unused" building blocks to other crates #![allow(dead_code)] - // our 
error-chain could potentially blow up otherwise -#![recursion_limit="128"] +#![recursion_limit = "128"] -#[macro_use] extern crate log; +#[macro_use] +extern crate log; -use std::sync::Arc; -use std::time::Duration; +use std::{sync::Arc, time::Duration}; +use futures::prelude::*; use sp_runtime::{ - generic::BlockId, traits::{Block as BlockT, DigestFor, NumberFor, HashFor}, + generic::BlockId, + traits::{Block as BlockT, DigestFor, HashFor, NumberFor}, }; -use futures::prelude::*; use sp_state_machine::StorageProof; +pub mod block_import; pub mod block_validation; pub mod error; -pub mod block_import; -mod select_chain; -pub mod import_queue; pub mod evaluation; +pub mod import_queue; mod metrics; +mod select_chain; pub use self::error::Error; pub use block_import::{ @@ -52,10 +52,10 @@ pub use block_import::{ ImportResult, ImportedAux, ImportedState, JustificationImport, JustificationSyncLink, StateAction, StorageChanges, }; -pub use select_chain::SelectChain; -pub use sp_state_machine::Backend as StateBackend; pub use import_queue::DefaultImportQueue; +pub use select_chain::SelectChain; pub use sp_inherents::InherentData; +pub use sp_state_machine::Backend as StateBackend; /// Block status. #[derive(Debug, PartialEq, Eq)] @@ -80,7 +80,9 @@ pub trait Environment { type Proposer: Proposer + Send + 'static; /// A future that resolves to the proposer. type CreateProposer: Future> - + Send + Unpin + 'static; + + Send + + Unpin + + 'static; /// Error which can occur upon creation. type Error: From + std::fmt::Debug + 'static; @@ -96,7 +98,8 @@ pub struct Proposal { /// Proof that was recorded while building the block. pub proof: Proof, /// The storage changes while building this block. 
- pub storage_changes: sp_state_machine::StorageChanges, NumberFor>, + pub storage_changes: + sp_state_machine::StorageChanges, NumberFor>, } /// Error that is returned when [`ProofRecording`] requested to record a proof, @@ -179,8 +182,7 @@ pub trait Proposer { /// The transaction type used by the backend. type Transaction: Default + Send + 'static; /// Future that resolves to a committed proposal with an optional proof. - type Proposal: - Future, Self::Error>> + type Proposal: Future, Self::Error>> + Send + Unpin + 'static; @@ -233,11 +235,19 @@ pub trait SyncOracle { pub struct NoNetwork; impl SyncOracle for NoNetwork { - fn is_major_syncing(&mut self) -> bool { false } - fn is_offline(&mut self) -> bool { false } + fn is_major_syncing(&mut self) -> bool { + false + } + fn is_offline(&mut self) -> bool { + false + } } -impl SyncOracle for Arc where T: ?Sized, for<'r> &'r T: SyncOracle { +impl SyncOracle for Arc +where + T: ?Sized, + for<'r> &'r T: SyncOracle, +{ fn is_major_syncing(&mut self) -> bool { <&T>::is_major_syncing(&mut &**self) } @@ -277,13 +287,10 @@ impl, Block: BlockT> CanAuthorWith) -> Result<(), String> { match self.0.runtime_version(at) { Ok(version) => self.0.native_version().can_author_with(&version), - Err(e) => { - Err(format!( - "Failed to get runtime version at `{}` and will disable authoring. Error: {}", - at, - e, - )) - } + Err(e) => Err(format!( + "Failed to get runtime version at `{}` and will disable authoring. Error: {}", + at, e, + )), } } } diff --git a/primitives/consensus/common/src/metrics.rs b/primitives/consensus/common/src/metrics.rs index 29d39436cbefc..c56f68625b6a0 100644 --- a/primitives/consensus/common/src/metrics.rs +++ b/primitives/consensus/common/src/metrics.rs @@ -18,12 +18,13 @@ //! 
Metering tools for consensus use prometheus_endpoint::{ - register, U64, Registry, PrometheusError, Opts, CounterVec, Histogram, HistogramVec, HistogramOpts + register, CounterVec, Histogram, HistogramOpts, HistogramVec, Opts, PrometheusError, Registry, + U64, }; use sp_runtime::traits::{Block as BlockT, NumberFor}; -use crate::import_queue::{BlockImportResult, BlockImportError}; +use crate::import_queue::{BlockImportError, BlockImportResult}; /// Generic Prometheus metrics for common consensus functionality. #[derive(Clone)] @@ -40,36 +41,29 @@ impl Metrics { import_queue_processed: register( CounterVec::new( Opts::new("import_queue_processed_total", "Blocks processed by import queue"), - &["result"] // 'success or failure + &["result"], // 'success or failure )?, registry, )?, block_verification_time: register( HistogramVec::new( - HistogramOpts::new( - "block_verification_time", - "Time taken to verify blocks", - ), + HistogramOpts::new("block_verification_time", "Time taken to verify blocks"), &["result"], )?, registry, )?, block_verification_and_import_time: register( - Histogram::with_opts( - HistogramOpts::new( - "block_verification_and_import_time", - "Time taken to verify and import blocks", - ), - )?, + Histogram::with_opts(HistogramOpts::new( + "block_verification_and_import_time", + "Time taken to verify and import blocks", + ))?, registry, )?, justification_import_time: register( - Histogram::with_opts( - HistogramOpts::new( - "justification_import_time", - "Time taken to import justifications", - ), - )?, + Histogram::with_opts(HistogramOpts::new( + "justification_import_time", + "Time taken to import justifications", + ))?, registry, )?, }) @@ -82,7 +76,7 @@ impl Metrics { let label = match result { Ok(_) => "success", Err(BlockImportError::IncompleteHeader(_)) => "incomplete_header", - Err(BlockImportError::VerificationFailed(_,_)) => "verification_failed", + Err(BlockImportError::VerificationFailed(_, _)) => "verification_failed", 
Err(BlockImportError::BadBlock(_)) => "bad_block", Err(BlockImportError::MissingState) => "missing_state", Err(BlockImportError::UnknownParent) => "unknown_parent", @@ -90,15 +84,13 @@ impl Metrics { Err(BlockImportError::Other(_)) => "failed", }; - self.import_queue_processed.with_label_values( - &[label] - ).inc(); + self.import_queue_processed.with_label_values(&[label]).inc(); } pub fn report_verification(&self, success: bool, time: std::time::Duration) { - self.block_verification_time.with_label_values( - &[if success { "success" } else { "verification_failed" }] - ).observe(time.as_secs_f64()); + self.block_verification_time + .with_label_values(&[if success { "success" } else { "verification_failed" }]) + .observe(time.as_secs_f64()); } pub fn report_verification_and_import(&self, time: std::time::Duration) { diff --git a/primitives/consensus/common/src/select_chain.rs b/primitives/consensus/common/src/select_chain.rs index e99a6756175d2..5408fc86b7bd4 100644 --- a/primitives/consensus/common/src/select_chain.rs +++ b/primitives/consensus/common/src/select_chain.rs @@ -18,7 +18,6 @@ use crate::error::Error; use sp_runtime::traits::{Block as BlockT, NumberFor}; - /// The SelectChain trait defines the strategy upon which the head is chosen /// if multiple forks are present for an opaque definition of "best" in the /// specific chain build. diff --git a/primitives/consensus/pow/src/lib.rs b/primitives/consensus/pow/src/lib.rs index 12d3440ea9d54..ac8bc589c136f 100644 --- a/primitives/consensus/pow/src/lib.rs +++ b/primitives/consensus/pow/src/lib.rs @@ -19,9 +19,9 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::vec::Vec; -use sp_runtime::ConsensusEngineId; use codec::Decode; +use sp_runtime::ConsensusEngineId; +use sp_std::vec::Vec; /// The `ConsensusEngineId` of PoW. 
pub const POW_ENGINE_ID: ConsensusEngineId = [b'p', b'o', b'w', b'_']; diff --git a/primitives/consensus/vrf/src/schnorrkel.rs b/primitives/consensus/vrf/src/schnorrkel.rs index 400bdb2f58088..687e0bd231820 100644 --- a/primitives/consensus/vrf/src/schnorrkel.rs +++ b/primitives/consensus/vrf/src/schnorrkel.rs @@ -17,13 +17,19 @@ //! Schnorrkel-based VRF. -use codec::{Encode, Decode, EncodeLike}; -use sp_std::{convert::TryFrom, prelude::*}; -use sp_core::U512; -use sp_std::ops::{Deref, DerefMut}; +use codec::{Decode, Encode, EncodeLike}; use schnorrkel::errors::MultiSignatureStage; +use sp_core::U512; +use sp_std::{ + convert::TryFrom, + ops::{Deref, DerefMut}, + prelude::*, +}; -pub use schnorrkel::{SignatureError, PublicKey, vrf::{VRF_PROOF_LENGTH, VRF_OUTPUT_LENGTH}}; +pub use schnorrkel::{ + vrf::{VRF_OUTPUT_LENGTH, VRF_PROOF_LENGTH}, + PublicKey, SignatureError, +}; /// The length of the Randomness. pub const RANDOMNESS_LENGTH: usize = VRF_OUTPUT_LENGTH; @@ -34,11 +40,15 @@ pub struct VRFOutput(pub schnorrkel::vrf::VRFOutput); impl Deref for VRFOutput { type Target = schnorrkel::vrf::VRFOutput; - fn deref(&self) -> &Self::Target { &self.0 } + fn deref(&self) -> &Self::Target { + &self.0 + } } impl DerefMut for VRFOutput { - fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } } impl Encode for VRFOutput { @@ -47,7 +57,7 @@ impl Encode for VRFOutput { } } -impl EncodeLike for VRFOutput { } +impl EncodeLike for VRFOutput {} impl Decode for VRFOutput { fn decode(i: &mut R) -> Result { @@ -82,11 +92,15 @@ impl Ord for VRFProof { impl Deref for VRFProof { type Target = schnorrkel::vrf::VRFProof; - fn deref(&self) -> &Self::Target { &self.0 } + fn deref(&self) -> &Self::Target { + &self.0 + } } impl DerefMut for VRFProof { - fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } } impl Encode for VRFProof { @@ -95,7 
+109,7 @@ impl Encode for VRFProof { } } -impl EncodeLike for VRFProof { } +impl EncodeLike for VRFProof {} impl Decode for VRFProof { fn decode(i: &mut R) -> Result { @@ -113,8 +127,8 @@ impl TryFrom<[u8; VRF_PROOF_LENGTH]> for VRFProof { } fn convert_error(e: SignatureError) -> codec::Error { - use SignatureError::*; use MultiSignatureStage::*; + use SignatureError::*; match e { EquationFalse => "Signature error: `EquationFalse`".into(), PointDecompressionError => "Signature error: `PointDecompressionError`".into(), diff --git a/primitives/core/benches/bench.rs b/primitives/core/benches/bench.rs index 77680d53be6c4..44bcd657ba3f0 100644 --- a/primitives/core/benches/bench.rs +++ b/primitives/core/benches/bench.rs @@ -15,22 +15,21 @@ #[macro_use] extern crate criterion; -use criterion::{Criterion, black_box, Bencher, BenchmarkId}; -use sp_core::crypto::Pair as _; -use sp_core::hashing::{twox_128, blake2_128}; +use criterion::{black_box, Bencher, BenchmarkId, Criterion}; +use sp_core::{ + crypto::Pair as _, + hashing::{blake2_128, twox_128}, +}; const MAX_KEY_SIZE: u32 = 32; fn get_key(key_size: u32) -> Vec { - use rand::SeedableRng; - use rand::Rng; + use rand::{Rng, SeedableRng}; let rnd: [u8; 32] = rand::rngs::StdRng::seed_from_u64(12).gen(); let mut rnd = rnd.iter().cycle(); - (0..key_size) - .map(|_| *rnd.next().unwrap()) - .collect() + (0..key_size).map(|_| *rnd.next().unwrap()).collect() } fn bench_blake2_128(b: &mut Bencher, key: &Vec) { @@ -81,27 +80,21 @@ fn bench_ed25519(c: &mut Criterion) { let mut group = c.benchmark_group("ed25519"); for msg_size in vec![32, 1024, 1024 * 1024] { - let msg = (0..msg_size) - .map(|_| rand::random::()) - .collect::>(); + let msg = (0..msg_size).map(|_| rand::random::()).collect::>(); let key = sp_core::ed25519::Pair::generate().0; - group.bench_function( - BenchmarkId::new("signing", format!("{}", msg_size)), - |b| b.iter(|| key.sign(&msg)), - ); + group.bench_function(BenchmarkId::new("signing", format!("{}", 
msg_size)), |b| { + b.iter(|| key.sign(&msg)) + }); } for msg_size in vec![32, 1024, 1024 * 1024] { - let msg = (0..msg_size) - .map(|_| rand::random::()) - .collect::>(); + let msg = (0..msg_size).map(|_| rand::random::()).collect::>(); let key = sp_core::ed25519::Pair::generate().0; let sig = key.sign(&msg); let public = key.public(); - group.bench_function( - BenchmarkId::new("verifying", format!("{}", msg_size)), - |b| b.iter(|| sp_core::ed25519::Pair::verify(&sig, &msg, &public)), - ); + group.bench_function(BenchmarkId::new("verifying", format!("{}", msg_size)), |b| { + b.iter(|| sp_core::ed25519::Pair::verify(&sig, &msg, &public)) + }); } group.finish(); @@ -111,27 +104,21 @@ fn bench_sr25519(c: &mut Criterion) { let mut group = c.benchmark_group("sr25519"); for msg_size in vec![32, 1024, 1024 * 1024] { - let msg = (0..msg_size) - .map(|_| rand::random::()) - .collect::>(); + let msg = (0..msg_size).map(|_| rand::random::()).collect::>(); let key = sp_core::sr25519::Pair::generate().0; - group.bench_function( - BenchmarkId::new("signing", format!("{}", msg_size)), - |b| b.iter(|| key.sign(&msg)), - ); + group.bench_function(BenchmarkId::new("signing", format!("{}", msg_size)), |b| { + b.iter(|| key.sign(&msg)) + }); } for msg_size in vec![32, 1024, 1024 * 1024] { - let msg = (0..msg_size) - .map(|_| rand::random::()) - .collect::>(); + let msg = (0..msg_size).map(|_| rand::random::()).collect::>(); let key = sp_core::sr25519::Pair::generate().0; let sig = key.sign(&msg); let public = key.public(); - group.bench_function( - BenchmarkId::new("verifying", format!("{}", msg_size)), - |b| b.iter(|| sp_core::sr25519::Pair::verify(&sig, &msg, &public)), - ); + group.bench_function(BenchmarkId::new("verifying", format!("{}", msg_size)), |b| { + b.iter(|| sp_core::sr25519::Pair::verify(&sig, &msg, &public)) + }); } group.finish(); @@ -141,27 +128,21 @@ fn bench_ecdsa(c: &mut Criterion) { let mut group = c.benchmark_group("ecdsa"); for msg_size in vec![32, 1024, 1024 
* 1024] { - let msg = (0..msg_size) - .map(|_| rand::random::()) - .collect::>(); + let msg = (0..msg_size).map(|_| rand::random::()).collect::>(); let key = sp_core::ecdsa::Pair::generate().0; - group.bench_function( - BenchmarkId::new("signing", format!("{}", msg_size)), - |b| b.iter(|| key.sign(&msg)), - ); + group.bench_function(BenchmarkId::new("signing", format!("{}", msg_size)), |b| { + b.iter(|| key.sign(&msg)) + }); } for msg_size in vec![32, 1024, 1024 * 1024] { - let msg = (0..msg_size) - .map(|_| rand::random::()) - .collect::>(); + let msg = (0..msg_size).map(|_| rand::random::()).collect::>(); let key = sp_core::ecdsa::Pair::generate().0; let sig = key.sign(&msg); let public = key.public(); - group.bench_function( - BenchmarkId::new("verifying", format!("{}", msg_size)), - |b| b.iter(|| sp_core::ecdsa::Pair::verify(&sig, &msg, &public)), - ); + group.bench_function(BenchmarkId::new("verifying", format!("{}", msg_size)), |b| { + b.iter(|| sp_core::ecdsa::Pair::verify(&sig, &msg, &public)) + }); } group.finish(); diff --git a/primitives/core/src/changes_trie.rs b/primitives/core/src/changes_trie.rs index abf5f79527609..f4ce83dc2c877 100644 --- a/primitives/core/src/changes_trie.rs +++ b/primitives/core/src/changes_trie.rs @@ -17,13 +17,16 @@ //! Substrate changes trie configuration. -#[cfg(any(feature = "std", test))] -use serde::{Serialize, Deserialize}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use num_traits::Zero; +#[cfg(any(feature = "std", test))] +use serde::{Deserialize, Serialize}; /// Substrate changes trie configuration. -#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize, parity_util_mem::MallocSizeOf))] +#[cfg_attr( + any(feature = "std", test), + derive(Serialize, Deserialize, parity_util_mem::MallocSizeOf) +)] #[derive(Debug, Clone, PartialEq, Eq, Default, Encode, Decode, scale_info::TypeInfo)] pub struct ChangesTrieConfiguration { /// Interval (in blocks) at which level1-digests are created. 
Digests are not @@ -62,32 +65,31 @@ impl ChangesTrieConfiguration { } /// Do we need to build digest at given block? - pub fn is_digest_build_required_at_block( - &self, - zero: Number, - block: Number, - ) -> bool - where - Number: From + PartialEq + - ::sp_std::ops::Rem + ::sp_std::ops::Sub + - ::sp_std::cmp::PartialOrd + Zero, + pub fn is_digest_build_required_at_block(&self, zero: Number, block: Number) -> bool + where + Number: From + + PartialEq + + ::sp_std::ops::Rem + + ::sp_std::ops::Sub + + ::sp_std::cmp::PartialOrd + + Zero, { - block > zero - && self.is_digest_build_enabled() - && ((block - zero) % self.digest_interval.into()).is_zero() + block > zero && + self.is_digest_build_enabled() && + ((block - zero) % self.digest_interval.into()).is_zero() } /// Returns max digest interval. One if digests are not created at all. pub fn max_digest_interval(&self) -> u32 { if !self.is_digest_build_enabled() { - return 1; + return 1 } // we'll get >1 loop iteration only when bad configuration parameters are selected let mut current_level = self.digest_levels; loop { if let Some(max_digest_interval) = self.digest_interval.checked_pow(current_level) { - return max_digest_interval; + return max_digest_interval } current_level -= 1; @@ -97,25 +99,28 @@ impl ChangesTrieConfiguration { /// Returns max level digest block number that has been created at block <= passed block number. /// /// Returns None if digests are not created at all. 
- pub fn prev_max_level_digest_block( - &self, - zero: Number, - block: Number, - ) -> Option - where - Number: Clone + From + PartialOrd + PartialEq + - ::sp_std::ops::Add + ::sp_std::ops::Sub + - ::sp_std::ops::Div + ::sp_std::ops::Mul + Zero, + pub fn prev_max_level_digest_block(&self, zero: Number, block: Number) -> Option + where + Number: Clone + + From + + PartialOrd + + PartialEq + + ::sp_std::ops::Add + + ::sp_std::ops::Sub + + ::sp_std::ops::Div + + ::sp_std::ops::Mul + + Zero, { if block <= zero { - return None; + return None } - let (next_begin, next_end) = self.next_max_level_digest_range(zero.clone(), block.clone())?; + let (next_begin, next_end) = + self.next_max_level_digest_range(zero.clone(), block.clone())?; // if 'next' digest includes our block, then it is a also a previous digest if next_end == block { - return Some(block); + return Some(block) } // if previous digest ends at zero block, then there are no previous digest @@ -136,13 +141,18 @@ impl ChangesTrieConfiguration { zero: Number, mut block: Number, ) -> Option<(Number, Number)> - where - Number: Clone + From + PartialOrd + PartialEq + - ::sp_std::ops::Add + ::sp_std::ops::Sub + - ::sp_std::ops::Div + ::sp_std::ops::Mul, + where + Number: Clone + + From + + PartialOrd + + PartialEq + + ::sp_std::ops::Add + + ::sp_std::ops::Sub + + ::sp_std::ops::Div + + ::sp_std::ops::Mul, { if !self.is_digest_build_enabled() { - return None; + return None } if block <= zero { @@ -152,7 +162,7 @@ impl ChangesTrieConfiguration { let max_digest_interval: Number = self.max_digest_interval().into(); let max_digests_since_zero = (block.clone() - zero.clone()) / max_digest_interval.clone(); if max_digests_since_zero == 0.into() { - return Some((zero.clone() + 1.into(), zero + max_digest_interval)); + return Some((zero.clone() + 1.into(), zero + max_digest_interval)) } let last_max_digest_block = zero + max_digests_since_zero * max_digest_interval.clone(); Some(if block == last_max_digest_block { @@ -169,14 
+179,22 @@ impl ChangesTrieConfiguration { /// digest interval (in blocks) /// step between blocks we're interested in when digest is built /// ) - pub fn digest_level_at_block(&self, zero: Number, block: Number) -> Option<(u32, u32, u32)> - where - Number: Clone + From + PartialEq + - ::sp_std::ops::Rem + ::sp_std::ops::Sub + - ::sp_std::cmp::PartialOrd + Zero, + pub fn digest_level_at_block( + &self, + zero: Number, + block: Number, + ) -> Option<(u32, u32, u32)> + where + Number: Clone + + From + + PartialEq + + ::sp_std::ops::Rem + + ::sp_std::ops::Sub + + ::sp_std::cmp::PartialOrd + + Zero, { if !self.is_digest_build_required_at_block(zero.clone(), block.clone()) { - return None; + return None } let relative_block = block - zero; @@ -185,8 +203,9 @@ impl ChangesTrieConfiguration { let mut digest_step = 1u32; while current_level < self.digest_levels { let new_digest_interval = match digest_interval.checked_mul(self.digest_interval) { - Some(new_digest_interval) if (relative_block.clone() % new_digest_interval.into()).is_zero() - => new_digest_interval, + Some(new_digest_interval) + if (relative_block.clone() % new_digest_interval.into()).is_zero() => + new_digest_interval, _ => break, }; @@ -195,11 +214,7 @@ impl ChangesTrieConfiguration { current_level += 1; } - Some(( - current_level, - digest_interval, - digest_step, - )) + Some((current_level, digest_interval, digest_step)) } } @@ -208,10 +223,7 @@ mod tests { use super::ChangesTrieConfiguration; fn config(interval: u32, levels: u32) -> ChangesTrieConfiguration { - ChangesTrieConfiguration { - digest_interval: interval, - digest_levels: levels, - } + ChangesTrieConfiguration { digest_interval: interval, digest_levels: levels } } #[test] @@ -255,7 +267,10 @@ mod tests { assert_eq!(config(8, 4).digest_level_at_block(zero, zero + 8u64), Some((1, 8, 1))); assert_eq!(config(8, 4).digest_level_at_block(zero, zero + 64u64), Some((2, 64, 8))); assert_eq!(config(8, 4).digest_level_at_block(zero, zero + 512u64), 
Some((3, 512, 64))); - assert_eq!(config(8, 4).digest_level_at_block(zero, zero + 4096u64), Some((4, 4096, 512))); + assert_eq!( + config(8, 4).digest_level_at_block(zero, zero + 4096u64), + Some((4, 4096, 512)) + ); assert_eq!(config(8, 4).digest_level_at_block(zero, zero + 4112u64), Some((1, 8, 1))); } diff --git a/primitives/core/src/crypto.rs b/primitives/core/src/crypto.rs index 44d6a741b1a3e..31b21de32bd2b 100644 --- a/primitives/core/src/crypto.rs +++ b/primitives/core/src/crypto.rs @@ -19,38 +19,36 @@ //! Cryptographic utilities. // end::description[] -use crate::{sr25519, ed25519}; -use sp_std::hash::Hash; -use sp_std::vec::Vec; -use sp_std::str; #[cfg(feature = "std")] -use sp_std::convert::TryInto; -use sp_std::convert::TryFrom; +use crate::hexdisplay::HexDisplay; +use crate::{ed25519, sr25519}; +#[cfg(feature = "std")] +use base58::{FromBase58, ToBase58}; +use codec::{Decode, Encode, MaxEncodedLen}; #[cfg(feature = "std")] use parking_lot::Mutex; #[cfg(feature = "std")] -use rand::{RngCore, rngs::OsRng}; -use codec::{Encode, Decode, MaxEncodedLen}; -use scale_info::TypeInfo; +use rand::{rngs::OsRng, RngCore}; #[cfg(feature = "std")] use regex::Regex; +use scale_info::TypeInfo; +/// Trait for accessing reference to `SecretString`. +pub use secrecy::ExposeSecret; +/// A store for sensitive data. #[cfg(feature = "std")] -use base58::{FromBase58, ToBase58}; +pub use secrecy::SecretString; +use sp_runtime_interface::pass_by::PassByInner; #[cfg(feature = "std")] -use crate::hexdisplay::HexDisplay; +use sp_std::convert::TryInto; #[doc(hidden)] pub use sp_std::ops::Deref; -use sp_runtime_interface::pass_by::PassByInner; +use sp_std::{convert::TryFrom, hash::Hash, str, vec::Vec}; /// Trait to zeroize a memory buffer. pub use zeroize::Zeroize; -/// Trait for accessing reference to `SecretString`. -pub use secrecy::ExposeSecret; -/// A store for sensitive data. 
-#[cfg(feature = "std")] -pub use secrecy::SecretString; /// The root phrase for our publicly known keys. -pub const DEV_PHRASE: &str = "bottom drive obey lake curtain smoke basket hold race lonely fit walk"; +pub const DEV_PHRASE: &str = + "bottom drive obey lake curtain smoke basket hold race lonely fit walk"; /// The address of the associated root phrase for our publicly known keys. pub const DEV_ADDRESS: &str = "5DfhGyQdFobKM8NsWvEeAKk5EQQgYe9AydgJ7rMB6E1EqRzV"; @@ -119,22 +117,28 @@ pub enum DeriveJunction { #[cfg(feature = "full_crypto")] impl DeriveJunction { /// Consume self to return a soft derive junction with the same chain code. - pub fn soften(self) -> Self { DeriveJunction::Soft(self.unwrap_inner()) } + pub fn soften(self) -> Self { + DeriveJunction::Soft(self.unwrap_inner()) + } /// Consume self to return a hard derive junction with the same chain code. - pub fn harden(self) -> Self { DeriveJunction::Hard(self.unwrap_inner()) } + pub fn harden(self) -> Self { + DeriveJunction::Hard(self.unwrap_inner()) + } /// Create a new soft (vanilla) DeriveJunction from a given, encodable, value. /// /// If you need a hard junction, use `hard()`. 
pub fn soft(index: T) -> Self { let mut cc: [u8; JUNCTION_ID_LEN] = Default::default(); - index.using_encoded(|data| if data.len() > JUNCTION_ID_LEN { - let hash_result = blake2_rfc::blake2b::blake2b(JUNCTION_ID_LEN, &[], data); - let hash = hash_result.as_bytes(); - cc.copy_from_slice(hash); - } else { - cc[0..data.len()].copy_from_slice(data); + index.using_encoded(|data| { + if data.len() > JUNCTION_ID_LEN { + let hash_result = blake2_rfc::blake2b::blake2b(JUNCTION_ID_LEN, &[], data); + let hash = hash_result.as_bytes(); + cc.copy_from_slice(hash); + } else { + cc[0..data.len()].copy_from_slice(data); + } }); DeriveJunction::Soft(cc) } @@ -175,11 +179,8 @@ impl DeriveJunction { impl> From for DeriveJunction { fn from(j: T) -> DeriveJunction { let j = j.as_ref(); - let (code, hard) = if let Some(stripped) = j.strip_prefix('/') { - (stripped, true) - } else { - (j, false) - }; + let (code, hard) = + if let Some(stripped) = j.strip_prefix('/') { (stripped, true) } else { (j, false) }; let res = if let Ok(n) = str::parse::(code) { // number @@ -232,12 +233,11 @@ pub trait Ss58Codec: Sized + AsMut<[u8]> + AsRef<[u8]> + Default { /// Some if the string is a properly encoded SS58Check address. #[cfg(feature = "std")] fn from_ss58check(s: &str) -> Result { - Self::from_ss58check_with_version(s) - .and_then(|(r, v)| match v { - v if !v.is_custom() => Ok(r), - v if v == *DEFAULT_VERSION.lock() => Ok(r), - _ => Err(PublicError::UnknownVersion), - }) + Self::from_ss58check_with_version(s).and_then(|(r, v)| match v { + v if !v.is_custom() => Ok(r), + v if v == *DEFAULT_VERSION.lock() => Ok(r), + _ => Err(PublicError::UnknownVersion), + }) } /// Some if the string is a properly encoded SS58Check address. 
@@ -250,7 +250,9 @@ pub trait Ss58Codec: Sized + AsMut<[u8]> + AsRef<[u8]> + Default { let body_len = res.as_mut().len(); let data = s.from_base58().map_err(|_| PublicError::BadBase58)?; - if data.len() < 2 { return Err(PublicError::BadLength); } + if data.len() < 2 { + return Err(PublicError::BadLength) + } let (prefix_len, ident) = match data[0] { 0..=63 => (1, data[0] as u16), 64..=127 => { @@ -262,18 +264,22 @@ pub trait Ss58Codec: Sized + AsMut<[u8]> + AsRef<[u8]> + Default { let lower = (data[0] << 2) | (data[1] >> 6); let upper = data[1] & 0b00111111; (2, (lower as u16) | ((upper as u16) << 8)) - } + }, _ => return Err(PublicError::UnknownVersion), }; - if data.len() != prefix_len + body_len + CHECKSUM_LEN { return Err(PublicError::BadLength) } + if data.len() != prefix_len + body_len + CHECKSUM_LEN { + return Err(PublicError::BadLength) + } let format = ident.try_into().map_err(|_: ()| PublicError::UnknownVersion)?; - if !Self::format_is_allowed(format) { return Err(PublicError::FormatNotAllowed) } + if !Self::format_is_allowed(format) { + return Err(PublicError::FormatNotAllowed) + } let hash = ss58hash(&data[0..body_len + prefix_len]); let checksum = &hash.as_bytes()[0..CHECKSUM_LEN]; if data[body_len + prefix_len..body_len + prefix_len + CHECKSUM_LEN] != *checksum { // Invalid checksum. - return Err(PublicError::InvalidChecksum); + return Err(PublicError::InvalidChecksum) } res.as_mut().copy_from_slice(&data[prefix_len..body_len + prefix_len]); Ok((res, format)) @@ -283,12 +289,11 @@ pub trait Ss58Codec: Sized + AsMut<[u8]> + AsRef<[u8]> + Default { /// a derivation path following. 
#[cfg(feature = "std")] fn from_string(s: &str) -> Result { - Self::from_string_with_version(s) - .and_then(|(r, v)| match v { - v if !v.is_custom() => Ok(r), - v if v == *DEFAULT_VERSION.lock() => Ok(r), - _ => Err(PublicError::UnknownVersion), - }) + Self::from_string_with_version(s).and_then(|(r, v)| match v { + v if !v.is_custom() => Ok(r), + v if v == *DEFAULT_VERSION.lock() => Ok(r), + _ => Err(PublicError::UnknownVersion), + }) } /// Return the ss58-check string for this key. @@ -305,7 +310,7 @@ pub trait Ss58Codec: Sized + AsMut<[u8]> + AsRef<[u8]> + Default { // lower bits of the upper byte in the low pos let second = ((ident >> 8) as u8) | ((ident & 0b0000_0000_0000_0011) as u8) << 6; vec![first | 0b01000000, second] - } + }, _ => unreachable!("masked out the upper two bits; qed"), }; v.extend(self.as_ref()); @@ -316,7 +321,9 @@ pub trait Ss58Codec: Sized + AsMut<[u8]> + AsRef<[u8]> + Default { /// Return the ss58-check string for this key. #[cfg(feature = "std")] - fn to_ss58check(&self) -> String { self.to_ss58check_with_version(*DEFAULT_VERSION.lock()) } + fn to_ss58check(&self) -> String { + self.to_ss58check_with_version(*DEFAULT_VERSION.lock()) + } /// Some if the string is a properly encoded SS58Check address, optionally with /// a derivation path following. @@ -332,7 +339,7 @@ pub trait Derive: Sized { /// /// Will be `None` for public keys if there are any hard junctions in there. #[cfg(feature = "std")] - fn derive>(&self, _path: Iter) -> Option { + fn derive>(&self, _path: Iter) -> Option { None } } @@ -630,9 +637,7 @@ lazy_static::lazy_static! 
{ impl + AsRef<[u8]> + Default + Derive> Ss58Codec for T { fn from_string(s: &str) -> Result { let cap = SS58_REGEX.captures(s).ok_or(PublicError::InvalidFormat)?; - let s = cap.name("ss58") - .map(|r| r.as_str()) - .unwrap_or(DEV_ADDRESS); + let s = cap.name("ss58").map(|r| r.as_str()).unwrap_or(DEV_ADDRESS); let addr = if let Some(stripped) = s.strip_prefix("0x") { let d = hex::decode(stripped).map_err(|_| PublicError::InvalidFormat)?; let mut r = Self::default(); @@ -648,28 +653,23 @@ impl + AsRef<[u8]> + Default + Derive> Ss58Codec for T { if cap["path"].is_empty() { Ok(addr) } else { - let path = JUNCTION_REGEX.captures_iter(&cap["path"]) - .map(|f| DeriveJunction::from(&f[1])); - addr.derive(path) - .ok_or(PublicError::InvalidPath) + let path = + JUNCTION_REGEX.captures_iter(&cap["path"]).map(|f| DeriveJunction::from(&f[1])); + addr.derive(path).ok_or(PublicError::InvalidPath) } } fn from_string_with_version(s: &str) -> Result<(Self, Ss58AddressFormat), PublicError> { let cap = SS58_REGEX.captures(s).ok_or(PublicError::InvalidFormat)?; let (addr, v) = Self::from_ss58check_with_version( - cap.name("ss58") - .map(|r| r.as_str()) - .unwrap_or(DEV_ADDRESS) + cap.name("ss58").map(|r| r.as_str()).unwrap_or(DEV_ADDRESS), )?; if cap["path"].is_empty() { Ok((addr, v)) } else { - let path = JUNCTION_REGEX.captures_iter(&cap["path"]) - .map(|f| DeriveJunction::from(&f[1])); - addr.derive(path) - .ok_or(PublicError::InvalidPath) - .map(|a| (a, v)) + let path = + JUNCTION_REGEX.captures_iter(&cap["path"]).map(|f| DeriveJunction::from(&f[1])); + addr.derive(path).ok_or(PublicError::InvalidPath).map(|a| (a, v)) } } } @@ -695,16 +695,22 @@ pub trait Public: fn from_slice(data: &[u8]) -> Self; /// Return a `Vec` filled with raw data. - fn to_raw_vec(&self) -> Vec { self.as_slice().to_vec() } + fn to_raw_vec(&self) -> Vec { + self.as_slice().to_vec() + } /// Return a slice filled with raw data. 
- fn as_slice(&self) -> &[u8] { self.as_ref() } + fn as_slice(&self) -> &[u8] { + self.as_ref() + } /// Return `CryptoTypePublicPair` from public key. fn to_public_crypto_pair(&self) -> CryptoTypePublicPair; } /// An opaque 32-byte cryptographic identifier. -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode, MaxEncodedLen, TypeInfo)] +#[derive( + Clone, Eq, PartialEq, Ord, PartialOrd, Default, Encode, Decode, MaxEncodedLen, TypeInfo, +)] #[cfg_attr(feature = "std", derive(Hash))] pub struct AccountId32([u8; 32]); @@ -810,14 +816,20 @@ impl sp_std::fmt::Debug for AccountId32 { #[cfg(feature = "std")] impl serde::Serialize for AccountId32 { - fn serialize(&self, serializer: S) -> Result where S: serde::Serializer { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { serializer.serialize_str(&self.to_ss58check()) } } #[cfg(feature = "std")] impl<'de> serde::Deserialize<'de> for AccountId32 { - fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de> { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { Ss58Codec::from_ss58check(&String::deserialize(deserializer)?) .map_err(|e| serde::de::Error::custom(format!("{:?}", e))) } @@ -852,11 +864,13 @@ mod dummy { pub struct Dummy; impl AsRef<[u8]> for Dummy { - fn as_ref(&self) -> &[u8] { &b""[..] } + fn as_ref(&self) -> &[u8] { + &b""[..] 
+ } } impl AsMut<[u8]> for Dummy { - fn as_mut(&mut self) -> &mut[u8] { + fn as_mut(&mut self) -> &mut [u8] { unsafe { #[allow(mutable_transmutes)] sp_std::mem::transmute::<_, &'static mut [u8]>(&b""[..]) @@ -879,14 +893,18 @@ mod dummy { impl Derive for Dummy {} impl Public for Dummy { - fn from_slice(_: &[u8]) -> Self { Self } + fn from_slice(_: &[u8]) -> Self { + Self + } #[cfg(feature = "std")] - fn to_raw_vec(&self) -> Vec { vec![] } - fn as_slice(&self) -> &[u8] { b"" } + fn to_raw_vec(&self) -> Vec { + vec![] + } + fn as_slice(&self) -> &[u8] { + b"" + } fn to_public_crypto_pair(&self) -> CryptoTypePublicPair { - CryptoTypePublicPair( - CryptoTypeId(*b"dumm"), Public::to_raw_vec(self) - ) + CryptoTypePublicPair(CryptoTypeId(*b"dumm"), Public::to_raw_vec(self)) } } @@ -896,23 +914,41 @@ mod dummy { type Signature = Dummy; type DeriveError = (); #[cfg(feature = "std")] - fn generate_with_phrase(_: Option<&str>) -> (Self, String, Self::Seed) { Default::default() } + fn generate_with_phrase(_: Option<&str>) -> (Self, String, Self::Seed) { + Default::default() + } #[cfg(feature = "std")] - fn from_phrase(_: &str, _: Option<&str>) - -> Result<(Self, Self::Seed), SecretStringError> - { + fn from_phrase(_: &str, _: Option<&str>) -> Result<(Self, Self::Seed), SecretStringError> { Ok(Default::default()) } - fn derive< - Iter: Iterator, - >(&self, _: Iter, _: Option) -> Result<(Self, Option), Self::DeriveError> { Ok((Self, None)) } - fn from_seed(_: &Self::Seed) -> Self { Self } - fn from_seed_slice(_: &[u8]) -> Result { Ok(Self) } - fn sign(&self, _: &[u8]) -> Self::Signature { Self } - fn verify>(_: &Self::Signature, _: M, _: &Self::Public) -> bool { true } - fn verify_weak, M: AsRef<[u8]>>(_: &[u8], _: M, _: P) -> bool { true } - fn public(&self) -> Self::Public { Self } - fn to_raw_vec(&self) -> Vec { vec![] } + fn derive>( + &self, + _: Iter, + _: Option, + ) -> Result<(Self, Option), Self::DeriveError> { + Ok((Self, None)) + } + fn from_seed(_: &Self::Seed) -> 
Self { + Self + } + fn from_seed_slice(_: &[u8]) -> Result { + Ok(Self) + } + fn sign(&self, _: &[u8]) -> Self::Signature { + Self + } + fn verify>(_: &Self::Signature, _: M, _: &Self::Public) -> bool { + true + } + fn verify_weak, M: AsRef<[u8]>>(_: &[u8], _: M, _: P) -> bool { + true + } + fn public(&self) -> Self::Public { + Self + } + fn to_raw_vec(&self) -> Vec { + vec![] + } } } @@ -957,10 +993,14 @@ pub trait Pair: CryptoType + Sized + Clone + Send + Sync + 'static { /// Returns the KeyPair from the English BIP39 seed `phrase`, or `None` if it's invalid. #[cfg(feature = "std")] - fn from_phrase(phrase: &str, password: Option<&str>) -> Result<(Self, Self::Seed), SecretStringError>; + fn from_phrase( + phrase: &str, + password: Option<&str>, + ) -> Result<(Self, Self::Seed), SecretStringError>; /// Derive a child key from a series of given junctions. - fn derive>(&self, + fn derive>( + &self, path: Iter, seed: Option, ) -> Result<(Self, Option), Self::DeriveError>; @@ -1019,19 +1059,20 @@ pub trait Pair: CryptoType + Sized + Clone + Send + Sync + 'static { /// /// `None` is returned if no matches are found. 
#[cfg(feature = "std")] - fn from_string_with_seed(s: &str, password_override: Option<&str>) - -> Result<(Self, Option), SecretStringError> - { + fn from_string_with_seed( + s: &str, + password_override: Option<&str>, + ) -> Result<(Self, Option), SecretStringError> { let cap = SECRET_PHRASE_REGEX.captures(s).ok_or(SecretStringError::InvalidFormat)?; - let path = JUNCTION_REGEX.captures_iter(&cap["path"]) - .map(|f| DeriveJunction::from(&f[1])); + let path = JUNCTION_REGEX.captures_iter(&cap["path"]).map(|f| DeriveJunction::from(&f[1])); let phrase = cap.name("phrase").map(|r| r.as_str()).unwrap_or(DEV_PHRASE); let password = password_override.or_else(|| cap.name("password").map(|m| m.as_str())); let (root, seed) = if let Some(stripped) = phrase.strip_prefix("0x") { - hex::decode(stripped).ok() + hex::decode(stripped) + .ok() .and_then(|seed_vec| { let mut seed = Self::Seed::default(); if seed.as_ref().len() == seed_vec.len() { @@ -1043,8 +1084,7 @@ pub trait Pair: CryptoType + Sized + Clone + Send + Sync + 'static { }) .ok_or(SecretStringError::InvalidSeed)? } else { - Self::from_phrase(phrase, password) - .map_err(|_| SecretStringError::InvalidPhrase)? + Self::from_phrase(phrase, password).map_err(|_| SecretStringError::InvalidPhrase)? }; root.derive(path, Some(seed)).map_err(|_| SecretStringError::InvalidPath) } @@ -1075,19 +1115,25 @@ pub trait Wraps: Sized { type Inner: IsWrappedBy; } -impl IsWrappedBy for T where +impl IsWrappedBy for T +where Outer: AsRef + AsMut + From, T: From, { /// Get a reference to the inner from the outer. - fn from_ref(outer: &Outer) -> &Self { outer.as_ref() } + fn from_ref(outer: &Outer) -> &Self { + outer.as_ref() + } /// Get a mutable reference to the inner from the outer. 
- fn from_mut(outer: &mut Outer) -> &mut Self { outer.as_mut() } + fn from_mut(outer: &mut Outer) -> &mut Self { + outer.as_mut() + } } -impl UncheckedFrom for Outer where - Outer: Wraps, +impl UncheckedFrom for Outer +where + Outer: Wraps, Inner: IsWrappedBy + UncheckedFrom, { fn unchecked_from(t: T) -> Self { @@ -1111,8 +1157,19 @@ pub trait CryptoType { /// Values whose first character is `_` are reserved for private use and won't conflict with any /// public modules. #[derive( - Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Encode, Decode, PassByInner, - crate::RuntimeDebug, TypeInfo + Copy, + Clone, + Default, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Encode, + Decode, + PassByInner, + crate::RuntimeDebug, + TypeInfo, )] #[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize))] pub struct KeyTypeId(pub [u8; 4]); @@ -1135,7 +1192,7 @@ impl<'a> TryFrom<&'a str> for KeyTypeId { fn try_from(x: &'a str) -> Result { let b = x.as_bytes(); if b.len() != 4 { - return Err(()); + return Err(()) } let mut res = KeyTypeId::default(); res.0.copy_from_slice(&b[0..4]); @@ -1160,7 +1217,7 @@ impl sp_std::fmt::Display for CryptoTypePublicPair { Ok(id) => id.to_string(), Err(_) => { format!("{:#?}", self.0) - } + }, }; write!(f, "{}-{}", id, HexDisplay::from(&self.1)) } @@ -1196,16 +1253,16 @@ pub mod key_types { #[cfg(test)] mod tests { + use super::*; use crate::DeriveJunction; use hex_literal::hex; - use super::*; #[derive(Clone, Eq, PartialEq, Debug)] enum TestPair { Generated, GeneratedWithPhrase, - GeneratedFromPhrase{phrase: String, password: Option}, - Standard{phrase: String, password: Option, path: Vec}, + GeneratedFromPhrase { phrase: String, password: Option }, + Standard { phrase: String, password: Option, path: Vec }, Seed(Vec), } impl Default for TestPair { @@ -1251,9 +1308,7 @@ mod tests { vec![] } fn to_public_crypto_pair(&self) -> CryptoTypePublicPair { - CryptoTypePublicPair( - CryptoTypeId(*b"dumm"), self.to_raw_vec(), - 
) + CryptoTypePublicPair(CryptoTypeId(*b"dumm"), self.to_raw_vec()) } } impl Pair for TestPair { @@ -1262,41 +1317,68 @@ mod tests { type Signature = [u8; 0]; type DeriveError = (); - fn generate() -> (Self, ::Seed) { (TestPair::Generated, [0u8; 8]) } + fn generate() -> (Self, ::Seed) { + (TestPair::Generated, [0u8; 8]) + } fn generate_with_phrase(_password: Option<&str>) -> (Self, String, ::Seed) { (TestPair::GeneratedWithPhrase, "".into(), [0u8; 8]) } - fn from_phrase(phrase: &str, password: Option<&str>) - -> Result<(Self, ::Seed), SecretStringError> - { - Ok((TestPair::GeneratedFromPhrase { - phrase: phrase.to_owned(), - password: password.map(Into::into) - }, [0u8; 8])) + fn from_phrase( + phrase: &str, + password: Option<&str>, + ) -> Result<(Self, ::Seed), SecretStringError> { + Ok(( + TestPair::GeneratedFromPhrase { + phrase: phrase.to_owned(), + password: password.map(Into::into), + }, + [0u8; 8], + )) + } + fn derive>( + &self, + path_iter: Iter, + _: Option<[u8; 8]>, + ) -> Result<(Self, Option<[u8; 8]>), Self::DeriveError> { + Ok(( + match self.clone() { + TestPair::Standard { phrase, password, path } => TestPair::Standard { + phrase, + password, + path: path.into_iter().chain(path_iter).collect(), + }, + TestPair::GeneratedFromPhrase { phrase, password } => + TestPair::Standard { phrase, password, path: path_iter.collect() }, + x => + if path_iter.count() == 0 { + x + } else { + return Err(()) + }, + }, + None, + )) } - fn derive>(&self, path_iter: Iter, _: Option<[u8; 8]>) - -> Result<(Self, Option<[u8; 8]>), Self::DeriveError> - { - Ok((match self.clone() { - TestPair::Standard {phrase, password, path} => - TestPair::Standard { phrase, password, path: path.into_iter().chain(path_iter).collect() }, - TestPair::GeneratedFromPhrase {phrase, password} => - TestPair::Standard { phrase, password, path: path_iter.collect() }, - x => if path_iter.count() == 0 { x } else { return Err(()) }, - }, None)) + fn from_seed(_seed: &::Seed) -> Self { + 
TestPair::Seed(_seed.as_ref().to_owned()) + } + fn sign(&self, _message: &[u8]) -> Self::Signature { + [] + } + fn verify>(_: &Self::Signature, _: M, _: &Self::Public) -> bool { + true } - fn from_seed(_seed: &::Seed) -> Self { TestPair::Seed(_seed.as_ref().to_owned()) } - fn sign(&self, _message: &[u8]) -> Self::Signature { [] } - fn verify>(_: &Self::Signature, _: M, _: &Self::Public) -> bool { true } fn verify_weak, M: AsRef<[u8]>>( _sig: &[u8], _message: M, - _pubkey: P - ) -> bool { true } - fn public(&self) -> Self::Public { TestPublic } - fn from_seed_slice(seed: &[u8]) - -> Result - { + _pubkey: P, + ) -> bool { + true + } + fn public(&self) -> Self::Public { + TestPublic + } + fn from_seed_slice(seed: &[u8]) -> Result { Ok(TestPair::Seed(seed.to_owned())) } fn to_raw_vec(&self) -> Vec { @@ -1328,43 +1410,83 @@ mod tests { fn interpret_std_secret_string_should_work() { assert_eq!( TestPair::from_string("hello world", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: None, path: vec![]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: None, + path: vec![] + }) ); assert_eq!( TestPair::from_string("hello world/1", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: None, path: vec![DeriveJunction::soft(1)]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: None, + path: vec![DeriveJunction::soft(1)] + }) ); assert_eq!( TestPair::from_string("hello world/DOT", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: None, path: vec![DeriveJunction::soft("DOT")]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: None, + path: vec![DeriveJunction::soft("DOT")] + }) ); assert_eq!( TestPair::from_string("hello world//1", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: None, path: vec![DeriveJunction::hard(1)]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: None, + path: 
vec![DeriveJunction::hard(1)] + }) ); assert_eq!( TestPair::from_string("hello world//DOT", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: None, path: vec![DeriveJunction::hard("DOT")]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: None, + path: vec![DeriveJunction::hard("DOT")] + }) ); assert_eq!( TestPair::from_string("hello world//1/DOT", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: None, path: vec![DeriveJunction::hard(1), DeriveJunction::soft("DOT")]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: None, + path: vec![DeriveJunction::hard(1), DeriveJunction::soft("DOT")] + }) ); assert_eq!( TestPair::from_string("hello world//DOT/1", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: None, path: vec![DeriveJunction::hard("DOT"), DeriveJunction::soft(1)]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: None, + path: vec![DeriveJunction::hard("DOT"), DeriveJunction::soft(1)] + }) ); assert_eq!( TestPair::from_string("hello world///password", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: Some("password".to_owned()), path: vec![]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: Some("password".to_owned()), + path: vec![] + }) ); assert_eq!( TestPair::from_string("hello world//1/DOT///password", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: Some("password".to_owned()), path: vec![DeriveJunction::hard(1), DeriveJunction::soft("DOT")]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: Some("password".to_owned()), + path: vec![DeriveJunction::hard(1), DeriveJunction::soft("DOT")] + }) ); assert_eq!( TestPair::from_string("hello world/1//DOT///password", None), - Ok(TestPair::Standard{phrase: "hello world".to_owned(), password: Some("password".to_owned()), path: vec![DeriveJunction::soft(1), 
DeriveJunction::hard("DOT")]}) + Ok(TestPair::Standard { + phrase: "hello world".to_owned(), + password: Some("password".to_owned()), + path: vec![DeriveJunction::soft(1), DeriveJunction::hard("DOT")] + }) ); } @@ -1372,25 +1494,40 @@ mod tests { fn accountid_32_from_str_works() { use std::str::FromStr; assert!(AccountId32::from_str("5G9VdMwXvzza9pS8qE8ZHJk3CheHW9uucBn9ngW4C1gmmzpv").is_ok()); - assert!(AccountId32::from_str("5c55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d").is_ok()); - assert!(AccountId32::from_str("0x5c55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d").is_ok()); + assert!(AccountId32::from_str( + "5c55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d" + ) + .is_ok()); + assert!(AccountId32::from_str( + "0x5c55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d" + ) + .is_ok()); assert_eq!( AccountId32::from_str("99G9VdMwXvzza9pS8qE8ZHJk3CheHW9uucBn9ngW4C1gmmzpv").unwrap_err(), "invalid ss58 address.", ); assert_eq!( - AccountId32::from_str("gc55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d").unwrap_err(), + AccountId32::from_str( + "gc55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d" + ) + .unwrap_err(), "invalid hex address.", ); assert_eq!( - AccountId32::from_str("0xgc55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d").unwrap_err(), + AccountId32::from_str( + "0xgc55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d" + ) + .unwrap_err(), "invalid hex address.", ); // valid hex but invalid length will be treated as ss58. 
assert_eq!( - AccountId32::from_str("55c55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d").unwrap_err(), + AccountId32::from_str( + "55c55177d67b064bb5d189a3e1ddad9bc6646e02e64d6e308f5acbb1533ac430d" + ) + .unwrap_err(), "invalid ss58 address.", ); } diff --git a/primitives/core/src/ecdsa.rs b/primitives/core/src/ecdsa.rs index 05908e36d37db..4ade3da27ea20 100644 --- a/primitives/core/src/ecdsa.rs +++ b/primitives/core/src/ecdsa.rs @@ -22,26 +22,31 @@ #[cfg(feature = "full_crypto")] use sp_std::vec::Vec; -use sp_std::cmp::Ordering; -use codec::{Encode, Decode, MaxEncodedLen}; +use codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; +use sp_std::cmp::Ordering; -#[cfg(feature = "full_crypto")] -use core::convert::{TryFrom, TryInto}; -#[cfg(feature = "std")] -use substrate_bip39::seed_from_entropy; -#[cfg(feature = "std")] -use bip39::{Mnemonic, Language, MnemonicType}; -#[cfg(feature = "full_crypto")] -use crate::{hashing::blake2_256, crypto::{Pair as TraitPair, DeriveJunction, SecretStringError}}; #[cfg(feature = "std")] use crate::crypto::Ss58Codec; +use crate::crypto::{ + CryptoType, CryptoTypeId, CryptoTypePublicPair, Derive, Public as TraitPublic, UncheckedFrom, +}; +#[cfg(feature = "full_crypto")] +use crate::{ + crypto::{DeriveJunction, Pair as TraitPair, SecretStringError}, + hashing::blake2_256, +}; #[cfg(feature = "std")] -use serde::{de, Serializer, Serialize, Deserializer, Deserialize}; -use crate::crypto::{Public as TraitPublic, CryptoTypePublicPair, UncheckedFrom, CryptoType, Derive, CryptoTypeId}; -use sp_runtime_interface::pass_by::PassByInner; +use bip39::{Language, Mnemonic, MnemonicType}; +#[cfg(feature = "full_crypto")] +use core::convert::{TryFrom, TryInto}; #[cfg(feature = "full_crypto")] use secp256k1::{PublicKey, SecretKey}; +#[cfg(feature = "std")] +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; +use sp_runtime_interface::pass_by::PassByInner; +#[cfg(feature = "std")] +use 
substrate_bip39::seed_from_entropy; /// An identifier used to match public keys against ecdsa keys pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"ecds"); @@ -166,7 +171,6 @@ impl sp_std::convert::TryFrom<&[u8]> for Public { if data.len() == 33 { Ok(Self::from_slice(data)) } else { - Err(()) } } @@ -207,14 +211,20 @@ impl sp_std::fmt::Debug for Public { #[cfg(feature = "std")] impl Serialize for Public { - fn serialize(&self, serializer: S) -> Result where S: Serializer { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { serializer.serialize_str(&self.to_ss58check()) } } #[cfg(feature = "std")] impl<'de> Deserialize<'de> for Public { - fn deserialize(deserializer: D) -> Result where D: Deserializer<'de> { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { Public::from_ss58check(&String::deserialize(deserializer)?) .map_err(|e| de::Error::custom(format!("{:?}", e))) } @@ -247,14 +257,20 @@ impl sp_std::convert::TryFrom<&[u8]> for Signature { #[cfg(feature = "std")] impl Serialize for Signature { - fn serialize(&self, serializer: S) -> Result where S: Serializer { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { serializer.serialize_str(&hex::encode(self)) } } #[cfg(feature = "std")] impl<'de> Deserialize<'de> for Signature { - fn deserialize(deserializer: D) -> Result where D: Deserializer<'de> { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { let signature_hex = hex::decode(&String::deserialize(deserializer)?) 
.map_err(|e| de::Error::custom(format!("{:?}", e)))?; Signature::try_from(signature_hex.as_ref()) @@ -382,7 +398,9 @@ impl From<(secp256k1::Signature, secp256k1::RecoveryId)> for Signature { #[cfg(feature = "full_crypto")] impl<'a> TryFrom<&'a Signature> for (secp256k1::Signature, secp256k1::RecoveryId) { type Error = (); - fn try_from(x: &'a Signature) -> Result<(secp256k1::Signature, secp256k1::RecoveryId), Self::Error> { + fn try_from( + x: &'a Signature, + ) -> Result<(secp256k1::Signature, secp256k1::RecoveryId), Self::Error> { Ok(( secp256k1::Signature::parse_slice(&x.0[0..64]).expect("hardcoded to 64 bytes; qed"), secp256k1::RecoveryId::parse(x.0[64]).map_err(|_| ())?, @@ -431,21 +449,22 @@ impl TraitPair for Pair { let phrase = mnemonic.phrase(); let (pair, seed) = Self::from_phrase(phrase, password) .expect("All phrases generated by Mnemonic are valid; qed"); - ( - pair, - phrase.to_owned(), - seed, - ) + (pair, phrase.to_owned(), seed) } /// Generate key pair from given recovery phrase and password. #[cfg(feature = "std")] - fn from_phrase(phrase: &str, password: Option<&str>) -> Result<(Pair, Seed), SecretStringError> { + fn from_phrase( + phrase: &str, + password: Option<&str>, + ) -> Result<(Pair, Seed), SecretStringError> { let big_seed = seed_from_entropy( Mnemonic::from_phrase(phrase, Language::English) - .map_err(|_| SecretStringError::InvalidPhrase)?.entropy(), + .map_err(|_| SecretStringError::InvalidPhrase)? 
+ .entropy(), password.unwrap_or(""), - ).map_err(|_| SecretStringError::InvalidSeed)?; + ) + .map_err(|_| SecretStringError::InvalidSeed)?; let mut seed = Seed::default(); seed.copy_from_slice(&big_seed[0..32]); Self::from_seed_slice(&big_seed[0..32]).map(|x| (x, seed)) @@ -463,16 +482,17 @@ impl TraitPair for Pair { /// /// You should never need to use this; generate(), generate_with_phrase fn from_seed_slice(seed_slice: &[u8]) -> Result { - let secret = SecretKey::parse_slice(seed_slice) - .map_err(|_| SecretStringError::InvalidSeedLength)?; + let secret = + SecretKey::parse_slice(seed_slice).map_err(|_| SecretStringError::InvalidSeedLength)?; let public = PublicKey::from_secret_key(&secret); - Ok(Pair{ public, secret }) + Ok(Pair { public, secret }) } /// Derive a child key from a series of given junctions. - fn derive>(&self, + fn derive>( + &self, path: Iter, - _seed: Option + _seed: Option, ) -> Result<(Pair, Option), DeriveError> { let mut acc = self.secret.serialize(); for j in path { @@ -498,7 +518,10 @@ impl TraitPair for Pair { /// Verify a signature on a message. Returns true if the signature is good. fn verify>(sig: &Self::Signature, message: M, pubkey: &Self::Public) -> bool { let message = secp256k1::Message::parse(&blake2_256(message.as_ref())); - let sig: (_, _) = match sig.try_into() { Ok(x) => x, _ => return false }; + let sig: (_, _) = match sig.try_into() { + Ok(x) => x, + _ => return false, + }; match secp256k1::recover(&message, &sig.0, &sig.1) { Ok(actual) => pubkey.0[..] == actual.serialize_compressed()[..], _ => false, @@ -511,9 +534,17 @@ impl TraitPair for Pair { /// size. Use it only if you're coming from byte buffers and need the speed. 
fn verify_weak, M: AsRef<[u8]>>(sig: &[u8], message: M, pubkey: P) -> bool { let message = secp256k1::Message::parse(&blake2_256(message.as_ref())); - if sig.len() != 65 { return false } - let ri = match secp256k1::RecoveryId::parse(sig[64]) { Ok(x) => x, _ => return false }; - let sig = match secp256k1::Signature::parse_slice(&sig[0..64]) { Ok(x) => x, _ => return false }; + if sig.len() != 65 { + return false + } + let ri = match secp256k1::RecoveryId::parse(sig[64]) { + Ok(x) => x, + _ => return false, + }; + let sig = match secp256k1::Signature::parse_slice(&sig[0..64]) { + Ok(x) => x, + _ => return false, + }; match secp256k1::recover(&message, &sig, &ri) { Ok(actual) => pubkey.as_ref() == &actual.serialize()[1..], _ => false, @@ -569,16 +600,16 @@ impl Pair { } impl CryptoType for Public { - #[cfg(feature="full_crypto")] + #[cfg(feature = "full_crypto")] type Pair = Pair; } impl CryptoType for Signature { - #[cfg(feature="full_crypto")] + #[cfg(feature = "full_crypto")] type Pair = Pair; } -#[cfg(feature="full_crypto")] +#[cfg(feature = "full_crypto")] impl CryptoType for Pair { type Pair = Pair; } @@ -586,16 +617,20 @@ impl CryptoType for Pair { #[cfg(test)] mod test { use super::*; + use crate::{ + crypto::{set_default_ss58_version, PublicError, DEV_PHRASE}, + keccak_256, + }; use hex_literal::hex; - use crate::{crypto::{DEV_PHRASE, set_default_ss58_version}, keccak_256}; use serde_json; - use crate::crypto::PublicError; #[test] fn default_phrase_should_be_used() { assert_eq!( Pair::from_string("//Alice///password", None).unwrap().public(), - Pair::from_string(&format!("{}//Alice", DEV_PHRASE), Some("password")).unwrap().public(), + Pair::from_string(&format!("{}//Alice", DEV_PHRASE), Some("password")) + .unwrap() + .public(), ); } @@ -614,9 +649,9 @@ mod test { #[test] fn test_vector_should_work() { - let pair = Pair::from_seed( - &hex!("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60") - ); + let pair = Pair::from_seed(&hex!( + 
"9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60" + )); let public = pair.public(); assert_eq!( public, @@ -635,8 +670,9 @@ mod test { fn test_vector_by_string_should_work() { let pair = Pair::from_string( "0x9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60", - None - ).unwrap(); + None, + ) + .unwrap(); let public = pair.public(); assert_eq!( public, @@ -804,7 +840,8 @@ mod test { // `msg` shouldn't be mangled let msg = [0u8; 32]; let sig1 = pair.sign_prehashed(&msg); - let sig2: Signature = secp256k1::sign(&secp256k1::Message::parse(&msg), &pair.secret).into(); + let sig2: Signature = + secp256k1::sign(&secp256k1::Message::parse(&msg), &pair.secret).into(); assert_eq!(sig1, sig2); @@ -816,9 +853,10 @@ mod test { // using pre-hashed `msg` works let msg = keccak_256(b"this should be hashed"); let sig1 = pair.sign_prehashed(&msg); - let sig2: Signature = secp256k1::sign(&secp256k1::Message::parse(&msg), &pair.secret).into(); + let sig2: Signature = + secp256k1::sign(&secp256k1::Message::parse(&msg), &pair.secret).into(); - assert_eq!(sig1, sig2); + assert_eq!(sig1, sig2); } #[test] diff --git a/primitives/core/src/ed25519.rs b/primitives/core/src/ed25519.rs index e0bd8b7cb0282..d786ee9d255ff 100644 --- a/primitives/core/src/ed25519.rs +++ b/primitives/core/src/ed25519.rs @@ -22,27 +22,29 @@ #[cfg(feature = "full_crypto")] use sp_std::vec::Vec; -use crate::{hash::H256, hash::H512}; -use codec::{Encode, Decode, MaxEncodedLen}; +use crate::hash::{H256, H512}; +use codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; +#[cfg(feature = "std")] +use crate::crypto::Ss58Codec; +use crate::crypto::{ + CryptoType, CryptoTypeId, CryptoTypePublicPair, Derive, Public as TraitPublic, UncheckedFrom, +}; +#[cfg(feature = "full_crypto")] +use crate::crypto::{DeriveJunction, Pair as TraitPair, SecretStringError}; +#[cfg(feature = "std")] +use bip39::{Language, Mnemonic, MnemonicType}; #[cfg(feature = "full_crypto")] use 
core::convert::TryFrom; #[cfg(feature = "full_crypto")] use ed25519_dalek::{Signer as _, Verifier as _}; #[cfg(feature = "std")] -use substrate_bip39::seed_from_entropy; -#[cfg(feature = "std")] -use bip39::{Mnemonic, Language, MnemonicType}; -#[cfg(feature = "full_crypto")] -use crate::crypto::{Pair as TraitPair, DeriveJunction, SecretStringError}; -#[cfg(feature = "std")] -use crate::crypto::Ss58Codec; -#[cfg(feature = "std")] -use serde::{de, Serializer, Serialize, Deserializer, Deserialize}; -use crate::crypto::{Public as TraitPublic, CryptoTypePublicPair, UncheckedFrom, CryptoType, Derive, CryptoTypeId}; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use sp_runtime_interface::pass_by::PassByInner; use sp_std::ops::Deref; +#[cfg(feature = "std")] +use substrate_bip39::seed_from_entropy; /// An identifier used to match public keys against ed25519 keys pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"ed25"); @@ -56,8 +58,18 @@ type Seed = [u8; 32]; /// A public key. 
#[cfg_attr(feature = "full_crypto", derive(Hash))] #[derive( - PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, - MaxEncodedLen, TypeInfo, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Encode, + Decode, + Default, + PassByInner, + MaxEncodedLen, + TypeInfo, )] pub struct Public(pub [u8; 32]); @@ -71,7 +83,7 @@ impl Clone for Pair { Pair(ed25519_dalek::Keypair { public: self.0.public, secret: ed25519_dalek::SecretKey::from_bytes(self.0.secret.as_bytes()) - .expect("key is always the correct size; qed") + .expect("key is always the correct size; qed"), }) } } @@ -178,14 +190,20 @@ impl sp_std::fmt::Debug for Public { #[cfg(feature = "std")] impl Serialize for Public { - fn serialize(&self, serializer: S) -> Result where S: Serializer { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { serializer.serialize_str(&self.to_ss58check()) } } #[cfg(feature = "std")] impl<'de> Deserialize<'de> for Public { - fn deserialize(deserializer: D) -> Result where D: Deserializer<'de> { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { Public::from_ss58check(&String::deserialize(deserializer)?) .map_err(|e| de::Error::custom(format!("{:?}", e))) } @@ -211,14 +229,20 @@ impl sp_std::convert::TryFrom<&[u8]> for Signature { #[cfg(feature = "std")] impl Serialize for Signature { - fn serialize(&self, serializer: S) -> Result where S: Serializer { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { serializer.serialize_str(&hex::encode(self)) } } #[cfg(feature = "std")] impl<'de> Deserialize<'de> for Signature { - fn deserialize(deserializer: D) -> Result where D: Deserializer<'de> { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { let signature_hex = hex::decode(&String::deserialize(deserializer)?) 
.map_err(|e| de::Error::custom(format!("{:?}", e)))?; Signature::try_from(signature_hex.as_ref()) @@ -439,21 +463,22 @@ impl TraitPair for Pair { let phrase = mnemonic.phrase(); let (pair, seed) = Self::from_phrase(phrase, password) .expect("All phrases generated by Mnemonic are valid; qed"); - ( - pair, - phrase.to_owned(), - seed, - ) + (pair, phrase.to_owned(), seed) } /// Generate key pair from given recovery phrase and password. #[cfg(feature = "std")] - fn from_phrase(phrase: &str, password: Option<&str>) -> Result<(Pair, Seed), SecretStringError> { + fn from_phrase( + phrase: &str, + password: Option<&str>, + ) -> Result<(Pair, Seed), SecretStringError> { let big_seed = seed_from_entropy( Mnemonic::from_phrase(phrase, Language::English) - .map_err(|_| SecretStringError::InvalidPhrase)?.entropy(), + .map_err(|_| SecretStringError::InvalidPhrase)? + .entropy(), password.unwrap_or(""), - ).map_err(|_| SecretStringError::InvalidSeed)?; + ) + .map_err(|_| SecretStringError::InvalidSeed)?; let mut seed = Seed::default(); seed.copy_from_slice(&big_seed[0..32]); Self::from_seed_slice(&big_seed[0..32]).map(|x| (x, seed)) @@ -478,7 +503,8 @@ impl TraitPair for Pair { } /// Derive a child key from a series of given junctions. 
- fn derive>(&self, + fn derive>( + &self, path: Iter, _seed: Option, ) -> Result<(Pair, Option), DeriveError> { @@ -523,7 +549,7 @@ impl TraitPair for Pair { let sig = match ed25519_dalek::Signature::try_from(sig) { Ok(s) => s, - Err(_) => return false + Err(_) => return false, }; public_key.verify(message.as_ref(), &sig).is_ok() @@ -573,15 +599,17 @@ impl CryptoType for Pair { #[cfg(test)] mod test { use super::*; - use hex_literal::hex; use crate::crypto::DEV_PHRASE; + use hex_literal::hex; use serde_json; #[test] fn default_phrase_should_be_used() { assert_eq!( Pair::from_string("//Alice///password", None).unwrap().public(), - Pair::from_string(&format!("{}//Alice", DEV_PHRASE), Some("password")).unwrap().public(), + Pair::from_string(&format!("{}//Alice", DEV_PHRASE), Some("password")) + .unwrap() + .public(), ); } @@ -600,13 +628,16 @@ mod test { #[test] fn test_vector_should_work() { - let pair = Pair::from_seed( - &hex!("9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60") - ); - let public = pair.public(); - assert_eq!(public, Public::from_raw( - hex!("d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a") + let pair = Pair::from_seed(&hex!( + "9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60" )); + let public = pair.public(); + assert_eq!( + public, + Public::from_raw(hex!( + "d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a" + )) + ); let message = b""; let signature = hex!("e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"); let signature = Signature::from_raw(signature); @@ -618,12 +649,16 @@ mod test { fn test_vector_by_string_should_work() { let pair = Pair::from_string( "0x9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60", - None - ).unwrap(); + None, + ) + .unwrap(); let public = pair.public(); - assert_eq!(public, Public::from_raw( - 
hex!("d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a") - )); + assert_eq!( + public, + Public::from_raw(hex!( + "d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a" + )) + ); let message = b""; let signature = hex!("e5564300c360ac729086e2cc806e828a84877f1eb8e5d974d873e065224901555fb8821590a33bacc61e39701cf9b46bd25bf5f0595bbe24655141438e7a100b"); let signature = Signature::from_raw(signature); @@ -645,9 +680,12 @@ mod test { fn seeded_pair_should_work() { let pair = Pair::from_seed(b"12345678901234567890123456789012"); let public = pair.public(); - assert_eq!(public, Public::from_raw( - hex!("2f8c6129d816cf51c374bc7f08c3e63ed156cf78aefb4a6550d97b87997977ee") - )); + assert_eq!( + public, + Public::from_raw(hex!( + "2f8c6129d816cf51c374bc7f08c3e63ed156cf78aefb4a6550d97b87997977ee" + )) + ); let message = hex!("2f8c6129d816cf51c374bc7f08c3e63ed156cf78aefb4a6550d97b87997977ee00000000000000000200d75a980182b10ab7d54bfed3c964073a0ee172f3daa62325af021a68f707511a4500000000000000"); let signature = pair.sign(&message[..]); println!("Correct signature: {:?}", signature); diff --git a/primitives/core/src/hash.rs b/primitives/core/src/hash.rs index 6ef1827a1ba0c..55a9664c9dad4 100644 --- a/primitives/core/src/hash.rs +++ b/primitives/core/src/hash.rs @@ -55,13 +55,34 @@ mod tests { #[test] fn test_h256() { let tests = vec![ - (Default::default(), "0x0000000000000000000000000000000000000000000000000000000000000000"), - (H256::from_low_u64_be(2), "0x0000000000000000000000000000000000000000000000000000000000000002"), - (H256::from_low_u64_be(15), "0x000000000000000000000000000000000000000000000000000000000000000f"), - (H256::from_low_u64_be(16), "0x0000000000000000000000000000000000000000000000000000000000000010"), - (H256::from_low_u64_be(1_000), "0x00000000000000000000000000000000000000000000000000000000000003e8"), - (H256::from_low_u64_be(100_000), "0x00000000000000000000000000000000000000000000000000000000000186a0"), - 
(H256::from_low_u64_be(u64::MAX), "0x000000000000000000000000000000000000000000000000ffffffffffffffff"), + ( + Default::default(), + "0x0000000000000000000000000000000000000000000000000000000000000000", + ), + ( + H256::from_low_u64_be(2), + "0x0000000000000000000000000000000000000000000000000000000000000002", + ), + ( + H256::from_low_u64_be(15), + "0x000000000000000000000000000000000000000000000000000000000000000f", + ), + ( + H256::from_low_u64_be(16), + "0x0000000000000000000000000000000000000000000000000000000000000010", + ), + ( + H256::from_low_u64_be(1_000), + "0x00000000000000000000000000000000000000000000000000000000000003e8", + ), + ( + H256::from_low_u64_be(100_000), + "0x00000000000000000000000000000000000000000000000000000000000186a0", + ), + ( + H256::from_low_u64_be(u64::MAX), + "0x000000000000000000000000000000000000000000000000ffffffffffffffff", + ), ]; for (number, expected) in tests { @@ -72,9 +93,21 @@ mod tests { #[test] fn test_invalid() { - assert!(ser::from_str::("\"0x000000000000000000000000000000000000000000000000000000000000000\"").unwrap_err().is_data()); - assert!(ser::from_str::("\"0x000000000000000000000000000000000000000000000000000000000000000g\"").unwrap_err().is_data()); - assert!(ser::from_str::("\"0x00000000000000000000000000000000000000000000000000000000000000000\"").unwrap_err().is_data()); + assert!(ser::from_str::( + "\"0x000000000000000000000000000000000000000000000000000000000000000\"" + ) + .unwrap_err() + .is_data()); + assert!(ser::from_str::( + "\"0x000000000000000000000000000000000000000000000000000000000000000g\"" + ) + .unwrap_err() + .is_data()); + assert!(ser::from_str::( + "\"0x00000000000000000000000000000000000000000000000000000000000000000\"" + ) + .unwrap_err() + .is_data()); assert!(ser::from_str::("\"\"").unwrap_err().is_data()); assert!(ser::from_str::("\"0\"").unwrap_err().is_data()); assert!(ser::from_str::("\"10\"").unwrap_err().is_data()); diff --git a/primitives/core/src/hasher.rs 
b/primitives/core/src/hasher.rs index 13a168c70f93c..01680de083762 100644 --- a/primitives/core/src/hasher.rs +++ b/primitives/core/src/hasher.rs @@ -18,9 +18,9 @@ //! Substrate Blake2b Hasher implementation pub mod blake2 { - use hash_db::Hasher; - use hash256_std_hasher::Hash256StdHasher; use crate::hash::H256; + use hash256_std_hasher::Hash256StdHasher; + use hash_db::Hasher; /// Concrete implementation of Hasher using Blake2b 256-bit hashes #[derive(Debug)] @@ -38,9 +38,9 @@ pub mod blake2 { } pub mod keccak { - use hash_db::Hasher; - use hash256_std_hasher::Hash256StdHasher; use crate::hash::H256; + use hash256_std_hasher::Hash256StdHasher; + use hash_db::Hasher; /// Concrete implementation of Hasher using Keccak 256-bit hashes #[derive(Debug)] diff --git a/primitives/core/src/hexdisplay.rs b/primitives/core/src/hexdisplay.rs index e590eec0e5aec..4d91db1567920 100644 --- a/primitives/core/src/hexdisplay.rs +++ b/primitives/core/src/hexdisplay.rs @@ -22,7 +22,9 @@ pub struct HexDisplay<'a>(&'a [u8]); impl<'a> HexDisplay<'a> { /// Create new instance that will display `d` as a hex string when displayed. - pub fn from(d: &'a R) -> Self { HexDisplay(d.as_bytes_ref()) } + pub fn from(d: &'a R) -> Self { + HexDisplay(d.as_bytes_ref()) + } } impl<'a> sp_std::fmt::Display for HexDisplay<'a> { @@ -60,15 +62,21 @@ pub trait AsBytesRef { } impl AsBytesRef for &[u8] { - fn as_bytes_ref(&self) -> &[u8] { self } + fn as_bytes_ref(&self) -> &[u8] { + self + } } impl AsBytesRef for [u8] { - fn as_bytes_ref(&self) -> &[u8] { &self } + fn as_bytes_ref(&self) -> &[u8] { + &self + } } impl AsBytesRef for sp_std::vec::Vec { - fn as_bytes_ref(&self) -> &[u8] { &self } + fn as_bytes_ref(&self) -> &[u8] { + &self + } } impl AsBytesRef for sp_storage::StorageKey { @@ -85,9 +93,11 @@ macro_rules! 
impl_non_endians { )* } } -impl_non_endians!([u8; 1], [u8; 2], [u8; 3], [u8; 4], [u8; 5], [u8; 6], [u8; 7], [u8; 8], - [u8; 10], [u8; 12], [u8; 14], [u8; 16], [u8; 20], [u8; 24], [u8; 28], [u8; 32], [u8; 40], - [u8; 48], [u8; 56], [u8; 64], [u8; 65], [u8; 80], [u8; 96], [u8; 112], [u8; 128]); +impl_non_endians!( + [u8; 1], [u8; 2], [u8; 3], [u8; 4], [u8; 5], [u8; 6], [u8; 7], [u8; 8], [u8; 10], [u8; 12], + [u8; 14], [u8; 16], [u8; 20], [u8; 24], [u8; 28], [u8; 32], [u8; 40], [u8; 48], [u8; 56], + [u8; 64], [u8; 65], [u8; 80], [u8; 96], [u8; 112], [u8; 128] +); /// Format into ASCII + # + hex, suitable for storage key preimages. #[cfg(feature = "std")] @@ -103,7 +113,7 @@ pub fn ascii_format(asciish: &[u8]) -> String { latch = true; } r.push_str(&format!("{:02x}", *c)); - } + }, } } r diff --git a/primitives/core/src/lib.rs b/primitives/core/src/lib.rs index afa97143873ad..40894cc7d9b81 100644 --- a/primitives/core/src/lib.rs +++ b/primitives/core/src/lib.rs @@ -18,7 +18,6 @@ //! Shareable Substrate types. #![warn(missing_docs)] - #![cfg_attr(not(feature = "std"), no_std)] /// Initialize a key-value collection from array. @@ -32,18 +31,17 @@ macro_rules! 
map { ); } -use sp_runtime_interface::pass_by::{PassByEnum, PassByInner}; -use sp_std::prelude::*; -use sp_std::ops::Deref; +#[doc(hidden)] +pub use codec::{Decode, Encode}; +use scale_info::TypeInfo; #[cfg(feature = "std")] -use std::borrow::Cow; +pub use serde; #[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; +use sp_runtime_interface::pass_by::{PassByEnum, PassByInner}; +use sp_std::{ops::Deref, prelude::*}; #[cfg(feature = "std")] -pub use serde; -#[doc(hidden)] -pub use codec::{Encode, Decode}; -use scale_info::TypeInfo; +use std::borrow::Cow; pub use sp_debug_derive::RuntimeDebug; @@ -54,37 +52,39 @@ pub use impl_serde::serialize as bytes; pub mod hashing; #[cfg(feature = "full_crypto")] -pub use hashing::{blake2_128, blake2_256, twox_64, twox_128, twox_256, keccak_256}; -pub mod hexdisplay; +pub use hashing::{blake2_128, blake2_256, keccak_256, twox_128, twox_256, twox_64}; pub mod crypto; +pub mod hexdisplay; pub mod u32_trait; -pub mod ed25519; -pub mod sr25519; +mod changes_trie; pub mod ecdsa; +pub mod ed25519; pub mod hash; #[cfg(feature = "std")] mod hasher; pub mod offchain; pub mod sandbox; -pub mod uint; -mod changes_trie; +pub mod sr25519; +pub mod testing; #[cfg(feature = "std")] pub mod traits; -pub mod testing; +pub mod uint; -pub use self::hash::{H160, H256, H512, convert_hash}; -pub use self::uint::{U256, U512}; +pub use self::{ + hash::{convert_hash, H160, H256, H512}, + uint::{U256, U512}, +}; pub use changes_trie::{ChangesTrieConfiguration, ChangesTrieConfigurationRange}; #[cfg(feature = "full_crypto")] pub use crypto::{DeriveJunction, Pair, Public}; -pub use hash_db::Hasher; #[cfg(feature = "std")] pub use self::hasher::blake2::Blake2Hasher; #[cfg(feature = "std")] pub use self::hasher::keccak::KeccakHasher; +pub use hash_db::Hasher; pub use sp_storage as storage; @@ -118,14 +118,14 @@ impl ExecutionContext { use ExecutionContext::*; match self { - Importing | Syncing | BlockConstruction 
=> - offchain::Capabilities::none(), + Importing | Syncing | BlockConstruction => offchain::Capabilities::none(), // Enable keystore, transaction pool and Offchain DB reads by default for offchain calls. OffchainCall(None) => [ offchain::Capability::Keystore, offchain::Capability::OffchainDbRead, offchain::Capability::TransactionPool, - ][..].into(), + ][..] + .into(), OffchainCall(Some((_, capabilities))) => *capabilities, } } @@ -134,19 +134,25 @@ impl ExecutionContext { /// Hex-serialized shim for `Vec`. #[derive(PartialEq, Eq, Clone, RuntimeDebug)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize, Hash, PartialOrd, Ord))] -pub struct Bytes(#[cfg_attr(feature = "std", serde(with="bytes"))] pub Vec); +pub struct Bytes(#[cfg_attr(feature = "std", serde(with = "bytes"))] pub Vec); impl From> for Bytes { - fn from(s: Vec) -> Self { Bytes(s) } + fn from(s: Vec) -> Self { + Bytes(s) + } } impl From for Bytes { - fn from(s: OpaqueMetadata) -> Self { Bytes(s.0) } + fn from(s: OpaqueMetadata) -> Self { + Bytes(s.0) + } } impl Deref for Bytes { type Target = [u8]; - fn deref(&self) -> &[u8] { &self.0[..] } + fn deref(&self) -> &[u8] { + &self.0[..] + } } impl codec::WrapperTypeEncode for Bytes {} @@ -185,8 +191,17 @@ impl sp_std::ops::Deref for OpaqueMetadata { /// Simple blob to hold a `PeerId` without committing to its format. #[derive( - Default, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, PassByInner, - TypeInfo + Default, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Encode, + Decode, + RuntimeDebug, + PassByInner, + TypeInfo, )] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] pub struct OpaquePeerId(pub Vec); @@ -204,7 +219,7 @@ pub enum NativeOrEncoded { /// The native representation. Native(R), /// The encoded representation. 
- Encoded(Vec) + Encoded(Vec), } #[cfg(feature = "std")] diff --git a/primitives/core/src/offchain/mod.rs b/primitives/core/src/offchain/mod.rs index 3004284173780..2e5f04ac9d1fb 100644 --- a/primitives/core/src/offchain/mod.rs +++ b/primitives/core/src/offchain/mod.rs @@ -17,11 +17,14 @@ //! Offchain workers types -use codec::{Encode, Decode}; -use scale_info::TypeInfo; -use sp_std::{prelude::{Vec, Box}, convert::TryFrom}; use crate::{OpaquePeerId, RuntimeDebug}; -use sp_runtime_interface::pass_by::{PassByCodec, PassByInner, PassByEnum}; +use codec::{Decode, Encode}; +use scale_info::TypeInfo; +use sp_runtime_interface::pass_by::{PassByCodec, PassByEnum, PassByInner}; +use sp_std::{ + convert::TryFrom, + prelude::{Box, Vec}, +}; pub use crate::crypto::KeyTypeId; @@ -31,7 +34,7 @@ pub mod storage; pub mod testing; /// Persistent storage prefix used by the Offchain Worker API when creating a DB key. -pub const STORAGE_PREFIX : &[u8] = b"storage"; +pub const STORAGE_PREFIX: &[u8] = b"storage"; /// Offchain DB persistent (non-fork-aware) storage. pub trait OffchainStorage: Clone + Send + Sync { @@ -94,7 +97,9 @@ impl From for u32 { } /// Opaque type for offchain http requests. 
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, RuntimeDebug, Encode, Decode, PassByInner)] +#[derive( + Clone, Copy, PartialEq, Eq, PartialOrd, Ord, RuntimeDebug, Encode, Decode, PassByInner, +)] #[cfg_attr(feature = "std", derive(Hash))] pub struct HttpRequestId(pub u16); @@ -124,7 +129,7 @@ impl TryFrom for HttpError { e if e == HttpError::DeadlineReached as u8 as u32 => Ok(HttpError::DeadlineReached), e if e == HttpError::IoError as u8 as u32 => Ok(HttpError::IoError), e if e == HttpError::Invalid as u8 as u32 => Ok(HttpError::Invalid), - _ => Err(()) + _ => Err(()), } } } @@ -203,11 +208,15 @@ impl OpaqueMultiaddr { } /// Opaque timestamp type -#[derive(Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Default, RuntimeDebug, PassByInner, Encode, Decode)] +#[derive( + Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Default, RuntimeDebug, PassByInner, Encode, Decode, +)] pub struct Timestamp(u64); /// Duration type -#[derive(Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Default, RuntimeDebug, PassByInner, Encode, Decode)] +#[derive( + Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Default, RuntimeDebug, PassByInner, Encode, Decode, +)] pub struct Duration(u64); impl Duration { @@ -291,11 +300,7 @@ impl Capabilities { /// Those calls should be allowed to sign and submit transactions /// and access offchain workers database (but read only!). pub fn rich_offchain_call() -> Self { - [ - Capability::TransactionPool, - Capability::Keystore, - Capability::OffchainDbRead, - ][..].into() + [Capability::TransactionPool, Capability::Keystore, Capability::OffchainDbRead][..].into() } /// Check if particular capability is enabled. @@ -351,7 +356,7 @@ pub trait Externalities: Send { &mut self, method: &str, uri: &str, - meta: &[u8] + meta: &[u8], ) -> Result; /// Append header to the request. @@ -371,7 +376,7 @@ pub trait Externalities: Send { &mut self, request_id: HttpRequestId, name: &str, - value: &str + value: &str, ) -> Result<(), ()>; /// Write a chunk of request body. 
@@ -393,7 +398,7 @@ pub trait Externalities: Send { &mut self, request_id: HttpRequestId, chunk: &[u8], - deadline: Option + deadline: Option, ) -> Result<(), HttpError>; /// Block and wait for the responses for given requests. @@ -409,7 +414,7 @@ pub trait Externalities: Send { fn http_response_wait( &mut self, ids: &[HttpRequestId], - deadline: Option + deadline: Option, ) -> Vec; /// Read all response headers. @@ -421,10 +426,7 @@ pub trait Externalities: Send { /// /// Returns an empty list if the identifier is unknown/invalid, hasn't /// received a response, or has finished. - fn http_response_headers( - &mut self, - request_id: HttpRequestId - ) -> Vec<(Vec, Vec)>; + fn http_response_headers(&mut self, request_id: HttpRequestId) -> Vec<(Vec, Vec)>; /// Read a chunk of body response to given buffer. /// @@ -449,7 +451,7 @@ pub trait Externalities: Send { &mut self, request_id: HttpRequestId, buffer: &mut [u8], - deadline: Option + deadline: Option, ) -> Result; /// Set the authorized nodes from runtime. 
@@ -467,11 +469,11 @@ pub trait Externalities: Send { impl Externalities for Box { fn is_validator(&self) -> bool { - (& **self).is_validator() + (&**self).is_validator() } fn network_state(&self) -> Result { - (& **self).network_state() + (&**self).network_state() } fn timestamp(&mut self) -> Timestamp { @@ -486,11 +488,21 @@ impl Externalities for Box { (&mut **self).random_seed() } - fn http_request_start(&mut self, method: &str, uri: &str, meta: &[u8]) -> Result { + fn http_request_start( + &mut self, + method: &str, + uri: &str, + meta: &[u8], + ) -> Result { (&mut **self).http_request_start(method, uri, meta) } - fn http_request_add_header(&mut self, request_id: HttpRequestId, name: &str, value: &str) -> Result<(), ()> { + fn http_request_add_header( + &mut self, + request_id: HttpRequestId, + name: &str, + value: &str, + ) -> Result<(), ()> { (&mut **self).http_request_add_header(request_id, name, value) } @@ -498,12 +510,16 @@ impl Externalities for Box { &mut self, request_id: HttpRequestId, chunk: &[u8], - deadline: Option + deadline: Option, ) -> Result<(), HttpError> { (&mut **self).http_request_write_body(request_id, chunk, deadline) } - fn http_response_wait(&mut self, ids: &[HttpRequestId], deadline: Option) -> Vec { + fn http_response_wait( + &mut self, + ids: &[HttpRequestId], + deadline: Option, + ) -> Vec { (&mut **self).http_response_wait(ids, deadline) } @@ -515,7 +531,7 @@ impl Externalities for Box { &mut self, request_id: HttpRequestId, buffer: &mut [u8], - deadline: Option + deadline: Option, ) -> Result { (&mut **self).http_response_read_body(request_id, buffer, deadline) } @@ -534,10 +550,7 @@ pub struct LimitedExternalities { impl LimitedExternalities { /// Create new externalities limited to given `capabilities`. pub fn new(capabilities: Capabilities, externalities: T) -> Self { - Self { - capabilities, - externalities, - } + Self { capabilities, externalities } } /// Check if given capability is allowed. 
@@ -576,12 +589,22 @@ impl Externalities for LimitedExternalities { self.externalities.random_seed() } - fn http_request_start(&mut self, method: &str, uri: &str, meta: &[u8]) -> Result { + fn http_request_start( + &mut self, + method: &str, + uri: &str, + meta: &[u8], + ) -> Result { self.check(Capability::Http, "http_request_start"); self.externalities.http_request_start(method, uri, meta) } - fn http_request_add_header(&mut self, request_id: HttpRequestId, name: &str, value: &str) -> Result<(), ()> { + fn http_request_add_header( + &mut self, + request_id: HttpRequestId, + name: &str, + value: &str, + ) -> Result<(), ()> { self.check(Capability::Http, "http_request_add_header"); self.externalities.http_request_add_header(request_id, name, value) } @@ -590,13 +613,17 @@ impl Externalities for LimitedExternalities { &mut self, request_id: HttpRequestId, chunk: &[u8], - deadline: Option + deadline: Option, ) -> Result<(), HttpError> { self.check(Capability::Http, "http_request_write_body"); self.externalities.http_request_write_body(request_id, chunk, deadline) } - fn http_response_wait(&mut self, ids: &[HttpRequestId], deadline: Option) -> Vec { + fn http_response_wait( + &mut self, + ids: &[HttpRequestId], + deadline: Option, + ) -> Vec { self.check(Capability::Http, "http_response_wait"); self.externalities.http_response_wait(ids, deadline) } @@ -610,7 +637,7 @@ impl Externalities for LimitedExternalities { &mut self, request_id: HttpRequestId, buffer: &mut [u8], - deadline: Option + deadline: Option, ) -> Result { self.check(Capability::Http, "http_response_read_body"); self.externalities.http_response_read_body(request_id, buffer, deadline) @@ -718,7 +745,8 @@ impl DbExternalities for LimitedExternalities { new_value: &[u8], ) -> bool { self.check(Capability::OffchainDbWrite, "local_storage_compare_and_set"); - self.externalities.local_storage_compare_and_set(kind, key, old_value, new_value) + self.externalities + .local_storage_compare_and_set(kind, key, 
old_value, new_value) } fn local_storage_get(&mut self, kind: StorageKind, key: &[u8]) -> Option> { diff --git a/primitives/core/src/offchain/storage.rs b/primitives/core/src/offchain/storage.rs index 4463c58ede5d5..ff72006cffd60 100644 --- a/primitives/core/src/offchain/storage.rs +++ b/primitives/core/src/offchain/storage.rs @@ -17,9 +17,11 @@ //! In-memory implementation of offchain workers database. -use std::collections::hash_map::{HashMap, Entry}; use crate::offchain::OffchainStorage; -use std::iter::Iterator; +use std::{ + collections::hash_map::{Entry, HashMap}, + iter::Iterator, +}; /// In-memory storage for offchain workers. #[derive(Debug, Clone, Default)] @@ -29,12 +31,12 @@ pub struct InMemOffchainStorage { impl InMemOffchainStorage { /// Consume the offchain storage and iterate over all key value pairs. - pub fn into_iter(self) -> impl Iterator,Vec)> { + pub fn into_iter(self) -> impl Iterator, Vec)> { self.storage.into_iter() } /// Iterate over all key value pairs by reference. - pub fn iter(&self) -> impl Iterator,&Vec)> { + pub fn iter(&self) -> impl Iterator, &Vec)> { self.storage.iter() } @@ -71,10 +73,13 @@ impl OffchainStorage for InMemOffchainStorage { let key = prefix.iter().chain(key).cloned().collect(); match self.storage.entry(key) { - Entry::Vacant(entry) => if old_value.is_none() { - entry.insert(new_value.to_vec()); - true - } else { false }, + Entry::Vacant(entry) => + if old_value.is_none() { + entry.insert(new_value.to_vec()); + true + } else { + false + }, Entry::Occupied(ref mut entry) if Some(entry.get().as_slice()) == old_value => { entry.insert(new_value.to_vec()); true diff --git a/primitives/core/src/offchain/testing.rs b/primitives/core/src/offchain/testing.rs index 76c81d4b9bc6c..ce88ece07da1d 100644 --- a/primitives/core/src/offchain/testing.rs +++ b/primitives/core/src/offchain/testing.rs @@ -20,24 +20,18 @@ //! Namely all ExecutionExtensions that allow mocking //! the extra APIs. 
+use crate::{ + offchain::{ + self, storage::InMemOffchainStorage, HttpError, HttpRequestId as RequestId, + HttpRequestStatus as RequestStatus, OffchainOverlayedChange, OffchainStorage, + OpaqueNetworkState, StorageKind, Timestamp, TransactionPool, + }, + OpaquePeerId, +}; use std::{ collections::{BTreeMap, VecDeque}, sync::Arc, }; -use crate::OpaquePeerId; -use crate::offchain::{ - self, - OffchainOverlayedChange, - storage::InMemOffchainStorage, - HttpError, - HttpRequestId as RequestId, - HttpRequestStatus as RequestStatus, - Timestamp, - StorageKind, - OpaqueNetworkState, - TransactionPool, - OffchainStorage, -}; use parking_lot::RwLock; @@ -75,9 +69,7 @@ impl TestPersistentOffchainDB { /// Create a new and empty offchain storage db for persistent items pub fn new() -> Self { - Self { - persistent: Arc::new(RwLock::new(InMemOffchainStorage::default())) - } + Self { persistent: Arc::new(RwLock::new(InMemOffchainStorage::default())) } } /// Apply a set of off-chain changes directly to the test backend @@ -88,7 +80,8 @@ impl TestPersistentOffchainDB { let mut me = self.persistent.write(); for ((_prefix, key), value_operation) in changes { match value_operation { - OffchainOverlayedChange::SetValue(val) => me.set(Self::PREFIX, key.as_slice(), val.as_slice()), + OffchainOverlayedChange::SetValue(val) => + me.set(Self::PREFIX, key.as_slice(), val.as_slice()), OffchainOverlayedChange::Remove => me.remove(Self::PREFIX, key.as_slice()), } } @@ -124,7 +117,6 @@ impl OffchainStorage for TestPersistentOffchainDB { } } - /// Internal state of the externalities. /// /// This can be used in tests to respond or assert stuff about interactions. 
@@ -151,20 +143,17 @@ impl OffchainState { id: u16, expected: PendingRequest, response: impl Into>, - response_headers: impl IntoIterator, + response_headers: impl IntoIterator, ) { match self.requests.get_mut(&RequestId(id)) { None => { panic!("Missing pending request: {:?}.\n\nAll: {:?}", id, self.requests); - } + }, Some(req) => { - assert_eq!( - *req, - expected, - ); + assert_eq!(*req, expected,); req.response = Some(response.into()); req.response_headers = response_headers.into_iter().collect(); - } + }, } } @@ -213,7 +202,9 @@ impl TestOffchainExt { } /// Create new `TestOffchainExt` and a reference to the internal state. - pub fn with_offchain_db(offchain_db: TestPersistentOffchainDB) -> (Self, Arc>) { + pub fn with_offchain_db( + offchain_db: TestPersistentOffchainDB, + ) -> (Self, Arc>) { let (ext, state) = Self::new(); ext.0.write().persistent_storage = offchain_db; (ext, state) @@ -226,10 +217,7 @@ impl offchain::Externalities for TestOffchainExt { } fn network_state(&self) -> Result { - Ok(OpaqueNetworkState { - peer_id: Default::default(), - external_addresses: vec![], - }) + Ok(OpaqueNetworkState { peer_id: Default::default(), external_addresses: vec![] }) } fn timestamp(&mut self) -> Timestamp { @@ -244,15 +232,23 @@ impl offchain::Externalities for TestOffchainExt { self.0.read().seed } - fn http_request_start(&mut self, method: &str, uri: &str, meta: &[u8]) -> Result { + fn http_request_start( + &mut self, + method: &str, + uri: &str, + meta: &[u8], + ) -> Result { let mut state = self.0.write(); let id = RequestId(state.requests.len() as u16); - state.requests.insert(id, PendingRequest { - method: method.into(), - uri: uri.into(), - meta: meta.into(), - ..Default::default() - }); + state.requests.insert( + id, + PendingRequest { + method: method.into(), + uri: uri.into(), + meta: meta.into(), + ..Default::default() + }, + ); Ok(id) } @@ -275,7 +271,7 @@ impl offchain::Externalities for TestOffchainExt { &mut self, request_id: RequestId, chunk: 
&[u8], - _deadline: Option + _deadline: Option, ) -> Result<(), HttpError> { let mut state = self.0.write(); @@ -302,12 +298,14 @@ impl offchain::Externalities for TestOffchainExt { ) -> Vec { let state = self.0.read(); - ids.iter().map(|id| match state.requests.get(id) { - Some(req) if req.response.is_none() => - panic!("No `response` provided for request with id: {:?}", id), - None => RequestStatus::Invalid, - _ => RequestStatus::Finished(200), - }).collect() + ids.iter() + .map(|id| match state.requests.get(id) { + Some(req) if req.response.is_none() => + panic!("No `response` provided for request with id: {:?}", id), + None => RequestStatus::Invalid, + _ => RequestStatus::Finished(200), + }) + .collect() } fn http_response_headers(&mut self, request_id: RequestId) -> Vec<(Vec, Vec)> { @@ -327,11 +325,12 @@ impl offchain::Externalities for TestOffchainExt { &mut self, request_id: RequestId, buffer: &mut [u8], - _deadline: Option + _deadline: Option, ) -> Result { let mut state = self.0.write(); if let Some(req) = state.requests.get_mut(&request_id) { - let response = req.response + let response = req + .response .as_mut() .unwrap_or_else(|| panic!("No response provided for request: {:?}", request_id)); @@ -377,14 +376,14 @@ impl offchain::DbExternalities for TestOffchainExt { kind: StorageKind, key: &[u8], old_value: Option<&[u8]>, - new_value: &[u8] + new_value: &[u8], ) -> bool { let mut state = self.0.write(); match kind { - StorageKind::LOCAL => state.local_storage - .compare_and_set(b"", key, old_value, new_value), - StorageKind::PERSISTENT => state.persistent_storage - .compare_and_set(b"", key, old_value, new_value), + StorageKind::LOCAL => + state.local_storage.compare_and_set(b"", key, old_value, new_value), + StorageKind::PERSISTENT => + state.persistent_storage.compare_and_set(b"", key, old_value, new_value), } } diff --git a/primitives/core/src/sandbox.rs b/primitives/core/src/sandbox.rs index a15a7af418313..acc3fda5e9b17 100644 --- 
a/primitives/core/src/sandbox.rs +++ b/primitives/core/src/sandbox.rs @@ -17,17 +17,15 @@ //! Definition of a sandbox environment. -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use sp_std::vec::Vec; /// Error error that can be returned from host function. -#[derive(Encode, Decode)] -#[derive(crate::RuntimeDebug)] +#[derive(Encode, Decode, crate::RuntimeDebug)] pub struct HostError; /// Describes an entity to define or import into the environment. -#[derive(Clone, PartialEq, Eq, Encode, Decode)] -#[derive(crate::RuntimeDebug)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, crate::RuntimeDebug)] pub enum ExternEntity { /// Function that is specified by an index in a default table of /// a module that creates the sandbox. @@ -44,8 +42,7 @@ pub enum ExternEntity { /// /// Each entry has a two-level name and description of an entity /// being defined. -#[derive(Clone, PartialEq, Eq, Encode, Decode)] -#[derive(crate::RuntimeDebug)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, crate::RuntimeDebug)] pub struct Entry { /// Module name of which corresponding entity being defined. pub module_name: Vec, @@ -56,8 +53,7 @@ pub struct Entry { } /// Definition of runtime that could be used by sandboxed code. -#[derive(Clone, PartialEq, Eq, Encode, Decode)] -#[derive(crate::RuntimeDebug)] +#[derive(Clone, PartialEq, Eq, Encode, Decode, crate::RuntimeDebug)] pub struct EnvironmentDefinition { /// Vector of all entries in the environment definition. 
pub entries: Vec, @@ -91,8 +87,8 @@ pub const ERR_EXECUTION: u32 = -3i32 as u32; #[cfg(test)] mod tests { use super::*; - use std::fmt; use codec::Codec; + use std::fmt; fn roundtrip(s: S) { let encoded = s.encode(); @@ -101,28 +97,22 @@ mod tests { #[test] fn env_def_roundtrip() { - roundtrip(EnvironmentDefinition { - entries: vec![], - }); + roundtrip(EnvironmentDefinition { entries: vec![] }); roundtrip(EnvironmentDefinition { - entries: vec![ - Entry { - module_name: b"kernel"[..].into(), - field_name: b"memory"[..].into(), - entity: ExternEntity::Memory(1337), - }, - ], + entries: vec![Entry { + module_name: b"kernel"[..].into(), + field_name: b"memory"[..].into(), + entity: ExternEntity::Memory(1337), + }], }); roundtrip(EnvironmentDefinition { - entries: vec![ - Entry { - module_name: b"env"[..].into(), - field_name: b"abort"[..].into(), - entity: ExternEntity::Function(228), - }, - ], + entries: vec![Entry { + module_name: b"env"[..].into(), + field_name: b"abort"[..].into(), + entity: ExternEntity::Function(228), + }], }); } } diff --git a/primitives/core/src/sr25519.rs b/primitives/core/src/sr25519.rs index ce8cd161bb9eb..b90bad948e755 100644 --- a/primitives/core/src/sr25519.rs +++ b/primitives/core/src/sr25519.rs @@ -21,35 +21,39 @@ //! Note: `CHAIN_CODE_LENGTH` must be equal to `crate::crypto::JUNCTION_ID_LEN` //! for this to work. 
// end::description[] +#[cfg(feature = "std")] +use crate::crypto::Ss58Codec; #[cfg(feature = "full_crypto")] -use sp_std::vec::Vec; +use crate::crypto::{DeriveJunction, Infallible, Pair as TraitPair, SecretStringError}; +#[cfg(feature = "std")] +use bip39::{Language, Mnemonic, MnemonicType}; #[cfg(feature = "full_crypto")] -use schnorrkel::{signing_context, ExpansionMode, Keypair, SecretKey, MiniSecretKey, PublicKey, - derive::{Derivation, ChainCode, CHAIN_CODE_LENGTH} +use schnorrkel::{ + derive::{ChainCode, Derivation, CHAIN_CODE_LENGTH}, + signing_context, ExpansionMode, Keypair, MiniSecretKey, PublicKey, SecretKey, }; +#[cfg(feature = "full_crypto")] +use sp_std::vec::Vec; #[cfg(feature = "std")] use std::convert::TryFrom; #[cfg(feature = "std")] use substrate_bip39::mini_secret_from_entropy; -#[cfg(feature = "std")] -use bip39::{Mnemonic, Language, MnemonicType}; -#[cfg(feature = "full_crypto")] -use crate::crypto::{ - Pair as TraitPair, DeriveJunction, Infallible, SecretStringError -}; -#[cfg(feature = "std")] -use crate::crypto::Ss58Codec; -use crate::crypto::{Public as TraitPublic, CryptoTypePublicPair, UncheckedFrom, CryptoType, Derive, CryptoTypeId}; -use crate::hash::{H256, H512}; -use codec::{Encode, Decode, MaxEncodedLen}; +use crate::{ + crypto::{ + CryptoType, CryptoTypeId, CryptoTypePublicPair, Derive, Public as TraitPublic, + UncheckedFrom, + }, + hash::{H256, H512}, +}; +use codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; use sp_std::ops::Deref; -#[cfg(feature = "std")] -use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; #[cfg(feature = "full_crypto")] use schnorrkel::keys::{MINI_SECRET_KEY_LENGTH, SECRET_KEY_LENGTH}; +#[cfg(feature = "std")] +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use sp_runtime_interface::pass_by::PassByInner; // signing context @@ -62,8 +66,18 @@ pub const CRYPTO_ID: CryptoTypeId = CryptoTypeId(*b"sr25"); /// An Schnorrkel/Ristretto x25519 ("sr25519") public key. 
#[cfg_attr(feature = "full_crypto", derive(Hash))] #[derive( - PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Encode, Decode, Default, PassByInner, - MaxEncodedLen, TypeInfo, + PartialEq, + Eq, + PartialOrd, + Ord, + Clone, + Copy, + Encode, + Decode, + Default, + PassByInner, + MaxEncodedLen, + TypeInfo, )] pub struct Public(pub [u8; 32]); @@ -77,7 +91,7 @@ impl Clone for Pair { Pair(schnorrkel::Keypair { public: self.0.public, secret: schnorrkel::SecretKey::from_bytes(&self.0.secret.to_bytes()[..]) - .expect("key is always the correct size; qed") + .expect("key is always the correct size; qed"), }) } } @@ -177,14 +191,20 @@ impl sp_std::fmt::Debug for Public { #[cfg(feature = "std")] impl Serialize for Public { - fn serialize(&self, serializer: S) -> Result where S: Serializer { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { serializer.serialize_str(&self.to_ss58check()) } } #[cfg(feature = "std")] impl<'de> Deserialize<'de> for Public { - fn deserialize(deserializer: D) -> Result where D: Deserializer<'de> { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { Public::from_ss58check(&String::deserialize(deserializer)?) .map_err(|e| de::Error::custom(format!("{:?}", e))) } @@ -212,14 +232,20 @@ impl sp_std::convert::TryFrom<&[u8]> for Signature { #[cfg(feature = "std")] impl Serialize for Signature { - fn serialize(&self, serializer: S) -> Result where S: Serializer { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { serializer.serialize_str(&hex::encode(self)) } } #[cfg(feature = "std")] impl<'de> Deserialize<'de> for Signature { - fn deserialize(deserializer: D) -> Result where D: Deserializer<'de> { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { let signature_hex = hex::decode(&String::deserialize(deserializer)?) 
.map_err(|e| de::Error::custom(format!("{:?}", e)))?; Signature::try_from(signature_hex.as_ref()) @@ -351,7 +377,7 @@ impl Derive for Public { /// /// `None` if there are any hard junctions in there. #[cfg(feature = "std")] - fn derive>(&self, path: Iter) -> Option { + fn derive>(&self, path: Iter) -> Option { let mut acc = PublicKey::from_bytes(self.as_ref()).ok()?; for j in path { match j { @@ -472,8 +498,7 @@ impl TraitPair for Pair { /// /// A MiniSecretKey is literally what Ed25519 calls a SecretKey, which is just 32 random bytes. fn from_seed(seed: &Seed) -> Pair { - Self::from_seed_slice(&seed[..]) - .expect("32 bytes can always build a key; qed") + Self::from_seed_slice(&seed[..]).expect("32 bytes can always build a key; qed") } /// Get the public key. @@ -489,21 +514,17 @@ impl TraitPair for Pair { /// You should never need to use this; generate(), generate_with_phrase(), from_phrase() fn from_seed_slice(seed: &[u8]) -> Result { match seed.len() { - MINI_SECRET_KEY_LENGTH => { - Ok(Pair( - MiniSecretKey::from_bytes(seed) - .map_err(|_| SecretStringError::InvalidSeed)? - .expand_to_keypair(ExpansionMode::Ed25519) - )) - } - SECRET_KEY_LENGTH => { - Ok(Pair( - SecretKey::from_bytes(seed) - .map_err(|_| SecretStringError::InvalidSeed)? - .to_keypair() - )) - } - _ => Err(SecretStringError::InvalidSeedLength) + MINI_SECRET_KEY_LENGTH => Ok(Pair( + MiniSecretKey::from_bytes(seed) + .map_err(|_| SecretStringError::InvalidSeed)? + .expand_to_keypair(ExpansionMode::Ed25519), + )), + SECRET_KEY_LENGTH => Ok(Pair( + SecretKey::from_bytes(seed) + .map_err(|_| SecretStringError::InvalidSeed)? 
+ .to_keypair(), + )), + _ => Err(SecretStringError::InvalidSeedLength), } } #[cfg(feature = "std")] @@ -512,20 +533,20 @@ impl TraitPair for Pair { let phrase = mnemonic.phrase(); let (pair, seed) = Self::from_phrase(phrase, password) .expect("All phrases generated by Mnemonic are valid; qed"); - ( - pair, - phrase.to_owned(), - seed, - ) + (pair, phrase.to_owned(), seed) } #[cfg(feature = "std")] - fn from_phrase(phrase: &str, password: Option<&str>) -> Result<(Pair, Seed), SecretStringError> { + fn from_phrase( + phrase: &str, + password: Option<&str>, + ) -> Result<(Pair, Seed), SecretStringError> { Mnemonic::from_phrase(phrase, Language::English) .map_err(|_| SecretStringError::InvalidPhrase) .map(|m| Self::from_entropy(m.entropy(), password)) } - fn derive>(&self, + fn derive>( + &self, path: Iter, seed: Option, ) -> Result<(Pair, Option), Self::DeriveError> { @@ -533,17 +554,22 @@ impl TraitPair for Pair { if let Ok(msk) = MiniSecretKey::from_bytes(&s) { if msk.expand(ExpansionMode::Ed25519) == self.0.secret { Some(msk) - } else { None } - } else { None } - } else { None }; + } else { + None + } + } else { + None + } + } else { + None + }; let init = self.0.secret.clone(); let (result, seed) = path.fold((init, seed), |(acc, acc_seed), j| match (j, acc_seed) { - (DeriveJunction::Soft(cc), _) => - (acc.derived_key_simple(ChainCode(cc), &[]).0, None), + (DeriveJunction::Soft(cc), _) => (acc.derived_key_simple(ChainCode(cc), &[]).0, None), (DeriveJunction::Hard(cc), maybe_seed) => { let seed = derive_hard_junction(&acc, &cc); (seed.expand(ExpansionMode::Ed25519), maybe_seed.map(|_| seed)) - } + }, }); Ok((Self(result.into()), seed.map(|s| MiniSecretKey::to_bytes(&s)))) } @@ -597,9 +623,9 @@ impl Pair { // Match both schnorrkel 0.1.1 and 0.8.0+ signatures, supporting both wallets // that have not been upgraded and those that have. 
match PublicKey::from_bytes(pubkey.as_ref()) { - Ok(pk) => pk.verify_simple_preaudit_deprecated( - SIGNING_CTX, message.as_ref(), &sig.0[..], - ).is_ok(), + Ok(pk) => pk + .verify_simple_preaudit_deprecated(SIGNING_CTX, message.as_ref(), &sig.0[..]) + .is_ok(), Err(_) => false, } } @@ -643,20 +669,16 @@ pub fn verify_batch( for signature in signatures { match schnorrkel::Signature::from_bytes(signature.as_ref()) { Ok(s) => sr_signatures.push(s), - Err(_) => return false + Err(_) => return false, }; } - let mut messages: Vec = messages.into_iter().map( - |msg| signing_context(SIGNING_CTX).bytes(msg) - ).collect(); + let mut messages: Vec = messages + .into_iter() + .map(|msg| signing_context(SIGNING_CTX).bytes(msg)) + .collect(); - schnorrkel::verify_batch( - &mut messages, - &sr_signatures, - &sr_pub_keys, - true, - ).is_ok() + schnorrkel::verify_batch(&mut messages, &sr_signatures, &sr_pub_keys, true).is_ok() } #[cfg(test)] @@ -686,7 +708,9 @@ mod compatibility_test { #[test] fn verify_known_old_message_should_work() { - let public = Public::from_raw(hex!("b4bfa1f7a5166695eb75299fd1c4c03ea212871c342f2c5dfea0902b2c246918")); + let public = Public::from_raw(hex!( + "b4bfa1f7a5166695eb75299fd1c4c03ea212871c342f2c5dfea0902b2c246918" + )); // signature generated by the 1.1 version with the same ^^ public key. 
let signature = Signature::from_raw(hex!( "5a9755f069939f45d96aaf125cf5ce7ba1db998686f87f2fb3cbdea922078741a73891ba265f70c31436e18a9acd14d189d73c12317ab6c313285cd938453202" @@ -700,7 +724,7 @@ mod compatibility_test { #[cfg(test)] mod test { use super::*; - use crate::crypto::{Ss58Codec, DEV_PHRASE, DEV_ADDRESS}; + use crate::crypto::{Ss58Codec, DEV_ADDRESS, DEV_PHRASE}; use hex_literal::hex; use serde_json; @@ -708,10 +732,14 @@ mod test { fn default_phrase_should_be_used() { assert_eq!( Pair::from_string("//Alice///password", None).unwrap().public(), - Pair::from_string(&format!("{}//Alice", DEV_PHRASE), Some("password")).unwrap().public(), + Pair::from_string(&format!("{}//Alice", DEV_PHRASE), Some("password")) + .unwrap() + .public(), ); assert_eq!( - Pair::from_string(&format!("{}/Alice", DEV_PHRASE), None).as_ref().map(Pair::public), + Pair::from_string(&format!("{}/Alice", DEV_PHRASE), None) + .as_ref() + .map(Pair::public), Pair::from_string("/Alice", None).as_ref().map(Pair::public) ); } @@ -857,9 +885,9 @@ mod test { // The values in this test case are compared to the output of `node-test.js` in schnorrkel-js. // // This is to make sure that the wasm library is compatible. 
- let pk = Pair::from_seed( - &hex!("0000000000000000000000000000000000000000000000000000000000000000") - ); + let pk = Pair::from_seed(&hex!( + "0000000000000000000000000000000000000000000000000000000000000000" + )); let public = pk.public(); let js_signature = Signature::from_raw(hex!( "28a854d54903e056f89581c691c1f7d2ff39f8f896c9e9c22475e60902cc2b3547199e0e91fa32902028f2ca2355e8cdd16cfe19ba5e8b658c94aa80f3b81a00" diff --git a/primitives/core/src/testing.rs b/primitives/core/src/testing.rs index be1a83f170095..865a03714a891 100644 --- a/primitives/core/src/testing.rs +++ b/primitives/core/src/testing.rs @@ -162,7 +162,11 @@ impl crate::traits::SpawnNamed for TaskExecutor { #[cfg(feature = "std")] impl crate::traits::SpawnEssentialNamed for TaskExecutor { - fn spawn_essential_blocking(&self, _: &'static str, future: futures::future::BoxFuture<'static, ()>) { + fn spawn_essential_blocking( + &self, + _: &'static str, + future: futures::future::BoxFuture<'static, ()>, + ) { self.0.spawn_ok(future); } fn spawn_essential(&self, _: &'static str, future: futures::future::BoxFuture<'static, ()>) { diff --git a/primitives/core/src/traits.rs b/primitives/core/src/traits.rs index d6503cb86a05d..dfa61f606cb9e 100644 --- a/primitives/core/src/traits.rs +++ b/primitives/core/src/traits.rs @@ -99,11 +99,7 @@ impl<'a> RuntimeCode<'a> { /// /// This is only useful for tests that don't want to execute any code. pub fn empty() -> Self { - Self { - code_fetcher: &NoneFetchRuntimeCode, - hash: Vec::new(), - heap_pages: None, - } + Self { code_fetcher: &NoneFetchRuntimeCode, hash: Vec::new(), heap_pages: None } } } @@ -225,7 +221,11 @@ pub trait SpawnEssentialNamed: Clone + Send + Sync { /// Spawn the given blocking future. /// /// The given `name` is used to identify the future in tracing. 
- fn spawn_essential_blocking(&self, name: &'static str, future: futures::future::BoxFuture<'static, ()>); + fn spawn_essential_blocking( + &self, + name: &'static str, + future: futures::future::BoxFuture<'static, ()>, + ); /// Spawn the given non-blocking future. /// /// The given `name` is used to identify the future in tracing. @@ -233,7 +233,11 @@ pub trait SpawnEssentialNamed: Clone + Send + Sync { } impl SpawnEssentialNamed for Box { - fn spawn_essential_blocking(&self, name: &'static str, future: futures::future::BoxFuture<'static, ()>) { + fn spawn_essential_blocking( + &self, + name: &'static str, + future: futures::future::BoxFuture<'static, ()>, + ) { (**self).spawn_essential_blocking(name, future) } diff --git a/primitives/core/src/u32_trait.rs b/primitives/core/src/u32_trait.rs index 07f9bb0032832..37837e7c0548c 100644 --- a/primitives/core/src/u32_trait.rs +++ b/primitives/core/src/u32_trait.rs @@ -24,221 +24,547 @@ pub trait Value { } /// Type representing the value 0 for the `Value` trait. -pub struct _0; impl Value for _0 { const VALUE: u32 = 0; } +pub struct _0; +impl Value for _0 { + const VALUE: u32 = 0; +} /// Type representing the value 1 for the `Value` trait. -pub struct _1; impl Value for _1 { const VALUE: u32 = 1; } +pub struct _1; +impl Value for _1 { + const VALUE: u32 = 1; +} /// Type representing the value 2 for the `Value` trait. -pub struct _2; impl Value for _2 { const VALUE: u32 = 2; } +pub struct _2; +impl Value for _2 { + const VALUE: u32 = 2; +} /// Type representing the value 3 for the `Value` trait. -pub struct _3; impl Value for _3 { const VALUE: u32 = 3; } +pub struct _3; +impl Value for _3 { + const VALUE: u32 = 3; +} /// Type representing the value 4 for the `Value` trait. -pub struct _4; impl Value for _4 { const VALUE: u32 = 4; } +pub struct _4; +impl Value for _4 { + const VALUE: u32 = 4; +} /// Type representing the value 5 for the `Value` trait. 
-pub struct _5; impl Value for _5 { const VALUE: u32 = 5; } +pub struct _5; +impl Value for _5 { + const VALUE: u32 = 5; +} /// Type representing the value 6 for the `Value` trait. -pub struct _6; impl Value for _6 { const VALUE: u32 = 6; } +pub struct _6; +impl Value for _6 { + const VALUE: u32 = 6; +} /// Type representing the value 7 for the `Value` trait. -pub struct _7; impl Value for _7 { const VALUE: u32 = 7; } +pub struct _7; +impl Value for _7 { + const VALUE: u32 = 7; +} /// Type representing the value 8 for the `Value` trait. -pub struct _8; impl Value for _8 { const VALUE: u32 = 8; } +pub struct _8; +impl Value for _8 { + const VALUE: u32 = 8; +} /// Type representing the value 9 for the `Value` trait. -pub struct _9; impl Value for _9 { const VALUE: u32 = 9; } +pub struct _9; +impl Value for _9 { + const VALUE: u32 = 9; +} /// Type representing the value 10 for the `Value` trait. -pub struct _10; impl Value for _10 { const VALUE: u32 = 10; } +pub struct _10; +impl Value for _10 { + const VALUE: u32 = 10; +} /// Type representing the value 11 for the `Value` trait. -pub struct _11; impl Value for _11 { const VALUE: u32 = 11; } +pub struct _11; +impl Value for _11 { + const VALUE: u32 = 11; +} /// Type representing the value 12 for the `Value` trait. -pub struct _12; impl Value for _12 { const VALUE: u32 = 12; } +pub struct _12; +impl Value for _12 { + const VALUE: u32 = 12; +} /// Type representing the value 13 for the `Value` trait. -pub struct _13; impl Value for _13 { const VALUE: u32 = 13; } +pub struct _13; +impl Value for _13 { + const VALUE: u32 = 13; +} /// Type representing the value 14 for the `Value` trait. -pub struct _14; impl Value for _14 { const VALUE: u32 = 14; } +pub struct _14; +impl Value for _14 { + const VALUE: u32 = 14; +} /// Type representing the value 15 for the `Value` trait. 
-pub struct _15; impl Value for _15 { const VALUE: u32 = 15; } +pub struct _15; +impl Value for _15 { + const VALUE: u32 = 15; +} /// Type representing the value 16 for the `Value` trait. -pub struct _16; impl Value for _16 { const VALUE: u32 = 16; } +pub struct _16; +impl Value for _16 { + const VALUE: u32 = 16; +} /// Type representing the value 17 for the `Value` trait. -pub struct _17; impl Value for _17 { const VALUE: u32 = 17; } +pub struct _17; +impl Value for _17 { + const VALUE: u32 = 17; +} /// Type representing the value 18 for the `Value` trait. -pub struct _18; impl Value for _18 { const VALUE: u32 = 18; } +pub struct _18; +impl Value for _18 { + const VALUE: u32 = 18; +} /// Type representing the value 19 for the `Value` trait. -pub struct _19; impl Value for _19 { const VALUE: u32 = 19; } +pub struct _19; +impl Value for _19 { + const VALUE: u32 = 19; +} /// Type representing the value 20 for the `Value` trait. -pub struct _20; impl Value for _20 { const VALUE: u32 = 20; } +pub struct _20; +impl Value for _20 { + const VALUE: u32 = 20; +} /// Type representing the value 21 for the `Value` trait. -pub struct _21; impl Value for _21 { const VALUE: u32 = 21; } +pub struct _21; +impl Value for _21 { + const VALUE: u32 = 21; +} /// Type representing the value 22 for the `Value` trait. -pub struct _22; impl Value for _22 { const VALUE: u32 = 22; } +pub struct _22; +impl Value for _22 { + const VALUE: u32 = 22; +} /// Type representing the value 23 for the `Value` trait. -pub struct _23; impl Value for _23 { const VALUE: u32 = 23; } +pub struct _23; +impl Value for _23 { + const VALUE: u32 = 23; +} /// Type representing the value 24 for the `Value` trait. -pub struct _24; impl Value for _24 { const VALUE: u32 = 24; } +pub struct _24; +impl Value for _24 { + const VALUE: u32 = 24; +} /// Type representing the value 25 for the `Value` trait. 
-pub struct _25; impl Value for _25 { const VALUE: u32 = 25; } +pub struct _25; +impl Value for _25 { + const VALUE: u32 = 25; +} /// Type representing the value 26 for the `Value` trait. -pub struct _26; impl Value for _26 { const VALUE: u32 = 26; } +pub struct _26; +impl Value for _26 { + const VALUE: u32 = 26; +} /// Type representing the value 27 for the `Value` trait. -pub struct _27; impl Value for _27 { const VALUE: u32 = 27; } +pub struct _27; +impl Value for _27 { + const VALUE: u32 = 27; +} /// Type representing the value 28 for the `Value` trait. -pub struct _28; impl Value for _28 { const VALUE: u32 = 28; } +pub struct _28; +impl Value for _28 { + const VALUE: u32 = 28; +} /// Type representing the value 29 for the `Value` trait. -pub struct _29; impl Value for _29 { const VALUE: u32 = 29; } +pub struct _29; +impl Value for _29 { + const VALUE: u32 = 29; +} /// Type representing the value 30 for the `Value` trait. -pub struct _30; impl Value for _30 { const VALUE: u32 = 30; } +pub struct _30; +impl Value for _30 { + const VALUE: u32 = 30; +} /// Type representing the value 31 for the `Value` trait. -pub struct _31; impl Value for _31 { const VALUE: u32 = 31; } +pub struct _31; +impl Value for _31 { + const VALUE: u32 = 31; +} /// Type representing the value 32 for the `Value` trait. -pub struct _32; impl Value for _32 { const VALUE: u32 = 32; } +pub struct _32; +impl Value for _32 { + const VALUE: u32 = 32; +} /// Type representing the value 33 for the `Value` trait. -pub struct _33; impl Value for _33 { const VALUE: u32 = 33; } +pub struct _33; +impl Value for _33 { + const VALUE: u32 = 33; +} /// Type representing the value 34 for the `Value` trait. -pub struct _34; impl Value for _34 { const VALUE: u32 = 34; } +pub struct _34; +impl Value for _34 { + const VALUE: u32 = 34; +} /// Type representing the value 35 for the `Value` trait. 
-pub struct _35; impl Value for _35 { const VALUE: u32 = 35; } +pub struct _35; +impl Value for _35 { + const VALUE: u32 = 35; +} /// Type representing the value 36 for the `Value` trait. -pub struct _36; impl Value for _36 { const VALUE: u32 = 36; } +pub struct _36; +impl Value for _36 { + const VALUE: u32 = 36; +} /// Type representing the value 37 for the `Value` trait. -pub struct _37; impl Value for _37 { const VALUE: u32 = 37; } +pub struct _37; +impl Value for _37 { + const VALUE: u32 = 37; +} /// Type representing the value 38 for the `Value` trait. -pub struct _38; impl Value for _38 { const VALUE: u32 = 38; } +pub struct _38; +impl Value for _38 { + const VALUE: u32 = 38; +} /// Type representing the value 39 for the `Value` trait. -pub struct _39; impl Value for _39 { const VALUE: u32 = 39; } +pub struct _39; +impl Value for _39 { + const VALUE: u32 = 39; +} /// Type representing the value 40 for the `Value` trait. -pub struct _40; impl Value for _40 { const VALUE: u32 = 40; } +pub struct _40; +impl Value for _40 { + const VALUE: u32 = 40; +} /// Type representing the value 41 for the `Value` trait. -pub struct _41; impl Value for _41 { const VALUE: u32 = 41; } +pub struct _41; +impl Value for _41 { + const VALUE: u32 = 41; +} /// Type representing the value 42 for the `Value` trait. -pub struct _42; impl Value for _42 { const VALUE: u32 = 42; } +pub struct _42; +impl Value for _42 { + const VALUE: u32 = 42; +} /// Type representing the value 43 for the `Value` trait. -pub struct _43; impl Value for _43 { const VALUE: u32 = 43; } +pub struct _43; +impl Value for _43 { + const VALUE: u32 = 43; +} /// Type representing the value 44 for the `Value` trait. -pub struct _44; impl Value for _44 { const VALUE: u32 = 44; } +pub struct _44; +impl Value for _44 { + const VALUE: u32 = 44; +} /// Type representing the value 45 for the `Value` trait. 
-pub struct _45; impl Value for _45 { const VALUE: u32 = 45; } +pub struct _45; +impl Value for _45 { + const VALUE: u32 = 45; +} /// Type representing the value 46 for the `Value` trait. -pub struct _46; impl Value for _46 { const VALUE: u32 = 46; } +pub struct _46; +impl Value for _46 { + const VALUE: u32 = 46; +} /// Type representing the value 47 for the `Value` trait. -pub struct _47; impl Value for _47 { const VALUE: u32 = 47; } +pub struct _47; +impl Value for _47 { + const VALUE: u32 = 47; +} /// Type representing the value 48 for the `Value` trait. -pub struct _48; impl Value for _48 { const VALUE: u32 = 48; } +pub struct _48; +impl Value for _48 { + const VALUE: u32 = 48; +} /// Type representing the value 49 for the `Value` trait. -pub struct _49; impl Value for _49 { const VALUE: u32 = 49; } +pub struct _49; +impl Value for _49 { + const VALUE: u32 = 49; +} /// Type representing the value 50 for the `Value` trait. -pub struct _50; impl Value for _50 { const VALUE: u32 = 50; } +pub struct _50; +impl Value for _50 { + const VALUE: u32 = 50; +} /// Type representing the value 51 for the `Value` trait. -pub struct _51; impl Value for _51 { const VALUE: u32 = 51; } +pub struct _51; +impl Value for _51 { + const VALUE: u32 = 51; +} /// Type representing the value 52 for the `Value` trait. -pub struct _52; impl Value for _52 { const VALUE: u32 = 52; } +pub struct _52; +impl Value for _52 { + const VALUE: u32 = 52; +} /// Type representing the value 53 for the `Value` trait. -pub struct _53; impl Value for _53 { const VALUE: u32 = 53; } +pub struct _53; +impl Value for _53 { + const VALUE: u32 = 53; +} /// Type representing the value 54 for the `Value` trait. -pub struct _54; impl Value for _54 { const VALUE: u32 = 54; } +pub struct _54; +impl Value for _54 { + const VALUE: u32 = 54; +} /// Type representing the value 55 for the `Value` trait. 
-pub struct _55; impl Value for _55 { const VALUE: u32 = 55; } +pub struct _55; +impl Value for _55 { + const VALUE: u32 = 55; +} /// Type representing the value 56 for the `Value` trait. -pub struct _56; impl Value for _56 { const VALUE: u32 = 56; } +pub struct _56; +impl Value for _56 { + const VALUE: u32 = 56; +} /// Type representing the value 57 for the `Value` trait. -pub struct _57; impl Value for _57 { const VALUE: u32 = 57; } +pub struct _57; +impl Value for _57 { + const VALUE: u32 = 57; +} /// Type representing the value 58 for the `Value` trait. -pub struct _58; impl Value for _58 { const VALUE: u32 = 58; } +pub struct _58; +impl Value for _58 { + const VALUE: u32 = 58; +} /// Type representing the value 59 for the `Value` trait. -pub struct _59; impl Value for _59 { const VALUE: u32 = 59; } +pub struct _59; +impl Value for _59 { + const VALUE: u32 = 59; +} /// Type representing the value 60 for the `Value` trait. -pub struct _60; impl Value for _60 { const VALUE: u32 = 60; } +pub struct _60; +impl Value for _60 { + const VALUE: u32 = 60; +} /// Type representing the value 61 for the `Value` trait. -pub struct _61; impl Value for _61 { const VALUE: u32 = 61; } +pub struct _61; +impl Value for _61 { + const VALUE: u32 = 61; +} /// Type representing the value 62 for the `Value` trait. -pub struct _62; impl Value for _62 { const VALUE: u32 = 62; } +pub struct _62; +impl Value for _62 { + const VALUE: u32 = 62; +} /// Type representing the value 63 for the `Value` trait. -pub struct _63; impl Value for _63 { const VALUE: u32 = 63; } +pub struct _63; +impl Value for _63 { + const VALUE: u32 = 63; +} /// Type representing the value 64 for the `Value` trait. -pub struct _64; impl Value for _64 { const VALUE: u32 = 64; } +pub struct _64; +impl Value for _64 { + const VALUE: u32 = 64; +} /// Type representing the value 65 for the `Value` trait. 
-pub struct _65; impl Value for _65 { const VALUE: u32 = 65; } +pub struct _65; +impl Value for _65 { + const VALUE: u32 = 65; +} /// Type representing the value 66 for the `Value` trait. -pub struct _66; impl Value for _66 { const VALUE: u32 = 66; } +pub struct _66; +impl Value for _66 { + const VALUE: u32 = 66; +} /// Type representing the value 67 for the `Value` trait. -pub struct _67; impl Value for _67 { const VALUE: u32 = 67; } +pub struct _67; +impl Value for _67 { + const VALUE: u32 = 67; +} /// Type representing the value 68 for the `Value` trait. -pub struct _68; impl Value for _68 { const VALUE: u32 = 68; } +pub struct _68; +impl Value for _68 { + const VALUE: u32 = 68; +} /// Type representing the value 69 for the `Value` trait. -pub struct _69; impl Value for _69 { const VALUE: u32 = 69; } +pub struct _69; +impl Value for _69 { + const VALUE: u32 = 69; +} /// Type representing the value 70 for the `Value` trait. -pub struct _70; impl Value for _70 { const VALUE: u32 = 70; } +pub struct _70; +impl Value for _70 { + const VALUE: u32 = 70; +} /// Type representing the value 71 for the `Value` trait. -pub struct _71; impl Value for _71 { const VALUE: u32 = 71; } +pub struct _71; +impl Value for _71 { + const VALUE: u32 = 71; +} /// Type representing the value 72 for the `Value` trait. -pub struct _72; impl Value for _72 { const VALUE: u32 = 72; } +pub struct _72; +impl Value for _72 { + const VALUE: u32 = 72; +} /// Type representing the value 73 for the `Value` trait. -pub struct _73; impl Value for _73 { const VALUE: u32 = 73; } +pub struct _73; +impl Value for _73 { + const VALUE: u32 = 73; +} /// Type representing the value 74 for the `Value` trait. -pub struct _74; impl Value for _74 { const VALUE: u32 = 74; } +pub struct _74; +impl Value for _74 { + const VALUE: u32 = 74; +} /// Type representing the value 75 for the `Value` trait. 
-pub struct _75; impl Value for _75 { const VALUE: u32 = 75; } +pub struct _75; +impl Value for _75 { + const VALUE: u32 = 75; +} /// Type representing the value 76 for the `Value` trait. -pub struct _76; impl Value for _76 { const VALUE: u32 = 76; } +pub struct _76; +impl Value for _76 { + const VALUE: u32 = 76; +} /// Type representing the value 77 for the `Value` trait. -pub struct _77; impl Value for _77 { const VALUE: u32 = 77; } +pub struct _77; +impl Value for _77 { + const VALUE: u32 = 77; +} /// Type representing the value 78 for the `Value` trait. -pub struct _78; impl Value for _78 { const VALUE: u32 = 78; } +pub struct _78; +impl Value for _78 { + const VALUE: u32 = 78; +} /// Type representing the value 79 for the `Value` trait. -pub struct _79; impl Value for _79 { const VALUE: u32 = 79; } +pub struct _79; +impl Value for _79 { + const VALUE: u32 = 79; +} /// Type representing the value 80 for the `Value` trait. -pub struct _80; impl Value for _80 { const VALUE: u32 = 80; } +pub struct _80; +impl Value for _80 { + const VALUE: u32 = 80; +} /// Type representing the value 81 for the `Value` trait. -pub struct _81; impl Value for _81 { const VALUE: u32 = 81; } +pub struct _81; +impl Value for _81 { + const VALUE: u32 = 81; +} /// Type representing the value 82 for the `Value` trait. -pub struct _82; impl Value for _82 { const VALUE: u32 = 82; } +pub struct _82; +impl Value for _82 { + const VALUE: u32 = 82; +} /// Type representing the value 83 for the `Value` trait. -pub struct _83; impl Value for _83 { const VALUE: u32 = 83; } +pub struct _83; +impl Value for _83 { + const VALUE: u32 = 83; +} /// Type representing the value 84 for the `Value` trait. -pub struct _84; impl Value for _84 { const VALUE: u32 = 84; } +pub struct _84; +impl Value for _84 { + const VALUE: u32 = 84; +} /// Type representing the value 85 for the `Value` trait. 
-pub struct _85; impl Value for _85 { const VALUE: u32 = 85; } +pub struct _85; +impl Value for _85 { + const VALUE: u32 = 85; +} /// Type representing the value 86 for the `Value` trait. -pub struct _86; impl Value for _86 { const VALUE: u32 = 86; } +pub struct _86; +impl Value for _86 { + const VALUE: u32 = 86; +} /// Type representing the value 87 for the `Value` trait. -pub struct _87; impl Value for _87 { const VALUE: u32 = 87; } +pub struct _87; +impl Value for _87 { + const VALUE: u32 = 87; +} /// Type representing the value 88 for the `Value` trait. -pub struct _88; impl Value for _88 { const VALUE: u32 = 88; } +pub struct _88; +impl Value for _88 { + const VALUE: u32 = 88; +} /// Type representing the value 89 for the `Value` trait. -pub struct _89; impl Value for _89 { const VALUE: u32 = 89; } +pub struct _89; +impl Value for _89 { + const VALUE: u32 = 89; +} /// Type representing the value 90 for the `Value` trait. -pub struct _90; impl Value for _90 { const VALUE: u32 = 90; } +pub struct _90; +impl Value for _90 { + const VALUE: u32 = 90; +} /// Type representing the value 91 for the `Value` trait. -pub struct _91; impl Value for _91 { const VALUE: u32 = 91; } +pub struct _91; +impl Value for _91 { + const VALUE: u32 = 91; +} /// Type representing the value 92 for the `Value` trait. -pub struct _92; impl Value for _92 { const VALUE: u32 = 92; } +pub struct _92; +impl Value for _92 { + const VALUE: u32 = 92; +} /// Type representing the value 93 for the `Value` trait. -pub struct _93; impl Value for _93 { const VALUE: u32 = 93; } +pub struct _93; +impl Value for _93 { + const VALUE: u32 = 93; +} /// Type representing the value 94 for the `Value` trait. -pub struct _94; impl Value for _94 { const VALUE: u32 = 94; } +pub struct _94; +impl Value for _94 { + const VALUE: u32 = 94; +} /// Type representing the value 95 for the `Value` trait. 
-pub struct _95; impl Value for _95 { const VALUE: u32 = 95; } +pub struct _95; +impl Value for _95 { + const VALUE: u32 = 95; +} /// Type representing the value 96 for the `Value` trait. -pub struct _96; impl Value for _96 { const VALUE: u32 = 96; } +pub struct _96; +impl Value for _96 { + const VALUE: u32 = 96; +} /// Type representing the value 97 for the `Value` trait. -pub struct _97; impl Value for _97 { const VALUE: u32 = 97; } +pub struct _97; +impl Value for _97 { + const VALUE: u32 = 97; +} /// Type representing the value 98 for the `Value` trait. -pub struct _98; impl Value for _98 { const VALUE: u32 = 98; } +pub struct _98; +impl Value for _98 { + const VALUE: u32 = 98; +} /// Type representing the value 99 for the `Value` trait. -pub struct _99; impl Value for _99 { const VALUE: u32 = 99; } +pub struct _99; +impl Value for _99 { + const VALUE: u32 = 99; +} /// Type representing the value 100 for the `Value` trait. -pub struct _100; impl Value for _100 { const VALUE: u32 = 100; } +pub struct _100; +impl Value for _100 { + const VALUE: u32 = 100; +} /// Type representing the value 112 for the `Value` trait. -pub struct _112; impl Value for _112 { const VALUE: u32 = 112; } +pub struct _112; +impl Value for _112 { + const VALUE: u32 = 112; +} /// Type representing the value 128 for the `Value` trait. -pub struct _128; impl Value for _128 { const VALUE: u32 = 128; } +pub struct _128; +impl Value for _128 { + const VALUE: u32 = 128; +} /// Type representing the value 160 for the `Value` trait. -pub struct _160; impl Value for _160 { const VALUE: u32 = 160; } +pub struct _160; +impl Value for _160 { + const VALUE: u32 = 160; +} /// Type representing the value 192 for the `Value` trait. -pub struct _192; impl Value for _192 { const VALUE: u32 = 192; } +pub struct _192; +impl Value for _192 { + const VALUE: u32 = 192; +} /// Type representing the value 224 for the `Value` trait. 
-pub struct _224; impl Value for _224 { const VALUE: u32 = 224; } +pub struct _224; +impl Value for _224 { + const VALUE: u32 = 224; +} /// Type representing the value 256 for the `Value` trait. -pub struct _256; impl Value for _256 { const VALUE: u32 = 256; } +pub struct _256; +impl Value for _256 { + const VALUE: u32 = 256; +} /// Type representing the value 384 for the `Value` trait. -pub struct _384; impl Value for _384 { const VALUE: u32 = 384; } +pub struct _384; +impl Value for _384 { + const VALUE: u32 = 384; +} /// Type representing the value 512 for the `Value` trait. -pub struct _512; impl Value for _512 { const VALUE: u32 = 512; } - +pub struct _512; +impl Value for _512 { + const VALUE: u32 = 512; +} diff --git a/primitives/core/src/uint.rs b/primitives/core/src/uint.rs index ff45ad6ecf0d5..a74980332ad28 100644 --- a/primitives/core/src/uint.rs +++ b/primitives/core/src/uint.rs @@ -22,7 +22,7 @@ pub use primitive_types::{U256, U512}; #[cfg(test)] mod tests { use super::*; - use codec::{Encode, Decode}; + use codec::{Decode, Encode}; use sp_serializer as ser; macro_rules! 
test { @@ -55,34 +55,27 @@ mod tests { assert!(ser::from_str::<$name>("\"10\"").unwrap_err().is_data()); assert!(ser::from_str::<$name>("\"0\"").unwrap_err().is_data()); } - } + }; } test!(U256, test_u256); #[test] fn test_u256_codec() { - let res1 = vec![120, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0]; - let res2 = vec![0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, - 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, - 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, - 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]; + let res1 = vec![ + 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, + ]; + let res2 = vec![ + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, + ]; - assert_eq!( - U256::from(120).encode(), - res1); - assert_eq!( - U256::max_value().encode(), - res2); - assert_eq!( - U256::decode(&mut &res1[..]), - Ok(U256::from(120))); - assert_eq!( - U256::decode(&mut &res2[..]), - Ok(U256::max_value())); + assert_eq!(U256::from(120).encode(), res1); + assert_eq!(U256::max_value().encode(), res2); + assert_eq!(U256::decode(&mut &res1[..]), Ok(U256::from(120))); + assert_eq!(U256::decode(&mut &res2[..]), Ok(U256::max_value())); } #[test] @@ -91,10 +84,10 @@ mod tests { ser::to_string_pretty(&!U256::zero()), "\"0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\"" ); - assert!( - ser::from_str::("\"0x1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\"") - .unwrap_err() - .is_data() - ); + assert!(ser::from_str::( + "\"0x1ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff\"" + ) + .unwrap_err() + .is_data()); } } diff --git a/primitives/database/src/kvdb.rs b/primitives/database/src/kvdb.rs index d99fe6360ef7b..1a2b0513dc28a 100644 --- a/primitives/database/src/kvdb.rs +++ 
b/primitives/database/src/kvdb.rs @@ -16,30 +16,31 @@ // limitations under the License. /// A wrapper around `kvdb::Database` that implements `sp_database::Database` trait - use ::kvdb::{DBTransaction, KeyValueDB}; -use crate::{Database, Change, ColumnId, Transaction, error}; +use crate::{error, Change, ColumnId, Database, Transaction}; struct DbAdapter(D); fn handle_err(result: std::io::Result) -> T { match result { Ok(r) => r, - Err(e) => { + Err(e) => { panic!("Critical database error: {:?}", e); - } + }, } } /// Wrap RocksDb database into a trait object that implements `sp_database::Database` pub fn as_database(db: D) -> std::sync::Arc> - where D: KeyValueDB + 'static, H: Clone + AsRef<[u8]> +where + D: KeyValueDB + 'static, + H: Clone + AsRef<[u8]>, { std::sync::Arc::new(DbAdapter(db)) } -impl DbAdapter { +impl DbAdapter { // Returns counter key and counter value if it exists. fn read_counter(&self, col: ColumnId, key: &[u8]) -> error::Result<(Vec, Option)> { // Add a key suffix for the counter @@ -49,16 +50,16 @@ impl DbAdapter { Some(data) => { let mut counter_data = [0; 4]; if data.len() != 4 { - return Err(error::DatabaseError(Box::new( - std::io::Error::new(std::io::ErrorKind::Other, - format!("Unexpected counter len {}", data.len()))) - )) + return Err(error::DatabaseError(Box::new(std::io::Error::new( + std::io::ErrorKind::Other, + format!("Unexpected counter len {}", data.len()), + )))) } counter_data.copy_from_slice(&data); let counter = u32::from_le_bytes(counter_data); (counter_key, Some(counter)) }, - None => (counter_key, None) + None => (counter_key, None), }) } } @@ -70,27 +71,29 @@ impl> Database for DbAdapter { match change { Change::Set(col, key, value) => tx.put_vec(col, &key, value), Change::Remove(col, key) => tx.delete(col, &key), - Change::Store(col, key, value) => { - match self.read_counter(col, key.as_ref())? 
{ - (counter_key, Some(mut counter)) => { - counter += 1; - tx.put(col, &counter_key, &counter.to_le_bytes()); - }, - (counter_key, None) => { - let d = 1u32.to_le_bytes(); - tx.put(col, &counter_key, &d); - tx.put_vec(col, key.as_ref(), value); - }, - } - } + Change::Store(col, key, value) => match self.read_counter(col, key.as_ref())? { + (counter_key, Some(mut counter)) => { + counter += 1; + tx.put(col, &counter_key, &counter.to_le_bytes()); + }, + (counter_key, None) => { + let d = 1u32.to_le_bytes(); + tx.put(col, &counter_key, &d); + tx.put_vec(col, key.as_ref(), value); + }, + }, Change::Reference(col, key) => { - if let (counter_key, Some(mut counter)) = self.read_counter(col, key.as_ref())? { + if let (counter_key, Some(mut counter)) = + self.read_counter(col, key.as_ref())? + { counter += 1; tx.put(col, &counter_key, &counter.to_le_bytes()); } - } + }, Change::Release(col, key) => { - if let (counter_key, Some(mut counter)) = self.read_counter(col, key.as_ref())? { + if let (counter_key, Some(mut counter)) = + self.read_counter(col, key.as_ref())? + { counter -= 1; if counter == 0 { tx.delete(col, &counter_key); @@ -99,7 +102,7 @@ impl> Database for DbAdapter { tx.put(col, &counter_key, &counter.to_le_bytes()); } } - } + }, } } self.0.write(tx).map_err(|e| error::DatabaseError(Box::new(e))) diff --git a/primitives/database/src/lib.rs b/primitives/database/src/lib.rs index 1fa0c8e49b015..ed5d93ed5b9c6 100644 --- a/primitives/database/src/lib.rs +++ b/primitives/database/src/lib.rs @@ -18,11 +18,11 @@ //! The main database trait, allowing Substrate to store data persistently. pub mod error; -mod mem; mod kvdb; +mod mem; -pub use mem::MemDb; pub use crate::kvdb::as_database; +pub use mem::MemDb; /// An identifier for a column. 
pub type ColumnId = u32; @@ -118,10 +118,13 @@ impl std::fmt::Debug for dyn Database { pub fn with_get>( db: &dyn Database, col: ColumnId, - key: &[u8], mut f: impl FnMut(&[u8]) -> R + key: &[u8], + mut f: impl FnMut(&[u8]) -> R, ) -> Option { let mut result: Option = None; - let mut adapter = |k: &_| { result = Some(f(k)); }; + let mut adapter = |k: &_| { + result = Some(f(k)); + }; db.with_get(col, key, &mut adapter); result } diff --git a/primitives/database/src/mem.rs b/primitives/database/src/mem.rs index 24ddf03319711..d1b1861e98fdd 100644 --- a/primitives/database/src/mem.rs +++ b/primitives/database/src/mem.rs @@ -17,41 +17,52 @@ //! In-memory implementation of `Database` -use std::collections::{HashMap, hash_map::Entry}; -use crate::{Database, Change, ColumnId, Transaction, error}; +use crate::{error, Change, ColumnId, Database, Transaction}; use parking_lot::RwLock; +use std::collections::{hash_map::Entry, HashMap}; #[derive(Default)] /// This implements `Database` as an in-memory hash map. `commit` is not atomic. 
pub struct MemDb(RwLock, (u32, Vec)>>>); impl Database for MemDb - where H: Clone + AsRef<[u8]> +where + H: Clone + AsRef<[u8]>, { fn commit(&self, transaction: Transaction) -> error::Result<()> { let mut s = self.0.write(); for change in transaction.0.into_iter() { match change { - Change::Set(col, key, value) => { s.entry(col).or_default().insert(key, (1, value)); }, - Change::Remove(col, key) => { s.entry(col).or_default().remove(&key); }, + Change::Set(col, key, value) => { + s.entry(col).or_default().insert(key, (1, value)); + }, + Change::Remove(col, key) => { + s.entry(col).or_default().remove(&key); + }, Change::Store(col, hash, value) => { - s.entry(col).or_default().entry(hash.as_ref().to_vec()) + s.entry(col) + .or_default() + .entry(hash.as_ref().to_vec()) .and_modify(|(c, _)| *c += 1) .or_insert_with(|| (1, value)); }, Change::Reference(col, hash) => { - if let Entry::Occupied(mut entry) = s.entry(col).or_default().entry(hash.as_ref().to_vec()) { + if let Entry::Occupied(mut entry) = + s.entry(col).or_default().entry(hash.as_ref().to_vec()) + { entry.get_mut().0 += 1; } - } + }, Change::Release(col, hash) => { - if let Entry::Occupied(mut entry) = s.entry(col).or_default().entry(hash.as_ref().to_vec()) { + if let Entry::Occupied(mut entry) = + s.entry(col).or_default().entry(hash.as_ref().to_vec()) + { entry.get_mut().0 -= 1; if entry.get().0 == 0 { entry.remove(); } } - } + }, } } @@ -76,4 +87,3 @@ impl MemDb { s.get(&col).map(|c| c.len()).unwrap_or(0) } } - diff --git a/primitives/debug-derive/src/impls.rs b/primitives/debug-derive/src/impls.rs index 898e4eef5d06b..4d79ee9880160 100644 --- a/primitives/debug-derive/src/impls.rs +++ b/primitives/debug-derive/src/impls.rs @@ -15,9 +15,9 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use quote::quote; use proc_macro2::TokenStream; -use syn::{Data, DeriveInput, parse_quote}; +use quote::quote; +use syn::{parse_quote, Data, DeriveInput}; pub fn debug_derive(ast: DeriveInput) -> proc_macro::TokenStream { let name_str = ast.ident.to_string(); @@ -28,11 +28,11 @@ pub fn debug_derive(ast: DeriveInput) -> proc_macro::TokenStream { let wh = generics.make_where_clause(); for t in ast.generics.type_params() { let name = &t.ident; - wh.predicates.push(parse_quote!{ #name : core::fmt::Debug }); + wh.predicates.push(parse_quote! { #name : core::fmt::Debug }); } generics.split_for_impl() }; - let gen = quote!{ + let gen = quote! { impl #impl_generics core::fmt::Debug for #name #ty_generics #where_clause { fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result { #implementation @@ -62,32 +62,26 @@ mod implementation { mod implementation { use super::*; use proc_macro2::Span; - use syn::{Ident, Index, token::SelfValue}; + use syn::{token::SelfValue, Ident, Index}; /// Derive the inner implementation of `Debug::fmt` function. 
pub fn derive(name_str: &str, data: &Data) -> TokenStream { match *data { Data::Struct(ref s) => derive_struct(&name_str, &s.fields), - Data::Union(ref u) => derive_fields(&name_str, Fields::new(u.fields.named.iter(), None)), + Data::Union(ref u) => + derive_fields(&name_str, Fields::new(u.fields.named.iter(), None)), Data::Enum(ref e) => derive_enum(&name_str, &e), } } enum Fields { - Indexed { - indices: Vec, - }, - Unnamed { - vars: Vec, - }, - Named { - names: Vec, - this: Option, - }, + Indexed { indices: Vec }, + Unnamed { vars: Vec }, + Named { names: Vec, this: Option }, } impl Fields { - fn new<'a>(fields: impl Iterator, this: Option) -> Self { + fn new<'a>(fields: impl Iterator, this: Option) -> Self { let mut indices = vec![]; let mut names = vec![]; @@ -100,27 +94,17 @@ mod implementation { } if names.is_empty() { - Self::Indexed { - indices, - } + Self::Indexed { indices } } else { - Self::Named { - names, - this, - } + Self::Named { names, this } } } } - fn derive_fields<'a>( - name_str: &str, - fields: Fields, - ) -> TokenStream { + fn derive_fields<'a>(name_str: &str, fields: Fields) -> TokenStream { match fields { Fields::Named { names, this } => { - let names_str: Vec<_> = names.iter() - .map(|x| x.to_string()) - .collect(); + let names_str: Vec<_> = names.iter().map(|x| x.to_string()).collect(); let fields = match this { None => quote! { #( .field(#names_str, #names) )* }, @@ -132,16 +116,15 @@ mod implementation { #fields .finish() } - }, - Fields::Indexed { indices } => { + Fields::Indexed { indices } => { quote! { fmt.debug_tuple(#name_str) #( .field(&self.#indices) )* .finish() } }, - Fields::Unnamed { vars } => { + Fields::Unnamed { vars } => { quote! 
{ fmt.debug_tuple(#name_str) #( .field(#vars) )* @@ -151,38 +134,33 @@ mod implementation { } } - fn derive_enum( - name: &str, - e: &syn::DataEnum, - ) -> TokenStream { - let v = e.variants - .iter() - .map(|v| { - let name = format!("{}::{}", name, v.ident); - let ident = &v.ident; - match v.fields { - syn::Fields::Named(ref f) => { - let names: Vec<_> = f.named.iter().flat_map(|f| f.ident.clone()).collect(); - let fields_impl = derive_fields(&name, Fields::Named { - names: names.clone(), - this: None, - }); - (ident, (quote!{ { #( ref #names ),* } }, fields_impl)) - }, - syn::Fields::Unnamed(ref f) => { - let names = f.unnamed.iter() - .enumerate() - .map(|(id, _)| Ident::new(&format!("a{}", id), Span::call_site())) - .collect::>(); - let fields_impl = derive_fields(&name, Fields::Unnamed { vars: names.clone() }); - (ident, (quote! { ( #( ref #names ),* ) }, fields_impl)) - }, - syn::Fields::Unit => { - let fields_impl = derive_fields(&name, Fields::Indexed { indices: vec![] }); - (ident, (quote! { }, fields_impl)) - }, - } - }); + fn derive_enum(name: &str, e: &syn::DataEnum) -> TokenStream { + let v = e.variants.iter().map(|v| { + let name = format!("{}::{}", name, v.ident); + let ident = &v.ident; + match v.fields { + syn::Fields::Named(ref f) => { + let names: Vec<_> = f.named.iter().flat_map(|f| f.ident.clone()).collect(); + let fields_impl = + derive_fields(&name, Fields::Named { names: names.clone(), this: None }); + (ident, (quote! { { #( ref #names ),* } }, fields_impl)) + }, + syn::Fields::Unnamed(ref f) => { + let names = f + .unnamed + .iter() + .enumerate() + .map(|(id, _)| Ident::new(&format!("a{}", id), Span::call_site())) + .collect::>(); + let fields_impl = derive_fields(&name, Fields::Unnamed { vars: names.clone() }); + (ident, (quote! { ( #( ref #names ),* ) }, fields_impl)) + }, + syn::Fields::Unit => { + let fields_impl = derive_fields(&name, Fields::Indexed { indices: vec![] }); + (ident, (quote! 
{}, fields_impl)) + }, + } + }); type Vecs = (Vec, Vec); let (variants, others): Vecs<_, _> = v.unzip(); @@ -196,23 +174,15 @@ mod implementation { } } - fn derive_struct( - name_str: &str, - fields: &syn::Fields, - ) -> TokenStream { + fn derive_struct(name_str: &str, fields: &syn::Fields) -> TokenStream { match *fields { syn::Fields::Named(ref f) => derive_fields( name_str, Fields::new(f.named.iter(), Some(syn::Token!(self)(Span::call_site()))), ), - syn::Fields::Unnamed(ref f) => derive_fields( - name_str, - Fields::new(f.unnamed.iter(), None), - ), - syn::Fields::Unit => derive_fields( - name_str, - Fields::Indexed { indices: vec![] }, - ), + syn::Fields::Unnamed(ref f) => + derive_fields(name_str, Fields::new(f.unnamed.iter(), None)), + syn::Fields::Unit => derive_fields(name_str, Fields::Indexed { indices: vec![] }), } } } diff --git a/primitives/debug-derive/src/lib.rs b/primitives/debug-derive/src/lib.rs index ebfbd614d9c8d..7eaa3a0020e93 100644 --- a/primitives/debug-derive/src/lib.rs +++ b/primitives/debug-derive/src/lib.rs @@ -38,6 +38,5 @@ use proc_macro::TokenStream; #[proc_macro_derive(RuntimeDebug)] pub fn debug_derive(input: TokenStream) -> TokenStream { - impls::debug_derive(syn::parse_macro_input!(input)) + impls::debug_derive(syn::parse_macro_input!(input)) } - diff --git a/primitives/debug-derive/tests/tests.rs b/primitives/debug-derive/tests/tests.rs index d51d6a05bf21c..4f4c7f4caabc2 100644 --- a/primitives/debug-derive/tests/tests.rs +++ b/primitives/debug-derive/tests/tests.rs @@ -30,33 +30,17 @@ struct Named { enum EnumLongName { A, B(A, String), - VariantLongName { - a: A, - b: String, - }, + VariantLongName { a: A, b: String }, } - #[test] fn should_display_proper_debug() { use self::EnumLongName as Enum; - assert_eq!( - format!("{:?}", Unnamed(1, "abc".into())), - "Unnamed(1, \"abc\")" - ); - assert_eq!( - format!("{:?}", Named { a: 1, b: "abc".into() }), - "Named { a: 1, b: \"abc\" }" - ); - assert_eq!( - format!("{:?}", Enum::::A), - 
"EnumLongName::A" - ); - assert_eq!( - format!("{:?}", Enum::B(1, "abc".into())), - "EnumLongName::B(1, \"abc\")" - ); + assert_eq!(format!("{:?}", Unnamed(1, "abc".into())), "Unnamed(1, \"abc\")"); + assert_eq!(format!("{:?}", Named { a: 1, b: "abc".into() }), "Named { a: 1, b: \"abc\" }"); + assert_eq!(format!("{:?}", Enum::::A), "EnumLongName::A"); + assert_eq!(format!("{:?}", Enum::B(1, "abc".into())), "EnumLongName::B(1, \"abc\")"); assert_eq!( format!("{:?}", Enum::VariantLongName { a: 1, b: "abc".into() }), "EnumLongName::VariantLongName { a: 1, b: \"abc\" }" diff --git a/primitives/externalities/src/extensions.rs b/primitives/externalities/src/extensions.rs index 69c6c09be4487..55b69fde08903 100644 --- a/primitives/externalities/src/extensions.rs +++ b/primitives/externalities/src/extensions.rs @@ -22,10 +22,13 @@ //! //! It is required that each extension implements the [`Extension`] trait. +use crate::Error; use sp_std::{ - collections::btree_map::{BTreeMap, Entry}, any::{Any, TypeId}, ops::DerefMut, boxed::Box, + any::{Any, TypeId}, + boxed::Box, + collections::btree_map::{BTreeMap, Entry}, + ops::DerefMut, }; -use crate::Error; /// Marker trait for types that should be registered as [`Externalities`](crate::Externalities) extension. /// @@ -101,7 +104,11 @@ pub trait ExtensionStore { /// Register extension `extension` with specified `type_id`. /// /// It should return error if extension is already registered. - fn register_extension_with_type_id(&mut self, type_id: TypeId, extension: Box) -> Result<(), Error>; + fn register_extension_with_type_id( + &mut self, + type_id: TypeId, + extension: Box, + ) -> Result<(), Error>; /// Deregister extension with speicifed 'type_id' and drop it. /// @@ -129,10 +136,7 @@ impl Extensions { } /// Register the given extension. 
- pub fn register( - &mut self, - ext: E, - ) { + pub fn register(&mut self, ext: E) { let type_id = ext.type_id(); self.extensions.insert(type_id, Box::new(ext)); } @@ -154,7 +158,10 @@ impl Extensions { /// Return a mutable reference to the requested extension. pub fn get_mut(&mut self, ext_type_id: TypeId) -> Option<&mut dyn Any> { - self.extensions.get_mut(&ext_type_id).map(DerefMut::deref_mut).map(Extension::as_mut_any) + self.extensions + .get_mut(&ext_type_id) + .map(DerefMut::deref_mut) + .map(Extension::as_mut_any) } /// Deregister extension for the given `type_id`. @@ -165,7 +172,9 @@ impl Extensions { } /// Returns a mutable iterator over all extensions. - pub fn iter_mut<'a>(&'a mut self) -> impl Iterator)> { + pub fn iter_mut<'a>( + &'a mut self, + ) -> impl Iterator)> { self.extensions.iter_mut() } } diff --git a/primitives/externalities/src/lib.rs b/primitives/externalities/src/lib.rs index 80bb5b99f3155..b0ec16213b2c2 100644 --- a/primitives/externalities/src/lib.rs +++ b/primitives/externalities/src/lib.rs @@ -25,12 +25,16 @@ //! //! This crate exposes the main [`Externalities`] trait. -use sp_std::{any::{Any, TypeId}, vec::Vec, boxed::Box}; +use sp_std::{ + any::{Any, TypeId}, + boxed::Box, + vec::Vec, +}; use sp_storage::{ChildInfo, TrackedStorageKey}; +pub use extensions::{Extension, ExtensionStore, Extensions}; pub use scope_limited::{set_and_run_with_externalities, with_externalities}; -pub use extensions::{Extension, Extensions, ExtensionStore}; mod extensions; mod scope_limited; @@ -68,20 +72,12 @@ pub trait Externalities: ExtensionStore { /// This may be optimized for large values. /// /// Returns an `Option` that holds the SCALE encoded hash. - fn child_storage_hash( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option>; + fn child_storage_hash(&self, child_info: &ChildInfo, key: &[u8]) -> Option>; /// Read child runtime storage. /// /// Returns an `Option` that holds the SCALE encoded hash. 
- fn child_storage( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option>; + fn child_storage(&self, child_info: &ChildInfo, key: &[u8]) -> Option>; /// Set storage entry `key` of current contract being called (effective immediately). fn set_storage(&mut self, key: Vec, value: Vec) { @@ -89,12 +85,7 @@ pub trait Externalities: ExtensionStore { } /// Set child storage entry `key` of current contract being called (effective immediately). - fn set_child_storage( - &mut self, - child_info: &ChildInfo, - key: Vec, - value: Vec, - ) { + fn set_child_storage(&mut self, child_info: &ChildInfo, key: Vec, value: Vec) { self.place_child_storage(child_info, key, Some(value)) } @@ -104,11 +95,7 @@ pub trait Externalities: ExtensionStore { } /// Clear a child storage entry (`key`) of current contract being called (effective immediately). - fn clear_child_storage( - &mut self, - child_info: &ChildInfo, - key: &[u8], - ) { + fn clear_child_storage(&mut self, child_info: &ChildInfo, key: &[u8]) { self.place_child_storage(child_info, key.to_vec(), None) } @@ -118,11 +105,7 @@ pub trait Externalities: ExtensionStore { } /// Whether a child storage entry exists. - fn exists_child_storage( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> bool { + fn exists_child_storage(&self, child_info: &ChildInfo, key: &[u8]) -> bool { self.child_storage(child_info, key).is_some() } @@ -130,11 +113,7 @@ pub trait Externalities: ExtensionStore { fn next_storage_key(&self, key: &[u8]) -> Option>; /// Returns the key immediately following the given key, if it exists, in child storage. - fn next_child_storage_key( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option>; + fn next_child_storage_key(&self, child_info: &ChildInfo, key: &[u8]) -> Option>; /// Clear an entire child storage. /// @@ -169,12 +148,7 @@ pub trait Externalities: ExtensionStore { fn place_storage(&mut self, key: Vec, value: Option>); /// Set or clear a child storage entry. 
- fn place_child_storage( - &mut self, - child_info: &ChildInfo, - key: Vec, - value: Option>, - ); + fn place_child_storage(&mut self, child_info: &ChildInfo, key: Vec, value: Option>); /// Get the trie root of the current storage map. /// @@ -189,19 +163,12 @@ pub trait Externalities: ExtensionStore { /// /// If the storage root equals the default hash as defined by the trie, the key in the top-level /// storage map will be removed. - fn child_storage_root( - &mut self, - child_info: &ChildInfo, - ) -> Vec; + fn child_storage_root(&mut self, child_info: &ChildInfo) -> Vec; /// Append storage item. /// /// This assumes specific format of the storage item. Also there is no way to undo this operation. - fn storage_append( - &mut self, - key: Vec, - value: Vec, - ); + fn storage_append(&mut self, key: Vec, value: Vec); /// Get the changes trie root of the current storage overlay at a block with given `parent`. /// diff --git a/primitives/externalities/src/scope_limited.rs b/primitives/externalities/src/scope_limited.rs index 3b5013ba8e7fe..ab8be1f3fc81e 100644 --- a/primitives/externalities/src/scope_limited.rs +++ b/primitives/externalities/src/scope_limited.rs @@ -25,7 +25,8 @@ environmental::environmental!(ext: trait Externalities); /// while executing the given closure [`with_externalities`] grants access to them. The externalities /// are only set for the same thread this function was called from. 
pub fn set_and_run_with_externalities(ext: &mut dyn Externalities, f: F) -> R - where F: FnOnce() -> R +where + F: FnOnce() -> R, { ext::using(ext, f) } diff --git a/primitives/finality-grandpa/src/lib.rs b/primitives/finality-grandpa/src/lib.rs index a3bd89fab01d5..1948c5cc859c1 100644 --- a/primitives/finality-grandpa/src/lib.rs +++ b/primitives/finality-grandpa/src/lib.rs @@ -25,13 +25,12 @@ extern crate alloc; #[cfg(feature = "std")] use serde::Serialize; -use codec::{Encode, Decode, Input, Codec}; +use codec::{Codec, Decode, Encode, Input}; use scale_info::TypeInfo; -use sp_runtime::{ConsensusEngineId, RuntimeDebug, traits::NumberFor}; -use sp_std::borrow::Cow; -use sp_std::vec::Vec; #[cfg(feature = "std")] -use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; +use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; +use sp_runtime::{traits::NumberFor, ConsensusEngineId, RuntimeDebug}; +use sp_std::{borrow::Cow, vec::Vec}; #[cfg(feature = "std")] use log::debug; @@ -40,7 +39,7 @@ use log::debug; pub const KEY_TYPE: sp_core::crypto::KeyTypeId = sp_application_crypto::key_types::GRANDPA; mod app { - use sp_application_crypto::{app_crypto, key_types::GRANDPA, ed25519}; + use sp_application_crypto::{app_crypto, ed25519, key_types::GRANDPA}; app_crypto!(ed25519, GRANDPA); } @@ -182,10 +181,7 @@ impl EquivocationProof { /// Create a new `EquivocationProof` for the given set id and using the /// given equivocation as proof. pub fn new(set_id: SetId, equivocation: Equivocation) -> Self { - EquivocationProof { - set_id, - equivocation, - } + EquivocationProof { set_id, equivocation } } /// Returns the set id at which the equivocation occurred. 
@@ -278,7 +274,7 @@ where if $equivocation.first.0.target_hash == $equivocation.second.0.target_hash && $equivocation.first.0.target_number == $equivocation.second.0.target_number { - return false; + return false } // check signatures on both votes are valid @@ -298,17 +294,17 @@ where report.set_id, ); - return valid_first && valid_second; + return valid_first && valid_second }; } match report.equivocation { Equivocation::Prevote(equivocation) => { check!(equivocation, grandpa::Message::Prevote); - } + }, Equivocation::Precommit(equivocation) => { check!(equivocation, grandpa::Message::Precommit); - } + }, } } @@ -391,8 +387,8 @@ where H: Encode, N: Encode, { - use sp_core::crypto::Public; use sp_application_crypto::AppKey; + use sp_core::crypto::Public; use sp_std::convert::TryInto; let encoded = localized_payload(round, set_id, &message); @@ -401,13 +397,13 @@ where AuthorityId::ID, &public.to_public_crypto_pair(), &encoded[..], - ).ok().flatten()?.try_into().ok()?; + ) + .ok() + .flatten()? + .try_into() + .ok()?; - Some(grandpa::SignedMessage { - message, - signature, - id: public, - }) + Some(grandpa::SignedMessage { message, signature, id: public }) } /// WASM function call to check for pending changes. @@ -458,7 +454,7 @@ impl<'a> Decode for VersionedAuthorityList<'a> { fn decode(value: &mut I) -> Result { let (version, authorities): (u8, AuthorityList) = Decode::decode(value)?; if version != AUTHORITIES_VERSION { - return Err("unknown Grandpa authorities version".into()); + return Err("unknown Grandpa authorities version".into()) } Ok(authorities.into()) } diff --git a/primitives/inherents/src/client_side.rs b/primitives/inherents/src/client_side.rs index 38639c5de3227..18877cae5f343 100644 --- a/primitives/inherents/src/client_side.rs +++ b/primitives/inherents/src/client_side.rs @@ -15,7 +15,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::{InherentData, Error, InherentIdentifier}; +use crate::{Error, InherentData, InherentIdentifier}; use sp_runtime::traits::Block as BlockT; /// Something that can create inherent data providers. @@ -44,7 +44,9 @@ impl CreateInherentDataProviders Fut + Sync + Send, - Fut: std::future::Future>> + Send + 'static, + Fut: std::future::Future>> + + Send + + 'static, IDP: InherentDataProvider + 'static, ExtraArgs: Send + 'static, { diff --git a/primitives/inherents/src/lib.rs b/primitives/inherents/src/lib.rs index f0b5fdc940a92..5d1246983bcf4 100644 --- a/primitives/inherents/src/lib.rs +++ b/primitives/inherents/src/lib.rs @@ -162,9 +162,12 @@ #![cfg_attr(not(feature = "std"), no_std)] #![warn(missing_docs)] -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; -use sp_std::{collections::btree_map::{BTreeMap, IntoIter, Entry}, vec::Vec}; +use sp_std::{ + collections::btree_map::{BTreeMap, Entry, IntoIter}, + vec::Vec, +}; #[cfg(feature = "std")] mod client_side; @@ -204,7 +207,7 @@ pub type InherentIdentifier = [u8; 8]; #[derive(Clone, Default, Encode, Decode)] pub struct InherentData { /// All inherent data encoded with parity-scale-codec and an identifier. - data: BTreeMap> + data: BTreeMap>, } impl InherentData { @@ -231,20 +234,14 @@ impl InherentData { entry.insert(inherent.encode()); Ok(()) }, - Entry::Occupied(_) => { - Err(Error::InherentDataExists(identifier)) - } + Entry::Occupied(_) => Err(Error::InherentDataExists(identifier)), } } /// Replace the data for an inherent. /// /// If it does not exist, the data is just inserted. 
- pub fn replace_data( - &mut self, - identifier: InherentIdentifier, - inherent: &I, - ) { + pub fn replace_data(&mut self, identifier: InherentIdentifier, inherent: &I) { self.data.insert(identifier, inherent.encode()); } @@ -260,11 +257,10 @@ impl InherentData { identifier: &InherentIdentifier, ) -> Result, Error> { match self.data.get(identifier) { - Some(inherent) => - I::decode(&mut &inherent[..]) - .map_err(|e| Error::DecodingFailed(e, *identifier)) - .map(Some), - None => Ok(None) + Some(inherent) => I::decode(&mut &inherent[..]) + .map_err(|e| Error::DecodingFailed(e, *identifier)) + .map(Some), + None => Ok(None), } } @@ -292,11 +288,7 @@ pub struct CheckInherentsResult { impl Default for CheckInherentsResult { fn default() -> Self { - Self { - okay: true, - errors: InherentData::new(), - fatal_error: false, - } + Self { okay: true, errors: InherentData::new(), fatal_error: false } } } @@ -370,8 +362,8 @@ impl CheckInherentsResult { impl PartialEq for CheckInherentsResult { fn eq(&self, other: &Self) -> bool { self.fatal_error == other.fatal_error && - self.okay == other.okay && - self.errors.data == other.errors.data + self.okay == other.okay && + self.errors.data == other.errors.data } } @@ -407,7 +399,7 @@ impl IsFatalError for MakeFatalError { #[cfg(test)] mod tests { use super::*; - use codec::{Encode, Decode}; + use codec::{Decode, Encode}; const TEST_INHERENT_0: InherentIdentifier = *b"testinh0"; const TEST_INHERENT_1: InherentIdentifier = *b"testinh1"; @@ -470,10 +462,7 @@ mod tests { let inherent_data = provider.create_inherent_data().unwrap(); - assert_eq!( - inherent_data.get_data::(&TEST_INHERENT_0).unwrap().unwrap(), - 42u32, - ); + assert_eq!(inherent_data.get_data::(&TEST_INHERENT_0).unwrap().unwrap(), 42u32,); } #[test] diff --git a/primitives/io/src/batch_verifier.rs b/primitives/io/src/batch_verifier.rs index 341df36c55649..b6da1d85907bd 100644 --- a/primitives/io/src/batch_verifier.rs +++ b/primitives/io/src/batch_verifier.rs @@ -17,9 
+17,12 @@ //! Batch/parallel verification. -use sp_core::{ed25519, sr25519, ecdsa, crypto::Pair, traits::SpawnNamed}; -use std::sync::{Arc, atomic::{AtomicBool, Ordering as AtomicOrdering}}; -use futures::{future::FutureExt, channel::oneshot}; +use futures::{channel::oneshot, future::FutureExt}; +use sp_core::{crypto::Pair, ecdsa, ed25519, sr25519, traits::SpawnNamed}; +use std::sync::{ + atomic::{AtomicBool, Ordering as AtomicOrdering}, + Arc, +}; #[derive(Debug, Clone)] struct Sr25519BatchItem { @@ -61,7 +64,9 @@ impl BatchVerifier { name: &'static str, ) -> bool { // there is already invalid transaction encountered - if self.invalid.load(AtomicOrdering::Relaxed) { return false; } + if self.invalid.load(AtomicOrdering::Relaxed) { + return false + } let invalid_clone = self.invalid.clone(); let (sender, receiver) = oneshot::channel(); @@ -78,7 +83,8 @@ impl BatchVerifier { log::warn!("Verification halted while result was pending"); invalid_clone.store(true, AtomicOrdering::Relaxed); } - }.boxed(), + } + .boxed(), ); true @@ -110,7 +116,9 @@ impl BatchVerifier { pub_key: sr25519::Public, message: Vec, ) -> bool { - if self.invalid.load(AtomicOrdering::Relaxed) { return false; } + if self.invalid.load(AtomicOrdering::Relaxed) { + return false + } self.sr25519_items.push(Sr25519BatchItem { signature, pub_key, message }); if self.sr25519_items.len() >= 128 { @@ -163,7 +171,7 @@ impl BatchVerifier { ); if !Self::verify_sr25519_batch(std::mem::take(&mut self.sr25519_items)) { - return false; + return false } if pending.len() > 0 { @@ -172,10 +180,12 @@ impl BatchVerifier { "substrate_batch_verify_join", async move { futures::future::join_all(pending).await; - sender.send(()) - .expect("Channel never panics if receiver is live. \ - Receiver is always live until received this data; qed. "); - }.boxed(), + sender.send(()).expect( + "Channel never panics if receiver is live. \ + Receiver is always live until received this data; qed. 
", + ); + } + .boxed(), ); if receiver.recv().is_err() { @@ -184,7 +194,7 @@ impl BatchVerifier { "Haven't received async result from verification task. Returning false.", ); - return false; + return false } } diff --git a/primitives/io/src/lib.rs b/primitives/io/src/lib.rs index 6fb25df3d02a5..d1aa9c489491e 100644 --- a/primitives/io/src/lib.rs +++ b/primitives/io/src/lib.rs @@ -18,14 +18,16 @@ //! I/O host interface for substrate runtime. #![warn(missing_docs)] - #![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(not(feature = "std"), feature(alloc_error_handler))] - -#![cfg_attr(feature = "std", - doc = "Substrate runtime standard library as compiled when linked with Rust's standard library.")] -#![cfg_attr(not(feature = "std"), - doc = "Substrate's runtime standard library as compiled without Rust's standard library.")] +#![cfg_attr( + feature = "std", + doc = "Substrate runtime standard library as compiled when linked with Rust's standard library." +)] +#![cfg_attr( + not(feature = "std"), + doc = "Substrate's runtime standard library as compiled without Rust's standard library." 
+)] use sp_std::vec::Vec; @@ -35,31 +37,35 @@ use tracing; #[cfg(feature = "std")] use sp_core::{ crypto::Pair, - traits::{TaskExecutorExt, RuntimeSpawnExt}, - offchain::{OffchainDbExt, OffchainWorkerExt, TransactionPoolExt}, hexdisplay::HexDisplay, + offchain::{OffchainDbExt, OffchainWorkerExt, TransactionPoolExt}, storage::ChildInfo, + traits::{RuntimeSpawnExt, TaskExecutorExt}, }; #[cfg(feature = "std")] use sp_keystore::{KeystoreExt, SyncCryptoStore}; use sp_core::{ - OpaquePeerId, crypto::KeyTypeId, ed25519, sr25519, ecdsa, H256, LogLevel, LogLevelFilter, + crypto::KeyTypeId, + ecdsa, ed25519, offchain::{ - Timestamp, HttpRequestId, HttpRequestStatus, HttpError, StorageKind, OpaqueNetworkState, + HttpError, HttpRequestId, HttpRequestStatus, OpaqueNetworkState, StorageKind, Timestamp, }, + sr25519, LogLevel, LogLevelFilter, OpaquePeerId, H256, }; #[cfg(feature = "std")] -use sp_trie::{TrieConfiguration, trie_types::Layout}; +use sp_trie::{trie_types::Layout, TrieConfiguration}; -use sp_runtime_interface::{runtime_interface, Pointer}; -use sp_runtime_interface::pass_by::{PassBy, PassByCodec}; +use sp_runtime_interface::{ + pass_by::{PassBy, PassByCodec}, + runtime_interface, Pointer, +}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; #[cfg(feature = "std")] -use sp_externalities::{ExternalitiesExt, Externalities}; +use sp_externalities::{Externalities, ExternalitiesExt}; #[cfg(feature = "std")] mod batch_verifier; @@ -167,7 +173,6 @@ pub trait Storage { } } - /// Append the encoded `value` to the storage item at `key`. /// /// The storage item needs to implement [`EncodeAppend`](codec::EncodeAppend). @@ -255,11 +260,7 @@ pub trait DefaultChildStorage { /// /// Parameter `storage_key` is the unprefixed location of the root of the child trie in the parent trie. /// Result is `None` if the value for `key` in the child storage can not be found. 
- fn get( - &self, - storage_key: &[u8], - key: &[u8], - ) -> Option> { + fn get(&self, storage_key: &[u8], key: &[u8]) -> Option> { let child_info = ChildInfo::new_default(storage_key); self.child_storage(&child_info, key).map(|s| s.to_vec()) } @@ -279,25 +280,19 @@ pub trait DefaultChildStorage { value_offset: u32, ) -> Option { let child_info = ChildInfo::new_default(storage_key); - self.child_storage(&child_info, key) - .map(|value| { - let value_offset = value_offset as usize; - let data = &value[value_offset.min(value.len())..]; - let written = std::cmp::min(data.len(), value_out.len()); - value_out[..written].copy_from_slice(&data[..written]); - data.len() as u32 - }) + self.child_storage(&child_info, key).map(|value| { + let value_offset = value_offset as usize; + let data = &value[value_offset.min(value.len())..]; + let written = std::cmp::min(data.len(), value_out.len()); + value_out[..written].copy_from_slice(&data[..written]); + data.len() as u32 + }) } /// Set a child storage value. /// /// Set `key` to `value` in the child storage denoted by `storage_key`. - fn set( - &mut self, - storage_key: &[u8], - key: &[u8], - value: &[u8], - ) { + fn set(&mut self, storage_key: &[u8], key: &[u8], value: &[u8]) { let child_info = ChildInfo::new_default(storage_key); self.set_child_storage(&child_info, key.to_vec(), value.to_vec()); } @@ -305,11 +300,7 @@ pub trait DefaultChildStorage { /// Clear a child storage key. /// /// For the default child storage at `storage_key`, clear value at `key`. - fn clear( - &mut self, - storage_key: &[u8], - key: &[u8], - ) { + fn clear(&mut self, storage_key: &[u8], key: &[u8]) { let child_info = ChildInfo::new_default(storage_key); self.clear_child_storage(&child_info, key); } @@ -318,10 +309,7 @@ pub trait DefaultChildStorage { /// /// If it exists, the child storage for `storage_key` /// is removed. 
- fn storage_kill( - &mut self, - storage_key: &[u8], - ) { + fn storage_kill(&mut self, storage_key: &[u8]) { let child_info = ChildInfo::new_default(storage_key); self.kill_child_storage(&child_info, None); } @@ -352,11 +340,7 @@ pub trait DefaultChildStorage { /// Check a child storage key. /// /// Check whether the given `key` exists in default child defined at `storage_key`. - fn exists( - &self, - storage_key: &[u8], - key: &[u8], - ) -> bool { + fn exists(&self, storage_key: &[u8], key: &[u8]) -> bool { let child_info = ChildInfo::new_default(storage_key); self.exists_child_storage(&child_info, key) } @@ -364,11 +348,7 @@ pub trait DefaultChildStorage { /// Clear child default key by prefix. /// /// Clear the child storage of each key-value pair where the key starts with the given `prefix`. - fn clear_prefix( - &mut self, - storage_key: &[u8], - prefix: &[u8], - ) { + fn clear_prefix(&mut self, storage_key: &[u8], prefix: &[u8]) { let child_info = ChildInfo::new_default(storage_key); let _ = self.clear_child_prefix(&child_info, prefix, None); } @@ -397,10 +377,7 @@ pub trait DefaultChildStorage { /// The hashing algorithm is defined by the `Block`. /// /// Returns a `Vec` that holds the SCALE encoded hash. - fn root( - &mut self, - storage_key: &[u8], - ) -> Vec { + fn root(&mut self, storage_key: &[u8]) -> Vec { let child_info = ChildInfo::new_default(storage_key); self.child_storage_root(&child_info) } @@ -408,11 +385,7 @@ pub trait DefaultChildStorage { /// Child storage key iteration. /// /// Get the next key in storage after the given one in lexicographic order in child storage. 
- fn next_key( - &mut self, - storage_key: &[u8], - key: &[u8], - ) -> Option> { + fn next_key(&mut self, storage_key: &[u8], key: &[u8]) -> Option> { let child_info = ChildInfo::new_default(storage_key); self.next_child_storage_key(&child_info, key) } @@ -447,7 +420,8 @@ pub trait Trie { &root, proof, &[(key, Some(value))], - ).is_ok() + ) + .is_ok() } /// Verify trie proof @@ -456,7 +430,8 @@ pub trait Trie { &root, proof, &[(key, Some(value))], - ).is_ok() + ) + .is_ok() } } @@ -516,7 +491,7 @@ pub trait Misc { err, ); None - } + }, } } } @@ -526,7 +501,8 @@ pub trait Misc { pub trait Crypto { /// Returns all `ed25519` public keys for the given key id from the keystore. fn ed25519_public_keys(&mut self, id: KeyTypeId) -> Vec { - let keystore = &***self.extension::() + let keystore = &***self + .extension::() .expect("No `keystore` associated for the current context!"); SyncCryptoStore::ed25519_public_keys(keystore, id) } @@ -539,7 +515,8 @@ pub trait Crypto { /// Returns the public key. fn ed25519_generate(&mut self, id: KeyTypeId, seed: Option>) -> ed25519::Public { let seed = seed.as_ref().map(|s| std::str::from_utf8(&s).expect("Seed is valid utf8!")); - let keystore = &***self.extension::() + let keystore = &***self + .extension::() .expect("No `keystore` associated for the current context!"); SyncCryptoStore::ed25519_generate_new(keystore, id, seed) .expect("`ed25519_generate` failed") @@ -555,7 +532,8 @@ pub trait Crypto { pub_key: &ed25519::Public, msg: &[u8], ) -> Option { - let keystore = &***self.extension::() + let keystore = &***self + .extension::() .expect("No `keystore` associated for the current context!"); SyncCryptoStore::sign_with(keystore, id, &pub_key.into(), msg) .ok() @@ -566,11 +544,7 @@ pub trait Crypto { /// Verify `ed25519` signature. /// /// Returns `true` when the verification was successful. 
- fn ed25519_verify( - sig: &ed25519::Signature, - msg: &[u8], - pub_key: &ed25519::Public, - ) -> bool { + fn ed25519_verify(sig: &ed25519::Signature, msg: &[u8], pub_key: &ed25519::Public) -> bool { ed25519::Pair::verify(sig, msg, pub_key) } @@ -588,20 +562,16 @@ pub trait Crypto { msg: &[u8], pub_key: &ed25519::Public, ) -> bool { - self.extension::().map( - |extension| extension.push_ed25519(sig.clone(), pub_key.clone(), msg.to_vec()) - ).unwrap_or_else(|| ed25519_verify(sig, msg, pub_key)) + self.extension::() + .map(|extension| extension.push_ed25519(sig.clone(), pub_key.clone(), msg.to_vec())) + .unwrap_or_else(|| ed25519_verify(sig, msg, pub_key)) } /// Verify `sr25519` signature. /// /// Returns `true` when the verification was successful. #[version(2)] - fn sr25519_verify( - sig: &sr25519::Signature, - msg: &[u8], - pub_key: &sr25519::Public, - ) -> bool { + fn sr25519_verify(sig: &sr25519::Signature, msg: &[u8], pub_key: &sr25519::Public) -> bool { sr25519::Pair::verify(sig, msg, pub_key) } @@ -619,14 +589,15 @@ pub trait Crypto { msg: &[u8], pub_key: &sr25519::Public, ) -> bool { - self.extension::().map( - |extension| extension.push_sr25519(sig.clone(), pub_key.clone(), msg.to_vec()) - ).unwrap_or_else(|| sr25519_verify(sig, msg, pub_key)) + self.extension::() + .map(|extension| extension.push_sr25519(sig.clone(), pub_key.clone(), msg.to_vec())) + .unwrap_or_else(|| sr25519_verify(sig, msg, pub_key)) } /// Start verification extension. fn start_batch_verify(&mut self) { - let scheduler = self.extension::() + let scheduler = self + .extension::() .expect("No task executor associated with the current context!") .clone(); @@ -641,7 +612,8 @@ pub trait Crypto { /// /// Will panic if no `VerificationExt` is registered (`start_batch_verify` was not called). 
fn finish_batch_verify(&mut self) -> bool { - let result = self.extension::() + let result = self + .extension::() .expect("`finish_batch_verify` should only be called after `start_batch_verify`") .verify_and_clear(); @@ -653,7 +625,8 @@ pub trait Crypto { /// Returns all `sr25519` public keys for the given key id from the keystore. fn sr25519_public_keys(&mut self, id: KeyTypeId) -> Vec { - let keystore = &*** self.extension::() + let keystore = &***self + .extension::() .expect("No `keystore` associated for the current context!"); SyncCryptoStore::sr25519_public_keys(keystore, id) } @@ -666,7 +639,8 @@ pub trait Crypto { /// Returns the public key. fn sr25519_generate(&mut self, id: KeyTypeId, seed: Option>) -> sr25519::Public { let seed = seed.as_ref().map(|s| std::str::from_utf8(&s).expect("Seed is valid utf8!")); - let keystore = &***self.extension::() + let keystore = &***self + .extension::() .expect("No `keystore` associated for the current context!"); SyncCryptoStore::sr25519_generate_new(keystore, id, seed) .expect("`sr25519_generate` failed") @@ -682,7 +656,8 @@ pub trait Crypto { pub_key: &sr25519::Public, msg: &[u8], ) -> Option { - let keystore = &***self.extension::() + let keystore = &***self + .extension::() .expect("No `keystore` associated for the current context!"); SyncCryptoStore::sign_with(keystore, id, &pub_key.into(), msg) .ok() @@ -700,7 +675,8 @@ pub trait Crypto { /// Returns all `ecdsa` public keys for the given key id from the keystore. fn ecdsa_public_keys(&mut self, id: KeyTypeId) -> Vec { - let keystore = &***self.extension::() + let keystore = &***self + .extension::() .expect("No `keystore` associated for the current context!"); SyncCryptoStore::ecdsa_public_keys(keystore, id) } @@ -713,10 +689,10 @@ pub trait Crypto { /// Returns the public key. 
fn ecdsa_generate(&mut self, id: KeyTypeId, seed: Option>) -> ecdsa::Public { let seed = seed.as_ref().map(|s| std::str::from_utf8(&s).expect("Seed is valid utf8!")); - let keystore = &***self.extension::() + let keystore = &***self + .extension::() .expect("No `keystore` associated for the current context!"); - SyncCryptoStore::ecdsa_generate_new(keystore, id, seed) - .expect("`ecdsa_generate` failed") + SyncCryptoStore::ecdsa_generate_new(keystore, id, seed).expect("`ecdsa_generate` failed") } /// Sign the given `msg` with the `ecdsa` key that corresponds to the given public key and @@ -729,7 +705,8 @@ pub trait Crypto { pub_key: &ecdsa::Public, msg: &[u8], ) -> Option { - let keystore = &***self.extension::() + let keystore = &***self + .extension::() .expect("No `keystore` associated for the current context!"); SyncCryptoStore::sign_with(keystore, id, &pub_key.into(), msg) .ok() @@ -740,11 +717,7 @@ pub trait Crypto { /// Verify `ecdsa` signature. /// /// Returns `true` when the verification was successful. - fn ecdsa_verify( - sig: &ecdsa::Signature, - msg: &[u8], - pub_key: &ecdsa::Public, - ) -> bool { + fn ecdsa_verify(sig: &ecdsa::Signature, msg: &[u8], pub_key: &ecdsa::Public) -> bool { ecdsa::Pair::verify(sig, msg, pub_key) } @@ -762,9 +735,9 @@ pub trait Crypto { msg: &[u8], pub_key: &ecdsa::Public, ) -> bool { - self.extension::().map( - |extension| extension.push_ecdsa(sig.clone(), pub_key.clone(), msg.to_vec()) - ).unwrap_or_else(|| ecdsa_verify(sig, msg, pub_key)) + self.extension::() + .map(|extension| extension.push_ecdsa(sig.clone(), pub_key.clone(), msg.to_vec())) + .unwrap_or_else(|| ecdsa_verify(sig, msg, pub_key)) } /// Verify and recover a SECP256k1 ECDSA signature. 
@@ -778,10 +751,11 @@ pub trait Crypto { sig: &[u8; 65], msg: &[u8; 32], ) -> Result<[u8; 64], EcdsaVerifyError> { - let rs = secp256k1::Signature::parse_slice(&sig[0..64]) - .map_err(|_| EcdsaVerifyError::BadRS)?; - let v = secp256k1::RecoveryId::parse(if sig[64] > 26 { sig[64] - 27 } else { sig[64] } as u8) - .map_err(|_| EcdsaVerifyError::BadV)?; + let rs = + secp256k1::Signature::parse_slice(&sig[0..64]).map_err(|_| EcdsaVerifyError::BadRS)?; + let v = + secp256k1::RecoveryId::parse(if sig[64] > 26 { sig[64] - 27 } else { sig[64] } as u8) + .map_err(|_| EcdsaVerifyError::BadV)?; let pubkey = secp256k1::recover(&secp256k1::Message::parse(msg), &rs, &v) .map_err(|_| EcdsaVerifyError::BadSignature)?; let mut res = [0u8; 64]; @@ -799,10 +773,11 @@ pub trait Crypto { sig: &[u8; 65], msg: &[u8; 32], ) -> Result<[u8; 33], EcdsaVerifyError> { - let rs = secp256k1::Signature::parse_slice(&sig[0..64]) - .map_err(|_| EcdsaVerifyError::BadRS)?; - let v = secp256k1::RecoveryId::parse(if sig[64] > 26 { sig[64] - 27 } else { sig[64] } as u8) - .map_err(|_| EcdsaVerifyError::BadV)?; + let rs = + secp256k1::Signature::parse_slice(&sig[0..64]).map_err(|_| EcdsaVerifyError::BadRS)?; + let v = + secp256k1::RecoveryId::parse(if sig[64] > 26 { sig[64] - 27 } else { sig[64] } as u8) + .map_err(|_| EcdsaVerifyError::BadV)?; let pubkey = secp256k1::recover(&secp256k1::Message::parse(msg), &rs, &v) .map_err(|_| EcdsaVerifyError::BadSignature)?; Ok(pubkey.serialize_compressed()) @@ -907,8 +882,10 @@ pub trait Offchain { /// The transaction will end up in the pool. 
fn submit_transaction(&mut self, data: Vec) -> Result<(), ()> { self.extension::() - .expect("submit_transaction can be called only in the offchain call context with - TransactionPool capabilities enabled") + .expect( + "submit_transaction can be called only in the offchain call context with + TransactionPool capabilities enabled", + ) .submit_transaction(data) } @@ -949,8 +926,10 @@ pub trait Offchain { /// offchain worker tasks running on the same machine. It IS persisted between runs. fn local_storage_set(&mut self, kind: StorageKind, key: &[u8], value: &[u8]) { self.extension::() - .expect("local_storage_set can be called only in the offchain call context with - OffchainDb extension") + .expect( + "local_storage_set can be called only in the offchain call context with + OffchainDb extension", + ) .local_storage_set(kind, key, value) } @@ -960,8 +939,10 @@ pub trait Offchain { /// offchain worker tasks running on the same machine. It IS persisted between runs. fn local_storage_clear(&mut self, kind: StorageKind, key: &[u8]) { self.extension::() - .expect("local_storage_clear can be called only in the offchain call context with - OffchainDb extension") + .expect( + "local_storage_clear can be called only in the offchain call context with + OffchainDb extension", + ) .local_storage_clear(kind, key) } @@ -982,14 +963,11 @@ pub trait Offchain { new_value: &[u8], ) -> bool { self.extension::() - .expect("local_storage_compare_and_set can be called only in the offchain call context - with OffchainDb extension") - .local_storage_compare_and_set( - kind, - key, - old_value.as_deref(), - new_value, + .expect( + "local_storage_compare_and_set can be called only in the offchain call context + with OffchainDb extension", ) + .local_storage_compare_and_set(kind, key, old_value.as_deref(), new_value) } /// Gets a value from the local storage. @@ -999,8 +977,10 @@ pub trait Offchain { /// offchain worker tasks running on the same machine. It IS persisted between runs. 
fn local_storage_get(&mut self, kind: StorageKind, key: &[u8]) -> Option> { self.extension::() - .expect("local_storage_get can be called only in the offchain call context with - OffchainDb extension") + .expect( + "local_storage_get can be called only in the offchain call context with + OffchainDb extension", + ) .local_storage_get(kind, key) } @@ -1128,12 +1108,7 @@ pub trait Logging { /// Instead of using directly, prefer setting up `RuntimeLogger` and using `log` macros. fn log(level: LogLevel, target: &str, message: &[u8]) { if let Ok(message) = std::str::from_utf8(message) { - log::log!( - target: target, - log::Level::from(level), - "{}", - message, - ) + log::log!(target: target, log::Level::from(level), "{}", message,) } } @@ -1153,7 +1128,6 @@ impl PassBy for Crossing { } impl Crossing { - /// Convert into the inner type pub fn into_inner(self) -> T { self.0 @@ -1162,12 +1136,12 @@ impl Crossing { // useful for testing impl core::default::Default for Crossing - where T: core::default::Default + Encode + Decode +where + T: core::default::Default + Encode + Decode, { fn default() -> Self { Self(Default::default()) } - } /// Interface to provide tracing facilities for wasm. Modelled after tokios `tracing`-crate @@ -1184,9 +1158,7 @@ pub trait WasmTracing { /// chose to cache the result for the execution of the entire block. fn enabled(&mut self, metadata: Crossing) -> bool { let metadata: &tracing_core::metadata::Metadata<'static> = (&metadata.into_inner()).into(); - tracing::dispatcher::get_default(|d| { - d.enabled(metadata) - }) + tracing::dispatcher::get_default(|d| d.enabled(metadata)) } /// Open a new span with the given attributes. Return the u64 Id of the span. 
@@ -1205,9 +1177,7 @@ pub trait WasmTracing { d.enter(&final_id); final_id.into_u64() }), - _ => { - 0 - } + _ => 0, } } @@ -1226,19 +1196,18 @@ pub trait WasmTracing { } } -#[cfg(all(not(feature="std"), feature="with-tracing"))] +#[cfg(all(not(feature = "std"), feature = "with-tracing"))] mod tracing_setup { + use super::{wasm_tracing, Crossing}; use core::sync::atomic::{AtomicBool, Ordering}; use tracing_core::{ - dispatcher::{Dispatch, set_global_default}, - span::{Id, Record, Attributes}, - Metadata, Event, + dispatcher::{set_global_default, Dispatch}, + span::{Attributes, Id, Record}, + Event, Metadata, }; - use super::{wasm_tracing, Crossing}; static TRACING_SET: AtomicBool = AtomicBool::new(false); - /// The PassingTracingSubscriber implements `tracing_core::Subscriber` /// and pushes the information across the runtime interface to the host struct PassingTracingSubsciber; @@ -1256,12 +1225,12 @@ mod tracing_setup { /// Not implemented! We do not support recording values later /// Will panic when used. fn record(&self, span: &Id, values: &Record<'_>) { - unimplemented!{} // this usage is not supported + unimplemented! {} // this usage is not supported } /// Not implemented! We do not support recording values later /// Will panic when used. fn record_follows_from(&self, span: &Id, follows: &Id) { - unimplemented!{ } // this usage is not supported + unimplemented! {} // this usage is not supported } fn event(&self, event: &Event<'_>) { wasm_tracing::event(Crossing(event.into())) @@ -1271,7 +1240,6 @@ mod tracing_setup { } } - /// Initialize tracing of sp_tracing on wasm with `with-tracing` enabled. /// Can be called multiple times from within the same process and will only /// set the global bridging subscriber once. 
@@ -1284,11 +1252,11 @@ mod tracing_setup { } } -#[cfg(not(all(not(feature="std"), feature="with-tracing")))] +#[cfg(not(all(not(feature = "std"), feature = "with-tracing")))] mod tracing_setup { /// Initialize tracing of sp_tracing not necessary – noop. To enable build /// without std and with the `with-tracing`-feature. - pub fn init_tracing() { } + pub fn init_tracing() {} } pub use tracing_setup::init_tracing; @@ -1319,14 +1287,16 @@ pub trait Sandbox { return_val_len: u32, state_ptr: Pointer, ) -> u32 { - self.sandbox().invoke( - instance_idx, - &function, - &args, - return_val_ptr, - return_val_len, - state_ptr.into(), - ).expect("Failed to invoke function with sandbox") + self.sandbox() + .invoke( + instance_idx, + &function, + &args, + return_val_ptr, + return_val_len, + state_ptr.into(), + ) + .expect("Failed to invoke function with sandbox") } /// Create a new memory instance with the given `initial` and `maximum` size. @@ -1364,20 +1334,30 @@ pub trait Sandbox { /// Teardown the memory instance with the given `memory_idx`. fn memory_teardown(&mut self, memory_idx: u32) { - self.sandbox().memory_teardown(memory_idx).expect("Failed to teardown memory with sandbox") + self.sandbox() + .memory_teardown(memory_idx) + .expect("Failed to teardown memory with sandbox") } /// Teardown the sandbox instance with the given `instance_idx`. fn instance_teardown(&mut self, instance_idx: u32) { - self.sandbox().instance_teardown(instance_idx).expect("Failed to teardown sandbox instance") + self.sandbox() + .instance_teardown(instance_idx) + .expect("Failed to teardown sandbox instance") } /// Get the value from a global with the given `name`. The sandbox is determined by the given /// `instance_idx`. /// /// Returns `Some(_)` when the requested global variable could be found. 
- fn get_global_val(&mut self, instance_idx: u32, name: &str) -> Option { - self.sandbox().get_global_val(instance_idx, name).expect("Failed to get global from sandbox") + fn get_global_val( + &mut self, + instance_idx: u32, + name: &str, + ) -> Option { + self.sandbox() + .get_global_val(instance_idx, name) + .expect("Failed to get global from sandbox") } } @@ -1390,11 +1370,13 @@ pub trait RuntimeTasks { /// /// This should not be used directly. Use `sp_tasks::spawn` instead. fn spawn(dispatcher_ref: u32, entry: u32, payload: Vec) -> u64 { - sp_externalities::with_externalities(|mut ext|{ - let runtime_spawn = ext.extension::() + sp_externalities::with_externalities(|mut ext| { + let runtime_spawn = ext + .extension::() .expect("Cannot spawn without dynamic runtime dispatcher (RuntimeSpawnExt)"); runtime_spawn.spawn_call(dispatcher_ref, entry, payload) - }).expect("`RuntimeTasks::spawn`: called outside of externalities context") + }) + .expect("`RuntimeTasks::spawn`: called outside of externalities context") } /// Wasm host function for joining a task. @@ -1402,12 +1384,14 @@ pub trait RuntimeTasks { /// This should not be used directly. Use `join` of `sp_tasks::spawn` result instead. fn join(handle: u64) -> Vec { sp_externalities::with_externalities(|mut ext| { - let runtime_spawn = ext.extension::() + let runtime_spawn = ext + .extension::() .expect("Cannot join without dynamic runtime dispatcher (RuntimeSpawnExt)"); runtime_spawn.join(handle) - }).expect("`RuntimeTasks::join`: called outside of externalities context") + }) + .expect("`RuntimeTasks::join`: called outside of externalities context") } - } +} /// Allocator used by Substrate when executing the Wasm runtime. 
#[cfg(not(feature = "std"))] @@ -1483,10 +1467,8 @@ pub type SubstrateHostFunctions = ( #[cfg(test)] mod tests { use super::*; + use sp_core::{map, storage::Storage, testing::TaskExecutor, traits::TaskExecutorExt}; use sp_state_machine::BasicExternalities; - use sp_core::{ - storage::Storage, map, traits::TaskExecutorExt, testing::TaskExecutor, - }; use std::any::TypeId; #[test] @@ -1542,7 +1524,10 @@ mod tests { }); t.execute_with(|| { - assert!(matches!(storage::clear_prefix(b":abc", None), KillStorageResult::AllRemoved(2))); + assert!(matches!( + storage::clear_prefix(b":abc", None), + KillStorageResult::AllRemoved(2) + )); assert!(storage::get(b":a").is_some()); assert!(storage::get(b":abdd").is_some()); @@ -1583,11 +1568,7 @@ mod tests { } // push invlaid - crypto::sr25519_batch_verify( - &Default::default(), - &Vec::new(), - &Default::default(), - ); + crypto::sr25519_batch_verify(&Default::default(), &Vec::new(), &Default::default()); assert!(!crypto::finish_batch_verify()); crypto::start_batch_verify(); @@ -1607,11 +1588,7 @@ mod tests { ext.execute_with(|| { // invalid ed25519 signature crypto::start_batch_verify(); - crypto::ed25519_batch_verify( - &Default::default(), - &Vec::new(), - &Default::default(), - ); + crypto::ed25519_batch_verify(&Default::default(), &Vec::new(), &Default::default()); assert!(!crypto::finish_batch_verify()); // 2 valid ed25519 signatures @@ -1637,11 +1614,7 @@ mod tests { let signature = pair.sign(msg); crypto::ed25519_batch_verify(&signature, msg, &pair.public()); - crypto::ed25519_batch_verify( - &Default::default(), - &Vec::new(), - &Default::default(), - ); + crypto::ed25519_batch_verify(&Default::default(), &Vec::new(), &Default::default()); assert!(!crypto::finish_batch_verify()); @@ -1673,11 +1646,7 @@ mod tests { let signature = pair.sign(msg); crypto::sr25519_batch_verify(&signature, msg, &pair.public()); - crypto::sr25519_batch_verify( - &Default::default(), - &Vec::new(), - &Default::default(), - ); + 
crypto::sr25519_batch_verify(&Default::default(), &Vec::new(), &Default::default()); assert!(!crypto::finish_batch_verify()); }); diff --git a/primitives/keyring/src/ed25519.rs b/primitives/keyring/src/ed25519.rs index c9dd70d63d5c9..2230013e84192 100644 --- a/primitives/keyring/src/ed25519.rs +++ b/primitives/keyring/src/ed25519.rs @@ -17,11 +17,14 @@ //! Support code for the runtime. A set of test accounts. -use std::{collections::HashMap, ops::Deref}; use lazy_static::lazy_static; -use sp_core::{ed25519::{Pair, Public, Signature}, Pair as PairT, Public as PublicT, H256}; pub use sp_core::ed25519; +use sp_core::{ + ed25519::{Pair, Public, Signature}, + Pair as PairT, Public as PublicT, H256, +}; use sp_runtime::AccountId32; +use std::{collections::HashMap, ops::Deref}; /// Set of test accounts. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, strum::Display, strum::EnumIter)] @@ -79,7 +82,7 @@ impl Keyring { } /// Returns an iterator over all test accounts. - pub fn iter() -> impl Iterator { + pub fn iter() -> impl Iterator { ::iter() } @@ -114,13 +117,10 @@ impl From for sp_runtime::MultiSigner { } lazy_static! 
{ - static ref PRIVATE_KEYS: HashMap = { - Keyring::iter().map(|i| (i, i.pair())).collect() - }; - - static ref PUBLIC_KEYS: HashMap = { - PRIVATE_KEYS.iter().map(|(&name, pair)| (name, pair.public())).collect() - }; + static ref PRIVATE_KEYS: HashMap = + { Keyring::iter().map(|i| (i, i.pair())).collect() }; + static ref PUBLIC_KEYS: HashMap = + { PRIVATE_KEYS.iter().map(|(&name, pair)| (name, pair.public())).collect() }; } impl From for Public { @@ -185,26 +185,20 @@ mod tests { #[test] fn should_work() { - assert!( - Pair::verify( - &Keyring::Alice.sign(b"I am Alice!"), - b"I am Alice!", - &Keyring::Alice.public(), - ) - ); - assert!( - !Pair::verify( - &Keyring::Alice.sign(b"I am Alice!"), - b"I am Bob!", - &Keyring::Alice.public(), - ) - ); - assert!( - !Pair::verify( - &Keyring::Alice.sign(b"I am Alice!"), - b"I am Alice!", - &Keyring::Bob.public(), - ) - ); + assert!(Pair::verify( + &Keyring::Alice.sign(b"I am Alice!"), + b"I am Alice!", + &Keyring::Alice.public(), + )); + assert!(!Pair::verify( + &Keyring::Alice.sign(b"I am Alice!"), + b"I am Bob!", + &Keyring::Alice.public(), + )); + assert!(!Pair::verify( + &Keyring::Alice.sign(b"I am Alice!"), + b"I am Alice!", + &Keyring::Bob.public(), + )); } } diff --git a/primitives/keyring/src/sr25519.rs b/primitives/keyring/src/sr25519.rs index a4f43be07f07d..524be7ee51fbb 100644 --- a/primitives/keyring/src/sr25519.rs +++ b/primitives/keyring/src/sr25519.rs @@ -17,12 +17,14 @@ //! Support code for the runtime. A set of test accounts. -use std::collections::HashMap; -use std::ops::Deref; use lazy_static::lazy_static; -use sp_core::{sr25519::{Pair, Public, Signature}, Pair as PairT, Public as PublicT, H256}; pub use sp_core::sr25519; +use sp_core::{ + sr25519::{Pair, Public, Signature}, + Pair as PairT, Public as PublicT, H256, +}; use sp_runtime::AccountId32; +use std::{collections::HashMap, ops::Deref}; /// Set of test accounts. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, strum::Display, strum::EnumIter)] @@ -80,7 +82,7 @@ impl Keyring { } /// Returns an iterator over all test accounts. - pub fn iter() -> impl Iterator { + pub fn iter() -> impl Iterator { ::iter() } @@ -135,19 +137,16 @@ impl std::str::FromStr for Keyring { "ferdie" => Ok(Keyring::Ferdie), "one" => Ok(Keyring::One), "two" => Ok(Keyring::Two), - _ => Err(ParseKeyringError) + _ => Err(ParseKeyringError), } } } lazy_static! { - static ref PRIVATE_KEYS: HashMap = { - Keyring::iter().map(|i| (i, i.pair())).collect() - }; - - static ref PUBLIC_KEYS: HashMap = { - PRIVATE_KEYS.iter().map(|(&name, pair)| (name, pair.public())).collect() - }; + static ref PRIVATE_KEYS: HashMap = + { Keyring::iter().map(|i| (i, i.pair())).collect() }; + static ref PUBLIC_KEYS: HashMap = + { PRIVATE_KEYS.iter().map(|(&name, pair)| (name, pair.public())).collect() }; } impl From for AccountId32 { @@ -212,26 +211,20 @@ mod tests { #[test] fn should_work() { - assert!( - Pair::verify( - &Keyring::Alice.sign(b"I am Alice!"), - b"I am Alice!", - &Keyring::Alice.public(), - ) - ); - assert!( - !Pair::verify( - &Keyring::Alice.sign(b"I am Alice!"), - b"I am Bob!", - &Keyring::Alice.public(), - ) - ); - assert!( - !Pair::verify( - &Keyring::Alice.sign(b"I am Alice!"), - b"I am Alice!", - &Keyring::Bob.public(), - ) - ); + assert!(Pair::verify( + &Keyring::Alice.sign(b"I am Alice!"), + b"I am Alice!", + &Keyring::Alice.public(), + )); + assert!(!Pair::verify( + &Keyring::Alice.sign(b"I am Alice!"), + b"I am Bob!", + &Keyring::Alice.public(), + )); + assert!(!Pair::verify( + &Keyring::Alice.sign(b"I am Alice!"), + b"I am Alice!", + &Keyring::Bob.public(), + )); } } diff --git a/primitives/keystore/src/lib.rs b/primitives/keystore/src/lib.rs index cccb390d34ba1..c45e8a6f5d2be 100644 --- a/primitives/keystore/src/lib.rs +++ b/primitives/keystore/src/lib.rs @@ -19,30 +19,30 @@ pub mod testing; pub mod vrf; -use std::sync::Arc; +use 
crate::vrf::{VRFSignature, VRFTranscriptData}; use async_trait::async_trait; use futures::{executor::block_on, future::join_all}; use sp_core::{ - crypto::{KeyTypeId, CryptoTypePublicPair}, - ed25519, sr25519, ecdsa, + crypto::{CryptoTypePublicPair, KeyTypeId}, + ecdsa, ed25519, sr25519, }; -use crate::vrf::{VRFTranscriptData, VRFSignature}; +use std::sync::Arc; /// CryptoStore error #[derive(Debug, derive_more::Display)] pub enum Error { /// Public key type is not supported - #[display(fmt="Key not supported: {:?}", _0)] + #[display(fmt = "Key not supported: {:?}", _0)] KeyNotSupported(KeyTypeId), /// Validation error - #[display(fmt="Validation error: {}", _0)] + #[display(fmt = "Validation error: {}", _0)] ValidationError(String), /// Keystore unavailable - #[display(fmt="Keystore unavailable")] + #[display(fmt = "Keystore unavailable")] Unavailable, /// Programming errors - #[display(fmt="An unknown keystore error occurred: {}", _0)] - Other(String) + #[display(fmt = "An unknown keystore error occurred: {}", _0)] + Other(String), } /// Something that generates, stores and provides access to keys. @@ -91,12 +91,7 @@ pub trait CryptoStore: Send + Sync { /// Places it into the file system store. /// /// `Err` if there's some sort of weird filesystem error, but should generally be `Ok`. 
- async fn insert_unknown( - &self, - id: KeyTypeId, - suri: &str, - public: &[u8] - ) -> Result<(), ()>; + async fn insert_unknown(&self, id: KeyTypeId, suri: &str, public: &[u8]) -> Result<(), ()>; /// Find intersection between provided keys and supported keys /// @@ -105,7 +100,7 @@ pub trait CryptoStore: Send + Sync { async fn supported_keys( &self, id: KeyTypeId, - keys: Vec + keys: Vec, ) -> Result, Error>; /// List all supported keys /// @@ -142,14 +137,14 @@ pub trait CryptoStore: Send + Sync { &self, id: KeyTypeId, keys: Vec, - msg: &[u8] + msg: &[u8], ) -> Result)>, Error> { if keys.len() == 1 { - return Ok(self.sign_with(id, &keys[0], msg).await?.map(|s| (keys[0].clone(), s))); + return Ok(self.sign_with(id, &keys[0], msg).await?.map(|s| (keys[0].clone(), s))) } else { for k in self.supported_keys(id, keys).await? { if let Ok(Some(sign)) = self.sign_with(id, &k, msg).await { - return Ok(Some((k, sign))); + return Ok(Some((k, sign))) } } } @@ -170,8 +165,7 @@ pub trait CryptoStore: Send + Sync { keys: Vec, msg: &[u8], ) -> Result>, Error>>, ()> { - let futs = keys.iter() - .map(|k| self.sign_with(id, k, msg)); + let futs = keys.iter().map(|k| self.sign_with(id, k, msg)); Ok(join_all(futs).await) } @@ -202,8 +196,8 @@ pub trait CryptoStore: Send + Sync { /// in turn, used for signing the provided pre-hashed message. /// /// The `msg` argument provided should be a hashed message for which an - /// ECDSA signature should be generated. - /// + /// ECDSA signature should be generated. + /// /// Returns an [`ecdsa::Signature`] or `None` in case the given `id` and /// `public` combination doesn't exist in the keystore. An `Err` will be /// returned if generating the signature itself failed. @@ -260,11 +254,8 @@ pub trait SyncCryptoStore: CryptoStore + Send + Sync { /// If the given seed is `Some(_)`, the key pair will only be stored in memory. /// /// Returns the public key of the generated key pair. 
- fn ecdsa_generate_new( - &self, - id: KeyTypeId, - seed: Option<&str>, - ) -> Result; + fn ecdsa_generate_new(&self, id: KeyTypeId, seed: Option<&str>) + -> Result; /// Insert a new key. This doesn't require any known of the crypto; but a public key must be /// manually provided. @@ -281,7 +272,7 @@ pub trait SyncCryptoStore: CryptoStore + Send + Sync { fn supported_keys( &self, id: KeyTypeId, - keys: Vec + keys: Vec, ) -> Result, Error>; /// List all supported keys @@ -321,16 +312,16 @@ pub trait SyncCryptoStore: CryptoStore + Send + Sync { &self, id: KeyTypeId, keys: Vec, - msg: &[u8] + msg: &[u8], ) -> Result)>, Error> { if keys.len() == 1 { return Ok( - SyncCryptoStore::sign_with(self, id, &keys[0], msg)?.map(|s| (keys[0].clone(), s)), + SyncCryptoStore::sign_with(self, id, &keys[0], msg)?.map(|s| (keys[0].clone(), s)) ) } else { for k in SyncCryptoStore::supported_keys(self, id, keys)? { if let Ok(Some(sign)) = SyncCryptoStore::sign_with(self, id, &k, msg) { - return Ok(Some((k, sign))); + return Ok(Some((k, sign))) } } } @@ -380,8 +371,8 @@ pub trait SyncCryptoStore: CryptoStore + Send + Sync { /// in turn, used for signing the provided pre-hashed message. /// /// The `msg` argument provided should be a hashed message for which an - /// ECDSA signature should be generated. - /// + /// ECDSA signature should be generated. + /// /// Returns an [`ecdsa::Signature`] or `None` in case the given `id` and /// `public` combination doesn't exist in the keystore. An `Err` will be /// returned if generating the signature itself failed. diff --git a/primitives/keystore/src/testing.rs b/primitives/keystore/src/testing.rs index 9cc8b8fc64b11..718ba798dc0f3 100644 --- a/primitives/keystore/src/testing.rs +++ b/primitives/keystore/src/testing.rs @@ -17,19 +17,21 @@ //! Types that should only be used for testing! 
-use sp_core::crypto::KeyTypeId; use sp_core::{ - crypto::{Pair, Public, CryptoTypePublicPair}, - ed25519, sr25519, ecdsa, + crypto::{CryptoTypePublicPair, KeyTypeId, Pair, Public}, + ecdsa, ed25519, sr25519, }; use crate::{ - {CryptoStore, SyncCryptoStorePtr, Error, SyncCryptoStore}, - vrf::{VRFTranscriptData, VRFSignature, make_transcript}, + vrf::{make_transcript, VRFSignature, VRFTranscriptData}, + CryptoStore, Error, SyncCryptoStore, SyncCryptoStorePtr, }; -use std::{collections::{HashMap, HashSet}, sync::Arc}; -use parking_lot::RwLock; use async_trait::async_trait; +use parking_lot::RwLock; +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; /// A keystore implementation usable in tests. #[derive(Default)] @@ -45,29 +47,28 @@ impl KeyStore { } fn sr25519_key_pair(&self, id: KeyTypeId, pub_key: &sr25519::Public) -> Option { - self.keys.read().get(&id) - .and_then(|inner| - inner.get(pub_key.as_slice()) - .map(|s| sr25519::Pair::from_string(s, None).expect("`sr25519` seed slice is valid")) - ) + self.keys.read().get(&id).and_then(|inner| { + inner.get(pub_key.as_slice()).map(|s| { + sr25519::Pair::from_string(s, None).expect("`sr25519` seed slice is valid") + }) + }) } fn ed25519_key_pair(&self, id: KeyTypeId, pub_key: &ed25519::Public) -> Option { - self.keys.read().get(&id) - .and_then(|inner| - inner.get(pub_key.as_slice()) - .map(|s| ed25519::Pair::from_string(s, None).expect("`ed25519` seed slice is valid")) - ) + self.keys.read().get(&id).and_then(|inner| { + inner.get(pub_key.as_slice()).map(|s| { + ed25519::Pair::from_string(s, None).expect("`ed25519` seed slice is valid") + }) + }) } fn ecdsa_key_pair(&self, id: KeyTypeId, pub_key: &ecdsa::Public) -> Option { - self.keys.read().get(&id) - .and_then(|inner| - inner.get(pub_key.as_slice()) - .map(|s| ecdsa::Pair::from_string(s, None).expect("`ecdsa` seed slice is valid")) - ) + self.keys.read().get(&id).and_then(|inner| { + inner + .get(pub_key.as_slice()) + .map(|s| 
ecdsa::Pair::from_string(s, None).expect("`ecdsa` seed slice is valid")) + }) } - } #[async_trait] @@ -158,28 +159,32 @@ impl CryptoStore for KeyStore { impl SyncCryptoStore for KeyStore { fn keys(&self, id: KeyTypeId) -> Result, Error> { - self.keys.read() + self.keys + .read() .get(&id) .map(|map| { - Ok(map.keys() - .fold(Vec::new(), |mut v, k| { - v.push(CryptoTypePublicPair(sr25519::CRYPTO_ID, k.clone())); - v.push(CryptoTypePublicPair(ed25519::CRYPTO_ID, k.clone())); - v.push(CryptoTypePublicPair(ecdsa::CRYPTO_ID, k.clone())); - v - })) + Ok(map.keys().fold(Vec::new(), |mut v, k| { + v.push(CryptoTypePublicPair(sr25519::CRYPTO_ID, k.clone())); + v.push(CryptoTypePublicPair(ed25519::CRYPTO_ID, k.clone())); + v.push(CryptoTypePublicPair(ecdsa::CRYPTO_ID, k.clone())); + v + })) }) .unwrap_or_else(|| Ok(vec![])) } fn sr25519_public_keys(&self, id: KeyTypeId) -> Vec { - self.keys.read().get(&id) - .map(|keys| + self.keys + .read() + .get(&id) + .map(|keys| { keys.values() - .map(|s| sr25519::Pair::from_string(s, None).expect("`sr25519` seed slice is valid")) + .map(|s| { + sr25519::Pair::from_string(s, None).expect("`sr25519` seed slice is valid") + }) .map(|p| p.public()) .collect() - ) + }) .unwrap_or_default() } @@ -190,27 +195,40 @@ impl SyncCryptoStore for KeyStore { ) -> Result { match seed { Some(seed) => { - let pair = sr25519::Pair::from_string(seed, None) - .map_err(|_| Error::ValidationError("Generates an `sr25519` pair.".to_owned()))?; - self.keys.write().entry(id).or_default().insert(pair.public().to_raw_vec(), seed.into()); + let pair = sr25519::Pair::from_string(seed, None).map_err(|_| { + Error::ValidationError("Generates an `sr25519` pair.".to_owned()) + })?; + self.keys + .write() + .entry(id) + .or_default() + .insert(pair.public().to_raw_vec(), seed.into()); Ok(pair.public()) }, None => { let (pair, phrase, _) = sr25519::Pair::generate_with_phrase(None); - self.keys.write().entry(id).or_default().insert(pair.public().to_raw_vec(), phrase); + 
self.keys + .write() + .entry(id) + .or_default() + .insert(pair.public().to_raw_vec(), phrase); Ok(pair.public()) - } + }, } } fn ed25519_public_keys(&self, id: KeyTypeId) -> Vec { - self.keys.read().get(&id) - .map(|keys| + self.keys + .read() + .get(&id) + .map(|keys| { keys.values() - .map(|s| ed25519::Pair::from_string(s, None).expect("`ed25519` seed slice is valid")) + .map(|s| { + ed25519::Pair::from_string(s, None).expect("`ed25519` seed slice is valid") + }) .map(|p| p.public()) .collect() - ) + }) .unwrap_or_default() } @@ -221,27 +239,40 @@ impl SyncCryptoStore for KeyStore { ) -> Result { match seed { Some(seed) => { - let pair = ed25519::Pair::from_string(seed, None) - .map_err(|_| Error::ValidationError("Generates an `ed25519` pair.".to_owned()))?; - self.keys.write().entry(id).or_default().insert(pair.public().to_raw_vec(), seed.into()); + let pair = ed25519::Pair::from_string(seed, None).map_err(|_| { + Error::ValidationError("Generates an `ed25519` pair.".to_owned()) + })?; + self.keys + .write() + .entry(id) + .or_default() + .insert(pair.public().to_raw_vec(), seed.into()); Ok(pair.public()) }, None => { let (pair, phrase, _) = ed25519::Pair::generate_with_phrase(None); - self.keys.write().entry(id).or_default().insert(pair.public().to_raw_vec(), phrase); + self.keys + .write() + .entry(id) + .or_default() + .insert(pair.public().to_raw_vec(), phrase); Ok(pair.public()) - } + }, } } fn ecdsa_public_keys(&self, id: KeyTypeId) -> Vec { - self.keys.read().get(&id) - .map(|keys| + self.keys + .read() + .get(&id) + .map(|keys| { keys.values() - .map(|s| ecdsa::Pair::from_string(s, None).expect("`ecdsa` seed slice is valid")) + .map(|s| { + ecdsa::Pair::from_string(s, None).expect("`ecdsa` seed slice is valid") + }) .map(|p| p.public()) .collect() - ) + }) .unwrap_or_default() } @@ -254,24 +285,38 @@ impl SyncCryptoStore for KeyStore { Some(seed) => { let pair = ecdsa::Pair::from_string(seed, None) .map_err(|_| Error::ValidationError("Generates an 
`ecdsa` pair.".to_owned()))?; - self.keys.write().entry(id).or_default().insert(pair.public().to_raw_vec(), seed.into()); + self.keys + .write() + .entry(id) + .or_default() + .insert(pair.public().to_raw_vec(), seed.into()); Ok(pair.public()) }, None => { let (pair, phrase, _) = ecdsa::Pair::generate_with_phrase(None); - self.keys.write().entry(id).or_default().insert(pair.public().to_raw_vec(), phrase); + self.keys + .write() + .entry(id) + .or_default() + .insert(pair.public().to_raw_vec(), phrase); Ok(pair.public()) - } + }, } } fn insert_unknown(&self, id: KeyTypeId, suri: &str, public: &[u8]) -> Result<(), ()> { - self.keys.write().entry(id).or_default().insert(public.to_owned(), suri.to_string()); + self.keys + .write() + .entry(id) + .or_default() + .insert(public.to_owned(), suri.to_string()); Ok(()) } fn has_keys(&self, public_keys: &[(Vec, KeyTypeId)]) -> bool { - public_keys.iter().all(|(k, t)| self.keys.read().get(&t).and_then(|s| s.get(k)).is_some()) + public_keys + .iter() + .all(|(k, t)| self.keys.read().get(&t).and_then(|s| s.get(k)).is_some()) } fn supported_keys( @@ -295,24 +340,24 @@ impl SyncCryptoStore for KeyStore { match key.0 { ed25519::CRYPTO_ID => { - let key_pair = self - .ed25519_key_pair(id, &ed25519::Public::from_slice(key.1.as_slice())); + let key_pair = + self.ed25519_key_pair(id, &ed25519::Public::from_slice(key.1.as_slice())); key_pair.map(|k| k.sign(msg).encode()).map(Ok).transpose() - } + }, sr25519::CRYPTO_ID => { - let key_pair = self - .sr25519_key_pair(id, &sr25519::Public::from_slice(key.1.as_slice())); + let key_pair = + self.sr25519_key_pair(id, &sr25519::Public::from_slice(key.1.as_slice())); key_pair.map(|k| k.sign(msg).encode()).map(Ok).transpose() - } + }, ecdsa::CRYPTO_ID => { - let key_pair = self - .ecdsa_key_pair(id, &ecdsa::Public::from_slice(key.1.as_slice())); + let key_pair = + self.ecdsa_key_pair(id, &ecdsa::Public::from_slice(key.1.as_slice())); key_pair.map(|k| k.sign(msg).encode()).map(Ok).transpose() - } 
- _ => Err(Error::KeyNotSupported(id)) + }, + _ => Err(Error::KeyNotSupported(id)), } } @@ -323,17 +368,11 @@ impl SyncCryptoStore for KeyStore { transcript_data: VRFTranscriptData, ) -> Result, Error> { let transcript = make_transcript(transcript_data); - let pair = if let Some(k) = self.sr25519_key_pair(key_type, public) { - k - } else { - return Ok(None) - }; + let pair = + if let Some(k) = self.sr25519_key_pair(key_type, public) { k } else { return Ok(None) }; let (inout, proof, _) = pair.as_ref().vrf_sign(transcript); - Ok(Some(VRFSignature { - output: inout.to_output(), - proof, - })) + Ok(Some(VRFSignature { output: inout.to_output(), proof })) } fn ecdsa_sign_prehashed( @@ -362,15 +401,18 @@ impl Into> for KeyStore { #[cfg(test)] mod tests { use super::*; - use sp_core::{sr25519, testing::{ED25519, SR25519, ECDSA}}; - use crate::{SyncCryptoStore, vrf::VRFTranscriptValue}; + use crate::{vrf::VRFTranscriptValue, SyncCryptoStore}; + use sp_core::{ + sr25519, + testing::{ECDSA, ED25519, SR25519}, + }; #[test] fn store_key_and_extract() { let store = KeyStore::new(); - let public = SyncCryptoStore::ed25519_generate_new(&store, ED25519, None) - .expect("Generates key"); + let public = + SyncCryptoStore::ed25519_generate_new(&store, ED25519, None).expect("Generates key"); let public_keys = SyncCryptoStore::keys(&store, ED25519).unwrap(); @@ -384,12 +426,8 @@ mod tests { let secret_uri = "//Alice"; let key_pair = sr25519::Pair::from_string(secret_uri, None).expect("Generates key pair"); - SyncCryptoStore::insert_unknown( - &store, - SR25519, - secret_uri, - key_pair.public().as_ref(), - ).expect("Inserts unknown key"); + SyncCryptoStore::insert_unknown(&store, SR25519, secret_uri, key_pair.public().as_ref()) + .expect("Inserts unknown key"); let public_keys = SyncCryptoStore::keys(&store, SR25519).unwrap(); @@ -409,7 +447,7 @@ mod tests { ("one", VRFTranscriptValue::U64(1)), ("two", VRFTranscriptValue::U64(2)), ("three", 
VRFTranscriptValue::Bytes("test".as_bytes().to_vec())), - ] + ], }; let result = SyncCryptoStore::sr25519_vrf_sign( @@ -420,19 +458,11 @@ mod tests { ); assert!(result.unwrap().is_none()); - SyncCryptoStore::insert_unknown( - &store, - SR25519, - secret_uri, - key_pair.public().as_ref(), - ).expect("Inserts unknown key"); + SyncCryptoStore::insert_unknown(&store, SR25519, secret_uri, key_pair.public().as_ref()) + .expect("Inserts unknown key"); - let result = SyncCryptoStore::sr25519_vrf_sign( - &store, - SR25519, - &key_pair.public(), - transcript_data, - ); + let result = + SyncCryptoStore::sr25519_vrf_sign(&store, SR25519, &key_pair.public(), transcript_data); assert!(result.unwrap().is_some()); } @@ -445,16 +475,19 @@ mod tests { let pair = ecdsa::Pair::from_string(suri, None).unwrap(); let msg = sp_core::keccak_256(b"this should be a hashed message"); - + // no key in key store - let res = SyncCryptoStore::ecdsa_sign_prehashed(&store, ECDSA, &pair.public(), &msg).unwrap(); + let res = + SyncCryptoStore::ecdsa_sign_prehashed(&store, ECDSA, &pair.public(), &msg).unwrap(); assert!(res.is_none()); // insert key, sign again - let res = SyncCryptoStore::insert_unknown(&store, ECDSA, suri, pair.public().as_ref()).unwrap(); + let res = + SyncCryptoStore::insert_unknown(&store, ECDSA, suri, pair.public().as_ref()).unwrap(); assert_eq!((), res); - let res = SyncCryptoStore::ecdsa_sign_prehashed(&store, ECDSA, &pair.public(), &msg).unwrap(); - assert!(res.is_some()); + let res = + SyncCryptoStore::ecdsa_sign_prehashed(&store, ECDSA, &pair.public(), &msg).unwrap(); + assert!(res.is_some()); } } diff --git a/primitives/keystore/src/vrf.rs b/primitives/keystore/src/vrf.rs index 04286eea82761..383abb77e17c7 100644 --- a/primitives/keystore/src/vrf.rs +++ b/primitives/keystore/src/vrf.rs @@ -59,21 +59,17 @@ pub fn make_transcript(data: VRFTranscriptData) -> Transcript { }, VRFTranscriptValue::U64(val) => { transcript.append_u64(label.as_bytes(), val); - } + }, } } transcript 
} - #[cfg(test)] mod tests { use super::*; use rand::RngCore; - use rand_chacha::{ - rand_core::SeedableRng, - ChaChaRng, - }; + use rand_chacha::{rand_core::SeedableRng, ChaChaRng}; #[test] fn transcript_creation_matches() { @@ -90,9 +86,7 @@ mod tests { }); let test = |t: Transcript| -> [u8; 16] { let mut b = [0u8; 16]; - t.build_rng() - .finalize(&mut ChaChaRng::from_seed([0u8;32])) - .fill_bytes(&mut b); + t.build_rng().finalize(&mut ChaChaRng::from_seed([0u8; 32])).fill_bytes(&mut b); b }; debug_assert!(test(orig_transcript) == test(new_transcript)); diff --git a/primitives/maybe-compressed-blob/src/lib.rs b/primitives/maybe-compressed-blob/src/lib.rs index acd283e747f9f..4e4a3da0a82c6 100644 --- a/primitives/maybe-compressed-blob/src/lib.rs +++ b/primitives/maybe-compressed-blob/src/lib.rs @@ -18,8 +18,7 @@ //! Handling of blobs that may be compressed, based on an 8-byte magic identifier //! at the head. -use std::borrow::Cow; -use std::io::Read; +use std::{borrow::Cow, io::Read}; // An arbitrary prefix, that indicates a blob beginning with should be decompressed with // Zstd compression. 
@@ -52,7 +51,7 @@ impl std::fmt::Display for Error { } } -impl std::error::Error for Error { } +impl std::error::Error for Error {} fn read_from_decoder( decoder: impl Read, @@ -81,8 +80,8 @@ fn decompress_zstd(blob: &[u8], bomb_limit: usize) -> Result, Error> { #[cfg(target_os = "unknown")] fn decompress_zstd(mut blob: &[u8], bomb_limit: usize) -> Result, Error> { let blob_len = blob.len(); - let decoder = ruzstd::streaming_decoder::StreamingDecoder::new(&mut blob) - .map_err(|_| Error::Invalid)?; + let decoder = + ruzstd::streaming_decoder::StreamingDecoder::new(&mut blob).map_err(|_| Error::Invalid)?; read_from_decoder(decoder, blob_len, bomb_limit) } @@ -105,7 +104,7 @@ pub fn compress(blob: &[u8], bomb_limit: usize) -> Option> { use std::io::Write; if blob.len() > bomb_limit { - return None; + return None } let mut buf = ZSTD_PREFIX.to_vec(); diff --git a/primitives/npos-elections/benches/phragmen.rs b/primitives/npos-elections/benches/phragmen.rs index d48c246558844..7848259249351 100644 --- a/primitives/npos-elections/benches/phragmen.rs +++ b/primitives/npos-elections/benches/phragmen.rs @@ -12,7 +12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. - //! Benchmarks of the phragmen election algorithm. //! Note that execution times will not be accurate in an absolute scale, since //! 
- Everything is executed in the context of `TestExternalities` @@ -27,13 +26,12 @@ use test::Bencher; use rand::{self, Rng}; use sp_npos_elections::{ElectionResult, VoteWeight}; -use std::collections::BTreeMap; -use sp_runtime::{Perbill, PerThing, traits::Zero}; use sp_npos_elections::{ - balance_solution, assignment_ratio_to_staked, to_support_map, to_without_backing, VoteWeight, - ExtendedBalance, Assignment, StakedAssignment, IdentifierT, assignment_ratio_to_staked, - seq_phragmen, + assignment_ratio_to_staked, balance_solution, seq_phragmen, to_support_map, to_without_backing, + Assignment, ExtendedBalance, IdentifierT, StakedAssignment, VoteWeight, }; +use sp_runtime::{traits::Zero, PerThing, Perbill}; +use std::collections::BTreeMap; // default params. Each will be scaled by the benchmarks individually. const VALIDATORS: u64 = 100; @@ -69,15 +67,13 @@ mod bench_closure_and_slice { ratio .into_iter() .zip(stakes.into_iter().map(|x| *x as ExtendedBalance)) - .map(|(a, stake)| { - a.into_staked(stake.into(), true) - }) + .map(|(a, stake)| a.into_staked(stake.into(), true)) .collect() } #[bench] fn closure(b: &mut Bencher) { - let assignments = (0..1000).map(|_| random_assignment()).collect::>>(); + let assignments = (0..1000).map(|_| random_assignment()).collect::>>(); let stake_of = |x: &u32| -> VoteWeight { (x * 2 + 100).into() }; // each have one clone of assignments @@ -86,7 +82,7 @@ mod bench_closure_and_slice { #[bench] fn slice(b: &mut Bencher) { - let assignments = (0..1000).map(|_| random_assignment()).collect::>>(); + let assignments = (0..1000).map(|_| random_assignment()).collect::>>(); let stake_of = |x: &u32| -> VoteWeight { (x * 2 + 100).into() }; b.iter(|| { @@ -112,20 +108,19 @@ fn do_phragmen( let mut candidates = Vec::with_capacity(num_validators as usize); let mut stake_of_tree: BTreeMap = BTreeMap::new(); - (1 ..= num_validators).for_each(|acc| { + (1..=num_validators).for_each(|acc| { candidates.push(acc); stake_of_tree.insert(acc, STAKE + 
rr(10, 1000)); }); let mut voters = Vec::with_capacity(num_nominators as usize); - (PREFIX ..= (PREFIX + num_nominators)).for_each(|acc| { + (PREFIX..=(PREFIX + num_nominators)).for_each(|acc| { // all possible targets let mut all_targets = candidates.clone(); // we remove and pop into `targets` `edge_per_voter` times. - let targets = (0 .. edge_per_voter).map(|_| { - all_targets.remove(rr(0, all_targets.len()) as usize) - }) - .collect::>(); + let targets = (0..edge_per_voter) + .map(|_| all_targets.remove(rr(0, all_targets.len()) as usize)) + .collect::>(); let stake = STAKE + rr(10, 1000); stake_of_tree.insert(acc, stake); @@ -138,20 +133,16 @@ fn do_phragmen( Zero::zero(), candidates.clone(), voters.clone(), - ).unwrap(); + ) + .unwrap(); - let stake_of = |who: &AccountId| -> VoteWeight { - *stake_of_tree.get(who).unwrap() - }; + let stake_of = |who: &AccountId| -> VoteWeight { *stake_of_tree.get(who).unwrap() }; // Do the benchmarking with balancing. if eq_iters > 0 { let staked = assignment_ratio_to_staked(assignments, &stake_of); let winners = to_without_backing(winners); - let mut support = to_support_map( - winners.as_ref(), - staked.as_ref(), - ).unwrap(); + let mut support = to_support_map(winners.as_ref(), staked.as_ref()).unwrap(); balance_solution( staked.into_iter().map(|a| (a.clone(), stake_of(&a.who))).collect(), diff --git a/primitives/npos-elections/compact/src/assignment.rs b/primitives/npos-elections/compact/src/assignment.rs index 2c8edefbfb379..bd5b1bf0c154a 100644 --- a/primitives/npos-elections/compact/src/assignment.rs +++ b/primitives/npos-elections/compact/src/assignment.rs @@ -46,25 +46,29 @@ pub(crate) fn from_impl(count: usize) -> TokenStream2 { ),) }; - let from_impl_rest = (3..=count).map(|c| { - let inner = (0..c-1).map(|i| - quote!((index_of_target(&distribution[#i].0).or_invalid_index()?, distribution[#i].1),) - ).collect::(); - - let field_name = field_name_for(c); - let last_index = c - 1; - let last = 
quote!(index_of_target(&distribution[#last_index].0).or_invalid_index()?); - - quote!( - #c => compact.#field_name.push( - ( - index_of_voter(&who).or_invalid_index()?, - [#inner], - #last, + let from_impl_rest = (3..=count) + .map(|c| { + let inner = (0..c - 1) + .map( + |i| quote!((index_of_target(&distribution[#i].0).or_invalid_index()?, distribution[#i].1),), ) - ), - ) - }).collect::(); + .collect::(); + + let field_name = field_name_for(c); + let last_index = c - 1; + let last = quote!(index_of_target(&distribution[#last_index].0).or_invalid_index()?); + + quote!( + #c => compact.#field_name.push( + ( + index_of_voter(&who).or_invalid_index()?, + [#inner], + #last, + ) + ), + ) + }) + .collect::(); quote!( #from_impl_single @@ -113,39 +117,41 @@ pub(crate) fn into_impl(count: usize, per_thing: syn::Type) -> TokenStream2 { ) }; - let into_impl_rest = (3..=count).map(|c| { - let name = field_name_for(c); - quote!( - for (voter_index, inners, t_last_idx) in self.#name { - let mut sum = #per_thing::zero(); - let mut inners_parsed = inners - .iter() - .map(|(ref t_idx, p)| { - sum = _npos::sp_arithmetic::traits::Saturating::saturating_add(sum, *p); - let target = target_at(*t_idx).or_invalid_index()?; - Ok((target, *p)) - }) - .collect::, _npos::Error>>()?; - - if sum >= #per_thing::one() { - return Err(_npos::Error::CompactStakeOverflow); + let into_impl_rest = (3..=count) + .map(|c| { + let name = field_name_for(c); + quote!( + for (voter_index, inners, t_last_idx) in self.#name { + let mut sum = #per_thing::zero(); + let mut inners_parsed = inners + .iter() + .map(|(ref t_idx, p)| { + sum = _npos::sp_arithmetic::traits::Saturating::saturating_add(sum, *p); + let target = target_at(*t_idx).or_invalid_index()?; + Ok((target, *p)) + }) + .collect::, _npos::Error>>()?; + + if sum >= #per_thing::one() { + return Err(_npos::Error::CompactStakeOverflow); + } + + // defensive only. Since Percent doesn't have `Sub`. 
+ let p_last = _npos::sp_arithmetic::traits::Saturating::saturating_sub( + #per_thing::one(), + sum, + ); + + inners_parsed.push((target_at(t_last_idx).or_invalid_index()?, p_last)); + + assignments.push(_npos::Assignment { + who: voter_at(voter_index).or_invalid_index()?, + distribution: inners_parsed, + }); } - - // defensive only. Since Percent doesn't have `Sub`. - let p_last = _npos::sp_arithmetic::traits::Saturating::saturating_sub( - #per_thing::one(), - sum, - ); - - inners_parsed.push((target_at(t_last_idx).or_invalid_index()?, p_last)); - - assignments.push(_npos::Assignment { - who: voter_at(voter_index).or_invalid_index()?, - distribution: inners_parsed, - }); - } - ) - }).collect::(); + ) + }) + .collect::(); quote!( #into_impl_single diff --git a/primitives/npos-elections/compact/src/codec.rs b/primitives/npos-elections/compact/src/codec.rs index 5416587f2044f..265bf1103b556 100644 --- a/primitives/npos-elections/compact/src/codec.rs +++ b/primitives/npos-elections/compact/src/codec.rs @@ -82,39 +82,42 @@ fn decode_impl( } }; - let decode_impl_rest = (3..=count).map(|c| { - let name = field_name_for(c); + let decode_impl_rest = (3..=count) + .map(|c| { + let name = field_name_for(c); - let inner_impl = (0..c-1).map(|i| - quote! { ( (inner[#i].0).0, (inner[#i].1).0 ), } - ).collect::(); - - quote! { - let #name = - < - _npos::sp_std::prelude::Vec<( - _npos::codec::Compact<#voter_type>, - [(_npos::codec::Compact<#target_type>, _npos::codec::Compact<#weight_type>); #c-1], - _npos::codec::Compact<#target_type>, - )> - as _npos::codec::Decode - >::decode(value)?; - let #name = #name - .into_iter() - .map(|(v, inner, t_last)| ( - v.0, - [ #inner_impl ], - t_last.0, - )) - .collect::<_npos::sp_std::prelude::Vec<_>>(); - } - }).collect::(); + let inner_impl = (0..c - 1) + .map(|i| quote! { ( (inner[#i].0).0, (inner[#i].1).0 ), }) + .collect::(); + quote! 
{ + let #name = + < + _npos::sp_std::prelude::Vec<( + _npos::codec::Compact<#voter_type>, + [(_npos::codec::Compact<#target_type>, _npos::codec::Compact<#weight_type>); #c-1], + _npos::codec::Compact<#target_type>, + )> + as _npos::codec::Decode + >::decode(value)?; + let #name = #name + .into_iter() + .map(|(v, inner, t_last)| ( + v.0, + [ #inner_impl ], + t_last.0, + )) + .collect::<_npos::sp_std::prelude::Vec<_>>(); + } + }) + .collect::(); - let all_field_names = (1..=count).map(|c| { - let name = field_name_for(c); - quote! { #name, } - }).collect::(); + let all_field_names = (1..=count) + .map(|c| { + let name = field_name_for(c); + quote! { #name, } + }) + .collect::(); quote!( impl _npos::codec::Decode for #ident { @@ -167,29 +170,33 @@ fn encode_impl(ident: &syn::Ident, count: usize) -> TokenStream2 { } }; - let encode_impl_rest = (3..=count).map(|c| { - let name = field_name_for(c); + let encode_impl_rest = (3..=count) + .map(|c| { + let name = field_name_for(c); - // we use the knowledge of the length to avoid copy_from_slice. - let inners_compact_array = (0..c-1).map(|i| - quote!{( - _npos::codec::Compact(inner[#i].0.clone()), - _npos::codec::Compact(inner[#i].1.clone()), - ),} - ).collect::(); + // we use the knowledge of the length to avoid copy_from_slice. + let inners_compact_array = (0..c - 1) + .map(|i| { + quote! {( + _npos::codec::Compact(inner[#i].0.clone()), + _npos::codec::Compact(inner[#i].1.clone()), + ),} + }) + .collect::(); - quote! { - let #name = self.#name - .iter() - .map(|(v, inner, t_last)| ( - _npos::codec::Compact(v.clone()), - [ #inners_compact_array ], - _npos::codec::Compact(t_last.clone()), - )) - .collect::<_npos::sp_std::prelude::Vec<_>>(); - #name.encode_to(&mut r); - } - }).collect::(); + quote! 
{ + let #name = self.#name + .iter() + .map(|(v, inner, t_last)| ( + _npos::codec::Compact(v.clone()), + [ #inners_compact_array ], + _npos::codec::Compact(t_last.clone()), + )) + .collect::<_npos::sp_std::prelude::Vec<_>>(); + #name.encode_to(&mut r); + } + }) + .collect::(); quote!( impl _npos::codec::Encode for #ident { @@ -237,37 +244,39 @@ fn scale_info_impl( } }; - let scale_info_impl_rest = (3..=count).map(|c| { - let name = format!("{}", field_name_for(c)); - quote! { - .field(|f| - f.ty::<_npos::sp_std::prelude::Vec<( - _npos::codec::Compact<#voter_type>, - [ - (_npos::codec::Compact<#target_type>, _npos::codec::Compact<#weight_type>); - #c - 1 - ], - _npos::codec::Compact<#target_type> - )>>() - .name(#name) - ) - } - }).collect::(); + let scale_info_impl_rest = (3..=count) + .map(|c| { + let name = format!("{}", field_name_for(c)); + quote! { + .field(|f| + f.ty::<_npos::sp_std::prelude::Vec<( + _npos::codec::Compact<#voter_type>, + [ + (_npos::codec::Compact<#target_type>, _npos::codec::Compact<#weight_type>); + #c - 1 + ], + _npos::codec::Compact<#target_type> + )>>() + .name(#name) + ) + } + }) + .collect::(); quote!( - impl _npos::scale_info::TypeInfo for #ident { - type Identity = Self; + impl _npos::scale_info::TypeInfo for #ident { + type Identity = Self; - fn type_info() -> _npos::scale_info::Type<_npos::scale_info::form::MetaForm> { - _npos::scale_info::Type::builder() - .path(_npos::scale_info::Path::new(stringify!(#ident), module_path!())) - .composite( - _npos::scale_info::build::Fields::named() - #scale_info_impl_single - #scale_info_impl_double - #scale_info_impl_rest - ) - } - } - ) + fn type_info() -> _npos::scale_info::Type<_npos::scale_info::form::MetaForm> { + _npos::scale_info::Type::builder() + .path(_npos::scale_info::Path::new(stringify!(#ident), module_path!())) + .composite( + _npos::scale_info::build::Fields::named() + #scale_info_impl_single + #scale_info_impl_double + #scale_info_impl_rest + ) + } + } + ) } diff --git 
a/primitives/npos-elections/compact/src/index_assignment.rs b/primitives/npos-elections/compact/src/index_assignment.rs index 6aeef1442236e..347be7d19984f 100644 --- a/primitives/npos-elections/compact/src/index_assignment.rs +++ b/primitives/npos-elections/compact/src/index_assignment.rs @@ -65,7 +65,7 @@ pub(crate) fn from_impl(count: usize) -> TokenStream2 { ) ), ) - }) + }) .collect::(); quote!( diff --git a/primitives/npos-elections/compact/src/lib.rs b/primitives/npos-elections/compact/src/lib.rs index 557b812d7bbfc..eaebb6892ef1a 100644 --- a/primitives/npos-elections/compact/src/lib.rs +++ b/primitives/npos-elections/compact/src/lib.rs @@ -18,7 +18,7 @@ //! Proc macro for a npos compact assignment. use proc_macro::TokenStream; -use proc_macro2::{TokenStream as TokenStream2, Span, Ident}; +use proc_macro2::{Ident, Span, TokenStream as TokenStream2}; use proc_macro_crate::{crate_name, FoundCrate}; use quote::quote; use syn::parse::{Parse, ParseStream, Result}; @@ -82,15 +82,8 @@ pub(crate) fn syn_err(message: &'static str) -> syn::Error { /// ``` #[proc_macro] pub fn generate_solution_type(item: TokenStream) -> TokenStream { - let SolutionDef { - vis, - ident, - count, - voter_type, - target_type, - weight_type, - compact_encoding, - } = syn::parse_macro_input!(item as SolutionDef); + let SolutionDef { vis, ident, count, voter_type, target_type, weight_type, compact_encoding } = + syn::parse_macro_input!(item as SolutionDef); let imports = imports().unwrap_or_else(|e| e.to_compile_error()); @@ -102,7 +95,8 @@ pub fn generate_solution_type(item: TokenStream) -> TokenStream { target_type.clone(), weight_type.clone(), compact_encoding, - ).unwrap_or_else(|e| e.to_compile_error()); + ) + .unwrap_or_else(|e| e.to_compile_error()); quote!( #imports @@ -167,7 +161,7 @@ fn struct_def( weight_type.clone(), count, ); - quote!{ + quote! 
{ #compact_impl #[derive(Default, PartialEq, Eq, Clone, Debug, PartialOrd, Ord)] } @@ -326,23 +320,27 @@ fn remove_voter_impl(count: usize) -> TokenStream2 { } fn len_impl(count: usize) -> TokenStream2 { - (1..=count).map(|c| { - let field_name = field_name_for(c); - quote!( - all_len = all_len.saturating_add(self.#field_name.len()); - ) - }).collect::() + (1..=count) + .map(|c| { + let field_name = field_name_for(c); + quote!( + all_len = all_len.saturating_add(self.#field_name.len()); + ) + }) + .collect::() } fn edge_count_impl(count: usize) -> TokenStream2 { - (1..=count).map(|c| { - let field_name = field_name_for(c); - quote!( - all_edges = all_edges.saturating_add( - self.#field_name.len().saturating_mul(#c as usize) - ); - ) - }).collect::() + (1..=count) + .map(|c| { + let field_name = field_name_for(c); + quote!( + all_edges = all_edges.saturating_add( + self.#field_name.len().saturating_mul(#c as usize) + ); + ) + }) + .collect::() } fn unique_targets_impl(count: usize) -> TokenStream2 { @@ -365,17 +363,19 @@ fn unique_targets_impl(count: usize) -> TokenStream2 { } }; - let unique_targets_impl_rest = (3..=count).map(|c| { - let field_name = field_name_for(c); - quote! { - self.#field_name.iter().for_each(|(_, inners, t_last)| { - inners.iter().for_each(|(t, _)| { - maybe_insert_target(*t); + let unique_targets_impl_rest = (3..=count) + .map(|c| { + let field_name = field_name_for(c); + quote! { + self.#field_name.iter().for_each(|(_, inners, t_last)| { + inners.iter().for_each(|(t, _)| { + maybe_insert_target(*t); + }); + maybe_insert_target(*t_last); }); - maybe_insert_target(*t_last); - }); - } - }).collect::(); + } + }) + .collect::(); quote! 
{ #unique_targets_impl_single @@ -445,23 +445,29 @@ impl Parse for SolutionDef { let expected_types = ["VoterIndex", "TargetIndex", "Accuracy"]; - let mut types: Vec = generics.args.iter().zip(expected_types.iter()).map(|(t, expected)| - match t { + let mut types: Vec = generics + .args + .iter() + .zip(expected_types.iter()) + .map(|(t, expected)| match t { syn::GenericArgument::Type(ty) => { // this is now an error - Err(syn::Error::new_spanned(ty, format!("Expected binding: `{} = ...`", expected))) + Err(syn::Error::new_spanned( + ty, + format!("Expected binding: `{} = ...`", expected), + )) }, - syn::GenericArgument::Binding(syn::Binding{ident, ty, ..}) => { + syn::GenericArgument::Binding(syn::Binding { ident, ty, .. }) => { // check that we have the right keyword for this position in the argument list if ident == expected { Ok(ty.clone()) } else { Err(syn::Error::new_spanned(ident, format!("Expected `{}`", expected))) } - } + }, _ => Err(syn_err("Wrong type of generic provided. Must be a `type`.")), - } - ).collect::>()?; + }) + .collect::>()?; let weight_type = types.pop().expect("Vector of length 3 can be popped; qed"); let target_type = types.pop().expect("Vector of length 2 can be popped; qed"); @@ -472,15 +478,15 @@ impl Parse for SolutionDef { let expr = count_expr.expr; let expr_lit = match *expr { syn::Expr::Lit(count_lit) => count_lit.lit, - _ => return Err(syn_err("Count must be literal.")) + _ => return Err(syn_err("Count must be literal.")), }; let int_lit = match expr_lit { syn::Lit::Int(int_lit) => int_lit, - _ => return Err(syn_err("Count must be int literal.")) + _ => return Err(syn_err("Count must be int literal.")), }; let count = int_lit.base10_parse::()?; - Ok(Self { vis, ident, voter_type, target_type, weight_type, count, compact_encoding } ) + Ok(Self { vis, ident, voter_type, target_type, weight_type, count, compact_encoding }) } } diff --git a/primitives/npos-elections/fuzzer/src/common.rs 
b/primitives/npos-elections/fuzzer/src/common.rs index fe237c930de17..e97f7f7df8b11 100644 --- a/primitives/npos-elections/fuzzer/src/common.rs +++ b/primitives/npos-elections/fuzzer/src/common.rs @@ -62,11 +62,7 @@ pub fn generate_random_npos_inputs( candidate_count: usize, voter_count: usize, mut rng: impl Rng, -) -> ( - usize, - Vec, - Vec<(AccountId, VoteWeight, Vec)>, -) { +) -> (usize, Vec, Vec<(AccountId, VoteWeight, Vec)>) { // cache for fast generation of unique candidate and voter ids let mut used_ids = HashSet::with_capacity(candidate_count + voter_count); diff --git a/primitives/npos-elections/fuzzer/src/compact.rs b/primitives/npos-elections/fuzzer/src/compact.rs index a49f6a535e5f0..b171765e783f7 100644 --- a/primitives/npos-elections/fuzzer/src/compact.rs +++ b/primitives/npos-elections/fuzzer/src/compact.rs @@ -1,6 +1,5 @@ use honggfuzz::fuzz; -use sp_npos_elections::generate_solution_type; -use sp_npos_elections::sp_arithmetic::Percent; +use sp_npos_elections::{generate_solution_type, sp_arithmetic::Percent}; use sp_runtime::codec::{Encode, Error}; fn main() { @@ -26,9 +25,8 @@ fn main() { // The reencoded value should definitely be decodable (if unwrap() fails that is a valid // panic/finding for the fuzzer): let decoded2: InnerTestSolutionCompact = - ::decode( - &mut reencoded.as_slice(), - ).unwrap(); + ::decode(&mut reencoded.as_slice()) + .unwrap(); // And it should be equal to the original decoded object (resulting from directly // decoding fuzzer_data): assert_eq!(decoded, decoded2); diff --git a/primitives/npos-elections/fuzzer/src/phragmen_balancing.rs b/primitives/npos-elections/fuzzer/src/phragmen_balancing.rs index 4ff18e95d1ef1..04ff60683f9c0 100644 --- a/primitives/npos-elections/fuzzer/src/phragmen_balancing.rs +++ b/primitives/npos-elections/fuzzer/src/phragmen_balancing.rs @@ -21,23 +21,17 @@ mod common; use common::*; use honggfuzz::fuzz; +use rand::{self, SeedableRng}; use sp_npos_elections::{ 
assignment_ratio_to_staked_normalized, is_score_better, seq_phragmen, to_supports, to_without_backing, EvaluateSupport, VoteWeight, }; use sp_runtime::Perbill; -use rand::{self, SeedableRng}; fn main() { loop { fuzz!(|data: (usize, usize, usize, usize, u64)| { - let ( - mut target_count, - mut voter_count, - mut iterations, - mut to_elect, - seed, - ) = data; + let (mut target_count, mut voter_count, mut iterations, mut to_elect, seed) = data; let rng = rand::rngs::SmallRng::seed_from_u64(seed); target_count = to_range(target_count, 100, 200); voter_count = to_range(voter_count, 100, 200); @@ -48,12 +42,7 @@ fn main() { "++ [voter_count: {} / target_count:{} / to_elect:{} / iterations:{}]", voter_count, target_count, to_elect, iterations, ); - let ( - unbalanced, - candidates, - voters, - stake_of_tree, - ) = generate_random_npos_result( + let (unbalanced, candidates, voters, stake_of_tree) = generate_random_npos_result( voter_count as u64, target_count as u64, to_elect, @@ -61,9 +50,7 @@ fn main() { ElectionType::Phragmen(None), ); - let stake_of = |who: &AccountId| -> VoteWeight { - *stake_of_tree.get(who).unwrap() - }; + let stake_of = |who: &AccountId| -> VoteWeight { *stake_of_tree.get(who).unwrap() }; let unbalanced_score = { let staked = assignment_ratio_to_staked_normalized( @@ -76,7 +63,7 @@ fn main() { if score[0] == 0 { // such cases cannot be improved by balancing. 
- return; + return } score }; @@ -87,34 +74,32 @@ fn main() { candidates, voters, Some((iterations, 0)), - ).unwrap(); + ) + .unwrap(); let balanced_score = { let staked = assignment_ratio_to_staked_normalized( balanced.assignments.clone(), &stake_of, - ).unwrap(); + ) + .unwrap(); let winners = to_without_backing(balanced.winners); to_supports(winners.as_ref(), staked.as_ref()).unwrap().evaluate() - }; let enhance = is_score_better(balanced_score, unbalanced_score, Perbill::zero()); println!( "iter = {} // {:?} -> {:?} [{}]", - iterations, - unbalanced_score, - balanced_score, - enhance, + iterations, unbalanced_score, balanced_score, enhance, ); // The only guarantee of balancing is such that the first and third element of the score // cannot decrease. assert!( balanced_score[0] >= unbalanced_score[0] && - balanced_score[1] == unbalanced_score[1] && - balanced_score[2] <= unbalanced_score[2] + balanced_score[1] == unbalanced_score[1] && + balanced_score[2] <= unbalanced_score[2] ); } }); diff --git a/primitives/npos-elections/fuzzer/src/phragmms_balancing.rs b/primitives/npos-elections/fuzzer/src/phragmms_balancing.rs index 8ce7e7d415fa2..0d8a07489d310 100644 --- a/primitives/npos-elections/fuzzer/src/phragmms_balancing.rs +++ b/primitives/npos-elections/fuzzer/src/phragmms_balancing.rs @@ -21,23 +21,17 @@ mod common; use common::*; use honggfuzz::fuzz; +use rand::{self, SeedableRng}; use sp_npos_elections::{ assignment_ratio_to_staked_normalized, is_score_better, phragmms, to_supports, to_without_backing, EvaluateSupport, VoteWeight, }; use sp_runtime::Perbill; -use rand::{self, SeedableRng}; fn main() { loop { fuzz!(|data: (usize, usize, usize, usize, u64)| { - let ( - mut target_count, - mut voter_count, - mut iterations, - mut to_elect, - seed, - ) = data; + let (mut target_count, mut voter_count, mut iterations, mut to_elect, seed) = data; let rng = rand::rngs::SmallRng::seed_from_u64(seed); target_count = to_range(target_count, 100, 200); voter_count = 
to_range(voter_count, 100, 200); @@ -48,12 +42,7 @@ fn main() { "++ [voter_count: {} / target_count:{} / to_elect:{} / iterations:{}]", voter_count, target_count, to_elect, iterations, ); - let ( - unbalanced, - candidates, - voters, - stake_of_tree, - ) = generate_random_npos_result( + let (unbalanced, candidates, voters, stake_of_tree) = generate_random_npos_result( voter_count as u64, target_count as u64, to_elect, @@ -61,9 +50,7 @@ fn main() { ElectionType::Phragmms(None), ); - let stake_of = |who: &AccountId| -> VoteWeight { - *stake_of_tree.get(who).unwrap() - }; + let stake_of = |who: &AccountId| -> VoteWeight { *stake_of_tree.get(who).unwrap() }; let unbalanced_score = { let staked = assignment_ratio_to_staked_normalized( @@ -76,7 +63,7 @@ fn main() { if score[0] == 0 { // such cases cannot be improved by balancing. - return; + return } score }; @@ -86,34 +73,30 @@ fn main() { candidates, voters, Some((iterations, 0)), - ).unwrap(); + ) + .unwrap(); let balanced_score = { let staked = assignment_ratio_to_staked_normalized(balanced.assignments.clone(), &stake_of) .unwrap(); let winners = to_without_backing(balanced.winners); - to_supports(winners.as_ref(), staked.as_ref()) - .unwrap() - .evaluate() + to_supports(winners.as_ref(), staked.as_ref()).unwrap().evaluate() }; let enhance = is_score_better(balanced_score, unbalanced_score, Perbill::zero()); println!( "iter = {} // {:?} -> {:?} [{}]", - iterations, - unbalanced_score, - balanced_score, - enhance, + iterations, unbalanced_score, balanced_score, enhance, ); // The only guarantee of balancing is such that the first and third element of the score // cannot decrease. 
assert!( balanced_score[0] >= unbalanced_score[0] && - balanced_score[1] == unbalanced_score[1] && - balanced_score[2] <= unbalanced_score[2] + balanced_score[1] == unbalanced_score[1] && + balanced_score[2] <= unbalanced_score[2] ); }); } diff --git a/primitives/npos-elections/fuzzer/src/reduce.rs b/primitives/npos-elections/fuzzer/src/reduce.rs index 4ee2468d9d140..a7e77fdd516a5 100644 --- a/primitives/npos-elections/fuzzer/src/reduce.rs +++ b/primitives/npos-elections/fuzzer/src/reduce.rs @@ -34,8 +34,8 @@ use honggfuzz::fuzz; mod common; use common::to_range; -use sp_npos_elections::{reduce, to_support_map, ExtendedBalance, StakedAssignment}; use rand::{self, Rng, RngCore, SeedableRng}; +use sp_npos_elections::{reduce, to_support_map, ExtendedBalance, StakedAssignment}; type Balance = u128; type AccountId = u64; @@ -50,13 +50,8 @@ fn main() { let rng = rand::rngs::SmallRng::seed_from_u64(seed); target_count = to_range(target_count, 100, 1000); voter_count = to_range(voter_count, 100, 2000); - let (assignments, winners) = generate_random_phragmen_assignment( - voter_count, - target_count, - 8, - 8, - rng - ); + let (assignments, winners) = + generate_random_phragmen_assignment(voter_count, target_count, 8, 8, rng); reduce_and_compare(&assignments, &winners); }); } @@ -82,23 +77,27 @@ fn generate_random_phragmen_assignment( (1..=voter_count).for_each(|acc| { let mut targets_to_chose_from = all_targets.clone(); - let targets_to_chose = if edge_per_voter_var > 0 { rng.gen_range( - avg_edge_per_voter - edge_per_voter_var, - avg_edge_per_voter + edge_per_voter_var, - ) } else { avg_edge_per_voter }; - - let distribution = (0..targets_to_chose).map(|_| { - let target = targets_to_chose_from.remove(rng.gen_range(0, targets_to_chose_from.len())); - if winners.iter().find(|w| **w == target).is_none() { - winners.push(target.clone()); - } - (target, rng.gen_range(1 * KSM, 100 * KSM)) - }).collect::>(); - - assignments.push(StakedAssignment { - who: (acc as AccountId), - 
distribution, - }); + let targets_to_chose = if edge_per_voter_var > 0 { + rng.gen_range( + avg_edge_per_voter - edge_per_voter_var, + avg_edge_per_voter + edge_per_voter_var, + ) + } else { + avg_edge_per_voter + }; + + let distribution = (0..targets_to_chose) + .map(|_| { + let target = + targets_to_chose_from.remove(rng.gen_range(0, targets_to_chose_from.len())); + if winners.iter().find(|w| **w == target).is_none() { + winners.push(target.clone()); + } + (target, rng.gen_range(1 * KSM, 100 * KSM)) + }) + .collect::>(); + + assignments.push(StakedAssignment { who: (acc as AccountId), distribution }); }); (assignments, winners) @@ -117,10 +116,7 @@ fn assert_assignments_equal( } } -fn reduce_and_compare( - assignment: &Vec>, - winners: &Vec, -) { +fn reduce_and_compare(assignment: &Vec>, winners: &Vec) { let mut altered_assignment = assignment.clone(); let n = assignment.len() as u32; let m = winners.len() as u32; @@ -138,15 +134,13 @@ fn reduce_and_compare( num_changed, ); - assert_assignments_equal( - winners, - &assignment, - &altered_assignment, - ); + assert_assignments_equal(winners, &assignment, &altered_assignment); } fn assignment_len(assignments: &[StakedAssignment]) -> u32 { let mut counter = 0; - assignments.iter().for_each(|x| x.distribution.iter().for_each(|_| counter += 1)); + assignments + .iter() + .for_each(|x| x.distribution.iter().for_each(|_| counter += 1)); counter } diff --git a/primitives/npos-elections/src/assignments.rs b/primitives/npos-elections/src/assignments.rs index aacd01a030692..b0dd29dc19041 100644 --- a/primitives/npos-elections/src/assignments.rs +++ b/primitives/npos-elections/src/assignments.rs @@ -18,8 +18,11 @@ //! Structs and helpers for distributing a voter's stake among various winners. 
use crate::{Error, ExtendedBalance, IdentifierT, PerThing128, __OrInvalidIndex}; -use codec::{Encode, Decode}; -use sp_arithmetic::{traits::{Bounded, Zero}, Normalizable, PerThing}; +use codec::{Decode, Encode}; +use sp_arithmetic::{ + traits::{Bounded, Zero}, + Normalizable, PerThing, +}; use sp_core::RuntimeDebug; use sp_std::vec::Vec; @@ -61,10 +64,7 @@ impl Assignment { }) .collect::>(); - StakedAssignment { - who: self.who, - distribution, - } + StakedAssignment { who: self.who, distribution } } /// Try and normalize this assignment. @@ -83,12 +83,13 @@ impl Assignment { .map(|(_, p)| *p) .collect::>() .normalize(P::one()) - .map(|normalized_ratios| - self.distribution - .iter_mut() - .zip(normalized_ratios) - .for_each(|((_, old), corrected)| { *old = corrected; }) - ) + .map(|normalized_ratios| { + self.distribution.iter_mut().zip(normalized_ratios).for_each( + |((_, old), corrected)| { + *old = corrected; + }, + ) + }) } } @@ -118,7 +119,8 @@ impl StakedAssignment { AccountId: IdentifierT, { let stake = self.total(); - let distribution = self.distribution + let distribution = self + .distribution .into_iter() .filter_map(|(target, w)| { let per_thing = P::from_rational(w, stake); @@ -130,10 +132,7 @@ impl StakedAssignment { }) .collect::>(); - Assignment { - who: self.who, - distribution, - } + Assignment { who: self.who, distribution } } /// Try and normalize this assignment. @@ -152,12 +151,13 @@ impl StakedAssignment { .map(|(_, ref weight)| *weight) .collect::>() .normalize(stake) - .map(|normalized_weights| - self.distribution - .iter_mut() - .zip(normalized_weights.into_iter()) - .for_each(|((_, weight), corrected)| { *weight = corrected; }) - ) + .map(|normalized_weights| { + self.distribution.iter_mut().zip(normalized_weights.into_iter()).for_each( + |((_, weight), corrected)| { + *weight = corrected; + }, + ) + }) } /// Get the total stake of this assignment (aka voter budget). 
diff --git a/primitives/npos-elections/src/balancing.rs b/primitives/npos-elections/src/balancing.rs index 48cb980d78c33..378ebe8e84fd1 100644 --- a/primitives/npos-elections/src/balancing.rs +++ b/primitives/npos-elections/src/balancing.rs @@ -26,7 +26,7 @@ //! //! See [`balance`] for more information. -use crate::{IdentifierT, Voter, ExtendedBalance, Edge}; +use crate::{Edge, ExtendedBalance, IdentifierT, Voter}; use sp_arithmetic::traits::Zero; use sp_std::prelude::*; @@ -57,19 +57,23 @@ pub fn balance( iterations: usize, tolerance: ExtendedBalance, ) -> usize { - if iterations == 0 { return 0; } + if iterations == 0 { + return 0 + } let mut iter = 0; loop { let mut max_diff = 0; for voter in voters.iter_mut() { let diff = balance_voter(voter, tolerance); - if diff > max_diff { max_diff = diff; } + if diff > max_diff { + max_diff = diff; + } } iter += 1; if max_diff <= tolerance || iter >= iterations { - break iter; + break iter } } } @@ -80,7 +84,8 @@ pub(crate) fn balance_voter( tolerance: ExtendedBalance, ) -> ExtendedBalance { // create a shallow copy of the elected ones. The original one will not be used henceforth. - let mut elected_edges = voter.edges + let mut elected_edges = voter + .edges .iter_mut() .filter(|e| e.candidate.borrow().elected) .collect::>>(); @@ -91,9 +96,8 @@ pub(crate) fn balance_voter( } // amount of stake from this voter that is used in edges. - let stake_used = elected_edges - .iter() - .fold(0, |a: ExtendedBalance, e| a.saturating_add(e.weight)); + let stake_used = + elected_edges.iter().fold(0, |a: ExtendedBalance, e| a.saturating_add(e.weight)); // backed stake of each of the elected edges. let backed_stakes = elected_edges @@ -104,13 +108,7 @@ pub(crate) fn balance_voter( // backed stake of all the edges for whom we've spent some stake. 
let backing_backed_stake = elected_edges .iter() - .filter_map(|e| - if e.weight > 0 { - Some(e.candidate.borrow().backed_stake) - } else { - None - } - ) + .filter_map(|e| if e.weight > 0 { Some(e.candidate.borrow().backed_stake) } else { None }) .collect::>(); let difference = if backing_backed_stake.len() > 0 { @@ -125,7 +123,7 @@ pub(crate) fn balance_voter( let mut difference = max_stake.saturating_sub(*min_stake); difference = difference.saturating_add(voter.budget.saturating_sub(stake_used)); if difference < tolerance { - return difference; + return difference } difference } else { @@ -156,12 +154,18 @@ pub(crate) fn balance_voter( cumulative_backed_stake = cumulative_backed_stake.saturating_add(backed_stake); } - let last_stake = elected_edges.get(last_index).expect( - "length of elected_edges is greater than or equal 2; last_index index is at \ - the minimum elected_edges.len() - 1; index is within range; qed" - ).candidate.borrow().backed_stake; + let last_stake = elected_edges + .get(last_index) + .expect( + "length of elected_edges is greater than or equal 2; last_index index is at \ + the minimum elected_edges.len() - 1; index is within range; qed", + ) + .candidate + .borrow() + .backed_stake; let ways_to_split = last_index + 1; - let excess = voter.budget + let excess = voter + .budget .saturating_add(cumulative_backed_stake) .saturating_sub(last_stake.saturating_mul(ways_to_split as ExtendedBalance)); diff --git a/primitives/npos-elections/src/helpers.rs b/primitives/npos-elections/src/helpers.rs index 9fdf76118f89f..5b02eaf2ad2e8 100644 --- a/primitives/npos-elections/src/helpers.rs +++ b/primitives/npos-elections/src/helpers.rs @@ -17,7 +17,9 @@ //! Helper methods for npos-elections. 
-use crate::{Assignment, Error, IdentifierT, PerThing128, StakedAssignment, VoteWeight, WithApprovalOf}; +use crate::{ + Assignment, Error, IdentifierT, PerThing128, StakedAssignment, VoteWeight, WithApprovalOf, +}; use sp_arithmetic::PerThing; use sp_std::prelude::*; @@ -52,7 +54,8 @@ where staked .iter_mut() .map(|a| { - a.try_normalize(stake_of(&a.who).into()).map_err(|err| Error::ArithmeticError(err)) + a.try_normalize(stake_of(&a.who).into()) + .map_err(|err| Error::ArithmeticError(err)) }) .collect::>()?; Ok(staked) @@ -113,14 +116,8 @@ mod tests { assert_eq!( staked, vec![ - StakedAssignment { - who: 1u32, - distribution: vec![(10u32, 50), (20, 50),] - }, - StakedAssignment { - who: 2u32, - distribution: vec![(10u32, 33), (20, 67),] - } + StakedAssignment { who: 1u32, distribution: vec![(10u32, 50), (20, 50),] }, + StakedAssignment { who: 2u32, distribution: vec![(10u32, 33), (20, 67),] } ] ); } diff --git a/primitives/npos-elections/src/lib.rs b/primitives/npos-elections/src/lib.rs index 4315d14de75e9..a778a4ab36038 100644 --- a/primitives/npos-elections/src/lib.rs +++ b/primitives/npos-elections/src/lib.rs @@ -78,6 +78,7 @@ use sp_arithmetic::{ traits::{Bounded, UniqueSaturatedInto, Zero}, Normalizable, PerThing, Rational128, ThresholdOrd, }; +use sp_core::RuntimeDebug; use sp_std::{ cell::RefCell, cmp::Ordering, @@ -88,7 +89,6 @@ use sp_std::{ prelude::*, rc::Rc, }; -use sp_core::RuntimeDebug; use codec::{Decode, Encode}; #[cfg(feature = "std")] @@ -100,21 +100,21 @@ mod mock; mod tests; mod assignments; -pub mod phragmen; pub mod balancing; -pub mod phragmms; -pub mod node; -pub mod reduce; pub mod helpers; +pub mod node; +pub mod phragmen; +pub mod phragmms; pub mod pjr; +pub mod reduce; -pub use assignments::{Assignment, IndexAssignment, StakedAssignment, IndexAssignmentOf}; -pub use reduce::reduce; +pub use assignments::{Assignment, IndexAssignment, IndexAssignmentOf, StakedAssignment}; +pub use balancing::*; pub use helpers::*; pub use phragmen::*; 
pub use phragmms::*; -pub use balancing::*; pub use pjr::*; +pub use reduce::reduce; // re-export the compact macro, with the dependencies of the macro. #[doc(hidden)] @@ -208,9 +208,7 @@ where /// Get the average edge count. fn average_edge_count(&self) -> usize { - self.edge_count() - .checked_div(self.voter_count()) - .unwrap_or(0) + self.edge_count().checked_div(self.voter_count()).unwrap_or(0) } /// Remove a certain voter. @@ -381,9 +379,14 @@ impl Voter { .into_iter() .filter_map(|e| { let per_thing = P::from_rational(e.weight, budget); - // trim zero edges. - if per_thing.is_zero() { None } else { Some((e.who, per_thing)) } - }).collect::>(); + // trim zero edges. + if per_thing.is_zero() { + None + } else { + Some((e.who, per_thing)) + } + }) + .collect::>(); if distribution.len() > 0 { Some(Assignment { who, distribution }) @@ -613,10 +616,7 @@ pub fn is_score_better(this: ElectionScore, that: ElectionScore, ep match this .iter() .zip(that.iter()) - .map(|(thi, tha)| ( - thi.ge(&tha), - thi.tcmp(&tha, epsilon.mul_ceil(*tha)), - )) + .map(|(thi, tha)| (thi.ge(&tha), thi.tcmp(&tha, epsilon.mul_ceil(*tha)))) .collect::>() .as_slice() { @@ -655,40 +655,34 @@ pub fn setup_inputs( }) .collect::>>(); - let voters = initial_voters.into_iter().filter_map(|(who, voter_stake, votes)| { - let mut edges: Vec> = Vec::with_capacity(votes.len()); - for v in votes { - if edges.iter().any(|e| e.who == v) { - // duplicate edge. - continue; - } - if let Some(idx) = c_idx_cache.get(&v) { - // This candidate is valid + already cached. - let mut candidate = candidates[*idx].borrow_mut(); - candidate.approval_stake = - candidate.approval_stake.saturating_add(voter_stake.into()); - edges.push( - Edge { + let voters = initial_voters + .into_iter() + .filter_map(|(who, voter_stake, votes)| { + let mut edges: Vec> = Vec::with_capacity(votes.len()); + for v in votes { + if edges.iter().any(|e| e.who == v) { + // duplicate edge. 
+ continue + } + if let Some(idx) = c_idx_cache.get(&v) { + // This candidate is valid + already cached. + let mut candidate = candidates[*idx].borrow_mut(); + candidate.approval_stake = + candidate.approval_stake.saturating_add(voter_stake.into()); + edges.push(Edge { who: v.clone(), candidate: Rc::clone(&candidates[*idx]), ..Default::default() - } - ); - } // else {} would be wrong votes. We don't really care about it. - } - if edges.is_empty() { - None - } - else { - Some(Voter { - who, - edges: edges, - budget: voter_stake.into(), - load: Rational128::zero(), - }) - } - - }).collect::>(); + }); + } // else {} would be wrong votes. We don't really care about it. + } + if edges.is_empty() { + None + } else { + Some(Voter { who, edges, budget: voter_stake.into(), load: Rational128::zero() }) + } + }) + .collect::>(); - (candidates, voters,) + (candidates, voters) } diff --git a/primitives/npos-elections/src/mock.rs b/primitives/npos-elections/src/mock.rs index 363550ed8efcc..1be591e4ea6f5 100644 --- a/primitives/npos-elections/src/mock.rs +++ b/primitives/npos-elections/src/mock.rs @@ -20,12 +20,12 @@ #![cfg(any(test, mocks))] use std::{ - collections::{HashSet, HashMap}, + collections::{HashMap, HashSet}, convert::TryInto, hash::Hash, }; -use rand::{self, Rng, seq::SliceRandom}; +use rand::{self, seq::SliceRandom, Rng}; use sp_arithmetic::{ traits::{One, SaturatedConversion, Zero}, PerThing, @@ -33,7 +33,7 @@ use sp_arithmetic::{ use sp_runtime::assert_eq_error_rate; use sp_std::collections::btree_map::BTreeMap; -use crate::{Assignment, ElectionResult, ExtendedBalance, PerThing128, VoteWeight, seq_phragmen}; +use crate::{seq_phragmen, Assignment, ElectionResult, ExtendedBalance, PerThing128, VoteWeight}; sp_npos_elections_compact::generate_solution_type!( #[compact] @@ -87,7 +87,7 @@ pub(crate) type _SupportMap = BTreeMap>; #[derive(Debug, Clone)] pub(crate) struct _ElectionResult { pub winners: Vec<(A, ExtendedBalance)>, - pub assignments: Vec<(A, 
Vec<_Assignment>)> + pub assignments: Vec<(A, Vec<_Assignment>)>, } pub(crate) fn auto_generate_self_voters(candidates: &[A]) -> Vec<(A, Vec)> { @@ -99,7 +99,8 @@ pub(crate) fn elect_float( initial_candidates: Vec, initial_voters: Vec<(A, Vec)>, stake_of: impl Fn(&A) -> VoteWeight, -) -> Option<_ElectionResult> where +) -> Option<_ElectionResult> +where A: Default + Ord + Copy, { let mut elected_candidates: Vec<(A, ExtendedBalance)>; @@ -123,17 +124,10 @@ pub(crate) fn elect_float( for v in votes { if let Some(idx) = c_idx_cache.get(&v) { candidates[*idx].approval_stake = candidates[*idx].approval_stake + voter_stake; - edges.push( - _Edge { who: v.clone(), candidate_index: *idx, ..Default::default() } - ); + edges.push(_Edge { who: v.clone(), candidate_index: *idx, ..Default::default() }); } } - _Voter { - who, - edges: edges, - budget: voter_stake, - load: 0f64, - } + _Voter { who, edges, budget: voter_stake, load: 0f64 } })); let to_elect = candidate_count.min(candidates.len()); @@ -179,7 +173,9 @@ pub(crate) fn elect_float( for n in &mut voters { let mut assignment = (n.who.clone(), vec![]); for e in &mut n.edges { - if let Some(c) = elected_candidates.iter().cloned().map(|(c, _)| c).find(|c| *c == e.who) { + if let Some(c) = + elected_candidates.iter().cloned().map(|(c, _)| c).find(|c| *c == e.who) + { if c != n.who { let ratio = e.load / n.load; assignment.1.push((e.who.clone(), ratio)); @@ -191,10 +187,7 @@ pub(crate) fn elect_float( } } - Some(_ElectionResult { - winners: elected_candidates, - assignments: assigned, - }) + Some(_ElectionResult { winners: elected_candidates, assignments: assigned }) } pub(crate) fn equalize_float( @@ -211,18 +204,14 @@ pub(crate) fn equalize_float( let mut max_diff = 0.0; for (voter, assignment) in assignments.iter_mut() { let voter_budget = stake_of(&voter); - let diff = do_equalize_float( - voter, - voter_budget, - assignment, - supports, - tolerance, - ); - if diff > max_diff { max_diff = diff; } + let diff = 
do_equalize_float(voter, voter_budget, assignment, supports, tolerance); + if diff > max_diff { + max_diff = diff; + } } if max_diff < tolerance { - break; + break } } } @@ -232,21 +221,20 @@ pub(crate) fn do_equalize_float( budget_balance: VoteWeight, elected_edges: &mut Vec<_Assignment>, support_map: &mut _SupportMap, - tolerance: f64 -) -> f64 where + tolerance: f64, +) -> f64 +where A: Ord + Clone, { let budget = budget_balance as f64; - if elected_edges.is_empty() { return 0.0; } + if elected_edges.is_empty() { + return 0.0 + } - let stake_used = elected_edges - .iter() - .fold(0.0, |s, e| s + e.1); + let stake_used = elected_edges.iter().fold(0.0, |s, e| s + e.1); - let backed_stakes_iter = elected_edges - .iter() - .filter_map(|e| support_map.get(&e.0)) - .map(|e| e.total); + let backed_stakes_iter = + elected_edges.iter().filter_map(|e| support_map.get(&e.0)).map(|e| e.total); let backing_backed_stake = elected_edges .iter() @@ -268,7 +256,7 @@ pub(crate) fn do_equalize_float( difference = max_stake - min_stake; difference = difference + budget - stake_used; if difference < tolerance { - return difference; + return difference } } else { difference = budget; @@ -283,11 +271,12 @@ pub(crate) fn do_equalize_float( e.1 = 0.0; }); - elected_edges.sort_by(|x, y| - support_map.get(&x.0) + elected_edges.sort_by(|x, y| { + support_map + .get(&x.0) .and_then(|x| support_map.get(&y.0).and_then(|y| x.total.partial_cmp(&y.total))) .unwrap_or(sp_std::cmp::Ordering::Equal) - ); + }); let mut cumulative_stake = 0.0; let mut last_index = elected_edges.len() - 1; @@ -318,20 +307,22 @@ pub(crate) fn do_equalize_float( difference } - -pub(crate) fn create_stake_of(stakes: &[(AccountId, VoteWeight)]) - -> impl Fn(&AccountId) -> VoteWeight -{ +pub(crate) fn create_stake_of( + stakes: &[(AccountId, VoteWeight)], +) -> impl Fn(&AccountId) -> VoteWeight { let mut storage = BTreeMap::::new(); - stakes.iter().for_each(|s| { storage.insert(s.0, s.1); }); + stakes.iter().for_each(|s| { 
+ storage.insert(s.0, s.1); + }); move |who: &AccountId| -> VoteWeight { storage.get(who).unwrap().to_owned() } } - pub fn check_assignments_sum(assignments: &[Assignment]) { for Assignment { distribution, .. } in assignments { let mut sum: u128 = Zero::zero(); - distribution.iter().for_each(|(_, p)| sum += p.deconstruct().saturated_into::()); + distribution + .iter() + .for_each(|(_, p)| sum += p.deconstruct().saturated_into::()); assert_eq!(sum, T::ACCURACY.saturated_into(), "Assignment ratio sum is not 100%"); } } @@ -341,8 +332,7 @@ pub(crate) fn run_and_compare( voters: Vec<(AccountId, Vec)>, stake_of: FS, to_elect: usize, -) -where +) where Output: PerThing128, FS: Fn(&AccountId) -> VoteWeight, { @@ -350,24 +340,28 @@ where let ElectionResult { winners, assignments } = seq_phragmen::<_, Output>( to_elect, candidates.clone(), - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), - None - ).unwrap(); + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), + None, + ) + .unwrap(); // run float poc code. 
- let truth_value = elect_float( - to_elect, - candidates, - voters, - &stake_of, - ).unwrap(); + let truth_value = elect_float(to_elect, candidates, voters, &stake_of).unwrap(); - assert_eq!(winners.iter().map(|(x, _)| x).collect::>(), truth_value.winners.iter().map(|(x, _)| x).collect::>()); + assert_eq!( + winners.iter().map(|(x, _)| x).collect::>(), + truth_value.winners.iter().map(|(x, _)| x).collect::>() + ); for Assignment { who, distribution } in assignments.iter() { if let Some(float_assignments) = truth_value.assignments.iter().find(|x| x.0 == *who) { for (candidate, per_thingy) in distribution { - if let Some(float_assignment) = float_assignments.1.iter().find(|x| x.0 == *candidate ) { + if let Some(float_assignment) = + float_assignments.1.iter().find(|x| x.0 == *candidate) + { assert_eq_error_rate!( Output::from_float(float_assignment.1).deconstruct(), per_thingy.deconstruct(), @@ -376,8 +370,7 @@ where } else { panic!( "candidate mismatch. This should never happen. could not find ({:?}, {:?})", - candidate, - per_thingy, + candidate, per_thingy, ) } } @@ -394,13 +387,10 @@ pub(crate) fn build_support_map_float( stake_of: impl Fn(&AccountId) -> VoteWeight, ) -> _SupportMap { let mut supports = <_SupportMap>::new(); - result.winners - .iter() - .map(|(e, _)| (e, stake_of(e) as f64)) - .for_each(|(e, s)| { - let item = _Support { own: s, total: s, ..Default::default() }; - supports.insert(e.clone(), item); - }); + result.winners.iter().map(|(e, _)| (e, stake_of(e) as f64)).for_each(|(e, s)| { + let item = _Support { own: s, total: s, ..Default::default() }; + supports.insert(e.clone(), item); + }); for (n, assignment) in result.assignments.iter_mut() { for (c, r) in assignment.iter_mut() { diff --git a/primitives/npos-elections/src/node.rs b/primitives/npos-elections/src/node.rs index ae65318ff0461..c5c05b2a119af 100644 --- a/primitives/npos-elections/src/node.rs +++ b/primitives/npos-elections/src/node.rs @@ -55,11 +55,7 @@ impl sp_std::fmt::Debug for 
NodeId { f, "Node({:?}, {:?})", self.who, - if self.role == NodeRole::Voter { - "V" - } else { - "T" - } + if self.role == NodeRole::Voter { "V" } else { "T" } ) } } @@ -84,12 +80,7 @@ impl Eq for Node {} #[cfg(feature = "std")] impl fmt::Debug for Node { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!( - f, - "({:?} --> {:?})", - self.id, - self.parent.as_ref().map(|p| p.borrow().id.clone()) - ) + write!(f, "({:?} --> {:?})", self.id, self.parent.as_ref().map(|p| p.borrow().id.clone())) } } @@ -102,7 +93,7 @@ impl Node { /// Returns true if `other` is the parent of `who`. pub fn is_parent_of(who: &NodeRef, other: &NodeRef) -> bool { if who.borrow().parent.is_none() { - return false; + return false } who.borrow().parent.as_ref() == Some(other) } @@ -136,7 +127,7 @@ impl Node { while let Some(ref next_parent) = current.clone().borrow().parent { if visited.contains(next_parent) { - break; + break } parent_path.push(next_parent.clone()); current = next_parent.clone(); @@ -164,16 +155,7 @@ mod tests { #[test] fn basic_create_works() { let node = Node::new(id(10)); - assert_eq!( - node, - Node { - id: NodeId { - who: 10, - role: NodeRole::Target - }, - parent: None - } - ); + assert_eq!(node, Node { id: NodeId { who: 10, role: NodeRole::Target }, parent: None }); } #[test] @@ -209,17 +191,11 @@ mod tests { Node::set_parent_of(&e, &a); Node::set_parent_of(&a, &d); - assert_eq!( - Node::root(&e), - (d.clone(), vec![e.clone(), a.clone(), d.clone()]), - ); + assert_eq!(Node::root(&e), (d.clone(), vec![e.clone(), a.clone(), d.clone()]),); assert_eq!(Node::root(&a), (d.clone(), vec![a.clone(), d.clone()]),); - assert_eq!( - Node::root(&c), - (d.clone(), vec![c.clone(), b.clone(), a.clone(), d.clone()]), - ); + assert_eq!(Node::root(&c), (d.clone(), vec![c.clone(), b.clone(), a.clone(), d.clone()]),); // D A <-- B <-- C // F <-- / \ @@ -228,10 +204,7 @@ mod tests { assert_eq!(Node::root(&a), (f.clone(), vec![a.clone(), f.clone()]),); - assert_eq!( - 
Node::root(&c), - (f.clone(), vec![c.clone(), b.clone(), a.clone(), f.clone()]), - ); + assert_eq!(Node::root(&c), (f.clone(), vec![c.clone(), b.clone(), a.clone(), f.clone()]),); } #[test] diff --git a/primitives/npos-elections/src/phragmen.rs b/primitives/npos-elections/src/phragmen.rs index bbead91c938f8..0f9b144919761 100644 --- a/primitives/npos-elections/src/phragmen.rs +++ b/primitives/npos-elections/src/phragmen.rs @@ -75,11 +75,7 @@ pub fn seq_phragmen( ) -> Result, crate::Error> { let (candidates, voters) = setup_inputs(initial_candidates, initial_voters); - let (candidates, mut voters) = seq_phragmen_core::( - rounds, - candidates, - voters, - )?; + let (candidates, mut voters) = seq_phragmen_core::(rounds, candidates, voters)?; if let Some((iterations, tolerance)) = balance { // NOTE: might create zero-edges, but we will strip them again when we convert voter into @@ -152,7 +148,8 @@ pub fn seq_phragmen_core( voter.load.n(), voter.budget, candidate.approval_stake, - ).unwrap_or(Bounded::max_value()); + ) + .unwrap_or(Bounded::max_value()); let temp_d = voter.load.d(); let temp = Rational128::from(temp_n, temp_d); candidate.score = candidate.score.lazy_saturating_add(temp); @@ -188,13 +185,9 @@ pub fn seq_phragmen_core( for edge in &mut voter.edges { if edge.candidate.borrow().elected { // update internal state. - edge.weight = multiply_by_rational( - voter.budget, - edge.load.n(), - voter.load.n(), - ) - // If result cannot fit in u128. Not much we can do about it. - .unwrap_or(Bounded::max_value()); + edge.weight = multiply_by_rational(voter.budget, edge.load.n(), voter.load.n()) + // If result cannot fit in u128. Not much we can do about it. 
+ .unwrap_or(Bounded::max_value()); } else { edge.weight = 0 } diff --git a/primitives/npos-elections/src/phragmms.rs b/primitives/npos-elections/src/phragmms.rs index 2a643d3673a52..95551d9761fcb 100644 --- a/primitives/npos-elections/src/phragmms.rs +++ b/primitives/npos-elections/src/phragmms.rs @@ -1,4 +1,4 @@ - // This file is part of Substrate. +// This file is part of Substrate. // Copyright (C) 2020-2021 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 @@ -22,10 +22,10 @@ //! MMS algorithm. use crate::{ - IdentifierT, ElectionResult, ExtendedBalance, setup_inputs, VoteWeight, Voter, CandidatePtr, - balance, PerThing128, + balance, setup_inputs, CandidatePtr, ElectionResult, ExtendedBalance, IdentifierT, PerThing128, + VoteWeight, Voter, }; -use sp_arithmetic::{PerThing, Rational128, traits::Bounded}; +use sp_arithmetic::{traits::Bounded, PerThing, Rational128}; use sp_std::{prelude::*, rc::Rc}; /// Execute the phragmms method. @@ -62,15 +62,17 @@ pub fn phragmms( balance(&mut voters, iterations, tolerance); } } else { - break; + break } } - let mut assignments = voters.into_iter().filter_map(|v| v.into_assignment()).collect::>(); + let mut assignments = + voters.into_iter().filter_map(|v| v.into_assignment()).collect::>(); let _ = assignments.iter_mut().map(|a| a.try_normalize()).collect::>()?; - let winners = winners.into_iter().map(|w_ptr| - (w_ptr.borrow().who.clone(), w_ptr.borrow().backed_stake) - ).collect(); + let winners = winners + .into_iter() + .map(|w_ptr| (w_ptr.borrow().who.clone(), w_ptr.borrow().backed_stake)) + .collect(); Ok(ElectionResult { winners, assignments }) } @@ -101,10 +103,8 @@ pub(crate) fn calculate_max_score( for edge in voter.edges.iter() { let edge_candidate = edge.candidate.borrow(); if edge_candidate.elected { - let edge_contribution: ExtendedBalance = P::from_rational( - edge.weight, - edge_candidate.backed_stake, - ).deconstruct().into(); + let edge_contribution: ExtendedBalance = + 
P::from_rational(edge.weight, edge_candidate.backed_stake).deconstruct().into(); denominator_contribution += edge_contribution; } } @@ -125,7 +125,7 @@ pub(crate) fn calculate_max_score( for c_ptr in candidates.iter() { let mut candidate = c_ptr.borrow_mut(); - if candidate.approval_stake > 0 { + if candidate.approval_stake > 0 { // finalise the score value. let score_d = candidate.score.d(); let one: ExtendedBalance = P::ACCURACY.into(); @@ -153,7 +153,10 @@ pub(crate) fn calculate_max_score( // `RationalInfinite` as the score type does not introduce significant overhead. Then we // can switch the score type to `RationalInfinite` and ensure compatibility with any // crazy token scale. - let score_n = candidate.approval_stake.checked_mul(one).unwrap_or_else(|| Bounded::max_value()); + let score_n = candidate + .approval_stake + .checked_mul(one) + .unwrap_or_else(|| Bounded::max_value()); candidate.score = Rational128::from(score_n, score_d); // check if we have a new winner. @@ -180,7 +183,10 @@ pub(crate) fn apply_elected( elected_ptr: CandidatePtr, ) { let elected_who = elected_ptr.borrow().who.clone(); - let cutoff = elected_ptr.borrow().score.to_den(1) + let cutoff = elected_ptr + .borrow() + .score + .to_den(1) .expect("(n / d) < u128::MAX and (n' / 1) == (n / d), thus n' < u128::MAX'; qed.") .n(); @@ -193,18 +199,19 @@ pub(crate) fn apply_elected( elected_backed_stake = elected_backed_stake.saturating_add(new_edge_weight); // Iterate over all other edges. 
- for (_, edge) in voter.edges - .iter_mut() - .enumerate() - .filter(|(edge_index, edge_inner)| *edge_index != new_edge_index && edge_inner.weight > 0) - { + for (_, edge) in + voter.edges.iter_mut().enumerate().filter(|(edge_index, edge_inner)| { + *edge_index != new_edge_index && edge_inner.weight > 0 + }) { let mut edge_candidate = edge.candidate.borrow_mut(); if edge_candidate.backed_stake > cutoff { - let stake_to_take = edge.weight.saturating_mul(cutoff) / edge_candidate.backed_stake.max(1); + let stake_to_take = + edge.weight.saturating_mul(cutoff) / edge_candidate.backed_stake.max(1); // subtract this amount from this edge. edge.weight = edge.weight.saturating_sub(stake_to_take); - edge_candidate.backed_stake = edge_candidate.backed_stake.saturating_sub(stake_to_take); + edge_candidate.backed_stake = + edge_candidate.backed_stake.saturating_sub(stake_to_take); // inject it into the outer loop's edge. elected_backed_stake = elected_backed_stake.saturating_add(stake_to_take); @@ -223,7 +230,7 @@ pub(crate) fn apply_elected( #[cfg(test)] mod tests { use super::*; - use crate::{ElectionResult, Assignment}; + use crate::{Assignment, ElectionResult}; use sp_runtime::{Perbill, Percent}; use sp_std::rc::Rc; @@ -232,32 +239,31 @@ mod tests { //! Manually run the internal steps of phragmms. In each round we select a new winner by //! `max_score`, then apply this change by `apply_elected`, and finally do a `balance` round. 
let candidates = vec![1, 2, 3]; - let voters = vec![ - (10, 10, vec![1, 2]), - (20, 20, vec![1, 3]), - (30, 30, vec![2, 3]), - ]; + let voters = vec![(10, 10, vec![1, 2]), (20, 20, vec![1, 3]), (30, 30, vec![2, 3])]; let (candidates, mut voters) = setup_inputs(candidates, voters); // Round 1 - let winner = calculate_max_score::(candidates.as_ref(), voters.as_ref()).unwrap(); + let winner = + calculate_max_score::(candidates.as_ref(), voters.as_ref()).unwrap(); assert_eq!(winner.borrow().who, 3); assert_eq!(winner.borrow().score, 50u32.into()); apply_elected(&mut voters, Rc::clone(&winner)); assert_eq!( - voters.iter().find(|x| x.who == 30).map(|v| ( - v.who, - v.edges.iter().map(|e| (e.who, e.weight)).collect::>() - )).unwrap(), + voters + .iter() + .find(|x| x.who == 30) + .map(|v| (v.who, v.edges.iter().map(|e| (e.who, e.weight)).collect::>())) + .unwrap(), (30, vec![(2, 0), (3, 30)]), ); assert_eq!( - voters.iter().find(|x| x.who == 20).map(|v| ( - v.who, - v.edges.iter().map(|e| (e.who, e.weight)).collect::>() - )).unwrap(), + voters + .iter() + .find(|x| x.who == 20) + .map(|v| (v.who, v.edges.iter().map(|e| (e.who, e.weight)).collect::>())) + .unwrap(), (20, vec![(1, 0), (3, 20)]), ); @@ -270,30 +276,34 @@ mod tests { balance(&mut voters, 10, 0); // round 2 - let winner = calculate_max_score::(candidates.as_ref(), voters.as_ref()).unwrap(); + let winner = + calculate_max_score::(candidates.as_ref(), voters.as_ref()).unwrap(); assert_eq!(winner.borrow().who, 2); assert_eq!(winner.borrow().score, 25u32.into()); apply_elected(&mut voters, Rc::clone(&winner)); assert_eq!( - voters.iter().find(|x| x.who == 30).map(|v| ( - v.who, - v.edges.iter().map(|e| (e.who, e.weight)).collect::>() - )).unwrap(), + voters + .iter() + .find(|x| x.who == 30) + .map(|v| (v.who, v.edges.iter().map(|e| (e.who, e.weight)).collect::>())) + .unwrap(), (30, vec![(2, 15), (3, 15)]), ); assert_eq!( - voters.iter().find(|x| x.who == 20).map(|v| ( - v.who, - v.edges.iter().map(|e| (e.who, 
e.weight)).collect::>() - )).unwrap(), + voters + .iter() + .find(|x| x.who == 20) + .map(|v| (v.who, v.edges.iter().map(|e| (e.who, e.weight)).collect::>())) + .unwrap(), (20, vec![(1, 0), (3, 20)]), ); assert_eq!( - voters.iter().find(|x| x.who == 10).map(|v| ( - v.who, - v.edges.iter().map(|e| (e.who, e.weight)).collect::>() - )).unwrap(), + voters + .iter() + .find(|x| x.who == 10) + .map(|v| (v.who, v.edges.iter().map(|e| (e.who, e.weight)).collect::>())) + .unwrap(), (10, vec![(1, 0), (2, 10)]), ); @@ -306,24 +316,27 @@ mod tests { balance(&mut voters, 10, 0); assert_eq!( - voters.iter().find(|x| x.who == 30).map(|v| ( - v.who, - v.edges.iter().map(|e| (e.who, e.weight)).collect::>() - )).unwrap(), + voters + .iter() + .find(|x| x.who == 30) + .map(|v| (v.who, v.edges.iter().map(|e| (e.who, e.weight)).collect::>())) + .unwrap(), (30, vec![(2, 20), (3, 10)]), ); assert_eq!( - voters.iter().find(|x| x.who == 20).map(|v| ( - v.who, - v.edges.iter().map(|e| (e.who, e.weight)).collect::>() - )).unwrap(), + voters + .iter() + .find(|x| x.who == 20) + .map(|v| (v.who, v.edges.iter().map(|e| (e.who, e.weight)).collect::>())) + .unwrap(), (20, vec![(1, 0), (3, 20)]), ); assert_eq!( - voters.iter().find(|x| x.who == 10).map(|v| ( - v.who, - v.edges.iter().map(|e| (e.who, e.weight)).collect::>() - )).unwrap(), + voters + .iter() + .find(|x| x.who == 10) + .map(|v| (v.who, v.edges.iter().map(|e| (e.who, e.weight)).collect::>())) + .unwrap(), (10, vec![(1, 0), (2, 10)]), ); } @@ -331,25 +344,16 @@ mod tests { #[test] fn basic_election_works() { let candidates = vec![1, 2, 3]; - let voters = vec![ - (10, 10, vec![1, 2]), - (20, 20, vec![1, 3]), - (30, 30, vec![2, 3]), - ]; + let voters = vec![(10, 10, vec![1, 2]), (20, 20, vec![1, 3]), (30, 30, vec![2, 3])]; - let ElectionResult { winners, assignments } = phragmms::<_, Perbill>(2, candidates, voters, Some((2, 0))).unwrap(); + let ElectionResult { winners, assignments } = + phragmms::<_, Perbill>(2, candidates, voters, 
Some((2, 0))).unwrap(); assert_eq!(winners, vec![(3, 30), (2, 30)]); assert_eq!( assignments, vec![ - Assignment { - who: 10u64, - distribution: vec![(2, Perbill::one())], - }, - Assignment { - who: 20, - distribution: vec![(3, Perbill::one())], - }, + Assignment { who: 10u64, distribution: vec![(2, Perbill::one())] }, + Assignment { who: 20, distribution: vec![(3, Perbill::one())] }, Assignment { who: 30, distribution: vec![ @@ -374,13 +378,9 @@ mod tests { (130, 1000, vec![61, 71]), ]; - let ElectionResult { winners, assignments: _ } = phragmms::<_, Perbill>(4, candidates, voters, Some((2, 0))).unwrap(); - assert_eq!(winners, vec![ - (11, 3000), - (31, 2000), - (51, 1500), - (61, 1500), - ]); + let ElectionResult { winners, assignments: _ } = + phragmms::<_, Perbill>(4, candidates, voters, Some((2, 0))).unwrap(); + assert_eq!(winners, vec![(11, 3000), (31, 2000), (51, 1500), (61, 1500),]); } #[test] @@ -391,7 +391,8 @@ mod tests { // give a bit more to 1 and 3. voters.push((2, u64::MAX, vec![1, 3])); - let ElectionResult { winners, assignments: _ } = phragmms::<_, Perbill>(2, candidates, voters, Some((2, 0))).unwrap(); + let ElectionResult { winners, assignments: _ } = + phragmms::<_, Perbill>(2, candidates, voters, Some((2, 0))).unwrap(); assert_eq!(winners.into_iter().map(|(w, _)| w).collect::>(), vec![1u32, 3]); } } diff --git a/primitives/npos-elections/src/pjr.rs b/primitives/npos-elections/src/pjr.rs index 290110b14e650..e87c9a7db4c82 100644 --- a/primitives/npos-elections/src/pjr.rs +++ b/primitives/npos-elections/src/pjr.rs @@ -1,4 +1,4 @@ - // This file is part of Substrate. +// This file is part of Substrate. // Copyright (C) 2021 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 @@ -23,20 +23,11 @@ //! See [`pjr_check`] which is the main entry point of the module. 
use crate::{ - Candidate, - CandidatePtr, - Edge, - ExtendedBalance, - IdentifierT, - Support, - SupportMap, - Supports, - Voter, - VoteWeight, + Candidate, CandidatePtr, Edge, ExtendedBalance, IdentifierT, Support, SupportMap, Supports, + VoteWeight, Voter, }; -use sp_std::{rc::Rc, vec::Vec}; -use sp_std::collections::btree_map::BTreeMap; use sp_arithmetic::{traits::Zero, Perbill}; +use sp_std::{collections::btree_map::BTreeMap, rc::Rc, vec::Vec}; /// The type used as the threshold. /// /// Just some reading sugar; Must always be same as [`ExtendedBalance`]; @@ -60,10 +51,8 @@ pub fn standard_threshold( ) -> Threshold { weights .into_iter() - .fold(Threshold::zero(), |acc, elem| { - acc.saturating_add(elem) - }) - / committee_size.max(1) as Threshold + .fold(Threshold::zero(), |acc, elem| acc.saturating_add(elem)) / + committee_size.max(1) as Threshold } /// Check a solution to be PJR. @@ -74,7 +63,10 @@ pub fn pjr_check( all_candidates: Vec, all_voters: Vec<(AccountId, VoteWeight, Vec)>, ) -> Result<(), AccountId> { - let t = standard_threshold(supports.len(), all_voters.iter().map(|voter| voter.1 as ExtendedBalance)); + let t = standard_threshold( + supports.len(), + all_voters.iter().map(|voter| voter.1 as ExtendedBalance), + ); t_pjr_check(supports, all_candidates, all_voters, t) } @@ -120,11 +112,7 @@ pub fn t_pjr_check( t: Threshold, ) -> Result<(), AccountId> { // First order of business: derive `(candidates, voters)` from `supports`. - let (candidates, voters) = prepare_pjr_input( - supports, - all_candidates, - all_voters, - ); + let (candidates, voters) = prepare_pjr_input(supports, all_candidates, all_voters); // compute with threshold t. 
pjr_check_core(candidates.as_ref(), voters.as_ref(), t) } @@ -141,7 +129,9 @@ pub fn pjr_check_core( t: Threshold, ) -> Result<(), AccountId> { let unelected = candidates.iter().filter(|c| !c.borrow().elected); - let maybe_max_pre_score = unelected.map(|c| (pre_score(Rc::clone(c), voters, t), c.borrow().who.clone())).max(); + let maybe_max_pre_score = unelected + .map(|c| (pre_score(Rc::clone(c), voters, t), c.borrow().who.clone())) + .max(); // if unelected is empty then the solution is indeed PJR. match maybe_max_pre_score { Some((max_pre_score, counter_example)) if max_pre_score >= t => Err(counter_example), @@ -165,7 +155,10 @@ pub fn validate_pjr_challenge( all_candidates: Vec, all_voters: Vec<(AccountId, VoteWeight, Vec)>, ) -> bool { - let threshold = standard_threshold(supports.len(), all_voters.iter().map(|voter| voter.1 as ExtendedBalance)); + let threshold = standard_threshold( + supports.len(), + all_voters.iter().map(|voter| voter.1 as ExtendedBalance), + ); validate_t_pjr_challenge(counter_example, supports, all_candidates, all_voters, threshold) } @@ -186,11 +179,7 @@ pub fn validate_t_pjr_challenge( all_voters: Vec<(AccountId, VoteWeight, Vec)>, threshold: Threshold, ) -> bool { - let (candidates, voters) = prepare_pjr_input( - supports, - all_candidates, - all_voters, - ); + let (candidates, voters) = prepare_pjr_input(supports, all_candidates, all_voters); validate_pjr_challenge_core(counter_example, &candidates, &voters, threshold) } @@ -219,10 +208,11 @@ fn validate_pjr_challenge_core( // unsafe code leveraging the existing `candidates_index`: allocate an uninitialized vector of // appropriate length, then copy in all the elements. We'd really prefer to avoid unsafe code // in the runtime, though. 
- let candidate = match candidates.iter().find(|candidate| candidate.borrow().who == counter_example) { - None => return false, - Some(candidate) => candidate.clone(), - }; + let candidate = + match candidates.iter().find(|candidate| candidate.borrow().who == counter_example) { + None => return false, + Some(candidate) => candidate.clone(), + }; pre_score(candidate, &voters, threshold) >= threshold } @@ -261,10 +251,14 @@ fn prepare_pjr_input( let mut candidates_index: BTreeMap = BTreeMap::new(); // dump the staked assignments in a voter-major map for faster access down the road. - let mut assignment_map: BTreeMap> = BTreeMap::new(); + let mut assignment_map: BTreeMap> = + BTreeMap::new(); for (winner_id, Support { voters, .. }) in supports.iter() { for (voter_id, support) in voters.iter() { - assignment_map.entry(voter_id.clone()).or_default().push((winner_id.clone(), *support)); + assignment_map + .entry(voter_id.clone()) + .or_default() + .push((winner_id.clone(), *support)); } } @@ -282,47 +276,56 @@ fn prepare_pjr_input( let supports: SupportMap = supports.iter().cloned().collect(); // collect all candidates and winners into a unified `Vec`. - let candidates = all_candidates.into_iter().enumerate().map(|(i, c)| { - candidates_index.insert(c.clone(), i); + let candidates = all_candidates + .into_iter() + .enumerate() + .map(|(i, c)| { + candidates_index.insert(c.clone(), i); - // set the backing value and elected flag if the candidate is among the winners. - let who = c; - let maybe_support = supports.get(&who); - let elected = maybe_support.is_some(); - let backed_stake = maybe_support.map(|support| support.total).unwrap_or_default(); + // set the backing value and elected flag if the candidate is among the winners. 
+ let who = c; + let maybe_support = supports.get(&who); + let elected = maybe_support.is_some(); + let backed_stake = maybe_support.map(|support| support.total).unwrap_or_default(); - Candidate { who, elected, backed_stake, ..Default::default() }.to_ptr() - }).collect::>(); + Candidate { who, elected, backed_stake, ..Default::default() }.to_ptr() + }) + .collect::>(); // collect all voters into a unified Vec. - let voters = all_voters.into_iter().map(|(v, w, ts)| { - let mut edges: Vec> = Vec::with_capacity(ts.len()); - for t in ts { - if edges.iter().any(|e| e.who == t) { - // duplicate edge. - continue; + let voters = all_voters + .into_iter() + .map(|(v, w, ts)| { + let mut edges: Vec> = Vec::with_capacity(ts.len()); + for t in ts { + if edges.iter().any(|e| e.who == t) { + // duplicate edge. + continue + } + + if let Some(idx) = candidates_index.get(&t) { + // if this edge is among the assignments, set the weight as well. + let weight = assignment_map + .get(&v) + .and_then(|d| { + d.iter().find_map(|(x, y)| if x == &t { Some(y) } else { None }) + }) + .cloned() + .unwrap_or_default(); + edges.push(Edge { + who: t, + candidate: Rc::clone(&candidates[*idx]), + weight, + ..Default::default() + }); + } } - if let Some(idx) = candidates_index.get(&t) { - // if this edge is among the assignments, set the weight as well. 
- let weight = assignment_map - .get(&v) - .and_then(|d| d.iter().find_map(|(x, y)| if x == &t { Some(y) } else { None })) - .cloned() - .unwrap_or_default(); - edges.push(Edge { - who: t, - candidate: Rc::clone(&candidates[*idx]), - weight, - ..Default::default() - }); - } - } - - let who = v; - let budget: ExtendedBalance = w.into(); - Voter { who, budget, edges, ..Default::default() } - }).collect::>(); + let who = v; + let budget: ExtendedBalance = w.into(); + Voter { who, budget, edges, ..Default::default() } + }) + .collect::>(); (candidates, voters) } @@ -345,7 +348,6 @@ fn pre_score( .fold(Zero::zero(), |acc: ExtendedBalance, voter| acc.saturating_add(slack(voter, t))) } - /// The slack of a voter at a given state. /// /// The slack of each voter, with threshold `t` is the total amount of stake that this voter can @@ -363,8 +365,7 @@ fn slack(voter: &Voter, t: Threshold) -> Exte let candidate = edge.candidate.borrow(); if candidate.elected { let extra = - Perbill::one().min(Perbill::from_rational(t, candidate.backed_stake)) - * edge.weight; + Perbill::one().min(Perbill::from_rational(t, candidate.backed_stake)) * edge.weight; acc.saturating_add(extra) } else { // No slack generated here. 
@@ -383,13 +384,22 @@ mod tests { fn setup_voter(who: u32, votes: Vec<(u32, u128, bool)>) -> Voter { let mut voter = Voter::new(who); let mut budget = 0u128; - let candidates = votes.into_iter().map(|(t, w, e)| { - budget += w; - Candidate { who: t, elected: e, backed_stake: w, ..Default::default() } - }).collect::>(); - let edges = candidates.into_iter().map(|c| - Edge { who: c.who, weight: c.backed_stake, candidate: c.to_ptr(), ..Default::default() } - ).collect::>(); + let candidates = votes + .into_iter() + .map(|(t, w, e)| { + budget += w; + Candidate { who: t, elected: e, backed_stake: w, ..Default::default() } + }) + .collect::>(); + let edges = candidates + .into_iter() + .map(|c| Edge { + who: c.who, + weight: c.backed_stake, + candidate: c.to_ptr(), + ..Default::default() + }) + .collect::>(); voter.edges = edges; voter.budget = budget; voter @@ -412,7 +422,6 @@ mod tests { assert_eq!(slack(&voter, 17), 3); assert_eq!(slack(&voter, 10), 10); assert_eq!(slack(&voter, 5), 20); - } #[test] @@ -440,15 +449,11 @@ mod tests { ]; // tuples in voters vector are (AccountId, Balance) let supports: Supports = vec![ - (20, Support { total: 15, voters: vec![(1, 5), (2, 10)]}), - (40, Support { total: 15, voters: vec![(1, 5), (2, 10)]}), + (20, Support { total: 15, voters: vec![(1, 5), (2, 10)] }), + (40, Support { total: 15, voters: vec![(1, 5), (2, 10)] }), ]; - let (candidates, voters) = prepare_pjr_input( - &supports, - all_candidates, - all_voters, - ); + let (candidates, voters) = prepare_pjr_input(&supports, all_candidates, all_voters); // elected flag and backing must be set correctly assert_eq!( @@ -467,7 +472,8 @@ mod tests { v.who, v.budget, v.edges.iter().map(|e| (e.who, e.weight)).collect::>(), - )).collect::>(), + )) + .collect::>(), vec![ (1, 10, vec![(10, 0), (20, 5), (30, 0), (40, 5)]), (2, 20, vec![(10, 0), (20, 10), (30, 0), (40, 10)]), @@ -498,15 +504,11 @@ mod tests { ]; // tuples in voters vector are (AccountId, Balance) let supports: Supports = 
vec![ - (20, Support { total: 15, voters: vec![(1, 5), (2, 10)]}), - (40, Support { total: 15, voters: vec![(1, 5), (2, 10)]}), + (20, Support { total: 15, voters: vec![(1, 5), (2, 10)] }), + (40, Support { total: 15, voters: vec![(1, 5), (2, 10)] }), ]; - let (candidates, voters) = prepare_pjr_input( - &supports, - all_candidates, - all_voters, - ); + let (candidates, voters) = prepare_pjr_input(&supports, all_candidates, all_voters); find_threshold_phase_change_for_scenario(candidates, voters); } @@ -521,15 +523,11 @@ mod tests { ]; // tuples in voters vector are (AccountId, Balance) let supports: Supports = vec![ - (20, Support { total: 15, voters: vec![(1, 5), (2, 10)]}), - (40, Support { total: 15, voters: vec![(1, 5), (2, 10)]}), + (20, Support { total: 15, voters: vec![(1, 5), (2, 10)] }), + (40, Support { total: 15, voters: vec![(1, 5), (2, 10)] }), ]; - let (candidates, voters) = prepare_pjr_input( - &supports, - all_candidates, - all_voters, - ); + let (candidates, voters) = prepare_pjr_input(&supports, all_candidates, all_voters); find_threshold_phase_change_for_scenario(candidates, voters); } @@ -544,22 +542,18 @@ mod tests { ]; // tuples in voters vector are (AccountId, Balance) let supports: Supports = vec![ - (20, Support { total: 15, voters: vec![(1, 5), (2, 10)]}), - (40, Support { total: 15, voters: vec![(1, 5), (2, 10)]}), + (20, Support { total: 15, voters: vec![(1, 5), (2, 10)] }), + (40, Support { total: 15, voters: vec![(1, 5), (2, 10)] }), ]; - let (candidates, voters) = prepare_pjr_input( - &supports, - all_candidates, - all_voters, - ); + let (candidates, voters) = prepare_pjr_input(&supports, all_candidates, all_voters); find_threshold_phase_change_for_scenario(candidates, voters); } fn find_threshold_phase_change_for_scenario( candidates: Vec>, - voters: Vec> + voters: Vec>, ) -> Threshold { let mut threshold = 1; let mut prev_threshold = 0; @@ -567,7 +561,9 @@ mod tests { // find the binary range containing the threshold beyond which 
the PJR check succeeds while pjr_check_core(&candidates, &voters, threshold).is_err() { prev_threshold = threshold; - threshold = threshold.checked_mul(2).expect("pjr check must fail before we run out of capacity in u128"); + threshold = threshold + .checked_mul(2) + .expect("pjr check must fail before we run out of capacity in u128"); } // now binary search within that range to find the phase threshold @@ -595,7 +591,7 @@ mod tests { unexpected_successes.push(t); } } - for t in high_bound..(high_bound*2) { + for t in high_bound..(high_bound * 2) { if pjr_check_core(&candidates, &voters, t).is_err() { unexpected_failures.push(t); } diff --git a/primitives/npos-elections/src/reduce.rs b/primitives/npos-elections/src/reduce.rs index a34f1612ca1a5..589bd414245f6 100644 --- a/primitives/npos-elections/src/reduce.rs +++ b/primitives/npos-elections/src/reduce.rs @@ -47,13 +47,15 @@ //! //! 1. -use crate::node::{Node, NodeId, NodeRef, NodeRole}; -use crate::{ExtendedBalance, IdentifierT, StakedAssignment}; +use crate::{ + node::{Node, NodeId, NodeRef, NodeRole}, + ExtendedBalance, IdentifierT, StakedAssignment, +}; use sp_arithmetic::traits::{Bounded, Zero}; use sp_std::{ collections::btree_map::{BTreeMap, Entry::*}, - vec, prelude::*, + vec, }; /// Map type used for reduce_4. Can be easily swapped with HashMap. 
@@ -63,7 +65,7 @@ type Map = BTreeMap<(A, A), A>; fn combinations_2(input: &[T]) -> Vec<(T, T)> { let n = input.len(); if n < 2 { - return Default::default(); + return Default::default() } let mut comb = Vec::with_capacity(n * (n - 1) / 2); @@ -126,7 +128,7 @@ fn reduce_4(assignments: &mut Vec>) -> u32 { match combination_map.entry((v1.clone(), v2.clone())) { Vacant(entry) => { entry.insert(who.clone()); - } + }, Occupied(mut entry) => { let other_who = entry.get_mut(); @@ -141,29 +143,30 @@ fn reduce_4(assignments: &mut Vec>) -> u32 { .filter(|(t, _)| *t == v1 || *t == v2) .count() != 2 { - continue; + continue } // check if other_who voted for the same pair v1, v2. let maybe_other_assignments = assignments.iter().find(|a| a.who == *other_who); if maybe_other_assignments.is_none() { - continue; + continue } let other_assignment = maybe_other_assignments.expect("value is checked to be 'Some'"); // Collect potential cycle votes - let mut other_cycle_votes = other_assignment - .distribution - .iter() - .filter_map(|(t, w)| { - if *t == v1 || *t == v2 { - Some((t.clone(), *w)) - } else { - None - } - }) - .collect::>(); + let mut other_cycle_votes = + other_assignment + .distribution + .iter() + .filter_map(|(t, w)| { + if *t == v1 || *t == v2 { + Some((t.clone(), *w)) + } else { + None + } + }) + .collect::>(); let other_votes_count = other_cycle_votes.len(); @@ -175,21 +178,18 @@ fn reduce_4(assignments: &mut Vec>) -> u32 { if other_votes_count < 2 { // This is not a cycle. Replace and continue. *other_who = who.clone(); - continue; + continue } else if other_votes_count == 2 { // This is a cycle. 
let mut who_cycle_votes: Vec<(A, ExtendedBalance)> = Vec::with_capacity(2); - assignments[assignment_index] - .distribution - .iter() - .for_each(|(t, w)| { - if *t == v1 || *t == v2 { - who_cycle_votes.push((t.clone(), *w)); - } - }); + assignments[assignment_index].distribution.iter().for_each(|(t, w)| { + if *t == v1 || *t == v2 { + who_cycle_votes.push((t.clone(), *w)); + } + }); if who_cycle_votes.len() != 2 { - continue; + continue } // Align the targets similarly. This helps with the circulation below. @@ -240,53 +240,39 @@ fn reduce_4(assignments: &mut Vec>) -> u32 { // apply changes let mut remove_indices: Vec = Vec::with_capacity(1); increase_indices.into_iter().for_each(|i| { - let voter = if i < 2 { - who.clone() - } else { - other_who.clone() - }; + let voter = if i < 2 { who.clone() } else { other_who.clone() }; // Note: so this is pretty ambiguous. We should only look for one // assignment that meets this criteria and if we find multiple then that // is a corrupt input. Same goes for the next block. 
- assignments - .iter_mut() - .filter(|a| a.who == voter) - .for_each(|ass| { - ass.distribution - .iter_mut() - .position(|(t, _)| *t == cycle[i].0) - .map(|idx| { - let next_value = - ass.distribution[idx].1.saturating_add(min_value); - ass.distribution[idx].1 = next_value; - }); - }); + assignments.iter_mut().filter(|a| a.who == voter).for_each(|ass| { + ass.distribution + .iter_mut() + .position(|(t, _)| *t == cycle[i].0) + .map(|idx| { + let next_value = + ass.distribution[idx].1.saturating_add(min_value); + ass.distribution[idx].1 = next_value; + }); + }); }); decrease_indices.into_iter().for_each(|i| { - let voter = if i < 2 { - who.clone() - } else { - other_who.clone() - }; - assignments - .iter_mut() - .filter(|a| a.who == voter) - .for_each(|ass| { - ass.distribution - .iter_mut() - .position(|(t, _)| *t == cycle[i].0) - .map(|idx| { - let next_value = - ass.distribution[idx].1.saturating_sub(min_value); - if next_value.is_zero() { - ass.distribution.remove(idx); - remove_indices.push(i); - num_changed += 1; - } else { - ass.distribution[idx].1 = next_value; - } - }); - }); + let voter = if i < 2 { who.clone() } else { other_who.clone() }; + assignments.iter_mut().filter(|a| a.who == voter).for_each(|ass| { + ass.distribution + .iter_mut() + .position(|(t, _)| *t == cycle[i].0) + .map(|idx| { + let next_value = + ass.distribution[idx].1.saturating_sub(min_value); + if next_value.is_zero() { + ass.distribution.remove(idx); + remove_indices.push(i); + num_changed += 1; + } else { + ass.distribution[idx].1 = next_value; + } + }); + }); }); // remove either one of them. @@ -297,21 +283,21 @@ fn reduce_4(assignments: &mut Vec>) -> u32 { match (who_removed, other_removed) { (false, true) => { *other_who = who.clone(); - } + }, (true, false) => { // nothing, other_who can stay there. - } + }, (true, true) => { // remove and don't replace entry.remove(); - } + }, (false, false) => { // Neither of the edges was removed? impossible. 
panic!("Duplicate voter (or other corrupt input)."); - } + }, } } - } + }, } } } @@ -350,7 +336,7 @@ fn reduce_all(assignments: &mut Vec>) -> u32 let maybe_dist = assignments[assignment_index].distribution.get(dist_index); if maybe_dist.is_none() { // The rest of this loop is moot. - break; + break } let (target, _) = maybe_dist.expect("Value checked to be some").clone(); @@ -377,19 +363,19 @@ fn reduce_all(assignments: &mut Vec>) -> u32 (false, false) => { Node::set_parent_of(&target_node, &voter_node); dist_index += 1; - continue; - } + continue + }, (false, true) => { Node::set_parent_of(&voter_node, &target_node); dist_index += 1; - continue; - } + continue + }, (true, false) => { Node::set_parent_of(&target_node, &voter_node); dist_index += 1; - continue; - } - (true, true) => { /* don't continue and execute the rest */ } + continue + }, + (true, true) => { /* don't continue and execute the rest */ }, }; let (voter_root, voter_root_path) = Node::root(&voter_node); @@ -405,10 +391,7 @@ fn reduce_all(assignments: &mut Vec>) -> u32 // because roots are the same. 
#[cfg(feature = "std")] - debug_assert_eq!( - target_root_path.last().unwrap(), - voter_root_path.last().unwrap() - ); + debug_assert_eq!(target_root_path.last().unwrap(), voter_root_path.last().unwrap()); debug_assert!(common_count > 0); // cycle part of each path will be `path[path.len() - common_count - 1 : 0]` @@ -602,7 +585,7 @@ fn reduce_all(assignments: &mut Vec>) -> u32 let current = voter_root_path[i].clone().borrow().id.who.clone(); let next = voter_root_path[i + 1].clone().borrow().id.who.clone(); if min_edge.contains(¤t) && min_edge.contains(&next) { - break; + break } Node::set_parent_of(&voter_root_path[i + 1], &voter_root_path[i]); } @@ -613,7 +596,7 @@ fn reduce_all(assignments: &mut Vec>) -> u32 let current = target_root_path[i].clone().borrow().id.who.clone(); let next = target_root_path[i + 1].clone().borrow().id.who.clone(); if min_edge.contains(¤t) && min_edge.contains(&next) { - break; + break } Node::set_parent_of(&target_root_path[i + 1], &target_root_path[i]); } @@ -718,14 +701,8 @@ mod tests { use super::*; let assignments = vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 25), (20, 75)], - }, - StakedAssignment { - who: 2, - distribution: vec![(10, 50), (20, 50)], - }, + StakedAssignment { who: 1, distribution: vec![(10, 25), (20, 75)] }, + StakedAssignment { who: 2, distribution: vec![(10, 50), (20, 50)] }, ]; let mut new_assignments = assignments.clone(); @@ -735,14 +712,8 @@ mod tests { assert_eq!( new_assignments, vec![ - StakedAssignment { - who: 1, - distribution: vec![(20, 100),], - }, - StakedAssignment { - who: 2, - distribution: vec![(10, 75), (20, 25),], - }, + StakedAssignment { who: 1, distribution: vec![(20, 100),] }, + StakedAssignment { who: 2, distribution: vec![(10, 75), (20, 25),] }, ], ); } @@ -750,26 +721,11 @@ mod tests { #[test] fn basic_reduce_all_cycles_works() { let mut assignments = vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 10)], - }, - StakedAssignment { - who: 2, - 
distribution: vec![(10, 15), (20, 5)], - }, - StakedAssignment { - who: 3, - distribution: vec![(20, 15), (40, 15)], - }, - StakedAssignment { - who: 4, - distribution: vec![(20, 10), (30, 10), (40, 20)], - }, - StakedAssignment { - who: 5, - distribution: vec![(20, 20), (30, 10), (40, 20)], - }, + StakedAssignment { who: 1, distribution: vec![(10, 10)] }, + StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5)] }, + StakedAssignment { who: 3, distribution: vec![(20, 15), (40, 15)] }, + StakedAssignment { who: 4, distribution: vec![(20, 10), (30, 10), (40, 20)] }, + StakedAssignment { who: 5, distribution: vec![(20, 20), (30, 10), (40, 20)] }, ]; assert_eq!(3, reduce_all(&mut assignments)); @@ -777,26 +733,11 @@ mod tests { assert_eq!( assignments, vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 10),] - }, - StakedAssignment { - who: 2, - distribution: vec![(10, 15), (20, 5),], - }, - StakedAssignment { - who: 3, - distribution: vec![(20, 30),], - }, - StakedAssignment { - who: 4, - distribution: vec![(40, 40),] - }, - StakedAssignment { - who: 5, - distribution: vec![(20, 15), (30, 20), (40, 15),], - }, + StakedAssignment { who: 1, distribution: vec![(10, 10),] }, + StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5),] }, + StakedAssignment { who: 3, distribution: vec![(20, 30),] }, + StakedAssignment { who: 4, distribution: vec![(40, 40),] }, + StakedAssignment { who: 5, distribution: vec![(20, 15), (30, 20), (40, 15),] }, ], ) } @@ -804,26 +745,11 @@ mod tests { #[test] fn basic_reduce_works() { let mut assignments = vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 10)], - }, - StakedAssignment { - who: 2, - distribution: vec![(10, 15), (20, 5)], - }, - StakedAssignment { - who: 3, - distribution: vec![(20, 15), (40, 15)], - }, - StakedAssignment { - who: 4, - distribution: vec![(20, 10), (30, 10), (40, 20)], - }, - StakedAssignment { - who: 5, - distribution: vec![(20, 20), (30, 10), (40, 20)], - }, + 
StakedAssignment { who: 1, distribution: vec![(10, 10)] }, + StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5)] }, + StakedAssignment { who: 3, distribution: vec![(20, 15), (40, 15)] }, + StakedAssignment { who: 4, distribution: vec![(20, 10), (30, 10), (40, 20)] }, + StakedAssignment { who: 5, distribution: vec![(20, 20), (30, 10), (40, 20)] }, ]; assert_eq!(3, reduce(&mut assignments)); @@ -831,26 +757,11 @@ mod tests { assert_eq!( assignments, vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 10),] - }, - StakedAssignment { - who: 2, - distribution: vec![(10, 15), (20, 5),], - }, - StakedAssignment { - who: 3, - distribution: vec![(20, 30),], - }, - StakedAssignment { - who: 4, - distribution: vec![(40, 40),] - }, - StakedAssignment { - who: 5, - distribution: vec![(20, 15), (30, 20), (40, 15),], - }, + StakedAssignment { who: 1, distribution: vec![(10, 10),] }, + StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5),] }, + StakedAssignment { who: 3, distribution: vec![(20, 30),] }, + StakedAssignment { who: 4, distribution: vec![(40, 40),] }, + StakedAssignment { who: 5, distribution: vec![(20, 15), (30, 20), (40, 15),] }, ], ) } @@ -858,35 +769,14 @@ mod tests { #[test] fn should_deal_with_self_vote() { let mut assignments = vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 10)], - }, - StakedAssignment { - who: 2, - distribution: vec![(10, 15), (20, 5)], - }, - StakedAssignment { - who: 3, - distribution: vec![(20, 15), (40, 15)], - }, - StakedAssignment { - who: 4, - distribution: vec![(20, 10), (30, 10), (40, 20)], - }, - StakedAssignment { - who: 5, - distribution: vec![(20, 20), (30, 10), (40, 20)], - }, + StakedAssignment { who: 1, distribution: vec![(10, 10)] }, + StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5)] }, + StakedAssignment { who: 3, distribution: vec![(20, 15), (40, 15)] }, + StakedAssignment { who: 4, distribution: vec![(20, 10), (30, 10), (40, 20)] }, + StakedAssignment { 
who: 5, distribution: vec![(20, 20), (30, 10), (40, 20)] }, // self vote from 10 and 20 to itself. - StakedAssignment { - who: 10, - distribution: vec![(10, 100)], - }, - StakedAssignment { - who: 20, - distribution: vec![(20, 200)], - }, + StakedAssignment { who: 10, distribution: vec![(10, 100)] }, + StakedAssignment { who: 20, distribution: vec![(20, 200)] }, ]; assert_eq!(3, reduce(&mut assignments)); @@ -894,35 +784,14 @@ mod tests { assert_eq!( assignments, vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 10),] - }, - StakedAssignment { - who: 2, - distribution: vec![(10, 15), (20, 5),], - }, - StakedAssignment { - who: 3, - distribution: vec![(20, 30),], - }, - StakedAssignment { - who: 4, - distribution: vec![(40, 40),] - }, - StakedAssignment { - who: 5, - distribution: vec![(20, 15), (30, 20), (40, 15),], - }, + StakedAssignment { who: 1, distribution: vec![(10, 10),] }, + StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5),] }, + StakedAssignment { who: 3, distribution: vec![(20, 30),] }, + StakedAssignment { who: 4, distribution: vec![(40, 40),] }, + StakedAssignment { who: 5, distribution: vec![(20, 15), (30, 20), (40, 15),] }, // should stay untouched. 
- StakedAssignment { - who: 10, - distribution: vec![(10, 100)] - }, - StakedAssignment { - who: 20, - distribution: vec![(20, 200)] - }, + StakedAssignment { who: 10, distribution: vec![(10, 100)] }, + StakedAssignment { who: 20, distribution: vec![(20, 200)] }, ], ) } @@ -930,55 +799,23 @@ mod tests { #[test] fn reduce_3_common_votes_same_weight() { let mut assignments = vec![ - StakedAssignment { - who: 4, - distribution: vec![ - ( - 1000000, - 100, - ), - ( - 1000002, - 100, - ), - ( - 1000004, - 100, - ), - ], - }, - StakedAssignment { - who: 5, - distribution: vec![ - ( - 1000000, - 100, - ), - ( - 1000002, - 100, - ), - ( - 1000004, - 100, - ), - ], - }, - ]; + StakedAssignment { + who: 4, + distribution: vec![(1000000, 100), (1000002, 100), (1000004, 100)], + }, + StakedAssignment { + who: 5, + distribution: vec![(1000000, 100), (1000002, 100), (1000004, 100)], + }, + ]; reduce_4(&mut assignments); assert_eq!( assignments, vec![ - StakedAssignment { - who: 4, - distribution: vec![(1000000, 200,), (1000004, 100,),], - }, - StakedAssignment { - who: 5, - distribution: vec![(1000002, 200,), (1000004, 100,),], - }, + StakedAssignment { who: 4, distribution: vec![(1000000, 200,), (1000004, 100,),] }, + StakedAssignment { who: 5, distribution: vec![(1000002, 200,), (1000004, 100,),] }, ], ) } @@ -987,18 +824,9 @@ mod tests { #[should_panic] fn reduce_panics_on_duplicate_voter() { let mut assignments = vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 10), (20, 10)], - }, - StakedAssignment { - who: 1, - distribution: vec![(10, 15), (20, 5)], - }, - StakedAssignment { - who: 2, - distribution: vec![(10, 15), (20, 15)], - }, + StakedAssignment { who: 1, distribution: vec![(10, 10), (20, 10)] }, + StakedAssignment { who: 1, distribution: vec![(10, 15), (20, 5)] }, + StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 15)] }, ]; reduce(&mut assignments); @@ -1007,10 +835,7 @@ mod tests { #[test] fn should_deal_with_duplicates_target() { let mut 
assignments = vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 15), (20, 5)], - }, + StakedAssignment { who: 1, distribution: vec![(10, 15), (20, 5)] }, StakedAssignment { who: 2, distribution: vec![ @@ -1029,10 +854,7 @@ mod tests { assert_eq!( assignments, vec![ - StakedAssignment { - who: 1, - distribution: vec![(10, 20),], - }, + StakedAssignment { who: 1, distribution: vec![(10, 20),] }, StakedAssignment { who: 2, distribution: vec![ diff --git a/primitives/npos-elections/src/tests.rs b/primitives/npos-elections/src/tests.rs index ff3c06a783c66..143bd518f8ab3 100644 --- a/primitives/npos-elections/src/tests.rs +++ b/primitives/npos-elections/src/tests.rs @@ -19,22 +19,18 @@ use crate::{ balancing, helpers::*, is_score_better, mock::*, seq_phragmen, seq_phragmen_core, setup_inputs, - to_support_map, to_supports, Assignment, CompactSolution, ElectionResult, ExtendedBalance, - IndexAssignment, StakedAssignment, Support, Voter, EvaluateSupport, + to_support_map, to_supports, Assignment, CompactSolution, ElectionResult, EvaluateSupport, + ExtendedBalance, IndexAssignment, StakedAssignment, Support, Voter, }; use rand::{self, SeedableRng}; use sp_arithmetic::{PerU16, Perbill, Percent, Permill}; -use substrate_test_utils::assert_eq_uvec; use std::convert::TryInto; +use substrate_test_utils::assert_eq_uvec; #[test] fn float_phragmen_poc_works() { let candidates = vec![1, 2, 3]; - let voters = vec![ - (10, vec![1, 2]), - (20, vec![1, 3]), - (30, vec![2, 3]), - ]; + let voters = vec![(10, vec![1, 2]), (20, vec![1, 3]), (30, vec![2, 3])]; let stake_of = create_stake_of(&[(10, 10), (20, 20), (30, 30), (1, 0), (2, 0), (3, 0)]); let mut phragmen_result = elect_float(2, candidates, voters, &stake_of).unwrap(); let winners = phragmen_result.clone().winners; @@ -43,11 +39,7 @@ fn float_phragmen_poc_works() { assert_eq_uvec!(winners, vec![(2, 40), (3, 50)]); assert_eq_uvec!( assignments, - vec![ - (10, vec![(2, 1.0)]), - (20, vec![(3, 1.0)]), - (30, vec![(2, 0.5), 
(3, 0.5)]), - ] + vec![(10, vec![(2, 1.0)]), (20, vec![(3, 1.0)]), (30, vec![(2, 0.5), (3, 0.5)]),] ); let mut support_map = build_support_map_float(&mut phragmen_result, &stake_of); @@ -76,11 +68,7 @@ fn float_phragmen_poc_works() { #[test] fn phragmen_core_test_without_edges() { let candidates = vec![1, 2, 3]; - let voters = vec![ - (10, 10, vec![]), - (20, 20, vec![]), - (30, 30, vec![]), - ]; + let voters = vec![(10, 10, vec![]), (20, 20, vec![]), (30, 30, vec![])]; let (candidates, voters) = setup_inputs(candidates, voters); @@ -104,23 +92,16 @@ fn phragmen_core_test_without_edges() { c_ptr.borrow().elected, c_ptr.borrow().round, c_ptr.borrow().backed_stake, - )).collect::>(), - vec![ - (1, false, 0, 0), - (2, false, 0, 0), - (3, false, 0, 0), - ] + )) + .collect::>(), + vec![(1, false, 0, 0), (2, false, 0, 0), (3, false, 0, 0),] ); } #[test] fn phragmen_core_poc_works() { let candidates = vec![1, 2, 3]; - let voters = vec![ - (10, 10, vec![1, 2]), - (20, 20, vec![1, 3]), - (30, 30, vec![2, 3]), - ]; + let voters = vec![(10, 10, vec![1, 2]), (20, 20, vec![1, 3]), (30, 30, vec![2, 3])]; let (candidates, voters) = setup_inputs(candidates, voters); let (candidates, voters) = seq_phragmen_core(2, candidates, voters).unwrap(); @@ -134,11 +115,7 @@ fn phragmen_core_poc_works() { (v.edges.iter().map(|e| (e.who, e.weight)).collect::>()), )) .collect::>(), - vec![ - (10, 10, vec![(2, 10)]), - (20, 20, vec![(3, 20)]), - (30, 30, vec![(2, 15), (3, 15)]), - ] + vec![(10, 10, vec![(2, 10)]), (20, 20, vec![(3, 20)]), (30, 30, vec![(2, 15), (3, 15)]),] ); assert_eq!( @@ -149,12 +126,9 @@ fn phragmen_core_poc_works() { c_ptr.borrow().elected, c_ptr.borrow().round, c_ptr.borrow().backed_stake, - )).collect::>(), - vec![ - (1, false, 0, 0), - (2, true, 1, 25), - (3, true, 0, 35), - ] + )) + .collect::>(), + vec![(1, false, 0, 0), (2, true, 1, 25), (3, true, 0, 35),] ); } @@ -203,7 +177,8 @@ fn balancing_core_works() { c_ptr.borrow().elected, c_ptr.borrow().round, 
c_ptr.borrow().backed_stake, - )).collect::>(), + )) + .collect::>(), vec![ (1, true, 1, 37), (2, true, 2, 38), @@ -220,40 +195,30 @@ fn voter_normalize_ops_works() { use sp_std::{cell::RefCell, rc::Rc}; // normalize { - let c1 = Candidate { who: 10, elected: false ,..Default::default() }; - let c2 = Candidate { who: 20, elected: false ,..Default::default() }; - let c3 = Candidate { who: 30, elected: false ,..Default::default() }; + let c1 = Candidate { who: 10, elected: false, ..Default::default() }; + let c2 = Candidate { who: 20, elected: false, ..Default::default() }; + let c3 = Candidate { who: 30, elected: false, ..Default::default() }; let e1 = Edge { candidate: Rc::new(RefCell::new(c1)), weight: 30, ..Default::default() }; let e2 = Edge { candidate: Rc::new(RefCell::new(c2)), weight: 33, ..Default::default() }; let e3 = Edge { candidate: Rc::new(RefCell::new(c3)), weight: 30, ..Default::default() }; - let mut v = Voter { - who: 1, - budget: 100, - edges: vec![e1, e2, e3], - ..Default::default() - }; + let mut v = Voter { who: 1, budget: 100, edges: vec![e1, e2, e3], ..Default::default() }; v.try_normalize().unwrap(); assert_eq!(v.edges.iter().map(|e| e.weight).collect::>(), vec![34, 33, 33]); } // // normalize_elected { - let c1 = Candidate { who: 10, elected: false ,..Default::default() }; - let c2 = Candidate { who: 20, elected: true ,..Default::default() }; - let c3 = Candidate { who: 30, elected: true ,..Default::default() }; + let c1 = Candidate { who: 10, elected: false, ..Default::default() }; + let c2 = Candidate { who: 20, elected: true, ..Default::default() }; + let c3 = Candidate { who: 30, elected: true, ..Default::default() }; let e1 = Edge { candidate: Rc::new(RefCell::new(c1)), weight: 30, ..Default::default() }; let e2 = Edge { candidate: Rc::new(RefCell::new(c2)), weight: 33, ..Default::default() }; let e3 = Edge { candidate: Rc::new(RefCell::new(c3)), weight: 30, ..Default::default() }; - let mut v = Voter { - who: 1, - budget: 100, - 
edges: vec![e1, e2, e3], - ..Default::default() - }; + let mut v = Voter { who: 1, budget: 100, edges: vec![e1, e2, e3], ..Default::default() }; v.try_normalize_elected().unwrap(); assert_eq!(v.edges.iter().map(|e| e.weight).collect::>(), vec![30, 34, 66]); @@ -263,37 +228,31 @@ fn voter_normalize_ops_works() { #[test] fn phragmen_poc_works() { let candidates = vec![1, 2, 3]; - let voters = vec![ - (10, vec![1, 2]), - (20, vec![1, 3]), - (30, vec![2, 3]), - ]; + let voters = vec![(10, vec![1, 2]), (20, vec![1, 3]), (30, vec![2, 3])]; let stake_of = create_stake_of(&[(10, 10), (20, 20), (30, 30)]); let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( 2, candidates, - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), None, - ).unwrap(); + ) + .unwrap(); assert_eq_uvec!(winners, vec![(2, 25), (3, 35)]); assert_eq_uvec!( assignments, vec![ - Assignment { - who: 10u64, - distribution: vec![(2, Perbill::from_percent(100))], - }, - Assignment { - who: 20, - distribution: vec![(3, Perbill::from_percent(100))], - }, + Assignment { who: 10u64, distribution: vec![(2, Perbill::from_percent(100))] }, + Assignment { who: 20, distribution: vec![(3, Perbill::from_percent(100))] }, Assignment { who: 30, distribution: vec![ - (2, Perbill::from_percent(100/2)), - (3, Perbill::from_percent(100/2)), + (2, Perbill::from_percent(100 / 2)), + (3, Perbill::from_percent(100 / 2)), ], }, ] @@ -306,21 +265,9 @@ fn phragmen_poc_works() { assert_eq_uvec!( staked, vec![ - StakedAssignment { - who: 10u64, - distribution: vec![(2, 10)], - }, - StakedAssignment { - who: 20, - distribution: vec![(3, 20)], - }, - StakedAssignment { - who: 30, - distribution: vec![ - (2, 15), - (3, 15), - ], - }, + StakedAssignment { who: 10u64, distribution: vec![(2, 10)] }, + StakedAssignment { who: 20, distribution: vec![(3, 20)] }, + StakedAssignment { who: 
30, distribution: vec![(2, 15), (3, 15),] }, ] ); @@ -337,32 +284,26 @@ fn phragmen_poc_works() { #[test] fn phragmen_poc_works_with_balancing() { let candidates = vec![1, 2, 3]; - let voters = vec![ - (10, vec![1, 2]), - (20, vec![1, 3]), - (30, vec![2, 3]), - ]; + let voters = vec![(10, vec![1, 2]), (20, vec![1, 3]), (30, vec![2, 3])]; let stake_of = create_stake_of(&[(10, 10), (20, 20), (30, 30)]); let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( 2, candidates, - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), Some((4, 0)), - ).unwrap(); + ) + .unwrap(); assert_eq_uvec!(winners, vec![(2, 30), (3, 30)]); assert_eq_uvec!( assignments, vec![ - Assignment { - who: 10u64, - distribution: vec![(2, Perbill::from_percent(100))], - }, - Assignment { - who: 20, - distribution: vec![(3, Perbill::from_percent(100))], - }, + Assignment { who: 10u64, distribution: vec![(2, Perbill::from_percent(100))] }, + Assignment { who: 20, distribution: vec![(3, Perbill::from_percent(100))] }, Assignment { who: 30, distribution: vec![ @@ -380,21 +321,9 @@ fn phragmen_poc_works_with_balancing() { assert_eq_uvec!( staked, vec![ - StakedAssignment { - who: 10u64, - distribution: vec![(2, 10)], - }, - StakedAssignment { - who: 20, - distribution: vec![(3, 20)], - }, - StakedAssignment { - who: 30, - distribution: vec![ - (2, 20), - (3, 10), - ], - }, + StakedAssignment { who: 10u64, distribution: vec![(2, 10)] }, + StakedAssignment { who: 20, distribution: vec![(3, 20)] }, + StakedAssignment { who: 30, distribution: vec![(2, 20), (3, 10),] }, ] ); @@ -408,22 +337,12 @@ fn phragmen_poc_works_with_balancing() { ); } - #[test] fn phragmen_poc_2_works() { let candidates = vec![10, 20, 30]; - let voters = vec![ - (2, vec![10, 20, 30]), - (4, vec![10, 20, 40]), - ]; - let stake_of = create_stake_of(&[ - (10, 1000), - (20, 1000), - (30, 
1000), - (40, 1000), - (2, 500), - (4, 500), - ]); + let voters = vec![(2, vec![10, 20, 30]), (4, vec![10, 20, 40])]; + let stake_of = + create_stake_of(&[(10, 1000), (20, 1000), (30, 1000), (40, 1000), (2, 500), (4, 500)]); run_and_compare::(candidates.clone(), voters.clone(), &stake_of, 2); run_and_compare::(candidates.clone(), voters.clone(), &stake_of, 2); @@ -434,17 +353,8 @@ fn phragmen_poc_2_works() { #[test] fn phragmen_poc_3_works() { let candidates = vec![10, 20, 30]; - let voters = vec![ - (2, vec![10, 20, 30]), - (4, vec![10, 20, 40]), - ]; - let stake_of = create_stake_of(&[ - (10, 1000), - (20, 1000), - (30, 1000), - (2, 50), - (4, 1000), - ]); + let voters = vec![(2, vec![10, 20, 30]), (4, vec![10, 20, 40])]; + let stake_of = create_stake_of(&[(10, 1000), (20, 1000), (30, 1000), (2, 50), (4, 1000)]); run_and_compare::(candidates.clone(), voters.clone(), &stake_of, 2); run_and_compare::(candidates.clone(), voters.clone(), &stake_of, 2); @@ -473,7 +383,8 @@ fn phragmen_accuracy_on_large_scale_only_candidates() { .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) .collect::>(), None, - ).unwrap(); + ) + .unwrap(); assert_eq_uvec!(winners, vec![(1, 18446744073709551614u128), (5, 18446744073709551613u128)]); assert_eq!(assignments.len(), 2); @@ -483,17 +394,14 @@ fn phragmen_accuracy_on_large_scale_only_candidates() { #[test] fn phragmen_accuracy_on_large_scale_voters_and_candidates() { let candidates = vec![1, 2, 3, 4, 5]; - let mut voters = vec![ - (13, vec![1, 3, 5]), - (14, vec![2, 4]), - ]; + let mut voters = vec![(13, vec![1, 3, 5]), (14, vec![2, 4])]; voters.extend(auto_generate_self_voters(&candidates)); let stake_of = create_stake_of(&[ - (1, (u64::MAX - 1).into()), - (2, (u64::MAX - 4).into()), - (3, (u64::MAX - 5).into()), - (4, (u64::MAX - 3).into()), - (5, (u64::MAX - 2).into()), + (1, (u64::MAX - 1).into()), + (2, (u64::MAX - 4).into()), + (3, (u64::MAX - 5).into()), + (4, (u64::MAX - 3).into()), + (5, (u64::MAX - 2).into()), (13, 
(u64::MAX - 10).into()), (14, u64::MAX.into()), ]); @@ -501,31 +409,23 @@ fn phragmen_accuracy_on_large_scale_voters_and_candidates() { let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( 2, candidates, - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), None, - ).unwrap(); + ) + .unwrap(); assert_eq_uvec!(winners, vec![(2, 36893488147419103226u128), (1, 36893488147419103219u128)]); assert_eq!( assignments, vec![ - Assignment { - who: 13u64, - distribution: vec![(1, Perbill::one())], - }, - Assignment { - who: 14, - distribution: vec![(2, Perbill::one())], - }, - Assignment { - who: 1, - distribution: vec![(1, Perbill::one())], - }, - Assignment { - who: 2, - distribution: vec![(2, Perbill::one())], - }, + Assignment { who: 13u64, distribution: vec![(1, Perbill::one())] }, + Assignment { who: 14, distribution: vec![(2, Perbill::one())] }, + Assignment { who: 1, distribution: vec![(1, Perbill::one())] }, + Assignment { who: 2, distribution: vec![(2, Perbill::one())] }, ] ); @@ -536,19 +436,18 @@ fn phragmen_accuracy_on_large_scale_voters_and_candidates() { fn phragmen_accuracy_on_small_scale_self_vote() { let candidates = vec![40, 10, 20, 30]; let voters = auto_generate_self_voters(&candidates); - let stake_of = create_stake_of(&[ - (40, 0), - (10, 1), - (20, 2), - (30, 1), - ]); + let stake_of = create_stake_of(&[(40, 0), (10, 1), (20, 2), (30, 1)]); let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( 3, candidates, - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), None, - ).unwrap(); + ) + .unwrap(); assert_eq_uvec!(winners, vec![(20, 2), (10, 1), (30, 1)]); check_assignments_sum(&assignments); @@ -557,12 +456,7 @@ fn 
phragmen_accuracy_on_small_scale_self_vote() { #[test] fn phragmen_accuracy_on_small_scale_no_self_vote() { let candidates = vec![40, 10, 20, 30]; - let voters = vec![ - (1, vec![10]), - (2, vec![20]), - (3, vec![30]), - (4, vec![40]), - ]; + let voters = vec![(1, vec![10]), (2, vec![20]), (3, vec![30]), (4, vec![40])]; let stake_of = create_stake_of(&[ (40, 1000), // don't care (10, 1000), // don't care @@ -577,27 +471,28 @@ fn phragmen_accuracy_on_small_scale_no_self_vote() { let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( 3, candidates, - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), None, - ).unwrap(); + ) + .unwrap(); assert_eq_uvec!(winners, vec![(20, 2), (10, 1), (30, 1)]); check_assignments_sum(&assignments); - } #[test] fn phragmen_large_scale_test() { - let candidates = vec![2, 4, 6, 8, 10, 12, 14, 16 ,18, 20, 22, 24]; - let mut voters = vec![ - (50, vec![2, 4, 6, 8, 10, 12, 14, 16 ,18, 20, 22, 24]), - ]; + let candidates = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24]; + let mut voters = vec![(50, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24])]; voters.extend(auto_generate_self_voters(&candidates)); let stake_of = create_stake_of(&[ - (2, 1), - (4, 100), - (6, 1000000), - (8, 100000000001000), + (2, 1), + (4, 100), + (6, 1000000), + (8, 100000000001000), (10, 100000000002000), (12, 100000000003000), (14, 400000000000000), @@ -612,9 +507,13 @@ fn phragmen_large_scale_test() { let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( 2, candidates, - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), None, - ).unwrap(); + ) + .unwrap(); assert_eq_uvec!(to_without_backing(winners.clone()), vec![24, 22]); 
check_assignments_sum(&assignments); @@ -629,18 +528,19 @@ fn phragmen_large_scale_test_2() { let mut voters = vec![(50, vec![2, 4])]; voters.extend(auto_generate_self_voters(&candidates)); - let stake_of = create_stake_of(&[ - (2, c_budget.into()), - (4, c_budget.into()), - (50, nom_budget.into()), - ]); + let stake_of = + create_stake_of(&[(2, c_budget.into()), (4, c_budget.into()), (50, nom_budget.into())]); let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( 2, candidates, - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), None, - ).unwrap(); + ) + .unwrap(); assert_eq_uvec!(winners, vec![(2, 500000000005000000u128), (4, 500000000003000000)]); @@ -654,14 +554,8 @@ fn phragmen_large_scale_test_2() { (4, Perbill::from_parts(500000000)), ], }, - Assignment { - who: 2, - distribution: vec![(2, Perbill::one())], - }, - Assignment { - who: 4, - distribution: vec![(4, Perbill::one())], - }, + Assignment { who: 2, distribution: vec![(2, Perbill::one())] }, + Assignment { who: 4, distribution: vec![(4, Perbill::one())] }, ], ); @@ -688,7 +582,6 @@ fn phragmen_linear_equalize() { (51, 1000), (61, 1000), (71, 1000), - (2, 2000), (4, 1000), (6, 1000), @@ -704,58 +597,48 @@ fn phragmen_linear_equalize() { #[test] fn elect_has_no_entry_barrier() { let candidates = vec![10, 20, 30]; - let voters = vec![ - (1, vec![10]), - (2, vec![20]), - ]; - let stake_of = create_stake_of(&[ - (1, 10), - (2, 10), - ]); + let voters = vec![(1, vec![10]), (2, vec![20])]; + let stake_of = create_stake_of(&[(1, 10), (2, 10)]); let ElectionResult { winners, assignments: _ } = seq_phragmen::<_, Perbill>( 3, candidates, - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), None, - ).unwrap(); + ) + 
.unwrap(); // 30 is elected with stake 0. The caller is responsible for stripping this. - assert_eq_uvec!(winners, vec![ - (10, 10), - (20, 10), - (30, 0), - ]); + assert_eq_uvec!(winners, vec![(10, 10), (20, 10), (30, 0),]); } #[test] fn phragmen_self_votes_should_be_kept() { let candidates = vec![5, 10, 20, 30]; - let voters = vec![ - (5, vec![5]), - (10, vec![10]), - (20, vec![20]), - (1, vec![10, 20]) - ]; - let stake_of = create_stake_of(&[ - (5, 5), - (10, 10), - (20, 20), - (1, 8), - ]); + let voters = vec![(5, vec![5]), (10, vec![10]), (20, vec![20]), (1, vec![10, 20])]; + let stake_of = create_stake_of(&[(5, 5), (10, 10), (20, 20), (1, 8)]); let result = seq_phragmen::<_, Perbill>( 2, candidates, - voters.iter().map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())).collect::>(), + voters + .iter() + .map(|(ref v, ref vs)| (v.clone(), stake_of(v), vs.clone())) + .collect::>(), None, - ).unwrap(); + ) + .unwrap(); assert_eq!(result.winners, vec![(20, 24), (10, 14)]); assert_eq_uvec!( result.assignments, vec![ - Assignment { who: 1, distribution: vec![ + Assignment { + who: 1, + distribution: vec![ (10, Perbill::from_percent(50)), (20, Perbill::from_percent(50)), ] @@ -783,18 +666,10 @@ fn phragmen_self_votes_should_be_kept() { #[test] fn duplicate_target_is_ignored() { let candidates = vec![1, 2, 3]; - let voters = vec![ - (10, 100, vec![1, 1, 2, 3]), - (20, 100, vec![2, 3]), - (30, 50, vec![1, 1, 2]), - ]; + let voters = vec![(10, 100, vec![1, 1, 2, 3]), (20, 100, vec![2, 3]), (30, 50, vec![1, 1, 2])]; - let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( - 2, - candidates, - voters, - None, - ).unwrap(); + let ElectionResult { winners, assignments } = + seq_phragmen::<_, Perbill>(2, candidates, voters, None).unwrap(); let winners = to_without_backing(winners); assert_eq!(winners, vec![(2), (3)]); @@ -803,28 +678,17 @@ fn duplicate_target_is_ignored() { .into_iter() .map(|x| (x.who, x.distribution.into_iter().map(|(w, _)| 
w).collect::>())) .collect::>(), - vec![ - (10, vec![2, 3]), - (20, vec![2, 3]), - (30, vec![2]), - ], + vec![(10, vec![2, 3]), (20, vec![2, 3]), (30, vec![2]),], ); } #[test] fn duplicate_target_is_ignored_when_winner() { let candidates = vec![1, 2, 3]; - let voters = vec![ - (10, 100, vec![1, 1, 2, 3]), - (20, 100, vec![1, 2]), - ]; + let voters = vec![(10, 100, vec![1, 1, 2, 3]), (20, 100, vec![1, 2])]; - let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( - 2, - candidates, - voters, - None, - ).unwrap(); + let ElectionResult { winners, assignments } = + seq_phragmen::<_, Perbill>(2, candidates, voters, None).unwrap(); let winners = to_without_backing(winners); assert_eq!(winners, vec![1, 2]); @@ -833,10 +697,7 @@ fn duplicate_target_is_ignored_when_winner() { .into_iter() .map(|x| (x.who, x.distribution.into_iter().map(|(w, _)| w).collect::>())) .collect::>(), - vec![ - (10, vec![1, 2]), - (20, vec![1, 2]), - ], + vec![(10, vec![1, 2]), (20, vec![1, 2]),], ); } @@ -846,10 +707,7 @@ fn support_map_and_vec_can_be_evaluated() { let voters = vec![(10, vec![1, 2]), (20, vec![1, 3]), (30, vec![2, 3])]; let stake_of = create_stake_of(&[(10, 10), (20, 20), (30, 30)]); - let ElectionResult { - winners, - assignments, - } = seq_phragmen::<_, Perbill>( + let ElectionResult { winners, assignments } = seq_phragmen::<_, Perbill>( 2, candidates, voters @@ -874,10 +732,7 @@ mod assignment_convert_normalize { fn assignment_convert_works() { let staked = StakedAssignment { who: 1 as AccountId, - distribution: vec![ - (20, 100 as ExtendedBalance), - (30, 25), - ], + distribution: vec![(20, 100 as ExtendedBalance), (30, 25)], }; let assignment = staked.clone().into_assignment(); @@ -892,10 +747,7 @@ mod assignment_convert_normalize { } ); - assert_eq!( - assignment.into_staked(125), - staked, - ); + assert_eq!(assignment.into_staked(125), staked,); } #[test] @@ -903,11 +755,9 @@ mod assignment_convert_normalize { assert_eq!( Assignment { who: 1, - 
distribution: vec![ - (2, Perbill::from_percent(33)), - (3, Perbill::from_percent(66)), - ] - }.into_staked(100), + distribution: vec![(2, Perbill::from_percent(33)), (3, Perbill::from_percent(66)),] + } + .into_staked(100), StakedAssignment { who: 1, distribution: vec![ @@ -926,7 +776,8 @@ mod assignment_convert_normalize { (3, 333_333_333_333_333), (4, 666_666_666_666_333), ], - }.into_assignment(), + } + .into_assignment(), Assignment { who: 1, distribution: vec![ @@ -947,7 +798,7 @@ mod assignment_convert_normalize { (2, Perbill::from_parts(330000000)), (3, Perbill::from_parts(660000000)), // sum is not 100%! - ] + ], }; a.try_normalize().unwrap(); assert_eq!( @@ -964,24 +815,9 @@ mod assignment_convert_normalize { #[test] fn staked_assignment_can_normalize() { - let mut a = StakedAssignment { - who: 1, - distribution: vec![ - (2, 33), - (3, 66), - ] - }; + let mut a = StakedAssignment { who: 1, distribution: vec![(2, 33), (3, 66)] }; a.try_normalize(100).unwrap(); - assert_eq!( - a, - StakedAssignment { - who: 1, - distribution: vec![ - (2, 34), - (3, 66), - ] - }, - ); + assert_eq!(a, StakedAssignment { who: 1, distribution: vec![(2, 34), (3, 66),] },); } } @@ -991,28 +827,16 @@ mod score { fn score_comparison_is_lexicographical_no_epsilon() { let epsilon = Perbill::zero(); // only better in the fist parameter, worse in the other two ✅ - assert_eq!( - is_score_better([12, 10, 35], [10, 20, 30], epsilon), - true, - ); + assert_eq!(is_score_better([12, 10, 35], [10, 20, 30], epsilon), true,); // worse in the first, better in the other two ❌ - assert_eq!( - is_score_better([9, 30, 10], [10, 20, 30], epsilon), - false, - ); + assert_eq!(is_score_better([9, 30, 10], [10, 20, 30], epsilon), false,); // equal in the first, the second one dictates. - assert_eq!( - is_score_better([10, 25, 40], [10, 20, 30], epsilon), - true, - ); + assert_eq!(is_score_better([10, 25, 40], [10, 20, 30], epsilon), true,); // equal in the first two, the last one dictates. 
- assert_eq!( - is_score_better([10, 20, 40], [10, 20, 30], epsilon), - false, - ); + assert_eq!(is_score_better([10, 20, 40], [10, 20, 30], epsilon), false,); } #[test] @@ -1021,120 +845,72 @@ mod score { { // no more than 1 percent (10) better in the first param. - assert_eq!( - is_score_better([1009, 5000, 100000], [1000, 5000, 100000], epsilon), - false, - ); + assert_eq!(is_score_better([1009, 5000, 100000], [1000, 5000, 100000], epsilon), false,); // now equal, still not better. - assert_eq!( - is_score_better([1010, 5000, 100000], [1000, 5000, 100000], epsilon), - false, - ); + assert_eq!(is_score_better([1010, 5000, 100000], [1000, 5000, 100000], epsilon), false,); // now it is. - assert_eq!( - is_score_better([1011, 5000, 100000], [1000, 5000, 100000], epsilon), - true, - ); + assert_eq!(is_score_better([1011, 5000, 100000], [1000, 5000, 100000], epsilon), true,); } { // First score score is epsilon better, but first score is no longer `ge`. Then this is // still not a good solution. - assert_eq!( - is_score_better([999, 6000, 100000], [1000, 5000, 100000], epsilon), - false, - ); + assert_eq!(is_score_better([999, 6000, 100000], [1000, 5000, 100000], epsilon), false,); } { // first score is equal or better, but not epsilon. Then second one is the determinant. - assert_eq!( - is_score_better([1005, 5000, 100000], [1000, 5000, 100000], epsilon), - false, - ); - - assert_eq!( - is_score_better([1005, 5050, 100000], [1000, 5000, 100000], epsilon), - false, - ); - - assert_eq!( - is_score_better([1005, 5051, 100000], [1000, 5000, 100000], epsilon), - true, - ); + assert_eq!(is_score_better([1005, 5000, 100000], [1000, 5000, 100000], epsilon), false,); + + assert_eq!(is_score_better([1005, 5050, 100000], [1000, 5000, 100000], epsilon), false,); + + assert_eq!(is_score_better([1005, 5051, 100000], [1000, 5000, 100000], epsilon), true,); } { // first score and second are equal or less than epsilon more, third is determinant. 
- assert_eq!( - is_score_better([1005, 5025, 100000], [1000, 5000, 100000], epsilon), - false, - ); - - assert_eq!( - is_score_better([1005, 5025, 99_000], [1000, 5000, 100000], epsilon), - false, - ); - - assert_eq!( - is_score_better([1005, 5025, 98_999], [1000, 5000, 100000], epsilon), - true, - ); + assert_eq!(is_score_better([1005, 5025, 100000], [1000, 5000, 100000], epsilon), false,); + + assert_eq!(is_score_better([1005, 5025, 99_000], [1000, 5000, 100000], epsilon), false,); + + assert_eq!(is_score_better([1005, 5025, 98_999], [1000, 5000, 100000], epsilon), true,); } } #[test] fn score_comparison_large_value() { // some random value taken from eras in kusama. - let initial = [12488167277027543u128, 5559266368032409496, 118749283262079244270992278287436446]; + let initial = + [12488167277027543u128, 5559266368032409496, 118749283262079244270992278287436446]; // this claim is 0.04090% better in the third component. It should be accepted as better if // epsilon is smaller than 5/10_0000 - let claim = [12488167277027543u128, 5559266368032409496, 118700736389524721358337889258988054]; + let claim = + [12488167277027543u128, 5559266368032409496, 118700736389524721358337889258988054]; assert_eq!( - is_score_better( - claim.clone(), - initial.clone(), - Perbill::from_rational(1u32, 10_000), - ), + is_score_better(claim.clone(), initial.clone(), Perbill::from_rational(1u32, 10_000),), true, ); assert_eq!( - is_score_better( - claim.clone(), - initial.clone(), - Perbill::from_rational(2u32, 10_000), - ), + is_score_better(claim.clone(), initial.clone(), Perbill::from_rational(2u32, 10_000),), true, ); assert_eq!( - is_score_better( - claim.clone(), - initial.clone(), - Perbill::from_rational(3u32, 10_000), - ), + is_score_better(claim.clone(), initial.clone(), Perbill::from_rational(3u32, 10_000),), true, ); assert_eq!( - is_score_better( - claim.clone(), - initial.clone(), - Perbill::from_rational(4u32, 10_000), - ), + is_score_better(claim.clone(), 
initial.clone(), Perbill::from_rational(4u32, 10_000),), true, ); assert_eq!( - is_score_better( - claim.clone(), - initial.clone(), - Perbill::from_rational(5u32, 10_000), - ), + is_score_better(claim.clone(), initial.clone(), Perbill::from_rational(5u32, 10_000),), false, ); } @@ -1223,10 +999,7 @@ mod solution_type { let encoded = compact.encode(); - assert_eq!( - compact, - Decode::decode(&mut &encoded[..]).unwrap(), - ); + assert_eq!(compact, Decode::decode(&mut &encoded[..]).unwrap(),); assert_eq!(compact.voter_count(), 4); assert_eq!(compact.edge_count(), 2 + 4); assert_eq!(compact.unique_targets(), vec![10, 11, 20, 40, 50, 51]); @@ -1240,13 +1013,11 @@ mod solution_type { (2, (0, TestAccuracy::from_percent(80)), 1), (3, (7, TestAccuracy::from_percent(85)), 8), ], - votes3: vec![ - ( - 4, - [(3, TestAccuracy::from_percent(50)), (4, TestAccuracy::from_percent(25))], - 5, - ), - ], + votes3: vec![( + 4, + [(3, TestAccuracy::from_percent(50)), (4, TestAccuracy::from_percent(25))], + 5, + )], ..Default::default() }; @@ -1256,16 +1027,12 @@ mod solution_type { compact, TestSolutionCompact { votes1: vec![(0, 2), (1, 6)], - votes2: vec![ - (3, (7, TestAccuracy::from_percent(85)), 8), - ], - votes3: vec![ - ( - 4, - [(3, TestAccuracy::from_percent(50)), (4, TestAccuracy::from_percent(25))], - 5, - ), - ], + votes2: vec![(3, (7, TestAccuracy::from_percent(85)), 8),], + votes3: vec![( + 4, + [(3, TestAccuracy::from_percent(50)), (4, TestAccuracy::from_percent(25))], + 5, + ),], ..Default::default() }, ); @@ -1275,9 +1042,7 @@ mod solution_type { compact, TestSolutionCompact { votes1: vec![(0, 2), (1, 6)], - votes2: vec![ - (3, (7, TestAccuracy::from_percent(85)), 8), - ], + votes2: vec![(3, (7, TestAccuracy::from_percent(85)), 8),], ..Default::default() }, ); @@ -1287,9 +1052,7 @@ mod solution_type { compact, TestSolutionCompact { votes1: vec![(0, 2)], - votes2: vec![ - (3, (7, TestAccuracy::from_percent(85)), 8), - ], + votes2: vec![(3, (7, 
TestAccuracy::from_percent(85)), 8),], ..Default::default() }, ); @@ -1297,13 +1060,7 @@ mod solution_type { #[test] fn basic_from_and_into_portable_works_assignments() { - let voters = vec![ - 2 as AccountId, - 4, - 1, - 5, - 3, - ]; + let voters = vec![2 as AccountId, 4, 1, 5, 3]; let targets = vec![ 10 as AccountId, 11, @@ -1319,17 +1076,14 @@ mod solution_type { let assignments = vec![ Assignment { who: 2 as AccountId, - distribution: vec![(20u64, TestAccuracy::from_percent(100))] - }, - Assignment { - who: 4, - distribution: vec![(40, TestAccuracy::from_percent(100))], + distribution: vec![(20u64, TestAccuracy::from_percent(100))], }, + Assignment { who: 4, distribution: vec![(40, TestAccuracy::from_percent(100))] }, Assignment { who: 1, distribution: vec![ (10, TestAccuracy::from_percent(80)), - (11, TestAccuracy::from_percent(20)) + (11, TestAccuracy::from_percent(20)), ], }, Assignment { @@ -1337,7 +1091,7 @@ mod solution_type { distribution: vec![ (50, TestAccuracy::from_percent(85)), (51, TestAccuracy::from_percent(15)), - ] + ], }, Assignment { who: 3, @@ -1356,11 +1110,8 @@ mod solution_type { targets.iter().position(|x| x == a).map(TryInto::try_into).unwrap().ok() }; - let compacted = TestSolutionCompact::from_assignment( - &assignments, - voter_index, - target_index, - ).unwrap(); + let compacted = + TestSolutionCompact::from_assignment(&assignments, voter_index, target_index).unwrap(); // basically number of assignments that it is encoding. 
assert_eq!(compacted.voter_count(), assignments.len()); @@ -1377,21 +1128,16 @@ mod solution_type { (2, (0, TestAccuracy::from_percent(80)), 1), (3, (7, TestAccuracy::from_percent(85)), 8), ], - votes3: vec![ - ( - 4, - [(3, TestAccuracy::from_percent(50)), (4, TestAccuracy::from_percent(25))], - 5, - ), - ], + votes3: vec![( + 4, + [(3, TestAccuracy::from_percent(50)), (4, TestAccuracy::from_percent(25))], + 5, + ),], ..Default::default() } ); - assert_eq!( - compacted.unique_targets(), - vec![0, 1, 2, 3, 4, 5, 6, 7, 8], - ); + assert_eq!(compacted.unique_targets(), vec![0, 1, 2, 3, 4, 5, 6, 7, 8],); let voter_at = |a: u32| -> Option { voters.get(>::try_into(a).unwrap()).cloned() @@ -1400,10 +1146,7 @@ mod solution_type { targets.get(>::try_into(a).unwrap()).cloned() }; - assert_eq!( - compacted.into_assignment(voter_at, target_at).unwrap(), - assignments, - ); + assert_eq!(compacted.into_assignment(voter_at, target_at).unwrap(), assignments,); } #[test] @@ -1413,57 +1156,42 @@ mod solution_type { // we don't really care about voters here so all duplicates. This is not invalid per se. let compact = TestSolutionCompact { votes1: vec![(99, 1), (99, 2)], - votes2: vec![ - (99, (3, ACC.clone()), 7), - (99, (4, ACC.clone()), 8), - ], - votes3: vec![ - (99, [(11, ACC.clone()), (12, ACC.clone())], 13), - ], + votes2: vec![(99, (3, ACC.clone()), 7), (99, (4, ACC.clone()), 8)], + votes3: vec![(99, [(11, ACC.clone()), (12, ACC.clone())], 13)], // ensure the last one is also counted. 
- votes16: vec![ - ( - 99, - [ - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - (66, ACC.clone()), - ], - 67, - ) - ], + votes16: vec![( + 99, + [ + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + (66, ACC.clone()), + ], + 67, + )], ..Default::default() }; - assert_eq!( - compact.unique_targets(), - vec![1, 2, 3, 4, 7, 8, 11, 12, 13, 66, 67] - ); + assert_eq!(compact.unique_targets(), vec![1, 2, 3, 4, 7, 8, 11, 12, 13, 66, 67]); assert_eq!(compact.edge_count(), 2 + (2 * 2) + 3 + 16); assert_eq!(compact.voter_count(), 6); // this one has some duplicates. 
let compact = TestSolutionCompact { votes1: vec![(99, 1), (99, 1)], - votes2: vec![ - (99, (3, ACC.clone()), 7), - (99, (4, ACC.clone()), 8), - ], - votes3: vec![ - (99, [(11, ACC.clone()), (11, ACC.clone())], 13), - ], + votes2: vec![(99, (3, ACC.clone()), 7), (99, (4, ACC.clone()), 8)], + votes3: vec![(99, [(11, ACC.clone()), (11, ACC.clone())], 13)], ..Default::default() }; @@ -1484,7 +1212,6 @@ mod solution_type { let voter_at = |a: u32| -> Option { Some(a as AccountId) }; let target_at = |a: u8| -> Option { Some(a as AccountId) }; - assert_eq!( compact.into_assignment(&voter_at, &target_at).unwrap_err(), PhragmenError::CompactStakeOverflow, @@ -1494,7 +1221,11 @@ mod solution_type { let compact = TestSolutionCompact { votes1: Default::default(), votes2: Default::default(), - votes3: vec![(0, [(1, TestAccuracy::from_percent(70)), (2, TestAccuracy::from_percent(80))], 3)], + votes3: vec![( + 0, + [(1, TestAccuracy::from_percent(70)), (2, TestAccuracy::from_percent(80))], + 3, + )], ..Default::default() }; @@ -1509,21 +1240,15 @@ mod solution_type { let voter_index = |a: &AccountId| -> Option { Some(*a as u32) }; let target_index = |a: &AccountId| -> Option { Some(*a as u8) }; - let assignments = vec![ - Assignment { - who: 1 as AccountId, - distribution: - (10..27) - .map(|i| (i as AccountId, Percent::from_parts(i as u8))) - .collect::>(), - }, - ]; + let assignments = vec![Assignment { + who: 1 as AccountId, + distribution: (10..27) + .map(|i| (i as AccountId, Percent::from_parts(i as u8))) + .collect::>(), + }]; - let compacted = TestSolutionCompact::from_assignment( - &assignments, - voter_index, - target_index, - ); + let compacted = + TestSolutionCompact::from_assignment(&assignments, voter_index, target_index); assert_eq!(compacted.unwrap_err(), PhragmenError::CompactTargetOverflow); } @@ -1535,12 +1260,12 @@ mod solution_type { let assignments = vec![ Assignment { who: 1 as AccountId, - distribution: vec![(10, Percent::from_percent(50)), (11, 
Percent::from_percent(50))], - }, - Assignment { - who: 2, - distribution: vec![], + distribution: vec![ + (10, Percent::from_percent(50)), + (11, Percent::from_percent(50)), + ], }, + Assignment { who: 2, distribution: vec![] }, ]; let voter_index = |a: &AccountId| -> Option { @@ -1550,11 +1275,8 @@ mod solution_type { targets.iter().position(|x| x == a).map(TryInto::try_into).unwrap().ok() }; - let compacted = TestSolutionCompact::from_assignment( - &assignments, - voter_index, - target_index, - ).unwrap(); + let compacted = + TestSolutionCompact::from_assignment(&assignments, voter_index, target_index).unwrap(); assert_eq!( compacted, diff --git a/primitives/panic-handler/src/lib.rs b/primitives/panic-handler/src/lib.rs index 150ce52976807..1c72f224071cd 100644 --- a/primitives/panic-handler/src/lib.rs +++ b/primitives/panic-handler/src/lib.rs @@ -25,11 +25,13 @@ //! temporarily be disabled by using an [`AbortGuard`]. use backtrace::Backtrace; -use std::io::{self, Write}; -use std::marker::PhantomData; -use std::panic::{self, PanicInfo}; -use std::cell::Cell; -use std::thread; +use std::{ + cell::Cell, + io::{self, Write}, + marker::PhantomData, + panic::{self, PanicInfo}, + thread, +}; thread_local! { static ON_PANIC: Cell = Cell::new(OnPanic::Abort); @@ -56,18 +58,19 @@ pub fn set(bug_url: &str, version: &str) { panic::set_hook(Box::new({ let version = version.to_string(); let bug_url = bug_url.to_string(); - move |c| { - panic_hook(c, &bug_url, &version) - } + move |c| panic_hook(c, &bug_url, &version) })); } macro_rules! ABOUT_PANIC { - () => (" + () => { + " This is a bug. Please report it at: {} -")} +" + }; +} /// Set aborting flag. Returns previous value of the flag. fn set_abort(on_panic: OnPanic) -> OnPanic { @@ -92,35 +95,26 @@ pub struct AbortGuard { /// Value that was in `ABORT` before we created this guard. previous_val: OnPanic, /// Marker so that `AbortGuard` doesn't implement `Send`. 
- _not_send: PhantomData> + _not_send: PhantomData>, } impl AbortGuard { /// Create a new guard. While the guard is alive, panics that happen in the current thread will /// unwind the stack (unless another guard is created afterwards). pub fn force_unwind() -> AbortGuard { - AbortGuard { - previous_val: set_abort(OnPanic::Unwind), - _not_send: PhantomData - } + AbortGuard { previous_val: set_abort(OnPanic::Unwind), _not_send: PhantomData } } /// Create a new guard. While the guard is alive, panics that happen in the current thread will /// abort the process (unless another guard is created afterwards). pub fn force_abort() -> AbortGuard { - AbortGuard { - previous_val: set_abort(OnPanic::Abort), - _not_send: PhantomData - } + AbortGuard { previous_val: set_abort(OnPanic::Abort), _not_send: PhantomData } } /// Create a new guard. While the guard is alive, panics that happen in the current thread will /// **never** abort the process (even if `AbortGuard::force_abort()` guard will be created afterwards). 
pub fn never_abort() -> AbortGuard { - AbortGuard { - previous_val: set_abort(OnPanic::NeverAbort), - _not_send: PhantomData - } + AbortGuard { previous_val: set_abort(OnPanic::NeverAbort), _not_send: PhantomData } } } @@ -141,7 +135,7 @@ fn panic_hook(info: &PanicInfo, report_url: &str, version: &str) { None => match info.payload().downcast_ref::() { Some(s) => &s[..], None => "Box", - } + }, }; let thread = thread::current(); @@ -158,11 +152,7 @@ fn panic_hook(info: &PanicInfo, report_url: &str, version: &str) { let _ = writeln!(stderr, ""); let _ = writeln!(stderr, "{:?}", backtrace); let _ = writeln!(stderr, ""); - let _ = writeln!( - stderr, - "Thread '{}' panicked at '{}', {}:{}", - name, msg, file, line - ); + let _ = writeln!(stderr, "Thread '{}' panicked at '{}', {}:{}", name, msg, file, line); let _ = writeln!(stderr, ABOUT_PANIC!(), report_url); ON_PANIC.with(|val| { diff --git a/primitives/rpc/src/lib.rs b/primitives/rpc/src/lib.rs index ea7118479943d..0d716d5a07c18 100644 --- a/primitives/rpc/src/lib.rs +++ b/primitives/rpc/src/lib.rs @@ -19,22 +19,16 @@ #![warn(missing_docs)] -pub mod number; pub mod list; +pub mod number; pub mod tracing; /// A util function to assert the result of serialization and deserialization is the same. 
#[cfg(test)] -pub(crate) fn assert_deser(s: &str, expected: T) where - T: std::fmt::Debug + serde::ser::Serialize + serde::de::DeserializeOwned + PartialEq +pub(crate) fn assert_deser(s: &str, expected: T) +where + T: std::fmt::Debug + serde::ser::Serialize + serde::de::DeserializeOwned + PartialEq, { - assert_eq!( - serde_json::from_str::(s).unwrap(), - expected - ); - assert_eq!( - serde_json::to_string(&expected).unwrap(), - s - ); + assert_eq!(serde_json::from_str::(s).unwrap(), expected); + assert_eq!(serde_json::to_string(&expected).unwrap(), s); } - diff --git a/primitives/rpc/src/list.rs b/primitives/rpc/src/list.rs index 1f4c6ff098c4d..b3d0a4f546e94 100644 --- a/primitives/rpc/src/list.rs +++ b/primitives/rpc/src/list.rs @@ -17,7 +17,7 @@ //! RPC a lenient list or value type. -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; /// RPC list or value wrapper. /// diff --git a/primitives/rpc/src/number.rs b/primitives/rpc/src/number.rs index ad19b7f5b4367..916f2c3d83266 100644 --- a/primitives/rpc/src/number.rs +++ b/primitives/rpc/src/number.rs @@ -18,9 +18,12 @@ //! A number type that can be serialized both as a number or a string that encodes a number in a //! string. -use std::{convert::{TryFrom, TryInto}, fmt::Debug}; -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; use sp_core::U256; +use std::{ + convert::{TryFrom, TryInto}, + fmt::Debug, +}; /// A number type that can be serialized both as a number or a string that encodes a number in a /// string. diff --git a/primitives/rpc/src/tracing.rs b/primitives/rpc/src/tracing.rs index 1062ec1d9ebe5..7e05cd84a7dd7 100644 --- a/primitives/rpc/src/tracing.rs +++ b/primitives/rpc/src/tracing.rs @@ -17,7 +17,7 @@ //! 
Types for working with tracing data -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; use rustc_hash::FxHashMap; @@ -84,7 +84,7 @@ pub struct Data { #[serde(rename_all = "camelCase")] pub struct TraceError { /// Error message - pub error: String, + pub error: String, } /// Response for the `state_traceBlock` RPC. @@ -94,5 +94,5 @@ pub enum TraceBlockResponse { /// Error block tracing response TraceError(TraceError), /// Successful block tracing response - BlockTrace(BlockTrace) + BlockTrace(BlockTrace), } diff --git a/primitives/runtime-interface/proc-macro/src/lib.rs b/primitives/runtime-interface/proc-macro/src/lib.rs index 53df4e084d277..502130f1b4108 100644 --- a/primitives/runtime-interface/proc-macro/src/lib.rs +++ b/primitives/runtime-interface/proc-macro/src/lib.rs @@ -26,8 +26,10 @@ //! 3. The [`PassByEnum`](derive.PassByInner.html) derive macro for implementing `PassBy` with `Enum`. //! 4. The [`PassByInner`](derive.PassByInner.html) derive macro for implementing `PassBy` with `Inner`. 
-use syn::{parse_macro_input, ItemTrait, DeriveInput, Result, Token}; -use syn::parse::{Parse, ParseStream}; +use syn::{ + parse::{Parse, ParseStream}, + parse_macro_input, DeriveInput, ItemTrait, Result, Token, +}; mod pass_by; mod runtime_interface; @@ -35,7 +37,7 @@ mod utils; struct Options { wasm_only: bool, - tracing: bool + tracing: bool, } impl Options { @@ -86,17 +88,21 @@ pub fn runtime_interface( #[proc_macro_derive(PassByCodec)] pub fn pass_by_codec(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); - pass_by::codec_derive_impl(input).unwrap_or_else(|e| e.to_compile_error()).into() + pass_by::codec_derive_impl(input) + .unwrap_or_else(|e| e.to_compile_error()) + .into() } #[proc_macro_derive(PassByInner)] pub fn pass_by_inner(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); - pass_by::inner_derive_impl(input).unwrap_or_else(|e| e.to_compile_error()).into() + pass_by::inner_derive_impl(input) + .unwrap_or_else(|e| e.to_compile_error()) + .into() } #[proc_macro_derive(PassByEnum)] pub fn pass_by_enum(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); pass_by::enum_derive_impl(input).unwrap_or_else(|e| e.to_compile_error()).into() -} \ No newline at end of file +} diff --git a/primitives/runtime-interface/proc-macro/src/pass_by/codec.rs b/primitives/runtime-interface/proc-macro/src/pass_by/codec.rs index 1e6b72f882339..2be455d17a47b 100644 --- a/primitives/runtime-interface/proc-macro/src/pass_by/codec.rs +++ b/primitives/runtime-interface/proc-macro/src/pass_by/codec.rs @@ -22,7 +22,7 @@ use crate::utils::{generate_crate_access, generate_runtime_interface_include}; -use syn::{DeriveInput, Result, Generics, parse_quote}; +use syn::{parse_quote, DeriveInput, Generics, Result}; use quote::quote; @@ -53,7 +53,7 @@ pub fn derive_impl(mut input: DeriveInput) -> Result { 
fn add_trait_bounds(generics: &mut Generics) { let crate_ = generate_crate_access(); - generics.type_params_mut() + generics + .type_params_mut() .for_each(|type_param| type_param.bounds.push(parse_quote!(#crate_::codec::Codec))); } - diff --git a/primitives/runtime-interface/proc-macro/src/pass_by/enum_.rs b/primitives/runtime-interface/proc-macro/src/pass_by/enum_.rs index cc0428fc9b56b..f614e4d9f294d 100644 --- a/primitives/runtime-interface/proc-macro/src/pass_by/enum_.rs +++ b/primitives/runtime-interface/proc-macro/src/pass_by/enum_.rs @@ -21,11 +21,11 @@ use crate::utils::{generate_crate_access, generate_runtime_interface_include}; -use syn::{DeriveInput, Result, Data, Fields, Error, Ident}; +use syn::{Data, DeriveInput, Error, Fields, Ident, Result}; use quote::quote; -use proc_macro2::{TokenStream, Span}; +use proc_macro2::{Span, TokenStream}; /// The derive implementation for `PassBy` with `Enum`. pub fn derive_impl(input: DeriveInput) -> Result { @@ -81,22 +81,21 @@ pub fn derive_impl(input: DeriveInput) -> Result { /// enum or a variant is not an unit. 
fn get_enum_field_idents<'a>(data: &'a Data) -> Result>> { match data { - Data::Enum(d) => { + Data::Enum(d) => if d.variants.len() <= 256 { - Ok( - d.variants.iter().map(|v| if let Fields::Unit = v.fields { + Ok(d.variants.iter().map(|v| { + if let Fields::Unit = v.fields { Ok(&v.ident) } else { Err(Error::new( Span::call_site(), "`PassByEnum` only supports unit variants.", )) - }) - ) + } + })) } else { Err(Error::new(Span::call_site(), "`PassByEnum` only supports `256` variants.")) - } - }, - _ => Err(Error::new(Span::call_site(), "`PassByEnum` only supports enums as input type.")) + }, + _ => Err(Error::new(Span::call_site(), "`PassByEnum` only supports enums as input type.")), } } diff --git a/primitives/runtime-interface/proc-macro/src/pass_by/inner.rs b/primitives/runtime-interface/proc-macro/src/pass_by/inner.rs index 7fe0d1734c36c..6eaa689d6293a 100644 --- a/primitives/runtime-interface/proc-macro/src/pass_by/inner.rs +++ b/primitives/runtime-interface/proc-macro/src/pass_by/inner.rs @@ -22,11 +22,11 @@ use crate::utils::{generate_crate_access, generate_runtime_interface_include}; -use syn::{DeriveInput, Result, Generics, parse_quote, Type, Data, Error, Fields, Ident}; +use syn::{parse_quote, Data, DeriveInput, Error, Fields, Generics, Ident, Result, Type}; use quote::quote; -use proc_macro2::{TokenStream, Span}; +use proc_macro2::{Span, TokenStream}; /// The derive implementation for `PassBy` with `Inner` and `PassByInner`. 
pub fn derive_impl(mut input: DeriveInput) -> Result { @@ -80,7 +80,8 @@ pub fn derive_impl(mut input: DeriveInput) -> Result { fn add_trait_bounds(generics: &mut Generics) { let crate_ = generate_crate_access(); - generics.type_params_mut() + generics + .type_params_mut() .for_each(|type_param| type_param.bounds.push(parse_quote!(#crate_::RIType))); } @@ -97,15 +98,13 @@ fn extract_inner_ty_and_name(data: &Data) -> Result<(Type, Option)> { Fields::Unnamed(ref unnamed) if unnamed.unnamed.len() == 1 => { let field = &unnamed.unnamed[0]; return Ok((field.ty.clone(), field.ident.clone())) - } + }, _ => {}, } } - Err( - Error::new( - Span::call_site(), - "Only newtype/one field structs are supported by `PassByInner`!", - ) - ) + Err(Error::new( + Span::call_site(), + "Only newtype/one field structs are supported by `PassByInner`!", + )) } diff --git a/primitives/runtime-interface/proc-macro/src/runtime_interface/bare_function_interface.rs b/primitives/runtime-interface/proc-macro/src/runtime_interface/bare_function_interface.rs index d17067d990c36..1943acbb214da 100644 --- a/primitives/runtime-interface/proc-macro/src/runtime_interface/bare_function_interface.rs +++ b/primitives/runtime-interface/proc-macro/src/runtime_interface/bare_function_interface.rs @@ -30,15 +30,16 @@ //! are feature-gated, so that one is compiled for the native and the other for the wasm side. 
use crate::utils::{ - generate_crate_access, create_exchangeable_host_function_ident, get_function_arguments, - get_function_argument_names, get_runtime_interface, create_function_ident_with_version, + create_exchangeable_host_function_ident, create_function_ident_with_version, + generate_crate_access, get_function_argument_names, get_function_arguments, + get_runtime_interface, }; use syn::{ - Ident, ItemTrait, TraitItemMethod, FnArg, Signature, Result, spanned::Spanned, parse_quote, + parse_quote, spanned::Spanned, FnArg, Ident, ItemTrait, Result, Signature, TraitItemMethod, }; -use proc_macro2::{TokenStream, Span}; +use proc_macro2::{Span, TokenStream}; use quote::{quote, quote_spanned}; @@ -51,21 +52,22 @@ pub fn generate(trait_def: &ItemTrait, is_wasm_only: bool, tracing: bool) -> Res let runtime_interface = get_runtime_interface(trait_def)?; // latest version dispatch - let token_stream: Result = runtime_interface.latest_versions() - .try_fold( - TokenStream::new(), - |mut t, (latest_version, method)| { - t.extend(function_for_method(method, latest_version, is_wasm_only)?); - Ok(t) - } - ); + let token_stream: Result = runtime_interface.latest_versions().try_fold( + TokenStream::new(), + |mut t, (latest_version, method)| { + t.extend(function_for_method(method, latest_version, is_wasm_only)?); + Ok(t) + }, + ); // earlier versions compatibility dispatch (only std variant) - let result: Result = runtime_interface.all_versions().try_fold(token_stream?, |mut t, (version, method)| - { - t.extend(function_std_impl(trait_name, method, version, is_wasm_only, tracing)?); - Ok(t) - }); + let result: Result = + runtime_interface + .all_versions() + .try_fold(token_stream?, |mut t, (version, method)| { + t.extend(function_std_impl(trait_name, method, version, is_wasm_only, tracing)?); + Ok(t) + }); result } @@ -76,21 +78,16 @@ fn function_for_method( latest_version: u32, is_wasm_only: bool, ) -> Result { - let std_impl = if !is_wasm_only { - 
function_std_latest_impl(method, latest_version)? - } else { - quote!() - }; + let std_impl = + if !is_wasm_only { function_std_latest_impl(method, latest_version)? } else { quote!() }; let no_std_impl = function_no_std_impl(method)?; - Ok( - quote! { - #std_impl + Ok(quote! { + #std_impl - #no_std_impl - } - ) + #no_std_impl + }) } /// Generates the bare function implementation for `cfg(not(feature = "std"))`. @@ -102,31 +99,27 @@ fn function_no_std_impl(method: &TraitItemMethod) -> Result { let return_value = &method.sig.output; let attrs = method.attrs.iter().filter(|a| !a.path.is_ident("version")); - Ok( - quote! { - #[cfg(not(feature = "std"))] - #( #attrs )* - pub fn #function_name( #( #args, )* ) #return_value { - // Call the host function - #host_function_name.get()( #( #arg_names, )* ) - } + Ok(quote! { + #[cfg(not(feature = "std"))] + #( #attrs )* + pub fn #function_name( #( #args, )* ) #return_value { + // Call the host function + #host_function_name.get()( #( #arg_names, )* ) } - ) + }) } /// Generate call to latest function version for `cfg((feature = "std")` /// /// This should generate simple `fn func(..) { func_version_(..) }`. -fn function_std_latest_impl( - method: &TraitItemMethod, - latest_version: u32, -) -> Result { +fn function_std_latest_impl(method: &TraitItemMethod, latest_version: u32) -> Result { let function_name = &method.sig.ident; let args = get_function_arguments(&method.sig).map(FnArg::Typed); let arg_names = get_function_argument_names(&method.sig).collect::>(); let return_value = &method.sig.output; let attrs = method.attrs.iter().filter(|a| !a.path.is_ident("version")); - let latest_function_name = create_function_ident_with_version(&method.sig.ident, latest_version); + let latest_function_name = + create_function_ident_with_version(&method.sig.ident, latest_version); Ok(quote_spanned! 
{ method.span() => #[cfg(feature = "std")] @@ -153,17 +146,16 @@ fn function_std_impl( let crate_ = generate_crate_access(); let args = get_function_arguments(&method.sig).map(FnArg::Typed).chain( // Add the function context as last parameter when this is a wasm only interface. - iter::from_fn(|| + iter::from_fn(|| { if is_wasm_only { - Some( - parse_quote!( - mut __function_context__: &mut dyn #crate_::sp_wasm_interface::FunctionContext - ) - ) + Some(parse_quote!( + mut __function_context__: &mut dyn #crate_::sp_wasm_interface::FunctionContext + )) } else { None } - ).take(1), + }) + .take(1), ); let return_value = &method.sig.output; let attrs = method.attrs.iter().filter(|a| !a.path.is_ident("version")); @@ -179,15 +171,13 @@ fn function_std_impl( ) }; - Ok( - quote_spanned! { method.span() => - #[cfg(feature = "std")] - #( #attrs )* - fn #function_name( #( #args, )* ) #return_value { - #call_to_trait - } + Ok(quote_spanned! { method.span() => + #[cfg(feature = "std")] + #( #attrs )* + fn #function_name( #( #args, )* ) #return_value { + #call_to_trait } - ) + }) } /// Generate the call to the interface trait. 
@@ -199,10 +189,8 @@ fn generate_call_to_trait( ) -> TokenStream { let crate_ = generate_crate_access(); let method_name = create_function_ident_with_version(&method.sig.ident, version); - let expect_msg = format!( - "`{}` called outside of an Externalities-provided environment.", - method_name, - ); + let expect_msg = + format!("`{}` called outside of an Externalities-provided environment.", method_name,); let arg_names = get_function_argument_names(&method.sig); if takes_self_argument(&method.sig) { diff --git a/primitives/runtime-interface/proc-macro/src/runtime_interface/host_function_interface.rs b/primitives/runtime-interface/proc-macro/src/runtime_interface/host_function_interface.rs index fb127b1941532..ab84c04e3a728 100644 --- a/primitives/runtime-interface/proc-macro/src/runtime_interface/host_function_interface.rs +++ b/primitives/runtime-interface/proc-macro/src/runtime_interface/host_function_interface.rs @@ -22,35 +22,36 @@ //! executor. These implementations call the bare function interface. 
use crate::utils::{ - generate_crate_access, create_host_function_ident, get_function_argument_names, - get_function_argument_types_without_ref, get_function_argument_types_ref_and_mut, - get_function_argument_names_and_types_without_ref, get_function_arguments, - get_function_argument_types, create_exchangeable_host_function_ident, get_runtime_interface, - create_function_ident_with_version, + create_exchangeable_host_function_ident, create_function_ident_with_version, + create_host_function_ident, generate_crate_access, get_function_argument_names, + get_function_argument_names_and_types_without_ref, get_function_argument_types, + get_function_argument_types_ref_and_mut, get_function_argument_types_without_ref, + get_function_arguments, get_runtime_interface, }; use syn::{ - ItemTrait, TraitItemMethod, Result, ReturnType, Ident, Pat, Error, Signature, spanned::Spanned, + spanned::Spanned, Error, Ident, ItemTrait, Pat, Result, ReturnType, Signature, TraitItemMethod, }; -use proc_macro2::{TokenStream, Span}; +use proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens}; use inflector::Inflector; -use std::iter::{Iterator, self}; +use std::iter::{self, Iterator}; /// Generate the extern host functions for wasm and the `HostFunctions` struct that provides the /// implementations for the host functions on the host. pub fn generate(trait_def: &ItemTrait, is_wasm_only: bool) -> Result { let trait_name = &trait_def.ident; - let extern_host_function_impls = get_runtime_interface(trait_def)? - .latest_versions() - .try_fold(TokenStream::new(), |mut t, (version, method)| { + let extern_host_function_impls = get_runtime_interface(trait_def)?.latest_versions().try_fold( + TokenStream::new(), + |mut t, (version, method)| { t.extend(generate_extern_host_function(method, version, trait_name)?); Ok::<_, Error>(t) - })?; + }, + )?; let exchangeable_host_functions = get_runtime_interface(trait_def)? 
.latest_versions() .try_fold(TokenStream::new(), |mut t, (_, m)| { @@ -59,27 +60,29 @@ pub fn generate(trait_def: &ItemTrait, is_wasm_only: bool) -> Result ret` to make the function implementations exchangeable. - #[cfg(not(feature = "std"))] - mod extern_host_function_impls { - use super::*; - - #extern_host_function_impls - } - - #exchangeable_host_functions + Ok(quote! { + /// The implementations of the extern host functions. This special implementation module + /// is required to change the extern host functions signature to + /// `unsafe fn name(args) -> ret` to make the function implementations exchangeable. + #[cfg(not(feature = "std"))] + mod extern_host_function_impls { + use super::*; - #host_functions_struct + #extern_host_function_impls } - ) + + #exchangeable_host_functions + + #host_functions_struct + }) } /// Generate the extern host function for the given method. -fn generate_extern_host_function(method: &TraitItemMethod, version: u32, trait_name: &Ident) -> Result { +fn generate_extern_host_function( + method: &TraitItemMethod, + version: u32, + trait_name: &Ident, +) -> Result { let crate_ = generate_crate_access(); let args = get_function_arguments(&method.sig); let arg_types = get_function_argument_types_without_ref(&method.sig); @@ -106,33 +109,31 @@ fn generate_extern_host_function(method: &TraitItemMethod, version: u32, trait_n ReturnType::Default => quote!(), ReturnType::Type(_, ref ty) => quote! { <#ty as #crate_::wasm::FromFFIValue>::from_ffi_value(result) - } + }, }; - Ok( - quote! { - #[doc = #doc_string] - pub fn #function ( #( #args ),* ) #return_value { - extern "C" { - /// The extern function. - pub fn #ext_function ( - #( #arg_names: <#arg_types as #crate_::RIType>::FFIType ),* - ) #ffi_return_value; - } + Ok(quote! { + #[doc = #doc_string] + pub fn #function ( #( #args ),* ) #return_value { + extern "C" { + /// The extern function. 
+ pub fn #ext_function ( + #( #arg_names: <#arg_types as #crate_::RIType>::FFIType ),* + ) #ffi_return_value; + } - // Generate all wrapped ffi values. - #( - let #arg_names2 = <#arg_types2 as #crate_::wasm::IntoFFIValue>::into_ffi_value( - &#arg_names2, - ); - )* + // Generate all wrapped ffi values. + #( + let #arg_names2 = <#arg_types2 as #crate_::wasm::IntoFFIValue>::into_ffi_value( + &#arg_names2, + ); + )* - let result = unsafe { #ext_function( #( #arg_names3.get() ),* ) }; + let result = unsafe { #ext_function( #( #arg_names3.get() ),* ) }; - #convert_return_value - } + #convert_return_value } - ) + }) } /// Generate the host exchangeable function for the given method. @@ -144,44 +145,43 @@ fn generate_exchangeable_host_function(method: &TraitItemMethod) -> Result = #crate_::wasm::ExchangeableFunction::new(extern_host_function_impls::#function); - } - ) + Ok(quote! { + #[cfg(not(feature = "std"))] + #[allow(non_upper_case_globals)] + #[doc = #doc_string] + pub static #exchangeable_function : #crate_::wasm::ExchangeableFunction< + fn ( #( #arg_types ),* ) #output + > = #crate_::wasm::ExchangeableFunction::new(extern_host_function_impls::#function); + }) } /// Generate the `HostFunctions` struct that implements `wasm-interface::HostFunctions` to provide /// implementations for the extern host functions. -fn generate_host_functions_struct(trait_def: &ItemTrait, is_wasm_only: bool) -> Result { +fn generate_host_functions_struct( + trait_def: &ItemTrait, + is_wasm_only: bool, +) -> Result { let crate_ = generate_crate_access(); let host_functions = get_runtime_interface(trait_def)? .all_versions() - .map(|(version, method)| + .map(|(version, method)| { generate_host_function_implementation(&trait_def.ident, method, version, is_wasm_only) - ) + }) .collect::>>()?; - Ok( - quote! { - /// Provides implementations for the extern host functions. 
- #[cfg(feature = "std")] - pub struct HostFunctions; - - #[cfg(feature = "std")] - impl #crate_::sp_wasm_interface::HostFunctions for HostFunctions { - fn host_functions() -> Vec<&'static dyn #crate_::sp_wasm_interface::Function> { - vec![ #( #host_functions ),* ] - } + Ok(quote! { + /// Provides implementations for the extern host functions. + #[cfg(feature = "std")] + pub struct HostFunctions; + + #[cfg(feature = "std")] + impl #crate_::sp_wasm_interface::HostFunctions for HostFunctions { + fn host_functions() -> Vec<&'static dyn #crate_::sp_wasm_interface::Function> { + vec![ #( #host_functions ),* ] } } - ) + }) } /// Generates the host function struct that implements `wasm_interface::Function` and returns a static @@ -199,71 +199,65 @@ fn generate_host_function_implementation( let struct_name = Ident::new(&name.to_pascal_case(), Span::call_site()); let crate_ = generate_crate_access(); let signature = generate_wasm_interface_signature_for_host_function(&method.sig)?; - let wasm_to_ffi_values = generate_wasm_to_ffi_values( - &method.sig, - trait_name, - ).collect::>>()?; + let wasm_to_ffi_values = + generate_wasm_to_ffi_values(&method.sig, trait_name).collect::>>()?; let ffi_to_host_values = generate_ffi_to_host_value(&method.sig).collect::>>()?; let host_function_call = generate_host_function_call(&method.sig, version, is_wasm_only); let into_preallocated_ffi_value = generate_into_preallocated_ffi_value(&method.sig)?; let convert_return_value = generate_return_value_into_wasm_value(&method.sig); - Ok( - quote! 
{ - { - struct #struct_name; - - impl #crate_::sp_wasm_interface::Function for #struct_name { - fn name(&self) -> &str { - #name - } - - fn signature(&self) -> #crate_::sp_wasm_interface::Signature { - #signature - } - - fn execute( - &self, - __function_context__: &mut dyn #crate_::sp_wasm_interface::FunctionContext, - args: &mut dyn Iterator, - ) -> std::result::Result, String> { - #( #wasm_to_ffi_values )* - #( #ffi_to_host_values )* - #host_function_call - #into_preallocated_ffi_value - #convert_return_value - } + Ok(quote! { + { + struct #struct_name; + + impl #crate_::sp_wasm_interface::Function for #struct_name { + fn name(&self) -> &str { + #name + } + + fn signature(&self) -> #crate_::sp_wasm_interface::Signature { + #signature } - &#struct_name as &dyn #crate_::sp_wasm_interface::Function + fn execute( + &self, + __function_context__: &mut dyn #crate_::sp_wasm_interface::FunctionContext, + args: &mut dyn Iterator, + ) -> std::result::Result, String> { + #( #wasm_to_ffi_values )* + #( #ffi_to_host_values )* + #host_function_call + #into_preallocated_ffi_value + #convert_return_value + } } + + &#struct_name as &dyn #crate_::sp_wasm_interface::Function } - ) + }) } /// Generate the `wasm_interface::Signature` for the given host function `sig`. fn generate_wasm_interface_signature_for_host_function(sig: &Signature) -> Result { let crate_ = generate_crate_access(); let return_value = match &sig.output { - ReturnType::Type(_, ty) => - quote! { - Some( <<#ty as #crate_::RIType>::FFIType as #crate_::sp_wasm_interface::IntoValue>::VALUE_TYPE ) - }, - ReturnType::Default => quote!( None ), + ReturnType::Type(_, ty) => quote! { + Some( <<#ty as #crate_::RIType>::FFIType as #crate_::sp_wasm_interface::IntoValue>::VALUE_TYPE ) + }, + ReturnType::Default => quote!(None), }; - let arg_types = get_function_argument_types_without_ref(sig) - .map(|ty| quote! { + let arg_types = get_function_argument_types_without_ref(sig).map(|ty| { + quote! 
{ <<#ty as #crate_::RIType>::FFIType as #crate_::sp_wasm_interface::IntoValue>::VALUE_TYPE - }); + } + }); - Ok( - quote! { - #crate_::sp_wasm_interface::Signature { - args: std::borrow::Cow::Borrowed(&[ #( #arg_types ),* ][..]), - return_value: #return_value, - } + Ok(quote! { + #crate_::sp_wasm_interface::Signature { + args: std::borrow::Cow::Borrowed(&[ #( #arg_types ),* ][..]), + return_value: #return_value, } - ) + }) } /// Generate the code that converts the wasm values given to `HostFunctions::execute` into the FFI @@ -279,24 +273,23 @@ fn generate_wasm_to_ffi_values<'a>( function_name, ); - get_function_argument_names_and_types_without_ref(sig) - .map(move |(name, ty)| { - let try_from_error = format!( - "Could not instantiate `{}` from wasm value while executing `{}` from interface `{}`!", - name.to_token_stream(), - function_name, - trait_name, - ); + get_function_argument_names_and_types_without_ref(sig).map(move |(name, ty)| { + let try_from_error = format!( + "Could not instantiate `{}` from wasm value while executing `{}` from interface `{}`!", + name.to_token_stream(), + function_name, + trait_name, + ); - let var_name = generate_ffi_value_var_name(&name)?; + let var_name = generate_ffi_value_var_name(&name)?; - Ok(quote! { - let val = args.next().ok_or_else(|| #error_message)?; - let #var_name = < - <#ty as #crate_::RIType>::FFIType as #crate_::sp_wasm_interface::TryFromValue - >::try_from_value(val).ok_or_else(|| #try_from_error)?; - }) + Ok(quote! { + let val = args.next().ok_or_else(|| #error_message)?; + let #var_name = < + <#ty as #crate_::RIType>::FFIType as #crate_::sp_wasm_interface::TryFromValue + >::try_from_value(val).ok_or_else(|| #try_from_error)?; }) + }) } /// Generate the code to convert the ffi values on the host to the host values using `FromFFIValue`. @@ -311,14 +304,12 @@ fn generate_ffi_to_host_value<'a>( .map(move |((name, ty), mut_access)| { let ffi_value_var_name = generate_ffi_value_var_name(&name)?; - Ok( - quote! 
{ - let #mut_access #name = <#ty as #crate_::host::FromFFIValue>::from_ffi_value( - __function_context__, - #ffi_value_var_name, - )?; - } - ) + Ok(quote! { + let #mut_access #name = <#ty as #crate_::host::FromFFIValue>::from_ffi_value( + __function_context__, + #ffi_value_var_name, + )?; + }) }) } @@ -326,19 +317,17 @@ fn generate_ffi_to_host_value<'a>( fn generate_host_function_call(sig: &Signature, version: u32, is_wasm_only: bool) -> TokenStream { let host_function_name = create_function_ident_with_version(&sig.ident, version); let result_var_name = generate_host_function_result_var_name(&sig.ident); - let ref_and_mut = get_function_argument_types_ref_and_mut(sig).map(|ram| - ram.map(|(vr, vm)| quote!(#vr #vm)) - ); + let ref_and_mut = + get_function_argument_types_ref_and_mut(sig).map(|ram| ram.map(|(vr, vm)| quote!(#vr #vm))); let names = get_function_argument_names(sig); - let var_access = names.zip(ref_and_mut) - .map(|(n, ref_and_mut)| { - quote!( #ref_and_mut #n ) - }) + let var_access = names + .zip(ref_and_mut) + .map(|(n, ref_and_mut)| quote!( #ref_and_mut #n )) // If this is a wasm only interface, we add the function context as last parameter. .chain( iter::from_fn(|| if is_wasm_only { Some(quote!(__function_context__)) } else { None }) - .take(1) + .take(1), ); quote! { @@ -354,16 +343,15 @@ fn generate_host_function_result_var_name(name: &Ident) -> Ident { /// Generate the variable name that stores the FFI value. 
fn generate_ffi_value_var_name(pat: &Pat) -> Result { match pat { - Pat::Ident(pat_ident) => { + Pat::Ident(pat_ident) => if let Some(by_ref) = pat_ident.by_ref { Err(Error::new(by_ref.span(), "`ref` not supported!")) } else if let Some(sub_pattern) = &pat_ident.subpat { Err(Error::new(sub_pattern.0.span(), "Not supported!")) } else { Ok(Ident::new(&format!("{}_ffi_value", pat_ident.ident), Span::call_site())) - } - } - _ => Err(Error::new(pat.span(), "Not supported as variable name!")) + }, + _ => Err(Error::new(pat.span(), "Not supported as variable name!")), } } @@ -373,25 +361,23 @@ fn generate_ffi_value_var_name(pat: &Pat) -> Result { /// that the type implements `IntoPreAllocatedFFIValue`. fn generate_into_preallocated_ffi_value(sig: &Signature) -> Result { let crate_ = generate_crate_access(); - let ref_and_mut = get_function_argument_types_ref_and_mut(sig).map(|ram| - ram.and_then(|(vr, vm)| vm.map(|v| (vr, v))) - ); + let ref_and_mut = get_function_argument_types_ref_and_mut(sig) + .map(|ram| ram.and_then(|(vr, vm)| vm.map(|v| (vr, v)))); let names_and_types = get_function_argument_names_and_types_without_ref(sig); - ref_and_mut.zip(names_and_types) + ref_and_mut + .zip(names_and_types) .filter_map(|(ram, (name, ty))| ram.map(|_| (name, ty))) .map(|(name, ty)| { let ffi_var_name = generate_ffi_value_var_name(&name)?; - Ok( - quote! { - <#ty as #crate_::host::IntoPreallocatedFFIValue>::into_preallocated_ffi_value( - #name, - __function_context__, - #ffi_var_name, - )?; - } - ) + Ok(quote! 
{ + <#ty as #crate_::host::IntoPreallocatedFFIValue>::into_preallocated_ffi_value( + #name, + __function_context__, + #ffi_var_name, + )?; + }) }) .collect() } @@ -401,7 +387,7 @@ fn generate_return_value_into_wasm_value(sig: &Signature) -> TokenStream { let crate_ = generate_crate_access(); match &sig.output { - ReturnType::Default => quote!( Ok(None) ), + ReturnType::Default => quote!(Ok(None)), ReturnType::Type(_, ty) => { let result_var_name = generate_host_function_result_var_name(&sig.ident); @@ -411,6 +397,6 @@ fn generate_return_value_into_wasm_value(sig: &Signature) -> TokenStream { __function_context__, ).map(#crate_::sp_wasm_interface::IntoValue::into_value).map(Some) } - } + }, } } diff --git a/primitives/runtime-interface/proc-macro/src/runtime_interface/trait_decl_impl.rs b/primitives/runtime-interface/proc-macro/src/runtime_interface/trait_decl_impl.rs index 0e392b1a02fbf..c62e3ba87ccd3 100644 --- a/primitives/runtime-interface/proc-macro/src/runtime_interface/trait_decl_impl.rs +++ b/primitives/runtime-interface/proc-macro/src/runtime_interface/trait_decl_impl.rs @@ -19,15 +19,14 @@ //! default implementations and implements the trait for `&mut dyn Externalities`. 
use crate::utils::{ - generate_crate_access, - get_function_argument_types_without_ref, - get_runtime_interface, - create_function_ident_with_version, + create_function_ident_with_version, generate_crate_access, + get_function_argument_types_without_ref, get_runtime_interface, }; use syn::{ - ItemTrait, TraitItemMethod, Result, Error, fold::{self, Fold}, spanned::Spanned, - Visibility, Receiver, Type, Generics, + fold::{self, Fold}, + spanned::Spanned, + Error, Generics, ItemTrait, Receiver, Result, TraitItemMethod, Type, Visibility, }; use proc_macro2::TokenStream; @@ -40,13 +39,11 @@ pub fn process(trait_def: &ItemTrait, is_wasm_only: bool) -> Result let impl_trait = impl_trait_for_externalities(trait_def, is_wasm_only)?; let essential_trait_def = declare_essential_trait(trait_def)?; - Ok( - quote! { - #impl_trait + Ok(quote! { + #impl_trait - #essential_trait_def - } - ) + #essential_trait_def + }) } /// Converts the given trait definition into the essential trait definition without method @@ -66,12 +63,10 @@ impl ToEssentialTraitDef { let mut errors = self.errors; let methods = self.methods; if let Some(first_error) = errors.pop() { - Err( - errors.into_iter().fold(first_error, |mut o, n| { - o.combine(n); - o - }) - ) + Err(errors.into_iter().fold(first_error, |mut o, n| { + o.combine(n); + o + })) } else { Ok(methods) } @@ -101,12 +96,12 @@ impl Fold for ToEssentialTraitDef { } let arg_types = get_function_argument_types_without_ref(&method.sig); - arg_types.filter_map(|ty| - match *ty { + arg_types + .filter_map(|ty| match *ty { Type::ImplTrait(impl_trait) => Some(impl_trait), - _ => None - } - ).for_each(|invalid| self.push_error(&invalid, "`impl Trait` syntax not supported.")); + _ => None, + }) + .for_each(|invalid| self.push_error(&invalid, "`impl Trait` syntax not supported.")); self.error_on_generic_parameters(&method.sig.generics); @@ -145,13 +140,11 @@ fn declare_essential_trait(trait_def: &ItemTrait) -> Result { } let methods = 
folder.into_methods()?; - Ok( - quote! { - trait #trait_ { - #( #methods )* - } + Ok(quote! { + trait #trait_ { + #( #methods )* } - ) + }) } /// Implements the given trait definition for `dyn Externalities`. @@ -172,12 +165,10 @@ fn impl_trait_for_externalities(trait_def: &ItemTrait, is_wasm_only: bool) -> Re quote!( &mut dyn #crate_::Externalities ) }; - Ok( - quote! { - #[cfg(feature = "std")] - impl #trait_ for #impl_type { - #( #methods )* - } + Ok(quote! { + #[cfg(feature = "std")] + impl #trait_ for #impl_type { + #( #methods )* } - ) + }) } diff --git a/primitives/runtime-interface/proc-macro/src/utils.rs b/primitives/runtime-interface/proc-macro/src/utils.rs index d2d9dd7e3997a..02b5d23fbcac7 100644 --- a/primitives/runtime-interface/proc-macro/src/utils.rs +++ b/primitives/runtime-interface/proc-macro/src/utils.rs @@ -17,16 +17,19 @@ //! Util function used by this crate. -use proc_macro2::{TokenStream, Span}; +use proc_macro2::{Span, TokenStream}; use syn::{ - Ident, Error, Signature, Pat, PatType, FnArg, Type, token, TraitItemMethod, ItemTrait, - TraitItem, parse_quote, spanned::Spanned, Result, Meta, NestedMeta, Lit, Attribute, + parse_quote, spanned::Spanned, token, Attribute, Error, FnArg, Ident, ItemTrait, Lit, Meta, + NestedMeta, Pat, PatType, Result, Signature, TraitItem, TraitItemMethod, Type, }; use proc_macro_crate::{crate_name, FoundCrate}; -use std::{env, collections::{BTreeMap, btree_map::Entry}}; +use std::{ + collections::{btree_map::Entry, BTreeMap}, + env, +}; use quote::quote; @@ -53,8 +56,9 @@ impl<'a> RuntimeInterfaceFunction<'a> { pub fn latest_version(&self) -> (u32, &TraitItemMethod) { ( self.latest_version, - self.versions.get(&self.latest_version) - .expect("If latest_version has a value, the key with this value is in the versions; qed") + self.versions.get(&self.latest_version).expect( + "If latest_version has a value, the key with this value is in the versions; qed", + ), ) } } @@ -70,9 +74,12 @@ impl<'a> RuntimeInterface<'a> { 
} pub fn all_versions(&self) -> impl Iterator { - self.items.iter().flat_map(|(_, item)| item.versions.iter()).map(|(v, i)| (*v, *i)) + self.items + .iter() + .flat_map(|(_, item)| item.versions.iter()) + .map(|(v, i)| (*v, *i)) } - } +} /// Generates the include for the runtime-interface crate. pub fn generate_runtime_interface_include() -> TokenStream { @@ -88,16 +95,16 @@ pub fn generate_runtime_interface_include() -> TokenStream { Err(e) => { let err = Error::new(Span::call_site(), e).to_compile_error(); quote!( #err ) - } + }, } } /// Generates the access to the `sp-runtime-interface` crate. pub fn generate_crate_access() -> TokenStream { if env::var("CARGO_PKG_NAME").unwrap() == "sp-runtime-interface" { - quote!( sp_runtime_interface ) + quote!(sp_runtime_interface) } else { - quote!( proc_macro_runtime_interface ) + quote!(proc_macro_runtime_interface) } } @@ -109,26 +116,14 @@ pub fn create_exchangeable_host_function_ident(name: &Ident) -> Ident { /// Create the host function identifier for the given function name. pub fn create_host_function_ident(name: &Ident, version: u32, trait_name: &Ident) -> Ident { Ident::new( - &format!( - "ext_{}_{}_version_{}", - trait_name.to_string().to_snake_case(), - name, - version, - ), + &format!("ext_{}_{}_version_{}", trait_name.to_string().to_snake_case(), name, version,), Span::call_site(), ) } /// Create the host function identifier for the given function name. pub fn create_function_ident_with_version(name: &Ident, version: u32) -> Ident { - Ident::new( - &format!( - "{}_version_{}", - name, - version, - ), - Span::call_site(), - ) + Ident::new(&format!("{}_version_{}", name, version,), Span::call_site()) } /// Returns the function arguments of the given `Signature`, minus any `self` arguments. 
@@ -143,10 +138,8 @@ pub fn get_function_arguments<'a>(sig: &'a Signature) -> impl Iterator(sig: &'a Signature) -> impl Iterator( sig: &'a Signature, ) -> impl Iterator> + 'a { - get_function_arguments(sig) - .map(|pt| pt.ty) - .map(|ty| match *ty { - Type::Reference(type_ref) => type_ref.elem, - _ => ty, - }) + get_function_arguments(sig).map(|pt| pt.ty).map(|ty| match *ty { + Type::Reference(type_ref) => type_ref.elem, + _ => ty, + }) } /// Returns the function argument names and types, minus any `self`. If any of the arguments @@ -183,11 +174,10 @@ pub fn get_function_argument_types_without_ref<'a>( pub fn get_function_argument_names_and_types_without_ref<'a>( sig: &'a Signature, ) -> impl Iterator, Box)> + 'a { - get_function_arguments(sig) - .map(|pt| match *pt.ty { - Type::Reference(type_ref) => (pt.pat, type_ref.elem), - _ => (pt.pat, pt.ty), - }) + get_function_arguments(sig).map(|pt| match *pt.ty { + Type::Reference(type_ref) => (pt.pat, type_ref.elem), + _ => (pt.pat, pt.ty), + }) } /// Returns the `&`/`&mut` for all function argument types, minus the `self` arg. If a function @@ -195,23 +185,18 @@ pub fn get_function_argument_names_and_types_without_ref<'a>( pub fn get_function_argument_types_ref_and_mut<'a>( sig: &'a Signature, ) -> impl Iterator)>> + 'a { - get_function_arguments(sig) - .map(|pt| pt.ty) - .map(|ty| match *ty { - Type::Reference(type_ref) => Some((type_ref.and_token, type_ref.mutability)), - _ => None, - }) + get_function_arguments(sig).map(|pt| pt.ty).map(|ty| match *ty { + Type::Reference(type_ref) => Some((type_ref.and_token, type_ref.mutability)), + _ => None, + }) } /// Returns an iterator over all trait methods for the given trait definition. 
fn get_trait_methods<'a>(trait_def: &'a ItemTrait) -> impl Iterator { - trait_def - .items - .iter() - .filter_map(|i| match i { - TraitItem::Method(ref method) => Some(method), - _ => None, - }) + trait_def.items.iter().filter_map(|i| match i { + TraitItem::Method(ref method) => Some(method), + _ => None, + }) } /// Parse version attribute. @@ -221,36 +206,34 @@ fn parse_version_attribute(version: &Attribute) -> Result { let meta = version.parse_meta()?; let err = Err(Error::new( - meta.span(), - "Unexpected `version` attribute. The supported format is `#[version(1)]`", - ) - ); + meta.span(), + "Unexpected `version` attribute. The supported format is `#[version(1)]`", + )); match meta { - Meta::List(list) => { + Meta::List(list) => if list.nested.len() != 1 { err } else if let Some(NestedMeta::Lit(Lit::Int(i))) = list.nested.first() { i.base10_parse() } else { err - } - }, + }, _ => err, } } /// Return item version (`#[version(X)]`) attribute, if present. fn get_item_version(item: &TraitItemMethod) -> Result> { - item.attrs.iter().find(|attr| attr.path.is_ident("version")) + item.attrs + .iter() + .find(|attr| attr.path.is_ident("version")) .map(|attr| parse_version_attribute(attr)) .transpose() } /// Returns all runtime interface members, with versions. 
-pub fn get_runtime_interface<'a>(trait_def: &'a ItemTrait) - -> Result> -{ +pub fn get_runtime_interface<'a>(trait_def: &'a ItemTrait) -> Result> { let mut functions: BTreeMap> = BTreeMap::new(); for item in get_trait_methods(trait_def) { @@ -258,25 +241,26 @@ pub fn get_runtime_interface<'a>(trait_def: &'a ItemTrait) let version = get_item_version(item)?.unwrap_or(1); match functions.entry(name.clone()) { - Entry::Vacant(entry) => { entry.insert(RuntimeInterfaceFunction::new(version, item)); }, + Entry::Vacant(entry) => { + entry.insert(RuntimeInterfaceFunction::new(version, item)); + }, Entry::Occupied(mut entry) => { if let Some(existing_item) = entry.get().versions.get(&version) { - let mut err = Error::new( - item.span(), - "Duplicated version attribute", - ); + let mut err = Error::new(item.span(), "Duplicated version attribute"); err.combine(Error::new( existing_item.span(), "Previous version with the same number defined here", )); - return Err(err); + return Err(err) } let interface_item = entry.get_mut(); - if interface_item.latest_version < version { interface_item.latest_version = version; } + if interface_item.latest_version < version { + interface_item.latest_version = version; + } interface_item.versions.insert(version, item); - } + }, } } @@ -286,8 +270,11 @@ pub fn get_runtime_interface<'a>(trait_def: &'a ItemTrait) if next_expected != *version { return Err(Error::new( item.span(), - format!("Unexpected version attribute: missing version '{}' for this function", next_expected), - )); + format!( + "Unexpected version attribute: missing version '{}' for this function", + next_expected + ), + )) } next_expected += 1; } diff --git a/primitives/runtime-interface/src/impls.rs b/primitives/runtime-interface/src/impls.rs index 4dd79aeccb39e..40f8e90479f95 100644 --- a/primitives/runtime-interface/src/impls.rs +++ b/primitives/runtime-interface/src/impls.rs @@ -17,14 +17,15 @@ //! Provides implementations for the runtime interface traits. 
-use crate::{ - RIType, Pointer, pass_by::{PassBy, Codec, Inner, PassByInner, Enum}, - util::{unpack_ptr_and_len, pack_ptr_and_len}, -}; #[cfg(feature = "std")] use crate::host::*; #[cfg(not(feature = "std"))] use crate::wasm::*; +use crate::{ + pass_by::{Codec, Enum, Inner, PassBy, PassByInner}, + util::{pack_ptr_and_len, unpack_ptr_and_len}, + Pointer, RIType, +}; #[cfg(all(not(feature = "std"), not(feature = "disable_target_static_assertions")))] use static_assertions::assert_eq_size; @@ -32,7 +33,7 @@ use static_assertions::assert_eq_size; #[cfg(feature = "std")] use sp_wasm_interface::{FunctionContext, Result}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use sp_std::{any::TypeId, mem, vec::Vec}; @@ -195,7 +196,7 @@ impl FromFFIValue for Vec { let len = len as usize; if len == 0 { - return Vec::new(); + return Vec::new() } let data = unsafe { Vec::from_raw_parts(ptr as *mut u8, len, len) }; @@ -230,7 +231,8 @@ impl FromFFIValue for [T] { if TypeId::of::() == TypeId::of::() { Ok(unsafe { mem::transmute(vec) }) } else { - Ok(Vec::::decode(&mut &vec[..]).expect("Wasm to host values are encoded correctly; qed")) + Ok(Vec::::decode(&mut &vec[..]) + .expect("Wasm to host values are encoded correctly; qed")) } } } @@ -247,13 +249,11 @@ impl IntoPreallocatedFFIValue for [u8] { let (ptr, len) = unpack_ptr_and_len(allocated); if (len as usize) < self_instance.len() { - Err( - format!( - "Preallocated buffer is not big enough (given {} vs needed {})!", - len, - self_instance.len() - ) - ) + Err(format!( + "Preallocated buffer is not big enough (given {} vs needed {})!", + len, + self_instance.len() + )) } else { context.write_memory(Pointer::new(ptr), &self_instance) } @@ -367,7 +367,10 @@ impl PassBy for Option { #[impl_trait_for_tuples::impl_for_tuples(30)] #[tuple_types_no_default_trait_bound] -impl PassBy for Tuple where Self: codec::Codec { +impl PassBy for Tuple +where + Self: codec::Codec, +{ type PassBy = Codec; } @@ -511,7 +514,8 @@ macro_rules! 
for_u128_i128 { type SelfInstance = $type; fn from_ffi_value(context: &mut dyn FunctionContext, arg: u32) -> Result<$type> { - let data = context.read_memory(Pointer::new(arg), mem::size_of::<$type>() as u32)?; + let data = + context.read_memory(Pointer::new(arg), mem::size_of::<$type>() as u32)?; let mut res = [0u8; mem::size_of::<$type>()]; res.copy_from_slice(&data); Ok(<$type>::from_le_bytes(res)) @@ -526,7 +530,7 @@ macro_rules! for_u128_i128 { Ok(addr.into()) } } - } + }; } for_u128_i128!(u128); diff --git a/primitives/runtime-interface/src/lib.rs b/primitives/runtime-interface/src/lib.rs index 93b4a8db87e9d..6b6bebb591360 100644 --- a/primitives/runtime-interface/src/lib.rs +++ b/primitives/runtime-interface/src/lib.rs @@ -298,22 +298,23 @@ pub use sp_runtime_interface_proc_macro::runtime_interface; #[doc(hidden)] #[cfg(feature = "std")] pub use sp_externalities::{ - set_and_run_with_externalities, with_externalities, Externalities, ExternalitiesExt, ExtensionStore, + set_and_run_with_externalities, with_externalities, ExtensionStore, Externalities, + ExternalitiesExt, }; #[doc(hidden)] pub use codec; -pub(crate) mod impls; #[cfg(feature = "std")] pub mod host; +pub(crate) mod impls; +pub mod pass_by; #[cfg(any(not(feature = "std"), doc))] pub mod wasm; -pub mod pass_by; mod util; -pub use util::{unpack_ptr_and_len, pack_ptr_and_len}; +pub use util::{pack_ptr_and_len, unpack_ptr_and_len}; /// Something that can be used by the runtime interface as type to communicate between wasm and the /// host. diff --git a/primitives/runtime-interface/src/pass_by.rs b/primitives/runtime-interface/src/pass_by.rs index 69485a1a2873f..0535d1ca8d7fc 100644 --- a/primitives/runtime-interface/src/pass_by.rs +++ b/primitives/runtime-interface/src/pass_by.rs @@ -20,7 +20,10 @@ //! //! [`Codec`], [`Inner`] and [`Enum`] are the provided strategy implementations. 
-use crate::{RIType, util::{unpack_ptr_and_len, pack_ptr_and_len}}; +use crate::{ + util::{pack_ptr_and_len, unpack_ptr_and_len}, + RIType, +}; #[cfg(feature = "std")] use crate::host::*; @@ -30,7 +33,7 @@ use crate::wasm::*; #[cfg(feature = "std")] use sp_wasm_interface::{FunctionContext, Pointer, Result}; -use sp_std::{marker::PhantomData, convert::TryFrom}; +use sp_std::{convert::TryFrom, marker::PhantomData}; #[cfg(not(feature = "std"))] use sp_std::vec::Vec; @@ -119,18 +122,12 @@ pub trait PassByImpl: RIType { /// Convert the given instance to the ffi value. /// /// For more information see: [`crate::host::IntoFFIValue::into_ffi_value`] - fn into_ffi_value( - instance: T, - context: &mut dyn FunctionContext, - ) -> Result; + fn into_ffi_value(instance: T, context: &mut dyn FunctionContext) -> Result; /// Create `T` from the given ffi value. /// /// For more information see: [`crate::host::FromFFIValue::from_ffi_value`] - fn from_ffi_value( - context: &mut dyn FunctionContext, - arg: Self::FFIType, - ) -> Result; + fn from_ffi_value(context: &mut dyn FunctionContext, arg: Self::FFIType) -> Result; } /// Something that provides a strategy for passing a type between wasm and the host. 
@@ -220,10 +217,7 @@ pub struct Codec(PhantomData); #[cfg(feature = "std")] impl PassByImpl for Codec { - fn into_ffi_value( - instance: T, - context: &mut dyn FunctionContext, - ) -> Result { + fn into_ffi_value(instance: T, context: &mut dyn FunctionContext) -> Result { let vec = instance.encode(); let ptr = context.allocate_memory(vec.len() as u32)?; context.write_memory(ptr, &vec)?; @@ -231,14 +225,10 @@ impl PassByImpl for Codec { Ok(pack_ptr_and_len(ptr.into(), vec.len() as u32)) } - fn from_ffi_value( - context: &mut dyn FunctionContext, - arg: Self::FFIType, - ) -> Result { + fn from_ffi_value(context: &mut dyn FunctionContext, arg: Self::FFIType) -> Result { let (ptr, len) = unpack_ptr_and_len(arg); let vec = context.read_memory(Pointer::new(ptr), len)?; - T::decode(&mut &vec[..]) - .map_err(|e| format!("Could not decode value from wasm: {}", e)) + T::decode(&mut &vec[..]).map_err(|e| format!("Could not decode value from wasm: {}", e)) } } @@ -330,35 +320,31 @@ pub struct Inner, I: RIType>(PhantomData<(T, I)>); #[cfg(feature = "std")] impl, I: RIType> PassByImpl for Inner - where I: IntoFFIValue + FromFFIValue +where + I: IntoFFIValue + FromFFIValue, { - fn into_ffi_value( - instance: T, - context: &mut dyn FunctionContext, - ) -> Result { + fn into_ffi_value(instance: T, context: &mut dyn FunctionContext) -> Result { instance.into_inner().into_ffi_value(context) } - fn from_ffi_value( - context: &mut dyn FunctionContext, - arg: Self::FFIType, - ) -> Result { + fn from_ffi_value(context: &mut dyn FunctionContext, arg: Self::FFIType) -> Result { I::from_ffi_value(context, arg).map(T::from_inner) } } #[cfg(not(feature = "std"))] impl, I: RIType> PassByImpl for Inner - where I: IntoFFIValue + FromFFIValue +where + I: IntoFFIValue + FromFFIValue, { type Owned = I::Owned; fn into_ffi_value(instance: &T) -> WrappedFFIValue { - instance.inner().into_ffi_value() + instance.inner().into_ffi_value() } fn from_ffi_value(arg: Self::FFIType) -> T { - 
T::from_inner(I::from_ffi_value(arg)) + T::from_inner(I::from_ffi_value(arg)) } } @@ -415,17 +401,11 @@ pub struct Enum + TryFrom>(PhantomData); #[cfg(feature = "std")] impl + TryFrom> PassByImpl for Enum { - fn into_ffi_value( - instance: T, - _: &mut dyn FunctionContext, - ) -> Result { + fn into_ffi_value(instance: T, _: &mut dyn FunctionContext) -> Result { Ok(instance.into()) } - fn from_ffi_value( - _: &mut dyn FunctionContext, - arg: Self::FFIType, - ) -> Result { + fn from_ffi_value(_: &mut dyn FunctionContext, arg: Self::FFIType) -> Result { T::try_from(arg).map_err(|_| format!("Invalid enum discriminant: {}", arg)) } } diff --git a/primitives/runtime-interface/src/wasm.rs b/primitives/runtime-interface/src/wasm.rs index 387d6901e2f25..28613f81a68b2 100644 --- a/primitives/runtime-interface/src/wasm.rs +++ b/primitives/runtime-interface/src/wasm.rs @@ -108,7 +108,7 @@ impl ExchangeableFunction { /// # Returns /// /// Returns the original implementation wrapped in [`RestoreImplementation`]. 
- pub fn replace_implementation(&'static self, new_impl: T) -> RestoreImplementation { + pub fn replace_implementation(&'static self, new_impl: T) -> RestoreImplementation { if let ExchangeableFunctionState::Replaced = self.0.get().1 { panic!("Trying to replace an already replaced implementation!") } @@ -139,6 +139,7 @@ pub struct RestoreImplementation(&'static ExchangeableFunctio impl Drop for RestoreImplementation { fn drop(&mut self) { - self.0.restore_orig_implementation(self.1.take().expect("Value is only taken on drop; qed")); + self.0 + .restore_orig_implementation(self.1.take().expect("Value is only taken on drop; qed")); } } diff --git a/primitives/runtime-interface/test-wasm-deprecated/src/lib.rs b/primitives/runtime-interface/test-wasm-deprecated/src/lib.rs index 0a7e2b49bbbbb..8c864fc90e036 100644 --- a/primitives/runtime-interface/test-wasm-deprecated/src/lib.rs +++ b/primitives/runtime-interface/test-wasm-deprecated/src/lib.rs @@ -29,8 +29,10 @@ include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); /// Wasm binary unwrapped. If built with `SKIP_WASM_BUILD`, the function panics. #[cfg(feature = "std")] pub fn wasm_binary_unwrap() -> &'static [u8] { - WASM_BINARY.expect("Development wasm binary is not available. Testing is only \ - supported with the flag disabled.") + WASM_BINARY.expect( + "Development wasm binary is not available. Testing is only \ + supported with the flag disabled.", + ) } /// This function is not used, but we require it for the compiler to include `sp-io`. 
diff --git a/primitives/runtime-interface/test-wasm/src/lib.rs b/primitives/runtime-interface/test-wasm/src/lib.rs index 65a0e5c5ca447..72acdd4ff8d6e 100644 --- a/primitives/runtime-interface/test-wasm/src/lib.rs +++ b/primitives/runtime-interface/test-wasm/src/lib.rs @@ -22,7 +22,7 @@ use sp_runtime_interface::runtime_interface; #[cfg(not(feature = "std"))] -use sp_std::{prelude::*, mem, convert::TryFrom}; +use sp_std::{convert::TryFrom, mem, prelude::*}; use sp_core::{sr25519::Public, wasm_export_functions}; @@ -33,8 +33,10 @@ include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); /// Wasm binary unwrapped. If built with `SKIP_WASM_BUILD`, the function panics. #[cfg(feature = "std")] pub fn wasm_binary_unwrap() -> &'static [u8] { - WASM_BINARY.expect("Development wasm binary is not available. Testing is only \ - supported with the flag disabled.") + WASM_BINARY.expect( + "Development wasm binary is not available. Testing is only \ + supported with the flag disabled.", + ) } /// Used in the `test_array_as_mutable_reference` test. 
diff --git a/primitives/runtime-interface/test/src/lib.rs b/primitives/runtime-interface/test/src/lib.rs index a021a93939a10..82c50fffeb8d7 100644 --- a/primitives/runtime-interface/test/src/lib.rs +++ b/primitives/runtime-interface/test/src/lib.rs @@ -20,13 +20,16 @@ use sp_runtime_interface::*; -use sp_runtime_interface_test_wasm::{wasm_binary_unwrap, test_api::HostFunctions}; +use sp_runtime_interface_test_wasm::{test_api::HostFunctions, wasm_binary_unwrap}; use sp_runtime_interface_test_wasm_deprecated::wasm_binary_unwrap as wasm_binary_deprecated_unwrap; -use sp_wasm_interface::HostFunctions as HostFunctionsT; use sc_executor_common::runtime_blob::RuntimeBlob; +use sp_wasm_interface::HostFunctions as HostFunctionsT; -use std::{collections::HashSet, sync::{Arc, Mutex}}; +use std::{ + collections::HashSet, + sync::{Arc, Mutex}, +}; type TestExternalities = sp_state_machine::TestExternalities; @@ -82,7 +85,10 @@ fn test_set_storage() { #[test] fn test_return_value_into_mutable_reference() { - call_wasm_method::(&wasm_binary_unwrap()[..], "test_return_value_into_mutable_reference"); + call_wasm_method::( + &wasm_binary_unwrap()[..], + "test_return_value_into_mutable_reference", + ); } #[test] @@ -102,7 +108,8 @@ fn test_return_input_public_key() { #[test] fn host_function_not_found() { - let err = call_wasm_method_with_result::<()>(&wasm_binary_unwrap()[..], "test_return_data").unwrap_err(); + let err = call_wasm_method_with_result::<()>(&wasm_binary_unwrap()[..], "test_return_data") + .unwrap_err(); assert!(err.contains("Instantiation: Export ")); assert!(err.contains(" not found")); @@ -111,41 +118,56 @@ fn host_function_not_found() { #[test] #[should_panic(expected = "Invalid utf8 data provided")] fn test_invalid_utf8_data_should_return_an_error() { - call_wasm_method::(&wasm_binary_unwrap()[..], "test_invalid_utf8_data_should_return_an_error"); + call_wasm_method::( + &wasm_binary_unwrap()[..], + "test_invalid_utf8_data_should_return_an_error", + ); } #[test] 
fn test_overwrite_native_function_implementation() { - call_wasm_method::(&wasm_binary_unwrap()[..], "test_overwrite_native_function_implementation"); + call_wasm_method::( + &wasm_binary_unwrap()[..], + "test_overwrite_native_function_implementation", + ); } #[test] fn test_u128_i128_as_parameter_and_return_value() { - call_wasm_method::(&wasm_binary_unwrap()[..], "test_u128_i128_as_parameter_and_return_value"); + call_wasm_method::( + &wasm_binary_unwrap()[..], + "test_u128_i128_as_parameter_and_return_value", + ); } #[test] fn test_vec_return_value_memory_is_freed() { - call_wasm_method::(&wasm_binary_unwrap()[..], "test_vec_return_value_memory_is_freed"); + call_wasm_method::( + &wasm_binary_unwrap()[..], + "test_vec_return_value_memory_is_freed", + ); } #[test] fn test_encoded_return_value_memory_is_freed() { - call_wasm_method::(&wasm_binary_unwrap()[..], "test_encoded_return_value_memory_is_freed"); + call_wasm_method::( + &wasm_binary_unwrap()[..], + "test_encoded_return_value_memory_is_freed", + ); } #[test] fn test_array_return_value_memory_is_freed() { - call_wasm_method::(&wasm_binary_unwrap()[..], "test_array_return_value_memory_is_freed"); + call_wasm_method::( + &wasm_binary_unwrap()[..], + "test_array_return_value_memory_is_freed", + ); } #[test] fn test_versionining_with_new_host_works() { // We call to the new wasm binary with new host function. - call_wasm_method::( - &wasm_binary_unwrap()[..], - "test_versionning_works", - ); + call_wasm_method::(&wasm_binary_unwrap()[..], "test_versionning_works"); // we call to the old wasm binary with a new host functions // old versions of host functions should be called and test should be ok! 
@@ -158,7 +180,7 @@ fn test_versionining_with_new_host_works() { #[test] fn test_tracing() { use std::fmt; - use tracing::{span::Id as SpanId}; + use tracing::span::Id as SpanId; use tracing_core::field::{Field, Visit}; #[derive(Clone)] @@ -166,9 +188,8 @@ fn test_tracing() { struct FieldConsumer(&'static str, Option); impl Visit for FieldConsumer { - fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) { - if field.name() == self.0 { + if field.name() == self.0 { self.1 = Some(format!("{:?}", value)) } } @@ -180,14 +201,16 @@ fn test_tracing() { } impl tracing::subscriber::Subscriber for TracingSubscriber { - fn enabled(&self, _: &tracing::Metadata) -> bool { true } + fn enabled(&self, _: &tracing::Metadata) -> bool { + true + } fn new_span(&self, span: &tracing::span::Attributes) -> tracing::Id { let mut inner = self.0.lock().unwrap(); let id = SpanId::from_u64((inner.spans.len() + 1) as _); let mut f = FieldConsumer("name", None); span.record(&mut f); - inner.spans.insert(f.1.unwrap_or_else(||span.metadata().name().to_owned())); + inner.spans.insert(f.1.unwrap_or_else(|| span.metadata().name().to_owned())); id } diff --git a/primitives/runtime/src/curve.rs b/primitives/runtime/src/curve.rs index 981e943fce067..d6bd94c2bff70 100644 --- a/primitives/runtime/src/curve.rs +++ b/primitives/runtime/src/curve.rs @@ -17,7 +17,10 @@ //! Provides some utilities to define a piecewise linear function. -use crate::{Perbill, traits::{AtLeast32BitUnsigned, SaturatedConversion}}; +use crate::{ + traits::{AtLeast32BitUnsigned, SaturatedConversion}, + Perbill, +}; use core::ops::Sub; /// Piecewise Linear function in [0, 1] -> [0, 1]. @@ -29,14 +32,15 @@ pub struct PiecewiseLinear<'a> { pub maximum: Perbill, } -fn abs_sub + Clone>(a: N, b: N) -> N where { +fn abs_sub + Clone>(a: N, b: N) -> N where { a.clone().max(b.clone()) - a.min(b) } impl<'a> PiecewiseLinear<'a> { /// Compute `f(n/d)*d` with `n <= d`. This is useful to avoid loss of precision. 
- pub fn calculate_for_fraction_times_denominator(&self, n: N, d: N) -> N where - N: AtLeast32BitUnsigned + Clone + pub fn calculate_for_fraction_times_denominator(&self, n: N, d: N) -> N + where + N: AtLeast32BitUnsigned + Clone, { let n = n.min(d.clone()); @@ -44,8 +48,7 @@ impl<'a> PiecewiseLinear<'a> { return N::zero() } - let next_point_index = self.points.iter() - .position(|p| n < p.0 * d.clone()); + let next_point_index = self.points.iter().position(|p| n < p.0 * d.clone()); let (prev, next) = if let Some(next_point_index) = next_point_index { if let Some(previous_point_index) = next_point_index.checked_sub(1) { @@ -80,7 +83,8 @@ impl<'a> PiecewiseLinear<'a> { // This is guaranteed not to overflow on whatever values nor lose precision. // `q` must be superior to zero. fn multiply_by_rational_saturating(value: N, p: u32, q: u32) -> N - where N: AtLeast32BitUnsigned + Clone +where + N: AtLeast32BitUnsigned + Clone, { let q = q.max(1); @@ -112,17 +116,14 @@ fn test_multiply_by_rational_saturating() { for value in 0..=div { for p in 0..=div { for q in 1..=div { - let value: u64 = (value as u128 * u64::MAX as u128 / div as u128) - .try_into().unwrap(); - let p = (p as u64 * u32::MAX as u64 / div as u64) - .try_into().unwrap(); - let q = (q as u64 * u32::MAX as u64 / div as u64) - .try_into().unwrap(); + let value: u64 = + (value as u128 * u64::MAX as u128 / div as u128).try_into().unwrap(); + let p = (p as u64 * u32::MAX as u64 / div as u64).try_into().unwrap(); + let q = (q as u64 * u32::MAX as u64 / div as u64).try_into().unwrap(); assert_eq!( multiply_by_rational_saturating(value, p, q), - (value as u128 * p as u128 / q as u128) - .try_into().unwrap_or(u64::MAX) + (value as u128 * p as u128 / q as u128).try_into().unwrap_or(u64::MAX) ); } } @@ -153,10 +154,8 @@ fn test_calculate_for_fraction_times_denominator() { let div = 100u32; for d in 0..=div { for n in 0..=d { - let d: u64 = (d as u128 * u64::MAX as u128 / div as u128) - .try_into().unwrap(); - let n: 
u64 = (n as u128 * u64::MAX as u128 / div as u128) - .try_into().unwrap(); + let d: u64 = (d as u128 * u64::MAX as u128 / div as u128).try_into().unwrap(); + let n: u64 = (n as u128 * u64::MAX as u128 / div as u128).try_into().unwrap(); let res = curve.calculate_for_fraction_times_denominator(n, d); let expected = formal_calculate_for_fraction_times_denominator(n, d); diff --git a/primitives/runtime/src/generic/block.rs b/primitives/runtime/src/generic/block.rs index af4f9e4521e3b..21a01933bc691 100644 --- a/primitives/runtime/src/generic/block.rs +++ b/primitives/runtime/src/generic/block.rs @@ -23,14 +23,16 @@ use std::fmt; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use sp_std::prelude::*; -use sp_core::RuntimeDebug; -use crate::codec::{Codec, Encode, Decode}; -use crate::traits::{ - self, Member, Block as BlockT, Header as HeaderT, MaybeSerialize, MaybeMallocSizeOf, - NumberFor, +use crate::{ + codec::{Codec, Decode, Encode}, + traits::{ + self, Block as BlockT, Header as HeaderT, MaybeMallocSizeOf, MaybeSerialize, Member, + NumberFor, + }, + Justifications, }; -use crate::Justifications; +use sp_core::RuntimeDebug; +use sp_std::prelude::*; /// Something to identify a block. #[derive(PartialEq, Eq, Clone, RuntimeDebug)] diff --git a/primitives/runtime/src/generic/checked_extrinsic.rs b/primitives/runtime/src/generic/checked_extrinsic.rs index 2c3392a133799..b2044a6cf74fd 100644 --- a/primitives/runtime/src/generic/checked_extrinsic.rs +++ b/primitives/runtime/src/generic/checked_extrinsic.rs @@ -18,11 +18,13 @@ //! Generic implementation of an extrinsic that has passed the verification //! stage. 
-use crate::traits::{ - self, Member, MaybeDisplay, SignedExtension, Dispatchable, DispatchInfoOf, PostDispatchInfoOf, - ValidateUnsigned, +use crate::{ + traits::{ + self, DispatchInfoOf, Dispatchable, MaybeDisplay, Member, PostDispatchInfoOf, + SignedExtension, ValidateUnsigned, + }, + transaction_validity::{TransactionSource, TransactionValidity}, }; -use crate::transaction_validity::{TransactionValidity, TransactionSource}; /// Definition of something that the external world might want to say; its /// existence implies that it has been checked and is good, particularly with @@ -37,12 +39,11 @@ pub struct CheckedExtrinsic { pub function: Call, } -impl traits::Applyable for - CheckedExtrinsic +impl traits::Applyable for CheckedExtrinsic where AccountId: Member + MaybeDisplay, - Call: Member + Dispatchable, - Extra: SignedExtension, + Call: Member + Dispatchable, + Extra: SignedExtension, Origin: From>, { type Call = Call; @@ -64,7 +65,7 @@ where } } - fn apply>( + fn apply>( self, info: &DispatchInfoOf, len: usize, diff --git a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index 24bd7fbe94528..df8199e41fcea 100644 --- a/primitives/runtime/src/generic/digest.rs +++ b/primitives/runtime/src/generic/digest.rs @@ -22,9 +22,14 @@ use serde::{Deserialize, Serialize}; use sp_std::prelude::*; -use crate::ConsensusEngineId; -use crate::codec::{Decode, Encode, Input, Error}; -use crate::scale_info::{Path, TypeInfo, Type, TypeParameter, build::{Fields, Variants}, meta_type}; +use crate::{ + codec::{Decode, Encode, Error, Input}, + scale_info::{ + build::{Fields, Variants}, + meta_type, Path, Type, TypeInfo, TypeParameter, + }, + ConsensusEngineId, +}; use sp_core::{ChangesTrieConfiguration, RuntimeDebug}; /// Generic header digest. 
@@ -41,7 +46,7 @@ pub struct Digest { impl Default for Digest { fn default() -> Self { - Self { logs: Vec::new(), } + Self { logs: Vec::new() } } } @@ -62,12 +67,18 @@ impl Digest { } /// Get reference to the first digest item that matches the passed predicate. - pub fn log) -> Option<&T>>(&self, predicate: F) -> Option<&T> { + pub fn log) -> Option<&T>>( + &self, + predicate: F, + ) -> Option<&T> { self.logs().iter().find_map(predicate) } /// Get a conversion of the first digest item that successfully converts using the function. - pub fn convert_first) -> Option>(&self, predicate: F) -> Option { + pub fn convert_first) -> Option>( + &self, + predicate: F, + ) -> Option { self.logs().iter().find_map(predicate) } } @@ -133,16 +144,18 @@ pub enum ChangesTrieSignal { #[cfg(feature = "std")] impl serde::Serialize for DigestItem { - fn serialize(&self, seq: S) -> Result where S: serde::Serializer { - self.using_encoded(|bytes| { - sp_core::bytes::serialize(bytes, seq) - }) + fn serialize(&self, seq: S) -> Result + where + S: serde::Serializer, + { + self.using_encoded(|bytes| sp_core::bytes::serialize(bytes, seq)) } } #[cfg(feature = "std")] impl<'a, Hash: Decode> serde::Deserialize<'a> for DigestItem { - fn deserialize(de: D) -> Result where + fn deserialize(de: D) -> Result + where D: serde::Deserializer<'a>, { let r = sp_core::bytes::deserialize(de)?; @@ -153,7 +166,7 @@ impl<'a, Hash: Decode> serde::Deserialize<'a> for DigestItem { impl TypeInfo for DigestItem where - Hash: TypeInfo + 'static + Hash: TypeInfo + 'static, { type Identity = Self; @@ -164,17 +177,14 @@ where .variant( Variants::new() .variant("ChangesTrieRoot", |v| { - v.index(DigestItemType::ChangesTrieRoot as u8).fields( - Fields::unnamed() - .field(|f| f.ty::().type_name("Hash")), - ) + v.index(DigestItemType::ChangesTrieRoot as u8) + .fields(Fields::unnamed().field(|f| f.ty::().type_name("Hash"))) }) .variant("PreRuntime", |v| { v.index(DigestItemType::PreRuntime as u8).fields( Fields::unnamed() 
.field(|f| { - f.ty::() - .type_name("ConsensusEngineId") + f.ty::().type_name("ConsensusEngineId") }) .field(|f| f.ty::>().type_name("Vec")), ) @@ -183,8 +193,7 @@ where v.index(DigestItemType::Consensus as u8).fields( Fields::unnamed() .field(|f| { - f.ty::() - .type_name("ConsensusEngineId") + f.ty::().type_name("ConsensusEngineId") }) .field(|f| f.ty::>().type_name("Vec")), ) @@ -193,8 +202,7 @@ where v.index(DigestItemType::Seal as u8).fields( Fields::unnamed() .field(|f| { - f.ty::() - .type_name("ConsensusEngineId") + f.ty::().type_name("ConsensusEngineId") }) .field(|f| f.ty::>().type_name("Vec")), ) @@ -208,8 +216,7 @@ where }) .variant("Other", |v| { v.index(DigestItemType::Other as u8).fields( - Fields::unnamed() - .field(|f| f.ty::>().type_name("Vec")), + Fields::unnamed().field(|f| f.ty::>().type_name("Vec")), ) }), ) @@ -363,9 +370,7 @@ impl Decode for DigestItem { fn decode(input: &mut I) -> Result { let item_type: DigestItemType = Decode::decode(input)?; match item_type { - DigestItemType::ChangesTrieRoot => Ok(Self::ChangesTrieRoot( - Decode::decode(input)?, - )), + DigestItemType::ChangesTrieRoot => Ok(Self::ChangesTrieRoot(Decode::decode(input)?)), DigestItemType::PreRuntime => { let vals: (ConsensusEngineId, Vec) = Decode::decode(input)?; Ok(Self::PreRuntime(vals.0, vals.1)) @@ -373,17 +378,14 @@ impl Decode for DigestItem { DigestItemType::Consensus => { let vals: (ConsensusEngineId, Vec) = Decode::decode(input)?; Ok(Self::Consensus(vals.0, vals.1)) - } + }, DigestItemType::Seal => { let vals: (ConsensusEngineId, Vec) = Decode::decode(input)?; Ok(Self::Seal(vals.0, vals.1)) }, - DigestItemType::ChangesTrieSignal => Ok(Self::ChangesTrieSignal( - Decode::decode(input)?, - )), - DigestItemType::Other => Ok(Self::Other( - Decode::decode(input)?, - )), + DigestItemType::ChangesTrieSignal => + Ok(Self::ChangesTrieSignal(Decode::decode(input)?)), + DigestItemType::Other => Ok(Self::Other(Decode::decode(input)?)), } } } @@ -442,9 +444,10 @@ impl<'a, 
Hash> DigestItemRef<'a, Hash> { pub fn try_as_raw(&self, id: OpaqueDigestItemId) -> Option<&'a [u8]> { match (id, self) { (OpaqueDigestItemId::Consensus(w), &Self::Consensus(v, s)) | - (OpaqueDigestItemId::Seal(w), &Self::Seal(v, s)) | - (OpaqueDigestItemId::PreRuntime(w), &Self::PreRuntime(v, s)) - if v == w => Some(&s[..]), + (OpaqueDigestItemId::Seal(w), &Self::Seal(v, s)) | + (OpaqueDigestItemId::PreRuntime(w), &Self::PreRuntime(v, s)) + if v == w => + Some(&s[..]), (OpaqueDigestItemId::Other, &Self::Other(s)) => Some(&s[..]), _ => None, } @@ -461,8 +464,7 @@ impl<'a, Hash> DigestItemRef<'a, Hash> { /// Returns `None` if this isn't a seal item, the `id` doesn't match or when the decoding fails. pub fn seal_try_to(&self, id: &ConsensusEngineId) -> Option { match self { - Self::Seal(v, s) if *v == id => - Decode::decode(&mut &s[..]).ok(), + Self::Seal(v, s) if *v == id => Decode::decode(&mut &s[..]).ok(), _ => None, } } @@ -473,8 +475,7 @@ impl<'a, Hash> DigestItemRef<'a, Hash> { /// when the decoding fails. pub fn consensus_try_to(&self, id: &ConsensusEngineId) -> Option { match self { - Self::Consensus(v, s) if *v == id => - Decode::decode(&mut &s[..]).ok(), + Self::Consensus(v, s) if *v == id => Decode::decode(&mut &s[..]).ok(), _ => None, } } @@ -485,8 +486,7 @@ impl<'a, Hash> DigestItemRef<'a, Hash> { /// when the decoding fails. 
pub fn pre_runtime_try_to(&self, id: &ConsensusEngineId) -> Option { match self { - Self::PreRuntime(v, s) if *v == id => - Decode::decode(&mut &s[..]).ok(), + Self::PreRuntime(v, s) if *v == id => Decode::decode(&mut &s[..]).ok(), _ => None, } } @@ -548,7 +548,7 @@ mod tests { logs: vec![ DigestItem::ChangesTrieRoot(4), DigestItem::Other(vec![1, 2, 3]), - DigestItem::Seal(*b"test", vec![1, 2, 3]) + DigestItem::Seal(*b"test", vec![1, 2, 3]), ], }; @@ -561,34 +561,35 @@ mod tests { #[test] fn digest_item_type_info() { let type_info = DigestItem::::type_info(); - let variants = - if let scale_info::TypeDef::Variant(variant) = type_info.type_def() { - variant.variants() - } else { - panic!("Should be a TypeDef::TypeDefVariant") - }; + let variants = if let scale_info::TypeDef::Variant(variant) = type_info.type_def() { + variant.variants() + } else { + panic!("Should be a TypeDef::TypeDefVariant") + }; // ensure that all variants are covered by manual TypeInfo impl let check = |digest_item_type: DigestItemType| { - let (variant_name, digest_item) = - match digest_item_type { - DigestItemType::Other => - ("Other", DigestItem::::Other(Default::default())), - DigestItemType::ChangesTrieRoot => - ("ChangesTrieRoot", DigestItem::ChangesTrieRoot(Default::default())), - DigestItemType::Consensus => - ("Consensus", DigestItem::Consensus(Default::default(), Default::default())), - DigestItemType::Seal => - ("Seal", DigestItem::Seal(Default::default(), Default::default())), - DigestItemType::PreRuntime => - ("PreRuntime", DigestItem::PreRuntime(Default::default(), Default::default())), - DigestItemType::ChangesTrieSignal => - ("ChangesTrieSignal", DigestItem::ChangesTrieSignal( - ChangesTrieSignal::NewConfiguration(Default::default()) - )), - }; + let (variant_name, digest_item) = match digest_item_type { + DigestItemType::Other => ("Other", DigestItem::::Other(Default::default())), + DigestItemType::ChangesTrieRoot => + ("ChangesTrieRoot", 
DigestItem::ChangesTrieRoot(Default::default())), + DigestItemType::Consensus => + ("Consensus", DigestItem::Consensus(Default::default(), Default::default())), + DigestItemType::Seal => + ("Seal", DigestItem::Seal(Default::default(), Default::default())), + DigestItemType::PreRuntime => + ("PreRuntime", DigestItem::PreRuntime(Default::default(), Default::default())), + DigestItemType::ChangesTrieSignal => ( + "ChangesTrieSignal", + DigestItem::ChangesTrieSignal(ChangesTrieSignal::NewConfiguration( + Default::default(), + )), + ), + }; let encoded = digest_item.encode(); - let variant = variants.iter().find(|v| v.name() == &variant_name) + let variant = variants + .iter() + .find(|v| v.name() == &variant_name) .expect(&format!("Variant {} not found", variant_name)); assert_eq!(encoded[0], variant.index()) diff --git a/primitives/runtime/src/generic/era.rs b/primitives/runtime/src/generic/era.rs index 6b365f221b3bd..06d0404a61f69 100644 --- a/primitives/runtime/src/generic/era.rs +++ b/primitives/runtime/src/generic/era.rs @@ -18,9 +18,9 @@ //! Generic implementation of an unchecked (pre-verification) extrinsic. #[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; -use crate::codec::{Decode, Encode, Input, Output, Error}; +use crate::codec::{Decode, Encode, Error, Input, Output}; /// Era period pub type Period = u64; @@ -64,10 +64,7 @@ impl Era { /// does not exceed `BlockHashCount` parameter passed to `system` module, since that /// prunes old blocks and renders transactions immediately invalid. 
pub fn mortal(period: u64, current: u64) -> Self { - let period = period.checked_next_power_of_two() - .unwrap_or(1 << 16) - .max(4) - .min(1 << 16); + let period = period.checked_next_power_of_two().unwrap_or(1 << 16).max(4).min(1 << 16); let phase = current % period; let quantize_factor = (period >> 12).max(1); let quantized_phase = phase / quantize_factor * quantize_factor; @@ -109,9 +106,10 @@ impl Encode for Era { Self::Immortal => output.push_byte(0), Self::Mortal(period, phase) => { let quantize_factor = (*period as u64 >> 12).max(1); - let encoded = (period.trailing_zeros() - 1).max(1).min(15) as u16 | ((phase / quantize_factor) << 4) as u16; + let encoded = (period.trailing_zeros() - 1).max(1).min(15) as u16 | + ((phase / quantize_factor) << 4) as u16; encoded.encode_to(output); - } + }, } } } @@ -154,28 +152,25 @@ impl scale_info::TypeInfo for Era { type Identity = Self; fn type_info() -> scale_info::Type { - let variants = scale_info::build::Variants::new() - .variant("Immortal", |v| v.index(0)); + let variants = scale_info::build::Variants::new().variant("Immortal", |v| v.index(0)); // this is necessary since the size of the encoded Mortal variant is `u16`, conditional on // the value of the first byte being > 0. 
- let variants = mortal_variants!(variants, - 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, - 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, - 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, - 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, - 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, - 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, - 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, - 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, - 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, - 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, - 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, - 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, - 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, - 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, - 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255 + let variants = mortal_variants!( + variants, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, + 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, + 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, + 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, + 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, + 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, + 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, + 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, + 159, 160, 
161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, + 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, + 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, + 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, + 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, + 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255 ); scale_info::Type::builder() @@ -200,7 +195,7 @@ mod tests { assert!(e.is_immortal()); assert_eq!(e.encode(), vec![0u8]); - assert_eq!(e, Era::decode(&mut&[0u8][..]).unwrap()); + assert_eq!(e, Era::decode(&mut &[0u8][..]).unwrap()); } #[test] @@ -210,7 +205,7 @@ mod tests { let expected = vec![5 + 42 % 16 * 16, 42 / 16]; assert_eq!(e.encode(), expected); - assert_eq!(e, Era::decode(&mut&expected[..]).unwrap()); + assert_eq!(e, Era::decode(&mut &expected[..]).unwrap()); } #[test] @@ -219,7 +214,7 @@ mod tests { let expected = vec![(14 + 2500 % 16 * 16) as u8, (2500 / 16) as u8]; assert_eq!(e.encode(), expected); - assert_eq!(e, Era::decode(&mut&expected[..]).unwrap()); + assert_eq!(e, Era::decode(&mut &expected[..]).unwrap()); } #[test] diff --git a/primitives/runtime/src/generic/header.rs b/primitives/runtime/src/generic/header.rs index d924415f926d9..bd9961a68af2c 100644 --- a/primitives/runtime/src/generic/header.rs +++ b/primitives/runtime/src/generic/header.rs @@ -17,21 +17,19 @@ //! Generic implementation of a block header. 
+use crate::{ + codec::{Codec, Decode, Encode, EncodeAsRef, Error, HasCompact, Input, Output}, + generic::Digest, + scale_info::TypeInfo, + traits::{ + self, AtLeast32BitUnsigned, Hash as HashT, MaybeDisplay, MaybeMallocSizeOf, MaybeSerialize, + MaybeSerializeDeserialize, Member, SimpleBitOps, + }, +}; #[cfg(feature = "std")] use serde::{Deserialize, Serialize}; -use crate::codec::{Decode, Encode, Codec, Input, Output, HasCompact, EncodeAsRef, Error}; -use crate::scale_info::TypeInfo; -use crate::traits::{ - self, Member, AtLeast32BitUnsigned, SimpleBitOps, Hash as HashT, - MaybeSerializeDeserialize, MaybeSerialize, MaybeDisplay, - MaybeMallocSizeOf, -}; -use crate::generic::Digest; use sp_core::U256; -use sp_std::{ - convert::TryFrom, - fmt::Debug, -}; +use sp_std::{convert::TryFrom, fmt::Debug}; /// Abstraction over a block header for a substrate chain. #[derive(PartialEq, Eq, Clone, sp_core::RuntimeDebug)] @@ -42,9 +40,10 @@ pub struct Header + TryFrom, Hash: HashT> { /// The parent hash. pub parent_hash: Hash::Output, /// The block number. 
- #[cfg_attr(feature = "std", serde( - serialize_with = "serialize_number", - deserialize_with = "deserialize_number"))] + #[cfg_attr( + feature = "std", + serde(serialize_with = "serialize_number", deserialize_with = "deserialize_number") + )] pub number: Number, /// The state trie merkle root pub state_root: Hash::Output, @@ -72,21 +71,27 @@ where #[cfg(feature = "std")] pub fn serialize_number + TryFrom>( - val: &T, s: S, -) -> Result where S: serde::Serializer { + val: &T, + s: S, +) -> Result +where + S: serde::Serializer, +{ let u256: U256 = (*val).into(); serde::Serialize::serialize(&u256, s) } #[cfg(feature = "std")] -pub fn deserialize_number<'a, D, T: Copy + Into + TryFrom>( - d: D, -) -> Result where D: serde::Deserializer<'a> { +pub fn deserialize_number<'a, D, T: Copy + Into + TryFrom>(d: D) -> Result +where + D: serde::Deserializer<'a>, +{ let u256: U256 = serde::Deserialize::deserialize(d)?; TryFrom::try_from(u256).map_err(|_| serde::de::Error::custom("Try from failed")) } -impl Decode for Header where +impl Decode for Header +where Number: HasCompact + Copy + Into + TryFrom, Hash: HashT, Hash::Output: Decode, @@ -102,21 +107,24 @@ impl Decode for Header where } } -impl Encode for Header where +impl Encode for Header +where Number: HasCompact + Copy + Into + TryFrom, Hash: HashT, Hash::Output: Encode, { fn encode_to(&self, dest: &mut T) { self.parent_hash.encode_to(dest); - <<::Type as EncodeAsRef<_>>::RefType>::from(&self.number).encode_to(dest); + <<::Type as EncodeAsRef<_>>::RefType>::from(&self.number) + .encode_to(dest); self.state_root.encode_to(dest); self.extrinsics_root.encode_to(dest); self.digest.encode_to(dest); } } -impl TypeInfo for Header where +impl TypeInfo for Header +where Number: HasCompact + Copy + Into + TryFrom + TypeInfo + 'static, Hash: HashT, Hash::Output: TypeInfo, @@ -127,57 +135,93 @@ impl TypeInfo for Header where scale_info::Type::builder() .path(scale_info::Path::new("Header", module_path!())) .docs(&["Abstraction over 
a block header for a substrate chain."]) - .composite(scale_info::build::Fields::named() - .field(|f| f - .name("parent_hash").ty::().type_name("Hash::Output") - ) - .field(|f| f - .name("number").compact::().type_name("Number") - ) - .field(|f| f - .name("state_root").ty::().type_name("Hash::Output") - ) - .field(|f| f - .name("extrinsics_root").ty::().type_name("Hash::Output") - ) - .field(|f| f - .name("digest").ty::>().type_name("Digest") - ) + .composite( + scale_info::build::Fields::named() + .field(|f| f.name("parent_hash").ty::().type_name("Hash::Output")) + .field(|f| f.name("number").compact::().type_name("Number")) + .field(|f| f.name("state_root").ty::().type_name("Hash::Output")) + .field(|f| { + f.name("extrinsics_root").ty::().type_name("Hash::Output") + }) + .field(|f| { + f.name("digest") + .ty::>() + .type_name("Digest") + }), ) } } -impl codec::EncodeLike for Header where +impl codec::EncodeLike for Header +where Number: HasCompact + Copy + Into + TryFrom, Hash: HashT, Hash::Output: Encode, -{} +{ +} -impl traits::Header for Header where - Number: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash + MaybeDisplay + - AtLeast32BitUnsigned + Codec + Copy + Into + TryFrom + sp_std::str::FromStr + - MaybeMallocSizeOf, +impl traits::Header for Header +where + Number: Member + + MaybeSerializeDeserialize + + Debug + + sp_std::hash::Hash + + MaybeDisplay + + AtLeast32BitUnsigned + + Codec + + Copy + + Into + + TryFrom + + sp_std::str::FromStr + + MaybeMallocSizeOf, Hash: HashT, - Hash::Output: Default + sp_std::hash::Hash + Copy + Member + Ord + - MaybeSerialize + Debug + MaybeDisplay + SimpleBitOps + Codec + MaybeMallocSizeOf, + Hash::Output: Default + + sp_std::hash::Hash + + Copy + + Member + + Ord + + MaybeSerialize + + Debug + + MaybeDisplay + + SimpleBitOps + + Codec + + MaybeMallocSizeOf, { type Number = Number; type Hash = ::Output; type Hashing = Hash; - fn number(&self) -> &Self::Number { &self.number } - fn set_number(&mut self, 
num: Self::Number) { self.number = num } + fn number(&self) -> &Self::Number { + &self.number + } + fn set_number(&mut self, num: Self::Number) { + self.number = num + } - fn extrinsics_root(&self) -> &Self::Hash { &self.extrinsics_root } - fn set_extrinsics_root(&mut self, root: Self::Hash) { self.extrinsics_root = root } + fn extrinsics_root(&self) -> &Self::Hash { + &self.extrinsics_root + } + fn set_extrinsics_root(&mut self, root: Self::Hash) { + self.extrinsics_root = root + } - fn state_root(&self) -> &Self::Hash { &self.state_root } - fn set_state_root(&mut self, root: Self::Hash) { self.state_root = root } + fn state_root(&self) -> &Self::Hash { + &self.state_root + } + fn set_state_root(&mut self, root: Self::Hash) { + self.state_root = root + } - fn parent_hash(&self) -> &Self::Hash { &self.parent_hash } - fn set_parent_hash(&mut self, hash: Self::Hash) { self.parent_hash = hash } + fn parent_hash(&self) -> &Self::Hash { + &self.parent_hash + } + fn set_parent_hash(&mut self, hash: Self::Hash) { + self.parent_hash = hash + } - fn digest(&self) -> &Digest { &self.digest } + fn digest(&self) -> &Digest { + &self.digest + } fn digest_mut(&mut self) -> &mut Digest { #[cfg(feature = "std")] @@ -192,22 +236,24 @@ impl traits::Header for Header where parent_hash: Self::Hash, digest: Digest, ) -> Self { - Self { - number, - extrinsics_root, - state_root, - parent_hash, - digest, - } + Self { number, extrinsics_root, state_root, parent_hash, digest } } } -impl Header where - Number: Member + sp_std::hash::Hash + Copy + MaybeDisplay + AtLeast32BitUnsigned + Codec + - Into + TryFrom, +impl Header +where + Number: Member + + sp_std::hash::Hash + + Copy + + MaybeDisplay + + AtLeast32BitUnsigned + + Codec + + Into + + TryFrom, Hash: HashT, - Hash::Output: Default + sp_std::hash::Hash + Copy + Member + MaybeDisplay + SimpleBitOps + Codec, - { + Hash::Output: + Default + sp_std::hash::Hash + Copy + Member + MaybeDisplay + SimpleBitOps + Codec, +{ /// Convenience helper 
for computing the hash of the header without having /// to import the trait. pub fn hash(&self) -> Hash::Output { diff --git a/primitives/runtime/src/generic/mod.rs b/primitives/runtime/src/generic/mod.rs index c4b28a06c901f..deaecd65e478a 100644 --- a/primitives/runtime/src/generic/mod.rs +++ b/primitives/runtime/src/generic/mod.rs @@ -19,22 +19,22 @@ //! Generic implementations of Extrinsic/Header/Block. // end::description[] -mod unchecked_extrinsic; -mod era; -mod checked_extrinsic; -mod header; mod block; +mod checked_extrinsic; mod digest; +mod era; +mod header; #[cfg(test)] mod tests; +mod unchecked_extrinsic; -pub use self::unchecked_extrinsic::{UncheckedExtrinsic, SignedPayload}; -pub use self::era::{Era, Phase}; -pub use self::checked_extrinsic::CheckedExtrinsic; -pub use self::header::Header; -pub use self::block::{Block, SignedBlock, BlockId}; -pub use self::digest::{ - Digest, DigestItem, DigestItemRef, OpaqueDigestItemId, ChangesTrieSignal, +pub use self::{ + block::{Block, BlockId, SignedBlock}, + checked_extrinsic::CheckedExtrinsic, + digest::{ChangesTrieSignal, Digest, DigestItem, DigestItemRef, OpaqueDigestItemId}, + era::{Era, Phase}, + header::Header, + unchecked_extrinsic::{SignedPayload, UncheckedExtrinsic}, }; use crate::codec::Encode; diff --git a/primitives/runtime/src/generic/tests.rs b/primitives/runtime/src/generic/tests.rs index ec31e7de48524..f4937603f11df 100644 --- a/primitives/runtime/src/generic/tests.rs +++ b/primitives/runtime/src/generic/tests.rs @@ -17,27 +17,23 @@ //! Tests for the generic implementations of Extrinsic/Header/Block. 
+use super::DigestItem; use crate::codec::{Decode, Encode}; use sp_core::H256; -use super::DigestItem; #[test] fn system_digest_item_encoding() { let item = DigestItem::ChangesTrieRoot::(H256::default()); let encoded = item.encode(); - assert_eq!(encoded, vec![ - // type = DigestItemType::ChangesTrieRoot - 2, - // trie root - 0, 0, 0, 0, - 0, 0, 0, 0, - 0, 0, 0, 0, - 0, 0, 0, 0, - 0, 0, 0, 0, - 0, 0, 0, 0, - 0, 0, 0, 0, - 0, 0, 0, 0, - ]); + assert_eq!( + encoded, + vec![ + // type = DigestItemType::ChangesTrieRoot + 2, // trie root + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, + ] + ); let decoded: DigestItem = Decode::decode(&mut &encoded[..]).unwrap(); assert_eq!(item, decoded); @@ -47,14 +43,15 @@ fn system_digest_item_encoding() { fn non_system_digest_item_encoding() { let item = DigestItem::Other::(vec![10, 20, 30]); let encoded = item.encode(); - assert_eq!(encoded, vec![ - // type = DigestItemType::Other - 0, - // length of other data - 12, - // authorities - 10, 20, 30, - ]); + assert_eq!( + encoded, + vec![ + // type = DigestItemType::Other + 0, // length of other data + 12, // authorities + 10, 20, 30, + ] + ); let decoded: DigestItem = Decode::decode(&mut &encoded[..]).unwrap(); assert_eq!(item, decoded); diff --git a/primitives/runtime/src/generic/unchecked_extrinsic.rs b/primitives/runtime/src/generic/unchecked_extrinsic.rs index d983afc77a425..50a09aea650e4 100644 --- a/primitives/runtime/src/generic/unchecked_extrinsic.rs +++ b/primitives/runtime/src/generic/unchecked_extrinsic.rs @@ -17,19 +17,19 @@ //! Generic implementation of an unchecked (pre-verification) extrinsic. 
-use sp_std::{fmt, prelude::*}; -use sp_io::hashing::blake2_256; -use codec::{Decode, Encode, EncodeLike, Input, Error}; -use scale_info::{build::Fields, TypeInfo, Type, Path}; use crate::{ + generic::CheckedExtrinsic, traits::{ - self, Member, MaybeDisplay, SignedExtension, Checkable, Extrinsic, ExtrinsicMetadata, - IdentifyAccount, + self, Checkable, Extrinsic, ExtrinsicMetadata, IdentifyAccount, MaybeDisplay, Member, + SignedExtension, }, - generic::CheckedExtrinsic, - transaction_validity::{TransactionValidityError, InvalidTransaction}, + transaction_validity::{InvalidTransaction, TransactionValidityError}, OpaqueExtrinsic, }; +use codec::{Decode, Encode, EncodeLike, Error, Input}; +use scale_info::{build::Fields, Path, Type, TypeInfo}; +use sp_io::hashing::blake2_256; +use sp_std::{fmt, prelude::*}; /// Current version of the [`UncheckedExtrinsic`] format. const EXTRINSIC_VERSION: u8 = 4; @@ -39,7 +39,7 @@ const EXTRINSIC_VERSION: u8 = 4; #[derive(PartialEq, Eq, Clone)] pub struct UncheckedExtrinsic where - Extra: SignedExtension + Extra: SignedExtension, { /// The signature, address, number of extrinsics have come before from /// the same signer and an era describing the longevity of this transaction, @@ -56,7 +56,7 @@ where impl TypeInfo for UncheckedExtrinsic where - Extra: SignedExtension + TypeInfo + Extra: SignedExtension + TypeInfo, { type Identity = UncheckedExtrinsic<(), (), (), Extra>; @@ -72,7 +72,7 @@ where impl parity_util_mem::MallocSizeOf for UncheckedExtrinsic where - Extra: SignedExtension + Extra: SignedExtension, { fn size_of(&self, _ops: &mut parity_util_mem::MallocSizeOfOps) -> usize { // Instantiated only in runtime. @@ -84,24 +84,13 @@ impl UncheckedExtrinsic { /// New instance of a signed extrinsic aka "transaction". 
- pub fn new_signed( - function: Call, - signed: Address, - signature: Signature, - extra: Extra - ) -> Self { - Self { - signature: Some((signed, signature, extra)), - function, - } + pub fn new_signed(function: Call, signed: Address, signature: Signature, extra: Extra) -> Self { + Self { signature: Some((signed, signature, extra)), function } } /// New instance of an unsigned extrinsic aka "inherent". pub fn new_unsigned(function: Call) -> Self { - Self { - signature: None, - function, - } + Self { signature: None, function } } } @@ -110,11 +99,7 @@ impl Extrinsic { type Call = Call; - type SignaturePayload = ( - Address, - Signature, - Extra, - ); + type SignaturePayload = (Address, Signature, Extra); fn is_signed(&self) -> Option { Some(self.signature.is_some()) @@ -129,18 +114,16 @@ impl Extrinsic } } -impl - Checkable -for - UncheckedExtrinsic +impl Checkable + for UncheckedExtrinsic where Address: Member + MaybeDisplay, Call: Encode + Member, Signature: Member + traits::Verify, - ::Signer: IdentifyAccount, - Extra: SignedExtension, + ::Signer: IdentifyAccount, + Extra: SignedExtension, AccountId: Member + MaybeDisplay, - Lookup: traits::Lookup, + Lookup: traits::Lookup, { type Checked = CheckedExtrinsic; @@ -154,23 +137,17 @@ where } let (function, extra, _) = raw_payload.deconstruct(); - CheckedExtrinsic { - signed: Some((signed, extra)), - function, - } - } - None => CheckedExtrinsic { - signed: None, - function: self.function, + CheckedExtrinsic { signed: Some((signed, extra)), function } }, + None => CheckedExtrinsic { signed: None, function: self.function }, }) } } impl ExtrinsicMetadata for UncheckedExtrinsic - where - Extra: SignedExtension, +where + Extra: SignedExtension, { const VERSION: u8 = EXTRINSIC_VERSION; type SignedExtensions = Extra; @@ -181,13 +158,10 @@ impl ExtrinsicMetadata /// Note that the payload that we sign to produce unchecked extrinsic signature /// is going to be different than the `SignaturePayload` - so the thing the extrinsic 
/// actually contains. -pub struct SignedPayload(( - Call, - Extra, - Extra::AdditionalSigned, -)); +pub struct SignedPayload((Call, Extra, Extra::AdditionalSigned)); -impl SignedPayload where +impl SignedPayload +where Call: Encode, Extra: SignedExtension, { @@ -211,7 +185,8 @@ impl SignedPayload where } } -impl Encode for SignedPayload where +impl Encode for SignedPayload +where Call: Encode, Extra: SignedExtension, { @@ -233,10 +208,10 @@ impl EncodeLike for SignedPayload where Call: Encode, Extra: SignedExtension, -{} +{ +} -impl Decode - for UncheckedExtrinsic +impl Decode for UncheckedExtrinsic where Address: Decode, Signature: Decode, @@ -255,7 +230,7 @@ where let is_signed = version & 0b1000_0000 != 0; let version = version & 0b0111_1111; if version != EXTRINSIC_VERSION { - return Err("Invalid transaction version".into()); + return Err("Invalid transaction version".into()) } Ok(Self { @@ -265,8 +240,7 @@ where } } -impl Encode - for UncheckedExtrinsic +impl Encode for UncheckedExtrinsic where Address: Encode, Signature: Encode, @@ -280,10 +254,10 @@ where Some(s) => { v.push(EXTRINSIC_VERSION | 0b1000_0000); s.encode_to(v); - } + }, None => { v.push(EXTRINSIC_VERSION & 0b0111_1111); - } + }, } self.function.encode_to(v); }) @@ -297,22 +271,27 @@ where Signature: Encode, Call: Encode, Extra: SignedExtension, -{} +{ +} #[cfg(feature = "std")] impl serde::Serialize for UncheckedExtrinsic { - fn serialize(&self, seq: S) -> Result where S: ::serde::Serializer { + fn serialize(&self, seq: S) -> Result + where + S: ::serde::Serializer, + { self.using_encoded(|bytes| seq.serialize_bytes(bytes)) } } #[cfg(feature = "std")] -impl<'a, Address: Decode, Signature: Decode, Call: Decode, Extra: SignedExtension> serde::Deserialize<'a> - for UncheckedExtrinsic +impl<'a, Address: Decode, Signature: Decode, Call: Decode, Extra: SignedExtension> + serde::Deserialize<'a> for UncheckedExtrinsic { - fn deserialize(de: D) -> Result where + fn deserialize(de: D) -> Result + where 
D: serde::Deserializer<'a>, { let r = sp_core::bytes::deserialize(de)?; @@ -347,21 +326,22 @@ where Extra: SignedExtension, { fn from(extrinsic: UncheckedExtrinsic) -> Self { - Self::from_bytes(extrinsic.encode().as_slice()) - .expect( - "both OpaqueExtrinsic and UncheckedExtrinsic have encoding that is compatible with \ - raw Vec encoding; qed" - ) + Self::from_bytes(extrinsic.encode().as_slice()).expect( + "both OpaqueExtrinsic and UncheckedExtrinsic have encoding that is compatible with \ + raw Vec encoding; qed", + ) } } #[cfg(test)] mod tests { use super::*; + use crate::{ + codec::{Decode, Encode}, + testing::TestSignature as TestSig, + traits::{IdentityLookup, SignedExtension}, + }; use sp_io::hashing::blake2_256; - use crate::codec::{Encode, Decode}; - use crate::traits::{SignedExtension, IdentityLookup}; - use crate::testing::TestSignature as TestSig; type TestContext = IdentityLookup; type TestAccountId = u64; @@ -370,7 +350,9 @@ mod tests { const TEST_ACCOUNT: TestAccountId = 0; // NOTE: this is demonstration. One can simply use `()` for testing. 
- #[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd, scale_info::TypeInfo)] + #[derive( + Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd, scale_info::TypeInfo, + )] struct TestExtra; impl SignedExtension for TestExtra { const IDENTIFIER: &'static str = "TestExtra"; @@ -379,7 +361,9 @@ mod tests { type AdditionalSigned = (); type Pre = (); - fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { Ok(()) } + fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { + Ok(()) + } } type Ex = UncheckedExtrinsic; @@ -398,7 +382,7 @@ mod tests { vec![0u8; 0], TEST_ACCOUNT, TestSig(TEST_ACCOUNT, (vec![0u8; 0], TestExtra).encode()), - TestExtra + TestExtra, ); let encoded = ux.encode(); assert_eq!(Ex::decode(&mut &encoded[..]), Ok(ux)); @@ -409,9 +393,11 @@ mod tests { let ux = Ex::new_signed( vec![0u8; 0], TEST_ACCOUNT, - TestSig(TEST_ACCOUNT, (vec![0u8; 257], TestExtra) - .using_encoded(blake2_256)[..].to_owned()), - TestExtra + TestSig( + TEST_ACCOUNT, + (vec![0u8; 257], TestExtra).using_encoded(blake2_256)[..].to_owned(), + ), + TestExtra, ); let encoded = ux.encode(); assert_eq!(Ex::decode(&mut &encoded[..]), Ok(ux)); diff --git a/primitives/runtime/src/lib.rs b/primitives/runtime/src/lib.rs index 3c252b136af10..2b40140ba047c 100644 --- a/primitives/runtime/src/lib.rs +++ b/primitives/runtime/src/lib.rs @@ -19,10 +19,10 @@ #![warn(missing_docs)] #![cfg_attr(not(feature = "std"), no_std)] - // to allow benchmarking #![cfg_attr(feature = "bench", feature(test))] -#[cfg(feature = "bench")] extern crate test; +#[cfg(feature = "bench")] +extern crate test; #[doc(hidden)] pub use codec; @@ -43,23 +43,27 @@ pub use sp_application_crypto as app_crypto; #[cfg(feature = "std")] pub use sp_core::storage::{Storage, StorageChild}; -use sp_std::prelude::*; -use sp_std::convert::TryFrom; -use sp_core::{crypto::{self, Public}, ed25519, sr25519, ecdsa, hash::{H256, H512}}; +use sp_core::{ + 
crypto::{self, Public}, + ecdsa, ed25519, + hash::{H256, H512}, + sr25519, +}; +use sp_std::{convert::TryFrom, prelude::*}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; pub mod curve; pub mod generic; +mod multiaddress; pub mod offchain; +pub mod runtime_logger; +mod runtime_string; #[cfg(feature = "std")] pub mod testing; pub mod traits; pub mod transaction_validity; -mod runtime_string; -mod multiaddress; -pub mod runtime_logger; pub use crate::runtime_string::*; @@ -67,25 +71,28 @@ pub use crate::runtime_string::*; pub use multiaddress::MultiAddress; /// Re-export these since they're only "kind of" generic. -pub use generic::{DigestItem, Digest}; +pub use generic::{Digest, DigestItem}; +pub use sp_application_crypto::{BoundToRuntimeAppPublic, RuntimeAppPublic}; /// Re-export this since it's part of the API of this crate. -pub use sp_core::{TypeId, crypto::{key_types, KeyTypeId, CryptoType, CryptoTypeId, AccountId32}}; -pub use sp_application_crypto::{RuntimeAppPublic, BoundToRuntimeAppPublic}; +pub use sp_core::{ + crypto::{key_types, AccountId32, CryptoType, CryptoTypeId, KeyTypeId}, + TypeId, +}; /// Re-export `RuntimeDebug`, to avoid dependency clutter. pub use sp_core::RuntimeDebug; +/// Re-export big_uint stuff. +pub use sp_arithmetic::biguint; +/// Re-export 128 bit helpers. +pub use sp_arithmetic::helpers_128bit; /// Re-export top-level arithmetic stuff. pub use sp_arithmetic::{ - PerThing, Perquintill, Perbill, Permill, Percent, PerU16, InnerOf, UpperOf, - Rational128, FixedI64, FixedI128, FixedU128, FixedPointNumber, FixedPointOperand, - traits::SaturatedConversion, + traits::SaturatedConversion, FixedI128, FixedI64, FixedPointNumber, FixedPointOperand, + FixedU128, InnerOf, PerThing, PerU16, Perbill, Percent, Permill, Perquintill, Rational128, + UpperOf, }; -/// Re-export 128 bit helpers. -pub use sp_arithmetic::helpers_128bit; -/// Re-export big_uint stuff. 
-pub use sp_arithmetic::biguint; pub use either::Either; @@ -122,7 +129,7 @@ impl Justifications { /// not inserted. pub fn append(&mut self, justification: Justification) -> bool { if self.get(justification.0).is_some() { - return false; + return false } self.0.push(justification); true @@ -156,11 +163,11 @@ impl From for Justifications { } } -use traits::{Verify, Lazy}; +use traits::{Lazy, Verify}; -#[cfg(feature = "std")] -pub use serde::{Serialize, Deserialize, de::DeserializeOwned}; use crate::traits::IdentifyAccount; +#[cfg(feature = "std")] +pub use serde::{de::DeserializeOwned, Deserialize, Serialize}; /// Complex storage builder stuff. #[cfg(feature = "std")] @@ -172,10 +179,7 @@ pub trait BuildStorage { Ok(storage) } /// Assimilate the storage for this module into pre-existing overlays. - fn assimilate_storage( - &self, - storage: &mut sp_core::storage::Storage, - ) -> Result<(), String>; + fn assimilate_storage(&self, storage: &mut sp_core::storage::Storage) -> Result<(), String>; } /// Something that can build the genesis storage of a module. 
@@ -190,17 +194,14 @@ pub trait BuildModuleGenesisStorage: Sized { #[cfg(feature = "std")] impl BuildStorage for sp_core::storage::Storage { - fn assimilate_storage( - &self, - storage: &mut sp_core::storage::Storage, - )-> Result<(), String> { + fn assimilate_storage(&self, storage: &mut sp_core::storage::Storage) -> Result<(), String> { storage.top.extend(self.top.iter().map(|(k, v)| (k.clone(), v.clone()))); for (k, other_map) in self.children_default.iter() { let k = k.clone(); if let Some(map) = storage.children_default.get_mut(&k) { map.data.extend(other_map.data.iter().map(|(k, v)| (k.clone(), v.clone()))); if !map.child_info.try_update(&other_map.child_info) { - return Err("Incompatible child info update".to_string()); + return Err("Incompatible child info update".to_string()) } } else { storage.children_default.insert(k, other_map.clone()); @@ -212,10 +213,7 @@ impl BuildStorage for sp_core::storage::Storage { #[cfg(feature = "std")] impl BuildStorage for () { - fn assimilate_storage( - &self, - _: &mut sp_core::storage::Storage, - ) -> Result<(), String> { + fn assimilate_storage(&self, _: &mut sp_core::storage::Storage) -> Result<(), String> { Err("`assimilate_storage` not implemented for `()`".into()) } } @@ -244,7 +242,11 @@ impl From for MultiSignature { impl TryFrom for ed25519::Signature { type Error = (); fn try_from(m: MultiSignature) -> Result { - if let MultiSignature::Ed25519(x) = m { Ok(x) } else { Err(()) } + if let MultiSignature::Ed25519(x) = m { + Ok(x) + } else { + Err(()) + } } } @@ -257,7 +259,11 @@ impl From for MultiSignature { impl TryFrom for sr25519::Signature { type Error = (); fn try_from(m: MultiSignature) -> Result { - if let MultiSignature::Sr25519(x) = m { Ok(x) } else { Err(()) } + if let MultiSignature::Sr25519(x) = m { + Ok(x) + } else { + Err(()) + } } } @@ -270,7 +276,11 @@ impl From for MultiSignature { impl TryFrom for ecdsa::Signature { type Error = (); fn try_from(m: MultiSignature) -> Result { - if let 
MultiSignature::Ecdsa(x) = m { Ok(x) } else { Err(()) } + if let MultiSignature::Ecdsa(x) = m { + Ok(x) + } else { + Err(()) + } } } @@ -336,7 +346,11 @@ impl From for MultiSigner { impl TryFrom for ed25519::Public { type Error = (); fn try_from(m: MultiSigner) -> Result { - if let MultiSigner::Ed25519(x) = m { Ok(x) } else { Err(()) } + if let MultiSigner::Ed25519(x) = m { + Ok(x) + } else { + Err(()) + } } } @@ -349,7 +363,11 @@ impl From for MultiSigner { impl TryFrom for sr25519::Public { type Error = (); fn try_from(m: MultiSigner) -> Result { - if let MultiSigner::Sr25519(x) = m { Ok(x) } else { Err(()) } + if let MultiSigner::Sr25519(x) = m { + Ok(x) + } else { + Err(()) + } } } @@ -362,7 +380,11 @@ impl From for MultiSigner { impl TryFrom for ecdsa::Public { type Error = (); fn try_from(m: MultiSigner) -> Result { - if let MultiSigner::Ecdsa(x) = m { Ok(x) } else { Err(()) } + if let MultiSigner::Ecdsa(x) = m { + Ok(x) + } else { + Err(()) + } } } @@ -381,17 +403,19 @@ impl Verify for MultiSignature { type Signer = MultiSigner; fn verify>(&self, mut msg: L, signer: &AccountId32) -> bool { match (self, signer) { - (Self::Ed25519(ref sig), who) => sig.verify(msg, &ed25519::Public::from_slice(who.as_ref())), - (Self::Sr25519(ref sig), who) => sig.verify(msg, &sr25519::Public::from_slice(who.as_ref())), + (Self::Ed25519(ref sig), who) => + sig.verify(msg, &ed25519::Public::from_slice(who.as_ref())), + (Self::Sr25519(ref sig), who) => + sig.verify(msg, &sr25519::Public::from_slice(who.as_ref())), (Self::Ecdsa(ref sig), who) => { let m = sp_io::hashing::blake2_256(msg.get()); match sp_io::crypto::secp256k1_ecdsa_recover_compressed(sig.as_ref(), &m) { Ok(pubkey) => - &sp_io::hashing::blake2_256(pubkey.as_ref()) - == >::as_ref(who), + &sp_io::hashing::blake2_256(pubkey.as_ref()) == + >::as_ref(who), _ => false, } - } + }, } } } @@ -407,10 +431,10 @@ impl Verify for AnySignature { let msg = msg.get(); sr25519::Signature::try_from(self.0.as_fixed_bytes().as_ref()) 
.map(|s| s.verify(msg, signer)) - .unwrap_or(false) - || ed25519::Signature::try_from(self.0.as_fixed_bytes().as_ref()) - .map(|s| s.verify(msg, &ed25519::Public::from_slice(signer.as_ref()))) - .unwrap_or(false) + .unwrap_or(false) || + ed25519::Signature::try_from(self.0.as_fixed_bytes().as_ref()) + .map(|s| s.verify(msg, &ed25519::Public::from_slice(signer.as_ref()))) + .unwrap_or(false) } } @@ -446,7 +470,11 @@ pub type DispatchResultWithInfo = sp_std::result::Result where - Info: Eq + PartialEq + Clone + Copy + Encode + Decode + traits::Printable +pub struct DispatchErrorWithPostInfo +where + Info: Eq + PartialEq + Clone + Copy + Encode + Decode + traits::Printable, { /// Additional information about the `Dispatchable` which is only known post dispatch. pub post_info: Info, @@ -488,22 +517,20 @@ impl DispatchError { /// Return the same error but without the attached message. pub fn stripped(self) -> Self { match self { - DispatchError::Module { index, error, message: Some(_) } - => DispatchError::Module { index, error, message: None }, + DispatchError::Module { index, error, message: Some(_) } => + DispatchError::Module { index, error, message: None }, m => m, } } } -impl From for DispatchErrorWithPostInfo where +impl From for DispatchErrorWithPostInfo +where T: Eq + PartialEq + Clone + Copy + Encode + Decode + traits::Printable + Default, - E: Into + E: Into, { fn from(error: E) -> Self { - Self { - post_info: Default::default(), - error: error.into(), - } + Self { post_info: Default::default(), error: error.into() } } } @@ -608,8 +635,9 @@ impl From for &'static str { } } -impl From> for &'static str where - T: Eq + PartialEq + Clone + Copy + Encode + Decode + traits::Printable +impl From> for &'static str +where + T: Eq + PartialEq + Clone + Copy + Encode + Decode + traits::Printable, { fn from(err: DispatchErrorWithPostInfo) -> &'static str { err.error.into() @@ -629,7 +657,7 @@ impl traits::Printable for DispatchError { if let Some(msg) = message { 
msg.print(); } - } + }, Self::ConsumerRemaining => "Consumer remaining".print(), Self::NoProviders => "No providers".print(), Self::Token(e) => { @@ -639,13 +667,14 @@ impl traits::Printable for DispatchError { Self::Arithmetic(e) => { "Arithmetic error: ".print(); <&'static str>::from(*e).print(); - } + }, } } } -impl traits::Printable for DispatchErrorWithPostInfo where - T: Eq + PartialEq + Clone + Copy + Encode + Decode + traits::Printable +impl traits::Printable for DispatchErrorWithPostInfo +where + T: Eq + PartialEq + Clone + Copy + Encode + Decode + traits::Printable, { fn print(&self) { self.error.print(); @@ -707,7 +736,8 @@ pub type DispatchOutcome = Result<(), DispatchError>; /// - The sender doesn't have enough funds to pay the transaction inclusion fee. Including such /// a transaction in the block doesn't make sense. /// - The extrinsic supplied a bad signature. This transaction won't become valid ever. -pub type ApplyExtrinsicResult = Result; +pub type ApplyExtrinsicResult = + Result; /// Same as `ApplyExtrinsicResult` but augmented with `PostDispatchInfo` on success. pub type ApplyExtrinsicResultWithInfo = @@ -718,7 +748,7 @@ pub type ApplyExtrinsicResultWithInfo = pub fn verify_encoded_lazy( sig: &V, item: &T, - signer: &::AccountId + signer: &::AccountId, ) -> bool { // The `Lazy` trait expresses something like `X: FnMut &'a T>`. // unfortunately this is a lifetime relationship that can't @@ -735,10 +765,7 @@ pub fn verify_encoded_lazy( } } - sig.verify( - LazyEncode { inner: || item.encode(), encoded: None }, - signer, - ) + sig.verify(LazyEncode { inner: || item.encode(), encoded: None }, signer) } /// Checks that `$x` is equal to `$y` with an error rate of `$error`. 
@@ -805,14 +832,20 @@ impl sp_std::fmt::Debug for OpaqueExtrinsic { #[cfg(feature = "std")] impl ::serde::Serialize for OpaqueExtrinsic { - fn serialize(&self, seq: S) -> Result where S: ::serde::Serializer { + fn serialize(&self, seq: S) -> Result + where + S: ::serde::Serializer, + { codec::Encode::using_encoded(&self.0, |bytes| ::sp_core::bytes::serialize(bytes, seq)) } } #[cfg(feature = "std")] impl<'a> ::serde::Deserialize<'a> for OpaqueExtrinsic { - fn deserialize(de: D) -> Result where D: ::serde::Deserializer<'a> { + fn deserialize(de: D) -> Result + where + D: ::serde::Deserializer<'a>, + { let r = ::sp_core::bytes::deserialize(de)?; Decode::decode(&mut &r[..]) .map_err(|e| ::serde::de::Error::custom(format!("Decode error: {}", e))) @@ -884,7 +917,7 @@ impl TransactionOutcome { #[cfg(test)] mod tests { use super::*; - use codec::{Encode, Decode}; + use codec::{Decode, Encode}; use sp_core::crypto::Pair; #[test] @@ -895,22 +928,11 @@ mod tests { #[test] fn dispatch_error_encoding() { - let error = DispatchError::Module { - index: 1, - error: 2, - message: Some("error message"), - }; + let error = DispatchError::Module { index: 1, error: 2, message: Some("error message") }; let encoded = error.encode(); let decoded = DispatchError::decode(&mut &encoded[..]).unwrap(); assert_eq!(encoded, vec![3, 1, 2]); - assert_eq!( - decoded, - DispatchError::Module { - index: 1, - error: 2, - message: None, - }, - ); + assert_eq!(decoded, DispatchError::Module { index: 1, error: 2, message: None },); } #[test] @@ -950,7 +972,7 @@ mod tests { // Ignores `message` field in `Module` variant. 
assert_eq!( Module { index: 1, error: 1, message: Some("foo") }, - Module { index: 1, error: 1, message: None}, + Module { index: 1, error: 1, message: None }, ); } @@ -974,17 +996,13 @@ mod tests { #[should_panic(expected = "Signature verification has not been called")] fn batching_still_finishes_when_not_called_directly() { let mut ext = sp_state_machine::BasicExternalities::default(); - ext.register_extension( - sp_core::traits::TaskExecutorExt::new(sp_core::testing::TaskExecutor::new()), - ); + ext.register_extension(sp_core::traits::TaskExecutorExt::new( + sp_core::testing::TaskExecutor::new(), + )); ext.execute_with(|| { let _batching = SignatureBatching::start(); - sp_io::crypto::sr25519_verify( - &Default::default(), - &Vec::new(), - &Default::default(), - ); + sp_io::crypto::sr25519_verify(&Default::default(), &Vec::new(), &Default::default()); }); } @@ -992,9 +1010,9 @@ mod tests { #[should_panic(expected = "Hey, I'm an error")] fn batching_does_not_panic_while_thread_is_already_panicking() { let mut ext = sp_state_machine::BasicExternalities::default(); - ext.register_extension( - sp_core::traits::TaskExecutorExt::new(sp_core::testing::TaskExecutor::new()), - ); + ext.register_extension(sp_core::traits::TaskExecutorExt::new( + sp_core::testing::TaskExecutor::new(), + )); ext.execute_with(|| { let _batching = SignatureBatching::start(); diff --git a/primitives/runtime/src/multiaddress.rs b/primitives/runtime/src/multiaddress.rs index 0cf1a66ffb02f..e95d16c8d397a 100644 --- a/primitives/runtime/src/multiaddress.rs +++ b/primitives/runtime/src/multiaddress.rs @@ -17,7 +17,7 @@ //! MultiAddress type is a wrapper for multiple downstream account formats. -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use sp_std::vec::Vec; /// A multi-format address wrapper for on-chain accounts. 
@@ -46,8 +46,10 @@ where use sp_core::hexdisplay::HexDisplay; match self { Self::Raw(inner) => write!(f, "MultiAddress::Raw({})", HexDisplay::from(inner)), - Self::Address32(inner) => write!(f, "MultiAddress::Address32({})", HexDisplay::from(inner)), - Self::Address20(inner) => write!(f, "MultiAddress::Address20({})", HexDisplay::from(inner)), + Self::Address32(inner) => + write!(f, "MultiAddress::Address32({})", HexDisplay::from(inner)), + Self::Address20(inner) => + write!(f, "MultiAddress::Address20({})", HexDisplay::from(inner)), _ => write!(f, "{:?}", self), } } diff --git a/primitives/runtime/src/offchain/http.rs b/primitives/runtime/src/offchain/http.rs index a346460897d58..50baad56f4a5f 100644 --- a/primitives/runtime/src/offchain/http.rs +++ b/primitives/runtime/src/offchain/http.rs @@ -48,17 +48,15 @@ //! assert_eq!(body.error(), &None); //! ``` -use sp_std::str; -use sp_std::prelude::Vec; +use sp_core::{ + offchain::{ + HttpError, HttpRequestId as RequestId, HttpRequestStatus as RequestStatus, Timestamp, + }, + RuntimeDebug, +}; #[cfg(not(feature = "std"))] use sp_std::prelude::vec; -use sp_core::RuntimeDebug; -use sp_core::offchain::{ - Timestamp, - HttpRequestId as RequestId, - HttpRequestStatus as RequestStatus, - HttpError, -}; +use sp_std::{prelude::Vec, str}; /// Request method (HTTP verb) #[derive(Clone, PartialEq, Eq, RuntimeDebug)] @@ -103,10 +101,7 @@ mod header { impl Header { /// Creates new header given it's name and value. pub fn new(name: &str, value: &str) -> Self { - Header { - name: name.as_bytes().to_vec(), - value: value.as_bytes().to_vec(), - } + Header { name: name.as_bytes().to_vec(), value: value.as_bytes().to_vec() } } /// Returns the name of this header. 
@@ -166,13 +161,7 @@ impl<'a, T> Request<'a, T> { pub fn post(url: &'a str, body: T) -> Self { let req: Request = Request::default(); - Request { - url, - body, - method: Method::Post, - headers: req.headers, - deadline: req.deadline, - } + Request { url, body, method: Method::Post, headers: req.headers, deadline: req.deadline } } } @@ -213,7 +202,7 @@ impl<'a, T: Default> Request<'a, T> { } } -impl<'a, I: AsRef<[u8]>, T: IntoIterator> Request<'a, T> { +impl<'a, I: AsRef<[u8]>, T: IntoIterator> Request<'a, T> { /// Send the request and return a handle. /// /// Err is returned in case the deadline is reached @@ -222,19 +211,13 @@ impl<'a, I: AsRef<[u8]>, T: IntoIterator> Request<'a, T> { let meta = &[]; // start an http request. - let id = sp_io::offchain::http_request_start( - self.method.as_ref(), - self.url, - meta, - ).map_err(|_| HttpError::IoError)?; + let id = sp_io::offchain::http_request_start(self.method.as_ref(), self.url, meta) + .map_err(|_| HttpError::IoError)?; // add custom headers for header in &self.headers { - sp_io::offchain::http_request_add_header( - id, - header.name(), - header.value(), - ).map_err(|_| HttpError::IoError)? + sp_io::offchain::http_request_add_header(id, header.name(), header.value()) + .map_err(|_| HttpError::IoError)? } // write body @@ -245,9 +228,7 @@ impl<'a, I: AsRef<[u8]>, T: IntoIterator> Request<'a, T> { // finalize the request sp_io::offchain::http_request_write_body(id, &[], self.deadline)?; - Ok(PendingRequest { - id, - }) + Ok(PendingRequest { id }) } } @@ -285,8 +266,13 @@ impl PendingRequest { /// Attempts to wait for the request to finish, /// but will return `Err` in case the deadline is reached. 
- pub fn try_wait(self, deadline: impl Into>) -> Result { - Self::try_wait_all(vec![self], deadline).pop().expect("One request passed, one status received; qed") + pub fn try_wait( + self, + deadline: impl Into>, + ) -> Result { + Self::try_wait_all(vec![self], deadline) + .pop() + .expect("One request passed, one status received; qed") } /// Wait for all provided requests. @@ -305,7 +291,7 @@ impl PendingRequest { /// Requests that are complete will resolve to an `Ok` others will return a `DeadlineReached` error. pub fn try_wait_all( requests: Vec, - deadline: impl Into> + deadline: impl Into>, ) -> Vec> { let ids = requests.iter().map(|r| r.id).collect::>(); let statuses = sp_io::offchain::http_response_wait(&ids, deadline.into()); @@ -336,19 +322,13 @@ pub struct Response { impl Response { fn new(id: RequestId, code: u16) -> Self { - Self { - id, - code, - headers: None, - } + Self { id, code, headers: None } } /// Retrieve the headers for this response. pub fn headers(&mut self) -> &Headers { if self.headers.is_none() { - self.headers = Some( - Headers { raw: sp_io::offchain::http_response_headers(self.id) }, - ); + self.headers = Some(Headers { raw: sp_io::offchain::http_response_headers(self.id) }); } self.headers.as_ref().expect("Headers were just set; qed") } @@ -423,32 +403,30 @@ impl Iterator for ResponseBody { fn next(&mut self) -> Option { if self.error.is_some() { - return None; + return None } if self.filled_up_to.is_none() { - let result = sp_io::offchain::http_response_read_body( - self.id, - &mut self.buffer, - self.deadline); + let result = + sp_io::offchain::http_response_read_body(self.id, &mut self.buffer, self.deadline); match result { Err(e) => { self.error = Some(e); - return None; - } + return None + }, Ok(0) => { - return None; - } + return None + }, Ok(size) => { self.position = 0; self.filled_up_to = Some(size as usize); - } + }, } } if Some(self.position) == self.filled_up_to { self.filled_up_to = None; - return self.next(); + return 
self.next() } let result = self.buffer[self.position]; @@ -508,7 +486,8 @@ impl<'a> HeadersIterator<'a> { /// /// Note that you have to call `next` prior to calling this pub fn current(&self) -> Option<(&str, &str)> { - self.collection.get(self.index?) + self.collection + .get(self.index?) .map(|val| (str::from_utf8(&val.0).unwrap_or(""), str::from_utf8(&val.1).unwrap_or(""))) } } @@ -516,11 +495,8 @@ impl<'a> HeadersIterator<'a> { #[cfg(test)] mod tests { use super::*; + use sp_core::offchain::{testing, OffchainWorkerExt}; use sp_io::TestExternalities; - use sp_core::offchain::{ - OffchainWorkerExt, - testing, - }; #[test] fn should_send_a_basic_request_and_get_response() { @@ -530,10 +506,7 @@ mod tests { t.execute_with(|| { let request: Request = Request::get("http://localhost:1234"); - let pending = request - .add_header("X-Auth", "hunter2") - .send() - .unwrap(); + let pending = request.add_header("X-Auth", "hunter2").send().unwrap(); // make sure it's sent correctly state.write().fulfill_pending_request( 0, diff --git a/primitives/runtime/src/offchain/storage.rs b/primitives/runtime/src/offchain/storage.rs index c6ed10c5be26f..3bc5b10f161f7 100644 --- a/primitives/runtime/src/offchain/storage.rs +++ b/primitives/runtime/src/offchain/storage.rs @@ -44,7 +44,7 @@ pub enum MutateStorageError { /// The function given to us to create the value to be stored failed. /// May be used to signal that having looked at the existing value, /// they don't want to mutate it. - ValueFunctionFailed(E) + ValueFunctionFailed(E), } impl<'a> StorageValueRef<'a> { @@ -64,9 +64,7 @@ impl<'a> StorageValueRef<'a> { /// if you happen to write a `get-check-set` pattern you should most likely /// be using `mutate` instead. 
pub fn set(&self, value: &impl codec::Encode) { - value.using_encoded(|val| { - sp_io::offchain::local_storage_set(self.kind, self.key, val) - }) + value.using_encoded(|val| sp_io::offchain::local_storage_set(self.kind, self.key, val)) } /// Remove the associated value from the storage. @@ -83,8 +81,7 @@ impl<'a> StorageValueRef<'a> { /// Returns an error if the value could not be decoded. pub fn get(&self) -> Result, StorageRetrievalError> { sp_io::offchain::local_storage_get(self.kind, self.key) - .map(|val| T::decode(&mut &*val) - .map_err(|_| StorageRetrievalError::Undecodable)) + .map(|val| T::decode(&mut &*val).map_err(|_| StorageRetrievalError::Undecodable)) .transpose() } @@ -98,26 +95,22 @@ impl<'a> StorageValueRef<'a> { /// 2. `Err(MutateStorageError::ConcurrentModification(T))` in case the value was calculated /// by the passed closure `mutate_val`, but it could not be stored. /// 3. `Err(MutateStorageError::ValueFunctionFailed(_))` in case `mutate_val` returns an error. - pub fn mutate(&self, mutate_val: F) -> Result> where + pub fn mutate(&self, mutate_val: F) -> Result> + where T: codec::Codec, - F: FnOnce(Result, StorageRetrievalError>) -> Result + F: FnOnce(Result, StorageRetrievalError>) -> Result, { let value = sp_io::offchain::local_storage_get(self.kind, self.key); - let decoded = value.as_deref() - .map(|mut bytes| { - T::decode(&mut bytes) - .map_err(|_| StorageRetrievalError::Undecodable) - }).transpose(); + let decoded = value + .as_deref() + .map(|mut bytes| T::decode(&mut bytes).map_err(|_| StorageRetrievalError::Undecodable)) + .transpose(); - let val = mutate_val(decoded).map_err(|err| MutateStorageError::ValueFunctionFailed(err))?; + let val = + mutate_val(decoded).map_err(|err| MutateStorageError::ValueFunctionFailed(err))?; let set = val.using_encoded(|new_val| { - sp_io::offchain::local_storage_compare_and_set( - self.kind, - self.key, - value, - new_val, - ) + sp_io::offchain::local_storage_compare_and_set(self.kind, self.key, 
value, new_val) }); if set { Ok(val) @@ -130,11 +123,8 @@ impl<'a> StorageValueRef<'a> { #[cfg(test)] mod tests { use super::*; + use sp_core::offchain::{testing, OffchainDbExt}; use sp_io::TestExternalities; - use sp_core::offchain::{ - OffchainDbExt, - testing, - }; #[test] fn should_set_and_get() { @@ -151,10 +141,7 @@ mod tests { assert_eq!(val.get::(), Ok(Some(15_u32))); assert_eq!(val.get::>(), Err(StorageRetrievalError::Undecodable)); - assert_eq!( - state.read().persistent_storage.get(b"testval"), - Some(vec![15_u8, 0, 0, 0]) - ); + assert_eq!(state.read().persistent_storage.get(b"testval"), Some(vec![15_u8, 0, 0, 0])); }) } @@ -174,10 +161,7 @@ mod tests { }); assert_eq!(result, Ok(16_u32)); assert_eq!(val.get::(), Ok(Some(16_u32))); - assert_eq!( - state.read().persistent_storage.get(b"testval"), - Some(vec![16_u8, 0, 0, 0]) - ); + assert_eq!(state.read().persistent_storage.get(b"testval"), Some(vec![16_u8, 0, 0, 0])); // mutate again, but this time early-exit. let res = val.mutate::(|val| { diff --git a/primitives/runtime/src/offchain/storage_lock.rs b/primitives/runtime/src/offchain/storage_lock.rs index 7ea52775c5e05..b541f726f73bf 100644 --- a/primitives/runtime/src/offchain/storage_lock.rs +++ b/primitives/runtime/src/offchain/storage_lock.rs @@ -61,8 +61,10 @@ //! } //! 
``` -use crate::offchain::storage::{StorageRetrievalError, MutateStorageError, StorageValueRef}; -use crate::traits::BlockNumberProvider; +use crate::{ + offchain::storage::{MutateStorageError, StorageRetrievalError, StorageValueRef}, + traits::BlockNumberProvider, +}; use codec::{Codec, Decode, Encode}; use sp_core::offchain::{Duration, Timestamp}; use sp_io::offchain; @@ -115,9 +117,7 @@ pub struct Time { impl Default for Time { fn default() -> Self { - Self { - expiration_duration: STORAGE_LOCK_DEFAULT_EXPIRY_DURATION, - } + Self { expiration_duration: STORAGE_LOCK_DEFAULT_EXPIRY_DURATION } } } @@ -157,10 +157,7 @@ pub struct BlockAndTimeDeadline { impl Clone for BlockAndTimeDeadline { fn clone(&self) -> Self { - Self { - block_number: self.block_number.clone(), - timestamp: self.timestamp, - } + Self { block_number: self.block_number.clone(), timestamp: self.timestamp } } } @@ -175,7 +172,8 @@ impl Default for BlockAndTimeDeadline { } impl fmt::Debug for BlockAndTimeDeadline - where ::BlockNumber: fmt::Debug +where + ::BlockNumber: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("BlockAndTimeDeadline") @@ -225,8 +223,8 @@ impl Lockable for BlockAndTime { type Deadline = BlockAndTimeDeadline; fn deadline(&self) -> Self::Deadline { - let block_number = ::current_block_number() - + self.expiration_block_number_offset.into(); + let block_number = ::current_block_number() + + self.expiration_block_number_offset.into(); BlockAndTimeDeadline { timestamp: offchain::timestamp().add(self.expiration_duration), block_number, @@ -234,8 +232,8 @@ impl Lockable for BlockAndTime { } fn has_expired(deadline: &Self::Deadline) -> bool { - offchain::timestamp() > deadline.timestamp - && ::current_block_number() > deadline.block_number + offchain::timestamp() > deadline.timestamp && + ::current_block_number() > deadline.block_number } fn snooze(deadline: &Self::Deadline) { @@ -271,10 +269,7 @@ impl<'a, L: Lockable + Default> StorageLock<'a, 
L> { impl<'a, L: Lockable> StorageLock<'a, L> { /// Create a new storage lock with an explicit instance of a lockable `L`. pub fn with_lockable(key: &'a [u8], lockable: L) -> Self { - Self { - value_ref: StorageValueRef::<'a>::persistent(key), - lockable, - } + Self { value_ref: StorageValueRef::<'a>::persistent(key), lockable } } /// Extend active lock's deadline @@ -398,9 +393,7 @@ impl<'a> StorageLock<'a, Time> { pub fn with_deadline(key: &'a [u8], expiration_duration: Duration) -> Self { Self { value_ref: StorageValueRef::<'a>::persistent(key), - lockable: Time { - expiration_duration, - }, + lockable: Time { expiration_duration }, } } } @@ -443,7 +436,7 @@ where #[cfg(test)] mod tests { use super::*; - use sp_core::offchain::{testing, OffchainWorkerExt, OffchainDbExt}; + use sp_core::offchain::{testing, OffchainDbExt, OffchainWorkerExt}; use sp_io::TestExternalities; const VAL_1: u32 = 0u32; diff --git a/primitives/runtime/src/runtime_logger.rs b/primitives/runtime/src/runtime_logger.rs index f74704390174d..ff0e531ed814f 100644 --- a/primitives/runtime/src/runtime_logger.rs +++ b/primitives/runtime/src/runtime_logger.rs @@ -57,11 +57,7 @@ impl log::Log for RuntimeLogger { let mut w = sp_std::Writer::default(); let _ = ::core::write!(&mut w, "{}", record.args()); - sp_io::logging::log( - record.level().into(), - record.target(), - w.inner(), - ); + sp_io::logging::log(record.level().into(), record.target(), w.inner()); } fn flush(&self) {} @@ -69,12 +65,12 @@ impl log::Log for RuntimeLogger { #[cfg(test)] mod tests { + use sp_api::{BlockId, ProvideRuntimeApi}; + use std::{env, str::FromStr}; use substrate_test_runtime_client::{ - ExecutionStrategy, TestClientBuilderExt, DefaultTestClientBuilderExt, - TestClientBuilder, runtime::TestAPI, + runtime::TestAPI, DefaultTestClientBuilderExt, ExecutionStrategy, TestClientBuilder, + TestClientBuilderExt, }; - use sp_api::{ProvideRuntimeApi, BlockId}; - use std::{env, str::FromStr}; #[test] fn 
ensure_runtime_logger_respects_host_max_log_level() { @@ -83,7 +79,8 @@ mod tests { log::set_max_level(log::LevelFilter::from_str(&env::var("RUST_LOG").unwrap()).unwrap()); let client = TestClientBuilder::new() - .set_execution_strategy(ExecutionStrategy::AlwaysWasm).build(); + .set_execution_strategy(ExecutionStrategy::AlwaysWasm) + .build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(0); runtime_api.do_trace_log(&block_id).expect("Logging should not fail"); diff --git a/primitives/runtime/src/runtime_string.rs b/primitives/runtime/src/runtime_string.rs index d2e961de5c5c4..179e881451813 100644 --- a/primitives/runtime/src/runtime_string.rs +++ b/primitives/runtime/src/runtime_string.rs @@ -15,7 +15,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use sp_core::RuntimeDebug; use sp_std::vec::Vec; @@ -55,7 +55,6 @@ macro_rules! format_runtime_string { }}; } - impl From<&'static str> for RuntimeString { fn from(data: &'static str) -> Self { Self::Borrowed(data) @@ -138,5 +137,7 @@ impl<'de> serde::Deserialize<'de> for RuntimeString { /// Create a const [`RuntimeString`]. #[macro_export] macro_rules! create_runtime_str { - ( $y:expr ) => {{ $crate::RuntimeString::Borrowed($y) }} + ( $y:expr ) => {{ + $crate::RuntimeString::Borrowed($y) + }}; } diff --git a/primitives/runtime/src/testing.rs b/primitives/runtime/src/testing.rs index 72390269d3023..781f342d43c1e 100644 --- a/primitives/runtime/src/testing.rs +++ b/primitives/runtime/src/testing.rs @@ -17,19 +17,28 @@ //! Testing utilities. 
-use serde::{Serialize, Serializer, Deserialize, de::Error as DeError, Deserializer}; -use std::{fmt::{self, Debug}, ops::Deref, cell::RefCell}; -use crate::codec::{Codec, Encode, Decode}; -use crate::scale_info::TypeInfo; -use crate::traits::{ - self, Checkable, Applyable, BlakeTwo256, OpaqueKeys, - SignedExtension, Dispatchable, DispatchInfoOf, PostDispatchInfoOf, +use crate::{ + codec::{Codec, Decode, Encode}, + generic, + scale_info::TypeInfo, + traits::{ + self, Applyable, BlakeTwo256, Checkable, DispatchInfoOf, Dispatchable, OpaqueKeys, + PostDispatchInfoOf, SignedExtension, ValidateUnsigned, + }, + transaction_validity::{TransactionSource, TransactionValidity, TransactionValidityError}, + ApplyExtrinsicResultWithInfo, CryptoTypeId, KeyTypeId, +}; +use serde::{de::Error as DeError, Deserialize, Deserializer, Serialize, Serializer}; +use sp_core::{ + crypto::{key_types, CryptoType, Dummy, Public}, + U256, +}; +pub use sp_core::{sr25519, H256}; +use std::{ + cell::RefCell, + fmt::{self, Debug}, + ops::Deref, }; -use crate::traits::ValidateUnsigned; -use crate::{generic, KeyTypeId, CryptoTypeId, ApplyExtrinsicResultWithInfo}; -pub use sp_core::{H256, sr25519}; -use sp_core::{crypto::{CryptoType, Dummy, key_types, Public}, U256}; -use crate::transaction_validity::{TransactionValidity, TransactionValidityError, TransactionSource}; /// A dummy type which can be used instead of regular cryptographic primitives. /// @@ -38,8 +47,19 @@ use crate::transaction_validity::{TransactionValidity, TransactionValidityError, /// 3. Implements `RuntimeAppPublic` so it can be used instead of regular application-specific /// crypto. 
#[derive( - Default, PartialEq, Eq, Clone, Encode, Decode, Debug, Hash, Serialize, Deserialize, PartialOrd, - Ord, TypeInfo + Default, + PartialEq, + Eq, + Clone, + Encode, + Decode, + Debug, + Hash, + Serialize, + Deserialize, + PartialOrd, + Ord, + TypeInfo, )] pub struct UintAuthorityId(pub u64); @@ -72,7 +92,10 @@ impl AsRef<[u8]> for UintAuthorityId { // Unsafe, i know, but it's test code and it's just there because it's really convenient to // keep `UintAuthorityId` as a u64 under the hood. unsafe { - std::slice::from_raw_parts(&self.0 as *const u64 as *const _, std::mem::size_of::()) + std::slice::from_raw_parts( + &self.0 as *const u64 as *const _, + std::mem::size_of::(), + ) } } } @@ -84,7 +107,7 @@ thread_local! { impl UintAuthorityId { /// Set the list of keys returned by the runtime call for all keys of that type. - pub fn set_all_keys>(keys: impl IntoIterator) { + pub fn set_all_keys>(keys: impl IntoIterator) { ALL_KEYS.with(|l| *l.borrow_mut() = keys.into_iter().map(Into::into).collect()) } } @@ -184,7 +207,8 @@ impl Header { pub struct ExtrinsicWrapper(Xt); impl traits::Extrinsic for ExtrinsicWrapper -where Xt: parity_util_mem::MallocSizeOf +where + Xt: parity_util_mem::MallocSizeOf, { type Call = (); type SignaturePayload = (); @@ -195,7 +219,10 @@ where Xt: parity_util_mem::MallocSizeOf } impl serde::Serialize for ExtrinsicWrapper { - fn serialize(&self, seq: S) -> Result where S: ::serde::Serializer { + fn serialize(&self, seq: S) -> Result + where + S: ::serde::Serializer, + { self.using_encoded(|bytes| seq.serialize_bytes(bytes)) } } @@ -223,8 +250,9 @@ pub struct Block { pub extrinsics: Vec, } -impl traits::Block - for Block +impl< + Xt: 'static + Codec + Sized + Send + Sync + Serialize + Clone + Eq + Debug + traits::Extrinsic, + > traits::Block for Block { type Extrinsic = Xt; type Header = Header; @@ -247,7 +275,10 @@ impl Deserialize<'a> for Block where Block: Decode { +impl<'a, Xt> Deserialize<'a> for Block +where + Block: Decode, +{ fn 
deserialize>(de: D) -> Result { let r = >::deserialize(de)?; Decode::decode(&mut &r[..]) @@ -277,8 +308,14 @@ impl TestXt { // Non-opaque extrinsics always 0. parity_util_mem::malloc_size_of_is_0!(any: TestXt); -impl Serialize for TestXt where TestXt: Encode { - fn serialize(&self, seq: S) -> Result where S: Serializer { +impl Serialize for TestXt +where + TestXt: Encode, +{ + fn serialize(&self, seq: S) -> Result + where + S: Serializer, + { self.using_encoded(|bytes| seq.serialize_bytes(bytes)) } } @@ -291,7 +328,9 @@ impl Debug for TestXt { impl Checkable for TestXt { type Checked = Self; - fn check(self, _: &Context) -> Result { Ok(self) } + fn check(self, _: &Context) -> Result { + Ok(self) + } } impl traits::Extrinsic for TestXt { @@ -307,23 +346,26 @@ impl traits::Extrinsic for TestXt } } -impl traits::ExtrinsicMetadata for TestXt where +impl traits::ExtrinsicMetadata for TestXt +where Call: Codec + Sync + Send, - Extra: SignedExtension, + Extra: SignedExtension, { type SignedExtensions = Extra; const VERSION: u8 = 0u8; } -impl Applyable for TestXt where - Call: 'static + Sized + Send + Sync + Clone + Eq + Codec + Debug + Dispatchable, - Extra: SignedExtension, +impl Applyable for TestXt +where + Call: + 'static + Sized + Send + Sync + Clone + Eq + Codec + Debug + Dispatchable, + Extra: SignedExtension, Origin: From>, { type Call = Call; /// Checks to see if this is a valid *transaction*. It returns information on it if so. - fn validate>( + fn validate>( &self, source: TransactionSource, info: &DispatchInfoOf, @@ -340,7 +382,7 @@ impl Applyable for TestXt where /// Executes all necessary logic needed prior to dispatch and deconstructs into function call, /// index and sender. 
- fn apply>( + fn apply>( self, info: &DispatchInfoOf, len: usize, diff --git a/primitives/runtime/src/traits.rs b/primitives/runtime/src/traits.rs index ed2fd2bf73865..535d2f6f0ba60 100644 --- a/primitives/runtime/src/traits.rs +++ b/primitives/runtime/src/traits.rs @@ -17,30 +17,37 @@ //! Primitives for the runtime modules. -use sp_std::prelude::*; -use sp_std::{self, marker::PhantomData, convert::{TryFrom, TryInto}, fmt::Debug}; +use crate::{ + codec::{Codec, Decode, Encode, MaxEncodedLen}, + generic::{Digest, DigestItem}, + scale_info::{MetaType, StaticTypeInfo, TypeInfo}, + transaction_validity::{ + TransactionSource, TransactionValidity, TransactionValidityError, UnknownTransaction, + ValidTransaction, + }, + DispatchResult, +}; +use impl_trait_for_tuples::impl_for_tuples; +#[cfg(feature = "std")] +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use sp_application_crypto::AppKey; +pub use sp_arithmetic::traits::{ + AtLeast32Bit, AtLeast32BitUnsigned, Bounded, CheckedAdd, CheckedDiv, CheckedMul, CheckedShl, + CheckedShr, CheckedSub, IntegerSquareRoot, One, SaturatedConversion, Saturating, + UniqueSaturatedFrom, UniqueSaturatedInto, Zero, +}; +use sp_core::{self, Hasher, RuntimeDebug, TypeId}; +use sp_std::{ + self, + convert::{TryFrom, TryInto}, + fmt::Debug, + marker::PhantomData, + prelude::*, +}; #[cfg(feature = "std")] use std::fmt::Display; #[cfg(feature = "std")] use std::str::FromStr; -#[cfg(feature = "std")] -use serde::{Serialize, Deserialize, de::DeserializeOwned}; -use sp_core::{self, Hasher, TypeId, RuntimeDebug}; -use crate::codec::{Codec, Encode, Decode, MaxEncodedLen}; -use crate::scale_info::{MetaType, TypeInfo, StaticTypeInfo}; -use crate::transaction_validity::{ - ValidTransaction, TransactionSource, TransactionValidity, TransactionValidityError, - UnknownTransaction, -}; -use crate::generic::{Digest, DigestItem}; -pub use sp_arithmetic::traits::{ - AtLeast32Bit, AtLeast32BitUnsigned, UniqueSaturatedInto, UniqueSaturatedFrom, 
Saturating, - SaturatedConversion, Zero, One, Bounded, CheckedAdd, CheckedSub, CheckedMul, CheckedDiv, - CheckedShl, CheckedShr, IntegerSquareRoot -}; -use sp_application_crypto::AppKey; -use impl_trait_for_tuples::impl_for_tuples; -use crate::DispatchResult; /// A lazy value. pub trait Lazy { @@ -51,7 +58,9 @@ pub trait Lazy { } impl<'a> Lazy<[u8]> for &'a [u8] { - fn get(&mut self) -> &[u8] { &**self } + fn get(&mut self) -> &[u8] { + &**self + } } /// Some type that is able to be collapsed into an account ID. It is not possible to recreate the @@ -65,17 +74,23 @@ pub trait IdentifyAccount { impl IdentifyAccount for sp_core::ed25519::Public { type AccountId = Self; - fn into_account(self) -> Self { self } + fn into_account(self) -> Self { + self + } } impl IdentifyAccount for sp_core::sr25519::Public { type AccountId = Self; - fn into_account(self) -> Self { self } + fn into_account(self) -> Self { + self + } } impl IdentifyAccount for sp_core::ecdsa::Public { type AccountId = Self; - fn into_account(self) -> Self { self } + fn into_account(self) -> Self { + self + } } /// Means of signature verification. @@ -85,7 +100,11 @@ pub trait Verify { /// Verify a signature. /// /// Return `true` if signature is valid for the value. 
- fn verify>(&self, msg: L, signer: &::AccountId) -> bool; + fn verify>( + &self, + msg: L, + signer: &::AccountId, + ) -> bool; } impl Verify for sp_core::ed25519::Signature { @@ -126,19 +145,27 @@ pub trait AppVerify { } impl< - S: Verify::Public as sp_application_crypto::AppPublic>::Generic> + From, - T: sp_application_crypto::Wraps + sp_application_crypto::AppKey + sp_application_crypto::AppSignature + - AsRef + AsMut + From, -> AppVerify for T where + S: Verify::Public as sp_application_crypto::AppPublic>::Generic> + + From, + T: sp_application_crypto::Wraps + + sp_application_crypto::AppKey + + sp_application_crypto::AppSignature + + AsRef + + AsMut + + From, + > AppVerify for T +where ::Signer: IdentifyAccount::Signer>, - <::Public as sp_application_crypto::AppPublic>::Generic: - IdentifyAccount::Public as sp_application_crypto::AppPublic>::Generic>, + <::Public as sp_application_crypto::AppPublic>::Generic: IdentifyAccount< + AccountId = <::Public as sp_application_crypto::AppPublic>::Generic, + >, { type AccountId = ::Public; fn verify>(&self, msg: L, signer: &::Public) -> bool { use sp_application_crypto::IsWrappedBy; let inner: &S = self.as_ref(); - let inner_pubkey = <::Public as sp_application_crypto::AppPublic>::Generic::from_ref(&signer); + let inner_pubkey = + <::Public as sp_application_crypto::AppPublic>::Generic::from_ref(&signer); Verify::verify(inner, msg, inner_pubkey) } } @@ -199,14 +226,20 @@ pub struct IdentityLookup(PhantomData); impl StaticLookup for IdentityLookup { type Source = T; type Target = T; - fn lookup(x: T) -> Result { Ok(x) } - fn unlookup(x: T) -> T { x } + fn lookup(x: T) -> Result { + Ok(x) + } + fn unlookup(x: T) -> T { + x + } } impl Lookup for IdentityLookup { type Source = T; type Target = T; - fn lookup(&self, x: T) -> Result { Ok(x) } + fn lookup(&self, x: T) -> Result { + Ok(x) + } } /// A lookup implementation returning the `AccountId` from a `MultiAddress`. 
@@ -254,19 +287,25 @@ pub trait Convert { } impl Convert for () { - fn convert(_: A) -> B { Default::default() } + fn convert(_: A) -> B { + Default::default() + } } /// A structure that performs identity conversion. pub struct Identity; impl Convert for Identity { - fn convert(a: T) -> T { a } + fn convert(a: T) -> T { + a + } } /// A structure that performs standard conversion using the standard Rust conversion traits. pub struct ConvertInto; impl> Convert for ConvertInto { - fn convert(a: A) -> B { a.into() } + fn convert(a: A) -> B { + a.into() + } } /// Convenience type to work around the highly unergonomic syntax needed @@ -278,7 +317,10 @@ pub trait CheckedConversion { /// This just uses `TryFrom` internally but with this /// variant you can provide the destination type using turbofish syntax /// in case Rust happens not to assume the correct type. - fn checked_from(t: T) -> Option where Self: TryFrom { + fn checked_from(t: T) -> Option + where + Self: TryFrom, + { >::try_from(t).ok() } /// Consume self to return `Some` equivalent value of `Option`. @@ -286,7 +328,10 @@ pub trait CheckedConversion { /// This just uses `TryInto` internally but with this /// variant you can provide the destination type using turbofish syntax /// in case Rust happens not to assume the correct type. - fn checked_into(self) -> Option where Self: TryInto { + fn checked_into(self) -> Option + where + Self: TryInto, + { >::try_into(self).ok() } } @@ -311,11 +356,17 @@ macro_rules! 
impl_scale { ($self:ty, $other:ty) => { impl Scale<$other> for $self { type Output = Self; - fn mul(self, other: $other) -> Self::Output { self * (other as Self) } - fn div(self, other: $other) -> Self::Output { self / (other as Self) } - fn rem(self, other: $other) -> Self::Output { self % (other as Self) } + fn mul(self, other: $other) -> Self::Output { + self * (other as Self) + } + fn div(self, other: $other) -> Self::Output { + self / (other as Self) + } + fn rem(self, other: $other) -> Self::Output { + self % (other as Self) + } } - } + }; } impl_scale!(u128, u128); impl_scale!(u128, u64); @@ -344,31 +395,57 @@ pub trait Clear { } impl Clear for T { - fn is_clear(&self) -> bool { *self == Self::clear() } - fn clear() -> Self { Default::default() } + fn is_clear(&self) -> bool { + *self == Self::clear() + } + fn clear() -> Self { + Default::default() + } } /// A meta trait for all bit ops. pub trait SimpleBitOps: - Sized + Clear + - sp_std::ops::BitOr + - sp_std::ops::BitXor + - sp_std::ops::BitAnd -{} -impl + - sp_std::ops::BitXor + - sp_std::ops::BitAnd -> SimpleBitOps for T {} + Sized + + Clear + + sp_std::ops::BitOr + + sp_std::ops::BitXor + + sp_std::ops::BitAnd +{ +} +impl< + T: Sized + + Clear + + sp_std::ops::BitOr + + sp_std::ops::BitXor + + sp_std::ops::BitAnd, + > SimpleBitOps for T +{ +} /// Abstraction around hashing // Stupid bug in the Rust compiler believes derived // traits must be fulfilled by all type parameters. -pub trait Hash: 'static + MaybeSerializeDeserialize + Debug + Clone + Eq + PartialEq + Hasher::Output> { +pub trait Hash: + 'static + + MaybeSerializeDeserialize + + Debug + + Clone + + Eq + + PartialEq + + Hasher::Output> +{ /// The hash type produced. 
- type Output: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash - + AsRef<[u8]> + AsMut<[u8]> + Copy + Default + Encode + Decode + MaxEncodedLen + type Output: Member + + MaybeSerializeDeserialize + + Debug + + sp_std::hash::Hash + + AsRef<[u8]> + + AsMut<[u8]> + + Copy + + Default + + Encode + + Decode + + MaxEncodedLen + TypeInfo; /// Produce the hash of some byte-slice. @@ -471,7 +548,10 @@ impl CheckEqual for sp_core::H256 { } } -impl CheckEqual for super::generic::DigestItem where H: Encode { +impl CheckEqual for super::generic::DigestItem +where + H: Encode, +{ #[cfg(feature = "std")] fn check_equal(&self, other: &Self) { if self != other { @@ -525,16 +605,34 @@ pub trait IsMember { /// /// You can also create a `new` one from those fields. pub trait Header: - Clone + Send + Sync + Codec + Eq + MaybeSerialize + Debug + - MaybeMallocSizeOf + 'static + Clone + Send + Sync + Codec + Eq + MaybeSerialize + Debug + MaybeMallocSizeOf + 'static { /// Header number. - type Number: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash + Copy + - MaybeDisplay + AtLeast32BitUnsigned + Codec + sp_std::str::FromStr + MaybeMallocSizeOf; + type Number: Member + + MaybeSerializeDeserialize + + Debug + + sp_std::hash::Hash + + Copy + + MaybeDisplay + + AtLeast32BitUnsigned + + Codec + + sp_std::str::FromStr + + MaybeMallocSizeOf; /// Header hash type - type Hash: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash + Ord - + Copy + MaybeDisplay + Default + SimpleBitOps + Codec + AsRef<[u8]> - + AsMut<[u8]> + MaybeMallocSizeOf + TypeInfo; + type Hash: Member + + MaybeSerializeDeserialize + + Debug + + sp_std::hash::Hash + + Ord + + Copy + + MaybeDisplay + + Default + + SimpleBitOps + + Codec + + AsRef<[u8]> + + AsMut<[u8]> + + MaybeMallocSizeOf + + TypeInfo; /// Hashing algorithm type Hashing: Hash; @@ -582,15 +680,28 @@ pub trait Header: /// `Extrinsic` pieces of information as well as a `Header`. 
/// /// You can get an iterator over each of the `extrinsics` and retrieve the `header`. -pub trait Block: Clone + Send + Sync + Codec + Eq + MaybeSerialize + Debug + MaybeMallocSizeOf + 'static { +pub trait Block: + Clone + Send + Sync + Codec + Eq + MaybeSerialize + Debug + MaybeMallocSizeOf + 'static +{ /// Type for extrinsics. type Extrinsic: Member + Codec + Extrinsic + MaybeSerialize + MaybeMallocSizeOf; /// Header type. - type Header: Header + MaybeMallocSizeOf; + type Header: Header + MaybeMallocSizeOf; /// Block hash type. - type Hash: Member + MaybeSerializeDeserialize + Debug + sp_std::hash::Hash + Ord - + Copy + MaybeDisplay + Default + SimpleBitOps + Codec + AsRef<[u8]> + AsMut<[u8]> - + MaybeMallocSizeOf + TypeInfo; + type Hash: Member + + MaybeSerializeDeserialize + + Debug + + sp_std::hash::Hash + + Ord + + Copy + + MaybeDisplay + + Default + + SimpleBitOps + + Codec + + AsRef<[u8]> + + AsMut<[u8]> + + MaybeMallocSizeOf + + TypeInfo; /// Returns a reference to the header. fn header(&self) -> &Self::Header; @@ -609,7 +720,6 @@ pub trait Block: Clone + Send + Sync + Codec + Eq + MaybeSerialize + Debug + May fn encode_from(header: &Self::Header, extrinsics: &[Self::Extrinsic]) -> Vec; } - /// Something that acts like an `Extrinsic`. pub trait Extrinsic: Sized + MaybeMallocSizeOf { /// The function call. @@ -624,7 +734,9 @@ pub trait Extrinsic: Sized + MaybeMallocSizeOf { /// Is this `Extrinsic` signed? /// If no information are available about signed/unsigned, `None` should be returned. - fn is_signed(&self) -> Option { None } + fn is_signed(&self) -> Option { + None + } /// Create new instance of the extrinsic. /// @@ -632,7 +744,9 @@ pub trait Extrinsic: Sized + MaybeMallocSizeOf { /// 1. Inherents (no signature; created by validators during block production) /// 2. Unsigned Transactions (no signature; represent "system calls" or other special kinds of calls) /// 3. 
Signed Transactions (with signature; a regular transactions with known origin) - fn new(_call: Self::Call, _signed_data: Option) -> Option { None } + fn new(_call: Self::Call, _signed_data: Option) -> Option { + None + } } /// Implementor is an [`Extrinsic`] and provides metadata about this extrinsic. @@ -723,7 +837,9 @@ impl Dispatchable for () { /// Means by which a transaction may be extended. This type embodies both the data and the logic /// that should be additionally associated with the transaction. It should be plain old data. -pub trait SignedExtension: Codec + Debug + Sync + Send + Clone + Eq + PartialEq + StaticTypeInfo { +pub trait SignedExtension: + Codec + Debug + Sync + Send + Clone + Eq + PartialEq + StaticTypeInfo +{ /// Unique identifier of this signed extension. /// /// This will be exposed in the metadata to identify the signed extension used @@ -895,9 +1011,13 @@ impl SignedExtension for Tuple { Ok(valid) } - fn pre_dispatch(self, who: &Self::AccountId, call: &Self::Call, info: &DispatchInfoOf, len: usize) - -> Result - { + fn pre_dispatch( + self, + who: &Self::AccountId, + call: &Self::Call, + info: &DispatchInfoOf, + len: usize, + ) -> Result { Ok(for_tuples!( ( #( Tuple.pre_dispatch(who, call, info, len)? ),* ) )) } @@ -945,7 +1065,9 @@ impl SignedExtension for () { type Call = (); type Pre = (); const IDENTIFIER: &'static str = "UnitSignedExtension"; - fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { Ok(()) } + fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { + Ok(()) + } } /// An "executable" piece of information, used by the standard Substrate Executive in order to @@ -959,7 +1081,7 @@ pub trait Applyable: Sized + Send + Sync { type Call: Dispatchable; /// Checks to see if this is a valid *transaction*. It returns information on it if so. 
- fn validate>( + fn validate>( &self, source: TransactionSource, info: &DispatchInfoOf, @@ -968,7 +1090,7 @@ pub trait Applyable: Sized + Send + Sync { /// Executes all necessary logic needed prior to dispatch and deconstructs into function call, /// index and sender. - fn apply>( + fn apply>( self, info: &DispatchInfoOf, len: usize, @@ -1037,7 +1159,9 @@ pub trait OpaqueKeys: Clone { T::decode(&mut self.get_raw(i)).ok() } /// Verify a proof of ownership for the keys. - fn ownership_proof_is_valid(&self, _proof: &[u8]) -> bool { true } + fn ownership_proof_is_valid(&self, _proof: &[u8]) -> bool { + true + } } /// Input that adds infinite number of zero after wrapped input. @@ -1073,7 +1197,7 @@ impl<'a, T: codec::Input> codec::Input for AppendZerosInput<'a, T> { into[i] = b; i += 1; } else { - break; + break } } i @@ -1116,7 +1240,9 @@ impl<'a> codec::Input for TrailingZeroInput<'a> { /// This type can be converted into and possibly from an AccountId (which itself is generic). pub trait AccountIdConversion: Sized { /// Convert into an account ID. This is infallible. - fn into_account(&self) -> AccountId { self.into_sub_account(&()) } + fn into_account(&self) -> AccountId { + self.into_sub_account(&()) + } /// Try to convert an account ID into this type. Might not succeed. fn try_from_account(a: &AccountId) -> Option { @@ -1142,14 +1268,16 @@ pub trait AccountIdConversion: Sized { /// fill AccountId. 
impl AccountIdConversion for Id { fn into_sub_account(&self, sub: S) -> T { - (Id::TYPE_ID, self, sub).using_encoded(|b| - T::decode(&mut TrailingZeroInput(b)) - ).unwrap_or_default() + (Id::TYPE_ID, self, sub) + .using_encoded(|b| T::decode(&mut TrailingZeroInput(b))) + .unwrap_or_default() } fn try_from_sub_account(x: &T) -> Option<(Self, S)> { x.using_encoded(|d| { - if &d[0..4] != Id::TYPE_ID { return None } + if &d[0..4] != Id::TYPE_ID { + return None + } let mut cursor = &d[4..]; let result = Decode::decode(&mut cursor).ok()?; if cursor.iter().all(|x| *x == 0) { @@ -1484,19 +1612,19 @@ pub trait BlockNumberProvider { #[cfg(test)] mod tests { use super::*; - use crate::codec::{Encode, Decode, Input}; + use crate::codec::{Decode, Encode, Input}; use sp_core::{crypto::Pair, ecdsa}; mod t { - use sp_core::crypto::KeyTypeId; use sp_application_crypto::{app_crypto, sr25519}; + use sp_core::crypto::KeyTypeId; app_crypto!(sr25519, KeyTypeId(*b"test")); } #[test] fn app_verify_works() { - use t::*; use super::AppVerify; + use t::*; let s = Signature::default(); let _ = s.verify(&[0u8; 100][..], &Public::default()); diff --git a/primitives/runtime/src/transaction_validity.rs b/primitives/runtime/src/transaction_validity.rs index 1768c27d6f5a6..939452384f75e 100644 --- a/primitives/runtime/src/transaction_validity.rs +++ b/primitives/runtime/src/transaction_validity.rs @@ -17,9 +17,11 @@ //! Transaction validity interface. +use crate::{ + codec::{Decode, Encode}, + RuntimeDebug, +}; use sp_std::prelude::*; -use crate::codec::{Encode, Decode}; -use crate::RuntimeDebug; /// Priority for a transaction. Additive. Higher is better. 
pub type TransactionPriority = u64; @@ -98,8 +100,7 @@ impl From for &'static str { InvalidTransaction::Stale => "Transaction is outdated", InvalidTransaction::BadProof => "Transaction has a bad signature", InvalidTransaction::AncientBirthBlock => "Transaction has an ancient birth block", - InvalidTransaction::ExhaustsResources => - "Transaction would exhaust the block limits", + InvalidTransaction::ExhaustsResources => "Transaction would exhaust the block limits", InvalidTransaction::Payment => "Inability to pay some fees (e.g. account balance too low)", InvalidTransaction::BadMandatory => @@ -220,7 +221,9 @@ impl From for TransactionValidity { /// Depending on the source we might apply different validation schemes. /// For instance we can disallow specific kinds of transactions if they were not produced /// by our local node (for instance off-chain workers). -#[derive(Copy, Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, parity_util_mem::MallocSizeOf)] +#[derive( + Copy, Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug, parity_util_mem::MallocSizeOf, +)] pub enum TransactionSource { /// Transaction is already included in block. /// @@ -295,10 +298,7 @@ impl ValidTransaction { /// To avoid conflicts between different parts in runtime it's recommended to build `requires` /// and `provides` tags with a unique prefix. pub fn with_tag_prefix(prefix: &'static str) -> ValidTransactionBuilder { - ValidTransactionBuilder { - prefix: Some(prefix), - validity: Default::default(), - } + ValidTransactionBuilder { prefix: Some(prefix), validity: Default::default() } } /// Combine two instances into one, as a best effort. 
This will take the superset of each of the @@ -307,8 +307,14 @@ impl ValidTransaction { pub fn combine_with(mut self, mut other: ValidTransaction) -> Self { Self { priority: self.priority.saturating_add(other.priority), - requires: { self.requires.append(&mut other.requires); self.requires }, - provides: { self.provides.append(&mut other.provides); self.provides }, + requires: { + self.requires.append(&mut other.requires); + self.requires + }, + provides: { + self.provides.append(&mut other.provides); + self.provides + }, longevity: self.longevity.min(other.longevity), propagate: self.propagate && other.propagate, } @@ -412,7 +418,6 @@ impl From for ValidTransaction { } } - #[cfg(test)] mod tests { use super::*; @@ -430,7 +435,10 @@ mod tests { let encoded = v.encode(); assert_eq!( encoded, - vec![0, 5, 0, 0, 0, 0, 0, 0, 0, 4, 16, 1, 2, 3, 4, 4, 12, 4, 5, 6, 42, 0, 0, 0, 0, 0, 0, 0, 0] + vec![ + 0, 5, 0, 0, 0, 0, 0, 0, 0, 4, 16, 1, 2, 3, 4, 4, 12, 4, 5, 6, 42, 0, 0, 0, 0, 0, 0, + 0, 0 + ] ); // decode back @@ -450,12 +458,15 @@ mod tests { .priority(3) .priority(6) .into(); - assert_eq!(a, ValidTransaction { - propagate: false, - longevity: 5, - priority: 6, - requires: vec![(PREFIX, 1).encode(), (PREFIX, 2).encode()], - provides: vec![(PREFIX, 3).encode(), (PREFIX, 4).encode()], - }); + assert_eq!( + a, + ValidTransaction { + propagate: false, + longevity: 5, + priority: 6, + requires: vec![(PREFIX, 1).encode(), (PREFIX, 2).encode()], + provides: vec![(PREFIX, 3).encode(), (PREFIX, 4).encode()], + } + ); } } diff --git a/primitives/sandbox/src/lib.rs b/primitives/sandbox/src/lib.rs index 22e68439958d9..a433d57c3b515 100755 --- a/primitives/sandbox/src/lib.rs +++ b/primitives/sandbox/src/lib.rs @@ -41,7 +41,7 @@ use sp_std::prelude::*; pub use sp_core::sandbox::HostError; -pub use sp_wasm_interface::{Value, ReturnValue}; +pub use sp_wasm_interface::{ReturnValue, Value}; mod imp { #[cfg(feature = "std")] @@ -100,9 +100,7 @@ impl Memory { /// /// Allocated memory is 
always zeroed. pub fn new(initial: u32, maximum: Option) -> Result { - Ok(Memory { - inner: imp::Memory::new(initial, maximum)?, - }) + Ok(Memory { inner: imp::Memory::new(initial, maximum)? }) } /// Read a memory area at the address `ptr` with the size of the provided slice `buf`. @@ -131,9 +129,7 @@ pub struct EnvironmentDefinitionBuilder { impl EnvironmentDefinitionBuilder { /// Construct a new `EnvironmentDefinitionBuilder`. pub fn new() -> EnvironmentDefinitionBuilder { - EnvironmentDefinitionBuilder { - inner: imp::EnvironmentDefinitionBuilder::new(), - } + EnvironmentDefinitionBuilder { inner: imp::EnvironmentDefinitionBuilder::new() } } /// Register a host function in this environment definition. @@ -176,12 +172,12 @@ impl Instance { /// be returned. /// /// [`EnvironmentDefinitionBuilder`]: struct.EnvironmentDefinitionBuilder.html - pub fn new(code: &[u8], env_def_builder: &EnvironmentDefinitionBuilder, state: &mut T) - -> Result, Error> - { - Ok(Instance { - inner: imp::Instance::new(code, &env_def_builder.inner, state)?, - }) + pub fn new( + code: &[u8], + env_def_builder: &EnvironmentDefinitionBuilder, + state: &mut T, + ) -> Result, Error> { + Ok(Instance { inner: imp::Instance::new(code, &env_def_builder.inner, state)? }) } /// Invoke an exported function with the given name. diff --git a/primitives/serializer/src/lib.rs b/primitives/serializer/src/lib.rs index 3aef9ef5a3873..ccdbbf27f179b 100644 --- a/primitives/serializer/src/lib.rs +++ b/primitives/serializer/src/lib.rs @@ -22,7 +22,7 @@ #![warn(missing_docs)] -pub use serde_json::{from_str, from_slice, from_reader, Result, Error}; +pub use serde_json::{from_reader, from_slice, from_str, Error, Result}; const PROOF: &str = "Serializers are infallible; qed"; @@ -37,6 +37,9 @@ pub fn encode(value: &T) -> Vec { } /// Serialize the given data structure as JSON into the IO stream. 
-pub fn to_writer(writer: W, value: &T) -> Result<()> { +pub fn to_writer( + writer: W, + value: &T, +) -> Result<()> { serde_json::to_writer(writer, value) } diff --git a/primitives/session/src/lib.rs b/primitives/session/src/lib.rs index 4c7f9524043f7..d85b6af4349e4 100644 --- a/primitives/session/src/lib.rs +++ b/primitives/session/src/lib.rs @@ -19,15 +19,14 @@ #![cfg_attr(not(feature = "std"), no_std)] -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; -#[cfg(feature = "std")] -use sp_runtime::{generic::BlockId, traits::Block as BlockT}; #[cfg(feature = "std")] use sp_api::ProvideRuntimeApi; +#[cfg(feature = "std")] +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; -use sp_core::RuntimeDebug; -use sp_core::crypto::KeyTypeId; +use sp_core::{crypto::KeyTypeId, RuntimeDebug}; use sp_staking::SessionIndex; use sp_std::vec::Vec; diff --git a/primitives/staking/src/offence.rs b/primitives/staking/src/offence.rs index 9e37d2d8799b8..a91cb47c117b6 100644 --- a/primitives/staking/src/offence.rs +++ b/primitives/staking/src/offence.rs @@ -20,7 +20,7 @@ use sp_std::vec::Vec; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use sp_runtime::Perbill; use crate::SessionIndex; @@ -84,10 +84,7 @@ pub trait Offence { /// /// `offenders_count` - the count of unique offending authorities. It is >0. /// `validator_set_count` - the cardinality of the validator set at the time of offence. - fn slash_fraction( - offenders_count: u32, - validator_set_count: u32, - ) -> Perbill; + fn slash_fraction(offenders_count: u32, validator_set_count: u32) -> Perbill; } /// Errors that may happen on offence reports. 
@@ -108,7 +105,7 @@ impl sp_runtime::traits::Printable for OffenceError { Self::Other(e) => { "Other".print(); e.print(); - } + }, } } } diff --git a/primitives/state-machine/src/backend.rs b/primitives/state-machine/src/backend.rs index 0dc054ed50390..de4ff33b51fe8 100644 --- a/primitives/state-machine/src/backend.rs +++ b/primitives/state-machine/src/backend.rs @@ -17,19 +17,16 @@ //! State machine backends. These manage the code and storage of contracts. -use hash_db::Hasher; -use codec::{Decode, Encode}; -use sp_core::{ - storage::{ChildInfo, well_known_keys, TrackedStorageKey} -}; use crate::{ - trie_backend::TrieBackend, - trie_backend_essence::TrieBackendStorage, - UsageInfo, StorageKey, StorageValue, StorageCollection, ChildStorageCollection, + trie_backend::TrieBackend, trie_backend_essence::TrieBackendStorage, ChildStorageCollection, + StorageCollection, StorageKey, StorageValue, UsageInfo, }; -use sp_std::vec::Vec; +use codec::{Decode, Encode}; +use hash_db::Hasher; +use sp_core::storage::{well_known_keys, ChildInfo, TrackedStorageKey}; #[cfg(feature = "std")] use sp_core::traits::RuntimeCode; +use sp_std::vec::Vec; /// A state backend is used to read state data and can have changes committed /// to it. @@ -90,7 +87,7 @@ pub trait Backend: sp_std::fmt::Debug { fn next_child_storage_key( &self, child_info: &ChildInfo, - key: &[u8] + key: &[u8], ) -> Result, Self::Error>; /// Iterate over storage starting at key, for a given prefix and child trie. @@ -128,7 +125,6 @@ pub trait Backend: sp_std::fmt::Debug { /// call `f` for each of those keys. fn for_key_values_with_prefix(&self, prefix: &[u8], f: F); - /// Retrieve all child entries keys which start with the given prefix and /// call `f` for each of those keys. fn for_child_keys_with_prefix( @@ -143,8 +139,10 @@ pub trait Backend: sp_std::fmt::Debug { /// Does not include child storage updates. 
fn storage_root<'a>( &self, - delta: impl Iterator)>, - ) -> (H::Out, Self::Transaction) where H::Out: Ord; + delta: impl Iterator)>, + ) -> (H::Out, Self::Transaction) + where + H::Out: Ord; /// Calculate the child storage root, with given delta over what is already stored in /// the backend, and produce a "transaction" that can be used to commit. The second argument @@ -152,8 +150,10 @@ pub trait Backend: sp_std::fmt::Debug { fn child_storage_root<'a>( &self, child_info: &ChildInfo, - delta: impl Iterator)>, - ) -> (H::Out, bool, Self::Transaction) where H::Out: Ord; + delta: impl Iterator)>, + ) -> (H::Out, bool, Self::Transaction) + where + H::Out: Ord; /// Get all key/value pairs into a Vec. fn pairs(&self) -> Vec<(StorageKey, StorageValue)>; @@ -166,11 +166,7 @@ pub trait Backend: sp_std::fmt::Debug { } /// Get all keys of child storage with given prefix - fn child_keys( - &self, - child_info: &ChildInfo, - prefix: &[u8], - ) -> Vec { + fn child_keys(&self, child_info: &ChildInfo, prefix: &[u8]) -> Vec { let mut all = Vec::new(); self.for_child_keys_with_prefix(child_info, prefix, |k| all.push(k.to_vec())); all @@ -186,18 +182,19 @@ pub trait Backend: sp_std::fmt::Debug { /// Does include child storage updates. 
fn full_storage_root<'a>( &self, - delta: impl Iterator)>, - child_deltas: impl Iterator)>, - )>, - ) -> (H::Out, Self::Transaction) where H::Out: Ord + Encode { + delta: impl Iterator)>, + child_deltas: impl Iterator< + Item = (&'a ChildInfo, impl Iterator)>), + >, + ) -> (H::Out, Self::Transaction) + where + H::Out: Ord + Encode, + { let mut txs: Self::Transaction = Default::default(); let mut child_roots: Vec<_> = Default::default(); // child first for (child_info, child_delta) in child_deltas { - let (child_root, empty, child_txs) = - self.child_storage_root(&child_info, child_delta); + let (child_root, empty, child_txs) = self.child_storage_root(&child_info, child_delta); let prefixed_storage_key = child_info.prefixed_storage_key(); txs.consolidate(child_txs); if empty { @@ -206,13 +203,10 @@ pub trait Backend: sp_std::fmt::Debug { child_roots.push((prefixed_storage_key.into_inner(), Some(child_root.encode()))); } } - let (root, parent_txs) = self.storage_root(delta - .map(|(k, v)| (k, v.as_ref().map(|v| &v[..]))) - .chain( - child_roots - .iter() - .map(|(k, v)| (&k[..], v.as_ref().map(|v| &v[..]))) - ) + let (root, parent_txs) = self.storage_root( + delta + .map(|(k, v)| (k, v.as_ref().map(|v| &v[..]))) + .chain(child_roots.iter().map(|(k, v)| (&k[..], v.as_ref().map(|v| &v[..])))), ); txs.consolidate(parent_txs); (root, txs) @@ -286,10 +280,7 @@ impl Consolidate for () { } } -impl Consolidate for Vec<( - Option, - StorageCollection, - )> { +impl Consolidate for Vec<(Option, StorageCollection)> { fn consolidate(&mut self, mut other: Self) { self.append(&mut other); } @@ -303,12 +294,15 @@ impl> Consolidate for sp_trie::GenericMem /// Insert input pairs into memory db. 
#[cfg(test)] -pub(crate) fn insert_into_memory_db(mdb: &mut sp_trie::MemoryDB, input: I) -> Option - where - H: Hasher, - I: IntoIterator, +pub(crate) fn insert_into_memory_db( + mdb: &mut sp_trie::MemoryDB, + input: I, +) -> Option +where + H: Hasher, + I: IntoIterator, { - use sp_trie::{TrieMut, trie_types::TrieDBMut}; + use sp_trie::{trie_types::TrieDBMut, TrieMut}; let mut root = ::Out::default(); { @@ -316,7 +310,7 @@ pub(crate) fn insert_into_memory_db(mdb: &mut sp_trie::MemoryDB, input: for (key, value) in input { if let Err(e) = trie.insert(&key, &value) { log::warn!(target: "trie", "Failed to write to trie: {}", e); - return None; + return None } } } @@ -332,8 +326,8 @@ pub struct BackendRuntimeCode<'a, B, H> { } #[cfg(feature = "std")] -impl<'a, B: Backend, H: Hasher> sp_core::traits::FetchRuntimeCode for - BackendRuntimeCode<'a, B, H> +impl<'a, B: Backend, H: Hasher> sp_core::traits::FetchRuntimeCode + for BackendRuntimeCode<'a, B, H> { fn fetch_runtime_code<'b>(&'b self) -> Option> { self.backend.storage(well_known_keys::CODE).ok().flatten().map(Into::into) @@ -341,23 +335,27 @@ impl<'a, B: Backend, H: Hasher> sp_core::traits::FetchRuntimeCode for } #[cfg(feature = "std")] -impl<'a, B: Backend, H: Hasher> BackendRuntimeCode<'a, B, H> where H::Out: Encode { +impl<'a, B: Backend, H: Hasher> BackendRuntimeCode<'a, B, H> +where + H::Out: Encode, +{ /// Create a new instance. pub fn new(backend: &'a B) -> Self { - Self { - backend, - _marker: std::marker::PhantomData, - } + Self { backend, _marker: std::marker::PhantomData } } /// Return the [`RuntimeCode`] build from the wrapped `backend`. pub fn runtime_code(&self) -> Result { - let hash = self.backend.storage_hash(well_known_keys::CODE) + let hash = self + .backend + .storage_hash(well_known_keys::CODE) .ok() .flatten() .ok_or("`:code` hash not found")? 
.encode(); - let heap_pages = self.backend.storage(well_known_keys::HEAP_PAGES) + let heap_pages = self + .backend + .storage(well_known_keys::HEAP_PAGES) .ok() .flatten() .and_then(|d| Decode::decode(&mut &d[..]).ok()); diff --git a/primitives/state-machine/src/basic.rs b/primitives/state-machine/src/basic.rs index 75b0c1c922e43..0bbd2d0a8e8e6 100644 --- a/primitives/state-machine/src/basic.rs +++ b/primitives/state-machine/src/basic.rs @@ -17,23 +17,25 @@ //! Basic implementation for Externalities. -use std::{ - collections::BTreeMap, any::{TypeId, Any}, iter::FromIterator, ops::Bound, -}; use crate::{Backend, StorageKey, StorageValue}; +use codec::Encode; use hash_db::Hasher; -use sp_trie::{TrieConfiguration, empty_child_trie_root}; -use sp_trie::trie_types::Layout; +use log::warn; use sp_core::{ storage::{ - well_known_keys::is_child_storage_key, Storage, - ChildInfo, StorageChild, TrackedStorageKey, + well_known_keys::is_child_storage_key, ChildInfo, Storage, StorageChild, TrackedStorageKey, }, - traits::Externalities, Blake2Hasher, + traits::Externalities, + Blake2Hasher, +}; +use sp_externalities::{Extension, Extensions}; +use sp_trie::{empty_child_trie_root, trie_types::Layout, TrieConfiguration}; +use std::{ + any::{Any, TypeId}, + collections::BTreeMap, + iter::FromIterator, + ops::Bound, }; -use log::warn; -use codec::Encode; -use sp_externalities::{Extensions, Extension}; /// Simple Map-based Externalities impl. 
#[derive(Debug)] @@ -105,13 +107,13 @@ impl BasicExternalities { impl PartialEq for BasicExternalities { fn eq(&self, other: &BasicExternalities) -> bool { - self.inner.top.eq(&other.inner.top) - && self.inner.children_default.eq(&other.inner.children_default) + self.inner.top.eq(&other.inner.top) && + self.inner.children_default.eq(&other.inner.children_default) } } impl FromIterator<(StorageKey, StorageValue)> for BasicExternalities { - fn from_iter>(iter: I) -> Self { + fn from_iter>(iter: I) -> Self { let mut t = Self::default(); t.inner.top.extend(iter); t @@ -119,16 +121,15 @@ impl FromIterator<(StorageKey, StorageValue)> for BasicExternalities { } impl Default for BasicExternalities { - fn default() -> Self { Self::new(Default::default()) } + fn default() -> Self { + Self::new(Default::default()) + } } impl From> for BasicExternalities { fn from(hashmap: BTreeMap) -> Self { BasicExternalities { - inner: Storage { - top: hashmap, - children_default: Default::default(), - }, + inner: Storage { top: hashmap, children_default: Default::default() }, extensions: Default::default(), } } @@ -145,20 +146,15 @@ impl Externalities for BasicExternalities { self.storage(key).map(|v| Blake2Hasher::hash(&v).encode()) } - fn child_storage( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option { - self.inner.children_default.get(child_info.storage_key()) - .and_then(|child| child.data.get(key)).cloned() + fn child_storage(&self, child_info: &ChildInfo, key: &[u8]) -> Option { + self.inner + .children_default + .get(child_info.storage_key()) + .and_then(|child| child.data.get(key)) + .cloned() } - fn child_storage_hash( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option> { + fn child_storage_hash(&self, child_info: &ChildInfo, key: &[u8]) -> Option> { self.child_storage(child_info, key).map(|v| Blake2Hasher::hash(&v).encode()) } @@ -167,25 +163,27 @@ impl Externalities for BasicExternalities { self.inner.top.range::<[u8], _>(range).next().map(|(k, _)| 
k).cloned() } - fn next_child_storage_key( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option { + fn next_child_storage_key(&self, child_info: &ChildInfo, key: &[u8]) -> Option { let range = (Bound::Excluded(key), Bound::Unbounded); - self.inner.children_default.get(child_info.storage_key()) + self.inner + .children_default + .get(child_info.storage_key()) .and_then(|child| child.data.range::<[u8], _>(range).next().map(|(k, _)| k).cloned()) } fn place_storage(&mut self, key: StorageKey, maybe_value: Option) { if is_child_storage_key(&key) { warn!(target: "trie", "Refuse to set child storage key via main storage"); - return; + return } match maybe_value { - Some(value) => { self.inner.top.insert(key, value); } - None => { self.inner.top.remove(&key); } + Some(value) => { + self.inner.top.insert(key, value); + }, + None => { + self.inner.top.remove(&key); + }, } } @@ -195,7 +193,10 @@ impl Externalities for BasicExternalities { key: StorageKey, value: Option, ) { - let child_map = self.inner.children_default.entry(child_info.storage_key().to_vec()) + let child_map = self + .inner + .children_default + .entry(child_info.storage_key().to_vec()) .or_insert_with(|| StorageChild { data: Default::default(), child_info: child_info.to_owned(), @@ -207,12 +208,13 @@ impl Externalities for BasicExternalities { } } - fn kill_child_storage( - &mut self, - child_info: &ChildInfo, - _limit: Option, - ) -> (bool, u32) { - let num_removed = self.inner.children_default.remove(child_info.storage_key()).map(|c| c.data.len()).unwrap_or(0); + fn kill_child_storage(&mut self, child_info: &ChildInfo, _limit: Option) -> (bool, u32) { + let num_removed = self + .inner + .children_default + .remove(child_info.storage_key()) + .map(|c| c.data.len()) + .unwrap_or(0); (true, num_removed as u32) } @@ -222,10 +224,13 @@ impl Externalities for BasicExternalities { target: "trie", "Refuse to clear prefix that is part of child storage key via main storage" ); - return (false, 0); + return 
(false, 0) } - let to_remove = self.inner.top.range::<[u8], _>((Bound::Included(prefix), Bound::Unbounded)) + let to_remove = self + .inner + .top + .range::<[u8], _>((Bound::Included(prefix), Bound::Unbounded)) .map(|(k, _)| k) .take_while(|k| k.starts_with(prefix)) .cloned() @@ -245,7 +250,9 @@ impl Externalities for BasicExternalities { _limit: Option, ) -> (bool, u32) { if let Some(child) = self.inner.children_default.get_mut(child_info.storage_key()) { - let to_remove = child.data.range::<[u8], _>((Bound::Included(prefix), Bound::Unbounded)) + let to_remove = child + .data + .range::<[u8], _>((Bound::Included(prefix), Bound::Unbounded)) .map(|(k, _)| k) .take_while(|k| k.starts_with(prefix)) .cloned() @@ -261,20 +268,19 @@ impl Externalities for BasicExternalities { } } - fn storage_append( - &mut self, - key: Vec, - value: Vec, - ) { + fn storage_append(&mut self, key: Vec, value: Vec) { let current = self.inner.top.entry(key).or_default(); crate::ext::StorageAppend::new(current).append(value); } fn storage_root(&mut self) -> Vec { let mut top = self.inner.top.clone(); - let prefixed_keys: Vec<_> = self.inner.children_default.iter().map(|(_k, v)| { - (v.child_info.prefixed_storage_key(), v.child_info.clone()) - }).collect(); + let prefixed_keys: Vec<_> = self + .inner + .children_default + .iter() + .map(|(_k, v)| (v.child_info.prefixed_storage_key(), v.child_info.clone())) + .collect(); // Single child trie implementation currently allows using the same child // empty root for all child trie. Using null storage key until multiple // type of child trie support. 
@@ -291,17 +297,16 @@ impl Externalities for BasicExternalities { Layout::::trie_root(self.inner.top.clone()).as_ref().into() } - fn child_storage_root( - &mut self, - child_info: &ChildInfo, - ) -> Vec { + fn child_storage_root(&mut self, child_info: &ChildInfo) -> Vec { if let Some(child) = self.inner.children_default.get(child_info.storage_key()) { let delta = child.data.iter().map(|(k, v)| (k.as_ref(), Some(v.as_ref()))); crate::in_memory_backend::new_in_mem::() - .child_storage_root(&child.child_info, delta).0 + .child_storage_root(&child.child_info, delta) + .0 } else { empty_child_trie_root::>() - }.encode() + } + .encode() } fn storage_changes_root(&mut self, _parent: &[u8]) -> Result>, ()> { @@ -358,7 +363,10 @@ impl sp_externalities::ExtensionStore for BasicExternalities { self.extensions.register_with_type_id(type_id, extension) } - fn deregister_extension_by_type_id(&mut self, type_id: TypeId) -> Result<(), sp_externalities::Error> { + fn deregister_extension_by_type_id( + &mut self, + type_id: TypeId, + ) -> Result<(), sp_externalities::Error> { if self.extensions.deregister(type_id) { Ok(()) } else { @@ -370,10 +378,11 @@ impl sp_externalities::ExtensionStore for BasicExternalities { #[cfg(test)] mod tests { use super::*; - use sp_core::map; - use sp_core::storage::{Storage, StorageChild}; - use sp_core::storage::well_known_keys::CODE; use hex_literal::hex; + use sp_core::{ + map, + storage::{well_known_keys::CODE, Storage, StorageChild}, + }; #[test] fn commit_should_work() { @@ -381,7 +390,8 @@ mod tests { ext.set_storage(b"doe".to_vec(), b"reindeer".to_vec()); ext.set_storage(b"dog".to_vec(), b"puppy".to_vec()); ext.set_storage(b"dogglesworth".to_vec(), b"cat".to_vec()); - const ROOT: [u8; 32] = hex!("39245109cef3758c2eed2ccba8d9b370a917850af3824bc8348d505df2c298fa"); + const ROOT: [u8; 32] = + hex!("39245109cef3758c2eed2ccba8d9b370a917850af3824bc8348d505df2c298fa"); assert_eq!(&ext.storage_root()[..], &ROOT); } @@ -407,7 +417,7 @@ mod tests { 
data: map![ b"doe".to_vec() => b"reindeer".to_vec() ], child_info: child_info.to_owned(), } - ] + ], }); assert_eq!(ext.child_storage(child_info, b"doe"), Some(b"reindeer".to_vec())); @@ -437,10 +447,9 @@ mod tests { ], child_info: child_info.to_owned(), } - ] + ], }); - let res = ext.kill_child_storage(child_info, None); assert_eq!(res, (true, 3)); } diff --git a/primitives/state-machine/src/changes_trie/build.rs b/primitives/state-machine/src/changes_trie/build.rs index 38d1ab714e7f8..2c75ac236bf33 100644 --- a/primitives/state-machine/src/changes_trie/build.rs +++ b/primitives/state-machine/src/changes_trie/build.rs @@ -17,23 +17,22 @@ //! Structures and functions required to build changes trie for given block. -use std::collections::BTreeMap; -use std::collections::btree_map::Entry; -use codec::{Decode, Encode}; -use hash_db::Hasher; -use num_traits::One; use crate::{ - StorageKey, backend::Backend, - overlayed_changes::{OverlayedChanges, OverlayedValue}, - trie_backend_essence::TrieBackendEssence, changes_trie::{ - AnchorBlockId, ConfigurationRange, Storage, BlockNumber, build_iterator::digest_build_iterator, - input::{InputKey, InputPair, DigestIndex, ExtrinsicIndex, ChildIndex}, + input::{ChildIndex, DigestIndex, ExtrinsicIndex, InputKey, InputPair}, + AnchorBlockId, BlockNumber, ConfigurationRange, Storage, }, + overlayed_changes::{OverlayedChanges, OverlayedValue}, + trie_backend_essence::TrieBackendEssence, + StorageKey, }; +use codec::{Decode, Encode}; +use hash_db::Hasher; +use num_traits::One; use sp_core::storage::{ChildInfo, PrefixedStorageKey}; +use std::collections::{btree_map::Entry, BTreeMap}; /// Prepare input pairs for building a changes trie of given block. 
/// @@ -45,66 +44,59 @@ pub(crate) fn prepare_input<'a, B, H, Number>( config: ConfigurationRange<'a, Number>, overlay: &'a OverlayedChanges, parent: &'a AnchorBlockId, -) -> Result<( - impl Iterator> + 'a, - Vec<(ChildIndex, impl Iterator> + 'a)>, +) -> Result< + ( + impl Iterator> + 'a, + Vec<(ChildIndex, impl Iterator> + 'a)>, Vec, - ), String> - where - B: Backend, - H: Hasher + 'a, - H::Out: Encode, - Number: BlockNumber, + ), + String, +> +where + B: Backend, + H: Hasher + 'a, + H::Out: Encode, + Number: BlockNumber, { let number = parent.number.clone() + One::one(); - let (extrinsics_input, children_extrinsics_input) = prepare_extrinsics_input( - backend, - &number, - overlay, - )?; - let (digest_input, mut children_digest_input, digest_input_blocks) = prepare_digest_input::( - parent, - config, - number, - storage, - )?; + let (extrinsics_input, children_extrinsics_input) = + prepare_extrinsics_input(backend, &number, overlay)?; + let (digest_input, mut children_digest_input, digest_input_blocks) = + prepare_digest_input::(parent, config, number, storage)?; let mut children_digest = Vec::with_capacity(children_extrinsics_input.len()); for (child_index, ext_iter) in children_extrinsics_input.into_iter() { let dig_iter = children_digest_input.remove(&child_index); children_digest.push(( child_index, - Some(ext_iter).into_iter().flatten() - .chain(dig_iter.into_iter().flatten()), + Some(ext_iter).into_iter().flatten().chain(dig_iter.into_iter().flatten()), )); } for (child_index, dig_iter) in children_digest_input.into_iter() { children_digest.push(( child_index, - None.into_iter().flatten() - .chain(Some(dig_iter).into_iter().flatten()), + None.into_iter().flatten().chain(Some(dig_iter).into_iter().flatten()), )); } - Ok(( - extrinsics_input.chain(digest_input), - children_digest, - digest_input_blocks, - )) + Ok((extrinsics_input.chain(digest_input), children_digest, digest_input_blocks)) } /// Prepare ExtrinsicIndex input pairs. 
fn prepare_extrinsics_input<'a, B, H, Number>( backend: &'a B, block: &Number, overlay: &'a OverlayedChanges, -) -> Result<( - impl Iterator> + 'a, - BTreeMap, impl Iterator> + 'a>, - ), String> - where - B: Backend, - H: Hasher + 'a, - Number: BlockNumber, +) -> Result< + ( + impl Iterator> + 'a, + BTreeMap, impl Iterator> + 'a>, + ), + String, +> +where + B: Backend, + H: Hasher + 'a, + Number: BlockNumber, { let mut children_result = BTreeMap::new(); @@ -115,7 +107,9 @@ fn prepare_extrinsics_input<'a, B, H, Number>( }; let iter = prepare_extrinsics_input_inner( - backend, block, overlay, + backend, + block, + overlay, Some(child_info.clone()), child_changes, )?; @@ -132,12 +126,12 @@ fn prepare_extrinsics_input_inner<'a, B, H, Number>( block: &Number, overlay: &'a OverlayedChanges, child_info: Option, - changes: impl Iterator -) -> Result> + 'a, String> - where - B: Backend, - H: Hasher, - Number: BlockNumber, + changes: impl Iterator, +) -> Result> + 'a, String> +where + B: Backend, + H: Hasher, + Number: BlockNumber, { changes .filter_map(|(k, v)| { @@ -148,68 +142,79 @@ fn prepare_extrinsics_input_inner<'a, B, H, Number>( None } }) - .try_fold(BTreeMap::new(), |mut map: BTreeMap<&[u8], (ExtrinsicIndex, Vec)>, (k, extrinsics)| { - match map.entry(k) { - Entry::Vacant(entry) => { - // ignore temporary values (values that have null value at the end of operation - // AND are not in storage at the beginning of operation - if let Some(child_info) = child_info.as_ref() { - if !overlay.child_storage(child_info, k).map(|v| v.is_some()).unwrap_or_default() { - if !backend.exists_child_storage(&child_info, k) - .map_err(|e| format!("{}", e))? 
{ - return Ok(map); + .try_fold( + BTreeMap::new(), + |mut map: BTreeMap<&[u8], (ExtrinsicIndex, Vec)>, (k, extrinsics)| { + match map.entry(k) { + Entry::Vacant(entry) => { + // ignore temporary values (values that have null value at the end of operation + // AND are not in storage at the beginning of operation + if let Some(child_info) = child_info.as_ref() { + if !overlay + .child_storage(child_info, k) + .map(|v| v.is_some()) + .unwrap_or_default() + { + if !backend + .exists_child_storage(&child_info, k) + .map_err(|e| format!("{}", e))? + { + return Ok(map) + } } - } - } else { - if !overlay.storage(k).map(|v| v.is_some()).unwrap_or_default() { - if !backend.exists_storage(k).map_err(|e| format!("{}", e))? { - return Ok(map); + } else { + if !overlay.storage(k).map(|v| v.is_some()).unwrap_or_default() { + if !backend.exists_storage(k).map_err(|e| format!("{}", e))? { + return Ok(map) + } } - } - }; - - let extrinsics = extrinsics.into_iter().collect(); - entry.insert((ExtrinsicIndex { - block: block.clone(), - key: k.to_vec(), - }, extrinsics)); - }, - Entry::Occupied(mut entry) => { - // we do not need to check for temporary values here, because entry is Occupied - // AND we are checking it before insertion - let entry_extrinsics = &mut entry.get_mut().1; - entry_extrinsics.extend( - extrinsics.into_iter() - ); - entry_extrinsics.sort(); - }, - } + }; - Ok(map) - }) + let extrinsics = extrinsics.into_iter().collect(); + entry.insert(( + ExtrinsicIndex { block: block.clone(), key: k.to_vec() }, + extrinsics, + )); + }, + Entry::Occupied(mut entry) => { + // we do not need to check for temporary values here, because entry is Occupied + // AND we are checking it before insertion + let entry_extrinsics = &mut entry.get_mut().1; + entry_extrinsics.extend(extrinsics.into_iter()); + entry_extrinsics.sort(); + }, + } + + Ok(map) + }, + ) .map(|pairs| pairs.into_iter().map(|(_, (k, v))| InputPair::ExtrinsicIndex(k, v))) } - /// Prepare DigestIndex input pairs. 
fn prepare_digest_input<'a, H, Number>( parent: &'a AnchorBlockId, config: ConfigurationRange, block: Number, storage: &'a dyn Storage, -) -> Result<( - impl Iterator> + 'a, - BTreeMap, impl Iterator> + 'a>, +) -> Result< + ( + impl Iterator> + 'a, + BTreeMap, impl Iterator> + 'a>, Vec, - ), String> - where - H: Hasher, - H::Out: 'a + Encode, - Number: BlockNumber, + ), + String, +> +where + H: Hasher, + H::Out: 'a + Encode, + Number: BlockNumber, { let build_skewed_digest = config.end.as_ref() == Some(&block); let block_for_digest = if build_skewed_digest { - config.config.next_max_level_digest_range(config.zero.clone(), block.clone()) + config + .config + .next_max_level_digest_range(config.zero.clone(), block.clone()) .map(|(_, end)| end) .unwrap_or_else(|| block.clone()) } else { @@ -217,128 +222,158 @@ fn prepare_digest_input<'a, H, Number>( }; let digest_input_blocks = digest_build_iterator(config, block_for_digest).collect::>(); - digest_input_blocks.clone().into_iter() + digest_input_blocks + .clone() + .into_iter() .try_fold( - (BTreeMap::new(), BTreeMap::new()), move |(mut map, mut child_map), digest_build_block| { - let extrinsic_prefix = ExtrinsicIndex::key_neutral_prefix(digest_build_block.clone()); - let digest_prefix = DigestIndex::key_neutral_prefix(digest_build_block.clone()); - let child_prefix = ChildIndex::key_neutral_prefix(digest_build_block.clone()); - let trie_root = storage.root(parent, digest_build_block.clone())?; - let trie_root = trie_root.ok_or_else(|| format!("No changes trie root for block {}", digest_build_block.clone()))?; - - let insert_to_map = |map: &mut BTreeMap<_,_>, key: StorageKey| { - match map.entry(key.clone()) { - Entry::Vacant(entry) => { - entry.insert((DigestIndex { - block: block.clone(), - key, - }, vec![digest_build_block.clone()])); - }, - Entry::Occupied(mut entry) => { - // DigestIndexValue must be sorted. 
Here we are relying on the fact that digest_build_iterator() - // returns blocks in ascending order => we only need to check for duplicates - // - // is_dup_block could be true when key has been changed in both digest block - // AND other blocks that it covers - let is_dup_block = entry.get().1.last() == Some(&digest_build_block); - if !is_dup_block { - entry.get_mut().1.push(digest_build_block.clone()); - } - }, - } - }; - - // try to get all updated keys from cache - let populated_from_cache = storage.with_cached_changed_keys( - &trie_root, - &mut |changed_keys| { - for (storage_key, changed_keys) in changed_keys { - let map = match storage_key { - Some(storage_key) => child_map - .entry(ChildIndex:: { - block: block.clone(), - storage_key: storage_key.clone(), - }) - .or_default(), - None => &mut map, - }; - for changed_key in changed_keys.iter().cloned() { - insert_to_map(map, changed_key); - } + (BTreeMap::new(), BTreeMap::new()), + move |(mut map, mut child_map), digest_build_block| { + let extrinsic_prefix = + ExtrinsicIndex::key_neutral_prefix(digest_build_block.clone()); + let digest_prefix = DigestIndex::key_neutral_prefix(digest_build_block.clone()); + let child_prefix = ChildIndex::key_neutral_prefix(digest_build_block.clone()); + let trie_root = storage.root(parent, digest_build_block.clone())?; + let trie_root = trie_root.ok_or_else(|| { + format!("No changes trie root for block {}", digest_build_block.clone()) + })?; + + let insert_to_map = |map: &mut BTreeMap<_, _>, key: StorageKey| { + match map.entry(key.clone()) { + Entry::Vacant(entry) => { + entry.insert(( + DigestIndex { block: block.clone(), key }, + vec![digest_build_block.clone()], + )); + }, + Entry::Occupied(mut entry) => { + // DigestIndexValue must be sorted. 
Here we are relying on the fact that digest_build_iterator() + // returns blocks in ascending order => we only need to check for duplicates + // + // is_dup_block could be true when key has been changed in both digest block + // AND other blocks that it covers + let is_dup_block = entry.get().1.last() == Some(&digest_build_block); + if !is_dup_block { + entry.get_mut().1.push(digest_build_block.clone()); + } + }, } + }; + + // try to get all updated keys from cache + let populated_from_cache = + storage.with_cached_changed_keys(&trie_root, &mut |changed_keys| { + for (storage_key, changed_keys) in changed_keys { + let map = match storage_key { + Some(storage_key) => child_map + .entry(ChildIndex:: { + block: block.clone(), + storage_key: storage_key.clone(), + }) + .or_default(), + None => &mut map, + }; + for changed_key in changed_keys.iter().cloned() { + insert_to_map(map, changed_key); + } + } + }); + if populated_from_cache { + return Ok((map, child_map)) } - ); - if populated_from_cache { - return Ok((map, child_map)); - } - let mut children_roots = BTreeMap::::new(); - { - let trie_storage = TrieBackendEssence::<_, H>::new( - crate::changes_trie::TrieBackendStorageAdapter(storage), - trie_root, - ); - - trie_storage.for_key_values_with_prefix(&child_prefix, |mut key, mut value| - if let Ok(InputKey::ChildIndex::(trie_key)) = Decode::decode(&mut key) { - if let Ok(value) = >::decode(&mut value) { - let mut trie_root = ::Out::default(); - trie_root.as_mut().copy_from_slice(&value[..]); - children_roots.insert(trie_key.storage_key, trie_root); + let mut children_roots = BTreeMap::::new(); + { + let trie_storage = TrieBackendEssence::<_, H>::new( + crate::changes_trie::TrieBackendStorageAdapter(storage), + trie_root, + ); + + trie_storage.for_key_values_with_prefix(&child_prefix, |mut key, mut value| { + if let Ok(InputKey::ChildIndex::(trie_key)) = + Decode::decode(&mut key) + { + if let Ok(value) = >::decode(&mut value) { + let mut trie_root = 
::Out::default(); + trie_root.as_mut().copy_from_slice(&value[..]); + children_roots.insert(trie_key.storage_key, trie_root); + } } }); - trie_storage.for_keys_with_prefix(&extrinsic_prefix, |mut key| - if let Ok(InputKey::ExtrinsicIndex::(trie_key)) = Decode::decode(&mut key) { - insert_to_map(&mut map, trie_key.key); + trie_storage.for_keys_with_prefix(&extrinsic_prefix, |mut key| { + if let Ok(InputKey::ExtrinsicIndex::(trie_key)) = + Decode::decode(&mut key) + { + insert_to_map(&mut map, trie_key.key); + } }); - trie_storage.for_keys_with_prefix(&digest_prefix, |mut key| - if let Ok(InputKey::DigestIndex::(trie_key)) = Decode::decode(&mut key) { - insert_to_map(&mut map, trie_key.key); + trie_storage.for_keys_with_prefix(&digest_prefix, |mut key| { + if let Ok(InputKey::DigestIndex::(trie_key)) = + Decode::decode(&mut key) + { + insert_to_map(&mut map, trie_key.key); + } }); - } + } - for (storage_key, trie_root) in children_roots.into_iter() { - let child_index = ChildIndex:: { - block: block.clone(), - storage_key, - }; + for (storage_key, trie_root) in children_roots.into_iter() { + let child_index = ChildIndex:: { block: block.clone(), storage_key }; - let mut map = child_map.entry(child_index).or_default(); - let trie_storage = TrieBackendEssence::<_, H>::new( - crate::changes_trie::TrieBackendStorageAdapter(storage), - trie_root, - ); - trie_storage.for_keys_with_prefix(&extrinsic_prefix, |mut key| - if let Ok(InputKey::ExtrinsicIndex::(trie_key)) = Decode::decode(&mut key) { - insert_to_map(&mut map, trie_key.key); + let mut map = child_map.entry(child_index).or_default(); + let trie_storage = TrieBackendEssence::<_, H>::new( + crate::changes_trie::TrieBackendStorageAdapter(storage), + trie_root, + ); + trie_storage.for_keys_with_prefix(&extrinsic_prefix, |mut key| { + if let Ok(InputKey::ExtrinsicIndex::(trie_key)) = + Decode::decode(&mut key) + { + insert_to_map(&mut map, trie_key.key); + } }); - trie_storage.for_keys_with_prefix(&digest_prefix, |mut 
key| - if let Ok(InputKey::DigestIndex::(trie_key)) = Decode::decode(&mut key) { - insert_to_map(&mut map, trie_key.key); + trie_storage.for_keys_with_prefix(&digest_prefix, |mut key| { + if let Ok(InputKey::DigestIndex::(trie_key)) = + Decode::decode(&mut key) + { + insert_to_map(&mut map, trie_key.key); + } }); - } - Ok((map, child_map)) + } + Ok((map, child_map)) + }, + ) + .map(|(pairs, child_pairs)| { + ( + pairs.into_iter().map(|(_, (k, v))| InputPair::DigestIndex(k, v)), + child_pairs + .into_iter() + .map(|(sk, pairs)| { + (sk, pairs.into_iter().map(|(_, (k, v))| InputPair::DigestIndex(k, v))) + }) + .collect(), + digest_input_blocks, + ) }) - .map(|(pairs, child_pairs)| ( - pairs.into_iter().map(|(_, (k, v))| InputPair::DigestIndex(k, v)), - child_pairs.into_iter().map(|(sk, pairs)| - (sk, pairs.into_iter().map(|(_, (k, v))| InputPair::DigestIndex(k, v)))).collect(), - digest_input_blocks, - )) } #[cfg(test)] mod test { - use sp_core::Blake2Hasher; - use crate::InMemoryBackend; - use crate::changes_trie::{RootsStorage, Configuration, storage::InMemoryStorage}; - use crate::changes_trie::build_cache::{IncompleteCacheAction, IncompleteCachedBuildData}; use super::*; + use crate::{ + changes_trie::{ + build_cache::{IncompleteCacheAction, IncompleteCachedBuildData}, + storage::InMemoryStorage, + Configuration, RootsStorage, + }, + InMemoryBackend, + }; + use sp_core::Blake2Hasher; - fn prepare_for_build(zero: u64) -> ( + fn prepare_for_build( + zero: u64, + ) -> ( InMemoryBackend, InMemoryStorage, OverlayedChanges, @@ -353,57 +388,150 @@ mod test { (vec![103], vec![255]), (vec![104], vec![255]), (vec![105], vec![255]), - ].into_iter().collect::>().into(); + ] + .into_iter() + .collect::>() + .into(); let prefixed_child_trie_key1 = child_info_1.prefixed_storage_key(); - let storage = InMemoryStorage::with_inputs(vec![ - (zero + 1, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![100] }, vec![1, 3]), - 
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![101] }, vec![0, 2]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![105] }, vec![0, 2, 4]), - ]), - (zero + 2, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 2, key: vec![102] }, vec![0]), - ]), - (zero + 3, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 3, key: vec![100] }, vec![0]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 3, key: vec![105] }, vec![1]), - ]), - (zero + 4, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2, 3]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![101] }, vec![1]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![103] }, vec![0, 1]), - - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![100] }, vec![zero + 1, zero + 3]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![101] }, vec![zero + 1]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1, zero + 3]), - ]), - (zero + 5, Vec::new()), - (zero + 6, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 6, key: vec![105] }, vec![2]), - ]), - (zero + 7, Vec::new()), - (zero + 8, vec![ - InputPair::DigestIndex(DigestIndex { block: zero + 8, key: vec![105] }, vec![zero + 6]), - ]), - (zero + 9, Vec::new()), (zero + 10, Vec::new()), (zero + 11, Vec::new()), (zero + 12, Vec::new()), - (zero + 13, Vec::new()), (zero + 14, Vec::new()), (zero + 15, Vec::new()), - ], vec![(prefixed_child_trie_key1.clone(), vec![ - (zero + 1, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![100] }, vec![1, 3]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![101] }, vec![0, 2]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![105] }, 
vec![0, 2, 4]), - ]), - (zero + 2, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 2, key: vec![102] }, vec![0]), - ]), - (zero + 4, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 2, key: vec![102] }, vec![0, 3]), - - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]), - ]), - ]), - ]); + let storage = InMemoryStorage::with_inputs( + vec![ + ( + zero + 1, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 1, key: vec![100] }, + vec![1, 3], + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 1, key: vec![101] }, + vec![0, 2], + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 1, key: vec![105] }, + vec![0, 2, 4], + ), + ], + ), + ( + zero + 2, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 2, key: vec![102] }, + vec![0], + )], + ), + ( + zero + 3, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 3, key: vec![100] }, + vec![0], + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 3, key: vec![105] }, + vec![1], + ), + ], + ), + ( + zero + 4, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![100] }, + vec![0, 2, 3], + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![101] }, + vec![1], + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![103] }, + vec![0, 1], + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![100] }, + vec![zero + 1, zero + 3], + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![101] }, + vec![zero + 1], + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![102] }, + vec![zero + 2], + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![105] }, + vec![zero + 1, zero + 3], + ), + ], + ), + (zero + 5, Vec::new()), + ( + zero + 6, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 6, key: 
vec![105] }, + vec![2], + )], + ), + (zero + 7, Vec::new()), + ( + zero + 8, + vec![InputPair::DigestIndex( + DigestIndex { block: zero + 8, key: vec![105] }, + vec![zero + 6], + )], + ), + (zero + 9, Vec::new()), + (zero + 10, Vec::new()), + (zero + 11, Vec::new()), + (zero + 12, Vec::new()), + (zero + 13, Vec::new()), + (zero + 14, Vec::new()), + (zero + 15, Vec::new()), + ], + vec![( + prefixed_child_trie_key1.clone(), + vec![ + ( + zero + 1, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 1, key: vec![100] }, + vec![1, 3], + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 1, key: vec![101] }, + vec![0, 2], + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 1, key: vec![105] }, + vec![0, 2, 4], + ), + ], + ), + ( + zero + 2, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 2, key: vec![102] }, + vec![0], + )], + ), + ( + zero + 4, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 2, key: vec![102] }, + vec![0, 3], + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![102] }, + vec![zero + 2], + ), + ], + ), + ], + )], + ); let mut changes = OverlayedChanges::default(); changes.set_collect_extrinsics(true); @@ -446,12 +574,11 @@ mod test { (backend, storage, changes, config) } - fn configuration_range<'a>(config: &'a Configuration, zero: u64) -> ConfigurationRange<'a, u64> { - ConfigurationRange { - config, - zero, - end: None, - } + fn configuration_range<'a>( + config: &'a Configuration, + zero: u64, + ) -> ConfigurationRange<'a, u64> { + ConfigurationRange { config, zero, end: None } } #[test] @@ -467,24 +594,48 @@ mod test { configuration_range(&config, zero), &changes, &parent, - ).unwrap(); - assert_eq!(changes_trie_nodes.0.collect::>>(), vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5, key: vec![100] }, vec![0, 2, 3]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5, key: vec![101] }, vec![1]), - 
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5, key: vec![103] }, vec![0, 1]), - ]); - assert_eq!(changes_trie_nodes.1.into_iter() - .map(|(k,v)| (k, v.collect::>())).collect::>(), vec![ - (ChildIndex { block: zero + 5u64, storage_key: child_trie_key1 }, - vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5u64, key: vec![100] }, vec![0, 2, 3]), - ]), - (ChildIndex { block: zero + 5, storage_key: child_trie_key2 }, - vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5, key: vec![100] }, vec![0, 2]), - ]), - ]); - + ) + .unwrap(); + assert_eq!( + changes_trie_nodes.0.collect::>>(), + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 5, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 5, key: vec![101] }, + vec![1] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 5, key: vec![103] }, + vec![0, 1] + ), + ] + ); + assert_eq!( + changes_trie_nodes + .1 + .into_iter() + .map(|(k, v)| (k, v.collect::>())) + .collect::>(), + vec![ + ( + ChildIndex { block: zero + 5u64, storage_key: child_trie_key1 }, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 5u64, key: vec![100] }, + vec![0, 2, 3] + ),] + ), + ( + ChildIndex { block: zero + 5, storage_key: child_trie_key2 }, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 5, key: vec![100] }, + vec![0, 2] + ),] + ), + ] + ); } test_with_zero(0); @@ -505,33 +656,82 @@ mod test { configuration_range(&config, zero), &changes, &parent, - ).unwrap(); - assert_eq!(changes_trie_nodes.0.collect::>>(), vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2, 3]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![101] }, vec![1]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![103] }, vec![0, 1]), - - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![100] }, vec![zero + 1, zero + 
3]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![101] }, vec![zero + 1]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1, zero + 3]), - ]); - assert_eq!(changes_trie_nodes.1.into_iter() - .map(|(k,v)| (k, v.collect::>())).collect::>(), vec![ - (ChildIndex { block: zero + 4u64, storage_key: child_trie_key1.clone() }, - vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4u64, key: vec![100] }, vec![0, 2, 3]), - - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![100] }, vec![zero + 1]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![101] }, vec![zero + 1]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1]), - ]), - (ChildIndex { block: zero + 4, storage_key: child_trie_key2.clone() }, - vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2]), - ]), - ]); + ) + .unwrap(); + assert_eq!( + changes_trie_nodes.0.collect::>>(), + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![101] }, + vec![1] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![103] }, + vec![0, 1] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![100] }, + vec![zero + 1, zero + 3] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![101] }, + vec![zero + 1] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![102] }, + vec![zero + 2] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![105] }, + vec![zero + 1, zero + 3] + ), + ] + ); + assert_eq!( + changes_trie_nodes + .1 + .into_iter() + 
.map(|(k, v)| (k, v.collect::>())) + .collect::>(), + vec![ + ( + ChildIndex { block: zero + 4u64, storage_key: child_trie_key1.clone() }, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4u64, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![100] }, + vec![zero + 1] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![101] }, + vec![zero + 1] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![102] }, + vec![zero + 2] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![105] }, + vec![zero + 1] + ), + ] + ), + ( + ChildIndex { block: zero + 4, storage_key: child_trie_key2.clone() }, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![100] }, + vec![0, 2] + ),] + ), + ] + ); } test_with_zero(0); @@ -552,31 +752,74 @@ mod test { configuration_range(&config, zero), &changes, &parent, - ).unwrap(); - assert_eq!(changes_trie_nodes.0.collect::>>(), vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16, key: vec![100] }, vec![0, 2, 3]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16, key: vec![101] }, vec![1]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16, key: vec![103] }, vec![0, 1]), - - InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![100] }, vec![zero + 4]), - InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![101] }, vec![zero + 4]), - InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![102] }, vec![zero + 4]), - InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![103] }, vec![zero + 4]), - InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![105] }, vec![zero + 4, zero + 8]), - ]); - assert_eq!(changes_trie_nodes.1.into_iter() - .map(|(k,v)| (k, v.collect::>())).collect::>(), vec![ - (ChildIndex { block: zero + 16u64, storage_key: child_trie_key1.clone() }, - vec![ - 
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16u64, key: vec![100] }, vec![0, 2, 3]), - - InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![102] }, vec![zero + 4]), - ]), - (ChildIndex { block: zero + 16, storage_key: child_trie_key2.clone() }, - vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16, key: vec![100] }, vec![0, 2]), - ]), - ]); + ) + .unwrap(); + assert_eq!( + changes_trie_nodes.0.collect::>>(), + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 16, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 16, key: vec![101] }, + vec![1] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 16, key: vec![103] }, + vec![0, 1] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 16, key: vec![100] }, + vec![zero + 4] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 16, key: vec![101] }, + vec![zero + 4] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 16, key: vec![102] }, + vec![zero + 4] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 16, key: vec![103] }, + vec![zero + 4] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 16, key: vec![105] }, + vec![zero + 4, zero + 8] + ), + ] + ); + assert_eq!( + changes_trie_nodes + .1 + .into_iter() + .map(|(k, v)| (k, v.collect::>())) + .collect::>(), + vec![ + ( + ChildIndex { block: zero + 16u64, storage_key: child_trie_key1.clone() }, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 16u64, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 16, key: vec![102] }, + vec![zero + 4] + ), + ] + ), + ( + ChildIndex { block: zero + 16, storage_key: child_trie_key2.clone() }, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 16, key: vec![100] }, + vec![0, 2] + ),] + ), + ] + ); } test_with_zero(0); @@ -591,38 +834,67 @@ mod test { let parent = AnchorBlockId { hash: 
Default::default(), number: zero + 10 }; let mut configuration_range = configuration_range(&config, zero); - let changes_trie_nodes = prepare_input( - &backend, - &storage, - configuration_range.clone(), - &changes, - &parent, - ).unwrap(); - assert_eq!(changes_trie_nodes.0.collect::>>(), vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![100] }, vec![0, 2, 3]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![101] }, vec![1]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![103] }, vec![0, 1]), - ]); + let changes_trie_nodes = + prepare_input(&backend, &storage, configuration_range.clone(), &changes, &parent) + .unwrap(); + assert_eq!( + changes_trie_nodes.0.collect::>>(), + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 11, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 11, key: vec![101] }, + vec![1] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 11, key: vec![103] }, + vec![0, 1] + ), + ] + ); configuration_range.end = Some(zero + 11); - let changes_trie_nodes = prepare_input( - &backend, - &storage, - configuration_range, - &changes, - &parent, - ).unwrap(); - assert_eq!(changes_trie_nodes.0.collect::>>(), vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![100] }, vec![0, 2, 3]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![101] }, vec![1]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![103] }, vec![0, 1]), - - InputPair::DigestIndex(DigestIndex { block: zero + 11, key: vec![100] }, vec![zero + 4]), - InputPair::DigestIndex(DigestIndex { block: zero + 11, key: vec![101] }, vec![zero + 4]), - InputPair::DigestIndex(DigestIndex { block: zero + 11, key: vec![102] }, vec![zero + 4]), - InputPair::DigestIndex(DigestIndex { block: zero + 11, key: vec![103] }, vec![zero + 4]), - InputPair::DigestIndex(DigestIndex { 
block: zero + 11, key: vec![105] }, vec![zero + 4, zero + 8]), - ]); + let changes_trie_nodes = + prepare_input(&backend, &storage, configuration_range, &changes, &parent).unwrap(); + assert_eq!( + changes_trie_nodes.0.collect::>>(), + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 11, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 11, key: vec![101] }, + vec![1] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 11, key: vec![103] }, + vec![0, 1] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 11, key: vec![100] }, + vec![zero + 4] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 11, key: vec![101] }, + vec![zero + 4] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 11, key: vec![102] }, + vec![zero + 4] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 11, key: vec![103] }, + vec![zero + 4] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 11, key: vec![105] }, + vec![zero + 4, zero + 8] + ), + ] + ); } test_with_zero(0); @@ -647,34 +919,82 @@ mod test { configuration_range(&config, zero), &changes, &parent, - ).unwrap(); - assert_eq!(changes_trie_nodes.0.collect::>>(), vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2, 3]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![101] }, vec![1]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![103] }, vec![0, 1]), - - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![100] }, vec![zero + 1, zero + 3]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![101] }, vec![zero + 1]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1, zero + 3]), - ]); - assert_eq!(changes_trie_nodes.1.into_iter() - .map(|(k,v)| (k, 
v.collect::>())).collect::>(), vec![ - (ChildIndex { block: zero + 4u64, storage_key: child_trie_key1.clone() }, - vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4u64, key: vec![100] }, vec![0, 2, 3]), - - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![100] }, vec![zero + 1]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![101] }, vec![zero + 1]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]), - InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1]), - ]), - (ChildIndex { block: zero + 4, storage_key: child_trie_key2.clone() }, - vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2]), - ]), - ]); - + ) + .unwrap(); + assert_eq!( + changes_trie_nodes.0.collect::>>(), + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![101] }, + vec![1] + ), + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![103] }, + vec![0, 1] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![100] }, + vec![zero + 1, zero + 3] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![101] }, + vec![zero + 1] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![102] }, + vec![zero + 2] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![105] }, + vec![zero + 1, zero + 3] + ), + ] + ); + assert_eq!( + changes_trie_nodes + .1 + .into_iter() + .map(|(k, v)| (k, v.collect::>())) + .collect::>(), + vec![ + ( + ChildIndex { block: zero + 4u64, storage_key: child_trie_key1.clone() }, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4u64, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![100] }, + vec![zero + 1] + ), + 
InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![101] }, + vec![zero + 1] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![102] }, + vec![zero + 2] + ), + InputPair::DigestIndex( + DigestIndex { block: zero + 4, key: vec![105] }, + vec![zero + 1] + ), + ] + ), + ( + ChildIndex { block: zero + 4, storage_key: child_trie_key2.clone() }, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: zero + 4, key: vec![100] }, + vec![0, 2] + ),] + ), + ] + ); } test_with_zero(0); @@ -710,44 +1030,50 @@ mod test { .complete(4, &trie_root4); storage.cache_mut().perform(cached_data4); - let (root_changes_trie_nodes, child_changes_tries_nodes, _) = prepare_input( - &backend, - &storage, - configuration_range(&config, 0), - &changes, - &parent, - ).unwrap(); - assert_eq!(root_changes_trie_nodes.collect::>>(), vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![100] }, vec![0, 2, 3]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![101] }, vec![1]), - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![103] }, vec![0, 1]), - - InputPair::DigestIndex(DigestIndex { block: 16, key: vec![100] }, vec![4]), - InputPair::DigestIndex(DigestIndex { block: 16, key: vec![102] }, vec![4]), - InputPair::DigestIndex(DigestIndex { block: 16, key: vec![105] }, vec![8]), - ]); + let (root_changes_trie_nodes, child_changes_tries_nodes, _) = + prepare_input(&backend, &storage, configuration_range(&config, 0), &changes, &parent) + .unwrap(); + assert_eq!( + root_changes_trie_nodes.collect::>>(), + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: 16, key: vec![100] }, + vec![0, 2, 3] + ), + InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![101] }, vec![1]), + InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![103] }, vec![0, 1]), + InputPair::DigestIndex(DigestIndex { block: 16, key: vec![100] }, vec![4]), + InputPair::DigestIndex(DigestIndex { block: 16, key: 
vec![102] }, vec![4]), + InputPair::DigestIndex(DigestIndex { block: 16, key: vec![105] }, vec![8]), + ] + ); let child_changes_tries_nodes = child_changes_tries_nodes .into_iter() .map(|(k, i)| (k, i.collect::>())) .collect::>(); assert_eq!( - child_changes_tries_nodes.get(&ChildIndex { - block: 16u64, - storage_key: child_trie_key1.clone(), - }).unwrap(), + child_changes_tries_nodes + .get(&ChildIndex { block: 16u64, storage_key: child_trie_key1.clone() }) + .unwrap(), &vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16u64, key: vec![100] }, vec![0, 2, 3]), - + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: 16u64, key: vec![100] }, + vec![0, 2, 3] + ), InputPair::DigestIndex(DigestIndex { block: 16u64, key: vec![103] }, vec![4]), InputPair::DigestIndex(DigestIndex { block: 16u64, key: vec![104] }, vec![4]), ], ); assert_eq!( - child_changes_tries_nodes.get(&ChildIndex { block: 16u64, storage_key: child_trie_key2.clone() }).unwrap(), + child_changes_tries_nodes + .get(&ChildIndex { block: 16u64, storage_key: child_trie_key2.clone() }) + .unwrap(), &vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16u64, key: vec![100] }, vec![0, 2]), - + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: 16u64, key: vec![100] }, + vec![0, 2] + ), InputPair::DigestIndex(DigestIndex { block: 16u64, key: vec![105] }, vec![4]), InputPair::DigestIndex(DigestIndex { block: 16u64, key: vec![106] }, vec![4]), ], diff --git a/primitives/state-machine/src/changes_trie/build_cache.rs b/primitives/state-machine/src/changes_trie/build_cache.rs index 9b2190ae1951f..67098d4d72040 100644 --- a/primitives/state-machine/src/changes_trie/build_cache.rs +++ b/primitives/state-machine/src/changes_trie/build_cache.rs @@ -78,20 +78,20 @@ pub(crate) struct IncompleteCachedBuildData { } impl BuildCache - where - N: Eq + ::std::hash::Hash, - H: Eq + ::std::hash::Hash + Clone, +where + N: Eq + ::std::hash::Hash, + H: Eq + ::std::hash::Hash + Clone, { /// Create new changes trie 
build cache. pub fn new() -> Self { - BuildCache { - roots_by_number: HashMap::new(), - changed_keys: HashMap::new(), - } + BuildCache { roots_by_number: HashMap::new(), changed_keys: HashMap::new() } } /// Get cached changed keys for changes trie with given root. - pub fn get(&self, root: &H) -> Option<&HashMap, HashSet>> { + pub fn get( + &self, + root: &H, + ) -> Option<&HashMap, HashSet>> { self.changed_keys.get(&root) } @@ -158,7 +158,9 @@ impl IncompleteCacheAction { pub(crate) fn set_digest_input_blocks(self, digest_input_blocks: Vec) -> Self { match self { IncompleteCacheAction::CacheBuildData(build_data) => - IncompleteCacheAction::CacheBuildData(build_data.set_digest_input_blocks(digest_input_blocks)), + IncompleteCacheAction::CacheBuildData( + build_data.set_digest_input_blocks(digest_input_blocks), + ), IncompleteCacheAction::Clear => IncompleteCacheAction::Clear, } } @@ -180,10 +182,7 @@ impl IncompleteCacheAction { impl IncompleteCachedBuildData { /// Create new cached data. 
pub(crate) fn new() -> Self { - IncompleteCachedBuildData { - digest_input_blocks: Vec::new(), - changed_keys: HashMap::new(), - } + IncompleteCachedBuildData { digest_input_blocks: Vec::new(), changed_keys: HashMap::new() } } fn complete(self, block: N, trie_root: H) -> CachedBuildData { @@ -232,30 +231,42 @@ mod tests { #[test] fn obsolete_entries_are_purged_when_new_ct_is_built() { let mut cache = BuildCache::::new(); - cache.perform(CacheAction::CacheBuildData(IncompleteCachedBuildData::new() - .insert(None, vec![vec![1]].into_iter().collect()) - .complete(1, 1))); - cache.perform(CacheAction::CacheBuildData(IncompleteCachedBuildData::new() - .insert(None, vec![vec![2]].into_iter().collect()) - .complete(2, 2))); - cache.perform(CacheAction::CacheBuildData(IncompleteCachedBuildData::new() - .insert(None, vec![vec![3]].into_iter().collect()) - .complete(3, 3))); + cache.perform(CacheAction::CacheBuildData( + IncompleteCachedBuildData::new() + .insert(None, vec![vec![1]].into_iter().collect()) + .complete(1, 1), + )); + cache.perform(CacheAction::CacheBuildData( + IncompleteCachedBuildData::new() + .insert(None, vec![vec![2]].into_iter().collect()) + .complete(2, 2), + )); + cache.perform(CacheAction::CacheBuildData( + IncompleteCachedBuildData::new() + .insert(None, vec![vec![3]].into_iter().collect()) + .complete(3, 3), + )); assert_eq!(cache.changed_keys.len(), 3); - cache.perform(CacheAction::CacheBuildData(IncompleteCachedBuildData::new() - .set_digest_input_blocks(vec![1, 2, 3]) - .complete(4, 4))); + cache.perform(CacheAction::CacheBuildData( + IncompleteCachedBuildData::new() + .set_digest_input_blocks(vec![1, 2, 3]) + .complete(4, 4), + )); assert_eq!(cache.changed_keys.len(), 1); - cache.perform(CacheAction::CacheBuildData(IncompleteCachedBuildData::new() - .insert(None, vec![vec![8]].into_iter().collect()) - .complete(8, 8))); - cache.perform(CacheAction::CacheBuildData(IncompleteCachedBuildData::new() - .insert(None, 
vec![vec![12]].into_iter().collect()) - .complete(12, 12))); + cache.perform(CacheAction::CacheBuildData( + IncompleteCachedBuildData::new() + .insert(None, vec![vec![8]].into_iter().collect()) + .complete(8, 8), + )); + cache.perform(CacheAction::CacheBuildData( + IncompleteCachedBuildData::new() + .insert(None, vec![vec![12]].into_iter().collect()) + .complete(12, 12), + )); assert_eq!(cache.changed_keys.len(), 3); diff --git a/primitives/state-machine/src/changes_trie/build_iterator.rs b/primitives/state-machine/src/changes_trie/build_iterator.rs index 43089d819b66d..d4adc99d109fc 100644 --- a/primitives/state-machine/src/changes_trie/build_iterator.rs +++ b/primitives/state-machine/src/changes_trie/build_iterator.rs @@ -18,8 +18,8 @@ //! Structures and functions to return blocks whose changes are to be included //! in given block's changes trie. +use crate::changes_trie::{BlockNumber, ConfigurationRange}; use num_traits::Zero; -use crate::changes_trie::{ConfigurationRange, BlockNumber}; /// Returns iterator of OTHER blocks that are required for inclusion into /// changes trie of given block. 
Blocks are guaranteed to be returned in @@ -31,13 +31,19 @@ pub fn digest_build_iterator<'a, Number: BlockNumber>( block: Number, ) -> DigestBuildIterator { // prepare digest build parameters - let (_, _, digest_step) = match config.config.digest_level_at_block(config.zero, block.clone()) { + let (_, _, digest_step) = match config.config.digest_level_at_block(config.zero, block.clone()) + { Some((current_level, digest_interval, digest_step)) => (current_level, digest_interval, digest_step), None => return DigestBuildIterator::empty(), }; - DigestBuildIterator::new(block.clone(), config.end.unwrap_or(block), config.config.digest_interval, digest_step) + DigestBuildIterator::new( + block.clone(), + config.end.unwrap_or(block), + config.config.digest_interval, + digest_step, + ) } /// Changes trie build iterator that returns numbers of OTHER blocks that are @@ -56,7 +62,6 @@ pub struct DigestBuildIterator { max_step: u32, // Mutable data below: - /// Step of current blocks range. current_step: u32, /// Reverse step of current blocks range. 
@@ -98,7 +103,7 @@ impl Iterator for DigestBuildIterator { if let Some(next) = self.current_range.as_mut().and_then(|iter| iter.next()) { if next < self.end { self.last_block = Some(next.clone()); - return Some(next); + return Some(next) } } @@ -112,14 +117,16 @@ impl Iterator for DigestBuildIterator { self.current_step_reverse * self.digest_interval }; if next_step_reverse > self.max_step { - return None; + return None } self.current_step_reverse = next_step_reverse; self.current_range = Some(BlocksRange::new( match self.last_block.clone() { Some(last_block) => last_block + self.current_step.into(), - None => self.block.clone() - (self.current_step * self.digest_interval - self.current_step).into(), + None => + self.block.clone() - + (self.current_step * self.digest_interval - self.current_step).into(), }, self.block.clone(), self.current_step.into(), @@ -143,11 +150,7 @@ struct BlocksRange { impl BlocksRange { pub fn new(begin: Number, end: Number, step: Number) -> Self { - BlocksRange { - current: begin, - end, - step, - } + BlocksRange { current: begin, end, step } } } @@ -156,7 +159,7 @@ impl Iterator for BlocksRange { fn next(&mut self) -> Option { if self.current >= self.end { - return None; + return None } let current = Some(self.current.clone()); @@ -167,8 +170,8 @@ impl Iterator for BlocksRange { #[cfg(test)] mod tests { - use crate::changes_trie::Configuration; use super::*; + use crate::changes_trie::Configuration; fn digest_build_iterator( digest_interval: u32, @@ -179,10 +182,7 @@ mod tests { ) -> DigestBuildIterator { super::digest_build_iterator( ConfigurationRange { - config: &Configuration { - digest_interval, - digest_levels, - }, + config: &Configuration { digest_interval, digest_levels }, zero, end, }, @@ -215,9 +215,21 @@ mod tests { fn test_with_zero(zero: u64) { let empty = (0, 0, 0); assert_eq!(digest_build_iterator_basic(4, 16, zero, zero + 0), empty, "block is 0"); - assert_eq!(digest_build_iterator_basic(0, 16, zero, zero + 64), empty, 
"digest_interval is 0"); - assert_eq!(digest_build_iterator_basic(1, 16, zero, zero + 64), empty, "digest_interval is 1"); - assert_eq!(digest_build_iterator_basic(4, 0, zero, zero + 64), empty, "digest_levels is 0"); + assert_eq!( + digest_build_iterator_basic(0, 16, zero, zero + 64), + empty, + "digest_interval is 0" + ); + assert_eq!( + digest_build_iterator_basic(1, 16, zero, zero + 64), + empty, + "digest_interval is 1" + ); + assert_eq!( + digest_build_iterator_basic(4, 0, zero, zero + 64), + empty, + "digest_levels is 0" + ); assert_eq!( digest_build_iterator_basic(4, 16, zero, zero + 1), empty, @@ -238,12 +250,11 @@ mod tests { empty, "digest is not required for this block", ); - assert_eq!(digest_build_iterator_basic( - ::std::u32::MAX / 2 + 1, - 16, - zero, - ::std::u64::MAX, - ), empty, "digest_interval * 2 is greater than u64::MAX"); + assert_eq!( + digest_build_iterator_basic(::std::u32::MAX / 2 + 1, 16, zero, ::std::u64::MAX,), + empty, + "digest_interval * 2 is greater than u64::MAX" + ); } test_with_zero(0); @@ -326,18 +337,37 @@ mod tests { #[test] fn digest_iterator_returns_level1_blocks() { fn test_with_zero(zero: u64) { - assert_eq!(digest_build_iterator_blocks(16, 1, zero, zero + 16, None), + assert_eq!( + digest_build_iterator_blocks(16, 1, zero, zero + 16, None), [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] - .iter().map(|item| zero + item).collect::>()); - assert_eq!(digest_build_iterator_blocks(16, 1, zero, zero + 256, None), + .iter() + .map(|item| zero + item) + .collect::>() + ); + assert_eq!( + digest_build_iterator_blocks(16, 1, zero, zero + 256, None), [241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255] - .iter().map(|item| zero + item).collect::>()); - assert_eq!(digest_build_iterator_blocks(16, 2, zero, zero + 32, None), + .iter() + .map(|item| zero + item) + .collect::>() + ); + assert_eq!( + digest_build_iterator_blocks(16, 2, zero, zero + 32, None), [17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 
29, 30, 31] - .iter().map(|item| zero + item).collect::>()); - assert_eq!(digest_build_iterator_blocks(16, 3, zero, zero + 4080, None), - [4065, 4066, 4067, 4068, 4069, 4070, 4071, 4072, 4073, 4074, 4075, 4076, 4077, 4078, 4079] - .iter().map(|item| zero + item).collect::>()); + .iter() + .map(|item| zero + item) + .collect::>() + ); + assert_eq!( + digest_build_iterator_blocks(16, 3, zero, zero + 4080, None), + [ + 4065, 4066, 4067, 4068, 4069, 4070, 4071, 4072, 4073, 4074, 4075, 4076, 4077, + 4078, 4079 + ] + .iter() + .map(|item| zero + item) + .collect::>() + ); } test_with_zero(0); @@ -348,21 +378,30 @@ mod tests { #[test] fn digest_iterator_returns_level1_and_level2_blocks() { fn test_with_zero(zero: u64) { - assert_eq!(digest_build_iterator_blocks(16, 2, zero, zero + 256, None), + assert_eq!( + digest_build_iterator_blocks(16, 2, zero, zero + 256, None), [ // level2 points to previous 16-1 level1 digests: 16, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240, // level2 is a level1 digest of 16-1 previous blocks: 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, - ].iter().map(|item| zero + item).collect::>(), + ] + .iter() + .map(|item| zero + item) + .collect::>(), ); - assert_eq!(digest_build_iterator_blocks(16, 2, zero, zero + 4096, None), + assert_eq!( + digest_build_iterator_blocks(16, 2, zero, zero + 4096, None), [ // level2 points to previous 16-1 level1 digests: - 3856, 3872, 3888, 3904, 3920, 3936, 3952, 3968, 3984, 4000, 4016, 4032, 4048, 4064, 4080, - // level2 is a level1 digest of 16-1 previous blocks: - 4081, 4082, 4083, 4084, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, 4094, 4095, - ].iter().map(|item| zero + item).collect::>(), + 3856, 3872, 3888, 3904, 3920, 3936, 3952, 3968, 3984, 4000, 4016, 4032, 4048, + 4064, 4080, // level2 is a level1 digest of 16-1 previous blocks: + 4081, 4082, 4083, 4084, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, + 4094, 4095, + ] + .iter() + .map(|item| 
zero + item) + .collect::>(), ); } @@ -374,15 +413,20 @@ mod tests { #[test] fn digest_iterator_returns_level1_and_level2_and_level3_blocks() { fn test_with_zero(zero: u64) { - assert_eq!(digest_build_iterator_blocks(16, 3, zero, zero + 4096, None), + assert_eq!( + digest_build_iterator_blocks(16, 3, zero, zero + 4096, None), [ // level3 points to previous 16-1 level2 digests: - 256, 512, 768, 1024, 1280, 1536, 1792, 2048, 2304, 2560, 2816, 3072, 3328, 3584, 3840, - // level3 points to previous 16-1 level1 digests: - 3856, 3872, 3888, 3904, 3920, 3936, 3952, 3968, 3984, 4000, 4016, 4032, 4048, 4064, 4080, - // level3 is a level1 digest of 16-1 previous blocks: - 4081, 4082, 4083, 4084, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, 4094, 4095, - ].iter().map(|item| zero + item).collect::>(), + 256, 512, 768, 1024, 1280, 1536, 1792, 2048, 2304, 2560, 2816, 3072, 3328, 3584, + 3840, // level3 points to previous 16-1 level1 digests: + 3856, 3872, 3888, 3904, 3920, 3936, 3952, 3968, 3984, 4000, 4016, 4032, 4048, + 4064, 4080, // level3 is a level1 digest of 16-1 previous blocks: + 4081, 4082, 4083, 4084, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, + 4094, 4095, + ] + .iter() + .map(|item| zero + item) + .collect::>(), ); } @@ -394,7 +438,8 @@ mod tests { #[test] fn digest_iterator_returns_skewed_digest_blocks() { fn test_with_zero(zero: u64) { - assert_eq!(digest_build_iterator_blocks(16, 3, zero, zero + 4096, Some(zero + 1338)), + assert_eq!( + digest_build_iterator_blocks(16, 3, zero, zero + 4096, Some(zero + 1338)), [ // level3 MUST point to previous 16-1 level2 digests, BUT there are only 5: 256, 512, 768, 1024, 1280, @@ -402,7 +447,10 @@ mod tests { 1296, 1312, 1328, // level3 MUST be a level1 digest of 16-1 previous blocks, BUT there are only 9: 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, - ].iter().map(|item| zero + item).collect::>(), + ] + .iter() + .map(|item| zero + item) + .collect::>(), ); } @@ -414,14 +462,18 @@ mod tests { 
#[test] fn digest_iterator_returns_skewed_digest_blocks_skipping_level() { fn test_with_zero(zero: u64) { - assert_eq!(digest_build_iterator_blocks(16, 3, zero, zero + 4096, Some(zero + 1284)), + assert_eq!( + digest_build_iterator_blocks(16, 3, zero, zero + 4096, Some(zero + 1284)), [ // level3 MUST point to previous 16-1 level2 digests, BUT there are only 5: 256, 512, 768, 1024, 1280, // level3 MUST point to previous 16-1 level1 digests, BUT there are NO ANY L1-digests: // level3 MUST be a level1 digest of 16-1 previous blocks, BUT there are only 3: 1281, 1282, 1283, - ].iter().map(|item| zero + item).collect::>(), + ] + .iter() + .map(|item| zero + item) + .collect::>(), ); } diff --git a/primitives/state-machine/src/changes_trie/changes_iterator.rs b/primitives/state-machine/src/changes_trie/changes_iterator.rs index be35581e7514d..8b7d7c5781091 100644 --- a/primitives/state-machine/src/changes_trie/changes_iterator.rs +++ b/primitives/state-machine/src/changes_trie/changes_iterator.rs @@ -18,20 +18,22 @@ //! Functions + iterator that traverses changes tries and returns all //! (block, extrinsic) pairs where given key has been changed. 
-use std::cell::RefCell; -use std::collections::VecDeque; -use codec::{Decode, Encode, Codec}; +use crate::{ + changes_trie::{ + input::{ChildIndex, DigestIndex, DigestIndexValue, ExtrinsicIndex, ExtrinsicIndexValue}, + storage::{InMemoryStorage, TrieBackendAdapter}, + surface_iterator::{surface_iterator, SurfaceIterator}, + AnchorBlockId, BlockNumber, ConfigurationRange, RootsStorage, Storage, + }, + proving_backend::ProvingBackendRecorder, + trie_backend_essence::TrieBackendEssence, +}; +use codec::{Codec, Decode, Encode}; use hash_db::Hasher; use num_traits::Zero; use sp_core::storage::PrefixedStorageKey; use sp_trie::Recorder; -use crate::changes_trie::{AnchorBlockId, ConfigurationRange, RootsStorage, Storage, BlockNumber}; -use crate::changes_trie::input::{DigestIndex, ExtrinsicIndex, DigestIndexValue, ExtrinsicIndexValue}; -use crate::changes_trie::storage::{TrieBackendAdapter, InMemoryStorage}; -use crate::changes_trie::input::ChildIndex; -use crate::changes_trie::surface_iterator::{surface_iterator, SurfaceIterator}; -use crate::proving_backend::ProvingBackendRecorder; -use crate::trie_backend_essence::{TrieBackendEssence}; +use std::{cell::RefCell, collections::VecDeque}; /// Return changes of given key at given blocks range. /// `max` is the number of best known block. @@ -57,12 +59,7 @@ pub fn key_changes<'a, H: Hasher, Number: BlockNumber>( begin: begin.clone(), end, config: config.clone(), - surface: surface_iterator( - config, - max, - begin, - end.number.clone(), - )?, + surface: surface_iterator(config, max, begin, end.number.clone())?, extrinsics: Default::default(), blocks: Default::default(), @@ -72,7 +69,6 @@ pub fn key_changes<'a, H: Hasher, Number: BlockNumber>( }) } - /// Returns proof of changes of given key at given blocks range. /// `max` is the number of best known block. 
pub fn key_changes_proof<'a, H: Hasher, Number: BlockNumber>( @@ -83,7 +79,10 @@ pub fn key_changes_proof<'a, H: Hasher, Number: BlockNumber>( max: Number, storage_key: Option<&PrefixedStorageKey>, key: &[u8], -) -> Result>, String> where H::Out: Codec { +) -> Result>, String> +where + H::Out: Codec, +{ // we can't query any roots before root let max = std::cmp::min(max, end.number.clone()); @@ -96,12 +95,7 @@ pub fn key_changes_proof<'a, H: Hasher, Number: BlockNumber>( begin: begin.clone(), end, config: config.clone(), - surface: surface_iterator( - config, - max, - begin, - end.number.clone(), - )?, + surface: surface_iterator(config, max, begin, end.number.clone())?, extrinsics: Default::default(), blocks: Default::default(), @@ -130,8 +124,11 @@ pub fn key_changes_proof_check<'a, H: Hasher, Number: BlockNumber>( end: &AnchorBlockId, max: Number, storage_key: Option<&PrefixedStorageKey>, - key: &[u8] -) -> Result, String> where H::Out: Encode { + key: &[u8], +) -> Result, String> +where + H::Out: Encode, +{ key_changes_proof_check_with_db( config, roots_storage, @@ -153,8 +150,11 @@ pub fn key_changes_proof_check_with_db<'a, H: Hasher, Number: BlockNumber>( end: &AnchorBlockId, max: Number, storage_key: Option<&PrefixedStorageKey>, - key: &[u8] -) -> Result, String> where H::Out: Encode { + key: &[u8], +) -> Result, String> +where + H::Out: Encode, +{ // we can't query any roots before root let max = std::cmp::min(max, end.number.clone()); @@ -167,28 +167,24 @@ pub fn key_changes_proof_check_with_db<'a, H: Hasher, Number: BlockNumber>( begin: begin.clone(), end, config: config.clone(), - surface: surface_iterator( - config, - max, - begin, - end.number.clone(), - )?, + surface: surface_iterator(config, max, begin, end.number.clone())?, extrinsics: Default::default(), blocks: Default::default(), _hasher: ::std::marker::PhantomData::::default(), }, - }.collect() + } + .collect() } /// Drilldown iterator - receives 'digest points' from surface iterator and 
explores /// every point until extrinsic is found. pub struct DrilldownIteratorEssence<'a, H, Number> - where - H: Hasher, - Number: BlockNumber, - H::Out: 'a, +where + H: Hasher, + Number: BlockNumber, + H::Out: 'a, { storage_key: Option<&'a PrefixedStorageKey>, key: &'a [u8], @@ -206,14 +202,14 @@ pub struct DrilldownIteratorEssence<'a, H, Number> } impl<'a, H, Number> DrilldownIteratorEssence<'a, H, Number> - where - H: Hasher, - Number: BlockNumber, - H::Out: 'a, +where + H: Hasher, + Number: BlockNumber, + H::Out: 'a, { pub fn next(&mut self, trie_reader: F) -> Option> - where - F: FnMut(&dyn Storage, H::Out, &[u8]) -> Result>, String>, + where + F: FnMut(&dyn Storage, H::Out, &[u8]) -> Result>, String>, { match self.do_next(trie_reader) { Ok(Some(res)) => Some(Ok(res)), @@ -223,25 +219,26 @@ impl<'a, H, Number> DrilldownIteratorEssence<'a, H, Number> } fn do_next(&mut self, mut trie_reader: F) -> Result, String> - where - F: FnMut(&dyn Storage, H::Out, &[u8]) -> Result>, String>, + where + F: FnMut(&dyn Storage, H::Out, &[u8]) -> Result>, String>, { loop { if let Some((block, extrinsic)) = self.extrinsics.pop_front() { - return Ok(Some((block, extrinsic))); + return Ok(Some((block, extrinsic))) } if let Some((block, level)) = self.blocks.pop_front() { // not having a changes trie root is an error because: // we never query roots for future blocks // AND trie roots for old blocks are known (both on full + light node) - let trie_root = self.roots_storage.root(&self.end, block.clone())? 
- .ok_or_else(|| format!("Changes trie root for block {} is not found", block.clone()))?; + let trie_root = + self.roots_storage.root(&self.end, block.clone())?.ok_or_else(|| { + format!("Changes trie root for block {} is not found", block.clone()) + })?; let trie_root = if let Some(storage_key) = self.storage_key { - let child_key = ChildIndex { - block: block.clone(), - storage_key: storage_key.clone(), - }.encode(); + let child_key = + ChildIndex { block: block.clone(), storage_key: storage_key.clone() } + .encode(); if let Some(trie_root) = trie_reader(self.storage, trie_root, &child_key)? .and_then(|v| >::decode(&mut &v[..]).ok()) .map(|v| { @@ -251,7 +248,7 @@ impl<'a, H, Number> DrilldownIteratorEssence<'a, H, Number> }) { trie_root } else { - continue; + continue } } else { trie_root @@ -260,18 +257,24 @@ impl<'a, H, Number> DrilldownIteratorEssence<'a, H, Number> // only return extrinsics for blocks before self.max // most of blocks will be filtered out before pushing to `self.blocks` // here we just throwing away changes at digest blocks we're processing - debug_assert!(block >= self.begin, "We shall not touch digests earlier than a range' begin"); + debug_assert!( + block >= self.begin, + "We shall not touch digests earlier than a range' begin" + ); if block <= self.end.number { - let extrinsics_key = ExtrinsicIndex { block: block.clone(), key: self.key.to_vec() }.encode(); + let extrinsics_key = + ExtrinsicIndex { block: block.clone(), key: self.key.to_vec() }.encode(); let extrinsics = trie_reader(self.storage, trie_root, &extrinsics_key); if let Some(extrinsics) = extrinsics? 
{ if let Ok(extrinsics) = ExtrinsicIndexValue::decode(&mut &extrinsics[..]) { - self.extrinsics.extend(extrinsics.into_iter().rev().map(|e| (block.clone(), e))); + self.extrinsics + .extend(extrinsics.into_iter().rev().map(|e| (block.clone(), e))); } } } - let blocks_key = DigestIndex { block: block.clone(), key: self.key.to_vec() }.encode(); + let blocks_key = + DigestIndex { block: block.clone(), key: self.key.to_vec() }.encode(); let blocks = trie_reader(self.storage, trie_root, &blocks_key); if let Some(blocks) = blocks? { if let Ok(blocks) = >::decode(&mut &blocks[..]) { @@ -280,23 +283,35 @@ impl<'a, H, Number> DrilldownIteratorEssence<'a, H, Number> let begin = self.begin.clone(); let end = self.end.number.clone(); let config = self.config.clone(); - self.blocks.extend(blocks.into_iter() - .rev() - .filter(|b| level.map(|level| level > 1).unwrap_or(true) || (*b >= begin && *b <= end)) - .map(|b| { - let prev_level = level - .map(|level| Some(level - 1)) - .unwrap_or_else(|| - Some(config.config.digest_level_at_block(config.zero.clone(), b.clone()) - .map(|(level, _, _)| level) - .unwrap_or_else(|| Zero::zero()))); - (b, prev_level) - }) + self.blocks.extend( + blocks + .into_iter() + .rev() + .filter(|b| { + level.map(|level| level > 1).unwrap_or(true) || + (*b >= begin && *b <= end) + }) + .map(|b| { + let prev_level = + level.map(|level| Some(level - 1)).unwrap_or_else(|| { + Some( + config + .config + .digest_level_at_block( + config.zero.clone(), + b.clone(), + ) + .map(|(level, _, _)| level) + .unwrap_or_else(|| Zero::zero()), + ) + }); + (b, prev_level) + }), ); } } - continue; + continue } match self.surface.next() { @@ -310,46 +325,50 @@ impl<'a, H, Number> DrilldownIteratorEssence<'a, H, Number> /// Exploring drilldown operator. 
pub struct DrilldownIterator<'a, H, Number> - where - Number: BlockNumber, - H: Hasher, - H::Out: 'a, +where + Number: BlockNumber, + H: Hasher, + H::Out: 'a, { essence: DrilldownIteratorEssence<'a, H, Number>, } impl<'a, H: Hasher, Number: BlockNumber> Iterator for DrilldownIterator<'a, H, Number> - where H::Out: Encode +where + H::Out: Encode, { type Item = Result<(Number, u32), String>; fn next(&mut self) -> Option { - self.essence.next(|storage, root, key| - TrieBackendEssence::<_, H>::new(TrieBackendAdapter::new(storage), root).storage(key)) + self.essence.next(|storage, root, key| { + TrieBackendEssence::<_, H>::new(TrieBackendAdapter::new(storage), root).storage(key) + }) } } /// Proving drilldown iterator. struct ProvingDrilldownIterator<'a, H, Number> - where - Number: BlockNumber, - H: Hasher, - H::Out: 'a, +where + Number: BlockNumber, + H: Hasher, + H::Out: 'a, { essence: DrilldownIteratorEssence<'a, H, Number>, proof_recorder: RefCell>, } impl<'a, H, Number> ProvingDrilldownIterator<'a, H, Number> - where - Number: BlockNumber, - H: Hasher, - H::Out: 'a, +where + Number: BlockNumber, + H: Hasher, + H::Out: 'a, { /// Consume the iterator, extracting the gathered proof in lexicographical order /// by value. 
pub fn extract_proof(self) -> Vec> { - self.proof_recorder.into_inner().drain() + self.proof_recorder + .into_inner() + .drain() .into_iter() .map(|n| n.data.to_vec()) .collect() @@ -357,32 +376,34 @@ impl<'a, H, Number> ProvingDrilldownIterator<'a, H, Number> } impl<'a, H, Number> Iterator for ProvingDrilldownIterator<'a, H, Number> - where - Number: BlockNumber, - H: Hasher, - H::Out: 'a + Codec, +where + Number: BlockNumber, + H: Hasher, + H::Out: 'a + Codec, { type Item = Result<(Number, u32), String>; fn next(&mut self) -> Option { - let proof_recorder = &mut *self.proof_recorder.try_borrow_mut() + let proof_recorder = &mut *self + .proof_recorder + .try_borrow_mut() .expect("only fails when already borrowed; storage() is non-reentrant; qed"); - self.essence.next(|storage, root, key| + self.essence.next(|storage, root, key| { ProvingBackendRecorder::<_, H> { backend: &TrieBackendEssence::new(TrieBackendAdapter::new(storage), root), proof_recorder, - }.storage(key)) + } + .storage(key) + }) } } #[cfg(test)] mod tests { - use std::iter::FromIterator; - use crate::changes_trie::Configuration; - use crate::changes_trie::input::InputPair; - use crate::changes_trie::storage::InMemoryStorage; - use sp_runtime::traits::BlakeTwo256; use super::*; + use crate::changes_trie::{input::InputPair, storage::InMemoryStorage, Configuration}; + use sp_runtime::traits::BlakeTwo256; + use std::iter::FromIterator; fn child_key() -> PrefixedStorageKey { let child_info = sp_core::storage::ChildInfo::new_default(&b"1"[..]); @@ -391,64 +412,98 @@ mod tests { fn prepare_for_drilldown() -> (Configuration, InMemoryStorage) { let config = Configuration { digest_interval: 4, digest_levels: 2 }; - let backend = InMemoryStorage::with_inputs(vec![ - // digest: 1..4 => [(3, 0)] - (1, vec![ - ]), - (2, vec![ - ]), - (3, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 3, key: vec![42] }, vec![0]), - ]), - (4, vec![ - InputPair::DigestIndex(DigestIndex { block: 4, key: vec![42] }, 
vec![3]), - ]), - // digest: 5..8 => [(6, 3), (8, 1+2)] - (5, vec![]), - (6, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 6, key: vec![42] }, vec![3]), - ]), - (7, vec![]), - (8, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 8, key: vec![42] }, vec![1, 2]), - InputPair::DigestIndex(DigestIndex { block: 8, key: vec![42] }, vec![6]), - ]), - // digest: 9..12 => [] - (9, vec![]), - (10, vec![]), - (11, vec![]), - (12, vec![]), - // digest: 0..16 => [4, 8] - (13, vec![]), - (14, vec![]), - (15, vec![]), - (16, vec![ - InputPair::DigestIndex(DigestIndex { block: 16, key: vec![42] }, vec![4, 8]), - ]), - ], vec![(child_key(), vec![ - (1, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 1, key: vec![42] }, vec![0]), - ]), - (2, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 2, key: vec![42] }, vec![3]), - ]), - (16, vec![ - InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![42] }, vec![5]), - - InputPair::DigestIndex(DigestIndex { block: 16, key: vec![42] }, vec![2]), - ]), - ]), - ]); + let backend = InMemoryStorage::with_inputs( + vec![ + // digest: 1..4 => [(3, 0)] + (1, vec![]), + (2, vec![]), + ( + 3, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: 3, key: vec![42] }, + vec![0], + )], + ), + (4, vec![InputPair::DigestIndex(DigestIndex { block: 4, key: vec![42] }, vec![3])]), + // digest: 5..8 => [(6, 3), (8, 1+2)] + (5, vec![]), + ( + 6, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: 6, key: vec![42] }, + vec![3], + )], + ), + (7, vec![]), + ( + 8, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: 8, key: vec![42] }, + vec![1, 2], + ), + InputPair::DigestIndex(DigestIndex { block: 8, key: vec![42] }, vec![6]), + ], + ), + // digest: 9..12 => [] + (9, vec![]), + (10, vec![]), + (11, vec![]), + (12, vec![]), + // digest: 0..16 => [4, 8] + (13, vec![]), + (14, vec![]), + (15, vec![]), + ( + 16, + vec![InputPair::DigestIndex( + DigestIndex { block: 16, key: vec![42] }, + 
vec![4, 8], + )], + ), + ], + vec![( + child_key(), + vec![ + ( + 1, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: 1, key: vec![42] }, + vec![0], + )], + ), + ( + 2, + vec![InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: 2, key: vec![42] }, + vec![3], + )], + ), + ( + 16, + vec![ + InputPair::ExtrinsicIndex( + ExtrinsicIndex { block: 16, key: vec![42] }, + vec![5], + ), + InputPair::DigestIndex( + DigestIndex { block: 16, key: vec![42] }, + vec![2], + ), + ], + ), + ], + )], + ); (config, backend) } - fn configuration_range<'a>(config: &'a Configuration, zero: u64) -> ConfigurationRange<'a, u64> { - ConfigurationRange { - config, - zero, - end: None, - } + fn configuration_range<'a>( + config: &'a Configuration, + zero: u64, + ) -> ConfigurationRange<'a, u64> { + ConfigurationRange { config, zero, end: None } } #[test] @@ -462,7 +517,8 @@ mod tests { 16, None, &[42], - ).and_then(Result::from_iter); + ) + .and_then(Result::from_iter); assert_eq!(drilldown_result, Ok(vec![(8, 2), (8, 1), (6, 3), (3, 0)])); let drilldown_result = key_changes::( @@ -473,7 +529,8 @@ mod tests { 4, None, &[42], - ).and_then(Result::from_iter); + ) + .and_then(Result::from_iter); assert_eq!(drilldown_result, Ok(vec![])); let drilldown_result = key_changes::( @@ -484,7 +541,8 @@ mod tests { 4, None, &[42], - ).and_then(Result::from_iter); + ) + .and_then(Result::from_iter); assert_eq!(drilldown_result, Ok(vec![(3, 0)])); let drilldown_result = key_changes::( @@ -495,7 +553,8 @@ mod tests { 7, None, &[42], - ).and_then(Result::from_iter); + ) + .and_then(Result::from_iter); assert_eq!(drilldown_result, Ok(vec![(6, 3), (3, 0)])); let drilldown_result = key_changes::( @@ -506,7 +565,8 @@ mod tests { 8, None, &[42], - ).and_then(Result::from_iter); + ) + .and_then(Result::from_iter); assert_eq!(drilldown_result, Ok(vec![(8, 2), (8, 1)])); let drilldown_result = key_changes::( @@ -517,7 +577,8 @@ mod tests { 8, None, &[42], - ).and_then(Result::from_iter); + ) + 
.and_then(Result::from_iter); assert_eq!(drilldown_result, Ok(vec![(6, 3)])); } @@ -534,7 +595,9 @@ mod tests { 1000, None, &[42], - ).and_then(|i| i.collect::, _>>()).is_err()); + ) + .and_then(|i| i.collect::, _>>()) + .is_err()); assert!(key_changes::( configuration_range(&config, 0), @@ -544,7 +607,9 @@ mod tests { 1000, Some(&child_key()), &[42], - ).and_then(|i| i.collect::, _>>()).is_err()); + ) + .and_then(|i| i.collect::, _>>()) + .is_err()); } #[test] @@ -558,7 +623,8 @@ mod tests { 50, None, &[42], - ).is_err()); + ) + .is_err()); assert!(key_changes::( configuration_range(&config, 0), &storage, @@ -567,10 +633,10 @@ mod tests { 100, None, &[42], - ).is_err()); + ) + .is_err()); } - #[test] fn proving_drilldown_iterator_works() { // happens on remote full node: @@ -578,13 +644,27 @@ mod tests { // create drilldown iterator that records all trie nodes during drilldown let (remote_config, remote_storage) = prepare_for_drilldown(); let remote_proof = key_changes_proof::( - configuration_range(&remote_config, 0), &remote_storage, 1, - &AnchorBlockId { hash: Default::default(), number: 16 }, 16, None, &[42]).unwrap(); + configuration_range(&remote_config, 0), + &remote_storage, + 1, + &AnchorBlockId { hash: Default::default(), number: 16 }, + 16, + None, + &[42], + ) + .unwrap(); let (remote_config, remote_storage) = prepare_for_drilldown(); let remote_proof_child = key_changes_proof::( - configuration_range(&remote_config, 0), &remote_storage, 1, - &AnchorBlockId { hash: Default::default(), number: 16 }, 16, Some(&child_key()), &[42]).unwrap(); + configuration_range(&remote_config, 0), + &remote_storage, + 1, + &AnchorBlockId { hash: Default::default(), number: 16 }, + 16, + Some(&child_key()), + &[42], + ) + .unwrap(); // happens on local light node: @@ -592,14 +672,28 @@ mod tests { let (local_config, local_storage) = prepare_for_drilldown(); local_storage.clear_storage(); let local_result = key_changes_proof_check::( - configuration_range(&local_config, 
0), &local_storage, remote_proof, 1, - &AnchorBlockId { hash: Default::default(), number: 16 }, 16, None, &[42]); + configuration_range(&local_config, 0), + &local_storage, + remote_proof, + 1, + &AnchorBlockId { hash: Default::default(), number: 16 }, + 16, + None, + &[42], + ); let (local_config, local_storage) = prepare_for_drilldown(); local_storage.clear_storage(); let local_result_child = key_changes_proof_check::( - configuration_range(&local_config, 0), &local_storage, remote_proof_child, 1, - &AnchorBlockId { hash: Default::default(), number: 16 }, 16, Some(&child_key()), &[42]); + configuration_range(&local_config, 0), + &local_storage, + remote_proof_child, + 1, + &AnchorBlockId { hash: Default::default(), number: 16 }, + 16, + Some(&child_key()), + &[42], + ); // check that drilldown result is the same as if it was happening at the full node assert_eq!(local_result, Ok(vec![(8, 2), (8, 1), (6, 3), (3, 0)])); @@ -620,12 +714,22 @@ mod tests { // regular blocks: 89, 90, 91 let mut input = (1u64..92u64).map(|b| (b, vec![])).collect::>(); // changed at block#63 and covered by L3 digest at block#64 - input[63 - 1].1.push(InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 63, key: vec![42] }, vec![0])); - input[64 - 1].1.push(InputPair::DigestIndex(DigestIndex { block: 64, key: vec![42] }, vec![63])); + input[63 - 1] + .1 + .push(InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 63, key: vec![42] }, vec![0])); + input[64 - 1] + .1 + .push(InputPair::DigestIndex(DigestIndex { block: 64, key: vec![42] }, vec![63])); // changed at block#79 and covered by L2 digest at block#80 + skewed digest at block#91 - input[79 - 1].1.push(InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 79, key: vec![42] }, vec![1])); - input[80 - 1].1.push(InputPair::DigestIndex(DigestIndex { block: 80, key: vec![42] }, vec![79])); - input[91 - 1].1.push(InputPair::DigestIndex(DigestIndex { block: 91, key: vec![42] }, vec![80])); + input[79 - 1] + .1 + 
.push(InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 79, key: vec![42] }, vec![1])); + input[80 - 1] + .1 + .push(InputPair::DigestIndex(DigestIndex { block: 80, key: vec![42] }, vec![79])); + input[91 - 1] + .1 + .push(InputPair::DigestIndex(DigestIndex { block: 91, key: vec![42] }, vec![80])); let storage = InMemoryStorage::with_inputs(input, vec![]); let drilldown_result = key_changes::( @@ -636,7 +740,8 @@ mod tests { 100_000u64, None, &[42], - ).and_then(Result::from_iter); + ) + .and_then(Result::from_iter); assert_eq!(drilldown_result, Ok(vec![(79, 1), (63, 0)])); } } diff --git a/primitives/state-machine/src/changes_trie/input.rs b/primitives/state-machine/src/changes_trie/input.rs index 85a8de0b78d81..4261042956116 100644 --- a/primitives/state-machine/src/changes_trie/input.rs +++ b/primitives/state-machine/src/changes_trie/input.rs @@ -17,11 +17,8 @@ //! Different types of changes trie input pairs. -use codec::{Decode, Encode, Input, Output, Error}; -use crate::{ - StorageKey, StorageValue, - changes_trie::BlockNumber -}; +use crate::{changes_trie::BlockNumber, StorageKey, StorageValue}; +use codec::{Decode, Encode, Error, Input, Output}; use sp_core::storage::PrefixedStorageKey; /// Key of { changed key => set of extrinsic indices } mapping. 
@@ -140,7 +137,6 @@ impl DigestIndex { } } - impl Encode for DigestIndex { fn encode_to(&self, dest: &mut W) { dest.push_byte(2); diff --git a/primitives/state-machine/src/changes_trie/mod.rs b/primitives/state-machine/src/changes_trie/mod.rs index 105f3d7de6d39..7fedff1f1e2b9 100644 --- a/primitives/state-machine/src/changes_trie/mod.rs +++ b/primitives/state-machine/src/changes_trie/mod.rs @@ -58,63 +58,86 @@ mod prune; mod storage; mod surface_iterator; -pub use self::build_cache::{BuildCache, CachedBuildData, CacheAction}; -pub use self::storage::InMemoryStorage; -pub use self::changes_iterator::{ - key_changes, key_changes_proof, - key_changes_proof_check, key_changes_proof_check_with_db, +pub use self::{ + build_cache::{BuildCache, CacheAction, CachedBuildData}, + changes_iterator::{ + key_changes, key_changes_proof, key_changes_proof_check, key_changes_proof_check_with_db, + }, + prune::prune, + storage::InMemoryStorage, }; -pub use self::prune::prune; -use std::collections::{HashMap, HashSet}; -use std::convert::TryInto; -use hash_db::{Hasher, Prefix}; -use num_traits::{One, Zero}; -use codec::{Decode, Encode}; -use sp_core; -use sp_core::storage::PrefixedStorageKey; -use sp_trie::{MemoryDB, DBValue, TrieMut}; -use sp_trie::trie_types::TrieDBMut; use crate::{ - StorageKey, backend::Backend, - overlayed_changes::OverlayedChanges, changes_trie::{ build::prepare_input, - build_cache::{IncompleteCachedBuildData, IncompleteCacheAction}, + build_cache::{IncompleteCacheAction, IncompleteCachedBuildData}, }, + overlayed_changes::OverlayedChanges, + StorageKey, +}; +use codec::{Decode, Encode}; +use hash_db::{Hasher, Prefix}; +use num_traits::{One, Zero}; +use sp_core::{self, storage::PrefixedStorageKey}; +use sp_trie::{trie_types::TrieDBMut, DBValue, MemoryDB, TrieMut}; +use std::{ + collections::{HashMap, HashSet}, + convert::TryInto, }; /// Requirements for block number that can be used with changes tries. 
pub trait BlockNumber: - Send + Sync + 'static + - std::fmt::Display + - Clone + - From + TryInto + One + Zero + - PartialEq + Ord + - std::hash::Hash + - std::ops::Add + ::std::ops::Sub + - std::ops::Mul + ::std::ops::Div + - std::ops::Rem + - std::ops::AddAssign + - num_traits::CheckedMul + num_traits::CheckedSub + - Decode + Encode -{} - -impl BlockNumber for T where T: - Send + Sync + 'static + - std::fmt::Display + - Clone + - From + TryInto + One + Zero + - PartialEq + Ord + - std::hash::Hash + - std::ops::Add + ::std::ops::Sub + - std::ops::Mul + ::std::ops::Div + - std::ops::Rem + - std::ops::AddAssign + - num_traits::CheckedMul + num_traits::CheckedSub + - Decode + Encode, -{} + Send + + Sync + + 'static + + std::fmt::Display + + Clone + + From + + TryInto + + One + + Zero + + PartialEq + + Ord + + std::hash::Hash + + std::ops::Add + + ::std::ops::Sub + + std::ops::Mul + + ::std::ops::Div + + std::ops::Rem + + std::ops::AddAssign + + num_traits::CheckedMul + + num_traits::CheckedSub + + Decode + + Encode +{ +} + +impl BlockNumber for T where + T: Send + + Sync + + 'static + + std::fmt::Display + + Clone + + From + + TryInto + + One + + Zero + + PartialEq + + Ord + + std::hash::Hash + + std::ops::Add + + ::std::ops::Sub + + std::ops::Mul + + ::std::ops::Div + + std::ops::Rem + + std::ops::AddAssign + + num_traits::CheckedMul + + num_traits::CheckedSub + + Decode + + Encode +{ +} /// Block identifier that could be used to determine fork of this block. #[derive(Debug)] @@ -143,7 +166,11 @@ pub trait RootsStorage: Send + Sync { fn build_anchor(&self, hash: H::Out) -> Result, String>; /// Get changes trie root for the block with given number which is an ancestor (or the block /// itself) of the anchor_block (i.e. anchor_block.number >= block). - fn root(&self, anchor: &AnchorBlockId, block: Number) -> Result, String>; + fn root( + &self, + anchor: &AnchorBlockId, + block: Number, + ) -> Result, String>; } /// Changes trie storage. 
Provides access to trie roots and trie nodes. @@ -162,9 +189,13 @@ pub trait Storage: RootsStorage { } /// Changes trie storage -> trie backend essence adapter. -pub struct TrieBackendStorageAdapter<'a, H: Hasher, Number: BlockNumber>(pub &'a dyn Storage); +pub struct TrieBackendStorageAdapter<'a, H: Hasher, Number: BlockNumber>( + pub &'a dyn Storage, +); -impl<'a, H: Hasher, N: BlockNumber> crate::TrieBackendStorage for TrieBackendStorageAdapter<'a, H, N> { +impl<'a, H: Hasher, N: BlockNumber> crate::TrieBackendStorage + for TrieBackendStorageAdapter<'a, H, N> +{ type Overlay = sp_trie::MemoryDB; fn get(&self, key: &H::Out, prefix: Prefix) -> Result, String> { @@ -188,26 +219,14 @@ pub struct ConfigurationRange<'a, N> { impl<'a, H, Number> State<'a, H, Number> { /// Create state with given config and storage. - pub fn new( - config: Configuration, - zero: Number, - storage: &'a dyn Storage, - ) -> Self { - Self { - config, - zero, - storage, - } + pub fn new(config: Configuration, zero: Number, storage: &'a dyn Storage) -> Self { + Self { config, zero, storage } } } impl<'a, H, Number: Clone> Clone for State<'a, H, Number> { fn clone(&self) -> Self { - State { - config: self.config.clone(), - zero: self.zero.clone(), - storage: self.storage, - } + State { config: self.config.clone(), zero: self.zero.clone(), storage: self.storage } } } @@ -227,20 +246,24 @@ pub fn build_changes_trie<'a, B: Backend, H: Hasher, Number: BlockNumber>( parent_hash: H::Out, panic_on_storage_error: bool, ) -> Result, H::Out, CacheAction)>, ()> - where - H::Out: Ord + 'static + Encode, +where + H::Out: Ord + 'static + Encode, { /// Panics when `res.is_err() && panic`, otherwise it returns `Err(())` on an error. 
fn maybe_panic( res: std::result::Result, panic: bool, ) -> std::result::Result { - res.map(Ok) - .unwrap_or_else(|e| if panic { - panic!("changes trie: storage access is not allowed to fail within runtime: {:?}", e) + res.map(Ok).unwrap_or_else(|e| { + if panic { + panic!( + "changes trie: storage access is not allowed to fail within runtime: {:?}", + e + ) } else { Err(()) - }) + } + }) } // when storage isn't provided, changes tries aren't created @@ -255,11 +278,12 @@ pub fn build_changes_trie<'a, B: Backend, H: Hasher, Number: BlockNumber>( // prepare configuration range - we already know zero block. Current block may be the end block if configuration // has been changed in this block - let is_config_changed = match changes.storage(sp_core::storage::well_known_keys::CHANGES_TRIE_CONFIG) { - Some(Some(new_config)) => new_config != &state.config.encode()[..], - Some(None) => true, - None => false, - }; + let is_config_changed = + match changes.storage(sp_core::storage::well_known_keys::CHANGES_TRIE_CONFIG) { + Some(Some(new_config)) => new_config != &state.config.encode()[..], + Some(None) => true, + None => false, + }; let config_range = ConfigurationRange { config: &state.config, zero: state.zero.clone(), @@ -303,10 +327,8 @@ pub fn build_changes_trie<'a, B: Backend, H: Hasher, Number: BlockNumber>( maybe_panic(trie.insert(&key, &value), panic_on_storage_error)?; } - cache_action = cache_action.insert( - Some(child_index.storage_key.clone()), - storage_changed_keys, - ); + cache_action = + cache_action.insert(Some(child_index.storage_key.clone()), storage_changed_keys); } if not_empty { child_roots.push(input::InputPair::ChildIndex(child_index, root.as_ref().to_vec())); @@ -331,10 +353,7 @@ pub fn build_changes_trie<'a, B: Backend, H: Hasher, Number: BlockNumber>( maybe_panic(trie.insert(&key, &value), panic_on_storage_error)?; } - cache_action = cache_action.insert( - None, - storage_changed_keys, - ); + cache_action = cache_action.insert(None, 
storage_changed_keys); } let cache_action = cache_action.complete(block, &root); @@ -350,20 +369,21 @@ fn prepare_cached_build_data( // because it'll never be used again for building other tries // => let's clear the cache if !config.config.is_digest_build_enabled() { - return IncompleteCacheAction::Clear; + return IncompleteCacheAction::Clear } // when this is the last block where current configuration is active // => let's clear the cache if config.end.as_ref() == Some(&block) { - return IncompleteCacheAction::Clear; + return IncompleteCacheAction::Clear } // we do not need to cache anything when top-level digest trie is created, because // it'll never be used again for building other tries // => let's clear the cache match config.config.digest_level_at_block(config.zero.clone(), block) { - Some((digest_level, _, _)) if digest_level == config.config.digest_levels => IncompleteCacheAction::Clear, + Some((digest_level, _, _)) if digest_level == config.config.digest_levels => + IncompleteCacheAction::Clear, _ => IncompleteCacheAction::CacheBuildData(IncompleteCachedBuildData::new()), } } @@ -399,6 +419,9 @@ mod tests { fn cache_is_cleared_when_end_block_of_configuration_is_built() { let config = Configuration { digest_interval: 8, digest_levels: 2 }; let config_range = ConfigurationRange { zero: 0, end: Some(4u32), config: &config }; - assert_eq!(prepare_cached_build_data(config_range.clone(), 4u32), IncompleteCacheAction::Clear); + assert_eq!( + prepare_cached_build_data(config_range.clone(), 4u32), + IncompleteCacheAction::Clear + ); } } diff --git a/primitives/state-machine/src/changes_trie/prune.rs b/primitives/state-machine/src/changes_trie/prune.rs index 754e3893f966f..2ca540562b47f 100644 --- a/primitives/state-machine/src/changes_trie/prune.rs +++ b/primitives/state-machine/src/changes_trie/prune.rs @@ -17,16 +17,20 @@ //! Changes trie pruning-related functions. 
+use crate::{ + changes_trie::{ + input::{ChildIndex, InputKey}, + storage::TrieBackendAdapter, + AnchorBlockId, BlockNumber, Storage, + }, + proving_backend::ProvingBackendRecorder, + trie_backend_essence::TrieBackendEssence, +}; +use codec::{Codec, Decode}; use hash_db::Hasher; -use sp_trie::Recorder; use log::warn; use num_traits::One; -use crate::proving_backend::ProvingBackendRecorder; -use crate::trie_backend_essence::TrieBackendEssence; -use crate::changes_trie::{AnchorBlockId, Storage, BlockNumber}; -use crate::changes_trie::storage::TrieBackendAdapter; -use crate::changes_trie::input::{ChildIndex, InputKey}; -use codec::{Decode, Codec}; +use sp_trie::Recorder; /// Prune obsolete changes tries. Pruning happens at the same block, where highest /// level digest is created. Pruning guarantees to save changes tries for last @@ -38,12 +42,14 @@ pub fn prune( last: Number, current_block: &AnchorBlockId, mut remove_trie_node: F, -) where H::Out: Codec { +) where + H::Out: Codec, +{ // delete changes trie for every block in range let mut block = first; loop { if block >= last.clone() + One::one() { - break; + break } let prev_block = block.clone(); @@ -56,7 +62,7 @@ pub fn prune( Err(error) => { // try to delete other tries warn!(target: "trie", "Failed to read changes trie root from DB: {}", error); - continue; + continue }, }; let children_roots = { @@ -91,8 +97,9 @@ fn prune_trie( storage: &dyn Storage, root: H::Out, remove_trie_node: &mut F, -) where H::Out: Codec { - +) where + H::Out: Codec, +{ // enumerate all changes trie' keys, recording all nodes that have been 'touched' // (effectively - all changes trie nodes) let mut proof_recorder: Recorder = Default::default(); @@ -113,14 +120,13 @@ fn prune_trie( #[cfg(test)] mod tests { - use std::collections::HashSet; - use sp_trie::MemoryDB; - use sp_core::H256; - use crate::backend::insert_into_memory_db; - use crate::changes_trie::storage::InMemoryStorage; + use super::*; + use 
crate::{backend::insert_into_memory_db, changes_trie::storage::InMemoryStorage}; use codec::Encode; + use sp_core::H256; use sp_runtime::traits::BlakeTwo256; - use super::*; + use sp_trie::MemoryDB; + use std::collections::HashSet; fn prune_by_collect( storage: &dyn Storage, @@ -130,8 +136,9 @@ mod tests { ) -> HashSet { let mut pruned_trie_nodes = HashSet::new(); let anchor = AnchorBlockId { hash: Default::default(), number: current_block }; - prune(storage, first, last, &anchor, - |node| { pruned_trie_nodes.insert(node); }); + prune(storage, first, last, &anchor, |node| { + pruned_trie_nodes.insert(node); + }); pruned_trie_nodes } @@ -139,28 +146,36 @@ mod tests { fn prune_works() { fn prepare_storage() -> InMemoryStorage { let child_info = sp_core::storage::ChildInfo::new_default(&b"1"[..]); - let child_key = ChildIndex { block: 67u64, storage_key: child_info.prefixed_storage_key() }.encode(); + let child_key = + ChildIndex { block: 67u64, storage_key: child_info.prefixed_storage_key() } + .encode(); let mut mdb1 = MemoryDB::::default(); - let root1 = insert_into_memory_db::( - &mut mdb1, vec![(vec![10], vec![20])]).unwrap(); + let root1 = + insert_into_memory_db::(&mut mdb1, vec![(vec![10], vec![20])]) + .unwrap(); let mut mdb2 = MemoryDB::::default(); let root2 = insert_into_memory_db::( &mut mdb2, vec![(vec![11], vec![21]), (vec![12], vec![22])], - ).unwrap(); + ) + .unwrap(); let mut mdb3 = MemoryDB::::default(); - let ch_root3 = insert_into_memory_db::( - &mut mdb3, vec![(vec![110], vec![120])]).unwrap(); - let root3 = insert_into_memory_db::(&mut mdb3, vec![ - (vec![13], vec![23]), - (vec![14], vec![24]), - (child_key, ch_root3.as_ref().encode()), - ]).unwrap(); + let ch_root3 = + insert_into_memory_db::(&mut mdb3, vec![(vec![110], vec![120])]) + .unwrap(); + let root3 = insert_into_memory_db::( + &mut mdb3, + vec![ + (vec![13], vec![23]), + (vec![14], vec![24]), + (child_key, ch_root3.as_ref().encode()), + ], + ) + .unwrap(); let mut mdb4 = 
MemoryDB::::default(); - let root4 = insert_into_memory_db::( - &mut mdb4, - vec![(vec![15], vec![25])], - ).unwrap(); + let root4 = + insert_into_memory_db::(&mut mdb4, vec![(vec![15], vec![25])]) + .unwrap(); let storage = InMemoryStorage::new(); storage.insert(65, root1, mdb1); storage.insert(66, root2, mdb2); diff --git a/primitives/state-machine/src/changes_trie/storage.rs b/primitives/state-machine/src/changes_trie/storage.rs index e08fe36126c7b..bd5e3a32b5657 100644 --- a/primitives/state-machine/src/changes_trie/storage.rs +++ b/primitives/state-machine/src/changes_trie/storage.rs @@ -17,22 +17,21 @@ //! Changes trie storage utilities. -use std::collections::{BTreeMap, HashSet, HashMap}; -use hash_db::{Hasher, Prefix, EMPTY_PREFIX}; -use sp_core::storage::PrefixedStorageKey; -use sp_trie::DBValue; -use sp_trie::MemoryDB; -use parking_lot::RwLock; use crate::{ - StorageKey, + changes_trie::{AnchorBlockId, BlockNumber, BuildCache, RootsStorage, Storage}, trie_backend_essence::TrieBackendStorage, - changes_trie::{BuildCache, RootsStorage, Storage, AnchorBlockId, BlockNumber}, + StorageKey, }; +use hash_db::{Hasher, Prefix, EMPTY_PREFIX}; +use parking_lot::RwLock; +use sp_core::storage::PrefixedStorageKey; +use sp_trie::{DBValue, MemoryDB}; +use std::collections::{BTreeMap, HashMap, HashSet}; #[cfg(test)] use crate::backend::insert_into_memory_db; #[cfg(test)] -use crate::changes_trie::input::{InputPair, ChildIndex}; +use crate::changes_trie::input::{ChildIndex, InputPair}; /// In-memory implementation of changes trie storage. pub struct InMemoryStorage { @@ -55,10 +54,7 @@ impl InMemoryStorage { /// Creates storage from given in-memory database. 
pub fn with_db(mdb: MemoryDB) -> Self { Self { - data: RwLock::new(InMemoryStorageData { - roots: BTreeMap::new(), - mdb, - }), + data: RwLock::new(InMemoryStorageData { roots: BTreeMap::new(), mdb }), cache: BuildCache::new(), } } @@ -72,7 +68,7 @@ impl InMemoryStorage { pub fn with_proof(proof: Vec>) -> Self { use hash_db::HashDB; - let mut proof_db = MemoryDB::::default(); + let mut proof_db = MemoryDB::::default(); for item in proof { proof_db.insert(EMPTY_PREFIX, &item); } @@ -104,7 +100,8 @@ impl InMemoryStorage { let mut roots = BTreeMap::new(); for (storage_key, child_input) in children_inputs { for (block, pairs) in child_input { - let root = insert_into_memory_db::(&mut mdb, pairs.into_iter().map(Into::into)); + let root = + insert_into_memory_db::(&mut mdb, pairs.into_iter().map(Into::into)); if let Some(root) = root { let ix = if let Some(ix) = top_inputs.iter().position(|v| v.0 == block) { @@ -129,17 +126,14 @@ impl InMemoryStorage { } InMemoryStorage { - data: RwLock::new(InMemoryStorageData { - roots, - mdb, - }), + data: RwLock::new(InMemoryStorageData { roots, mdb }), cache: BuildCache::new(), } } #[cfg(test)] pub fn clear_storage(&self) { - self.data.write().mdb = MemoryDB::default(); // use new to be more correct + self.data.write().mdb = MemoryDB::default(); // use new to be more correct } #[cfg(test)] @@ -165,13 +159,20 @@ impl InMemoryStorage { impl RootsStorage for InMemoryStorage { fn build_anchor(&self, parent_hash: H::Out) -> Result, String> { - self.data.read().roots.iter() + self.data + .read() + .roots + .iter() .find(|(_, v)| **v == parent_hash) .map(|(k, _)| AnchorBlockId { hash: parent_hash, number: k.clone() }) .ok_or_else(|| format!("Can't find associated number for block {:?}", parent_hash)) } - fn root(&self, _anchor_block: &AnchorBlockId, block: Number) -> Result, String> { + fn root( + &self, + _anchor_block: &AnchorBlockId, + block: Number, + ) -> Result, String> { Ok(self.data.read().roots.get(&block).cloned()) } } @@ -201,9 
+202,9 @@ impl<'a, H: Hasher, Number: BlockNumber> TrieBackendAdapter<'a, H, Number> { } impl<'a, H, Number> TrieBackendStorage for TrieBackendAdapter<'a, H, Number> - where - Number: BlockNumber, - H: Hasher, +where + Number: BlockNumber, + H: Hasher, { type Overlay = MemoryDB; diff --git a/primitives/state-machine/src/changes_trie/surface_iterator.rs b/primitives/state-machine/src/changes_trie/surface_iterator.rs index 13da8511f3f96..509c02ee379ff 100644 --- a/primitives/state-machine/src/changes_trie/surface_iterator.rs +++ b/primitives/state-machine/src/changes_trie/surface_iterator.rs @@ -21,8 +21,8 @@ //! of points at the terrain (mountains and valleys) inside this range that have to be drilled down to //! search for gems. +use crate::changes_trie::{BlockNumber, ConfigurationRange}; use num_traits::One; -use crate::changes_trie::{ConfigurationRange, BlockNumber}; /// Returns surface iterator for given range of blocks. /// @@ -34,12 +34,8 @@ pub fn surface_iterator<'a, Number: BlockNumber>( begin: Number, end: Number, ) -> Result, String> { - let (current, current_begin, digest_step, digest_level) = lower_bound_max_digest( - config.clone(), - max.clone(), - begin.clone(), - end, - )?; + let (current, current_begin, digest_step, digest_level) = + lower_bound_max_digest(config.clone(), max.clone(), begin.clone(), end)?; Ok(SurfaceIterator { config, begin, @@ -89,7 +85,8 @@ impl<'a, Number: BlockNumber> Iterator for SurfaceIterator<'a, Number> { self.begin.clone(), next, ); - let (current, current_begin, digest_step, digest_level) = match max_digest_interval { + let (current, current_begin, digest_step, digest_level) = match max_digest_interval + { Err(err) => return Some(Err(err)), Ok(range) => range, }; @@ -114,14 +111,21 @@ fn lower_bound_max_digest<'a, Number: BlockNumber>( end: Number, ) -> Result<(Number, Number, u32, Option), String> { if end > max || begin > end { - return Err(format!("invalid changes range: {}..{}/{}", begin, end, max)); + return 
Err(format!("invalid changes range: {}..{}/{}", begin, end, max)) } - if begin <= config.zero || config.end.as_ref().map(|config_end| end > *config_end).unwrap_or(false) { - return Err(format!("changes trie range is not covered by configuration: {}..{}/{}..{}", - begin, end, config.zero, match config.end.as_ref() { + if begin <= config.zero || + config.end.as_ref().map(|config_end| end > *config_end).unwrap_or(false) + { + return Err(format!( + "changes trie range is not covered by configuration: {}..{}/{}..{}", + begin, + end, + config.zero, + match config.end.as_ref() { Some(config_end) => format!("{}", config_end), None => "None".into(), - })); + } + )) } let mut digest_level = 0u32; @@ -135,10 +139,16 @@ fn lower_bound_max_digest<'a, Number: BlockNumber>( let new_digest_level = digest_level + 1; let new_digest_step = digest_step * config.config.digest_interval; let new_digest_interval = config.config.digest_interval * { - if digest_interval == 0 { 1 } else { digest_interval } + if digest_interval == 0 { + 1 + } else { + digest_interval + } }; - let new_digest_begin = config.zero.clone() + ((current.clone() - One::one() - config.zero.clone()) - / new_digest_interval.into()) * new_digest_interval.into(); + let new_digest_begin = config.zero.clone() + + ((current.clone() - One::one() - config.zero.clone()) / + new_digest_interval.into()) * + new_digest_interval.into(); let new_digest_end = new_digest_begin.clone() + new_digest_interval.into(); let new_current = new_digest_begin.clone() + new_digest_interval.into(); @@ -150,16 +160,20 @@ fn lower_bound_max_digest<'a, Number: BlockNumber>( skewed_digest_end.clone(), ); if let Some(skewed_digest_start) = skewed_digest_start { - let skewed_digest_range = (skewed_digest_end.clone() - skewed_digest_start.clone()) - .try_into().ok() - .expect("skewed digest range is always <= max level digest range;\ - max level digest range always fits u32; qed"); + let skewed_digest_range = (skewed_digest_end.clone() - + 
skewed_digest_start.clone()) + .try_into() + .ok() + .expect( + "skewed digest range is always <= max level digest range;\ + max level digest range always fits u32; qed", + ); return Ok(( skewed_digest_end.clone(), skewed_digest_start, skewed_digest_range, None, - )); + )) } } } @@ -169,7 +183,7 @@ fn lower_bound_max_digest<'a, Number: BlockNumber>( if begin < new_digest_begin { current_begin = new_digest_begin; } - break; + break } // we can (and will) use this digest @@ -181,30 +195,24 @@ fn lower_bound_max_digest<'a, Number: BlockNumber>( // if current digest covers the whole range => no need to use next level digest if current_begin <= begin && new_digest_end >= end { - break; + break } } } - Ok(( - current, - current_begin, - digest_step, - Some(digest_level), - )) + Ok((current, current_begin, digest_step, Some(digest_level))) } #[cfg(test)] mod tests { - use crate::changes_trie::{Configuration}; use super::*; + use crate::changes_trie::Configuration; - fn configuration_range<'a>(config: &'a Configuration, zero: u64) -> ConfigurationRange<'a, u64> { - ConfigurationRange { - config, - zero, - end: None, - } + fn configuration_range<'a>( + config: &'a Configuration, + zero: u64, + ) -> ConfigurationRange<'a, u64> { + ConfigurationRange { config, zero, end: None } } #[test] @@ -213,13 +221,15 @@ mod tests { // when config activates at 0 assert_eq!( - lower_bound_max_digest(configuration_range(&config, 0u64), 100_000u64, 20u64, 180u64).unwrap(), + lower_bound_max_digest(configuration_range(&config, 0u64), 100_000u64, 20u64, 180u64) + .unwrap(), (192, 176, 16, Some(2)), ); // when config activates at 30 assert_eq!( - lower_bound_max_digest(configuration_range(&config, 30u64), 100_000u64, 50u64, 210u64).unwrap(), + lower_bound_max_digest(configuration_range(&config, 30u64), 100_000u64, 50u64, 210u64) + .unwrap(), (222, 206, 16, Some(2)), ); } @@ -230,40 +240,61 @@ mod tests { // when config activates at 0 assert_eq!( - surface_iterator( - 
configuration_range(&config, 0u64), - 100_000u64, - 40u64, - 180u64, - ).unwrap().collect::>(), + surface_iterator(configuration_range(&config, 0u64), 100_000u64, 40u64, 180u64,) + .unwrap() + .collect::>(), vec![ - Ok((192, Some(2))), Ok((176, Some(2))), Ok((160, Some(2))), Ok((144, Some(2))), - Ok((128, Some(2))), Ok((112, Some(2))), Ok((96, Some(2))), Ok((80, Some(2))), - Ok((64, Some(2))), Ok((48, Some(2))), + Ok((192, Some(2))), + Ok((176, Some(2))), + Ok((160, Some(2))), + Ok((144, Some(2))), + Ok((128, Some(2))), + Ok((112, Some(2))), + Ok((96, Some(2))), + Ok((80, Some(2))), + Ok((64, Some(2))), + Ok((48, Some(2))), ], ); // when config activates at 30 assert_eq!( - surface_iterator( - configuration_range(&config, 30u64), - 100_000u64, - 40u64, - 180u64, - ).unwrap().collect::>(), + surface_iterator(configuration_range(&config, 30u64), 100_000u64, 40u64, 180u64,) + .unwrap() + .collect::>(), vec![ - Ok((190, Some(2))), Ok((174, Some(2))), Ok((158, Some(2))), Ok((142, Some(2))), Ok((126, Some(2))), - Ok((110, Some(2))), Ok((94, Some(2))), Ok((78, Some(2))), Ok((62, Some(2))), Ok((46, Some(2))), + Ok((190, Some(2))), + Ok((174, Some(2))), + Ok((158, Some(2))), + Ok((142, Some(2))), + Ok((126, Some(2))), + Ok((110, Some(2))), + Ok((94, Some(2))), + Ok((78, Some(2))), + Ok((62, Some(2))), + Ok((46, Some(2))), ], ); // when config activates at 0 AND max block is before next digest assert_eq!( - surface_iterator(configuration_range(&config, 0u64), 183u64, 40u64, 183u64).unwrap().collect::>(), + surface_iterator(configuration_range(&config, 0u64), 183u64, 40u64, 183u64) + .unwrap() + .collect::>(), vec![ - Ok((183, Some(0))), Ok((182, Some(0))), Ok((181, Some(0))), Ok((180, Some(1))), - Ok((176, Some(2))), Ok((160, Some(2))), Ok((144, Some(2))), Ok((128, Some(2))), Ok((112, Some(2))), - Ok((96, Some(2))), Ok((80, Some(2))), Ok((64, Some(2))), Ok((48, Some(2))), + Ok((183, Some(0))), + Ok((182, Some(0))), + Ok((181, Some(0))), + Ok((180, Some(1))), + Ok((176, 
Some(2))), + Ok((160, Some(2))), + Ok((144, Some(2))), + Ok((128, Some(2))), + Ok((112, Some(2))), + Ok((96, Some(2))), + Ok((80, Some(2))), + Ok((64, Some(2))), + Ok((48, Some(2))), ], ); } @@ -276,10 +307,19 @@ mod tests { // when config activates at 0 AND ends at 170 config_range.end = Some(170); assert_eq!( - surface_iterator(config_range, 100_000u64, 40u64, 170u64).unwrap().collect::>(), + surface_iterator(config_range, 100_000u64, 40u64, 170u64) + .unwrap() + .collect::>(), vec![ - Ok((170, None)), Ok((160, Some(2))), Ok((144, Some(2))), Ok((128, Some(2))), Ok((112, Some(2))), - Ok((96, Some(2))), Ok((80, Some(2))), Ok((64, Some(2))), Ok((48, Some(2))), + Ok((170, None)), + Ok((160, Some(2))), + Ok((144, Some(2))), + Ok((128, Some(2))), + Ok((112, Some(2))), + Ok((96, Some(2))), + Ok((80, Some(2))), + Ok((64, Some(2))), + Ok((48, Some(2))), ], ); } diff --git a/primitives/state-machine/src/error.rs b/primitives/state-machine/src/error.rs index 2705e4623a784..acc5b6080c7a3 100644 --- a/primitives/state-machine/src/error.rs +++ b/primitives/state-machine/src/error.rs @@ -16,7 +16,6 @@ // limitations under the License. /// State Machine Errors - use sp_std::fmt; /// State Machine Error bound. diff --git a/primitives/state-machine/src/ext.rs b/primitives/state-machine/src/ext.rs index d7d65b905f49c..cf7cbd413b1f0 100644 --- a/primitives/state-machine/src/ext.rs +++ b/primitives/state-machine/src/ext.rs @@ -18,25 +18,28 @@ //! Concrete externalities implementation. 
use crate::{ - StorageKey, StorageValue, OverlayedChanges, IndexOperation, - backend::Backend, overlayed_changes::OverlayedExtensions, + backend::Backend, overlayed_changes::OverlayedExtensions, IndexOperation, OverlayedChanges, + StorageKey, StorageValue, }; +use codec::{Decode, Encode, EncodeAppend}; use hash_db::Hasher; use sp_core::{ - storage::{well_known_keys::is_child_storage_key, ChildInfo, TrackedStorageKey}, hexdisplay::HexDisplay, + storage::{well_known_keys::is_child_storage_key, ChildInfo, TrackedStorageKey}, }; -use sp_trie::{trie_types::Layout, empty_child_trie_root}; -use sp_externalities::{ - Externalities, Extensions, Extension, ExtensionStore, -}; -use codec::{Decode, Encode, EncodeAppend}; +use sp_externalities::{Extension, ExtensionStore, Extensions, Externalities}; +use sp_trie::{empty_child_trie_root, trie_types::Layout}; -use sp_std::{fmt, any::{Any, TypeId}, vec::Vec, vec, boxed::Box, cmp::Ordering}; -use crate::{warn, trace, log_error}; #[cfg(feature = "std")] use crate::changes_trie::State as ChangesTrieState; -use crate::StorageTransactionCache; +use crate::{log_error, trace, warn, StorageTransactionCache}; +use sp_std::{ + any::{Any, TypeId}, + boxed::Box, + cmp::Ordering, + fmt, vec, + vec::Vec, +}; #[cfg(feature = "std")] use std::error; @@ -46,7 +49,6 @@ const BENCHMARKING_FN: &str = "\ For that reason client started transactions before calling into runtime are not allowed. Without client transactions the loop condition garantuees the success of the tx close."; - #[cfg(feature = "std")] fn guard() -> sp_panic_handler::AbortGuard { sp_panic_handler::AbortGuard::force_abort() @@ -91,10 +93,10 @@ impl error::Error for Error { /// Wraps a read-only backend, call executor, and current overlayed changes. pub struct Ext<'a, H, N, B> - where - H: Hasher, - B: 'a + Backend, - N: crate::changes_trie::BlockNumber, +where + H: Hasher, + B: 'a + Backend, + N: crate::changes_trie::BlockNumber, { /// The overlayed changes to write to. 
overlay: &'a mut OverlayedChanges, @@ -114,12 +116,11 @@ pub struct Ext<'a, H, N, B> extensions: Option>, } - impl<'a, H, N, B> Ext<'a, H, N, B> - where - H: Hasher, - B: Backend, - N: crate::changes_trie::BlockNumber, +where + H: Hasher, + B: Backend, + N: crate::changes_trie::BlockNumber, { /// Create a new `Ext`. #[cfg(not(feature = "std"))] @@ -128,13 +129,7 @@ impl<'a, H, N, B> Ext<'a, H, N, B> storage_transaction_cache: &'a mut StorageTransactionCache, backend: &'a B, ) -> Self { - Ext { - overlay, - backend, - id: 0, - storage_transaction_cache, - _phantom: Default::default(), - } + Ext { overlay, backend, id: 0, storage_transaction_cache, _phantom: Default::default() } } /// Create a new `Ext` from overlayed changes and read-only backend @@ -176,7 +171,9 @@ where pub fn storage_pairs(&self) -> Vec<(StorageKey, StorageValue)> { use std::collections::HashMap; - self.backend.pairs().iter() + self.backend + .pairs() + .iter() .map(|&(ref k, ref v)| (k.to_vec(), Some(v.to_vec()))) .chain(self.overlay.changes().map(|(k, v)| (k.clone(), v.value().cloned()))) .collect::>() @@ -199,8 +196,11 @@ where fn storage(&self, key: &[u8]) -> Option { let _guard = guard(); - let result = self.overlay.storage(key).map(|x| x.map(|x| x.to_vec())).unwrap_or_else(|| - self.backend.storage(key).expect(EXT_NOT_ALLOWED_TO_FAIL)); + let result = self + .overlay + .storage(key) + .map(|x| x.map(|x| x.to_vec())) + .unwrap_or_else(|| self.backend.storage(key).expect(EXT_NOT_ALLOWED_TO_FAIL)); // NOTE: be careful about touching the key names – used outside substrate! 
trace!( @@ -222,7 +222,8 @@ where fn storage_hash(&self, key: &[u8]) -> Option> { let _guard = guard(); - let result = self.overlay + let result = self + .overlay .storage(key) .map(|x| x.map(|x| H::hash(x))) .unwrap_or_else(|| self.backend.storage_hash(key).expect(EXT_NOT_ALLOWED_TO_FAIL)); @@ -235,19 +236,15 @@ where result.map(|r| r.encode()) } - fn child_storage( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option { + fn child_storage(&self, child_info: &ChildInfo, key: &[u8]) -> Option { let _guard = guard(); - let result = self.overlay + let result = self + .overlay .child_storage(child_info, key) .map(|x| x.map(|x| x.to_vec())) - .unwrap_or_else(|| - self.backend.child_storage(child_info, key) - .expect(EXT_NOT_ALLOWED_TO_FAIL) - ); + .unwrap_or_else(|| { + self.backend.child_storage(child_info, key).expect(EXT_NOT_ALLOWED_TO_FAIL) + }); trace!(target: "state", "{:04x}: GetChild({}) {}={:?}", self.id, @@ -259,19 +256,15 @@ where result } - fn child_storage_hash( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option> { + fn child_storage_hash(&self, child_info: &ChildInfo, key: &[u8]) -> Option> { let _guard = guard(); - let result = self.overlay + let result = self + .overlay .child_storage(child_info, key) .map(|x| x.map(|x| H::hash(x))) - .unwrap_or_else(|| - self.backend.child_storage_hash(child_info, key) - .expect(EXT_NOT_ALLOWED_TO_FAIL) - ); + .unwrap_or_else(|| { + self.backend.child_storage_hash(child_info, key).expect(EXT_NOT_ALLOWED_TO_FAIL) + }); trace!(target: "state", "{:04x}: ChildHash({}) {}={:?}", self.id, @@ -299,16 +292,13 @@ where result } - fn exists_child_storage( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> bool { + fn exists_child_storage(&self, child_info: &ChildInfo, key: &[u8]) -> bool { let _guard = guard(); let result = match self.overlay.child_storage(child_info, key) { Some(x) => x.is_some(), - _ => self.backend + _ => self + .backend .exists_child_storage(child_info, key) 
.expect(EXT_NOT_ALLOWED_TO_FAIL), }; @@ -323,7 +313,8 @@ where } fn next_storage_key(&self, key: &[u8]) -> Option { - let mut next_backend_key = self.backend.next_storage_key(key).expect(EXT_NOT_ALLOWED_TO_FAIL); + let mut next_backend_key = + self.backend.next_storage_key(key).expect(EXT_NOT_ALLOWED_TO_FAIL); let mut overlay_changes = self.overlay.iter_after(key).peekable(); match (&next_backend_key, overlay_changes.peek()) { @@ -343,9 +334,10 @@ where // If the `backend_key` and `overlay_key` are equal, it means that we need // to search for the next backend key, because the overlay has overwritten // this key. - next_backend_key = self.backend.next_storage_key( - &overlay_key.0, - ).expect(EXT_NOT_ALLOWED_TO_FAIL); + next_backend_key = self + .backend + .next_storage_key(&overlay_key.0) + .expect(EXT_NOT_ALLOWED_TO_FAIL); } } @@ -358,18 +350,13 @@ where } } - fn next_child_storage_key( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option { - let mut next_backend_key = self.backend + fn next_child_storage_key(&self, child_info: &ChildInfo, key: &[u8]) -> Option { + let mut next_backend_key = self + .backend .next_child_storage_key(child_info, key) .expect(EXT_NOT_ALLOWED_TO_FAIL); - let mut overlay_changes = self.overlay.child_iter_after( - child_info.storage_key(), - key - ).peekable(); + let mut overlay_changes = + self.overlay.child_iter_after(child_info.storage_key(), key).peekable(); match (&next_backend_key, overlay_changes.peek()) { (_, None) => next_backend_key, @@ -388,10 +375,10 @@ where // If the `backend_key` and `overlay_key` are equal, it means that we need // to search for the next backend key, because the overlay has overwritten // this key. 
- next_backend_key = self.backend.next_child_storage_key( - child_info, - &overlay_key.0, - ).expect(EXT_NOT_ALLOWED_TO_FAIL); + next_backend_key = self + .backend + .next_child_storage_key(child_info, &overlay_key.0) + .expect(EXT_NOT_ALLOWED_TO_FAIL); } } @@ -408,7 +395,7 @@ where let _guard = guard(); if is_child_storage_key(&key) { warn!(target: "trie", "Refuse to directly set child storage key"); - return; + return } // NOTE: be careful about touching the key names – used outside substrate! @@ -448,11 +435,7 @@ where self.overlay.set_child_storage(child_info, key, value); } - fn kill_child_storage( - &mut self, - child_info: &ChildInfo, - limit: Option, - ) -> (bool, u32) { + fn kill_child_storage(&mut self, child_info: &ChildInfo, limit: Option) -> (bool, u32) { trace!(target: "state", "{:04x}: KillChild({})", self.id, HexDisplay::from(&child_info.storage_key()), @@ -472,7 +455,7 @@ where if sp_core::storage::well_known_keys::starts_with_child_storage_key(prefix) { warn!(target: "trie", "Refuse to directly clear prefix that is part or contains of child storage key"); - return (false, 0); + return (false, 0) } self.mark_dirty(); @@ -498,11 +481,7 @@ where self.limit_remove_from_backend(Some(child_info), Some(prefix), limit) } - fn storage_append( - &mut self, - key: Vec, - value: Vec, - ) { + fn storage_append(&mut self, key: Vec, value: Vec) { trace!(target: "state", "{:04x}: Append {}={}", self.id, HexDisplay::from(&key), @@ -513,10 +492,9 @@ where self.mark_dirty(); let backend = &mut self.backend; - let current_value = self.overlay.value_mut_or_insert_with( - &key, - || backend.storage(&key).expect(EXT_NOT_ALLOWED_TO_FAIL).unwrap_or_default() - ); + let current_value = self.overlay.value_mut_or_insert_with(&key, || { + backend.storage(&key).expect(EXT_NOT_ALLOWED_TO_FAIL).unwrap_or_default() + }); StorageAppend::new(current_value).append(value); } @@ -527,7 +505,7 @@ where self.id, HexDisplay::from(&root.as_ref()), ); - return root.encode(); + return 
root.encode() } let root = self.overlay.storage_root(self.backend, self.storage_transaction_cache); @@ -535,10 +513,7 @@ where root.encode() } - fn child_storage_root( - &mut self, - child_info: &ChildInfo, - ) -> Vec { + fn child_storage_root(&mut self, child_info: &ChildInfo) -> Vec { let _guard = guard(); let storage_key = child_info.storage_key(); let prefixed_storage_key = child_info.prefixed_storage_key(); @@ -546,9 +521,7 @@ where let root = self .storage(prefixed_storage_key.as_slice()) .and_then(|k| Decode::decode(&mut &k[..]).ok()) - .unwrap_or_else( - || empty_child_trie_root::>() - ); + .unwrap_or_else(|| empty_child_trie_root::>()); trace!(target: "state", "{:04x}: ChildRoot({})(cached) {}", self.id, HexDisplay::from(&storage_key), @@ -587,9 +560,7 @@ where let root = self .storage(prefixed_storage_key.as_slice()) .and_then(|k| Decode::decode(&mut &k[..]).ok()) - .unwrap_or_else( - || empty_child_trie_root::>() - ); + .unwrap_or_else(|| empty_child_trie_root::>()); trace!(target: "state", "{:04x}: ChildRoot({})(no_change) {}", self.id, HexDisplay::from(&storage_key.as_ref()), @@ -625,10 +596,8 @@ where index, HexDisplay::from(&hash), ); - self.overlay.add_transaction_index(IndexOperation::Renew { - extrinsic: index, - hash: hash.to_vec(), - }); + self.overlay + .add_transaction_index(IndexOperation::Renew { extrinsic: index, hash: hash.to_vec() }); } #[cfg(not(feature = "std"))] @@ -639,7 +608,8 @@ where #[cfg(feature = "std")] fn storage_changes_root(&mut self, mut parent_hash: &[u8]) -> Result>, ()> { let _guard = guard(); - if let Some(ref root) = self.storage_transaction_cache.changes_trie_transaction_storage_root { + if let Some(ref root) = self.storage_transaction_cache.changes_trie_transaction_storage_root + { trace!( target: "state", "{:04x}: ChangesRoot({})(cached) {:?}", @@ -653,13 +623,13 @@ where let root = self.overlay.changes_trie_root( self.backend, self.changes_trie_state.as_ref(), - Decode::decode(&mut parent_hash).map_err(|e| + 
Decode::decode(&mut parent_hash).map_err(|e| { trace!( target: "state", "Failed to decode changes root parent hash: {}", e, ) - )?, + })?, true, self.storage_transaction_cache, ); @@ -693,13 +663,15 @@ where for _ in 0..self.overlay.transaction_depth() { self.overlay.rollback_transaction().expect(BENCHMARKING_FN); } - self.overlay.drain_storage_changes( - self.backend, - #[cfg(feature = "std")] - None, - Default::default(), - self.storage_transaction_cache, - ).expect(EXT_NOT_ALLOWED_TO_FAIL); + self.overlay + .drain_storage_changes( + self.backend, + #[cfg(feature = "std")] + None, + Default::default(), + self.storage_transaction_cache, + ) + .expect(EXT_NOT_ALLOWED_TO_FAIL); self.backend.wipe().expect(EXT_NOT_ALLOWED_TO_FAIL); self.mark_dirty(); self.overlay @@ -711,19 +683,24 @@ where for _ in 0..self.overlay.transaction_depth() { self.overlay.commit_transaction().expect(BENCHMARKING_FN); } - let changes = self.overlay.drain_storage_changes( - self.backend, - #[cfg(feature = "std")] - None, - Default::default(), - self.storage_transaction_cache, - ).expect(EXT_NOT_ALLOWED_TO_FAIL); - self.backend.commit( - changes.transaction_storage_root, - changes.transaction, - changes.main_storage_changes, - changes.child_storage_changes, - ).expect(EXT_NOT_ALLOWED_TO_FAIL); + let changes = self + .overlay + .drain_storage_changes( + self.backend, + #[cfg(feature = "std")] + None, + Default::default(), + self.storage_transaction_cache, + ) + .expect(EXT_NOT_ALLOWED_TO_FAIL); + self.backend + .commit( + changes.transaction_storage_root, + changes.transaction, + changes.main_storage_changes, + changes.child_storage_changes, + ) + .expect(EXT_NOT_ALLOWED_TO_FAIL); self.mark_dirty(); self.overlay .enter_runtime() @@ -775,13 +752,13 @@ where self.backend.apply_to_keys_while(child_info, prefix, |key| { if num_deleted == limit { all_deleted = false; - return false; + return false } if let Some(num) = num_deleted.checked_add(1) { num_deleted = num; } else { all_deleted = false; - 
return false; + return false } if let Some(child_info) = child_info { self.overlay.set_child_storage(child_info, key.to_vec(), None); @@ -840,7 +817,7 @@ impl<'a> StorageAppend<'a> { "Failed to append value, resetting storage item to `[value]`.", ); value.encode() - } + }, }; } } @@ -896,7 +873,10 @@ where } } - fn deregister_extension_by_type_id(&mut self, type_id: TypeId) -> Result<(), sp_externalities::Error> { + fn deregister_extension_by_type_id( + &mut self, + type_id: TypeId, + ) -> Result<(), sp_externalities::Error> { if let Some(ref mut extensions) = self.extensions { if extensions.deregister(type_id) { Ok(()) @@ -912,24 +892,19 @@ where #[cfg(test)] mod tests { use super::*; + use crate::{ + changes_trie::{ + Configuration as ChangesTrieConfiguration, InMemoryStorage as TestChangesTrieStorage, + }, + InMemoryBackend, + }; + use codec::Encode; use hex_literal::hex; use num_traits::Zero; - use codec::Encode; use sp_core::{ - H256, - Blake2Hasher, map, - storage::{ - Storage, - StorageChild, - well_known_keys::EXTRINSIC_INDEX, - }, - }; - use crate::{ - changes_trie::{ - Configuration as ChangesTrieConfiguration, - InMemoryStorage as TestChangesTrieStorage, - }, InMemoryBackend, + storage::{well_known_keys::EXTRINSIC_INDEX, Storage, StorageChild}, + Blake2Hasher, H256, }; type TestBackend = InMemoryBackend; @@ -947,10 +922,7 @@ mod tests { } fn changes_trie_config() -> ChangesTrieConfiguration { - ChangesTrieConfiguration { - digest_interval: 0, - digest_levels: 0, - } + ChangesTrieConfiguration { digest_interval: 0, digest_levels: 0 } } #[test] @@ -1013,8 +985,9 @@ mod tests { vec![20] => vec![20], vec![40] => vec![40] ], - children_default: map![] - }.into(); + children_default: map![], + } + .into(); let ext = TestExt::new(&mut overlay, &mut cache, &backend, None, None); @@ -1056,8 +1029,9 @@ mod tests { top: map![ vec![30] => vec![30] ], - children_default: map![] - }.into(); + children_default: map![], + } + .into(); let ext = TestExt::new(&mut 
overlay, &mut cache, &backend, None, None); @@ -1087,7 +1061,8 @@ mod tests { child_info: child_info.to_owned(), } ], - }.into(); + } + .into(); let ext = TestExt::new(&mut overlay, &mut cache, &backend, None, None); @@ -1131,7 +1106,8 @@ mod tests { child_info: child_info.to_owned(), } ], - }.into(); + } + .into(); let ext = TestExt::new(&mut overlay, &mut cache, &backend, None, None); @@ -1142,10 +1118,7 @@ mod tests { ); assert_eq!(ext.child_storage(child_info, &[20]), None); - assert_eq!( - ext.child_storage_hash(child_info, &[20]), - None, - ); + assert_eq!(ext.child_storage_hash(child_info, &[20]), None,); assert_eq!(ext.child_storage(child_info, &[30]), Some(vec![31])); assert_eq!( @@ -1170,7 +1143,8 @@ mod tests { child_info: child_info.to_owned(), } ], - }.into(); + } + .into(); let ext = TestExt::new(&mut overlay, &mut cache, &backend, None, None); diff --git a/primitives/state-machine/src/in_memory_backend.rs b/primitives/state-machine/src/in_memory_backend.rs index 4ee16dfd2f8a8..4daf1004a85fc 100644 --- a/primitives/state-machine/src/in_memory_backend.rs +++ b/primitives/state-machine/src/in_memory_backend.rs @@ -18,13 +18,13 @@ //! State machine in memory backend. use crate::{ - StorageKey, StorageValue, StorageCollection, trie_backend::TrieBackend, backend::Backend, + backend::Backend, trie_backend::TrieBackend, StorageCollection, StorageKey, StorageValue, }; -use std::collections::{BTreeMap, HashMap}; -use hash_db::Hasher; -use sp_trie::{MemoryDB, empty_trie_root, Layout}; use codec::Codec; +use hash_db::Hasher; use sp_core::storage::{ChildInfo, Storage}; +use sp_trie::{empty_trie_root, Layout, MemoryDB}; +use std::collections::{BTreeMap, HashMap}; /// Create a new empty instance of in-memory backend. 
pub fn new_in_mem() -> TrieBackend, H> @@ -40,9 +40,7 @@ where H::Out: Codec + Ord, { /// Copy the state, with applied updates - pub fn update< - T: IntoIterator, StorageCollection)> - >( + pub fn update, StorageCollection)>>( &self, changes: T, ) -> Self { @@ -52,19 +50,16 @@ where } /// Insert values into backend trie. - pub fn insert< - T: IntoIterator, StorageCollection)> - >( + pub fn insert, StorageCollection)>>( &mut self, changes: T, ) { let (top, child) = changes.into_iter().partition::, _>(|v| v.0.is_none()); let (root, transaction) = self.full_storage_root( top.iter().map(|(_, v)| v).flatten().map(|(k, v)| (&k[..], v.as_deref())), - child.iter() - .filter_map(|v| - v.0.as_ref().map(|c| (c, v.1.iter().map(|(k, v)| (&k[..], v.as_deref())))) - ), + child.iter().filter_map(|v| { + v.0.as_ref().map(|c| (c, v.1.iter().map(|(k, v)| (&k[..], v.as_deref())))) + }), ); self.apply_transaction(root, transaction); @@ -115,7 +110,9 @@ where fn from(inner: HashMap, BTreeMap>) -> Self { let mut backend = new_in_mem(); backend.insert( - inner.into_iter().map(|(k, m)| (k, m.into_iter().map(|(k, v)| (k, Some(v))).collect())), + inner + .into_iter() + .map(|(k, m)| (k, m.into_iter().map(|(k, v)| (k, Some(v))).collect())), ); backend } @@ -126,8 +123,11 @@ where H::Out: Codec + Ord, { fn from(inners: Storage) -> Self { - let mut inner: HashMap, BTreeMap> - = inners.children_default.into_iter().map(|(_k, c)| (Some(c.child_info), c.data)).collect(); + let mut inner: HashMap, BTreeMap> = inners + .children_default + .into_iter() + .map(|(_k, c)| (Some(c.child_info), c.data)) + .collect(); inner.insert(None, inners.top); inner.into() } @@ -144,16 +144,13 @@ where } } -impl From, StorageCollection)>> - for TrieBackend, H> +impl From, StorageCollection)>> for TrieBackend, H> where H::Out: Codec + Ord, { - fn from( - inner: Vec<(Option, StorageCollection)>, - ) -> Self { - let mut expanded: HashMap, BTreeMap> - = HashMap::new(); + fn from(inner: Vec<(Option, StorageCollection)>) -> 
Self { + let mut expanded: HashMap, BTreeMap> = + HashMap::new(); for (child_info, key_values) in inner { let entry = expanded.entry(child_info).or_default(); for (key, value) in key_values { @@ -169,8 +166,8 @@ where #[cfg(test)] mod tests { use super::*; - use sp_runtime::traits::BlakeTwo256; use crate::backend::Backend; + use sp_runtime::traits::BlakeTwo256; /// Assert in memory backend with only child trie keys works as trie backend. #[test] @@ -178,15 +175,10 @@ mod tests { let storage = new_in_mem::(); let child_info = ChildInfo::new_default(b"1"); let child_info = &child_info; - let mut storage = storage.update( - vec![( - Some(child_info.clone()), - vec![(b"2".to_vec(), Some(b"3".to_vec()))] - )] - ); + let mut storage = storage + .update(vec![(Some(child_info.clone()), vec![(b"2".to_vec(), Some(b"3".to_vec()))])]); let trie_backend = storage.as_trie_backend().unwrap(); - assert_eq!(trie_backend.child_storage(child_info, b"2").unwrap(), - Some(b"3".to_vec())); + assert_eq!(trie_backend.child_storage(child_info, b"2").unwrap(), Some(b"3".to_vec())); let storage_key = child_info.prefixed_storage_key(); assert!(trie_backend.storage(storage_key.as_slice()).unwrap().is_some()); } @@ -196,8 +188,10 @@ mod tests { let mut storage = new_in_mem::(); let child_info = ChildInfo::new_default(b"1"); - storage.insert(vec![(Some(child_info.clone()), vec![(b"2".to_vec(), Some(b"3".to_vec()))])]); - storage.insert(vec![(Some(child_info.clone()), vec![(b"1".to_vec(), Some(b"3".to_vec()))])]); + storage + .insert(vec![(Some(child_info.clone()), vec![(b"2".to_vec(), Some(b"3".to_vec()))])]); + storage + .insert(vec![(Some(child_info.clone()), vec![(b"1".to_vec(), Some(b"3".to_vec()))])]); assert_eq!(storage.child_storage(&child_info, &b"2"[..]), Ok(Some(b"3".to_vec()))); assert_eq!(storage.child_storage(&child_info, &b"1"[..]), Ok(Some(b"3".to_vec()))); diff --git a/primitives/state-machine/src/lib.rs b/primitives/state-machine/src/lib.rs index bc5b48f02db4e..e2162df5cfd19 
100644 --- a/primitives/state-machine/src/lib.rs +++ b/primitives/state-machine/src/lib.rs @@ -22,23 +22,23 @@ pub mod backend; #[cfg(feature = "std")] -mod in_memory_backend; +mod basic; #[cfg(feature = "std")] mod changes_trie; mod error; mod ext; #[cfg(feature = "std")] -mod testing; -#[cfg(feature = "std")] -mod basic; +mod in_memory_backend; pub(crate) mod overlayed_changes; #[cfg(feature = "std")] mod proving_backend; -mod trie_backend; -mod trie_backend_essence; -mod stats; #[cfg(feature = "std")] mod read_only; +mod stats; +#[cfg(feature = "std")] +mod testing; +mod trie_backend; +mod trie_backend_essence; #[cfg(feature = "std")] pub use std_reexport::*; @@ -46,7 +46,7 @@ pub use std_reexport::*; #[cfg(feature = "std")] pub use execution::*; #[cfg(feature = "std")] -pub use log::{debug, warn, error as log_error}; +pub use log::{debug, error as log_error, warn}; #[cfg(feature = "std")] pub use tracing::trace; @@ -55,12 +55,12 @@ pub use tracing::trace; #[cfg(not(feature = "std"))] #[macro_export] macro_rules! warn { - (target: $target:expr, $($arg:tt)+) => ( + (target: $target:expr, $($arg:tt)+) => { () - ); - ($($arg:tt)+) => ( + }; + ($($arg:tt)+) => { () - ); + }; } /// In no_std we skip logs for state_machine, this macro @@ -68,12 +68,12 @@ macro_rules! warn { #[cfg(not(feature = "std"))] #[macro_export] macro_rules! debug { - (target: $target:expr, $($arg:tt)+) => ( + (target: $target:expr, $($arg:tt)+) => { () - ); - ($($arg:tt)+) => ( + }; + ($($arg:tt)+) => { () - ); + }; } /// In no_std we skip logs for state_machine, this macro @@ -81,12 +81,12 @@ macro_rules! debug { #[cfg(not(feature = "std"))] #[macro_export] macro_rules! trace { - (target: $target:expr, $($arg:tt)+) => ( + (target: $target:expr, $($arg:tt)+) => { () - ); - ($($arg:tt)+) => ( + }; + ($($arg:tt)+) => { () - ); + }; } /// In no_std we skip logs for state_machine, this macro @@ -94,12 +94,12 @@ macro_rules! trace { #[cfg(not(feature = "std"))] #[macro_export] macro_rules! 
log_error { - (target: $target:expr, $($arg:tt)+) => ( + (target: $target:expr, $($arg:tt)+) => { () - ); - ($($arg:tt)+) => ( + }; + ($($arg:tt)+) => { () - ); + }; } /// Default error type to use with state machine trie backend. @@ -117,20 +117,19 @@ impl sp_std::fmt::Display for DefaultError { } } -pub use crate::overlayed_changes::{ - OverlayedChanges, StorageKey, StorageValue, - StorageCollection, ChildStorageCollection, - StorageChanges, StorageTransactionCache, - OffchainChangesCollection, - OffchainOverlayedChanges, - IndexOperation, +pub use crate::{ + backend::Backend, + ext::Ext, + overlayed_changes::{ + ChildStorageCollection, IndexOperation, OffchainChangesCollection, + OffchainOverlayedChanges, OverlayedChanges, StorageChanges, StorageCollection, StorageKey, + StorageTransactionCache, StorageValue, + }, + stats::{StateMachineStats, UsageInfo, UsageUnit}, + trie_backend::TrieBackend, + trie_backend_essence::{Storage, TrieBackendStorage}, }; -pub use crate::backend::Backend; -pub use crate::trie_backend_essence::{TrieBackendStorage, Storage}; -pub use crate::trie_backend::TrieBackend; -pub use crate::stats::{UsageInfo, UsageUnit, StateMachineStats}; pub use error::{Error, ExecutionError}; -pub use crate::ext::Ext; #[cfg(not(feature = "std"))] mod changes_trie { @@ -143,45 +142,45 @@ mod changes_trie { #[cfg(feature = "std")] mod std_reexport { - pub use sp_trie::{trie_types::{Layout, TrieDBMut}, StorageProof, TrieMut, DBValue, MemoryDB}; - pub use crate::testing::TestExternalities; - pub use crate::basic::BasicExternalities; - pub use crate::read_only::{ReadOnlyExternalities, InspectState}; - pub use crate::changes_trie::{ - AnchorBlockId as ChangesTrieAnchorBlockId, - State as ChangesTrieState, - Storage as ChangesTrieStorage, - RootsStorage as ChangesTrieRootsStorage, - InMemoryStorage as InMemoryChangesTrieStorage, - BuildCache as ChangesTrieBuildCache, - CacheAction as ChangesTrieCacheAction, - ConfigurationRange as ChangesTrieConfigurationRange, - 
key_changes, key_changes_proof, - key_changes_proof_check, key_changes_proof_check_with_db, - prune as prune_changes_tries, - disabled_state as disabled_changes_trie_state, - BlockNumber as ChangesTrieBlockNumber, + pub use crate::{ + basic::BasicExternalities, + changes_trie::{ + disabled_state as disabled_changes_trie_state, key_changes, key_changes_proof, + key_changes_proof_check, key_changes_proof_check_with_db, prune as prune_changes_tries, + AnchorBlockId as ChangesTrieAnchorBlockId, BlockNumber as ChangesTrieBlockNumber, + BuildCache as ChangesTrieBuildCache, CacheAction as ChangesTrieCacheAction, + ConfigurationRange as ChangesTrieConfigurationRange, + InMemoryStorage as InMemoryChangesTrieStorage, RootsStorage as ChangesTrieRootsStorage, + State as ChangesTrieState, Storage as ChangesTrieStorage, + }, + error::{Error, ExecutionError}, + in_memory_backend::new_in_mem, + proving_backend::{ + create_proof_check_backend, ProofRecorder, ProvingBackend, ProvingBackendRecorder, + }, + read_only::{InspectState, ReadOnlyExternalities}, + testing::TestExternalities, }; - pub use crate::proving_backend::{ - create_proof_check_backend, ProofRecorder, ProvingBackend, ProvingBackendRecorder, + pub use sp_trie::{ + trie_types::{Layout, TrieDBMut}, + DBValue, MemoryDB, StorageProof, TrieMut, }; - pub use crate::error::{Error, ExecutionError}; - pub use crate::in_memory_backend::new_in_mem; } #[cfg(feature = "std")] mod execution { use super::*; - use std::{fmt, result, collections::HashMap, panic::UnwindSafe}; - use log::{warn, trace}; + use codec::{Codec, Decode, Encode}; use hash_db::Hasher; - use codec::{Decode, Encode, Codec}; + use log::{trace, warn}; use sp_core::{ - storage::ChildInfo, NativeOrEncoded, NeverNativeValue, hexdisplay::HexDisplay, + hexdisplay::HexDisplay, + storage::ChildInfo, traits::{CodeExecutor, ReadRuntimeVersionExt, RuntimeCode, SpawnNamed}, + NativeOrEncoded, NeverNativeValue, }; use sp_externalities::Extensions; - + use 
std::{collections::HashMap, fmt, panic::UnwindSafe, result}; const PROOF_CLOSE_TRANSACTION: &str = "\ Closing a transaction that was started in this function. Client initiated transactions @@ -193,10 +192,8 @@ mod execution { pub type DefaultHandler = fn(CallResult, CallResult) -> CallResult; /// Type of changes trie transaction. - pub type ChangesTrieTransaction = ( - MemoryDB, - ChangesTrieCacheAction<::Out, N>, - ); + pub type ChangesTrieTransaction = + (MemoryDB, ChangesTrieCacheAction<::Out, N>); /// Trie backend with in-memory storage. pub type InMemoryBackend = TrieBackend, H>; @@ -259,14 +256,14 @@ mod execution { self, ) -> ExecutionManager> { match self { - ExecutionStrategy::AlwaysWasm => ExecutionManager::AlwaysWasm(BackendTrustLevel::Trusted), + ExecutionStrategy::AlwaysWasm => + ExecutionManager::AlwaysWasm(BackendTrustLevel::Trusted), ExecutionStrategy::NativeWhenPossible => ExecutionManager::NativeWhenPossible, ExecutionStrategy::NativeElseWasm => ExecutionManager::NativeElseWasm, ExecutionStrategy::Both => ExecutionManager::Both(|wasm_result, native_result| { warn!( "Consensus error between wasm {:?} and native {:?}. Using wasm.", - wasm_result, - native_result, + wasm_result, native_result, ); warn!(" Native result {:?}", native_result); warn!(" Wasm result {:?}", wasm_result); @@ -293,10 +290,10 @@ mod execution { /// The substrate state machine. 
pub struct StateMachine<'a, B, H, N, Exec> - where - H: Hasher, - B: Backend, - N: ChangesTrieBlockNumber, + where + H: Hasher, + B: Backend, + N: ChangesTrieBlockNumber, { backend: &'a B, exec: &'a Exec, @@ -310,7 +307,8 @@ mod execution { stats: StateMachineStats, } - impl<'a, B, H, N, Exec> Drop for StateMachine<'a, B, H, N, Exec> where + impl<'a, B, H, N, Exec> Drop for StateMachine<'a, B, H, N, Exec> + where H: Hasher, B: Backend, N: ChangesTrieBlockNumber, @@ -320,7 +318,8 @@ mod execution { } } - impl<'a, B, H, N, Exec> StateMachine<'a, B, H, N, Exec> where + impl<'a, B, H, N, Exec> StateMachine<'a, B, H, N, Exec> + where H: Hasher, H::Out: Ord + 'static + codec::Codec, Exec: CodeExecutor + Clone + 'static, @@ -383,19 +382,19 @@ mod execution { self.execute_using_consensus_failure_handler::<_, NeverNativeValue, fn() -> _>( strategy.get_manager(), None, - ).map(NativeOrEncoded::into_encoded) + ) + .map(NativeOrEncoded::into_encoded) } fn execute_aux( &mut self, use_native: bool, native_call: Option, - ) -> ( - CallResult, - bool, - ) where + ) -> (CallResult, bool) + where R: Decode + Encode + PartialEq, - NC: FnOnce() -> result::Result> + UnwindSafe, + NC: FnOnce() -> result::Result> + + UnwindSafe, { let mut cache = StorageTransactionCache::default(); @@ -404,7 +403,9 @@ mod execution { None => &mut cache, }; - self.overlay.enter_runtime().expect("StateMachine is never called from the runtime; qed"); + self.overlay + .enter_runtime() + .expect("StateMachine is never called from the runtime; qed"); let mut ext = Ext::new( self.overlay, @@ -432,7 +433,8 @@ mod execution { native_call, ); - self.overlay.exit_runtime() + self.overlay + .exit_runtime() .expect("Runtime is not able to call this function in the overlay; qed"); trace!( @@ -450,27 +452,25 @@ mod execution { mut native_call: Option, on_consensus_failure: Handler, ) -> CallResult - where - R: Decode + Encode + PartialEq, - NC: FnOnce() -> result::Result> + UnwindSafe, - Handler: FnOnce( - CallResult, 
- CallResult, - ) -> CallResult + where + R: Decode + Encode + PartialEq, + NC: FnOnce() -> result::Result> + + UnwindSafe, + Handler: FnOnce( + CallResult, + CallResult, + ) -> CallResult, { self.overlay.start_transaction(); let (result, was_native) = self.execute_aux(true, native_call.take()); if was_native { self.overlay.rollback_transaction().expect(PROOF_CLOSE_TRANSACTION); - let (wasm_result, _) = self.execute_aux( - false, - native_call, - ); + let (wasm_result, _) = self.execute_aux(false, native_call); - if (result.is_ok() && wasm_result.is_ok() - && result.as_ref().ok() == wasm_result.as_ref().ok()) - || result.is_err() && wasm_result.is_err() + if (result.is_ok() && + wasm_result.is_ok() && result.as_ref().ok() == wasm_result.as_ref().ok()) || + result.is_err() && wasm_result.is_err() { result } else { @@ -486,25 +486,20 @@ mod execution { &mut self, mut native_call: Option, ) -> CallResult - where - R: Decode + Encode + PartialEq, - NC: FnOnce() -> result::Result> + UnwindSafe, + where + R: Decode + Encode + PartialEq, + NC: FnOnce() -> result::Result> + + UnwindSafe, { self.overlay.start_transaction(); - let (result, was_native) = self.execute_aux( - true, - native_call.take(), - ); + let (result, was_native) = self.execute_aux(true, native_call.take()); if !was_native || result.is_ok() { self.overlay.commit_transaction().expect(PROOF_CLOSE_TRANSACTION); result } else { self.overlay.rollback_transaction().expect(PROOF_CLOSE_TRANSACTION); - let (wasm_result, _) = self.execute_aux( - false, - native_call, - ); + let (wasm_result, _) = self.execute_aux(false, native_call); wasm_result } } @@ -523,40 +518,33 @@ mod execution { manager: ExecutionManager, mut native_call: Option, ) -> Result, Box> - where - R: Decode + Encode + PartialEq, - NC: FnOnce() -> result::Result> + UnwindSafe, - Handler: FnOnce( - CallResult, - CallResult, - ) -> CallResult + where + R: Decode + Encode + PartialEq, + NC: FnOnce() -> result::Result> + + UnwindSafe, + Handler: FnOnce( 
+ CallResult, + CallResult, + ) -> CallResult, { let changes_tries_enabled = self.changes_trie_state.is_some(); self.overlay.set_collect_extrinsics(changes_tries_enabled); let result = { match manager { - ExecutionManager::Both(on_consensus_failure) => { - self.execute_call_with_both_strategy( - native_call.take(), - on_consensus_failure, - ) - }, - ExecutionManager::NativeElseWasm => { - self.execute_call_with_native_else_wasm_strategy( - native_call.take(), - ) - }, + ExecutionManager::Both(on_consensus_failure) => self + .execute_call_with_both_strategy(native_call.take(), on_consensus_failure), + ExecutionManager::NativeElseWasm => + self.execute_call_with_native_else_wasm_strategy(native_call.take()), ExecutionManager::AlwaysWasm(trust_level) => { let _abort_guard = match trust_level { BackendTrustLevel::Trusted => None, - BackendTrustLevel::Untrusted => Some(sp_panic_handler::AbortGuard::never_abort()), + BackendTrustLevel::Untrusted => + Some(sp_panic_handler::AbortGuard::never_abort()), }; self.execute_aux(false, native_call).0 }, - ExecutionManager::NativeWhenPossible => { - self.execute_aux(true, native_call).0 - }, + ExecutionManager::NativeWhenPossible => self.execute_aux(true, native_call).0, } }; @@ -582,7 +570,8 @@ mod execution { N: crate::changes_trie::BlockNumber, Spawn: SpawnNamed + Send + 'static, { - let trie_backend = backend.as_trie_backend() + let trie_backend = backend + .as_trie_backend() .ok_or_else(|| Box::new(ExecutionError::UnableToGenerateProof) as Box)?; prove_execution_on_trie_backend::<_, _, N, _, _>( trie_backend, @@ -704,14 +693,12 @@ mod execution { sm.execute_using_consensus_failure_handler::<_, NeverNativeValue, fn() -> _>( always_untrusted_wasm(), None, - ).map(NativeOrEncoded::into_encoded) + ) + .map(NativeOrEncoded::into_encoded) } /// Generate storage read proof. 
- pub fn prove_read( - mut backend: B, - keys: I, - ) -> Result> + pub fn prove_read(mut backend: B, keys: I) -> Result> where B: Backend, H: Hasher, @@ -719,10 +706,9 @@ mod execution { I: IntoIterator, I::Item: AsRef<[u8]>, { - let trie_backend = backend.as_trie_backend() - .ok_or_else( - || Box::new(ExecutionError::UnableToGenerateProof) as Box - )?; + let trie_backend = backend + .as_trie_backend() + .ok_or_else(|| Box::new(ExecutionError::UnableToGenerateProof) as Box)?; prove_read_on_trie_backend(trie_backend, keys) } @@ -739,9 +725,16 @@ mod execution { H: Hasher, H::Out: Ord + Codec, { - let trie_backend = backend.as_trie_backend() + let trie_backend = backend + .as_trie_backend() .ok_or_else(|| Box::new(ExecutionError::UnableToGenerateProof) as Box)?; - prove_range_read_with_size_on_trie_backend(trie_backend, child_info, prefix, size_limit, start_at) + prove_range_read_with_size_on_trie_backend( + trie_backend, + child_info, + prefix, + size_limit, + start_at, + ) } /// Generate range storage read proof on an existing trie backend. 
@@ -759,14 +752,22 @@ mod execution { { let proving_backend = proving_backend::ProvingBackend::::new(trie_backend); let mut count = 0; - proving_backend.apply_to_key_values_while(child_info, prefix, start_at, |_key, _value| { - if count == 0 || proving_backend.estimate_encoded_size() <= size_limit { - count += 1; - true - } else { - false - } - }, false).map_err(|e| Box::new(e) as Box)?; + proving_backend + .apply_to_key_values_while( + child_info, + prefix, + start_at, + |_key, _value| { + if count == 0 || proving_backend.estimate_encoded_size() <= size_limit { + count += 1; + true + } else { + false + } + }, + false, + ) + .map_err(|e| Box::new(e) as Box)?; Ok((proving_backend.extract_proof(), count)) } @@ -783,7 +784,8 @@ mod execution { I: IntoIterator, I::Item: AsRef<[u8]>, { - let trie_backend = backend.as_trie_backend() + let trie_backend = backend + .as_trie_backend() .ok_or_else(|| Box::new(ExecutionError::UnableToGenerateProof) as Box)?; prove_child_read_on_trie_backend(trie_backend, child_info, keys) } @@ -923,7 +925,8 @@ mod execution { H: Hasher, H::Out: Ord + Codec, { - proving_backend.child_storage(child_info, key) + proving_backend + .child_storage(child_info, key) .map_err(|e| Box::new(e) as Box) } @@ -943,10 +946,16 @@ mod execution { H::Out: Ord + Codec, { let mut values = Vec::new(); - let result = proving_backend.apply_to_key_values_while(child_info, prefix, start_at, |key, value| { - values.push((key.to_vec(), value.to_vec())); - count.as_ref().map_or(true, |c| (values.len() as u32) < *c) - }, true); + let result = proving_backend.apply_to_key_values_while( + child_info, + prefix, + start_at, + |key, value| { + values.push((key.to_vec(), value.to_vec())); + count.as_ref().map_or(true, |c| (values.len() as u32) < *c) + }, + true, + ); match result { Ok(completed) => Ok((values, completed)), Err(e) => Err(Box::new(e) as Box), @@ -956,23 +965,22 @@ mod execution { #[cfg(test)] mod tests { - use std::collections::BTreeMap; - use codec::Encode; - 
use super::*; - use super::ext::Ext; - use super::changes_trie::Configuration as ChangesTrieConfig; + use super::{changes_trie::Configuration as ChangesTrieConfig, ext::Ext, *}; + use crate::execution::CallResult; + use codec::{Decode, Encode}; use sp_core::{ - map, traits::{Externalities, RuntimeCode}, testing::TaskExecutor, + map, + storage::ChildInfo, + testing::TaskExecutor, + traits::{CodeExecutor, Externalities, RuntimeCode}, + NativeOrEncoded, NeverNativeValue, }; use sp_runtime::traits::BlakeTwo256; - use std::{result, collections::HashMap, panic::UnwindSafe}; - use codec::Decode; - use sp_core::{ - storage::ChildInfo, NativeOrEncoded, NeverNativeValue, - traits::CodeExecutor, + use std::{ + collections::{BTreeMap, HashMap}, + panic::UnwindSafe, + result, }; - use crate::execution::CallResult; - #[derive(Clone)] struct DummyCodeExecutor { @@ -1000,12 +1008,7 @@ mod tests { if self.change_changes_trie_config { ext.place_storage( sp_core::storage::well_known_keys::CHANGES_TRIE_CONFIG.to_vec(), - Some( - ChangesTrieConfig { - digest_interval: 777, - digest_levels: 333, - }.encode() - ) + Some(ChangesTrieConfig { digest_interval: 777, digest_levels: 333 }.encode()), ); } @@ -1013,24 +1016,14 @@ mod tests { match (using_native, self.native_succeeds, self.fallback_succeeds, native_call) { (true, true, _, Some(call)) => { let res = sp_externalities::set_and_run_with_externalities(ext, || call()); - ( - res.map(NativeOrEncoded::Native).map_err(|_| 0), - true - ) - }, - (true, true, _, None) | (false, _, true, None) => { - ( - Ok( - NativeOrEncoded::Encoded( - vec![ - ext.storage(b"value1").unwrap()[0] + - ext.storage(b"value2").unwrap()[0] - ] - ) - ), - using_native - ) + (res.map(NativeOrEncoded::Native).map_err(|_| 0), true) }, + (true, true, _, None) | (false, _, true, None) => ( + Ok(NativeOrEncoded::Encoded(vec![ + ext.storage(b"value1").unwrap()[0] + ext.storage(b"value2").unwrap()[0], + ])), + using_native, + ), _ => (Err(0), using_native), } } @@ -1069,13 
+1062,9 @@ mod tests { TaskExecutor::new(), ); - assert_eq!( - state_machine.execute(ExecutionStrategy::NativeWhenPossible).unwrap(), - vec![66], - ); + assert_eq!(state_machine.execute(ExecutionStrategy::NativeWhenPossible).unwrap(), vec![66],); } - #[test] fn execute_works_with_native_else_wasm() { let backend = trie_backend::tests::test_trie(); @@ -1126,15 +1115,15 @@ mod tests { TaskExecutor::new(), ); - assert!( - state_machine.execute_using_consensus_failure_handler::<_, NeverNativeValue, fn() -> _>( + assert!(state_machine + .execute_using_consensus_failure_handler::<_, NeverNativeValue, fn() -> _>( ExecutionManager::Both(|we, _ne| { consensus_failed = true; we }), None, - ).is_err() - ); + ) + .is_err()); assert!(consensus_failed); } @@ -1158,7 +1147,8 @@ mod tests { "test", &[], &RuntimeCode::empty(), - ).unwrap(); + ) + .unwrap(); // check proof locally let local_result = execution_proof_check::( @@ -1170,7 +1160,8 @@ mod tests { "test", &[], &RuntimeCode::empty(), - ).unwrap(); + ) + .unwrap(); // check that both results are correct assert_eq!(remote_result, vec![66]); @@ -1210,7 +1201,9 @@ mod tests { overlay.commit_transaction().unwrap(); assert_eq!( - overlay.changes().map(|(k, v)| (k.clone(), v.value().cloned())) + overlay + .changes() + .map(|(k, v)| (k.clone(), v.value().cloned())) .collect::>(), map![ b"abc".to_vec() => None.into(), @@ -1238,7 +1231,9 @@ mod tests { overlay.commit_transaction().unwrap(); assert_eq!( - overlay.changes().map(|(k, v)| (k.clone(), v.value().cloned())) + overlay + .changes() + .map(|(k, v)| (k.clone(), v.value().cloned())) .collect::>(), map![ b"abb".to_vec() => None.into(), @@ -1283,7 +1278,8 @@ mod tests { } assert_eq!( - overlay.children() + overlay + .children() .flat_map(|(iter, _child_info)| iter) .map(|(k, v)| (k.clone(), v.value().clone())) .collect::>(), @@ -1345,39 +1341,15 @@ mod tests { None, ); - ext.set_child_storage( - child_info, - b"abc".to_vec(), - b"def".to_vec() - ); - assert_eq!( - 
ext.child_storage( - child_info, - b"abc" - ), - Some(b"def".to_vec()) - ); - ext.kill_child_storage( - child_info, - None, - ); - assert_eq!( - ext.child_storage( - child_info, - b"abc" - ), - None - ); + ext.set_child_storage(child_info, b"abc".to_vec(), b"def".to_vec()); + assert_eq!(ext.child_storage(child_info, b"abc"), Some(b"def".to_vec())); + ext.kill_child_storage(child_info, None); + assert_eq!(ext.child_storage(child_info, b"abc"), None); } #[test] fn append_storage_works() { - let reference_data = vec![ - b"data1".to_vec(), - b"2".to_vec(), - b"D3".to_vec(), - b"d4".to_vec(), - ]; + let reference_data = vec![b"data1".to_vec(), b"2".to_vec(), b"D3".to_vec(), b"d4".to_vec()]; let key = b"key".to_vec(); let mut state = new_in_mem::(); let backend = state.as_trie_backend().unwrap(); @@ -1393,10 +1365,7 @@ mod tests { ); ext.storage_append(key.clone(), reference_data[0].encode()); - assert_eq!( - ext.storage(key.as_slice()), - Some(vec![reference_data[0].clone()].encode()), - ); + assert_eq!(ext.storage(key.as_slice()), Some(vec![reference_data[0].clone()].encode()),); } overlay.start_transaction(); { @@ -1411,10 +1380,7 @@ mod tests { for i in reference_data.iter().skip(1) { ext.storage_append(key.clone(), i.encode()); } - assert_eq!( - ext.storage(key.as_slice()), - Some(reference_data.encode()), - ); + assert_eq!(ext.storage(key.as_slice()), Some(reference_data.encode()),); } overlay.rollback_transaction().unwrap(); { @@ -1425,18 +1391,18 @@ mod tests { changes_trie::disabled_state::<_, u64>(), None, ); - assert_eq!( - ext.storage(key.as_slice()), - Some(vec![reference_data[0].clone()].encode()), - ); + assert_eq!(ext.storage(key.as_slice()), Some(vec![reference_data[0].clone()].encode()),); } } #[test] fn remove_with_append_then_rollback_appended_then_append_again() { - #[derive(codec::Encode, codec::Decode)] - enum Item { InitializationItem, DiscardedItem, CommitedItem } + enum Item { + InitializationItem, + DiscardedItem, + CommitedItem, + } let key = 
b"events".to_vec(); let mut cache = StorageTransactionCache::default(); @@ -1468,10 +1434,7 @@ mod tests { None, ); - assert_eq!( - ext.storage(key.as_slice()), - Some(vec![Item::InitializationItem].encode()), - ); + assert_eq!(ext.storage(key.as_slice()), Some(vec![Item::InitializationItem].encode()),); ext.storage_append(key.clone(), Item::DiscardedItem.encode()); @@ -1492,10 +1455,7 @@ mod tests { None, ); - assert_eq!( - ext.storage(key.as_slice()), - Some(vec![Item::InitializationItem].encode()), - ); + assert_eq!(ext.storage(key.as_slice()), Some(vec![Item::InitializationItem].encode()),); ext.storage_append(key.clone(), Item::CommitedItem.encode()); @@ -1503,7 +1463,6 @@ mod tests { ext.storage(key.as_slice()), Some(vec![Item::InitializationItem, Item::CommitedItem].encode()), ); - } overlay.start_transaction(); @@ -1524,10 +1483,12 @@ mod tests { } fn test_compact(remote_proof: StorageProof, remote_root: &sp_core::H256) -> StorageProof { - let compact_remote_proof = remote_proof.into_compact_proof::( - remote_root.clone(), - ).unwrap(); - compact_remote_proof.to_storage_proof::(Some(remote_root)).unwrap().0 + let compact_remote_proof = + remote_proof.into_compact_proof::(remote_root.clone()).unwrap(); + compact_remote_proof + .to_storage_proof::(Some(remote_root)) + .unwrap() + .0 } #[test] @@ -1539,17 +1500,13 @@ mod tests { let remote_root = remote_backend.storage_root(std::iter::empty()).0; let remote_proof = prove_read(remote_backend, &[b"value2"]).unwrap(); let remote_proof = test_compact(remote_proof, &remote_root); - // check proof locally - let local_result1 = read_proof_check::( - remote_root, - remote_proof.clone(), - &[b"value2"], - ).unwrap(); - let local_result2 = read_proof_check::( - remote_root, - remote_proof.clone(), - &[&[0xff]], - ).is_ok(); + // check proof locally + let local_result1 = + read_proof_check::(remote_root, remote_proof.clone(), &[b"value2"]) + .unwrap(); + let local_result2 = + read_proof_check::(remote_root, 
remote_proof.clone(), &[&[0xff]]) + .is_ok(); // check that results are correct assert_eq!( local_result1.into_iter().collect::>(), @@ -1559,45 +1516,42 @@ mod tests { // on child trie let remote_backend = trie_backend::tests::test_trie(); let remote_root = remote_backend.storage_root(std::iter::empty()).0; - let remote_proof = prove_child_read( - remote_backend, - child_info, - &[b"value3"], - ).unwrap(); + let remote_proof = prove_child_read(remote_backend, child_info, &[b"value3"]).unwrap(); let remote_proof = test_compact(remote_proof, &remote_root); let local_result1 = read_child_proof_check::( remote_root, remote_proof.clone(), child_info, &[b"value3"], - ).unwrap(); + ) + .unwrap(); let local_result2 = read_child_proof_check::( remote_root, remote_proof.clone(), child_info, &[b"value2"], - ).unwrap(); + ) + .unwrap(); assert_eq!( local_result1.into_iter().collect::>(), vec![(b"value3".to_vec(), Some(vec![142]))], ); - assert_eq!( - local_result2.into_iter().collect::>(), - vec![(b"value2".to_vec(), None)], - ); + assert_eq!(local_result2.into_iter().collect::>(), vec![(b"value2".to_vec(), None)],); } #[test] fn prove_read_with_size_limit_works() { let remote_backend = trie_backend::tests::test_trie(); let remote_root = remote_backend.storage_root(::std::iter::empty()).0; - let (proof, count) = prove_range_read_with_size(remote_backend, None, None, 0, None).unwrap(); + let (proof, count) = + prove_range_read_with_size(remote_backend, None, None, 0, None).unwrap(); // Alwasys contains at least some nodes. 
assert_eq!(proof.into_memory_db::().drain().len(), 3); assert_eq!(count, 1); let remote_backend = trie_backend::tests::test_trie(); - let (proof, count) = prove_range_read_with_size(remote_backend, None, None, 800, Some(&[])).unwrap(); + let (proof, count) = + prove_range_read_with_size(remote_backend, None, None, 800, Some(&[])).unwrap(); assert_eq!(proof.clone().into_memory_db::().drain().len(), 9); assert_eq!(count, 85); let (results, completed) = read_range_proof_check::( @@ -1607,23 +1561,20 @@ mod tests { None, Some(count), None, - ).unwrap(); + ) + .unwrap(); assert_eq!(results.len() as u32, count); assert_eq!(completed, false); // When checking without count limit, proof may actually contain extra values. - let (results, completed) = read_range_proof_check::( - remote_root, - proof, - None, - None, - None, - None, - ).unwrap(); + let (results, completed) = + read_range_proof_check::(remote_root, proof, None, None, None, None) + .unwrap(); assert_eq!(results.len() as u32, 101); assert_eq!(completed, false); let remote_backend = trie_backend::tests::test_trie(); - let (proof, count) = prove_range_read_with_size(remote_backend, None, None, 50000, Some(&[])).unwrap(); + let (proof, count) = + prove_range_read_with_size(remote_backend, None, None, 50000, Some(&[])).unwrap(); assert_eq!(proof.clone().into_memory_db::().drain().len(), 11); assert_eq!(count, 132); let (results, completed) = read_range_proof_check::( @@ -1633,7 +1584,8 @@ mod tests { None, None, None, - ).unwrap(); + ) + .unwrap(); assert_eq!(results.len() as u32, count); assert_eq!(completed, true); } @@ -1650,41 +1602,41 @@ mod tests { let (remote_root, transaction) = remote_backend.full_storage_root( std::iter::empty(), vec![ - (&child_info1, vec![ - (&b"key1"[..], Some(&b"val2"[..])), - (&b"key2"[..], Some(&b"val3"[..])), - ].into_iter()), - (&child_info2, vec![ - (&b"key3"[..], Some(&b"val4"[..])), - (&b"key4"[..], Some(&b"val5"[..])), - ].into_iter()), - (&child_info3, vec![ - (&b"key5"[..], 
Some(&b"val6"[..])), - (&b"key6"[..], Some(&b"val7"[..])), - ].into_iter()), - ].into_iter(), + ( + &child_info1, + vec![(&b"key1"[..], Some(&b"val2"[..])), (&b"key2"[..], Some(&b"val3"[..]))] + .into_iter(), + ), + ( + &child_info2, + vec![(&b"key3"[..], Some(&b"val4"[..])), (&b"key4"[..], Some(&b"val5"[..]))] + .into_iter(), + ), + ( + &child_info3, + vec![(&b"key5"[..], Some(&b"val6"[..])), (&b"key6"[..], Some(&b"val7"[..]))] + .into_iter(), + ), + ] + .into_iter(), ); remote_backend.backend_storage_mut().consolidate(transaction); remote_backend.essence.set_root(remote_root.clone()); - let remote_proof = prove_child_read( - remote_backend, - &child_info1, - &[b"key1"], - ).unwrap(); + let remote_proof = prove_child_read(remote_backend, &child_info1, &[b"key1"]).unwrap(); let remote_proof = test_compact(remote_proof, &remote_root); let local_result1 = read_child_proof_check::( remote_root, remote_proof.clone(), &child_info1, &[b"key1"], - ).unwrap(); + ) + .unwrap(); assert_eq!(local_result1.len(), 1); assert_eq!(local_result1.get(&b"key1"[..]), Some(&Some(b"val2".to_vec()))); } #[test] fn child_storage_uuid() { - let child_info_1 = ChildInfo::new_default(b"sub_test1"); let child_info_2 = ChildInfo::new_default(b"sub_test2"); @@ -1782,16 +1734,19 @@ mod tests { ); let run_state_machine = |state_machine: &mut StateMachine<_, _, _, _>| { - state_machine.execute_using_consensus_failure_handler:: _, _, _>( - ExecutionManager::NativeWhenPossible, - Some(|| { - sp_externalities::with_externalities(|mut ext| { - ext.register_extension(DummyExt(2)).unwrap(); - }).unwrap(); - - Ok(()) - }), - ).unwrap(); + state_machine + .execute_using_consensus_failure_handler:: _, _, _>( + ExecutionManager::NativeWhenPossible, + Some(|| { + sp_externalities::with_externalities(|mut ext| { + ext.register_extension(DummyExt(2)).unwrap(); + }) + .unwrap(); + + Ok(()) + }), + ) + .unwrap(); }; run_state_machine(&mut state_machine); diff --git 
a/primitives/state-machine/src/overlayed_changes/changeset.rs b/primitives/state-machine/src/overlayed_changes/changeset.rs index ae9584990e5fa..1ffd569e2828b 100644 --- a/primitives/state-machine/src/overlayed_changes/changeset.rs +++ b/primitives/state-machine/src/overlayed_changes/changeset.rs @@ -17,17 +17,19 @@ //! Houses the code that implements the transactional overlay storage. -use super::{StorageKey, StorageValue, Extrinsics}; +use super::{Extrinsics, StorageKey, StorageValue}; -#[cfg(feature = "std")] -use std::collections::HashSet as Set; #[cfg(not(feature = "std"))] use sp_std::collections::btree_set::BTreeSet as Set; +#[cfg(feature = "std")] +use std::collections::HashSet as Set; -use sp_std::collections::{btree_map::BTreeMap, btree_set::BTreeSet}; -use sp_std::hash::Hash; -use smallvec::SmallVec; use crate::warn; +use smallvec::SmallVec; +use sp_std::{ + collections::{btree_map::BTreeMap, btree_set::BTreeSet}, + hash::Hash, +}; const PROOF_OVERLAY_NON_EMPTY: &str = "\ An OverlayValue is always created with at least one transaction and dropped as soon @@ -82,9 +84,7 @@ pub struct OverlayedEntry { impl Default for OverlayedEntry { fn default() -> Self { - Self { - transactions: SmallVec::new(), - } + Self { transactions: SmallVec::new() } } } @@ -142,7 +142,9 @@ impl OverlayedEntry { /// Unique list of extrinsic indices which modified the value. pub fn extrinsics(&self) -> BTreeSet { let mut set = BTreeSet::new(); - self.transactions.iter().for_each(|t| t.extrinsics.copy_extrinsics_into(&mut set)); + self.transactions + .iter() + .for_each(|t| t.extrinsics.copy_extrinsics_into(&mut set)); set } @@ -165,17 +167,9 @@ impl OverlayedEntry { /// /// This makes sure that the old version is not overwritten and can be properly /// rolled back when required. 
- fn set( - &mut self, - value: V, - first_write_in_tx: bool, - at_extrinsic: Option, - ) { + fn set(&mut self, value: V, first_write_in_tx: bool, at_extrinsic: Option) { if first_write_in_tx || self.transactions.is_empty() { - self.transactions.push(InnerValue { - value, - extrinsics: Default::default(), - }); + self.transactions.push(InnerValue { value, extrinsics: Default::default() }); } else { *self.value_mut() = value; } @@ -223,9 +217,9 @@ impl OverlayedMap { /// Get an optional reference to the value stored for the specified key. pub fn get(&self, key: &Q) -> Option<&OverlayedEntry> - where - K: sp_std::borrow::Borrow, - Q: Ord + ?Sized, + where + K: sp_std::borrow::Borrow, + Q: Ord + ?Sized, { self.changes.get(key) } @@ -233,24 +227,19 @@ impl OverlayedMap { /// Set a new value for the specified key. /// /// Can be rolled back or committed when called inside a transaction. - pub fn set( - &mut self, - key: K, - value: V, - at_extrinsic: Option, - ) { + pub fn set(&mut self, key: K, value: V, at_extrinsic: Option) { let overlayed = self.changes.entry(key.clone()).or_default(); overlayed.set(value, insert_dirty(&mut self.dirty_keys, key), at_extrinsic); } /// Get a list of all changes as seen by current transaction. - pub fn changes(&self) -> impl Iterator)> { + pub fn changes(&self) -> impl Iterator)> { self.changes.iter() } /// Get a list of all changes as seen by current transaction, consumes /// the overlay. 
- pub fn into_changes(self) -> impl Iterator)> { + pub fn into_changes(self) -> impl Iterator)> { self.changes.into_iter() } @@ -258,7 +247,7 @@ impl OverlayedMap { /// /// Panics: /// Panics if there are open transactions: `transaction_depth() > 0` - pub fn drain_commited(self) -> impl Iterator { + pub fn drain_commited(self) -> impl Iterator { assert!(self.transaction_depth() == 0, "Drain is not allowed with open transactions."); self.changes.into_iter().map(|(k, mut v)| (k, v.pop_transaction().value)) } @@ -276,7 +265,7 @@ impl OverlayedMap { /// Calling this while already inside the runtime will return an error. pub fn enter_runtime(&mut self) -> Result<(), AlreadyInRuntime> { if let ExecutionMode::Runtime = self.execution_mode { - return Err(AlreadyInRuntime); + return Err(AlreadyInRuntime) } self.execution_mode = ExecutionMode::Runtime; self.num_client_transactions = self.transaction_depth(); @@ -289,7 +278,7 @@ impl OverlayedMap { /// Calling this while already outside the runtime will return an error. pub fn exit_runtime(&mut self) -> Result<(), NotInRuntime> { if let ExecutionMode::Client = self.execution_mode { - return Err(NotInRuntime); + return Err(NotInRuntime) } self.execution_mode = ExecutionMode::Client; if self.has_open_runtime_transactions() { @@ -341,11 +330,13 @@ impl OverlayedMap { } for key in self.dirty_keys.pop().ok_or(NoOpenTransaction)? { - let overlayed = self.changes.get_mut(&key).expect("\ + let overlayed = self.changes.get_mut(&key).expect( + "\ A write to an OverlayedValue is recorded in the dirty key set. Before an OverlayedValue is removed, its containing dirty set is removed. This function is only called for keys that are in the dirty set. 
qed\ - "); + ", + ); if rollback { overlayed.pop_transaction(); @@ -443,9 +434,12 @@ mod test { type Drained<'a> = Vec<(&'a [u8], Option<&'a [u8]>)>; fn assert_changes(is: &OverlayedChangeSet, expected: &Changes) { - let is: Changes = is.changes().map(|(k, v)| { - (k.as_ref(), (v.value().map(AsRef::as_ref), v.extrinsics().into_iter().collect())) - }).collect(); + let is: Changes = is + .changes() + .map(|(k, v)| { + (k.as_ref(), (v.value().map(AsRef::as_ref), v.extrinsics().into_iter().collect())) + }) + .collect(); assert_eq!(&is, expected); } @@ -453,7 +447,8 @@ mod test { let is = is.drain_commited().collect::>(); let expected = expected .iter() - .map(|(k, v)| (k.to_vec(), v.0.map(From::from))).collect::>(); + .map(|(k, v)| (k.to_vec(), v.0.map(From::from))) + .collect::>(); assert_eq!(is, expected); } @@ -461,7 +456,8 @@ mod test { let is = is.drain_commited().collect::>(); let expected = expected .iter() - .map(|(k, v)| (k.to_vec(), v.map(From::from))).collect::>(); + .map(|(k, v)| (k.to_vec(), v.map(From::from))) + .collect::>(); assert_eq!(is, expected); } @@ -474,10 +470,7 @@ mod test { changeset.set(b"key1".to_vec(), Some(b"val1".to_vec()), Some(2)); changeset.set(b"key0".to_vec(), Some(b"val0-1".to_vec()), Some(9)); - assert_drained(changeset, vec![ - (b"key0", Some(b"val0-1")), - (b"key1", Some(b"val1")), - ]); + assert_drained(changeset, vec![(b"key0", Some(b"val0-1")), (b"key1", Some(b"val1"))]); } #[test] @@ -599,10 +592,8 @@ mod test { changeset.rollback_transaction().unwrap(); assert_eq!(changeset.transaction_depth(), 0); - let rolled_back: Changes = vec![ - (b"key0", (Some(b"val0-1"), vec![1, 10])), - (b"key1", (Some(b"val1"), vec![1])), - ]; + let rolled_back: Changes = + vec![(b"key0", (Some(b"val0-1"), vec![1, 10])), (b"key1", (Some(b"val1"), vec![1]))]; assert_changes(&changeset, &rolled_back); assert_drained_changes(changeset, rolled_back); @@ -676,21 +667,27 @@ mod test { changeset.clear_where(|k, _| k.starts_with(b"del"), Some(5)); - 
assert_changes(&changeset, &vec![ - (b"del1", (None, vec![3, 5])), - (b"del2", (None, vec![4, 5])), - (b"key0", (Some(b"val0"), vec![1])), - (b"key1", (Some(b"val1"), vec![2])), - ]); + assert_changes( + &changeset, + &vec![ + (b"del1", (None, vec![3, 5])), + (b"del2", (None, vec![4, 5])), + (b"key0", (Some(b"val0"), vec![1])), + (b"key1", (Some(b"val1"), vec![2])), + ], + ); changeset.rollback_transaction().unwrap(); - assert_changes(&changeset, &vec![ - (b"del1", (Some(b"delval1"), vec![3])), - (b"del2", (Some(b"delval2"), vec![4])), - (b"key0", (Some(b"val0"), vec![1])), - (b"key1", (Some(b"val1"), vec![2])), - ]); + assert_changes( + &changeset, + &vec![ + (b"del1", (Some(b"delval1"), vec![3])), + (b"del2", (Some(b"delval2"), vec![4])), + (b"key0", (Some(b"val0"), vec![1])), + (b"key1", (Some(b"val1"), vec![2])), + ], + ); } #[test] @@ -708,29 +705,52 @@ mod test { changeset.set(b"key11".to_vec(), Some(b"val11".to_vec()), Some(11)); assert_eq!(changeset.changes_after(b"key0").next().unwrap().0, b"key1"); - assert_eq!(changeset.changes_after(b"key0").next().unwrap().1.value(), Some(&b"val1".to_vec())); + assert_eq!( + changeset.changes_after(b"key0").next().unwrap().1.value(), + Some(&b"val1".to_vec()) + ); assert_eq!(changeset.changes_after(b"key1").next().unwrap().0, b"key11"); - assert_eq!(changeset.changes_after(b"key1").next().unwrap().1.value(), Some(&b"val11".to_vec())); + assert_eq!( + changeset.changes_after(b"key1").next().unwrap().1.value(), + Some(&b"val11".to_vec()) + ); assert_eq!(changeset.changes_after(b"key11").next().unwrap().0, b"key2"); - assert_eq!(changeset.changes_after(b"key11").next().unwrap().1.value(), Some(&b"val2".to_vec())); + assert_eq!( + changeset.changes_after(b"key11").next().unwrap().1.value(), + Some(&b"val2".to_vec()) + ); assert_eq!(changeset.changes_after(b"key2").next().unwrap().0, b"key3"); - assert_eq!(changeset.changes_after(b"key2").next().unwrap().1.value(), Some(&b"val3".to_vec())); + assert_eq!( + 
changeset.changes_after(b"key2").next().unwrap().1.value(), + Some(&b"val3".to_vec()) + ); assert_eq!(changeset.changes_after(b"key3").next().unwrap().0, b"key4"); - assert_eq!(changeset.changes_after(b"key3").next().unwrap().1.value(), Some(&b"val4".to_vec())); + assert_eq!( + changeset.changes_after(b"key3").next().unwrap().1.value(), + Some(&b"val4".to_vec()) + ); assert_eq!(changeset.changes_after(b"key4").next(), None); changeset.rollback_transaction().unwrap(); assert_eq!(changeset.changes_after(b"key0").next().unwrap().0, b"key1"); - assert_eq!(changeset.changes_after(b"key0").next().unwrap().1.value(), Some(&b"val1".to_vec())); + assert_eq!( + changeset.changes_after(b"key0").next().unwrap().1.value(), + Some(&b"val1".to_vec()) + ); assert_eq!(changeset.changes_after(b"key1").next().unwrap().0, b"key2"); - assert_eq!(changeset.changes_after(b"key1").next().unwrap().1.value(), Some(&b"val2".to_vec())); + assert_eq!( + changeset.changes_after(b"key1").next().unwrap().1.value(), + Some(&b"val2".to_vec()) + ); assert_eq!(changeset.changes_after(b"key11").next().unwrap().0, b"key2"); - assert_eq!(changeset.changes_after(b"key11").next().unwrap().1.value(), Some(&b"val2".to_vec())); + assert_eq!( + changeset.changes_after(b"key11").next().unwrap().1.value(), + Some(&b"val2".to_vec()) + ); assert_eq!(changeset.changes_after(b"key2").next(), None); assert_eq!(changeset.changes_after(b"key3").next(), None); assert_eq!(changeset.changes_after(b"key4").next(), None); - } #[test] @@ -790,9 +810,7 @@ mod test { changeset.commit_transaction().unwrap(); assert_eq!(changeset.transaction_depth(), 0); - assert_drained(changeset, vec![ - (b"key0", Some(b"val0")), - ]); + assert_drained(changeset, vec![(b"key0", Some(b"val0"))]); } #[test] diff --git a/primitives/state-machine/src/overlayed_changes/mod.rs b/primitives/state-machine/src/overlayed_changes/mod.rs index a261e084eeda9..a0558e06a380e 100644 --- a/primitives/state-machine/src/overlayed_changes/mod.rs +++ 
b/primitives/state-machine/src/overlayed_changes/mod.rs @@ -20,36 +20,35 @@ mod changeset; mod offchain; +use self::changeset::OverlayedChangeSet; +use crate::{backend::Backend, stats::StateMachineStats}; pub use offchain::OffchainOverlayedChanges; -use crate::{ - backend::Backend, - stats::StateMachineStats, +use sp_std::{ + any::{Any, TypeId}, + boxed::Box, + vec::Vec, }; -use sp_std::{vec::Vec, any::{TypeId, Any}, boxed::Box}; -use self::changeset::OverlayedChangeSet; +use crate::{changes_trie::BlockNumber, DefaultError}; #[cfg(feature = "std")] use crate::{ + changes_trie::{build_changes_trie, State as ChangesTrieState}, ChangesTrieTransaction, - changes_trie::{ - build_changes_trie, - State as ChangesTrieState, - }, }; -use crate::changes_trie::BlockNumber; -#[cfg(feature = "std")] -use std::collections::{HashMap as Map, hash_map::Entry as MapEntry}; +use codec::{Decode, Encode}; +use hash_db::Hasher; +use sp_core::{ + offchain::OffchainOverlayedChange, + storage::{well_known_keys::EXTRINSIC_INDEX, ChildInfo}, +}; +use sp_externalities::{Extension, Extensions}; #[cfg(not(feature = "std"))] use sp_std::collections::btree_map::{BTreeMap as Map, Entry as MapEntry}; use sp_std::collections::btree_set::BTreeSet; -use codec::{Decode, Encode}; -use sp_core::storage::{well_known_keys::EXTRINSIC_INDEX, ChildInfo}; -use sp_core::offchain::OffchainOverlayedChange; -use hash_db::Hasher; -use crate::DefaultError; -use sp_externalities::{Extensions, Extension}; +#[cfg(feature = "std")] +use std::collections::{hash_map::Entry as MapEntry, HashMap as Map}; -pub use self::changeset::{OverlayedValue, NoOpenTransaction, AlreadyInRuntime, NotInRuntime}; +pub use self::changeset::{AlreadyInRuntime, NoOpenTransaction, NotInRuntime, OverlayedValue}; /// Changes that are made outside of extrinsics are marked with this index; pub const NO_EXTRINSIC_INDEX: u32 = 0xffffffff; @@ -129,7 +128,7 @@ pub enum IndexOperation { extrinsic: u32, /// Referenced index hash. 
hash: Vec, - } + }, } /// A storage changes structure that can be generated by the data collected in [`OverlayedChanges`]. @@ -169,7 +168,9 @@ pub struct StorageChanges { #[cfg(feature = "std")] impl StorageChanges { /// Deconstruct into the inner values - pub fn into_inner(self) -> ( + pub fn into_inner( + self, + ) -> ( StorageCollection, ChildStorageCollection, OffchainChangesCollection, @@ -216,7 +217,9 @@ impl StorageTransactionCache Default for StorageTransactionCache { +impl Default + for StorageTransactionCache +{ fn default() -> Self { Self { transaction: None, @@ -231,7 +234,9 @@ impl Default for StorageTransactionCache } } -impl Default for StorageChanges { +impl Default + for StorageChanges +{ fn default() -> Self { Self { main_storage_changes: Default::default(), @@ -325,12 +330,10 @@ impl OverlayedChanges { self.stats.tally_write_overlay(size_write); let storage_key = child_info.storage_key().to_vec(); let top = &self.top; - let (changeset, info) = self.children.entry(storage_key).or_insert_with(|| - ( - top.spawn_child(), - child_info.clone() - ) - ); + let (changeset, info) = self + .children + .entry(storage_key) + .or_insert_with(|| (top.spawn_child(), child_info.clone())); let updatable = info.try_update(child_info); debug_assert!(updatable); changeset.set(key, val, extrinsic_index); @@ -339,19 +342,14 @@ impl OverlayedChanges { /// Clear child storage of given storage key. /// /// Can be rolled back or committed when called inside a transaction. 
- pub(crate) fn clear_child_storage( - &mut self, - child_info: &ChildInfo, - ) { + pub(crate) fn clear_child_storage(&mut self, child_info: &ChildInfo) { let extrinsic_index = self.extrinsic_index(); let storage_key = child_info.storage_key().to_vec(); let top = &self.top; - let (changeset, info) = self.children.entry(storage_key).or_insert_with(|| - ( - top.spawn_child(), - child_info.clone() - ) - ); + let (changeset, info) = self + .children + .entry(storage_key) + .or_insert_with(|| (top.spawn_child(), child_info.clone())); let updatable = info.try_update(child_info); debug_assert!(updatable); changeset.clear_where(|_, _| true, extrinsic_index); @@ -367,20 +365,14 @@ impl OverlayedChanges { /// Removes all key-value pairs which keys share the given prefix. /// /// Can be rolled back or committed when called inside a transaction - pub(crate) fn clear_child_prefix( - &mut self, - child_info: &ChildInfo, - prefix: &[u8], - ) { + pub(crate) fn clear_child_prefix(&mut self, child_info: &ChildInfo, prefix: &[u8]) { let extrinsic_index = self.extrinsic_index(); let storage_key = child_info.storage_key().to_vec(); let top = &self.top; - let (changeset, info) = self.children.entry(storage_key).or_insert_with(|| - ( - top.spawn_child(), - child_info.clone() - ) - ); + let (changeset, info) = self + .children + .entry(storage_key) + .or_insert_with(|| (top.spawn_child(), child_info.clone())); let updatable = info.try_update(child_info); debug_assert!(updatable); changeset.clear_where(|key, _| key.starts_with(prefix), extrinsic_index); @@ -417,11 +409,14 @@ impl OverlayedChanges { pub fn rollback_transaction(&mut self) -> Result<(), NoOpenTransaction> { self.top.rollback_transaction()?; retain_map(&mut self.children, |_, (changeset, _)| { - changeset.rollback_transaction() + changeset + .rollback_transaction() .expect("Top and children changesets are started in lockstep; qed"); !changeset.is_empty() }); - self.offchain.overlay_mut().rollback_transaction() + self.offchain 
+ .overlay_mut() + .rollback_transaction() .expect("Top and offchain changesets are started in lockstep; qed"); Ok(()) } @@ -433,10 +428,13 @@ impl OverlayedChanges { pub fn commit_transaction(&mut self) -> Result<(), NoOpenTransaction> { self.top.commit_transaction()?; for (_, (changeset, _)) in self.children.iter_mut() { - changeset.commit_transaction() + changeset + .commit_transaction() .expect("Top and children changesets are started in lockstep; qed"); } - self.offchain.overlay_mut().commit_transaction() + self.offchain + .overlay_mut() + .commit_transaction() .expect("Top and offchain changesets are started in lockstep; qed"); Ok(()) } @@ -448,10 +446,13 @@ impl OverlayedChanges { pub fn enter_runtime(&mut self) -> Result<(), AlreadyInRuntime> { self.top.enter_runtime()?; for (_, (changeset, _)) in self.children.iter_mut() { - changeset.enter_runtime() + changeset + .enter_runtime() .expect("Top and children changesets are entering runtime in lockstep; qed") } - self.offchain.overlay_mut().enter_runtime() + self.offchain + .overlay_mut() + .enter_runtime() .expect("Top and offchain changesets are started in lockstep; qed"); Ok(()) } @@ -463,10 +464,13 @@ impl OverlayedChanges { pub fn exit_runtime(&mut self) -> Result<(), NotInRuntime> { self.top.exit_runtime()?; for (_, (changeset, _)) in self.children.iter_mut() { - changeset.exit_runtime() + changeset + .exit_runtime() .expect("Top and children changesets are entering runtime in lockstep; qed"); } - self.offchain.overlay_mut().exit_runtime() + self.offchain + .overlay_mut() + .exit_runtime() .expect("Top and offchain changesets are started in lockstep; qed"); Ok(()) } @@ -477,19 +481,23 @@ impl OverlayedChanges { /// /// Panics: /// Panics if `transaction_depth() > 0` - fn drain_committed(&mut self) -> ( - impl Iterator)>, - impl Iterator)>, ChildInfo))>, + fn drain_committed( + &mut self, + ) -> ( + impl Iterator)>, + impl Iterator< + Item = ( + StorageKey, + (impl Iterator)>, ChildInfo), + ), + >, ) { 
use sp_std::mem::take; ( take(&mut self.top).drain_commited(), - take(&mut self.children).into_iter() - .map(|(key, (val, info))| ( - key, - (val.drain_commited(), info) - ) - ), + take(&mut self.children) + .into_iter() + .map(|(key, (val, info))| (key, (val.drain_commited(), info))), ) } @@ -499,24 +507,29 @@ impl OverlayedChanges { /// /// Panics: /// Panics if `transaction_depth() > 0` - pub fn offchain_drain_committed(&mut self) -> impl Iterator { + pub fn offchain_drain_committed( + &mut self, + ) -> impl Iterator { self.offchain.drain() } /// Get an iterator over all child changes as seen by the current transaction. - pub fn children(&self) - -> impl Iterator, &ChildInfo)> { + pub fn children( + &self, + ) -> impl Iterator, &ChildInfo)> { self.children.iter().map(|(_, v)| (v.0.changes(), &v.1)) } /// Get an iterator over all top changes as been by the current transaction. - pub fn changes(&self) -> impl Iterator { + pub fn changes(&self) -> impl Iterator { self.top.changes() } /// Get an optional iterator over all child changes stored under the supplied key. - pub fn child_changes(&self, key: &[u8]) - -> Option<(impl Iterator, &ChildInfo)> { + pub fn child_changes( + &self, + key: &[u8], + ) -> Option<(impl Iterator, &ChildInfo)> { self.children.get(key).map(|(overlay, info)| (overlay.changes(), info)) } @@ -527,16 +540,16 @@ impl OverlayedChanges { /// Convert this instance with all changes into a [`StorageChanges`] instance. 
#[cfg(feature = "std")] - pub fn into_storage_changes< - B: Backend, H: Hasher, N: BlockNumber - >( + pub fn into_storage_changes, H: Hasher, N: BlockNumber>( mut self, backend: &B, changes_trie_state: Option<&ChangesTrieState>, parent_hash: H::Out, mut cache: StorageTransactionCache, ) -> Result, DefaultError> - where H::Out: Ord + Encode + 'static { + where + H::Out: Ord + Encode + 'static, + { self.drain_storage_changes(backend, changes_trie_state, parent_hash, &mut cache) } @@ -544,35 +557,34 @@ impl OverlayedChanges { pub fn drain_storage_changes, H: Hasher, N: BlockNumber>( &mut self, backend: &B, - #[cfg(feature = "std")] - changes_trie_state: Option<&ChangesTrieState>, + #[cfg(feature = "std")] changes_trie_state: Option<&ChangesTrieState>, parent_hash: H::Out, mut cache: &mut StorageTransactionCache, ) -> Result, DefaultError> - where H::Out: Ord + Encode + 'static { + where + H::Out: Ord + Encode + 'static, + { // If the transaction does not exist, we generate it. if cache.transaction.is_none() { self.storage_root(backend, &mut cache); } - let (transaction, transaction_storage_root) = cache.transaction.take() + let (transaction, transaction_storage_root) = cache + .transaction + .take() .and_then(|t| cache.transaction_storage_root.take().map(|tr| (t, tr))) .expect("Transaction was be generated as part of `storage_root`; qed"); // If the transaction does not exist, we generate it. 
#[cfg(feature = "std")] if cache.changes_trie_transaction.is_none() { - self.changes_trie_root( - backend, - changes_trie_state, - parent_hash, - false, - &mut cache, - ).map_err(|_| "Failed to generate changes trie transaction")?; + self.changes_trie_root(backend, changes_trie_state, parent_hash, false, &mut cache) + .map_err(|_| "Failed to generate changes trie transaction")?; } #[cfg(feature = "std")] - let changes_trie_transaction = cache.changes_trie_transaction + let changes_trie_transaction = cache + .changes_trie_transaction .take() .expect("Changes trie transaction was generated by `changes_trie_root`; qed"); @@ -584,7 +596,9 @@ impl OverlayedChanges { Ok(StorageChanges { main_storage_changes: main_storage_changes.collect(), - child_storage_changes: child_storage_changes.map(|(sk, it)| (sk, it.0.collect())).collect(), + child_storage_changes: child_storage_changes + .map(|(sk, it)| (sk, it.0.collect())) + .collect(), offchain_storage_changes, transaction, transaction_storage_root, @@ -614,7 +628,8 @@ impl OverlayedChanges { true => Some( self.storage(EXTRINSIC_INDEX) .and_then(|idx| idx.and_then(|idx| Decode::decode(&mut &*idx).ok())) - .unwrap_or(NO_EXTRINSIC_INDEX)), + .unwrap_or(NO_EXTRINSIC_INDEX), + ), false => None, } } @@ -628,13 +643,13 @@ impl OverlayedChanges { backend: &B, cache: &mut StorageTransactionCache, ) -> H::Out - where H::Out: Ord + Encode, + where + H::Out: Ord + Encode, { let delta = self.changes().map(|(k, v)| (&k[..], v.value().map(|v| &v[..]))); - let child_delta = self.children() - .map(|(changes, info)| (info, changes.map( - |(k, v)| (&k[..], v.value().map(|v| &v[..])) - ))); + let child_delta = self.children().map(|(changes, info)| { + (info, changes.map(|(k, v)| (&k[..], v.value().map(|v| &v[..])))) + }); let (root, transaction) = backend.full_storage_root(delta, child_delta); @@ -659,14 +674,18 @@ impl OverlayedChanges { parent_hash: H::Out, panic_on_storage_error: bool, cache: &mut StorageTransactionCache, - ) -> Result, ()> 
where H::Out: Ord + Encode + 'static { + ) -> Result, ()> + where + H::Out: Ord + Encode + 'static, + { build_changes_trie::<_, H, N>( backend, changes_trie_state, self, parent_hash, panic_on_storage_error, - ).map(|r| { + ) + .map(|r| { let root = r.as_ref().map(|r| r.1).clone(); cache.changes_trie_transaction = Some(r.map(|(db, _, cache)| (db, cache))); cache.changes_trie_transaction_storage_root = Some(root); @@ -685,7 +704,7 @@ impl OverlayedChanges { pub fn child_iter_after( &self, storage_key: &[u8], - key: &[u8] + key: &[u8], ) -> impl Iterator { self.children .get(storage_key) @@ -716,18 +735,18 @@ impl OverlayedChanges { #[cfg(feature = "std")] fn retain_map(map: &mut Map, f: F) - where - K: std::cmp::Eq + std::hash::Hash, - F: FnMut(&K, &mut V) -> bool, +where + K: std::cmp::Eq + std::hash::Hash, + F: FnMut(&K, &mut V) -> bool, { map.retain(f); } #[cfg(not(feature = "std"))] fn retain_map(map: &mut Map, mut f: F) - where - K: Ord, - F: FnMut(&K, &mut V) -> bool, +where + K: Ord, + F: FnMut(&K, &mut V) -> bool, { let old = sp_std::mem::replace(map, Map::default()); for (k, mut v) in old.into_iter() { @@ -799,18 +818,13 @@ impl<'a> OverlayedExtensions<'a> { #[cfg(test)] mod tests { - use hex_literal::hex; - use sp_core::{Blake2Hasher, traits::Externalities}; - use crate::InMemoryBackend; - use crate::ext::Ext; use super::*; + use crate::{ext::Ext, InMemoryBackend}; + use hex_literal::hex; + use sp_core::{traits::Externalities, Blake2Hasher}; use std::collections::BTreeMap; - fn assert_extrinsics( - overlay: &OverlayedChangeSet, - key: impl AsRef<[u8]>, - expected: Vec, - ) { + fn assert_extrinsics(overlay: &OverlayedChangeSet, key: impl AsRef<[u8]>, expected: Vec) { assert_eq!( overlay.get(key.as_ref()).unwrap().extrinsics().into_iter().collect::>(), expected @@ -863,13 +877,16 @@ mod tests { state.commit_transaction().unwrap(); } let offchain_data: Vec<_> = state.offchain_drain_committed().collect(); - let expected: Vec<_> = expected.into_iter().map(|(key, 
value)| { - let change = match value { - Some(value) => OffchainOverlayedChange::SetValue(value), - None => OffchainOverlayedChange::Remove, - }; - ((STORAGE_PREFIX.to_vec(), key), change) - }).collect(); + let expected: Vec<_> = expected + .into_iter() + .map(|(key, value)| { + let change = match value { + Some(value) => OffchainOverlayedChange::SetValue(value), + None => OffchainOverlayedChange::Remove, + }; + ((STORAGE_PREFIX.to_vec(), key), change) + }) + .collect(); assert_eq!(offchain_data, expected); } @@ -904,7 +921,6 @@ mod tests { check_offchain_content(&overlayed, 0, vec![(key.clone(), None)]); } - #[test] fn overlayed_storage_root_works() { let initial: BTreeMap<_, _> = vec![ @@ -912,7 +928,9 @@ mod tests { (b"dog".to_vec(), b"puppyXXX".to_vec()), (b"dogglesworth".to_vec(), b"catXXX".to_vec()), (b"doug".to_vec(), b"notadog".to_vec()), - ].into_iter().collect(); + ] + .into_iter() + .collect(); let backend = InMemoryBackend::::from(initial); let mut overlay = OverlayedChanges::default(); overlay.set_collect_extrinsics(false); @@ -935,7 +953,8 @@ mod tests { crate::changes_trie::disabled_state::<_, u64>(), None, ); - const ROOT: [u8; 32] = hex!("39245109cef3758c2eed2ccba8d9b370a917850af3824bc8348d505df2c298fa"); + const ROOT: [u8; 32] = + hex!("39245109cef3758c2eed2ccba8d9b370a917850af3824bc8348d505df2c298fa"); assert_eq!(&ext.storage_root()[..], &ROOT); } diff --git a/primitives/state-machine/src/overlayed_changes/offchain.rs b/primitives/state-machine/src/overlayed_changes/offchain.rs index 4128be24bc546..9603426fa5517 100644 --- a/primitives/state-machine/src/overlayed_changes/offchain.rs +++ b/primitives/state-machine/src/overlayed_changes/offchain.rs @@ -17,9 +17,9 @@ //! Overlayed changes for offchain indexing. 
+use super::changeset::OverlayedMap; use sp_core::offchain::OffchainOverlayedChange; use sp_std::prelude::Vec; -use super::changeset::OverlayedMap; /// In-memory storage for offchain workers recoding changes for the actual offchain storage implementation. #[derive(Debug, Clone, Default)] @@ -52,11 +52,9 @@ impl OffchainOverlayedChanges { /// Remove a key and its associated value from the offchain database. pub fn remove(&mut self, prefix: &[u8], key: &[u8]) { - let _ = self.0.set( - (prefix.to_vec(), key.to_vec()), - OffchainOverlayedChange::Remove, - None, - ); + let _ = self + .0 + .set((prefix.to_vec(), key.to_vec()), OffchainOverlayedChange::Remove, None); } /// Set the value associated with a key under a prefix to the value provided. @@ -80,7 +78,9 @@ impl OffchainOverlayedChanges { } /// Mutable reference to inner change set. - pub fn overlay_mut(&mut self) -> &mut OverlayedMap<(Vec, Vec), OffchainOverlayedChange> { + pub fn overlay_mut( + &mut self, + ) -> &mut OverlayedMap<(Vec, Vec), OffchainOverlayedChange> { &mut self.0 } } @@ -120,10 +120,10 @@ mod test { let mut iter = ooc.into_iter(); assert_eq!( iter.next(), - Some( - ((STORAGE_PREFIX.to_vec(), b"ppp".to_vec()), - OffchainOverlayedChange::SetValue(b"rrr".to_vec())) - ) + Some(( + (STORAGE_PREFIX.to_vec(), b"ppp".to_vec()), + OffchainOverlayedChange::SetValue(b"rrr".to_vec()) + )) ); assert_eq!(iter.next(), None); } diff --git a/primitives/state-machine/src/proving_backend.rs b/primitives/state-machine/src/proving_backend.rs index 5275aa82521c5..3a242313a65c7 100644 --- a/primitives/state-machine/src/proving_backend.rs +++ b/primitives/state-machine/src/proving_backend.rs @@ -17,20 +17,28 @@ //! Proving state machine backend. 
-use std::{sync::Arc, collections::{HashMap, hash_map::Entry}}; -use parking_lot::RwLock; -use codec::{Decode, Codec, Encode}; +use crate::{ + trie_backend::TrieBackend, + trie_backend_essence::{Ephemeral, TrieBackendEssence, TrieBackendStorage}, + Backend, DBValue, Error, ExecutionError, +}; +use codec::{Codec, Decode, Encode}; +use hash_db::{HashDB, Hasher, Prefix, EMPTY_PREFIX}; use log::debug; -use hash_db::{Hasher, HashDB, EMPTY_PREFIX, Prefix}; +use parking_lot::RwLock; +use sp_core::storage::ChildInfo; use sp_trie::{ - MemoryDB, empty_child_trie_root, read_trie_value_with, read_child_trie_value_with, - record_all_keys, StorageProof, + empty_child_trie_root, read_child_trie_value_with, read_trie_value_with, record_all_keys, + MemoryDB, StorageProof, +}; +pub use sp_trie::{ + trie_types::{Layout, TrieError}, + Recorder, +}; +use std::{ + collections::{hash_map::Entry, HashMap}, + sync::Arc, }; -pub use sp_trie::{Recorder, trie_types::{Layout, TrieError}}; -use crate::trie_backend::TrieBackend; -use crate::trie_backend_essence::{Ephemeral, TrieBackendEssence, TrieBackendStorage}; -use crate::{Error, ExecutionError, Backend, DBValue}; -use sp_core::storage::ChildInfo; /// Patricia trie-based backend specialized in get value proofs. pub struct ProvingBackendRecorder<'a, S: 'a + TrieBackendStorage, H: 'a + Hasher> { @@ -39,18 +47,15 @@ pub struct ProvingBackendRecorder<'a, S: 'a + TrieBackendStorage, H: 'a + Has } impl<'a, S, H> ProvingBackendRecorder<'a, S, H> - where - S: TrieBackendStorage, - H: Hasher, - H::Out: Codec, +where + S: TrieBackendStorage, + H: Hasher, + H::Out: Codec, { /// Produce proof for a key query. 
pub fn storage(&mut self, key: &[u8]) -> Result>, String> { let mut read_overlay = S::Overlay::default(); - let eph = Ephemeral::new( - self.backend.backend_storage(), - &mut read_overlay, - ); + let eph = Ephemeral::new(self.backend.backend_storage(), &mut read_overlay); let map_e = |e| format!("Trie lookup error: {}", e); @@ -59,25 +64,24 @@ impl<'a, S, H> ProvingBackendRecorder<'a, S, H> self.backend.root(), key, &mut *self.proof_recorder, - ).map_err(map_e) + ) + .map_err(map_e) } /// Produce proof for a child key query. pub fn child_storage( &mut self, child_info: &ChildInfo, - key: &[u8] + key: &[u8], ) -> Result>, String> { let storage_key = child_info.storage_key(); - let root = self.storage(storage_key)? + let root = self + .storage(storage_key)? .and_then(|r| Decode::decode(&mut &r[..]).ok()) .unwrap_or_else(|| empty_child_trie_root::>()); let mut read_overlay = S::Overlay::default(); - let eph = Ephemeral::new( - self.backend.backend_storage(), - &mut read_overlay, - ); + let eph = Ephemeral::new(self.backend.backend_storage(), &mut read_overlay); let map_e = |e| format!("Trie lookup error: {}", e); @@ -86,17 +90,15 @@ impl<'a, S, H> ProvingBackendRecorder<'a, S, H> &eph, &root.as_ref(), key, - &mut *self.proof_recorder - ).map_err(map_e) + &mut *self.proof_recorder, + ) + .map_err(map_e) } /// Produce proof for the whole backend. pub fn record_all_keys(&mut self) { let mut read_overlay = S::Overlay::default(); - let eph = Ephemeral::new( - self.backend.backend_storage(), - &mut read_overlay, - ); + let eph = Ephemeral::new(self.backend.backend_storage(), &mut read_overlay); let mut iter = move || -> Result<(), Box>> { let root = self.backend.root(); @@ -150,13 +152,14 @@ impl ProofRecorder { /// encoded proof. 
pub fn estimate_encoded_size(&self) -> usize { let inner = self.inner.read(); - inner.encoded_size - + codec::Compact(inner.records.len() as u32).encoded_size() + inner.encoded_size + codec::Compact(inner.records.len() as u32).encoded_size() } /// Convert into a [`StorageProof`]. pub fn to_storage_proof(&self) -> StorageProof { - let trie_nodes = self.inner.read() + let trie_nodes = self + .inner + .read() .records .iter() .filter_map(|(_k, v)| v.as_ref().map(|v| v.to_vec())) @@ -175,7 +178,7 @@ impl ProofRecorder { /// Patricia trie-based backend which also tracks all touched storage trie values. /// These can be sent to remote node and used as a proof of execution. -pub struct ProvingBackend<'a, S: 'a + TrieBackendStorage, H: 'a + Hasher> ( +pub struct ProvingBackend<'a, S: 'a + TrieBackendStorage, H: 'a + Hasher>( TrieBackend, H>, ); @@ -186,7 +189,8 @@ pub struct ProofRecorderBackend<'a, S: 'a + TrieBackendStorage, H: 'a + Hashe } impl<'a, S: 'a + TrieBackendStorage, H: 'a + Hasher> ProvingBackend<'a, S, H> - where H::Out: Codec +where + H::Out: Codec, { /// Create new proving backend. 
pub fn new(backend: &'a TrieBackend) -> Self { @@ -201,10 +205,7 @@ impl<'a, S: 'a + TrieBackendStorage, H: 'a + Hasher> ProvingBackend<'a, S, H> ) -> Self { let essence = backend.essence(); let root = essence.root().clone(); - let recorder = ProofRecorderBackend { - backend: essence.backend_storage(), - proof_recorder, - }; + let recorder = ProofRecorderBackend { backend: essence.backend_storage(), proof_recorder }; ProvingBackend(TrieBackend::new(recorder, root)) } @@ -229,7 +230,7 @@ impl<'a, S: 'a + TrieBackendStorage, H: 'a + Hasher> TrieBackendStorage fn get(&self, key: &H::Out, prefix: Prefix) -> Result, String> { if let Some(v) = self.proof_recorder.get(key) { - return Ok(v); + return Ok(v) } let backend_value = self.backend.get(key, prefix)?; @@ -247,10 +248,10 @@ impl<'a, S: 'a + TrieBackendStorage, H: 'a + Hasher> std::fmt::Debug } impl<'a, S, H> Backend for ProvingBackend<'a, S, H> - where - S: 'a + TrieBackendStorage, - H: 'a + Hasher, - H::Out: Ord + Codec, +where + S: 'a + TrieBackendStorage, + H: 'a + Hasher, + H::Out: Ord + Codec, { type Error = String; type Transaction = S::Overlay; @@ -314,7 +315,7 @@ impl<'a, S, H> Backend for ProvingBackend<'a, S, H> prefix: &[u8], f: F, ) { - self.0.for_child_keys_with_prefix( child_info, prefix, f) + self.0.for_child_keys_with_prefix(child_info, prefix, f) } fn pairs(&self) -> Vec<(Vec, Vec)> { @@ -325,30 +326,32 @@ impl<'a, S, H> Backend for ProvingBackend<'a, S, H> self.0.keys(prefix) } - fn child_keys( - &self, - child_info: &ChildInfo, - prefix: &[u8], - ) -> Vec> { + fn child_keys(&self, child_info: &ChildInfo, prefix: &[u8]) -> Vec> { self.0.child_keys(child_info, prefix) } fn storage_root<'b>( &self, - delta: impl Iterator)>, - ) -> (H::Out, Self::Transaction) where H::Out: Ord { + delta: impl Iterator)>, + ) -> (H::Out, Self::Transaction) + where + H::Out: Ord, + { self.0.storage_root(delta) } fn child_storage_root<'b>( &self, child_info: &ChildInfo, - delta: impl Iterator)>, - ) -> (H::Out, bool, 
Self::Transaction) where H::Out: Ord { + delta: impl Iterator)>, + ) -> (H::Out, bool, Self::Transaction) + where + H::Out: Ord, + { self.0.child_storage_root(child_info, delta) } - fn register_overlay_stats(&self, _stats: &crate::stats::StateMachineStats) { } + fn register_overlay_stats(&self, _stats: &crate::stats::StateMachineStats) {} fn usage_info(&self) -> crate::stats::UsageInfo { self.0.usage_info() @@ -375,15 +378,16 @@ where #[cfg(test)] mod tests { - use crate::InMemoryBackend; - use crate::trie_backend::tests::test_trie; use super::*; - use crate::proving_backend::create_proof_check_backend; - use sp_trie::PrefixedMemoryDB; + use crate::{ + proving_backend::create_proof_check_backend, trie_backend::tests::test_trie, + InMemoryBackend, + }; use sp_runtime::traits::BlakeTwo256; + use sp_trie::PrefixedMemoryDB; fn test_proving<'a>( - trie_backend: &'a TrieBackend,BlakeTwo256>, + trie_backend: &'a TrieBackend, BlakeTwo256>, ) -> ProvingBackend<'a, PrefixedMemoryDB, BlakeTwo256> { ProvingBackend::new(trie_backend) } @@ -407,7 +411,7 @@ mod tests { use sp_core::H256; let result = create_proof_check_backend::( H256::from_low_u64_be(1), - StorageProof::empty() + StorageProof::empty(), ); assert!(result.is_err()); } @@ -443,7 +447,8 @@ mod tests { let proof = proving.extract_proof(); - let proof_check = create_proof_check_backend::(in_memory_root.into(), proof).unwrap(); + let proof_check = + create_proof_check_backend::(in_memory_root.into(), proof).unwrap(); assert_eq!(proof_check.storage(&[42]).unwrap().unwrap(), vec![42]); } @@ -455,48 +460,38 @@ mod tests { let child_info_2 = &child_info_2; let contents = vec![ (None, (0..64).map(|i| (vec![i], Some(vec![i]))).collect()), - (Some(child_info_1.clone()), - (28..65).map(|i| (vec![i], Some(vec![i]))).collect()), - (Some(child_info_2.clone()), - (10..15).map(|i| (vec![i], Some(vec![i]))).collect()), + (Some(child_info_1.clone()), (28..65).map(|i| (vec![i], Some(vec![i]))).collect()), + 
(Some(child_info_2.clone()), (10..15).map(|i| (vec![i], Some(vec![i]))).collect()), ]; let in_memory = InMemoryBackend::::default(); let mut in_memory = in_memory.update(contents); let child_storage_keys = vec![child_info_1.to_owned(), child_info_2.to_owned()]; - let in_memory_root = in_memory.full_storage_root( - std::iter::empty(), - child_storage_keys.iter().map(|k|(k, std::iter::empty())) - ).0; - (0..64).for_each(|i| assert_eq!( - in_memory.storage(&[i]).unwrap().unwrap(), - vec![i] - )); - (28..65).for_each(|i| assert_eq!( - in_memory.child_storage(child_info_1, &[i]).unwrap().unwrap(), - vec![i] - )); - (10..15).for_each(|i| assert_eq!( - in_memory.child_storage(child_info_2, &[i]).unwrap().unwrap(), - vec![i] - )); + let in_memory_root = in_memory + .full_storage_root( + std::iter::empty(), + child_storage_keys.iter().map(|k| (k, std::iter::empty())), + ) + .0; + (0..64).for_each(|i| assert_eq!(in_memory.storage(&[i]).unwrap().unwrap(), vec![i])); + (28..65).for_each(|i| { + assert_eq!(in_memory.child_storage(child_info_1, &[i]).unwrap().unwrap(), vec![i]) + }); + (10..15).for_each(|i| { + assert_eq!(in_memory.child_storage(child_info_2, &[i]).unwrap().unwrap(), vec![i]) + }); let trie = in_memory.as_trie_backend().unwrap(); let trie_root = trie.storage_root(std::iter::empty()).0; assert_eq!(in_memory_root, trie_root); - (0..64).for_each(|i| assert_eq!( - trie.storage(&[i]).unwrap().unwrap(), - vec![i] - )); + (0..64).for_each(|i| assert_eq!(trie.storage(&[i]).unwrap().unwrap(), vec![i])); let proving = ProvingBackend::new(trie); assert_eq!(proving.storage(&[42]).unwrap().unwrap(), vec![42]); let proof = proving.extract_proof(); - let proof_check = create_proof_check_backend::( - in_memory_root.into(), - proof - ).unwrap(); + let proof_check = + create_proof_check_backend::(in_memory_root.into(), proof).unwrap(); assert!(proof_check.storage(&[0]).is_err()); assert_eq!(proof_check.storage(&[42]).unwrap().unwrap(), vec![42]); // note that it is include in 
root because proof close @@ -507,14 +502,9 @@ mod tests { assert_eq!(proving.child_storage(child_info_1, &[64]), Ok(Some(vec![64]))); let proof = proving.extract_proof(); - let proof_check = create_proof_check_backend::( - in_memory_root.into(), - proof - ).unwrap(); - assert_eq!( - proof_check.child_storage(child_info_1, &[64]).unwrap().unwrap(), - vec![64] - ); + let proof_check = + create_proof_check_backend::(in_memory_root.into(), proof).unwrap(); + assert_eq!(proof_check.child_storage(child_info_1, &[64]).unwrap().unwrap(), vec![64]); } #[test] @@ -522,15 +512,14 @@ mod tests { let trie_backend = test_trie(); let backend = test_proving(&trie_backend); - let check_estimation = |backend: &ProvingBackend<'_, PrefixedMemoryDB, BlakeTwo256>| { - let storage_proof = backend.extract_proof(); - let estimation = backend.0.essence() - .backend_storage() - .proof_recorder - .estimate_encoded_size(); + let check_estimation = + |backend: &ProvingBackend<'_, PrefixedMemoryDB, BlakeTwo256>| { + let storage_proof = backend.extract_proof(); + let estimation = + backend.0.essence().backend_storage().proof_recorder.estimate_encoded_size(); - assert_eq!(storage_proof.encoded_size(), estimation); - }; + assert_eq!(storage_proof.encoded_size(), estimation); + }; assert_eq!(backend.storage(b"key").unwrap(), Some(b"value".to_vec())); check_estimation(&backend); diff --git a/primitives/state-machine/src/read_only.rs b/primitives/state-machine/src/read_only.rs index 01e1fb6b5b2f5..5b7d568b0311e 100644 --- a/primitives/state-machine/src/read_only.rs +++ b/primitives/state-machine/src/read_only.rs @@ -17,17 +17,18 @@ //! Read-only version of Externalities. 
-use std::{ - any::{TypeId, Any}, - marker::PhantomData, -}; use crate::{Backend, StorageKey, StorageValue}; +use codec::Encode; use hash_db::Hasher; use sp_core::{ storage::{ChildInfo, TrackedStorageKey}, - traits::Externalities, Blake2Hasher, + traits::Externalities, + Blake2Hasher, +}; +use std::{ + any::{Any, TypeId}, + marker::PhantomData, }; -use codec::Encode; /// Trait for inspecting state in any backend. /// @@ -79,39 +80,34 @@ impl<'a, H: Hasher, B: 'a + Backend> Externalities for ReadOnlyExternalities< } fn storage(&self, key: &[u8]) -> Option { - self.backend.storage(key).expect("Backed failed for storage in ReadOnlyExternalities") + self.backend + .storage(key) + .expect("Backed failed for storage in ReadOnlyExternalities") } fn storage_hash(&self, key: &[u8]) -> Option> { self.storage(key).map(|v| Blake2Hasher::hash(&v).encode()) } - fn child_storage( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option { - self.backend.child_storage(child_info, key).expect("Backed failed for child_storage in ReadOnlyExternalities") + fn child_storage(&self, child_info: &ChildInfo, key: &[u8]) -> Option { + self.backend + .child_storage(child_info, key) + .expect("Backed failed for child_storage in ReadOnlyExternalities") } - fn child_storage_hash( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option> { + fn child_storage_hash(&self, child_info: &ChildInfo, key: &[u8]) -> Option> { self.child_storage(child_info, key).map(|v| Blake2Hasher::hash(&v).encode()) } fn next_storage_key(&self, key: &[u8]) -> Option { - self.backend.next_storage_key(key).expect("Backed failed for next_storage_key in ReadOnlyExternalities") + self.backend + .next_storage_key(key) + .expect("Backed failed for next_storage_key in ReadOnlyExternalities") } - fn next_child_storage_key( - &self, - child_info: &ChildInfo, - key: &[u8], - ) -> Option { - self.backend.next_child_storage_key(child_info, key) + fn next_child_storage_key(&self, child_info: &ChildInfo, key: &[u8]) -> 
Option { + self.backend + .next_child_storage_key(child_info, key) .expect("Backed failed for next_child_storage_key in ReadOnlyExternalities") } @@ -128,11 +124,7 @@ impl<'a, H: Hasher, B: 'a + Backend> Externalities for ReadOnlyExternalities< unimplemented!("place_child_storage not supported in ReadOnlyExternalities") } - fn kill_child_storage( - &mut self, - _child_info: &ChildInfo, - _limit: Option, - ) -> (bool, u32) { + fn kill_child_storage(&mut self, _child_info: &ChildInfo, _limit: Option) -> (bool, u32) { unimplemented!("kill_child_storage is not supported in ReadOnlyExternalities") } @@ -149,11 +141,7 @@ impl<'a, H: Hasher, B: 'a + Backend> Externalities for ReadOnlyExternalities< unimplemented!("clear_child_prefix is not supported in ReadOnlyExternalities") } - fn storage_append( - &mut self, - _key: Vec, - _value: Vec, - ) { + fn storage_append(&mut self, _key: Vec, _value: Vec) { unimplemented!("storage_append is not supported in ReadOnlyExternalities") } @@ -161,10 +149,7 @@ impl<'a, H: Hasher, B: 'a + Backend> Externalities for ReadOnlyExternalities< unimplemented!("storage_root is not supported in ReadOnlyExternalities") } - fn child_storage_root( - &mut self, - _child_info: &ChildInfo, - ) -> Vec { + fn child_storage_root(&mut self, _child_info: &ChildInfo) -> Vec { unimplemented!("child_storage_root is not supported in ReadOnlyExternalities") } @@ -209,7 +194,9 @@ impl<'a, H: Hasher, B: 'a + Backend> Externalities for ReadOnlyExternalities< } } -impl<'a, H: Hasher, B: 'a + Backend> sp_externalities::ExtensionStore for ReadOnlyExternalities<'a, H, B> { +impl<'a, H: Hasher, B: 'a + Backend> sp_externalities::ExtensionStore + for ReadOnlyExternalities<'a, H, B> +{ fn extension_by_type_id(&mut self, _type_id: TypeId) -> Option<&mut dyn Any> { unimplemented!("extension_by_type_id is not supported in ReadOnlyExternalities") } @@ -222,7 +209,10 @@ impl<'a, H: Hasher, B: 'a + Backend> sp_externalities::ExtensionStore for Rea 
unimplemented!("register_extension_with_type_id is not supported in ReadOnlyExternalities") } - fn deregister_extension_by_type_id(&mut self, _type_id: TypeId) -> Result<(), sp_externalities::Error> { + fn deregister_extension_by_type_id( + &mut self, + _type_id: TypeId, + ) -> Result<(), sp_externalities::Error> { unimplemented!("deregister_extension_by_type_id is not supported in ReadOnlyExternalities") } } diff --git a/primitives/state-machine/src/stats.rs b/primitives/state-machine/src/stats.rs index 9d4ac27e5e94f..affd71f9d2e5d 100644 --- a/primitives/state-machine/src/stats.rs +++ b/primitives/state-machine/src/stats.rs @@ -17,9 +17,9 @@ //! Usage statistics for state db -#[cfg(feature = "std")] -use std::time::{Instant, Duration}; use sp_std::cell::RefCell; +#[cfg(feature = "std")] +use std::time::{Duration, Instant}; /// Measured count of operations and total bytes. #[derive(Clone, Debug, Default)] diff --git a/primitives/state-machine/src/testing.rs b/primitives/state-machine/src/testing.rs index 363d543da086f..ec1772ba8666f 100644 --- a/primitives/state-machine/src/testing.rs +++ b/primitives/state-machine/src/testing.rs @@ -17,17 +17,19 @@ //! Test implementation for Externalities. 
-use std::{any::{Any, TypeId}, panic::{AssertUnwindSafe, UnwindSafe}}; +use std::{ + any::{Any, TypeId}, + panic::{AssertUnwindSafe, UnwindSafe}, +}; use crate::{ - backend::Backend, OverlayedChanges, StorageTransactionCache, ext::Ext, InMemoryBackend, - StorageKey, StorageValue, + backend::Backend, changes_trie::{ - Configuration as ChangesTrieConfiguration, - InMemoryStorage as ChangesTrieInMemoryStorage, - BlockNumber as ChangesTrieBlockNumber, - State as ChangesTrieState, + BlockNumber as ChangesTrieBlockNumber, Configuration as ChangesTrieConfiguration, + InMemoryStorage as ChangesTrieInMemoryStorage, State as ChangesTrieState, }, + ext::Ext, + InMemoryBackend, OverlayedChanges, StorageKey, StorageTransactionCache, StorageValue, }; use codec::Decode; @@ -35,13 +37,13 @@ use hash_db::Hasher; use sp_core::{ offchain::testing::TestPersistentOffchainDB, storage::{ - well_known_keys::{CHANGES_TRIE_CONFIG, CODE, is_child_storage_key}, + well_known_keys::{is_child_storage_key, CHANGES_TRIE_CONFIG, CODE}, Storage, }, - traits::TaskExecutorExt, testing::TaskExecutor, + traits::TaskExecutorExt, }; -use sp_externalities::{Extensions, Extension, ExtensionStore}; +use sp_externalities::{Extension, ExtensionStore, Extensions}; /// Simple HashMap-based Externalities impl. pub struct TestExternalities @@ -96,7 +98,9 @@ where /// Create a new instance of `TestExternalities` with code and storage. pub fn new_with_code(code: &[u8], mut storage: Storage) -> Self { let mut overlay = OverlayedChanges::default(); - let changes_trie_config = storage.top.get(CHANGES_TRIE_CONFIG) + let changes_trie_config = storage + .top + .get(CHANGES_TRIE_CONFIG) .and_then(|v| Decode::decode(&mut &v[..]).ok()); overlay.set_collect_extrinsics(changes_trie_config.is_some()); @@ -156,17 +160,14 @@ where /// In contrast to [`commit_all`](Self::commit_all) this will not panic if there are open /// transactions. 
fn as_backend(&self) -> InMemoryBackend { - let top: Vec<_> = self.overlay.changes() - .map(|(k, v)| (k.clone(), v.value().cloned())) - .collect(); + let top: Vec<_> = + self.overlay.changes().map(|(k, v)| (k.clone(), v.value().cloned())).collect(); let mut transaction = vec![(None, top)]; for (child_changes, child_info) in self.overlay.children() { transaction.push(( Some(child_info.clone()), - child_changes - .map(|(k, v)| (k.clone(), v.value().cloned())) - .collect(), + child_changes.map(|(k, v)| (k.clone(), v.value().cloned())).collect(), )) } @@ -186,7 +187,8 @@ where &mut Default::default(), )?; - self.backend.apply_transaction(changes.transaction_storage_root, changes.transaction); + self.backend + .apply_transaction(changes.transaction_storage_root, changes.transaction); Ok(()) } @@ -202,18 +204,21 @@ where /// /// Returns the result of the given closure, if no panics occured. /// Otherwise, returns `Err`. - pub fn execute_with_safe(&mut self, f: impl FnOnce() -> R + UnwindSafe) -> Result { + pub fn execute_with_safe( + &mut self, + f: impl FnOnce() -> R + UnwindSafe, + ) -> Result { let mut ext = AssertUnwindSafe(self.ext()); - std::panic::catch_unwind(move || + std::panic::catch_unwind(move || { sp_externalities::set_and_run_with_externalities(&mut *ext, f) - ).map_err(|e| { - format!("Closure panicked: {:?}", e) }) + .map_err(|e| format!("Closure panicked: {:?}", e)) } } impl std::fmt::Debug for TestExternalities - where H::Out: Ord + codec::Codec, +where + H::Out: Ord + codec::Codec, { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "overlay: {:?}\nbackend: {:?}", self.overlay, self.backend.pairs()) @@ -221,8 +226,8 @@ impl std::fmt::Debug for TestExternalities } impl PartialEq for TestExternalities - where - H::Out: Ord + 'static + codec::Codec +where + H::Out: Ord + 'static + codec::Codec, { /// This doesn't test if they are in the same state, only if they contains the /// same data at this state @@ -232,22 +237,25 @@ impl 
PartialEq for TestExternalities } impl Default for TestExternalities - where - H::Out: Ord + 'static + codec::Codec, +where + H::Out: Ord + 'static + codec::Codec, { - fn default() -> Self { Self::new(Default::default()) } + fn default() -> Self { + Self::new(Default::default()) + } } impl From for TestExternalities - where - H::Out: Ord + 'static + codec::Codec, +where + H::Out: Ord + 'static + codec::Codec, { fn from(storage: Storage) -> Self { Self::new(storage) } } -impl sp_externalities::ExtensionStore for TestExternalities where +impl sp_externalities::ExtensionStore for TestExternalities +where H: Hasher, H::Out: Ord + codec::Codec, N: ChangesTrieBlockNumber, @@ -264,7 +272,10 @@ impl sp_externalities::ExtensionStore for TestExternalities where self.extensions.register_with_type_id(type_id, extension) } - fn deregister_extension_by_type_id(&mut self, type_id: TypeId) -> Result<(), sp_externalities::Error> { + fn deregister_extension_by_type_id( + &mut self, + type_id: TypeId, + ) -> Result<(), sp_externalities::Error> { if self.extensions.deregister(type_id) { Ok(()) } else { @@ -274,14 +285,13 @@ impl sp_externalities::ExtensionStore for TestExternalities where } impl sp_externalities::ExternalitiesExt for TestExternalities - where - H: Hasher, - H::Out: Ord + codec::Codec, - N: ChangesTrieBlockNumber, +where + H: Hasher, + H::Out: Ord + codec::Codec, + N: ChangesTrieBlockNumber, { fn extension(&mut self) -> Option<&mut T> { - self.extension_by_type_id(TypeId::of::()) - .and_then(::downcast_mut) + self.extension_by_type_id(TypeId::of::()).and_then(::downcast_mut) } fn register_extension(&mut self, ext: T) -> Result<(), sp_externalities::Error> { @@ -296,9 +306,9 @@ impl sp_externalities::ExternalitiesExt for TestExternalities #[cfg(test)] mod tests { use super::*; - use sp_core::{H256, traits::Externalities, storage::ChildInfo}; - use sp_runtime::traits::BlakeTwo256; use hex_literal::hex; + use sp_core::{storage::ChildInfo, traits::Externalities, H256}; + 
use sp_runtime::traits::BlakeTwo256; #[test] fn commit_should_work() { @@ -307,7 +317,8 @@ mod tests { ext.set_storage(b"doe".to_vec(), b"reindeer".to_vec()); ext.set_storage(b"dog".to_vec(), b"puppy".to_vec()); ext.set_storage(b"dogglesworth".to_vec(), b"cat".to_vec()); - let root = H256::from(hex!("ed4d8c799d996add422395a6abd7545491d40bd838d738afafa1b8a4de625489")); + let root = + H256::from(hex!("ed4d8c799d996add422395a6abd7545491d40bd838d738afafa1b8a4de625489")); assert_eq!(H256::from_slice(ext.storage_root().as_slice()), root); } @@ -325,7 +336,7 @@ mod tests { #[test] fn check_send() { fn assert_send() {} - assert_send::>(); + assert_send::>(); } #[test] diff --git a/primitives/state-machine/src/trie_backend.rs b/primitives/state-machine/src/trie_backend.rs index 6162a9866a46c..e8c9fa475cffd 100644 --- a/primitives/state-machine/src/trie_backend.rs +++ b/primitives/state-machine/src/trie_backend.rs @@ -17,29 +17,33 @@ //! Trie-based state machine backend. -use crate::{warn, debug}; -use hash_db::Hasher; -use sp_trie::{Trie, delta_trie_root, empty_child_trie_root, child_delta_trie_root}; -use sp_trie::trie_types::{TrieDB, TrieError, Layout}; -use sp_core::storage::{ChildInfo, ChildType}; -use codec::{Codec, Decode}; use crate::{ - StorageKey, StorageValue, Backend, - trie_backend_essence::{TrieBackendEssence, TrieBackendStorage, Ephemeral}, + debug, + trie_backend_essence::{Ephemeral, TrieBackendEssence, TrieBackendStorage}, + warn, Backend, StorageKey, StorageValue, }; +use codec::{Codec, Decode}; +use hash_db::Hasher; +use sp_core::storage::{ChildInfo, ChildType}; use sp_std::{boxed::Box, vec::Vec}; +use sp_trie::{ + child_delta_trie_root, delta_trie_root, empty_child_trie_root, + trie_types::{Layout, TrieDB, TrieError}, + Trie, +}; /// Patricia trie-based backend. Transaction type is an overlay of changes to commit. 
pub struct TrieBackend, H: Hasher> { - pub (crate) essence: TrieBackendEssence, + pub(crate) essence: TrieBackendEssence, } -impl, H: Hasher> TrieBackend where H::Out: Codec { +impl, H: Hasher> TrieBackend +where + H::Out: Codec, +{ /// Create new trie-based backend. pub fn new(storage: S, root: H::Out) -> Self { - TrieBackend { - essence: TrieBackendEssence::new(storage, root), - } + TrieBackend { essence: TrieBackendEssence::new(storage, root) } } /// Get backend essence reference. @@ -74,7 +78,8 @@ impl, H: Hasher> sp_std::fmt::Debug for TrieBackend, H: Hasher> Backend for TrieBackend where +impl, H: Hasher> Backend for TrieBackend +where H::Out: Ord + Codec, { type Error = crate::DefaultError; @@ -121,7 +126,8 @@ impl, H: Hasher> Backend for TrieBackend where f: F, allow_missing: bool, ) -> Result { - self.essence.apply_to_key_values_while(child_info, prefix, start_at, f, allow_missing) + self.essence + .apply_to_key_values_while(child_info, prefix, start_at, f, allow_missing) } fn apply_to_keys_while bool>( @@ -159,7 +165,7 @@ impl, H: Hasher> Backend for TrieBackend where Err(e) => { debug!(target: "trie", "Error extracting trie values: {}", e); Vec::new() - } + }, } } @@ -177,21 +183,23 @@ impl, H: Hasher> Backend for TrieBackend where Ok(v) }; - collect_all().map_err(|e| debug!(target: "trie", "Error extracting trie keys: {}", e)).unwrap_or_default() + collect_all() + .map_err(|e| debug!(target: "trie", "Error extracting trie keys: {}", e)) + .unwrap_or_default() } fn storage_root<'a>( &self, - delta: impl Iterator)>, - ) -> (H::Out, Self::Transaction) where H::Out: Ord { + delta: impl Iterator)>, + ) -> (H::Out, Self::Transaction) + where + H::Out: Ord, + { let mut write_overlay = S::Overlay::default(); let mut root = *self.essence.root(); { - let mut eph = Ephemeral::new( - self.essence.backend_storage(), - &mut write_overlay, - ); + let mut eph = Ephemeral::new(self.essence.backend_storage(), &mut write_overlay); match delta_trie_root::, _, _, _, _, 
_>(&mut eph, root, delta) { Ok(ret) => root = ret, @@ -205,17 +213,21 @@ impl, H: Hasher> Backend for TrieBackend where fn child_storage_root<'a>( &self, child_info: &ChildInfo, - delta: impl Iterator)>, - ) -> (H::Out, bool, Self::Transaction) where H::Out: Ord { + delta: impl Iterator)>, + ) -> (H::Out, bool, Self::Transaction) + where + H::Out: Ord, + { let default_root = match child_info.child_type() { - ChildType::ParentKeyId => empty_child_trie_root::>() + ChildType::ParentKeyId => empty_child_trie_root::>(), }; let mut write_overlay = S::Overlay::default(); let prefixed_storage_key = child_info.prefixed_storage_key(); let mut root = match self.storage(prefixed_storage_key.as_slice()) { - Ok(value) => - value.and_then(|r| Decode::decode(&mut &r[..]).ok()).unwrap_or_else(|| default_root.clone()), + Ok(value) => value + .and_then(|r| Decode::decode(&mut &r[..]).ok()) + .unwrap_or_else(|| default_root.clone()), Err(e) => { warn!(target: "trie", "Failed to read child storage root: {}", e); default_root.clone() @@ -223,10 +235,7 @@ impl, H: Hasher> Backend for TrieBackend where }; { - let mut eph = Ephemeral::new( - self.essence.backend_storage(), - &mut write_overlay, - ); + let mut eph = Ephemeral::new(self.essence.backend_storage(), &mut write_overlay); match child_delta_trie_root::, _, _, _, _, _, _>( child_info.keyspace(), @@ -248,7 +257,7 @@ impl, H: Hasher> Backend for TrieBackend where Some(self) } - fn register_overlay_stats(&self, _stats: &crate::stats::StateMachineStats) { } + fn register_overlay_stats(&self, _stats: &crate::stats::StateMachineStats) {} fn usage_info(&self) -> crate::UsageInfo { crate::UsageInfo::empty() @@ -261,12 +270,12 @@ impl, H: Hasher> Backend for TrieBackend where #[cfg(test)] pub mod tests { - use std::{collections::HashSet, iter}; - use sp_core::H256; + use super::*; use codec::Encode; - use sp_trie::{TrieMut, PrefixedMemoryDB, trie_types::TrieDBMut, KeySpacedDBMut}; + use sp_core::H256; use sp_runtime::traits::BlakeTwo256; - 
use super::*; + use sp_trie::{trie_types::TrieDBMut, KeySpacedDBMut, PrefixedMemoryDB, TrieMut}; + use std::{collections::HashSet, iter}; const CHILD_KEY_1: &[u8] = b"sub1"; @@ -312,7 +321,9 @@ pub mod tests { fn read_from_child_storage_returns_some() { let test_trie = test_trie(); assert_eq!( - test_trie.child_storage(&ChildInfo::new_default(CHILD_KEY_1), b"value3").unwrap(), + test_trie + .child_storage(&ChildInfo::new_default(CHILD_KEY_1), b"value3") + .unwrap(), Some(vec![142u8]), ); } @@ -332,7 +343,9 @@ pub mod tests { assert!(TrieBackend::, BlakeTwo256>::new( PrefixedMemoryDB::default(), Default::default(), - ).pairs().is_empty()); + ) + .pairs() + .is_empty()); } #[test] @@ -347,9 +360,8 @@ pub mod tests { #[test] fn storage_root_transaction_is_non_empty() { - let (new_root, mut tx) = test_trie().storage_root( - iter::once((&b"new-key"[..], Some(&b"new-value"[..]))), - ); + let (new_root, mut tx) = + test_trie().storage_root(iter::once((&b"new-key"[..], Some(&b"new-value"[..])))); assert!(!tx.drain().is_empty()); assert!(new_root != test_trie().storage_root(iter::empty()).0); } diff --git a/primitives/state-machine/src/trie_backend_essence.rs b/primitives/state-machine/src/trie_backend_essence.rs index 54124e6754a52..06a99f9388039 100644 --- a/primitives/state-machine/src/trie_backend_essence.rs +++ b/primitives/state-machine/src/trie_backend_essence.rs @@ -18,24 +18,24 @@ //! Trie-based state machine backend essence used to read values //! from storage. 
-#[cfg(feature = "std")] -use std::sync::Arc; -use sp_std::{ops::Deref, boxed::Box, vec::Vec}; -use crate::{warn, debug}; +use crate::{backend::Consolidate, debug, warn, StorageKey, StorageValue}; +use codec::Encode; use hash_db::{self, Hasher, Prefix}; -use sp_trie::{Trie, MemoryDB, PrefixedMemoryDB, DBValue, - empty_child_trie_root, read_trie_value, read_child_trie_value, - KeySpacedDB, TrieDBIterator}; -use sp_trie::trie_types::{TrieDB, TrieError, Layout}; -use crate::{backend::Consolidate, StorageKey, StorageValue}; use sp_core::storage::ChildInfo; -use codec::Encode; +use sp_std::{boxed::Box, ops::Deref, vec::Vec}; +use sp_trie::{ + empty_child_trie_root, read_child_trie_value, read_trie_value, + trie_types::{Layout, TrieDB, TrieError}, + DBValue, KeySpacedDB, MemoryDB, PrefixedMemoryDB, Trie, TrieDBIterator, +}; +#[cfg(feature = "std")] +use std::sync::Arc; #[cfg(not(feature = "std"))] macro_rules! format { - ($($arg:tt)+) => ( + ($($arg:tt)+) => { crate::DefaultError - ); + }; } type Result = sp_std::result::Result; @@ -53,14 +53,13 @@ pub struct TrieBackendEssence, H: Hasher> { empty: H::Out, } -impl, H: Hasher> TrieBackendEssence where H::Out: Encode { +impl, H: Hasher> TrieBackendEssence +where + H::Out: Encode, +{ /// Create new trie-based backend. pub fn new(storage: S, root: H::Out) -> Self { - TrieBackendEssence { - storage, - root, - empty: H::hash(&[0u8]), - } + TrieBackendEssence { storage, root, empty: H::hash(&[0u8]) } } /// Get backend storage reference. @@ -114,7 +113,7 @@ impl, H: Hasher> TrieBackendEssence where H::Out: let mut hash = H::Out::default(); if child_root.len() != hash.as_ref().len() { - return Err(format!("Invalid child storage hash at {:?}", child_info.storage_key())); + return Err(format!("Invalid child storage hash at {:?}", child_info.storage_key())) } // note: child_root and hash must be same size, panics otherwise. 
hash.as_mut().copy_from_slice(&child_root[..]); @@ -138,10 +137,9 @@ impl, H: Hasher> TrieBackendEssence where H::Out: dyn_eph = self; } - let trie = TrieDB::::new(dyn_eph, root) - .map_err(|e| format!("TrieDB creation error: {}", e))?; - let mut iter = trie.iter() - .map_err(|e| format!("TrieDB iteration error: {}", e))?; + let trie = + TrieDB::::new(dyn_eph, root).map_err(|e| format!("TrieDB creation error: {}", e))?; + let mut iter = trie.iter().map_err(|e| format!("TrieDB iteration error: {}", e))?; // The key just after the one given in input, basically `key++0`. // Note: We are sure this is the next key if: @@ -157,8 +155,8 @@ impl, H: Hasher> TrieBackendEssence where H::Out: let next_element = iter.next(); let next_key = if let Some(next_element) = next_element { - let (next_key, _) = next_element - .map_err(|e| format!("TrieDB iterator next error: {}", e))?; + let (next_key, _) = + next_element.map_err(|e| format!("TrieDB iterator next error: {}", e))?; Some(next_key) } else { None @@ -180,7 +178,8 @@ impl, H: Hasher> TrieBackendEssence where H::Out: child_info: &ChildInfo, key: &[u8], ) -> Result> { - let root = self.child_root(child_info)? + let root = self + .child_root(child_info)? .unwrap_or_else(|| empty_child_trie_root::>().encode()); let map_e = |e| format!("Trie lookup error: {}", e); @@ -210,20 +209,13 @@ impl, H: Hasher> TrieBackendEssence where H::Out: &child_root } else { - return Ok(true); + return Ok(true) } } else { &self.root }; - self.trie_iter_inner( - &root, - prefix, - f, - child_info, - start_at, - allow_missing_nodes, - ) + self.trie_iter_inner(&root, prefix, f, child_info, start_at, allow_missing_nodes) } /// Retrieve all entries keys of a storage and call `f` for each of those keys. 
@@ -240,8 +232,8 @@ impl, H: Hasher> TrieBackendEssence where H::Out: Ok(v) => v.unwrap_or_else(|| empty_child_trie_root::>().encode()), Err(e) => { debug!(target: "trie", "Error while iterating child storage: {}", e); - return; - } + return + }, }; child_root.as_mut().copy_from_slice(&root_vec); &child_root @@ -249,7 +241,17 @@ impl, H: Hasher> TrieBackendEssence where H::Out: &self.root }; - let _ = self.trie_iter_inner(root, prefix, |k, _v| { f(&k); true}, child_info, None, false); + let _ = self.trie_iter_inner( + root, + prefix, + |k, _v| { + f(&k); + true + }, + child_info, + None, + false, + ); } /// Execute given closure for all keys starting with prefix. @@ -263,17 +265,37 @@ impl, H: Hasher> TrieBackendEssence where H::Out: Ok(v) => v.unwrap_or_else(|| empty_child_trie_root::>().encode()), Err(e) => { debug!(target: "trie", "Error while iterating child storage: {}", e); - return; - } + return + }, }; let mut root = H::Out::default(); root.as_mut().copy_from_slice(&root_vec); - let _ = self.trie_iter_inner(&root, Some(prefix), |k, _v| { f(&k); true }, Some(child_info), None, false); + let _ = self.trie_iter_inner( + &root, + Some(prefix), + |k, _v| { + f(&k); + true + }, + Some(child_info), + None, + false, + ); } /// Execute given closure for all keys starting with prefix. 
pub fn for_keys_with_prefix(&self, prefix: &[u8], mut f: F) { - let _ = self.trie_iter_inner(&self.root, Some(prefix), |k, _v| { f(&k); true }, None, None, false); + let _ = self.trie_iter_inner( + &self.root, + Some(prefix), + |k, _v| { + f(&k); + true + }, + None, + None, + false, + ); } fn trie_iter_inner, Vec) -> bool>( @@ -315,14 +337,25 @@ impl, H: Hasher> TrieBackendEssence where H::Out: }; match result { Ok(completed) => Ok(completed), - Err(e) if matches!(*e, TrieError::IncompleteDatabase(_)) && allow_missing_nodes => Ok(false), + Err(e) if matches!(*e, TrieError::IncompleteDatabase(_)) && allow_missing_nodes => + Ok(false), Err(e) => Err(format!("TrieDB iteration error: {}", e)), } } /// Execute given closure for all key and values starting with prefix. pub fn for_key_values_with_prefix(&self, prefix: &[u8], mut f: F) { - let _ = self.trie_iter_inner(&self.root, Some(prefix), |k, v| {f(&k, &v); true}, None, None, false); + let _ = self.trie_iter_inner( + &self.root, + Some(prefix), + |k, v| { + f(&k, &v); + true + }, + None, + None, + false, + ); } } @@ -334,16 +367,17 @@ pub(crate) struct Ephemeral<'a, S: 'a + TrieBackendStorage, H: 'a + Hasher> { impl<'a, S: 'a + TrieBackendStorage, H: 'a + Hasher> hash_db::AsHashDB for Ephemeral<'a, S, H> { - fn as_hash_db<'b>(&'b self) -> &'b (dyn hash_db::HashDB + 'b) { self } - fn as_hash_db_mut<'b>(&'b mut self) -> &'b mut (dyn hash_db::HashDB + 'b) { self } + fn as_hash_db<'b>(&'b self) -> &'b (dyn hash_db::HashDB + 'b) { + self + } + fn as_hash_db_mut<'b>(&'b mut self) -> &'b mut (dyn hash_db::HashDB + 'b) { + self + } } impl<'a, S: TrieBackendStorage, H: Hasher> Ephemeral<'a, S, H> { pub fn new(storage: &'a S, overlay: &'a mut S::Overlay) -> Self { - Ephemeral { - storage, - overlay, - } + Ephemeral { storage, overlay } } } @@ -431,13 +465,15 @@ impl TrieBackendStorage for MemoryDB { impl, H: Hasher> hash_db::AsHashDB for TrieBackendEssence { - fn as_hash_db<'b>(&'b self) -> &'b (dyn hash_db::HashDB + 'b) { self 
} - fn as_hash_db_mut<'b>(&'b mut self) -> &'b mut (dyn hash_db::HashDB + 'b) { self } + fn as_hash_db<'b>(&'b self) -> &'b (dyn hash_db::HashDB + 'b) { + self + } + fn as_hash_db_mut<'b>(&'b mut self) -> &'b mut (dyn hash_db::HashDB + 'b) { + self + } } -impl, H: Hasher> hash_db::HashDB - for TrieBackendEssence -{ +impl, H: Hasher> hash_db::HashDB for TrieBackendEssence { fn get(&self, key: &H::Out, prefix: Prefix) -> Option { if *key == self.empty { return Some([0u8].to_vec()) @@ -480,12 +516,11 @@ impl, H: Hasher> hash_db::HashDBRef } } - #[cfg(test)] mod test { - use sp_core::{Blake2Hasher, H256}; - use sp_trie::{TrieMut, PrefixedMemoryDB, trie_types::TrieDBMut, KeySpacedDBMut}; use super::*; + use sp_core::{Blake2Hasher, H256}; + use sp_trie::{trie_types::TrieDBMut, KeySpacedDBMut, PrefixedMemoryDB, TrieMut}; #[test] fn next_storage_key_and_next_child_storage_key_work() { @@ -529,20 +564,10 @@ mod test { let mdb = essence_1.into_storage(); let essence_2 = TrieBackendEssence::new(mdb, root_2); - assert_eq!( - essence_2.next_child_storage_key(child_info, b"2"), Ok(Some(b"3".to_vec())) - ); - assert_eq!( - essence_2.next_child_storage_key(child_info, b"3"), Ok(Some(b"4".to_vec())) - ); - assert_eq!( - essence_2.next_child_storage_key(child_info, b"4"), Ok(Some(b"6".to_vec())) - ); - assert_eq!( - essence_2.next_child_storage_key(child_info, b"5"), Ok(Some(b"6".to_vec())) - ); - assert_eq!( - essence_2.next_child_storage_key(child_info, b"6"), Ok(None) - ); + assert_eq!(essence_2.next_child_storage_key(child_info, b"2"), Ok(Some(b"3".to_vec()))); + assert_eq!(essence_2.next_child_storage_key(child_info, b"3"), Ok(Some(b"4".to_vec()))); + assert_eq!(essence_2.next_child_storage_key(child_info, b"4"), Ok(Some(b"6".to_vec()))); + assert_eq!(essence_2.next_child_storage_key(child_info, b"5"), Ok(Some(b"6".to_vec()))); + assert_eq!(essence_2.next_child_storage_key(child_info, b"6"), Ok(None)); } } diff --git a/primitives/std/src/lib.rs b/primitives/std/src/lib.rs index 
6acf4b75967ae..3af4d07ac6297 100644 --- a/primitives/std/src/lib.rs +++ b/primitives/std/src/lib.rs @@ -19,11 +19,14 @@ //! or client/alloc to be used with any code that depends on the runtime. #![cfg_attr(not(feature = "std"), no_std)] - -#![cfg_attr(feature = "std", - doc = "Substrate runtime standard library as compiled when linked with Rust's standard library.")] -#![cfg_attr(not(feature = "std"), - doc = "Substrate's runtime standard library as compiled without Rust's standard library.")] +#![cfg_attr( + feature = "std", + doc = "Substrate runtime standard library as compiled when linked with Rust's standard library." +)] +#![cfg_attr( + not(feature = "std"), + doc = "Substrate's runtime standard library as compiled without Rust's standard library." +)] #[macro_export] macro_rules! map { @@ -55,7 +58,7 @@ macro_rules! if_std { #[cfg(not(feature = "std"))] #[macro_export] macro_rules! if_std { - ( $( $code:tt )* ) => {} + ( $( $code:tt )* ) => {}; } #[cfg(feature = "std")] @@ -64,7 +67,6 @@ include!("../with_std.rs"); #[cfg(not(feature = "std"))] include!("../without_std.rs"); - /// A target for `core::write!` macro - constructs a string in memory. #[derive(Default)] pub struct Writer(vec::Vec); @@ -92,10 +94,12 @@ impl Writer { /// /// This should include only things which are in the normal std prelude. pub mod prelude { - pub use crate::vec::Vec; - pub use crate::boxed::Box; - pub use crate::cmp::{Eq, PartialEq, Reverse}; - pub use crate::clone::Clone; + pub use crate::{ + boxed::Box, + clone::Clone, + cmp::{Eq, PartialEq, Reverse}, + vec::Vec, + }; // Re-export `vec!` macro here, but not in `std` mode, since // std's prelude already brings `vec!` into the scope. 
diff --git a/primitives/storage/src/lib.rs b/primitives/storage/src/lib.rs index 87c10f770a8ab..45474a44693ab 100644 --- a/primitives/storage/src/lib.rs +++ b/primitives/storage/src/lib.rs @@ -20,16 +20,22 @@ #![cfg_attr(not(feature = "std"), no_std)] #[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; use sp_debug_derive::RuntimeDebug; -use sp_std::{vec::Vec, ops::{Deref, DerefMut}}; +use codec::{Decode, Encode}; use ref_cast::RefCast; -use codec::{Encode, Decode}; +use sp_std::{ + ops::{Deref, DerefMut}, + vec::Vec, +}; /// Storage key. #[derive(PartialEq, Eq, RuntimeDebug)] -#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Hash, PartialOrd, Ord, Clone, Encode, Decode))] +#[cfg_attr( + feature = "std", + derive(Serialize, Deserialize, Hash, PartialOrd, Ord, Clone, Encode, Decode) +)] pub struct StorageKey( #[cfg_attr(feature = "std", serde(with = "impl_serde::serialize"))] pub Vec, ); @@ -53,12 +59,7 @@ pub struct TrackedStorageKey { impl TrackedStorageKey { /// Create a default `TrackedStorageKey` pub fn new(key: Vec) -> Self { - Self { - key, - reads: 0, - writes: 0, - whitelisted: false, - } + Self { key, reads: 0, writes: 0, whitelisted: false } } /// Check if this key has been "read", i.e. it exists in the memory overlay. /// @@ -90,12 +91,7 @@ impl TrackedStorageKey { // Easily convert a key to a `TrackedStorageKey` that has been whitelisted. 
impl From> for TrackedStorageKey { fn from(key: Vec) -> Self { - Self { - key: key, - reads: 0, - writes: 0, - whitelisted: true, - } + Self { key, reads: 0, writes: 0, whitelisted: true } } } @@ -105,8 +101,7 @@ impl From> for TrackedStorageKey { #[repr(transparent)] #[derive(RefCast)] pub struct PrefixedStorageKey( - #[cfg_attr(feature = "std", serde(with="impl_serde::serialize"))] - Vec, + #[cfg_attr(feature = "std", serde(with = "impl_serde::serialize"))] Vec, ); impl Deref for PrefixedStorageKey { @@ -235,7 +230,6 @@ pub mod well_known_keys { CHILD_STORAGE_KEY_PREFIX.starts_with(key) } } - } /// Information related to a child state. @@ -257,9 +251,7 @@ impl ChildInfo { /// Same as `new_default` but with `Vec` as input. pub fn new_default_from_vec(storage_key: Vec) -> Self { - ChildInfo::ParentKeyId(ChildTrieParentKeyId { - data: storage_key, - }) + ChildInfo::ParentKeyId(ChildTrieParentKeyId { data: storage_key }) } /// Try to update with another instance, return false if both instance @@ -284,9 +276,7 @@ impl ChildInfo { /// child trie. pub fn storage_key(&self) -> &[u8] { match self { - ChildInfo::ParentKeyId(ChildTrieParentKeyId { - data, - }) => &data[..], + ChildInfo::ParentKeyId(ChildTrieParentKeyId { data }) => &data[..], } } @@ -294,9 +284,8 @@ impl ChildInfo { /// this trie. pub fn prefixed_storage_key(&self) -> PrefixedStorageKey { match self { - ChildInfo::ParentKeyId(ChildTrieParentKeyId { - data, - }) => ChildType::ParentKeyId.new_prefixed_key(data.as_slice()), + ChildInfo::ParentKeyId(ChildTrieParentKeyId { data }) => + ChildType::ParentKeyId.new_prefixed_key(data.as_slice()), } } @@ -304,9 +293,7 @@ impl ChildInfo { /// this trie. 
pub fn into_prefixed_storage_key(self) -> PrefixedStorageKey { match self { - ChildInfo::ParentKeyId(ChildTrieParentKeyId { - mut data, - }) => { + ChildInfo::ParentKeyId(ChildTrieParentKeyId { mut data }) => { ChildType::ParentKeyId.do_prefix_key(&mut data); PrefixedStorageKey(data) }, diff --git a/primitives/tasks/src/async_externalities.rs b/primitives/tasks/src/async_externalities.rs index 8402246cb4e29..975a81af4f53d 100644 --- a/primitives/tasks/src/async_externalities.rs +++ b/primitives/tasks/src/async_externalities.rs @@ -18,12 +18,12 @@ //! Async externalities. -use std::any::{TypeId, Any}; use sp_core::{ storage::{ChildInfo, TrackedStorageKey}, - traits::{Externalities, SpawnNamed, TaskExecutorExt, RuntimeSpawnExt, RuntimeSpawn}, + traits::{Externalities, RuntimeSpawn, RuntimeSpawnExt, SpawnNamed, TaskExecutorExt}, }; use sp_externalities::{Extensions, ExternalitiesExt as _}; +use std::any::{Any, TypeId}; /// Simple state-less externalities for use in async context. /// @@ -34,7 +34,9 @@ pub struct AsyncExternalities { } /// New Async externalities. 
-pub fn new_async_externalities(scheduler: Box) -> Result { +pub fn new_async_externalities( + scheduler: Box, +) -> Result { let mut res = AsyncExternalities { extensions: Default::default() }; let mut ext = &mut res as &mut dyn Externalities; ext.register_extension::(TaskExecutorExt(scheduler.clone())) @@ -74,19 +76,11 @@ impl Externalities for AsyncExternalities { panic!("`storage_hash`: should not be used in async externalities!") } - fn child_storage( - &self, - _child_info: &ChildInfo, - _key: &[u8], - ) -> Option { + fn child_storage(&self, _child_info: &ChildInfo, _key: &[u8]) -> Option { panic!("`child_storage`: should not be used in async externalities!") } - fn child_storage_hash( - &self, - _child_info: &ChildInfo, - _key: &[u8], - ) -> Option> { + fn child_storage_hash(&self, _child_info: &ChildInfo, _key: &[u8]) -> Option> { panic!("`child_storage_hash`: should not be used in async externalities!") } @@ -94,11 +88,7 @@ impl Externalities for AsyncExternalities { panic!("`next_storage_key`: should not be used in async externalities!") } - fn next_child_storage_key( - &self, - _child_info: &ChildInfo, - _key: &[u8], - ) -> Option { + fn next_child_storage_key(&self, _child_info: &ChildInfo, _key: &[u8]) -> Option { panic!("`next_child_storage_key`: should not be used in async externalities!") } @@ -115,11 +105,7 @@ impl Externalities for AsyncExternalities { panic!("`place_child_storage`: should not be used in async externalities!") } - fn kill_child_storage( - &mut self, - _child_info: &ChildInfo, - _limit: Option, - ) -> (bool, u32) { + fn kill_child_storage(&mut self, _child_info: &ChildInfo, _limit: Option) -> (bool, u32) { panic!("`kill_child_storage`: should not be used in async externalities!") } @@ -136,11 +122,7 @@ impl Externalities for AsyncExternalities { panic!("`clear_child_prefix`: should not be used in async externalities!") } - fn storage_append( - &mut self, - _key: Vec, - _value: Vec, - ) { + fn storage_append(&mut self, _key: Vec, 
_value: Vec) { panic!("`storage_append`: should not be used in async externalities!") } @@ -148,10 +130,7 @@ impl Externalities for AsyncExternalities { panic!("`storage_root`: should not be used in async externalities!") } - fn child_storage_root( - &mut self, - _child_info: &ChildInfo, - ) -> Vec { + fn child_storage_root(&mut self, _child_info: &ChildInfo) -> Vec { panic!("`child_storage_root`: should not be used in async externalities!") } @@ -209,7 +188,10 @@ impl sp_externalities::ExtensionStore for AsyncExternalities { self.extensions.register_with_type_id(type_id, extension) } - fn deregister_extension_by_type_id(&mut self, type_id: TypeId) -> Result<(), sp_externalities::Error> { + fn deregister_extension_by_type_id( + &mut self, + type_id: TypeId, + ) -> Result<(), sp_externalities::Error> { if self.extensions.deregister(type_id) { Ok(()) } else { diff --git a/primitives/tasks/src/lib.rs b/primitives/tasks/src/lib.rs index 96aca0e1cef6b..020976dbf6316 100644 --- a/primitives/tasks/src/lib.rs +++ b/primitives/tasks/src/lib.rs @@ -61,9 +61,9 @@ pub use async_externalities::{new_async_externalities, AsyncExternalities}; #[cfg(feature = "std")] mod inner { - use std::{panic::AssertUnwindSafe, sync::mpsc}; - use sp_externalities::ExternalitiesExt as _; use sp_core::traits::TaskExecutorExt; + use sp_externalities::ExternalitiesExt as _; + use std::{panic::AssertUnwindSafe, sync::mpsc}; /// Task handle (wasm). /// @@ -77,55 +77,62 @@ mod inner { impl DataJoinHandle { /// Join handle returned by `spawn` function pub fn join(self) -> Vec { - self.receiver.recv().expect("Spawned runtime task terminated before sending result.") + self.receiver + .recv() + .expect("Spawned runtime task terminated before sending result.") } } /// Spawn new runtime task (native). 
pub fn spawn(entry_point: fn(Vec) -> Vec, data: Vec) -> DataJoinHandle { - let scheduler = sp_externalities::with_externalities(|mut ext| ext.extension::() - .expect("No task executor associated with the current context!") - .clone() - ).expect("Spawn called outside of externalities context!"); + let scheduler = sp_externalities::with_externalities(|mut ext| { + ext.extension::() + .expect("No task executor associated with the current context!") + .clone() + }) + .expect("Spawn called outside of externalities context!"); let (sender, receiver) = mpsc::channel(); let extra_scheduler = scheduler.clone(); - scheduler.spawn("parallel-runtime-spawn", Box::pin(async move { - let result = match crate::new_async_externalities(extra_scheduler) { - Ok(mut ext) => { - let mut ext = AssertUnwindSafe(&mut ext); - match std::panic::catch_unwind(move || { - sp_externalities::set_and_run_with_externalities( - &mut **ext, - move || entry_point(data), - ) - }) { - Ok(result) => result, - Err(panic) => { - log::error!( - target: "runtime", - "Spawned task panicked: {:?}", - panic, - ); - - // This will drop sender without sending anything. - return; + scheduler.spawn( + "parallel-runtime-spawn", + Box::pin(async move { + let result = match crate::new_async_externalities(extra_scheduler) { + Ok(mut ext) => { + let mut ext = AssertUnwindSafe(&mut ext); + match std::panic::catch_unwind(move || { + sp_externalities::set_and_run_with_externalities( + &mut **ext, + move || entry_point(data), + ) + }) { + Ok(result) => result, + Err(panic) => { + log::error!( + target: "runtime", + "Spawned task panicked: {:?}", + panic, + ); + + // This will drop sender without sending anything. 
+ return + }, } - } - }, - Err(e) => { - log::error!( - target: "runtime", - "Unable to run async task: {}", - e, - ); - - return; - }, - }; - - let _ = sender.send(result); - })); + }, + Err(e) => { + log::error!( + target: "runtime", + "Unable to run async task: {}", + e, + ); + + return + }, + }; + + let _ = sender.send(result); + }), + ); DataJoinHandle { receiver } } @@ -146,7 +153,11 @@ mod inner { /// /// NOTE: Since this dynamic dispatch function and the invoked function are compiled with /// the same compiler, there should be no problem with ABI incompatibility. - extern "C" fn dispatch_wrapper(func_ref: *const u8, payload_ptr: *mut u8, payload_len: u32) -> u64 { + extern "C" fn dispatch_wrapper( + func_ref: *const u8, + payload_ptr: *mut u8, + payload_len: u32, + ) -> u64 { let payload_len = payload_len as usize; let output = unsafe { let payload = Vec::from_raw_parts(payload_ptr, payload_len, payload_len); @@ -160,11 +171,8 @@ mod inner { pub fn spawn(entry_point: fn(Vec) -> Vec, payload: Vec) -> DataJoinHandle { let func_ptr: usize = unsafe { mem::transmute(entry_point) }; - let handle = sp_io::runtime_tasks::spawn( - dispatch_wrapper as usize as _, - func_ptr as u32, - payload, - ); + let handle = + sp_io::runtime_tasks::spawn(dispatch_wrapper as usize as _, func_ptr as u32, payload); DataJoinHandle { handle } } @@ -185,7 +193,7 @@ mod inner { } } -pub use inner::{DataJoinHandle, spawn}; +pub use inner::{spawn, DataJoinHandle}; #[cfg(test)] mod tests { @@ -211,7 +219,7 @@ mod tests { #[test] fn panicking() { - let res = sp_io::TestExternalities::default().execute_with_safe(||{ + let res = sp_io::TestExternalities::default().execute_with_safe(|| { spawn(async_panicker, vec![5, 2, 1]).join(); }); @@ -220,28 +228,30 @@ mod tests { #[test] fn many_joins() { - sp_io::TestExternalities::default().execute_with_safe(|| { - // converges to 1 only after 1000+ steps - let mut running_val = 9780657630u64; - let mut data = vec![]; - let handles = (0..1024).map( - 
|_| { - running_val = if running_val % 2 == 0 { - running_val / 2 - } else { - 3 * running_val + 1 - }; - data.push(running_val as u8); - (spawn(async_runner, data.clone()), data.clone()) + sp_io::TestExternalities::default() + .execute_with_safe(|| { + // converges to 1 only after 1000+ steps + let mut running_val = 9780657630u64; + let mut data = vec![]; + let handles = (0..1024) + .map(|_| { + running_val = if running_val % 2 == 0 { + running_val / 2 + } else { + 3 * running_val + 1 + }; + data.push(running_val as u8); + (spawn(async_runner, data.clone()), data.clone()) + }) + .collect::>(); + + for (handle, mut data) in handles { + let result = handle.join(); + data.sort(); + + assert_eq!(result, data); } - ).collect::>(); - - for (handle, mut data) in handles { - let result = handle.join(); - data.sort(); - - assert_eq!(result, data); - } - }).expect("Failed to run with externalities"); + }) + .expect("Failed to run with externalities"); } } diff --git a/primitives/test-primitives/src/lib.rs b/primitives/test-primitives/src/lib.rs index ed408f338e49a..d988160b1dc7b 100644 --- a/primitives/test-primitives/src/lib.rs +++ b/primitives/test-primitives/src/lib.rs @@ -19,13 +19,13 @@ #![cfg_attr(not(feature = "std"), no_std)] -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; -use sp_application_crypto::sr25519; pub use sp_application_crypto; +use sp_application_crypto::sr25519; pub use sp_core::{hash::H256, RuntimeDebug}; -use sp_runtime::traits::{BlakeTwo256, Verify, Extrinsic as ExtrinsicT,}; +use sp_runtime::traits::{BlakeTwo256, Extrinsic as ExtrinsicT, Verify}; /// Extrinsic for test-runtime. 
#[derive(Clone, PartialEq, Eq, Encode, Decode, RuntimeDebug)] @@ -37,7 +37,10 @@ pub enum Extrinsic { #[cfg(feature = "std")] impl serde::Serialize for Extrinsic { - fn serialize(&self, seq: S) -> Result where S: ::serde::Serializer { + fn serialize(&self, seq: S) -> Result + where + S: ::serde::Serializer, + { self.using_encoded(|bytes| seq.serialize_bytes(bytes)) } } @@ -80,8 +83,5 @@ pub type Header = sp_runtime::generic::Header; /// Changes trie configuration (optionally) used in tests. pub fn changes_trie_config() -> sp_core::ChangesTrieConfiguration { - sp_core::ChangesTrieConfiguration { - digest_interval: 4, - digest_levels: 2, - } + sp_core::ChangesTrieConfiguration { digest_interval: 4, digest_levels: 2 } } diff --git a/primitives/timestamp/src/lib.rs b/primitives/timestamp/src/lib.rs index 542522c9b8500..892d359d8e889 100644 --- a/primitives/timestamp/src/lib.rs +++ b/primitives/timestamp/src/lib.rs @@ -19,8 +19,8 @@ #![cfg_attr(not(feature = "std"), no_std)] -use codec::{Encode, Decode}; -use sp_inherents::{InherentIdentifier, IsFatalError, InherentData}; +use codec::{Decode, Encode}; +use sp_inherents::{InherentData, InherentIdentifier, IsFatalError}; use sp_std::time::Duration; /// The identifier for the `timestamp` inherent. @@ -190,10 +190,7 @@ impl InherentDataProvider { /// Create `Self` using the given `timestamp`. pub fn new(timestamp: InherentType) -> Self { - Self { - max_drift: std::time::Duration::from_secs(60).into(), - timestamp, - } + Self { max_drift: std::time::Duration::from_secs(60).into(), timestamp } } /// With the given maximum drift. @@ -249,9 +246,9 @@ impl sp_inherents::InherentDataProvider for InherentDataProvider { // halt import until timestamp is valid. // reject when too far ahead. 
if valid > timestamp + max_drift { - return Some(Err( - sp_inherents::Error::Application(Box::from(InherentError::TooFarInFuture)) - )) + return Some(Err(sp_inherents::Error::Application(Box::from( + InherentError::TooFarInFuture, + )))) } let diff = valid.checked_sub(timestamp).unwrap_or_default(); @@ -269,4 +266,3 @@ impl sp_inherents::InherentDataProvider for InherentDataProvider { } } } - diff --git a/primitives/tracing/src/lib.rs b/primitives/tracing/src/lib.rs index 95eb4d0566709..458297123be19 100644 --- a/primitives/tracing/src/lib.rs +++ b/primitives/tracing/src/lib.rs @@ -40,18 +40,16 @@ #[cfg(feature = "std")] use tracing; pub use tracing::{ - debug, debug_span, error, error_span, event, info, info_span, Level, span, Span, - trace, trace_span, warn, warn_span, + debug, debug_span, error, error_span, event, info, info_span, span, trace, trace_span, warn, + warn_span, Level, Span, }; pub use crate::types::{ - WasmEntryAttributes, WasmFieldName, WasmFields, WasmLevel, WasmMetadata, WasmValue, - WasmValuesSet + WasmEntryAttributes, WasmFieldName, WasmFields, WasmLevel, WasmMetadata, WasmValue, + WasmValuesSet, }; #[cfg(feature = "std")] -pub use crate::types::{ - WASM_NAME_KEY, WASM_TARGET_KEY, WASM_TRACE_IDENTIFIER -}; +pub use crate::types::{WASM_NAME_KEY, WASM_TARGET_KEY, WASM_TRACE_IDENTIFIER}; /// Tracing facilities and helpers. /// @@ -108,7 +106,6 @@ pub use crate::types::{ /// and call `set_tracing_subscriber` at the very beginning of your execution – /// the default subscriber is doing nothing, so any spans or events happening before /// will not be recorded! - mod types; /// Try to init a simple tracing subscriber with log compatibility layer. 
@@ -117,7 +114,8 @@ mod types; pub fn try_init_simple() { let _ = tracing_subscriber::fmt() .with_env_filter(tracing_subscriber::EnvFilter::from_default_env()) - .with_writer(std::io::stderr).try_init(); + .with_writer(std::io::stderr) + .try_init(); } /// Runs given code within a tracing span, measuring it's execution time. @@ -189,13 +187,12 @@ macro_rules! within_span { }; } - /// Enter a span - noop for `no_std` without `with-tracing` #[cfg(all(not(feature = "std"), not(feature = "with-tracing")))] #[macro_export] macro_rules! enter_span { - ( $lvl:expr, $name:expr ) => ( ); - ( $name:expr ) => ( ) // no-op + ( $lvl:expr, $name:expr ) => {}; + ( $name:expr ) => {}; // no-op } /// Enter a span. diff --git a/primitives/tracing/src/types.rs b/primitives/tracing/src/types.rs index 9fdcdfb526399..b927cf262ecf1 100644 --- a/primitives/tracing/src/types.rs +++ b/primitives/tracing/src/types.rs @@ -15,15 +15,11 @@ // See the License for the specific language governing permissions and // limitations under the License. +use codec::{Decode, Encode}; /// Types for wasm based tracing. Loosly inspired by `tracing-core` but /// optimised for the specific use case. 
- -use core::{format_args, fmt::Debug}; -use sp_std::{ - vec, vec::Vec, -}; -use sp_std::Writer; -use codec::{Encode, Decode}; +use core::{fmt::Debug, format_args}; +use sp_std::{vec, vec::Vec, Writer}; /// The Tracing Level – the user can filter by this #[derive(Clone, Encode, Decode, Debug)] @@ -37,10 +33,9 @@ pub enum WasmLevel { /// Further information for debugging purposes DEBUG, /// The lowest level, keeping track of minute detail - TRACE + TRACE, } - impl From<&tracing_core::Level> for WasmLevel { fn from(l: &tracing_core::Level) -> WasmLevel { match *l { @@ -80,41 +75,27 @@ pub enum WasmValue { impl core::fmt::Debug for WasmValue { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { match self { - WasmValue::U8(ref i) => { - f.write_fmt(format_args!("{}_u8", i)) - } - WasmValue::I8(ref i) => { - f.write_fmt(format_args!("{}_i8", i)) - } - WasmValue::U32(ref i) => { - f.write_fmt(format_args!("{}_u32", i)) - } - WasmValue::I32(ref i) => { - f.write_fmt(format_args!("{}_i32", i)) - } - WasmValue::I64(ref i) => { - f.write_fmt(format_args!("{}_i64", i)) - } - WasmValue::U64(ref i) => { - f.write_fmt(format_args!("{}_u64", i)) - } - WasmValue::Bool(ref i) => { - f.write_fmt(format_args!("{}_bool", i)) - } + WasmValue::U8(ref i) => f.write_fmt(format_args!("{}_u8", i)), + WasmValue::I8(ref i) => f.write_fmt(format_args!("{}_i8", i)), + WasmValue::U32(ref i) => f.write_fmt(format_args!("{}_u32", i)), + WasmValue::I32(ref i) => f.write_fmt(format_args!("{}_i32", i)), + WasmValue::I64(ref i) => f.write_fmt(format_args!("{}_i64", i)), + WasmValue::U64(ref i) => f.write_fmt(format_args!("{}_u64", i)), + WasmValue::Bool(ref i) => f.write_fmt(format_args!("{}_bool", i)), WasmValue::Formatted(ref i) | WasmValue::Str(ref i) => { if let Ok(v) = core::str::from_utf8(i) { f.write_fmt(format_args!("{}", v)) } else { f.write_fmt(format_args!("{:?}", i)) } - } + }, WasmValue::Encoded(ref v) => { f.write_str("Scale(")?; - for byte in v { - 
f.write_fmt(format_args!("{:02x}", byte))?; - } + for byte in v { + f.write_fmt(format_args!("{:02x}", byte))?; + } f.write_str(")") - } + }, } } } @@ -297,7 +278,6 @@ impl core::fmt::Debug for WasmValuesSet { } } - impl From)>> for WasmValuesSet { fn from(v: Vec<(WasmFieldName, Option)>) -> Self { WasmValuesSet(v) @@ -324,34 +304,20 @@ impl WasmValuesSet { impl tracing_core::field::Visit for WasmValuesSet { fn record_debug(&mut self, field: &tracing_core::field::Field, value: &dyn Debug) { - self.0.push( ( - field.name().into(), - Some(WasmValue::from(format_args!("{:?}", value))) - )) + self.0 + .push((field.name().into(), Some(WasmValue::from(format_args!("{:?}", value))))) } fn record_i64(&mut self, field: &tracing_core::field::Field, value: i64) { - self.0.push( ( - field.name().into(), - Some(WasmValue::from(value)) - )) + self.0.push((field.name().into(), Some(WasmValue::from(value)))) } fn record_u64(&mut self, field: &tracing_core::field::Field, value: u64) { - self.0.push( ( - field.name().into(), - Some(WasmValue::from(value)) - )) + self.0.push((field.name().into(), Some(WasmValue::from(value)))) } fn record_bool(&mut self, field: &tracing_core::field::Field, value: bool) { - self.0.push( ( - field.name().into(), - Some(WasmValue::from(value)) - )) + self.0.push((field.name().into(), Some(WasmValue::from(value)))) } fn record_str(&mut self, field: &tracing_core::field::Field, value: &str) { - self.0.push( ( - field.name().into(), - Some(WasmValue::from(value)) - )) + self.0.push((field.name().into(), Some(WasmValue::from(value)))) } } /// Metadata provides generic information about the specifc location of the @@ -386,7 +352,7 @@ impl From<&tracing_core::Metadata<'_>> for WasmMetadata { line: wm.line().unwrap_or_default(), module_path: wm.module_path().map(|m| m.as_bytes().to_vec()).unwrap_or_default(), is_span: wm.is_span(), - fields: wm.fields().into() + fields: wm.fields().into(), } } } @@ -417,12 +383,11 @@ impl core::default::Default for 
WasmMetadata { line: Default::default(), module_path: Default::default(), is_span: true, - fields: WasmFields::empty() + fields: WasmFields::empty(), } } } - fn decode_field(field: &[u8]) -> &str { core::str::from_utf8(field).unwrap_or_default() } @@ -445,7 +410,7 @@ impl From<&tracing_core::Event<'_>> for WasmEntryAttributes { WasmEntryAttributes { parent_id: evt.parent().map(|id| id.into_u64()), metadata: evt.metadata().into(), - fields + fields, } } } @@ -457,7 +422,7 @@ impl From<&tracing_core::span::Attributes<'_>> for WasmEntryAttributes { WasmEntryAttributes { parent_id: attrs.parent().map(|id| id.into_u64()), metadata: attrs.metadata().into(), - fields + fields, } } } @@ -480,10 +445,14 @@ mod std_features { /// Static entry use for wasm-originated metadata. pub struct WasmCallsite; impl callsite::Callsite for WasmCallsite { - fn set_interest(&self, _: tracing_core::Interest) { unimplemented!() } - fn metadata(&self) -> &tracing_core::Metadata { unimplemented!() } + fn set_interest(&self, _: tracing_core::Interest) { + unimplemented!() + } + fn metadata(&self) -> &tracing_core::Metadata { + unimplemented!() + } } - static CALLSITE: WasmCallsite = WasmCallsite; + static CALLSITE: WasmCallsite = WasmCallsite; /// The identifier we are using to inject the wasm events in the generic `tracing` system pub static WASM_TRACE_IDENTIFIER: &str = "wasm_tracing"; /// The fieldname for the wasm-originated name @@ -491,8 +460,8 @@ mod std_features { /// The fieldname for the wasm-originated target pub static WASM_TARGET_KEY: &str = "target"; /// The the list of all static field names we construct from the given metadata - pub static GENERIC_FIELDS: &[&str] = &[WASM_TARGET_KEY, WASM_NAME_KEY, - "file", "line", "module_path", "params"]; + pub static GENERIC_FIELDS: &[&str] = + &[WASM_TARGET_KEY, WASM_NAME_KEY, "file", "line", "module_path", "params"]; // Implementation Note: // the original `tracing` crate generates these static metadata entries at every `span!` and @@ 
-500,63 +469,143 @@ mod std_features { // of wasm events we need these static metadata entries to inject into that system. We then provide // generic `From`-implementations picking the right metadata to refer to. - static SPAN_ERROR_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::ERROR, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::SPAN + static SPAN_ERROR_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::ERROR, + None, + None, + None, + tracing_core::field::FieldSet::new( + GENERIC_FIELDS, + tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::SPAN, ); - static SPAN_WARN_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::WARN, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::SPAN + static SPAN_WARN_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::WARN, + None, + None, + None, + tracing_core::field::FieldSet::new( + GENERIC_FIELDS, + tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::SPAN, ); - static SPAN_INFO_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::INFO, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::SPAN + static SPAN_INFO_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::INFO, + None, + None, + 
None, + tracing_core::field::FieldSet::new( + GENERIC_FIELDS, + tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::SPAN, ); - static SPAN_DEBUG_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::DEBUG, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::SPAN + static SPAN_DEBUG_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::DEBUG, + None, + None, + None, + tracing_core::field::FieldSet::new( + GENERIC_FIELDS, + tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::SPAN, ); - static SPAN_TRACE_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::TRACE, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::SPAN + static SPAN_TRACE_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::TRACE, + None, + None, + None, + tracing_core::field::FieldSet::new( + GENERIC_FIELDS, + tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::SPAN, ); - static EVENT_ERROR_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::ERROR, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::EVENT + static EVENT_ERROR_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::ERROR, + None, + None, + None, + tracing_core::field::FieldSet::new( + 
GENERIC_FIELDS, + tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::EVENT, ); - static EVENT_WARN_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::WARN, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::EVENT + static EVENT_WARN_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::WARN, + None, + None, + None, + tracing_core::field::FieldSet::new( + GENERIC_FIELDS, + tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::EVENT, ); - static EVENT_INFO_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::INFO, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::EVENT + static EVENT_INFO_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::INFO, + None, + None, + None, + tracing_core::field::FieldSet::new( + GENERIC_FIELDS, + tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::EVENT, ); - static EVENT_DEBUG_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::DEBUG, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::EVENT + static EVENT_DEBUG_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::DEBUG, + None, + None, + None, + tracing_core::field::FieldSet::new( + GENERIC_FIELDS, + 
tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::EVENT, ); - static EVENT_TRACE_METADATA : tracing_core::Metadata<'static> = tracing::Metadata::new( - WASM_TRACE_IDENTIFIER, WASM_TRACE_IDENTIFIER, tracing::Level::TRACE, None, None, None, - tracing_core::field::FieldSet::new(GENERIC_FIELDS, tracing_core::identify_callsite!(&CALLSITE)), - tracing_core::metadata::Kind::EVENT + static EVENT_TRACE_METADATA: tracing_core::Metadata<'static> = tracing::Metadata::new( + WASM_TRACE_IDENTIFIER, + WASM_TRACE_IDENTIFIER, + tracing::Level::TRACE, + None, + None, + None, + tracing_core::field::FieldSet::new( + GENERIC_FIELDS, + tracing_core::identify_callsite!(&CALLSITE), + ), + tracing_core::metadata::Kind::EVENT, ); // FIXME: this could be done a lot in 0.2 if they opt for using `Cow` instead @@ -586,12 +635,12 @@ mod std_features { let line = a.metadata.line; let module_path = std::str::from_utf8(&a.metadata.module_path).unwrap_or_default(); let params = a.fields; - let metadata : &tracing_core::metadata::Metadata<'static> = (&a.metadata).into(); + let metadata: &tracing_core::metadata::Metadata<'static> = (&a.metadata).into(); tracing::span::Span::child_of( a.parent_id.map(tracing_core::span::Id::from_u64), &metadata, - &tracing::valueset!{ metadata.fields(), target, name, file, line, module_path, ?params } + &tracing::valueset! 
{ metadata.fields(), target, name, file, line, module_path, ?params }, ) } } @@ -605,12 +654,12 @@ mod std_features { let line = self.metadata.line; let module_path = std::str::from_utf8(&self.metadata.module_path).unwrap_or_default(); let params = self.fields; - let metadata : &tracing_core::metadata::Metadata<'static> = (&self.metadata).into(); + let metadata: &tracing_core::metadata::Metadata<'static> = (&self.metadata).into(); tracing_core::Event::child_of( self.parent_id.map(tracing_core::span::Id::from_u64), &metadata, - &tracing::valueset!{ metadata.fields(), target, name, file, line, module_path, ?params } + &tracing::valueset! { metadata.fields(), target, name, file, line, module_path, ?params }, ) } } diff --git a/primitives/transaction-pool/src/runtime_api.rs b/primitives/transaction-pool/src/runtime_api.rs index 42542d9f3c8b4..be631ee03b9d7 100644 --- a/primitives/transaction-pool/src/runtime_api.rs +++ b/primitives/transaction-pool/src/runtime_api.rs @@ -17,8 +17,10 @@ //! Tagged Transaction Queue Runtime API. -use sp_runtime::transaction_validity::{TransactionValidity, TransactionSource}; -use sp_runtime::traits::Block as BlockT; +use sp_runtime::{ + traits::Block as BlockT, + transaction_validity::{TransactionSource, TransactionValidity}, +}; sp_api::decl_runtime_apis! { /// The `TaggedTransactionQueue` api trait for interfering with the transaction queue. 
diff --git a/primitives/transaction-storage-proof/src/lib.rs b/primitives/transaction-storage-proof/src/lib.rs index 952304e64b3f3..5fd0b62934558 100644 --- a/primitives/transaction-storage-proof/src/lib.rs +++ b/primitives/transaction-storage-proof/src/lib.rs @@ -20,11 +20,11 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::{result::Result, prelude::*}; +use sp_std::{prelude::*, result::Result}; -use codec::{Encode, Decode}; -use sp_inherents::{InherentIdentifier, InherentData, IsFatalError}; -use sp_runtime::{traits::{Block as BlockT, NumberFor}}; +use codec::{Decode, Encode}; +use sp_inherents::{InherentData, InherentIdentifier, IsFatalError}; +use sp_runtime::traits::{Block as BlockT, NumberFor}; pub use sp_inherents::Error; @@ -40,7 +40,7 @@ pub const CHUNK_SIZE: usize = 256; #[cfg_attr(feature = "std", derive(Decode))] pub enum InherentError { InvalidProof, - TrieError + TrieError, } impl IsFatalError for InherentError { @@ -130,26 +130,20 @@ pub trait IndexedBody { /// /// Note that this will only fetch transactions /// that are indexed by the runtime with `storage_index_transaction`. - fn block_indexed_body( - &self, - number: NumberFor, - ) -> Result>>, Error>; + fn block_indexed_body(&self, number: NumberFor) -> Result>>, Error>; /// Get block number for a block hash. - fn number( - &self, - hash: B::Hash, - ) -> Result>, Error>; + fn number(&self, hash: B::Hash) -> Result>, Error>; } #[cfg(feature = "std")] pub mod registration { - use sp_runtime::{traits::{Block as BlockT, Saturating, Zero, One}}; - use sp_trie::TrieMut; use super::*; + use sp_runtime::traits::{Block as BlockT, One, Saturating, Zero}; + use sp_trie::TrieMut; type Hasher = sp_core::Blake2Hasher; - type TrieLayout = sp_trie::Layout::; + type TrieLayout = sp_trie::Layout; /// Create a new inherent data provider instance for a given parent block hash. 
pub fn new_data_provider( @@ -166,25 +160,24 @@ pub mod registration { .saturating_sub(DEFAULT_STORAGE_PERIOD.into()); if number.is_zero() { // Too early to collect proofs. - return Ok(InherentDataProvider::new(None)); + return Ok(InherentDataProvider::new(None)) } let proof = match client.block_indexed_body(number)? { - Some(transactions) => { - Some(build_proof(parent.as_ref(), transactions)?) - }, + Some(transactions) => Some(build_proof(parent.as_ref(), transactions)?), None => { // Nothing was indexed in that block. None - } + }, }; Ok(InherentDataProvider::new(proof)) } /// Build a proof for a given source of randomness and indexed transactions. - pub fn build_proof(random_hash: &[u8], transactions: Vec>) - -> Result - { + pub fn build_proof( + random_hash: &[u8], + transactions: Vec>, + ) -> Result { let mut db = sp_trie::MemoryDB::::default(); let mut target_chunk = None; @@ -192,7 +185,10 @@ pub mod registration { let mut target_chunk_key = Default::default(); let mut chunk_proof = Default::default(); - let total_chunks: u64 = transactions.iter().map(|t| ((t.len() + CHUNK_SIZE - 1) / CHUNK_SIZE) as u64).sum(); + let total_chunks: u64 = transactions + .iter() + .map(|t| ((t.len() + CHUNK_SIZE - 1) / CHUNK_SIZE) as u64) + .sum(); let mut buf = [0u8; 8]; buf.copy_from_slice(&random_hash[0..8]); let random_u64 = u64::from_be_bytes(buf); @@ -202,12 +198,12 @@ pub mod registration { for transaction in transactions { let mut transaction_root = sp_trie::empty_trie_root::(); { - let mut trie = sp_trie::TrieDBMut::::new(&mut db, &mut transaction_root); + let mut trie = + sp_trie::TrieDBMut::::new(&mut db, &mut transaction_root); let chunks = transaction.chunks(CHUNK_SIZE).map(|c| c.to_vec()); for (index, chunk) in chunks.enumerate() { let index = encode_index(index as u32); - trie.insert(&index, &chunk) - .map_err(|e| Error::Application(Box::new(e)))?; + trie.insert(&index, &chunk).map_err(|e| Error::Application(Box::new(e)))?; if chunk_index == target_chunk_index { 
target_chunk = Some(chunk); target_chunk_key = index; @@ -221,15 +217,13 @@ pub mod registration { chunk_proof = sp_trie::generate_trie_proof::( &db, transaction_root.clone(), - &[target_chunk_key.clone()] - ).map_err(|e| Error::Application(Box::new(e)))?; + &[target_chunk_key.clone()], + ) + .map_err(|e| Error::Application(Box::new(e)))?; } - }; + } - Ok(TransactionStorageProof { - proof: chunk_proof, - chunk: target_chunk.unwrap(), - }) + Ok(TransactionStorageProof { proof: chunk_proof, chunk: target_chunk.unwrap() }) } #[test] @@ -237,11 +231,15 @@ pub mod registration { use std::str::FromStr; let random = [0u8; 32]; let proof = build_proof(&random, vec![vec![42]]).unwrap(); - let root = sp_core::H256::from_str("0xff8611a4d212fc161dae19dd57f0f1ba9309f45d6207da13f2d3eab4c6839e91").unwrap(); + let root = sp_core::H256::from_str( + "0xff8611a4d212fc161dae19dd57f0f1ba9309f45d6207da13f2d3eab4c6839e91", + ) + .unwrap(); sp_trie::verify_trie_proof::( &root, &proof.proof, &[(encode_index(0), Some(proof.chunk))], - ).unwrap(); + ) + .unwrap(); } } diff --git a/primitives/trie/benches/bench.rs b/primitives/trie/benches/bench.rs index c2ccb31328aae..8c84c6354f2c3 100644 --- a/primitives/trie/benches/bench.rs +++ b/primitives/trie/benches/bench.rs @@ -15,7 +15,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -use criterion::{Criterion, criterion_group, criterion_main}; +use criterion::{criterion_group, criterion_main, Criterion}; criterion_group!(benches, benchmark); criterion_main!(benches); diff --git a/primitives/trie/src/error.rs b/primitives/trie/src/error.rs index bdaa49b1156f7..30a164c614755 100644 --- a/primitives/trie/src/error.rs +++ b/primitives/trie/src/error.rs @@ -15,10 +15,10 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#[cfg(feature="std")] -use std::fmt; -#[cfg(feature="std")] +#[cfg(feature = "std")] use std::error::Error as StdError; +#[cfg(feature = "std")] +use std::fmt; #[derive(Debug, PartialEq, Eq, Clone)] /// Error for trie node decoding. @@ -35,7 +35,7 @@ impl From for Error { } } -#[cfg(feature="std")] +#[cfg(feature = "std")] impl StdError for Error { fn description(&self) -> &str { match self { @@ -45,7 +45,7 @@ impl StdError for Error { } } -#[cfg(feature="std")] +#[cfg(feature = "std")] impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { diff --git a/primitives/trie/src/lib.rs b/primitives/trie/src/lib.rs index a496245637a52..8ba13284d379f 100644 --- a/primitives/trie/src/lib.rs +++ b/primitives/trie/src/lib.rs @@ -20,35 +20,36 @@ #![cfg_attr(not(feature = "std"), no_std)] mod error; -mod node_header; mod node_codec; +mod node_header; mod storage_proof; mod trie_codec; mod trie_stream; -use sp_std::{boxed::Box, marker::PhantomData, vec::Vec, borrow::Borrow}; -use hash_db::{Hasher, Prefix}; -use trie_db::proof::{generate_proof, verify_proof}; -pub use trie_db::proof::VerifyError; /// Our `NodeCodec`-specific error. pub use error::Error; -/// The Substrate format implementation of `TrieStream`. -pub use trie_stream::TrieStream; -/// The Substrate format implementation of `NodeCodec`. -pub use node_codec::NodeCodec; -pub use storage_proof::{StorageProof, CompactProof}; -/// Various re-exports from the `trie-db` crate. -pub use trie_db::{ - Trie, TrieMut, DBValue, Recorder, CError, Query, TrieLayout, TrieConfiguration, nibble_ops, TrieDBIterator, -}; -/// Various re-exports from the `memory-db` crate. -pub use memory_db::KeyFunction; -pub use memory_db::prefixed_key; /// Various re-exports from the `hash-db` crate. pub use hash_db::{HashDB as HashDBT, EMPTY_PREFIX}; +use hash_db::{Hasher, Prefix}; +pub use memory_db::prefixed_key; +/// Various re-exports from the `memory-db` crate. 
+pub use memory_db::KeyFunction; +/// The Substrate format implementation of `NodeCodec`. +pub use node_codec::NodeCodec; +use sp_std::{borrow::Borrow, boxed::Box, marker::PhantomData, vec::Vec}; +pub use storage_proof::{CompactProof, StorageProof}; /// Trie codec reexport, mainly child trie support /// for trie compact proof. pub use trie_codec::{decode_compact, encode_compact, Error as CompactProofError}; +pub use trie_db::proof::VerifyError; +use trie_db::proof::{generate_proof, verify_proof}; +/// Various re-exports from the `trie-db` crate. +pub use trie_db::{ + nibble_ops, CError, DBValue, Query, Recorder, Trie, TrieConfiguration, TrieDBIterator, + TrieLayout, TrieMut, +}; +/// The Substrate format implementation of `TrieStream`. +pub use trie_stream::TrieStream; #[derive(Default)] /// substrate trie layout @@ -62,7 +63,8 @@ impl TrieLayout for Layout { } impl TrieConfiguration for Layout { - fn trie_root(input: I) -> ::Out where + fn trie_root(input: I) -> ::Out + where I: IntoIterator, A: AsRef<[u8]> + Ord, B: AsRef<[u8]>, @@ -70,7 +72,8 @@ impl TrieConfiguration for Layout { trie_root::trie_root_no_extension::(input) } - fn trie_root_unhashed(input: I) -> Vec where + fn trie_root_unhashed(input: I) -> Vec + where I: IntoIterator, A: AsRef<[u8]> + Ord, B: AsRef<[u8]>, @@ -98,19 +101,14 @@ pub type HashDB<'a, H> = dyn hash_db::HashDB + 'a; /// Reexport from `hash_db`, with genericity set for `Hasher` trait. /// This uses a `KeyFunction` for prefixing keys internally (avoiding /// key conflict for non random keys). -pub type PrefixedMemoryDB = memory_db::MemoryDB< - H, memory_db::PrefixedKey, trie_db::DBValue, MemTracker ->; +pub type PrefixedMemoryDB = + memory_db::MemoryDB, trie_db::DBValue, MemTracker>; /// Reexport from `hash_db`, with genericity set for `Hasher` trait. /// This uses a noops `KeyFunction` (key addressing must be hashed or using /// an encoding scheme that avoid key conflict). 
-pub type MemoryDB = memory_db::MemoryDB< - H, memory_db::HashKey, trie_db::DBValue, MemTracker, ->; +pub type MemoryDB = memory_db::MemoryDB, trie_db::DBValue, MemTracker>; /// Reexport from `hash_db`, with genericity set for `Hasher` trait. -pub type GenericMemoryDB = memory_db::MemoryDB< - H, KF, trie_db::DBValue, MemTracker ->; +pub type GenericMemoryDB = memory_db::MemoryDB; /// Persistent trie database read-access interface for the a given hasher. pub type TrieDB<'a, L> = trie_db::TrieDB<'a, L>; @@ -147,8 +145,9 @@ pub fn generate_trie_proof<'a, L: TrieConfiguration, I, K, DB>( db: &DB, root: TrieHash, keys: I, -) -> Result>, Box>> where - I: IntoIterator, +) -> Result>, Box>> +where + I: IntoIterator, K: 'a + AsRef<[u8]>, DB: hash_db::HashDBRef, { @@ -168,8 +167,9 @@ pub fn verify_trie_proof<'a, L: TrieConfiguration, I, K, V>( root: &TrieHash, proof: &[Vec], items: I, -) -> Result<(), VerifyError, error::Error>> where - I: IntoIterator)>, +) -> Result<(), VerifyError, error::Error>> +where + I: IntoIterator)>, K: 'a + AsRef<[u8]>, V: 'a + AsRef<[u8]>, { @@ -180,8 +180,9 @@ pub fn verify_trie_proof<'a, L: TrieConfiguration, I, K, V>( pub fn delta_trie_root( db: &mut DB, mut root: TrieHash, - delta: I -) -> Result, Box>> where + delta: I, +) -> Result, Box>> +where I: IntoIterator, A: Borrow<[u8]>, B: Borrow>, @@ -209,7 +210,7 @@ pub fn delta_trie_root( pub fn read_trie_value>( db: &DB, root: &TrieHash, - key: &[u8] + key: &[u8], ) -> Result>, Box>> { TrieDB::::new(&*db, root)?.get(key).map(|x| x.map(|val| val.to_vec())) } @@ -217,15 +218,17 @@ pub fn read_trie_value, - DB: hash_db::HashDBRef + Q: Query, + DB: hash_db::HashDBRef, >( db: &DB, root: &TrieHash, key: &[u8], - query: Q + query: Q, ) -> Result>, Box>> { - TrieDB::::new(&*db, root)?.get_with(key, query).map(|x| x.map(|val| val.to_vec())) + TrieDB::::new(&*db, root)? + .get_with(key, query) + .map(|x| x.map(|val| val.to_vec())) } /// Determine the empty trie root. 
@@ -240,13 +243,11 @@ pub fn empty_child_trie_root() -> ::Out /// Determine a child trie root given its ordered contents, closed form. H is the default hasher, /// but a generic implementation may ignore this type parameter and use other hashers. -pub fn child_trie_root( - input: I, -) -> ::Out - where - I: IntoIterator, - A: AsRef<[u8]> + Ord, - B: AsRef<[u8]>, +pub fn child_trie_root(input: I) -> ::Out +where + I: IntoIterator, + A: AsRef<[u8]> + Ord, + B: AsRef<[u8]>, { L::trie_root(input) } @@ -259,33 +260,30 @@ pub fn child_delta_trie_root( root_data: RD, delta: I, ) -> Result<::Out, Box>> - where - I: IntoIterator, - A: Borrow<[u8]>, - B: Borrow>, - V: Borrow<[u8]>, - RD: AsRef<[u8]>, - DB: hash_db::HashDB +where + I: IntoIterator, + A: Borrow<[u8]>, + B: Borrow>, + V: Borrow<[u8]>, + RD: AsRef<[u8]>, + DB: hash_db::HashDB, { let mut root = TrieHash::::default(); // root is fetched from DB, not writable by runtime, so it's always valid. root.as_mut().copy_from_slice(root_data.as_ref()); let mut db = KeySpacedDBMut::new(&mut *db, keyspace); - delta_trie_root::( - &mut db, - root, - delta, - ) + delta_trie_root::(&mut db, root, delta) } /// Record all keys for a given root. pub fn record_all_keys( db: &DB, root: &TrieHash, - recorder: &mut Recorder> -) -> Result<(), Box>> where - DB: hash_db::HashDBRef + recorder: &mut Recorder>, +) -> Result<(), Box>> +where + DB: hash_db::HashDBRef, { let trie = TrieDB::::new(&*db, root)?; let iter = trie.iter()?; @@ -307,10 +305,10 @@ pub fn read_child_trie_value( keyspace: &[u8], db: &DB, root_slice: &[u8], - key: &[u8] + key: &[u8], ) -> Result>, Box>> - where - DB: hash_db::HashDBRef +where + DB: hash_db::HashDBRef, { let mut root = TrieHash::::default(); // root is fetched from DB, not writable by runtime, so it's always valid. @@ -321,22 +319,24 @@ pub fn read_child_trie_value( } /// Read a value from the child trie with given query. 
-pub fn read_child_trie_value_with, DB>( +pub fn read_child_trie_value_with, DB>( keyspace: &[u8], db: &DB, root_slice: &[u8], key: &[u8], - query: Q + query: Q, ) -> Result>, Box>> - where - DB: hash_db::HashDBRef +where + DB: hash_db::HashDBRef, { let mut root = TrieHash::::default(); // root is fetched from DB, not writable by runtime, so it's always valid. root.as_mut().copy_from_slice(root_slice); let db = KeySpacedDB::new(&*db, keyspace); - TrieDB::::new(&db, &root)?.get_with(key, query).map(|x| x.map(|val| val.to_vec())) + TrieDB::::new(&db, &root)? + .get_with(key, query) + .map(|x| x.map(|val| val.to_vec())) } /// `HashDB` implementation that append a encoded prefix (unique id bytes) in addition to the @@ -358,7 +358,8 @@ fn keyspace_as_prefix_alloc(ks: &[u8], prefix: Prefix) -> (Vec, Option) (result, prefix.1) } -impl<'a, DB, H> KeySpacedDB<'a, DB, H> where +impl<'a, DB, H> KeySpacedDB<'a, DB, H> +where H: Hasher, { /// instantiate new keyspaced db @@ -367,7 +368,8 @@ impl<'a, DB, H> KeySpacedDB<'a, DB, H> where } } -impl<'a, DB, H> KeySpacedDBMut<'a, DB, H> where +impl<'a, DB, H> KeySpacedDBMut<'a, DB, H> +where H: Hasher, { /// instantiate new keyspaced db @@ -376,7 +378,8 @@ impl<'a, DB, H> KeySpacedDBMut<'a, DB, H> where } } -impl<'a, DB, H, T> hash_db::HashDBRef for KeySpacedDB<'a, DB, H> where +impl<'a, DB, H, T> hash_db::HashDBRef for KeySpacedDB<'a, DB, H> +where DB: hash_db::HashDBRef, H: Hasher, T: From<&'static [u8]>, @@ -392,7 +395,8 @@ impl<'a, DB, H, T> hash_db::HashDBRef for KeySpacedDB<'a, DB, H> where } } -impl<'a, DB, H, T> hash_db::HashDB for KeySpacedDBMut<'a, DB, H> where +impl<'a, DB, H, T> hash_db::HashDB for KeySpacedDBMut<'a, DB, H> +where DB: hash_db::HashDB, H: Hasher, T: Default + PartialEq + for<'b> From<&'b [u8]> + Clone + Send + Sync, @@ -423,12 +427,15 @@ impl<'a, DB, H, T> hash_db::HashDB for KeySpacedDBMut<'a, DB, H> where } } -impl<'a, DB, H, T> hash_db::AsHashDB for KeySpacedDBMut<'a, DB, H> where +impl<'a, DB, H, T> 
hash_db::AsHashDB for KeySpacedDBMut<'a, DB, H> +where DB: hash_db::HashDB, H: Hasher, T: Default + PartialEq + for<'b> From<&'b [u8]> + Clone + Send + Sync, { - fn as_hash_db(&self) -> &dyn hash_db::HashDB { &*self } + fn as_hash_db(&self) -> &dyn hash_db::HashDB { + &*self + } fn as_hash_db_mut<'b>(&'b mut self) -> &'b mut (dyn hash_db::HashDB + 'b) { &mut *self @@ -447,12 +454,12 @@ mod trie_constants { #[cfg(test)] mod tests { use super::*; - use codec::{Encode, Decode, Compact}; - use sp_core::Blake2Hasher; + use codec::{Compact, Decode, Encode}; use hash_db::{HashDB, Hasher}; - use trie_db::{DBValue, TrieMut, Trie, NodeCodec as NodeCodecT}; - use trie_standardmap::{Alphabet, ValueMode, StandardMap}; use hex_literal::hex; + use sp_core::Blake2Hasher; + use trie_db::{DBValue, NodeCodec as NodeCodecT, Trie, TrieMut}; + use trie_standardmap::{Alphabet, StandardMap, ValueMode}; type Layout = super::Layout; @@ -491,7 +498,8 @@ mod tests { let t = TrieDB::::new(&mut memdb, &root).unwrap(); assert_eq!( input.iter().map(|(i, j)| (i.to_vec(), j.to_vec())).collect::>(), - t.iter().unwrap() + t.iter() + .unwrap() .map(|x| x.map(|y| (y.0, y.1.to_vec())).unwrap()) .collect::>() ); @@ -505,9 +513,11 @@ mod tests { let mut empty = TrieDBMut::::new(&mut db, &mut root); empty.commit(); let root1 = empty.root().as_ref().to_vec(); - let root2: Vec = Layout::trie_root::<_, Vec, Vec>( - std::iter::empty(), - ).as_ref().iter().cloned().collect(); + let root2: Vec = Layout::trie_root::<_, Vec, Vec>(std::iter::empty()) + .as_ref() + .iter() + .cloned() + .collect(); assert_eq!(root1, root2); } @@ -528,20 +538,16 @@ mod tests { #[test] fn branch_is_equivalent() { - let input: Vec<(&[u8], &[u8])> = vec![ - (&[0xaa][..], &[0x10][..]), - (&[0xba][..], &[0x11][..]), - ]; + let input: Vec<(&[u8], &[u8])> = + vec![(&[0xaa][..], &[0x10][..]), (&[0xba][..], &[0x11][..])]; check_equivalent::(&input); check_iteration::(&input); } #[test] fn extension_and_branch_is_equivalent() { - let input: 
Vec<(&[u8], &[u8])> = vec![ - (&[0xaa][..], &[0x10][..]), - (&[0xab][..], &[0x11][..]), - ]; + let input: Vec<(&[u8], &[u8])> = + vec![(&[0xaa][..], &[0x10][..]), (&[0xab][..], &[0x11][..])]; check_equivalent::(&input); check_iteration::(&input); } @@ -567,7 +573,7 @@ mod tests { let input: Vec<(&[u8], &[u8])> = vec![ (&[0xaa][..], &[0xa0][..]), (&[0xaa, 0xaa][..], &[0xaa][..]), - (&[0xaa, 0xbb][..], &[0xab][..]) + (&[0xaa, 0xbb][..], &[0xab][..]), ]; check_equivalent::(&input); check_iteration::(&input); @@ -590,7 +596,10 @@ mod tests { #[test] fn single_long_leaf_is_equivalent() { let input: Vec<(&[u8], &[u8])> = vec![ - (&[0xaa][..], &b"ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC"[..]), + ( + &[0xaa][..], + &b"ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC"[..], + ), (&[0xba][..], &[0x11][..]), ]; check_equivalent::(&input); @@ -600,8 +609,14 @@ mod tests { #[test] fn two_long_leaves_is_equivalent() { let input: Vec<(&[u8], &[u8])> = vec![ - (&[0xaa][..], &b"ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC"[..]), - (&[0xba][..], &b"ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC"[..]) + ( + &[0xaa][..], + &b"ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC"[..], + ), + ( + &[0xba][..], + &b"ABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABCABC"[..], + ), ]; check_equivalent::(&input); check_iteration::(&input); @@ -610,11 +625,11 @@ mod tests { fn populate_trie<'db, T: TrieConfiguration>( db: &'db mut dyn HashDB, root: &'db mut TrieHash, - v: &[(Vec, Vec)] + v: &[(Vec, Vec)], ) -> TrieDBMut<'db, T> { let mut t = TrieDBMut::::new(db, root); for i in 0..v.len() { - let key: &[u8]= &v[i].0; + let key: &[u8] = &v[i].0; let val: &[u8] = &v[i].1; t.insert(key, val).unwrap(); } @@ -626,7 +641,7 @@ mod tests { v: &[(Vec, Vec)], ) { for i in v { - let key: &[u8]= &i.0; + let key: &[u8] = &i.0; t.remove(key).unwrap(); } } @@ 
-644,7 +659,8 @@ mod tests { journal_key: 0, value_mode: ValueMode::Index, count: 100, - }.make_with(seed.as_fixed_bytes_mut()); + } + .make_with(seed.as_fixed_bytes_mut()); let real = Layout::trie_root(x.clone()); let mut memdb = MemoryDB::default(); @@ -690,17 +706,18 @@ mod tests { #[test] fn codec_trie_single_tuple() { - let input = vec![ - (vec![0xaa], vec![0xbb]) - ]; + let input = vec![(vec![0xaa], vec![0xbb])]; let trie = Layout::trie_root_unhashed::<_, _, _>(input); println!("trie: {:#x?}", trie); - assert_eq!(trie, vec![ - 0x42, // leaf 0x40 (2^6) with (+) key of 2 nibbles (0x02) - 0xaa, // key data - to_compact(1), // length of value in bytes as Compact - 0xbb // value data - ]); + assert_eq!( + trie, + vec![ + 0x42, // leaf 0x40 (2^6) with (+) key of 2 nibbles (0x02) + 0xaa, // key data + to_compact(1), // length of value in bytes as Compact + 0xbb // value data + ] + ); } #[test] @@ -709,21 +726,21 @@ mod tests { let trie = Layout::trie_root_unhashed::<_, _, _>(input); println!("trie: {:#x?}", trie); let mut ex = Vec::::new(); - ex.push(0x80); // branch, no value (0b_10..) no nibble - ex.push(0x12); // slots 1 & 4 are taken from 0-7 - ex.push(0x00); // no slots from 8-15 - ex.push(to_compact(0x05)); // first slot: LEAF, 5 bytes long. - ex.push(0x43); // leaf 0x40 with 3 nibbles - ex.push(0x03); // first nibble - ex.push(0x14); // second & third nibble - ex.push(to_compact(0x01)); // 1 byte data - ex.push(0xff); // value data - ex.push(to_compact(0x05)); // second slot: LEAF, 5 bytes long. - ex.push(0x43); // leaf with 3 nibbles - ex.push(0x08); // first nibble - ex.push(0x19); // second & third nibble - ex.push(to_compact(0x01)); // 1 byte data - ex.push(0xfe); // value data + ex.push(0x80); // branch, no value (0b_10..) no nibble + ex.push(0x12); // slots 1 & 4 are taken from 0-7 + ex.push(0x00); // no slots from 8-15 + ex.push(to_compact(0x05)); // first slot: LEAF, 5 bytes long. 
+ ex.push(0x43); // leaf 0x40 with 3 nibbles + ex.push(0x03); // first nibble + ex.push(0x14); // second & third nibble + ex.push(to_compact(0x01)); // 1 byte data + ex.push(0xff); // value data + ex.push(to_compact(0x05)); // second slot: LEAF, 5 bytes long. + ex.push(0x43); // leaf with 3 nibbles + ex.push(0x08); // first nibble + ex.push(0x19); // second & third nibble + ex.push(to_compact(0x01)); // 1 byte data + ex.push(0xfe); // value data assert_eq!(trie, ex); } @@ -763,27 +780,25 @@ mod tests { populate_trie::(&mut memdb, &mut root, &pairs); let non_included_key: Vec = hex!("0909").to_vec(); - let proof = generate_trie_proof::( - &memdb, - root, - &[non_included_key.clone()] - ).unwrap(); + let proof = + generate_trie_proof::(&memdb, root, &[non_included_key.clone()]) + .unwrap(); // Verifying that the K was not included into the trie should work. assert!(verify_trie_proof::>( - &root, - &proof, - &[(non_included_key.clone(), None)], - ).is_ok() - ); + &root, + &proof, + &[(non_included_key.clone(), None)], + ) + .is_ok()); // Verifying that the K was included into the trie should fail. assert!(verify_trie_proof::>( - &root, - &proof, - &[(non_included_key, Some(hex!("1010").to_vec()))], - ).is_err() - ); + &root, + &proof, + &[(non_included_key, Some(hex!("1010").to_vec()))], + ) + .is_err()); } #[test] @@ -797,71 +812,71 @@ mod tests { let mut root = Default::default(); populate_trie::(&mut memdb, &mut root, &pairs); - let proof = generate_trie_proof::( - &memdb, - root, - &[pairs[0].0.clone()] - ).unwrap(); + let proof = + generate_trie_proof::(&memdb, root, &[pairs[0].0.clone()]).unwrap(); // Check that a K, V included into the proof are verified. assert!(verify_trie_proof::( - &root, - &proof, - &[(pairs[0].0.clone(), Some(pairs[0].1.clone()))] - ).is_ok() - ); + &root, + &proof, + &[(pairs[0].0.clone(), Some(pairs[0].1.clone()))] + ) + .is_ok()); // Absence of the V is not verified with the proof that has K, V included. 
assert!(verify_trie_proof::>( - &root, - &proof, - &[(pairs[0].0.clone(), None)] - ).is_err() - ); + &root, + &proof, + &[(pairs[0].0.clone(), None)] + ) + .is_err()); // K not included into the trie is not verified. assert!(verify_trie_proof::( - &root, - &proof, - &[(hex!("4242").to_vec(), Some(pairs[0].1.clone()))] - ).is_err() - ); + &root, + &proof, + &[(hex!("4242").to_vec(), Some(pairs[0].1.clone()))] + ) + .is_err()); // K included into the trie but not included into the proof is not verified. assert!(verify_trie_proof::( - &root, - &proof, - &[(pairs[1].0.clone(), Some(pairs[1].1.clone()))] - ).is_err() - ); + &root, + &proof, + &[(pairs[1].0.clone(), Some(pairs[1].1.clone()))] + ) + .is_err()); } #[test] fn generate_storage_root_with_proof_works_independently_from_the_delta_order() { let proof = StorageProof::decode(&mut &include_bytes!("../test-res/proof")[..]).unwrap(); - let storage_root = sp_core::H256::decode( - &mut &include_bytes!("../test-res/storage_root")[..], - ).unwrap(); + let storage_root = + sp_core::H256::decode(&mut &include_bytes!("../test-res/storage_root")[..]).unwrap(); // Delta order that is "invalid" so that it would require a different proof. 
let invalid_delta = Vec::<(Vec, Option>)>::decode( &mut &include_bytes!("../test-res/invalid-delta-order")[..], - ).unwrap(); + ) + .unwrap(); // Delta order that is "valid" let valid_delta = Vec::<(Vec, Option>)>::decode( &mut &include_bytes!("../test-res/valid-delta-order")[..], - ).unwrap(); + ) + .unwrap(); let proof_db = proof.into_memory_db::(); let first_storage_root = delta_trie_root::( &mut proof_db.clone(), storage_root, valid_delta, - ).unwrap(); + ) + .unwrap(); let second_storage_root = delta_trie_root::( &mut proof_db.clone(), storage_root, invalid_delta, - ).unwrap(); + ) + .unwrap(); assert_eq!(first_storage_root, second_storage_root); } diff --git a/primitives/trie/src/node_codec.rs b/primitives/trie/src/node_codec.rs index 296f03972c795..d5ffb3219cf68 100644 --- a/primitives/trie/src/node_codec.rs +++ b/primitives/trie/src/node_codec.rs @@ -17,17 +17,16 @@ //! `NodeCodec` implementation for Substrate's trie format. -use sp_std::marker::PhantomData; -use sp_std::ops::Range; -use sp_std::vec::Vec; -use sp_std::borrow::Borrow; -use codec::{Encode, Decode, Input, Compact}; +use super::node_header::{NodeHeader, NodeKind}; +use crate::{error::Error, trie_constants}; +use codec::{Compact, Decode, Encode, Input}; use hash_db::Hasher; -use trie_db::{self, node::{NibbleSlicePlan, NodePlan, NodeHandlePlan}, ChildReference, - nibble_ops, Partial, NodeCodec as NodeCodecT}; -use crate::error::Error; -use crate::trie_constants; -use super::{node_header::{NodeHeader, NodeKind}}; +use sp_std::{borrow::Borrow, marker::PhantomData, ops::Range, vec::Vec}; +use trie_db::{ + self, nibble_ops, + node::{NibbleSlicePlan, NodeHandlePlan, NodePlan}, + ChildReference, NodeCodec as NodeCodecT, Partial, +}; /// Helper struct for trie node decoder. This implements `codec::Input` on a byte slice, while /// tracking the absolute position. 
This is similar to `std::io::Cursor` but does not implement @@ -39,15 +38,12 @@ struct ByteSliceInput<'a> { impl<'a> ByteSliceInput<'a> { fn new(data: &'a [u8]) -> Self { - ByteSliceInput { - data, - offset: 0, - } + ByteSliceInput { data, offset: 0 } } fn take(&mut self, count: usize) -> Result, codec::Error> { if self.offset + count > self.data.len() { - return Err("out of data".into()); + return Err("out of data".into()) } let range = self.offset..(self.offset + count); @@ -58,11 +54,8 @@ impl<'a> ByteSliceInput<'a> { impl<'a> Input for ByteSliceInput<'a> { fn remaining_len(&mut self) -> Result, codec::Error> { - let remaining = if self.offset <= self.data.len() { - Some(self.data.len() - self.offset) - } else { - None - }; + let remaining = + if self.offset <= self.data.len() { Some(self.data.len() - self.offset) } else { None }; Ok(remaining) } @@ -74,7 +67,7 @@ impl<'a> Input for ByteSliceInput<'a> { fn read_byte(&mut self) -> Result { if self.offset + 1 > self.data.len() { - return Err("out of data".into()); + return Err("out of data".into()) } let byte = self.data[self.offset]; @@ -103,10 +96,11 @@ impl NodeCodecT for NodeCodec { let padding = nibble_count % nibble_ops::NIBBLE_PER_BYTE != 0; // check that the padding is valid (if any) if padding && nibble_ops::pad_left(data[input.offset]) != 0 { - return Err(Error::BadFormat); + return Err(Error::BadFormat) } let partial = input.take( - (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE, + (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / + nibble_ops::NIBBLE_PER_BYTE, )?; let partial_padding = nibble_ops::number_padding(nibble_count); let bitmap_range = input.take(BITMAP_LENGTH)?; @@ -118,8 +112,8 @@ impl NodeCodecT for NodeCodec { None }; let mut children = [ - None, None, None, None, None, None, None, None, - None, None, None, None, None, None, None, None, + None, None, None, None, None, None, None, None, None, None, None, None, None, + None, None, None, ]; for i in 
0..nibble_ops::NIBBLE_LENGTH { if bitmap.value_at(i) { @@ -137,15 +131,16 @@ impl NodeCodecT for NodeCodec { value, children, }) - } + }, NodeHeader::Leaf(nibble_count) => { let padding = nibble_count % nibble_ops::NIBBLE_PER_BYTE != 0; // check that the padding is valid (if any) if padding && nibble_ops::pad_left(data[input.offset]) != 0 { - return Err(Error::BadFormat); + return Err(Error::BadFormat) } let partial = input.take( - (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / nibble_ops::NIBBLE_PER_BYTE, + (nibble_count + (nibble_ops::NIBBLE_PER_BYTE - 1)) / + nibble_ops::NIBBLE_PER_BYTE, )?; let partial_padding = nibble_ops::number_padding(nibble_count); let count = >::decode(&mut input)?.0 as usize; @@ -153,7 +148,7 @@ impl NodeCodecT for NodeCodec { partial: NibbleSlicePlan::new(partial, partial_padding), value: input.take(count)?, }) - } + }, } } @@ -199,26 +194,28 @@ impl NodeCodecT for NodeCodec { }; let bitmap_index = output.len(); let mut bitmap: [u8; BITMAP_LENGTH] = [0; BITMAP_LENGTH]; - (0..BITMAP_LENGTH).for_each(|_|output.push(0)); + (0..BITMAP_LENGTH).for_each(|_| output.push(0)); if let Some(value) = maybe_value { value.encode_to(&mut output); }; - Bitmap::encode(children.map(|maybe_child| match maybe_child.borrow() { - Some(ChildReference::Hash(h)) => { - h.as_ref().encode_to(&mut output); - true - } - &Some(ChildReference::Inline(inline_data, len)) => { - inline_data.as_ref()[..len].encode_to(&mut output); - true - } - None => false, - }), bitmap.as_mut()); + Bitmap::encode( + children.map(|maybe_child| match maybe_child.borrow() { + Some(ChildReference::Hash(h)) => { + h.as_ref().encode_to(&mut output); + true + }, + &Some(ChildReference::Inline(inline_data, len)) => { + inline_data.as_ref()[..len].encode_to(&mut output); + true + }, + None => false, + }), + bitmap.as_mut(), + ); output[bitmap_index..bitmap_index + BITMAP_LENGTH] .copy_from_slice(&bitmap[..BITMAP_LENGTH]); output } - } // utils @@ -280,11 +277,13 @@ impl Bitmap { self.0 & 
(1u16 << i) != 0 } - pub fn encode>(has_children: I , dest: &mut [u8]) { + pub fn encode>(has_children: I, dest: &mut [u8]) { let mut bitmap: u16 = 0; let mut cursor: u16 = 1; for v in has_children { - if v { bitmap |= cursor } + if v { + bitmap |= cursor + } cursor <<= 1; } dest[0] = (bitmap % 256) as u8; diff --git a/primitives/trie/src/node_header.rs b/primitives/trie/src/node_header.rs index 0fdf6fefbd0bc..9f05113a35935 100644 --- a/primitives/trie/src/node_header.rs +++ b/primitives/trie/src/node_header.rs @@ -18,12 +18,11 @@ //! The node header. use crate::trie_constants; -use codec::{Encode, Decode, Input, Output}; +use codec::{Decode, Encode, Input, Output}; use sp_std::iter::once; /// A node header -#[derive(Copy, Clone, PartialEq, Eq)] -#[derive(sp_core::RuntimeDebug)] +#[derive(Copy, Clone, PartialEq, Eq, sp_core::RuntimeDebug)] pub(crate) enum NodeHeader { Null, Branch(bool, usize), @@ -41,7 +40,7 @@ impl Encode for NodeHeader { fn encode_to(&self, output: &mut T) { match self { NodeHeader::Null => output.push_byte(trie_constants::EMPTY_TRIE), - NodeHeader::Branch(true, nibble_count) => + NodeHeader::Branch(true, nibble_count) => encode_size_and_prefix(*nibble_count, trie_constants::BRANCH_WITH_MASK, output), NodeHeader::Branch(false, nibble_count) => encode_size_and_prefix(*nibble_count, trie_constants::BRANCH_WITHOUT_MASK, output), @@ -57,12 +56,14 @@ impl Decode for NodeHeader { fn decode(input: &mut I) -> Result { let i = input.read_byte()?; if i == trie_constants::EMPTY_TRIE { - return Ok(NodeHeader::Null); + return Ok(NodeHeader::Null) } match i & (0b11 << 6) { trie_constants::LEAF_PREFIX_MASK => Ok(NodeHeader::Leaf(decode_size(i, input)?)), - trie_constants::BRANCH_WITHOUT_MASK => Ok(NodeHeader::Branch(false, decode_size(i, input)?)), - trie_constants::BRANCH_WITH_MASK => Ok(NodeHeader::Branch(true, decode_size(i, input)?)), + trie_constants::BRANCH_WITHOUT_MASK => + Ok(NodeHeader::Branch(false, decode_size(i, input)?)), + 
trie_constants::BRANCH_WITH_MASK => + Ok(NodeHeader::Branch(true, decode_size(i, input)?)), // do not allow any special encoding _ => Err("Unallowed encoding".into()), } @@ -76,11 +77,8 @@ pub(crate) fn size_and_prefix_iterator(size: usize, prefix: u8) -> impl Iterator let size = sp_std::cmp::min(trie_constants::NIBBLE_SIZE_BOUND, size); let l1 = sp_std::cmp::min(62, size); - let (first_byte, mut rem) = if size == l1 { - (once(prefix + l1 as u8), 0) - } else { - (once(prefix + 63), size - l1) - }; + let (first_byte, mut rem) = + if size == l1 { (once(prefix + l1 as u8), 0) } else { (once(prefix + 63), size - l1) }; let next_bytes = move || { if rem > 0 { if rem < 256 { @@ -109,13 +107,13 @@ fn encode_size_and_prefix(size: usize, prefix: u8, out: &mut fn decode_size(first: u8, input: &mut impl Input) -> Result { let mut result = (first & 255u8 >> 2) as usize; if result < 63 { - return Ok(result); + return Ok(result) } result -= 1; while result <= trie_constants::NIBBLE_SIZE_BOUND { let n = input.read_byte()? as usize; if n < 255 { - return Ok(result + n + 1); + return Ok(result + n + 1) } result += 255; } diff --git a/primitives/trie/src/storage_proof.rs b/primitives/trie/src/storage_proof.rs index 03668920509b8..b4e4b393a71ab 100644 --- a/primitives/trie/src/storage_proof.rs +++ b/primitives/trie/src/storage_proof.rs @@ -15,9 +15,9 @@ // See the License for the specific language governing permissions and // limitations under the License. +use codec::{Decode, Encode}; +use hash_db::{HashDB, Hasher}; use sp_std::vec::Vec; -use codec::{Encode, Decode}; -use hash_db::{Hasher, HashDB}; /// A proof that some set of key-value pairs are included in the storage trie. The proof contains /// the storage values so that the partial storage backend can be reconstructed by a verifier that @@ -48,9 +48,7 @@ impl StorageProof { /// An empty proof is capable of only proving trivial statements (ie. that an empty set of /// key-value pairs exist in storage). 
pub fn empty() -> Self { - StorageProof { - trie_nodes: Vec::new(), - } + StorageProof { trie_nodes: Vec::new() } } /// Returns whether this is an empty proof. @@ -76,8 +74,12 @@ impl StorageProof { /// Merges multiple storage proofs covering potentially different sets of keys into one proof /// covering all keys. The merged proof output may be smaller than the aggregate size of the input /// proofs due to deduplication of trie nodes. - pub fn merge(proofs: I) -> Self where I: IntoIterator { - let trie_nodes = proofs.into_iter() + pub fn merge(proofs: I) -> Self + where + I: IntoIterator, + { + let trie_nodes = proofs + .into_iter() .flat_map(|proof| proof.iter_nodes()) .collect::>() .into_iter() @@ -94,7 +96,7 @@ impl StorageProof { ) -> Result>> { crate::encode_compact::>(self, root) } - + /// Returns the estimated encoded size of the compact proof. /// /// Runing this operation is a slow operation (build the whole compact proof) and should only be @@ -104,7 +106,6 @@ impl StorageProof { let compact_proof = self.into_compact_proof::(root); compact_proof.ok().map(|p| p.encoded_size()) } - } impl CompactProof { @@ -127,13 +128,15 @@ impl CompactProof { self.iter_compact_encoded_nodes(), expected_root, )?; - Ok((StorageProof::new(db.drain().into_iter().filter_map(|kv| - if (kv.1).1 > 0 { - Some((kv.1).0) - } else { - None - } - ).collect()), root)) + Ok(( + StorageProof::new( + db.drain() + .into_iter() + .filter_map(|kv| if (kv.1).1 > 0 { Some((kv.1).0) } else { None }) + .collect(), + ), + root, + )) } } @@ -145,9 +148,7 @@ pub struct StorageProofNodeIterator { impl StorageProofNodeIterator { fn new(proof: StorageProof) -> Self { - StorageProofNodeIterator { - inner: proof.trie_nodes.into_iter(), - } + StorageProofNodeIterator { inner: proof.trie_nodes.into_iter() } } } diff --git a/primitives/trie/src/trie_codec.rs b/primitives/trie/src/trie_codec.rs index efe3223580f3f..ed5724e0455d1 100644 --- a/primitives/trie/src/trie_codec.rs +++ 
b/primitives/trie/src/trie_codec.rs @@ -21,17 +21,14 @@ //! it to substrate specific layout and child trie system. use crate::{ - EMPTY_PREFIX, HashDBT, TrieHash, TrieError, TrieConfiguration, - CompactProof, StorageProof, + CompactProof, HashDBT, StorageProof, TrieConfiguration, TrieError, TrieHash, EMPTY_PREFIX, }; -use sp_std::boxed::Box; -use sp_std::vec::Vec; -use trie_db::Trie; -#[cfg(feature="std")] -use std::fmt; -#[cfg(feature="std")] +use sp_std::{boxed::Box, vec::Vec}; +#[cfg(feature = "std")] use std::error::Error as StdError; - +#[cfg(feature = "std")] +use std::fmt; +use trie_db::Trie; /// Error for trie node decoding. pub enum Error { @@ -55,7 +52,7 @@ impl From>> for Error { } } -#[cfg(feature="std")] +#[cfg(feature = "std")] impl StdError for Error { fn description(&self) -> &str { match self { @@ -69,14 +66,14 @@ impl StdError for Error { } } -#[cfg(feature="std")] +#[cfg(feature = "std")] impl fmt::Debug for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ::fmt(&self, f) } } -#[cfg(feature="std")] +#[cfg(feature = "std")] impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { @@ -84,7 +81,8 @@ impl fmt::Display for Error { Error::TrieError(e) => write!(f, "Trie error: {}", e), Error::IncompleteProof => write!(f, "Incomplete proof"), Error::ExtraneousChildNode => write!(f, "Child node content with no root in proof"), - Error::ExtraneousChildProof(root) => write!(f, "Proof of child trie {:x?} not in parent proof", root.as_ref()), + Error::ExtraneousChildProof(root) => + write!(f, "Proof of child trie {:x?} not in parent proof", root.as_ref()), Error::RootMismatch(root, expected) => write!( f, "Verification error, root is {:x?}, expected: {:x?}", @@ -107,21 +105,19 @@ pub fn decode_compact<'a, L, DB, I>( encoded: I, expected_root: Option<&TrieHash>, ) -> Result, Error> - where - L: TrieConfiguration, - DB: HashDBT + hash_db::HashDBRef, - I: IntoIterator, +where + L: TrieConfiguration, + DB: 
HashDBT + hash_db::HashDBRef, + I: IntoIterator, { let mut nodes_iter = encoded.into_iter(); - let (top_root, _nb_used) = trie_db::decode_compact_from_iter::( - db, - &mut nodes_iter, - )?; + let (top_root, _nb_used) = + trie_db::decode_compact_from_iter::(db, &mut nodes_iter)?; // Only check root if expected root is passed as argument. if let Some(expected_root) = expected_root { if expected_root != &top_root { - return Err(Error::RootMismatch(top_root.clone(), expected_root.clone())); + return Err(Error::RootMismatch(top_root.clone(), expected_root.clone())) } } @@ -142,7 +138,7 @@ pub fn decode_compact<'a, L, DB, I>( let mut root = TrieHash::::default(); // still in a proof so prevent panic if root.as_mut().len() != value.as_slice().len() { - return Err(Error::InvalidChildRoot(key, value)); + return Err(Error::InvalidChildRoot(key, value)) } root.as_mut().copy_from_slice(value.as_ref()); child_tries.push(root); @@ -160,16 +156,14 @@ pub fn decode_compact<'a, L, DB, I>( } if !HashDBT::::contains(db, &top_root, EMPTY_PREFIX) { - return Err(Error::IncompleteProof); + return Err(Error::IncompleteProof) } let mut previous_extracted_child_trie = None; for child_root in child_tries.into_iter() { if previous_extracted_child_trie.is_none() { - let (top_root, _) = trie_db::decode_compact_from_iter::( - db, - &mut nodes_iter, - )?; + let (top_root, _) = + trie_db::decode_compact_from_iter::(db, &mut nodes_iter)?; previous_extracted_child_trie = Some(top_root); } @@ -184,11 +178,11 @@ pub fn decode_compact<'a, L, DB, I>( if let Some(child_root) = previous_extracted_child_trie { // A child root was read from proof but is not present // in top trie. 
- return Err(Error::ExtraneousChildProof(child_root)); + return Err(Error::ExtraneousChildProof(child_root)) } if nodes_iter.next().is_some() { - return Err(Error::ExtraneousChildNode); + return Err(Error::ExtraneousChildNode) } Ok(top_root) @@ -201,12 +195,9 @@ pub fn decode_compact<'a, L, DB, I>( /// Then parse all child trie root and compress main trie content first /// then all child trie contents. /// Child trie are ordered by the order of their roots in the top trie. -pub fn encode_compact( - proof: StorageProof, - root: TrieHash, -) -> Result> - where - L: TrieConfiguration, +pub fn encode_compact(proof: StorageProof, root: TrieHash) -> Result> +where + L: TrieConfiguration, { let mut child_tries = Vec::new(); let partial_db = proof.into_memory_db(); @@ -223,7 +214,7 @@ pub fn encode_compact( let mut root = TrieHash::::default(); if root.as_mut().len() != value.as_slice().len() { // some child trie root in top trie are not an encoded hash. - return Err(Error::InvalidChildRoot(key.to_vec(), value.to_vec())); + return Err(Error::InvalidChildRoot(key.to_vec(), value.to_vec())) } root.as_mut().copy_from_slice(value.as_ref()); child_tries.push(root); @@ -246,7 +237,7 @@ pub fn encode_compact( if !HashDBT::::contains(&partial_db, &child_root, EMPTY_PREFIX) { // child proof are allowed to be missing (unused root can be included // due to trie structure modification). - continue; + continue } let trie = crate::TrieDB::::new(&partial_db, &child_root)?; diff --git a/primitives/trie/src/trie_stream.rs b/primitives/trie/src/trie_stream.rs index 3a65c5a9190b4..e0e26fea67c2e 100644 --- a/primitives/trie/src/trie_stream.rs +++ b/primitives/trie/src/trie_stream.rs @@ -17,13 +17,15 @@ //! `TrieStream` implementation for Substrate's trie format. 
-use hash_db::Hasher; -use trie_root; +use crate::{ + node_codec::Bitmap, + node_header::{size_and_prefix_iterator, NodeKind}, + trie_constants, +}; use codec::Encode; +use hash_db::Hasher; use sp_std::vec::Vec; -use crate::trie_constants; -use crate::node_header::{NodeKind, size_and_prefix_iterator}; -use crate::node_codec::Bitmap; +use trie_root; const BRANCH_NODE_NO_VALUE: u8 = 254; const BRANCH_NODE_WITH_VALUE: u8 = 255; @@ -36,41 +38,42 @@ pub struct TrieStream { impl TrieStream { // useful for debugging but not used otherwise - pub fn as_raw(&self) -> &[u8] { &self.buffer } + pub fn as_raw(&self) -> &[u8] { + &self.buffer + } } fn branch_node_bit_mask(has_children: impl Iterator) -> (u8, u8) { let mut bitmap: u16 = 0; let mut cursor: u16 = 1; for v in has_children { - if v { bitmap |= cursor } + if v { + bitmap |= cursor + } cursor <<= 1; } - ((bitmap % 256 ) as u8, (bitmap / 256 ) as u8) + ((bitmap % 256) as u8, (bitmap / 256) as u8) } - /// Create a leaf/branch node, encoding a number of nibbles. 
fn fuse_nibbles_node<'a>(nibbles: &'a [u8], kind: NodeKind) -> impl Iterator + 'a { let size = sp_std::cmp::min(trie_constants::NIBBLE_SIZE_BOUND, nibbles.len()); let iter_start = match kind { NodeKind::Leaf => size_and_prefix_iterator(size, trie_constants::LEAF_PREFIX_MASK), - NodeKind::BranchNoValue => size_and_prefix_iterator(size, trie_constants::BRANCH_WITHOUT_MASK), - NodeKind::BranchWithValue => size_and_prefix_iterator(size, trie_constants::BRANCH_WITH_MASK), + NodeKind::BranchNoValue => + size_and_prefix_iterator(size, trie_constants::BRANCH_WITHOUT_MASK), + NodeKind::BranchWithValue => + size_and_prefix_iterator(size, trie_constants::BRANCH_WITH_MASK), }; iter_start .chain(if nibbles.len() % 2 == 1 { Some(nibbles[0]) } else { None }) .chain(nibbles[nibbles.len() % 2..].chunks(2).map(|ch| ch[0] << 4 | ch[1])) } - impl trie_root::TrieStream for TrieStream { - fn new() -> Self { - TrieStream { - buffer: Vec::new() - } + TrieStream { buffer: Vec::new() } } fn append_empty_data(&mut self) { @@ -95,7 +98,7 @@ impl trie_root::TrieStream for TrieStream { self.buffer.extend(fuse_nibbles_node(partial, NodeKind::BranchNoValue)); } let bm = branch_node_bit_mask(has_children); - self.buffer.extend([bm.0,bm.1].iter()); + self.buffer.extend([bm.0, bm.1].iter()); } else { debug_assert!(false, "trie stream codec only for no extension trie"); self.buffer.extend(&branch_node(maybe_value.is_some(), has_children)); @@ -117,7 +120,9 @@ impl trie_root::TrieStream for TrieStream { } } - fn out(self) -> Vec { self.buffer } + fn out(self) -> Vec { + self.buffer + } } fn branch_node(has_value: bool, has_children: impl Iterator) -> [u8; 3] { @@ -126,15 +131,11 @@ fn branch_node(has_value: bool, has_children: impl Iterator) -> [u8 result } -fn branch_node_buffered(has_value: bool, has_children: I, output: &mut[u8]) - where - I: Iterator, +fn branch_node_buffered(has_value: bool, has_children: I, output: &mut [u8]) +where + I: Iterator, { - let first = if has_value { - 
BRANCH_NODE_WITH_VALUE - } else { - BRANCH_NODE_NO_VALUE - }; + let first = if has_value { BRANCH_NODE_WITH_VALUE } else { BRANCH_NODE_NO_VALUE }; output[0] = first; Bitmap::encode(has_children, &mut output[1..]); } diff --git a/primitives/utils/src/metrics.rs b/primitives/utils/src/metrics.rs index 45d68ae4e6f70..45d8b3b7311d7 100644 --- a/primitives/utils/src/metrics.rs +++ b/primitives/utils/src/metrics.rs @@ -19,22 +19,20 @@ use lazy_static::lazy_static; use prometheus::{ - Registry, Error as PrometheusError, - core::{ AtomicU64, GenericGauge, GenericCounter }, + core::{AtomicU64, GenericCounter, GenericGauge}, + Error as PrometheusError, Registry, }; #[cfg(feature = "metered")] use prometheus::{core::GenericCounterVec, Opts}; - lazy_static! { - pub static ref TOKIO_THREADS_TOTAL: GenericCounter = GenericCounter::new( - "tokio_threads_total", "Total number of threads created" - ).expect("Creating of statics doesn't fail. qed"); - - pub static ref TOKIO_THREADS_ALIVE: GenericGauge = GenericGauge::new( - "tokio_threads_alive", "Number of threads alive right now" - ).expect("Creating of statics doesn't fail. qed"); + pub static ref TOKIO_THREADS_TOTAL: GenericCounter = + GenericCounter::new("tokio_threads_total", "Total number of threads created") + .expect("Creating of statics doesn't fail. qed"); + pub static ref TOKIO_THREADS_ALIVE: GenericGauge = + GenericGauge::new("tokio_threads_alive", "Number of threads alive right now") + .expect("Creating of statics doesn't fail. qed"); } #[cfg(feature = "metered")] @@ -46,7 +44,6 @@ lazy_static! 
{ } - /// Register the statics to report to registry pub fn register_globals(registry: &Registry) -> Result<(), PrometheusError> { registry.register(Box::new(TOKIO_THREADS_ALIVE.clone()))?; diff --git a/primitives/utils/src/mpsc.rs b/primitives/utils/src/mpsc.rs index b033a5527d84a..ff4f11873db7b 100644 --- a/primitives/utils/src/mpsc.rs +++ b/primitives/utils/src/mpsc.rs @@ -25,22 +25,26 @@ mod inner { pub type TracingUnboundedReceiver = UnboundedReceiver; /// Alias `mpsc::unbounded` - pub fn tracing_unbounded(_key: &'static str) ->(TracingUnboundedSender, TracingUnboundedReceiver) { + pub fn tracing_unbounded( + _key: &'static str, + ) -> (TracingUnboundedSender, TracingUnboundedReceiver) { mpsc::unbounded() } } - #[cfg(feature = "metered")] mod inner { //tracing implementation - use futures::channel::mpsc::{self, - UnboundedReceiver, UnboundedSender, - TryRecvError, TrySendError, SendError + use crate::metrics::UNBOUNDED_CHANNELS_COUNTER; + use futures::{ + channel::mpsc::{ + self, SendError, TryRecvError, TrySendError, UnboundedReceiver, UnboundedSender, + }, + sink::Sink, + stream::{FusedStream, Stream}, + task::{Context, Poll}, }; - use futures::{sink::Sink, task::{Poll, Context}, stream::{Stream, FusedStream}}; use std::pin::Pin; - use crate::metrics::UNBOUNDED_CHANNELS_COUNTER; /// Wrapper Type around `UnboundedSender` that increases the global /// measure when a message is added @@ -61,9 +65,11 @@ mod inner { /// Wrapper around `mpsc::unbounded` that tracks the in- and outflow via /// `UNBOUNDED_CHANNELS_COUNTER` - pub fn tracing_unbounded(key: &'static str) ->(TracingUnboundedSender, TracingUnboundedReceiver) { + pub fn tracing_unbounded( + key: &'static str, + ) -> (TracingUnboundedSender, TracingUnboundedReceiver) { let (s, r) = mpsc::unbounded(); - (TracingUnboundedSender(key, s), TracingUnboundedReceiver(key,r)) + (TracingUnboundedSender(key, s), TracingUnboundedReceiver(key, r)) } impl TracingUnboundedSender { @@ -94,7 +100,7 @@ mod inner { /// Proxy 
function to mpsc::UnboundedSender pub fn unbounded_send(&self, msg: T) -> Result<(), TrySendError> { - self.1.unbounded_send(msg).map(|s|{ + self.1.unbounded_send(msg).map(|s| { UNBOUNDED_CHANNELS_COUNTER.with_label_values(&[self.0, &"send"]).inc(); s }) @@ -107,25 +113,25 @@ mod inner { } impl TracingUnboundedReceiver { - fn consume(&mut self) { // consume all items, make sure to reflect the updated count let mut count = 0; loop { if self.1.is_terminated() { - break; + break } match self.try_next() { Ok(Some(..)) => count += 1, - _ => break + _ => break, } } // and discount the messages if count > 0 { - UNBOUNDED_CHANNELS_COUNTER.with_label_values(&[self.0, &"dropped"]).inc_by(count); + UNBOUNDED_CHANNELS_COUNTER + .with_label_values(&[self.0, &"dropped"]) + .inc_by(count); } - } /// Proxy function to mpsc::UnboundedReceiver @@ -158,21 +164,16 @@ mod inner { impl Stream for TracingUnboundedReceiver { type Item = T; - fn poll_next( - self: Pin<&mut Self>, - cx: &mut Context<'_>, - ) -> Poll> { + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { let s = self.get_mut(); match Pin::new(&mut s.1).poll_next(cx) { Poll::Ready(msg) => { if msg.is_some() { UNBOUNDED_CHANNELS_COUNTER.with_label_values(&[s.0, "received"]).inc(); - } + } Poll::Ready(msg) - } - Poll::Pending => { - Poll::Pending - } + }, + Poll::Pending => Poll::Pending, } } } @@ -186,24 +187,15 @@ mod inner { impl Sink for TracingUnboundedSender { type Error = SendError; - fn poll_ready( - self: Pin<&mut Self>, - cx: &mut Context<'_>, - ) -> Poll> { + fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { TracingUnboundedSender::poll_ready(&*self, cx) } - fn start_send( - mut self: Pin<&mut Self>, - msg: T, - ) -> Result<(), Self::Error> { + fn start_send(mut self: Pin<&mut Self>, msg: T) -> Result<(), Self::Error> { TracingUnboundedSender::start_send(&mut *self, msg) } - fn poll_flush( - self: Pin<&mut Self>, - _: &mut Context<'_>, - ) -> Poll> { + fn poll_flush(self: Pin<&mut 
Self>, _: &mut Context<'_>) -> Poll> { Poll::Ready(Ok(())) } @@ -219,33 +211,23 @@ mod inner { impl Sink for &TracingUnboundedSender { type Error = SendError; - fn poll_ready( - self: Pin<&mut Self>, - cx: &mut Context<'_>, - ) -> Poll> { + fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { TracingUnboundedSender::poll_ready(*self, cx) } fn start_send(self: Pin<&mut Self>, msg: T) -> Result<(), Self::Error> { - self.unbounded_send(msg) - .map_err(TrySendError::into_send_error) + self.unbounded_send(msg).map_err(TrySendError::into_send_error) } - fn poll_flush( - self: Pin<&mut Self>, - _: &mut Context<'_>, - ) -> Poll> { + fn poll_flush(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll> { Poll::Ready(Ok(())) } - fn poll_close( - self: Pin<&mut Self>, - _: &mut Context<'_>, - ) -> Poll> { + fn poll_close(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll> { self.close_channel(); Poll::Ready(Ok(())) } } } -pub use inner::{tracing_unbounded, TracingUnboundedSender, TracingUnboundedReceiver}; +pub use inner::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; diff --git a/primitives/utils/src/status_sinks.rs b/primitives/utils/src/status_sinks.rs index dc8115670de1e..0870ab1192996 100644 --- a/primitives/utils/src/status_sinks.rs +++ b/primitives/utils/src/status_sinks.rs @@ -16,9 +16,13 @@ // limitations under the License. use crate::mpsc::{tracing_unbounded, TracingUnboundedReceiver, TracingUnboundedSender}; -use futures::{prelude::*, lock::Mutex}; +use futures::{lock::Mutex, prelude::*}; use futures_timer::Delay; -use std::{pin::Pin, task::{Poll, Context}, time::Duration}; +use std::{ + pin::Pin, + task::{Context, Poll}, + time::Duration, +}; /// Holds a list of `UnboundedSender`s, each associated with a certain time period. Every time the /// period elapses, we push an element on the sender. 
@@ -44,7 +48,7 @@ struct YieldAfter { sender: Option>, } -impl Default for StatusSinks { +impl Default for StatusSinks { fn default() -> Self { Self::new() } @@ -56,10 +60,7 @@ impl StatusSinks { let (entries_tx, entries_rx) = tracing_unbounded("status-sinks-entries"); StatusSinks { - inner: Mutex::new(Inner { - entries: stream::FuturesUnordered::new(), - entries_rx, - }), + inner: Mutex::new(Inner { entries: stream::FuturesUnordered::new(), entries_rx }), entries_tx, } } @@ -100,7 +101,7 @@ impl StatusSinks { } }; - futures::select!{ + futures::select! { new_entry = inner.entries_rx.next() => { if let Some(new_entry) = new_entry { inner.entries.push(new_entry); @@ -149,7 +150,7 @@ impl<'a, T> Drop for ReadySinkEvent<'a, T> { fn drop(&mut self) { if let Some(sender) = self.sender.take() { if sender.is_closed() { - return; + return } let _ = self.sinks.entries_tx.unbounded_send(YieldAfter { @@ -170,18 +171,20 @@ impl futures::Future for YieldAfter { match Pin::new(&mut this.delay).poll(cx) { Poll::Pending => Poll::Pending, Poll::Ready(()) => { - let sender = this.sender.take() + let sender = this + .sender + .take() .expect("sender is always Some unless the future is finished; qed"); Poll::Ready((sender, this.interval)) - } + }, } } } #[cfg(test)] mod tests { - use crate::mpsc::tracing_unbounded; use super::StatusSinks; + use crate::mpsc::tracing_unbounded; use futures::prelude::*; use std::time::Duration; @@ -208,7 +211,7 @@ mod tests { Box::pin(async { let items: Vec = rx.take(3).collect().await; assert_eq!(items, [6, 7, 8]); - }) + }), )); } } diff --git a/primitives/version/proc-macro/src/decl_runtime_version.rs b/primitives/version/proc-macro/src/decl_runtime_version.rs index 22803f07d811d..918a3d98101f7 100644 --- a/primitives/version/proc-macro/src/decl_runtime_version.rs +++ b/primitives/version/proc-macro/src/decl_runtime_version.rs @@ -16,14 +16,14 @@ // limitations under the License. 
use codec::Encode; +use proc_macro2::{Span, TokenStream}; +use quote::quote; use syn::{ - Expr, ExprLit, FieldValue, ItemConst, Lit, - parse::{Result, Error}, + parse::{Error, Result}, parse_macro_input, spanned::Spanned as _, + Expr, ExprLit, FieldValue, ItemConst, Lit, }; -use quote::quote; -use proc_macro2::{TokenStream, Span}; /// This macro accepts a `const` item that has a struct initializer expression of `RuntimeVersion`-like type. /// The macro will pass through this declaration and append an item declaration that will @@ -79,11 +79,8 @@ impl ParseRuntimeVersion { let init_expr = match init_expr { Expr::Struct(ref e) => e, _ => { - return Err(Error::new( - init_expr.span(), - "expected a struct initializer expression", - )); - } + return Err(Error::new(init_expr.span(), "expected a struct initializer expression")) + }, }; let mut parsed = ParseRuntimeVersion::default(); @@ -97,11 +94,8 @@ impl ParseRuntimeVersion { let field_name = match field_value.member { syn::Member::Named(ref ident) => ident, syn::Member::Unnamed(_) => { - return Err(Error::new( - field_value.span(), - "only named members must be used", - )); - } + return Err(Error::new(field_value.span(), "only named members must be used")) + }, }; fn parse_once( @@ -110,10 +104,7 @@ impl ParseRuntimeVersion { parser: impl FnOnce(&Expr) -> Result, ) -> Result<()> { if value.is_some() { - return Err(Error::new( - field.span(), - "field is already initialized before", - )); + return Err(Error::new(field.span(), "field is already initialized before")) } else { *value = Some(parser(&field.expr)?); Ok(()) @@ -125,21 +116,13 @@ impl ParseRuntimeVersion { } else if field_name == "impl_name" { parse_once(&mut self.impl_name, field_value, Self::parse_str_literal)?; } else if field_name == "authoring_version" { - parse_once( - &mut self.authoring_version, - field_value, - Self::parse_num_literal, - )?; + parse_once(&mut self.authoring_version, field_value, Self::parse_num_literal)?; } else if field_name == 
"spec_version" { parse_once(&mut self.spec_version, field_value, Self::parse_num_literal)?; } else if field_name == "impl_version" { parse_once(&mut self.impl_version, field_value, Self::parse_num_literal)?; } else if field_name == "transaction_version" { - parse_once( - &mut self.transaction_version, - field_value, - Self::parse_num_literal, - )?; + parse_once(&mut self.transaction_version, field_value, Self::parse_num_literal)?; } else if field_name == "apis" { // Intentionally ignored // @@ -147,7 +130,7 @@ impl ParseRuntimeVersion { // the "runtime_version" custom section. `impl_runtime_apis` is responsible for generating // a custom section with the supported runtime apis descriptor. } else { - return Err(Error::new(field_name.span(), "unknown field")); + return Err(Error::new(field_name.span(), "unknown field")) } Ok(()) @@ -155,16 +138,13 @@ impl ParseRuntimeVersion { fn parse_num_literal(expr: &Expr) -> Result { let lit = match *expr { - Expr::Lit(ExprLit { - lit: Lit::Int(ref lit), - .. - }) => lit, + Expr::Lit(ExprLit { lit: Lit::Int(ref lit), .. }) => lit, _ => { return Err(Error::new( expr.span(), "only numeric literals (e.g. `10`) are supported here", - )); - } + )) + }, }; lit.base10_parse::() } @@ -173,43 +153,29 @@ impl ParseRuntimeVersion { let mac = match *expr { Expr::Macro(syn::ExprMacro { ref mac, .. 
}) => mac, _ => { - return Err(Error::new( - expr.span(), - "a macro expression is expected here", - )); - } + return Err(Error::new(expr.span(), "a macro expression is expected here")) + }, }; let lit: ExprLit = mac.parse_body().map_err(|e| { Error::new( e.span(), - format!( - "a single literal argument is expected, but parsing is failed: {}", - e - ), + format!("a single literal argument is expected, but parsing is failed: {}", e), ) })?; match lit.lit { Lit::Str(ref lit) => Ok(lit.value()), - _ => Err(Error::new( - lit.span(), - "only string literals are supported here", - )), + _ => Err(Error::new(lit.span(), "only string literals are supported here")), } } fn build(self, span: Span) -> Result { macro_rules! required { ($e:expr) => { - $e.ok_or_else(|| - { - Error::new( - span, - format!("required field '{}' is missing", stringify!($e)), - ) - } - )? + $e.ok_or_else(|| { + Error::new(span, format!("required field '{}' is missing", stringify!($e))) + })? }; } diff --git a/primitives/version/src/embed.rs b/primitives/version/src/embed.rs index f32bc73d883aa..452762dcf687a 100644 --- a/primitives/version/src/embed.rs +++ b/primitives/version/src/embed.rs @@ -19,7 +19,7 @@ //! into a WASM file. 
use codec::Encode; -use parity_wasm::elements::{Module, deserialize_buffer, serialize}; +use parity_wasm::elements::{deserialize_buffer, serialize, Module}; #[derive(Clone, Copy, Eq, PartialEq, Debug, thiserror::Error)] pub enum Error { @@ -40,7 +40,8 @@ pub fn embed_runtime_version( ) -> Result, Error> { let mut module: Module = deserialize_buffer(wasm).map_err(|_| Error::Deserialize)?; - let apis = version.apis + let apis = version + .apis .iter() .map(Encode::encode) .map(|v| v.into_iter()) diff --git a/primitives/version/src/lib.rs b/primitives/version/src/lib.rs index c4f1378e81e87..4b212f35f4907 100644 --- a/primitives/version/src/lib.rs +++ b/primitives/version/src/lib.rs @@ -20,21 +20,21 @@ #![cfg_attr(not(feature = "std"), no_std)] #[cfg(feature = "std")] -use serde::{Serialize, Deserialize}; -#[cfg(feature = "std")] -use std::fmt; +use serde::{Deserialize, Serialize}; #[cfg(feature = "std")] use std::collections::HashSet; +#[cfg(feature = "std")] +use std::fmt; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; -use sp_runtime::RuntimeString; pub use sp_runtime::create_runtime_str; +use sp_runtime::RuntimeString; #[doc(hidden)] pub use sp_std; #[cfg(feature = "std")] -use sp_runtime::{traits::Block as BlockT, generic::BlockId}; +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; #[cfg(feature = "std")] pub mod embed; @@ -107,7 +107,9 @@ pub type ApisVec = sp_std::borrow::Cow<'static, [(ApiId, u32)]>; /// Create a vector of Api declarations. #[macro_export] macro_rules! create_apis_vec { - ( $y:expr ) => { $crate::sp_std::borrow::Cow::Borrowed(& $y) } + ( $y:expr ) => { + $crate::sp_std::borrow::Cow::Borrowed(&$y) + }; } /// Runtime version. 
@@ -173,7 +175,9 @@ pub struct RuntimeVersion { #[cfg(feature = "std")] impl fmt::Display for RuntimeVersion { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}-{} ({}-{}.tx{}.au{})", + write!( + f, + "{}-{} ({}-{}.tx{}.au{})", self.spec_name, self.spec_version, self.impl_name, @@ -189,17 +193,13 @@ impl RuntimeVersion { /// Check if this version matches other version for calling into runtime. pub fn can_call_with(&self, other: &RuntimeVersion) -> bool { self.spec_version == other.spec_version && - self.spec_name == other.spec_name && - self.authoring_version == other.authoring_version + self.spec_name == other.spec_name && + self.authoring_version == other.authoring_version } /// Check if the given api with `api_id` is implemented and the version passes the given /// `predicate`. - pub fn has_api_with bool>( - &self, - id: &ApiId, - predicate: P, - ) -> bool { + pub fn has_api_with bool>(&self, id: &ApiId, predicate: P) -> bool { self.apis.iter().any(|(s, v)| s == id && predicate(*v)) } @@ -230,11 +230,10 @@ impl NativeVersion { if self.runtime_version.spec_name != other.spec_name { Err(format!( "`spec_name` does not match `{}` vs `{}`", - self.runtime_version.spec_name, - other.spec_name, + self.runtime_version.spec_name, other.spec_name, )) - } else if self.runtime_version.authoring_version != other.authoring_version - && !self.can_author_with.contains(&other.authoring_version) + } else if self.runtime_version.authoring_version != other.authoring_version && + !self.can_author_with.contains(&other.authoring_version) { Err(format!( "`authoring_version` does not match `{version}` vs `{other_version}` and \ @@ -273,15 +272,13 @@ impl, Block: BlockT> GetRuntimeVersion for st mod apis_serialize { use super::*; use impl_serde::serialize as bytes; - use serde::{Serializer, de, ser::SerializeTuple}; + use serde::{de, ser::SerializeTuple, Serializer}; #[derive(Serialize)] - struct ApiId<'a>( - #[serde(serialize_with="serialize_bytesref")] &'a 
super::ApiId, - &'a u32, - ); + struct ApiId<'a>(#[serde(serialize_with = "serialize_bytesref")] &'a super::ApiId, &'a u32); - pub fn serialize(apis: &ApisVec, ser: S) -> Result where + pub fn serialize(apis: &ApisVec, ser: S) -> Result + where S: Serializer, { let len = apis.len(); @@ -292,20 +289,18 @@ mod apis_serialize { seq.end() } - pub fn serialize_bytesref(&apis: &&super::ApiId, ser: S) -> Result where + pub fn serialize_bytesref(&apis: &&super::ApiId, ser: S) -> Result + where S: Serializer, { bytes::serialize(apis, ser) } #[derive(Deserialize)] - struct ApiIdOwned( - #[serde(deserialize_with="deserialize_bytes")] - super::ApiId, - u32, - ); + struct ApiIdOwned(#[serde(deserialize_with = "deserialize_bytes")] super::ApiId, u32); - pub fn deserialize<'de, D>(deserializer: D) -> Result where + pub fn deserialize<'de, D>(deserializer: D) -> Result + where D: de::Deserializer<'de>, { struct Visitor; @@ -316,7 +311,8 @@ mod apis_serialize { formatter.write_str("a sequence of api id and version tuples") } - fn visit_seq(self, mut visitor: V) -> Result where + fn visit_seq(self, mut visitor: V) -> Result + where V: de::SeqAccess<'de>, { let mut apis = Vec::new(); @@ -329,8 +325,9 @@ mod apis_serialize { deserializer.deserialize_seq(Visitor) } - pub fn deserialize_bytes<'de, D>(d: D) -> Result where - D: de::Deserializer<'de> + pub fn deserialize_bytes<'de, D>(d: D) -> Result + where + D: de::Deserializer<'de>, { let mut arr = [0; 8]; bytes::deserialize_check_len(d, bytes::ExpectedLen::Exact(&mut arr[..]))?; diff --git a/primitives/wasm-interface/src/lib.rs b/primitives/wasm-interface/src/lib.rs index fd200268473b0..3f1f1c1714036 100644 --- a/primitives/wasm-interface/src/lib.rs +++ b/primitives/wasm-interface/src/lib.rs @@ -19,10 +19,7 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::{ - vec, - borrow::Cow, marker::PhantomData, mem, iter::Iterator, result, vec::Vec, -}; +use sp_std::{borrow::Cow, iter::Iterator, marker::PhantomData, mem, result, vec, 
vec::Vec}; #[cfg(feature = "std")] mod wasmi_impl; @@ -141,10 +138,7 @@ pub struct Pointer { impl Pointer { /// Create a new instance of `Self`. pub fn new(ptr: u32) -> Self { - Self { - ptr, - _marker: Default::default(), - } + Self { ptr, _marker: Default::default() } } /// Calculate the offset from this pointer. @@ -153,12 +147,10 @@ impl Pointer { /// /// Returns an `Option` to respect that the pointer could probably overflow. pub fn offset(self, offset: u32) -> Option { - offset.checked_mul(T::SIZE).and_then(|o| self.ptr.checked_add(o)).map(|ptr| { - Self { - ptr, - _marker: Default::default(), - } - }) + offset + .checked_mul(T::SIZE) + .and_then(|o| self.ptr.checked_add(o)) + .map(|ptr| Self { ptr, _marker: Default::default() }) } /// Create a null pointer. @@ -198,7 +190,9 @@ impl From> for usize { impl IntoValue for Pointer { const VALUE_TYPE: ValueType = ValueType::I32; - fn into_value(self) -> Value { Value::I32(self.ptr as _) } + fn into_value(self) -> Value { + Value::I32(self.ptr as _) + } } impl TryFromValue for Pointer { @@ -224,19 +218,16 @@ pub struct Signature { impl Signature { /// Create a new instance of `Signature`. - pub fn new>>(args: T, return_value: Option) -> Self { - Self { - args: args.into(), - return_value, - } + pub fn new>>( + args: T, + return_value: Option, + ) -> Self { + Self { args: args.into(), return_value } } /// Create a new instance of `Signature` with the given `args` and without any return value. 
pub fn new_with_args>>(args: T) -> Self { - Self { - args: args.into(), - return_value: None, - } + Self { args: args.into(), return_value: None } } } @@ -500,7 +491,6 @@ mod tests { assert_eq!(ptr.offset(32).unwrap(), Pointer::new(256)); } - #[test] fn return_value_encoded_max_size() { let encoded = ReturnValue::Value(Value::I64(-1)).encode(); diff --git a/primitives/wasm-interface/src/wasmi_impl.rs b/primitives/wasm-interface/src/wasmi_impl.rs index 79110487ffca5..f7e0ec6f16d4a 100644 --- a/primitives/wasm-interface/src/wasmi_impl.rs +++ b/primitives/wasm-interface/src/wasmi_impl.rs @@ -17,7 +17,7 @@ //! Implementation of conversions between Substrate and wasmi types. -use crate::{Value, ValueType, Signature}; +use crate::{Signature, Value, ValueType}; impl From for wasmi::RuntimeValue { fn from(value: Value) -> Self { diff --git a/test-utils/client/src/client_ext.rs b/test-utils/client/src/client_ext.rs index edba96d760fc2..ef778ca968057 100644 --- a/test-utils/client/src/client_ext.rs +++ b/test-utils/client/src/client_ext.rs @@ -17,17 +17,13 @@ //! Client extension for tests. +use codec::alloc::collections::hash_map::HashMap; +use sc_client_api::{backend::Finalizer, client::BlockBackend}; use sc_service::client::Client; -use sc_client_api::backend::Finalizer; -use sc_client_api::client::BlockBackend; use sp_consensus::{ - BlockImportParams, BlockImport, BlockOrigin, Error as ConsensusError, - ForkChoiceStrategy, + BlockImport, BlockImportParams, BlockOrigin, Error as ConsensusError, ForkChoiceStrategy, }; -use sp_runtime::{Justification, Justifications}; -use sp_runtime::traits::{Block as BlockT}; -use sp_runtime::generic::BlockId; -use codec::alloc::collections::hash_map::HashMap; +use sp_runtime::{generic::BlockId, traits::Block as BlockT, Justification, Justifications}; /// Extension trait for a test client. 
pub trait ClientExt: Sized { @@ -49,11 +45,18 @@ pub trait ClientBlockImportExt: Sized { async fn import(&mut self, origin: BlockOrigin, block: Block) -> Result<(), ConsensusError>; /// Import a block and make it our best block if possible. - async fn import_as_best(&mut self, origin: BlockOrigin, block: Block) -> Result<(), ConsensusError>; + async fn import_as_best( + &mut self, + origin: BlockOrigin, + block: Block, + ) -> Result<(), ConsensusError>; /// Import a block and finalize it. - async fn import_as_final(&mut self, origin: BlockOrigin, block: Block) - -> Result<(), ConsensusError>; + async fn import_as_final( + &mut self, + origin: BlockOrigin, + block: Block, + ) -> Result<(), ConsensusError>; /// Import block with justification(s), finalizes block. async fn import_justified( @@ -65,11 +68,11 @@ pub trait ClientBlockImportExt: Sized { } impl ClientExt for Client - where - B: sc_client_api::backend::Backend, - E: sc_client_api::CallExecutor + 'static, - Self: BlockImport, - Block: BlockT, +where + B: sc_client_api::backend::Backend, + E: sc_client_api::CallExecutor + 'static, + Self: BlockImport, + Block: BlockT, { fn finalize_block( &self, @@ -87,16 +90,12 @@ impl ClientExt for Client /// This implementation is required, because of the weird api requirements around `BlockImport`. 
#[async_trait::async_trait] impl ClientBlockImportExt for std::sync::Arc - where - for<'r> &'r T: BlockImport, - Transaction: Send + 'static, - T: Send + Sync, +where + for<'r> &'r T: BlockImport, + Transaction: Send + 'static, + T: Send + Sync, { - async fn import( - &mut self, - origin: BlockOrigin, - block: Block, - ) -> Result<(), ConsensusError> { + async fn import(&mut self, origin: BlockOrigin, block: Block) -> Result<(), ConsensusError> { let (header, extrinsics) = block.deconstruct(); let mut import = BlockImportParams::new(origin, header); import.body = Some(extrinsics); @@ -151,18 +150,14 @@ impl ClientBlockImportExt for std::sync::A #[async_trait::async_trait] impl ClientBlockImportExt for Client - where - Self: BlockImport, - RA: Send, - B: Send + Sync, - E: Send, - >::Transaction: Send, +where + Self: BlockImport, + RA: Send, + B: Send + Sync, + E: Send, + >::Transaction: Send, { - async fn import( - &mut self, - origin: BlockOrigin, - block: Block, - ) -> Result<(), ConsensusError> { + async fn import(&mut self, origin: BlockOrigin, block: Block) -> Result<(), ConsensusError> { let (header, extrinsics) = block.deconstruct(); let mut import = BlockImportParams::new(origin, header); import.body = Some(extrinsics); diff --git a/test-utils/client/src/lib.rs b/test-utils/client/src/lib.rs index 0971c00d78428..d08a01a4decbe 100644 --- a/test-utils/client/src/lib.rs +++ b/test-utils/client/src/lib.rs @@ -21,39 +21,44 @@ pub mod client_ext; +pub use self::client_ext::{ClientBlockImportExt, ClientExt}; pub use sc_client_api::{ - execution_extensions::{ExecutionStrategies, ExecutionExtensions}, - ForkBlocks, BadBlocks, + execution_extensions::{ExecutionExtensions, ExecutionStrategies}, + BadBlocks, ForkBlocks, }; -pub use sc_client_db::{Backend, self}; +pub use sc_client_db::{self, Backend}; +pub use sc_executor::{self, NativeExecutor, WasmExecutionMethod}; +pub use sc_service::{client, RpcHandlers, RpcSession}; pub use sp_consensus; -pub use 
sc_executor::{NativeExecutor, WasmExecutionMethod, self}; pub use sp_keyring::{ - AccountKeyring, - ed25519::Keyring as Ed25519Keyring, - sr25519::Keyring as Sr25519Keyring, + ed25519::Keyring as Ed25519Keyring, sr25519::Keyring as Sr25519Keyring, AccountKeyring, }; -pub use sp_keystore::{SyncCryptoStorePtr, SyncCryptoStore}; +pub use sp_keystore::{SyncCryptoStore, SyncCryptoStorePtr}; pub use sp_runtime::{Storage, StorageChild}; pub use sp_state_machine::ExecutionStrategy; -pub use sc_service::{RpcHandlers, RpcSession, client}; -pub use self::client_ext::{ClientExt, ClientBlockImportExt}; -use std::pin::Pin; -use std::sync::Arc; -use std::collections::{HashSet, HashMap}; -use futures::{future::{Future, FutureExt}, stream::StreamExt}; +use futures::{ + future::{Future, FutureExt}, + stream::StreamExt, +}; +use sc_client_api::BlockchainEvents; +use sc_service::client::{ClientConfig, LocalCallExecutor}; use serde::Deserialize; use sp_core::storage::ChildInfo; -use sp_runtime::{OpaqueExtrinsic, codec::Encode, traits::{Block as BlockT, BlakeTwo256}}; -use sc_service::client::{LocalCallExecutor, ClientConfig}; -use sc_client_api::BlockchainEvents; +use sp_runtime::{ + codec::Encode, + traits::{BlakeTwo256, Block as BlockT}, + OpaqueExtrinsic, +}; +use std::{ + collections::{HashMap, HashSet}, + pin::Pin, + sync::Arc, +}; /// Test client light database backend. -pub type LightBackend = sc_light::Backend< - sc_client_db::light::LightStorage, - BlakeTwo256, ->; +pub type LightBackend = + sc_light::Backend, BlakeTwo256>; /// A genesis storage initialization trait. pub trait GenesisInit: Default { @@ -84,13 +89,16 @@ pub struct TestClientBuilder { } impl Default - for TestClientBuilder, G> { + for TestClientBuilder, G> +{ fn default() -> Self { Self::with_default_backend() } } -impl TestClientBuilder, G> { +impl + TestClientBuilder, G> +{ /// Create new `TestClientBuilder` with default backend. 
pub fn with_default_backend() -> Self { let backend = Arc::new(Backend::new_test(std::u32::MAX, std::u64::MAX)); @@ -114,7 +122,9 @@ impl TestClientBuilder TestClientBuilder { +impl + TestClientBuilder +{ /// Create a new instance of the test client builder. pub fn with_backend(backend: Arc) -> Self { TestClientBuilder { @@ -155,20 +165,15 @@ impl TestClientBuilder, ) -> Self { let storage_key = child_info.storage_key(); - let entry = self.child_storage_extension.entry(storage_key.to_vec()) - .or_insert_with(|| StorageChild { - data: Default::default(), - child_info: child_info.clone(), - }); + let entry = self.child_storage_extension.entry(storage_key.to_vec()).or_insert_with(|| { + StorageChild { data: Default::default(), child_info: child_info.clone() } + }); entry.data.insert(key.as_ref().to_vec(), value.as_ref().to_vec()); self } /// Set the execution strategy that should be used by all contexts. - pub fn set_execution_strategy( - mut self, - execution_strategy: ExecutionStrategy - ) -> Self { + pub fn set_execution_strategy(mut self, execution_strategy: ExecutionStrategy) -> Self { self.execution_strategies = ExecutionStrategies { syncing: execution_strategy, importing: execution_strategy, @@ -180,7 +185,8 @@ impl TestClientBuilder, bad_blocks: BadBlocks, ) -> Self { @@ -206,14 +212,10 @@ impl TestClientBuilder ( - client::Client< - Backend, - Executor, - Block, - RuntimeApi, - >, + client::Client, sc_consensus::LongestChain, - ) where + ) + where Executor: sc_client_api::CallExecutor + 'static, Backend: sc_client_api::backend::Backend, >::OffchainStorage: 'static, @@ -253,7 +255,8 @@ impl TestClientBuilder TestClientBuilder TestClientBuilder< - Block, - client::LocalCallExecutor>, - Backend, - G, -> { +impl + TestClientBuilder>, Backend, G> +{ /// Build the test client with the given native executor. 
pub fn build_with_native_executor( self, @@ -276,23 +276,25 @@ impl TestClientBuilder< Backend, client::LocalCallExecutor>, Block, - RuntimeApi + RuntimeApi, >, sc_consensus::LongestChain, - ) where + ) + where I: Into>>, E: sc_executor::NativeExecutionDispatch + 'static, Backend: sc_client_api::backend::Backend + 'static, { - let executor = executor.into().unwrap_or_else(|| - NativeExecutor::new(WasmExecutionMethod::Interpreted, None, 8) - ); + let executor = executor + .into() + .unwrap_or_else(|| NativeExecutor::new(WasmExecutionMethod::Interpreted, None, 8)); let executor = LocalCallExecutor::new( self.backend.clone(), executor, Box::new(sp_core::testing::TaskExecutor::new()), Default::default(), - ).expect("Creates LocalCallExecutor"); + ) + .expect("Creates LocalCallExecutor"); self.build_with_executor(executor) } @@ -347,8 +349,8 @@ impl RpcHandlersExt for RpcHandlers { ) -> Pin> + Send>> { let (tx, rx) = futures01::sync::mpsc::channel(0); let mem = RpcSession::new(tx.into()); - Box::pin(self - .rpc_query( + Box::pin( + self.rpc_query( &mem, &format!( r#"{{ @@ -360,7 +362,7 @@ impl RpcHandlersExt for RpcHandlers { hex::encode(extrinsic.encode()) ), ) - .map(move |result| parse_rpc_result(result, mem, rx)) + .map(move |result| parse_rpc_result(result, mem, rx)), ) } } @@ -371,26 +373,17 @@ pub(crate) fn parse_rpc_result( receiver: futures01::sync::mpsc::Receiver, ) -> Result { if let Some(ref result) = result { - let json: serde_json::Value = serde_json::from_str(result) - .expect("the result can only be a JSONRPC string; qed"); - let error = json - .as_object() - .expect("JSON result is always an object; qed") - .get("error"); + let json: serde_json::Value = + serde_json::from_str(result).expect("the result can only be a JSONRPC string; qed"); + let error = json.as_object().expect("JSON result is always an object; qed").get("error"); if let Some(error) = error { - return Err( - serde_json::from_value(error.clone()) - .expect("the JSONRPC result's error is 
always valid; qed") - ) + return Err(serde_json::from_value(error.clone()) + .expect("the JSONRPC result's error is always valid; qed")) } } - Ok(RpcTransactionOutput { - result, - session, - receiver, - }) + Ok(RpcTransactionOutput { result, session, receiver }) } /// An extension trait for `BlockchainEvents`. @@ -420,7 +413,7 @@ where if notification.is_new_best { blocks.insert(notification.hash); if blocks.len() == count { - break; + break } } } @@ -445,31 +438,45 @@ mod tests { assert!(super::parse_rpc_result(None, mem, rx).is_ok()); let (mem, rx) = create_session_and_receiver(); - assert!( - super::parse_rpc_result(Some(r#"{ + assert!(super::parse_rpc_result( + Some( + r#"{ "jsonrpc": "2.0", "result": 19, "id": 1 - }"#.to_string()), mem, rx) - .is_ok(), - ); + }"# + .to_string() + ), + mem, + rx + ) + .is_ok(),); let (mem, rx) = create_session_and_receiver(); - let error = super::parse_rpc_result(Some(r#"{ + let error = super::parse_rpc_result( + Some( + r#"{ "jsonrpc": "2.0", "error": { "code": -32601, "message": "Method not found" }, "id": 1 - }"#.to_string()), mem, rx) - .unwrap_err(); + }"# + .to_string(), + ), + mem, + rx, + ) + .unwrap_err(); assert_eq!(error.code, -32601); assert_eq!(error.message, "Method not found"); assert!(error.data.is_none()); let (mem, rx) = create_session_and_receiver(); - let error = super::parse_rpc_result(Some(r#"{ + let error = super::parse_rpc_result( + Some( + r#"{ "jsonrpc": "2.0", "error": { "code": -32601, @@ -477,8 +484,13 @@ mod tests { "data": 42 }, "id": 1 - }"#.to_string()), mem, rx) - .unwrap_err(); + }"# + .to_string(), + ), + mem, + rx, + ) + .unwrap_err(); assert_eq!(error.code, -32601); assert_eq!(error.message, "Method not found"); assert!(error.data.is_some()); diff --git a/test-utils/derive/src/lib.rs b/test-utils/derive/src/lib.rs index fb1cb24cae40d..877792f82de6a 100644 --- a/test-utils/derive/src/lib.rs +++ b/test-utils/derive/src/lib.rs @@ -43,15 +43,15 @@ fn parse_knobs( if sig.inputs.len() != 1 { let 
msg = "the test function accepts only one argument of type sc_service::TaskExecutor"; - return Err(syn::Error::new_spanned(&sig, msg)); + return Err(syn::Error::new_spanned(&sig, msg)) } let (task_executor_name, task_executor_type) = match sig.inputs.pop().map(|x| x.into_value()) { Some(syn::FnArg::Typed(x)) => (x.pat, x.ty), _ => { let msg = "the test function accepts only one argument of type sc_service::TaskExecutor"; - return Err(syn::Error::new_spanned(&sig, msg)); - } + return Err(syn::Error::new_spanned(&sig, msg)) + }, }; let crate_name = match crate_name("substrate-test-utils") { diff --git a/test-utils/runtime/client/src/block_builder_ext.rs b/test-utils/runtime/client/src/block_builder_ext.rs index 0d3211fa05a9d..e8c1d2ac5cd48 100644 --- a/test-utils/runtime/client/src/block_builder_ext.rs +++ b/test-utils/runtime/client/src/block_builder_ext.rs @@ -17,16 +17,19 @@ //! Block Builder extensions for tests. +use sc_client_api::backend; use sp_api::{ApiExt, ProvideRuntimeApi}; use sp_core::ChangesTrieConfiguration; -use sc_client_api::backend; use sc_block_builder::BlockBuilderApi; /// Extension trait for test block builder. pub trait BlockBuilderExt { /// Add transfer extrinsic to the block. - fn push_transfer(&mut self, transfer: substrate_test_runtime::Transfer) -> Result<(), sp_blockchain::Error>; + fn push_transfer( + &mut self, + transfer: substrate_test_runtime::Transfer, + ) -> Result<(), sp_blockchain::Error>; /// Add storage change extrinsic to the block. 
fn push_storage_change( &mut self, @@ -40,16 +43,21 @@ pub trait BlockBuilderExt { ) -> Result<(), sp_blockchain::Error>; } -impl<'a, A, B> BlockBuilderExt for sc_block_builder::BlockBuilder<'a, substrate_test_runtime::Block, A, B> where +impl<'a, A, B> BlockBuilderExt + for sc_block_builder::BlockBuilder<'a, substrate_test_runtime::Block, A, B> +where A: ProvideRuntimeApi + 'a, - A::Api: BlockBuilderApi + - ApiExt< + A::Api: BlockBuilderApi + + ApiExt< substrate_test_runtime::Block, - StateBackend = backend::StateBackendFor + StateBackend = backend::StateBackendFor, >, B: backend::Backend, { - fn push_transfer(&mut self, transfer: substrate_test_runtime::Transfer) -> Result<(), sp_blockchain::Error> { + fn push_transfer( + &mut self, + transfer: substrate_test_runtime::Transfer, + ) -> Result<(), sp_blockchain::Error> { self.push(transfer.into_signed_tx()) } diff --git a/test-utils/runtime/client/src/lib.rs b/test-utils/runtime/client/src/lib.rs index a9ff26a5adf8d..3db433968c9f8 100644 --- a/test-utils/runtime/client/src/lib.rs +++ b/test-utils/runtime/client/src/lib.rs @@ -23,34 +23,36 @@ pub mod trait_tests; mod block_builder_ext; -use std::sync::Arc; -use std::collections::HashMap; +pub use sc_consensus::LongestChain; +use std::{collections::HashMap, sync::Arc}; pub use substrate_test_client::*; pub use substrate_test_runtime as runtime; -pub use sc_consensus::LongestChain; pub use self::block_builder_ext::BlockBuilderExt; -use sp_core::{sr25519, ChangesTrieConfiguration}; -use sp_core::storage::{ChildInfo, Storage, StorageChild}; -use substrate_test_runtime::genesismap::{GenesisConfig, additional_storage_with_genesis}; -use sp_runtime::traits::{Block as BlockT, Header as HeaderT, Hash as HashT, NumberFor, HashFor}; use sc_client_api::light::{ - RemoteCallRequest, RemoteChangesRequest, RemoteBodyRequest, - Fetcher, RemoteHeaderRequest, RemoteReadRequest, RemoteReadChildRequest, + Fetcher, RemoteBodyRequest, RemoteCallRequest, RemoteChangesRequest, 
RemoteHeaderRequest, + RemoteReadChildRequest, RemoteReadRequest, }; +use sp_core::{ + sr25519, + storage::{ChildInfo, Storage, StorageChild}, + ChangesTrieConfiguration, +}; +use sp_runtime::traits::{Block as BlockT, Hash as HashT, HashFor, Header as HeaderT, NumberFor}; +use substrate_test_runtime::genesismap::{additional_storage_with_genesis, GenesisConfig}; /// A prelude to import in tests. pub mod prelude { // Trait extensions pub use super::{ - BlockBuilderExt, DefaultTestClientBuilderExt, TestClientBuilderExt, ClientExt, - ClientBlockImportExt, + BlockBuilderExt, ClientBlockImportExt, ClientExt, DefaultTestClientBuilderExt, + TestClientBuilderExt, }; // Client structs pub use super::{ - TestClient, TestClientBuilder, Backend, LightBackend, - Executor, LightExecutor, LocalExecutor, NativeExecutor, WasmExecutionMethod, + Backend, Executor, LightBackend, LightExecutor, LocalExecutor, NativeExecutor, TestClient, + TestClientBuilder, WasmExecutionMethod, }; // Keyring pub use super::{AccountKeyring, Sr25519Keyring}; @@ -82,10 +84,10 @@ pub type LightExecutor = sc_light::GenesisCallExecutor< substrate_test_runtime::Block, sc_light::Backend< sc_client_db::light::LightStorage, - HashFor + HashFor, >, - NativeExecutor - > + NativeExecutor, + >, >; /// Parameters of test-client builder with test-runtime. 
@@ -130,19 +132,23 @@ impl substrate_test_client::GenesisInit for GenesisParameters { let mut storage = self.genesis_config().genesis_map(); if let Some(ref code) = self.wasm_code { - storage.top.insert(sp_core::storage::well_known_keys::CODE.to_vec(), code.clone()); + storage + .top + .insert(sp_core::storage::well_known_keys::CODE.to_vec(), code.clone()); } let child_roots = storage.children_default.iter().map(|(_sk, child_content)| { - let state_root = <<::Header as HeaderT>::Hashing as HashT>::trie_root( - child_content.data.clone().into_iter().collect() - ); + let state_root = + <<::Header as HeaderT>::Hashing as HashT>::trie_root( + child_content.data.clone().into_iter().collect(), + ); let prefixed_storage_key = child_content.child_info.prefixed_storage_key(); (prefixed_storage_key.into_inner(), state_root.encode()) }); - let state_root = <<::Header as HeaderT>::Hashing as HashT>::trie_root( - storage.top.clone().into_iter().chain(child_roots).collect() - ); + let state_root = + <<::Header as HeaderT>::Hashing as HashT>::trie_root( + storage.top.clone().into_iter().chain(child_roots).collect(), + ); let block: runtime::Block = client::genesis::construct_genesis_block(state_root); storage.top.extend(additional_storage_with_genesis(&block)); @@ -164,7 +170,7 @@ pub type Client = client::Client< client::LocalCallExecutor< substrate_test_runtime::Block, B, - sc_executor::NativeExecutor + sc_executor::NativeExecutor, >, substrate_test_runtime::Block, substrate_test_runtime::RuntimeApi, @@ -217,12 +223,16 @@ pub trait TestClientBuilderExt: Sized { let key = key.into(); assert!(!storage_key.is_empty()); assert!(!key.is_empty()); - self.genesis_init_mut().extra_storage.children_default + self.genesis_init_mut() + .extra_storage + .children_default .entry(storage_key) .or_insert_with(|| StorageChild { data: Default::default(), child_info: child_info.clone(), - }).data.insert(key, value.into()); + }) + .data + .insert(key, value.into()); self } @@ -244,27 +254,32 @@ 
pub trait TestClientBuilderExt: Sized { } /// Build the test client and longest chain selector. - fn build_with_longest_chain(self) -> (Client, sc_consensus::LongestChain); + fn build_with_longest_chain( + self, + ) -> (Client, sc_consensus::LongestChain); /// Build the test client and the backend. fn build_with_backend(self) -> (Client, Arc); } -impl TestClientBuilderExt for TestClientBuilder< - client::LocalCallExecutor< - substrate_test_runtime::Block, +impl TestClientBuilderExt + for TestClientBuilder< + client::LocalCallExecutor< + substrate_test_runtime::Block, + B, + sc_executor::NativeExecutor, + >, B, - sc_executor::NativeExecutor - >, - B -> where + > where B: sc_client_api::backend::Backend + 'static, { fn genesis_init_mut(&mut self) -> &mut GenesisParameters { Self::genesis_init_mut(self) } - fn build_with_longest_chain(self) -> (Client, sc_consensus::LongestChain) { + fn build_with_longest_chain( + self, + ) -> (Client, sc_consensus::LongestChain) { self.build_with_native_executor(None) } @@ -275,7 +290,8 @@ impl TestClientBuilderExt for TestClientBuilder< } /// Type of optional fetch callback. -type MaybeFetcherCallback = Option Result + Send + Sync>>; +type MaybeFetcherCallback = + Option Result + Send + Sync>>; /// Type of fetcher future result. type FetcherFutureResult = futures::future::Ready>; @@ -284,7 +300,10 @@ type FetcherFutureResult = futures::future::Ready, Vec>, - body: MaybeFetcherCallback, Vec>, + body: MaybeFetcherCallback< + RemoteBodyRequest, + Vec, + >, } impl LightFetcher { @@ -293,21 +312,18 @@ impl LightFetcher { self, call: MaybeFetcherCallback, Vec>, ) -> Self { - LightFetcher { - call, - body: self.body, - } + LightFetcher { call, body: self.body } } /// Sets remote body callback. 
pub fn with_remote_body( self, - body: MaybeFetcherCallback, Vec>, + body: MaybeFetcherCallback< + RemoteBodyRequest, + Vec, + >, ) -> Self { - LightFetcher { - call: self.call, - body, - } + LightFetcher { call: self.call, body } } } @@ -315,14 +331,21 @@ impl Fetcher for LightFetcher { type RemoteHeaderResult = FetcherFutureResult; type RemoteReadResult = FetcherFutureResult, Option>>>; type RemoteCallResult = FetcherFutureResult>; - type RemoteChangesResult = FetcherFutureResult, u32)>>; + type RemoteChangesResult = + FetcherFutureResult, u32)>>; type RemoteBodyResult = FetcherFutureResult>; - fn remote_header(&self, _: RemoteHeaderRequest) -> Self::RemoteHeaderResult { + fn remote_header( + &self, + _: RemoteHeaderRequest, + ) -> Self::RemoteHeaderResult { unimplemented!() } - fn remote_read(&self, _: RemoteReadRequest) -> Self::RemoteReadResult { + fn remote_read( + &self, + _: RemoteReadRequest, + ) -> Self::RemoteReadResult { unimplemented!() } @@ -333,18 +356,27 @@ impl Fetcher for LightFetcher { unimplemented!() } - fn remote_call(&self, req: RemoteCallRequest) -> Self::RemoteCallResult { + fn remote_call( + &self, + req: RemoteCallRequest, + ) -> Self::RemoteCallResult { match self.call { Some(ref call) => futures::future::ready(call(req)), None => unimplemented!(), } } - fn remote_changes(&self, _: RemoteChangesRequest) -> Self::RemoteChangesResult { + fn remote_changes( + &self, + _: RemoteChangesRequest, + ) -> Self::RemoteChangesResult { unimplemented!() } - fn remote_body(&self, req: RemoteBodyRequest) -> Self::RemoteBodyResult { + fn remote_body( + &self, + req: RemoteBodyRequest, + ) -> Self::RemoteBodyResult { match self.body { Some(ref body) => futures::future::ready(body(req)), None => unimplemented!(), @@ -359,10 +391,14 @@ pub fn new() -> Client { /// Creates new light client instance used for tests. 
pub fn new_light() -> ( - client::Client, + client::Client< + LightBackend, + LightExecutor, + substrate_test_runtime::Block, + substrate_test_runtime::RuntimeApi, + >, Arc, ) { - let storage = sc_client_db::light::LightStorage::new_test(); let blockchain = Arc::new(sc_light::Blockchain::new(storage)); let backend = Arc::new(LightBackend::new(blockchain)); @@ -372,11 +408,9 @@ pub fn new_light() -> ( executor, Box::new(sp_core::testing::TaskExecutor::new()), Default::default(), - ).expect("Creates LocalCallExecutor"); - let call_executor = LightExecutor::new( - backend.clone(), - local_call_executor, - ); + ) + .expect("Creates LocalCallExecutor"); + let call_executor = LightExecutor::new(backend.clone(), local_call_executor); ( TestClientBuilder::with_backend(backend.clone()) diff --git a/test-utils/runtime/client/src/trait_tests.rs b/test-utils/runtime/client/src/trait_tests.rs index 797c7ec089bd6..6934e1ba54343 100644 --- a/test-utils/runtime/client/src/trait_tests.rs +++ b/test-utils/runtime/client/src/trait_tests.rs @@ -23,19 +23,21 @@ use std::sync::Arc; use crate::{ - AccountKeyring, ClientBlockImportExt, BlockBuilderExt, TestClientBuilder, TestClientBuilderExt, + AccountKeyring, BlockBuilderExt, ClientBlockImportExt, TestClientBuilder, TestClientBuilderExt, +}; +use futures::executor::block_on; +use sc_block_builder::BlockBuilderProvider; +use sc_client_api::{ + backend, + blockchain::{Backend as BlockChainBackendT, HeaderBackend}, }; -use sc_client_api::backend; -use sc_client_api::blockchain::{Backend as BlockChainBackendT, HeaderBackend}; use sp_consensus::BlockOrigin; +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; use substrate_test_runtime::{self, Transfer}; -use sp_runtime::generic::BlockId; -use sp_runtime::traits::Block as BlockT; -use sc_block_builder::BlockBuilderProvider; -use futures::executor::block_on; /// helper to test the `leaves` implementation for various backends -pub fn test_leaves_for_backend(backend: Arc) where +pub fn 
test_leaves_for_backend(backend: Arc) +where B: backend::Backend, { // block tree: @@ -49,159 +51,134 @@ pub fn test_leaves_for_backend(backend: Arc) where let genesis_hash = client.chain_info().genesis_hash; - assert_eq!( - blockchain.leaves().unwrap(), - vec![genesis_hash]); + assert_eq!(blockchain.leaves().unwrap(), vec![genesis_hash]); // G -> A1 let a1 = client.new_block(Default::default()).unwrap().build().unwrap().block; block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); - assert_eq!( - blockchain.leaves().unwrap(), - vec![a1.hash()], - ); + assert_eq!(blockchain.leaves().unwrap(), vec![a1.hash()],); // A1 -> A2 - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); #[allow(deprecated)] - assert_eq!( - blockchain.leaves().unwrap(), - vec![a2.hash()], - ); + assert_eq!(blockchain.leaves().unwrap(), vec![a2.hash()],); // A2 -> A3 - let a3 = client.new_block_at( - &BlockId::Hash(a2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a3 = client + .new_block_at(&BlockId::Hash(a2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a3.clone())).unwrap(); - assert_eq!( - blockchain.leaves().unwrap(), - vec![a3.hash()], - ); + assert_eq!(blockchain.leaves().unwrap(), vec![a3.hash()],); // A3 -> A4 - let a4 = client.new_block_at( - &BlockId::Hash(a3.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a4 = client + .new_block_at(&BlockId::Hash(a3.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a4.clone())).unwrap(); - assert_eq!( - blockchain.leaves().unwrap(), - vec![a4.hash()], - ); + 
assert_eq!(blockchain.leaves().unwrap(), vec![a4.hash()],); // A4 -> A5 - let a5 = client.new_block_at( - &BlockId::Hash(a4.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a5 = client + .new_block_at(&BlockId::Hash(a4.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a5.clone())).unwrap(); - assert_eq!( - blockchain.leaves().unwrap(), - vec![a5.hash()], - ); + assert_eq!(blockchain.leaves().unwrap(), vec![a5.hash()],); // A1 -> B2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise B2 has the same hash as A2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 41, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 41, + nonce: 0, + }) + .unwrap(); let b2 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, b2.clone())).unwrap(); - assert_eq!( - blockchain.leaves().unwrap(), - vec![a5.hash(), b2.hash()], - ); + assert_eq!(blockchain.leaves().unwrap(), vec![a5.hash(), b2.hash()],); // B2 -> B3 - let b3 = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b3 = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b3.clone())).unwrap(); - assert_eq!( - blockchain.leaves().unwrap(), - vec![a5.hash(), b3.hash()], - ); + assert_eq!(blockchain.leaves().unwrap(), vec![a5.hash(), b3.hash()],); // B3 -> B4 - let b4 = client.new_block_at( - &BlockId::Hash(b3.hash()), - 
Default::default(), - false, - ).unwrap().build().unwrap().block; + let b4 = client + .new_block_at(&BlockId::Hash(b3.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b4.clone())).unwrap(); - assert_eq!( - blockchain.leaves().unwrap(), - vec![a5.hash(), b4.hash()], - ); + assert_eq!(blockchain.leaves().unwrap(), vec![a5.hash(), b4.hash()],); // // B2 -> C3 - let mut builder = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise C3 has the same hash as B3 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 1, - nonce: 1, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 1, + nonce: 1, + }) + .unwrap(); let c3 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, c3.clone())).unwrap(); - assert_eq!( - blockchain.leaves().unwrap(), - vec![a5.hash(), b4.hash(), c3.hash()], - ); + assert_eq!(blockchain.leaves().unwrap(), vec![a5.hash(), b4.hash(), c3.hash()],); // A1 -> D2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise D2 has the same hash as B2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 1, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 1, + nonce: 0, + }) + .unwrap(); let d2 = 
builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, d2.clone())).unwrap(); - assert_eq!( - blockchain.leaves().unwrap(), - vec![a5.hash(), b4.hash(), c3.hash(), d2.hash()], - ); + assert_eq!(blockchain.leaves().unwrap(), vec![a5.hash(), b4.hash(), c3.hash(), d2.hash()],); } /// helper to test the `children` implementation for various backends -pub fn test_children_for_backend(backend: Arc) where +pub fn test_children_for_backend(backend: Arc) +where B: backend::LocalBackend, { // block tree: @@ -218,98 +195,104 @@ pub fn test_children_for_backend(backend: Arc) where block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); // A1 -> A2 - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); // A2 -> A3 - let a3 = client.new_block_at( - &BlockId::Hash(a2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a3 = client + .new_block_at(&BlockId::Hash(a2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a3.clone())).unwrap(); // A3 -> A4 - let a4 = client.new_block_at( - &BlockId::Hash(a3.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a4 = client + .new_block_at(&BlockId::Hash(a3.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a4.clone())).unwrap(); // A4 -> A5 - let a5 = client.new_block_at( - &BlockId::Hash(a4.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a5 = client + .new_block_at(&BlockId::Hash(a4.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, 
a5.clone())).unwrap(); // A1 -> B2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise B2 has the same hash as A2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 41, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 41, + nonce: 0, + }) + .unwrap(); let b2 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, b2.clone())).unwrap(); // B2 -> B3 - let b3 = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b3 = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b3.clone())).unwrap(); // B3 -> B4 - let b4 = client.new_block_at( - &BlockId::Hash(b3.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b4 = client + .new_block_at(&BlockId::Hash(b3.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b4)).unwrap(); // // B2 -> C3 - let mut builder = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise C3 has the same hash as B3 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 1, - nonce: 1, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: 
AccountKeyring::Ferdie.into(), + amount: 1, + nonce: 1, + }) + .unwrap(); let c3 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, c3.clone())).unwrap(); // A1 -> D2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise D2 has the same hash as B2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 1, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 1, + nonce: 0, + }) + .unwrap(); let d2 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, d2.clone())).unwrap(); @@ -345,98 +328,104 @@ where block_on(client.import(BlockOrigin::Own, a1.clone())).unwrap(); // A1 -> A2 - let a2 = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a2 = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a2.clone())).unwrap(); // A2 -> A3 - let a3 = client.new_block_at( - &BlockId::Hash(a2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a3 = client + .new_block_at(&BlockId::Hash(a2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a3.clone())).unwrap(); // A3 -> A4 - let a4 = client.new_block_at( - &BlockId::Hash(a3.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a4 = client + .new_block_at(&BlockId::Hash(a3.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, 
a4.clone())).unwrap(); // A4 -> A5 - let a5 = client.new_block_at( - &BlockId::Hash(a4.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let a5 = client + .new_block_at(&BlockId::Hash(a4.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, a5.clone())).unwrap(); // A1 -> B2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise B2 has the same hash as A2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 41, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 41, + nonce: 0, + }) + .unwrap(); let b2 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, b2.clone())).unwrap(); // B2 -> B3 - let b3 = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b3 = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b3.clone())).unwrap(); // B3 -> B4 - let b4 = client.new_block_at( - &BlockId::Hash(b3.hash()), - Default::default(), - false, - ).unwrap().build().unwrap().block; + let b4 = client + .new_block_at(&BlockId::Hash(b3.hash()), Default::default(), false) + .unwrap() + .build() + .unwrap() + .block; block_on(client.import(BlockOrigin::Own, b4)).unwrap(); // // B2 -> C3 - let mut builder = client.new_block_at( - &BlockId::Hash(b2.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(b2.hash()), Default::default(), false) + 
.unwrap(); // this push is required as otherwise C3 has the same hash as B3 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 1, - nonce: 1, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 1, + nonce: 1, + }) + .unwrap(); let c3 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, c3)).unwrap(); // A1 -> D2 - let mut builder = client.new_block_at( - &BlockId::Hash(a1.hash()), - Default::default(), - false, - ).unwrap(); + let mut builder = client + .new_block_at(&BlockId::Hash(a1.hash()), Default::default(), false) + .unwrap(); // this push is required as otherwise D2 has the same hash as B2 and won't get imported - builder.push_transfer(Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Ferdie.into(), - amount: 1, - nonce: 0, - }).unwrap(); + builder + .push_transfer(Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Ferdie.into(), + amount: 1, + nonce: 0, + }) + .unwrap(); let d2 = builder.build().unwrap().block; block_on(client.import(BlockOrigin::Own, d2)).unwrap(); diff --git a/test-utils/runtime/src/genesismap.rs b/test-utils/runtime/src/genesismap.rs index 63c4bab55ec49..a8801b8519dfe 100644 --- a/test-utils/runtime/src/genesismap.rs +++ b/test-utils/runtime/src/genesismap.rs @@ -17,14 +17,17 @@ //! Tool for creating the genesis block. 
-use std::collections::BTreeMap; +use super::{system, wasm_binary_unwrap, AccountId, AuthorityId}; +use codec::{Encode, Joiner, KeyedVec}; +use sc_service::client::genesis; +use sp_core::{ + map, + storage::{well_known_keys, Storage}, + ChangesTrieConfiguration, +}; use sp_io::hashing::{blake2_256, twox_128}; -use super::{AuthorityId, AccountId, wasm_binary_unwrap, system}; -use codec::{Encode, KeyedVec, Joiner}; -use sp_core::{ChangesTrieConfiguration, map}; -use sp_core::storage::{well_known_keys, Storage}; use sp_runtime::traits::{Block as BlockT, Hash as HashT, Header as HeaderT}; -use sc_service::client::genesis; +use std::collections::BTreeMap; /// Configuration of a general Substrate test genesis block. pub struct GenesisConfig { @@ -47,7 +50,7 @@ impl GenesisConfig { ) -> Self { GenesisConfig { changes_trie_config, - authorities: authorities, + authorities, balances: endowed_accounts.into_iter().map(|a| (a, balance)).collect(), heap_pages_override, extra_storage, @@ -56,16 +59,23 @@ impl GenesisConfig { pub fn genesis_map(&self) -> Storage { let wasm_runtime = wasm_binary_unwrap().to_vec(); - let mut map: BTreeMap, Vec> = self.balances.iter() - .map(|&(ref account, balance)| (account.to_keyed_vec(b"balance:"), vec![].and(&balance))) + let mut map: BTreeMap, Vec> = self + .balances + .iter() + .map(|&(ref account, balance)| { + (account.to_keyed_vec(b"balance:"), vec![].and(&balance)) + }) .map(|(k, v)| (blake2_256(&k[..])[..].to_vec(), v.to_vec())) - .chain(vec![ - (well_known_keys::CODE.into(), wasm_runtime), - ( - well_known_keys::HEAP_PAGES.into(), - vec![].and(&(self.heap_pages_override.unwrap_or(16 as u64))), - ), - ].into_iter()) + .chain( + vec![ + (well_known_keys::CODE.into(), wasm_runtime), + ( + well_known_keys::HEAP_PAGES.into(), + vec![].and(&(self.heap_pages_override.unwrap_or(16 as u64))), + ), + ] + .into_iter(), + ) .collect(); if let Some(ref changes_trie_config) = self.changes_trie_config { 
map.insert(well_known_keys::CHANGES_TRIE_CONFIG.to_vec(), changes_trie_config.encode()); @@ -75,28 +85,30 @@ impl GenesisConfig { map.extend(self.extra_storage.top.clone().into_iter()); // Assimilate the system genesis config. - let mut storage = Storage { top: map, children_default: self.extra_storage.children_default.clone()}; + let mut storage = + Storage { top: map, children_default: self.extra_storage.children_default.clone() }; let mut config = system::GenesisConfig::default(); config.authorities = self.authorities.clone(); - config.assimilate_storage(&mut storage).expect("Adding `system::GensisConfig` to the genesis"); + config + .assimilate_storage(&mut storage) + .expect("Adding `system::GensisConfig` to the genesis"); storage } } -pub fn insert_genesis_block( - storage: &mut Storage, -) -> sp_core::hash::H256 { +pub fn insert_genesis_block(storage: &mut Storage) -> sp_core::hash::H256 { let child_roots = storage.children_default.iter().map(|(sk, child_content)| { - let state_root = <<::Header as HeaderT>::Hashing as HashT>::trie_root( - child_content.data.clone().into_iter().collect(), - ); + let state_root = + <<::Header as HeaderT>::Hashing as HashT>::trie_root( + child_content.data.clone().into_iter().collect(), + ); (sk.clone(), state_root.encode()) }); // add child roots to storage storage.top.extend(child_roots); let state_root = <<::Header as HeaderT>::Hashing as HashT>::trie_root( - storage.top.clone().into_iter().collect() + storage.top.clone().into_iter().collect(), ); let block: crate::Block = genesis::construct_genesis_block(state_root); let genesis_hash = block.header.hash(); diff --git a/test-utils/runtime/src/lib.rs b/test-utils/runtime/src/lib.rs index 3289892fc6f41..111d1e5185a2a 100644 --- a/test-utils/runtime/src/lib.rs +++ b/test-utils/runtime/src/lib.rs @@ -23,46 +23,44 @@ pub mod genesismap; pub mod system; -use sp_std::{prelude::*, marker::PhantomData}; -use codec::{Encode, Decode, Input, Error}; +use codec::{Decode, Encode, Error, 
Input}; use scale_info::TypeInfo; +use sp_std::{marker::PhantomData, prelude::*}; +use sp_application_crypto::{ecdsa, ed25519, sr25519, RuntimeAppPublic}; use sp_core::{offchain::KeyTypeId, ChangesTrieConfiguration, OpaqueMetadata, RuntimeDebug}; -use sp_application_crypto::{ed25519, sr25519, ecdsa, RuntimeAppPublic}; -use trie_db::{TrieMut, Trie}; -use sp_trie::{PrefixedMemoryDB, StorageProof}; -use sp_trie::trie_types::{TrieDB, TrieDBMut}; +use sp_trie::{ + trie_types::{TrieDB, TrieDBMut}, + PrefixedMemoryDB, StorageProof, +}; +use trie_db::{Trie, TrieMut}; +use cfg_if::cfg_if; +use frame_support::{parameter_types, traits::KeyOwnerProofSystem, weights::RuntimeDbWeight}; +use frame_system::limits::{BlockLength, BlockWeights}; use sp_api::{decl_runtime_apis, impl_runtime_apis}; +pub use sp_core::hash::H256; +use sp_inherents::{CheckInherentsResult, InherentData}; +#[cfg(feature = "std")] +use sp_runtime::traits::NumberFor; use sp_runtime::{ create_runtime_str, impl_opaque_keys, - ApplyExtrinsicResult, Perbill, - transaction_validity::{ - TransactionValidity, ValidTransaction, TransactionValidityError, InvalidTransaction, - TransactionSource, - }, traits::{ - BlindCheckable, BlakeTwo256, Block as BlockT, Extrinsic as ExtrinsicT, - GetNodeBlockType, GetRuntimeBlockType, Verify, IdentityLookup, + BlakeTwo256, BlindCheckable, Block as BlockT, Extrinsic as ExtrinsicT, GetNodeBlockType, + GetRuntimeBlockType, IdentityLookup, Verify, }, + transaction_validity::{ + InvalidTransaction, TransactionSource, TransactionValidity, TransactionValidityError, + ValidTransaction, + }, + ApplyExtrinsicResult, Perbill, }; -#[cfg(feature = "std")] -use sp_runtime::traits::NumberFor; -use sp_version::RuntimeVersion; -pub use sp_core::hash::H256; #[cfg(any(feature = "std", test))] use sp_version::NativeVersion; -use frame_support::{ - parameter_types, - traits::KeyOwnerProofSystem, - weights::RuntimeDbWeight, -}; -use frame_system::limits::{BlockWeights, BlockLength}; -use 
sp_inherents::{CheckInherentsResult, InherentData}; -use cfg_if::cfg_if; +use sp_version::RuntimeVersion; // Ensure Babe and Aura use the same crypto to simplify things a bit. -pub use sp_consensus_babe::{AuthorityId, Slot, AllowedSlots}; +pub use sp_consensus_babe::{AllowedSlots, AuthorityId, Slot}; pub type AuraId = sp_consensus_aura::sr25519::AuthorityId; @@ -78,18 +76,19 @@ pub mod wasm_binary_logging_disabled { /// Wasm binary unwrapped. If built with `SKIP_WASM_BUILD`, the function panics. #[cfg(feature = "std")] pub fn wasm_binary_unwrap() -> &'static [u8] { - WASM_BINARY.expect("Development wasm binary is not available. Testing is only \ - supported with the flag disabled.") + WASM_BINARY.expect( + "Development wasm binary is not available. Testing is only \ + supported with the flag disabled.", + ) } /// Wasm binary unwrapped. If built with `SKIP_WASM_BUILD`, the function panics. #[cfg(feature = "std")] pub fn wasm_binary_logging_disabled_unwrap() -> &'static [u8] { - wasm_binary_logging_disabled::WASM_BINARY - .expect( - "Development wasm binary is not available. Testing is only supported with the flag \ - disabled." - ) + wasm_binary_logging_disabled::WASM_BINARY.expect( + "Development wasm binary is not available. Testing is only supported with the flag \ + disabled.", + ) } /// Test runtime version. @@ -111,10 +110,7 @@ fn version() -> RuntimeVersion { /// Native version. #[cfg(any(feature = "std", test))] pub fn native_version() -> NativeVersion { - NativeVersion { - runtime_version: VERSION, - can_author_with: Default::default(), - } + NativeVersion { runtime_version: VERSION, can_author_with: Default::default() } } /// Calls in transactions. 
@@ -131,12 +127,10 @@ impl Transfer { #[cfg(feature = "std")] pub fn into_signed_tx(self) -> Extrinsic { let signature = sp_keyring::AccountKeyring::from_public(&self.from) - .expect("Creates keyring from public key.").sign(&self.encode()).into(); - Extrinsic::Transfer { - transfer: self, - signature, - exhaust_resources_when_not_first: false, - } + .expect("Creates keyring from public key.") + .sign(&self.encode()) + .into(); + Extrinsic::Transfer { transfer: self, signature, exhaust_resources_when_not_first: false } } /// Convert into a signed extrinsic, which will only end up included in the block @@ -145,12 +139,10 @@ impl Transfer { #[cfg(feature = "std")] pub fn into_resources_exhausting_tx(self) -> Extrinsic { let signature = sp_keyring::AccountKeyring::from_public(&self.from) - .expect("Creates keyring from public key.").sign(&self.encode()).into(); - Extrinsic::Transfer { - transfer: self, - signature, - exhaust_resources_when_not_first: true, - } + .expect("Creates keyring from public key.") + .sign(&self.encode()) + .into(); + Extrinsic::Transfer { transfer: self, signature, exhaust_resources_when_not_first: true } } } @@ -175,7 +167,10 @@ parity_util_mem::malloc_size_of_is_0!(Extrinsic); // non-opaque extrinsic does n #[cfg(feature = "std")] impl serde::Serialize for Extrinsic { - fn serialize(&self, seq: S) -> Result where S: ::serde::Serializer { + fn serialize(&self, seq: S) -> Result + where + S: ::serde::Serializer, + { self.using_encoded(|bytes| seq.serialize_bytes(bytes)) } } @@ -186,21 +181,22 @@ impl BlindCheckable for Extrinsic { fn check(self) -> Result { match self { Extrinsic::AuthoritiesChange(new_auth) => Ok(Extrinsic::AuthoritiesChange(new_auth)), - Extrinsic::Transfer { transfer, signature, exhaust_resources_when_not_first } => { + Extrinsic::Transfer { transfer, signature, exhaust_resources_when_not_first } => if sp_runtime::verify_encoded_lazy(&signature, &transfer, &transfer.from) { - Ok(Extrinsic::Transfer { transfer, signature, 
exhaust_resources_when_not_first }) + Ok(Extrinsic::Transfer { + transfer, + signature, + exhaust_resources_when_not_first, + }) } else { Err(InvalidTransaction::BadProof.into()) - } - }, + }, Extrinsic::IncludeData(v) => Ok(Extrinsic::IncludeData(v)), Extrinsic::StorageChange(key, value) => Ok(Extrinsic::StorageChange(key, value)), Extrinsic::ChangesTrieConfigUpdate(new_config) => Ok(Extrinsic::ChangesTrieConfigUpdate(new_config)), - Extrinsic::OffchainIndexSet(key, value) => - Ok(Extrinsic::OffchainIndexSet(key, value)), - Extrinsic::OffchainIndexClear(key) => - Ok(Extrinsic::OffchainIndexClear(key)), + Extrinsic::OffchainIndexSet(key, value) => Ok(Extrinsic::OffchainIndexSet(key, value)), + Extrinsic::OffchainIndexClear(key) => Ok(Extrinsic::OffchainIndexClear(key)), Extrinsic::Store(data) => Ok(Extrinsic::Store(data)), } } @@ -302,9 +298,7 @@ impl codec::EncodeLike for DecodeFails {} impl DecodeFails { /// Create a new instance. pub fn new() -> DecodeFails { - DecodeFails { - _phantom: Default::default(), - } + DecodeFails { _phantom: Default::default() } } } @@ -620,7 +614,8 @@ fn code_using_trie() -> u64 { let pairs = [ (b"0103000000000000000464".to_vec(), b"0400000000".to_vec()), (b"0103000000000000000469".to_vec(), b"0401000000".to_vec()), - ].to_vec(); + ] + .to_vec(); let mut mdb = PrefixedMemoryDB::default(); let mut root = sp_std::default::Default::default(); @@ -628,10 +623,10 @@ fn code_using_trie() -> u64 { let v = &pairs; let mut t = TrieDBMut::::new(&mut mdb, &mut root); for i in 0..v.len() { - let key: &[u8]= &v[i].0; + let key: &[u8] = &v[i].0; let val: &[u8] = &v[i].1; if !t.insert(key, val).is_ok() { - return 101; + return 101 } } t @@ -646,8 +641,12 @@ fn code_using_trie() -> u64 { } } iter_pairs.len() as u64 - } else { 102 } - } else { 103 } + } else { + 102 + } + } else { + 103 + } } impl_opaque_keys! 
{ @@ -1207,29 +1206,15 @@ fn test_read_storage() { fn test_read_child_storage() { const STORAGE_KEY: &[u8] = b"unique_id_1"; const KEY: &[u8] = b":read_child_storage"; - sp_io::default_child_storage::set( - STORAGE_KEY, - KEY, - b"test", - ); + sp_io::default_child_storage::set(STORAGE_KEY, KEY, b"test"); let mut v = [0u8; 4]; - let r = sp_io::default_child_storage::read( - STORAGE_KEY, - KEY, - &mut v, - 0, - ); + let r = sp_io::default_child_storage::read(STORAGE_KEY, KEY, &mut v, 0); assert_eq!(r, Some(4)); assert_eq!(&v, b"test"); let mut v = [0u8; 4]; - let r = sp_io::default_child_storage::read( - STORAGE_KEY, - KEY, - &mut v, - 8, - ); + let r = sp_io::default_child_storage::read(STORAGE_KEY, KEY, &mut v, 8); assert_eq!(r, Some(0)); assert_eq!(&v, &[0, 0, 0, 0]); } @@ -1237,10 +1222,7 @@ fn test_read_child_storage() { fn test_witness(proof: StorageProof, root: crate::Hash) { use sp_externalities::Externalities; let db: sp_trie::MemoryDB = proof.into_memory_db(); - let backend = sp_state_machine::TrieBackend::<_, crate::Hashing>::new( - db, - root, - ); + let backend = sp_state_machine::TrieBackend::<_, crate::Hashing>::new(db, root); let mut overlay = sp_state_machine::OverlayedChanges::default(); let mut cache = sp_state_machine::StorageTransactionCache::<_, _, BlockNumber>::default(); let mut ext = sp_state_machine::Ext::new( @@ -1260,18 +1242,16 @@ fn test_witness(proof: StorageProof, root: crate::Hash) { #[cfg(test)] mod tests { - use substrate_test_runtime_client::{ - prelude::*, - sp_consensus::BlockOrigin, - DefaultTestClientBuilderExt, TestClientBuilder, - runtime::TestAPI, - }; + use codec::Encode; + use sc_block_builder::BlockBuilderProvider; use sp_api::ProvideRuntimeApi; - use sp_runtime::generic::BlockId; use sp_core::storage::well_known_keys::HEAP_PAGES; + use sp_runtime::generic::BlockId; use sp_state_machine::ExecutionStrategy; - use codec::Encode; - use sc_block_builder::BlockBuilderProvider; + use substrate_test_runtime_client::{ + 
prelude::*, runtime::TestAPI, sp_consensus::BlockOrigin, DefaultTestClientBuilderExt, + TestClientBuilder, + }; #[test] fn heap_pages_is_respected() { @@ -1308,9 +1288,8 @@ mod tests { #[test] fn test_storage() { - let client = TestClientBuilder::new() - .set_execution_strategy(ExecutionStrategy::Both) - .build(); + let client = + TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::Both).build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); @@ -1332,14 +1311,10 @@ mod tests { #[test] fn witness_backend_works() { let (db, root) = witness_backend(); - let backend = sp_state_machine::TrieBackend::<_, crate::Hashing>::new( - db, - root, - ); + let backend = sp_state_machine::TrieBackend::<_, crate::Hashing>::new(db, root); let proof = sp_state_machine::prove_read(backend, vec![b"value3"]).unwrap(); - let client = TestClientBuilder::new() - .set_execution_strategy(ExecutionStrategy::Both) - .build(); + let client = + TestClientBuilder::new().set_execution_strategy(ExecutionStrategy::Both).build(); let runtime_api = client.runtime_api(); let block_id = BlockId::Number(client.chain_info().best_number); diff --git a/test-utils/runtime/src/system.rs b/test-utils/runtime/src/system.rs index c4b88c09e8d23..316a553ed027d 100644 --- a/test-utils/runtime/src/system.rs +++ b/test-utils/runtime/src/system.rs @@ -18,25 +18,27 @@ //! System manager: Handles all of the top-level stuff; executing block/transaction, setting code //! and depositing logs. 
-use sp_std::prelude::*; +use crate::{ + AccountId, AuthorityId, Block, BlockNumber, Digest, Extrinsic, Header, Transfer, H256 as Hash, +}; +use codec::{Decode, Encode, KeyedVec}; +use frame_support::{decl_module, decl_storage, storage}; +use frame_system::Config; +use sp_core::{storage::well_known_keys, ChangesTrieConfiguration}; use sp_io::{ - storage::root as storage_root, storage::changes_root as storage_changes_root, - hashing::blake2_256, trie, + hashing::blake2_256, + storage::{changes_root as storage_changes_root, root as storage_root}, + trie, }; -use frame_support::storage; -use frame_support::{decl_storage, decl_module}; use sp_runtime::{ - traits::Header as _, generic, ApplyExtrinsicResult, + generic, + traits::Header as _, transaction_validity::{ - TransactionValidity, ValidTransaction, InvalidTransaction, TransactionValidityError, + InvalidTransaction, TransactionValidity, TransactionValidityError, ValidTransaction, }, + ApplyExtrinsicResult, }; -use codec::{KeyedVec, Encode, Decode}; -use frame_system::Config; -use crate::{ - AccountId, BlockNumber, Extrinsic, Transfer, H256 as Hash, Block, Header, Digest, AuthorityId -}; -use sp_core::{storage::well_known_keys, ChangesTrieConfiguration}; +use sp_std::prelude::*; const NONCE_OF: &[u8] = b"nonce:"; const BALANCE_OF: &[u8] = b"balance:"; @@ -159,17 +161,17 @@ impl frame_support::traits::ExecuteBlock for BlockExecutor { /// This doesn't attempt to validate anything regarding the block. 
pub fn validate_transaction(utx: Extrinsic) -> TransactionValidity { if check_signature(&utx).is_err() { - return InvalidTransaction::BadProof.into(); + return InvalidTransaction::BadProof.into() } let tx = utx.transfer(); let nonce_key = tx.from.to_keyed_vec(NONCE_OF); let expected_nonce: u64 = storage::hashed::get_or(&blake2_256, &nonce_key, 0); if tx.nonce < expected_nonce { - return InvalidTransaction::Stale.into(); + return InvalidTransaction::Stale.into() } if tx.nonce > expected_nonce + 64 { - return InvalidTransaction::Future.into(); + return InvalidTransaction::Future.into() } let encode = |from: &AccountId, nonce: u64| (from, nonce).encode(); @@ -181,20 +183,14 @@ pub fn validate_transaction(utx: Extrinsic) -> TransactionValidity { let provides = vec![encode(&tx.from, tx.nonce)]; - Ok(ValidTransaction { - priority: tx.amount, - requires, - provides, - longevity: 64, - propagate: true, - }) + Ok(ValidTransaction { priority: tx.amount, requires, provides, longevity: 64, propagate: true }) } /// Execute a transaction outside of the block execution function. /// This doesn't attempt to validate anything regarding the block. pub fn execute_transaction(utx: Extrinsic) -> ApplyExtrinsicResult { - let extrinsic_index: u32 = storage::unhashed::get(well_known_keys::EXTRINSIC_INDEX) - .unwrap_or_default(); + let extrinsic_index: u32 = + storage::unhashed::get(well_known_keys::EXTRINSIC_INDEX).unwrap_or_default(); let result = execute_transaction_backend(&utx, extrinsic_index); ExtrinsicData::insert(extrinsic_index, utx.encode()); storage::unhashed::put(well_known_keys::EXTRINSIC_INDEX, &(extrinsic_index + 1)); @@ -215,8 +211,8 @@ pub fn finalize_block() -> Header { // This MUST come after all changes to storage are done. Otherwise we will fail the // “Storage root does not match that calculated” assertion. 
- let storage_root = Hash::decode(&mut &storage_root()[..]) - .expect("`storage_root` is a valid hash"); + let storage_root = + Hash::decode(&mut &storage_root()[..]).expect("`storage_root` is a valid hash"); let storage_changes_root = storage_changes_root(&parent_hash.encode()) .map(|r| Hash::decode(&mut &r[..]).expect("`storage_changes_root` is a valid hash")); @@ -231,17 +227,11 @@ pub fn finalize_block() -> Header { if let Some(new_config) = new_changes_trie_config { digest.push(generic::DigestItem::ChangesTrieSignal( - generic::ChangesTrieSignal::NewConfiguration(new_config) + generic::ChangesTrieSignal::NewConfiguration(new_config), )); } - Header { - number, - extrinsics_root, - state_root: storage_root, - parent_hash, - digest, - } + Header { number, extrinsics_root, state_root: storage_root, parent_hash, digest } } #[inline(always)] @@ -253,12 +243,11 @@ fn check_signature(utx: &Extrinsic) -> Result<(), TransactionValidityError> { fn execute_transaction_backend(utx: &Extrinsic, extrinsic_index: u32) -> ApplyExtrinsicResult { check_signature(utx)?; match utx { - Extrinsic::Transfer { exhaust_resources_when_not_first: true, .. } if extrinsic_index != 0 => + Extrinsic::Transfer { exhaust_resources_when_not_first: true, .. } + if extrinsic_index != 0 => Err(InvalidTransaction::ExhaustsResources.into()), - Extrinsic::Transfer { ref transfer, .. } => - execute_transfer_backend(transfer), - Extrinsic::AuthoritiesChange(ref new_auth) => - execute_new_authorities_backend(new_auth), + Extrinsic::Transfer { ref transfer, .. 
} => execute_transfer_backend(transfer), + Extrinsic::AuthoritiesChange(ref new_auth) => execute_new_authorities_backend(new_auth), Extrinsic::IncludeData(_) => Ok(Ok(())), Extrinsic::StorageChange(key, value) => execute_storage_change(key, value.as_ref().map(|v| &**v)), @@ -271,9 +260,8 @@ fn execute_transaction_backend(utx: &Extrinsic, extrinsic_index: u32) -> ApplyEx Extrinsic::OffchainIndexClear(key) => { sp_io::offchain_index::clear(&key); Ok(Ok(())) - } - Extrinsic::Store(data) => - execute_store(data.clone()), + }, + Extrinsic::Store(data) => execute_store(data.clone()), } } @@ -282,7 +270,7 @@ fn execute_transfer_backend(tx: &Transfer) -> ApplyExtrinsicResult { let nonce_key = tx.from.to_keyed_vec(NONCE_OF); let expected_nonce: u64 = storage::hashed::get_or(&blake2_256, &nonce_key, 0); if !(tx.nonce == expected_nonce) { - return Err(InvalidTransaction::Stale.into()); + return Err(InvalidTransaction::Stale.into()) } // increment nonce in storage @@ -294,7 +282,7 @@ fn execute_transfer_backend(tx: &Transfer) -> ApplyExtrinsicResult { // enact transfer if !(tx.amount <= from_balance) { - return Err(InvalidTransaction::Payment.into()); + return Err(InvalidTransaction::Payment.into()) } let to_balance_key = tx.to.to_keyed_vec(BALANCE_OF); let to_balance: u64 = storage::hashed::get_or(&blake2_256, &to_balance_key, 0); @@ -323,12 +311,12 @@ fn execute_storage_change(key: &[u8], value: Option<&[u8]>) -> ApplyExtrinsicRes Ok(Ok(())) } -fn execute_changes_trie_config_update(new_config: Option) -> ApplyExtrinsicResult { +fn execute_changes_trie_config_update( + new_config: Option, +) -> ApplyExtrinsicResult { match new_config.clone() { - Some(new_config) => storage::unhashed::put_raw( - well_known_keys::CHANGES_TRIE_CONFIG, - &new_config.encode(), - ), + Some(new_config) => + storage::unhashed::put_raw(well_known_keys::CHANGES_TRIE_CONFIG, &new_config.encode()), None => storage::unhashed::kill(well_known_keys::CHANGES_TRIE_CONFIG), } ::put(new_config); @@ -360,19 
+348,18 @@ fn info_expect_equal_hash(given: &Hash, expected: &Hash) { mod tests { use super::*; - use sp_io::TestExternalities; + use crate::{wasm_binary_unwrap, Header, Transfer}; + use sc_executor::{native_executor_instance, NativeExecutor, WasmExecutionMethod}; + use sp_core::{ + map, + traits::{CodeExecutor, RuntimeCode}, + NeverNativeValue, + }; + use sp_io::{hashing::twox_128, TestExternalities}; use substrate_test_runtime_client::{AccountKeyring, Sr25519Keyring}; - use crate::{Header, Transfer, wasm_binary_unwrap}; - use sp_core::{NeverNativeValue, map, traits::{CodeExecutor, RuntimeCode}}; - use sc_executor::{NativeExecutor, WasmExecutionMethod, native_executor_instance}; - use sp_io::hashing::twox_128; // Declare an instance of the native executor dispatch for the test runtime. - native_executor_instance!( - NativeDispatch, - crate::api::dispatch, - crate::native_version - ); + native_executor_instance!(NativeDispatch, crate::api::dispatch, crate::native_version); fn executor() -> NativeExecutor { NativeExecutor::new(WasmExecutionMethod::Interpreted, None, 8) @@ -382,7 +369,7 @@ mod tests { let authorities = vec![ Sr25519Keyring::Alice.to_raw_public(), Sr25519Keyring::Bob.to_raw_public(), - Sr25519Keyring::Charlie.to_raw_public() + Sr25519Keyring::Charlie.to_raw_public(), ]; TestExternalities::new_with_code( wasm_binary_unwrap(), @@ -399,7 +386,10 @@ mod tests { ) } - fn block_import_works(block_executor: F) where F: Fn(Block, &mut TestExternalities) { + fn block_import_works(block_executor: F) + where + F: Fn(Block, &mut TestExternalities), + { let h = Header { parent_hash: [69u8; 32].into(), number: 1, @@ -407,10 +397,7 @@ mod tests { extrinsics_root: Default::default(), digest: Default::default(), }; - let mut b = Block { - header: h, - extrinsics: vec![], - }; + let mut b = Block { header: h, extrinsics: vec![] }; new_test_ext().execute_with(|| polish_block(&mut b)); @@ -419,7 +406,11 @@ mod tests { #[test] fn block_import_works_native() { - 
block_import_works(|b, ext| ext.execute_with(|| { execute_block(b); })); + block_import_works(|b, ext| { + ext.execute_with(|| { + execute_block(b); + }) + }); } #[test] @@ -432,19 +423,23 @@ mod tests { heap_pages: None, }; - executor().call:: _>( - &mut ext, - &runtime_code, - "Core_execute_block", - &b.encode(), - false, - None, - ).0.unwrap(); + executor() + .call:: _>( + &mut ext, + &runtime_code, + "Core_execute_block", + &b.encode(), + false, + None, + ) + .0 + .unwrap(); }) } fn block_import_with_transaction_works(block_executor: F) - where F: Fn(Block, &mut TestExternalities) + where + F: Fn(Block, &mut TestExternalities), { let mut b1 = Block { header: Header { @@ -454,14 +449,13 @@ mod tests { extrinsics_root: Default::default(), digest: Default::default(), }, - extrinsics: vec![ - Transfer { - from: AccountKeyring::Alice.into(), - to: AccountKeyring::Bob.into(), - amount: 69, - nonce: 0, - }.into_signed_tx() - ], + extrinsics: vec![Transfer { + from: AccountKeyring::Alice.into(), + to: AccountKeyring::Bob.into(), + amount: 69, + nonce: 0, + } + .into_signed_tx()], }; let mut dummy_ext = new_test_ext(); @@ -481,13 +475,15 @@ mod tests { to: AccountKeyring::Alice.into(), amount: 27, nonce: 0, - }.into_signed_tx(), + } + .into_signed_tx(), Transfer { from: AccountKeyring::Alice.into(), to: AccountKeyring::Charlie.into(), amount: 69, nonce: 1, - }.into_signed_tx(), + } + .into_signed_tx(), ], }; @@ -519,7 +515,11 @@ mod tests { #[test] fn block_import_with_transaction_works_native() { - block_import_with_transaction_works(|b, ext| ext.execute_with(|| { execute_block(b); })); + block_import_with_transaction_works(|b, ext| { + ext.execute_with(|| { + execute_block(b); + }) + }); } #[test] @@ -532,14 +532,17 @@ mod tests { heap_pages: None, }; - executor().call:: _>( - &mut ext, - &runtime_code, - "Core_execute_block", - &b.encode(), - false, - None, - ).0.unwrap(); + executor() + .call:: _>( + &mut ext, + &runtime_code, + "Core_execute_block", + &b.encode(), 
+ false, + None, + ) + .0 + .unwrap(); }) } } diff --git a/test-utils/runtime/transaction-pool/src/lib.rs b/test-utils/runtime/transaction-pool/src/lib.rs index b3717d22a8bed..d0cd50394c533 100644 --- a/test-utils/runtime/transaction-pool/src/lib.rs +++ b/test-utils/runtime/transaction-pool/src/lib.rs @@ -20,22 +20,22 @@ //! See [`TestApi`] for more information. use codec::Encode; +use futures::future::ready; use parking_lot::RwLock; +use sp_blockchain::CachedHeaderMetadata; use sp_runtime::{ generic::{self, BlockId}, - traits::{BlakeTwo256, Hash as HashT, Block as BlockT, Header as _}, + traits::{BlakeTwo256, Block as BlockT, Hash as HashT, Header as _}, transaction_validity::{ - TransactionValidity, ValidTransaction, TransactionValidityError, InvalidTransaction, - TransactionSource, + InvalidTransaction, TransactionSource, TransactionValidity, TransactionValidityError, + ValidTransaction, }, }; -use std::collections::{HashSet, HashMap, BTreeMap}; +use std::collections::{BTreeMap, HashMap, HashSet}; use substrate_test_runtime_client::{ - runtime::{Index, AccountId, Block, BlockNumber, Extrinsic, Hash, Header, Transfer}, + runtime::{AccountId, Block, BlockNumber, Extrinsic, Hash, Header, Index, Transfer}, AccountKeyring::{self, *}, }; -use sp_blockchain::CachedHeaderMetadata; -use futures::future::ready; /// Error type used by [`TestApi`]. 
#[derive(Debug, derive_more::From, derive_more::Display)] @@ -130,12 +130,9 @@ impl TestApi { block_number .checked_sub(1) .and_then(|num| { - chain.block_by_number - .get(&num) - .map(|blocks| { - blocks[0].0.header.hash() - }) - }).unwrap_or_default() + chain.block_by_number.get(&num).map(|blocks| blocks[0].0.header.hash()) + }) + .unwrap_or_default() }; self.push_block_with_parent(parent_hash, xts, is_best_block) @@ -154,7 +151,9 @@ impl TestApi { let block_number = if parent == Hash::default() { 0 } else { - *self.chain.read() + *self + .chain + .read() .block_by_hash .get(&parent) .expect("`parent` exists") @@ -182,7 +181,11 @@ impl TestApi { let mut chain = self.chain.write(); chain.block_by_hash.insert(hash, block.clone()); - chain.block_by_number.entry(block_number).or_default().push((block, is_best_block.into())); + chain + .block_by_number + .entry(block_number) + .or_default() + .push((block, is_best_block.into())); } fn hash_and_length_inner(ex: &Extrinsic) -> (Hash, usize) { @@ -195,9 +198,7 @@ impl TestApi { /// Next time transaction pool will try to validate this /// extrinsic, api will return invalid result. pub fn add_invalid(&self, xts: &Extrinsic) { - self.chain.write().invalid_hashes.insert( - Self::hash_and_length_inner(xts).0 - ); + self.chain.write().invalid_hashes.insert(Self::hash_and_length_inner(xts).0); } /// Query validation requests received. @@ -242,7 +243,8 @@ impl sc_transaction_pool::test_helpers::ChainApi for TestApi { match self.block_id_to_number(at) { Ok(Some(number)) => { - let found_best = self.chain + let found_best = self + .chain .read() .block_by_number .get(&number) @@ -253,24 +255,24 @@ impl sc_transaction_pool::test_helpers::ChainApi for TestApi { // the transaction. (This is not required for this test function, but in real // environment it would fail because of this). 
if !found_best { - return ready(Ok( - Err(TransactionValidityError::Invalid(InvalidTransaction::Custom(1)).into()) - )) + return ready(Ok(Err(TransactionValidityError::Invalid( + InvalidTransaction::Custom(1), + ) + .into()))) } }, - Ok(None) => return ready(Ok( - Err(TransactionValidityError::Invalid(InvalidTransaction::Custom(2)).into()) - )), + Ok(None) => + return ready(Ok(Err(TransactionValidityError::Invalid( + InvalidTransaction::Custom(2), + ) + .into()))), Err(e) => return ready(Err(e)), } let (requires, provides) = if let Some(transfer) = uxt.try_transfer() { let chain_nonce = self.chain.read().nonces.get(&transfer.from).cloned().unwrap_or(0); - let requires = if chain_nonce == transfer.nonce { - vec![] - } else { - vec![vec![chain_nonce as u8]] - }; + let requires = + if chain_nonce == transfer.nonce { vec![] } else { vec![vec![chain_nonce as u8]] }; let provides = vec![vec![transfer.nonce as u8]]; (requires, provides) @@ -279,18 +281,13 @@ impl sc_transaction_pool::test_helpers::ChainApi for TestApi { }; if self.chain.read().invalid_hashes.contains(&self.hash_and_length(&uxt).0) { - return ready(Ok( - Err(TransactionValidityError::Invalid(InvalidTransaction::Custom(0)).into()) - )) + return ready(Ok(Err( + TransactionValidityError::Invalid(InvalidTransaction::Custom(0)).into() + ))) } - let mut validity = ValidTransaction { - priority: 1, - requires, - provides, - longevity: 64, - propagate: true, - }; + let mut validity = + ValidTransaction { priority: 1, requires, provides, longevity: 64, propagate: true }; (self.valid_modifier.read())(&mut validity); @@ -302,11 +299,8 @@ impl sc_transaction_pool::test_helpers::ChainApi for TestApi { at: &BlockId, ) -> Result>, Error> { Ok(match at { - generic::BlockId::Hash(x) => self.chain - .read() - .block_by_hash - .get(x) - .map(|b| *b.header.number()), + generic::BlockId::Hash(x) => + self.chain.read().block_by_hash.get(x).map(|b| *b.header.number()), generic::BlockId::Number(num) => Some(*num), }) } @@ -317,11 
+311,10 @@ impl sc_transaction_pool::test_helpers::ChainApi for TestApi { ) -> Result>, Error> { Ok(match at { generic::BlockId::Hash(x) => Some(x.clone()), - generic::BlockId::Number(num) => self.chain - .read() - .block_by_number - .get(num) - .and_then(|blocks| blocks.iter().find(|b| b.1.is_best()).map(|b| b.0.header().hash())), + generic::BlockId::Number(num) => + self.chain.read().block_by_number.get(num).and_then(|blocks| { + blocks.iter().find(|b| b.1.is_best()).map(|b| b.0.header().hash()) + }), }) } @@ -334,16 +327,10 @@ impl sc_transaction_pool::test_helpers::ChainApi for TestApi { fn block_body(&self, id: &BlockId) -> Self::BodyFuture { futures::future::ready(Ok(match id { - BlockId::Number(num) => self.chain - .read() - .block_by_number - .get(num) - .map(|b| b[0].0.extrinsics().to_vec()), - BlockId::Hash(hash) => self.chain - .read() - .block_by_hash - .get(hash) - .map(|b| b.extrinsics().to_vec()), + BlockId::Number(num) => + self.chain.read().block_by_number.get(num).map(|b| b[0].0.extrinsics().to_vec()), + BlockId::Hash(hash) => + self.chain.read().block_by_hash.get(hash).map(|b| b.extrinsics().to_vec()), })) } @@ -352,16 +339,10 @@ impl sc_transaction_pool::test_helpers::ChainApi for TestApi { at: &BlockId, ) -> Result::Header>, Self::Error> { Ok(match at { - BlockId::Number(num) => self.chain - .read() - .block_by_number - .get(num) - .map(|b| b[0].0.header().clone()), - BlockId::Hash(hash) => self.chain - .read() - .block_by_hash - .get(hash) - .map(|b| b.header().clone()), + BlockId::Number(num) => + self.chain.read().block_by_number.get(num).map(|b| b[0].0.header().clone()), + BlockId::Hash(hash) => + self.chain.read().block_by_hash.get(hash).map(|b| b.header().clone()), }) } } @@ -369,21 +350,14 @@ impl sc_transaction_pool::test_helpers::ChainApi for TestApi { impl sp_blockchain::HeaderMetadata for TestApi { type Error = Error; - fn header_metadata( - &self, - hash: Hash, - ) -> Result, Self::Error> { + fn header_metadata(&self, hash: Hash) -> 
Result, Self::Error> { let chain = self.chain.read(); let block = chain.block_by_hash.get(&hash).expect("Hash exists"); Ok(block.header().into()) } - fn insert_header_metadata( - &self, - _: Hash, - _: CachedHeaderMetadata, - ) { + fn insert_header_metadata(&self, _: Hash, _: CachedHeaderMetadata) { unimplemented!("Not implemented for tests") } @@ -396,12 +370,7 @@ impl sp_blockchain::HeaderMetadata for TestApi { /// /// Part of the test api. pub fn uxt(who: AccountKeyring, nonce: Index) -> Extrinsic { - let transfer = Transfer { - from: who.into(), - to: AccountId::default(), - nonce, - amount: 1, - }; + let transfer = Transfer { from: who.into(), to: AccountId::default(), nonce, amount: 1 }; let signature = transfer.using_encoded(|e| who.sign(e)).into(); Extrinsic::Transfer { transfer, signature, exhaust_resources_when_not_first: false } } diff --git a/test-utils/src/lib.rs b/test-utils/src/lib.rs index b3a0f322a639f..07aeafe7146e8 100644 --- a/test-utils/src/lib.rs +++ b/test-utils/src/lib.rs @@ -64,7 +64,7 @@ macro_rules! assert_eq_uvec { ( $x:expr, $y:expr $(,)? ) => { $crate::__assert_eq_uvec!($x, $y); $crate::__assert_eq_uvec!($y, $x); - } + }; } #[macro_export] @@ -72,7 +72,9 @@ macro_rules! assert_eq_uvec { macro_rules! __assert_eq_uvec { ( $x:expr, $y:expr ) => { $x.iter().for_each(|e| { - if !$y.contains(e) { panic!("vectors not equal: {:?} != {:?}", $x, $y); } + if !$y.contains(e) { + panic!("vectors not equal: {:?} != {:?}", $x, $y); + } }); - } + }; } diff --git a/test-utils/test-runner/src/client.rs b/test-utils/test-runner/src/client.rs index 4cadfe58c605a..71a156b8bc0d9 100644 --- a/test-utils/test-runner/src/client.rs +++ b/test-utils/test-runner/src/client.rs @@ -16,204 +16,218 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . //! 
Client parts -use sp_transaction_pool::runtime_api::TaggedTransactionQueue; -use sp_consensus_babe::BabeApi; -use crate::{ChainInfo, default_config}; -use manual_seal::consensus::babe::{BabeConsensusDataProvider, SlotTimestampProvider}; -use sp_keyring::sr25519::Keyring::Alice; -use std::str::FromStr; -use sp_runtime::traits::Header; +use crate::{default_config, ChainInfo}; use futures::channel::mpsc; use jsonrpc_core::MetaIoHandler; -use manual_seal::{run_manual_seal, EngineCommand, ManualSealParams, import_queue, rpc::{ManualSeal, ManualSealApi}}; +use manual_seal::{ + consensus::babe::{BabeConsensusDataProvider, SlotTimestampProvider}, + import_queue, + rpc::{ManualSeal, ManualSealApi}, + run_manual_seal, EngineCommand, ManualSealParams, +}; use sc_client_api::backend::Backend; use sc_service::{ - build_network, spawn_tasks, BuildNetworkParams, SpawnTasksParams, TFullBackend, - TFullClient, TaskManager, new_full_parts, Configuration, ChainSpec, TaskExecutor, + build_network, new_full_parts, spawn_tasks, BuildNetworkParams, ChainSpec, Configuration, + SpawnTasksParams, TFullBackend, TFullClient, TaskExecutor, TaskManager, }; use sc_transaction_pool::BasicPool; use sc_transaction_pool_api::TransactionPool; use sp_api::{ApiExt, ConstructRuntimeApi, Core, Metadata}; use sp_block_builder::BlockBuilder; -use sp_runtime::traits::Block as BlockT; -use sp_session::SessionKeys; +use sp_consensus_babe::BabeApi; +use sp_keyring::sr25519::Keyring::Alice; use sp_offchain::OffchainWorkerApi; -use std::sync::Arc; +use sp_runtime::traits::{Block as BlockT, Header}; +use sp_session::SessionKeys; +use sp_transaction_pool::runtime_api::TaggedTransactionQueue; +use std::{str::FromStr, sync::Arc}; type ClientParts = ( - Arc>, - TaskManager, - Arc::Block, ::RuntimeApi, ::Executor>>, - Arc::Block, - Hash = <::Block as BlockT>::Hash, - Error = sc_transaction_pool::error::Error, - InPoolTransaction = sc_transaction_pool::Transaction< - <::Block as BlockT>::Hash, - <::Block as 
BlockT>::Extrinsic, - >, - >>, - mpsc::Sender::Block as BlockT>::Hash>>, - Arc::Block>>, + Arc>, + TaskManager, + Arc< + TFullClient< + ::Block, + ::RuntimeApi, + ::Executor, + >, + >, + Arc< + dyn TransactionPool< + Block = ::Block, + Hash = <::Block as BlockT>::Hash, + Error = sc_transaction_pool::error::Error, + InPoolTransaction = sc_transaction_pool::Transaction< + <::Block as BlockT>::Hash, + <::Block as BlockT>::Extrinsic, + >, + >, + >, + mpsc::Sender::Block as BlockT>::Hash>>, + Arc::Block>>, ); /// Provide the config or chain spec for a given chain pub enum ConfigOrChainSpec { - /// Configuration object - Config(Configuration), - /// Chain spec object - ChainSpec(Box, TaskExecutor) + /// Configuration object + Config(Configuration), + /// Chain spec object + ChainSpec(Box, TaskExecutor), } /// Creates all the client parts you need for [`Node`](crate::node::Node) -pub fn client_parts(config_or_chain_spec: ConfigOrChainSpec) -> Result, sc_service::Error> - where - T: ChainInfo + 'static, - >>::RuntimeApi: - Core + Metadata + OffchainWorkerApi + SessionKeys - + TaggedTransactionQueue + BlockBuilder + BabeApi - + ApiExt as Backend>::State>, - ::Call: From>, - <::Block as BlockT>::Hash: FromStr, - <<::Block as BlockT>::Header as Header>::Number: num_traits::cast::AsPrimitive, +pub fn client_parts( + config_or_chain_spec: ConfigOrChainSpec, +) -> Result, sc_service::Error> +where + T: ChainInfo + 'static, + , + >>::RuntimeApi: Core + + Metadata + + OffchainWorkerApi + + SessionKeys + + TaggedTransactionQueue + + BlockBuilder + + BabeApi + + ApiExt as Backend>::State>, + ::Call: From>, + <::Block as BlockT>::Hash: FromStr, + <<::Block as BlockT>::Header as Header>::Number: + num_traits::cast::AsPrimitive, { - use sp_consensus_babe::AuthorityId; - let config = match config_or_chain_spec { - ConfigOrChainSpec::Config(config) => config, - ConfigOrChainSpec::ChainSpec(chain_spec, task_executor) => { - default_config(task_executor, chain_spec) - }, - }; - - let 
(client, backend, keystore, mut task_manager) = - new_full_parts::(&config, None)?; - let client = Arc::new(client); - - let select_chain = sc_consensus::LongestChain::new(backend.clone()); - - let (grandpa_block_import, ..) = - grandpa::block_import(client.clone(), &(client.clone() as Arc<_>), select_chain.clone(), None)?; - - let slot_duration = sc_consensus_babe::Config::get_or_compute(&*client)?; - let (block_import, babe_link) = sc_consensus_babe::block_import( - slot_duration.clone(), - grandpa_block_import, - client.clone(), - )?; - - let consensus_data_provider = BabeConsensusDataProvider::new( - client.clone(), - keystore.sync_keystore(), - babe_link.epoch_changes().clone(), - vec![(AuthorityId::from(Alice.public()), 1000)], - ) - .expect("failed to create ConsensusDataProvider"); - - let import_queue = - import_queue(Box::new(block_import.clone()), &task_manager.spawn_essential_handle(), None); - - let transaction_pool = BasicPool::new_full( - config.transaction_pool.clone(), - true.into(), - config.prometheus_registry(), - task_manager.spawn_essential_handle(), - client.clone(), - ); - - let (network, system_rpc_tx, network_starter) = { - let params = BuildNetworkParams { - config: &config, - client: client.clone(), - transaction_pool: transaction_pool.clone(), - spawn_handle: task_manager.spawn_handle(), - import_queue, - on_demand: None, - block_announce_validator_builder: None, - }; - build_network(params)? - }; - - // offchain workers - sc_service::build_offchain_workers( - &config, - task_manager.spawn_handle(), - client.clone(), - network.clone(), - ); - - // Proposer object for block authorship. - let env = sc_basic_authorship::ProposerFactory::new( - task_manager.spawn_handle(), - client.clone(), - transaction_pool.clone(), - config.prometheus_registry(), - None - ); - - // Channel for the rpc handler to communicate with the authorship task. 
- let (command_sink, commands_stream) = mpsc::channel(10); - - let rpc_sink = command_sink.clone(); - - let rpc_handlers = { - let params = SpawnTasksParams { - config, - client: client.clone(), - backend: backend.clone(), - task_manager: &mut task_manager, - keystore: keystore.sync_keystore(), - on_demand: None, - transaction_pool: transaction_pool.clone(), - rpc_extensions_builder: Box::new(move |_, _| { - let mut io = jsonrpc_core::IoHandler::default(); - io.extend_with( - ManualSealApi::to_delegate(ManualSeal::new(rpc_sink.clone())) - ); - io - }), - remote_blockchain: None, - network, - system_rpc_tx, - telemetry: None - }; - spawn_tasks(params)? - }; - - let cloned_client = client.clone(); - let create_inherent_data_providers = Box::new(move |_, _| { - let client = cloned_client.clone(); - async move { - let timestamp = SlotTimestampProvider::new(client.clone()).map_err(|err| format!("{:?}", err))?; - let babe = sp_consensus_babe::inherents::InherentDataProvider::new(timestamp.slot().into()); - Ok((timestamp, babe)) - } - }); - - // Background authorship future. - let authorship_future = run_manual_seal(ManualSealParams { - block_import, - env, - client: client.clone(), - pool: transaction_pool.clone(), - commands_stream, - select_chain, - consensus_data_provider: Some(Box::new(consensus_data_provider)), - create_inherent_data_providers, - }); - - // spawn the authorship task as an essential task. 
- task_manager - .spawn_essential_handle() - .spawn("manual-seal", authorship_future); - - network_starter.start_network(); - let rpc_handler = rpc_handlers.io_handler(); - - Ok(( - rpc_handler, - task_manager, - client, - transaction_pool, - command_sink, - backend, - )) + use sp_consensus_babe::AuthorityId; + let config = match config_or_chain_spec { + ConfigOrChainSpec::Config(config) => config, + ConfigOrChainSpec::ChainSpec(chain_spec, task_executor) => + default_config(task_executor, chain_spec), + }; + + let (client, backend, keystore, mut task_manager) = + new_full_parts::(&config, None)?; + let client = Arc::new(client); + + let select_chain = sc_consensus::LongestChain::new(backend.clone()); + + let (grandpa_block_import, ..) = grandpa::block_import( + client.clone(), + &(client.clone() as Arc<_>), + select_chain.clone(), + None, + )?; + + let slot_duration = sc_consensus_babe::Config::get_or_compute(&*client)?; + let (block_import, babe_link) = sc_consensus_babe::block_import( + slot_duration.clone(), + grandpa_block_import, + client.clone(), + )?; + + let consensus_data_provider = BabeConsensusDataProvider::new( + client.clone(), + keystore.sync_keystore(), + babe_link.epoch_changes().clone(), + vec![(AuthorityId::from(Alice.public()), 1000)], + ) + .expect("failed to create ConsensusDataProvider"); + + let import_queue = + import_queue(Box::new(block_import.clone()), &task_manager.spawn_essential_handle(), None); + + let transaction_pool = BasicPool::new_full( + config.transaction_pool.clone(), + true.into(), + config.prometheus_registry(), + task_manager.spawn_essential_handle(), + client.clone(), + ); + + let (network, system_rpc_tx, network_starter) = { + let params = BuildNetworkParams { + config: &config, + client: client.clone(), + transaction_pool: transaction_pool.clone(), + spawn_handle: task_manager.spawn_handle(), + import_queue, + on_demand: None, + block_announce_validator_builder: None, + }; + build_network(params)? 
+ }; + + // offchain workers + sc_service::build_offchain_workers( + &config, + task_manager.spawn_handle(), + client.clone(), + network.clone(), + ); + + // Proposer object for block authorship. + let env = sc_basic_authorship::ProposerFactory::new( + task_manager.spawn_handle(), + client.clone(), + transaction_pool.clone(), + config.prometheus_registry(), + None, + ); + + // Channel for the rpc handler to communicate with the authorship task. + let (command_sink, commands_stream) = mpsc::channel(10); + + let rpc_sink = command_sink.clone(); + + let rpc_handlers = { + let params = SpawnTasksParams { + config, + client: client.clone(), + backend: backend.clone(), + task_manager: &mut task_manager, + keystore: keystore.sync_keystore(), + on_demand: None, + transaction_pool: transaction_pool.clone(), + rpc_extensions_builder: Box::new(move |_, _| { + let mut io = jsonrpc_core::IoHandler::default(); + io.extend_with(ManualSealApi::to_delegate(ManualSeal::new(rpc_sink.clone()))); + io + }), + remote_blockchain: None, + network, + system_rpc_tx, + telemetry: None, + }; + spawn_tasks(params)? + }; + + let cloned_client = client.clone(); + let create_inherent_data_providers = Box::new(move |_, _| { + let client = cloned_client.clone(); + async move { + let timestamp = + SlotTimestampProvider::new(client.clone()).map_err(|err| format!("{:?}", err))?; + let babe = + sp_consensus_babe::inherents::InherentDataProvider::new(timestamp.slot().into()); + Ok((timestamp, babe)) + } + }); + + // Background authorship future. + let authorship_future = run_manual_seal(ManualSealParams { + block_import, + env, + client: client.clone(), + pool: transaction_pool.clone(), + commands_stream, + select_chain, + consensus_data_provider: Some(Box::new(consensus_data_provider)), + create_inherent_data_providers, + }); + + // spawn the authorship task as an essential task. 
+ task_manager.spawn_essential_handle().spawn("manual-seal", authorship_future); + + network_starter.start_network(); + let rpc_handler = rpc_handlers.io_handler(); + + Ok((rpc_handler, task_manager, client, transaction_pool, command_sink, backend)) } diff --git a/test-utils/test-runner/src/host_functions.rs b/test-utils/test-runner/src/host_functions.rs index 534d4a23fdccb..6bd91929256a3 100644 --- a/test-utils/test-runner/src/host_functions.rs +++ b/test-utils/test-runner/src/host_functions.rs @@ -73,12 +73,16 @@ macro_rules! override_host_functions { pub struct SignatureVerificationOverride; impl sp_wasm_interface::HostFunctions for SignatureVerificationOverride { - fn host_functions() -> Vec<&'static dyn sp_wasm_interface::Function> { - override_host_functions!( - "ext_crypto_ecdsa_verify_version_1", EcdsaVerify, - "ext_crypto_ed25519_verify_version_1", Ed25519Verify, - "ext_crypto_sr25519_verify_version_1", Sr25519Verify, - "ext_crypto_sr25519_verify_version_2", Sr25519VerifyV2, - ) - } + fn host_functions() -> Vec<&'static dyn sp_wasm_interface::Function> { + override_host_functions!( + "ext_crypto_ecdsa_verify_version_1", + EcdsaVerify, + "ext_crypto_ed25519_verify_version_1", + Ed25519Verify, + "ext_crypto_sr25519_verify_version_1", + Sr25519Verify, + "ext_crypto_sr25519_verify_version_2", + Sr25519VerifyV2, + ) + } } diff --git a/test-utils/test-runner/src/lib.rs b/test-utils/test-runner/src/lib.rs index 1976d132b7c50..482fa15abd9ed 100644 --- a/test-utils/test-runner/src/lib.rs +++ b/test-utils/test-runner/src/lib.rs @@ -235,14 +235,14 @@ use sp_inherents::InherentDataProvider; use sp_runtime::traits::{Block as BlockT, SignedExtension}; mod client; +mod host_functions; mod node; mod utils; -mod host_functions; +pub use client::*; pub use host_functions::*; pub use node::*; pub use utils::*; -pub use client::*; /// Wrapper trait for concrete type required by this testing framework. 
pub trait ChainInfo: Sized { @@ -271,7 +271,10 @@ pub trait ChainInfo: Sized { + BlockImport< Self::Block, Error = sp_consensus::Error, - Transaction = TransactionFor, Self::Block>, + Transaction = TransactionFor< + TFullClient, + Self::Block, + >, > + 'static; /// The signed extras required by the runtime @@ -281,5 +284,7 @@ pub trait ChainInfo: Sized { type InherentDataProviders: InherentDataProvider + 'static; /// Signed extras, this function is caled in an externalities provided environment. - fn signed_extras(from: ::AccountId) -> Self::SignedExtras; + fn signed_extras( + from: ::AccountId, + ) -> Self::SignedExtras; } diff --git a/test-utils/test-runner/src/node.rs b/test-utils/test-runner/src/node.rs index b1e5854798eec..8e8873ec7d657 100644 --- a/test-utils/test-runner/src/node.rs +++ b/test-utils/test-runner/src/node.rs @@ -18,21 +18,28 @@ use std::sync::Arc; -use futures::{FutureExt, SinkExt, channel::{mpsc, oneshot}}; +use crate::ChainInfo; +use futures::{ + channel::{mpsc, oneshot}, + FutureExt, SinkExt, +}; use jsonrpc_core::MetaIoHandler; use manual_seal::EngineCommand; -use sc_client_api::{backend::{self, Backend}, CallExecutor, ExecutorProvider}; +use sc_client_api::{ + backend::{self, Backend}, + CallExecutor, ExecutorProvider, +}; use sc_service::{TFullBackend, TFullCallExecutor, TFullClient, TaskManager}; +use sc_transaction_pool_api::TransactionPool; use sp_api::{OverlayedChanges, StorageTransactionCache}; use sp_blockchain::HeaderBackend; use sp_core::ExecutionContext; use sp_runtime::{ generic::{BlockId, UncheckedExtrinsic}, - traits::{Block as BlockT, Header, Extrinsic, NumberFor}, - transaction_validity::TransactionSource, MultiSignature, MultiAddress + traits::{Block as BlockT, Extrinsic, Header, NumberFor}, + transaction_validity::TransactionSource, + MultiAddress, MultiSignature, }; -use crate::ChainInfo; -use sc_transaction_pool_api::TransactionPool; use sp_state_machine::Ext; /// This holds a reference to a running node on another 
thread, @@ -46,44 +53,51 @@ pub struct Node { /// client instance client: Arc>, /// transaction pool - pool: Arc::Block, - Hash = <::Block as BlockT>::Hash, - Error = sc_transaction_pool::error::Error, - InPoolTransaction = sc_transaction_pool::Transaction< - <::Block as BlockT>::Hash, - <::Block as BlockT>::Extrinsic, + pool: Arc< + dyn TransactionPool< + Block = ::Block, + Hash = <::Block as BlockT>::Hash, + Error = sc_transaction_pool::error::Error, + InPoolTransaction = sc_transaction_pool::Transaction< + <::Block as BlockT>::Hash, + <::Block as BlockT>::Extrinsic, + >, >, - >>, + >, /// channel to communicate with manual seal on. manual_seal_command_sink: mpsc::Sender::Hash>>, /// backend type. backend: Arc>, /// Block number at initialization of this Node. - initial_block_number: NumberFor + initial_block_number: NumberFor, } -type EventRecord = frame_system::EventRecord<::Event, ::Hash>; +type EventRecord = frame_system::EventRecord< + ::Event, + ::Hash, +>; impl Node - where - T: ChainInfo, - <::Header as Header>::Number: From, +where + T: ChainInfo, + <::Header as Header>::Number: From, { /// Creates a new node. 
pub fn new( rpc_handler: Arc>, task_manager: TaskManager, client: Arc>, - pool: Arc::Block, - Hash = <::Block as BlockT>::Hash, - Error = sc_transaction_pool::error::Error, - InPoolTransaction = sc_transaction_pool::Transaction< - <::Block as BlockT>::Hash, - <::Block as BlockT>::Extrinsic, + pool: Arc< + dyn TransactionPool< + Block = ::Block, + Hash = <::Block as BlockT>::Hash, + Error = sc_transaction_pool::error::Error, + InPoolTransaction = sc_transaction_pool::Transaction< + <::Block as BlockT>::Hash, + <::Block as BlockT>::Extrinsic, + >, >, - >>, + >, command_sink: mpsc::Sender::Hash>>, backend: Arc>, ) -> Self { @@ -105,7 +119,9 @@ impl Node /// let response = node.rpc_handler() /// .handle_request_sync(request, Default::default()); /// ``` - pub fn rpc_handler(&self) -> Arc> { + pub fn rpc_handler( + &self, + ) -> Arc> { self.rpc_handler.clone() } @@ -117,13 +133,18 @@ impl Node /// Executes closure in an externalities provided environment. pub fn with_state(&self, closure: impl FnOnce() -> R) -> R where - as CallExecutor>::Error: std::fmt::Debug, + as CallExecutor>::Error: + std::fmt::Debug, { let id = BlockId::Hash(self.client.info().best_hash); let mut overlay = OverlayedChanges::default(); - let changes_trie = backend::changes_tries_state_at_block(&id, self.backend.changes_trie_storage()).unwrap(); - let mut cache = - StorageTransactionCache:: as Backend>::State>::default(); + let changes_trie = + backend::changes_tries_state_at_block(&id, self.backend.changes_trie_storage()) + .unwrap(); + let mut cache = StorageTransactionCache::< + T::Block, + as Backend>::State, + >::default(); let mut extensions = self .client .execution_extensions() @@ -176,7 +197,9 @@ impl Node .expect("UncheckedExtrinsic::new() always returns Some"); let at = self.client.info().best_hash; - self.pool.submit_one(&BlockId::Hash(at), TransactionSource::Local, ext.into()).await + self.pool + .submit_one(&BlockId::Hash(at), TransactionSource::Local, ext.into()) + .await } /// Get 
the events of the most recently produced block @@ -186,7 +209,7 @@ impl Node /// Instructs manual seal to seal new, possibly empty blocks. pub async fn seal_blocks(&self, num: usize) { - let mut sink = self.manual_seal_command_sink.clone(); + let mut sink = self.manual_seal_command_sink.clone(); for count in 0..num { let (sender, future_block) = oneshot::channel(); @@ -201,8 +224,10 @@ impl Node future.await.expect(ERROR); match future_block.await.expect(ERROR) { - Ok(block) => log::info!("sealed {} (hash: {}) of {} blocks", count + 1, block.hash, num), - Err(err) => log::error!("failed to seal block {} of {}, error: {:?}", count + 1, num, err), + Ok(block) => + log::info!("sealed {} (hash: {}) of {} blocks", count + 1, block.hash, num), + Err(err) => + log::error!("failed to seal block {} of {}, error: {:?}", count + 1, num, err), } } } diff --git a/test-utils/test-runner/src/utils.rs b/test-utils/test-runner/src/utils.rs index 9e722bcc510aa..e0176fcb6cc29 100644 --- a/test-utils/test-runner/src/utils.rs +++ b/test-utils/test-runner/src/utils.rs @@ -16,18 +16,20 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . 
+use futures::FutureExt; +use sc_client_api::execution_extensions::ExecutionStrategies; +use sc_executor::WasmExecutionMethod; +use sc_informant::OutputFormat; +use sc_network::{ + config::{NetworkConfiguration, Role, TransportConfig}, + multiaddr, +}; use sc_service::{ - BasePath, ChainSpec, Configuration, TaskExecutor, - DatabaseConfig, KeepBlocks, TransactionStorageMode, TaskType, + config::KeystoreConfig, BasePath, ChainSpec, Configuration, DatabaseConfig, KeepBlocks, + TaskExecutor, TaskType, TransactionStorageMode, }; use sp_keyring::sr25519::Keyring::Alice; -use sc_network::{multiaddr, config::{NetworkConfiguration, TransportConfig, Role}}; -use sc_informant::OutputFormat; -use sc_service::config::KeystoreConfig; -use sc_executor::WasmExecutionMethod; -use sc_client_api::execution_extensions::ExecutionStrategies; use tokio::runtime::Handle; -use futures::FutureExt; pub use sc_cli::build_runtime; @@ -41,7 +43,10 @@ pub fn base_path() -> BasePath { } /// Produces a default configuration object, suitable for use with most set ups. 
-pub fn default_config(task_executor: TaskExecutor, mut chain_spec: Box) -> Configuration { +pub fn default_config( + task_executor: TaskExecutor, + mut chain_spec: Box, +) -> Configuration { let base_path = base_path(); let root_path = base_path.path().to_path_buf().join("chains").join(chain_spec.id()); @@ -62,9 +67,7 @@ pub fn default_config(task_executor: TaskExecutor, mut chain_spec: Box TaskExecutor { let task_executor = move |fut, task_type| match task_type { TaskType::Async => handle.spawn(fut).map(drop), - TaskType::Blocking => handle.spawn_blocking(move || futures::executor::block_on(fut)).map(drop), + TaskType::Blocking => + handle.spawn_blocking(move || futures::executor::block_on(fut)).map(drop), }; task_executor.into() diff --git a/utils/browser/src/lib.rs b/utils/browser/src/lib.rs index 0d4937ceeee43..0870ea84296c0 100644 --- a/utils/browser/src/lib.rs +++ b/utils/browser/src/lib.rs @@ -15,23 +15,25 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+use futures::{ + channel::{mpsc, oneshot}, + compat::*, + future::{ok, ready, select}, + prelude::*, +}; use futures01::sync::mpsc as mpsc01; +use libp2p_wasm_ext::{ffi, ExtTransport}; use log::{debug, info}; +use sc_chain_spec::Extension; use sc_network::config::TransportConfig; use sc_service::{ - RpcSession, Role, Configuration, TaskManager, RpcHandlers, config::{DatabaseConfig, KeystoreConfig, NetworkConfiguration}, - GenericChainSpec, RuntimeGenesis, - KeepBlocks, TransactionStorageMode, + Configuration, GenericChainSpec, KeepBlocks, Role, RpcHandlers, RpcSession, RuntimeGenesis, + TaskManager, TransactionStorageMode, }; use sc_tracing::logging::LoggerBuilder; -use wasm_bindgen::prelude::*; -use futures::{ - prelude::*, channel::{oneshot, mpsc}, compat::*, future::{ready, ok, select} -}; use std::pin::Pin; -use sc_chain_spec::Extension; -use libp2p_wasm_ext::{ExtTransport, ffi}; +use wasm_bindgen::prelude::*; pub use console_error_panic_hook::set_once as set_console_error_panic_hook; @@ -73,7 +75,8 @@ where task_executor: (|fut, _| { wasm_bindgen_futures::spawn_local(fut); async {} - }).into(), + }) + .into(), telemetry_external_transport: Some(transport), role: Role::Light, database: { @@ -114,9 +117,7 @@ where max_runtime_instances: 8, announce_block: true, base_path: None, - informant_output_format: sc_informant::OutputFormat { - enable_color: false, - }, + informant_output_format: sc_informant::OutputFormat { enable_color: false }, disable_log_reloading: false, }; @@ -153,12 +154,11 @@ pub fn start_client(mut task_manager: TaskManager, rpc_handlers: RpcHandlers) -> Box::pin(async move { let _ = task_manager.future().await; }), - ).map(drop) + ) + .map(drop), ); - Client { - rpc_send_tx, - } + Client { rpc_send_tx } } #[wasm_bindgen] @@ -175,12 +175,8 @@ impl Client { }); wasm_bindgen_futures::future_to_promise(async { match rx.await { - Ok(fut) => { - fut.await - .map(|s| JsValue::from_str(&s)) - .ok_or_else(|| JsValue::NULL) - }, - Err(_) => 
Err(JsValue::NULL) + Ok(fut) => fut.await.map(|s| JsValue::from_str(&s)).ok_or_else(|| JsValue::NULL), + Err(_) => Err(JsValue::NULL), } }) } @@ -203,7 +199,8 @@ impl Client { }); wasm_bindgen_futures::spawn_local(async move { - let _ = rx.compat() + let _ = rx + .compat() .try_for_each(|s| { let _ = callback.call1(&callback, &JsValue::from_str(&s)); ok(()) diff --git a/utils/build-script-utils/src/git.rs b/utils/build-script-utils/src/git.rs index d01343634bc94..66a15737f84ca 100644 --- a/utils/build-script-utils/src/git.rs +++ b/utils/build-script-utils/src/git.rs @@ -33,16 +33,16 @@ pub fn rerun_if_git_head_changed() { Err(err) => { eprintln!("cargo:warning=Unable to read the Git repository: {}", err); - return; - } - Ok(None) => {} + return + }, + Ok(None) => {}, Ok(Some(paths)) => { for p in paths { println!("cargo:rerun-if-changed={}", p.display()); } - return; - } + return + }, } manifest_dir.pop(); diff --git a/utils/build-script-utils/src/lib.rs b/utils/build-script-utils/src/lib.rs index 8eb17a7de61fb..0c45c4b34ebe8 100644 --- a/utils/build-script-utils/src/lib.rs +++ b/utils/build-script-utils/src/lib.rs @@ -17,8 +17,8 @@ //! Crate with utility functions for `build.rs` scripts. 
-mod version; mod git; +mod version; pub use git::*; pub use version::*; diff --git a/utils/build-script-utils/src/version.rs b/utils/build-script-utils/src/version.rs index f92c637c78cca..52336eb0b6a24 100644 --- a/utils/build-script-utils/src/version.rs +++ b/utils/build-script-utils/src/version.rs @@ -20,15 +20,13 @@ use std::{borrow::Cow, process::Command}; /// Generate the `cargo:` key output pub fn generate_cargo_keys() { - let output = Command::new("git") - .args(&["rev-parse", "--short", "HEAD"]) - .output(); + let output = Command::new("git").args(&["rev-parse", "--short", "HEAD"]).output(); let commit = match output { Ok(o) if o.status.success() => { let sha = String::from_utf8_lossy(&o.stdout).trim().to_owned(); Cow::from(sha) - } + }, Ok(o) => { println!("cargo:warning=Git command failed with status: {}", o.status); Cow::from("unknown") diff --git a/utils/fork-tree/src/lib.rs b/utils/fork-tree/src/lib.rs index d1ec67d37b954..cab3af18f8a59 100644 --- a/utils/fork-tree/src/lib.rs +++ b/utils/fork-tree/src/lib.rs @@ -20,9 +20,8 @@ #![warn(missing_docs)] -use std::cmp::Reverse; -use std::fmt; use codec::{Decode, Encode}; +use std::{cmp::Reverse, fmt}; /// Error occurred when iterating with the tree. 
#[derive(Clone, Debug, PartialEq)] @@ -83,7 +82,8 @@ pub struct ForkTree { best_finalized_number: Option, } -impl ForkTree where +impl ForkTree +where H: PartialEq + Clone, N: Ord + Clone, V: Clone, @@ -102,17 +102,14 @@ impl ForkTree where number: &N, is_descendent_of: &F, predicate: &P, - ) -> Result, Error> - where E: std::error::Error, - F: Fn(&H, &H) -> Result, - P: Fn(&V) -> bool, + ) -> Result, Error> + where + E: std::error::Error, + F: Fn(&H, &H) -> Result, + P: Fn(&V) -> bool, { - let new_root_index = self.find_node_index_where( - hash, - number, - is_descendent_of, - predicate, - )?; + let new_root_index = + self.find_node_index_where(hash, number, is_descendent_of, predicate)?; let removed = if let Some(mut root_index) = new_root_index { let mut old_roots = std::mem::take(&mut self.roots); @@ -130,9 +127,10 @@ impl ForkTree where } } - let mut root = root - .expect("find_node_index_where will return array with at least one index; \ - this results in at least one item in removed; qed"); + let mut root = root.expect( + "find_node_index_where will return array with at least one index; \ + this results in at least one item in removed; qed", + ); let mut removed = old_roots; @@ -144,7 +142,7 @@ impl ForkTree where for child in root_children { if is_first && (child.number == *number && child.hash == *hash || - child.number < *number && is_descendent_of(&child.hash, hash)?) + child.number < *number && is_descendent_of(&child.hash, hash)?) { root.children.push(child); // assuming that the tree is well formed only one child should pass this requirement @@ -168,16 +166,14 @@ impl ForkTree where } } -impl ForkTree where +impl ForkTree +where H: PartialEq, N: Ord, { /// Create a new empty tree. pub fn new() -> ForkTree { - ForkTree { - roots: Vec::new(), - best_finalized_number: None, - } + ForkTree { roots: Vec::new(), best_finalized_number: None } } /// Rebalance the tree, i.e. 
sort child nodes by max branch depth @@ -209,18 +205,19 @@ impl ForkTree where mut data: V, is_descendent_of: &F, ) -> Result> - where E: std::error::Error, - F: Fn(&H, &H) -> Result, + where + E: std::error::Error, + F: Fn(&H, &H) -> Result, { if let Some(ref best_finalized_number) = self.best_finalized_number { if number <= *best_finalized_number { - return Err(Error::Revert); + return Err(Error::Revert) } } for root in self.roots.iter_mut() { if root.hash == hash { - return Err(Error::Duplicate); + return Err(Error::Duplicate) } match root.import(hash, number, data, is_descendent_of)? { @@ -231,17 +228,12 @@ impl ForkTree where }, None => { self.rebalance(); - return Ok(false); + return Ok(false) }, } } - self.roots.push(Node { - data, - hash: hash, - number: number, - children: Vec::new(), - }); + self.roots.push(Node { data, hash, number, children: Vec::new() }); self.rebalance(); @@ -249,18 +241,18 @@ impl ForkTree where } /// Iterates over the existing roots in the tree. - pub fn roots(&self) -> impl Iterator { + pub fn roots(&self) -> impl Iterator { self.roots.iter().map(|node| (&node.hash, &node.number, &node.data)) } - fn node_iter(&self) -> impl Iterator> { + fn node_iter(&self) -> impl Iterator> { // we need to reverse the order of roots to maintain the expected // ordering since the iterator uses a stack to track state. ForkTreeIterator { stack: self.roots.iter().rev().collect() } } /// Iterates the nodes in the tree in pre-order. 
- pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> impl Iterator { self.node_iter().map(|node| (&node.hash, &node.number, &node.data)) } @@ -274,7 +266,8 @@ impl ForkTree where number: &N, is_descendent_of: &F, predicate: &P, - ) -> Result>, Error> where + ) -> Result>, Error> + where E: std::error::Error, F: Fn(&H, &H) -> Result, P: Fn(&V) -> bool, @@ -285,7 +278,7 @@ impl ForkTree where // found the node, early exit if let FindOutcome::Found(node) = node { - return Ok(Some(node)); + return Ok(Some(node)) } } @@ -293,23 +286,13 @@ impl ForkTree where } /// Map fork tree into values of new types. - pub fn map( - self, - f: &mut F, - ) -> ForkTree where + pub fn map(self, f: &mut F) -> ForkTree + where F: FnMut(&H, &N, V) -> VT, { - let roots = self.roots - .into_iter() - .map(|root| { - root.map(f) - }) - .collect(); - - ForkTree { - roots, - best_finalized_number: self.best_finalized_number, - } + let roots = self.roots.into_iter().map(|root| root.map(f)).collect(); + + ForkTree { roots, best_finalized_number: self.best_finalized_number } } /// Same as [`find_node_where`](ForkTree::find_node_where), but returns mutable reference. 
@@ -319,7 +302,8 @@ impl ForkTree where number: &N, is_descendent_of: &F, predicate: &P, - ) -> Result>, Error> where + ) -> Result>, Error> + where E: std::error::Error, F: Fn(&H, &H) -> Result, P: Fn(&V) -> bool, @@ -330,7 +314,7 @@ impl ForkTree where // found the node, early exit if let FindOutcome::Found(node) = node { - return Ok(Some(node)); + return Ok(Some(node)) } } @@ -344,7 +328,8 @@ impl ForkTree where number: &N, is_descendent_of: &F, predicate: &P, - ) -> Result>, Error> where + ) -> Result>, Error> + where E: std::error::Error, F: Fn(&H, &H) -> Result, P: Fn(&V) -> bool, @@ -356,7 +341,7 @@ impl ForkTree where // found the node, early exit if let FindOutcome::Found(mut node) = node { node.push(index); - return Ok(Some(node)); + return Ok(Some(node)) } } @@ -367,7 +352,9 @@ impl ForkTree where /// with the given hash exists. All other roots are pruned, and the children /// of the finalized node become the new roots. pub fn finalize_root(&mut self, hash: &H) -> Option { - self.roots.iter().position(|node| node.hash == *hash) + self.roots + .iter() + .position(|node| node.hash == *hash) .map(|position| self.finalize_root_at(position)) } @@ -376,7 +363,7 @@ impl ForkTree where let node = self.roots.swap_remove(position); self.roots = node.children; self.best_finalized_number = Some(node.number); - return node.data; + return node.data } /// Finalize a node in the tree. 
This method will make sure that the node @@ -390,24 +377,25 @@ impl ForkTree where number: N, is_descendent_of: &F, ) -> Result, Error> - where E: std::error::Error, - F: Fn(&H, &H) -> Result + where + E: std::error::Error, + F: Fn(&H, &H) -> Result, { if let Some(ref best_finalized_number) = self.best_finalized_number { if number <= *best_finalized_number { - return Err(Error::Revert); + return Err(Error::Revert) } } // check if one of the current roots is being finalized if let Some(root) = self.finalize_root(hash) { - return Ok(FinalizationResult::Changed(Some(root))); + return Ok(FinalizationResult::Changed(Some(root))) } // make sure we're not finalizing a descendent of any root for root in self.roots.iter() { if number > root.number && is_descendent_of(&root.hash, hash)? { - return Err(Error::UnfinalizedAncestor); + return Err(Error::UnfinalizedAncestor) } } @@ -443,18 +431,19 @@ impl ForkTree where number: N, is_descendent_of: &F, ) -> Result, Error> - where E: std::error::Error, - F: Fn(&H, &H) -> Result + where + E: std::error::Error, + F: Fn(&H, &H) -> Result, { if let Some(ref best_finalized_number) = self.best_finalized_number { if number <= *best_finalized_number { - return Err(Error::Revert); + return Err(Error::Revert) } } // check if one of the current roots is being finalized if let Some(root) = self.finalize_root(hash) { - return Ok(FinalizationResult::Changed(Some(root))); + return Ok(FinalizationResult::Changed(Some(root))) } // we need to: @@ -469,23 +458,21 @@ impl ForkTree where let is_finalized = root.hash == *hash; let is_descendant = !is_finalized && root.number > number && is_descendent_of(hash, &root.hash)?; - let is_ancestor = !is_finalized - && !is_descendant && root.number < number - && is_descendent_of(&root.hash, hash)?; + let is_ancestor = !is_finalized && + !is_descendant && root.number < number && + is_descendent_of(&root.hash, hash)?; (is_finalized, is_descendant, is_ancestor) }; // if we have met finalized root - open it and 
return if is_finalized { - return Ok(FinalizationResult::Changed(Some( - self.finalize_root_at(idx), - ))); + return Ok(FinalizationResult::Changed(Some(self.finalize_root_at(idx)))) } // if node is descendant of finalized block - just leave it as is if is_descendant { idx += 1; - continue; + continue } // if node is ancestor of finalized block - remove it and continue with children @@ -493,7 +480,7 @@ impl ForkTree where let root = self.roots.swap_remove(idx); self.roots.extend(root.children); changed = true; - continue; + continue } // if node is neither ancestor, nor descendant of the finalized block - remove it @@ -526,13 +513,14 @@ impl ForkTree where is_descendent_of: &F, predicate: P, ) -> Result, Error> - where E: std::error::Error, - F: Fn(&H, &H) -> Result, - P: Fn(&V) -> bool, + where + E: std::error::Error, + F: Fn(&H, &H) -> Result, + P: Fn(&V) -> bool, { if let Some(ref best_finalized_number) = self.best_finalized_number { if number <= *best_finalized_number { - return Err(Error::Revert); + return Err(Error::Revert) } } @@ -544,11 +532,11 @@ impl ForkTree where if node.hash == *hash || is_descendent_of(&node.hash, hash)? { for node in node.children.iter() { if node.number <= number && is_descendent_of(&node.hash, &hash)? 
{ - return Err(Error::UnfinalizedAncestor); + return Err(Error::UnfinalizedAncestor) } } - return Ok(Some(self.roots.iter().any(|root| root.hash == node.hash))); + return Ok(Some(self.roots.iter().any(|root| root.hash == node.hash))) } } } @@ -570,13 +558,14 @@ impl ForkTree where is_descendent_of: &F, predicate: P, ) -> Result, Error> - where E: std::error::Error, - F: Fn(&H, &H) -> Result, - P: Fn(&V) -> bool, + where + E: std::error::Error, + F: Fn(&H, &H) -> Result, + P: Fn(&V) -> bool, { if let Some(ref best_finalized_number) = self.best_finalized_number { if number <= *best_finalized_number { - return Err(Error::Revert); + return Err(Error::Revert) } } @@ -589,12 +578,12 @@ impl ForkTree where if root.hash == *hash || is_descendent_of(&root.hash, hash)? { for node in root.children.iter() { if node.number <= number && is_descendent_of(&node.hash, &hash)? { - return Err(Error::UnfinalizedAncestor); + return Err(Error::UnfinalizedAncestor) } } position = Some(i); - break; + break } } } @@ -616,9 +605,9 @@ impl ForkTree where let roots = std::mem::take(&mut self.roots); for root in roots { - let retain = root.number > number && is_descendent_of(hash, &root.hash)? - || root.number == number && root.hash == *hash - || is_descendent_of(&root.hash, hash)?; + let retain = root.number > number && is_descendent_of(hash, &root.hash)? || + root.number == number && root.hash == *hash || + is_descendent_of(&root.hash, hash)?; if retain { self.roots.push(root); @@ -681,26 +670,14 @@ mod node_implementation { } /// Map node data into values of new types. 
- pub fn map( - self, - f: &mut F, - ) -> Node where + pub fn map(self, f: &mut F) -> Node + where F: FnMut(&H, &N, V) -> VT, { - let children = self.children - .into_iter() - .map(|node| { - node.map(f) - }) - .collect(); + let children = self.children.into_iter().map(|node| node.map(f)).collect(); let vt = f(&self.hash, &self.number, self.data); - Node { - hash: self.hash, - number: self.number, - data: vt, - children, - } + Node { hash: self.hash, number: self.number, data: vt, children } } pub fn import( @@ -710,14 +687,17 @@ mod node_implementation { mut data: V, is_descendent_of: &F, ) -> Result, Error> - where E: fmt::Debug, - F: Fn(&H, &H) -> Result, + where + E: fmt::Debug, + F: Fn(&H, &H) -> Result, { if self.hash == hash { - return Err(Error::Duplicate); + return Err(Error::Duplicate) }; - if number <= self.number { return Ok(Some((hash, number, data))); } + if number <= self.number { + return Ok(Some((hash, number, data))) + } for node in self.children.iter_mut() { match node.import(hash, number, data, is_descendent_of)? { @@ -731,12 +711,7 @@ mod node_implementation { } if is_descendent_of(&self.hash, &hash)? { - self.children.push(Node { - data, - hash: hash, - number: number, - children: Vec::new(), - }); + self.children.push(Node { data, hash, number, children: Vec::new() }); Ok(None) } else { @@ -760,13 +735,14 @@ mod node_implementation { is_descendent_of: &F, predicate: &P, ) -> Result>, Error> - where E: std::error::Error, - F: Fn(&H, &H) -> Result, - P: Fn(&V) -> bool, + where + E: std::error::Error, + F: Fn(&H, &H) -> Result, + P: Fn(&V) -> bool, { // stop searching this branch if *number < self.number { - return Ok(FindOutcome::Failure(false)); + return Ok(FindOutcome::Failure(false)) } let mut known_descendent_of = false; @@ -785,7 +761,7 @@ mod node_implementation { // then it cannot be a descendent of any others, // so we don't search them. 
known_descendent_of = true; - break; + break }, FindOutcome::Failure(false) => {}, } @@ -799,7 +775,7 @@ mod node_implementation { if is_descendent_of { // if the predicate passes we return the node if predicate(&self.data) { - return Ok(FindOutcome::Found(Vec::new())); + return Ok(FindOutcome::Found(Vec::new())) } } @@ -820,9 +796,10 @@ mod node_implementation { is_descendent_of: &F, predicate: &P, ) -> Result>, Error> - where E: std::error::Error, - F: Fn(&H, &H) -> Result, - P: Fn(&V) -> bool, + where + E: std::error::Error, + F: Fn(&H, &H) -> Result, + P: Fn(&V) -> bool, { let outcome = self.find_node_index_where(hash, number, is_descendent_of, predicate)?; @@ -852,9 +829,10 @@ mod node_implementation { is_descendent_of: &F, predicate: &P, ) -> Result>, Error> - where E: std::error::Error, - F: Fn(&H, &H) -> Result, - P: Fn(&V) -> bool, + where + E: std::error::Error, + F: Fn(&H, &H) -> Result, + P: Fn(&V) -> bool, { let outcome = self.find_node_index_where(hash, number, is_descendent_of, predicate)?; @@ -875,7 +853,7 @@ mod node_implementation { } // Workaround for: https://github.com/rust-lang/rust/issues/34537 -use node_implementation::{Node, FindOutcome}; +use node_implementation::{FindOutcome, Node}; struct ForkTreeIterator<'a, H, N, V> { stack: Vec<&'a Node>, @@ -917,7 +895,7 @@ impl Iterator for RemovedIterator { #[cfg(test)] mod test { - use super::{FinalizationResult, ForkTree, Error}; + use super::{Error, FinalizationResult, ForkTree}; #[derive(Debug, PartialEq)] struct TestError; @@ -930,7 +908,8 @@ mod test { impl std::error::Error for TestError {} - fn test_fork_tree<'a>() -> (ForkTree<&'a str, u64, ()>, impl Fn(&&str, &&str) -> Result) { + fn test_fork_tree<'a>( + ) -> (ForkTree<&'a str, u64, ()>, impl Fn(&&str, &&str) -> Result) { let mut tree = ForkTree::new(); // @@ -959,7 +938,8 @@ mod test { ("C", b) => Ok(b == "D" || b == "E"), ("D", b) => Ok(b == "E"), ("E", _) => Ok(false), - ("F", b) => Ok(b == "G" || b == "H" || b == "I" || b == "L" || b 
== "M" || b == "O"), + ("F", b) => + Ok(b == "G" || b == "H" || b == "I" || b == "L" || b == "M" || b == "O"), ("G", _) => Ok(false), ("H", b) => Ok(b == "I" || b == "L" || b == "M" || b == "O"), ("I", _) => Ok(false), @@ -1001,40 +981,22 @@ mod test { tree.finalize_root(&"A"); - assert_eq!( - tree.best_finalized_number, - Some(1), - ); + assert_eq!(tree.best_finalized_number, Some(1),); - assert_eq!( - tree.import("A", 1, (), &is_descendent_of), - Err(Error::Revert), - ); + assert_eq!(tree.import("A", 1, (), &is_descendent_of), Err(Error::Revert),); } #[test] fn import_doesnt_add_duplicates() { let (mut tree, is_descendent_of) = test_fork_tree(); - assert_eq!( - tree.import("A", 1, (), &is_descendent_of), - Err(Error::Duplicate), - ); + assert_eq!(tree.import("A", 1, (), &is_descendent_of), Err(Error::Duplicate),); - assert_eq!( - tree.import("I", 4, (), &is_descendent_of), - Err(Error::Duplicate), - ); + assert_eq!(tree.import("I", 4, (), &is_descendent_of), Err(Error::Duplicate),); - assert_eq!( - tree.import("G", 3, (), &is_descendent_of), - Err(Error::Duplicate), - ); + assert_eq!(tree.import("G", 3, (), &is_descendent_of), Err(Error::Duplicate),); - assert_eq!( - tree.import("K", 3, (), &is_descendent_of), - Err(Error::Duplicate), - ); + assert_eq!(tree.import("K", 3, (), &is_descendent_of), Err(Error::Duplicate),); } #[test] @@ -1096,10 +1058,7 @@ mod test { let original_roots = tree.roots.clone(); // finalizing a block prior to any in the node doesn't change the tree - assert_eq!( - tree.finalize(&"0", 0, &is_descendent_of), - Ok(FinalizationResult::Unchanged), - ); + assert_eq!(tree.finalize(&"0", 0, &is_descendent_of), Ok(FinalizationResult::Unchanged),); assert_eq!(tree.roots, original_roots); @@ -1115,21 +1074,12 @@ mod test { ); // finalizing anything lower than what we observed will fail - assert_eq!( - tree.best_finalized_number, - Some(1), - ); + assert_eq!(tree.best_finalized_number, Some(1),); - assert_eq!( - tree.finalize(&"Z", 1, 
&is_descendent_of), - Err(Error::Revert), - ); + assert_eq!(tree.finalize(&"Z", 1, &is_descendent_of), Err(Error::Revert),); // trying to finalize a node without finalizing its ancestors first will fail - assert_eq!( - tree.finalize(&"H", 3, &is_descendent_of), - Err(Error::UnfinalizedAncestor), - ); + assert_eq!(tree.finalize(&"H", 3, &is_descendent_of), Err(Error::UnfinalizedAncestor),); // after finalizing "F" we can finalize "H" assert_eq!( @@ -1195,10 +1145,7 @@ mod test { vec![("L", 4), ("I", 4)], ); - assert_eq!( - tree.best_finalized_number, - Some(3), - ); + assert_eq!(tree.best_finalized_number, Some(3),); // finalizing N (which is not a part of the tree): // 1) removes roots that are not ancestors/descendants of N (I) @@ -1215,22 +1162,20 @@ mod test { vec![], ); - assert_eq!( - tree.best_finalized_number, - Some(6), - ); + assert_eq!(tree.best_finalized_number, Some(6),); } #[test] fn finalize_with_descendent_works() { #[derive(Debug, PartialEq)] - struct Change { effective: u64 } + struct Change { + effective: u64, + } let (mut tree, is_descendent_of) = { let mut tree = ForkTree::new(); let is_descendent_of = |base: &&str, block: &&str| -> Result { - // // A0 #1 - (B #2) - (C #5) - D #10 - E #15 - (F #100) // \ @@ -1270,24 +1215,15 @@ mod test { // finalizing "D" will finalize a block from the tree, but it can't be applied yet // since it is not a root change assert_eq!( - tree.finalizes_any_with_descendent_if( - &"D", - 10, - &is_descendent_of, - |c| c.effective == 10, - ), + tree.finalizes_any_with_descendent_if(&"D", 10, &is_descendent_of, |c| c.effective == + 10,), Ok(Some(false)), ); // finalizing "B" doesn't finalize "A0" since the predicate doesn't pass, // although it will clear out "A1" from the tree assert_eq!( - tree.finalize_with_descendent_if( - &"B", - 2, - &is_descendent_of, - |c| c.effective <= 2, - ), + tree.finalize_with_descendent_if(&"B", 2, &is_descendent_of, |c| c.effective <= 2,), Ok(FinalizationResult::Changed(None)), ); @@ 
-1308,12 +1244,7 @@ mod test { ); assert_eq!( - tree.finalize_with_descendent_if( - &"C", - 5, - &is_descendent_of, - |c| c.effective <= 5, - ), + tree.finalize_with_descendent_if(&"C", 5, &is_descendent_of, |c| c.effective <= 5,), Ok(FinalizationResult::Changed(Some(Change { effective: 5 }))), ); @@ -1324,33 +1255,20 @@ mod test { // finalizing "F" will fail since it would finalize past "E" without finalizing "D" first assert_eq!( - tree.finalizes_any_with_descendent_if( - &"F", - 100, - &is_descendent_of, - |c| c.effective <= 100, - ), + tree.finalizes_any_with_descendent_if(&"F", 100, &is_descendent_of, |c| c.effective <= + 100,), Err(Error::UnfinalizedAncestor), ); // it will work with "G" though since it is not in the same branch as "E" assert_eq!( - tree.finalizes_any_with_descendent_if( - &"G", - 100, - &is_descendent_of, - |c| c.effective <= 100, - ), + tree.finalizes_any_with_descendent_if(&"G", 100, &is_descendent_of, |c| c.effective <= + 100,), Ok(Some(true)), ); assert_eq!( - tree.finalize_with_descendent_if( - &"G", - 100, - &is_descendent_of, - |c| c.effective <= 100, - ), + tree.finalize_with_descendent_if(&"G", 100, &is_descendent_of, |c| c.effective <= 100,), Ok(FinalizationResult::Changed(Some(Change { effective: 10 }))), ); @@ -1365,12 +1283,19 @@ mod test { tree.iter().map(|(h, n, _)| (h.clone(), n.clone())).collect::>(), vec![ ("A", 1), - ("B", 2), ("C", 3), ("D", 4), ("E", 5), - ("F", 2), ("H", 3), ("L", 4), ("M", 5), + ("B", 2), + ("C", 3), + ("D", 4), + ("E", 5), + ("F", 2), + ("H", 3), + ("L", 4), + ("M", 5), ("O", 5), ("I", 4), ("G", 3), - ("J", 2), ("K", 3), + ("J", 2), + ("K", 3), ], ); } @@ -1400,19 +1325,11 @@ mod test { // "L" is a descendent of "K", but the predicate will only pass for "K", // therefore only one call to `is_descendent_of` should be made assert_eq!( - tree.finalizes_any_with_descendent_if( - &"L", - 11, - &is_descendent_of, - |i| *i == 10, - ), + tree.finalizes_any_with_descendent_if(&"L", 11, &is_descendent_of, |i| 
*i == 10,), Ok(Some(false)), ); - assert_eq!( - n_is_descendent_of_calls.load(Ordering::SeqCst), - 1, - ); + assert_eq!(n_is_descendent_of_calls.load(Ordering::SeqCst), 1,); } n_is_descendent_of_calls.store(0, Ordering::SeqCst); @@ -1431,19 +1348,11 @@ mod test { // "L" is a descendent of "K", but the predicate will only pass for "K", // therefore only one call to `is_descendent_of` should be made assert_eq!( - tree.finalize_with_descendent_if( - &"L", - 11, - &is_descendent_of, - |i| *i == 10, - ), + tree.finalize_with_descendent_if(&"L", 11, &is_descendent_of, |i| *i == 10,), Ok(FinalizationResult::Changed(Some(10))), ); - assert_eq!( - n_is_descendent_of_calls.load(Ordering::SeqCst), - 1, - ); + assert_eq!(n_is_descendent_of_calls.load(Ordering::SeqCst), 1,); } } @@ -1451,12 +1360,7 @@ mod test { fn find_node_works() { let (tree, is_descendent_of) = test_fork_tree(); - let node = tree.find_node_where( - &"D", - &4, - &is_descendent_of, - &|_| true, - ).unwrap().unwrap(); + let node = tree.find_node_where(&"D", &4, &is_descendent_of, &|_| true).unwrap().unwrap(); assert_eq!(node.hash, "C"); assert_eq!(node.number, 3); @@ -1473,17 +1377,9 @@ mod test { fn prune_works() { let (mut tree, is_descendent_of) = test_fork_tree(); - let removed = tree.prune( - &"C", - &3, - &is_descendent_of, - &|_| true, - ).unwrap(); + let removed = tree.prune(&"C", &3, &is_descendent_of, &|_| true).unwrap(); - assert_eq!( - tree.roots.iter().map(|node| node.hash).collect::>(), - vec!["B"], - ); + assert_eq!(tree.roots.iter().map(|node| node.hash).collect::>(), vec!["B"],); assert_eq!( tree.iter().map(|(hash, _, _)| *hash).collect::>(), @@ -1495,27 +1391,13 @@ mod test { vec!["A", "F", "H", "L", "M", "O", "I", "G", "J", "K"] ); - let removed = tree.prune( - &"E", - &5, - &is_descendent_of, - &|_| true, - ).unwrap(); + let removed = tree.prune(&"E", &5, &is_descendent_of, &|_| true).unwrap(); - assert_eq!( - tree.roots.iter().map(|node| node.hash).collect::>(), - vec!["D"], - ); + 
assert_eq!(tree.roots.iter().map(|node| node.hash).collect::>(), vec!["D"],); - assert_eq!( - tree.iter().map(|(hash, _, _)| *hash).collect::>(), - vec!["D", "E"], - ); + assert_eq!(tree.iter().map(|(hash, _, _)| *hash).collect::>(), vec!["D", "E"],); - assert_eq!( - removed.map(|(hash, _, _)| hash).collect::>(), - vec!["B", "C"] - ); + assert_eq!(removed.map(|(hash, _, _)| hash).collect::>(), vec!["B", "C"]); } #[test] @@ -1543,12 +1425,7 @@ mod test { // when searching the tree we reach node `C`, but the // predicate doesn't pass. we should backtrack to `B`, but not to `A`, // since "B" fulfills the predicate. - let node = tree.find_node_where( - &"D", - &3, - &is_descendent_of, - &|data| *data < 3, - ).unwrap(); + let node = tree.find_node_where(&"D", &3, &is_descendent_of, &|data| *data < 3).unwrap(); assert_eq!(node.unwrap().hash, "B"); } diff --git a/utils/frame/benchmarking-cli/src/command.rs b/utils/frame/benchmarking-cli/src/command.rs index 3bfb639dd9eb7..2ef9f3914a5d7 100644 --- a/utils/frame/benchmarking-cli/src/command.rs +++ b/utils/frame/benchmarking-cli/src/command.rs @@ -19,7 +19,7 @@ use crate::BenchmarkCmd; use codec::{Decode, Encode}; use frame_benchmarking::{Analysis, BenchmarkBatch, BenchmarkSelector}; use frame_support::traits::StorageInfo; -use sc_cli::{SharedParams, CliConfiguration, ExecutionStrategy, Result}; +use sc_cli::{CliConfiguration, ExecutionStrategy, Result, SharedParams}; use sc_client_db::BenchmarkingState; use sc_executor::NativeExecutor; use sc_service::{Configuration, NativeExecutionDispatch}; @@ -49,11 +49,15 @@ impl BenchmarkCmd { } if let Some(header_file) = &self.header { - if !header_file.is_file() { return Err("Header file is invalid!".into()) }; + if !header_file.is_file() { + return Err("Header file is invalid!".into()) + }; } if let Some(handlebars_template_file) = &self.template { - if !handlebars_template_file.is_file() { return Err("Handlebars template file is invalid!".into()) }; + if 
!handlebars_template_file.is_file() { + return Err("Handlebars template file is invalid!".into()) + }; } let spec = config.chain_spec; @@ -93,7 +97,8 @@ impl BenchmarkCmd { self.repeat, !self.no_verify, self.extra, - ).encode(), + ) + .encode(), extensions, &sp_state_machine::backend::BackendRuntimeCode::new(&state).runtime_code()?, sp_core::testing::TaskExecutor::new(), @@ -126,20 +131,25 @@ impl BenchmarkCmd { ); // Skip raw data + analysis if there are no results - if batch.results.is_empty() { continue } + if batch.results.is_empty() { + continue + } if self.raw_data { // Print the table header - batch.results[0].components.iter().for_each(|param| print!("{:?},", param.0)); + batch.results[0] + .components + .iter() + .for_each(|param| print!("{:?},", param.0)); print!("extrinsic_time_ns,storage_root_time_ns,reads,repeat_reads,writes,repeat_writes,proof_size_bytes\n"); // Print the values batch.results.iter().for_each(|result| { - let parameters = &result.components; parameters.iter().for_each(|param| print!("{:?},", param.1)); // Print extrinsic time and storage root time - print!("{:?},{:?},{:?},{:?},{:?},{:?},{:?}\n", + print!( + "{:?},{:?},{:?},{:?},{:?},{:?},{:?}\n", result.extrinsic_time, result.storage_root_time, result.reads, @@ -156,25 +166,39 @@ impl BenchmarkCmd { // Conduct analysis. 
if !self.no_median_slopes { println!("Median Slopes Analysis\n========"); - if let Some(analysis) = Analysis::median_slopes(&batch.results, BenchmarkSelector::ExtrinsicTime) { + if let Some(analysis) = Analysis::median_slopes( + &batch.results, + BenchmarkSelector::ExtrinsicTime, + ) { println!("-- Extrinsic Time --\n{}", analysis); } - if let Some(analysis) = Analysis::median_slopes(&batch.results, BenchmarkSelector::Reads) { + if let Some(analysis) = + Analysis::median_slopes(&batch.results, BenchmarkSelector::Reads) + { println!("Reads = {:?}", analysis); } - if let Some(analysis) = Analysis::median_slopes(&batch.results, BenchmarkSelector::Writes) { + if let Some(analysis) = + Analysis::median_slopes(&batch.results, BenchmarkSelector::Writes) + { println!("Writes = {:?}", analysis); } } if !self.no_min_squares { println!("Min Squares Analysis\n========"); - if let Some(analysis) = Analysis::min_squares_iqr(&batch.results, BenchmarkSelector::ExtrinsicTime) { + if let Some(analysis) = Analysis::min_squares_iqr( + &batch.results, + BenchmarkSelector::ExtrinsicTime, + ) { println!("-- Extrinsic Time --\n{}", analysis); } - if let Some(analysis) = Analysis::min_squares_iqr(&batch.results, BenchmarkSelector::Reads) { + if let Some(analysis) = + Analysis::min_squares_iqr(&batch.results, BenchmarkSelector::Reads) + { println!("Reads = {:?}", analysis); } - if let Some(analysis) = Analysis::min_squares_iqr(&batch.results, BenchmarkSelector::Writes) { + if let Some(analysis) = + Analysis::min_squares_iqr(&batch.results, BenchmarkSelector::Writes) + { println!("Writes = {:?}", analysis); } } diff --git a/utils/frame/benchmarking-cli/src/writer.rs b/utils/frame/benchmarking-cli/src/writer.rs index 64a4ea62f0d4c..16c93081ac6e1 100644 --- a/utils/frame/benchmarking-cli/src/writer.rs +++ b/utils/frame/benchmarking-cli/src/writer.rs @@ -17,21 +17,23 @@ // Outputs benchmark results to Rust files that can be ingested by the runtime. 
-use std::collections::{HashMap, HashSet}; -use std::fs; -use std::path::PathBuf; use core::convert::TryInto; +use std::{ + collections::{HashMap, HashSet}, + fs, + path::PathBuf, +}; -use serde::Serialize; use inflector::Inflector; +use serde::Serialize; use crate::BenchmarkCmd; use frame_benchmarking::{ - BenchmarkBatch, BenchmarkSelector, Analysis, AnalysisChoice, RegressionModel, BenchmarkResults, + Analysis, AnalysisChoice, BenchmarkBatch, BenchmarkResults, BenchmarkSelector, RegressionModel, }; +use frame_support::traits::StorageInfo; use sp_core::hexdisplay::HexDisplay; use sp_runtime::traits::Zero; -use frame_support::traits::StorageInfo; const VERSION: &'static str = env!("CARGO_PKG_VERSION"); const TEMPLATE: &str = include_str!("./template.hbs"); @@ -117,7 +119,9 @@ fn map_results( analysis_choice: &AnalysisChoice, ) -> Result>, std::io::Error> { // Skip if batches is empty. - if batches.is_empty() { return Err(io_error("empty batches")) } + if batches.is_empty() { + return Err(io_error("empty batches")) + } let mut all_benchmarks = HashMap::new(); let mut pallet_benchmarks = Vec::new(); @@ -125,7 +129,9 @@ fn map_results( let mut batches_iter = batches.iter().peekable(); while let Some(batch) = batches_iter.next() { // Skip if there are no results - if batch.results.is_empty() { continue } + if batch.results.is_empty() { + continue + } let pallet_string = String::from_utf8(batch.pallet.clone()).unwrap(); let instance_string = String::from_utf8(batch.instance.clone()).unwrap(); @@ -150,13 +156,11 @@ fn map_results( } // Get an iterator of errors from a model. If the model is `None` all errors are zero. 
-fn extract_errors(model: &Option) -> impl Iterator + '_ { +fn extract_errors(model: &Option) -> impl Iterator + '_ { let mut errors = model.as_ref().map(|m| m.se.regressor_values.iter()); - std::iter::from_fn(move || { - match &mut errors { - Some(model) => model.next().map(|val| *val as u128), - _ => Some(0), - } + std::iter::from_fn(move || match &mut errors { + Some(model) => model.next().map(|val| *val as u128), + _ => Some(0), }) } @@ -189,12 +193,16 @@ fn get_benchmark_data( let mut used_reads = Vec::new(); let mut used_writes = Vec::new(); - extrinsic_time.slopes.into_iter() + extrinsic_time + .slopes + .into_iter() .zip(extrinsic_time.names.iter()) .zip(extract_errors(&extrinsic_time.model)) .for_each(|((slope, name), error)| { if !slope.is_zero() { - if !used_components.contains(&name) { used_components.push(name); } + if !used_components.contains(&name) { + used_components.push(name); + } used_extrinsic_time.push(ComponentSlope { name: name.clone(), slope: slope.saturating_mul(1000), @@ -202,35 +210,36 @@ fn get_benchmark_data( }); } }); - reads.slopes.into_iter() + reads + .slopes + .into_iter() .zip(reads.names.iter()) .zip(extract_errors(&reads.model)) .for_each(|((slope, name), error)| { if !slope.is_zero() { - if !used_components.contains(&name) { used_components.push(name); } - used_reads.push(ComponentSlope { - name: name.clone(), - slope, - error, - }); + if !used_components.contains(&name) { + used_components.push(name); + } + used_reads.push(ComponentSlope { name: name.clone(), slope, error }); } }); - writes.slopes.into_iter() + writes + .slopes + .into_iter() .zip(writes.names.iter()) .zip(extract_errors(&writes.model)) .for_each(|((slope, name), error)| { if !slope.is_zero() { - if !used_components.contains(&name) { used_components.push(name); } - used_writes.push(ComponentSlope { - name: name.clone(), - slope, - error, - }); + if !used_components.contains(&name) { + used_components.push(name); + } + used_writes.push(ComponentSlope { name: 
name.clone(), slope, error }); } }); // This puts a marker on any component which is entirely unused in the weight formula. - let components = batch.results[0].components + let components = batch.results[0] + .components .iter() .map(|(name, _)| -> Component { let name_string = name.to_string(); @@ -264,12 +273,8 @@ pub fn write_results( ) -> Result<(), std::io::Error> { // Use custom template if provided. let template: String = match &cmd.template { - Some(template_file) => { - fs::read_to_string(template_file)? - }, - None => { - TEMPLATE.to_string() - }, + Some(template_file) => fs::read_to_string(template_file)?, + None => TEMPLATE.to_string(), }; // Use header if provided @@ -288,9 +293,8 @@ pub fn write_results( let args = std::env::args().collect::>(); // Which analysis function should be used when outputting benchmarks - let analysis_choice: AnalysisChoice = cmd.output_analysis.clone() - .try_into() - .map_err(|e| io_error(e))?; + let analysis_choice: AnalysisChoice = + cmd.output_analysis.clone().try_into().map_err(|e| io_error(e))?; // Capture individual args let cmd_data = CmdData { @@ -341,7 +345,8 @@ pub fn write_results( }; let mut output_file = fs::File::create(file_path)?; - handlebars.render_template_to_write(&template, &hbs_data, &mut output_file) + handlebars + .render_template_to_write(&template, &hbs_data, &mut output_file) .map_err(|e| io_error(&e.to_string()))?; } Ok(()) @@ -355,7 +360,9 @@ fn add_storage_comments( results: &[BenchmarkResults], storage_info: &[StorageInfo], ) { - let storage_info_map = storage_info.iter().map(|info| (info.prefix.clone(), info)) + let storage_info_map = storage_info + .iter() + .map(|info| (info.prefix.clone(), info)) .collect::>(); // This tracks the keys we already identified, so we only generate a single comment. 
let mut identified = HashSet::>::new(); @@ -363,12 +370,14 @@ fn add_storage_comments( for result in results.clone() { for (key, reads, writes, whitelisted) in &result.keys { // skip keys which are whitelisted - if *whitelisted { continue; } + if *whitelisted { + continue + } let prefix_length = key.len().min(32); let prefix = key[0..prefix_length].to_vec(); if identified.contains(&prefix) { // skip adding comments for keys we already identified - continue; + continue } else { // track newly identified keys identified.insert(prefix.clone()); @@ -377,8 +386,10 @@ fn add_storage_comments( Some(key_info) => { let comment = format!( "Storage: {} {} (r:{} w:{})", - String::from_utf8(key_info.pallet_name.clone()).expect("encoded from string"), - String::from_utf8(key_info.storage_name.clone()).expect("encoded from string"), + String::from_utf8(key_info.pallet_name.clone()) + .expect("encoded from string"), + String::from_utf8(key_info.storage_name.clone()) + .expect("encoded from string"), reads, writes, ); @@ -392,7 +403,7 @@ fn add_storage_comments( writes, ); comments.push(comment) - } + }, } } } @@ -400,7 +411,8 @@ fn add_storage_comments( // Add an underscore after every 3rd character, i.e. a separator for large numbers. 
fn underscore(i: Number) -> String - where Number: std::string::ToString +where + Number: std::string::ToString, { let mut s = String::new(); let i_str = i.to_string(); @@ -420,11 +432,12 @@ fn underscore(i: Number) -> String struct UnderscoreHelper; impl handlebars::HelperDef for UnderscoreHelper { fn call<'reg: 'rc, 'rc>( - &self, h: &handlebars::Helper, + &self, + h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, - out: &mut dyn handlebars::Output + out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).unwrap(); @@ -439,17 +452,20 @@ impl handlebars::HelperDef for UnderscoreHelper { struct JoinHelper; impl handlebars::HelperDef for JoinHelper { fn call<'reg: 'rc, 'rc>( - &self, h: &handlebars::Helper, + &self, + h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, _rc: &mut handlebars::RenderContext, - out: &mut dyn handlebars::Output + out: &mut dyn handlebars::Output, ) -> handlebars::HelperResult { use handlebars::JsonRender; let param = h.param(0).unwrap(); let value = param.value(); let joined = if value.is_array() { - value.as_array().unwrap() + value + .as_array() + .unwrap() .iter() .map(|v| v.render()) .collect::>() @@ -465,9 +481,9 @@ impl handlebars::HelperDef for JoinHelper { // u128 does not serialize well into JSON for `handlebars`, so we represent it as a string. 
fn string_serialize(x: &u128, s: S) -> Result where - S: serde::Serializer, + S: serde::Serializer, { - s.serialize_str(&x.to_string()) + s.serialize_str(&x.to_string()) } #[cfg(test)] @@ -475,22 +491,26 @@ mod test { use super::*; use frame_benchmarking::{BenchmarkBatch, BenchmarkParameter, BenchmarkResults}; - fn test_data(pallet: &[u8], benchmark: &[u8], param: BenchmarkParameter, base: u32, slope: u32) -> BenchmarkBatch { + fn test_data( + pallet: &[u8], + benchmark: &[u8], + param: BenchmarkParameter, + base: u32, + slope: u32, + ) -> BenchmarkBatch { let mut results = Vec::new(); - for i in 0 .. 5 { - results.push( - BenchmarkResults { - components: vec![(param, i), (BenchmarkParameter::z, 0)], - extrinsic_time: (base + slope * i).into(), - storage_root_time: (base + slope * i).into(), - reads: (base + slope * i).into(), - repeat_reads: 0, - writes: (base + slope * i).into(), - repeat_writes: 0, - proof_size: 0, - keys: vec![], - } - ) + for i in 0..5 { + results.push(BenchmarkResults { + components: vec![(param, i), (BenchmarkParameter::z, 0)], + extrinsic_time: (base + slope * i).into(), + storage_root_time: (base + slope * i).into(), + reads: (base + slope * i).into(), + repeat_reads: 0, + writes: (base + slope * i).into(), + repeat_writes: 0, + proof_size: 0, + keys: vec![], + }) } return BenchmarkBatch { @@ -506,37 +526,25 @@ mod test { benchmark.components, vec![ Component { name: component.to_string(), is_used: true }, - Component { name: "z".to_string(), is_used: false}, + Component { name: "z".to_string(), is_used: false }, ], ); // Weights multiplied by 1,000 assert_eq!(benchmark.base_weight, base * 1_000); assert_eq!( benchmark.component_weight, - vec![ComponentSlope { - name: component.to_string(), - slope: slope * 1_000, - error: 0, - }] + vec![ComponentSlope { name: component.to_string(), slope: slope * 1_000, error: 0 }] ); // DB Reads/Writes are untouched assert_eq!(benchmark.base_reads, base); assert_eq!( benchmark.component_reads, - 
vec![ComponentSlope { - name: component.to_string(), - slope, - error: 0, - }] + vec![ComponentSlope { name: component.to_string(), slope, error: 0 }] ); assert_eq!(benchmark.base_writes, base); assert_eq!( benchmark.component_writes, - vec![ComponentSlope { - name: component.to_string(), - slope, - error: 0, - }] + vec![ComponentSlope { name: component.to_string(), slope, error: 0 }] ); } @@ -550,23 +558,24 @@ mod test { ], &[], &AnalysisChoice::default(), - ).unwrap(); + ) + .unwrap(); - let first_benchmark = &mapped_results.get( - &("first_pallet".to_string(), "instance".to_string()) - ).unwrap()[0]; + let first_benchmark = &mapped_results + .get(&("first_pallet".to_string(), "instance".to_string())) + .unwrap()[0]; assert_eq!(first_benchmark.name, "first_benchmark"); check_data(first_benchmark, "a", 10, 3); - let second_benchmark = &mapped_results.get( - &("first_pallet".to_string(), "instance".to_string()) - ).unwrap()[1]; + let second_benchmark = &mapped_results + .get(&("first_pallet".to_string(), "instance".to_string())) + .unwrap()[1]; assert_eq!(second_benchmark.name, "second_benchmark"); check_data(second_benchmark, "b", 9, 2); - let second_pallet_benchmark = &mapped_results.get( - &("second_pallet".to_string(), "instance".to_string()) - ).unwrap()[0]; + let second_pallet_benchmark = &mapped_results + .get(&("second_pallet".to_string(), "instance".to_string())) + .unwrap()[0]; assert_eq!(second_pallet_benchmark.name, "first_benchmark"); check_data(second_pallet_benchmark, "c", 3, 4); } diff --git a/utils/frame/frame-utilities-cli/src/lib.rs b/utils/frame/frame-utilities-cli/src/lib.rs index 83f3e9ea00d45..4f5b1da5766a3 100644 --- a/utils/frame/frame-utilities-cli/src/lib.rs +++ b/utils/frame/frame-utilities-cli/src/lib.rs @@ -20,4 +20,3 @@ mod pallet_id; pub use pallet_id::PalletIdCmd; - diff --git a/utils/frame/frame-utilities-cli/src/pallet_id.rs b/utils/frame/frame-utilities-cli/src/pallet_id.rs index 09304979cb09f..2caac7db588a9 100644 --- 
a/utils/frame/frame-utilities-cli/src/pallet_id.rs +++ b/utils/frame/frame-utilities-cli/src/pallet_id.rs @@ -17,22 +17,19 @@ //! Implementation of the `palletid` subcommand +use frame_support::PalletId; use sc_cli::{ - Error, utils::print_from_uri, CryptoSchemeFlag, - OutputTypeFlag, KeystoreParams, with_crypto_scheme, + utils::print_from_uri, with_crypto_scheme, CryptoSchemeFlag, Error, KeystoreParams, + OutputTypeFlag, }; +use sp_core::crypto::{Ss58AddressFormat, Ss58Codec}; use sp_runtime::traits::AccountIdConversion; -use sp_core::crypto::{Ss58Codec, Ss58AddressFormat}; -use std::convert::{TryInto, TryFrom}; +use std::convert::{TryFrom, TryInto}; use structopt::StructOpt; -use frame_support::PalletId; /// The `palletid` command #[derive(Debug, StructOpt)] -#[structopt( - name = "palletid", - about = "Inspect a module ID address" -)] +#[structopt(name = "palletid", about = "Inspect a module ID address")] pub struct PalletIdCmd { /// The module ID used to derive the account id: String, @@ -63,18 +60,18 @@ pub struct PalletIdCmd { impl PalletIdCmd { /// runs the command pub fn run(&self) -> Result<(), Error> - where - R: frame_system::Config, - R::AccountId: Ss58Codec, + where + R: frame_system::Config, + R::AccountId: Ss58Codec, { if self.id.len() != 8 { Err("a module id must be a string of 8 characters")? 
} let password = self.keystore_params.read_password()?; - let id_fixed_array: [u8; 8] = self.id.as_bytes() - .try_into() - .map_err(|_| "Cannot convert argument to palletid: argument should be 8-character string")?; + let id_fixed_array: [u8; 8] = self.id.as_bytes().try_into().map_err(|_| { + "Cannot convert argument to palletid: argument should be 8-character string" + })?; let account_id: R::AccountId = PalletId(id_fixed_array).into_account(); @@ -91,4 +88,3 @@ impl PalletIdCmd { Ok(()) } } - diff --git a/utils/frame/remote-externalities/src/lib.rs b/utils/frame/remote-externalities/src/lib.rs index 4c1aeccf5041c..0ad6ae578b06c 100644 --- a/utils/frame/remote-externalities/src/lib.rs +++ b/utils/frame/remote-externalities/src/lib.rs @@ -20,21 +20,19 @@ //! An equivalent of `sp_io::TestExternalities` that can load its state from a remote substrate //! based chain, or a local state snapshot file. -use std::{ - fs, - path::{Path, PathBuf}, -}; +use codec::{Decode, Encode}; +use jsonrpsee_ws_client::{types::v2::params::JsonRpcParams, WsClient, WsClientBuilder}; use log::*; -use sp_core::hashing::twox_128; -pub use sp_io::TestExternalities; use sp_core::{ + hashing::twox_128, hexdisplay::HexDisplay, - storage::{StorageKey, StorageData}, + storage::{StorageData, StorageKey}, }; -use codec::{Encode, Decode}; +pub use sp_io::TestExternalities; use sp_runtime::traits::Block as BlockT; -use jsonrpsee_ws_client::{ - WsClientBuilder, WsClient, types::v2::params::JsonRpcParams, +use std::{ + fs, + path::{Path, PathBuf}, }; pub mod rpc_api; @@ -122,7 +120,10 @@ pub struct OnlineConfig { impl OnlineConfig { /// Return rpc (ws) client. fn rpc_client(&self) -> &WsClient { - self.transport.client.as_ref().expect("ws client must have been initialized by now; qed.") + self.transport + .client + .as_ref() + .expect("ws client must have been initialized by now; qed.") } } @@ -137,7 +138,6 @@ impl Default for OnlineConfig { } } - /// Configuration of the state snapshot. 
#[derive(Clone)] pub struct SnapshotConfig { @@ -208,10 +208,12 @@ impl Builder { maybe_at: Option, ) -> Result { trace!(target: LOG_TARGET, "rpc: get_storage"); - RpcApi::::get_storage(self.as_online().rpc_client(), key, maybe_at).await.map_err(|e| { - error!("Error = {:?}", e); - "rpc get_storage failed." - }) + RpcApi::::get_storage(self.as_online().rpc_client(), key, maybe_at) + .await + .map_err(|e| { + error!("Error = {:?}", e); + "rpc get_storage failed." + }) } /// Get the latest finalized head. async fn rpc_get_head(&self) -> Result { @@ -249,7 +251,7 @@ impl Builder { if page_len < PAGE as usize { debug!(target: LOG_TARGET, "last page received: {}", page_len); - break all_keys; + break all_keys } else { let new_last_key = all_keys.last().expect("all_keys is populated; has .last(); qed"); @@ -290,21 +292,22 @@ impl Builder { .map(|key| { ( "state_getStorage", - JsonRpcParams::Array( - vec![ - to_value(key).expect("json serialization will work; qed."), - to_value(at).expect("json serialization will work; qed."), - ] - ), + JsonRpcParams::Array(vec![ + to_value(key).expect("json serialization will work; qed."), + to_value(at).expect("json serialization will work; qed."), + ]), ) }) .collect::>(); - let values = client.batch_request::>(batch) - .await - .map_err(|e| { - log::error!(target: LOG_TARGET, "failed to execute batch: {:?}. Error: {:?}", chunk_keys, e); - "batch failed." - })?; + let values = client.batch_request::>(batch).await.map_err(|e| { + log::error!( + target: LOG_TARGET, + "failed to execute batch: {:?}. Error: {:?}", + chunk_keys, + e + ); + "batch failed." 
+ })?; assert_eq!(chunk_keys.len(), values.len()); for (idx, key) in chunk_keys.into_iter().enumerate() { let maybe_value = values[idx].clone(); @@ -428,7 +431,7 @@ impl Builder { self.save_state_snapshot(&kp, &c.path)?; } kp - } + }, }; info!( @@ -497,7 +500,7 @@ impl Builder { #[cfg(test)] mod test_prelude { pub(crate) use super::*; - pub(crate) use sp_runtime::testing::{H256 as Hash, Block as RawBlock, ExtrinsicWrapper}; + pub(crate) use sp_runtime::testing::{Block as RawBlock, ExtrinsicWrapper, H256 as Hash}; pub(crate) type Block = RawBlock>; @@ -551,7 +554,11 @@ mod remote_tests { init_logger(); Builder::::new() .mode(Mode::Online(OnlineConfig { - modules: vec!["Proxy".to_owned(), "Multisig".to_owned(), "PhragmenElection".to_owned()], + modules: vec![ + "Proxy".to_owned(), + "Multisig".to_owned(), + "PhragmenElection".to_owned(), + ], ..Default::default() })) .build() diff --git a/utils/frame/remote-externalities/src/rpc_api.rs b/utils/frame/remote-externalities/src/rpc_api.rs index 59d6bba8dd867..be77cd9499191 100644 --- a/utils/frame/remote-externalities/src/rpc_api.rs +++ b/utils/frame/remote-externalities/src/rpc_api.rs @@ -18,14 +18,13 @@ //! WS RPC API for one off RPC calls to a substrate node. 
// TODO: Consolidate one off RPC calls https://github.com/paritytech/substrate/issues/8988 -use sp_runtime::{generic::SignedBlock, traits::{Block as BlockT, Header as HeaderT}}; use jsonrpsee_ws_client::{ - WsClientBuilder, - WsClient, - types::{ - v2::params::JsonRpcParams, - traits::Client - }, + types::{traits::Client, v2::params::JsonRpcParams}, + WsClient, WsClientBuilder, +}; +use sp_runtime::{ + generic::SignedBlock, + traits::{Block as BlockT, Header as HeaderT}, }; /// Get the header of the block identified by `at` @@ -38,7 +37,8 @@ where let params = vec![hash_to_json::(at)?]; let client = build_client(from).await?; - client.request::("chain_getHeader", JsonRpcParams::Array(params)) + client + .request::("chain_getHeader", JsonRpcParams::Array(params)) .await .map_err(|e| format!("chain_getHeader request failed: {:?}", e)) } @@ -51,7 +51,8 @@ where { let client = build_client(from).await?; - client.request::("chain_getFinalizedHead", JsonRpcParams::NoParams) + client + .request::("chain_getFinalizedHead", JsonRpcParams::NoParams) .await .map_err(|e| format!("chain_getFinalizedHead request failed: {:?}", e)) } @@ -81,7 +82,7 @@ fn hash_to_json(hash: Block::Hash) -> Result>(from: S) -> Result { - WsClientBuilder::default() + WsClientBuilder::default() .max_request_body_size(u32::MAX) .build(from.as_ref()) .await diff --git a/utils/frame/rpc/support/src/lib.rs b/utils/frame/rpc/support/src/lib.rs index 417f2bfc22ac8..49fef4ffd603c 100644 --- a/utils/frame/rpc/support/src/lib.rs +++ b/utils/frame/rpc/support/src/lib.rs @@ -20,16 +20,14 @@ #![warn(missing_docs)] +use codec::{DecodeAll, FullCodec, FullEncode}; use core::marker::PhantomData; +use frame_support::storage::generator::{StorageDoubleMap, StorageMap, StorageValue}; use futures::compat::Future01CompatExt; use jsonrpc_client_transports::RpcError; -use codec::{DecodeAll, FullCodec, FullEncode}; +use sc_rpc_api::state::StateClient; use serde::{de::DeserializeOwned, Serialize}; -use 
frame_support::storage::generator::{ - StorageDoubleMap, StorageMap, StorageValue -}; use sp_storage::{StorageData, StorageKey}; -use sc_rpc_api::state::StateClient; /// A typed query on chain state usable from an RPC client. /// @@ -98,18 +96,12 @@ pub struct StorageQuery { impl StorageQuery { /// Create a storage query for a StorageValue. pub fn value>() -> Self { - Self { - key: StorageKey(St::storage_value_final_key().to_vec()), - _spook: PhantomData, - } + Self { key: StorageKey(St::storage_value_final_key().to_vec()), _spook: PhantomData } } /// Create a storage query for a value in a StorageMap. pub fn map, K: FullEncode>(key: K) -> Self { - Self { - key: StorageKey(St::storage_map_final_key(key)), - _spook: PhantomData, - } + Self { key: StorageKey(St::storage_map_final_key(key)), _spook: PhantomData } } /// Create a storage query for a value in a StorageDoubleMap. @@ -117,10 +109,7 @@ impl StorageQuery { key1: K1, key2: K2, ) -> Self { - Self { - key: StorageKey(St::storage_double_map_final_key(key1, key2)), - _spook: PhantomData, - } + Self { key: StorageKey(St::storage_double_map_final_key(key1, key2)), _spook: PhantomData } } /// Send this query over RPC, await the typed result. 
diff --git a/utils/frame/rpc/system/src/lib.rs b/utils/frame/rpc/system/src/lib.rs index e80d457de98dd..64c25157dbe21 100644 --- a/utils/frame/rpc/system/src/lib.rs +++ b/utils/frame/rpc/system/src/lib.rs @@ -20,28 +20,22 @@ use std::sync::Arc; use codec::{self, Codec, Decode, Encode}; -use sc_client_api::light::{future_header, RemoteBlockchain, Fetcher, RemoteCallRequest}; +use futures::future::{ready, TryFutureExt}; use jsonrpc_core::{ + futures::future::{self as rpc_future, result, Future}, Error as RpcError, ErrorCode, - futures::future::{self as rpc_future,result, Future}, }; use jsonrpc_derive::rpc; -use futures::future::{ready, TryFutureExt}; -use sp_blockchain::{ - HeaderBackend, - Error as ClientError -}; -use sp_runtime::{ - generic::BlockId, - traits, -}; -use sp_core::{hexdisplay::HexDisplay, Bytes}; -use sc_transaction_pool_api::{TransactionPool, InPoolTransaction}; -use sp_block_builder::BlockBuilder; +use sc_client_api::light::{future_header, Fetcher, RemoteBlockchain, RemoteCallRequest}; use sc_rpc_api::DenyUnsafe; +use sc_transaction_pool_api::{InPoolTransaction, TransactionPool}; +use sp_block_builder::BlockBuilder; +use sp_blockchain::{Error as ClientError, HeaderBackend}; +use sp_core::{hexdisplay::HexDisplay, Bytes}; +use sp_runtime::{generic::BlockId, traits}; -pub use frame_system_rpc_runtime_api::AccountNonceApi; pub use self::gen_client::Client as SystemClient; +pub use frame_system_rpc_runtime_api::AccountNonceApi; /// Future that resolves to account nonce. pub type FutureResult = Box + Send>; @@ -89,13 +83,8 @@ pub struct FullSystem { impl FullSystem { /// Create new `FullSystem` given client and transaction pool. - pub fn new(client: Arc, pool: Arc

, deny_unsafe: DenyUnsafe,) -> Self { - FullSystem { - client, - pool, - deny_unsafe, - _marker: Default::default(), - } + pub fn new(client: Arc, pool: Arc

, deny_unsafe: DenyUnsafe) -> Self { + FullSystem { client, pool, deny_unsafe, _marker: Default::default() } } } @@ -130,35 +119,37 @@ where Box::new(result(get_nonce())) } - fn dry_run(&self, extrinsic: Bytes, at: Option<::Hash>) -> FutureResult { + fn dry_run( + &self, + extrinsic: Bytes, + at: Option<::Hash>, + ) -> FutureResult { if let Err(err) = self.deny_unsafe.check_if_safe() { - return Box::new(rpc_future::err(err.into())); + return Box::new(rpc_future::err(err.into())) } let dry_run = || { let api = self.client.runtime_api(); let at = BlockId::::hash(at.unwrap_or_else(|| // If the block hash is not supplied assume the best block. - self.client.info().best_hash - )); - - let uxt: ::Extrinsic = Decode::decode(&mut &*extrinsic).map_err(|e| RpcError { - code: ErrorCode::ServerError(Error::DecodeError.into()), - message: "Unable to dry run extrinsic.".into(), - data: Some(format!("{:?}", e).into()), - })?; + self.client.info().best_hash)); - let result = api.apply_extrinsic(&at, uxt) + let uxt: ::Extrinsic = Decode::decode(&mut &*extrinsic) .map_err(|e| RpcError { - code: ErrorCode::ServerError(Error::RuntimeError.into()), + code: ErrorCode::ServerError(Error::DecodeError.into()), message: "Unable to dry run extrinsic.".into(), data: Some(format!("{:?}", e).into()), })?; + let result = api.apply_extrinsic(&at, uxt).map_err(|e| RpcError { + code: ErrorCode::ServerError(Error::RuntimeError.into()), + message: "Unable to dry run extrinsic.".into(), + data: Some(format!("{:?}", e).into()), + })?; + Ok(Encode::encode(&result).into()) }; - Box::new(result(dry_run())) } } @@ -179,12 +170,7 @@ impl LightSystem { fetcher: Arc, pool: Arc

, ) -> Self { - LightSystem { - client, - remote_blockchain, - fetcher, - pool, - } + LightSystem { client, remote_blockchain, fetcher, pool } } } @@ -205,21 +191,27 @@ where let future_best_header = future_header(&*self.remote_blockchain, &*self.fetcher, best_id); let fetcher = self.fetcher.clone(); let call_data = account.encode(); - let future_best_header = future_best_header - .and_then(move |maybe_best_header| ready( - maybe_best_header.ok_or_else(|| { ClientError::UnknownBlock(format!("{}", best_hash)) }) - )); - let future_nonce = future_best_header.and_then(move |best_header| - fetcher.remote_call(RemoteCallRequest { - block: best_hash, - header: best_header, - method: "AccountNonceApi_account_nonce".into(), - call_data, - retry_count: None, + let future_best_header = future_best_header.and_then(move |maybe_best_header| { + ready( + maybe_best_header + .ok_or_else(|| ClientError::UnknownBlock(format!("{}", best_hash))), + ) + }); + let future_nonce = future_best_header + .and_then(move |best_header| { + fetcher.remote_call(RemoteCallRequest { + block: best_hash, + header: best_header, + method: "AccountNonceApi_account_nonce".into(), + call_data, + retry_count: None, + }) }) - ).compat(); - let future_nonce = future_nonce.and_then(|nonce| Decode::decode(&mut &nonce[..]) - .map_err(|e| ClientError::CallResultDecode("Cannot decode account nonce", e))); + .compat(); + let future_nonce = future_nonce.and_then(|nonce| { + Decode::decode(&mut &nonce[..]) + .map_err(|e| ClientError::CallResultDecode("Cannot decode account nonce", e)) + }); let future_nonce = future_nonce.map_err(|e| RpcError { code: ErrorCode::ServerError(Error::RuntimeError.into()), message: "Unable to query nonce.".into(), @@ -232,7 +224,11 @@ where Box::new(future_nonce) } - fn dry_run(&self, _extrinsic: Bytes, _at: Option<::Hash>) -> FutureResult { + fn dry_run( + &self, + _extrinsic: Bytes, + _at: Option<::Hash>, + ) -> FutureResult { Box::new(result(Err(RpcError { code: 
ErrorCode::MethodNotFound, message: "Unable to dry run extrinsic.".into(), @@ -243,11 +239,8 @@ where /// Adjust account nonce from state, so that tx with the nonce will be /// placed after all ready txpool transactions. -fn adjust_nonce( - pool: &P, - account: AccountId, - nonce: Index, -) -> Index where +fn adjust_nonce(pool: &P, account: AccountId, nonce: Index) -> Index +where P: TransactionPool, AccountId: Clone + std::fmt::Display + Encode, Index: Clone + std::fmt::Display + Encode + traits::AtLeast32Bit + 'static, @@ -285,9 +278,12 @@ mod tests { use super::*; use futures::executor::block_on; - use substrate_test_runtime_client::{runtime::Transfer, AccountKeyring}; use sc_transaction_pool::BasicPool; - use sp_runtime::{ApplyExtrinsicResult, transaction_validity::{TransactionValidityError, InvalidTransaction}}; + use sp_runtime::{ + transaction_validity::{InvalidTransaction, TransactionValidityError}, + ApplyExtrinsicResult, + }; + use substrate_test_runtime_client::{runtime::Transfer, AccountKeyring}; #[test] fn should_return_next_nonce_for_some_account() { @@ -296,13 +292,8 @@ mod tests { // given let client = Arc::new(substrate_test_runtime_client::new()); let spawner = sp_core::testing::TaskExecutor::new(); - let pool = BasicPool::new_full( - Default::default(), - true.into(), - None, - spawner, - client.clone(), - ); + let pool = + BasicPool::new_full(Default::default(), true.into(), None, spawner, client.clone()); let source = sp_runtime::transaction_validity::TransactionSource::External; let new_transaction = |nonce: u64| { @@ -336,13 +327,8 @@ mod tests { // given let client = Arc::new(substrate_test_runtime_client::new()); let spawner = sp_core::testing::TaskExecutor::new(); - let pool = BasicPool::new_full( - Default::default(), - true.into(), - None, - spawner, - client.clone(), - ); + let pool = + BasicPool::new_full(Default::default(), true.into(), None, spawner, client.clone()); let accounts = FullSystem::new(client, pool, DenyUnsafe::Yes); @@ 
-360,13 +346,8 @@ mod tests { // given let client = Arc::new(substrate_test_runtime_client::new()); let spawner = sp_core::testing::TaskExecutor::new(); - let pool = BasicPool::new_full( - Default::default(), - true.into(), - None, - spawner, - client.clone(), - ); + let pool = + BasicPool::new_full(Default::default(), true.into(), None, spawner, client.clone()); let accounts = FullSystem::new(client, pool, DenyUnsafe::No); @@ -375,7 +356,8 @@ mod tests { to: AccountKeyring::Bob.into(), amount: 5, nonce: 0, - }.into_signed_tx(); + } + .into_signed_tx(); // when let res = accounts.dry_run(tx.encode().into(), None); @@ -393,13 +375,8 @@ mod tests { // given let client = Arc::new(substrate_test_runtime_client::new()); let spawner = sp_core::testing::TaskExecutor::new(); - let pool = BasicPool::new_full( - Default::default(), - true.into(), - None, - spawner, - client.clone(), - ); + let pool = + BasicPool::new_full(Default::default(), true.into(), None, spawner, client.clone()); let accounts = FullSystem::new(client, pool, DenyUnsafe::No); @@ -408,7 +385,8 @@ mod tests { to: AccountKeyring::Bob.into(), amount: 5, nonce: 100, - }.into_signed_tx(); + } + .into_signed_tx(); // when let res = accounts.dry_run(tx.encode().into(), None); diff --git a/utils/frame/try-runtime/cli/src/lib.rs b/utils/frame/try-runtime/cli/src/lib.rs index e0d09ff7fbcf4..4f31bd741b3a0 100644 --- a/utils/frame/try-runtime/cli/src/lib.rs +++ b/utils/frame/try-runtime/cli/src/lib.rs @@ -18,24 +18,23 @@ //! `Structopt`-ready structs for `try-runtime`. 
use parity_scale_codec::{Decode, Encode}; -use std::{fmt::Debug, path::PathBuf, str::FromStr, sync::Arc}; -use sc_service::Configuration; +use remote_externalities::{rpc_api, Builder, Mode, OfflineConfig, OnlineConfig, SnapshotConfig}; +use sc_chain_spec::ChainSpec; use sc_cli::{CliConfiguration, ExecutionStrategy, WasmExecutionMethod}; use sc_executor::NativeExecutor; -use sc_service::NativeExecutionDispatch; -use sc_chain_spec::ChainSpec; -use sp_state_machine::StateMachine; -use sp_runtime::traits::{Block as BlockT, NumberFor, Header as HeaderT}; +use sc_service::{Configuration, NativeExecutionDispatch}; use sp_core::{ + hashing::twox_128, offchain::{ - OffchainWorkerExt, OffchainDbExt, TransactionPoolExt, testing::{TestOffchainExt, TestTransactionPoolExt}, + OffchainDbExt, OffchainWorkerExt, TransactionPoolExt, }, - storage::{StorageData, StorageKey, well_known_keys}, - hashing::twox_128, + storage::{well_known_keys, StorageData, StorageKey}, }; -use sp_keystore::{KeystoreExt, testing::KeyStore}; -use remote_externalities::{Builder, Mode, SnapshotConfig, OfflineConfig, OnlineConfig, rpc_api}; +use sp_keystore::{testing::KeyStore, KeystoreExt}; +use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor}; +use sp_state_machine::StateMachine; +use std::{fmt::Debug, path::PathBuf, str::FromStr, sync::Arc}; mod parse; @@ -170,7 +169,7 @@ pub enum State { /// The modules to scrape. If empty, entire chain state will be scraped. 
#[structopt(short, long, require_delimiter = true)] modules: Option>, - } + }, } async fn on_runtime_upgrade( @@ -192,36 +191,31 @@ where let mut changes = Default::default(); let max_runtime_instances = config.max_runtime_instances; - let executor = NativeExecutor::::new( - wasm_method.into(), - heap_pages, - max_runtime_instances, - ); + let executor = + NativeExecutor::::new(wasm_method.into(), heap_pages, max_runtime_instances); let ext = { let builder = match command.state { - State::Snap { snapshot_path } => { + State::Snap { snapshot_path } => Builder::::new().mode(Mode::Offline(OfflineConfig { state_snapshot: SnapshotConfig::new(snapshot_path), - })) - }, - State::Live { - snapshot_path, - modules - } => Builder::::new().mode(Mode::Online(OnlineConfig { - transport: shared.url.to_owned().into(), - state_snapshot: snapshot_path.as_ref().map(SnapshotConfig::new), - modules: modules.to_owned().unwrap_or_default(), - at: Some(shared.block_at::()?), - ..Default::default() - })), + })), + State::Live { snapshot_path, modules } => + Builder::::new().mode(Mode::Online(OnlineConfig { + transport: shared.url.to_owned().into(), + state_snapshot: snapshot_path.as_ref().map(SnapshotConfig::new), + modules: modules.to_owned().unwrap_or_default(), + at: Some(shared.block_at::()?), + ..Default::default() + })), }; let (code_key, code) = extract_code(config.chain_spec)?; builder .inject_key_value(&[(code_key, code)]) .inject_hashed_key(&[twox_128(b"System"), twox_128(b"LastRuntimeUpgrade")].concat()) - .build().await? + .build() + .await? 
}; let encoded_result = StateMachine::<_, _, NumberFor, _>::new( @@ -232,8 +226,7 @@ where "TryRuntime_on_runtime_upgrade", &[], ext.extensions, - &sp_state_machine::backend::BackendRuntimeCode::new(&ext.backend) - .runtime_code()?, + &sp_state_machine::backend::BackendRuntimeCode::new(&ext.backend).runtime_code()?, sp_core::testing::TaskExecutor::new(), ) .execute(execution.into()) @@ -271,35 +264,28 @@ where let mut changes = Default::default(); let max_runtime_instances = config.max_runtime_instances; - let executor = NativeExecutor::::new( - wasm_method.into(), - heap_pages, - max_runtime_instances, - ); + let executor = + NativeExecutor::::new(wasm_method.into(), heap_pages, max_runtime_instances); let mode = match command.state { - State::Live { - snapshot_path, - modules - } => { - let at = shared.block_at::()?; - let online_config = OnlineConfig { - transport: shared.url.to_owned().into(), - state_snapshot: snapshot_path.as_ref().map(SnapshotConfig::new), - modules: modules.to_owned().unwrap_or_default(), - at: Some(at), - ..Default::default() - }; + State::Live { snapshot_path, modules } => { + let at = shared.block_at::()?; + let online_config = OnlineConfig { + transport: shared.url.to_owned().into(), + state_snapshot: snapshot_path.as_ref().map(SnapshotConfig::new), + modules: modules.to_owned().unwrap_or_default(), + at: Some(at), + ..Default::default() + }; - Mode::Online(online_config) - }, - State::Snap { snapshot_path } => { - let mode = Mode::Offline(OfflineConfig { - state_snapshot: SnapshotConfig::new(snapshot_path), - }); + Mode::Online(online_config) + }, + State::Snap { snapshot_path } => { + let mode = + Mode::Offline(OfflineConfig { state_snapshot: SnapshotConfig::new(snapshot_path) }); - mode - } + mode + }, }; let builder = Builder::::new() .mode(mode) @@ -308,10 +294,7 @@ where let (code_key, code) = extract_code(config.chain_spec)?; builder.inject_key_value(&[(code_key, code)]).build().await? 
} else { - builder - .inject_hashed_key(well_known_keys::CODE) - .build() - .await? + builder.inject_hashed_key(well_known_keys::CODE).build().await? }; let (offchain, _offchain_state) = TestOffchainExt::new(); @@ -332,8 +315,7 @@ where "OffchainWorkerApi_offchain_worker", header.encode().as_ref(), ext.extensions, - &sp_state_machine::backend::BackendRuntimeCode::new(&ext.backend) - .runtime_code()?, + &sp_state_machine::backend::BackendRuntimeCode::new(&ext.backend).runtime_code()?, sp_core::testing::TaskExecutor::new(), ) .execute(execution.into()) @@ -363,20 +345,16 @@ where let mut changes = Default::default(); let max_runtime_instances = config.max_runtime_instances; - let executor = NativeExecutor::::new( - wasm_method.into(), - heap_pages, - max_runtime_instances, - ); + let executor = + NativeExecutor::::new(wasm_method.into(), heap_pages, max_runtime_instances); let block_hash = shared.block_at::()?; let block: Block = rpc_api::get_block::(shared.url.clone(), block_hash).await?; let mode = match command.state { State::Snap { snapshot_path } => { - let mode = Mode::Offline(OfflineConfig { - state_snapshot: SnapshotConfig::new(snapshot_path), - }); + let mode = + Mode::Offline(OfflineConfig { state_snapshot: SnapshotConfig::new(snapshot_path) }); mode }, @@ -392,7 +370,7 @@ where }); mode - } + }, }; let ext = { @@ -403,10 +381,7 @@ where let (code_key, code) = extract_code(config.chain_spec)?; builder.inject_key_value(&[(code_key, code)]).build().await? } else { - builder - .inject_hashed_key(well_known_keys::CODE) - .build() - .await? + builder.inject_hashed_key(well_known_keys::CODE).build().await? 
}; // register externality extensions in order to provide host interface for OCW to the @@ -459,15 +434,14 @@ impl TryRuntimeCmd { ExecDispatch: NativeExecutionDispatch + 'static, { match &self.command { - Command::OnRuntimeUpgrade(ref cmd) => { - on_runtime_upgrade::(self.shared.clone(), cmd.clone(), config).await - } - Command::OffchainWorker(cmd) => { - offchain_worker::(self.shared.clone(), cmd.clone(), config).await - } - Command::ExecuteBlock(cmd) => { - execute_block::(self.shared.clone(), cmd.clone(), config).await - } + Command::OnRuntimeUpgrade(ref cmd) => + on_runtime_upgrade::(self.shared.clone(), cmd.clone(), config) + .await, + Command::OffchainWorker(cmd) => + offchain_worker::(self.shared.clone(), cmd.clone(), config) + .await, + Command::ExecuteBlock(cmd) => + execute_block::(self.shared.clone(), cmd.clone(), config).await, } } } diff --git a/utils/frame/try-runtime/cli/src/parse.rs b/utils/frame/try-runtime/cli/src/parse.rs index beb9a6508fed1..7f205fbacd310 100644 --- a/utils/frame/try-runtime/cli/src/parse.rs +++ b/utils/frame/try-runtime/cli/src/parse.rs @@ -18,11 +18,8 @@ //! Utils for parsing user input pub(crate) fn hash(block_hash: &str) -> Result { - let (block_hash, offset) = if block_hash.starts_with("0x") { - (&block_hash[2..], 2) - } else { - (block_hash, 0) - }; + let (block_hash, offset) = + if block_hash.starts_with("0x") { (&block_hash[2..], 2) } else { (block_hash, 0) }; if let Some(pos) = block_hash.chars().position(|c| !c.is_ascii_hexdigit()) { Err(format!( diff --git a/utils/prometheus/src/lib.rs b/utils/prometheus/src/lib.rs index 93a56d084fd04..96407b0062356 100644 --- a/utils/prometheus/src/lib.rs +++ b/utils/prometheus/src/lib.rs @@ -15,33 +15,34 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use futures_util::{FutureExt, future::Future}; +use futures_util::{future::Future, FutureExt}; pub use prometheus::{ self, - Registry, Error as PrometheusError, Opts, - Histogram, HistogramOpts, HistogramVec, - exponential_buckets, core::{ - GenericGauge as Gauge, GenericCounter as Counter, - GenericGaugeVec as GaugeVec, GenericCounterVec as CounterVec, - AtomicF64 as F64, AtomicI64 as I64, AtomicU64 as U64, - } + AtomicF64 as F64, AtomicI64 as I64, AtomicU64 as U64, GenericCounter as Counter, + GenericCounterVec as CounterVec, GenericGauge as Gauge, GenericGaugeVec as GaugeVec, + }, + exponential_buckets, Error as PrometheusError, Histogram, HistogramOpts, HistogramVec, Opts, + Registry, }; -use prometheus::{Encoder, TextEncoder, core::Collector}; +use prometheus::{core::Collector, Encoder, TextEncoder}; use std::net::SocketAddr; #[cfg(not(target_os = "unknown"))] mod networking; mod sourced; -pub use sourced::{SourcedCounter, SourcedGauge, MetricSource, SourcedMetric}; +pub use sourced::{MetricSource, SourcedCounter, SourcedGauge, SourcedMetric}; -#[cfg(target_os = "unknown")] -pub use unknown_os::init_prometheus; #[cfg(not(target_os = "unknown"))] pub use known_os::init_prometheus; +#[cfg(target_os = "unknown")] +pub use unknown_os::init_prometheus; -pub fn register(metric: T, registry: &Registry) -> Result { +pub fn register( + metric: T, + registry: &Registry, +) -> Result { registry.register(Box::new(metric.clone()))?; Ok(metric) } @@ -61,8 +62,11 @@ mod unknown_os { #[cfg(not(target_os = "unknown"))] mod known_os { use super::*; - use hyper::http::StatusCode; - use hyper::{Server, Body, Request, Response, service::{service_fn, make_service_fn}}; + use hyper::{ + http::StatusCode, + service::{make_service_fn, service_fn}, + Body, Request, Response, Server, + }; #[derive(Debug, derive_more::Display, derive_more::From)] pub enum Error { @@ -73,7 +77,7 @@ mod known_os { /// i/o error. 
Io(std::io::Error), #[display(fmt = "Prometheus port {} already in use.", _0)] - PortInUse(SocketAddr) + PortInUse(SocketAddr), } impl std::error::Error for Error { @@ -82,28 +86,32 @@ mod known_os { Error::Hyper(error) => Some(error), Error::Http(error) => Some(error), Error::Io(error) => Some(error), - Error::PortInUse(_) => None + Error::PortInUse(_) => None, } } } - async fn request_metrics(req: Request, registry: Registry) -> Result, Error> { + async fn request_metrics( + req: Request, + registry: Registry, + ) -> Result, Error> { if req.uri().path() == "/metrics" { let metric_families = registry.gather(); let mut buffer = vec![]; let encoder = TextEncoder::new(); encoder.encode(&metric_families, &mut buffer).unwrap(); - Response::builder().status(StatusCode::OK) + Response::builder() + .status(StatusCode::OK) .header("Content-Type", encoder.format_type()) .body(Body::from(buffer)) .map_err(Error::Http) } else { - Response::builder().status(StatusCode::NOT_FOUND) + Response::builder() + .status(StatusCode::NOT_FOUND) .body(Body::from("Not found.")) .map_err(Error::Http) } - } #[derive(Clone)] @@ -121,7 +129,10 @@ mod known_os { /// Initializes the metrics context, and starts an HTTP server /// to serve metrics. - pub async fn init_prometheus(prometheus_addr: SocketAddr, registry: Registry) -> Result<(), Error>{ + pub async fn init_prometheus( + prometheus_addr: SocketAddr, + registry: Registry, + ) -> Result<(), Error> { use networking::Incoming; let listener = async_std::net::TcpListener::bind(&prometheus_addr) .await diff --git a/utils/prometheus/src/networking.rs b/utils/prometheus/src/networking.rs index 48ae8a23297c9..e04ac99a56948 100644 --- a/utils/prometheus/src/networking.rs +++ b/utils/prometheus/src/networking.rs @@ -16,8 +16,11 @@ // limitations under the License. 
use async_std::pin::Pin; -use std::task::{Poll, Context}; -use futures_util::{stream::Stream, io::{AsyncRead, AsyncWrite}}; +use futures_util::{ + io::{AsyncRead, AsyncWrite}, + stream::Stream, +}; +use std::task::{Context, Poll}; pub struct Incoming<'a>(pub async_std::net::Incoming<'a>); @@ -25,7 +28,10 @@ impl hyper::server::accept::Accept for Incoming<'_> { type Conn = TcpStream; type Error = async_std::io::Error; - fn poll_accept(self: Pin<&mut Self>, cx: &mut Context) -> Poll>> { + fn poll_accept( + self: Pin<&mut Self>, + cx: &mut Context, + ) -> Poll>> { Pin::new(&mut Pin::into_inner(self).0) .poll_next(cx) .map(|opt| opt.map(|res| res.map(TcpStream))) @@ -38,10 +44,9 @@ impl tokio::io::AsyncRead for TcpStream { fn poll_read( self: Pin<&mut Self>, cx: &mut Context, - buf: &mut [u8] + buf: &mut [u8], ) -> Poll> { - Pin::new(&mut Pin::into_inner(self).0) - .poll_read(cx, buf) + Pin::new(&mut Pin::into_inner(self).0).poll_read(cx, buf) } } @@ -49,19 +54,16 @@ impl tokio::io::AsyncWrite for TcpStream { fn poll_write( self: Pin<&mut Self>, cx: &mut Context, - buf: &[u8] + buf: &[u8], ) -> Poll> { - Pin::new(&mut Pin::into_inner(self).0) - .poll_write(cx, buf) + Pin::new(&mut Pin::into_inner(self).0).poll_write(cx, buf) } fn poll_flush(self: Pin<&mut Self>, cx: &mut Context) -> Poll> { - Pin::new(&mut Pin::into_inner(self).0) - .poll_flush(cx) + Pin::new(&mut Pin::into_inner(self).0).poll_flush(cx) } fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context) -> Poll> { - Pin::new(&mut Pin::into_inner(self).0) - .poll_close(cx) + Pin::new(&mut Pin::into_inner(self).0).poll_close(cx) } } diff --git a/utils/prometheus/src/sourced.rs b/utils/prometheus/src/sourced.rs index 014bdb30f8ab7..78853a6ef354f 100644 --- a/utils/prometheus/src/sourced.rs +++ b/utils/prometheus/src/sourced.rs @@ -17,8 +17,10 @@ //! Metrics that are collected from existing sources. 
-use prometheus::core::{Collector, Desc, Describer, Number, Opts}; -use prometheus::proto; +use prometheus::{ + core::{Collector, Desc, Describer, Number, Opts}, + proto, +}; use std::{cmp::Ordering, marker::PhantomData}; /// A counter whose values are obtained from an existing source. @@ -80,15 +82,15 @@ impl Collector for SourcedMetric { let mut c = proto::Counter::default(); c.set_value(value.into_f64()); m.set_counter(c); - } + }, proto::MetricType::GAUGE => { let mut g = proto::Gauge::default(); g.set_value(value.into_f64()); m.set_gauge(g); - } + }, t => { log::error!("Unsupported sourced metric type: {:?}", t); - } + }, } debug_assert_eq!(self.desc.variable_labels.len(), label_values.len()); @@ -97,18 +99,23 @@ impl Collector for SourcedMetric { log::warn!("Missing label values for sourced metric {}", self.desc.fq_name), Ordering::Less => log::warn!("Too many label values for sourced metric {}", self.desc.fq_name), - Ordering::Equal => {} + Ordering::Equal => {}, } - m.set_label(self.desc.variable_labels.iter().zip(label_values) - .map(|(l_name, l_value)| { - let mut l = proto::LabelPair::default(); - l.set_name(l_name.to_string()); - l.set_value(l_value.to_string()); - l - }) - .chain(self.desc.const_label_pairs.iter().cloned()) - .collect::>()); + m.set_label( + self.desc + .variable_labels + .iter() + .zip(label_values) + .map(|(l_name, l_value)| { + let mut l = proto::LabelPair::default(); + l.set_name(l_name.to_string()); + l.set_value(l_value.to_string()); + l + }) + .chain(self.desc.const_label_pairs.iter().cloned()) + .collect::>(), + ); counters.push(m); }); @@ -130,11 +137,15 @@ pub trait SourcedType: private::Sealed + Sync + Send { } impl SourcedType for Counter { - fn proto() -> proto::MetricType { proto::MetricType::COUNTER } + fn proto() -> proto::MetricType { + proto::MetricType::COUNTER + } } impl SourcedType for Gauge { - fn proto() -> proto::MetricType { proto::MetricType::GAUGE } + fn proto() -> proto::MetricType { + 
proto::MetricType::GAUGE + } } mod private { diff --git a/utils/wasm-builder/src/builder.rs b/utils/wasm-builder/src/builder.rs index 9e8216f04fedb..20f33583b8920 100644 --- a/utils/wasm-builder/src/builder.rs +++ b/utils/wasm-builder/src/builder.rs @@ -15,7 +15,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -use std::{env, path::{PathBuf, Path}, process}; +use std::{ + env, + path::{Path, PathBuf}, + process, +}; /// Returns the manifest dir from the `CARGO_MANIFEST_DIR` env. fn get_manifest_dir() -> PathBuf { @@ -50,10 +54,7 @@ impl WasmBuilderSelectProject { /// Use the given `path` as project for building the WASM binary. /// /// Returns an error if the given `path` does not points to a `Cargo.toml`. - pub fn with_project( - self, - path: impl Into, - ) -> Result { + pub fn with_project(self, path: impl Into) -> Result { let path = path.into(); if path.ends_with("Cargo.toml") && path.exists() { @@ -97,9 +98,7 @@ pub struct WasmBuilder { impl WasmBuilder { /// Create a new instance of the builder. pub fn new() -> WasmBuilderSelectProject { - WasmBuilderSelectProject { - _ignore: (), - } + WasmBuilderSelectProject { _ignore: () } } /// Enable exporting `__heap_base` as global variable in the WASM binary. @@ -147,9 +146,8 @@ impl WasmBuilder { /// Build the WASM binary. 
pub fn build(self) { let out_dir = PathBuf::from(env::var("OUT_DIR").expect("`OUT_DIR` is set by cargo!")); - let file_path = out_dir.join( - self.file_name.clone().unwrap_or_else(|| "wasm_binary.rs".into()), - ); + let file_path = + out_dir.join(self.file_name.clone().unwrap_or_else(|| "wasm_binary.rs".into())); if check_skip_build() { // If we skip the build, we still want to make sure to be called when an env variable @@ -158,7 +156,7 @@ impl WasmBuilder { provide_dummy_wasm_binary_if_not_exist(&file_path); - return; + return } build_project( @@ -179,13 +177,17 @@ impl WasmBuilder { fn generate_crate_skip_build_env_name() -> String { format!( "SKIP_{}_WASM_BUILD", - env::var("CARGO_PKG_NAME").expect("Package name is set").to_uppercase().replace('-', "_"), + env::var("CARGO_PKG_NAME") + .expect("Package name is set") + .to_uppercase() + .replace('-', "_"), ) } /// Checks if the build of the WASM binary should be skipped. fn check_skip_build() -> bool { - env::var(crate::SKIP_BUILD_ENV).is_ok() || env::var(generate_crate_skip_build_env_name()).is_ok() + env::var(crate::SKIP_BUILD_ENV).is_ok() || + env::var(generate_crate_skip_build_env_name()).is_ok() } /// Provide a dummy WASM binary if there doesn't exist one. @@ -243,15 +245,9 @@ fn build_project( ); let (wasm_binary, wasm_binary_bloaty) = if let Some(wasm_binary) = wasm_binary { - ( - wasm_binary.wasm_binary_path_escaped(), - bloaty.wasm_binary_bloaty_path_escaped(), - ) + (wasm_binary.wasm_binary_path_escaped(), bloaty.wasm_binary_bloaty_path_escaped()) } else { - ( - bloaty.wasm_binary_bloaty_path_escaped(), - bloaty.wasm_binary_bloaty_path_escaped(), - ) + (bloaty.wasm_binary_bloaty_path_escaped(), bloaty.wasm_binary_bloaty_path_escaped()) }; crate::write_file_if_changed( diff --git a/utils/wasm-builder/src/lib.rs b/utils/wasm-builder/src/lib.rs index 0a3c856344dcd..0bfd4e7550146 100644 --- a/utils/wasm-builder/src/lib.rs +++ b/utils/wasm-builder/src/lib.rs @@ -96,7 +96,12 @@ //! as well. 
For example if installing the rust nightly from 20.02.2020 using `rustup install nightly-2020-02-20`, //! the wasm target needs to be installed as well `rustup target add wasm32-unknown-unknown --toolchain nightly-2020-02-20`. -use std::{env, fs, path::{PathBuf, Path}, process::Command, io::BufRead}; +use std::{ + env, fs, + io::BufRead, + path::{Path, PathBuf}, + process::Command, +}; mod builder; mod prerequisites; @@ -144,18 +149,16 @@ fn copy_file_if_changed(src: PathBuf, dst: PathBuf) { let dst_file = fs::read_to_string(&dst).ok(); if src_file != dst_file { - fs::copy(&src, &dst) - .unwrap_or_else( - |_| panic!("Copying `{}` to `{}` can not fail; qed", src.display(), dst.display()) - ); + fs::copy(&src, &dst).unwrap_or_else(|_| { + panic!("Copying `{}` to `{}` can not fail; qed", src.display(), dst.display()) + }); } } /// Get a cargo command that compiles with nightly fn get_nightly_cargo() -> CargoCommand { - let env_cargo = CargoCommand::new( - &env::var("CARGO").expect("`CARGO` env variable is always set by cargo"), - ); + let env_cargo = + CargoCommand::new(&env::var("CARGO").expect("`CARGO` env variable is always set by cargo")); let default_cargo = CargoCommand::new("cargo"); let rustup_run_nightly = CargoCommand::new_with_args("rustup", &["run", "nightly", "cargo"]); let wasm_toolchain = env::var(WASM_BUILD_TOOLCHAIN).ok(); @@ -197,7 +200,7 @@ fn get_rustup_nightly(selected: Option) -> Option { } latest_nightly?.trim_end_matches(&host).into() - } + }, }; Some(CargoCommand::new_with_args("rustup", &["run", &version, "cargo"])) @@ -253,10 +256,7 @@ struct CargoCommandVersioned { impl CargoCommandVersioned { fn new(command: CargoCommand, version: String) -> Self { - Self { - command, - version, - } + Self { command, version } } /// Returns the `rustc` version. 
diff --git a/utils/wasm-builder/src/prerequisites.rs b/utils/wasm-builder/src/prerequisites.rs index dbbd9c0a56229..0dad8b781ae5a 100644 --- a/utils/wasm-builder/src/prerequisites.rs +++ b/utils/wasm-builder/src/prerequisites.rs @@ -15,12 +15,12 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::{CargoCommandVersioned, CargoCommand, write_file_if_changed}; +use crate::{write_file_if_changed, CargoCommand, CargoCommandVersioned}; use std::{fs, path::Path}; -use tempfile::tempdir; use ansi_term::Color; +use tempfile::tempdir; /// Print an error message. fn print_error_message(message: &str) -> String { @@ -95,7 +95,7 @@ fn create_check_toolchain_project(project_dir: &Path) { rustc_version.unwrap_or_else(|| "unknown rustc version".into()), ); } - "# + "#, ); // Just prints the `RURSTC_VERSION` environment variable that is being created by the // `build.rs` script. @@ -105,7 +105,7 @@ fn create_check_toolchain_project(project_dir: &Path) { fn main() { println!("{}", env!("RUSTC_VERSION")); } - "# + "#, ); } @@ -120,7 +120,12 @@ fn check_wasm_toolchain_installed( let manifest_path = temp.path().join("Cargo.toml").display().to_string(); let mut build_cmd = cargo_command.command(); - build_cmd.args(&["build", "--target=wasm32-unknown-unknown", "--manifest-path", &manifest_path]); + build_cmd.args(&[ + "build", + "--target=wasm32-unknown-unknown", + "--manifest-path", + &manifest_path, + ]); if super::color_output_enabled() { build_cmd.arg("--color=always"); @@ -133,33 +138,27 @@ fn check_wasm_toolchain_installed( build_cmd.env_remove("CARGO_TARGET_DIR"); run_cmd.env_remove("CARGO_TARGET_DIR"); - build_cmd - .output() - .map_err(|_| err_msg.clone()) - .and_then(|s| - if s.status.success() { - let version = run_cmd.output().ok().and_then(|o| String::from_utf8(o.stdout).ok()); - Ok(CargoCommandVersioned::new( - cargo_command, - version.unwrap_or_else(|| "unknown rustc version".into()), - )) - } else { - 
match String::from_utf8(s.stderr) { - Ok(ref err) if err.contains("linker `rust-lld` not found") => { - Err(print_error_message("`rust-lld` not found, please install it!")) - }, - Ok(ref err) => Err( - format!( - "{}\n\n{}\n{}\n{}{}\n", - err_msg, - Color::Yellow.bold().paint("Further error information:"), - Color::Yellow.bold().paint("-".repeat(60)), - err, - Color::Yellow.bold().paint("-".repeat(60)), - ) - ), - Err(_) => Err(err_msg), - } + build_cmd.output().map_err(|_| err_msg.clone()).and_then(|s| { + if s.status.success() { + let version = run_cmd.output().ok().and_then(|o| String::from_utf8(o.stdout).ok()); + Ok(CargoCommandVersioned::new( + cargo_command, + version.unwrap_or_else(|| "unknown rustc version".into()), + )) + } else { + match String::from_utf8(s.stderr) { + Ok(ref err) if err.contains("linker `rust-lld` not found") => + Err(print_error_message("`rust-lld` not found, please install it!")), + Ok(ref err) => Err(format!( + "{}\n\n{}\n{}\n{}{}\n", + err_msg, + Color::Yellow.bold().paint("Further error information:"), + Color::Yellow.bold().paint("-".repeat(60)), + err, + Color::Yellow.bold().paint("-".repeat(60)), + )), + Err(_) => Err(err_msg), } - ) + } + }) } diff --git a/utils/wasm-builder/src/wasm_project.rs b/utils/wasm-builder/src/wasm_project.rs index 466c2145e6cee..60b0d76fd0c93 100644 --- a/utils/wasm-builder/src/wasm_project.rs +++ b/utils/wasm-builder/src/wasm_project.rs @@ -18,15 +18,20 @@ use crate::{write_file_if_changed, CargoCommandVersioned}; use std::{ - fs, path::{Path, PathBuf}, borrow::ToOwned, process, env, collections::HashSet, - hash::{Hash, Hasher}, ops::Deref, + borrow::ToOwned, + collections::HashSet, + env, fs, + hash::{Hash, Hasher}, + ops::Deref, + path::{Path, PathBuf}, + process, }; use toml::value::Table; use build_helper::rerun_if_changed; -use cargo_metadata::{MetadataCommand, Metadata}; +use cargo_metadata::{Metadata, MetadataCommand}; use walkdir::WalkDir; @@ -114,19 +119,16 @@ pub(crate) fn 
create_and_compile( ); build_project(&project, default_rustflags, cargo_cmd); - let (wasm_binary, wasm_binary_compressed, bloaty) = compact_wasm_file( - &project, - project_cargo_toml, - wasm_binary_name, - ); + let (wasm_binary, wasm_binary_compressed, bloaty) = + compact_wasm_file(&project, project_cargo_toml, wasm_binary_name); - wasm_binary.as_ref().map(|wasm_binary| - copy_wasm_to_target_directory(project_cargo_toml, wasm_binary) - ); + wasm_binary + .as_ref() + .map(|wasm_binary| copy_wasm_to_target_directory(project_cargo_toml, wasm_binary)); - wasm_binary_compressed.as_ref().map(|wasm_binary_compressed| + wasm_binary_compressed.as_ref().map(|wasm_binary_compressed| { copy_wasm_to_target_directory(project_cargo_toml, wasm_binary_compressed) - ); + }); generate_rerun_if_changed_instructions(project_cargo_toml, &project, &wasm_workspace); @@ -144,17 +146,17 @@ fn find_cargo_lock(cargo_manifest: &Path) -> Option { } if !path.pop() { - return None; + return None } } } if let Some(path) = find_impl(build_helper::out_dir()) { - return Some(path); + return Some(path) } if let Some(path) = find_impl(cargo_manifest.to_path_buf()) { - return Some(path); + return Some(path) } build_helper::warning!( @@ -169,15 +171,20 @@ fn find_cargo_lock(cargo_manifest: &Path) -> Option { /// Extract the crate name from the given `Cargo.toml`. 
fn get_crate_name(cargo_manifest: &Path) -> String { let cargo_toml: Table = toml::from_str( - &fs::read_to_string(cargo_manifest).expect("File exists as checked before; qed") - ).expect("Cargo manifest is a valid toml file; qed"); + &fs::read_to_string(cargo_manifest).expect("File exists as checked before; qed"), + ) + .expect("Cargo manifest is a valid toml file; qed"); let package = cargo_toml .get("package") .and_then(|t| t.as_table()) .expect("`package` key exists in valid `Cargo.toml`; qed"); - package.get("name").and_then(|p| p.as_str()).map(ToOwned::to_owned).expect("Package name exists; qed") + package + .get("name") + .and_then(|p| p.as_str()) + .map(ToOwned::to_owned) + .expect("Package name exists; qed") } /// Returns the name for the wasm binary. @@ -192,9 +199,10 @@ fn get_wasm_workspace_root() -> PathBuf { loop { match out_dir.parent() { Some(parent) if out_dir.ends_with("build") => return parent.to_path_buf(), - _ => if !out_dir.pop() { - break; - } + _ => + if !out_dir.pop() { + break + }, } } @@ -210,10 +218,10 @@ fn create_project_cargo_toml( enabled_features: impl Iterator, ) { let mut workspace_toml: Table = toml::from_str( - &fs::read_to_string( - workspace_root_path.join("Cargo.toml"), - ).expect("Workspace root `Cargo.toml` exists; qed") - ).expect("Workspace root `Cargo.toml` is a valid toml file; qed"); + &fs::read_to_string(workspace_root_path.join("Cargo.toml")) + .expect("Workspace root `Cargo.toml` exists; qed"), + ) + .expect("Workspace root `Cargo.toml` is a valid toml file; qed"); let mut wasm_workspace_toml = Table::new(); @@ -232,25 +240,25 @@ fn create_project_cargo_toml( wasm_workspace_toml.insert("profile".into(), profile.into()); // Add patch section from the project root `Cargo.toml` - while let Some(mut patch) = workspace_toml.remove("patch") - .and_then(|p| p.try_into::().ok()) { + while let Some(mut patch) = + workspace_toml.remove("patch").and_then(|p| p.try_into::
().ok()) + { // Iterate over all patches and make the patch path absolute from the workspace root path. - patch.iter_mut() - .filter_map(|p| + patch + .iter_mut() + .filter_map(|p| { p.1.as_table_mut().map(|t| t.iter_mut().filter_map(|t| t.1.as_table_mut())) - ) + }) .flatten() - .for_each(|p| - p.iter_mut() - .filter(|(k, _)| k == &"path") - .for_each(|(_, v)| { - if let Some(path) = v.as_str().map(PathBuf::from) { - if path.is_relative() { - *v = workspace_root_path.join(path).display().to_string().into(); - } + .for_each(|p| { + p.iter_mut().filter(|(k, _)| k == &"path").for_each(|(_, v)| { + if let Some(path) = v.as_str().map(PathBuf::from) { + if path.is_relative() { + *v = workspace_root_path.join(path).display().to_string().into(); } - }) - ); + } + }) + }); wasm_workspace_toml.insert("patch".into(), patch.into()); } @@ -296,7 +304,8 @@ fn find_package_by_manifest_path<'a>( manifest_path: &Path, crate_metadata: &'a cargo_metadata::Metadata, ) -> &'a cargo_metadata::Package { - crate_metadata.packages + crate_metadata + .packages .iter() .find(|p| p.manifest_path == manifest_path) .expect("Wasm project exists in its own metadata; qed") @@ -309,18 +318,19 @@ fn project_enabled_features( ) -> Vec { let package = find_package_by_manifest_path(cargo_manifest, crate_metadata); - let mut enabled_features = package.features.keys() + let mut enabled_features = package + .features + .keys() .filter(|f| { let mut feature_env = f.replace("-", "_"); feature_env.make_ascii_uppercase(); // We don't want to enable the `std`/`default` feature for the wasm build and // we need to check if the feature is enabled by checking the env variable. 
- *f != "std" - && *f != "default" - && env::var(format!("CARGO_FEATURE_{}", feature_env)) - .map(|v| v == "1") - .unwrap_or_default() + *f != "std" && + *f != "default" && env::var(format!("CARGO_FEATURE_{}", feature_env)) + .map(|v| v == "1") + .unwrap_or_default() }) .cloned() .collect::>(); @@ -418,7 +428,8 @@ fn build_project(project: &Path, default_rustflags: &str, cargo_cmd: CargoComman env::var(crate::WASM_BUILD_RUSTFLAGS_ENV).unwrap_or_default(), ); - build_cmd.args(&["rustc", "--target=wasm32-unknown-unknown"]) + build_cmd + .args(&["rustc", "--target=wasm32-unknown-unknown"]) .arg(format!("--manifest-path={}", manifest_path.display())) .env("RUSTFLAGS", rustflags) // Unset the `CARGO_TARGET_DIR` to prevent a cargo deadlock (cargo locks a target dir exclusive). @@ -456,17 +467,16 @@ fn compact_wasm_file( let is_release_build = is_release_build(); let target = if is_release_build { "release" } else { "debug" }; let default_wasm_binary_name = get_wasm_binary_name(cargo_manifest); - let wasm_file = project.join("target/wasm32-unknown-unknown") + let wasm_file = project + .join("target/wasm32-unknown-unknown") .join(target) .join(format!("{}.wasm", default_wasm_binary_name)); let wasm_compact_file = if is_release_build { - let wasm_compact_file = project.join( - format!( - "{}.compact.wasm", - wasm_binary_name.clone().unwrap_or_else(|| default_wasm_binary_name.clone()), - ) - ); + let wasm_compact_file = project.join(format!( + "{}.compact.wasm", + wasm_binary_name.clone().unwrap_or_else(|| default_wasm_binary_name.clone()), + )); wasm_gc::garbage_collect_file(&wasm_file, &wasm_compact_file) .expect("Failed to compact generated WASM binary."); Some(WasmBinary(wasm_compact_file)) @@ -474,24 +484,19 @@ fn compact_wasm_file( None }; - let wasm_compact_compressed_file = wasm_compact_file.as_ref() - .and_then(|compact_binary| { - let file_name = wasm_binary_name.clone() - .unwrap_or_else(|| default_wasm_binary_name.clone()); - - let wasm_compact_compressed_file = 
project.join( - format!( - "{}.compact.compressed.wasm", - file_name, - ) - ); - - if compress_wasm(&compact_binary.0, &wasm_compact_compressed_file) { - Some(WasmBinary(wasm_compact_compressed_file)) - } else { - None - } - }); + let wasm_compact_compressed_file = wasm_compact_file.as_ref().and_then(|compact_binary| { + let file_name = + wasm_binary_name.clone().unwrap_or_else(|| default_wasm_binary_name.clone()); + + let wasm_compact_compressed_file = + project.join(format!("{}.compact.compressed.wasm", file_name,)); + + if compress_wasm(&compact_binary.0, &wasm_compact_compressed_file) { + Some(WasmBinary(wasm_compact_compressed_file)) + } else { + None + } + }); let bloaty_file_name = if let Some(name) = wasm_binary_name { format!("{}.wasm", name) @@ -502,24 +507,14 @@ fn compact_wasm_file( let bloaty_file = project.join(bloaty_file_name); fs::copy(wasm_file, &bloaty_file).expect("Copying the bloaty file to the project dir."); - ( - wasm_compact_file, - wasm_compact_compressed_file, - WasmBinaryBloaty(bloaty_file), - ) + (wasm_compact_file, wasm_compact_compressed_file, WasmBinaryBloaty(bloaty_file)) } -fn compress_wasm( - wasm_binary_path: &Path, - compressed_binary_out_path: &Path, -) -> bool { +fn compress_wasm(wasm_binary_path: &Path, compressed_binary_out_path: &Path) -> bool { use sp_maybe_compressed_blob::CODE_BLOB_BOMB_LIMIT; let data = fs::read(wasm_binary_path).expect("Failed to read WASM binary"); - if let Some(compressed) = sp_maybe_compressed_blob::compress( - &data, - CODE_BLOB_BOMB_LIMIT, - ) { + if let Some(compressed) = sp_maybe_compressed_blob::compress(&data, CODE_BLOB_BOMB_LIMIT) { fs::write(compressed_binary_out_path, &compressed[..]) .expect("Failed to write WASM binary"); @@ -590,7 +585,8 @@ fn generate_rerun_if_changed_instructions( .exec() .expect("`cargo metadata` can not fail!"); - let package = metadata.packages + let package = metadata + .packages .iter() .find(|p| p.manifest_path == cargo_manifest) .expect("The crate package is 
contained in its own metadata; qed"); @@ -603,12 +599,11 @@ fn generate_rerun_if_changed_instructions( packages.insert(DeduplicatePackage::from(package)); while let Some(dependency) = dependencies.pop() { - let path_or_git_dep = dependency.source - .as_ref() - .map(|s| s.starts_with("git+")) - .unwrap_or(true); + let path_or_git_dep = + dependency.source.as_ref().map(|s| s.starts_with("git+")).unwrap_or(true); - let package = metadata.packages + let package = metadata + .packages .iter() .filter(|p| !p.manifest_path.starts_with(wasm_workspace)) .find(|p| { @@ -649,9 +644,7 @@ fn package_rerun_if_changed(package: &DeduplicatePackage) { // Ignore this entry if it is a directory that contains a `Cargo.toml` that is not the // `Cargo.toml` related to the current package. This is done to ignore sub-crates of a crate. // If such a sub-crate is a dependency, it will be processed independently anyway. - p.path() == manifest_path - || !p.path().is_dir() - || !p.path().join("Cargo.toml").exists() + p.path() == manifest_path || !p.path().is_dir() || !p.path().join("Cargo.toml").exists() }) .filter_map(|p| p.ok().map(|p| p.into_path())) .filter(|p| { @@ -681,5 +674,6 @@ fn copy_wasm_to_target_directory(cargo_manifest: &Path, wasm_binary: &WasmBinary fs::copy( wasm_binary.wasm_binary_path(), target_dir.join(format!("{}.wasm", get_wasm_binary_name(cargo_manifest))), - ).expect("Copies WASM binary to `WASM_TARGET_DIRECTORY`."); + ) + .expect("Copies WASM binary to `WASM_TARGET_DIRECTORY`."); } From d92d9b51c2b9f9deca029a8b83afc6c47ff21778 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 22 Jul 2021 10:53:56 +0100 Subject: [PATCH 379/503] Update to new call variant structs --- frame/balances/src/tests.rs | 2 +- .../src/unsigned.rs | 2 +- frame/example-offchain-worker/src/lib.rs | 12 +- frame/im-online/src/lib.rs | 2 +- frame/scheduler/src/lib.rs | 104 +++++++++--------- frame/sudo/src/tests.rs | 24 ++-- 6 files changed, 73 insertions(+), 73 deletions(-) diff --git 
a/frame/balances/src/tests.rs b/frame/balances/src/tests.rs index 624c2de618900..8aa661ce6bd90 100644 --- a/frame/balances/src/tests.rs +++ b/frame/balances/src/tests.rs @@ -39,7 +39,7 @@ macro_rules! decl_tests { const ID_2: LockIdentifier = *b"2 "; pub const CALL: &<$test as frame_system::Config>::Call = - &Call::Balances(pallet_balances::Call::transfer(0, 0)); + &Call::Balances(pallet_balances::Call::transfer { dest: 0, value: 0 }); /// create a transaction info struct from weight. Handy to avoid building the whole struct. pub fn info_from_weight(w: Weight) -> DispatchInfo { diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index bc2f2fda5fa52..c1479cf3368a7 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -154,7 +154,7 @@ impl Pallet { let call = restore_solution::() .and_then(|call| { // ensure the cached call is still current before submitting - if let Call::submit_unsigned(solution, _) = &call { + if let Call::submit_unsigned { solution, .. 
} = &call { // prevent errors arising from state changes in a forkful chain Self::basic_checks(solution, "restored")?; Ok(call) diff --git a/frame/example-offchain-worker/src/lib.rs b/frame/example-offchain-worker/src/lib.rs index 35e8dffd4edac..ead5fb0e1758b 100644 --- a/frame/example-offchain-worker/src/lib.rs +++ b/frame/example-offchain-worker/src/lib.rs @@ -304,7 +304,7 @@ pub mod pallet { return InvalidTransaction::BadProof.into() } Self::validate_transaction_parameters(&payload.block_number, &payload.price) - } else if let Call::submit_price_unsigned { block_number, new_price } = call { + } else if let Call::submit_price_unsigned { _block_number: block_number, price: new_price } = call { Self::validate_transaction_parameters(block_number, new_price) } else { InvalidTransaction::Call.into() @@ -473,7 +473,7 @@ impl Pallet { // Received price is wrapped into a call to `submit_price_unsigned` public function of this // pallet. This means that the transaction, when executed, will simply call that function // passing `price` as an argument. - let call = Call::submit_price_unsigned { block_number, price }; + let call = Call::submit_price_unsigned { _block_number: block_number, price }; // Now let's create a transaction out of this call and submit it to the pool. 
// Here we showcase two ways to send an unsigned transaction / unsigned payload (raw) @@ -509,8 +509,8 @@ impl Pallet { .send_unsigned_transaction( |account| PricePayload { price, block_number, public: account.public.clone() }, |payload, signature| Call::submit_price_unsigned_with_signed_payload { - payload, - signature, + price_payload: payload, + _signature: signature, }, ) .ok_or("No local accounts accounts available.")?; @@ -539,8 +539,8 @@ impl Pallet { .send_unsigned_transaction( |account| PricePayload { price, block_number, public: account.public.clone() }, |payload, signature| Call::submit_price_unsigned_with_signed_payload { - payload, - signature, + price_payload: payload, + _signature: signature, }, ); for (_account_id, result) in transaction_results.into_iter() { diff --git a/frame/im-online/src/lib.rs b/frame/im-online/src/lib.rs index ce2399629ed07..80462b85b00f3 100644 --- a/frame/im-online/src/lib.rs +++ b/frame/im-online/src/lib.rs @@ -627,7 +627,7 @@ impl Pallet { let prepare_heartbeat = || -> OffchainResult> { let network_state = sp_io::offchain::network_state().map_err(|_| OffchainErr::NetworkState)?; - let heartbeat_data = Heartbeat { + let heartbeat = Heartbeat { block_number, network_state, session_index, diff --git a/frame/scheduler/src/lib.rs b/frame/scheduler/src/lib.rs index 706b8abe6e5a0..5c0415a31c689 100644 --- a/frame/scheduler/src/lib.rs +++ b/frame/scheduler/src/lib.rs @@ -927,7 +927,7 @@ mod tests { pub struct BaseFilter; impl Filter for BaseFilter { fn filter(call: &Call) -> bool { - !matches!(call, Call::Logger(LoggerCall::log(_, _))) + !matches!(call, Call::Logger(LoggerCall::log { .. 
})) } } @@ -1005,7 +1005,7 @@ mod tests { #[test] fn basic_scheduling_works() { new_test_ext().execute_with(|| { - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::filter(&call)); assert_ok!(Scheduler::do_schedule(DispatchTime::At(4), None, 127, root(), call)); run_to_block(3); @@ -1021,7 +1021,7 @@ mod tests { fn schedule_after_works() { new_test_ext().execute_with(|| { run_to_block(2); - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::filter(&call)); // This will schedule the call 3 blocks after the next block... so block 3 + 3 = 6 assert_ok!(Scheduler::do_schedule(DispatchTime::After(3), None, 127, root(), call)); @@ -1038,7 +1038,7 @@ mod tests { fn schedule_after_zero_works() { new_test_ext().execute_with(|| { run_to_block(2); - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::filter(&call)); assert_ok!(Scheduler::do_schedule(DispatchTime::After(0), None, 127, root(), call)); // Will trigger on the next block. 
@@ -1058,7 +1058,7 @@ mod tests { Some((3, 3)), 127, root(), - Call::Logger(logger::Call::log(42, 1000)) + Call::Logger(logger::Call::log { i: 42, weight: 1000 }) )); run_to_block(3); assert!(logger::log().is_empty()); @@ -1080,7 +1080,7 @@ mod tests { #[test] fn reschedule_works() { new_test_ext().execute_with(|| { - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::filter(&call)); assert_eq!( Scheduler::do_schedule(DispatchTime::At(4), None, 127, root(), call).unwrap(), @@ -1111,7 +1111,7 @@ mod tests { #[test] fn reschedule_named_works() { new_test_ext().execute_with(|| { - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::filter(&call)); assert_eq!( Scheduler::do_schedule_named( @@ -1153,7 +1153,7 @@ mod tests { #[test] fn reschedule_named_perodic_works() { new_test_ext().execute_with(|| { - let call = Call::Logger(LoggerCall::log(42, 1000)); + let call = Call::Logger(LoggerCall::log { i: 42, weight: 1000 }); assert!(!::BaseCallFilter::filter(&call)); assert_eq!( Scheduler::do_schedule_named( @@ -1215,7 +1215,7 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(69, 1000)), + Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), ) .unwrap(); let i = Scheduler::do_schedule( @@ -1223,7 +1223,7 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(42, 1000)), + Call::Logger(LoggerCall::log { i: 42, weight: 1000 }), ) .unwrap(); run_to_block(3); @@ -1245,7 +1245,7 @@ mod tests { Some((3, 3)), 127, root(), - Call::Logger(LoggerCall::log(42, 1000)), + Call::Logger(LoggerCall::log { i: 42, weight: 1000 }), ) .unwrap(); // same id results in error. @@ -1255,7 +1255,7 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(69, 1000)) + Call::Logger(LoggerCall::log { i: 69, weight: 1000 }) ) .is_err()); // different id is ok. 
@@ -1265,7 +1265,7 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(69, 1000)), + Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), ) .unwrap(); run_to_block(3); @@ -1287,14 +1287,14 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 2 }) )); assert_ok!(Scheduler::do_schedule( DispatchTime::At(4), None, 127, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); // 69 and 42 do not fit together run_to_block(4); @@ -1312,14 +1312,14 @@ mod tests { None, 0, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 2 }) )); assert_ok!(Scheduler::do_schedule( DispatchTime::At(4), None, 0, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); // With base weights, 69 and 42 should not fit together, but do because of hard deadlines run_to_block(4); @@ -1335,14 +1335,14 @@ mod tests { None, 1, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 2 }) )); assert_ok!(Scheduler::do_schedule( DispatchTime::At(4), None, 0, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); run_to_block(4); assert_eq!(logger::log(), vec![(root(), 69u32), (root(), 42u32)]); @@ -1357,21 +1357,21 @@ mod tests { None, 255, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 3)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 3 }) )); 
assert_ok!(Scheduler::do_schedule( DispatchTime::At(4), None, 127, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); assert_ok!(Scheduler::do_schedule( DispatchTime::At(4), None, 126, root(), - Call::Logger(LoggerCall::log(2600, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 2600, weight: MaximumSchedulerWeight::get() / 2 }) )); // 2600 does not fit with 69 or 42, but has higher priority, so will go through @@ -1400,7 +1400,7 @@ mod tests { None, 255, root(), - Call::Logger(LoggerCall::log(3, MaximumSchedulerWeight::get() / 3)) + Call::Logger(LoggerCall::log { i: 3, weight: MaximumSchedulerWeight::get() / 3 }) )); // Anon Periodic assert_ok!(Scheduler::do_schedule( @@ -1408,7 +1408,7 @@ mod tests { Some((1000, 3)), 128, root(), - Call::Logger(LoggerCall::log(42, MaximumSchedulerWeight::get() / 3)) + Call::Logger(LoggerCall::log { i: 42, weight: MaximumSchedulerWeight::get() / 3 }) )); // Anon assert_ok!(Scheduler::do_schedule( @@ -1416,7 +1416,7 @@ mod tests { None, 127, root(), - Call::Logger(LoggerCall::log(69, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 69, weight: MaximumSchedulerWeight::get() / 2 }) )); // Named Periodic assert_ok!(Scheduler::do_schedule_named( @@ -1425,7 +1425,7 @@ mod tests { Some((1000, 3)), 126, root(), - Call::Logger(LoggerCall::log(2600, MaximumSchedulerWeight::get() / 2)) + Call::Logger(LoggerCall::log { i: 2600, weight: MaximumSchedulerWeight::get() / 2 }) )); // Will include the named periodic only @@ -1469,8 +1469,8 @@ mod tests { #[test] fn root_calls_works() { new_test_ext().execute_with(|| { - let call = Box::new(Call::Logger(LoggerCall::log(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log(42, 1000))); + let call = Box::new(Call::Logger(LoggerCall::log { i: 69, weight: 1000 })); + let call2 = Box::new(Call::Logger(LoggerCall::log { i: 42, weight: 
1000 })); assert_ok!(Scheduler::schedule_named( Origin::root(), 1u32.encode(), @@ -1497,8 +1497,8 @@ mod tests { new_test_ext().execute_with(|| { run_to_block(3); - let call = Box::new(Call::Logger(LoggerCall::log(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log(42, 1000))); + let call = Box::new(Call::Logger(LoggerCall::log { i: 69, weight: 1000 })); + let call2 = Box::new(Call::Logger(LoggerCall::log { i: 42, weight: 1000 })); assert_err!( Scheduler::schedule_named(Origin::root(), 1u32.encode(), 2, None, 127, call), @@ -1520,8 +1520,8 @@ mod tests { #[test] fn should_use_orign() { new_test_ext().execute_with(|| { - let call = Box::new(Call::Logger(LoggerCall::log(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log(42, 1000))); + let call = Box::new(Call::Logger(LoggerCall::log { i: 69, weight: 1000 })); + let call2 = Box::new(Call::Logger(LoggerCall::log { i: 42, weight: 1000 })); assert_ok!(Scheduler::schedule_named( system::RawOrigin::Signed(1).into(), 1u32.encode(), @@ -1552,8 +1552,8 @@ mod tests { #[test] fn should_check_orign() { new_test_ext().execute_with(|| { - let call = Box::new(Call::Logger(LoggerCall::log(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log(42, 1000))); + let call = Box::new(Call::Logger(LoggerCall::log { i: 69, weight: 1000 })); + let call2 = Box::new(Call::Logger(LoggerCall::log { i: 42, weight: 1000 })); assert_noop!( Scheduler::schedule_named( system::RawOrigin::Signed(2).into(), @@ -1575,8 +1575,8 @@ mod tests { #[test] fn should_check_orign_for_cancel() { new_test_ext().execute_with(|| { - let call = Box::new(Call::Logger(LoggerCall::log_without_filter(69, 1000))); - let call2 = Box::new(Call::Logger(LoggerCall::log_without_filter(42, 1000))); + let call = Box::new(Call::Logger(LoggerCall::log_without_filter { i: 69, weight: 1000 })); + let call2 = Box::new(Call::Logger(LoggerCall::log_without_filter { i: 42, weight: 1000 })); assert_ok!(Scheduler::schedule_named( 
system::RawOrigin::Signed(1).into(), 1u32.encode(), @@ -1626,14 +1626,14 @@ mod tests { Some(ScheduledV1 { maybe_id: None, priority: i as u8 + 10, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, }), None, Some(ScheduledV1 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), }), ]; @@ -1653,7 +1653,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 10, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: root(), _phantom: PhantomData::::default(), @@ -1662,7 +1662,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: root(), _phantom: PhantomData::::default(), @@ -1675,7 +1675,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 11, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: root(), _phantom: PhantomData::::default(), @@ -1684,7 +1684,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: root(), _phantom: PhantomData::::default(), @@ -1697,7 +1697,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 12, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: root(), _phantom: PhantomData::::default(), @@ -1706,7 +1706,7 @@ mod tests { Some(ScheduledV2 { maybe_id: 
Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: root(), _phantom: PhantomData::::default(), @@ -1729,7 +1729,7 @@ mod tests { Some(Scheduled { maybe_id: None, priority: i as u8 + 10, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), origin: 3u32, maybe_periodic: None, _phantom: Default::default(), @@ -1739,7 +1739,7 @@ mod tests { maybe_id: Some(b"test".to_vec()), priority: 123, origin: 2u32, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), _phantom: Default::default(), }), @@ -1768,7 +1768,7 @@ mod tests { Some(ScheduledV2::<_, _, OriginCaller, u64> { maybe_id: None, priority: 10, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: system::RawOrigin::Root.into(), _phantom: PhantomData::::default(), @@ -1777,7 +1777,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: system::RawOrigin::None.into(), _phantom: PhantomData::::default(), @@ -1790,7 +1790,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 11, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: system::RawOrigin::Root.into(), _phantom: PhantomData::::default(), @@ -1799,7 +1799,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: 
system::RawOrigin::None.into(), _phantom: PhantomData::::default(), @@ -1812,7 +1812,7 @@ mod tests { Some(ScheduledV2 { maybe_id: None, priority: 12, - call: Call::Logger(LoggerCall::log(96, 100)), + call: Call::Logger(LoggerCall::log { i: 96, weight: 100 }), maybe_periodic: None, origin: system::RawOrigin::Root.into(), _phantom: PhantomData::::default(), @@ -1821,7 +1821,7 @@ mod tests { Some(ScheduledV2 { maybe_id: Some(b"test".to_vec()), priority: 123, - call: Call::Logger(LoggerCall::log(69, 1000)), + call: Call::Logger(LoggerCall::log { i: 69, weight: 1000 }), maybe_periodic: Some((456u64, 10)), origin: system::RawOrigin::None.into(), _phantom: PhantomData::::default(), diff --git a/frame/sudo/src/tests.rs b/frame/sudo/src/tests.rs index 9437f20832c44..d19d24e389f4b 100644 --- a/frame/sudo/src/tests.rs +++ b/frame/sudo/src/tests.rs @@ -39,12 +39,12 @@ fn sudo_basics() { // Configure a default test environment and set the root `key` to 1. new_test_ext(1).execute_with(|| { // A privileged function should work when `sudo` is passed the root `key` as `origin`. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_ok!(Sudo::sudo(Origin::signed(1), call)); assert_eq!(Logger::i32_log(), vec![42i32]); // A privileged function should not work when `sudo` is passed a non-root `key` as `origin`. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_noop!(Sudo::sudo(Origin::signed(2), call), Error::::RequireSudo); }); } @@ -56,7 +56,7 @@ fn sudo_emits_events_correctly() { System::set_block_number(1); // Should emit event to indicate success when called with the root `key` and `call` is `Ok`. 
- let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1 })); assert_ok!(Sudo::sudo(Origin::signed(1), call)); System::assert_has_event(TestEvent::Sudo(Event::Sudid(Ok(())))); }) @@ -66,12 +66,12 @@ fn sudo_emits_events_correctly() { fn sudo_unchecked_weight_basics() { new_test_ext(1).execute_with(|| { // A privileged function should work when `sudo` is passed the root `key` as origin. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_ok!(Sudo::sudo_unchecked_weight(Origin::signed(1), call, 1_000)); assert_eq!(Logger::i32_log(), vec![42i32]); // A privileged function should not work when called with a non-root `key`. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_noop!( Sudo::sudo_unchecked_weight(Origin::signed(2), call, 1_000), Error::::RequireSudo, @@ -80,8 +80,8 @@ fn sudo_unchecked_weight_basics() { assert_eq!(Logger::i32_log(), vec![42i32]); // Controls the dispatched weight. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1))); - let sudo_unchecked_weight_call = SudoCall::sudo_unchecked_weight(call, 1_000); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1 })); + let sudo_unchecked_weight_call = SudoCall::sudo_unchecked_weight { call, _weight: 1_000 }; let info = sudo_unchecked_weight_call.get_dispatch_info(); assert_eq!(info.weight, 1_000); }); @@ -94,7 +94,7 @@ fn sudo_unchecked_weight_emits_events_correctly() { System::set_block_number(1); // Should emit event to indicate success when called with the root `key` and `call` is `Ok`. 
- let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1 })); assert_ok!(Sudo::sudo_unchecked_weight(Origin::signed(1), call, 1_000)); System::assert_has_event(TestEvent::Sudo(Event::Sudid(Ok(())))); }) @@ -133,17 +133,17 @@ fn set_key_emits_events_correctly() { fn sudo_as_basics() { new_test_ext(1).execute_with(|| { // A privileged function will not work when passed to `sudo_as`. - let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log(42, 1_000))); + let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1_000 })); assert_ok!(Sudo::sudo_as(Origin::signed(1), 2, call)); assert!(Logger::i32_log().is_empty()); assert!(Logger::account_log().is_empty()); // A non-privileged function should not work when called with a non-root `key`. - let call = Box::new(Call::Logger(LoggerCall::non_privileged_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::non_privileged_log { i: 42, weight: 1 })); assert_noop!(Sudo::sudo_as(Origin::signed(3), 2, call), Error::::RequireSudo); // A non-privileged function will work when passed to `sudo_as` with the root `key`. - let call = Box::new(Call::Logger(LoggerCall::non_privileged_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::non_privileged_log { i: 42, weight: 1 })); assert_ok!(Sudo::sudo_as(Origin::signed(1), 2, call)); assert_eq!(Logger::i32_log(), vec![42i32]); // The correct user makes the call within `sudo_as`. @@ -158,7 +158,7 @@ fn sudo_as_emits_events_correctly() { System::set_block_number(1); // A non-privileged function will work when passed to `sudo_as` with the root `key`. 
- let call = Box::new(Call::Logger(LoggerCall::non_privileged_log(42, 1))); + let call = Box::new(Call::Logger(LoggerCall::non_privileged_log { i: 42, weight: 1 })); assert_ok!(Sudo::sudo_as(Origin::signed(1), 2, call)); System::assert_has_event(TestEvent::Sudo(Event::SudoAsDone(Ok(())))); }); From d2c8dfd01e6b5a09932ce390cb01bbe3d29b1708 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 22 Jul 2021 10:58:55 +0100 Subject: [PATCH 380/503] More call variant struct updates --- frame/support/src/dispatch.rs | 47 ----------------------------------- frame/support/src/weights.rs | 16 ++++++------ 2 files changed, 8 insertions(+), 55 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 12a7753c465d0..0e8f0e9f8e0db 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2487,52 +2487,6 @@ mod tests { } } - fn expected_calls() -> Vec { - vec![ - FunctionMetadata { name: "aux_0", args: vec![], docs: vec![" Hi, this is a comment."] }, - FunctionMetadata { - name: "aux_1", - args: vec![FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::>(), - }], - docs: vec![], - }, - FunctionMetadata { - name: "aux_2", - args: vec![ - FunctionArgumentMetadata { name: "_data", ty: scale_info::meta_type::() }, - FunctionArgumentMetadata { - name: "_data2", - ty: scale_info::meta_type::(), - }, - ], - docs: vec![], - }, - FunctionMetadata { name: "aux_3", args: vec![], docs: vec![] }, - FunctionMetadata { - name: "aux_4", - args: vec![FunctionArgumentMetadata { - name: "_data", - ty: scale_info::meta_type::(), - }], - docs: vec![], - }, - FunctionMetadata { - name: "aux_5", - args: vec![ - FunctionArgumentMetadata { name: "_data", ty: scale_info::meta_type::() }, - FunctionArgumentMetadata { - name: "_data2", - ty: scale_info::meta_type::>(), - }, - ], - docs: vec![], - }, - FunctionMetadata { name: "operational", args: vec![], docs: vec![] }, - ] - } - #[derive(scale_info::TypeInfo)] pub struct 
TraitImpl {} impl Config for TraitImpl {} @@ -2617,7 +2571,6 @@ mod tests { fn module_json_metadata() { let metadata = Module::::call_functions(); let expected_metadata = PalletCallMetadata { - calls: expected_calls(), ty: scale_info::meta_type::>(), }; assert_eq!(expected_metadata, metadata); diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 517978f40c03b..681d51174ca96 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -876,49 +876,49 @@ mod tests { #[test] fn weights_are_correct() { // #[weight = 1000] - let info = Call::::f00().get_dispatch_info(); + let info = Call::::f00 { }.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (1000, DispatchClass::Mandatory)] - let info = Call::::f01().get_dispatch_info(); + let info = Call::::f01 { }.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Mandatory); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (1000, Pays::No)] - let info = Call::::f02().get_dispatch_info(); + let info = Call::::f02 { }.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::No); // #[weight = (1000, DispatchClass::Operational, Pays::No)] - let info = Call::::f03().get_dispatch_info(); + let info = Call::::f03 { }.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Operational); assert_eq!(info.pays_fee, Pays::No); // #[weight = ((_a * 10 + _eb * 1) as Weight, DispatchClass::Normal, Pays::Yes)] - let info = Call::::f11(13, 20).get_dispatch_info(); + let info = Call::::f11 { _a: 13, _eb: 20 }.get_dispatch_info(); assert_eq!(info.weight, 150); // 13*10 + 20 assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (0, DispatchClass::Operational, Pays::Yes)] - let info = Call::::f12(10, 
20).get_dispatch_info(); + let info = Call::::f12 { _a: 10, _eb: 20 }.get_dispatch_info(); assert_eq!(info.weight, 0); assert_eq!(info.class, DispatchClass::Operational); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = T::DbWeight::get().reads(3) + T::DbWeight::get().writes(2) + 10_000] - let info = Call::::f20().get_dispatch_info(); + let info = Call::::f20 { }.get_dispatch_info(); assert_eq!(info.weight, 12300); // 100*3 + 1000*2 + 10_1000 assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = T::DbWeight::get().reads_writes(6, 5) + 40_000] - let info = Call::::f21().get_dispatch_info(); + let info = Call::::f21 { }.get_dispatch_info(); assert_eq!(info.weight, 45600); // 100*6 + 1000*5 + 40_1000 assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); From bfd13c19cbbfd68d7dbdde120d7f11a9a6f88dab Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 22 Jul 2021 10:59:16 +0100 Subject: [PATCH 381/503] Remove unused import --- frame/support/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index 26fc7f810effc..e155405512c50 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -803,7 +803,7 @@ pub mod tests { }; use codec::{Codec, EncodeLike}; use sp_io::TestExternalities; - use sp_std::{marker::PhantomData, result}; + use sp_std::result; /// A PalletInfo implementation which just panics. 
pub struct PanicPalletInfo; From 56512956df8880d36af4fdb0c29c205d124b606f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 22 Jul 2021 11:35:48 +0100 Subject: [PATCH 382/503] More call variant structs --- frame/collective/src/lib.rs | 8 ++--- frame/example/src/lib.rs | 2 +- frame/executive/src/lib.rs | 20 ++++++----- frame/multisig/src/tests.rs | 50 +++++++++++++++------------- frame/proxy/src/tests.rs | 18 ++++++---- frame/transaction-payment/src/lib.rs | 4 +-- 6 files changed, 57 insertions(+), 45 deletions(-) diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index a15297517da89..11fb44a79f988 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -1103,7 +1103,7 @@ mod tests { } fn make_proposal(value: u64) -> Call { - Call::System(frame_system::Call::remark(value.encode())) + Call::System(frame_system::Call::remark { _remark: value.encode() }) } #[test] @@ -1171,7 +1171,7 @@ mod tests { fn proposal_weight_limit_works_on_approve() { new_test_ext().execute_with(|| { let proposal = - Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -1210,7 +1210,7 @@ mod tests { fn proposal_weight_limit_ignored_on_disapprove() { new_test_ext().execute_with(|| { let proposal = - Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -1553,7 +1553,7 @@ mod tests { fn 
correct_validate_and_get_proposal() { new_test_ext().execute_with(|| { let proposal = - Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); let length = proposal.encode().len() as u32; assert_ok!(Collective::propose( Origin::signed(1), diff --git a/frame/example/src/lib.rs b/frame/example/src/lib.rs index 5df2e894e1a9c..ad3fa8dceda05 100644 --- a/frame/example/src/lib.rs +++ b/frame/example/src/lib.rs @@ -717,7 +717,7 @@ where // check for `set_dummy` match call.is_sub_type() { - Some(Call::set_dummy(..)) => { + Some(Call::set_dummy { .. }) => { sp_runtime::print("set_dummy was received."); let mut valid_tx = ValidTransaction::default(); diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index 3e2cdd241f6df..563e84b368242 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -645,7 +645,7 @@ mod tests { None } fn is_inherent(call: &Self::Call) -> bool { - *call == Call::::inherent_call() + *call == Call::::inherent_call { } } } @@ -658,7 +658,7 @@ mod tests { call: &Self::Call, ) -> TransactionValidity { match call { - Call::allowed_unsigned(..) => Ok(Default::default()), + Call::allowed_unsigned { .. } => Ok(Default::default()), _ => UnknownTransaction::NoUnsignedValidator.into(), } } @@ -666,8 +666,8 @@ mod tests { // Inherent call is accepted for being dispatched fn pre_dispatch(call: &Self::Call) -> Result<(), TransactionValidityError> { match call { - Call::allowed_unsigned(..) => Ok(()), - Call::inherent_call(..) => Ok(()), + Call::allowed_unsigned { .. } => Ok(()), + Call::inherent_call { .. 
} => Ok(()), _ => Err(UnknownTransaction::NoUnsignedValidator.into()), } } @@ -809,13 +809,17 @@ mod tests { Some((who, extra(nonce, fee))) } + fn call_transfer(dest: u32, value: u32) -> Call { + Call::Balances(BalancesCall::transfer { dest, value }) + } + #[test] fn balance_transfer_dispatch_works() { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_balances::GenesisConfig:: { balances: vec![(1, 211)] } .assimilate_storage(&mut t) .unwrap(); - let xt = TestXt::new(Call::Balances(BalancesCall::transfer(2, 69)), sign_extra(1, 0, 0)); + let xt = TestXt::new(call_transfer(2, 69), sign_extra(1, 0, 0)); let weight = xt.get_dispatch_info().weight + ::BlockWeights::get() .get(DispatchClass::Normal) @@ -912,7 +916,7 @@ mod tests { fn bad_extrinsic_not_inserted() { let mut t = new_test_ext(1); // bad nonce check! - let xt = TestXt::new(Call::Balances(BalancesCall::transfer(33, 69)), sign_extra(1, 30, 0)); + let xt = TestXt::new(call_transfer(33, 69), sign_extra(1, 30, 0)); t.execute_with(|| { Executive::initialize_block(&Header::new( 1, @@ -1378,8 +1382,8 @@ mod tests { #[test] fn valid_inherents_position_works() { - let xt1 = TestXt::new(Call::Custom(custom::Call::inherent_call()), None); - let xt2 = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); + let xt1 = TestXt::new(Call::Custom(custom::Call::inherent_call { }), None); + let xt2 = TestXt::new(call_transfer(33, 0), sign_extra(1, 0, 0)); let header = new_test_ext(1).execute_with(|| { // Let's build some fake block. diff --git a/frame/multisig/src/tests.rs b/frame/multisig/src/tests.rs index 6dba6f7d4ab5a..c036c36d5a1b4 100644 --- a/frame/multisig/src/tests.rs +++ b/frame/multisig/src/tests.rs @@ -99,7 +99,7 @@ impl Filter for TestBaseCallFilter { match *c { Call::Balances(_) => true, // Needed for benchmarking - Call::System(frame_system::Call::remark(_)) => true, + Call::System(frame_system::Call::remark { .. 
}) => true, _ => false, } } @@ -132,6 +132,10 @@ fn now() -> Timepoint { Multisig::timepoint() } +fn call_transfer(dest: u32, value: u32) -> Call { + Call::Balances(BalancesCall::transfer { dest, value }) +} + #[test] fn multisig_deposit_is_taken_and_returned() { new_test_ext().execute_with(|| { @@ -140,7 +144,7 @@ fn multisig_deposit_is_taken_and_returned() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); assert_ok!(Multisig::as_multi( @@ -177,7 +181,7 @@ fn multisig_deposit_is_taken_and_returned_with_call_storage() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -206,7 +210,7 @@ fn multisig_deposit_is_taken_and_returned_with_alt_call_storage() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -254,7 +258,7 @@ fn multisig_deposit_is_taken_and_returned_with_alt_call_storage() { #[test] fn cancel_multisig_returns_deposit() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_ok!(Multisig::approve_as_multi( Origin::signed(1), @@ -294,7 +298,7 @@ fn timepoint_checking_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); 
assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_noop!( @@ -339,7 +343,7 @@ fn multisig_2_of_3_works_with_call_storing() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -366,7 +370,7 @@ fn multisig_2_of_3_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -394,7 +398,7 @@ fn multisig_3_of_3_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -432,7 +436,7 @@ fn multisig_3_of_3_works() { #[test] fn cancel_multisig_works() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_ok!(Multisig::approve_as_multi( Origin::signed(1), @@ -467,7 +471,7 @@ fn cancel_multisig_works() { #[test] fn cancel_multisig_with_call_storage_works() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); 
assert_ok!(Multisig::as_multi(Origin::signed(1), 3, vec![2, 3], None, call, true, 0)); assert_eq!(Balances::free_balance(1), 4); @@ -497,7 +501,7 @@ fn cancel_multisig_with_call_storage_works() { #[test] fn cancel_multisig_with_alt_call_storage_works() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_ok!(Multisig::approve_as_multi( Origin::signed(1), @@ -532,7 +536,7 @@ fn multisig_2_of_3_as_multi_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); assert_ok!(Multisig::as_multi( @@ -567,10 +571,10 @@ fn multisig_2_of_3_as_multi_with_many_calls_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call1 = Call::Balances(BalancesCall::transfer(6, 10)); + let call1 = call_transfer(6, 10); let call1_weight = call1.get_dispatch_info().weight; let data1 = call1.encode(); - let call2 = Call::Balances(BalancesCall::transfer(7, 5)); + let call2 = call_transfer(7, 5); let call2_weight = call2.get_dispatch_info().weight; let data2 = call2.encode(); @@ -624,7 +628,7 @@ fn multisig_2_of_3_cannot_reissue_same_call() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 10)); + let call = call_transfer(6, 10); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); @@ -677,7 +681,7 @@ fn multisig_2_of_3_cannot_reissue_same_call() { #[test] fn minimum_threshold_check_works() { new_test_ext().execute_with(|| { - let call = 
Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); assert_noop!( Multisig::as_multi(Origin::signed(1), 0, vec![2], None, call.clone(), false, 0), Error::::MinimumThreshold, @@ -692,7 +696,7 @@ fn minimum_threshold_check_works() { #[test] fn too_many_signatories_fails() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); assert_noop!( Multisig::as_multi(Origin::signed(1), 2, vec![2, 3, 4], None, call.clone(), false, 0), Error::::TooManySignatories, @@ -703,7 +707,7 @@ fn too_many_signatories_fails() { #[test] fn duplicate_approvals_are_ignored() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_ok!(Multisig::approve_as_multi( Origin::signed(1), @@ -754,7 +758,7 @@ fn multisig_1_of_3_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); + let call = call_transfer(6, 15).encode(); let hash = blake2_256(&call); assert_noop!( Multisig::approve_as_multi(Origin::signed(1), 1, vec![2, 3], None, hash.clone(), 0), @@ -764,7 +768,7 @@ fn multisig_1_of_3_works() { Multisig::as_multi(Origin::signed(1), 1, vec![2, 3], None, call.clone(), false, 0), Error::::MinimumThreshold, ); - let boxed_call = Box::new(Call::Balances(BalancesCall::transfer(6, 15))); + let boxed_call = Box::new(call_transfer(6, 15)); assert_ok!(Multisig::as_multi_threshold_1(Origin::signed(1), vec![2, 3], boxed_call)); assert_eq!(Balances::free_balance(6), 15); @@ -790,7 +794,7 @@ fn weight_check_works() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let 
call = call_transfer(6, 15); let data = call.encode(); assert_ok!(Multisig::as_multi( Origin::signed(1), @@ -820,7 +824,7 @@ fn multisig_handles_no_preimage_after_all_approve() { assert_ok!(Balances::transfer(Origin::signed(2), multi, 5)); assert_ok!(Balances::transfer(Origin::signed(3), multi, 5)); - let call = Call::Balances(BalancesCall::transfer(6, 15)); + let call = call_transfer(6, 15); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); diff --git a/frame/proxy/src/tests.rs b/frame/proxy/src/tests.rs index 227cc85be596f..0bc12c226038e 100644 --- a/frame/proxy/src/tests.rs +++ b/frame/proxy/src/tests.rs @@ -133,7 +133,7 @@ impl InstanceFilter for ProxyType { match self { ProxyType::Any => true, ProxyType::JustTransfer => - matches!(c, Call::Balances(pallet_balances::Call::transfer(..))), + matches!(c, Call::Balances(pallet_balances::Call::transfer { .. })), ProxyType::JustUtility => matches!(c, Call::Utility(..)), } } @@ -198,6 +198,10 @@ fn expect_events(e: Vec) { assert_eq!(last_events(e.len()), e); } +fn call_transfer(dest: u32, value: u32) -> Call { + Call::Balances(BalancesCall::transfer { dest, value }) +} + #[test] fn announcement_works() { new_test_ext().execute_with(|| { @@ -279,7 +283,7 @@ fn announcer_must_be_proxy() { fn delayed_requires_pre_announcement() { new_test_ext().execute_with(|| { assert_ok!(Proxy::add_proxy(Origin::signed(1), 2, ProxyType::Any, 1)); - let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); let e = Error::::Unannounced; assert_noop!(Proxy::proxy(Origin::signed(2), 1, None, call.clone()), e); let e = Error::::Unannounced; @@ -296,7 +300,7 @@ fn proxy_announced_removes_announcement_and_returns_deposit() { new_test_ext().execute_with(|| { assert_ok!(Proxy::add_proxy(Origin::signed(1), 3, ProxyType::Any, 1)); assert_ok!(Proxy::add_proxy(Origin::signed(2), 3, ProxyType::Any, 1)); - let call = 
Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); let call_hash = BlakeTwo256::hash_of(&call); assert_ok!(Proxy::announce(Origin::signed(3), 1, call_hash)); assert_ok!(Proxy::announce(Origin::signed(3), 2, call_hash)); @@ -320,7 +324,7 @@ fn filtering_works() { assert_ok!(Proxy::add_proxy(Origin::signed(1), 3, ProxyType::JustTransfer, 0)); assert_ok!(Proxy::add_proxy(Origin::signed(1), 4, ProxyType::JustUtility, 0)); - let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); @@ -330,7 +334,7 @@ fn filtering_works() { let derivative_id = Utility::derivative_account_id(1, 0); assert!(Balances::mutate_account(&derivative_id, |a| a.free = 1000).is_ok()); - let inner = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let inner = Box::new(call_transfer(6, 1)); let call = Box::new(Call::Utility(UtilityCall::as_derivative(0, inner.clone()))); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); @@ -438,7 +442,7 @@ fn proxying_works() { assert_ok!(Proxy::add_proxy(Origin::signed(1), 2, ProxyType::JustTransfer, 0)); assert_ok!(Proxy::add_proxy(Origin::signed(1), 3, ProxyType::Any, 0)); - let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); assert_noop!( Proxy::proxy(Origin::signed(4), 1, None, call.clone()), Error::::NotProxy @@ -490,7 +494,7 @@ fn anonymous_works() { System::set_block_number(2); assert_ok!(Proxy::anonymous(Origin::signed(1), ProxyType::Any, 0, 0)); - let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); + let call = Box::new(call_transfer(6, 1)); assert_ok!(Balances::transfer(Origin::signed(3), anon, 5)); assert_ok!(Proxy::proxy(Origin::signed(1), anon, None, 
call)); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); diff --git a/frame/transaction-payment/src/lib.rs b/frame/transaction-payment/src/lib.rs index 03a4992431c73..cd76f0afa58b3 100644 --- a/frame/transaction-payment/src/lib.rs +++ b/frame/transaction-payment/src/lib.rs @@ -703,7 +703,7 @@ mod tests { ); const CALL: &::Call = - &Call::Balances(BalancesCall::transfer(2, 69)); + &Call::Balances(BalancesCall::transfer { dest: 2, value: 69 }); thread_local! { static EXTRINSIC_BASE_WEIGHT: RefCell = RefCell::new(0); @@ -1043,7 +1043,7 @@ mod tests { #[test] fn query_info_works() { - let call = Call::Balances(BalancesCall::transfer(2, 69)); + let call = Call::Balances(BalancesCall::transfer { dest: 2, value: 69 }); let origin = 111111; let extra = (); let xt = TestXt::new(call, Some((origin, extra))); From 529d0d8db87f37d2681f074f1c776548a1b652bf Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 22 Jul 2021 12:37:44 +0100 Subject: [PATCH 383/503] More call variant structs --- frame/executive/src/lib.rs | 14 ++-- frame/identity/src/types.rs | 4 +- frame/lottery/src/tests.rs | 32 ++++----- frame/recovery/src/tests.rs | 4 +- frame/utility/src/tests.rs | 130 +++++++++++++++++++----------------- 5 files changed, 95 insertions(+), 89 deletions(-) diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index 563e84b368242..2e098291d8676 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -937,7 +937,7 @@ mod tests { fn block_weight_limit_enforced() { let mut t = new_test_ext(10000); // given: TestXt uses the encoded len as fixed Len: - let xt = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); + let xt = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 0, 0)); let encoded = xt.encode(); let encoded_len = encoded.len() as Weight; // on_initialize weight + base block execution weight @@ -958,7 +958,7 @@ mod tests { for nonce in 
0..=num_to_exhaust_block { let xt = TestXt::new( - Call::Balances(BalancesCall::transfer(33, 0)), + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, nonce.into(), 0), ); let res = Executive::apply_extrinsic(xt); @@ -982,9 +982,9 @@ mod tests { #[test] fn block_weight_and_size_is_stored_per_tx() { - let xt = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); - let x1 = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 1, 0)); - let x2 = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 2, 0)); + let xt = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 0, 0)); + let x1 = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 1, 0)); + let x2 = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 2, 0)); let len = xt.clone().encode().len() as u32; let mut t = new_test_ext(1); t.execute_with(|| { @@ -1226,7 +1226,7 @@ mod tests { /// used through the `ExecuteBlock` trait. #[test] fn custom_runtime_upgrade_is_called_when_using_execute_block_trait() { - let xt = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); + let xt = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 0, 0)); let header = new_test_ext(1).execute_with(|| { // Make sure `on_runtime_upgrade` is called. 
@@ -1356,7 +1356,7 @@ mod tests { #[test] #[should_panic(expected = "Invalid inherent position for extrinsic at index 1")] fn invalid_inherent_position_fail() { - let xt1 = TestXt::new(Call::Balances(BalancesCall::transfer(33, 0)), sign_extra(1, 0, 0)); + let xt1 = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 0, 0)); let xt2 = TestXt::new(Call::Custom(custom::Call::inherent_call()), None); let header = new_test_ext(1).execute_with(|| { diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index 5ef4eef464715..2218d9b6e1c58 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -438,8 +438,6 @@ mod tests { .find(|v| v.name() == &variant_name) .expect(&format!("Expected to find variant {}", variant_name)); - let index = variant.index().expect("index for all variants should be set"); - let field_arr_len = variant .fields() .first() @@ -454,7 +452,7 @@ mod tests { .unwrap_or(0); let encoded = data.encode(); - assert_eq!(encoded[0], index); + assert_eq!(encoded[0], variant.index()); assert_eq!(encoded.len() as u32 - 1, field_arr_len); } else { panic!("Should be a variant type") diff --git a/frame/lottery/src/tests.rs b/frame/lottery/src/tests.rs index 800ae223d9739..c4415f782239f 100644 --- a/frame/lottery/src/tests.rs +++ b/frame/lottery/src/tests.rs @@ -43,8 +43,8 @@ fn basic_end_to_end_works() { let length = 20; let delay = 5; let calls = vec![ - Call::Balances(BalancesCall::force_transfer(0, 0, 0)), - Call::Balances(BalancesCall::transfer(0, 0)), + Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; // Set calls for the lottery @@ -55,7 +55,7 @@ fn basic_end_to_end_works() { assert!(crate::Lottery::::get().is_some()); assert_eq!(Balances::free_balance(&1), 100); - let call = Box::new(Call::Balances(BalancesCall::transfer(2, 20))); + let call = Box::new(Call::Balances(BalancesCall::transfer { 
dest: 2, value: 20 })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); // 20 from the transfer, 10 from buying a ticket assert_eq!(Balances::free_balance(&1), 100 - 20 - 10); @@ -96,16 +96,16 @@ fn set_calls_works() { assert!(!CallIndices::::exists()); let calls = vec![ - Call::Balances(BalancesCall::force_transfer(0, 0, 0)), - Call::Balances(BalancesCall::transfer(0, 0)), + Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; assert_ok!(Lottery::set_calls(Origin::root(), calls)); assert!(CallIndices::::exists()); let too_many_calls = vec![ - Call::Balances(BalancesCall::force_transfer(0, 0, 0)), - Call::Balances(BalancesCall::transfer(0, 0)), + Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), Call::System(SystemCall::remark(vec![])), ]; @@ -150,7 +150,7 @@ fn buy_ticket_works_as_simple_passthrough() { // as a simple passthrough to the real call. new_test_ext().execute_with(|| { // No lottery set up - let call = Box::new(Call::Balances(BalancesCall::transfer(2, 20))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 20 })); // This is just a basic transfer then assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); assert_eq!(Balances::free_balance(&1), 100 - 20); @@ -158,8 +158,8 @@ fn buy_ticket_works_as_simple_passthrough() { // Lottery is set up, but too expensive to enter, so `do_buy_ticket` fails. 
let calls = vec![ - Call::Balances(BalancesCall::force_transfer(0, 0, 0)), - Call::Balances(BalancesCall::transfer(0, 0)), + Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; assert_ok!(Lottery::set_calls(Origin::root(), calls)); @@ -170,13 +170,13 @@ fn buy_ticket_works_as_simple_passthrough() { assert_eq!(TicketsCount::::get(), 0); // If call would fail, the whole thing still fails the same - let fail_call = Box::new(Call::Balances(BalancesCall::transfer(2, 1000))); + let fail_call = Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 1000 })); assert_noop!( Lottery::buy_ticket(Origin::signed(1), fail_call), BalancesError::::InsufficientBalance, ); - let bad_origin_call = Box::new(Call::Balances(BalancesCall::force_transfer(0, 0, 0))); + let bad_origin_call = Box::new(Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 })); assert_noop!(Lottery::buy_ticket(Origin::signed(1), bad_origin_call), BadOrigin,); // User can call other txs, but doesn't get a ticket @@ -184,7 +184,7 @@ fn buy_ticket_works_as_simple_passthrough() { assert_ok!(Lottery::buy_ticket(Origin::signed(2), remark_call)); assert_eq!(TicketsCount::::get(), 0); - let successful_call = Box::new(Call::Balances(BalancesCall::transfer(2, 1))); + let successful_call = Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 1 })); assert_ok!(Lottery::buy_ticket(Origin::signed(2), successful_call)); assert_eq!(TicketsCount::::get(), 1); }); @@ -196,12 +196,12 @@ fn buy_ticket_works() { // Set calls for the lottery. 
let calls = vec![ Call::System(SystemCall::remark(vec![])), - Call::Balances(BalancesCall::transfer(0, 0)), + Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; assert_ok!(Lottery::set_calls(Origin::root(), calls)); // Can't buy ticket before start - let call = Box::new(Call::Balances(BalancesCall::transfer(2, 1))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 1 })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); assert_eq!(TicketsCount::::get(), 0); @@ -214,7 +214,7 @@ fn buy_ticket_works() { assert_eq!(TicketsCount::::get(), 1); // Can't buy another of the same ticket (even if call is slightly changed) - let call = Box::new(Call::Balances(BalancesCall::transfer(3, 30))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 3, value: 30 })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call)); assert_eq!(TicketsCount::::get(), 1); diff --git a/frame/recovery/src/tests.rs b/frame/recovery/src/tests.rs index 9065e9afe8861..de99b8d5e396b 100644 --- a/frame/recovery/src/tests.rs +++ b/frame/recovery/src/tests.rs @@ -44,7 +44,7 @@ fn set_recovered_works() { // Root can set a recovered account though assert_ok!(Recovery::set_recovered(Origin::root(), 5, 1)); // Account 1 should now be able to make a call through account 5 - let call = Box::new(Call::Balances(BalancesCall::transfer(1, 100))); + let call = Box::new(Call::Balances(BalancesCall::transfer { dest: 1, value: 100 })); assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // Account 1 has successfully drained the funds from account 5 assert_eq!(Balances::free_balance(1), 200); @@ -83,7 +83,7 @@ fn recovery_life_cycle_works() { assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // Account 1 should now be able to make a call through account 5 to get all of their funds assert_eq!(Balances::free_balance(5), 110); - let call = Box::new(Call::Balances(BalancesCall::transfer(1, 110))); + let call = 
Box::new(Call::Balances(BalancesCall::transfer { dest: 1, value: 110 })); assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // All funds have been fully recovered! assert_eq!(Balances::free_balance(1), 200); diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index 61890972d3a03..f5861bf53477f 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -143,10 +143,10 @@ impl Filter for TestBaseCallFilter { fn filter(c: &Call) -> bool { match *c { // Transfer works. Use `transfer_keep_alive` for a call that doesn't pass the filter. - Call::Balances(pallet_balances::Call::transfer(..)) => true, + Call::Balances(pallet_balances::Call::transfer { .. }) => true, Call::Utility(_) => true, // For benchmarking, this acts as a noop call - Call::System(frame_system::Call::remark(..)) => true, + Call::System(frame_system::Call::remark { .. }) => true, // For tests Call::Example(_) => true, _ => false, @@ -177,6 +177,14 @@ pub fn new_test_ext() -> sp_io::TestExternalities { ext } +fn call_transfer(dest: u64, value: u64) -> Call { + Call::Balances(BalancesCall::transfer { dest, value }) +} + +fn call_foobar(err: bool, _start_weight: u64, end_weight: Option) -> Call { + Call::Example(ExampleCall::foobar { err, _start_weight, end_weight }) +} + #[test] fn as_derivative_works() { new_test_ext().execute_with(|| { @@ -186,14 +194,14 @@ fn as_derivative_works() { Utility::as_derivative( Origin::signed(1), 1, - Box::new(Call::Balances(BalancesCall::transfer(6, 3))), + Box::new(call_transfer(6, 3)), ), BalancesError::::InsufficientBalance ); assert_ok!(Utility::as_derivative( Origin::signed(1), 0, - Box::new(Call::Balances(BalancesCall::transfer(2, 3))), + Box::new(call_transfer(2, 3)), )); assert_eq!(Balances::free_balance(sub_1_0), 2); assert_eq!(Balances::free_balance(2), 13); @@ -208,16 +216,16 @@ fn as_derivative_handles_weight_refund() { let diff = start_weight - end_weight; // Full weight when ok - let inner_call = 
Call::Example(ExampleCall::foobar(false, start_weight, None)); - let call = Call::Utility(UtilityCall::as_derivative(0, Box::new(inner_call))); + let inner_call = call_foobar(false, start_weight, None); + let call = Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let call = Call::Utility(UtilityCall::as_derivative(0, Box::new(inner_call))); + let inner_call = call_foobar(false, start_weight, Some(end_weight)); + let call = Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -225,8 +233,8 @@ fn as_derivative_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight - diff); // Full weight when err - let inner_call = Call::Example(ExampleCall::foobar(true, start_weight, None)); - let call = Call::Utility(UtilityCall::as_derivative(0, Box::new(inner_call))); + let inner_call = call_foobar(true, start_weight, None); + let call = Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_noop!( @@ -242,8 +250,8 @@ fn as_derivative_handles_weight_refund() { ); // Refund weight when err - let inner_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); - let call = Call::Utility(UtilityCall::as_derivative(0, Box::new(inner_call))); + let inner_call = call_foobar(true, start_weight, Some(end_weight)); + let call = Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = 
call.dispatch(Origin::signed(1)); assert_noop!( @@ -267,7 +275,7 @@ fn as_derivative_filters() { Utility::as_derivative( Origin::signed(1), 1, - Box::new(Call::Balances(pallet_balances::Call::transfer_keep_alive(2, 1))), + Box::new(Call::Balances(pallet_balances::Call::transfer_keep_alive { dest: 2, value: 1 })), ), DispatchError::BadOrigin ); @@ -285,8 +293,8 @@ fn batch_with_root_works() { assert_ok!(Utility::batch( Origin::root(), vec![ - Call::Balances(BalancesCall::force_transfer(1, 2, 5)), - Call::Balances(BalancesCall::force_transfer(1, 2, 5)), + Call::Balances(BalancesCall::force_transfer { source: 1, dest: 2, value: 5 }), + Call::Balances(BalancesCall::force_transfer { source: 1, dest: 2, value: 5 }), call, // Check filters are correctly bypassed ] )); @@ -304,8 +312,8 @@ fn batch_with_signed_works() { assert_ok!(Utility::batch( Origin::signed(1), vec![ - Call::Balances(BalancesCall::transfer(2, 5)), - Call::Balances(BalancesCall::transfer(2, 5)) + call_transfer(2, 5), + call_transfer(2, 5) ] ),); assert_eq!(Balances::free_balance(1), 0); @@ -318,7 +326,7 @@ fn batch_with_signed_filters() { new_test_ext().execute_with(|| { assert_ok!(Utility::batch( Origin::signed(1), - vec![Call::Balances(pallet_balances::Call::transfer_keep_alive(2, 1))] + vec![Call::Balances(pallet_balances::Call::transfer_keep_alive { dest: 2, value: 1 })] ),); System::assert_last_event( utility::Event::BatchInterrupted(0, DispatchError::BadOrigin).into(), @@ -334,9 +342,9 @@ fn batch_early_exit_works() { assert_ok!(Utility::batch( Origin::signed(1), vec![ - Call::Balances(BalancesCall::transfer(2, 5)), - Call::Balances(BalancesCall::transfer(2, 10)), - Call::Balances(BalancesCall::transfer(2, 5)), + call_transfer(2, 5), + call_transfer(2, 10), + call_transfer(2, 5), ] ),); assert_eq!(Balances::free_balance(1), 5); @@ -352,11 +360,11 @@ fn batch_weight_calculation_doesnt_overflow() { assert_eq!(big_call.get_dispatch_info().weight, Weight::max_value() / 2); // 3 * 50% saturates to 100% 
- let batch_call = Call::Utility(crate::Call::batch(vec![ + let batch_call = Call::Utility(crate::Call::batch { calls: vec![ big_call.clone(), big_call.clone(), big_call.clone(), - ])); + ]}); assert_eq!(batch_call.get_dispatch_info().weight, Weight::max_value()); }); @@ -371,18 +379,18 @@ fn batch_handles_weight_refund() { let batch_len: Weight = 4; // Full weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, None)); + let inner_call = call_foobar(false, start_weight, None); let batch_calls = vec![inner_call; batch_len as usize]; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); + let inner_call = call_foobar(false, start_weight, Some(end_weight)); let batch_calls = vec![inner_call; batch_len as usize]; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -390,10 +398,10 @@ fn batch_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight - diff * batch_len); // Full weight when err - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, None)); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, None)); + let good_call = call_foobar(false, start_weight, None); + let bad_call = call_foobar(true, start_weight, None); let batch_calls = vec![good_call, bad_call]; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = 
call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -404,11 +412,11 @@ fn batch_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when err - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); let batch_calls = vec![good_call, bad_call]; let batch_len = batch_calls.len() as Weight; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -418,10 +426,10 @@ fn batch_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight - diff * batch_len); // Partial batch completion - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); let batch_calls = vec![good_call, bad_call.clone(), bad_call]; - let call = Call::Utility(UtilityCall::batch(batch_calls)); + let call = Call::Utility(UtilityCall::batch { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -444,8 +452,8 @@ fn batch_all_works() { assert_ok!(Utility::batch_all( Origin::signed(1), vec![ - Call::Balances(BalancesCall::transfer(2, 5)), - Call::Balances(BalancesCall::transfer(2, 5)) + call_transfer(2, 5), + call_transfer(2, 5) ] ),); assert_eq!(Balances::free_balance(1), 0); @@ -456,7 +464,7 @@ fn 
batch_all_works() { #[test] fn batch_all_revert() { new_test_ext().execute_with(|| { - let call = Call::Balances(BalancesCall::transfer(2, 5)); + let call = call_transfer(2, 5); let info = call.get_dispatch_info(); assert_eq!(Balances::free_balance(1), 10); @@ -465,9 +473,9 @@ fn batch_all_revert() { Utility::batch_all( Origin::signed(1), vec![ - Call::Balances(BalancesCall::transfer(2, 5)), - Call::Balances(BalancesCall::transfer(2, 10)), - Call::Balances(BalancesCall::transfer(2, 5)), + call_transfer(2, 5), + call_transfer(2, 10), + call_transfer(2, 5), ] ), DispatchErrorWithPostInfo { @@ -494,18 +502,18 @@ fn batch_all_handles_weight_refund() { let batch_len: Weight = 4; // Full weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, None)); + let inner_call = call_foobar(false, start_weight, None); let batch_calls = vec![inner_call; batch_len as usize]; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when ok - let inner_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); + let inner_call = call_foobar(false, start_weight, Some(end_weight)); let batch_calls = vec![inner_call; batch_len as usize]; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -513,10 +521,10 @@ fn batch_all_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight - diff * batch_len); // Full weight when err - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, None)); - let bad_call = 
Call::Example(ExampleCall::foobar(true, start_weight, None)); + let good_call = call_foobar(false, start_weight, None); + let bad_call = call_foobar(true, start_weight, None); let batch_calls = vec![good_call, bad_call]; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_err_ignore_postinfo!(result, "The cake is a lie."); @@ -524,21 +532,21 @@ fn batch_all_handles_weight_refund() { assert_eq!(extract_actual_weight(&result, &info), info.weight); // Refund weight when err - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); let batch_calls = vec![good_call, bad_call]; let batch_len = batch_calls.len() as Weight; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_err_ignore_postinfo!(result, "The cake is a lie."); assert_eq!(extract_actual_weight(&result, &info), info.weight - diff * batch_len); // Partial batch completion - let good_call = Call::Example(ExampleCall::foobar(false, start_weight, Some(end_weight))); - let bad_call = Call::Example(ExampleCall::foobar(true, start_weight, Some(end_weight))); + let good_call = call_foobar(false, start_weight, Some(end_weight)); + let bad_call = call_foobar(true, start_weight, Some(end_weight)); let batch_calls = vec![good_call, bad_call.clone(), bad_call]; - let call = Call::Utility(UtilityCall::batch_all(batch_calls)); + let call = Call::Utility(UtilityCall::batch_all { calls: batch_calls }); let info = 
call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_err_ignore_postinfo!(result, "The cake is a lie."); @@ -553,11 +561,11 @@ fn batch_all_handles_weight_refund() { #[test] fn batch_all_does_not_nest() { new_test_ext().execute_with(|| { - let batch_all = Call::Utility(UtilityCall::batch_all(vec![ - Call::Balances(BalancesCall::transfer(2, 1)), - Call::Balances(BalancesCall::transfer(2, 1)), - Call::Balances(BalancesCall::transfer(2, 1)), - ])); + let batch_all = Call::Utility(UtilityCall::batch_all { calls: vec![ + call_transfer(2, 1), + call_transfer(2, 1), + call_transfer(2, 1), + ]}); let info = batch_all.get_dispatch_info(); @@ -577,7 +585,7 @@ fn batch_all_does_not_nest() { // And for those who want to get a little fancy, we check that the filter persists across // other kinds of dispatch wrapping functions... in this case `batch_all(batch(batch_all(..)))` - let batch_nested = Call::Utility(UtilityCall::batch(vec![batch_all])); + let batch_nested = Call::Utility(UtilityCall::batch { calls: vec![batch_all] }); // Batch will end with `Ok`, but does not actually execute as we can see from the event // and balances. 
assert_ok!(Utility::batch_all(Origin::signed(1), vec![batch_nested])); From 6b88259fd475ba9b1e1ff8e682543b3f78bcfe9f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 22 Jul 2021 13:29:29 +0100 Subject: [PATCH 384/503] Even more call variant structs --- frame/executive/src/lib.rs | 10 +++++----- frame/proxy/src/tests.rs | 22 +++++++++++----------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index 2e098291d8676..54d8ed7cfb1c6 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -809,7 +809,7 @@ mod tests { Some((who, extra(nonce, fee))) } - fn call_transfer(dest: u32, value: u32) -> Call { + fn call_transfer(dest: u64, value: u64) -> Call { Call::Balances(BalancesCall::transfer { dest, value }) } @@ -1038,8 +1038,8 @@ mod tests { #[test] fn validate_unsigned() { - let valid = TestXt::new(Call::Custom(custom::Call::allowed_unsigned()), None); - let invalid = TestXt::new(Call::Custom(custom::Call::unallowed_unsigned()), None); + let valid = TestXt::new(Call::Custom(custom::Call::allowed_unsigned { }), None); + let invalid = TestXt::new(Call::Custom(custom::Call::unallowed_unsigned { }), None); let mut t = new_test_ext(1); let mut default_with_prio_3 = ValidTransaction::default(); @@ -1330,7 +1330,7 @@ mod tests { #[test] fn calculating_storage_root_twice_works() { - let call = Call::Custom(custom::Call::calculate_storage_root()); + let call = Call::Custom(custom::Call::calculate_storage_root { }); let xt = TestXt::new(call, sign_extra(1, 0, 0)); let header = new_test_ext(1).execute_with(|| { @@ -1357,7 +1357,7 @@ mod tests { #[should_panic(expected = "Invalid inherent position for extrinsic at index 1")] fn invalid_inherent_position_fail() { let xt1 = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 0, 0)); - let xt2 = TestXt::new(Call::Custom(custom::Call::inherent_call()), None); + let xt2 = 
TestXt::new(Call::Custom(custom::Call::inherent_call { }), None); let header = new_test_ext(1).execute_with(|| { // Let's build some fake block. diff --git a/frame/proxy/src/tests.rs b/frame/proxy/src/tests.rs index 0bc12c226038e..e598d3a14b556 100644 --- a/frame/proxy/src/tests.rs +++ b/frame/proxy/src/tests.rs @@ -146,7 +146,7 @@ impl Filter for BaseFilter { fn filter(c: &Call) -> bool { match *c { // Remark is used as a no-op call in the benchmarking - Call::System(SystemCall::remark(_)) => true, + Call::System(SystemCall::remark { .. }) => true, Call::System(_) => false, _ => true, } @@ -198,7 +198,7 @@ fn expect_events(e: Vec) { assert_eq!(last_events(e.len()), e); } -fn call_transfer(dest: u32, value: u32) -> Call { +fn call_transfer(dest: u64, value: u64) -> Call { Call::Balances(BalancesCall::transfer { dest, value }) } @@ -336,7 +336,7 @@ fn filtering_works() { assert!(Balances::mutate_account(&derivative_id, |a| a.free = 1000).is_ok()); let inner = Box::new(call_transfer(6, 1)); - let call = Box::new(Call::Utility(UtilityCall::as_derivative(0, inner.clone()))); + let call = Box::new(Call::Utility(UtilityCall::as_derivative { index: 0, call: inner.clone() })); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); @@ -344,7 +344,7 @@ fn filtering_works() { assert_ok!(Proxy::proxy(Origin::signed(4), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); - let call = Box::new(Call::Utility(UtilityCall::batch(vec![*inner]))); + let call = Box::new(Call::Utility(UtilityCall::batch { calls: vec![*inner] })); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); expect_events(vec![ UtilityEvent::BatchCompleted.into(), @@ -358,8 +358,8 @@ fn filtering_works() { ProxyEvent::ProxyExecuted(Ok(())).into(), ]); - let inner = 
Box::new(Call::Proxy(ProxyCall::add_proxy(5, ProxyType::Any, 0))); - let call = Box::new(Call::Utility(UtilityCall::batch(vec![*inner]))); + let inner = Box::new(Call::Proxy(ProxyCall::new_call_variant_add_proxy(5, ProxyType::Any, 0))); + let call = Box::new(Call::Utility(UtilityCall::batch { calls: vec![*inner] })); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); expect_events(vec![ UtilityEvent::BatchCompleted.into(), @@ -373,7 +373,7 @@ fn filtering_works() { ProxyEvent::ProxyExecuted(Ok(())).into(), ]); - let call = Box::new(Call::Proxy(ProxyCall::remove_proxies())); + let call = Box::new(Call::Proxy(ProxyCall::remove_proxies { })); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(4), 1, None, call.clone())); @@ -455,13 +455,13 @@ fn proxying_works() { System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_eq!(Balances::free_balance(6), 1); - let call = Box::new(Call::System(SystemCall::set_code(vec![]))); + let call = Box::new(Call::System(SystemCall::set_code { code: vec![] })); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); - let call = Box::new(Call::Balances(BalancesCall::transfer_keep_alive(6, 1))); + let call = Box::new(Call::Balances(BalancesCall::transfer_keep_alive { dest: 6, value: 1 })); assert_ok!( - Call::Proxy(super::Call::proxy(1, None, call.clone())).dispatch(Origin::signed(2)) + Call::Proxy(super::Call::new_call_variant_proxy(1, None, call.clone())).dispatch(Origin::signed(2)) ); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); @@ -500,7 +500,7 @@ fn anonymous_works() { 
System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_eq!(Balances::free_balance(6), 1); - let call = Box::new(Call::Proxy(ProxyCall::kill_anonymous(1, ProxyType::Any, 0, 1, 0))); + let call = Box::new(Call::Proxy(ProxyCall::new_call_variant_kill_anonymous(1, ProxyType::Any, 0, 1, 0))); assert_ok!(Proxy::proxy(Origin::signed(2), anon2, None, call.clone())); let de = DispatchError::from(Error::::NoPermission).stripped(); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(de)).into()); From 0d69600d97015719c316ddc32a5c7595c507a809 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 22 Jul 2021 13:33:36 +0100 Subject: [PATCH 385/503] Mooar variant structs --- frame/executive/src/lib.rs | 2 +- frame/multisig/src/tests.rs | 4 ++-- frame/utility/src/tests.rs | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index 54d8ed7cfb1c6..a3fb6b1ef1316 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -1079,7 +1079,7 @@ mod tests { id, &1, 110, lock, ); let xt = - TestXt::new(Call::System(SystemCall::remark(vec![1u8])), sign_extra(1, 0, 0)); + TestXt::new(Call::System(SystemCall::remark { _remark: vec![1u8] }), sign_extra(1, 0, 0)); let weight = xt.get_dispatch_info().weight + ::BlockWeights::get() .get(DispatchClass::Normal) diff --git a/frame/multisig/src/tests.rs b/frame/multisig/src/tests.rs index c036c36d5a1b4..fa46011f6e8b4 100644 --- a/frame/multisig/src/tests.rs +++ b/frame/multisig/src/tests.rs @@ -132,7 +132,7 @@ fn now() -> Timepoint { Multisig::timepoint() } -fn call_transfer(dest: u32, value: u32) -> Call { +fn call_transfer(dest: u64, value: u64) -> Call { Call::Balances(BalancesCall::transfer { dest, value }) } @@ -778,7 +778,7 @@ fn multisig_1_of_3_works() { #[test] fn multisig_filters() { new_test_ext().execute_with(|| { - let call = Box::new(Call::System(frame_system::Call::set_code(vec![]))); + let call = 
Box::new(Call::System(frame_system::Call::set_code { code: vec![] })); assert_noop!( Multisig::as_multi_threshold_1(Origin::signed(1), vec![2], call.clone()), DispatchError::BadOrigin, diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index f5861bf53477f..9934e6cf0c0c6 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -286,7 +286,7 @@ fn as_derivative_filters() { fn batch_with_root_works() { new_test_ext().execute_with(|| { let k = b"a".to_vec(); - let call = Call::System(frame_system::Call::set_storage(vec![(k.clone(), k.clone())])); + let call = Call::System(frame_system::Call::set_storage { items: vec![(k.clone(), k.clone())] }); assert!(!TestBaseCallFilter::filter(&call)); assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::free_balance(2), 10); @@ -356,7 +356,7 @@ fn batch_early_exit_works() { fn batch_weight_calculation_doesnt_overflow() { use sp_runtime::Perbill; new_test_ext().execute_with(|| { - let big_call = Call::System(SystemCall::fill_block(Perbill::from_percent(50))); + let big_call = Call::System(SystemCall::fill_block { _ratio: Perbill::from_percent(50) }); assert_eq!(big_call.get_dispatch_info().weight, Weight::max_value() / 2); // 3 * 50% saturates to 100% From 490d742e824ee4fafc7a8e879c130982ed82c1b4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 22 Jul 2021 16:35:04 +0100 Subject: [PATCH 386/503] Evermore variant structs --- frame/contracts/src/exec.rs | 12 ++++++------ frame/democracy/src/tests.rs | 4 ++-- .../src/benchmarking.rs | 2 +- .../src/unsigned.rs | 16 ++++++++-------- frame/example/src/tests.rs | 6 +++--- frame/lottery/src/tests.rs | 8 ++++---- frame/recovery/src/tests.rs | 4 ++-- frame/support/test/tests/pallet_compatibility.rs | 4 ++-- .../test/tests/pallet_compatibility_instance.rs | 4 ++-- 9 files changed, 30 insertions(+), 30 deletions(-) diff --git a/frame/contracts/src/exec.rs b/frame/contracts/src/exec.rs index a862a98802e49..d4486a3c54df5 100644 --- 
a/frame/contracts/src/exec.rs +++ b/frame/contracts/src/exec.rs @@ -2428,7 +2428,7 @@ mod tests { #[test] fn call_runtime_works() { let code_hash = MockLoader::insert(Call, |ctx, _| { - let call = Call::System(frame_system::Call::remark_with_event(b"Hello World".to_vec())); + let call = Call::System(frame_system::Call::remark_with_event { remark: b"Hello World".to_vec() }); ctx.ext.call_runtime(call).unwrap(); exec_success() }); @@ -2462,20 +2462,20 @@ mod tests { use pallet_utility::Call as UtilCall; // remark should still be allowed - let allowed_call = Call::System(SysCall::remark_with_event(b"Hello".to_vec())); + let allowed_call = Call::System(SysCall::remark_with_event { remark: b"Hello".to_vec() }); // transfers are disallowed by the `TestFiler` (see below) - let forbidden_call = Call::Balances(BalanceCall::transfer(CHARLIE, 22)); + let forbidden_call = Call::Balances(BalanceCall::transfer { dest: CHARLIE, value: 22 }); // simple cases: direct call assert_err!(ctx.ext.call_runtime(forbidden_call.clone()), BadOrigin); // as part of a patch: return is OK (but it interrupted the batch) - assert_ok!(ctx.ext.call_runtime(Call::Utility(UtilCall::batch(vec![ + assert_ok!(ctx.ext.call_runtime(Call::Utility(UtilCall::batch { calls: vec![ allowed_call.clone(), forbidden_call, allowed_call - ]))),); + ]})),); // the transfer wasn't performed assert_eq!(get_balance(&CHARLIE), 0); @@ -2484,7 +2484,7 @@ mod tests { }); TestFilter::set_filter(|call| match call { - Call::Balances(pallet_balances::Call::transfer(_, _)) => false, + Call::Balances(pallet_balances::Call::transfer { .. 
}) => false, _ => true, }); diff --git a/frame/democracy/src/tests.rs b/frame/democracy/src/tests.rs index 64444304db673..7d29b07fce01e 100644 --- a/frame/democracy/src/tests.rs +++ b/frame/democracy/src/tests.rs @@ -72,7 +72,7 @@ frame_support::construct_runtime!( pub struct BaseFilter; impl Filter for BaseFilter { fn filter(call: &Call) -> bool { - !matches!(call, &Call::Balances(pallet_balances::Call::set_balance(..))) + !matches!(call, &Call::Balances(pallet_balances::Call::set_balance {..})) } } @@ -224,7 +224,7 @@ fn params_should_work() { } fn set_balance_proposal(value: u64) -> Vec { - Call::Balances(pallet_balances::Call::set_balance(42, value, 0)).encode() + Call::Balances(pallet_balances::Call::set_balance { who: 42, new_free: value, new_reserved: 0 }).encode() } #[test] diff --git a/frame/election-provider-multi-phase/src/benchmarking.rs b/frame/election-provider-multi-phase/src/benchmarking.rs index 5e89db7537d07..4acb5f1117977 100644 --- a/frame/election-provider-multi-phase/src/benchmarking.rs +++ b/frame/election-provider-multi-phase/src/benchmarking.rs @@ -344,7 +344,7 @@ frame_benchmarking::benchmarks! { // encode the most significant storage item that needs to be decoded in the dispatch. 
let encoded_snapshot = >::snapshot().unwrap().encode(); - let encoded_call = >::submit_unsigned(raw_solution.clone(), witness).encode(); + let encoded_call = Call::::submit_unsigned { solution: raw_solution.clone(), witness }.encode(); }: { assert_ok!(>::submit_unsigned(RawOrigin::None.into(), raw_solution, witness)); let _decoded_snap = as Decode>::decode(&mut &*encoded_snapshot) diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index c1479cf3368a7..8d07e83f0df03 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -773,7 +773,7 @@ mod tests { fn validate_unsigned_retracts_wrong_phase() { ExtBuilder::default().desired_targets(0).build_and_execute(|| { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(solution.clone(), witness()); + let call = Call::submit_unsigned { solution: solution.clone(), witness: witness() }; // initial assert_eq!(MultiPhase::current_phase(), Phase::Off); @@ -842,7 +842,7 @@ mod tests { assert!(MultiPhase::current_phase().is_unsigned()); let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(solution.clone(), witness()); + let call = Call::submit_unsigned { solution: solution.clone(), witness: witness() }; // initial assert!(::validate_unsigned( @@ -879,7 +879,7 @@ mod tests { assert!(MultiPhase::current_phase().is_unsigned()); let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(solution.clone(), witness()); + let call = Call::submit_unsigned { solution: solution.clone(), witness: witness() }; assert_eq!(solution.compact.unique_targets().len(), 0); // won't work anymore. 
@@ -905,7 +905,7 @@ mod tests { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(solution.clone(), witness()); + let call = Call::submit_unsigned { solution: solution.clone(), witness: witness() }; assert_eq!( ::validate_unsigned( @@ -931,7 +931,7 @@ mod tests { // This is in itself an invalid BS solution. let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned(solution.clone(), witness()); + let call = Call::submit_unsigned { solution: solution.clone(), witness: witness() }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) @@ -951,7 +951,7 @@ mod tests { let mut correct_witness = witness(); correct_witness.voters += 1; correct_witness.targets -= 1; - let call = Call::submit_unsigned(solution.clone(), correct_witness); + let call = Call::submit_unsigned { solution: solution.clone(), witness: correct_witness }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) @@ -1346,7 +1346,7 @@ mod tests { let encoded = pool.read().transactions[0].clone(); let extrinsic: Extrinsic = Decode::decode(&mut &*encoded).unwrap(); let call = extrinsic.call; - assert!(matches!(call, OuterCall::MultiPhase(Call::submit_unsigned(..)))); + assert!(matches!(call, OuterCall::MultiPhase(Call::submit_unsigned { .. }))); }) } @@ -1363,7 +1363,7 @@ mod tests { let encoded = pool.read().transactions[0].clone(); let extrinsic = Extrinsic::decode(&mut &*encoded).unwrap(); let call = match extrinsic.call { - OuterCall::MultiPhase(call @ Call::submit_unsigned(..)) => call, + OuterCall::MultiPhase(call @ Call::submit_unsigned { .. 
}) => call, _ => panic!("bad call: unexpected submission"), }; diff --git a/frame/example/src/tests.rs b/frame/example/src/tests.rs index 18089888dba1a..94367195b76d9 100644 --- a/frame/example/src/tests.rs +++ b/frame/example/src/tests.rs @@ -163,7 +163,7 @@ fn set_dummy_works() { #[test] fn signed_ext_watch_dummy_works() { new_test_ext().execute_with(|| { - let call = >::set_dummy(10).into(); + let call = pallet_example::Call::set_dummy { new_value: 10 }.into(); let info = DispatchInfo::default(); assert_eq!( @@ -183,14 +183,14 @@ fn signed_ext_watch_dummy_works() { #[test] fn weights_work() { // must have a defined weight. - let default_call = >::accumulate_dummy(10); + let default_call = pallet_example::Call::::accumulate_dummy { increase_by: 10 }; let info1 = default_call.get_dispatch_info(); // aka. `let info = as GetDispatchInfo>::get_dispatch_info(&default_call);` assert!(info1.weight > 0); // `set_dummy` is simpler than `accumulate_dummy`, and the weight // should be less. - let custom_call = >::set_dummy(20); + let custom_call = pallet_example::Call::::set_dummy { new_value: 20 }; let info2 = custom_call.get_dispatch_info(); assert!(info1.weight > info2.weight); } diff --git a/frame/lottery/src/tests.rs b/frame/lottery/src/tests.rs index c4415f782239f..427ada545ab38 100644 --- a/frame/lottery/src/tests.rs +++ b/frame/lottery/src/tests.rs @@ -106,7 +106,7 @@ fn set_calls_works() { let too_many_calls = vec![ Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), - Call::System(SystemCall::remark(vec![])), + Call::System(SystemCall::remark { _remark: vec![] }), ]; assert_noop!( @@ -180,7 +180,7 @@ fn buy_ticket_works_as_simple_passthrough() { assert_noop!(Lottery::buy_ticket(Origin::signed(1), bad_origin_call), BadOrigin,); // User can call other txs, but doesn't get a ticket - let remark_call = Box::new(Call::System(SystemCall::remark(b"hello, world!".to_vec()))); + let 
remark_call = Box::new(Call::System(SystemCall::remark { _remark: b"hello, world!".to_vec() })); assert_ok!(Lottery::buy_ticket(Origin::signed(2), remark_call)); assert_eq!(TicketsCount::::get(), 0); @@ -195,7 +195,7 @@ fn buy_ticket_works() { new_test_ext().execute_with(|| { // Set calls for the lottery. let calls = vec![ - Call::System(SystemCall::remark(vec![])), + Call::System(SystemCall::remark { _remark: vec![] }), Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; assert_ok!(Lottery::set_calls(Origin::root(), calls)); @@ -219,7 +219,7 @@ fn buy_ticket_works() { assert_eq!(TicketsCount::::get(), 1); // Buy ticket for remark - let call = Box::new(Call::System(SystemCall::remark(b"hello, world!".to_vec()))); + let call = Box::new(Call::System(SystemCall::remark { _remark: b"hello, world!".to_vec() })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); assert_eq!(TicketsCount::::get(), 2); diff --git a/frame/recovery/src/tests.rs b/frame/recovery/src/tests.rs index de99b8d5e396b..fb6d744eb31c0 100644 --- a/frame/recovery/src/tests.rs +++ b/frame/recovery/src/tests.rs @@ -75,11 +75,11 @@ fn recovery_life_cycle_works() { assert_ok!(Recovery::claim_recovery(Origin::signed(1), 5)); // Account 1 can use account 5 to close the active recovery process, claiming the deposited // funds used to initiate the recovery process into account 5. - let call = Box::new(Call::Recovery(RecoveryCall::close_recovery(1))); + let call = Box::new(Call::Recovery(RecoveryCall::close_recovery { rescuer: 1 })); assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // Account 1 can then use account 5 to remove the recovery configuration, claiming the // deposited funds used to create the recovery configuration into account 5. 
- let call = Box::new(Call::Recovery(RecoveryCall::remove_recovery())); + let call = Box::new(Call::Recovery(RecoveryCall::remove_recovery { })); assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // Account 1 should now be able to make a call through account 5 to get all of their funds assert_eq!(Balances::free_balance(5), 110); diff --git a/frame/support/test/tests/pallet_compatibility.rs b/frame/support/test/tests/pallet_compatibility.rs index def3e130b41ed..e77e18fe21683 100644 --- a/frame/support/test/tests/pallet_compatibility.rs +++ b/frame/support/test/tests/pallet_compatibility.rs @@ -358,10 +358,10 @@ mod test { assert_eq!( pallet_old::Call::::decode( - &mut &pallet::Call::::set_dummy(10).encode()[..] + &mut &pallet::Call::::set_dummy { new_value: 10 }.encode()[..] ) .unwrap(), - pallet_old::Call::::set_dummy(10), + pallet_old::Call::::set_dummy { new_value: 10 }, ); } } diff --git a/frame/support/test/tests/pallet_compatibility_instance.rs b/frame/support/test/tests/pallet_compatibility_instance.rs index af07ed83aae7d..a07d64e6fd2e4 100644 --- a/frame/support/test/tests/pallet_compatibility_instance.rs +++ b/frame/support/test/tests/pallet_compatibility_instance.rs @@ -357,10 +357,10 @@ mod test { assert_eq!( pallet_old::Call::::decode( - &mut &pallet::Call::::set_dummy(10).encode()[..] + &mut &pallet::Call::::set_dummy { new_value: 10 }.encode()[..] 
) .unwrap(), - pallet_old::Call::::set_dummy(10), + pallet_old::Call::::set_dummy { new_value: 10 }, ); } } From bc17010febbcbaf22473768ebbf654bb6c9ecd29 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Jul 2021 12:49:41 +0100 Subject: [PATCH 387/503] Call variant structs ad infinitum --- bin/node/executor/tests/basic.rs | 72 +++++++++---------- bin/node/executor/tests/common.rs | 2 +- bin/node/executor/tests/fees.rs | 8 +-- bin/node/executor/tests/submit_transaction.rs | 17 +++-- bin/node/testing/src/bench.rs | 18 ++--- frame/babe/src/tests.rs | 13 ++-- frame/grandpa/src/tests.rs | 10 +-- frame/im-online/src/tests.rs | 6 +- frame/support/test/tests/construct_runtime.rs | 46 ++++++------ frame/support/test/tests/pallet.rs | 56 +++++++-------- frame/system/src/mock.rs | 2 +- 11 files changed, 128 insertions(+), 122 deletions(-) diff --git a/bin/node/executor/tests/basic.rs b/bin/node/executor/tests/basic.rs index 062e9f7b5a7be..1a7a4cbf6e4f0 100644 --- a/bin/node/executor/tests/basic.rs +++ b/bin/node/executor/tests/basic.rs @@ -83,14 +83,14 @@ fn changes_trie_block() -> (Vec, Hash) { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time }), }, CheckedExtrinsic { signed: Some((alice(), signed_extra(0, 0))), - function: Call::Balances(pallet_balances::Call::transfer( - bob().into(), - 69 * DOLLARS, - )), + function: Call::Balances(pallet_balances::Call::transfer { + dest: bob().into(), + value: 69 * DOLLARS, + }), }, ], (time / SLOT_DURATION).into(), @@ -110,14 +110,14 @@ fn blocks() -> ((Vec, Hash), (Vec, Hash)) { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time1)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time1 }), }, CheckedExtrinsic { signed: Some((alice(), signed_extra(0, 0))), - function: Call::Balances(pallet_balances::Call::transfer( - bob().into(), - 69 * 
DOLLARS, - )), + function: Call::Balances(pallet_balances::Call::transfer { + dest: bob().into(), + value: 69 * DOLLARS, + }), }, ], (time1 / SLOT_DURATION).into(), @@ -130,21 +130,21 @@ fn blocks() -> ((Vec, Hash), (Vec, Hash)) { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time2)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time2 }), }, CheckedExtrinsic { signed: Some((bob(), signed_extra(0, 0))), - function: Call::Balances(pallet_balances::Call::transfer( - alice().into(), - 5 * DOLLARS, - )), + function: Call::Balances(pallet_balances::Call::transfer { + dest: alice().into(), + value: 5 * DOLLARS, + }), }, CheckedExtrinsic { signed: Some((alice(), signed_extra(1, 0))), - function: Call::Balances(pallet_balances::Call::transfer( - bob().into(), - 15 * DOLLARS, - )), + function: Call::Balances(pallet_balances::Call::transfer { + dest: bob().into(), + value: 15 * DOLLARS, + }), }, ], (time2 / SLOT_DURATION).into(), @@ -165,11 +165,11 @@ fn block_with_size(time: u64, nonce: u32, size: usize) -> (Vec, Hash) { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time * 1000)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time * 1000 }), }, CheckedExtrinsic { signed: Some((alice(), signed_extra(nonce, 0))), - function: Call::System(frame_system::Call::remark(vec![0; size])), + function: Call::System(frame_system::Call::remark { _remark: vec![0; size] }), }, ], (time * 1000 / SLOT_DURATION).into(), @@ -356,7 +356,7 @@ fn full_native_block_import_works() { let mut fees = t.execute_with(|| transfer_fee(&xt())); let transfer_weight = default_transfer_call().get_dispatch_info().weight; - let timestamp_weight = pallet_timestamp::Call::set::(Default::default()) + let timestamp_weight = pallet_timestamp::Call::set:: { now: Default::default() } .get_dispatch_info() .weight; @@ -645,28 +645,28 @@ fn deploying_wasm_contract_should_work() { vec![ 
CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time }), }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(0, 0))), function: Call::Contracts( - pallet_contracts::Call::instantiate_with_code::( - 1000 * DOLLARS + subsistence, - 500_000_000, - transfer_code, - Vec::new(), - Vec::new(), - ), + pallet_contracts::Call::instantiate_with_code:: { + endowment: 1000 * DOLLARS + subsistence, + gas_limit: 500_000_000, + code: transfer_code, + data: Vec::new(), + salt: Vec::new(), + }, ), }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(1, 0))), - function: Call::Contracts(pallet_contracts::Call::call::( - sp_runtime::MultiAddress::Id(addr.clone()), - 10, - 500_000_000, - vec![0x00, 0x01, 0x02, 0x03], - )), + function: Call::Contracts(pallet_contracts::Call::call:: { + dest: sp_runtime::MultiAddress::Id(addr.clone()), + value: 10, + gas_limit: 500_000_000, + data: vec![0x00, 0x01, 0x02, 0x03], + }), }, ], (time / SLOT_DURATION).into(), diff --git a/bin/node/executor/tests/common.rs b/bin/node/executor/tests/common.rs index 414b335406be8..b0fd668e0580b 100644 --- a/bin/node/executor/tests/common.rs +++ b/bin/node/executor/tests/common.rs @@ -89,7 +89,7 @@ pub fn sign(xt: CheckedExtrinsic) -> UncheckedExtrinsic { } pub fn default_transfer_call() -> pallet_balances::Call { - pallet_balances::Call::transfer::(bob().into(), 69 * DOLLARS) + pallet_balances::Call::::transfer { dest: bob().into(), value: 69 * DOLLARS } } pub fn from_block_number(n: u32) -> Header { diff --git a/bin/node/executor/tests/fees.rs b/bin/node/executor/tests/fees.rs index 3bc9179da2b3d..b3a9ab6b36157 100644 --- a/bin/node/executor/tests/fees.rs +++ b/bin/node/executor/tests/fees.rs @@ -56,11 +56,11 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time1)), + function: 
Call::Timestamp(pallet_timestamp::Call::set { now: time1 }), }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(0, 0))), - function: Call::System(frame_system::Call::fill_block(Perbill::from_percent(60))), + function: Call::System(frame_system::Call::fill_block { _ratio: Perbill::from_percent(60)}), }, ], (time1 / SLOT_DURATION).into(), @@ -75,11 +75,11 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { vec![ CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(time2)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: time2 }), }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(1, 0))), - function: Call::System(frame_system::Call::remark(vec![0; 1])), + function: Call::System(frame_system::Call::remark { _remark: vec![0; 1] }), }, ], (time2 / SLOT_DURATION).into(), diff --git a/bin/node/executor/tests/submit_transaction.rs b/bin/node/executor/tests/submit_transaction.rs index c83e48c8c933b..dfadef68feaba 100644 --- a/bin/node/executor/tests/submit_transaction.rs +++ b/bin/node/executor/tests/submit_transaction.rs @@ -42,7 +42,10 @@ fn should_submit_unsigned_transaction() { validators_len: 0, }; - let call = pallet_im_online::Call::heartbeat(heartbeat_data, signature); + let call = pallet_im_online::Call::heartbeat { + heartbeat: heartbeat_data, + _signature: signature, + }; SubmitTransaction::>::submit_unsigned_transaction( call.into(), ) @@ -84,7 +87,7 @@ fn should_submit_signed_transaction() { t.execute_with(|| { let results = Signer::::all_accounts().send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } }); let len = results.len(); @@ -118,7 +121,7 @@ fn should_submit_signed_twice_from_the_same_account() { t.execute_with(|| { let result = Signer::::any_account().send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), 
Default::default()) + pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } }); assert!(result.is_some()); @@ -127,7 +130,7 @@ fn should_submit_signed_twice_from_the_same_account() { // submit another one from the same account. The nonce should be incremented. let result = Signer::::any_account().send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } }); assert!(result.is_some()); @@ -163,7 +166,7 @@ fn should_submit_signed_twice_from_all_accounts() { t.execute_with(|| { let results = Signer::::all_accounts() .send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } }); let len = results.len(); @@ -174,7 +177,7 @@ fn should_submit_signed_twice_from_all_accounts() { // submit another one from the same account. The nonce should be incremented. 
let results = Signer::::all_accounts() .send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } }); let len = results.len(); @@ -227,7 +230,7 @@ fn submitted_transaction_should_be_valid() { t.execute_with(|| { let results = Signer::::all_accounts().send_signed_transaction(|_| { - pallet_balances::Call::transfer(Default::default(), Default::default()) + pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } }); let len = results.len(); assert_eq!(len, 1); diff --git a/bin/node/testing/src/bench.rs b/bin/node/testing/src/bench.rs index ceca493874dcb..eb6cd7c50fca5 100644 --- a/bin/node/testing/src/bench.rs +++ b/bin/node/testing/src/bench.rs @@ -300,19 +300,19 @@ impl<'a> Iterator for BlockContentIterator<'a> { )), function: match self.content.block_type { BlockType::RandomTransfersKeepAlive => - Call::Balances(BalancesCall::transfer_keep_alive( - sp_runtime::MultiAddress::Id(receiver), - node_runtime::ExistentialDeposit::get() + 1, - )), + Call::Balances(BalancesCall::transfer_keep_alive { + dest: sp_runtime::MultiAddress::Id(receiver), + value: node_runtime::ExistentialDeposit::get() + 1, + }), BlockType::RandomTransfersReaping => { - Call::Balances(BalancesCall::transfer( - sp_runtime::MultiAddress::Id(receiver), + Call::Balances(BalancesCall::transfer { + dest: sp_runtime::MultiAddress::Id(receiver), // Transfer so that ending balance would be 1 less than existential deposit // so that we kill the sender account. 
- 100 * DOLLARS - (node_runtime::ExistentialDeposit::get() - 1), - )) + value: 100 * DOLLARS - (node_runtime::ExistentialDeposit::get() - 1), + }) }, - BlockType::Noop => Call::System(SystemCall::remark(Vec::new())), + BlockType::Noop => Call::System(SystemCall::remark { _remark: Vec::new() }), }, }, self.runtime_version.spec_version, diff --git a/frame/babe/src/tests.rs b/frame/babe/src/tests.rs index 5e72e14877a48..57b509162e6f6 100644 --- a/frame/babe/src/tests.rs +++ b/frame/babe/src/tests.rs @@ -690,7 +690,10 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { let key_owner_proof = Historical::prove(key).unwrap(); let inner = - Call::report_equivocation_unsigned(equivocation_proof.clone(), key_owner_proof.clone()); + Call::report_equivocation_unsigned { + equivocation_proof: equivocation_proof.clone(), + key_owner_proof: key_owner_proof.clone() + }; // only local/inblock reports are allowed assert_eq!( @@ -779,10 +782,10 @@ fn valid_equivocation_reports_dont_pay_fees() { .unwrap(); // check the dispatch info for the call. - let info = Call::::report_equivocation_unsigned( - equivocation_proof.clone(), - key_owner_proof.clone(), - ) + let info = Call::::report_equivocation_unsigned { + equivocation_proof: equivocation_proof.clone(), + key_owner_proof: key_owner_proof.clone(), + } .get_dispatch_info(); // it should have non-zero weight and the fee has to be paid. 
diff --git a/frame/grandpa/src/tests.rs b/frame/grandpa/src/tests.rs index 2439c8c819576..27ae100a8912e 100644 --- a/frame/grandpa/src/tests.rs +++ b/frame/grandpa/src/tests.rs @@ -682,7 +682,7 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { Historical::prove((sp_finality_grandpa::KEY_TYPE, &equivocation_key)).unwrap(); let call = - Call::report_equivocation_unsigned(equivocation_proof.clone(), key_owner_proof.clone()); + Call::report_equivocation_unsigned { equivocation_proof: equivocation_proof.clone(), key_owner_proof: key_owner_proof.clone() }; // only local/inblock reports are allowed assert_eq!( @@ -837,10 +837,10 @@ fn valid_equivocation_reports_dont_pay_fees() { Historical::prove((sp_finality_grandpa::KEY_TYPE, &equivocation_key)).unwrap(); // check the dispatch info for the call. - let info = Call::::report_equivocation_unsigned( - equivocation_proof.clone(), - key_owner_proof.clone(), - ) + let info = Call::::report_equivocation_unsigned { + equivocation_proof: equivocation_proof.clone(), + key_owner_proof: key_owner_proof.clone(), + } .get_dispatch_info(); // it should have non-zero weight and the fee has to be paid. diff --git a/frame/im-online/src/tests.rs b/frame/im-online/src/tests.rs index 2492e46ef18a0..5048915c3e15d 100644 --- a/frame/im-online/src/tests.rs +++ b/frame/im-online/src/tests.rs @@ -130,7 +130,7 @@ fn heartbeat( }; let signature = id.sign(&heartbeat.encode()).unwrap(); - ImOnline::pre_dispatch(&crate::Call::heartbeat(heartbeat.clone(), signature.clone())).map_err( + ImOnline::pre_dispatch(&crate::Call::heartbeat { heartbeat: heartbeat.clone(), _signature: signature.clone() }).map_err( |e| match e { TransactionValidityError::Invalid(InvalidTransaction::Custom( INVALID_VALIDATORS_LEN, @@ -237,7 +237,7 @@ fn should_generate_heartbeats() { // check stuff about the transaction. 
let ex: Extrinsic = Decode::decode(&mut &*transaction).unwrap(); let heartbeat = match ex.call { - crate::mock::Call::ImOnline(crate::Call::heartbeat(h, ..)) => h, + crate::mock::Call::ImOnline(crate::Call::heartbeat { heartbeat, .. }) => heartbeat, e => panic!("Unexpected call: {:?}", e), }; @@ -352,7 +352,7 @@ fn should_not_send_a_report_if_already_online() { // check stuff about the transaction. let ex: Extrinsic = Decode::decode(&mut &*transaction).unwrap(); let heartbeat = match ex.call { - crate::mock::Call::ImOnline(crate::Call::heartbeat(h, ..)) => h, + crate::mock::Call::ImOnline(crate::Call::heartbeat { heartbeat, .. }) => heartbeat, e => panic!("Unexpected call: {:?}", e), }; diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 335eaa6591610..6b3273c3123fe 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -310,8 +310,8 @@ mod origin_test { #[test] fn origin_default_filter() { - let accepted_call = nested::module3::Call::fail().into(); - let rejected_call = module3::Call::fail().into(); + let accepted_call = nested::module3::Call::fail { }.into(); + let rejected_call = module3::Call::fail { }.into(); assert_eq!(Origin::root().filter_call(&accepted_call), true); assert_eq!(Origin::root().filter_call(&rejected_call), true); @@ -473,28 +473,28 @@ fn event_codec() { #[test] fn call_codec() { use codec::Encode; - assert_eq!(Call::System(system::Call::noop()).encode()[0], 30); - assert_eq!(Call::Module1_1(module1::Call::fail()).encode()[0], 31); - assert_eq!(Call::Module2(module2::Call::fail()).encode()[0], 32); - assert_eq!(Call::Module1_2(module1::Call::fail()).encode()[0], 33); - assert_eq!(Call::NestedModule3(nested::module3::Call::fail()).encode()[0], 34); - assert_eq!(Call::Module3(module3::Call::fail()).encode()[0], 35); - assert_eq!(Call::Module1_4(module1::Call::fail()).encode()[0], 3); - 
assert_eq!(Call::Module1_6(module1::Call::fail()).encode()[0], 1); - assert_eq!(Call::Module1_7(module1::Call::fail()).encode()[0], 2); - assert_eq!(Call::Module1_8(module1::Call::fail()).encode()[0], 12); - assert_eq!(Call::Module1_9(module1::Call::fail()).encode()[0], 13); + assert_eq!(Call::System(system::Call::noop { }).encode()[0], 30); + assert_eq!(Call::Module1_1(module1::Call::fail { }).encode()[0], 31); + assert_eq!(Call::Module2(module2::Call::fail { }).encode()[0], 32); + assert_eq!(Call::Module1_2(module1::Call::fail { }).encode()[0], 33); + assert_eq!(Call::NestedModule3(nested::module3::Call::fail { }).encode()[0], 34); + assert_eq!(Call::Module3(module3::Call::fail { }).encode()[0], 35); + assert_eq!(Call::Module1_4(module1::Call::fail { }).encode()[0], 3); + assert_eq!(Call::Module1_6(module1::Call::fail { }).encode()[0], 1); + assert_eq!(Call::Module1_7(module1::Call::fail { }).encode()[0], 2); + assert_eq!(Call::Module1_8(module1::Call::fail { }).encode()[0], 12); + assert_eq!(Call::Module1_9(module1::Call::fail { }).encode()[0], 13); } #[test] fn call_compact_attr() { use codec::Encode; - let call: module3::Call = module3::Call::aux_1(1); + let call: module3::Call = module3::Call::aux_1 { _data: 1 }; let encoded = call.encode(); assert_eq!(2, encoded.len()); assert_eq!(vec![1, 4], encoded); - let call: module3::Call = module3::Call::aux_2(1, 2); + let call: module3::Call = module3::Call::aux_2 { _data: 1, _data2: 2 }; let encoded = call.encode(); assert_eq!(6, encoded.len()); assert_eq!(vec![2, 1, 0, 0, 0, 8], encoded); @@ -503,13 +503,13 @@ fn call_compact_attr() { #[test] fn call_encode_is_correct_and_decode_works() { use codec::{Decode, Encode}; - let call: module3::Call = module3::Call::fail(); + let call: module3::Call = module3::Call::fail { }; let encoded = call.encode(); assert_eq!(vec![0], encoded); let decoded = module3::Call::::decode(&mut &encoded[..]).unwrap(); assert_eq!(decoded, call); - let call: module3::Call = 
module3::Call::aux_3(32, "hello".into()); + let call: module3::Call = module3::Call::aux_3 { _data: 32, _data2: "hello".into() }; let encoded = call.encode(); assert_eq!(vec![3, 32, 0, 0, 0, 20, 104, 101, 108, 108, 111], encoded); let decoded = module3::Call::::decode(&mut &encoded[..]).unwrap(); @@ -524,12 +524,12 @@ fn call_weight_should_attach_to_call_enum() { }; // operational. assert_eq!( - module3::Call::::operational().get_dispatch_info(), + module3::Call::::operational { }.get_dispatch_info(), DispatchInfo { weight: 5, class: DispatchClass::Operational, pays_fee: Pays::Yes }, ); // custom basic assert_eq!( - module3::Call::::aux_4().get_dispatch_info(), + module3::Call::::aux_4 { }.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes }, ); } @@ -537,14 +537,14 @@ fn call_weight_should_attach_to_call_enum() { #[test] fn call_name() { use frame_support::dispatch::GetCallName; - let name = module3::Call::::aux_4().get_call_name(); + let name = module3::Call::::aux_4 { }.get_call_name(); assert_eq!("aux_4", name); } #[test] fn call_metadata() { use frame_support::dispatch::{CallMetadata, GetCallMetadata}; - let call = Call::Module3(module3::Call::::aux_4()); + let call = Call::Module3(module3::Call::::aux_4 { }); let metadata = call.get_call_metadata(); let expected = CallMetadata { function_name: "aux_4".into(), pallet_name: "Module3".into() }; assert_eq!(metadata, expected); @@ -582,10 +582,10 @@ fn get_module_names() { #[test] fn call_subtype_conversion() { use frame_support::{dispatch::CallableCallFor, traits::IsSubType}; - let call = Call::Module3(module3::Call::::fail()); + let call = Call::Module3(module3::Call::::fail { }); let subcall: Option<&CallableCallFor> = call.is_sub_type(); let subcall_none: Option<&CallableCallFor> = call.is_sub_type(); - assert_eq!(Some(&module3::Call::::fail()), subcall); + assert_eq!(Some(&module3::Call::::fail { }), subcall); assert_eq!(None, subcall_none); let from = 
Call::from(subcall.unwrap().clone()); diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index af68c15cd2a07..3b504304565da 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -356,7 +356,7 @@ pub mod pallet { fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { T::AccountId::from(SomeType1); // Test for where clause T::AccountId::from(SomeType5); // Test for where clause - if matches!(call, Call::foo_transactional(_)) { + if matches!(call, Call::foo_transactional { .. }) { return Ok(ValidTransaction::default()) } Err(TransactionValidityError::Invalid(InvalidTransaction::Call)) @@ -376,18 +376,18 @@ pub mod pallet { fn create_inherent(_data: &InherentData) -> Option { T::AccountId::from(SomeType1); // Test for where clause T::AccountId::from(SomeType6); // Test for where clause - Some(Call::foo_no_post_info()) + Some(Call::foo_no_post_info { }) } fn is_inherent(call: &Self::Call) -> bool { - matches!(call, Call::foo_no_post_info() | Call::foo(..)) + matches!(call, Call::foo_no_post_info { } | Call::foo { .. }) } fn check_inherent(call: &Self::Call, _: &InherentData) -> Result<(), Self::Error> { match call { - Call::foo_no_post_info() => Ok(()), - Call::foo(0, 0) => Err(InherentError::Fatal), - Call::foo(..) => Ok(()), + Call::foo_no_post_info { } => Ok(()), + Call::foo { _foo: 0, _bar: 0 } => Err(InherentError::Fatal), + Call::foo { .. 
} => Ok(()), _ => unreachable!("other calls are not inherents"), } } @@ -554,13 +554,13 @@ fn transactional_works() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo_transactional(0) + pallet::Call::::foo_transactional { foo: 0 } .dispatch_bypass_filter(None.into()) .err() .unwrap(); assert!(frame_system::Pallet::::events().is_empty()); - pallet::Call::::foo_transactional(1) + pallet::Call::::foo_transactional { foo: 1 } .dispatch_bypass_filter(None.into()) .unwrap(); assert_eq!( @@ -575,7 +575,7 @@ fn transactional_works() { #[test] fn call_expand() { - let call_foo = pallet::Call::::foo(3, 0); + let call_foo = pallet::Call::::foo { _foo: 3, _bar: 0 }; assert_eq!( call_foo.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } @@ -624,7 +624,7 @@ fn inherent_expand() { let inherents = InherentData::new().create_extrinsics(); let expected = vec![UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info { }), signature: None, }]; assert_eq!(expected, inherents); @@ -639,11 +639,11 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info { }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 0)), + function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 0 }), signature: None, }, ], @@ -661,11 +661,11 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info { }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(0, 0)), + function: Call::Example(pallet::Call::foo { _foo: 0, _bar: 0 }), signature: None, }, ], @@ -682,7 +682,7 @@ fn inherent_expand() { Digest::default(), ), 
vec![UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_transactional(0)), + function: Call::Example(pallet::Call::foo_transactional { foo: 0 }), signature: None, }], ); @@ -700,7 +700,7 @@ fn inherent_expand() { Digest::default(), ), vec![UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info { }), signature: Some((1, (), ())), }], ); @@ -719,11 +719,11 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 1)), + function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 1 }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_transactional(0)), + function: Call::Example(pallet::Call::foo_transactional { foo: 0 }), signature: None, }, ], @@ -741,15 +741,15 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 1)), + function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 1 }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_transactional(0)), + function: Call::Example(pallet::Call::foo_transactional { foo: 0 }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info { }), signature: None, }, ], @@ -767,15 +767,15 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 1)), + function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 1 }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo(1, 0)), + function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 0 }), signature: Some((1, (), ())), }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info()), + function: Call::Example(pallet::Call::foo_no_post_info { }), signature: None, }, ], @@ -790,12 +790,12 @@ fn validate_unsigned_expand() { 
InvalidTransaction, TransactionSource, TransactionValidityError, ValidTransaction, ValidateUnsigned, }; - let call = pallet::Call::::foo_no_post_info(); + let call = pallet::Call::::foo_no_post_info { }; let validity = pallet::Pallet::validate_unsigned(TransactionSource::Local, &call).unwrap_err(); assert_eq!(validity, TransactionValidityError::Invalid(InvalidTransaction::Call)); - let call = pallet::Call::::foo_transactional(0); + let call = pallet::Call::::foo_transactional { foo: 0 }; let validity = pallet::Pallet::validate_unsigned(TransactionSource::External, &call).unwrap(); assert_eq!(validity, ValidTransaction::default()); @@ -814,7 +814,7 @@ fn trait_store_expand() { fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo(3, 0).dispatch_bypass_filter(None.into()).unwrap(); + pallet::Call::::foo { _foo: 3, _bar: 0 }.dispatch_bypass_filter(None.into()).unwrap(); assert_eq!( frame_system::Pallet::::events()[0].event, Event::Example(pallet::Event::Something(3)), @@ -824,7 +824,7 @@ fn pallet_expand_deposit_event() { #[test] fn pallet_new_call_variant() { - Call::Example(Example::Call::new_call_variant_foo(3, 4)); + Call::Example(pallet::Call::new_call_variant_foo(3, 4)); } #[test] diff --git a/frame/system/src/mock.rs b/frame/system/src/mock.rs index 480e8b1a26bae..0cde4b031d1e2 100644 --- a/frame/system/src/mock.rs +++ b/frame/system/src/mock.rs @@ -116,7 +116,7 @@ impl Config for Test { pub type SysEvent = frame_system::Event; /// A simple call, which one doesn't matter. -pub const CALL: &::Call = &Call::System(frame_system::Call::set_heap_pages(0u64)); +pub const CALL: &::Call = &Call::System(frame_system::Call::set_heap_pages { pages: 0u64 }); /// Create new externalities for `System` module tests. 
pub fn new_test_ext() -> sp_io::TestExternalities { From 759c715378c461df20215619eb5d767da0bb5964 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Jul 2021 12:54:23 +0100 Subject: [PATCH 388/503] Fmt --- bin/node/executor/tests/fees.rs | 4 +- bin/node/executor/tests/submit_transaction.rs | 26 ++++--- client/network/src/request_responses.rs | 2 +- frame/babe/src/tests.rs | 9 ++- frame/collective/src/lib.rs | 21 ++++-- frame/contracts/src/exec.rs | 15 ++-- frame/democracy/src/tests.rs | 5 +- .../src/unsigned.rs | 3 +- frame/example-offchain-worker/src/lib.rs | 6 +- frame/executive/src/lib.rs | 48 +++++++++---- frame/grandpa/src/tests.rs | 6 +- frame/im-online/src/tests.rs | 17 ++--- frame/lottery/src/tests.rs | 12 ++-- frame/proxy/src/tests.rs | 26 ++++--- frame/recovery/src/tests.rs | 2 +- frame/scheduler/src/lib.rs | 16 +++-- frame/staking/src/lib.rs | 3 +- frame/support/src/dispatch.rs | 5 +- frame/support/src/weights.rs | 16 +++-- frame/support/test/tests/construct_runtime.rs | 40 +++++------ frame/support/test/tests/pallet.rs | 24 ++++--- frame/system/src/mock.rs | 3 +- frame/utility/src/tests.rs | 70 +++++++------------ 23 files changed, 217 insertions(+), 162 deletions(-) diff --git a/bin/node/executor/tests/fees.rs b/bin/node/executor/tests/fees.rs index b3a9ab6b36157..ff477a51b557f 100644 --- a/bin/node/executor/tests/fees.rs +++ b/bin/node/executor/tests/fees.rs @@ -60,7 +60,9 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(0, 0))), - function: Call::System(frame_system::Call::fill_block { _ratio: Perbill::from_percent(60)}), + function: Call::System(frame_system::Call::fill_block { + _ratio: Perbill::from_percent(60), + }), }, ], (time1 / SLOT_DURATION).into(), diff --git a/bin/node/executor/tests/submit_transaction.rs b/bin/node/executor/tests/submit_transaction.rs index dfadef68feaba..b611cee365b0f 100644 --- a/bin/node/executor/tests/submit_transaction.rs +++ 
b/bin/node/executor/tests/submit_transaction.rs @@ -42,10 +42,8 @@ fn should_submit_unsigned_transaction() { validators_len: 0, }; - let call = pallet_im_online::Call::heartbeat { - heartbeat: heartbeat_data, - _signature: signature, - }; + let call = + pallet_im_online::Call::heartbeat { heartbeat: heartbeat_data, _signature: signature }; SubmitTransaction::>::submit_unsigned_transaction( call.into(), ) @@ -87,7 +85,10 @@ fn should_submit_signed_transaction() { t.execute_with(|| { let results = Signer::::all_accounts().send_signed_transaction(|_| { - pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } + pallet_balances::Call::transfer { + dest: Default::default(), + value: Default::default(), + } }); let len = results.len(); @@ -121,7 +122,10 @@ fn should_submit_signed_twice_from_the_same_account() { t.execute_with(|| { let result = Signer::::any_account().send_signed_transaction(|_| { - pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } + pallet_balances::Call::transfer { + dest: Default::default(), + value: Default::default(), + } }); assert!(result.is_some()); @@ -130,7 +134,10 @@ fn should_submit_signed_twice_from_the_same_account() { // submit another one from the same account. The nonce should be incremented. 
let result = Signer::::any_account().send_signed_transaction(|_| { - pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } + pallet_balances::Call::transfer { + dest: Default::default(), + value: Default::default(), + } }); assert!(result.is_some()); @@ -230,7 +237,10 @@ fn submitted_transaction_should_be_valid() { t.execute_with(|| { let results = Signer::::all_accounts().send_signed_transaction(|_| { - pallet_balances::Call::transfer { dest: Default::default(), value: Default::default() } + pallet_balances::Call::transfer { + dest: Default::default(), + value: Default::default(), + } }); let len = results.len(); assert_eq!(len, 1); diff --git a/client/network/src/request_responses.rs b/client/network/src/request_responses.rs index f51055af55243..226e1c546d6c9 100644 --- a/client/network/src/request_responses.rs +++ b/client/network/src/request_responses.rs @@ -788,7 +788,7 @@ pub enum ResponseFailure { /// Implements the libp2p [`RequestResponseCodec`] trait. Defines how streams of bytes are turned /// into requests and responses and vice-versa. #[derive(Debug, Clone)] -#[doc(hidden)]// Needs to be public in order to satisfy the Rust compiler. +#[doc(hidden)] // Needs to be public in order to satisfy the Rust compiler. 
pub struct GenericCodec { max_request_size: u64, max_response_size: u64, diff --git a/frame/babe/src/tests.rs b/frame/babe/src/tests.rs index 57b509162e6f6..907a9980b26b5 100644 --- a/frame/babe/src/tests.rs +++ b/frame/babe/src/tests.rs @@ -689,11 +689,10 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { let key = (sp_consensus_babe::KEY_TYPE, &offending_authority_pair.public()); let key_owner_proof = Historical::prove(key).unwrap(); - let inner = - Call::report_equivocation_unsigned { - equivocation_proof: equivocation_proof.clone(), - key_owner_proof: key_owner_proof.clone() - }; + let inner = Call::report_equivocation_unsigned { + equivocation_proof: equivocation_proof.clone(), + key_owner_proof: key_owner_proof.clone(), + }; // only local/inblock reports are allowed assert_eq!( diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index 11fb44a79f988..65b6b02d7a6de 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -1170,8 +1170,11 @@ mod tests { #[test] fn proposal_weight_limit_works_on_approve() { new_test_ext().execute_with(|| { - let proposal = - Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); + let proposal = Call::Collective(crate::Call::set_members { + new_members: vec![1, 2, 3], + prime: None, + old_count: MaxMembers::get(), + }); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -1209,8 +1212,11 @@ mod tests { #[test] fn proposal_weight_limit_ignored_on_disapprove() { new_test_ext().execute_with(|| { - let proposal = - Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); + let proposal = Call::Collective(crate::Call::set_members { + new_members: vec![1, 2, 3], + prime: None, + old_count: MaxMembers::get(), + }); let proposal_len: u32 = 
proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -1552,8 +1558,11 @@ mod tests { #[test] fn correct_validate_and_get_proposal() { new_test_ext().execute_with(|| { - let proposal = - Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); + let proposal = Call::Collective(crate::Call::set_members { + new_members: vec![1, 2, 3], + prime: None, + old_count: MaxMembers::get(), + }); let length = proposal.encode().len() as u32; assert_ok!(Collective::propose( Origin::signed(1), diff --git a/frame/contracts/src/exec.rs b/frame/contracts/src/exec.rs index d4486a3c54df5..dbf0eb16b5f9a 100644 --- a/frame/contracts/src/exec.rs +++ b/frame/contracts/src/exec.rs @@ -2428,7 +2428,9 @@ mod tests { #[test] fn call_runtime_works() { let code_hash = MockLoader::insert(Call, |ctx, _| { - let call = Call::System(frame_system::Call::remark_with_event { remark: b"Hello World".to_vec() }); + let call = Call::System(frame_system::Call::remark_with_event { + remark: b"Hello World".to_vec(), + }); ctx.ext.call_runtime(call).unwrap(); exec_success() }); @@ -2462,7 +2464,8 @@ mod tests { use pallet_utility::Call as UtilCall; // remark should still be allowed - let allowed_call = Call::System(SysCall::remark_with_event { remark: b"Hello".to_vec() }); + let allowed_call = + Call::System(SysCall::remark_with_event { remark: b"Hello".to_vec() }); // transfers are disallowed by the `TestFiler` (see below) let forbidden_call = Call::Balances(BalanceCall::transfer { dest: CHARLIE, value: 22 }); @@ -2471,11 +2474,9 @@ mod tests { assert_err!(ctx.ext.call_runtime(forbidden_call.clone()), BadOrigin); // as part of a patch: return is OK (but it interrupted the batch) - assert_ok!(ctx.ext.call_runtime(Call::Utility(UtilCall::batch { calls: vec![ - allowed_call.clone(), - forbidden_call, - allowed_call - ]})),); + 
assert_ok!(ctx.ext.call_runtime(Call::Utility(UtilCall::batch { + calls: vec![allowed_call.clone(), forbidden_call, allowed_call] + })),); // the transfer wasn't performed assert_eq!(get_balance(&CHARLIE), 0); diff --git a/frame/democracy/src/tests.rs b/frame/democracy/src/tests.rs index 7d29b07fce01e..9f1d729a0a7c2 100644 --- a/frame/democracy/src/tests.rs +++ b/frame/democracy/src/tests.rs @@ -72,7 +72,7 @@ frame_support::construct_runtime!( pub struct BaseFilter; impl Filter for BaseFilter { fn filter(call: &Call) -> bool { - !matches!(call, &Call::Balances(pallet_balances::Call::set_balance {..})) + !matches!(call, &Call::Balances(pallet_balances::Call::set_balance { .. })) } } @@ -224,7 +224,8 @@ fn params_should_work() { } fn set_balance_proposal(value: u64) -> Vec { - Call::Balances(pallet_balances::Call::set_balance { who: 42, new_free: value, new_reserved: 0 }).encode() + Call::Balances(pallet_balances::Call::set_balance { who: 42, new_free: value, new_reserved: 0 }) + .encode() } #[test] diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index 8d07e83f0df03..a27327b0f9f5a 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -951,7 +951,8 @@ mod tests { let mut correct_witness = witness(); correct_witness.voters += 1; correct_witness.targets -= 1; - let call = Call::submit_unsigned { solution: solution.clone(), witness: correct_witness }; + let call = + Call::submit_unsigned { solution: solution.clone(), witness: correct_witness }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) diff --git a/frame/example-offchain-worker/src/lib.rs b/frame/example-offchain-worker/src/lib.rs index ead5fb0e1758b..1acc125383eec 100644 --- a/frame/example-offchain-worker/src/lib.rs +++ b/frame/example-offchain-worker/src/lib.rs @@ -304,7 +304,11 @@ pub mod pallet { return 
InvalidTransaction::BadProof.into() } Self::validate_transaction_parameters(&payload.block_number, &payload.price) - } else if let Call::submit_price_unsigned { _block_number: block_number, price: new_price } = call { + } else if let Call::submit_price_unsigned { + _block_number: block_number, + price: new_price, + } = call + { Self::validate_transaction_parameters(block_number, new_price) } else { InvalidTransaction::Call.into() diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index a3fb6b1ef1316..a7ad3847b2048 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -645,7 +645,7 @@ mod tests { None } fn is_inherent(call: &Self::Call) -> bool { - *call == Call::::inherent_call { } + *call == Call::::inherent_call {} } } @@ -937,7 +937,10 @@ mod tests { fn block_weight_limit_enforced() { let mut t = new_test_ext(10000); // given: TestXt uses the encoded len as fixed Len: - let xt = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 0, 0)); + let xt = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 0, 0), + ); let encoded = xt.encode(); let encoded_len = encoded.len() as Weight; // on_initialize weight + base block execution weight @@ -982,9 +985,18 @@ mod tests { #[test] fn block_weight_and_size_is_stored_per_tx() { - let xt = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 0, 0)); - let x1 = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 1, 0)); - let x2 = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 2, 0)); + let xt = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 0, 0), + ); + let x1 = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 1, 0), + ); + let x2 = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, 
value: 0 }), + sign_extra(1, 2, 0), + ); let len = xt.clone().encode().len() as u32; let mut t = new_test_ext(1); t.execute_with(|| { @@ -1038,8 +1050,8 @@ mod tests { #[test] fn validate_unsigned() { - let valid = TestXt::new(Call::Custom(custom::Call::allowed_unsigned { }), None); - let invalid = TestXt::new(Call::Custom(custom::Call::unallowed_unsigned { }), None); + let valid = TestXt::new(Call::Custom(custom::Call::allowed_unsigned {}), None); + let invalid = TestXt::new(Call::Custom(custom::Call::unallowed_unsigned {}), None); let mut t = new_test_ext(1); let mut default_with_prio_3 = ValidTransaction::default(); @@ -1078,8 +1090,10 @@ mod tests { as LockableCurrency>::set_lock( id, &1, 110, lock, ); - let xt = - TestXt::new(Call::System(SystemCall::remark { _remark: vec![1u8] }), sign_extra(1, 0, 0)); + let xt = TestXt::new( + Call::System(SystemCall::remark { _remark: vec![1u8] }), + sign_extra(1, 0, 0), + ); let weight = xt.get_dispatch_info().weight + ::BlockWeights::get() .get(DispatchClass::Normal) @@ -1226,7 +1240,10 @@ mod tests { /// used through the `ExecuteBlock` trait. #[test] fn custom_runtime_upgrade_is_called_when_using_execute_block_trait() { - let xt = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 0, 0)); + let xt = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 0, 0), + ); let header = new_test_ext(1).execute_with(|| { // Make sure `on_runtime_upgrade` is called. 
@@ -1330,7 +1347,7 @@ mod tests { #[test] fn calculating_storage_root_twice_works() { - let call = Call::Custom(custom::Call::calculate_storage_root { }); + let call = Call::Custom(custom::Call::calculate_storage_root {}); let xt = TestXt::new(call, sign_extra(1, 0, 0)); let header = new_test_ext(1).execute_with(|| { @@ -1356,8 +1373,11 @@ mod tests { #[test] #[should_panic(expected = "Invalid inherent position for extrinsic at index 1")] fn invalid_inherent_position_fail() { - let xt1 = TestXt::new(Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), sign_extra(1, 0, 0)); - let xt2 = TestXt::new(Call::Custom(custom::Call::inherent_call { }), None); + let xt1 = TestXt::new( + Call::Balances(BalancesCall::transfer { dest: 33, value: 0 }), + sign_extra(1, 0, 0), + ); + let xt2 = TestXt::new(Call::Custom(custom::Call::inherent_call {}), None); let header = new_test_ext(1).execute_with(|| { // Let's build some fake block. @@ -1382,7 +1402,7 @@ mod tests { #[test] fn valid_inherents_position_works() { - let xt1 = TestXt::new(Call::Custom(custom::Call::inherent_call { }), None); + let xt1 = TestXt::new(Call::Custom(custom::Call::inherent_call {}), None); let xt2 = TestXt::new(call_transfer(33, 0), sign_extra(1, 0, 0)); let header = new_test_ext(1).execute_with(|| { diff --git a/frame/grandpa/src/tests.rs b/frame/grandpa/src/tests.rs index 27ae100a8912e..a9ebf914e4bd7 100644 --- a/frame/grandpa/src/tests.rs +++ b/frame/grandpa/src/tests.rs @@ -681,8 +681,10 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { let key_owner_proof = Historical::prove((sp_finality_grandpa::KEY_TYPE, &equivocation_key)).unwrap(); - let call = - Call::report_equivocation_unsigned { equivocation_proof: equivocation_proof.clone(), key_owner_proof: key_owner_proof.clone() }; + let call = Call::report_equivocation_unsigned { + equivocation_proof: equivocation_proof.clone(), + key_owner_proof: key_owner_proof.clone(), + }; // only local/inblock reports are allowed 
assert_eq!( diff --git a/frame/im-online/src/tests.rs b/frame/im-online/src/tests.rs index 5048915c3e15d..72470de3475a3 100644 --- a/frame/im-online/src/tests.rs +++ b/frame/im-online/src/tests.rs @@ -130,14 +130,15 @@ fn heartbeat( }; let signature = id.sign(&heartbeat.encode()).unwrap(); - ImOnline::pre_dispatch(&crate::Call::heartbeat { heartbeat: heartbeat.clone(), _signature: signature.clone() }).map_err( - |e| match e { - TransactionValidityError::Invalid(InvalidTransaction::Custom( - INVALID_VALIDATORS_LEN, - )) => "invalid validators len", - e @ _ => <&'static str>::from(e), - }, - )?; + ImOnline::pre_dispatch(&crate::Call::heartbeat { + heartbeat: heartbeat.clone(), + _signature: signature.clone(), + }) + .map_err(|e| match e { + TransactionValidityError::Invalid(InvalidTransaction::Custom(INVALID_VALIDATORS_LEN)) => + "invalid validators len", + e @ _ => <&'static str>::from(e), + })?; ImOnline::heartbeat(Origin::none(), heartbeat, signature) } diff --git a/frame/lottery/src/tests.rs b/frame/lottery/src/tests.rs index 427ada545ab38..d228b57f5112b 100644 --- a/frame/lottery/src/tests.rs +++ b/frame/lottery/src/tests.rs @@ -176,15 +176,18 @@ fn buy_ticket_works_as_simple_passthrough() { BalancesError::::InsufficientBalance, ); - let bad_origin_call = Box::new(Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 })); + let bad_origin_call = + Box::new(Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 })); assert_noop!(Lottery::buy_ticket(Origin::signed(1), bad_origin_call), BadOrigin,); // User can call other txs, but doesn't get a ticket - let remark_call = Box::new(Call::System(SystemCall::remark { _remark: b"hello, world!".to_vec() })); + let remark_call = + Box::new(Call::System(SystemCall::remark { _remark: b"hello, world!".to_vec() })); assert_ok!(Lottery::buy_ticket(Origin::signed(2), remark_call)); assert_eq!(TicketsCount::::get(), 0); - let successful_call = 
Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 1 })); + let successful_call = + Box::new(Call::Balances(BalancesCall::transfer { dest: 2, value: 1 })); assert_ok!(Lottery::buy_ticket(Origin::signed(2), successful_call)); assert_eq!(TicketsCount::::get(), 1); }); @@ -219,7 +222,8 @@ fn buy_ticket_works() { assert_eq!(TicketsCount::::get(), 1); // Buy ticket for remark - let call = Box::new(Call::System(SystemCall::remark { _remark: b"hello, world!".to_vec() })); + let call = + Box::new(Call::System(SystemCall::remark { _remark: b"hello, world!".to_vec() })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); assert_eq!(TicketsCount::::get(), 2); diff --git a/frame/proxy/src/tests.rs b/frame/proxy/src/tests.rs index e598d3a14b556..964391c97534c 100644 --- a/frame/proxy/src/tests.rs +++ b/frame/proxy/src/tests.rs @@ -199,7 +199,7 @@ fn expect_events(e: Vec) { } fn call_transfer(dest: u64, value: u64) -> Call { - Call::Balances(BalancesCall::transfer { dest, value }) + Call::Balances(BalancesCall::transfer { dest, value }) } #[test] @@ -336,7 +336,8 @@ fn filtering_works() { assert!(Balances::mutate_account(&derivative_id, |a| a.free = 1000).is_ok()); let inner = Box::new(call_transfer(6, 1)); - let call = Box::new(Call::Utility(UtilityCall::as_derivative { index: 0, call: inner.clone() })); + let call = + Box::new(Call::Utility(UtilityCall::as_derivative { index: 0, call: inner.clone() })); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); @@ -358,7 +359,8 @@ fn filtering_works() { ProxyEvent::ProxyExecuted(Ok(())).into(), ]); - let inner = Box::new(Call::Proxy(ProxyCall::new_call_variant_add_proxy(5, ProxyType::Any, 0))); + let inner = + Box::new(Call::Proxy(ProxyCall::new_call_variant_add_proxy(5, ProxyType::Any, 0))); let call = Box::new(Call::Utility(UtilityCall::batch { calls: 
vec![*inner] })); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); expect_events(vec![ @@ -373,7 +375,7 @@ fn filtering_works() { ProxyEvent::ProxyExecuted(Ok(())).into(), ]); - let call = Box::new(Call::Proxy(ProxyCall::remove_proxies { })); + let call = Box::new(Call::Proxy(ProxyCall::remove_proxies {})); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(4), 1, None, call.clone())); @@ -459,10 +461,10 @@ fn proxying_works() { assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); - let call = Box::new(Call::Balances(BalancesCall::transfer_keep_alive { dest: 6, value: 1 })); - assert_ok!( - Call::Proxy(super::Call::new_call_variant_proxy(1, None, call.clone())).dispatch(Origin::signed(2)) - ); + let call = + Box::new(Call::Balances(BalancesCall::transfer_keep_alive { dest: 6, value: 1 })); + assert_ok!(Call::Proxy(super::Call::new_call_variant_proxy(1, None, call.clone())) + .dispatch(Origin::signed(2))); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); @@ -500,7 +502,13 @@ fn anonymous_works() { System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); assert_eq!(Balances::free_balance(6), 1); - let call = Box::new(Call::Proxy(ProxyCall::new_call_variant_kill_anonymous(1, ProxyType::Any, 0, 1, 0))); + let call = Box::new(Call::Proxy(ProxyCall::new_call_variant_kill_anonymous( + 1, + ProxyType::Any, + 0, + 1, + 0, + ))); assert_ok!(Proxy::proxy(Origin::signed(2), anon2, None, call.clone())); let de = DispatchError::from(Error::::NoPermission).stripped(); 
System::assert_last_event(ProxyEvent::ProxyExecuted(Err(de)).into()); diff --git a/frame/recovery/src/tests.rs b/frame/recovery/src/tests.rs index fb6d744eb31c0..71a687f4bde1d 100644 --- a/frame/recovery/src/tests.rs +++ b/frame/recovery/src/tests.rs @@ -79,7 +79,7 @@ fn recovery_life_cycle_works() { assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // Account 1 can then use account 5 to remove the recovery configuration, claiming the // deposited funds used to create the recovery configuration into account 5. - let call = Box::new(Call::Recovery(RecoveryCall::remove_recovery { })); + let call = Box::new(Call::Recovery(RecoveryCall::remove_recovery {})); assert_ok!(Recovery::as_recovered(Origin::signed(1), 5, call)); // Account 1 should now be able to make a call through account 5 to get all of their funds assert_eq!(Balances::free_balance(5), 110); diff --git a/frame/scheduler/src/lib.rs b/frame/scheduler/src/lib.rs index 5c0415a31c689..447fe24a357ed 100644 --- a/frame/scheduler/src/lib.rs +++ b/frame/scheduler/src/lib.rs @@ -1371,7 +1371,10 @@ mod tests { None, 126, root(), - Call::Logger(LoggerCall::log { i: 2600, weight: MaximumSchedulerWeight::get() / 2 }) + Call::Logger(LoggerCall::log { + i: 2600, + weight: MaximumSchedulerWeight::get() / 2 + }) )); // 2600 does not fit with 69 or 42, but has higher priority, so will go through @@ -1425,7 +1428,10 @@ mod tests { Some((1000, 3)), 126, root(), - Call::Logger(LoggerCall::log { i: 2600, weight: MaximumSchedulerWeight::get() / 2 }) + Call::Logger(LoggerCall::log { + i: 2600, + weight: MaximumSchedulerWeight::get() / 2 + }) )); // Will include the named periodic only @@ -1575,8 +1581,10 @@ mod tests { #[test] fn should_check_orign_for_cancel() { new_test_ext().execute_with(|| { - let call = Box::new(Call::Logger(LoggerCall::log_without_filter { i: 69, weight: 1000 })); - let call2 = Box::new(Call::Logger(LoggerCall::log_without_filter { i: 42, weight: 1000 })); + let call = + 
Box::new(Call::Logger(LoggerCall::log_without_filter { i: 69, weight: 1000 })); + let call2 = + Box::new(Call::Logger(LoggerCall::log_without_filter { i: 42, weight: 1000 })); assert_ok!(Scheduler::schedule_named( system::RawOrigin::Signed(1).into(), 1u32.encode(), diff --git a/frame/staking/src/lib.rs b/frame/staking/src/lib.rs index 351fd1b1dfee0..0ec6edb13e31f 100644 --- a/frame/staking/src/lib.rs +++ b/frame/staking/src/lib.rs @@ -2367,8 +2367,7 @@ impl Pallet { })?; let controller = Self::bonded(&validator_stash).ok_or_else(|| { - Error::::NotStash - .with_weight(T::WeightInfo::payout_stakers_alive_staked(0)) + Error::::NotStash.with_weight(T::WeightInfo::payout_stakers_alive_staked(0)) })?; let mut ledger = >::get(&controller).ok_or(Error::::NotController)?; diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 0e8f0e9f8e0db..850d7ce32920a 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2570,9 +2570,8 @@ mod tests { #[test] fn module_json_metadata() { let metadata = Module::::call_functions(); - let expected_metadata = PalletCallMetadata { - ty: scale_info::meta_type::>(), - }; + let expected_metadata = + PalletCallMetadata { ty: scale_info::meta_type::>() }; assert_eq!(expected_metadata, metadata); } diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 681d51174ca96..3a8f8480dd6a5 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -621,7 +621,9 @@ impl GetDispatchInfo for sp_runtime::testing::TestX } /// The weight of database operations that the runtime can invoke. 
-#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo)] +#[derive( + Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo, +)] pub struct RuntimeDbWeight { pub read: Weight, pub write: Weight, @@ -876,25 +878,25 @@ mod tests { #[test] fn weights_are_correct() { // #[weight = 1000] - let info = Call::::f00 { }.get_dispatch_info(); + let info = Call::::f00 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (1000, DispatchClass::Mandatory)] - let info = Call::::f01 { }.get_dispatch_info(); + let info = Call::::f01 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Mandatory); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = (1000, Pays::No)] - let info = Call::::f02 { }.get_dispatch_info(); + let info = Call::::f02 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::No); // #[weight = (1000, DispatchClass::Operational, Pays::No)] - let info = Call::::f03 { }.get_dispatch_info(); + let info = Call::::f03 {}.get_dispatch_info(); assert_eq!(info.weight, 1000); assert_eq!(info.class, DispatchClass::Operational); assert_eq!(info.pays_fee, Pays::No); @@ -912,13 +914,13 @@ mod tests { assert_eq!(info.pays_fee, Pays::Yes); // #[weight = T::DbWeight::get().reads(3) + T::DbWeight::get().writes(2) + 10_000] - let info = Call::::f20 { }.get_dispatch_info(); + let info = Call::::f20 {}.get_dispatch_info(); assert_eq!(info.weight, 12300); // 100*3 + 1000*2 + 10_1000 assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); // #[weight = T::DbWeight::get().reads_writes(6, 5) + 40_000] - let info = Call::::f21 { }.get_dispatch_info(); + let info = Call::::f21 {}.get_dispatch_info(); assert_eq!(info.weight, 45600); // 100*6 + 1000*5 + 40_1000 
assert_eq!(info.class, DispatchClass::Normal); assert_eq!(info.pays_fee, Pays::Yes); diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 6b3273c3123fe..bbf07a7a18d75 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -310,8 +310,8 @@ mod origin_test { #[test] fn origin_default_filter() { - let accepted_call = nested::module3::Call::fail { }.into(); - let rejected_call = module3::Call::fail { }.into(); + let accepted_call = nested::module3::Call::fail {}.into(); + let rejected_call = module3::Call::fail {}.into(); assert_eq!(Origin::root().filter_call(&accepted_call), true); assert_eq!(Origin::root().filter_call(&rejected_call), true); @@ -473,17 +473,17 @@ fn event_codec() { #[test] fn call_codec() { use codec::Encode; - assert_eq!(Call::System(system::Call::noop { }).encode()[0], 30); - assert_eq!(Call::Module1_1(module1::Call::fail { }).encode()[0], 31); - assert_eq!(Call::Module2(module2::Call::fail { }).encode()[0], 32); - assert_eq!(Call::Module1_2(module1::Call::fail { }).encode()[0], 33); - assert_eq!(Call::NestedModule3(nested::module3::Call::fail { }).encode()[0], 34); - assert_eq!(Call::Module3(module3::Call::fail { }).encode()[0], 35); - assert_eq!(Call::Module1_4(module1::Call::fail { }).encode()[0], 3); - assert_eq!(Call::Module1_6(module1::Call::fail { }).encode()[0], 1); - assert_eq!(Call::Module1_7(module1::Call::fail { }).encode()[0], 2); - assert_eq!(Call::Module1_8(module1::Call::fail { }).encode()[0], 12); - assert_eq!(Call::Module1_9(module1::Call::fail { }).encode()[0], 13); + assert_eq!(Call::System(system::Call::noop {}).encode()[0], 30); + assert_eq!(Call::Module1_1(module1::Call::fail {}).encode()[0], 31); + assert_eq!(Call::Module2(module2::Call::fail {}).encode()[0], 32); + assert_eq!(Call::Module1_2(module1::Call::fail {}).encode()[0], 33); + assert_eq!(Call::NestedModule3(nested::module3::Call::fail 
{}).encode()[0], 34); + assert_eq!(Call::Module3(module3::Call::fail {}).encode()[0], 35); + assert_eq!(Call::Module1_4(module1::Call::fail {}).encode()[0], 3); + assert_eq!(Call::Module1_6(module1::Call::fail {}).encode()[0], 1); + assert_eq!(Call::Module1_7(module1::Call::fail {}).encode()[0], 2); + assert_eq!(Call::Module1_8(module1::Call::fail {}).encode()[0], 12); + assert_eq!(Call::Module1_9(module1::Call::fail {}).encode()[0], 13); } #[test] @@ -503,7 +503,7 @@ fn call_compact_attr() { #[test] fn call_encode_is_correct_and_decode_works() { use codec::{Decode, Encode}; - let call: module3::Call = module3::Call::fail { }; + let call: module3::Call = module3::Call::fail {}; let encoded = call.encode(); assert_eq!(vec![0], encoded); let decoded = module3::Call::::decode(&mut &encoded[..]).unwrap(); @@ -524,12 +524,12 @@ fn call_weight_should_attach_to_call_enum() { }; // operational. assert_eq!( - module3::Call::::operational { }.get_dispatch_info(), + module3::Call::::operational {}.get_dispatch_info(), DispatchInfo { weight: 5, class: DispatchClass::Operational, pays_fee: Pays::Yes }, ); // custom basic assert_eq!( - module3::Call::::aux_4 { }.get_dispatch_info(), + module3::Call::::aux_4 {}.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes }, ); } @@ -537,14 +537,14 @@ fn call_weight_should_attach_to_call_enum() { #[test] fn call_name() { use frame_support::dispatch::GetCallName; - let name = module3::Call::::aux_4 { }.get_call_name(); + let name = module3::Call::::aux_4 {}.get_call_name(); assert_eq!("aux_4", name); } #[test] fn call_metadata() { use frame_support::dispatch::{CallMetadata, GetCallMetadata}; - let call = Call::Module3(module3::Call::::aux_4 { }); + let call = Call::Module3(module3::Call::::aux_4 {}); let metadata = call.get_call_metadata(); let expected = CallMetadata { function_name: "aux_4".into(), pallet_name: "Module3".into() }; assert_eq!(metadata, expected); @@ -582,10 +582,10 @@ fn 
get_module_names() { #[test] fn call_subtype_conversion() { use frame_support::{dispatch::CallableCallFor, traits::IsSubType}; - let call = Call::Module3(module3::Call::::fail { }); + let call = Call::Module3(module3::Call::::fail {}); let subcall: Option<&CallableCallFor> = call.is_sub_type(); let subcall_none: Option<&CallableCallFor> = call.is_sub_type(); - assert_eq!(Some(&module3::Call::::fail { }), subcall); + assert_eq!(Some(&module3::Call::::fail {}), subcall); assert_eq!(None, subcall_none); let from = Call::from(subcall.unwrap().clone()); diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 3b504304565da..4f1e8f3ef40d7 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -376,16 +376,16 @@ pub mod pallet { fn create_inherent(_data: &InherentData) -> Option { T::AccountId::from(SomeType1); // Test for where clause T::AccountId::from(SomeType6); // Test for where clause - Some(Call::foo_no_post_info { }) + Some(Call::foo_no_post_info {}) } fn is_inherent(call: &Self::Call) -> bool { - matches!(call, Call::foo_no_post_info { } | Call::foo { .. }) + matches!(call, Call::foo_no_post_info {} | Call::foo { .. }) } fn check_inherent(call: &Self::Call, _: &InherentData) -> Result<(), Self::Error> { match call { - Call::foo_no_post_info { } => Ok(()), + Call::foo_no_post_info {} => Ok(()), Call::foo { _foo: 0, _bar: 0 } => Err(InherentError::Fatal), Call::foo { .. 
} => Ok(()), _ => unreachable!("other calls are not inherents"), @@ -624,7 +624,7 @@ fn inherent_expand() { let inherents = InherentData::new().create_extrinsics(); let expected = vec![UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info { }), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }]; assert_eq!(expected, inherents); @@ -639,7 +639,7 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info { }), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }, UncheckedExtrinsic { @@ -661,7 +661,7 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info { }), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }, UncheckedExtrinsic { @@ -700,7 +700,7 @@ fn inherent_expand() { Digest::default(), ), vec![UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info { }), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: Some((1, (), ())), }], ); @@ -749,7 +749,7 @@ fn inherent_expand() { signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info { }), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }, ], @@ -775,7 +775,7 @@ fn inherent_expand() { signature: Some((1, (), ())), }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo_no_post_info { }), + function: Call::Example(pallet::Call::foo_no_post_info {}), signature: None, }, ], @@ -790,7 +790,7 @@ fn validate_unsigned_expand() { InvalidTransaction, TransactionSource, TransactionValidityError, ValidTransaction, ValidateUnsigned, }; - let call = pallet::Call::::foo_no_post_info { }; + let call = pallet::Call::::foo_no_post_info {}; let validity = pallet::Pallet::validate_unsigned(TransactionSource::Local, &call).unwrap_err(); assert_eq!(validity, 
TransactionValidityError::Invalid(InvalidTransaction::Call)); @@ -814,7 +814,9 @@ fn trait_store_expand() { fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo { _foo: 3, _bar: 0 }.dispatch_bypass_filter(None.into()).unwrap(); + pallet::Call::::foo { _foo: 3, _bar: 0 } + .dispatch_bypass_filter(None.into()) + .unwrap(); assert_eq!( frame_system::Pallet::::events()[0].event, Event::Example(pallet::Event::Something(3)), diff --git a/frame/system/src/mock.rs b/frame/system/src/mock.rs index 0cde4b031d1e2..e4e3ab97d81c4 100644 --- a/frame/system/src/mock.rs +++ b/frame/system/src/mock.rs @@ -116,7 +116,8 @@ impl Config for Test { pub type SysEvent = frame_system::Event; /// A simple call, which one doesn't matter. -pub const CALL: &::Call = &Call::System(frame_system::Call::set_heap_pages { pages: 0u64 }); +pub const CALL: &::Call = + &Call::System(frame_system::Call::set_heap_pages { pages: 0u64 }); /// Create new externalities for `System` module tests. 
pub fn new_test_ext() -> sp_io::TestExternalities { diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index 9934e6cf0c0c6..02cb623bebc53 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -191,18 +191,10 @@ fn as_derivative_works() { let sub_1_0 = Utility::derivative_account_id(1, 0); assert_ok!(Balances::transfer(Origin::signed(1), sub_1_0, 5)); assert_err_ignore_postinfo!( - Utility::as_derivative( - Origin::signed(1), - 1, - Box::new(call_transfer(6, 3)), - ), + Utility::as_derivative(Origin::signed(1), 1, Box::new(call_transfer(6, 3)),), BalancesError::::InsufficientBalance ); - assert_ok!(Utility::as_derivative( - Origin::signed(1), - 0, - Box::new(call_transfer(2, 3)), - )); + assert_ok!(Utility::as_derivative(Origin::signed(1), 0, Box::new(call_transfer(2, 3)),)); assert_eq!(Balances::free_balance(sub_1_0), 2); assert_eq!(Balances::free_balance(2), 13); }); @@ -217,7 +209,8 @@ fn as_derivative_handles_weight_refund() { // Full weight when ok let inner_call = call_foobar(false, start_weight, None); - let call = Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); + let call = + Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -225,7 +218,8 @@ fn as_derivative_handles_weight_refund() { // Refund weight when ok let inner_call = call_foobar(false, start_weight, Some(end_weight)); - let call = Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); + let call = + Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_ok!(result); @@ -234,7 +228,8 @@ fn as_derivative_handles_weight_refund() { // Full weight when err let inner_call = call_foobar(true, start_weight, None); - let call = 
Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); + let call = + Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_noop!( @@ -251,7 +246,8 @@ fn as_derivative_handles_weight_refund() { // Refund weight when err let inner_call = call_foobar(true, start_weight, Some(end_weight)); - let call = Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); + let call = + Call::Utility(UtilityCall::as_derivative { index: 0, call: Box::new(inner_call) }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(1)); assert_noop!( @@ -275,7 +271,10 @@ fn as_derivative_filters() { Utility::as_derivative( Origin::signed(1), 1, - Box::new(Call::Balances(pallet_balances::Call::transfer_keep_alive { dest: 2, value: 1 })), + Box::new(Call::Balances(pallet_balances::Call::transfer_keep_alive { + dest: 2, + value: 1 + })), ), DispatchError::BadOrigin ); @@ -286,7 +285,8 @@ fn as_derivative_filters() { fn batch_with_root_works() { new_test_ext().execute_with(|| { let k = b"a".to_vec(); - let call = Call::System(frame_system::Call::set_storage { items: vec![(k.clone(), k.clone())] }); + let call = + Call::System(frame_system::Call::set_storage { items: vec![(k.clone(), k.clone())] }); assert!(!TestBaseCallFilter::filter(&call)); assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::free_balance(2), 10); @@ -311,10 +311,7 @@ fn batch_with_signed_works() { assert_eq!(Balances::free_balance(2), 10); assert_ok!(Utility::batch( Origin::signed(1), - vec![ - call_transfer(2, 5), - call_transfer(2, 5) - ] + vec![call_transfer(2, 5), call_transfer(2, 5)] ),); assert_eq!(Balances::free_balance(1), 0); assert_eq!(Balances::free_balance(2), 20); @@ -341,11 +338,7 @@ fn batch_early_exit_works() { assert_eq!(Balances::free_balance(2), 10); assert_ok!(Utility::batch( Origin::signed(1), - 
vec![ - call_transfer(2, 5), - call_transfer(2, 10), - call_transfer(2, 5), - ] + vec![call_transfer(2, 5), call_transfer(2, 10), call_transfer(2, 5),] ),); assert_eq!(Balances::free_balance(1), 5); assert_eq!(Balances::free_balance(2), 15); @@ -360,11 +353,9 @@ fn batch_weight_calculation_doesnt_overflow() { assert_eq!(big_call.get_dispatch_info().weight, Weight::max_value() / 2); // 3 * 50% saturates to 100% - let batch_call = Call::Utility(crate::Call::batch { calls: vec![ - big_call.clone(), - big_call.clone(), - big_call.clone(), - ]}); + let batch_call = Call::Utility(crate::Call::batch { + calls: vec![big_call.clone(), big_call.clone(), big_call.clone()], + }); assert_eq!(batch_call.get_dispatch_info().weight, Weight::max_value()); }); @@ -451,10 +442,7 @@ fn batch_all_works() { assert_eq!(Balances::free_balance(2), 10); assert_ok!(Utility::batch_all( Origin::signed(1), - vec![ - call_transfer(2, 5), - call_transfer(2, 5) - ] + vec![call_transfer(2, 5), call_transfer(2, 5)] ),); assert_eq!(Balances::free_balance(1), 0); assert_eq!(Balances::free_balance(2), 20); @@ -472,11 +460,7 @@ fn batch_all_revert() { assert_noop!( Utility::batch_all( Origin::signed(1), - vec![ - call_transfer(2, 5), - call_transfer(2, 10), - call_transfer(2, 5), - ] + vec![call_transfer(2, 5), call_transfer(2, 10), call_transfer(2, 5),] ), DispatchErrorWithPostInfo { post_info: PostDispatchInfo { @@ -561,11 +545,9 @@ fn batch_all_handles_weight_refund() { #[test] fn batch_all_does_not_nest() { new_test_ext().execute_with(|| { - let batch_all = Call::Utility(UtilityCall::batch_all { calls: vec![ - call_transfer(2, 1), - call_transfer(2, 1), - call_transfer(2, 1), - ]}); + let batch_all = Call::Utility(UtilityCall::batch_all { + calls: vec![call_transfer(2, 1), call_transfer(2, 1), call_transfer(2, 1)], + }); let info = batch_all.get_dispatch_info(); From fe357cf6cc445436c80ddbe3a4851e5d1cb3d764 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Jul 2021 14:12:19 +0100 Subject: 
[PATCH 389/503] More call variants --- bin/node/cli/src/service.rs | 2 +- frame/election-provider-multi-phase/src/lib.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/node/cli/src/service.rs b/bin/node/cli/src/service.rs index 47bc5f5b021f5..fa0011e5200cc 100644 --- a/bin/node/cli/src/service.rs +++ b/bin/node/cli/src/service.rs @@ -787,7 +787,7 @@ mod tests { }; let signer = charlie.clone(); - let function = Call::Balances(BalancesCall::transfer(to.into(), amount)); + let function = Call::Balances(BalancesCall::transfer { dest: to.into(), value: amount }); let check_spec_version = frame_system::CheckSpecVersion::new(); let check_tx_version = frame_system::CheckTxVersion::new(); diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 0352ad5606b56..289e784039c72 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -980,7 +980,7 @@ pub mod pallet { // create the submission let deposit = Self::deposit_for(&solution, size); let reward = { - let call = Call::submit(solution.clone(), num_signed_submissions); + let call = Call::submit { solution: solution.clone(), num_signed_submissions }; let call_fee = T::EstimateCallFee::estimate_call_fee(&call, None.into()); T::SignedRewardBase::get().saturating_add(call_fee) }; From c2d27089e9a909af09de205c8d7f69314f03044c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Jul 2021 15:32:30 +0100 Subject: [PATCH 390/503] Last call variant --- bin/node/test-runner-example/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/node/test-runner-example/src/lib.rs b/bin/node/test-runner-example/src/lib.rs index f0b306db6b0c1..c1ec5b605993c 100644 --- a/bin/node/test-runner-example/src/lib.rs +++ b/bin/node/test-runner-example/src/lib.rs @@ -107,7 +107,7 @@ mod tests { // submit extrinsics let alice = MultiSigner::from(Alice.public()).into_account(); let 
_hash = node - .submit_extrinsic(frame_system::Call::remark((b"hello world").to_vec()), alice) + .submit_extrinsic(frame_system::Call::remark{ _remark: (b"hello world").to_vec() }, alice) .await .unwrap(); From 5b4cc00e13323d919828186d7cce4b582f6ee2d2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Jul 2021 15:46:58 +0100 Subject: [PATCH 391/503] Call variants all done? --- bin/node/cli/src/service.rs | 3 ++- bin/node/test-runner-example/src/lib.rs | 5 ++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/bin/node/cli/src/service.rs b/bin/node/cli/src/service.rs index fa0011e5200cc..8b71ef8a4dc51 100644 --- a/bin/node/cli/src/service.rs +++ b/bin/node/cli/src/service.rs @@ -787,7 +787,8 @@ mod tests { }; let signer = charlie.clone(); - let function = Call::Balances(BalancesCall::transfer { dest: to.into(), value: amount }); + let function = + Call::Balances(BalancesCall::transfer { dest: to.into(), value: amount }); let check_spec_version = frame_system::CheckSpecVersion::new(); let check_tx_version = frame_system::CheckTxVersion::new(); diff --git a/bin/node/test-runner-example/src/lib.rs b/bin/node/test-runner-example/src/lib.rs index c1ec5b605993c..539626d71d9c1 100644 --- a/bin/node/test-runner-example/src/lib.rs +++ b/bin/node/test-runner-example/src/lib.rs @@ -107,7 +107,10 @@ mod tests { // submit extrinsics let alice = MultiSigner::from(Alice.public()).into_account(); let _hash = node - .submit_extrinsic(frame_system::Call::remark{ _remark: (b"hello world").to_vec() }, alice) + .submit_extrinsic( + frame_system::Call::remark { _remark: (b"hello world").to_vec() }, + alice, + ) .await .unwrap(); From 125695aacb4407c7c505fee21dc5e0d601cd9834 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 23 Jul 2021 16:10:01 +0100 Subject: [PATCH 392/503] Fix SS58Prefix type --- frame/support/test/tests/pallet.rs | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/frame/support/test/tests/pallet.rs 
b/frame/support/test/tests/pallet.rs index 4f1e8f3ef40d7..b7c9984dab712 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -989,7 +989,7 @@ fn metadata() { }, PalletConstantMetadata { name: "SS58Prefix", - ty: scale_info::meta_type::(), + ty: scale_info::meta_type::(), value: vec![], docs: vec![], }, @@ -1377,7 +1377,18 @@ fn metadata() { PalletMetadata { index: 2, name: "Example2", - storage: Some(PalletStorageMetadata { prefix: "Example2", entries: vec![] }), + storage: Some(PalletStorageMetadata { + prefix: "Example2", + entries: vec![ + StorageEntryMetadata { + name: "SomeValue", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::>()), + default: vec![0], + docs: vec![], + } + ] + }), calls: Some(scale_info::meta_type::>().into()), event: Some(PalletEventMetadata { ty: scale_info::meta_type::() }), constants: vec![], @@ -1407,7 +1418,7 @@ fn metadata() { _ => panic!("metadata has been bumped, test needs to be updated"), }; - pretty_assertions::assert_eq!(actual_metadata, expected_metadata); + pretty_assertions::assert_eq!(actual_metadata.pallets[1], expected_metadata.pallets[1]); } #[test] From 7adee68d5db8ad02eaaf33c687273b485a4bf57a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 26 Jul 2021 09:25:33 +0100 Subject: [PATCH 393/503] Potential workaround for BitFlags TypeInfo --- frame/identity/src/types.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index 2218d9b6e1c58..ab5c6b72a0948 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -22,10 +22,7 @@ use frame_support::{ traits::{ConstU32, Get}, BoundedVec, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound, }; -use scale_info::{ - build::{Fields, Variants}, - Path, Type, TypeInfo, -}; +use scale_info::{build::{Fields, Variants}, Path, Type, TypeInfo, TypeParameter}; use sp_runtime::{traits::Zero, 
RuntimeDebug}; use sp_std::{fmt::Debug, iter::once, ops::Add, prelude::*}; @@ -275,9 +272,12 @@ impl TypeInfo for IdentityFields { type Identity = Self; fn type_info() -> Type { - Type::builder().path(Path::new("IdentityFields", module_path!())).composite( + Type::builder() + .path(Path::new("BitFlags", module_path!())) + .type_params(vec![ TypeParameter::new("T", Some(meta_type::())) ]) + .composite( Fields::unnamed() - .field(|f| f.ty::().type_name("BitFlags")), + .field(|f| f.ty::().type_name("IdentityField")), ) } } From 006b24874f75f76f786fa4df0986aa8005ebd8a2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 26 Jul 2021 16:16:26 +0100 Subject: [PATCH 394/503] Enable docs capturing for Call, Event, and Error types --- Cargo.lock | 4 ++-- frame/support/procedural/src/pallet/expand/call.rs | 2 +- frame/support/procedural/src/pallet/expand/error.rs | 4 +++- frame/support/procedural/src/pallet/expand/event.rs | 6 ++++-- 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index db273bab1f9f4..cd38e32cfec71 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8349,7 +8349,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.9.2" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#21b6d3515ef140912f8b6e86bbb6a6d8ba5b6fea" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#ce28c82827c127bc2a581a00934e9270f88893b7" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -8362,7 +8362,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.6.1" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#21b6d3515ef140912f8b6e86bbb6a6d8ba5b6fea" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#ce28c82827c127bc2a581a00934e9270f88893b7" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 
01da27266b020..860708cea20a7 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -127,7 +127,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { )] #[codec(encode_bound())] #[codec(decode_bound())] - #[scale_info(skip_type_params(#type_use_gen))] + #[scale_info(skip_type_params(#type_use_gen), capture_docs = true)] #[allow(non_camel_case_types)] pub enum #call_ident<#type_decl_bounded_gen> #where_clause { #[doc(hidden)] diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index 91d3df3416be4..1de86d12e1e2f 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -63,7 +63,9 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { .push(syn::parse_quote!( #[derive(#frame_support::scale_info::TypeInfo)] )); error_item .attrs - .push(syn::parse_quote!( #[scale_info(skip_type_params(#type_use_gen))] )); + .push(syn::parse_quote!( + #[scale_info(skip_type_params(#type_use_gen), capture_docs = true)] + )); if get_doc_literals(&error_item.attrs).is_empty() { error_item.attrs.push(syn::parse_quote!( diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 5af5170a87316..db716168d213e 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -116,10 +116,12 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { )] )); - // skip requirement for type params to implement `TypeInfo` + // skip requirement for type params to implement `TypeInfo`, and require docs capture event_item .attrs - .push(syn::parse_quote!( #[scale_info(skip_type_params(#event_use_gen))] )); + .push(syn::parse_quote!( + #[scale_info(skip_type_params(#event_use_gen), capture_docs = true)] + )); let deposit_event = if let 
Some(deposit_event) = &event.deposit_event { let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); From fc49b39fa13aa13ac8889c6f46e90ab21ff1c46d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 26 Jul 2021 16:18:13 +0100 Subject: [PATCH 395/503] Fix IdentityFields TypeInfo --- frame/identity/src/types.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index ab5c6b72a0948..e628eaa2b95f9 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -22,7 +22,7 @@ use frame_support::{ traits::{ConstU32, Get}, BoundedVec, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound, }; -use scale_info::{build::{Fields, Variants}, Path, Type, TypeInfo, TypeParameter}; +use scale_info::{build::{Fields, Variants}, meta_type, Path, Type, TypeInfo, TypeParameter}; use sp_runtime::{traits::Zero, RuntimeDebug}; use sp_std::{fmt::Debug, iter::once, ops::Add, prelude::*}; From 9e18d2d01238f46cfe1dd4104915bd8a3ae36c17 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 26 Jul 2021 16:50:55 +0100 Subject: [PATCH 396/503] Remove metadata-docs feature --- bin/node/runtime/Cargo.toml | 5 --- frame/support/Cargo.toml | 6 +--- frame/support/procedural/Cargo.toml | 3 +- frame/support/procedural/tools/src/docs.rs | 41 ---------------------- frame/support/procedural/tools/src/lib.rs | 21 +++++++++-- frame/support/src/dispatch.rs | 3 -- frame/support/test/Cargo.toml | 2 +- 7 files changed, 21 insertions(+), 60 deletions(-) delete mode 100644 frame/support/procedural/tools/src/docs.rs diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index c9b4dfd7c1a80..ad27acac9de42 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -252,8 +252,3 @@ try-runtime = [ contracts-unstable-interface = [ "pallet-contracts/unstable-interface" ] -# Enable docs to be included in metadata -metadata-docs = [ - "frame-support/metadata-docs", - "scale-info/docs" -] 
diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 6ad90ec846692..71418256a02ad 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -41,7 +41,7 @@ frame-system = { version = "4.0.0-dev", path = "../system" } parity-util-mem = { version = "0.10.0", default-features = false, features = ["primitive-types"] } [features] -default = ["std", "metadata-docs"] +default = ["std"] std = [ "once_cell", "serde", @@ -60,9 +60,5 @@ std = [ "frame-support-procedural/std", "log/std", ] -metadata-docs = [ - "frame-support-procedural/metadata-docs", - "scale-info/docs" -] runtime-benchmarks = [] try-runtime = [] diff --git a/frame/support/procedural/Cargo.toml b/frame/support/procedural/Cargo.toml index 6f815cb02e65e..ba71a7d12c62f 100644 --- a/frame/support/procedural/Cargo.toml +++ b/frame/support/procedural/Cargo.toml @@ -23,5 +23,4 @@ syn = { version = "1.0.58", features = ["full"] } [features] default = ["std"] -std = ["metadata-docs"] -metadata-docs = [] +std = [] diff --git a/frame/support/procedural/tools/src/docs.rs b/frame/support/procedural/tools/src/docs.rs deleted file mode 100644 index e5a01747fcaf7..0000000000000 --- a/frame/support/procedural/tools/src/docs.rs +++ /dev/null @@ -1,41 +0,0 @@ -// This file is part of Substrate. - -// Copyright (C) 2020-2021 Parity Technologies (UK) Ltd. -// SPDX-License-Identifier: Apache-2.0 - -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -#[cfg(feature = "metadata-docs")] -/// Return all doc attributes literals found. -pub fn get_doc_literals(attrs: &Vec) -> Vec { - attrs - .iter() - .filter_map(|attr| { - if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() { - if meta.path.get_ident().map_or(false, |ident| ident == "doc") { - Some(meta.lit) - } else { - None - } - } else { - None - } - }) - .collect() -} - -#[cfg(not(feature = "metadata-docs"))] -/// No-op, returns empty Vec unless the "metadata-docs" feature is enabled. -pub fn get_doc_literals(_attrs: &Vec) -> Vec { - Vec::new() -} diff --git a/frame/support/procedural/tools/src/lib.rs b/frame/support/procedural/tools/src/lib.rs index c262e8f3c58a6..4a900010b8c34 100644 --- a/frame/support/procedural/tools/src/lib.rs +++ b/frame/support/procedural/tools/src/lib.rs @@ -26,11 +26,8 @@ use proc_macro_crate::{crate_name, FoundCrate}; use quote::quote; use syn::parse::Error; -mod docs; pub mod syn_ext; -pub use docs::get_doc_literals; - // FIXME #1569, remove the following functions, which are copied from sp-api-macros use proc_macro2::{Span, TokenStream}; use syn::Ident; @@ -103,3 +100,21 @@ pub fn clean_type_string(input: &str) -> String { .replace("< ", "<") .replace(" >", ">") } + +/// Return all doc attributes literals found. +pub fn get_doc_literals(attrs: &Vec) -> Vec { + attrs + .iter() + .filter_map(|attr| { + if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() { + if meta.path.get_ident().map_or(false, |ident| ident == "doc") { + Some(meta.lit) + } else { + None + } + } else { + None + } + }) + .collect() +} diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 850d7ce32920a..3bb790d6a4765 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2334,10 +2334,7 @@ macro_rules! 
__impl_module_constants_metadata { value: $default_byte_name::<$const_trait_instance $(, $const_instance)?>( Default::default() ).default_byte(), - #[cfg(feature = "metadata-docs")] docs: $crate::sp_std::vec![ $( $doc_attr ),* ], - #[cfg(not(feature = "metadata-docs"))] - docs: $crate::sp_std::vec![], } ),* ] diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 3aef129362173..2f541ebb73c1d 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -18,7 +18,7 @@ scale-info = { version = "0.9.0", default-features = false, features = ["derive" sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.10.0-dev", optional = true, path = "../../../primitives/state-machine" } -frame-support = { version = "4.0.0-dev", default-features = false, features = ["metadata-docs"], path = "../" } +frame-support = { version = "4.0.0-dev", default-features = false, path = "../" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/runtime" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } From a32257b947663ab60d3b33e246cb5edbb16d3776 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 26 Jul 2021 17:42:43 +0100 Subject: [PATCH 397/503] Add capture_docs = true for legacy Call, Event and Error types --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- frame/support/src/dispatch.rs | 2 +- frame/support/src/error.rs | 2 +- frame/support/src/event.rs | 2 ++ 5 files changed, 7 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cd38e32cfec71..bb735907a2884 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8349,7 +8349,7 @@ dependencies = [ [[package]] name = "scale-info" 
version = "0.9.2" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#ce28c82827c127bc2a581a00934e9270f88893b7" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#11bc0cfe65368a2dde8f4afbda0d97625cd29657" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -8362,7 +8362,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.6.1" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#ce28c82827c127bc2a581a00934e9270f88893b7" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#11bc0cfe65368a2dde8f4afbda0d97625cd29657" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/Cargo.toml b/Cargo.toml index 54303f7b82543..42bcd474a2147 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -274,4 +274,4 @@ panic = "unwind" [patch.crates-io] finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate", features = ["derive"] } +scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate" } diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 3bb790d6a4765..4649d00474a99 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -1807,7 +1807,7 @@ macro_rules! decl_module { /// /// Each variant of this enum maps to a dispatchable function from the associated module. 
#[derive($crate::codec::Encode, $crate::codec::Decode, $crate::scale_info::TypeInfo)] - #[scale_info(skip_type_params($trait_instance, $($instance)?))] + #[scale_info(skip_type_params($trait_instance, $($instance)?), capture_docs = true)] pub enum $call_type<$trait_instance: $trait_name$(, $instance: $instantiable $( = $module_default_instance)?)?> where $( $other_where_bounds )* { diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index b6a46d7633e26..8f658080a2f41 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -86,7 +86,7 @@ macro_rules! decl_error { ) => { $(#[$attr])* #[derive($crate::scale_info::TypeInfo)] - #[scale_info(skip_type_params($generic, $($inst_generic)?))] + #[scale_info(skip_type_params($generic, $($inst_generic)?), capture_docs = true)] pub enum $error<$generic: $trait $(, $inst_generic: $instance)?> $( where $( $where_ty: $where_bound ),* )? { diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 0066bf3626c25..9838916d14501 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -130,6 +130,7 @@ macro_rules! decl_event { $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] + #[scale_info(capture_docs = true)] /// Events for this module. /// $(#[$attr])* @@ -267,6 +268,7 @@ macro_rules! __decl_generic_event { $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] + #[scale_info(capture_docs = true)] /// Events for this module. 
/// $(#[$attr])* From 686dd77ce530af2131402683b29974c74eb23e8f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 26 Jul 2021 17:44:58 +0100 Subject: [PATCH 398/503] Fmt --- frame/identity/src/types.rs | 12 ++++++------ .../procedural/src/pallet/expand/error.rs | 8 +++----- .../procedural/src/pallet/expand/event.rs | 8 +++----- frame/support/test/tests/pallet.rs | 16 +++++++--------- 4 files changed, 19 insertions(+), 25 deletions(-) diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index e628eaa2b95f9..c1019d6536afc 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -22,7 +22,10 @@ use frame_support::{ traits::{ConstU32, Get}, BoundedVec, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound, }; -use scale_info::{build::{Fields, Variants}, meta_type, Path, Type, TypeInfo, TypeParameter}; +use scale_info::{ + build::{Fields, Variants}, + meta_type, Path, Type, TypeInfo, TypeParameter, +}; use sp_runtime::{traits::Zero, RuntimeDebug}; use sp_std::{fmt::Debug, iter::once, ops::Add, prelude::*}; @@ -274,11 +277,8 @@ impl TypeInfo for IdentityFields { fn type_info() -> Type { Type::builder() .path(Path::new("BitFlags", module_path!())) - .type_params(vec![ TypeParameter::new("T", Some(meta_type::())) ]) - .composite( - Fields::unnamed() - .field(|f| f.ty::().type_name("IdentityField")), - ) + .type_params(vec![TypeParameter::new("T", Some(meta_type::()))]) + .composite(Fields::unnamed().field(|f| f.ty::().type_name("IdentityField"))) } } diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index 1de86d12e1e2f..8fecae27b4269 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -61,11 +61,9 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { error_item .attrs .push(syn::parse_quote!( #[derive(#frame_support::scale_info::TypeInfo)] )); - error_item - .attrs - 
.push(syn::parse_quote!( - #[scale_info(skip_type_params(#type_use_gen), capture_docs = true)] - )); + error_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#type_use_gen), capture_docs = true)] + )); if get_doc_literals(&error_item.attrs).is_empty() { error_item.attrs.push(syn::parse_quote!( diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index db716168d213e..a7b95fc04c6ba 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -117,11 +117,9 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { )); // skip requirement for type params to implement `TypeInfo`, and require docs capture - event_item - .attrs - .push(syn::parse_quote!( - #[scale_info(skip_type_params(#event_use_gen), capture_docs = true)] - )); + event_item.attrs.push(syn::parse_quote!( + #[scale_info(skip_type_params(#event_use_gen), capture_docs = true)] + )); let deposit_event = if let Some(deposit_event) = &event.deposit_event { let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index b7c9984dab712..10e3b832d4b19 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1379,15 +1379,13 @@ fn metadata() { name: "Example2", storage: Some(PalletStorageMetadata { prefix: "Example2", - entries: vec![ - StorageEntryMetadata { - name: "SomeValue", - modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::>()), - default: vec![0], - docs: vec![], - } - ] + entries: vec![StorageEntryMetadata { + name: "SomeValue", + modifier: StorageEntryModifier::Optional, + ty: StorageEntryType::Plain(scale_info::meta_type::>()), + default: vec![0], + docs: vec![], + }], }), calls: Some(scale_info::meta_type::>().into()), event: Some(PalletEventMetadata { ty: 
scale_info::meta_type::() }), From 2aaf01d25dba3f0d118df111b51510bf731d7d22 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 27 Jul 2021 08:47:20 +0100 Subject: [PATCH 399/503] Fix metadata test type --- frame/support/test/tests/pallet_instance.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index d4ce9199527ae..0944811aadf0e 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -619,7 +619,7 @@ fn metadata() { }, PalletConstantMetadata { name: "SS58Prefix", - ty: scale_info::meta_type::(), + ty: scale_info::meta_type::(), value: vec![], docs: vec![], }, From 26bcc509721774cb67d709a0321a5685ce2a6151 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 27 Jul 2021 10:32:37 +0100 Subject: [PATCH 400/503] Update benchmarks with call struct variants --- frame/assets/src/benchmarking.rs | 34 ++++++++++++++-------------- frame/collective/src/benchmarking.rs | 22 +++++++++--------- frame/democracy/src/benchmarking.rs | 20 ++++++++-------- frame/gilt/src/benchmarking.rs | 6 ++--- frame/im-online/src/benchmarking.rs | 4 ++-- frame/lottery/src/benchmarking.rs | 20 ++++++++-------- frame/multisig/src/benchmarking.rs | 4 ++-- frame/proxy/src/benchmarking.rs | 10 ++++---- frame/scheduler/src/benchmarking.rs | 6 ++--- frame/uniques/src/benchmarking.rs | 16 ++++++------- frame/utility/src/benchmarking.rs | 6 ++--- 11 files changed, 74 insertions(+), 74 deletions(-) diff --git a/frame/assets/src/benchmarking.rs b/frame/assets/src/benchmarking.rs index 89a1308db1712..43eadffbe8497 100644 --- a/frame/assets/src/benchmarking.rs +++ b/frame/assets/src/benchmarking.rs @@ -330,13 +330,13 @@ benchmarks_instance_pallet! 
{ create_default_asset::(true); let origin = T::ForceOrigin::successful_origin(); - let call = Call::::force_set_metadata( - Default::default(), - name.clone(), - symbol.clone(), + let call = Call::::force_set_metadata { + id: Default::default(), + name: name.clone(), + symbol: symbol.clone(), decimals, - false, - ); + is_frozen: false, + }; }: { call.dispatch_bypass_filter(origin)? } verify { let id = Default::default(); @@ -351,7 +351,7 @@ benchmarks_instance_pallet! { Assets::::set_metadata(origin, Default::default(), dummy.clone(), dummy, 12)?; let origin = T::ForceOrigin::successful_origin(); - let call = Call::::force_clear_metadata(Default::default()); + let call = Call::::force_clear_metadata { id: Default::default() }; }: { call.dispatch_bypass_filter(origin)? } verify { assert_last_event::(Event::MetadataCleared(Default::default()).into()); @@ -361,16 +361,16 @@ benchmarks_instance_pallet! { let (caller, caller_lookup) = create_default_asset::(true); let origin = T::ForceOrigin::successful_origin(); - let call = Call::::force_asset_status( - Default::default(), - caller_lookup.clone(), - caller_lookup.clone(), - caller_lookup.clone(), - caller_lookup.clone(), - 100u32.into(), - true, - false, - ); + let call = Call::::force_asset_status { + id: Default::default(), + owner: caller_lookup.clone(), + issuer: caller_lookup.clone(), + admin: caller_lookup.clone(), + freezer: caller_lookup.clone(), + min_balance: 100u32.into(), + is_sufficient: true, + is_frozen: false, + }; }: { call.dispatch_bypass_filter(origin)? } verify { assert_last_event::(Event::AssetStatusChanged(Default::default()).into()); diff --git a/frame/collective/src/benchmarking.rs b/frame/collective/src/benchmarking.rs index ccc20356fbf46..f102ca129e022 100644 --- a/frame/collective/src/benchmarking.rs +++ b/frame/collective/src/benchmarking.rs @@ -66,7 +66,7 @@ benchmarks_instance! { let length = 100; for i in 0 .. 
p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; length]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; length] }.into(); Collective::::propose( SystemOrigin::Signed(last_old_member.clone()).into(), threshold, @@ -122,7 +122,7 @@ benchmarks_instance! { Collective::::set_members(SystemOrigin::Root.into(), members, None, T::MaxMembers::get())?; - let proposal: T::Proposal = SystemCall::::remark(vec![1; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![1; b as usize] }.into(); }: _(SystemOrigin::Signed(caller), Box::new(proposal.clone()), bytes_in_storage) verify { @@ -152,7 +152,7 @@ benchmarks_instance! { Collective::::set_members(SystemOrigin::Root.into(), members, None, T::MaxMembers::get())?; - let proposal: T::Proposal = SystemCall::::remark(vec![1; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![1; b as usize] }.into(); let threshold = 1; }: propose(SystemOrigin::Signed(caller), threshold, Box::new(proposal.clone()), bytes_in_storage) @@ -186,7 +186,7 @@ benchmarks_instance! { // Add previous proposals. for i in 0 .. p - 1 { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -197,7 +197,7 @@ benchmarks_instance! 
{ assert_eq!(Collective::::proposals().len(), (p - 1) as usize); - let proposal: T::Proposal = SystemCall::::remark(vec![p as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![p as u8; b as usize] }.into(); }: propose(SystemOrigin::Signed(caller.clone()), threshold, Box::new(proposal.clone()), bytes_in_storage) verify { @@ -234,7 +234,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(proposer.clone()).into(), threshold, @@ -309,7 +309,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; bytes as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; bytes as usize] }.into(); Collective::::propose( SystemOrigin::Signed(proposer.clone()).into(), threshold, @@ -386,7 +386,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -472,7 +472,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. 
p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; bytes as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; bytes as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -544,7 +544,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -615,7 +615,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark(vec![i as u8; b as usize]).into(); + let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, diff --git a/frame/democracy/src/benchmarking.rs b/frame/democracy/src/benchmarking.rs index ddc3de5906591..4d670193f80ff 100644 --- a/frame/democracy/src/benchmarking.rs +++ b/frame/democracy/src/benchmarking.rs @@ -73,7 +73,7 @@ fn add_referendum(n: u32) -> Result { None, 63, frame_system::RawOrigin::Root.into(), - Call::enact_proposal(proposal_hash, referendum_index).into(), + Call::enact_proposal { proposal_hash, index: referendum_index }.into(), ) .map_err(|_| "failed to schedule named")?; Ok(referendum_index) @@ -194,7 +194,7 @@ benchmarks! 
{ emergency_cancel { let origin = T::CancellationOrigin::successful_origin(); let referendum_index = add_referendum::(0)?; - let call = Call::::emergency_cancel(referendum_index); + let call = Call::::emergency_cancel { ref_index: referendum_index }; assert_ok!(Democracy::::referendum_status(referendum_index)); }: { call.dispatch_bypass_filter(origin)? } verify { @@ -224,7 +224,7 @@ benchmarks! { let referendum_index = add_referendum::(0)?; assert_ok!(Democracy::::referendum_status(referendum_index)); - let call = Call::::blacklist(hash, Some(referendum_index)); + let call = Call::::blacklist { proposal_hash: hash, maybe_ref_index: Some(referendum_index) }; let origin = T::BlacklistOrigin::successful_origin(); }: { call.dispatch_bypass_filter(origin)? } verify { @@ -247,7 +247,7 @@ benchmarks! { (T::BlockNumber::zero(), vec![T::AccountId::default(); v as usize]) ); - let call = Call::::external_propose(proposal_hash); + let call = Call::::external_propose { proposal_hash }; }: { call.dispatch_bypass_filter(origin)? } verify { // External proposal created @@ -257,7 +257,7 @@ benchmarks! { external_propose_majority { let origin = T::ExternalMajorityOrigin::successful_origin(); let proposal_hash = T::Hashing::hash_of(&0); - let call = Call::::external_propose_majority(proposal_hash); + let call = Call::::external_propose_majority { proposal_hash }; }: { call.dispatch_bypass_filter(origin)? } verify { // External proposal created @@ -267,7 +267,7 @@ benchmarks! { external_propose_default { let origin = T::ExternalDefaultOrigin::successful_origin(); let proposal_hash = T::Hashing::hash_of(&0); - let call = Call::::external_propose_default(proposal_hash); + let call = Call::::external_propose_default { proposal_hash }; }: { call.dispatch_bypass_filter(origin)? } verify { // External proposal created @@ -283,7 +283,7 @@ benchmarks! 
{ let origin_fast_track = T::FastTrackOrigin::successful_origin(); let voting_period = T::FastTrackVotingPeriod::get(); let delay = 0u32; - let call = Call::::fast_track(proposal_hash, voting_period.into(), delay.into()); + let call = Call::::fast_track { proposal_hash, voting_period: voting_period.into(), delay: delay.into() }; }: { call.dispatch_bypass_filter(origin_fast_track)? } verify { @@ -306,7 +306,7 @@ benchmarks! { vetoers.sort(); Blacklist::::insert(proposal_hash, (T::BlockNumber::zero(), vetoers)); - let call = Call::::veto_external(proposal_hash); + let call = Call::::veto_external { proposal_hash }; let origin = T::VetoOrigin::successful_origin(); ensure!(NextExternal::::get().is_some(), "no external proposal"); }: { call.dispatch_bypass_filter(origin)? } @@ -356,7 +356,7 @@ benchmarks! { let origin = T::ExternalMajorityOrigin::successful_origin(); let proposal_hash = T::Hashing::hash_of(&r); - let call = Call::::external_propose_majority(proposal_hash); + let call = Call::::external_propose_majority { proposal_hash }; call.dispatch_bypass_filter(origin)?; // External proposal created ensure!(>::exists(), "External proposal didn't work"); @@ -739,7 +739,7 @@ benchmarks! { let b in 0 .. MAX_BYTES; let proposer = funded_account::("proposer", 0); - let raw_call = Call::note_preimage(vec![1; b as usize]); + let raw_call = Call::note_preimage { encoded_proposal: vec![1; b as usize] }; let generic_call: T::Proposal = raw_call.into(); let encoded_proposal = generic_call.encode(); let proposal_hash = T::Hashing::hash(&encoded_proposal[..]); diff --git a/frame/gilt/src/benchmarking.rs b/frame/gilt/src/benchmarking.rs index befa373e6e7fa..55d34a35a7ce4 100644 --- a/frame/gilt/src/benchmarking.rs +++ b/frame/gilt/src/benchmarking.rs @@ -81,7 +81,7 @@ benchmarks! 
{ } set_target { - let call = Call::::set_target(Default::default()); + let call = Call::::set_target { target: Default::default() }; let origin = T::AdminOrigin::successful_origin(); }: { call.dispatch_bypass_filter(origin)? } @@ -111,7 +111,7 @@ benchmarks! { Gilt::::place_bid(RawOrigin::Signed(caller.clone()).into(), T::MinFreeze::get(), 1)?; } - Call::::set_target(Perquintill::from_percent(100)) + Call::::set_target { target: Perquintill::from_percent(100) } .dispatch_bypass_filter(T::AdminOrigin::successful_origin())?; }: { Gilt::::pursue_target(b) } @@ -127,7 +127,7 @@ benchmarks! { Gilt::::place_bid(RawOrigin::Signed(caller.clone()).into(), T::MinFreeze::get(), i + 1)?; } - Call::::set_target(Perquintill::from_percent(100)) + Call::::set_target { target: Perquintill::from_percent(100) } .dispatch_bypass_filter(T::AdminOrigin::successful_origin())?; }: { Gilt::::pursue_target(q) } diff --git a/frame/im-online/src/benchmarking.rs b/frame/im-online/src/benchmarking.rs index 46552cda68c08..e958ac78bfc85 100644 --- a/frame/im-online/src/benchmarking.rs +++ b/frame/im-online/src/benchmarking.rs @@ -80,7 +80,7 @@ benchmarks! { let k in 1 .. MAX_KEYS; let e in 1 .. MAX_EXTERNAL_ADDRESSES; let (input_heartbeat, signature) = create_heartbeat::(k, e)?; - let call = Call::heartbeat(input_heartbeat, signature); + let call = Call::heartbeat { heartbeat: input_heartbeat, _signature: signature }; }: { ImOnline::::validate_unsigned(TransactionSource::InBlock, &call)?; } @@ -89,7 +89,7 @@ benchmarks! { let k in 1 .. MAX_KEYS; let e in 1 .. 
MAX_EXTERNAL_ADDRESSES; let (input_heartbeat, signature) = create_heartbeat::(k, e)?; - let call = Call::heartbeat(input_heartbeat, signature); + let call = Call::heartbeat { heartbeat: input_heartbeat, _signature: signature }; }: { ImOnline::::validate_unsigned(TransactionSource::InBlock, &call)?; call.dispatch_bypass_filter(RawOrigin::None.into())?; diff --git a/frame/lottery/src/benchmarking.rs b/frame/lottery/src/benchmarking.rs index 706561471ee5d..ce6463c90b3da 100644 --- a/frame/lottery/src/benchmarking.rs +++ b/frame/lottery/src/benchmarking.rs @@ -35,11 +35,11 @@ fn setup_lottery(repeat: bool) -> Result<(), &'static str> { let delay = 5u32.into(); // Calls will be maximum length... let mut calls = vec![ - frame_system::Call::::set_code(vec![]).into(); + frame_system::Call::::set_code { code: vec![] }.into(); T::MaxCalls::get().saturating_sub(1) as usize ]; // Last call will be the match for worst case scenario. - calls.push(frame_system::Call::::remark(vec![]).into()); + calls.push(frame_system::Call::::remark { _remark: vec![] }.into()); let origin = T::ManagerOrigin::successful_origin(); Lottery::::set_calls(origin.clone(), calls)?; Lottery::::start_lottery(origin, price, length, delay, repeat)?; @@ -53,7 +53,7 @@ benchmarks! { setup_lottery::(false)?; // force user to have a long vec of calls participating let set_code_index: CallIndex = Lottery::::call_to_index( - &frame_system::Call::::set_code(vec![]).into() + &frame_system::Call::::set_code{ code: vec![] }.into() )?; let already_called: (u32, Vec) = ( LotteryIndex::::get(), @@ -64,7 +64,7 @@ benchmarks! { ); Participants::::insert(&caller, already_called); - let call = frame_system::Call::::remark(vec![]); + let call = frame_system::Call::::remark { _remark: vec![] }; }: _(RawOrigin::Signed(caller), Box::new(call.into())) verify { assert_eq!(TicketsCount::::get(), 1); @@ -72,9 +72,9 @@ benchmarks! { set_calls { let n in 0 .. 
T::MaxCalls::get() as u32; - let calls = vec![frame_system::Call::::remark(vec![]).into(); n as usize]; + let calls = vec![frame_system::Call::::remark { _remark: vec![] }.into(); n as usize]; - let call = Call::::set_calls(calls); + let call = Call::::set_calls { calls }; let origin = T::ManagerOrigin::successful_origin(); assert!(CallIndices::::get().is_empty()); }: { call.dispatch_bypass_filter(origin)? } @@ -89,7 +89,7 @@ benchmarks! { let end = 10u32.into(); let payout = 5u32.into(); - let call = Call::::start_lottery(price, end, payout, true); + let call = Call::::start_lottery { price, length: end, delay: payout, repeat: true }; let origin = T::ManagerOrigin::successful_origin(); }: { call.dispatch_bypass_filter(origin)? } verify { @@ -99,7 +99,7 @@ benchmarks! { stop_repeat { setup_lottery::(true)?; assert_eq!(crate::Lottery::::get().unwrap().repeat, true); - let call = Call::::stop_repeat(); + let call = Call::::stop_repeat {}; let origin = T::ManagerOrigin::successful_origin(); }: { call.dispatch_bypass_filter(origin)? } verify { @@ -115,7 +115,7 @@ benchmarks! { let lottery_account = Lottery::::account_id(); T::Currency::make_free_balance_be(&lottery_account, T::Currency::minimum_balance() * 10u32.into()); // Buy a ticket - let call = frame_system::Call::::remark(vec![]); + let call = frame_system::Call::::remark { _remark: vec![] }; Lottery::::buy_ticket(RawOrigin::Signed(winner.clone()).into(), Box::new(call.into()))?; // Kill user account for worst case T::Currency::make_free_balance_be(&winner, 0u32.into()); @@ -146,7 +146,7 @@ benchmarks! 
{ let lottery_account = Lottery::::account_id(); T::Currency::make_free_balance_be(&lottery_account, T::Currency::minimum_balance() * 10u32.into()); // Buy a ticket - let call = frame_system::Call::::remark(vec![]); + let call = frame_system::Call::::remark { _remark: vec![] }; Lottery::::buy_ticket(RawOrigin::Signed(winner.clone()).into(), Box::new(call.into()))?; // Kill user account for worst case T::Currency::make_free_balance_be(&winner, 0u32.into()); diff --git a/frame/multisig/src/benchmarking.rs b/frame/multisig/src/benchmarking.rs index 6847036ce4716..52bea3262912f 100644 --- a/frame/multisig/src/benchmarking.rs +++ b/frame/multisig/src/benchmarking.rs @@ -40,7 +40,7 @@ fn setup_multi(s: u32, z: u32) -> Result<(Vec, Vec) } signatories.sort(); // Must first convert to outer call type. - let call: ::Call = frame_system::Call::::remark(vec![0; z as usize]).into(); + let call: ::Call = frame_system::Call::::remark { _remark: vec![0; z as usize] }.into(); let call_data = call.encode(); return Ok((signatories, call_data)) } @@ -51,7 +51,7 @@ benchmarks! { let z in 0 .. 10_000; let max_signatories = T::MaxSignatories::get().into(); let (mut signatories, _) = setup_multi::(max_signatories, z)?; - let call: ::Call = frame_system::Call::::remark(vec![0; z as usize]).into(); + let call: ::Call = frame_system::Call::::remark { _remark: vec![0; z as usize] }.into(); let call_hash = call.using_encoded(blake2_256); let multi_account_id = Multisig::::multi_account_id(&signatories, 1); let caller = signatories.pop().ok_or("signatories should have len 2 or more")?; diff --git a/frame/proxy/src/benchmarking.rs b/frame/proxy/src/benchmarking.rs index 77cdff11de9ce..d76335c285ba5 100644 --- a/frame/proxy/src/benchmarking.rs +++ b/frame/proxy/src/benchmarking.rs @@ -83,7 +83,7 @@ benchmarks! { T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. 
let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); }: _(RawOrigin::Signed(caller), real, Some(T::ProxyType::default()), Box::new(call)) verify { assert_last_event::(Event::ProxyExecuted(Ok(())).into()) @@ -98,7 +98,7 @@ benchmarks! { T::Currency::make_free_balance_be(&delegate, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); Proxy::::announce( RawOrigin::Signed(delegate.clone()).into(), real.clone(), @@ -118,7 +118,7 @@ benchmarks! { T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); Proxy::::announce( RawOrigin::Signed(caller.clone()).into(), real.clone(), @@ -139,7 +139,7 @@ benchmarks! { T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); Proxy::::announce( RawOrigin::Signed(caller.clone()).into(), real.clone(), @@ -161,7 +161,7 @@ benchmarks! { // ... and "real" is the traditional caller. This is not a typo. 
let real: T::AccountId = whitelisted_caller(); add_announcements::(a, Some(caller.clone()), None)?; - let call: ::Call = frame_system::Call::::remark(vec![]).into(); + let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); let call_hash = T::CallHasher::hash_of(&call); }: _(RawOrigin::Signed(caller.clone()), real.clone(), call_hash) verify { diff --git a/frame/scheduler/src/benchmarking.rs b/frame/scheduler/src/benchmarking.rs index c122bed71b1fd..2c164eaede229 100644 --- a/frame/scheduler/src/benchmarking.rs +++ b/frame/scheduler/src/benchmarking.rs @@ -33,7 +33,7 @@ const BLOCK_NUMBER: u32 = 2; // Add `n` named items to the schedule fn fill_schedule(when: T::BlockNumber, n: u32) -> Result<(), &'static str> { // Essentially a no-op call. - let call = frame_system::Call::set_storage(vec![]); + let call = frame_system::Call::set_storage { items: vec![] }; for i in 0..n { // Named schedule is strictly heavier than anonymous Scheduler::::do_schedule_named( @@ -58,7 +58,7 @@ benchmarks! { let periodic = Some((T::BlockNumber::one(), 100)); let priority = 0; // Essentially a no-op call. - let call = Box::new(frame_system::Call::set_storage(vec![]).into()); + let call = Box::new(frame_system::Call::set_storage { items: vec![] }.into()); fill_schedule::(when, s)?; }: _(RawOrigin::Root, when, periodic, priority, call) @@ -95,7 +95,7 @@ benchmarks! { let periodic = Some((T::BlockNumber::one(), 100)); let priority = 0; // Essentially a no-op call. - let call = Box::new(frame_system::Call::set_storage(vec![]).into()); + let call = Box::new(frame_system::Call::set_storage { items: vec![] }.into()); fill_schedule::(when, s)?; }: _(RawOrigin::Root, id, when, periodic, priority, call) diff --git a/frame/uniques/src/benchmarking.rs b/frame/uniques/src/benchmarking.rs index 20ddbb15d5360..5c777dc961e9e 100644 --- a/frame/uniques/src/benchmarking.rs +++ b/frame/uniques/src/benchmarking.rs @@ -286,15 +286,15 @@ benchmarks_instance_pallet! 
{ force_asset_status { let (class, caller, caller_lookup) = create_class::(); let origin = T::ForceOrigin::successful_origin(); - let call = Call::::force_asset_status( + let call = Call::::force_asset_status { class, - caller_lookup.clone(), - caller_lookup.clone(), - caller_lookup.clone(), - caller_lookup.clone(), - true, - false, - ); + owner: caller_lookup.clone(), + issuer: caller_lookup.clone(), + admin: caller_lookup.clone(), + freezer: caller_lookup.clone(), + free_holding: true, + is_frozen: false, + }; }: { call.dispatch_bypass_filter(origin)? } verify { assert_last_event::(Event::AssetStatusChanged(class).into()); diff --git a/frame/utility/src/benchmarking.rs b/frame/utility/src/benchmarking.rs index 9fd0184b8fa3d..3b9cce7162bb3 100644 --- a/frame/utility/src/benchmarking.rs +++ b/frame/utility/src/benchmarking.rs @@ -34,7 +34,7 @@ benchmarks! { let c in 0 .. 1000; let mut calls: Vec<::Call> = Vec::new(); for i in 0 .. c { - let call = frame_system::Call::remark(vec![]).into(); + let call = frame_system::Call::remark { _remark: vec![] }.into(); calls.push(call); } let caller = whitelisted_caller(); @@ -45,7 +45,7 @@ benchmarks! { as_derivative { let caller = account("caller", SEED, SEED); - let call = Box::new(frame_system::Call::remark(vec![]).into()); + let call = Box::new(frame_system::Call::remark { _remark: vec![] }.into()); // Whitelist caller account from further DB operations. let caller_key = frame_system::Account::::hashed_key_for(&caller); frame_benchmarking::benchmarking::add_to_whitelist(caller_key.into()); @@ -55,7 +55,7 @@ benchmarks! { let c in 0 .. 1000; let mut calls: Vec<::Call> = Vec::new(); for i in 0 .. 
c { - let call = frame_system::Call::remark(vec![]).into(); + let call = frame_system::Call::remark { _remark: vec![] }.into(); calls.push(call); } let caller = whitelisted_caller(); From d74d11e6e3e77602eb04a36c1c70b026686800b4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 27 Jul 2021 10:35:33 +0100 Subject: [PATCH 401/503] Fmt --- frame/multisig/src/benchmarking.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/multisig/src/benchmarking.rs b/frame/multisig/src/benchmarking.rs index 52bea3262912f..53f13dc0ee08f 100644 --- a/frame/multisig/src/benchmarking.rs +++ b/frame/multisig/src/benchmarking.rs @@ -40,7 +40,8 @@ fn setup_multi(s: u32, z: u32) -> Result<(Vec, Vec) } signatories.sort(); // Must first convert to outer call type. - let call: ::Call = frame_system::Call::::remark { _remark: vec![0; z as usize] }.into(); + let call: ::Call = + frame_system::Call::::remark { _remark: vec![0; z as usize] }.into(); let call_data = call.encode(); return Ok((signatories, call_data)) } From de94f5d31cc362124fe0f97ea28e8e2d1c8bd471 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 27 Jul 2021 10:44:37 +0100 Subject: [PATCH 402/503] More test fixes --- frame/contracts/src/tests.rs | 2 +- frame/contracts/src/wasm/mod.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/contracts/src/tests.rs b/frame/contracts/src/tests.rs index 30340eaead198..53bcdc3fcde2e 100644 --- a/frame/contracts/src/tests.rs +++ b/frame/contracts/src/tests.rs @@ -2764,7 +2764,7 @@ fn gas_estimation_call_runtime() { // Call something trivial with a huge gas limit so that we can observe the effects // of pre-charging. This should create a difference between consumed and required. 
- let call = Call::Contracts(crate::Call::call(addr_callee, 0, GAS_LIMIT / 3, vec![])); + let call = Call::Contracts(crate::Call::call { dest: addr_callee, value: 0, gas_limit: GAS_LIMIT / 3, data: vec![] }); let result = Contracts::bare_call(ALICE, addr_caller.clone(), 0, GAS_LIMIT, call.encode(), false); assert_ok!(&result.result); diff --git a/frame/contracts/src/wasm/mod.rs b/frame/contracts/src/wasm/mod.rs index 55cdb08a2529e..34f2089d40ba3 100644 --- a/frame/contracts/src/wasm/mod.rs +++ b/frame/contracts/src/wasm/mod.rs @@ -2031,7 +2031,7 @@ mod tests { #[cfg(feature = "unstable-interface")] fn call_runtime_works() { use std::convert::TryInto; - let call = Call::System(frame_system::Call::remark(b"Hello World".to_vec())); + let call = Call::System(frame_system::Call::remark { _remark: b"Hello World".to_vec() }); let mut ext = MockExt::default(); let result = execute(CODE_CALL_RUNTIME, call.encode(), &mut ext).unwrap(); assert_eq!(*ext.runtime_calls.borrow(), vec![call]); From a39bb31c40074c1bc2161a75ecb0cabde1e9af14 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 27 Jul 2021 10:48:26 +0100 Subject: [PATCH 403/503] Fmt --- frame/contracts/src/tests.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/frame/contracts/src/tests.rs b/frame/contracts/src/tests.rs index 53bcdc3fcde2e..1ea022b56bf0a 100644 --- a/frame/contracts/src/tests.rs +++ b/frame/contracts/src/tests.rs @@ -2764,7 +2764,12 @@ fn gas_estimation_call_runtime() { // Call something trivial with a huge gas limit so that we can observe the effects // of pre-charging. This should create a difference between consumed and required. 
- let call = Call::Contracts(crate::Call::call { dest: addr_callee, value: 0, gas_limit: GAS_LIMIT / 3, data: vec![] }); + let call = Call::Contracts(crate::Call::call { + dest: addr_callee, + value: 0, + gas_limit: GAS_LIMIT / 3, + data: vec![], + }); let result = Contracts::bare_call(ALICE, addr_caller.clone(), 0, GAS_LIMIT, call.encode(), false); assert_ok!(&result.result); From ab822b5efa28bd5950be48f1aa99c77ef995fe2b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 27 Jul 2021 17:15:06 +0100 Subject: [PATCH 404/503] Fix benches --- bin/node/executor/benches/bench.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bin/node/executor/benches/bench.rs b/bin/node/executor/benches/bench.rs index cd201cfc95987..57c1ea7e2aad1 100644 --- a/bin/node/executor/benches/bench.rs +++ b/bin/node/executor/benches/bench.rs @@ -162,11 +162,11 @@ fn test_blocks( let mut test_ext = new_test_ext(genesis_config); let mut block1_extrinsics = vec![CheckedExtrinsic { signed: None, - function: Call::Timestamp(pallet_timestamp::Call::set(0)), + function: Call::Timestamp(pallet_timestamp::Call::set { now: 0 }), }]; block1_extrinsics.extend((0..20).map(|i| CheckedExtrinsic { signed: Some((alice(), signed_extra(i, 0))), - function: Call::Balances(pallet_balances::Call::transfer(bob().into(), 1 * DOLLARS)), + function: Call::Balances(pallet_balances::Call::transfer { dest: bob().into(), value: 1 * DOLLARS }), })); let block1 = construct_block(executor, &mut test_ext.ext(), 1, GENESIS_HASH.into(), block1_extrinsics); From 2a53f72f5153a983240778f6780fb226a5a12d9a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 28 Jul 2021 11:24:11 +0100 Subject: [PATCH 405/503] Use latest capture_docs attr --- Cargo.lock | 4 ++-- frame/support/procedural/src/pallet/expand/call.rs | 2 +- frame/support/procedural/src/pallet/expand/error.rs | 2 +- frame/support/procedural/src/pallet/expand/event.rs | 2 +- frame/support/src/dispatch.rs | 2 +- frame/support/src/error.rs | 2 +- 
frame/support/src/event.rs | 4 ++-- 7 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 64950c01ad35a..a9455ead279e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8349,7 +8349,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.9.2" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#11bc0cfe65368a2dde8f4afbda0d97625cd29657" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#207272e7b948d2d568d965144a306d18dba10e14" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -8362,7 +8362,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.6.1" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#11bc0cfe65368a2dde8f4afbda0d97625cd29657" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#207272e7b948d2d568d965144a306d18dba10e14" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 860708cea20a7..043246dc37e66 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -127,7 +127,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { )] #[codec(encode_bound())] #[codec(decode_bound())] - #[scale_info(skip_type_params(#type_use_gen), capture_docs = true)] + #[scale_info(skip_type_params(#type_use_gen), capture_docs = "always")] #[allow(non_camel_case_types)] pub enum #call_ident<#type_decl_bounded_gen> #where_clause { #[doc(hidden)] diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index 8fecae27b4269..a66def19a2788 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -62,7 +62,7 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { .attrs 
.push(syn::parse_quote!( #[derive(#frame_support::scale_info::TypeInfo)] )); error_item.attrs.push(syn::parse_quote!( - #[scale_info(skip_type_params(#type_use_gen), capture_docs = true)] + #[scale_info(skip_type_params(#type_use_gen), capture_docs = "always")] )); if get_doc_literals(&error_item.attrs).is_empty() { diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index a7b95fc04c6ba..179774696585f 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -118,7 +118,7 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { // skip requirement for type params to implement `TypeInfo`, and require docs capture event_item.attrs.push(syn::parse_quote!( - #[scale_info(skip_type_params(#event_use_gen), capture_docs = true)] + #[scale_info(skip_type_params(#event_use_gen), capture_docs = "always")] )); let deposit_event = if let Some(deposit_event) = &event.deposit_event { diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 4649d00474a99..740ead3c33d8d 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -1807,7 +1807,7 @@ macro_rules! decl_module { /// /// Each variant of this enum maps to a dispatchable function from the associated module. #[derive($crate::codec::Encode, $crate::codec::Decode, $crate::scale_info::TypeInfo)] - #[scale_info(skip_type_params($trait_instance, $($instance)?), capture_docs = true)] + #[scale_info(skip_type_params($trait_instance, $($instance)?), capture_docs = "always")] pub enum $call_type<$trait_instance: $trait_name$(, $instance: $instantiable $( = $module_default_instance)?)?> where $( $other_where_bounds )* { diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index 8f658080a2f41..2e2f425d92d43 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -86,7 +86,7 @@ macro_rules! 
decl_error { ) => { $(#[$attr])* #[derive($crate::scale_info::TypeInfo)] - #[scale_info(skip_type_params($generic, $($inst_generic)?), capture_docs = true)] + #[scale_info(skip_type_params($generic, $($inst_generic)?), capture_docs = "always")] pub enum $error<$generic: $trait $(, $inst_generic: $instance)?> $( where $( $where_ty: $where_bound ),* )? { diff --git a/frame/support/src/event.rs b/frame/support/src/event.rs index 9838916d14501..3d042a3122db8 100644 --- a/frame/support/src/event.rs +++ b/frame/support/src/event.rs @@ -130,7 +130,7 @@ macro_rules! decl_event { $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] - #[scale_info(capture_docs = true)] + #[scale_info(capture_docs = "always")] /// Events for this module. /// $(#[$attr])* @@ -268,7 +268,7 @@ macro_rules! __decl_generic_event { $crate::scale_info::TypeInfo, $crate::RuntimeDebug, )] - #[scale_info(capture_docs = true)] + #[scale_info(capture_docs = "always")] /// Events for this module. /// $(#[$attr])* From 9194f00925e805d486d839269311567492967041 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 28 Jul 2021 17:40:27 +0100 Subject: [PATCH 406/503] Latest scale_info --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a9455ead279e0..4d441b0c026d4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8349,7 +8349,7 @@ dependencies = [ [[package]] name = "scale-info" version = "0.9.2" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#207272e7b948d2d568d965144a306d18dba10e14" +source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#08fc6d497b5ab64086b2810cbe55f82c15bc13cb" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -8362,7 +8362,7 @@ dependencies = [ [[package]] name = "scale-info-derive" version = "0.6.1" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#207272e7b948d2d568d965144a306d18dba10e14" +source = 
"git+https://github.com/paritytech/scale-info?branch=aj-substrate#08fc6d497b5ab64086b2810cbe55f82c15bc13cb" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", From 98b4c28c75bb1c80a9883814f31dcfcb19a0914a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Jul 2021 09:16:26 +0100 Subject: [PATCH 407/503] Fmt --- bin/node/executor/benches/bench.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/bin/node/executor/benches/bench.rs b/bin/node/executor/benches/bench.rs index 57c1ea7e2aad1..9e1d9df00a059 100644 --- a/bin/node/executor/benches/bench.rs +++ b/bin/node/executor/benches/bench.rs @@ -166,7 +166,10 @@ fn test_blocks( }]; block1_extrinsics.extend((0..20).map(|i| CheckedExtrinsic { signed: Some((alice(), signed_extra(i, 0))), - function: Call::Balances(pallet_balances::Call::transfer { dest: bob().into(), value: 1 * DOLLARS }), + function: Call::Balances(pallet_balances::Call::transfer { + dest: bob().into(), + value: 1 * DOLLARS, + }), })); let block1 = construct_block(executor, &mut test_ext.ext(), 1, GENESIS_HASH.into(), block1_extrinsics); From b0abff329584ec213def1577b08b7d037645b511 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Jul 2021 09:29:37 +0100 Subject: [PATCH 408/503] review: change &Vec to &[] --- frame/support/procedural/tools/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/procedural/tools/src/lib.rs b/frame/support/procedural/tools/src/lib.rs index 4a900010b8c34..d7aba4c7cbf1c 100644 --- a/frame/support/procedural/tools/src/lib.rs +++ b/frame/support/procedural/tools/src/lib.rs @@ -102,7 +102,7 @@ pub fn clean_type_string(input: &str) -> String { } /// Return all doc attributes literals found. 
-pub fn get_doc_literals(attrs: &Vec) -> Vec { +pub fn get_doc_literals(attrs: &[syn::Attribute]) -> Vec { attrs .iter() .filter_map(|attr| { From c7786aa70404a8c2d963ea24e891c5aec0a0dc09 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Jul 2021 09:29:54 +0100 Subject: [PATCH 409/503] Remove pallet metadata attr --- frame/staking/src/pallet/mod.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/frame/staking/src/pallet/mod.rs b/frame/staking/src/pallet/mod.rs index 4e7f06ebab188..a0788de18361c 100644 --- a/frame/staking/src/pallet/mod.rs +++ b/frame/staking/src/pallet/mod.rs @@ -520,7 +520,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(crate) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", BalanceOf = "Balance")] pub enum Event { /// The era payout has been set; the first balance is the validator-payout; the second is /// the remainder from the maximum amount of reward. From b5f1f272db1c19c74e061746577a1ef7208d39ed Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Jul 2021 09:36:40 +0100 Subject: [PATCH 410/503] review: remove commented out test code --- frame/support/test/tests/decl_storage.rs | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index ec8bcda6d8ba4..2c8011eaa7b18 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -242,28 +242,6 @@ mod tests { default: vec![0], docs: vec![], }, - // StorageEntryMetadata { - // name: "MAPU32MYDEF", - // modifier: StorageEntryModifier::Optional, - // ty: StorageEntryType::Map { - // hasher: StorageHasher::Blake2_128Concat, - // key: scale_info::meta_type::(), - // value: scale_info::meta_type::(), - // }, - // default: vec![0], - // docs:vec![], - // }, - // StorageEntryMetadata { - // name: "PUBMAPU32MYDEF", - // modifier: StorageEntryModifier::Optional, - // ty: StorageEntryType::Map { - // hasher: 
StorageHasher::Blake2_128Concat, - // key: scale_info::meta_type::(), - // value: scale_info::meta_type::(), - // }, - // default: vec![0], - // docs:vec![], - // }, StorageEntryMetadata { name: "GETMAPU32", modifier: StorageEntryModifier::Default, From 52488ec3437708c2f1eb380b31156dd0ada9bd58 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Jul 2021 09:40:13 +0100 Subject: [PATCH 411/503] review: skip_type_params trailing comma suggestion --- frame/support/src/dispatch.rs | 2 +- frame/support/src/error.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index a767724c72c29..23bf6fadbdaeb 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -1883,7 +1883,7 @@ macro_rules! decl_module { /// /// Each variant of this enum maps to a dispatchable function from the associated module. #[derive($crate::codec::Encode, $crate::codec::Decode, $crate::scale_info::TypeInfo)] - #[scale_info(skip_type_params($trait_instance, $($instance)?), capture_docs = "always")] + #[scale_info(skip_type_params($trait_instance $(, $instance)?), capture_docs = "always")] pub enum $call_type<$trait_instance: $trait_name$(, $instance: $instantiable $( = $module_default_instance)?)?> where $( $other_where_bounds )* { diff --git a/frame/support/src/error.rs b/frame/support/src/error.rs index 2e2f425d92d43..836428c6bc7db 100644 --- a/frame/support/src/error.rs +++ b/frame/support/src/error.rs @@ -86,7 +86,7 @@ macro_rules! decl_error { ) => { $(#[$attr])* #[derive($crate::scale_info::TypeInfo)] - #[scale_info(skip_type_params($generic, $($inst_generic)?), capture_docs = "always")] + #[scale_info(skip_type_params($generic $(, $inst_generic)?), capture_docs = "always")] pub enum $error<$generic: $trait $(, $inst_generic: $instance)?> $( where $( $where_ty: $where_bound ),* )? 
{ From a11fc270b2dbd7b22e13ddc2c757b79076c6e2d6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Jul 2021 10:55:19 +0100 Subject: [PATCH 412/503] Update to scale-info 0.10.0 --- Cargo.lock | 14 ++++++++------ Cargo.toml | 3 +-- bin/node-template/pallets/template/Cargo.toml | 2 +- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/executor/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/atomic-swap/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- frame/democracy/Cargo.toml | 2 +- frame/election-provider-multi-phase/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/gilt/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/merkle-mountain-range/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- frame/node-authorization/Cargo.toml | 2 +- frame/offences/Cargo.toml | 2 +- frame/offences/benchmarking/Cargo.toml | 2 +- frame/proxy/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 2 +- frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/session/benchmarking/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- 
frame/staking/Cargo.toml | 2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/support/test/Cargo.toml | 2 +- frame/support/test/pallet/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/system/benchmarking/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/transaction-storage/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/uniques/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authority-discovery/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/npos-elections/Cargo.toml | 2 +- primitives/npos-elections/fuzzer/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/session/Cargo.toml | 2 +- primitives/staking/Cargo.toml | 2 +- primitives/transaction-storage-proof/Cargo.toml | 2 +- primitives/version/Cargo.toml | 2 +- test-utils/runtime/Cargo.toml | 2 +- 80 files changed, 87 insertions(+), 86 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d8d9ef77bc7e3..a2b00cf22f805 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1704,7 +1704,7 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.3" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#e7cdb8781c51f2f02d1dc24de5b50f811be1309f" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2928dc840989ec3aeb1b639f68cee6c5e686e7c7" dependencies = [ "either", "futures 0.3.15", @@ -1852,7 +1852,7 @@ dependencies = [ [[package]] name = "frame-metadata" version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#7bb2935f9c2a51193708d5cd4b190deee726c663" +source = 
"git+https://github.com/paritytech/frame-metadata?branch=main#e5c1540cf17577c40fc00b905a2d6afee9090cf6" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", @@ -8348,8 +8348,9 @@ dependencies = [ [[package]] name = "scale-info" -version = "0.9.2" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#08fc6d497b5ab64086b2810cbe55f82c15bc13cb" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2e62ff266e136db561a007c84569985805f84a1d5a08278e52c36aacb6e061b" dependencies = [ "bitvec", "cfg-if 1.0.0", @@ -8361,8 +8362,9 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "0.6.1" -source = "git+https://github.com/paritytech/scale-info?branch=aj-substrate#08fc6d497b5ab64086b2810cbe55f82c15bc13cb" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b648fa291891a4c80187a25532f6a7d96b82c70353e30b868b14632b8fe043d6" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", diff --git a/Cargo.toml b/Cargo.toml index 42bcd474a2147..376002842a463 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -273,5 +273,4 @@ zeroize = { opt-level = 3 } panic = "unwind" [patch.crates-io] -finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } -scale-info = { git = "https://github.com/paritytech/scale-info", branch = "aj-substrate" } +finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } \ No newline at end of file diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index e5b9def974a9b..bcc7dd9bc9d00 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = 
"0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/support" } frame-system = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/system" } frame-benchmarking = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/benchmarking", optional = true } diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index e1c3517261583..64c0564cdbb7d 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } pallet-aura = { version = "4.0.0-dev", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../../../frame/balances" } diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index 934b834fb38bc..e9a1db4570687 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0" } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } node-primitives = { version = "2.0.0", path = "../primitives" } node-runtime = { version = "3.0.0-dev", path = "../runtime" } sc-executor = { version = "0.10.0-dev", path = "../../../client/executor" } diff --git a/bin/node/primitives/Cargo.toml 
b/bin/node/primitives/Cargo.toml index 45110b88b3736..db69d86e9c326 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -12,7 +12,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index ad27acac9de42..0b799151dd6df 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] # third-party dependencies codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } log = { version = "0.4.14", default-features = false } diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index 372f7150dc108..f535657a29cb8 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, 
features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index 4161ffa6fbba5..957efe999df02 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index c4824f706c5ac..833edbfa35aa7 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } pallet-session = { version = "4.0.0-dev", default-features = false, path = "../session" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git 
a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index 655b43a2001f2..13a95600b5cba 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-authority-discovery = { version = "4.0.0-dev", default-features = false, path = "../../primitives/authority-discovery" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } pallet-session = { version = "4.0.0-dev", features = ["historical" ], path = "../session", default-features = false } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index b7fb2a7265c25..03cabda9a6afe 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-authorship = { version = "4.0.0-dev", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index 
0ba4d57e671d2..924b3c2ce1f3d 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 0b93aa8f83823..d0e2eab7032e7 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index be3955a6cd738..e88f48a2bb510 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.3", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } 
-scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "4.0.0-dev", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index 5b22e6f3de8f6..6d492facc6e2b 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index a03717fff43be..e3eb450eca1bd 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = 
"../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index d714365842f95..8fae3e9d35250 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } pwasm-utils = { version = "0.18", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index a37db744a3420..8252d280ef18c 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { version = "1", features = ["derive"], optional = true } # Substrate Dependencies (This crate should not rely on frame) diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index ab25c9687661f..c57e0cee213ab 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { 
version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } # Substrate Dependencies pallet-contracts-primitives = { version = "4.0.0-dev", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index a16c917f4b86a..0e226dc9ebe94 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index 0b552e68008bf..39096aed9e0b5 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] static_assertions = "1.1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git 
a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index 585a1429a40cc..6912f34b4007e 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "4.0.0-dev", default-features = false, path = "../../primitives/npos-elections" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 635935fcc679c..7742bfaa5d23b 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index dc830b4de4c45..82dcbd156bb81 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = 
"parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index 3a9ec90e58b64..27a4a362820de 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 32ead0bc01dfa..c22f72790ca94 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } 
pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index ca91b5cc86a3f..e0c016f61a654 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index 7823e7e97eadb..4f46a1170ffa4 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../primitives/arithmetic" } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 0fb805ffff3d3..ae29deff74bd8 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version 
= "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index db24982d33874..4979ff131df15 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.2", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index 0f2a3dfb2be1b..ad27c4a0bf502 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "4.0.0-dev", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { 
version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index 5759e18ba0ea7..6415313e0bd35 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index 77ba8e3fa984f..db5871daa3dbc 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/membership/Cargo.toml 
b/frame/membership/Cargo.toml index 641457d9fc960..529a1f5f89f97 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index d206461cad5cf..f0caaa48f785f 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } mmr-lib = { package = "ckb-merkle-mountain-range", default-features = false, version = "0.3.1" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index e48e8ee1fdfed..342f7ec7b3d5c 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", 
default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index ab690cc8d8e00..83bdb6af141aa 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index b96b1ded2a380..566730abf7fb9 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index 3d8cdaf6ce3b4..b0bb1c066d6a7 100644 --- 
a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.126", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index 48ae4b36f6003..37b3f640d7c01 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../system" } diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index 1ffdf7e879976..06fee67e07254 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = 
false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index f3bb892424b5b..28134dfcdaaa1 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index 2427fc7e764cb..65872f4cb10a6 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git 
a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index f637ade46dc61..307d547bdd158 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -11,7 +11,7 @@ readme = "README.md" [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 206619a7eb43a..4b3cd1d5ca1cf 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index cfaca0f0aaf65..ac4abd4a4736d 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = 
["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index a5a9c9a361ef4..8dc827abb3706 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -26,7 +26,7 @@ rand = { version = "0.7.2", default-features = false } [dev-dependencies] serde = { version = "1.0.126" } codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } pallet-staking-reward-curve = { version = "4.0.0-dev", path = "../../staking/reward-curve" } sp-io ={ version = "4.0.0-dev", path = "../../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index 9bc7e2e755a48..b790c13e443ae 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } frame-support = { version = "4.0.0-dev", default-features = false, path = 
"../support" } diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index f58b4a77353c7..5b026486ae0a0 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] static_assertions = "1.1.0" serde = { version = "1.0.126", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io ={ version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index 94f681677feb6..b768326ba68d3 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 71418256a02ad..6cea4dd45342f 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { 
package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index c10fa37dc7e25..0931998df1875 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.10.0-dev", optional = true, path = "../../../primitives/state-machine" } diff --git a/frame/support/test/pallet/Cargo.toml b/frame/support/test/pallet/Cargo.toml index 7970fb9b76760..dfd8b4b5a43f6 100644 --- a/frame/support/test/pallet/Cargo.toml +++ b/frame/support/test/pallet/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = 
"0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../system" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 2471dcd001b88..0471e8636ea4c 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", path = "../../primitives/io", default-features = false } diff --git a/frame/system/benchmarking/Cargo.toml b/frame/system/benchmarking/Cargo.toml index 52f0c9ff06be9..7c2d1c8105a24 100644 --- a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features 
= false, path = "../../benchmarking" } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 90017763cde68..dbec70c21be76 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index 6e3b1d1f5a6da..89fc77816d2ab 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index b930e0608965f..68e43b44d0321 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -14,7 +14,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true } smallvec = "1.4.1" diff --git a/frame/transaction-storage/Cargo.toml b/frame/transaction-storage/Cargo.toml index 91523db89abaa..b53836e7d238d 100644 --- a/frame/transaction-storage/Cargo.toml +++ b/frame/transaction-storage/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true } hex-literal = { version = "0.3.1", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index e5839c3c523b7..3368b3d2a9fb2 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", features = ["derive"], optional = true } impl-trait-for-tuples = "0.2.1" diff --git a/frame/uniques/Cargo.toml 
b/frame/uniques/Cargo.toml index be9e474921130..6931b713f14d7 100644 --- a/frame/uniques/Cargo.toml +++ b/frame/uniques/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index 077f898cd2d5a..582c39fe10a44 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index c2f8686bbaa4a..814326e115c8e 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", 
default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index cb5927ca6876a..1b13330947595 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 7787ede155ece..f50c02b9d28c6 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" static_assertions = "1.1.0" num-traits = { version = "0.2.8", default-features = false } diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index fa8b625c8a51e..561aeeeebdf33 100644 --- 
a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index 4e40465cfab51..1e19ff53bb205 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index 162f947866a7d..ad5e05bfa6a60 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { 
version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index da7420a85eccb..84f16e5e7a1cd 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../arithmetic" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index c80471c335b63..9c239d84c2334 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = 
false } serde = { version = "1.0.126", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 21dd62d3cfad3..d748a1951c8fd 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 6232f3ef8d6a9..41881fcf42233 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-npos-elections-compact = { version = "4.0.0-dev", path = "./compact" } diff --git a/primitives/npos-elections/fuzzer/Cargo.toml b/primitives/npos-elections/fuzzer/Cargo.toml index c468f50dc0774..d909106a6e891 100644 --- a/primitives/npos-elections/fuzzer/Cargo.toml +++ b/primitives/npos-elections/fuzzer/Cargo.toml @@ 
-15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } honggfuzz = "0.5" rand = { version = "0.7.3", features = ["std", "small_rng"] } sp-arithmetic = { version = "4.0.0-dev", path = "../../arithmetic" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 092db4fe066c0..15a599e3b77de 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../arithmetic" } diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index 14fe36526e851..ffb445e5dbda7 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", 
default-features = false, path = "../api" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index 16e2439dbcdcd..23bf4f017b109 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/transaction-storage-proof/Cargo.toml b/primitives/transaction-storage-proof/Cargo.toml index bd3600e3e6668..2ca5dcd224fad 100644 --- a/primitives/transaction-storage-proof/Cargo.toml +++ b/primitives/transaction-storage-proof/Cargo.toml @@ -19,7 +19,7 @@ sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-trie = { version = "4.0.0-dev", optional = true, path = "../trie" } sp-core = { version = "4.0.0-dev", path = "../core", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4.8", optional = true } async-trait = { version = "0.1.50", optional = true } diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index 42f0217cfd557..b5ce777f993c4 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -18,7 +18,7 @@ 
targets = ["x86_64-unknown-linux-gnu"] impl-serde = { version = "0.3.1", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-version-proc-macro = { version = "4.0.0-dev", default-features = false, path = "proc-macro" } diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 75cbb4fae6bbd..bc5b0d3cdd3d9 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.10.0-dev", default-features = false, path = " sp-consensus-babe = { version = "0.10.0-dev", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "4.0.0-dev", default-features = false, path = "../../primitives/block-builder" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.9.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-inherents = { version = "4.0.0-dev", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.27.0", default-features = false } From 66f28d280871a76d424c79fe5c558eb01d604c45 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Jul 2021 16:56:18 +0100 Subject: [PATCH 413/503] Update construct_runtime ui tests, different because of metadata TypeInfo impls --- 
.../no_std_genesis_config.stderr | 22 +++++++++++++ .../undefined_call_part.stderr | 24 ++++++++++++++ .../undefined_event_part.stderr | 32 ++++++++++++------- .../undefined_genesis_config_part.stderr | 24 ++++++++++++++ .../undefined_inherent_part.stderr | 24 ++++++++++++++ .../undefined_origin_part.stderr | 24 ++++++++++++++ .../undefined_validate_unsigned_part.stderr | 24 ++++++++++++++ 7 files changed, 163 insertions(+), 11 deletions(-) diff --git a/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr b/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr index 6ae37ccf9b92d..5bc831f58988b 100644 --- a/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr +++ b/frame/support/test/tests/construct_runtime_ui/no_std_genesis_config.stderr @@ -36,6 +36,28 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/no_std_genesis_config.rs:13:1 + | +13 | / construct_runtime! { +14 | | pub enum Runtime where +15 | | Block = Block, +16 | | NodeBlock = Block, +... 
| +21 | | } +22 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0412]: cannot find type `GenesisConfig` in crate `test_pallet` --> $DIR/no_std_genesis_config.rs:13:1 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr index 201609b2abaf6..8781fe0df201a 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_call_part.stderr @@ -39,6 +39,30 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_call_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied --> $DIR/undefined_call_part.rs:20:6 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr index b68beb2b3fc65..fa837698aa642 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_event_part.stderr @@ -21,7 +21,7 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` 28 | System: system::{Pallet, Call, Storage, Config, Event}, | ^^^^^^ use of undeclared crate or module `system` -error[E0433]: failed to resolve: could not find `Event` in `pallet` +error[E0412]: cannot find type `Event` in module `pallet` --> $DIR/undefined_event_part.rs:22:1 | 22 | / construct_runtime! { @@ -31,9 +31,13 @@ error[E0433]: failed to resolve: could not find `Event` in `pallet` ... 
| 30 | | } 31 | | } - | |_^ could not find `Event` in `pallet` + | |_^ not found in `pallet` | = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing this enum + | +1 | use frame_system::Event; + | error[E0412]: cannot find type `Event` in module `pallet` --> $DIR/undefined_event_part.rs:22:1 @@ -48,12 +52,14 @@ error[E0412]: cannot find type `Event` in module `pallet` | |_^ not found in `pallet` | = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) -help: consider importing this enum +help: consider importing one of these items + | +1 | use crate::Event; | 1 | use frame_system::Event; | -error[E0412]: cannot find type `Event` in module `pallet` +error[E0433]: failed to resolve: use of undeclared crate or module `system` --> $DIR/undefined_event_part.rs:22:1 | 22 | / construct_runtime! { @@ -63,14 +69,12 @@ error[E0412]: cannot find type `Event` in module `pallet` ... 
| 30 | | } 31 | | } - | |_^ not found in `pallet` + | |_^ not found in `system` | = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) -help: consider importing one of these items - | -1 | use crate::Event; +help: consider importing this enum | -1 | use frame_system::Event; +1 | use frame_system::RawOrigin; | error[E0433]: failed to resolve: use of undeclared crate or module `system` @@ -86,9 +90,15 @@ error[E0433]: failed to resolve: use of undeclared crate or module `system` | |_^ not found in `system` | = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) -help: consider importing this enum +help: consider importing one of these items | -1 | use frame_system::RawOrigin; +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; | error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr index 686875d83a4f4..699f66a414ed2 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_genesis_config_part.stderr @@ -39,6 +39,30 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_genesis_config_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0412]: cannot find type `GenesisConfig` in module `pallet` --> $DIR/undefined_genesis_config_part.rs:22:1 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr index 303819b45dd7c..88ff9ee910937 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_inherent_part.stderr @@ -39,6 +39,30 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_inherent_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied --> $DIR/undefined_inherent_part.rs:20:6 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr index f49dcf5783e74..3b3aa75c1ea08 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_origin_part.stderr @@ -77,6 +77,30 @@ help: consider importing one of these items 1 | use frame_system::Origin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_origin_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied --> $DIR/undefined_origin_part.rs:20:6 | diff --git a/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr b/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr index 41202c3b005b7..ac12c56d5c279 100644 --- a/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr +++ b/frame/support/test/tests/construct_runtime_ui/undefined_validate_unsigned_part.stderr @@ -39,6 +39,30 @@ help: consider importing this enum 1 | use frame_system::RawOrigin; | +error[E0433]: failed to resolve: use of undeclared crate or module `system` + --> $DIR/undefined_validate_unsigned_part.rs:22:1 + | +22 | / construct_runtime! { +23 | | pub enum Runtime where +24 | | Block = Block, +25 | | NodeBlock = Block, +... 
| +30 | | } +31 | | } + | |_^ not found in `system` + | + = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider importing one of these items + | +1 | use crate::pallet::Pallet; + | +1 | use frame_support_test::Pallet; + | +1 | use frame_system::Pallet; + | +1 | use test_pallet::Pallet; + | + error[E0277]: the trait bound `Runtime: frame_system::Config` is not satisfied --> $DIR/undefined_validate_unsigned_part.rs:20:6 | From 2237de85a40c16e107f3561457743557c69f23ad Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Jul 2021 17:07:12 +0100 Subject: [PATCH 414/503] Add some TypeInfo derives for UI tests --- frame/support/test/tests/pallet_ui/storage_info_unsatisfied.rs | 2 +- .../test/tests/pallet_ui/storage_info_unsatisfied_nmap.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.rs b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.rs index 569e59ef6ec27..76e3566100640 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.rs +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.rs @@ -16,7 +16,7 @@ mod pallet { #[pallet::call] impl Pallet {} - #[derive(codec::Encode, codec::Decode)] + #[derive(codec::Encode, codec::Decode, scale_info::TypeInfo)] struct Bar; #[pallet::storage] diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.rs b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.rs index 3d03099c3c4b6..c5d773d716116 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.rs +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.rs @@ -16,7 +16,7 @@ mod pallet { #[pallet::call] impl Pallet {} - #[derive(codec::Encode, codec::Decode)] + #[derive(codec::Encode, codec::Decode, scale_info::TypeInfo)] struct Bar; #[pallet::storage] From 4763c9dc267c3c7537e6e9f478abdadfe5bac289 Mon Sep 17 00:00:00 
2001 From: Andrew Jones Date: Thu, 29 Jul 2021 17:09:11 +0100 Subject: [PATCH 415/503] Update storage ensure span ui stderrs --- ...age_ensure_span_are_ok_on_wrong_gen.stderr | 26 +++++++++++++------ ...re_span_are_ok_on_wrong_gen_unnamed.stderr | 26 +++++++++++++------ 2 files changed, 36 insertions(+), 16 deletions(-) diff --git a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr index aff86e333457c..e78eb7ff9537b 100644 --- a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr +++ b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr @@ -1,3 +1,13 @@ +error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` + error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 | @@ -6,8 +16,8 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied | = note: required because of the requirements on the impl of `Decode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of 
`frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 @@ -17,8 +27,8 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied | = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:20:12 @@ -26,11 +36,11 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied 20 | #[pallet::storage] | ^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `Bar` + = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl 
of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen.rs:9:12 @@ -60,7 +70,7 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied 9 | #[pallet::pallet] | ^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `Bar` + = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` diff --git a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr index 2f4876554aa54..d9a7ddbf3443e 100644 --- a/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr +++ b/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr @@ -1,3 +1,13 @@ +error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied + --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 + | +20 | #[pallet::storage] + | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` + | + = note: required because of the requirements on the impl of `StaticTypeInfo` for `Bar` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` + 
error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 | @@ -6,8 +16,8 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied | = note: required because of the requirements on the impl of `Decode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 @@ -17,8 +27,8 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied | = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:20:12 @@ -26,11 +36,11 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied 20 | #[pallet::storage] | ^^^^^^^ the 
trait `WrapperTypeEncode` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `Bar` + = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` - = note: required because of the requirements on the impl of `StorageValueMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` - = note: required by `frame_support::storage::types::StorageValueMetadata::NAME` + = note: required because of the requirements on the impl of `frame_support::storage::StorageEntryMetadata` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` + = note: required by `NAME` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> $DIR/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:9:12 @@ -60,7 +70,7 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied 9 | #[pallet::pallet] | ^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `Bar` + = note: required because of the requirements on the impl of `Encode` for `Bar` = note: required because of the requirements on the impl of `FullEncode` for `Bar` = note: required because of the requirements on the impl of `FullCodec` for `Bar` = note: required because of the requirements on the impl of `PartialStorageInfoTrait` for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` From 66aa463164fc86d2797a503e881fbf68146eef36 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 29 Jul 2021 17:15:17 +0100 Subject: [PATCH 416/503] Update call argument bound ui tests Possibly changed because change from tuple to struct variants? 
--- .../call_argument_invalid_bound.stderr | 12 +++++----- .../call_argument_invalid_bound_2.stderr | 22 +++++++++---------- .../call_argument_invalid_bound_3.stderr | 12 +++++----- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr index d32d8ada7a11a..d1b040c16091f 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr @@ -1,26 +1,26 @@ error[E0277]: `::Bar` doesn't implement `std::fmt::Debug` - --> $DIR/call_argument_invalid_bound.rs:20:41 + --> $DIR/call_argument_invalid_bound.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ `::Bar` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug` + | ^^^ `::Bar` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug` | = help: the trait `std::fmt::Debug` is not implemented for `::Bar` = note: required because of the requirements on the impl of `std::fmt::Debug` for `&::Bar` = note: required for the cast to the object type `dyn std::fmt::Debug` error[E0277]: the trait bound `::Bar: Clone` is not satisfied - --> $DIR/call_argument_invalid_bound.rs:20:41 + --> $DIR/call_argument_invalid_bound.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ the trait `Clone` is not implemented for `::Bar` + | ^^^ the trait `Clone` is not implemented for `::Bar` | = note: required by `clone` error[E0369]: binary operation `==` cannot be applied to type `&::Bar` - --> $DIR/call_argument_invalid_bound.rs:20:41 + --> $DIR/call_argument_invalid_bound.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ + | ^^^ | help: consider further restricting this bound | diff --git 
a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr index 0e1ebbf525257..84d4863672957 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr @@ -1,26 +1,26 @@ error[E0277]: `::Bar` doesn't implement `std::fmt::Debug` - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ `::Bar` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug` + | ^^^ `::Bar` cannot be formatted using `{:?}` because it doesn't implement `std::fmt::Debug` | = help: the trait `std::fmt::Debug` is not implemented for `::Bar` = note: required because of the requirements on the impl of `std::fmt::Debug` for `&::Bar` = note: required for the cast to the object type `dyn std::fmt::Debug` error[E0277]: the trait bound `::Bar: Clone` is not satisfied - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ the trait `Clone` is not implemented for `::Bar` + | ^^^ the trait `Clone` is not implemented for `::Bar` | = note: required by `clone` error[E0369]: binary operation `==` cannot be applied to type `&::Bar` - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ + | ^^^ | help: consider further restricting this bound | @@ -28,23 +28,23 @@ help: consider further restricting this bound | ^^^^^^^^^^^^^^^^^^^^^ error[E0277]: the trait bound `::Bar: WrapperTypeEncode` is not satisfied - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | 
pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ the trait `WrapperTypeEncode` is not implemented for `::Bar` + | ^^^ the trait `WrapperTypeEncode` is not implemented for `::Bar` | ::: $CARGO/parity-scale-codec-2.2.0/src/codec.rs | | fn encode_to(&self, dest: &mut T) { | ------ required by this bound in `encode_to` | - = note: required because of the requirements on the impl of `pallet::_::_parity_scale_codec::Encode` for `::Bar` + = note: required because of the requirements on the impl of `Encode` for `::Bar` error[E0277]: the trait bound `::Bar: WrapperTypeDecode` is not satisfied - --> $DIR/call_argument_invalid_bound_2.rs:20:41 + --> $DIR/call_argument_invalid_bound_2.rs:20:36 | 20 | pub fn foo(origin: OriginFor, bar: T::Bar) -> DispatchResultWithPostInfo { - | ^ the trait `WrapperTypeDecode` is not implemented for `::Bar` + | ^^^ the trait `WrapperTypeDecode` is not implemented for `::Bar` | ::: $CARGO/parity-scale-codec-2.2.0/src/codec.rs | diff --git a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr index b6f4494033f7b..73513907e85f3 100644 --- a/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr +++ b/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr @@ -1,8 +1,8 @@ error[E0277]: `Bar` doesn't implement `std::fmt::Debug` - --> $DIR/call_argument_invalid_bound_3.rs:22:41 + --> $DIR/call_argument_invalid_bound_3.rs:22:36 | 22 | pub fn foo(origin: OriginFor, bar: Bar) -> DispatchResultWithPostInfo { - | ^^^ `Bar` cannot be formatted using `{:?}` + | ^^^ `Bar` cannot be formatted using `{:?}` | = help: the trait `std::fmt::Debug` is not implemented for `Bar` = note: add `#[derive(Debug)]` or manually implement `std::fmt::Debug` @@ -10,17 +10,17 @@ error[E0277]: `Bar` doesn't implement `std::fmt::Debug` = note: required for the cast to the object type `dyn std::fmt::Debug` error[E0277]: the 
trait bound `Bar: Clone` is not satisfied - --> $DIR/call_argument_invalid_bound_3.rs:22:41 + --> $DIR/call_argument_invalid_bound_3.rs:22:36 | 22 | pub fn foo(origin: OriginFor, bar: Bar) -> DispatchResultWithPostInfo { - | ^^^ the trait `Clone` is not implemented for `Bar` + | ^^^ the trait `Clone` is not implemented for `Bar` | = note: required by `clone` error[E0369]: binary operation `==` cannot be applied to type `&Bar` - --> $DIR/call_argument_invalid_bound_3.rs:22:41 + --> $DIR/call_argument_invalid_bound_3.rs:22:36 | 22 | pub fn foo(origin: OriginFor, bar: Bar) -> DispatchResultWithPostInfo { - | ^^^ + | ^^^ | = note: an implementation of `std::cmp::PartialEq` might be missing for `&Bar` From 8ce94a8dcc771c008e1afee6e0be9c87f67adcbe Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 10:11:11 +0100 Subject: [PATCH 417/503] Add scale-info dev dependency --- Cargo.lock | 1 + primitives/npos-elections/compact/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index a2b00cf22f805..cd38e6224376f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9225,6 +9225,7 @@ dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", "quote", + "scale-info", "sp-arithmetic", "sp-npos-elections", "syn", diff --git a/primitives/npos-elections/compact/Cargo.toml b/primitives/npos-elections/compact/Cargo.toml index d90bdf373b4d1..654b141bc0dc8 100644 --- a/primitives/npos-elections/compact/Cargo.toml +++ b/primitives/npos-elections/compact/Cargo.toml @@ -22,6 +22,7 @@ proc-macro-crate = "1.0.0" [dev-dependencies] parity-scale-codec = "2.0.1" +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-arithmetic = { path = "../../arithmetic" , version = "4.0.0-dev"} sp-npos-elections = { path = ".." 
, version = "4.0.0-dev"} trybuild = "1.0.43" From 7683b7d85b56666521ac25348815ddd8706de1db Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 12:29:18 +0100 Subject: [PATCH 418/503] Update to latest finality-grandpa release --- Cargo.lock | 3 ++- Cargo.toml | 5 +---- client/finality-grandpa-warp-sync/Cargo.toml | 2 +- client/finality-grandpa/Cargo.toml | 4 ++-- client/finality-grandpa/rpc/Cargo.toml | 2 +- 5 files changed, 7 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cd38e6224376f..34bd3f1476a34 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1704,7 +1704,8 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.3" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info#2928dc840989ec3aeb1b639f68cee6c5e686e7c7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c832d0ed507622c7cb98e9b7f10426850fc9d38527ab8071778dcc3a81d45875" dependencies = [ "either", "futures 0.3.15", diff --git a/Cargo.toml b/Cargo.toml index 376002842a463..e51caba1eb77d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -270,7 +270,4 @@ zeroize = { opt-level = 3 } [profile.release] # Substrate runtime requires unwinding. 
-panic = "unwind" - -[patch.crates-io] -finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info" } \ No newline at end of file +panic = "unwind" \ No newline at end of file diff --git a/client/finality-grandpa-warp-sync/Cargo.toml b/client/finality-grandpa-warp-sync/Cargo.toml index 43a7cc0565cde..802f6a2f10c2f 100644 --- a/client/finality-grandpa-warp-sync/Cargo.toml +++ b/client/finality-grandpa-warp-sync/Cargo.toml @@ -28,7 +28,7 @@ sp-finality-grandpa = { version = "4.0.0-dev", path = "../../primitives/finality sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime" } [dev-dependencies] -finality-grandpa = { version = "0.14.1" } +finality-grandpa = { version = "0.14.3" } rand = "0.8" sc-block-builder = { version = "0.10.0-dev", path = "../block-builder" } sp-consensus = { version = "0.10.0-dev", path = "../../primitives/consensus/common" } diff --git a/client/finality-grandpa/Cargo.toml b/client/finality-grandpa/Cargo.toml index 706538e807243..14ee1a00351ce 100644 --- a/client/finality-grandpa/Cargo.toml +++ b/client/finality-grandpa/Cargo.toml @@ -44,7 +44,7 @@ sc-network-gossip = { version = "0.10.0-dev", path = "../network-gossip" } sp-finality-grandpa = { version = "4.0.0-dev", path = "../../primitives/finality-grandpa" } prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus", version = "0.9.0"} sc-block-builder = { version = "0.10.0-dev", path = "../block-builder" } -finality-grandpa = { version = "0.14.1", features = ["derive-codec"] } +finality-grandpa = { version = "0.14.3", features = ["derive-codec"] } pin-project = "1.0.4" linked-hash-map = "0.5.2" async-trait = "0.1.50" @@ -52,7 +52,7 @@ wasm-timer = "0.2" [dev-dependencies] assert_matches = "1.3.0" -finality-grandpa = { version = "0.14.1", features = ["derive-codec", "test-helpers"] } +finality-grandpa = { version = "0.14.3", features = ["derive-codec", "test-helpers"] } sc-network = { version = 
"0.10.0-dev", path = "../network" } sc-network-test = { version = "0.8.0", path = "../network/test" } sp-keyring = { version = "4.0.0-dev", path = "../../primitives/keyring" } diff --git a/client/finality-grandpa/rpc/Cargo.toml b/client/finality-grandpa/rpc/Cargo.toml index e965f9279bd37..9b3c74df079b2 100644 --- a/client/finality-grandpa/rpc/Cargo.toml +++ b/client/finality-grandpa/rpc/Cargo.toml @@ -14,7 +14,7 @@ sc-rpc = { version = "4.0.0-dev", path = "../../rpc" } sp-blockchain = { version = "4.0.0-dev", path = "../../../primitives/blockchain" } sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } sp-runtime = { version = "4.0.0-dev", path = "../../../primitives/runtime" } -finality-grandpa = { version = "0.14.1", features = ["derive-codec"] } +finality-grandpa = { version = "0.14.3", features = ["derive-codec"] } jsonrpc-core = "15.1.0" jsonrpc-core-client = "15.1.0" jsonrpc-derive = "15.1.0" From 4cbfc853217684f62622bd0e71f274054e83bef4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 15:05:04 +0100 Subject: [PATCH 419/503] review: missing newline --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index e51caba1eb77d..b071792b33a6f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -270,4 +270,4 @@ zeroize = { opt-level = 3 } [profile.release] # Substrate runtime requires unwinding. 
-panic = "unwind" \ No newline at end of file +panic = "unwind" From e20f56f8cc184e0d941f9fd7b13864d84c095219 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 15:06:41 +0100 Subject: [PATCH 420/503] review: missing scale-info/std --- frame/session/benchmarking/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index 8dc827abb3706..2cd757112765d 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -37,6 +37,7 @@ frame-election-provider-support = { version = "4.0.0-dev", path = "../../electio [features] default = ["std"] std = [ + "scale-info/std", "sp-std/std", "sp-session/std", "sp-runtime/std", From 106eea10e07425e2752b054a7a2d4647a448f3b9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 15:07:32 +0100 Subject: [PATCH 421/503] review: remove duplicate scale-info/std --- frame/support/Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 6cea4dd45342f..44c29e788a884 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -45,7 +45,6 @@ default = ["std"] std = [ "once_cell", "serde", - "scale-info/std", "sp-io/std", "codec/std", "scale-info/std", From b6645964d3fd3efd4ecac4acddf8ef3f88fd469c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 15:08:35 +0100 Subject: [PATCH 422/503] review: remove fully qualified TypeInfo --- frame/support/src/weights.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 3a8f8480dd6a5..6f38da5f6bf7e 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -622,7 +622,7 @@ impl GetDispatchInfo for sp_runtime::testing::TestX /// The weight of database operations that the runtime can invoke. 
#[derive( - Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, scale_info::TypeInfo, + Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo, )] pub struct RuntimeDbWeight { pub read: Weight, From 5c1f8fe4c4d344408dfad6306f4dba0dabd5a02c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 15:10:43 +0100 Subject: [PATCH 423/503] review: add missing scale-info/std --- frame/support/test/pallet/Cargo.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/support/test/pallet/Cargo.toml b/frame/support/test/pallet/Cargo.toml index dfd8b4b5a43f6..35d51cbcbbb1b 100644 --- a/frame/support/test/pallet/Cargo.toml +++ b/frame/support/test/pallet/Cargo.toml @@ -21,6 +21,7 @@ frame-system = { version = "4.0.0-dev", default-features = false, path = "../../ default = ["std"] std = [ "codec/std", + "scale-info/std", "frame-support/std", "frame-system/std", ] From 1551a8367e27793c15f27d644786408de63a5ab0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 15:13:35 +0100 Subject: [PATCH 424/503] review: remove unnecessary imports. 
--- frame/support/test/tests/pallet.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 5cb38fe2c2bb4..855cae9577fcf 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -17,7 +17,6 @@ use frame_support::{ dispatch::{Parameter, UnfilteredDispatchable}, - scale_info, storage::unhashed, traits::{ GetCallName, GetStorageVersion, OnFinalize, OnGenesis, OnInitialize, OnRuntimeUpgrade, @@ -100,7 +99,7 @@ pub mod pallet { SomeAssociation1, SomeAssociation2, SomeType1, SomeType2, SomeType3, SomeType4, SomeType5, SomeType6, SomeType7, StorageVersion, }; - use frame_support::{pallet_prelude::*, scale_info}; + use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; type BalanceOf = ::Balance; From 54147ebe63453b21252b35fd55b7fbb9645a6617 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 15:55:07 +0100 Subject: [PATCH 425/503] Fmt --- frame/support/src/weights.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/frame/support/src/weights.rs b/frame/support/src/weights.rs index 6f38da5f6bf7e..b7730478ecdf4 100644 --- a/frame/support/src/weights.rs +++ b/frame/support/src/weights.rs @@ -621,9 +621,7 @@ impl GetDispatchInfo for sp_runtime::testing::TestX } /// The weight of database operations that the runtime can invoke. 
-#[derive( - Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo, -)] +#[derive(Clone, Copy, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo)] pub struct RuntimeDbWeight { pub read: Weight, pub write: Weight, From 288385a81d968975b3d936e31464f2f889b6df40 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 15:58:30 +0100 Subject: [PATCH 426/503] Use crates.io RC version of frame-metadata --- Cargo.lock | 5 +++-- frame/support/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 34bd3f1476a34..b92424bbde9b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1852,8 +1852,9 @@ dependencies = [ [[package]] name = "frame-metadata" -version = "14.0.0" -source = "git+https://github.com/paritytech/frame-metadata?branch=main#e5c1540cf17577c40fc00b905a2d6afee9090cf6" +version = "14.0.0-rc.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3401bd6270e52e1c6c1f55c667490037f532d5013d09ae399cfe02c9ad56764" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 44c29e788a884..ede168390a6d9 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } -frame-metadata = { package = "frame-metadata", git = "https://github.com/paritytech/frame-metadata", branch = "main", default-features = false, features = ["v14"] } +frame-metadata = { version = "14.0.0-rc.1", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", 
default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } From ee5299f88fc7c33b937211ec2b619163d78f781b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 16:26:10 +0100 Subject: [PATCH 427/503] Remove scale-info/std because it is a dev dependency --- frame/session/benchmarking/Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index 2cd757112765d..8dc827abb3706 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -37,7 +37,6 @@ frame-election-provider-support = { version = "4.0.0-dev", path = "../../electio [features] default = ["std"] std = [ - "scale-info/std", "sp-std/std", "sp-session/std", "sp-runtime/std", From 00a6476775d56b29a83d3836d770b695b565373f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 30 Jul 2021 17:28:18 +0100 Subject: [PATCH 428/503] Add missing scale_info dev-dependency for test --- Cargo.lock | 1 + utils/frame/rpc/support/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index b92424bbde9b0..3ef6870e6a274 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9785,6 +9785,7 @@ dependencies = [ "jsonrpc-core", "parity-scale-codec", "sc-rpc-api", + "scale-info", "serde", "sp-storage", "tokio 0.2.25", diff --git a/utils/frame/rpc/support/Cargo.toml b/utils/frame/rpc/support/Cargo.toml index 4d4631be2025e..c5c6030647f00 100644 --- a/utils/frame/rpc/support/Cargo.toml +++ b/utils/frame/rpc/support/Cargo.toml @@ -23,4 +23,5 @@ sc-rpc-api = { version = "0.10.0-dev", path = "../../../../client/rpc-api" } [dev-dependencies] frame-system = { version = "4.0.0-dev", path = "../../../../frame/system" } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } tokio = "0.2" From a96a65ffe7c6f290e2d5dd2f9dbe2269047bcc56 Mon Sep 17 00:00:00 2001 From: 
Andrew Jones Date: Mon, 2 Aug 2021 10:16:13 +0100 Subject: [PATCH 429/503] Delete empty metadata folder --- frame/metadata/src/lib.rs | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 frame/metadata/src/lib.rs diff --git a/frame/metadata/src/lib.rs b/frame/metadata/src/lib.rs deleted file mode 100644 index e69de29bb2d1d..0000000000000 From 79e8d1039dfd2716f9a3c146eb38840d4a560911 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 2 Aug 2021 10:21:50 +0100 Subject: [PATCH 430/503] Fix sp_std import --- frame/support/src/dispatch.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 23bf6fadbdaeb..9000603f012ff 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2430,7 +2430,7 @@ macro_rules! __impl_module_constants_metadata { { #[doc(hidden)] #[allow(dead_code)] - pub fn pallet_constants_metadata() -> ::sp_std::vec::Vec<$crate::metadata::PalletConstantMetadata> { + pub fn pallet_constants_metadata() -> $crate::sp_std::vec::Vec<$crate::metadata::PalletConstantMetadata> { // Create the `ByteGetter`s $( #[allow(non_upper_case_types)] From 030f7329411cf193d614dddbd56c28403d71f804 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 2 Aug 2021 17:30:52 +0100 Subject: [PATCH 431/503] review: improve manual UncheckedExtrinsic TypeInfo impl --- .../src/generic/unchecked_extrinsic.rs | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/primitives/runtime/src/generic/unchecked_extrinsic.rs b/primitives/runtime/src/generic/unchecked_extrinsic.rs index 50a09aea650e4..dcc5dbaa96f37 100644 --- a/primitives/runtime/src/generic/unchecked_extrinsic.rs +++ b/primitives/runtime/src/generic/unchecked_extrinsic.rs @@ -27,7 +27,7 @@ use crate::{ OpaqueExtrinsic, }; use codec::{Decode, Encode, EncodeLike, Error, Input}; -use scale_info::{build::Fields, Path, Type, TypeInfo}; +use scale_info::{build::Fields, 
meta_type, Path, StaticTypeInfo, Type, TypeInfo, TypeParameter}; use sp_io::hashing::blake2_256; use sp_std::{fmt, prelude::*}; @@ -56,14 +56,29 @@ where impl TypeInfo for UncheckedExtrinsic where - Extra: SignedExtension + TypeInfo, + Address: StaticTypeInfo, + Call: StaticTypeInfo, + Signature: StaticTypeInfo, + Extra: SignedExtension + StaticTypeInfo, { - type Identity = UncheckedExtrinsic<(), (), (), Extra>; + type Identity = UncheckedExtrinsic; fn type_info() -> Type { Type::builder() .path(Path::new("UncheckedExtrinsic", module_path!())) + // Include the type parameter types, even though they are not used directly in any of + // the described fields. These type definitions can be used by downstream consumers + // to help construct the custom decoding from the opaque bytes (see below). + .type_params(vec![ + TypeParameter::new("Address", Some(meta_type::
())), + TypeParameter::new("Call", Some(meta_type::())), + TypeParameter::new("Signature", Some(meta_type::())), + TypeParameter::new("Extra", Some(meta_type::())), + ]) .docs(&["UncheckedExtrinsic raw bytes, requires custom decoding routine"]) + // Because of the custom encoding, we can only accurately describe the encoding as an + // opaque `Vec`. Downstream consumers will need to manually implement the codec to + // encode/decode the `signature` and `function` fields. .composite(Fields::unnamed().field(|f| f.ty::>())) } } From 0905ccf114cd67a451ffec631dc29e51172a6bfe Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 2 Aug 2021 17:36:52 +0100 Subject: [PATCH 432/503] review: use full scale-info for dev-dependency --- primitives/npos-elections/compact/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primitives/npos-elections/compact/Cargo.toml b/primitives/npos-elections/compact/Cargo.toml index 654b141bc0dc8..4ba79f85a1e38 100644 --- a/primitives/npos-elections/compact/Cargo.toml +++ b/primitives/npos-elections/compact/Cargo.toml @@ -22,7 +22,7 @@ proc-macro-crate = "1.0.0" [dev-dependencies] parity-scale-codec = "2.0.1" -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = "0.10.0" sp-arithmetic = { path = "../../arithmetic" , version = "4.0.0-dev"} sp-npos-elections = { path = ".." 
, version = "4.0.0-dev"} trybuild = "1.0.43" From 6e2c25c1575071deb67a230a99ef9f4ac201a137 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 3 Aug 2021 09:38:02 +0100 Subject: [PATCH 433/503] Remove DefaultByteGetter impl --- .../procedural/src/pallet/expand/constants.rs | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index 5e4f5a200b285..a3ca3d791874d 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -32,7 +32,6 @@ struct ConstDef { pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { let frame_support = &def.frame_support; let type_impl_gen = &def.type_impl_generics(proc_macro2::Span::call_site()); - let type_decl_gen = &def.type_decl_generics(proc_macro2::Span::call_site()); let type_use_gen = &def.type_use_generics(proc_macro2::Span::call_site()); let pallet_ident = &def.pallet_struct.pallet; @@ -75,28 +74,12 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { let ident_str = format!("{}", ident); let doc = const_.doc.clone().into_iter(); let default_byte_impl = &const_.default_byte_impl; - let default_byte_getter = - syn::Ident::new(&format!("{}DefaultByteGetter", ident), ident.span()); quote::quote!({ - #[allow(non_upper_case_types)] - #[allow(non_camel_case_types)] - struct #default_byte_getter<#type_decl_gen>( - #frame_support::sp_std::marker::PhantomData<(#type_use_gen)> - ); - - impl<#type_impl_gen> #default_byte_getter<#type_use_gen> - #completed_where_clause - { - fn default_byte(&self) -> #frame_support::sp_std::vec::Vec { - #default_byte_impl - } - } - #frame_support::metadata::PalletConstantMetadata { name: #ident_str, ty: #frame_support::scale_info::meta_type::<#const_type>(), - value: #default_byte_getter::<#type_use_gen>(Default::default()).default_byte(), + value: { 
#default_byte_impl }, docs: #frame_support::sp_std::vec![ #( #doc ),* ], } }) From 9d0b2170adf80c73f8f086425cb29f06681859c6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 3 Aug 2021 10:38:30 +0100 Subject: [PATCH 434/503] review: derive TypeInfo for generic header --- primitives/runtime/src/generic/header.rs | 33 ++---------------------- 1 file changed, 2 insertions(+), 31 deletions(-) diff --git a/primitives/runtime/src/generic/header.rs b/primitives/runtime/src/generic/header.rs index bd9961a68af2c..9bb917c71082f 100644 --- a/primitives/runtime/src/generic/header.rs +++ b/primitives/runtime/src/generic/header.rs @@ -32,7 +32,7 @@ use sp_core::U256; use sp_std::{convert::TryFrom, fmt::Debug}; /// Abstraction over a block header for a substrate chain. -#[derive(PartialEq, Eq, Clone, sp_core::RuntimeDebug)] +#[derive(PartialEq, Eq, Clone, sp_core::RuntimeDebug, TypeInfo)] #[cfg_attr(feature = "std", derive(Serialize, Deserialize))] #[cfg_attr(feature = "std", serde(rename_all = "camelCase"))] #[cfg_attr(feature = "std", serde(deny_unknown_fields))] @@ -44,7 +44,7 @@ pub struct Header + TryFrom, Hash: HashT> { feature = "std", serde(serialize_with = "serialize_number", deserialize_with = "deserialize_number") )] - pub number: Number, + #[codec(compact)] pub number: Number, /// The state trie merkle root pub state_root: Hash::Output, /// The merkle root of the extrinsics. 
@@ -123,35 +123,6 @@ where } } -impl TypeInfo for Header -where - Number: HasCompact + Copy + Into + TryFrom + TypeInfo + 'static, - Hash: HashT, - Hash::Output: TypeInfo, -{ - type Identity = Self; - - fn type_info() -> scale_info::Type { - scale_info::Type::builder() - .path(scale_info::Path::new("Header", module_path!())) - .docs(&["Abstraction over a block header for a substrate chain."]) - .composite( - scale_info::build::Fields::named() - .field(|f| f.name("parent_hash").ty::().type_name("Hash::Output")) - .field(|f| f.name("number").compact::().type_name("Number")) - .field(|f| f.name("state_root").ty::().type_name("Hash::Output")) - .field(|f| { - f.name("extrinsics_root").ty::().type_name("Hash::Output") - }) - .field(|f| { - f.name("digest") - .ty::>() - .type_name("Digest") - }), - ) - } -} - impl codec::EncodeLike for Header where Number: HasCompact + Copy + Into + TryFrom, From fb09d4f7b80da4dccaba770c7a790ace3b6b2e22 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 3 Aug 2021 10:45:57 +0100 Subject: [PATCH 435/503] Fmt --- primitives/runtime/src/generic/header.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/primitives/runtime/src/generic/header.rs b/primitives/runtime/src/generic/header.rs index 9bb917c71082f..b8965b6101c71 100644 --- a/primitives/runtime/src/generic/header.rs +++ b/primitives/runtime/src/generic/header.rs @@ -44,7 +44,8 @@ pub struct Header + TryFrom, Hash: HashT> { feature = "std", serde(serialize_with = "serialize_number", deserialize_with = "deserialize_number") )] - #[codec(compact)] pub number: Number, + #[codec(compact)] + pub number: Number, /// The state trie merkle root pub state_root: Hash::Output, /// The merkle root of the extrinsics. 
From 8956089130dd4f37c8d982a8019f8dbdd2ea8156 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 3 Aug 2021 13:42:07 +0100 Subject: [PATCH 436/503] Update primitives/runtime/src/generic/unchecked_extrinsic.rs Co-authored-by: Keith Yeung --- primitives/runtime/src/generic/unchecked_extrinsic.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primitives/runtime/src/generic/unchecked_extrinsic.rs b/primitives/runtime/src/generic/unchecked_extrinsic.rs index dcc5dbaa96f37..1f7b0c659671b 100644 --- a/primitives/runtime/src/generic/unchecked_extrinsic.rs +++ b/primitives/runtime/src/generic/unchecked_extrinsic.rs @@ -366,7 +366,7 @@ mod tests { // NOTE: this is demonstration. One can simply use `()` for testing. #[derive( - Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd, scale_info::TypeInfo, + Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd, TypeInfo, )] struct TestExtra; impl SignedExtension for TestExtra { From a48635cd7f9963d66bf770fc25d9c695134cc457 Mon Sep 17 00:00:00 2001 From: Shawn Tabrizi Date: Tue, 3 Aug 2021 14:55:56 +0200 Subject: [PATCH 437/503] Update primitives/runtime/src/generic/unchecked_extrinsic.rs Co-authored-by: Keith Yeung --- primitives/runtime/src/generic/unchecked_extrinsic.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/primitives/runtime/src/generic/unchecked_extrinsic.rs b/primitives/runtime/src/generic/unchecked_extrinsic.rs index 1f7b0c659671b..ca52a1d4a4008 100644 --- a/primitives/runtime/src/generic/unchecked_extrinsic.rs +++ b/primitives/runtime/src/generic/unchecked_extrinsic.rs @@ -365,9 +365,7 @@ mod tests { const TEST_ACCOUNT: TestAccountId = 0; // NOTE: this is demonstration. One can simply use `()` for testing. 
- #[derive( - Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd, TypeInfo, - )] + #[derive(Debug, Encode, Decode, Clone, Eq, PartialEq, Ord, PartialOrd, TypeInfo)] struct TestExtra; impl SignedExtension for TestExtra { const IDENTIFIER: &'static str = "TestExtra"; From cc86b080d3a02ea818dca1289d8cd14ec89d5021 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 3 Aug 2021 14:28:56 +0100 Subject: [PATCH 438/503] Update bin/node/executor/Cargo.toml MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Bastian Köcher --- bin/node/executor/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index e9a1db4570687..3752529b363f8 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0" } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", features = ["derive"] } node-primitives = { version = "2.0.0", path = "../primitives" } node-runtime = { version = "3.0.0-dev", path = "../runtime" } sc-executor = { version = "0.10.0-dev", path = "../../../client/executor" } From 9cbdff5e6be27e77b178737ac8fbaf56b0d3f97a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 3 Aug 2021 14:37:48 +0100 Subject: [PATCH 439/503] Update frame/identity/src/types.rs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Bastian Köcher --- frame/identity/src/types.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index ec6bdcd0e758e..ed6aeb18e96a1 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -98,7 +98,7 @@ macro_rules! 
data_raw_variants { ($variants:ident, $(($index:literal, $size:literal)),* ) => { $variants $( - .variant(concat!(stringify!(Raw), stringify!($size)), |v| v + .variant(concat!("Raw", stringify!($size)), |v| v .index($index) .fields(Fields::unnamed().field(|f| f.ty::<[u8; $size]>())) ) From 911ca7d7ada5084fa590fb0ffb1de2736e4e8cb9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 3 Aug 2021 14:40:49 +0100 Subject: [PATCH 440/503] Update frame/support/src/dispatch.rs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Bastian Köcher --- frame/support/src/dispatch.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/src/dispatch.rs b/frame/support/src/dispatch.rs index 4c405f1fc6166..6458c74c16192 100644 --- a/frame/support/src/dispatch.rs +++ b/frame/support/src/dispatch.rs @@ -2325,7 +2325,7 @@ macro_rules! __dispatch_impl_metadata { } } -/// Implement metadata for dispatch. +/// Implement metadata for pallet error. #[macro_export] #[doc(hidden)] macro_rules! __impl_error_metadata { From f082b5d095f823d306d4c0e8444f9d0f61744f64 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 3 Aug 2021 14:50:38 +0100 Subject: [PATCH 441/503] Remove redundant derive --- frame/election-provider-multi-phase/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 289e784039c72..35a873e3839aa 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -389,7 +389,7 @@ impl Phase { /// A configuration for the pallet to indicate what should happen in the case of a fallback i.e. /// reaching a call to `elect` with no good solution. #[cfg_attr(test, derive(Clone))] -#[derive(TypeInfo)] +// #[derive(TypeInfo)] pub enum FallbackStrategy { /// Run a on-chain sequential phragmen. 
/// From 5b1297d2668d15c22277db630082ca724d39568a Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 3 Aug 2021 14:56:23 +0100 Subject: [PATCH 442/503] Simplify scale-info dependency --- utils/frame/rpc/support/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/frame/rpc/support/Cargo.toml b/utils/frame/rpc/support/Cargo.toml index c5c6030647f00..a250eb52bbc1c 100644 --- a/utils/frame/rpc/support/Cargo.toml +++ b/utils/frame/rpc/support/Cargo.toml @@ -23,5 +23,5 @@ sc-rpc-api = { version = "0.10.0-dev", path = "../../../../client/rpc-api" } [dev-dependencies] frame-system = { version = "4.0.0-dev", path = "../../../../frame/system" } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = "0.10.0" tokio = "0.2" From 2b91aa8a9c7aa2a0a6a9bf41b1394323ee997c89 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 10:14:03 +0100 Subject: [PATCH 443/503] Strip underscore prefix from call variant struct names --- bin/node/executor/tests/basic.rs | 2 +- bin/node/executor/tests/fees.rs | 2 +- bin/node/executor/tests/submit_transaction.rs | 2 +- bin/node/test-runner-example/src/lib.rs | 2 +- bin/node/testing/src/bench.rs | 2 +- frame/collective/src/benchmarking.rs | 22 ++++----- frame/collective/src/lib.rs | 2 +- frame/contracts/src/wasm/mod.rs | 2 +- frame/example-offchain-worker/src/lib.rs | 10 ++-- frame/example-offchain-worker/src/tests.rs | 6 +-- frame/executive/src/lib.rs | 2 +- frame/im-online/src/benchmarking.rs | 4 +- frame/im-online/src/lib.rs | 4 +- frame/im-online/src/tests.rs | 2 +- frame/lottery/src/benchmarking.rs | 10 ++-- frame/lottery/src/tests.rs | 8 ++-- frame/multisig/src/benchmarking.rs | 4 +- frame/proxy/src/benchmarking.rs | 10 ++-- frame/sudo/src/tests.rs | 2 +- .../procedural/src/pallet/expand/call.rs | 46 +++++++++++++++++-- frame/utility/src/benchmarking.rs | 6 +-- 21 files changed, 93 insertions(+), 57 deletions(-) diff --git 
a/bin/node/executor/tests/basic.rs b/bin/node/executor/tests/basic.rs index 1a7a4cbf6e4f0..0093cf64e84d7 100644 --- a/bin/node/executor/tests/basic.rs +++ b/bin/node/executor/tests/basic.rs @@ -169,7 +169,7 @@ fn block_with_size(time: u64, nonce: u32, size: usize) -> (Vec, Hash) { }, CheckedExtrinsic { signed: Some((alice(), signed_extra(nonce, 0))), - function: Call::System(frame_system::Call::remark { _remark: vec![0; size] }), + function: Call::System(frame_system::Call::remark { remark: vec![0; size] }), }, ], (time * 1000 / SLOT_DURATION).into(), diff --git a/bin/node/executor/tests/fees.rs b/bin/node/executor/tests/fees.rs index ff477a51b557f..0e4abb98e7d46 100644 --- a/bin/node/executor/tests/fees.rs +++ b/bin/node/executor/tests/fees.rs @@ -81,7 +81,7 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(1, 0))), - function: Call::System(frame_system::Call::remark { _remark: vec![0; 1] }), + function: Call::System(frame_system::Call::remark { remark: vec![0; 1] }), }, ], (time2 / SLOT_DURATION).into(), diff --git a/bin/node/executor/tests/submit_transaction.rs b/bin/node/executor/tests/submit_transaction.rs index b611cee365b0f..78b51a6d2b093 100644 --- a/bin/node/executor/tests/submit_transaction.rs +++ b/bin/node/executor/tests/submit_transaction.rs @@ -43,7 +43,7 @@ fn should_submit_unsigned_transaction() { }; let call = - pallet_im_online::Call::heartbeat { heartbeat: heartbeat_data, _signature: signature }; + pallet_im_online::Call::heartbeat { heartbeat: heartbeat_data, signature }; SubmitTransaction::>::submit_unsigned_transaction( call.into(), ) diff --git a/bin/node/test-runner-example/src/lib.rs b/bin/node/test-runner-example/src/lib.rs index 539626d71d9c1..37a69d3a28d2c 100644 --- a/bin/node/test-runner-example/src/lib.rs +++ b/bin/node/test-runner-example/src/lib.rs @@ -108,7 +108,7 @@ mod tests { let alice = MultiSigner::from(Alice.public()).into_account(); let _hash = node 
.submit_extrinsic( - frame_system::Call::remark { _remark: (b"hello world").to_vec() }, + frame_system::Call::remark { remark: (b"hello world").to_vec() }, alice, ) .await diff --git a/bin/node/testing/src/bench.rs b/bin/node/testing/src/bench.rs index f006a8aa7ccc2..a58770117238d 100644 --- a/bin/node/testing/src/bench.rs +++ b/bin/node/testing/src/bench.rs @@ -311,7 +311,7 @@ impl<'a> Iterator for BlockContentIterator<'a> { value: 100 * DOLLARS - (node_runtime::ExistentialDeposit::get() - 1), }) }, - BlockType::Noop => Call::System(SystemCall::remark { _remark: Vec::new() }), + BlockType::Noop => Call::System(SystemCall::remark { remark: Vec::new() }), }, }, self.runtime_version.spec_version, diff --git a/frame/collective/src/benchmarking.rs b/frame/collective/src/benchmarking.rs index f102ca129e022..a5b5246ca20aa 100644 --- a/frame/collective/src/benchmarking.rs +++ b/frame/collective/src/benchmarking.rs @@ -66,7 +66,7 @@ benchmarks_instance! { let length = 100; for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; length] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; length] }.into(); Collective::::propose( SystemOrigin::Signed(last_old_member.clone()).into(), threshold, @@ -122,7 +122,7 @@ benchmarks_instance! { Collective::::set_members(SystemOrigin::Root.into(), members, None, T::MaxMembers::get())?; - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![1; b as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![1; b as usize] }.into(); }: _(SystemOrigin::Signed(caller), Box::new(proposal.clone()), bytes_in_storage) verify { @@ -152,7 +152,7 @@ benchmarks_instance! 
{ Collective::::set_members(SystemOrigin::Root.into(), members, None, T::MaxMembers::get())?; - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![1; b as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![1; b as usize] }.into(); let threshold = 1; }: propose(SystemOrigin::Signed(caller), threshold, Box::new(proposal.clone()), bytes_in_storage) @@ -186,7 +186,7 @@ benchmarks_instance! { // Add previous proposals. for i in 0 .. p - 1 { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -197,7 +197,7 @@ benchmarks_instance! { assert_eq!(Collective::::proposals().len(), (p - 1) as usize); - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![p as u8; b as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![p as u8; b as usize] }.into(); }: propose(SystemOrigin::Signed(caller.clone()), threshold, Box::new(proposal.clone()), bytes_in_storage) verify { @@ -234,7 +234,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(proposer.clone()).into(), threshold, @@ -309,7 +309,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. 
p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; bytes as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; bytes as usize] }.into(); Collective::::propose( SystemOrigin::Signed(proposer.clone()).into(), threshold, @@ -386,7 +386,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -472,7 +472,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; bytes as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; bytes as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -544,7 +544,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, @@ -615,7 +615,7 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. 
p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { _remark: vec![i as u8; b as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; b as usize] }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index 65b6b02d7a6de..ff61840539cf5 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -1103,7 +1103,7 @@ mod tests { } fn make_proposal(value: u64) -> Call { - Call::System(frame_system::Call::remark { _remark: value.encode() }) + Call::System(frame_system::Call::remark { remark: value.encode() }) } #[test] diff --git a/frame/contracts/src/wasm/mod.rs b/frame/contracts/src/wasm/mod.rs index 34f2089d40ba3..2e263dea2b274 100644 --- a/frame/contracts/src/wasm/mod.rs +++ b/frame/contracts/src/wasm/mod.rs @@ -2031,7 +2031,7 @@ mod tests { #[cfg(feature = "unstable-interface")] fn call_runtime_works() { use std::convert::TryInto; - let call = Call::System(frame_system::Call::remark { _remark: b"Hello World".to_vec() }); + let call = Call::System(frame_system::Call::remark { remark: b"Hello World".to_vec() }); let mut ext = MockExt::default(); let result = execute(CODE_CALL_RUNTIME, call.encode(), &mut ext).unwrap(); assert_eq!(*ext.runtime_calls.borrow(), vec![call]); diff --git a/frame/example-offchain-worker/src/lib.rs b/frame/example-offchain-worker/src/lib.rs index 1acc125383eec..3584240efbdfe 100644 --- a/frame/example-offchain-worker/src/lib.rs +++ b/frame/example-offchain-worker/src/lib.rs @@ -295,7 +295,7 @@ pub mod pallet { // Firstly let's check that we call the right function. 
if let Call::submit_price_unsigned_with_signed_payload { price_payload: ref payload, - _signature: ref signature, + ref signature, } = call { let signature_valid = @@ -305,7 +305,7 @@ pub mod pallet { } Self::validate_transaction_parameters(&payload.block_number, &payload.price) } else if let Call::submit_price_unsigned { - _block_number: block_number, + block_number, price: new_price, } = call { @@ -477,7 +477,7 @@ impl Pallet { // Received price is wrapped into a call to `submit_price_unsigned` public function of this // pallet. This means that the transaction, when executed, will simply call that function // passing `price` as an argument. - let call = Call::submit_price_unsigned { _block_number: block_number, price }; + let call = Call::submit_price_unsigned { block_number, price }; // Now let's create a transaction out of this call and submit it to the pool. // Here we showcase two ways to send an unsigned transaction / unsigned payload (raw) @@ -514,7 +514,7 @@ impl Pallet { |account| PricePayload { price, block_number, public: account.public.clone() }, |payload, signature| Call::submit_price_unsigned_with_signed_payload { price_payload: payload, - _signature: signature, + signature, }, ) .ok_or("No local accounts accounts available.")?; @@ -544,7 +544,7 @@ impl Pallet { |account| PricePayload { price, block_number, public: account.public.clone() }, |payload, signature| Call::submit_price_unsigned_with_signed_payload { price_payload: payload, - _signature: signature, + signature, }, ); for (_account_id, result) in transaction_results.into_iter() { diff --git a/frame/example-offchain-worker/src/tests.rs b/frame/example-offchain-worker/src/tests.rs index 36f69a2d76c82..6e90bb10b70ee 100644 --- a/frame/example-offchain-worker/src/tests.rs +++ b/frame/example-offchain-worker/src/tests.rs @@ -275,7 +275,7 @@ fn should_submit_unsigned_transaction_on_chain_for_any_account() { assert_eq!(tx.signature, None); if let 
Call::Example(crate::Call::submit_price_unsigned_with_signed_payload { price_payload: body, - _signature: signature, + signature, }) = tx.call { assert_eq!(body, price_payload); @@ -335,7 +335,7 @@ fn should_submit_unsigned_transaction_on_chain_for_all_accounts() { assert_eq!(tx.signature, None); if let Call::Example(crate::Call::submit_price_unsigned_with_signed_payload { price_payload: body, - _signature: signature, + signature, }) = tx.call { assert_eq!(body, price_payload); @@ -375,7 +375,7 @@ fn should_submit_raw_unsigned_transaction_on_chain() { assert_eq!(tx.signature, None); assert_eq!( tx.call, - Call::Example(crate::Call::submit_price_unsigned { _block_number: 1, price: 15523 }) + Call::Example(crate::Call::submit_price_unsigned { block_number: 1, price: 15523 }) ); }); } diff --git a/frame/executive/src/lib.rs b/frame/executive/src/lib.rs index a7ad3847b2048..da8ba79685cb6 100644 --- a/frame/executive/src/lib.rs +++ b/frame/executive/src/lib.rs @@ -1091,7 +1091,7 @@ mod tests { id, &1, 110, lock, ); let xt = TestXt::new( - Call::System(SystemCall::remark { _remark: vec![1u8] }), + Call::System(SystemCall::remark { remark: vec![1u8] }), sign_extra(1, 0, 0), ); let weight = xt.get_dispatch_info().weight + diff --git a/frame/im-online/src/benchmarking.rs b/frame/im-online/src/benchmarking.rs index e958ac78bfc85..4b67506d5c0b5 100644 --- a/frame/im-online/src/benchmarking.rs +++ b/frame/im-online/src/benchmarking.rs @@ -80,7 +80,7 @@ benchmarks! { let k in 1 .. MAX_KEYS; let e in 1 .. MAX_EXTERNAL_ADDRESSES; let (input_heartbeat, signature) = create_heartbeat::(k, e)?; - let call = Call::heartbeat { heartbeat: input_heartbeat, _signature: signature }; + let call = Call::heartbeat { heartbeat: input_heartbeat, signature }; }: { ImOnline::::validate_unsigned(TransactionSource::InBlock, &call)?; } @@ -89,7 +89,7 @@ benchmarks! { let k in 1 .. MAX_KEYS; let e in 1 .. 
MAX_EXTERNAL_ADDRESSES; let (input_heartbeat, signature) = create_heartbeat::(k, e)?; - let call = Call::heartbeat { heartbeat: input_heartbeat, _signature: signature }; + let call = Call::heartbeat { heartbeat: input_heartbeat, signature }; }: { ImOnline::::validate_unsigned(TransactionSource::InBlock, &call)?; call.dispatch_bypass_filter(RawOrigin::None.into())?; diff --git a/frame/im-online/src/lib.rs b/frame/im-online/src/lib.rs index 80462b85b00f3..0ef1630feb876 100644 --- a/frame/im-online/src/lib.rs +++ b/frame/im-online/src/lib.rs @@ -455,7 +455,7 @@ pub mod pallet { type Call = Call; fn validate_unsigned(_source: TransactionSource, call: &Self::Call) -> TransactionValidity { - if let Call::heartbeat { heartbeat, _signature: signature } = call { + if let Call::heartbeat { heartbeat, signature } = call { if >::is_online(heartbeat.authority_index) { // we already received a heartbeat for this authority return InvalidTransaction::Stale.into() @@ -637,7 +637,7 @@ impl Pallet { let signature = key.sign(&heartbeat.encode()).ok_or(OffchainErr::FailedSigning)?; - Ok(Call::heartbeat { heartbeat, _signature: signature }) + Ok(Call::heartbeat { heartbeat, signature }) }; if Self::is_online(authority_index) { diff --git a/frame/im-online/src/tests.rs b/frame/im-online/src/tests.rs index 72470de3475a3..bb2c4c7cae548 100644 --- a/frame/im-online/src/tests.rs +++ b/frame/im-online/src/tests.rs @@ -132,7 +132,7 @@ fn heartbeat( ImOnline::pre_dispatch(&crate::Call::heartbeat { heartbeat: heartbeat.clone(), - _signature: signature.clone(), + signature: signature.clone(), }) .map_err(|e| match e { TransactionValidityError::Invalid(InvalidTransaction::Custom(INVALID_VALIDATORS_LEN)) => diff --git a/frame/lottery/src/benchmarking.rs b/frame/lottery/src/benchmarking.rs index ce6463c90b3da..3b7035c72deb0 100644 --- a/frame/lottery/src/benchmarking.rs +++ b/frame/lottery/src/benchmarking.rs @@ -39,7 +39,7 @@ fn setup_lottery(repeat: bool) -> Result<(), &'static str> { 
T::MaxCalls::get().saturating_sub(1) as usize ]; // Last call will be the match for worst case scenario. - calls.push(frame_system::Call::::remark { _remark: vec![] }.into()); + calls.push(frame_system::Call::::remark { remark: vec![] }.into()); let origin = T::ManagerOrigin::successful_origin(); Lottery::::set_calls(origin.clone(), calls)?; Lottery::::start_lottery(origin, price, length, delay, repeat)?; @@ -64,7 +64,7 @@ benchmarks! { ); Participants::::insert(&caller, already_called); - let call = frame_system::Call::::remark { _remark: vec![] }; + let call = frame_system::Call::::remark { remark: vec![] }; }: _(RawOrigin::Signed(caller), Box::new(call.into())) verify { assert_eq!(TicketsCount::::get(), 1); @@ -72,7 +72,7 @@ benchmarks! { set_calls { let n in 0 .. T::MaxCalls::get() as u32; - let calls = vec![frame_system::Call::::remark { _remark: vec![] }.into(); n as usize]; + let calls = vec![frame_system::Call::::remark { remark: vec![] }.into(); n as usize]; let call = Call::::set_calls { calls }; let origin = T::ManagerOrigin::successful_origin(); @@ -115,7 +115,7 @@ benchmarks! { let lottery_account = Lottery::::account_id(); T::Currency::make_free_balance_be(&lottery_account, T::Currency::minimum_balance() * 10u32.into()); // Buy a ticket - let call = frame_system::Call::::remark { _remark: vec![] }; + let call = frame_system::Call::::remark { remark: vec![] }; Lottery::::buy_ticket(RawOrigin::Signed(winner.clone()).into(), Box::new(call.into()))?; // Kill user account for worst case T::Currency::make_free_balance_be(&winner, 0u32.into()); @@ -146,7 +146,7 @@ benchmarks! 
{ let lottery_account = Lottery::::account_id(); T::Currency::make_free_balance_be(&lottery_account, T::Currency::minimum_balance() * 10u32.into()); // Buy a ticket - let call = frame_system::Call::::remark { _remark: vec![] }; + let call = frame_system::Call::::remark { remark: vec![] }; Lottery::::buy_ticket(RawOrigin::Signed(winner.clone()).into(), Box::new(call.into()))?; // Kill user account for worst case T::Currency::make_free_balance_be(&winner, 0u32.into()); diff --git a/frame/lottery/src/tests.rs b/frame/lottery/src/tests.rs index d228b57f5112b..c2919835ce382 100644 --- a/frame/lottery/src/tests.rs +++ b/frame/lottery/src/tests.rs @@ -106,7 +106,7 @@ fn set_calls_works() { let too_many_calls = vec![ Call::Balances(BalancesCall::force_transfer { source: 0, dest: 0, value: 0 }), Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), - Call::System(SystemCall::remark { _remark: vec![] }), + Call::System(SystemCall::remark { remark: vec![] }), ]; assert_noop!( @@ -182,7 +182,7 @@ fn buy_ticket_works_as_simple_passthrough() { // User can call other txs, but doesn't get a ticket let remark_call = - Box::new(Call::System(SystemCall::remark { _remark: b"hello, world!".to_vec() })); + Box::new(Call::System(SystemCall::remark { remark: b"hello, world!".to_vec() })); assert_ok!(Lottery::buy_ticket(Origin::signed(2), remark_call)); assert_eq!(TicketsCount::::get(), 0); @@ -198,7 +198,7 @@ fn buy_ticket_works() { new_test_ext().execute_with(|| { // Set calls for the lottery. 
let calls = vec![ - Call::System(SystemCall::remark { _remark: vec![] }), + Call::System(SystemCall::remark { remark: vec![] }), Call::Balances(BalancesCall::transfer { dest: 0, value: 0 }), ]; assert_ok!(Lottery::set_calls(Origin::root(), calls)); @@ -223,7 +223,7 @@ fn buy_ticket_works() { // Buy ticket for remark let call = - Box::new(Call::System(SystemCall::remark { _remark: b"hello, world!".to_vec() })); + Box::new(Call::System(SystemCall::remark { remark: b"hello, world!".to_vec() })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); assert_eq!(TicketsCount::::get(), 2); diff --git a/frame/multisig/src/benchmarking.rs b/frame/multisig/src/benchmarking.rs index 53f13dc0ee08f..c2fec637e0532 100644 --- a/frame/multisig/src/benchmarking.rs +++ b/frame/multisig/src/benchmarking.rs @@ -41,7 +41,7 @@ fn setup_multi(s: u32, z: u32) -> Result<(Vec, Vec) signatories.sort(); // Must first convert to outer call type. let call: ::Call = - frame_system::Call::::remark { _remark: vec![0; z as usize] }.into(); + frame_system::Call::::remark { remark: vec![0; z as usize] }.into(); let call_data = call.encode(); return Ok((signatories, call_data)) } @@ -52,7 +52,7 @@ benchmarks! { let z in 0 .. 10_000; let max_signatories = T::MaxSignatories::get().into(); let (mut signatories, _) = setup_multi::(max_signatories, z)?; - let call: ::Call = frame_system::Call::::remark { _remark: vec![0; z as usize] }.into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![0; z as usize] }.into(); let call_hash = call.using_encoded(blake2_256); let multi_account_id = Multisig::::multi_account_id(&signatories, 1); let caller = signatories.pop().ok_or("signatories should have len 2 or more")?; diff --git a/frame/proxy/src/benchmarking.rs b/frame/proxy/src/benchmarking.rs index d76335c285ba5..e66f6782c19e1 100644 --- a/frame/proxy/src/benchmarking.rs +++ b/frame/proxy/src/benchmarking.rs @@ -83,7 +83,7 @@ benchmarks! 
{ T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); }: _(RawOrigin::Signed(caller), real, Some(T::ProxyType::default()), Box::new(call)) verify { assert_last_event::(Event::ProxyExecuted(Ok(())).into()) @@ -98,7 +98,7 @@ benchmarks! { T::Currency::make_free_balance_be(&delegate, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); Proxy::::announce( RawOrigin::Signed(delegate.clone()).into(), real.clone(), @@ -118,7 +118,7 @@ benchmarks! { T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); Proxy::::announce( RawOrigin::Signed(caller.clone()).into(), real.clone(), @@ -139,7 +139,7 @@ benchmarks! { T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); // ... and "real" is the traditional caller. This is not a typo. let real: T::AccountId = whitelisted_caller(); - let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); Proxy::::announce( RawOrigin::Signed(caller.clone()).into(), real.clone(), @@ -161,7 +161,7 @@ benchmarks! { // ... and "real" is the traditional caller. This is not a typo. 
let real: T::AccountId = whitelisted_caller(); add_announcements::(a, Some(caller.clone()), None)?; - let call: ::Call = frame_system::Call::::remark { _remark: vec![] }.into(); + let call: ::Call = frame_system::Call::::remark { remark: vec![] }.into(); let call_hash = T::CallHasher::hash_of(&call); }: _(RawOrigin::Signed(caller.clone()), real.clone(), call_hash) verify { diff --git a/frame/sudo/src/tests.rs b/frame/sudo/src/tests.rs index d19d24e389f4b..1b573e3576771 100644 --- a/frame/sudo/src/tests.rs +++ b/frame/sudo/src/tests.rs @@ -81,7 +81,7 @@ fn sudo_unchecked_weight_basics() { // Controls the dispatched weight. let call = Box::new(Call::Logger(LoggerCall::privileged_i32_log { i: 42, weight: 1 })); - let sudo_unchecked_weight_call = SudoCall::sudo_unchecked_weight { call, _weight: 1_000 }; + let sudo_unchecked_weight_call = SudoCall::sudo_unchecked_weight { call, weight: 1_000 }; let info = sudo_unchecked_weight_call.get_dispatch_info(); assert_eq!(info.weight, 1_000); }); diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 043246dc37e66..1c72ed9659792 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -60,6 +60,42 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { .map(|method| method.args.iter().map(|(_, name, _)| name.clone()).collect::>()) .collect::>(); + let args_name_stripped = methods + .iter() + .map(|method| { + method + .args + .iter() + .map(|(_, name, _)| { + syn::Ident::new(&name.to_string().trim_start_matches('_'), name.span()) + }) + .collect::>() + }) + .collect::>(); + + let make_args_name_pattern = |ref_tok| { + args_name + .iter() + .zip(args_name_stripped.iter()) + .map(|(args_name, args_name_stripped)| { + args_name + .iter() + .zip(args_name_stripped) + .map(|(args_name, args_name_stripped)| { + if args_name == args_name_stripped { + quote::quote!( #ref_tok #args_name ) + } 
else { + quote::quote!( #args_name_stripped: #ref_tok #args_name ) + } + }) + .collect::>() + }) + .collect::>() + }; + + let args_name_pattern = make_args_name_pattern(None); + let args_name_pattern_ref = make_args_name_pattern(Some(quote::quote!(ref))); + let args_type = methods .iter() .map(|method| method.args.iter().map(|(_, _, type_)| type_.clone()).collect::>()) @@ -139,7 +175,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #( #( #[doc = #fn_doc] )* #fn_name { - #( #args_compact_attr #args_name: #args_type ),* + #( #args_compact_attr #args_name_stripped: #args_type ),* }, )* } @@ -148,10 +184,10 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #( #[doc = #new_call_variant_doc] pub fn #new_call_variant_fn_name( - #( #args_name: #args_type ),* + #( #args_name_stripped: #args_type ),* ) -> Self { Self::#fn_name { - #( #args_name ),* + #( #args_name_stripped ),* } } )* @@ -164,7 +200,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { match *self { #( - Self::#fn_name { #( ref #args_name, )* } => { + Self::#fn_name { #( #args_name_pattern_ref, )* } => { let __pallet_base_weight = #fn_weight; let __pallet_weight = < @@ -219,7 +255,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { ) -> #frame_support::dispatch::DispatchResultWithPostInfo { match self { #( - Self::#fn_name { #( #args_name, )* } => { + Self::#fn_name { #( #args_name_pattern, )* } => { #frame_support::sp_tracing::enter_span!( #frame_support::sp_tracing::trace_span!(stringify!(#fn_name)) ); diff --git a/frame/utility/src/benchmarking.rs b/frame/utility/src/benchmarking.rs index 3b9cce7162bb3..210a6156499cf 100644 --- a/frame/utility/src/benchmarking.rs +++ b/frame/utility/src/benchmarking.rs @@ -34,7 +34,7 @@ benchmarks! { let c in 0 .. 1000; let mut calls: Vec<::Call> = Vec::new(); for i in 0 .. 
c { - let call = frame_system::Call::remark { _remark: vec![] }.into(); + let call = frame_system::Call::remark { remark: vec![] }.into(); calls.push(call); } let caller = whitelisted_caller(); @@ -45,7 +45,7 @@ benchmarks! { as_derivative { let caller = account("caller", SEED, SEED); - let call = Box::new(frame_system::Call::remark { _remark: vec![] }.into()); + let call = Box::new(frame_system::Call::remark { remark: vec![] }.into()); // Whitelist caller account from further DB operations. let caller_key = frame_system::Account::::hashed_key_for(&caller); frame_benchmarking::benchmarking::add_to_whitelist(caller_key.into()); @@ -55,7 +55,7 @@ benchmarks! { let c in 0 .. 1000; let mut calls: Vec<::Call> = Vec::new(); for i in 0 .. c { - let call = frame_system::Call::remark { _remark: vec![] }.into(); + let call = frame_system::Call::remark { remark: vec![] }.into(); calls.push(call); } let caller = whitelisted_caller(); From 03f43ba391f1c07f5118b00a5af8a6f4fc3a92bb Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 10:38:05 +0100 Subject: [PATCH 444/503] Another underscore field --- frame/utility/src/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index 02cb623bebc53..22eab5356e717 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -349,7 +349,7 @@ fn batch_early_exit_works() { fn batch_weight_calculation_doesnt_overflow() { use sp_runtime::Perbill; new_test_ext().execute_with(|| { - let big_call = Call::System(SystemCall::fill_block { _ratio: Perbill::from_percent(50) }); + let big_call = Call::System(SystemCall::fill_block { ratio: Perbill::from_percent(50) }); assert_eq!(big_call.get_dispatch_info().weight, Weight::max_value() / 2); // 3 * 50% saturates to 100% From eced48b6b23fe5d34e8c0dfa7aaa168076568501 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 10:45:54 +0100 Subject: [PATCH 445/503] More underscore fields --- 
bin/node/executor/tests/submit_transaction.rs | 3 +-- frame/example-offchain-worker/src/lib.rs | 6 +----- frame/lottery/src/tests.rs | 3 +-- frame/support/test/tests/pallet.rs | 18 +++++++++--------- frame/support/test/tests/pallet_instance.rs | 8 ++++---- 5 files changed, 16 insertions(+), 22 deletions(-) diff --git a/bin/node/executor/tests/submit_transaction.rs b/bin/node/executor/tests/submit_transaction.rs index 78b51a6d2b093..19ca8e5677c43 100644 --- a/bin/node/executor/tests/submit_transaction.rs +++ b/bin/node/executor/tests/submit_transaction.rs @@ -42,8 +42,7 @@ fn should_submit_unsigned_transaction() { validators_len: 0, }; - let call = - pallet_im_online::Call::heartbeat { heartbeat: heartbeat_data, signature }; + let call = pallet_im_online::Call::heartbeat { heartbeat: heartbeat_data, signature }; SubmitTransaction::>::submit_unsigned_transaction( call.into(), ) diff --git a/frame/example-offchain-worker/src/lib.rs b/frame/example-offchain-worker/src/lib.rs index 3584240efbdfe..70e2c4b277441 100644 --- a/frame/example-offchain-worker/src/lib.rs +++ b/frame/example-offchain-worker/src/lib.rs @@ -304,11 +304,7 @@ pub mod pallet { return InvalidTransaction::BadProof.into() } Self::validate_transaction_parameters(&payload.block_number, &payload.price) - } else if let Call::submit_price_unsigned { - block_number, - price: new_price, - } = call - { + } else if let Call::submit_price_unsigned { block_number, price: new_price } = call { Self::validate_transaction_parameters(block_number, new_price) } else { InvalidTransaction::Call.into() diff --git a/frame/lottery/src/tests.rs b/frame/lottery/src/tests.rs index c2919835ce382..623beea4a6b5b 100644 --- a/frame/lottery/src/tests.rs +++ b/frame/lottery/src/tests.rs @@ -222,8 +222,7 @@ fn buy_ticket_works() { assert_eq!(TicketsCount::::get(), 1); // Buy ticket for remark - let call = - Box::new(Call::System(SystemCall::remark { remark: b"hello, world!".to_vec() })); + let call = 
Box::new(Call::System(SystemCall::remark { remark: b"hello, world!".to_vec() })); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); assert_eq!(TicketsCount::::get(), 2); diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 855cae9577fcf..9d5c1821b1c2e 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -389,7 +389,7 @@ pub mod pallet { fn check_inherent(call: &Self::Call, _: &InherentData) -> Result<(), Self::Error> { match call { Call::foo_no_post_info {} => Ok(()), - Call::foo { _foo: 0, _bar: 0 } => Err(InherentError::Fatal), + Call::foo { foo: 0, bar: 0 } => Err(InherentError::Fatal), Call::foo { .. } => Ok(()), _ => unreachable!("other calls are not inherents"), } @@ -578,7 +578,7 @@ fn transactional_works() { #[test] fn call_expand() { - let call_foo = pallet::Call::::foo { _foo: 3, _bar: 0 }; + let call_foo = pallet::Call::::foo { foo: 3, bar: 0 }; assert_eq!( call_foo.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } @@ -646,7 +646,7 @@ fn inherent_expand() { signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 0 }), + function: Call::Example(pallet::Call::foo { foo: 1, bar: 0 }), signature: None, }, ], @@ -668,7 +668,7 @@ fn inherent_expand() { signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo { _foo: 0, _bar: 0 }), + function: Call::Example(pallet::Call::foo { foo: 0, bar: 0 }), signature: None, }, ], @@ -722,7 +722,7 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 1 }), + function: Call::Example(pallet::Call::foo { foo: 1, bar: 1 }), signature: None, }, UncheckedExtrinsic { @@ -744,7 +744,7 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 1 }), + function: Call::Example(pallet::Call::foo { foo: 1, 
bar: 1 }), signature: None, }, UncheckedExtrinsic { @@ -770,11 +770,11 @@ fn inherent_expand() { ), vec![ UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 1 }), + function: Call::Example(pallet::Call::foo { foo: 1, bar: 1 }), signature: None, }, UncheckedExtrinsic { - function: Call::Example(pallet::Call::foo { _foo: 1, _bar: 0 }), + function: Call::Example(pallet::Call::foo { foo: 1, bar: 0 }), signature: Some((1, (), ())), }, UncheckedExtrinsic { @@ -817,7 +817,7 @@ fn trait_store_expand() { fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo { _foo: 3, _bar: 0 } + pallet::Call::::foo { foo: 3, bar: 0 } .dispatch_bypass_filter(None.into()) .unwrap(); assert_eq!( diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 4506e6d94e7fb..c51ee9dce09c9 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -314,7 +314,7 @@ frame_support::construct_runtime!( #[test] fn call_expand() { - let call_foo = pallet::Call::::foo { _foo: 3 }; + let call_foo = pallet::Call::::foo { foo: 3 }; assert_eq!( call_foo.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } @@ -322,7 +322,7 @@ fn call_expand() { assert_eq!(call_foo.get_call_name(), "foo"); assert_eq!(pallet::Call::::get_call_names(), &["foo", "foo_transactional"]); - let call_foo = pallet::Call::::foo { _foo: 3 }; + let call_foo = pallet::Call::::foo { foo: 3 }; assert_eq!( call_foo.get_dispatch_info(), DispatchInfo { weight: 3, class: DispatchClass::Normal, pays_fee: Pays::Yes } @@ -377,7 +377,7 @@ fn instance_expand() { fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo { _foo: 3 } + pallet::Call::::foo { foo: 3 } 
.dispatch_bypass_filter(None.into()) .unwrap(); assert_eq!( @@ -388,7 +388,7 @@ fn pallet_expand_deposit_event() { TestExternalities::default().execute_with(|| { frame_system::Pallet::::set_block_number(1); - pallet::Call::::foo { _foo: 3 } + pallet::Call::::foo { foo: 3 } .dispatch_bypass_filter(None.into()) .unwrap(); assert_eq!( From 594b0e8abf55a7a2ef6353722cbf4d26bb4db251 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 11:02:17 +0100 Subject: [PATCH 446/503] Another underscore field --- bin/node/executor/tests/fees.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/node/executor/tests/fees.rs b/bin/node/executor/tests/fees.rs index 0e4abb98e7d46..f00e5efa69953 100644 --- a/bin/node/executor/tests/fees.rs +++ b/bin/node/executor/tests/fees.rs @@ -61,7 +61,7 @@ fn fee_multiplier_increases_and_decreases_on_big_weight() { CheckedExtrinsic { signed: Some((charlie(), signed_extra(0, 0))), function: Call::System(frame_system::Call::fill_block { - _ratio: Perbill::from_percent(60), + ratio: Perbill::from_percent(60), }), }, ], From 3f01437653bc83cdfbcd35d6ec110c6d292e68e6 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 12:36:42 +0100 Subject: [PATCH 447/503] Update to frame-metadata 14.0.0-rc.2 with combined StorageEntryType::Map --- Cargo.lock | 4 +- frame/support/Cargo.toml | 2 +- .../procedural/src/storage/metadata.rs | 17 +++--- frame/support/src/lib.rs | 40 ++++++-------- frame/support/src/storage/types/double_map.rs | 33 ++++++------ frame/support/src/storage/types/map.rs | 19 +++++-- frame/support/src/storage/types/nmap.rs | 4 +- frame/support/test/tests/decl_storage.rs | 24 ++++----- frame/support/test/tests/pallet.rs | 52 ++++++++----------- frame/support/test/tests/pallet_instance.rs | 8 +-- 10 files changed, 97 insertions(+), 106 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 27ca26e942c26..872a836cb13ab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1854,9 +1854,9 @@ dependencies = [ 
[[package]] name = "frame-metadata" -version = "14.0.0-rc.1" +version = "14.0.0-rc.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3401bd6270e52e1c6c1f55c667490037f532d5013d09ae399cfe02c9ad56764" +checksum = "cb2c4a97cc93a372b141adb582d6428dc1ba1cded9ea624c4bbc40fe568cf247" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index ede168390a6d9..6c79c34d0fd58 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } -frame-metadata = { version = "14.0.0-rc.1", default-features = false, features = ["v14"] } +frame-metadata = { version = "14.0.0-rc.2", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index 8cbcc6322efee..a90e5051c5b2e 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -37,7 +37,7 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> let key = &map.key; quote! { #scrate::metadata::StorageEntryType::Map { - hasher: #scrate::metadata::#hasher, + hashers: #scrate::sp_std::vec! 
[ #scrate::metadata::#hasher ], key: #scrate::scale_info::meta_type::<#key>(), value: #scrate::scale_info::meta_type::<#value_type>(), } @@ -49,12 +49,13 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> let key1 = &map.key1; let key2 = &map.key2; quote! { - #scrate::metadata::StorageEntryType::DoubleMap { - hasher: #scrate::metadata::#hasher1, - key1: #scrate::scale_info::meta_type::<#key1>(), - key2: #scrate::scale_info::meta_type::<#key2>(), + #scrate::metadata::StorageEntryType::Map { + hashers: #scrate::sp_std::vec! [ + #scrate::metadata::#hasher1, + #scrate::metadata::#hasher2, + ], + key: #scrate::scale_info::meta_type::<(#key1, #key2)>(), value: #scrate::scale_info::meta_type::<#value_type>(), - key2_hasher: #scrate::metadata::#hasher2, } } }, @@ -66,11 +67,11 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> .map(|hasher| hasher.to_storage_hasher_struct()) .collect::>(); quote! { - #scrate::metadata::StorageEntryType::NMap { - keys: #scrate::scale_info::meta_type::<#key_tuple>(), + #scrate::metadata::StorageEntryType::Map { hashers: #scrate::sp_std::vec! 
[ #( #scrate::metadata::StorageHasher::#hashers, )* ], + key: #scrate::scale_info::meta_type::<#key_tuple>(), value: #scrate::scale_info::meta_type::<#value_type>(), } } diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index f521c48e8d2ff..eb092cd1ab4db 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1130,7 +1130,7 @@ pub mod tests { name: "Data", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hasher: StorageHasher::Twox64Concat, + hashers: vec![ StorageHasher::Twox64Concat ], key: scale_info::meta_type::(), value: scale_info::meta_type::(), }, @@ -1141,7 +1141,7 @@ pub mod tests { name: "OptionLinkedMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![ StorageHasher::Blake2_128Concat ], key: scale_info::meta_type::(), value: scale_info::meta_type::(), }, @@ -1152,7 +1152,7 @@ pub mod tests { name: "GenericData", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hasher: StorageHasher::Identity, + hashers: vec! 
[ StorageHasher::Identity ], key: scale_info::meta_type::(), value: scale_info::meta_type::(), }, @@ -1163,7 +1163,7 @@ pub mod tests { name: "GenericData2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![ StorageHasher::Blake2_128Concat ], key: scale_info::meta_type::(), value: scale_info::meta_type::(), }, @@ -1173,12 +1173,10 @@ pub mod tests { StorageEntryMetadata { name: "DataDM", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Twox64Concat, - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), + ty: StorageEntryType::Map { + hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat ], + key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::(), - key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0, 0, 0, 0, 0, 0, 0, 0], docs: vec![], @@ -1186,12 +1184,10 @@ pub mod tests { StorageEntryMetadata { name: "GenericDataDM", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), + ty: StorageEntryType::Map { + hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Identity], + key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::(), - key2_hasher: StorageHasher::Identity, }, default: vec![0, 0, 0, 0], docs: vec![], @@ -1199,12 +1195,10 @@ pub mod tests { StorageEntryMetadata { name: "GenericData2DM", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), + ty: StorageEntryType::Map { + hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::(), - key2_hasher: 
StorageHasher::Twox64Concat, }, default: vec![0], docs: vec![], @@ -1212,12 +1206,10 @@ pub mod tests { StorageEntryMetadata { name: "AppendableDM", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), + ty: StorageEntryType::Map { + hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Blake2_128Concat ], + key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::>(), - key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], docs: vec![], diff --git a/frame/support/src/storage/types/double_map.rs b/frame/support/src/storage/types/double_map.rs index 7ac6869dbbe20..a7496ae64b0d4 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -529,11 +529,9 @@ where const NAME: &'static str = Prefix::STORAGE_PREFIX; fn ty() -> StorageEntryType { - StorageEntryType::DoubleMap { - hasher: Hasher1::METADATA, - key2_hasher: Hasher2::METADATA, - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), + StorageEntryType::Map { + hashers: vec! [ Hasher1::METADATA, Hasher2::METADATA ], + key: scale_info::meta_type::<(Key1, Key2)>(), value: scale_info::meta_type::(), } } @@ -771,21 +769,22 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_matches!( - A::ty(), - StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, - .. + + let assert_map_hashers = |ty, expected_hashers| { + if let StorageEntryType::Map { hashers, .. } = ty { + assert_eq!(hashers, expected_hashers) + } else { + assert_matches!(ty, StorageEntryType::Map { .. 
}) } + }; + + assert_map_hashers( + A::ty(), + vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], ); - assert_matches!( + assert_map_hashers( AValueQueryWithAnOnEmpty::ty(), - StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, - .. - } + vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], ); assert_eq!(A::NAME, "foo"); diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index d44698440f42f..45a327bc90128 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -352,7 +352,7 @@ where fn ty() -> StorageEntryType { StorageEntryType::Map { - hasher: Hasher::METADATA, + hashers: vec![ Hasher::METADATA ], key: scale_info::meta_type::(), value: scale_info::meta_type::(), } @@ -575,13 +575,22 @@ mod test { assert_eq!(A::MODIFIER, StorageEntryModifier::Optional); assert_eq!(AValueQueryWithAnOnEmpty::MODIFIER, StorageEntryModifier::Default); - assert_matches!( + + let assert_map_hashers = |ty, expected_hashers| { + if let StorageEntryType::Map { hashers, .. } = ty { + assert_eq!(hashers, expected_hashers) + } else { + assert_matches!(ty, StorageEntryType::Map { .. }) + } + }; + + assert_map_hashers( A::ty(), - StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, .. } + vec![ StorageHasher::Blake2_128Concat ] ); - assert_matches!( + assert_map_hashers( AValueQueryWithAnOnEmpty::ty(), - StorageEntryType::Map { hasher: StorageHasher::Blake2_128Concat, .. 
} + vec![ StorageHasher::Blake2_128Concat ] ); assert_eq!(A::NAME, "foo"); assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); diff --git a/frame/support/src/storage/types/nmap.rs b/frame/support/src/storage/types/nmap.rs index 8894e3964f343..7048a69d59c2c 100755 --- a/frame/support/src/storage/types/nmap.rs +++ b/frame/support/src/storage/types/nmap.rs @@ -454,8 +454,8 @@ where const NAME: &'static str = Prefix::STORAGE_PREFIX; fn ty() -> StorageEntryType { - StorageEntryType::NMap { - keys: scale_info::meta_type::(), + StorageEntryType::Map { + key: scale_info::meta_type::(), hashers: Key::HASHER_METADATA.iter().cloned().collect(), value: scale_info::meta_type::(), } diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index 2c8011eaa7b18..bc7b37d6b263d 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ -289,12 +289,10 @@ mod tests { StorageEntryMetadata { name: "DOUBLEMAP", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), + ty: StorageEntryType::Map { + hashers: vec! [ StorageHasher::Blake2_128Concat, StorageHasher::Blake2_128Concat ], + key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::<[u8; 4]>(), - key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], docs: vec![], @@ -302,12 +300,10 @@ mod tests { StorageEntryMetadata { name: "DOUBLEMAP2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Blake2_128Concat, - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), + ty: StorageEntryType::Map { + hashers: vec! 
[ StorageHasher::Blake2_128Concat, StorageHasher::Blake2_128Concat ], + key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::<[u8; 4]>(), - key2_hasher: StorageHasher::Blake2_128Concat, }, default: vec![0], docs: vec![], @@ -339,8 +335,8 @@ mod tests { StorageEntryMetadata { name: "NMAP", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::NMap { - keys: scale_info::meta_type::<(u32, u16)>(), + ty: StorageEntryType::Map { + key: scale_info::meta_type::<(u32, u16)>(), hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], value: scale_info::meta_type::(), }, @@ -350,8 +346,8 @@ mod tests { StorageEntryMetadata { name: "NMAP2", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::NMap { - keys: scale_info::meta_type::(), + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], value: scale_info::meta_type::(), }, diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 9d5c1821b1c2e..5cde4c7bbfca6 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1072,7 +1072,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![ StorageHasher::Blake2_128Concat ], }, default: vec![4, 0], docs: vec![], @@ -1083,7 +1083,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, + hashers: vec![ StorageHasher::Twox64Concat ], }, default: vec![0], docs: vec![], @@ -1091,12 +1091,10 @@ fn metadata() { StorageEntryMetadata { name: "DoubleMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { + ty: StorageEntryType::Map { value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: 
StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, + hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ] + key: scale_info::meta_type::<(u8, u16)>(), }, default: vec![0], docs: vec![], @@ -1106,10 +1104,8 @@ fn metadata() { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::DoubleMap { value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, - key2_hasher: StorageHasher::Blake2_128Concat, + key: scale_info::meta_type::<(u16, u32)>(), + hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat ], }, default: vec![0], docs: vec![], @@ -1117,9 +1113,9 @@ fn metadata() { StorageEntryMetadata { name: "NMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: scale_info::meta_type::(), - hashers: vec![StorageHasher::Blake2_128Concat], + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), + hashers: vec![ StorageHasher::Blake2_128Concat ], value: scale_info::meta_type::(), }, default: vec![0], @@ -1128,8 +1124,8 @@ fn metadata() { StorageEntryMetadata { name: "NMap2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: scale_info::meta_type::<(u16, u32)>(), + ty: StorageEntryType::Map { + key: scale_info::meta_type::<(u16, u32)>(), hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat, @@ -1154,7 +1150,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, + hashers: vec![ StorageHasher::Twox64Concat ], }, default: vec![0], docs: vec![], @@ -1165,10 +1161,8 @@ fn metadata() { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::DoubleMap { value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: 
StorageHasher::Twox64Concat, + key: scale_info::meta_type::<(u8, u16)>(), + hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], }, default: vec![0], docs: vec![], @@ -1177,8 +1171,8 @@ fn metadata() { StorageEntryMetadata { name: "ConditionalNMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: scale_info::meta_type::<(u8, u16)>(), + ty: StorageEntryType::Map { + key: scale_info::meta_type::<(u8, u16)>(), hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat, @@ -1306,8 +1300,8 @@ fn metadata() { StorageEntryMetadata { name: "NMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: scale_info::meta_type::(), + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], value: scale_info::meta_type::(), }, @@ -1317,8 +1311,8 @@ fn metadata() { StorageEntryMetadata { name: "NMap2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: scale_info::meta_type::<(u16, u32)>(), + ty: StorageEntryType::Map { + key: scale_info::meta_type::<(u16, u32)>(), hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat, @@ -1366,8 +1360,8 @@ fn metadata() { StorageEntryMetadata { name: "ConditionalNMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: scale_info::meta_type::<(u8, u16)>(), + ty: StorageEntryType::Map { + key: scale_info::meta_type::<(u8, u16)>(), hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat, diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index c51ee9dce09c9..acd6a442b6d22 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -671,8 +671,8 @@ fn metadata() { StorageEntryMetadata { name: "NMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: 
scale_info::meta_type::(), + ty: StorageEntryType::Map { + key: scale_info::meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], value: scale_info::meta_type::(), }, @@ -682,8 +682,8 @@ fn metadata() { StorageEntryMetadata { name: "NMap2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::NMap { - keys: scale_info::meta_type::<(u16, u32)>(), + ty: StorageEntryType::Map { + key: scale_info::meta_type::<(u16, u32)>(), hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], value: scale_info::meta_type::(), }, From c740873d91b7e81ea2dce702ad3eaac9ef065e34 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 12:38:52 +0100 Subject: [PATCH 448/503] Fmt --- frame/support/src/lib.rs | 19 +++++++++++-------- frame/support/src/storage/types/double_map.rs | 6 +++--- frame/support/src/storage/types/map.rs | 9 +++------ frame/support/test/tests/decl_storage.rs | 10 ++++++++-- 4 files changed, 25 insertions(+), 19 deletions(-) diff --git a/frame/support/src/lib.rs b/frame/support/src/lib.rs index eb092cd1ab4db..d36a9a5a8a800 100644 --- a/frame/support/src/lib.rs +++ b/frame/support/src/lib.rs @@ -1130,7 +1130,7 @@ pub mod tests { name: "Data", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hashers: vec![ StorageHasher::Twox64Concat ], + hashers: vec![StorageHasher::Twox64Concat], key: scale_info::meta_type::(), value: scale_info::meta_type::(), }, @@ -1141,7 +1141,7 @@ pub mod tests { name: "OptionLinkedMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - hashers: vec![ StorageHasher::Blake2_128Concat ], + hashers: vec![StorageHasher::Blake2_128Concat], key: scale_info::meta_type::(), value: scale_info::meta_type::(), }, @@ -1152,7 +1152,7 @@ pub mod tests { name: "GenericData", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hashers: vec! 
[ StorageHasher::Identity ], + hashers: vec![StorageHasher::Identity], key: scale_info::meta_type::(), value: scale_info::meta_type::(), }, @@ -1163,7 +1163,7 @@ pub mod tests { name: "GenericData2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - hashers: vec![ StorageHasher::Blake2_128Concat ], + hashers: vec![StorageHasher::Blake2_128Concat], key: scale_info::meta_type::(), value: scale_info::meta_type::(), }, @@ -1174,7 +1174,7 @@ pub mod tests { name: "DataDM", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat ], + hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::(), }, @@ -1185,7 +1185,7 @@ pub mod tests { name: "GenericDataDM", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Identity], + hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Identity], key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::(), }, @@ -1196,7 +1196,7 @@ pub mod tests { name: "GenericData2DM", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::(), }, @@ -1207,7 +1207,10 @@ pub mod tests { name: "AppendableDM", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Blake2_128Concat ], + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Blake2_128Concat, + ], key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::>(), }, diff --git a/frame/support/src/storage/types/double_map.rs 
b/frame/support/src/storage/types/double_map.rs index a7496ae64b0d4..7750110050868 100644 --- a/frame/support/src/storage/types/double_map.rs +++ b/frame/support/src/storage/types/double_map.rs @@ -530,7 +530,7 @@ where fn ty() -> StorageEntryType { StorageEntryType::Map { - hashers: vec! [ Hasher1::METADATA, Hasher2::METADATA ], + hashers: vec![Hasher1::METADATA, Hasher2::METADATA], key: scale_info::meta_type::<(Key1, Key2)>(), value: scale_info::meta_type::(), } @@ -780,11 +780,11 @@ mod test { assert_map_hashers( A::ty(), - vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], + vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], ); assert_map_hashers( AValueQueryWithAnOnEmpty::ty(), - vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], + vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], ); assert_eq!(A::NAME, "foo"); diff --git a/frame/support/src/storage/types/map.rs b/frame/support/src/storage/types/map.rs index 45a327bc90128..a31224f15c80f 100644 --- a/frame/support/src/storage/types/map.rs +++ b/frame/support/src/storage/types/map.rs @@ -352,7 +352,7 @@ where fn ty() -> StorageEntryType { StorageEntryType::Map { - hashers: vec![ Hasher::METADATA ], + hashers: vec![Hasher::METADATA], key: scale_info::meta_type::(), value: scale_info::meta_type::(), } @@ -584,13 +584,10 @@ mod test { } }; - assert_map_hashers( - A::ty(), - vec![ StorageHasher::Blake2_128Concat ] - ); + assert_map_hashers(A::ty(), vec![StorageHasher::Blake2_128Concat]); assert_map_hashers( AValueQueryWithAnOnEmpty::ty(), - vec![ StorageHasher::Blake2_128Concat ] + vec![StorageHasher::Blake2_128Concat], ); assert_eq!(A::NAME, "foo"); assert_eq!(AValueQueryWithAnOnEmpty::default(), 97u32.encode()); diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index bc7b37d6b263d..a57309b382e73 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs 
@@ -290,7 +290,10 @@ mod tests { name: "DOUBLEMAP", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - hashers: vec! [ StorageHasher::Blake2_128Concat, StorageHasher::Blake2_128Concat ], + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Blake2_128Concat, + ], key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::<[u8; 4]>(), }, @@ -301,7 +304,10 @@ mod tests { name: "DOUBLEMAP2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - hashers: vec! [ StorageHasher::Blake2_128Concat, StorageHasher::Blake2_128Concat ], + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Blake2_128Concat, + ], key: scale_info::meta_type::<(u32, u32)>(), value: scale_info::meta_type::<[u8; 4]>(), }, From 214e8f347f6f5ff58f2b9600d7a1cb0a7b473d45 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 12:42:39 +0100 Subject: [PATCH 449/503] Revert weights formatting --- frame/node-authorization/src/weights.rs | 41 ++++++------------------- 1 file changed, 10 insertions(+), 31 deletions(-) diff --git a/frame/node-authorization/src/weights.rs b/frame/node-authorization/src/weights.rs index f4c5a8ae4c1a2..dbb7956cff967 100644 --- a/frame/node-authorization/src/weights.rs +++ b/frame/node-authorization/src/weights.rs @@ -21,10 +21,7 @@ #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{ - traits::Get, - weights::{constants::RocksDbWeight, Weight}, -}; +use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; use sp_std::marker::PhantomData; pub trait WeightInfo { @@ -40,31 +37,13 @@ pub trait WeightInfo { } impl WeightInfo for () { - fn add_well_known_node() -> Weight { - 50_000_000 - } - fn remove_well_known_node() -> Weight { - 50_000_000 - } - fn swap_well_known_node() -> Weight { - 50_000_000 - } - fn reset_well_known_nodes() -> Weight { - 50_000_000 - } - fn claim_node() -> Weight { - 50_000_000 - } - fn remove_claim() -> Weight { - 50_000_000 
- } - fn transfer_node() -> Weight { - 50_000_000 - } - fn add_connections() -> Weight { - 50_000_000 - } - fn remove_connections() -> Weight { - 50_000_000 - } + fn add_well_known_node() -> Weight { 50_000_000 } + fn remove_well_known_node() -> Weight { 50_000_000 } + fn swap_well_known_node() -> Weight { 50_000_000 } + fn reset_well_known_nodes() -> Weight { 50_000_000 } + fn claim_node() -> Weight { 50_000_000 } + fn remove_claim() -> Weight { 50_000_000 } + fn transfer_node() -> Weight { 50_000_000 } + fn add_connections() -> Weight { 50_000_000 } + fn remove_connections() -> Weight { 50_000_000 } } From 640e1d3b53ed894c95718632cb64246e0ec812b9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 13:11:56 +0100 Subject: [PATCH 450/503] Fix up some tests --- frame/support/test/tests/instance.rs | 8 ++---- frame/support/test/tests/pallet.rs | 32 +++++++++------------ frame/support/test/tests/pallet_instance.rs | 18 +++++------- 3 files changed, 23 insertions(+), 35 deletions(-) diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index 8ede4df48b32e..478607155f1aa 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -436,11 +436,9 @@ fn expected_metadata() -> PalletStorageMetadata { StorageEntryMetadata { name: "DoubleMap", modifier: StorageEntryModifier::Default, - ty: StorageEntryType::DoubleMap { - hasher: StorageHasher::Identity, - key2_hasher: StorageHasher::Identity, - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), + ty: StorageEntryType::Map { + hashers: vec![ StorageHasher::Identity, StorageHasher::Identity ], + key: scale_info::meta_type::<(u64, u64)>(), value: scale_info::meta_type::(), }, default: [0u8; 8].to_vec(), diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 5cde4c7bbfca6..0858caa8e2b74 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1102,7 +1102,7 
@@ fn metadata() { StorageEntryMetadata { name: "DoubleMap2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { + ty: StorageEntryType::Map { value: scale_info::meta_type::(), key: scale_info::meta_type::<(u16, u32)>(), hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat ], @@ -1159,7 +1159,7 @@ fn metadata() { StorageEntryMetadata { name: "ConditionalDoubleMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { + ty: StorageEntryType::Map { value: scale_info::meta_type::(), key: scale_info::meta_type::<(u8, u16)>(), hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], @@ -1255,7 +1255,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, + hashers: vec! [ StorageHasher::Blake2_128Concat ], }, default: vec![4, 0], docs: vec![], @@ -1266,7 +1266,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, + hashers: vec! 
[ StorageHasher::Twox64Concat ], }, default: vec![0], docs: vec![], @@ -1274,12 +1274,10 @@ fn metadata() { StorageEntryMetadata { name: "DoubleMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { + ty: StorageEntryType::Map { value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, + key: scale_info::meta_type::<(u8, u16)>(), + hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], }, default: vec![0], docs: vec![], @@ -1287,12 +1285,10 @@ fn metadata() { StorageEntryMetadata { name: "DoubleMap2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { + ty: StorageEntryType::Map { value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, - key2_hasher: StorageHasher::Blake2_128Concat, + key: scale_info::meta_type::<(u16, u32)>(), + hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat ], }, default: vec![0], docs: vec![], @@ -1346,12 +1342,10 @@ fn metadata() { StorageEntryMetadata { name: "ConditionalDoubleMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { + ty: StorageEntryType::Map { value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, + key: scale_info::meta_type::<(u8, u16)>(), + hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], }, default: vec![0], docs: vec![], diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index acd6a442b6d22..6d21e061ca03b 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -637,7 +637,7 @@ fn metadata() { ty: 
StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, + hashers: vec![ StorageHasher::Twox64Concat ], }, default: vec![0], docs: vec![], @@ -645,12 +645,10 @@ fn metadata() { StorageEntryMetadata { name: "DoubleMap", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { + ty: StorageEntryType::Map { value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, - key2_hasher: StorageHasher::Twox64Concat, + key: scale_info::meta_type::<(u8, u16)>(), + hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], }, default: vec![0], docs: vec![], @@ -658,12 +656,10 @@ fn metadata() { StorageEntryMetadata { name: "DoubleMap2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::DoubleMap { + ty: StorageEntryType::Map { value: scale_info::meta_type::(), - key1: scale_info::meta_type::(), - key2: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, - key2_hasher: StorageHasher::Blake2_128Concat, + key: scale_info::meta_type::<(u16, u32)>(), + hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat ], }, default: vec![0], docs: vec![], From 8c0bb8e32af4f8664a58712f254bde568d71227b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 13:14:56 +0100 Subject: [PATCH 451/503] Fix up some tests for StorageEntryTypeMetadata --- frame/support/test/tests/decl_storage.rs | 12 +++--- frame/support/test/tests/instance.rs | 4 +- frame/support/test/tests/pallet.rs | 42 +++++++++++++++------ frame/support/test/tests/pallet_instance.rs | 8 ++-- 4 files changed, 42 insertions(+), 24 deletions(-) diff --git a/frame/support/test/tests/decl_storage.rs b/frame/support/test/tests/decl_storage.rs index a57309b382e73..fd9c481e3d1e2 100644 --- a/frame/support/test/tests/decl_storage.rs +++ b/frame/support/test/tests/decl_storage.rs @@ 
-224,7 +224,7 @@ mod tests { name: "MAPU32", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![StorageHasher::Blake2_128Concat], key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), }, @@ -235,7 +235,7 @@ mod tests { name: "PUBMAPU32", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![StorageHasher::Blake2_128Concat], key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), }, @@ -246,7 +246,7 @@ mod tests { name: "GETMAPU32", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![StorageHasher::Blake2_128Concat], key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), }, @@ -257,7 +257,7 @@ mod tests { name: "PUBGETMAPU32", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![StorageHasher::Blake2_128Concat], key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), }, @@ -268,7 +268,7 @@ mod tests { name: "GETMAPU32MYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![StorageHasher::Blake2_128Concat], key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), }, @@ -279,7 +279,7 @@ mod tests { name: "PUBGETMAPU32MYDEF", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![StorageHasher::Blake2_128Concat], key: scale_info::meta_type::(), value: scale_info::meta_type::<[u8; 4]>(), }, diff --git a/frame/support/test/tests/instance.rs b/frame/support/test/tests/instance.rs index 478607155f1aa..bd6d5eb800d05 100644 --- a/frame/support/test/tests/instance.rs +++ b/frame/support/test/tests/instance.rs @@ -426,7 +426,7 @@ fn 
expected_metadata() -> PalletStorageMetadata { name: "Map", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hasher: StorageHasher::Identity, + hashers: vec![StorageHasher::Identity], key: scale_info::meta_type::(), value: scale_info::meta_type::(), }, @@ -437,7 +437,7 @@ fn expected_metadata() -> PalletStorageMetadata { name: "DoubleMap", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - hashers: vec![ StorageHasher::Identity, StorageHasher::Identity ], + hashers: vec![StorageHasher::Identity, StorageHasher::Identity], key: scale_info::meta_type::<(u64, u64)>(), value: scale_info::meta_type::(), }, diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 0858caa8e2b74..a241df1c80792 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1072,7 +1072,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hashers: vec![ StorageHasher::Blake2_128Concat ], + hashers: vec![StorageHasher::Blake2_128Concat], }, default: vec![4, 0], docs: vec![], @@ -1083,7 +1083,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hashers: vec![ StorageHasher::Twox64Concat ], + hashers: vec![StorageHasher::Twox64Concat], }, default: vec![0], docs: vec![], @@ -1093,7 +1093,10 @@ fn metadata() { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { value: scale_info::meta_type::(), - hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ] + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], key: scale_info::meta_type::<(u8, u16)>(), }, default: vec![0], @@ -1105,7 +1108,10 @@ fn metadata() { ty: StorageEntryType::Map { value: scale_info::meta_type::(), key: scale_info::meta_type::<(u16, u32)>(), - hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat ], + 
hashers: vec![ + StorageHasher::Twox64Concat, + StorageHasher::Blake2_128Concat, + ], }, default: vec![0], docs: vec![], @@ -1115,7 +1121,7 @@ fn metadata() { modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { key: scale_info::meta_type::(), - hashers: vec![ StorageHasher::Blake2_128Concat ], + hashers: vec![StorageHasher::Blake2_128Concat], value: scale_info::meta_type::(), }, default: vec![0], @@ -1150,7 +1156,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hashers: vec![ StorageHasher::Twox64Concat ], + hashers: vec![StorageHasher::Twox64Concat], }, default: vec![0], docs: vec![], @@ -1162,7 +1168,10 @@ fn metadata() { ty: StorageEntryType::Map { value: scale_info::meta_type::(), key: scale_info::meta_type::<(u8, u16)>(), - hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], }, default: vec![0], docs: vec![], @@ -1255,7 +1264,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hashers: vec! [ StorageHasher::Blake2_128Concat ], + hashers: vec![StorageHasher::Blake2_128Concat], }, default: vec![4, 0], docs: vec![], @@ -1266,7 +1275,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hashers: vec! 
[ StorageHasher::Twox64Concat ], + hashers: vec![StorageHasher::Twox64Concat], }, default: vec![0], docs: vec![], @@ -1277,7 +1286,10 @@ fn metadata() { ty: StorageEntryType::Map { value: scale_info::meta_type::(), key: scale_info::meta_type::<(u8, u16)>(), - hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], }, default: vec![0], docs: vec![], @@ -1288,7 +1300,10 @@ fn metadata() { ty: StorageEntryType::Map { value: scale_info::meta_type::(), key: scale_info::meta_type::<(u16, u32)>(), - hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat ], + hashers: vec![ + StorageHasher::Twox64Concat, + StorageHasher::Blake2_128Concat, + ], }, default: vec![0], docs: vec![], @@ -1345,7 +1360,10 @@ fn metadata() { ty: StorageEntryType::Map { value: scale_info::meta_type::(), key: scale_info::meta_type::<(u8, u16)>(), - hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat ], + hashers: vec![ + StorageHasher::Blake2_128Concat, + StorageHasher::Twox64Concat, + ], }, default: vec![0], docs: vec![], diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 6d21e061ca03b..e795dc66e30ca 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -626,7 +626,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hasher: StorageHasher::Blake2_128Concat, + hashers: vec![StorageHasher::Blake2_128Concat], }, default: vec![0], docs: vec![], @@ -637,7 +637,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hashers: vec![ StorageHasher::Twox64Concat ], + hashers: vec![StorageHasher::Twox64Concat], }, default: vec![0], docs: vec![], @@ -648,7 +648,7 @@ fn metadata() { ty: StorageEntryType::Map { value: 
scale_info::meta_type::(), key: scale_info::meta_type::<(u8, u16)>(), - hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], + hashers: vec![StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat], }, default: vec![0], docs: vec![], @@ -659,7 +659,7 @@ fn metadata() { ty: StorageEntryType::Map { value: scale_info::meta_type::(), key: scale_info::meta_type::<(u16, u32)>(), - hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat ], + hashers: vec![StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat], }, default: vec![0], docs: vec![], From 82481dde6eb9d2d034c89a5772f0150001bb2068 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 13:22:34 +0100 Subject: [PATCH 452/503] scale-info dev dependency --- frame/session/benchmarking/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index 8dc827abb3706..d49380081acc4 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -26,7 +26,7 @@ rand = { version = "0.7.2", default-features = false } [dev-dependencies] serde = { version = "1.0.126" } codec = { package = "parity-scale-codec", version = "2.0.0", features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = "0.10.0" sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } pallet-staking-reward-curve = { version = "4.0.0-dev", path = "../../staking/reward-curve" } sp-io ={ version = "4.0.0-dev", path = "../../../primitives/io" } From f615b165fad4d0f229b7fadd4d8e3835a5718a20 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 4 Aug 2021 15:18:31 +0100 Subject: [PATCH 453/503] Fix test error --- frame/support/test/tests/pallet.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 
a241df1c80792..c671082293a95 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1348,7 +1348,7 @@ fn metadata() { ty: StorageEntryType::Map { key: scale_info::meta_type::(), value: scale_info::meta_type::(), - hasher: StorageHasher::Twox64Concat, + hashers: vec![StorageHasher::Twox64Concat], }, default: vec![0], docs: vec![], From adf72eec91f43b95149e332222eed2c43a801eae Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Fri, 6 Aug 2021 12:38:47 +0100 Subject: [PATCH 454/503] Add missing TypeInfo derives --- Cargo.lock | 1 + primitives/trie/Cargo.toml | 2 ++ primitives/trie/src/storage_proof.rs | 5 +++-- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fff8cea715533..72f5880215d4c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9548,6 +9548,7 @@ dependencies = [ "hex-literal", "memory-db", "parity-scale-codec", + "scale-info", "sp-core", "sp-runtime", "sp-std", diff --git a/primitives/trie/Cargo.toml b/primitives/trie/Cargo.toml index 60356e0a8d6d3..690cf9bbf4e6a 100644 --- a/primitives/trie/Cargo.toml +++ b/primitives/trie/Cargo.toml @@ -19,6 +19,7 @@ harness = false [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } hash-db = { version = "0.15.2", default-features = false } trie-db = { version = "0.22.6", default-features = false } @@ -38,6 +39,7 @@ default = ["std"] std = [ "sp-std/std", "codec/std", + "scale-info/std", "hash-db/std", "memory-db/std", "trie-db/std", diff --git a/primitives/trie/src/storage_proof.rs b/primitives/trie/src/storage_proof.rs index b4e4b393a71ab..cc0ffa01ac265 100644 --- a/primitives/trie/src/storage_proof.rs +++ b/primitives/trie/src/storage_proof.rs @@ -17,6 +17,7 @@ use codec::{Decode, Encode}; use hash_db::{HashDB, Hasher}; +use 
scale_info::TypeInfo; use sp_std::vec::Vec; /// A proof that some set of key-value pairs are included in the storage trie. The proof contains @@ -26,13 +27,13 @@ use sp_std::vec::Vec; /// The proof consists of the set of serialized nodes in the storage trie accessed when looking up /// the keys covered by the proof. Verifying the proof requires constructing the partial trie from /// the serialized nodes and performing the key lookups. -#[derive(Debug, PartialEq, Eq, Clone, Encode, Decode)] +#[derive(Debug, PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] pub struct StorageProof { trie_nodes: Vec>, } /// Storage proof in compact form. -#[derive(Debug, PartialEq, Eq, Clone, Encode, Decode)] +#[derive(Debug, PartialEq, Eq, Clone, Encode, Decode, TypeInfo)] pub struct CompactProof { pub encoded_nodes: Vec>, } From aa185bbb5a951770066ef3bb5b3815406335f8c8 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 12:19:23 +0100 Subject: [PATCH 455/503] Add back missing scale-info dependency --- Cargo.lock | 2952 ++++++++++++++------------ frame/authority-discovery/Cargo.toml | 1 + 2 files changed, 1652 insertions(+), 1301 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 72f5880215d4c..cd3e901e2114b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -38,58 +38,39 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "aead" -version = "0.3.2" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fc95d1bdb8e6666b2b217308eeeb09f2d6728d104be3e31916cc74d15420331" +checksum = "6e3e798aa0c8239776f54415bc06f3d74b1850f3f830b45c35cfc80556973f70" dependencies = [ "generic-array 0.14.4", ] [[package]] name = "aes" -version = "0.5.0" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd2bc6d3f370b5666245ff421e231cba4353df936e26986d2918e61a8fd6aef6" +checksum = "495ee669413bfbe9e8cace80f4d3d78e6d8c8d99579f97fb93bde351b185f2d4" dependencies = [ 
- "aes-soft", - "aesni", - "block-cipher", + "cfg-if 1.0.0", + "cipher", + "cpufeatures", + "opaque-debug 0.3.0", ] [[package]] name = "aes-gcm" -version = "0.7.0" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0301c9e9c443494d970a07885e8cf3e587bae8356a1d5abd0999068413f7205f" +checksum = "b2a930fd487faaa92a30afa92cc9dd1526a5cff67124abbbb1c617ce070f4dcf" dependencies = [ "aead", "aes", - "block-cipher", + "cipher", + "ctr", "ghash", "subtle 2.4.0", ] -[[package]] -name = "aes-soft" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63dd91889c49327ad7ef3b500fd1109dbd3c509a03db0d4a9ce413b79f575cb6" -dependencies = [ - "block-cipher", - "byteorder", - "opaque-debug 0.3.0", -] - -[[package]] -name = "aesni" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6fe808308bb07d393e2ea47780043ec47683fcf19cf5efc8ca51c50cc8c68a" -dependencies = [ - "block-cipher", - "opaque-debug 0.3.0", -] - [[package]] name = "ahash" version = "0.4.7" @@ -104,7 +85,7 @@ checksum = "43bb833f0bf979d8475d38fbf09ed3b8a55e1885fe93ad3f93239fc6a4f17b98" dependencies = [ "getrandom 0.2.3", "once_cell", - "version_check", + "version_check 0.9.2", ] [[package]] @@ -269,7 +250,7 @@ dependencies = [ "fastrand", "futures-lite", "libc", - "log", + "log 0.4.14", "nb-connect", "once_cell", "parking", @@ -332,7 +313,7 @@ dependencies = [ "futures-lite", "gloo-timers", "kv-log-macro", - "log", + "log 0.4.14", "memchr", "num_cpus", "once_cell", @@ -405,7 +386,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3410529e8288c463bedb5930f82833bc0c90e5d2fe639a56582a4d09220b281" dependencies = [ - "autocfg", + "autocfg 1.0.1", ] [[package]] @@ -425,6 +406,12 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "autocfg" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2" + [[package]] name = "autocfg" version = "1.0.1" @@ -470,6 +457,25 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5024ee8015f02155eee35c711107ddd9a9bf3cb689cf2a9089c97e79b6e1ae83" +[[package]] +name = "base64" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "489d6c0ed21b11d038c31b6ceccca973e65d73ba3bd8ecb9a2babf5546164643" +dependencies = [ + "byteorder", + "safemem", +] + +[[package]] +name = "base64" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" +dependencies = [ + "byteorder", +] + [[package]] name = "base64" version = "0.12.3" @@ -503,9 +509,9 @@ dependencies = [ [[package]] name = "bindgen" -version = "0.57.0" +version = "0.59.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4865004a46a0aafb2a0a5eb19d3c9fc46ee5f063a6cfc605c69ac9ecf5263d" +checksum = "453c49e5950bb0eb63bb3df640e31618846c89d5b7faa54040d76e98e0134375" dependencies = [ "bitflags", "cexpr", @@ -522,9 +528,21 @@ dependencies = [ [[package]] name = "bitflags" -version = "1.2.1" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitvec" +version = "0.19.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693" +checksum = "8942c8d352ae1838c9dda0b0ca2ab657696ef2232a20147cf1b30ae1a9cb4321" +dependencies = [ + "funty", + "radium 0.5.3", + "tap", + "wyz", +] [[package]] name = "bitvec" @@ -533,7 +551,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f682656975d3a682daff957be4ddeb65d6ad656737cd821f2d00685ae466af1" dependencies = [ 
"funty", - "radium", + "radium 0.6.2", "tap", "wyz", ] @@ -618,15 +636,6 @@ dependencies = [ "generic-array 0.14.4", ] -[[package]] -name = "block-cipher" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f337a3e6da609650eb74e02bc9fac7b735049f7623ab12f2e4c719316fcc7e80" -dependencies = [ - "generic-array 0.14.4", -] - [[package]] name = "block-padding" version = "0.1.5" @@ -714,7 +723,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" dependencies = [ "byteorder", - "either", "iovec", ] @@ -774,7 +782,7 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b9434b9a5aa1450faa3f9cb14ea0e8c53bb5d2b3c1bfd1ab4fc03e9f33fbfb0" dependencies = [ - "rustc_version", + "rustc_version 0.2.3", ] [[package]] @@ -788,9 +796,9 @@ dependencies = [ [[package]] name = "cexpr" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27" +checksum = "db507a7679252d2276ed0dd8113c6875ec56d3089f9225b2b42c30cc1f8e5c89" dependencies = [ "nom", ] @@ -809,24 +817,26 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chacha20" -version = "0.5.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "244fbce0d47e97e8ef2f63b81d5e05882cb518c68531eb33194990d7b7e85845" +checksum = "ea8756167ea0aca10e066cdbe7813bd71d2f24e69b0bc7b50509590cef2ce0b9" dependencies = [ - "stream-cipher", + "cfg-if 1.0.0", + "cipher", + "cpufeatures", "zeroize", ] [[package]] name = "chacha20poly1305" -version = "0.6.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bf18d374d66df0c05cdddd528a7db98f78c28e2519b120855c4f84c5027b1f5" +checksum = 
"175a11316f33592cf2b71416ee65283730b5b7849813c4891d02a12906ed9acc" dependencies = [ "aead", "chacha20", + "cipher", "poly1305", - "stream-cipher", "zeroize", ] @@ -850,12 +860,10 @@ version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73" dependencies = [ - "js-sys", "libc", "num-integer", "num-traits", "time", - "wasm-bindgen", "winapi 0.3.9", ] @@ -866,15 +874,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff0e3bc0b6446b3f9663c1a6aba6ef06c5aeaa1bc92bd18077be337198ab9768" dependencies = [ "multibase", - "multihash", + "multihash 0.13.2", "unsigned-varint 0.5.1", ] [[package]] name = "cipher" -version = "0.2.5" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f8e7987cbd042a63249497f41aed09f8e65add917ea6566effbc56578d6801" +checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7" dependencies = [ "generic-array 0.14.4", ] @@ -908,7 +916,7 @@ dependencies = [ "ansi_term 0.11.0", "atty", "bitflags", - "strsim", + "strsim 0.8.0", "textwrap", "unicode-width", "vec_map", @@ -932,48 +940,22 @@ dependencies = [ "cache-padded", ] -[[package]] -name = "console_error_panic_hook" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8d976903543e0c48546a91908f21588a680a8c8f984df9a5d69feccb2b2a211" -dependencies = [ - "cfg-if 0.1.10", - "wasm-bindgen", -] - [[package]] name = "constant_time_eq" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" -[[package]] -name = "core-foundation" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57d24c7a13c43e870e37c1556b74555437870a04514f7685f5b354e090567171" -dependencies = [ - "core-foundation-sys 0.7.0", - "libc", -] - [[package]] 
name = "core-foundation" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0a89e2ae426ea83155dccf10c0fa6b1463ef6d5fcb44cee0b224a408fa640a62" dependencies = [ - "core-foundation-sys 0.8.2", + "core-foundation-sys", "libc", ] -[[package]] -name = "core-foundation-sys" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3a71ab494c0b5b860bdc8407ae08978052417070c2ced38573a9157ad75b8ac" - [[package]] name = "core-foundation-sys" version = "0.8.2" @@ -990,6 +972,15 @@ dependencies = [ "glob", ] +[[package]] +name = "cpufeatures" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66c99696f6c9dd7f35d486b9d04d7e6e202aa3e8c40d553f2fdf5e7e0c6a71ef" +dependencies = [ + "libc", +] + [[package]] name = "cpuid-bool" version = "0.1.2" @@ -997,10 +988,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8aebca1129a03dc6dc2b127edd729435bbc4a37e1d5f4d7513165089ceb02634" [[package]] -name = "cpuid-bool" -version = "0.2.0" +name = "cranelift-bforest" +version = "0.68.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcb25d077389e53838a8158c8e99174c5a9d902dee4904320db714f3c653ffba" +checksum = "9221545c0507dc08a62b2d8b5ffe8e17ac580b0a74d1813b496b8d70b070fbd0" +dependencies = [ + "cranelift-entity 0.68.0", +] [[package]] name = "cranelift-bforest" @@ -1008,7 +1002,26 @@ version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c8ca3560686e7c9c7ed7e0fe77469f2410ba5d7781b1acaa9adc8d8deea28e3e" dependencies = [ - "cranelift-entity", + "cranelift-entity 0.74.0", +] + +[[package]] +name = "cranelift-codegen" +version = "0.68.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e9936ea608b6cd176f107037f6adbb4deac933466fc7231154f96598b2d3ab1" +dependencies = [ + "byteorder", + "cranelift-bforest 0.68.0", + "cranelift-codegen-meta 
0.68.0", + "cranelift-codegen-shared 0.68.0", + "cranelift-entity 0.68.0", + "gimli 0.22.0", + "log 0.4.14", + "regalloc", + "smallvec 1.6.1", + "target-lexicon 0.11.2", + "thiserror", ] [[package]] @@ -1017,16 +1030,26 @@ version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf9bf1ffffb6ce3d2e5ebc83549bd2436426c99b31cc550d521364cbe35d276" dependencies = [ - "cranelift-bforest", - "cranelift-codegen-meta", - "cranelift-codegen-shared", - "cranelift-entity", + "cranelift-bforest 0.74.0", + "cranelift-codegen-meta 0.74.0", + "cranelift-codegen-shared 0.74.0", + "cranelift-entity 0.74.0", "gimli 0.24.0", - "log", + "log 0.4.14", "regalloc", "serde", "smallvec 1.6.1", - "target-lexicon", + "target-lexicon 0.12.0", +] + +[[package]] +name = "cranelift-codegen-meta" +version = "0.68.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ef2b2768568306540f4c8db3acce9105534d34c4a1e440529c1e702d7f8c8d7" +dependencies = [ + "cranelift-codegen-shared 0.68.0", + "cranelift-entity 0.68.0", ] [[package]] @@ -1035,10 +1058,16 @@ version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cc21936a5a6d07e23849ffe83e5c1f6f50305c074f4b2970ca50c13bf55b821" dependencies = [ - "cranelift-codegen-shared", - "cranelift-entity", + "cranelift-codegen-shared 0.74.0", + "cranelift-entity 0.74.0", ] +[[package]] +name = "cranelift-codegen-shared" +version = "0.68.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6759012d6d19c4caec95793f052613e9d4113e925e7f14154defbac0f1d4c938" + [[package]] name = "cranelift-codegen-shared" version = "0.74.0" @@ -1048,6 +1077,15 @@ dependencies = [ "serde", ] +[[package]] +name = "cranelift-entity" +version = "0.68.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86badbce14e15f52a45b666b38abe47b204969dd7f8fb7488cb55dd46b361fa6" +dependencies = [ + "serde", +] + [[package]] name = 
"cranelift-entity" version = "0.74.0" @@ -1057,16 +1095,28 @@ dependencies = [ "serde", ] +[[package]] +name = "cranelift-frontend" +version = "0.68.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b608bb7656c554d0a4cf8f50c7a10b857e80306f6ff829ad6d468a7e2323c8d8" +dependencies = [ + "cranelift-codegen 0.68.0", + "log 0.4.14", + "smallvec 1.6.1", + "target-lexicon 0.11.2", +] + [[package]] name = "cranelift-frontend" version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c31b783b351f966fce33e3c03498cb116d16d97a8f9978164a60920bd0d3a99c" dependencies = [ - "cranelift-codegen", - "log", + "cranelift-codegen 0.74.0", + "log 0.4.14", "smallvec 1.6.1", - "target-lexicon", + "target-lexicon 0.12.0", ] [[package]] @@ -1075,8 +1125,8 @@ version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a77c88d3dd48021ff1e37e978a00098524abd3513444ae252c08d37b310b3d2a" dependencies = [ - "cranelift-codegen", - "target-lexicon", + "cranelift-codegen 0.74.0", + "target-lexicon 0.12.0", ] [[package]] @@ -1085,15 +1135,15 @@ version = "0.74.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edb6d408e2da77cdbbd65466298d44c86ae71c1785d2ab0d8657753cdb4d9d89" dependencies = [ - "cranelift-codegen", - "cranelift-entity", - "cranelift-frontend", + "cranelift-codegen 0.74.0", + "cranelift-entity 0.74.0", + "cranelift-frontend 0.74.0", "itertools 0.10.0", - "log", + "log 0.4.14", "serde", "smallvec 1.6.1", "thiserror", - "wasmparser", + "wasmparser 0.78.2", ] [[package]] @@ -1179,7 +1229,7 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "058ed274caafc1f60c4997b5fc07bf7dc7cca454af7c6e81edffe5f33f70dace" dependencies = [ - "autocfg", + "autocfg 1.0.1", "cfg-if 0.1.10", "crossbeam-utils 0.7.2", "lazy_static", @@ -1218,7 +1268,7 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "c3c7c73a2d1e9fc0886a08b93e98eb643461230d5f1925e4036204d5f2e261a8" dependencies = [ - "autocfg", + "autocfg 1.0.1", "cfg-if 0.1.10", "lazy_static", ] @@ -1229,7 +1279,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7e9d99fa91428effe99c5c6d4634cdeba32b8cf784fc428a2a687f61a952c49" dependencies = [ - "autocfg", + "autocfg 1.0.1", "cfg-if 1.0.0", "lazy_static", ] @@ -1284,9 +1334,9 @@ dependencies = [ [[package]] name = "ct-logs" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c8e13110a84b6315df212c045be706af261fd364791cad863285439ebba672e" +checksum = "c1a816186fa68d9e426e3cb4ae4dff1fcd8e4a2c34b781bf7a822574a0d0aac8" dependencies = [ "sct", ] @@ -1301,6 +1351,15 @@ dependencies = [ "syn", ] +[[package]] +name = "ctr" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "049bb91fb4aaf0e3c7efa6cd5ef877dbbbd15b39dad06d9948de4ec8a75761ea" +dependencies = [ + "cipher", +] + [[package]] name = "cuckoofilter" version = "0.5.0" @@ -1338,6 +1397,41 @@ dependencies = [ "zeroize", ] +[[package]] +name = "darling" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "757c0ded2af11d8e739c4daea1ac623dd1624b06c844cf3f5a39f1bdbd99bb12" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c34d8efb62d0c2d7f60ece80f75e5c63c1588ba68032740494b0b9a996466e3" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.10.0", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ade7bff147130fe5e6d39f089c6bd49ec0250f35d70b2eebf72afdfc919f15cc" +dependencies = [ + "darling_core", + "quote", + "syn", +] + [[package]] 
name = "data-encoding" version = "2.3.2" @@ -1407,9 +1501,9 @@ dependencies = [ [[package]] name = "directories" -version = "3.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8fed639d60b58d0f53498ab13d26f621fd77569cc6edb031f4cc36a2ad9da0f" +checksum = "e69600ff1703123957937708eb27f7a564e48885c537782722ed0ba3189ce1d7" dependencies = [ "dirs-sys", ] @@ -1426,12 +1520,12 @@ dependencies = [ [[package]] name = "dirs-sys" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e93d7f5705de3e49895a2b5e0b8855a1c27f080192ae9c32a6432d50741a57a" +checksum = "03d86534ed367a67548dc68113a0f5db55432fdfbb6e6f9d77704397d95d5780" dependencies = [ "libc", - "redox_users 0.3.5", + "redox_users", "winapi 0.3.9", ] @@ -1442,7 +1536,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", - "redox_users 0.4.0", + "redox_users", "winapi 0.3.9", ] @@ -1501,6 +1595,32 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee2626afccd7561a06cf1367e2950c4718ea04565e20fb5029b6c7d8ad09abcf" +[[package]] +name = "dynasm" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdc2d9a5e44da60059bd38db2d05cbb478619541b8c79890547861ec1e3194f0" +dependencies = [ + "bitflags", + "byteorder", + "lazy_static", + "proc-macro-error 1.0.4", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "dynasmrt" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42276e3f205fe63887cca255aa9a65a63fb72764c30b9a6252a7c7e46994f689" +dependencies = [ + "byteorder", + "dynasm", + "memmap2", +] + [[package]] name = "ed25519" version = "1.0.3" @@ -1562,6 +1682,27 @@ dependencies = [ "syn", ] +[[package]] +name = "enumset" +version = "1.0.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e76129da36102af021b8e5000dab2c1c30dbef85c1e482beeff8da5dde0e0b0" +dependencies = [ + "enumset_derive", +] + +[[package]] +name = "enumset_derive" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6451128aa6655d880755345d085494cf7561a6bee7c8dc821e5d77e6d267ecd4" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "env_logger" version = "0.7.1" @@ -1570,7 +1711,7 @@ checksum = "44533bbbb3bb3c1fa17d9f2e4e38bbbaf8396ba82193c4cb1b6445d711445d36" dependencies = [ "atty", "humantime 1.3.0", - "log", + "log 0.4.14", "regex", "termcolor", ] @@ -1580,10 +1721,20 @@ name = "env_logger" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17392a012ea30ef05a610aa97dfb49496e71c9f676b27879922ea5bdf60d9d3f" +dependencies = [ + "log 0.4.14", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3" dependencies = [ "atty", "humantime 2.1.0", - "log", + "log 0.4.14", "regex", "termcolor", ] @@ -1636,29 +1787,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e43f2f1833d64e33f15592464d6fdd70f349dda7b1a53088eb83cd94014008c5" dependencies = [ - "futures 0.3.15", -] - -[[package]] -name = "failure" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" -dependencies = [ - "backtrace", - "failure_derive", -] - -[[package]] -name = "failure_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", + 
"futures 0.3.16", ] [[package]] @@ -1698,7 +1827,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fdbe0d94371f9ce939b555dd342d0686cc4c0cadbcd4b61d70af5ff97eb4126" dependencies = [ "env_logger 0.7.1", - "log", + "log 0.4.14", ] [[package]] @@ -1708,9 +1837,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c832d0ed507622c7cb98e9b7f10426850fc9d38527ab8071778dcc3a81d45875" dependencies = [ "either", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", - "log", + "log 0.4.14", "num-traits", "parity-scale-codec", "parking_lot 0.11.1", @@ -1755,6 +1884,21 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "fork-tree" version = "3.0.0" @@ -1780,11 +1924,10 @@ dependencies = [ "frame-system", "hex-literal", "linregress", - "log", + "log 0.4.14", "parity-scale-codec", "paste 1.0.4", "scale-info", - "serde", "sp-api", "sp-io", "sp-runtime", @@ -1803,7 +1946,7 @@ dependencies = [ "frame-support", "handlebars", "linked-hash-map", - "log", + "log 0.4.14", "parity-scale-codec", "sc-cli", "sc-client-db", @@ -1839,7 +1982,6 @@ dependencies = [ "frame-system", "hex-literal", "pallet-balances", - "pallet-indices", "pallet-transaction-payment", "parity-scale-codec", "scale-info", @@ -1874,7 +2016,7 @@ dependencies = [ "frame-support-procedural", "frame-system", "impl-trait-for-tuples", - "log", + "log 0.4.14", "once_cell", 
"parity-scale-codec", "parity-util-mem", @@ -1963,8 +2105,7 @@ version = "4.0.0-dev" dependencies = [ "criterion", "frame-support", - "impl-trait-for-tuples", - "log", + "log 0.4.14", "parity-scale-codec", "scale-info", "serde", @@ -1986,7 +2127,6 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -2006,7 +2146,6 @@ name = "frame-try-runtime" version = "0.10.0-dev" dependencies = [ "frame-support", - "parity-scale-codec", "sp-api", "sp-runtime", "sp-std", @@ -2076,9 +2215,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7e43a803dae2fa37c1f6a8fe121e1f7bf9548b4dfc0522a42f34145dadfc27" +checksum = "1adc00f486adfc9ce99f77d717836f0c5aa84965eb0b4f051f4e83f7cab53f8b" dependencies = [ "futures-channel", "futures-core", @@ -2091,9 +2230,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e682a68b29a882df0545c143dc3646daefe80ba479bcdede94d5a703de2871e2" +checksum = "74ed2411805f6e4e3d9bc904c95d5d423b89b3b25dc0250aa74729de20629ff9" dependencies = [ "futures-core", "futures-sink", @@ -2101,25 +2240,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0402f765d8a89a26043b889b26ce3c4679d268fa6bb22cd7c6aad98340e179d1" - -[[package]] -name = "futures-cpupool" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" -dependencies = [ - "futures 0.1.31", - "num_cpus", -] +checksum = "af51b1b4a7fdff033703db39de8802c673eb91855f2e0d47dcf3bf2c0ef01f99" [[package]] name = "futures-executor" 
-version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "badaa6a909fac9e7236d0620a2f57f7664640c56575b71a7552fbd68deafab79" +checksum = "4d0d535a57b87e1ae31437b892713aee90cd2d7b0ee48727cd11fc72ef54761c" dependencies = [ "futures-core", "futures-task", @@ -2129,9 +2258,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acc499defb3b348f8d8f3f66415835a9131856ff7714bf10dadfc4ec4bdb29a1" +checksum = "0b0e06c393068f3a6ef246c75cdca793d6a46347e75286933e5e75fd2fd11582" [[package]] name = "futures-lite" @@ -2150,11 +2279,11 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4c40298486cdf52cc00cd6d6987892ba502c7656a16a4192a9992b1ccedd121" +checksum = "c54913bae956fb8df7f4dc6fc90362aa72e69148e3f39041fbe8742d21e0ac57" dependencies = [ - "autocfg", + "autocfg 1.0.1", "proc-macro-hack", "proc-macro2", "quote", @@ -2168,21 +2297,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a1387e07917c711fb4ee4f48ea0adb04a3c9739e53ef85bf43ae1edc2937a8b" dependencies = [ "futures-io", - "rustls 0.19.1", + "rustls", "webpki", ] [[package]] name = "futures-sink" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a57bead0ceff0d6dde8f465ecd96c9338121bb7717d3e7b108059531870c4282" +checksum = "c0f30aaa67363d119812743aa5f33c201a7a66329f97d1a887022971feea4b53" [[package]] name = "futures-task" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a16bef9fc1a4dddb5bee51c989e3fbba26569cbb0e31f5b303c184e3dd33dae" +checksum = "bbe54a98670017f3be909561f6ad13e810d9a51f3f061b902062ca3da80799f2" [[package]] name = "futures-timer" @@ -2195,18 
+2324,14 @@ name = "futures-timer" version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" -dependencies = [ - "gloo-timers", - "send_wrapper", -] [[package]] name = "futures-util" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "feb5c238d27e2bf94ffdfd27b2c29e3df4a68c4193bb6427384259e2bf191967" +checksum = "67eb846bfd58e44a8481a00049e82c43e0ccb5d61f8dc071057cb19249dd4d78" dependencies = [ - "autocfg", + "autocfg 1.0.1", "futures 0.1.31", "futures-channel", "futures-core", @@ -2244,7 +2369,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817" dependencies = [ "typenum", - "version_check", + "version_check 0.9.2", ] [[package]] @@ -2254,10 +2379,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ "cfg-if 1.0.0", - "js-sys", "libc", "wasi 0.9.0+wasi-snapshot-preview1", - "wasm-bindgen", ] [[package]] @@ -2275,14 +2398,25 @@ dependencies = [ [[package]] name = "ghash" -version = "0.3.1" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97304e4cd182c3846f7575ced3890c53012ce534ad9114046b0a9e00bb30a375" +checksum = "b442c439366184de619215247d24e908912b175e824a530253845ac4c251a5c1" dependencies = [ "opaque-debug 0.3.0", "polyval", ] +[[package]] +name = "gimli" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aaf91faf136cb47367fa430cd46e37a788775e7fa104f8b4bcb3861dc389b724" +dependencies = [ + "fallible-iterator", + "indexmap", + "stable_deref_trait", +] + [[package]] name = "gimli" version = "0.23.0" @@ -2315,7 +2449,7 @@ dependencies = [ "aho-corasick", "bstr", "fnv", - "log", + "log 0.4.14", "regex", 
] @@ -2332,44 +2466,6 @@ dependencies = [ "web-sys", ] -[[package]] -name = "h2" -version = "0.1.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5b34c246847f938a410a03c5458c7fee2274436675e76d8b903c08efc29c462" -dependencies = [ - "byteorder", - "bytes 0.4.12", - "fnv", - "futures 0.1.31", - "http 0.1.21", - "indexmap", - "log", - "slab", - "string", - "tokio-io", -] - -[[package]] -name = "h2" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e4728fd124914ad25e99e3d15a9361a879f6620f63cb56bbb08f95abb97a535" -dependencies = [ - "bytes 0.5.6", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.3", - "indexmap", - "slab", - "tokio 0.2.25", - "tokio-util", - "tracing", - "tracing-futures", -] - [[package]] name = "half" version = "1.7.1" @@ -2382,7 +2478,7 @@ version = "3.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cdb0867bbc5a3da37a753e78021d5fcf8a4db00e18dd2dd90fd36e24190e162d" dependencies = [ - "log", + "log 0.4.14", "pest", "pest_derive", "quick-error 2.0.0", @@ -2481,13 +2577,13 @@ dependencies = [ [[package]] name = "hmac-drbg" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6e570451493f10f6581b48cdd530413b63ea9e780f544bfd3bdcaa0d89d1a7b" +checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" dependencies = [ - "digest 0.8.1", - "generic-array 0.12.4", - "hmac 0.7.1", + "digest 0.9.0", + "generic-array 0.14.4", + "hmac 0.8.1", ] [[package]] @@ -2512,17 +2608,6 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "http" -version = "0.1.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6ccf5ede3a895d8856620237b2f02972c1bbc78d2965ad7fe8838d4a0ed41f0" -dependencies = [ - "bytes 0.4.12", - "fnv", - "itoa", -] - [[package]] name = "http" version = "0.2.3" @@ -2534,28 +2619,6 @@ dependencies = [ 
"itoa", ] -[[package]] -name = "http-body" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6741c859c1b2463a423a1dbce98d418e6c3c3fc720fb0d45528657320920292d" -dependencies = [ - "bytes 0.4.12", - "futures 0.1.31", - "http 0.1.21", - "tokio-buf", -] - -[[package]] -name = "http-body" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13d5ff830006f7646652e057693569bfe0d51760c0085a071769d142a205111b" -dependencies = [ - "bytes 0.5.6", - "http 0.2.3", -] - [[package]] name = "http-body" version = "0.4.2" @@ -2563,21 +2626,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60daa14be0e0786db0f03a9e57cb404c9d756eed2b6c62b9ea98ec5743ec75a9" dependencies = [ "bytes 1.0.1", - "http 0.2.3", + "http", "pin-project-lite 0.2.6", ] [[package]] name = "httparse" -version = "1.3.5" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "615caabe2c3160b313d52ccc905335f4ed5f10881dd63dc5699d47e90be85691" +checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" [[package]] name = "httpdate" -version = "0.3.2" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "494b4d60369511e7dea41cf646832512a94e542f68bb9c49e54518e0f468eb47" +checksum = "6456b8a6c8f33fee7d958fcd1b60d55b11940a79e63ae87013e6d22e26034440" [[package]] name = "humantime" @@ -2596,98 +2659,82 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.12.36" +version = "0.10.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c843caf6296fc1f93444735205af9ed4e109a539005abb2564ae1d6fad34c52" +checksum = "0a0652d9a2609a968c14be1a9ea00bf4b1d64e2e1f53a1b51b6fff3a6e829273" dependencies = [ - "bytes 0.4.12", - "futures 0.1.31", - "futures-cpupool", - "h2 0.1.26", - "http 0.1.21", - "http-body 0.1.0", + 
"base64 0.9.3", "httparse", - "iovec", - "itoa", - "log", - "net2", - "rustc_version", + "language-tags", + "log 0.3.9", + "mime", + "num_cpus", "time", - "tokio 0.1.22", - "tokio-buf", - "tokio-executor", - "tokio-io", - "tokio-reactor", - "tokio-tcp", - "tokio-threadpool", - "tokio-timer", - "want 0.2.0", + "traitobject", + "typeable", + "unicase 1.4.2", + "url 1.7.2", ] [[package]] name = "hyper" -version = "0.13.10" +version = "0.14.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a6f157065790a3ed2f88679250419b5cdd96e714a0d65f7797fd337186e96bb" +checksum = "0b61cf2d1aebcf6e6352c97b81dc2244ca29194be1b276f5d8ad5c6330fffb11" dependencies = [ - "bytes 0.5.6", + "bytes 1.0.1", "futures-channel", "futures-core", "futures-util", - "h2 0.2.7", - "http 0.2.3", - "http-body 0.3.1", + "http", + "http-body", "httparse", "httpdate", "itoa", - "pin-project 1.0.5", - "socket2 0.3.19", - "tokio 0.2.25", + "pin-project-lite 0.2.6", + "socket2 0.4.0", + "tokio 1.10.0", "tower-service", "tracing", - "want 0.3.0", + "want", ] [[package]] -name = "hyper" -version = "0.14.5" +name = "hyper-rustls" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bf09f61b52cfcf4c00de50df88ae423d6c02354e385a86341133b5338630ad1" +checksum = "5f9f7a97316d44c0af9b0301e65010573a853a9fc97046d7331d7f6bc0fd5a64" dependencies = [ - "bytes 1.0.1", - "futures-channel", - "futures-core", + "ct-logs", "futures-util", - "http 0.2.3", - "http-body 0.4.2", - "httparse", - "httpdate", - "itoa", - "pin-project 1.0.5", - "tokio 1.6.0", - "tower-service", - "tracing", - "want 0.3.0", + "hyper 0.14.11", + "log 0.4.14", + "rustls", + "rustls-native-certs", + "tokio 1.10.0", + "tokio-rustls 0.22.0", + "webpki", ] [[package]] -name = "hyper-rustls" -version = "0.21.0" +name = "hyper-tls" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"37743cc83e8ee85eacfce90f2f4102030d9ff0a95244098d781e9bee4a90abb6" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 0.5.6", - "ct-logs", - "futures-util", - "hyper 0.13.10", - "log", - "rustls 0.18.1", - "rustls-native-certs 0.4.0", - "tokio 0.2.25", - "tokio-rustls 0.14.1", - "webpki", + "bytes 1.0.1", + "hyper 0.14.11", + "native-tls", + "tokio 1.10.0", + "tokio-native-tls", ] +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + [[package]] name = "idna" version = "0.1.5" @@ -2738,12 +2785,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a6d52908d4ea4ab2bc22474ba149bf1011c8e2c3ebc1ff593ae28ac44f494b6" dependencies = [ "async-io", - "futures 0.3.15", + "futures 0.3.16", "futures-lite", "if-addrs", "ipnet", "libc", - "log", + "log 0.4.14", "winapi 0.3.9", ] @@ -2782,7 +2829,7 @@ version = "1.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "824845a0bf897a9042383849b02c1bc219c2383772efcd5c6f9766fa4b81aef3" dependencies = [ - "autocfg", + "autocfg 1.0.1", "hashbrown 0.9.1", "serde", ] @@ -2794,9 +2841,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61124eeebbd69b8190558df225adf7e4caafce0d743919e5d6b19652314ec5ec" dependencies = [ "cfg-if 1.0.0", - "js-sys", - "wasm-bindgen", - "web-sys", ] [[package]] @@ -2814,7 +2858,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64fa110ec7b8f493f416eed552740d10e7030ad5f63b2308f82c9608ec2df275" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "futures-timer 2.0.2", ] @@ -2829,9 +2873,9 @@ dependencies = [ [[package]] name = "ip_network" -version = "0.3.4" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2ee15951c035f79eddbef745611ec962f63f4558f1dadf98ab723cc603487c6f" +checksum = "09b746553d2f4a1ca26fab939943ddfb217a091f34f53571620a8e3d30691303" [[package]] name = "ipconfig" @@ -2895,29 +2939,34 @@ dependencies = [ [[package]] name = "jsonrpc-client-transports" -version = "15.1.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "489b9c612e60c766f751ab40fcb43cbb55a1e10bb44a9b4307ed510ca598cbd7" +checksum = "d2b99d4207e2a04fb4581746903c2bb7eb376f88de9c699d0f3e10feeac0cd3a" dependencies = [ - "failure", - "futures 0.1.31", - "hyper 0.12.36", + "derive_more", + "futures 0.3.16", + "hyper 0.14.11", + "hyper-tls", "jsonrpc-core", "jsonrpc-pubsub", - "log", + "log 0.4.14", "serde", "serde_json", + "tokio 1.10.0", "url 1.7.2", + "websocket", ] [[package]] name = "jsonrpc-core" -version = "15.1.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0745a6379e3edc893c84ec203589790774e4247420033e71a76d3ab4687991fa" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" dependencies = [ - "futures 0.1.31", - "log", + "futures 0.3.16", + "futures-executor", + "futures-util", + "log 0.4.14", "serde", "serde_derive", "serde_json", @@ -2925,18 +2974,19 @@ dependencies = [ [[package]] name = "jsonrpc-core-client" -version = "15.1.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f764902d7b891344a0acb65625f32f6f7c6db006952143bd650209fbe7d94db" +checksum = "b51da17abecbdab3e3d4f26b01c5ec075e88d3abe3ab3b05dc9aa69392764ec0" dependencies = [ + "futures 0.3.16", "jsonrpc-client-transports", ] [[package]] name = "jsonrpc-derive" -version = "15.1.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99a847f9ec7bb52149b2786a17c9cb260d6effc6b8eeb8c16b343a487a7563a3" +checksum = "5b939a78fa820cdfcb7ee7484466746a7377760970f6f9c6fe19f9edcc8a38d2" dependencies = [ 
"proc-macro-crate 0.1.5", "proc-macro2", @@ -2946,73 +2996,80 @@ dependencies = [ [[package]] name = "jsonrpc-http-server" -version = "15.1.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb5c4513b7b542f42da107942b7b759f27120b5cc894729f88254b28dff44b7" +checksum = "e1dea6e07251d9ce6a552abfb5d7ad6bc290a4596c8dcc3d795fae2bbdc1f3ff" dependencies = [ - "hyper 0.12.36", + "futures 0.3.16", + "hyper 0.14.11", "jsonrpc-core", "jsonrpc-server-utils", - "log", + "log 0.4.14", "net2", - "parking_lot 0.10.2", - "unicase", + "parking_lot 0.11.1", + "unicase 2.6.0", ] [[package]] name = "jsonrpc-ipc-server" -version = "15.1.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf50e53e4eea8f421a7316c5f63e395f7bc7c4e786a6dc54d76fab6ff7aa7ce7" +checksum = "382bb0206323ca7cda3dcd7e245cea86d37d02457a02a975e3378fb149a48845" dependencies = [ + "futures 0.3.16", "jsonrpc-core", "jsonrpc-server-utils", - "log", + "log 0.4.14", "parity-tokio-ipc", - "parking_lot 0.10.2", - "tokio-service", + "parking_lot 0.11.1", + "tower-service", ] [[package]] name = "jsonrpc-pubsub" -version = "15.1.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "639558e0604013be9787ae52f798506ae42bf4220fe587bdc5625871cc8b9c77" +checksum = "240f87695e6c6f62fb37f05c02c04953cf68d6408b8c1c89de85c7a0125b1011" dependencies = [ + "futures 0.3.16", "jsonrpc-core", - "log", - "parking_lot 0.10.2", + "lazy_static", + "log 0.4.14", + "parking_lot 0.11.1", "rand 0.7.3", "serde", ] [[package]] name = "jsonrpc-server-utils" -version = "15.1.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72f1f3990650c033bd8f6bd46deac76d990f9bbfb5f8dc8c4767bf0a00392176" +checksum = "fa4fdea130485b572c39a460d50888beb00afb3e35de23ccd7fad8ff19f0e0d4" dependencies = [ - "bytes 0.4.12", + "bytes 1.0.1", + "futures 0.3.16", "globset", "jsonrpc-core", 
"lazy_static", - "log", - "tokio 0.1.22", - "tokio-codec", - "unicase", + "log 0.4.14", + "tokio 1.10.0", + "tokio-stream", + "tokio-util 0.6.7", + "unicase 2.6.0", ] [[package]] name = "jsonrpc-ws-server" -version = "15.1.0" +version = "18.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6596fe75209b73a2a75ebe1dce4e60e03b88a2b25e8807b667597f6315150d22" +checksum = "f892c7d766369475ab7b0669f417906302d7c0fb521285c0a0c92e52e7c8e946" dependencies = [ + "futures 0.3.16", "jsonrpc-core", "jsonrpc-server-utils", - "log", + "log 0.4.14", "parity-ws", - "parking_lot 0.10.2", + "parking_lot 0.11.1", "slab", ] @@ -3040,8 +3097,8 @@ dependencies = [ "beef", "futures-channel", "futures-util", - "hyper 0.14.5", - "log", + "hyper 0.14.11", + "log 0.4.14", "serde", "serde_json", "soketto 0.6.0", @@ -3056,19 +3113,19 @@ checksum = "8e2834b6e7f57ce9a4412ed4d6dc95125d2c8612e68f86b9d9a07369164e4198" dependencies = [ "async-trait", "fnv", - "futures 0.3.15", + "futures 0.3.16", "jsonrpsee-types", - "log", + "log 0.4.14", "pin-project 1.0.5", - "rustls 0.19.1", - "rustls-native-certs 0.5.0", + "rustls", + "rustls-native-certs", "serde", "serde_json", "soketto 0.6.0", "thiserror", "tokio 0.2.25", "tokio-rustls 0.15.0", - "tokio-util", + "tokio-util 0.3.1", "url 2.2.1", ] @@ -3105,7 +3162,7 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" dependencies = [ - "log", + "log 0.4.14", ] [[package]] @@ -3131,13 +3188,13 @@ dependencies = [ [[package]] name = "kvdb-rocksdb" -version = "0.12.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "431ca65516efab86e65d96281f750ebb54277dec656fcf6c027f3d1c0cb69e4c" +checksum = "9b1b6ea8f2536f504b645ad78419c8246550e19d2c3419a167080ce08edee35a" dependencies = [ "fs-swap", "kvdb", - "log", + "log 0.4.14", "num_cpus", "owning_ref", "parity-util-mem", @@ -3147,6 
+3204,12 @@ dependencies = [ "smallvec 1.6.1", ] +[[package]] +name = "language-tags" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a" + [[package]] name = "lazy_static" version = "1.4.0" @@ -3181,6 +3244,16 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "libloading" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "351a32417a12d5f7e82c368a66781e307834dae04c6ce0cd4456d52989229883" +dependencies = [ + "cfg-if 1.0.0", + "winapi 0.3.9", +] + [[package]] name = "libloading" version = "0.7.0" @@ -3199,13 +3272,13 @@ checksum = "c7d73b3f436185384286bd8098d17ec07c9a7d2388a6599f824d8502b529702a" [[package]] name = "libp2p" -version = "0.37.1" +version = "0.39.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08053fbef67cd777049ef7a95ebaca2ece370b4ed7712c3fa404d69a88cb741b" +checksum = "9004c06878ef8f3b4b4067e69a140d87ed20bf777287f82223e49713b36ee433" dependencies = [ "atomic", "bytes 1.0.1", - "futures 0.3.15", + "futures 0.3.16", "lazy_static", "libp2p-core", "libp2p-deflate", @@ -3229,7 +3302,7 @@ dependencies = [ "libp2p-wasm-ext", "libp2p-websocket", "libp2p-yamux", - "parity-multiaddr", + "multiaddr", "parking_lot 0.11.1", "pin-project 1.0.5", "smallvec 1.6.1", @@ -3238,27 +3311,27 @@ dependencies = [ [[package]] name = "libp2p-core" -version = "0.28.2" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71dd51b562e14846e65bad00e5808d0644376e6588668c490d3c48e1dfeb4a9a" +checksum = "af9b4abdeaa420593a297c8592f63fad4234f4b88dc9343b8fd8e736c35faa59" dependencies = [ "asn1_der", "bs58", "ed25519-dalek", "either", "fnv", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "lazy_static", - "libsecp256k1", - "log", - "multihash", + "libsecp256k1 0.5.0", + "log 0.4.14", + "multiaddr", + "multihash 0.14.0", 
"multistream-select", - "parity-multiaddr", "parking_lot 0.11.1", "pin-project 1.0.5", - "prost", - "prost-build", + "prost 0.8.0", + "prost-build 0.8.0", "rand 0.7.3", "ring", "rw-stream-sink", @@ -3272,65 +3345,65 @@ dependencies = [ [[package]] name = "libp2p-deflate" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2181a641cd15f9b6ba71b1335800f309012a0a97a29ffaabbbf40e9d3d58f08" +checksum = "66097fccc0b7f8579f90a03ea76ba6196332ea049fd07fd969490a06819dcdc8" dependencies = [ "flate2", - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", ] [[package]] name = "libp2p-dns" -version = "0.28.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62e63dab8b5ff35e0c101a3e51e843ba782c07bbb1682f5fd827622e0d02b98b" +checksum = "58ff08b3196b85a17f202d80589e93b1660a574af67275706657fdc762e42c32" dependencies = [ "async-std-resolver", - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", - "log", + "log 0.4.14", "smallvec 1.6.1", "trust-dns-resolver", ] [[package]] name = "libp2p-floodsub" -version = "0.29.0" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48a9b570f6766301d9c4aa00fce3554cad1598e2f466debbc4dde909028417cf" +checksum = "404eca8720967179dac7a5b4275eb91f904a53859c69ca8d018560ad6beb214f" dependencies = [ "cuckoofilter", "fnv", - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", "libp2p-swarm", - "log", - "prost", - "prost-build", + "log 0.4.14", + "prost 0.8.0", + "prost-build 0.8.0", "rand 0.7.3", "smallvec 1.6.1", ] [[package]] name = "libp2p-gossipsub" -version = "0.30.0" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73cb9a89a301afde1e588c73f7e9131e12a5388725f290a9047b878862db1b53" +checksum = "b1cc48709bcbc3a3321f08a73560b4bbb4166a7d56f6fdb615bc775f4f91058e" dependencies = [ "asynchronous-codec 0.6.0", "base64 0.13.0", "byteorder", "bytes 
1.0.1", "fnv", - "futures 0.3.15", + "futures 0.3.16", "hex_fmt", "libp2p-core", "libp2p-swarm", - "log", - "prost", - "prost-build", + "log 0.4.14", + "prost 0.8.0", + "prost-build 0.8.0", "rand 0.7.3", "regex", "sha2 0.9.3", @@ -3341,37 +3414,37 @@ dependencies = [ [[package]] name = "libp2p-identify" -version = "0.29.0" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f668f00efd9883e8b7bcc582eaf0164615792608f886f6577da18bcbeea0a46" +checksum = "a7b61f6cf07664fb97016c318c4d4512b3dd4cc07238607f3f0163245f99008e" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", "libp2p-swarm", - "log", - "prost", - "prost-build", + "log 0.4.14", + "prost 0.8.0", + "prost-build 0.8.0", "smallvec 1.6.1", "wasm-timer", ] [[package]] name = "libp2p-kad" -version = "0.30.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b07312ebe5ee4fd2404447a0609814574df55c65d4e20838b957bbd34907d820" +checksum = "50ed78489c87924235665a0ab345b298ee34dff0f7ad62c0ba6608b2144fb75e" dependencies = [ "arrayvec 0.5.2", "asynchronous-codec 0.6.0", "bytes 1.0.1", "either", "fnv", - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", "libp2p-swarm", - "log", - "prost", - "prost-build", + "log 0.4.14", + "prost 0.8.0", + "prost-build 0.8.0", "rand 0.7.3", "sha2 0.9.3", "smallvec 1.6.1", @@ -3383,19 +3456,19 @@ dependencies = [ [[package]] name = "libp2p-mdns" -version = "0.30.0" +version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c221897b3fd7f215de7ecfec215c5eba598e5b61c605b5f8b56fe8a4fb507724" +checksum = "a29e6cbc2a24b8471b6567e580a0e8e7b70a6d0f0ea2be0844d1e842d7d4fa33" dependencies = [ "async-io", "data-encoding", "dns-parser", - "futures 0.3.15", + "futures 0.3.16", "if-watch", "lazy_static", "libp2p-core", "libp2p-swarm", - "log", + "log 0.4.14", "rand 0.8.4", "smallvec 1.6.1", "socket2 0.4.0", @@ -3404,15 +3477,15 @@ dependencies = [ 
[[package]] name = "libp2p-mplex" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85e9b544335d1ed30af71daa96edbefadef6f19c7a55f078b9fc92c87163105d" +checksum = "313d9ea526c68df4425f580024e67a9d3ffd49f2c33de5154b1f5019816f7a99" dependencies = [ "asynchronous-codec 0.6.0", "bytes 1.0.1", - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", - "log", + "log 0.4.14", "nohash-hasher", "parking_lot 0.11.1", "rand 0.7.3", @@ -3422,19 +3495,19 @@ dependencies = [ [[package]] name = "libp2p-noise" -version = "0.30.0" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36db0f0db3b0433f5b9463f1c0cd9eadc0a3734a9170439ce501ff99733a88bd" +checksum = "3f1db7212f342b6ba7c981cc40e31f76e9e56cb48e65fa4c142ecaca5839523e" dependencies = [ "bytes 1.0.1", "curve25519-dalek 3.0.2", - "futures 0.3.15", + "futures 0.3.16", "lazy_static", "libp2p-core", - "log", - "prost", - "prost-build", - "rand 0.7.3", + "log 0.4.14", + "prost 0.8.0", + "prost-build 0.8.0", + "rand 0.8.4", "sha2 0.9.3", "snow", "static_assertions", @@ -3444,14 +3517,14 @@ dependencies = [ [[package]] name = "libp2p-ping" -version = "0.29.0" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf4bfaffac63bf3c7ec11ed9d8879d455966ddea7e78ee14737f0b6dce0d1cd1" +checksum = "2482cfd9eb0b7a0baaf3e7b329dc4f2785181a161b1a47b7192f8d758f54a439" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", "libp2p-swarm", - "log", + "log 0.4.14", "rand 0.7.3", "void", "wasm-timer", @@ -3459,29 +3532,29 @@ dependencies = [ [[package]] name = "libp2p-plaintext" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c8c37b4d2a075b4be8442760a5f8c037180f0c8dd5b5734b9978ab868b3aa11" +checksum = "13b4783e5423870b9a5c199f65a7a3bc66d86ab56b2b9beebf3c338d889cf8e4" dependencies = [ "asynchronous-codec 0.6.0", 
"bytes 1.0.1", - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", - "log", - "prost", - "prost-build", + "log 0.4.14", + "prost 0.8.0", + "prost-build 0.8.0", "unsigned-varint 0.7.0", "void", ] [[package]] name = "libp2p-pnet" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce3374f3b28162db9d3442c9347c4f14cb01e8290052615c7d341d40eae0599" +checksum = "07cb4dd4b917e5b40ddefe49b96b07adcd8d342e0317011d175b7b2bb1dcc974" dependencies = [ - "futures 0.3.15", - "log", + "futures 0.3.16", + "log 0.4.14", "pin-project 1.0.5", "rand 0.7.3", "salsa20", @@ -3490,20 +3563,20 @@ dependencies = [ [[package]] name = "libp2p-relay" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b8786aca3f18671d8776289706a5521f6c9124a820f69e358de214b9939440d" +checksum = "0133f6cfd81cdc16e716de2982e012c62e6b9d4f12e41967b3ee361051c622aa" dependencies = [ "asynchronous-codec 0.6.0", "bytes 1.0.1", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "libp2p-core", "libp2p-swarm", - "log", + "log 0.4.14", "pin-project 1.0.5", - "prost", - "prost-build", + "prost 0.8.0", + "prost-build 0.8.0", "rand 0.7.3", "smallvec 1.6.1", "unsigned-varint 0.7.0", @@ -3513,16 +3586,16 @@ dependencies = [ [[package]] name = "libp2p-request-response" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cdbe172f08e6d0f95fa8634e273d4c4268c4063de2e33e7435194b0130c62e3" +checksum = "06cdae44b6821466123af93cbcdec7c9e6ba9534a8af9cdc296446d39416d241" dependencies = [ "async-trait", "bytes 1.0.1", - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", "libp2p-swarm", - "log", + "log 0.4.14", "lru", "minicbor", "rand 0.7.3", @@ -3533,14 +3606,14 @@ dependencies = [ [[package]] name = "libp2p-swarm" -version = "0.29.0" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1e04d8e1eef675029ec728ba14e8d0da7975d84b6679b699b4ae91a1de9c3a92" +checksum = "7083861341e1555467863b4cd802bea1e8c4787c0f7b5110097d0f1f3248f9a9" dependencies = [ "either", - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", - "log", + "log 0.4.14", "rand 0.7.3", "smallvec 1.6.1", "void", @@ -3549,9 +3622,9 @@ dependencies = [ [[package]] name = "libp2p-swarm-derive" -version = "0.23.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "365b0a699fea5168676840567582a012ea297b1ca02eee467e58301b9c9c5eed" +checksum = "ab8cb308d4fc854869f5abb54fdab0833d2cf670d407c745849dc47e6e08d79c" dependencies = [ "quote", "syn", @@ -3559,40 +3632,40 @@ dependencies = [ [[package]] name = "libp2p-tcp" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b1a27d21c477951799e99d5c105d78868258502ce092988040a808d5a19bbd9" +checksum = "79edd26b6b4bb5feee210dcda562dca186940dfecb0024b979c3f50824b3bf28" dependencies = [ "async-io", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "if-watch", "ipnet", "libc", "libp2p-core", - "log", + "log 0.4.14", "socket2 0.4.0", ] [[package]] name = "libp2p-uds" -version = "0.28.0" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffd6564bb3b7ff203661ccbb69003c2b551e34cef974f2d6c6a28306a12170b5" +checksum = "280e793440dd4e9f273d714f4497325c72cddb0fe85a49f9a03c88f41dd20182" dependencies = [ "async-std", - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", - "log", + "log 0.4.14", ] [[package]] name = "libp2p-wasm-ext" -version = "0.28.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cef45d61e43c313531b5e903e4e8415212ff6338e0c54c47da5b9b412b5760de" +checksum = "f553b7140fad3d7a76f50497b0ea591e26737d9607428a75509fc191e4d1b1f6" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "js-sys", "libp2p-core", "parity-send-wrapper", @@ 
-3602,15 +3675,15 @@ dependencies = [ [[package]] name = "libp2p-websocket" -version = "0.29.0" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cace60995ef6f637e4752cccbb2590f6bc358e8741a0d066307636c69a4b3a74" +checksum = "ddf99dcbf5063e9d59087f61b1e85c686ceab2f5abedb472d32288065c0e5e27" dependencies = [ "either", - "futures 0.3.15", + "futures 0.3.16", "futures-rustls", "libp2p-core", - "log", + "log 0.4.14", "quicksink", "rw-stream-sink", "soketto 0.4.2", @@ -3620,11 +3693,11 @@ dependencies = [ [[package]] name = "libp2p-yamux" -version = "0.32.0" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f35da42cfc6d5cb0dcf3ad6881bc68d146cdf38f98655e09e33fbba4d13eabc4" +checksum = "214cc0dd9c37cbed27f0bb1eba0c41bbafdb93a8be5e9d6ae1e6b4b42cd044bf" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "libp2p-core", "parking_lot 0.11.1", "thiserror", @@ -3633,9 +3706,9 @@ dependencies = [ [[package]] name = "librocksdb-sys" -version = "6.17.3" +version = "6.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5da125e1c0f22c7cae785982115523a0738728498547f415c9054cb17c7e89f9" +checksum = "c309a9d2470844aceb9a4a098cf5286154d20596868b75a6b36357d2bb9ca25d" dependencies = [ "bindgen", "cc", @@ -3645,53 +3718,104 @@ dependencies = [ [[package]] name = "libsecp256k1" -version = "0.3.5" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc1e2c808481a63dc6da2074752fdd4336a3c8fcc68b83db6f1fd5224ae7962" +checksum = "bd1137239ab33b41aa9637a88a28249e5e70c40a42ccc92db7f12cc356c1fcd7" dependencies = [ "arrayref", - "crunchy", - "digest 0.8.1", + "base64 0.12.3", + "digest 0.9.0", "hmac-drbg", + "libsecp256k1-core", + "libsecp256k1-gen-ecmult", + "libsecp256k1-gen-genmult", "rand 0.7.3", - "sha2 0.8.2", - "subtle 2.4.0", + "serde", + "sha2 0.9.3", "typenum", ] [[package]] -name = "libz-sys" -version = "1.1.2" 
+name = "libsecp256k1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" +checksum = "c9d220bc1feda2ac231cb78c3d26f27676b8cf82c96971f7aeef3d0cf2797c73" dependencies = [ - "cc", - "pkg-config", - "vcpkg", + "arrayref", + "base64 0.12.3", + "digest 0.9.0", + "hmac-drbg", + "libsecp256k1-core", + "libsecp256k1-gen-ecmult", + "libsecp256k1-gen-genmult", + "rand 0.7.3", + "serde", + "sha2 0.9.3", + "typenum", ] [[package]] -name = "linked-hash-map" -version = "0.5.4" +name = "libsecp256k1-core" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" +checksum = "d0f6ab710cec28cef759c5f18671a27dae2a5f952cdaaee1d8e2908cb2478a80" +dependencies = [ + "crunchy", + "digest 0.9.0", + "subtle 2.4.0", +] [[package]] -name = "linked_hash_set" -version = "0.1.4" +name = "libsecp256k1-gen-ecmult" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47186c6da4d81ca383c7c47c1bfc80f4b95f4720514d860a5407aaf4233f9588" +checksum = "ccab96b584d38fac86a83f07e659f0deafd0253dc096dab5a36d53efe653c5c3" dependencies = [ - "linked-hash-map", + "libsecp256k1-core", ] [[package]] -name = "linregress" -version = "0.4.3" +name = "libsecp256k1-gen-genmult" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e6e407dadb4ca4b31bc69c27aff00e7ca4534fdcee855159b039a7cebb5f395" +checksum = "67abfe149395e3aa1c48a2beb32b068e2334402df8181f818d3aee2b304c4f5d" dependencies = [ - "nalgebra", + "libsecp256k1-core", +] + +[[package]] +name = "libz-sys" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linked-hash-map" 
+version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" + +[[package]] +name = "linked_hash_set" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47186c6da4d81ca383c7c47c1bfc80f4b95f4720514d860a5407aaf4233f9588" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "linregress" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6e407dadb4ca4b31bc69c27aff00e7ca4534fdcee855159b039a7cebb5f395" +dependencies = [ + "nalgebra", "statrs", ] @@ -3731,6 +3855,15 @@ dependencies = [ "scopeguard", ] +[[package]] +name = "log" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b" +dependencies = [ + "log 0.4.14", +] + [[package]] name = "log" version = "0.4.14" @@ -3759,6 +3892,26 @@ dependencies = [ "linked-hash-map", ] +[[package]] +name = "lz4" +version = "1.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aac20ed6991e01bf6a2e68cc73df2b389707403662a8ba89f68511fb340f724c" +dependencies = [ + "libc", + "lz4-sys", +] + +[[package]] +name = "lz4-sys" +version = "1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dca79aa95d8b3226213ad454d328369853be3a1382d89532a854f4d69640acae" +dependencies = [ + "cc", + "libc", +] + [[package]] name = "mach" version = "0.3.2" @@ -3841,7 +3994,7 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "043175f069eda7b85febe4a74abbaeff828d9f8b448515d3151a14a3542811aa" dependencies = [ - "autocfg", + "autocfg 1.0.1", ] [[package]] @@ -3850,7 +4003,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "157b4208e3059a8f9e78d559edc658e13df41410cb3ae03979c83130067fdd87" 
dependencies = [ - "autocfg", + "autocfg 1.0.1", ] [[package]] @@ -3882,6 +4035,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "mime" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba626b8a6de5da682e1caa06bdb42a335aee5a84db8e5046a3e8ab17ba0a3ae0" +dependencies = [ + "log 0.3.9", +] + [[package]] name = "minicbor" version = "0.8.0" @@ -3909,7 +4071,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b" dependencies = [ "adler", - "autocfg", + "autocfg 1.0.1", ] [[package]] @@ -3924,7 +4086,7 @@ dependencies = [ "iovec", "kernel32-sys", "libc", - "log", + "log 0.4.14", "miow 0.2.2", "net2", "slab", @@ -3932,27 +4094,28 @@ dependencies = [ ] [[package]] -name = "mio-extras" -version = "2.0.6" +name = "mio" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" +checksum = "8c2bdb6314ec10835cd3293dd268473a835c02b7b352e788be788b3c6ca6bb16" dependencies = [ - "lazycell", - "log", - "mio", - "slab", + "libc", + "log 0.4.14", + "miow 0.3.6", + "ntapi", + "winapi 0.3.9", ] [[package]] -name = "mio-named-pipes" -version = "0.1.7" +name = "mio-extras" +version = "2.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0840c1c50fd55e521b247f949c241c9997709f23bd7f023b9762cd561e935656" +checksum = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" dependencies = [ - "log", - "mio", - "miow 0.3.6", - "winapi 0.3.9", + "lazycell", + "log 0.4.14", + "mio 0.6.23", + "slab", ] [[package]] @@ -3963,7 +4126,7 @@ checksum = "afcb699eb26d4332647cc848492bbc15eafb26f08d0304550d5aa1f612e066f0" dependencies = [ "iovec", "libc", - "mio", + "mio 0.6.23", ] [[package]] @@ -3994,6 +4157,24 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0debeb9fcf88823ea64d64e4a815ab1643f33127d995978e099942ce38f25238" +[[package]] +name = "multiaddr" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48ee4ea82141951ac6379f964f71b20876d43712bea8faf6dd1a375e08a46499" +dependencies = [ + "arrayref", + "bs58", + "byteorder", + "data-encoding", + "multihash 0.14.0", + "percent-encoding 2.1.0", + "serde", + "static_assertions", + "unsigned-varint 0.7.0", + "url 2.2.1", +] + [[package]] name = "multibase" version = "0.8.0" @@ -4022,13 +4203,26 @@ dependencies = [ "unsigned-varint 0.5.1", ] +[[package]] +name = "multihash" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "752a61cd890ff691b4411423d23816d5866dd5621e4d1c5687a53b94b5a979d8" +dependencies = [ + "digest 0.9.0", + "generic-array 0.14.4", + "multihash-derive", + "sha2 0.9.3", + "unsigned-varint 0.7.0", +] + [[package]] name = "multihash-derive" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85ee3c48cb9d9b275ad967a0e96715badc13c6029adb92f34fa17b9ff28fd81f" +checksum = "424f6e86263cd5294cbd7f1e95746b95aca0e0d66bff31e5a40d6baa87b4aa99" dependencies = [ - "proc-macro-crate 0.1.5", + "proc-macro-crate 1.0.0", "proc-macro-error 1.0.4", "proc-macro2", "quote", @@ -4049,8 +4243,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d91ec0a2440aaff5f78ec35631a7027d50386c6163aa975f7caa0d5da4b6ff8" dependencies = [ "bytes 1.0.1", - "futures 0.3.15", - "log", + "futures 0.3.16", + "log 0.4.14", "pin-project 1.0.5", "smallvec 1.6.1", "unsigned-varint 0.7.0", @@ -4094,6 +4288,24 @@ dependencies = [ "rand 0.3.23", ] +[[package]] +name = "native-tls" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48ba9f7719b5a0f42f338907614285fb5fd70e53858141f69898a1fb7203b24d" +dependencies = [ + "lazy_static", + "libc", + "log 0.4.14", + "openssl", + 
"openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + [[package]] name = "nb-connect" version = "1.0.3" @@ -4133,13 +4345,13 @@ version = "0.9.0-dev" dependencies = [ "derive_more", "fs_extra", - "futures 0.3.15", + "futures 0.3.16", "hash-db", "hex", "kvdb", "kvdb-rocksdb", "lazy_static", - "log", + "log 0.4.14", "node-primitives", "node-runtime", "node-testing", @@ -4147,7 +4359,6 @@ dependencies = [ "parity-util-mem", "rand 0.7.3", "sc-basic-authorship", - "sc-cli", "sc-client-api", "sc-transaction-pool", "sc-transaction-pool-api", @@ -4165,24 +4376,6 @@ dependencies = [ "tempfile", ] -[[package]] -name = "node-browser-testing" -version = "3.0.0-dev" -dependencies = [ - "futures 0.3.15", - "futures-timer 3.0.2", - "jsonrpc-core", - "libp2p", - "node-cli", - "parking_lot 0.11.1", - "sc-rpc-api", - "serde", - "serde_json", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-bindgen-test", -] - [[package]] name = "node-cli" version = "3.0.0-dev" @@ -4190,29 +4383,19 @@ dependencies = [ "assert_cmd", "async-std", "frame-benchmarking-cli", - "frame-support", "frame-system", - "futures 0.3.15", + "futures 0.3.16", "hex-literal", - "libp2p-wasm-ext", - "log", + "log 0.4.14", "nix", "node-executor", "node-inspect", "node-primitives", "node-rpc", "node-runtime", - "pallet-authority-discovery", - "pallet-balances", - "pallet-contracts", - "pallet-grandpa", "pallet-im-online", - "pallet-indices", - "pallet-staking", - "pallet-timestamp", "pallet-transaction-payment", "parity-scale-codec", - "parking_lot 0.11.1", "platforms", "rand 0.7.3", "regex", @@ -4221,22 +4404,20 @@ dependencies = [ "sc-chain-spec", "sc-cli", "sc-client-api", - "sc-client-db", "sc-consensus", "sc-consensus-babe", "sc-consensus-epochs", "sc-consensus-slots", "sc-consensus-uncles", + "sc-executor", "sc-finality-grandpa", "sc-keystore", "sc-network", - "sc-offchain", "sc-rpc", "sc-service", "sc-service-test", "sc-sync-state-rpc", 
"sc-telemetry", - "sc-tracing", "sc-transaction-pool", "sc-transaction-pool-api", "serde", @@ -4249,7 +4430,6 @@ dependencies = [ "sp-core", "sp-finality-grandpa", "sp-inherents", - "sp-io", "sp-keyring", "sp-keystore", "sp-runtime", @@ -4258,13 +4438,10 @@ dependencies = [ "sp-transaction-storage-proof", "sp-trie", "structopt", - "substrate-browser-utils", "substrate-build-script-utils", "substrate-frame-cli", "tempfile", "try-runtime-cli", - "wasm-bindgen", - "wasm-bindgen-futures", ] [[package]] @@ -4275,18 +4452,14 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "futures 0.3.15", + "futures 0.3.16", "node-primitives", "node-runtime", "node-testing", "pallet-balances", "pallet-contracts", - "pallet-grandpa", "pallet-im-online", - "pallet-indices", - "pallet-session", "pallet-timestamp", - "pallet-transaction-payment", "pallet-treasury", "parity-scale-codec", "sc-executor", @@ -4295,13 +4468,10 @@ dependencies = [ "sp-consensus-babe", "sp-core", "sp-externalities", - "sp-io", "sp-keystore", "sp-runtime", "sp-state-machine", "sp-trie", - "substrate-test-client", - "trie-root", "wat", ] @@ -4310,10 +4480,10 @@ name = "node-inspect" version = "0.9.0-dev" dependencies = [ "derive_more", - "log", "parity-scale-codec", "sc-cli", "sc-client-api", + "sc-executor", "sc-service", "sp-blockchain", "sp-core", @@ -4327,12 +4497,10 @@ version = "2.0.0" dependencies = [ "frame-system", "parity-scale-codec", - "pretty_assertions 0.6.1", "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", - "sp-serializer", ] [[package]] @@ -4351,7 +4519,6 @@ dependencies = [ "sc-consensus-epochs", "sc-finality-grandpa", "sc-finality-grandpa-rpc", - "sc-keystore", "sc-rpc", "sc-rpc-api", "sc-sync-state-rpc", @@ -4370,10 +4537,8 @@ dependencies = [ name = "node-rpc-client" version = "2.0.0" dependencies = [ - "futures 0.1.31", - "hyper 0.12.36", + "futures 0.3.16", "jsonrpc-core-client", - "log", "node-primitives", "sc-rpc", "sp-tracing", @@ -4392,7 +4557,7 
@@ dependencies = [ "frame-system-rpc-runtime-api", "frame-try-runtime", "hex-literal", - "log", + "log 0.4.14", "node-primitives", "pallet-assets", "pallet-authority-discovery", @@ -4489,7 +4654,6 @@ dependencies = [ "sp-consensus-aura", "sp-core", "sp-finality-grandpa", - "sp-inherents", "sp-runtime", "sp-timestamp", "structopt", @@ -4537,28 +4701,16 @@ dependencies = [ name = "node-testing" version = "3.0.0-dev" dependencies = [ - "criterion", - "frame-support", "frame-system", "fs_extra", - "futures 0.3.15", - "log", + "futures 0.3.16", + "log 0.4.14", "node-executor", "node-primitives", "node-runtime", - "pallet-balances", - "pallet-contracts", - "pallet-grandpa", - "pallet-indices", - "pallet-session", - "pallet-society", - "pallet-staking", - "pallet-timestamp", "pallet-transaction-payment", - "pallet-treasury", "parity-scale-codec", "sc-block-builder", - "sc-cli", "sc-client-api", "sc-client-db", "sc-consensus", @@ -4592,12 +4744,23 @@ checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" [[package]] name = "nom" -version = "5.1.2" +version = "6.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af" +checksum = "9c5c51b9083a3c620fa67a2a635d1ce7d95b897e957d6b28ff9a5da960a103a6" dependencies = [ + "bitvec 0.19.5", + "funty", "memchr", - "version_check", + "version_check 0.9.2", +] + +[[package]] +name = "ntapi" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" +dependencies = [ + "winapi 0.3.9", ] [[package]] @@ -4606,7 +4769,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "090c7f9998ee0ff65aa5b723e4009f7b217707f1fb5ea551329cc4d6231fb304" dependencies = [ - "autocfg", + "autocfg 1.0.1", "num-integer", "num-traits", ] @@ -4626,7 +4789,7 @@ version = "0.1.44" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db" dependencies = [ - "autocfg", + "autocfg 1.0.1", "num-traits", ] @@ -4636,7 +4799,7 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c000134b5dbf44adc5cb772486d335293351644b801551abe8f75c84cfa4aef" dependencies = [ - "autocfg", + "autocfg 1.0.1", "num-bigint", "num-integer", "num-traits", @@ -4648,7 +4811,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a" dependencies = [ - "autocfg", + "autocfg 1.0.1", "num-integer", "num-traits", ] @@ -4659,7 +4822,7 @@ version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" dependencies = [ - "autocfg", + "autocfg 1.0.1", "libm", ] @@ -4673,6 +4836,16 @@ dependencies = [ "libc", ] +[[package]] +name = "object" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d3b63360ec3cb337817c2dbd47ab4a0f170d285d8e5a2064600f3def1402397" +dependencies = [ + "crc32fast", + "indexmap", +] + [[package]] name = "object" version = "0.23.0" @@ -4716,12 +4889,39 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" +[[package]] +name = "openssl" +version = "0.10.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9facdb76fec0b73c406f125d44d86fdad818d66fef0531eec9233ca425ff4a" +dependencies = [ + "bitflags", + "cfg-if 1.0.0", + "foreign-types", + "libc", + "once_cell", + "openssl-sys", +] + [[package]] name = "openssl-probe" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" +[[package]] +name = "openssl-sys" +version = "0.9.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1996d2d305e561b70d1ee0c53f1542833f4e1ac6ce9a6708b6ff2738ca67dc82" +dependencies = [ + "autocfg 1.0.1", + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "output_vt100" version = "0.1.2" @@ -4777,11 +4977,8 @@ version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", - "lazy_static", - "pallet-session", "pallet-timestamp", "parity-scale-codec", - "parking_lot 0.11.1", "scale-info", "sp-application-crypto", "sp-consensus-aura", @@ -4805,7 +5002,6 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", - "sp-staking", "sp-std", ] @@ -4818,7 +5014,6 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "scale-info", - "serde", "sp-authorship", "sp-core", "sp-io", @@ -4834,7 +5029,7 @@ dependencies = [ "frame-election-provider-support", "frame-support", "frame-system", - "log", + "log 0.4.14", "pallet-authorship", "pallet-balances", "pallet-offences", @@ -4862,7 +5057,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "log", + "log 0.4.14", "pallet-transaction-payment", "parity-scale-codec", "scale-info", @@ -4887,7 +5082,6 @@ dependencies = [ "sp-io", "sp-runtime", "sp-std", - "sp-storage", ] [[package]] @@ -4898,8 +5092,7 @@ dependencies = [ "frame-support", "frame-system", "hex-literal", - "log", - "pallet-balances", + "log 0.4.14", "parity-scale-codec", "scale-info", "sp-core", @@ -4918,7 +5111,7 @@ dependencies = [ "frame-support", "frame-system", "hex-literal", - "log", + "log 0.4.14", "pallet-balances", "pallet-contracts-primitives", "pallet-contracts-proc-macro", @@ -4926,7 +5119,6 @@ dependencies = [ "pallet-timestamp", "pallet-utility", "parity-scale-codec", - "paste 1.0.4", "pretty_assertions 0.7.2", "pwasm-utils", "rand 0.8.4", @@ -5003,7 +5195,6 @@ dependencies = [ "frame-benchmarking", 
"frame-support", "frame-system", - "hex-literal", "pallet-balances", "pallet-scheduler", "parity-scale-codec", @@ -5013,8 +5204,6 @@ dependencies = [ "sp-io", "sp-runtime", "sp-std", - "sp-storage", - "substrate-test-utils", ] [[package]] @@ -5025,12 +5214,10 @@ dependencies = [ "frame-election-provider-support", "frame-support", "frame-system", - "hex-literal", - "log", + "log 0.4.14", "pallet-balances", "parity-scale-codec", "parking_lot 0.11.1", - "paste 1.0.4", "rand 0.7.3", "scale-info", "sp-arithmetic", @@ -5041,7 +5228,8 @@ dependencies = [ "sp-std", "sp-tracing", "static_assertions", - "substrate-test-utils", + "strum 0.21.0", + "strum_macros 0.21.1", ] [[package]] @@ -5050,10 +5238,8 @@ version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", - "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5067,8 +5253,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "hex-literal", - "log", + "log 0.4.14", "pallet-balances", "parity-scale-codec", "scale-info", @@ -5087,7 +5272,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "log", + "log 0.4.14", "pallet-balances", "parity-scale-codec", "scale-info", @@ -5099,12 +5284,12 @@ dependencies = [ [[package]] name = "pallet-example-offchain-worker" -version = "3.0.0-dev" +version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", "lite-json", - "log", + "log 0.4.14", "parity-scale-codec", "scale-info", "sp-core", @@ -5122,7 +5307,6 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5156,7 +5340,7 @@ dependencies = [ "frame-election-provider-support", "frame-support", "frame-system", - "log", + "log 0.4.14", "pallet-authorship", "pallet-balances", "pallet-offences", @@ -5201,7 +5385,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "log", + "log 0.4.14", "pallet-authorship", 
"pallet-session", "parity-scale-codec", @@ -5242,7 +5426,6 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5256,7 +5439,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "log", + "log 0.4.14", "parity-scale-codec", "scale-info", "sp-core", @@ -5270,7 +5453,7 @@ name = "pallet-mmr" version = "4.0.0-dev" dependencies = [ "ckb-merkle-mountain-range", - "env_logger 0.8.3", + "env_logger 0.9.0", "frame-benchmarking", "frame-support", "frame-system", @@ -5291,7 +5474,7 @@ dependencies = [ "frame-support", "frame-system", "hex-literal", - "log", + "log 0.4.14", "parity-scale-codec", "serde", "sp-api", @@ -5314,7 +5497,6 @@ dependencies = [ "sp-api", "sp-blockchain", "sp-core", - "sp-rpc", "sp-runtime", ] @@ -5355,7 +5537,7 @@ version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", - "log", + "log 0.4.14", "parity-scale-codec", "scale-info", "sp-core", @@ -5370,7 +5552,7 @@ version = "4.0.0-dev" dependencies = [ "frame-support", "frame-system", - "log", + "log 0.4.14", "pallet-balances", "parity-scale-codec", "scale-info", @@ -5401,7 +5583,6 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5445,7 +5626,6 @@ dependencies = [ name = "pallet-recovery" version = "4.0.0-dev" dependencies = [ - "enumflags2", "frame-support", "frame-system", "pallet-balances", @@ -5464,7 +5644,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "log", + "log 0.4.14", "parity-scale-codec", "scale-info", "sp-core", @@ -5496,12 +5676,10 @@ dependencies = [ "frame-support", "frame-system", "impl-trait-for-tuples", - "lazy_static", - "log", + "log 0.4.14", "pallet-timestamp", "parity-scale-codec", "scale-info", - "sp-application-crypto", "sp-core", "sp-io", "sp-runtime", @@ -5527,7 +5705,6 @@ dependencies = [ "parity-scale-codec", "rand 0.7.3", "scale-info", - "serde", "sp-core", 
"sp-io", "sp-runtime", @@ -5560,16 +5737,13 @@ dependencies = [ "frame-election-provider-support", "frame-support", "frame-system", - "hex", - "log", + "log 0.4.14", "pallet-authorship", "pallet-balances", "pallet-session", "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "parking_lot 0.11.1", - "paste 1.0.4", "rand_chacha 0.2.2", "scale-info", "serde", @@ -5579,9 +5753,7 @@ dependencies = [ "sp-runtime", "sp-staking", "sp-std", - "sp-storage", "sp-tracing", - "static_assertions", "substrate-test-utils", ] @@ -5600,7 +5772,7 @@ dependencies = [ name = "pallet-staking-reward-fn" version = "4.0.0-dev" dependencies = [ - "log", + "log 0.4.14", "sp-arithmetic", ] @@ -5627,7 +5799,6 @@ dependencies = [ "frame-system", "parity-scale-codec", "scale-info", - "serde", "sp-core", "sp-io", "sp-runtime", @@ -5640,8 +5811,7 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "impl-trait-for-tuples", - "log", + "log 0.4.14", "parity-scale-codec", "scale-info", "sp-core", @@ -5687,7 +5857,6 @@ dependencies = [ "sp-io", "sp-runtime", "sp-std", - "sp-storage", ] [[package]] @@ -5722,7 +5891,6 @@ version = "4.0.0-dev" dependencies = [ "frame-benchmarking", "frame-support", - "frame-support-test", "frame-system", "hex-literal", "pallet-balances", @@ -5753,7 +5921,6 @@ dependencies = [ "sp-io", "sp-runtime", "sp-std", - "sp-storage", ] [[package]] @@ -5792,11 +5959,9 @@ dependencies = [ name = "pallet-vesting" version = "4.0.0-dev" dependencies = [ - "enumflags2", "frame-benchmarking", "frame-support", "frame-system", - "hex-literal", "pallet-balances", "parity-scale-codec", "scale-info", @@ -5804,42 +5969,25 @@ dependencies = [ "sp-io", "sp-runtime", "sp-std", - "sp-storage", ] [[package]] name = "parity-db" -version = "0.2.4" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e337f62db341435f0da05b8f6b97e984ef4ea5800510cd07c2d624688c40b47" +checksum = 
"241f9c5d25063080f2c02846221f13e1d0e5e18fa00c32c234aad585b744ee55" dependencies = [ "blake2-rfc", "crc32fast", "fs2", "hex", "libc", - "log", + "log 0.4.14", + "lz4", "memmap2", "parking_lot 0.11.1", "rand 0.8.4", -] - -[[package]] -name = "parity-multiaddr" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58341485071825827b7f03cf7efd1cb21e6a709bea778fb50227fd45d2f361b4" -dependencies = [ - "arrayref", - "bs58", - "byteorder", - "data-encoding", - "multihash", - "percent-encoding 2.1.0", - "serde", - "static_assertions", - "unsigned-varint 0.7.0", - "url 2.2.1", + "snap", ] [[package]] @@ -5849,7 +5997,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8975095a2a03bbbdc70a74ab11a4f76a6d0b84680d87c68d722531b0ac28e8a9" dependencies = [ "arrayvec 0.7.0", - "bitvec", + "bitvec 0.20.2", "byte-slice-cast", "impl-trait-for-tuples", "parity-scale-codec-derive", @@ -5876,20 +6024,15 @@ checksum = "aa9777aa91b8ad9dd5aaa04a9b6bcb02c7f1deb952fca5a66034d5e63afc5c6f" [[package]] name = "parity-tokio-ipc" -version = "0.4.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e57fea504fea33f9fbb5f49f378359030e7e026a6ab849bb9e8f0787376f1bf" +checksum = "9981e32fb75e004cc148f5fb70342f393830e0a4aa62e3cc93b50976218d42b6" dependencies = [ - "bytes 0.4.12", - "futures 0.1.31", + "futures 0.3.16", "libc", - "log", - "mio-named-pipes", - "miow 0.3.6", + "log 0.4.14", "rand 0.7.3", - "tokio 0.1.22", - "tokio-named-pipes", - "tokio-uds", + "tokio 1.10.0", "winapi 0.3.9", ] @@ -5937,15 +6080,15 @@ checksum = "be5e13c266502aadf83426d87d81a0f5d1ef45b8027f5a471c360abfe4bfae92" [[package]] name = "parity-ws" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e02a625dd75084c2a7024f07c575b61b782f729d18702dabb3cdbf31911dc61" +checksum = "d0ab8a461779bd022964cae2b4989fa9c99deb270bec162da2125ec03c09fcaa" 
dependencies = [ "byteorder", "bytes 0.4.12", "httparse", - "log", - "mio", + "log 0.4.14", + "mio 0.6.23", "mio-extras", "rand 0.7.3", "sha-1 0.8.2", @@ -5967,7 +6110,7 @@ checksum = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252" dependencies = [ "lock_api 0.3.4", "parking_lot_core 0.6.2", - "rustc_version", + "rustc_version 0.2.3", ] [[package]] @@ -6001,7 +6144,7 @@ dependencies = [ "cloudabi", "libc", "redox_syscall 0.1.57", - "rustc_version", + "rustc_version 0.2.3", "smallvec 0.6.14", "winapi 0.3.9", ] @@ -6078,12 +6221,6 @@ dependencies = [ "crypto-mac 0.8.0", ] -[[package]] -name = "pdqselect" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec91767ecc0a0bbe558ce8c9da33c068066c57ecc8bb8477ef8c1ad3ef77c27" - [[package]] name = "peeking_take_while" version = "0.1.2" @@ -6261,28 +6398,30 @@ checksum = "a2a7bc6b2a29e632e45451c941832803a18cce6781db04de8a04696cdca8bde4" dependencies = [ "cfg-if 0.1.10", "libc", - "log", + "log 0.4.14", "wepoll-sys", "winapi 0.3.9", ] [[package]] name = "poly1305" -version = "0.6.2" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b7456bc1ad2d4cf82b3a016be4c2ac48daf11bf990c1603ebd447fe6f30fca8" +checksum = "9fcffab1f78ebbdf4b93b68c1ffebc24037eedf271edaca795732b24e5e4e349" dependencies = [ - "cpuid-bool 0.2.0", + "cpufeatures", + "opaque-debug 0.3.0", "universal-hash", ] [[package]] name = "polyval" -version = "0.4.5" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eebcc4aa140b9abd2bc40d9c3f7ccec842679cd79045ac3a7ac698c1a064b7cd" +checksum = "a6ba6a405ef63530d6cb12802014b22f9c5751bd17cdcddbe9e46d5c8ae83287" dependencies = [ - "cpuid-bool 0.2.0", + "cfg-if 1.0.0", + "cpufeatures", "opaque-debug 0.3.0", "universal-hash", ] @@ -6385,7 +6524,7 @@ dependencies = [ "proc-macro2", "quote", "syn", - "version_check", + "version_check 0.9.2", ] [[package]] @@ -6398,7 +6537,7 
@@ dependencies = [ "proc-macro2", "quote", "syn", - "version_check", + "version_check 0.9.2", ] [[package]] @@ -6411,7 +6550,7 @@ dependencies = [ "quote", "syn", "syn-mid", - "version_check", + "version_check 0.9.2", ] [[package]] @@ -6422,7 +6561,7 @@ checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ "proc-macro2", "quote", - "version_check", + "version_check 0.9.2", ] [[package]] @@ -6467,7 +6606,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e6984d2f1a23009bd270b8bb56d0926810a3d483f59c987d77969e9d8e840b2" dependencies = [ "bytes 1.0.1", - "prost-derive", + "prost-derive 0.7.0", +] + +[[package]] +name = "prost" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de5e2533f59d08fcf364fd374ebda0692a70bd6d7e66ef97f306f45c6c5d8020" +dependencies = [ + "bytes 1.0.1", + "prost-derive 0.8.0", ] [[package]] @@ -6479,11 +6628,29 @@ dependencies = [ "bytes 1.0.1", "heck", "itertools 0.9.0", - "log", + "log 0.4.14", + "multimap", + "petgraph", + "prost 0.7.0", + "prost-types 0.7.0", + "tempfile", + "which", +] + +[[package]] +name = "prost-build" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "355f634b43cdd80724ee7848f95770e7e70eefa6dcf14fea676216573b8fd603" +dependencies = [ + "bytes 1.0.1", + "heck", + "itertools 0.10.0", + "log 0.4.14", "multimap", "petgraph", - "prost", - "prost-types", + "prost 0.8.0", + "prost-types 0.8.0", "tempfile", "which", ] @@ -6501,6 +6668,19 @@ dependencies = [ "syn", ] +[[package]] +name = "prost-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "600d2f334aa05acb02a755e217ef1ab6dea4d51b58b7846588b747edec04efba" +dependencies = [ + "anyhow", + "itertools 0.10.0", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "prost-types" version = "0.7.0" @@ -6508,7 +6688,17 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b518d7cdd93dab1d1122cf07fa9a60771836c668dde9d9e2a139f957f0d9f1bb" dependencies = [ "bytes 1.0.1", - "prost", + "prost 0.7.0", +] + +[[package]] +name = "prost-types" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "603bbd6394701d13f3f25aada59c7de9d35a6a5887cfc156181234a44002771b" +dependencies = [ + "bytes 1.0.1", + "prost 0.8.0", ] [[package]] @@ -6527,7 +6717,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0c1a2f10b47d446372a4f397c58b329aaea72b2daf9395a623a411cb8ccb54f" dependencies = [ "byteorder", - "log", + "log 0.4.14", "parity-wasm 0.42.2", ] @@ -6550,7 +6740,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" dependencies = [ "env_logger 0.8.3", - "log", + "log 0.4.14", "rand 0.8.4", ] @@ -6574,6 +6764,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + [[package]] name = "radium" version = "0.6.2" @@ -6603,6 +6799,25 @@ dependencies = [ "winapi 0.3.9", ] +[[package]] +name = "rand" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +dependencies = [ + "autocfg 0.1.7", + "libc", + "rand_chacha 0.1.1", + "rand_core 0.4.2", + "rand_hc 0.1.0", + "rand_isaac", + "rand_jitter", + "rand_os", + "rand_pcg 0.1.2", + "rand_xorshift", + "winapi 0.3.9", +] + [[package]] name = "rand" version = "0.7.3" @@ -6629,6 +6844,16 @@ dependencies = [ "rand_hc 0.3.0", ] +[[package]] +name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +dependencies = [ + "autocfg 0.1.7", + "rand_core 0.3.1", +] + [[package]] name = "rand_chacha" version = "0.2.2" @@ -6692,6 +6917,15 @@ dependencies = [ "rand 0.8.4", ] +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +dependencies = [ + "rand_core 0.3.1", +] + [[package]] name = "rand_hc" version = "0.2.0" @@ -6710,6 +6944,50 @@ dependencies = [ "rand_core 0.6.2", ] +[[package]] +name = "rand_isaac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_jitter" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" +dependencies = [ + "libc", + "rand_core 0.4.2", + "winapi 0.3.9", +] + +[[package]] +name = "rand_os" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" +dependencies = [ + "cloudabi", + "fuchsia-cprng", + "libc", + "rand_core 0.4.2", + "rdrand", + "winapi 0.3.9", +] + +[[package]] +name = "rand_pcg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" +dependencies = [ + "autocfg 0.1.7", + "rand_core 0.4.2", +] + [[package]] name = "rand_pcg" version = "0.2.1" @@ -6728,6 +7006,15 @@ dependencies = [ "rand_core 0.6.2", ] +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +dependencies = [ + 
"rand_core 0.3.1", +] + [[package]] name = "rawpointer" version = "0.2.1" @@ -6740,7 +7027,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b0d8e0819fadc20c74ea8373106ead0600e3a67ef1fe8da56e39b9ae7275674" dependencies = [ - "autocfg", + "autocfg 1.0.1", "crossbeam-deque 0.8.0", "either", "rayon-core", @@ -6783,17 +7070,6 @@ dependencies = [ "bitflags", ] -[[package]] -name = "redox_users" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" -dependencies = [ - "getrandom 0.1.16", - "redox_syscall 0.1.57", - "rust-argon2", -] - [[package]] name = "redox_users" version = "0.4.0" @@ -6830,7 +7106,7 @@ version = "0.0.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "571f7f397d61c4755285cd37853fe8e03271c243424a907415909379659381c5" dependencies = [ - "log", + "log 0.4.14", "rustc-hash", "serde", "smallvec 1.6.1", @@ -6880,12 +7156,10 @@ dependencies = [ name = "remote-externalities" version = "0.10.0-dev" dependencies = [ - "env_logger 0.8.3", - "frame-support", - "hex", + "env_logger 0.9.0", "jsonrpsee-proc-macros", "jsonrpsee-ws-client", - "log", + "log 0.4.14", "pallet-elections-phragmen", "parity-scale-codec", "serde", @@ -6893,6 +7167,7 @@ dependencies = [ "sp-core", "sp-io", "sp-runtime", + "sp-version", "tokio 0.2.25", ] @@ -6938,9 +7213,9 @@ dependencies = [ [[package]] name = "rocksdb" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c749134fda8bfc90d0de643d59bfc841dcb3ac8a1062e12b6754bd60235c48b3" +checksum = "7a62eca5cacf2c8261128631bed9f045598d40bfbe4b29f5163f0f802f8f44a7" dependencies = [ "libc", "librocksdb-sys", @@ -6956,18 +7231,6 @@ dependencies = [ "winapi 0.3.9", ] -[[package]] -name = "rust-argon2" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb" -dependencies = [ - "base64 0.13.0", - "blake2b_simd", - "constant_time_eq", - "crossbeam-utils 0.8.3", -] - [[package]] name = "rustc-demangle" version = "0.1.18" @@ -6996,16 +7259,12 @@ dependencies = [ ] [[package]] -name = "rustls" -version = "0.18.1" +name = "rustc_version" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d1126dcf58e93cee7d098dbda643b5f92ed724f1f6a63007c1116eed6700c81" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" dependencies = [ - "base64 0.12.3", - "log", - "ring", - "sct", - "webpki", + "semver 0.11.0", ] [[package]] @@ -7015,24 +7274,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7" dependencies = [ "base64 0.13.0", - "log", + "log 0.4.14", "ring", "sct", "webpki", ] -[[package]] -name = "rustls-native-certs" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "629d439a7672da82dd955498445e496ee2096fe2117b9f796558a43fdb9e59b8" -dependencies = [ - "openssl-probe", - "rustls 0.18.1", - "schannel", - "security-framework 1.0.0", -] - [[package]] name = "rustls-native-certs" version = "0.5.0" @@ -7040,9 +7287,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a07b7c1885bd8ed3831c289b7870b13ef46fe0e856d288c30d9cc17d75a2092" dependencies = [ "openssl-probe", - "rustls 0.19.1", + "rustls", "schannel", - "security-framework 2.3.0", + "security-framework", ] [[package]] @@ -7067,7 +7314,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4da5fcb054c46f5a5dff833b129285a93d3f0179531735e6c866e8cc307d2020" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "pin-project 0.4.27", "static_assertions", ] @@ -7084,14 +7331,20 @@ version = "1.0.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6d3d055a2582e6b00ed7a31c1524040aa391092bf636328350813f3a0605215c" dependencies = [ - "rustc_version", + "rustc_version 0.2.3", ] +[[package]] +name = "safemem" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" + [[package]] name = "salsa20" -version = "0.7.2" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "399f290ffc409596022fce5ea5d4138184be4784f2b28c62c59f0d8389059a15" +checksum = "ecbd2eb639fd7cab5804a0837fe373cc2172d15437e804c054a9fb885cb923b0" dependencies = [ "cipher", ] @@ -7109,7 +7362,7 @@ dependencies = [ name = "sc-allocator" version = "4.0.0-dev" dependencies = [ - "log", + "log 0.4.14", "sp-core", "sp-wasm-interface", "thiserror", @@ -7121,21 +7374,18 @@ version = "0.10.0-dev" dependencies = [ "async-trait", "derive_more", - "either", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "ip_network", "libp2p", - "log", + "log 0.4.14", "parity-scale-codec", - "prost", - "prost-build", + "prost 0.8.0", + "prost-build 0.7.0", "quickcheck", "rand 0.7.3", "sc-client-api", "sc-network", - "sc-peerset", - "serde_json", "sp-api", "sp-authority-discovery", "sp-blockchain", @@ -7151,9 +7401,9 @@ dependencies = [ name = "sc-basic-authorship" version = "0.10.0-dev" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", - "log", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.11.1", "sc-block-builder", @@ -7185,7 +7435,6 @@ dependencies = [ "sp-inherents", "sp-runtime", "sp-state-machine", - "sp-trie", "substrate-test-runtime-client", ] @@ -7220,10 +7469,10 @@ version = "0.10.0-dev" dependencies = [ "chrono", "fdlimit", - "futures 0.3.15", + "futures 0.3.16", "hex", "libp2p", - "log", + "log 0.4.14", "names", "parity-scale-codec", "rand 0.7.3", @@ -7256,14 +7505,10 @@ dependencies = [ name = 
"sc-client-api" version = "4.0.0-dev" dependencies = [ - "derive_more", "fnv", - "futures 0.3.15", + "futures 0.3.16", "hash-db", - "kvdb", - "kvdb-memorydb", - "lazy_static", - "log", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.11.1", "sc-executor", @@ -7274,16 +7519,13 @@ dependencies = [ "sp-core", "sp-database", "sp-externalities", - "sp-inherents", "sp-keystore", "sp-runtime", "sp-state-machine", - "sp-std", "sp-storage", "sp-test-primitives", "sp-trie", "sp-utils", - "sp-version", "substrate-prometheus-endpoint", "substrate-test-runtime", "thiserror", @@ -7293,31 +7535,26 @@ dependencies = [ name = "sc-client-db" version = "0.10.0-dev" dependencies = [ - "blake2-rfc", "hash-db", "kvdb", "kvdb-memorydb", "kvdb-rocksdb", "linked-hash-map", - "log", + "log 0.4.14", "parity-db", "parity-scale-codec", - "parity-util-mem", "parking_lot 0.11.1", "quickcheck", "sc-client-api", - "sc-executor", "sc-state-db", "sp-arithmetic", "sp-blockchain", "sp-core", "sp-database", - "sp-keyring", "sp-runtime", "sp-state-machine", "sp-tracing", "sp-trie", - "substrate-prometheus-endpoint", "substrate-test-runtime-client", "tempfile", ] @@ -7327,10 +7564,10 @@ name = "sc-consensus" version = "0.10.0-dev" dependencies = [ "async-trait", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "libp2p", - "log", + "log 0.4.14", "parking_lot 0.11.1", "sc-client-api", "serde", @@ -7344,7 +7581,6 @@ dependencies = [ "sp-utils", "substrate-prometheus-endpoint", "thiserror", - "wasm-timer", ] [[package]] @@ -7353,21 +7589,18 @@ version = "0.10.0-dev" dependencies = [ "async-trait", "derive_more", - "futures 0.3.15", - "futures-timer 3.0.2", + "futures 0.3.16", "getrandom 0.2.3", - "log", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.11.1", "sc-block-builder", "sc-client-api", "sc-consensus", "sc-consensus-slots", - "sc-executor", "sc-keystore", "sc-network", "sc-network-test", - "sc-service", "sc-telemetry", "sp-api", "sp-application-crypto", @@ -7378,13 +7611,11 @@ 
dependencies = [ "sp-consensus-slots", "sp-core", "sp-inherents", - "sp-io", "sp-keyring", "sp-keystore", "sp-runtime", "sp-timestamp", "sp-tracing", - "sp-version", "substrate-prometheus-endpoint", "substrate-test-runtime-client", "tempfile", @@ -7397,16 +7628,14 @@ dependencies = [ "async-trait", "derive_more", "fork-tree", - "futures 0.3.15", - "futures-timer 3.0.2", - "log", + "futures 0.3.16", + "log 0.4.14", "merlin", "num-bigint", "num-rational 0.2.4", "num-traits", "parity-scale-codec", "parking_lot 0.11.1", - "pdqselect", "rand 0.7.3", "rand_chacha 0.2.2", "retain_mut", @@ -7415,12 +7644,9 @@ dependencies = [ "sc-consensus", "sc-consensus-epochs", "sc-consensus-slots", - "sc-consensus-uncles", - "sc-executor", "sc-keystore", "sc-network", "sc-network-test", - "sc-service", "sc-telemetry", "schnorrkel", "serde", @@ -7435,12 +7661,10 @@ dependencies = [ "sp-core", "sp-inherents", "sp-io", - "sp-keyring", "sp-keystore", "sp-runtime", "sp-timestamp", "sp-tracing", - "sp-utils", "sp-version", "substrate-prometheus-endpoint", "substrate-test-runtime-client", @@ -7452,7 +7676,7 @@ name = "sc-consensus-babe-rpc" version = "0.10.0-dev" dependencies = [ "derive_more", - "futures 0.3.15", + "futures 0.3.16", "jsonrpc-core", "jsonrpc-core-client", "jsonrpc-derive", @@ -7495,13 +7719,12 @@ dependencies = [ "assert_matches", "async-trait", "derive_more", - "futures 0.3.15", + "futures 0.3.16", "jsonrpc-core", "jsonrpc-core-client", "jsonrpc-derive", - "log", + "log 0.4.14", "parity-scale-codec", - "parking_lot 0.11.1", "sc-basic-authorship", "sc-client-api", "sc-consensus", @@ -7517,14 +7740,12 @@ dependencies = [ "sp-consensus-slots", "sp-core", "sp-inherents", - "sp-keyring", "sp-keystore", "sp-runtime", "sp-timestamp", "substrate-prometheus-endpoint", "substrate-test-runtime-client", "substrate-test-runtime-transaction-pool", - "tempfile", "tokio 0.2.25", ] @@ -7534,9 +7755,9 @@ version = "0.10.0-dev" dependencies = [ "async-trait", "derive_more", - "futures 0.3.15", 
+ "futures 0.3.16", "futures-timer 3.0.2", - "log", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.11.1", "sc-client-api", @@ -7557,16 +7778,14 @@ name = "sc-consensus-slots" version = "0.10.0-dev" dependencies = [ "async-trait", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", - "impl-trait-for-tuples", - "log", + "log 0.4.14", "parity-scale-codec", "sc-client-api", "sc-consensus", "sc-telemetry", "sp-api", - "sp-application-crypto", "sp-arithmetic", "sp-blockchain", "sp-consensus", @@ -7576,7 +7795,6 @@ dependencies = [ "sp-runtime", "sp-state-machine", "sp-timestamp", - "sp-trie", "substrate-test-runtime-client", "thiserror", ] @@ -7595,14 +7813,11 @@ dependencies = [ name = "sc-executor" version = "0.10.0-dev" dependencies = [ - "assert_matches", - "derive_more", "hex-literal", "lazy_static", - "libsecp256k1", - "log", + "libsecp256k1 0.6.0", + "log 0.4.14", "parity-scale-codec", - "parity-wasm 0.42.2", "parking_lot 0.11.1", "paste 1.0.4", "regex", @@ -7619,10 +7834,8 @@ dependencies = [ "sp-panic-handler", "sp-runtime", "sp-runtime-interface", - "sp-serializer", "sp-state-machine", "sp-tasks", - "sp-tracing", "sp-trie", "sp-version", "sp-wasm-interface", @@ -7646,6 +7859,8 @@ dependencies = [ "sp-serializer", "sp-wasm-interface", "thiserror", + "wasmer", + "wasmer-compiler-singlepass", "wasmi", ] @@ -7653,10 +7868,11 @@ dependencies = [ name = "sc-executor-wasmi" version = "0.10.0-dev" dependencies = [ - "log", + "log 0.4.14", "parity-scale-codec", "sc-allocator", "sc-executor-common", + "scoped-tls", "sp-core", "sp-runtime-interface", "sp-wasm-interface", @@ -7667,13 +7883,11 @@ dependencies = [ name = "sc-executor-wasmtime" version = "0.10.0-dev" dependencies = [ - "assert_matches", "cfg-if 1.0.0", "libc", - "log", + "log 0.4.14", "parity-scale-codec", "parity-wasm 0.42.2", - "pwasm-utils", "sc-allocator", "sc-executor-common", "sc-runtime-test", @@ -7696,13 +7910,11 @@ dependencies = [ "dyn-clone", "finality-grandpa", "fork-tree", - 
"futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", - "linked-hash-map", - "log", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.11.1", - "pin-project 1.0.5", "rand 0.8.4", "sc-block-builder", "sc-client-api", @@ -7718,21 +7930,17 @@ dependencies = [ "sp-arithmetic", "sp-blockchain", "sp-consensus", - "sp-consensus-babe", "sp-core", "sp-finality-grandpa", - "sp-inherents", "sp-keyring", "sp-keystore", "sp-runtime", - "sp-state-machine", "sp-tracing", "sp-utils", "substrate-prometheus-endpoint", "substrate-test-runtime-client", "tempfile", "tokio 0.2.25", - "wasm-timer", ] [[package]] @@ -7741,23 +7949,20 @@ version = "0.10.0-dev" dependencies = [ "derive_more", "finality-grandpa", - "futures 0.3.15", + "futures 0.3.16", "jsonrpc-core", "jsonrpc-core-client", "jsonrpc-derive", "jsonrpc-pubsub", - "lazy_static", - "log", + "log 0.4.14", "parity-scale-codec", "sc-block-builder", "sc-client-api", "sc-finality-grandpa", - "sc-network-test", "sc-rpc", "serde", "serde_json", "sp-blockchain", - "sp-consensus", "sp-core", "sp-finality-grandpa", "sp-keyring", @@ -7770,16 +7975,15 @@ name = "sc-informant" version = "0.10.0-dev" dependencies = [ "ansi_term 0.12.1", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", - "log", + "log 0.4.14", "parity-util-mem", "sc-client-api", "sc-network", "sc-transaction-pool-api", "sp-blockchain", "sp-runtime", - "wasm-timer", ] [[package]] @@ -7788,17 +7992,12 @@ version = "4.0.0-dev" dependencies = [ "async-trait", "derive_more", - "futures 0.3.15", - "futures-util", "hex", - "merlin", "parking_lot 0.11.1", - "rand 0.7.3", "serde_json", "sp-application-crypto", "sp-core", "sp-keystore", - "subtle 2.4.0", "tempfile", ] @@ -7807,7 +8006,6 @@ name = "sc-light" version = "4.0.0-dev" dependencies = [ "hash-db", - "lazy_static", "parity-scale-codec", "parking_lot 0.11.1", "sc-client-api", @@ -7829,29 +8027,26 @@ dependencies = [ "async-trait", "asynchronous-codec 0.5.0", "bitflags", - "bs58", "bytes 1.0.1", "cid", 
"derive_more", "either", - "erased-serde", "fnv", "fork-tree", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "hex", "ip_network", "libp2p", "linked-hash-map", "linked_hash_set", - "log", + "log 0.4.14", "lru", - "nohash-hasher", "parity-scale-codec", "parking_lot 0.11.1", "pin-project 1.0.5", - "prost", - "prost-build", + "prost 0.8.0", + "prost-build 0.7.0", "quickcheck", "rand 0.7.3", "sc-block-builder", @@ -7866,7 +8061,6 @@ dependencies = [ "sp-consensus", "sp-core", "sp-finality-grandpa", - "sp-keyring", "sp-runtime", "sp-test-primitives", "sp-tracing", @@ -7878,7 +8072,6 @@ dependencies = [ "thiserror", "unsigned-varint 0.6.0", "void", - "wasm-timer", "zeroize", ] @@ -7887,19 +8080,17 @@ name = "sc-network-gossip" version = "0.10.0-dev" dependencies = [ "async-std", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "libp2p", - "log", + "log 0.4.14", "lru", "quickcheck", - "rand 0.7.3", "sc-network", "sp-runtime", "substrate-prometheus-endpoint", "substrate-test-runtime-client", "tracing", - "wasm-timer", ] [[package]] @@ -7908,10 +8099,10 @@ version = "0.8.0" dependencies = [ "async-std", "async-trait", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "libp2p", - "log", + "log 0.4.14", "parking_lot 0.11.1", "rand 0.7.3", "sc-block-builder", @@ -7927,22 +8118,21 @@ dependencies = [ "sp-tracing", "substrate-test-runtime", "substrate-test-runtime-client", - "tempfile", ] [[package]] name = "sc-offchain" version = "4.0.0-dev" dependencies = [ - "bytes 0.5.6", + "bytes 1.0.1", "fnv", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "hex", - "hyper 0.13.10", + "hyper 0.14.11", "hyper-rustls", "lazy_static", - "log", + "log 0.4.14", "num_cpus", "parity-scale-codec", "parking_lot 0.11.1", @@ -7950,7 +8140,6 @@ dependencies = [ "sc-block-builder", "sc-client-api", "sc-client-db", - "sc-keystore", "sc-network", "sc-transaction-pool", "sc-transaction-pool-api", @@ -7963,16 +8152,16 @@ dependencies = [ "sp-utils", 
"substrate-test-runtime-client", "threadpool", - "tokio 0.2.25", + "tokio 1.10.0", ] [[package]] name = "sc-peerset" version = "4.0.0-dev" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "libp2p", - "log", + "log 0.4.14", "rand 0.7.3", "serde_json", "sp-utils", @@ -7983,7 +8172,7 @@ dependencies = [ name = "sc-proposer-metrics" version = "0.9.0" dependencies = [ - "log", + "log 0.4.14", "substrate-prometheus-endpoint", ] @@ -7992,21 +8181,17 @@ name = "sc-rpc" version = "4.0.0-dev" dependencies = [ "assert_matches", - "futures 0.1.31", - "futures 0.3.15", + "futures 0.3.16", "hash-db", "jsonrpc-core", "jsonrpc-pubsub", "lazy_static", - "log", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.11.1", "sc-block-builder", "sc-chain-spec", - "sc-cli", "sc-client-api", - "sc-executor", - "sc-keystore", "sc-network", "sc-rpc-api", "sc-tracing", @@ -8023,12 +8208,9 @@ dependencies = [ "sp-rpc", "sp-runtime", "sp-session", - "sp-state-machine", - "sp-tracing", "sp-utils", "sp-version", "substrate-test-runtime-client", - "tokio 0.1.22", ] [[package]] @@ -8036,12 +8218,12 @@ name = "sc-rpc-api" version = "0.10.0-dev" dependencies = [ "derive_more", - "futures 0.3.15", + "futures 0.3.16", "jsonrpc-core", "jsonrpc-core-client", "jsonrpc-derive", "jsonrpc-pubsub", - "log", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.11.1", "sc-chain-spec", @@ -8059,16 +8241,14 @@ dependencies = [ name = "sc-rpc-server" version = "4.0.0-dev" dependencies = [ - "futures 0.1.31", + "futures 0.3.16", "jsonrpc-core", "jsonrpc-http-server", "jsonrpc-ipc-server", "jsonrpc-pubsub", "jsonrpc-ws-server", - "log", - "serde", + "log 0.4.14", "serde_json", - "sp-runtime", "substrate-prometheus-endpoint", ] @@ -8093,14 +8273,12 @@ dependencies = [ "async-trait", "directories", "exit-future", - "futures 0.1.31", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", "hash-db", "jsonrpc-core", "jsonrpc-pubsub", - "lazy_static", - "log", + "log 0.4.14", "parity-scale-codec", 
"parity-util-mem", "parking_lot 0.11.1", @@ -8112,7 +8290,6 @@ dependencies = [ "sc-client-db", "sc-consensus", "sc-executor", - "sc-finality-grandpa", "sc-informant", "sc-keystore", "sc-light", @@ -8131,12 +8308,9 @@ dependencies = [ "sp-block-builder", "sp-blockchain", "sp-consensus", - "sp-consensus-babe", "sp-core", "sp-externalities", - "sp-finality-grandpa", "sp-inherents", - "sp-io", "sp-keystore", "sp-runtime", "sp-session", @@ -8156,7 +8330,6 @@ dependencies = [ "tokio 0.2.25", "tracing", "tracing-futures", - "wasm-timer", ] [[package]] @@ -8165,9 +8338,9 @@ version = "2.0.0" dependencies = [ "fdlimit", "futures 0.1.31", - "futures 0.3.15", + "futures 0.3.16", "hex-literal", - "log", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.11.1", "sc-block-builder", @@ -8200,14 +8373,13 @@ dependencies = [ name = "sc-state-db" version = "0.10.0-dev" dependencies = [ - "log", + "log 0.4.14", "parity-scale-codec", "parity-util-mem", "parity-util-mem-derive", "parking_lot 0.11.1", "sc-client-api", "sp-core", - "thiserror", ] [[package]] @@ -8236,17 +8408,15 @@ name = "sc-telemetry" version = "4.0.0-dev" dependencies = [ "chrono", - "futures 0.3.15", + "futures 0.3.16", "libp2p", - "log", + "log 0.4.14", "parking_lot 0.11.1", "pin-project 1.0.5", "rand 0.7.3", "serde", "serde_json", - "take_mut", "thiserror", - "void", "wasm-timer", ] @@ -8256,33 +8426,27 @@ version = "4.0.0-dev" dependencies = [ "ansi_term 0.12.1", "atty", - "erased-serde", "lazy_static", - "log", + "log 0.4.14", "once_cell", "parking_lot 0.11.1", "regex", "rustc-hash", "sc-client-api", "sc-rpc-server", - "sc-telemetry", "sc-tracing-proc-macro", "serde", - "serde_json", "sp-api", - "sp-block-builder", "sp-blockchain", "sp-core", "sp-rpc", "sp-runtime", - "sp-storage", "sp-tracing", "thiserror", "tracing", "tracing-log", "tracing-subscriber", "wasm-bindgen", - "wasm-timer", "web-sys", ] @@ -8302,12 +8466,11 @@ version = "4.0.0-dev" dependencies = [ "assert_matches", "criterion", - "derive_more", - 
"futures 0.3.15", + "futures 0.3.16", "hex", "intervalier", "linked-hash-map", - "log", + "log 0.4.14", "parity-scale-codec", "parity-util-mem", "parking_lot 0.11.1", @@ -8329,7 +8492,6 @@ dependencies = [ "substrate-test-runtime-client", "substrate-test-runtime-transaction-pool", "thiserror", - "wasm-timer", ] [[package]] @@ -8337,9 +8499,8 @@ name = "sc-transaction-pool-api" version = "4.0.0-dev" dependencies = [ "derive_more", - "futures 0.3.15", - "log", - "parity-scale-codec", + "futures 0.3.16", + "log 0.4.14", "serde", "sp-blockchain", "sp-runtime", @@ -8352,7 +8513,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2e62ff266e136db561a007c84569985805f84a1d5a08278e52c36aacb6e061b" dependencies = [ - "bitvec", + "bitvec 0.20.2", "cfg-if 1.0.0", "derive_more", "parity-scale-codec", @@ -8431,19 +8592,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "security-framework" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad502866817f0575705bd7be36e2b2535cc33262d493aa733a2ec862baa2bc2b" -dependencies = [ - "bitflags", - "core-foundation 0.7.0", - "core-foundation-sys 0.7.0", - "libc", - "security-framework-sys 1.0.0", -] - [[package]] name = "security-framework" version = "2.3.0" @@ -8451,20 +8599,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b239a3d5db51252f6f48f42172c65317f37202f4a21021bf5f9d40a408f4592c" dependencies = [ "bitflags", - "core-foundation 0.9.1", - "core-foundation-sys 0.8.2", - "libc", - "security-framework-sys 2.3.0", -] - -[[package]] -name = "security-framework-sys" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51ceb04988b17b6d1dcd555390fa822ca5637b4a14e1f5099f13d351bed4d6c7" -dependencies = [ - "core-foundation-sys 0.7.0", + "core-foundation", + "core-foundation-sys", "libc", + "security-framework-sys", ] [[package]] @@ -8473,7 +8611,7 @@ version = 
"2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e4effb91b4b8b6fb7732e670b6cee160278ff8e6bf485c7805d9e319d76e284" dependencies = [ - "core-foundation-sys 0.8.2", + "core-foundation-sys", "libc", ] @@ -8520,12 +8658,6 @@ dependencies = [ "pest", ] -[[package]] -name = "send_wrapper" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" - [[package]] name = "serde" version = "1.0.126" @@ -8535,6 +8667,15 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde_bytes" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" +dependencies = [ + "serde", +] + [[package]] name = "serde_cbor" version = "0.11.1" @@ -8587,11 +8728,17 @@ checksum = "dfebf75d25bd900fd1e7d11501efab59bc846dbc76196839663e6637bba9f25f" dependencies = [ "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpuid-bool 0.1.2", + "cpuid-bool", "digest 0.9.0", "opaque-debug 0.3.0", ] +[[package]] +name = "sha1" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" + [[package]] name = "sha2" version = "0.8.2" @@ -8612,7 +8759,7 @@ checksum = "fa827a14b29ab7f44778d14a88d3cb76e949c45083f7dbfa507d0cb699dc12de" dependencies = [ "block-buffer 0.9.0", "cfg-if 1.0.0", - "cpuid-bool 0.1.2", + "cpuid-bool", "digest 0.9.0", "opaque-debug 0.3.0", ] @@ -8640,9 +8787,9 @@ dependencies = [ [[package]] name = "shlex" -version = "0.1.1" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" +checksum = "42a568c8f2cd051a4d283bd6eb0343ac214c1b0f1ac19f93e1175b2dee38c73d" [[package]] name = "signal-hook" @@ -8711,19 +8858,25 @@ version = "1.6.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e" +[[package]] +name = "snap" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451" + [[package]] name = "snow" -version = "0.7.2" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "795dd7aeeee24468e5a32661f6d27f7b5cbed802031b2d7640c7b10f8fb2dd50" +checksum = "6142f7c25e94f6fd25a32c3348ec230df9109b463f59c8c7acc4bd34936babb7" dependencies = [ "aes-gcm", "blake2", "chacha20poly1305", - "rand 0.7.3", - "rand_core 0.5.1", + "rand 0.8.4", + "rand_core 0.6.2", "ring", - "rustc_version", + "rustc_version 0.3.3", "sha2 0.9.3", "subtle 2.4.0", "x25519-dalek", @@ -8759,9 +8912,9 @@ dependencies = [ "base64 0.12.3", "bytes 0.5.6", "flate2", - "futures 0.3.15", + "futures 0.3.16", "httparse", - "log", + "log 0.4.14", "rand 0.7.3", "sha-1 0.9.4", ] @@ -8774,9 +8927,9 @@ checksum = "a74e48087dbeed4833785c2f3352b59140095dc192dce966a3bfc155020a439f" dependencies = [ "base64 0.13.0", "bytes 1.0.1", - "futures 0.3.15", + "futures 0.3.16", "httparse", - "log", + "log 0.4.14", "rand 0.8.4", "sha-1 0.9.4", ] @@ -8786,7 +8939,7 @@ name = "sp-api" version = "4.0.0-dev" dependencies = [ "hash-db", - "log", + "log 0.4.14", "parity-scale-codec", "sp-api-proc-macro", "sp-core", @@ -8814,13 +8967,12 @@ name = "sp-api-test" version = "2.0.1" dependencies = [ "criterion", - "futures 0.3.15", - "log", + "futures 0.3.16", + "log 0.4.14", "parity-scale-codec", "rustversion", "sc-block-builder", "sp-api", - "sp-blockchain", "sp-consensus", "sp-core", "sp-runtime", @@ -8867,7 +9019,6 @@ dependencies = [ "rand 0.7.3", "scale-info", "serde", - "serde_json", "sp-debug-derive", "sp-std", "static_assertions", @@ -8879,7 +9030,6 @@ version = "2.0.0" dependencies = [ "honggfuzz", "num-bigint", - "num-traits", 
"primitive-types", "sp-arithmetic", ] @@ -8922,8 +9072,8 @@ dependencies = [ name = "sp-blockchain" version = "4.0.0-dev" dependencies = [ - "futures 0.3.15", - "log", + "futures 0.3.16", + "log 0.4.14", "lru", "parity-scale-codec", "parking_lot 0.11.1", @@ -8940,25 +9090,18 @@ name = "sp-consensus" version = "0.10.0-dev" dependencies = [ "async-trait", - "futures 0.3.15", + "futures 0.3.16", "futures-timer 3.0.2", - "log", + "log 0.4.14", "parity-scale-codec", - "parking_lot 0.11.1", - "serde", - "sp-api", "sp-core", "sp-inherents", "sp-runtime", "sp-state-machine", "sp-std", "sp-test-primitives", - "sp-trie", - "sp-utils", "sp-version", - "substrate-prometheus-endpoint", "thiserror", - "wasm-timer", ] [[package]] @@ -9042,24 +9185,22 @@ dependencies = [ "criterion", "dyn-clonable", "ed25519-dalek", - "futures 0.3.15", + "futures 0.3.16", "hash-db", "hash256-std-hasher", "hex", "hex-literal", "impl-serde", "lazy_static", - "libsecp256k1", - "log", + "libsecp256k1 0.6.0", + "log 0.4.14", "merlin", "num-traits", "parity-scale-codec", "parity-util-mem", "parking_lot 0.11.1", - "pretty_assertions 0.6.1", "primitive-types", "rand 0.7.3", - "rand_chacha 0.2.2", "regex", "scale-info", "schnorrkel", @@ -9114,7 +9255,7 @@ name = "sp-finality-grandpa" version = "4.0.0-dev" dependencies = [ "finality-grandpa", - "log", + "log 0.4.14", "parity-scale-codec", "scale-info", "serde", @@ -9131,7 +9272,7 @@ name = "sp-inherents" version = "4.0.0-dev" dependencies = [ "async-trait", - "futures 0.3.15", + "futures 0.3.16", "impl-trait-for-tuples", "parity-scale-codec", "sp-core", @@ -9144,10 +9285,10 @@ dependencies = [ name = "sp-io" version = "4.0.0-dev" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "hash-db", - "libsecp256k1", - "log", + "libsecp256k1 0.6.0", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.11.1", "sp-core", @@ -9171,7 +9312,7 @@ dependencies = [ "lazy_static", "sp-core", "sp-runtime", - "strum", + "strum 0.20.0", ] [[package]] @@ -9180,7 +9321,7 @@ 
version = "0.10.0-dev" dependencies = [ "async-trait", "derive_more", - "futures 0.3.15", + "futures 0.3.16", "merlin", "parity-scale-codec", "parking_lot 0.11.1", @@ -9210,40 +9351,38 @@ dependencies = [ "serde", "sp-arithmetic", "sp-core", - "sp-npos-elections-compact", + "sp-npos-elections-solution-type", "sp-runtime", "sp-std", "substrate-test-utils", ] [[package]] -name = "sp-npos-elections-compact" -version = "4.0.0-dev" +name = "sp-npos-elections-fuzzer" +version = "2.0.0-alpha.5" dependencies = [ + "honggfuzz", "parity-scale-codec", - "proc-macro-crate 1.0.0", - "proc-macro2", - "quote", + "rand 0.7.3", "scale-info", - "sp-arithmetic", "sp-npos-elections", - "syn", - "trybuild", + "sp-runtime", + "structopt", ] [[package]] -name = "sp-npos-elections-fuzzer" -version = "2.0.0-alpha.5" +name = "sp-npos-elections-solution-type" +version = "4.0.0-dev" dependencies = [ - "honggfuzz", "parity-scale-codec", - "rand 0.7.3", + "proc-macro-crate 1.0.0", + "proc-macro2", + "quote", "scale-info", "sp-arithmetic", "sp-npos-elections", - "sp-runtime", - "sp-std", - "structopt", + "syn", + "trybuild", ] [[package]] @@ -9253,7 +9392,6 @@ dependencies = [ "sp-api", "sp-core", "sp-runtime", - "sp-state-machine", ] [[package]] @@ -9271,7 +9409,6 @@ dependencies = [ "serde", "serde_json", "sp-core", - "tracing-core", ] [[package]] @@ -9281,7 +9418,7 @@ dependencies = [ "either", "hash256-std-hasher", "impl-trait-for-tuples", - "log", + "log 0.4.14", "parity-scale-codec", "parity-util-mem", "paste 1.0.4", @@ -9339,7 +9476,6 @@ version = "2.0.0" dependencies = [ "sc-executor", "sc-executor-common", - "sp-core", "sp-io", "sp-runtime", "sp-runtime-interface", @@ -9423,7 +9559,7 @@ version = "0.10.0-dev" dependencies = [ "hash-db", "hex-literal", - "log", + "log 0.4.14", "num-traits", "parity-scale-codec", "parking_lot 0.11.1", @@ -9462,7 +9598,7 @@ dependencies = [ name = "sp-tasks" version = "4.0.0-dev" dependencies = [ - "log", + "log 0.4.14", "parity-scale-codec", "sp-core", 
"sp-externalities", @@ -9489,14 +9625,13 @@ version = "4.0.0-dev" dependencies = [ "async-trait", "futures-timer 3.0.2", - "log", + "log 0.4.14", "parity-scale-codec", "sp-api", "sp-inherents", "sp-runtime", "sp-std", "thiserror", - "wasm-timer", ] [[package]] @@ -9504,7 +9639,7 @@ name = "sp-tracing" version = "4.0.0-dev" dependencies = [ "erased-serde", - "log", + "log 0.4.14", "parity-scale-codec", "parking_lot 0.10.2", "serde", @@ -9529,7 +9664,7 @@ name = "sp-transaction-storage-proof" version = "4.0.0-dev" dependencies = [ "async-trait", - "log", + "log 0.4.14", "parity-scale-codec", "scale-info", "sp-core", @@ -9562,8 +9697,7 @@ dependencies = [ name = "sp-utils" version = "4.0.0-dev" dependencies = [ - "futures 0.3.15", - "futures-core", + "futures 0.3.16", "futures-timer 3.0.2", "lazy_static", "prometheus", @@ -9589,7 +9723,6 @@ name = "sp-version-proc-macro" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "proc-macro-crate 1.0.0", "proc-macro2", "quote", "sp-version", @@ -9638,29 +9771,16 @@ dependencies = [ ] [[package]] -name = "stream-cipher" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c80e15f898d8d8f25db24c253ea615cc14acf418ff307822995814e7d42cfa89" -dependencies = [ - "block-cipher", - "generic-array 0.14.4", -] - -[[package]] -name = "string" -version = "0.2.1" +name = "strsim" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d24114bfcceb867ca7f71a0d3fe45d45619ec47a6fbfa98cb14e14250bfa5d6d" -dependencies = [ - "bytes 0.4.12", -] +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" [[package]] name = "strsim" -version = "0.8.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "structopt" @@ -9692,9 
+9812,15 @@ version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7318c509b5ba57f18533982607f24070a55d353e90d4cae30c467cdb2ad5ac5c" dependencies = [ - "strum_macros", + "strum_macros 0.20.1", ] +[[package]] +name = "strum" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aaf86bbcfd1fa9670b7a129f64fc0c9fcbbfe4f1bc4210e9e98fe71ffc12cde2" + [[package]] name = "strum_macros" version = "0.20.1" @@ -9707,6 +9833,18 @@ dependencies = [ "syn", ] +[[package]] +name = "strum_macros" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d06aaeeee809dbc59eb4556183dd927df67db1540de5be8d3ec0b6636358a5ec" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "subkey" version = "2.0.1" @@ -9728,31 +9866,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "substrate-browser-utils" -version = "0.10.0-dev" -dependencies = [ - "chrono", - "console_error_panic_hook", - "futures 0.1.31", - "futures 0.3.15", - "futures-timer 3.0.2", - "getrandom 0.2.3", - "js-sys", - "kvdb-memorydb", - "libp2p-wasm-ext", - "log", - "rand 0.7.3", - "sc-chain-spec", - "sc-informant", - "sc-network", - "sc-service", - "sc-tracing", - "sp-database", - "wasm-bindgen", - "wasm-bindgen-futures", -] - [[package]] name = "substrate-build-script-utils" version = "3.0.0" @@ -9778,9 +9891,8 @@ version = "3.0.0" dependencies = [ "frame-support", "frame-system", - "futures 0.3.15", + "futures 0.3.16", "jsonrpc-client-transports", - "jsonrpc-core", "parity-scale-codec", "sc-rpc-api", "scale-info", @@ -9794,17 +9906,16 @@ name = "substrate-frame-rpc-system" version = "4.0.0-dev" dependencies = [ "frame-system-rpc-runtime-api", - "futures 0.3.15", + "futures 0.3.16", "jsonrpc-core", "jsonrpc-core-client", "jsonrpc-derive", - "log", + "log 0.4.14", "parity-scale-codec", "sc-client-api", "sc-rpc-api", "sc-transaction-pool", "sc-transaction-pool-api", 
- "serde", "sp-api", "sp-block-builder", "sp-blockchain", @@ -9821,10 +9932,10 @@ dependencies = [ "async-std", "derive_more", "futures-util", - "hyper 0.13.10", - "log", + "hyper 0.14.11", + "log 0.4.14", "prometheus", - "tokio 0.2.25", + "tokio 1.10.0", ] [[package]] @@ -9832,9 +9943,7 @@ name = "substrate-test-client" version = "2.0.1" dependencies = [ "async-trait", - "futures 0.1.31", - "futures 0.3.15", - "hash-db", + "futures 0.3.16", "hex", "parity-scale-codec", "sc-client-api", @@ -9863,8 +9972,8 @@ dependencies = [ "frame-support", "frame-system", "frame-system-rpc-runtime-api", - "futures 0.3.15", - "log", + "futures 0.3.16", + "log 0.4.14", "memory-db", "pallet-babe", "pallet-timestamp", @@ -9905,13 +10014,12 @@ dependencies = [ name = "substrate-test-runtime-client" version = "2.0.0" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "parity-scale-codec", "sc-block-builder", "sc-client-api", "sc-consensus", "sc-light", - "sc-service", "sp-api", "sp-blockchain", "sp-consensus", @@ -9926,7 +10034,7 @@ name = "substrate-test-runtime-transaction-pool" version = "2.0.0" dependencies = [ "derive_more", - "futures 0.3.15", + "futures 0.3.16", "parity-scale-codec", "parking_lot 0.11.1", "sc-transaction-pool", @@ -9940,7 +10048,7 @@ dependencies = [ name = "substrate-test-utils" version = "4.0.0-dev" dependencies = [ - "futures 0.3.15", + "futures 0.3.16", "sc-service", "substrate-test-utils-derive", "tokio 0.2.25", @@ -9970,7 +10078,6 @@ name = "substrate-wasm-builder" version = "5.0.0-dev" dependencies = [ "ansi_term 0.12.1", - "atty", "build-helper", "cargo_metadata", "sp-maybe-compressed-blob", @@ -10026,18 +10133,18 @@ dependencies = [ "unicode-xid", ] -[[package]] -name = "take_mut" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" - [[package]] name = "tap" version = "1.0.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "target-lexicon" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "422045212ea98508ae3d28025bc5aaa2bd4a9cdaecd442a08da2ee620ee9ea95" + [[package]] name = "target-lexicon" version = "0.12.0" @@ -10072,9 +10179,9 @@ name = "test-runner" version = "0.9.0" dependencies = [ "frame-system", - "futures 0.3.15", + "futures 0.3.16", "jsonrpc-core", - "log", + "log 0.4.14", "num-traits", "sc-basic-authorship", "sc-cli", @@ -10101,7 +10208,6 @@ dependencies = [ "sp-finality-grandpa", "sp-inherents", "sp-keyring", - "sp-keystore", "sp-offchain", "sp-runtime", "sp-runtime-interface", @@ -10117,31 +10223,20 @@ name = "test-runner-example" version = "0.1.0" dependencies = [ "frame-benchmarking", - "frame-support", "frame-system", - "log", "node-cli", "node-primitives", "node-runtime", - "pallet-balances", - "pallet-sudo", "pallet-transaction-payment", - "sc-client-api", "sc-consensus", "sc-consensus-babe", "sc-consensus-manual-seal", "sc-executor", "sc-finality-grandpa", - "sc-informant", - "sc-network", "sc-service", - "sp-api", "sp-consensus-babe", - "sp-inherents", "sp-keyring", - "sp-keystore", "sp-runtime", - "sp-timestamp", "test-runner", ] @@ -10263,7 +10358,7 @@ checksum = "5a09c0b5bb588872ab2f09afa13ee6e9dac11e10a0ec9e8e3ba39a5a5d530af6" dependencies = [ "bytes 0.4.12", "futures 0.1.31", - "mio", + "mio 0.6.23", "num_cpus", "tokio-codec", "tokio-current-thread", @@ -10287,40 +10382,37 @@ checksum = "6703a273949a90131b290be1fe7b039d0fc884aa1935860dfcbe056f28cd8092" dependencies = [ "bytes 0.5.6", "fnv", - "futures-core", "iovec", "lazy_static", "libc", - "memchr", - "mio", + "mio 0.6.23", "mio-uds", "num_cpus", "pin-project-lite 0.1.12", "signal-hook-registry", "slab", - "tokio-macros", + "tokio-macros 0.2.6", "winapi 0.3.9", ] [[package]] name = "tokio" -version = 
"1.6.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd3076b5c8cc18138b8f8814895c11eb4de37114a5d127bafdc5e55798ceef37" +checksum = "01cf844b23c6131f624accf65ce0e4e9956a8bb329400ea5bcc26ae3a5c20b0b" dependencies = [ - "autocfg", + "autocfg 1.0.1", + "bytes 1.0.1", + "libc", + "memchr", + "mio 0.7.13", + "num_cpus", + "once_cell", + "parking_lot 0.11.1", "pin-project-lite 0.2.6", -] - -[[package]] -name = "tokio-buf" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fb220f46c53859a4b7ec083e41dec9778ff0b1851c0942b211edb89e0ccdc46" -dependencies = [ - "bytes 0.4.12", - "either", - "futures 0.1.31", + "signal-hook-registry", + "tokio-macros 1.3.0", + "winapi 0.3.9", ] [[package]] @@ -10373,7 +10465,7 @@ checksum = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674" dependencies = [ "bytes 0.4.12", "futures 0.1.31", - "log", + "log 0.4.14", ] [[package]] @@ -10388,16 +10480,24 @@ dependencies = [ ] [[package]] -name = "tokio-named-pipes" -version = "0.1.0" +name = "tokio-macros" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d282d483052288b2308ba5ee795f5673b159c9bdf63c385a05609da782a5eae" +checksum = "54473be61f4ebe4efd09cec9bd5d16fa51d70ea0192213d754d2d500457db110" dependencies = [ - "bytes 0.4.12", - "futures 0.1.31", - "mio", - "mio-named-pipes", - "tokio 0.1.22", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d995660bd2b7f8c1568414c1126076c13fbb725c40112dc0120b78eb9b717b" +dependencies = [ + "native-tls", + "tokio 1.10.0", ] [[package]] @@ -10409,8 +10509,8 @@ dependencies = [ "crossbeam-utils 0.7.2", "futures 0.1.31", "lazy_static", - "log", - "mio", + "log 0.4.14", + "mio 0.6.23", "num_cpus", "parking_lot 0.9.0", "slab", @@ -10421,35 +10521,36 @@ dependencies = [ 
[[package]] name = "tokio-rustls" -version = "0.14.1" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e12831b255bcfa39dc0436b01e19fea231a37db570686c06ee72c423479f889a" +checksum = "03d15e5669243a45f630a5167d101b942174ca94b615445b2057eace1c818736" dependencies = [ "futures-core", - "rustls 0.18.1", + "rustls", "tokio 0.2.25", "webpki", ] [[package]] name = "tokio-rustls" -version = "0.15.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03d15e5669243a45f630a5167d101b942174ca94b615445b2057eace1c818736" +checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" dependencies = [ - "futures-core", - "rustls 0.19.1", - "tokio 0.2.25", + "rustls", + "tokio 1.10.0", "webpki", ] [[package]] -name = "tokio-service" -version = "0.1.0" +name = "tokio-stream" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24da22d077e0f15f55162bdbdc661228c1581892f52074fb242678d015b45162" +checksum = "7b2f3f698253f03119ac0102beaa64f67a67e08074d03a22d18784104543727f" dependencies = [ - "futures 0.1.31", + "futures-core", + "pin-project-lite 0.2.6", + "tokio 1.10.0", ] [[package]] @@ -10471,7 +10572,7 @@ dependencies = [ "bytes 0.4.12", "futures 0.1.31", "iovec", - "mio", + "mio 0.6.23", "tokio-io", "tokio-reactor", ] @@ -10487,7 +10588,7 @@ dependencies = [ "crossbeam-utils 0.7.2", "futures 0.1.31", "lazy_static", - "log", + "log 0.4.14", "num_cpus", "slab", "tokio-executor", @@ -10505,6 +10606,17 @@ dependencies = [ "tokio-executor", ] +[[package]] +name = "tokio-tls" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "354b8cd83825b3c20217a9dc174d6a0c67441a2fae5c41bcb1ea6679f6ae0f7c" +dependencies = [ + "futures 0.1.31", + "native-tls", + "tokio-io", +] + [[package]] name = "tokio-udp" version = "0.1.6" @@ -10513,8 +10625,8 @@ checksum = 
"e2a0b10e610b39c38b031a2fcab08e4b82f16ece36504988dcbd81dbba650d82" dependencies = [ "bytes 0.4.12", "futures 0.1.31", - "log", - "mio", + "log 0.4.14", + "mio 0.6.23", "tokio-codec", "tokio-io", "tokio-reactor", @@ -10530,8 +10642,8 @@ dependencies = [ "futures 0.1.31", "iovec", "libc", - "log", - "mio", + "log 0.4.14", + "mio 0.6.23", "mio-uds", "tokio-codec", "tokio-io", @@ -10548,11 +10660,25 @@ dependencies = [ "futures-core", "futures-io", "futures-sink", - "log", + "log 0.4.14", "pin-project-lite 0.1.12", "tokio 0.2.25", ] +[[package]] +name = "tokio-util" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1caa0b0c8d94a049db56b5acf8cba99dc0623aab1b26d5b5f5e2d945846b3592" +dependencies = [ + "bytes 1.0.1", + "futures-core", + "futures-sink", + "log 0.4.14", + "pin-project-lite 0.2.6", + "tokio 1.10.0", +] + [[package]] name = "toml" version = "0.5.8" @@ -10575,7 +10701,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01ebdc2bb4498ab1ab5f5b73c5803825e60199229ccba0698170e3be0e7f959f" dependencies = [ "cfg-if 1.0.0", - "log", "pin-project-lite 0.2.6", "tracing-attributes", "tracing-core", @@ -10618,7 +10743,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3" dependencies = [ "lazy_static", - "log", + "log 0.4.14", "tracing-core", ] @@ -10634,9 +10759,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.2.18" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa5553bf0883ba7c9cbe493b085c29926bd41b66afc31ff72cf17ff4fb60dcd5" +checksum = "b9cbe87a2fa7e35900ce5de20220a582a9483a7063811defce79d7cbd59d4cfe" dependencies = [ "ansi_term 0.12.1", "chrono", @@ -10654,6 +10779,12 @@ dependencies = [ "tracing-serde", ] +[[package]] +name = "traitobject" +version = "0.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "efd1f82c56340fdf16f2a953d7bda4f8fdffba13d93b00844c25572110b26079" + [[package]] name = "treeline" version = "0.1.0" @@ -10684,7 +10815,7 @@ checksum = "9eac131e334e81b6b3be07399482042838adcd7957aa0010231d0813e39e02fa" dependencies = [ "hash-db", "hashbrown 0.11.2", - "log", + "log 0.4.14", "rustc-hex", "smallvec 1.6.1", ] @@ -10724,7 +10855,7 @@ dependencies = [ "idna 0.2.2", "ipnet", "lazy_static", - "log", + "log 0.4.14", "rand 0.8.4", "smallvec 1.6.1", "thiserror", @@ -10742,7 +10873,7 @@ dependencies = [ "futures-util", "ipconfig", "lazy_static", - "log", + "log 0.4.14", "lru-cache", "parking_lot 0.11.1", "resolv-conf", @@ -10761,20 +10892,15 @@ checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" name = "try-runtime-cli" version = "0.10.0-dev" dependencies = [ - "frame-try-runtime", - "log", + "log 0.4.14", "parity-scale-codec", "remote-externalities", "sc-chain-spec", "sc-cli", - "sc-client-api", "sc-executor", "sc-service", "serde", - "sp-api", - "sp-blockchain", "sp-core", - "sp-externalities", "sp-keystore", "sp-runtime", "sp-state-machine", @@ -10807,6 +10933,12 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "typeable" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887" + [[package]] name = "typenum" version = "1.12.0" @@ -10831,13 +10963,22 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "unicase" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33" +dependencies = [ + "version_check 0.1.5", +] + [[package]] name = "unicase" version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" dependencies = [ - 
"version_check", + "version_check 0.9.2", ] [[package]] @@ -10972,6 +11113,12 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" +[[package]] +name = "version_check" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" + [[package]] name = "version_check" version = "0.9.2" @@ -11010,24 +11157,13 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "want" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230" -dependencies = [ - "futures 0.1.31", - "log", - "try-lock", -] - [[package]] name = "want" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" dependencies = [ - "log", + "log 0.4.14", "try-lock", ] @@ -11050,8 +11186,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9" dependencies = [ "cfg-if 1.0.0", - "serde", - "serde_json", "wasm-bindgen-macro", ] @@ -11063,7 +11197,7 @@ checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae" dependencies = [ "bumpalo", "lazy_static", - "log", + "log 0.4.14", "proc-macro2", "quote", "syn", @@ -11112,53 +11246,222 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489" [[package]] -name = "wasm-bindgen-test" -version = "0.3.20" +name = "wasm-gc-api" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0c32691b6c7e6c14e7f8fd55361a9088b507aa49620fcd06c09b3a1082186b9" +dependencies = [ + "log 0.4.14", + "parity-wasm 0.32.0", + 
"rustc-demangle", +] + +[[package]] +name = "wasm-timer" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0d4da138503a4cf86801b94d95781ee3619faa8feca830569cc6b54997b8b5c" +checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" dependencies = [ - "console_error_panic_hook", + "futures 0.3.16", "js-sys", - "scoped-tls", + "parking_lot 0.11.1", + "pin-utils", "wasm-bindgen", "wasm-bindgen-futures", - "wasm-bindgen-test-macro", + "web-sys", +] + +[[package]] +name = "wasmer" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a70cfae554988d904d64ca17ab0e7cd652ee5c8a0807094819c1ea93eb9d6866" +dependencies = [ + "cfg-if 0.1.10", + "indexmap", + "more-asserts", + "target-lexicon 0.11.2", + "thiserror", + "wasmer-compiler", + "wasmer-compiler-cranelift", + "wasmer-derive", + "wasmer-engine", + "wasmer-engine-jit", + "wasmer-engine-native", + "wasmer-types", + "wasmer-vm", + "wat", + "winapi 0.3.9", +] + +[[package]] +name = "wasmer-compiler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b7732a9cab472bd921d5a0c422f45b3d03f62fa2c40a89e0770cef6d47e383e" +dependencies = [ + "enumset", + "serde", + "serde_bytes", + "smallvec 1.6.1", + "target-lexicon 0.11.2", + "thiserror", + "wasmer-types", + "wasmer-vm", + "wasmparser 0.65.0", +] + +[[package]] +name = "wasmer-compiler-cranelift" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48cb9395f094e1d81534f4c5e330ed4cdb424e8df870d29ad585620284f5fddb" +dependencies = [ + "cranelift-codegen 0.68.0", + "cranelift-frontend 0.68.0", + "gimli 0.22.0", + "more-asserts", + "rayon", + "serde", + "smallvec 1.6.1", + "tracing", + "wasmer-compiler", + "wasmer-types", + "wasmer-vm", +] + +[[package]] +name = "wasmer-compiler-singlepass" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "426ae6ef0f606ca815510f3e2ef6f520e217514bfb7a664defe180b9a9e75d07" +dependencies = [ + "byteorder", + "dynasm", + "dynasmrt", + "lazy_static", + "more-asserts", + "rayon", + "serde", + "smallvec 1.6.1", + "wasmer-compiler", + "wasmer-types", + "wasmer-vm", ] [[package]] -name = "wasm-bindgen-test-macro" -version = "0.3.20" +name = "wasmer-derive" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3199c33f06500c731d5544664c24d0c2b742b98debc6b1c6f0c6d6e8fb7c19b" +checksum = "d8b86dcd2c3efdb8390728a2b56f762db07789aaa5aa872a9dc776ba3a7912ed" dependencies = [ + "proc-macro-error 1.0.4", "proc-macro2", "quote", + "syn", ] [[package]] -name = "wasm-gc-api" -version = "0.1.11" +name = "wasmer-engine" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c32691b6c7e6c14e7f8fd55361a9088b507aa49620fcd06c09b3a1082186b9" +checksum = "efe4667d6bd888f26ae8062a63a9379fa697415b4b4e380f33832e8418fd71b5" dependencies = [ - "log", - "parity-wasm 0.32.0", + "backtrace", + "bincode", + "lazy_static", + "memmap2", + "more-asserts", "rustc-demangle", + "serde", + "serde_bytes", + "target-lexicon 0.11.2", + "thiserror", + "wasmer-compiler", + "wasmer-types", + "wasmer-vm", ] [[package]] -name = "wasm-timer" -version = "0.2.5" +name = "wasmer-engine-jit" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be0ecb0db480561e9a7642b5d3e4187c128914e58aa84330b9493e3eb68c5e7f" +checksum = "26770be802888011b4a3072f2a282fc2faa68aa48c71b3db6252a3937a85f3da" dependencies = [ - "futures 0.3.15", - "js-sys", - "parking_lot 0.11.1", - "pin-utils", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", + "bincode", + "cfg-if 0.1.10", + "region", + "serde", + "serde_bytes", + "wasmer-compiler", + "wasmer-engine", + "wasmer-types", + "wasmer-vm", + "winapi 0.3.9", +] + +[[package]] +name = "wasmer-engine-native" +version = "1.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bb4083a6c69f2cd4b000b82a80717f37c6cc2e536aee3a8ffe9af3edc276a8b" +dependencies = [ + "bincode", + "cfg-if 0.1.10", + "leb128", + "libloading 0.6.7", + "serde", + "tempfile", + "tracing", + "wasmer-compiler", + "wasmer-engine", + "wasmer-object", + "wasmer-types", + "wasmer-vm", + "which", +] + +[[package]] +name = "wasmer-object" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abf8e0c12b82ff81ebecd30d7e118be5fec871d6de885a90eeb105df0a769a7b" +dependencies = [ + "object 0.22.0", + "thiserror", + "wasmer-compiler", + "wasmer-types", +] + +[[package]] +name = "wasmer-types" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f4ac28c2951cd792c18332f03da523ed06b170f5cf6bb5b1bdd7e36c2a8218" +dependencies = [ + "cranelift-entity 0.68.0", + "serde", + "thiserror", +] + +[[package]] +name = "wasmer-vm" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7635ba0b6d2fd325f588d69a950ad9fa04dddbf6ad08b6b2a183146319bf6ae" +dependencies = [ + "backtrace", + "cc", + "cfg-if 0.1.10", + "indexmap", + "libc", + "memoffset 0.6.1", + "more-asserts", + "region", + "serde", + "thiserror", + "wasmer-types", + "winapi 0.3.9", ] [[package]] @@ -11186,6 +11489,12 @@ dependencies = [ "parity-wasm 0.42.2", ] +[[package]] +name = "wasmparser" +version = "0.65.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc2fe6350834b4e528ba0901e7aa405d78b89dc1fa3145359eb4de0e323fcf" + [[package]] name = "wasmparser" version = "0.78.2" @@ -11206,15 +11515,15 @@ dependencies = [ "indexmap", "lazy_static", "libc", - "log", + "log 0.4.14", "paste 1.0.4", "psm", "region", "rustc-demangle", "serde", "smallvec 1.6.1", - "target-lexicon", - "wasmparser", + "target-lexicon 0.12.0", + "wasmparser 0.78.2", "wasmtime-cache", "wasmtime-environ", "wasmtime-jit", @@ -11236,7 
+11545,7 @@ dependencies = [ "errno", "file-per-thread-logger", "libc", - "log", + "log 0.4.14", "serde", "sha2 0.9.3", "toml", @@ -11250,12 +11559,12 @@ version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c525b39f062eada7db3c1298287b96dcb6e472b9f6b22501300b28d9fa7582f6" dependencies = [ - "cranelift-codegen", - "cranelift-entity", - "cranelift-frontend", + "cranelift-codegen 0.74.0", + "cranelift-entity 0.74.0", + "cranelift-frontend 0.74.0", "cranelift-wasm", - "target-lexicon", - "wasmparser", + "target-lexicon 0.12.0", + "wasmparser 0.78.2", "wasmtime-environ", ] @@ -11269,9 +11578,9 @@ dependencies = [ "gimli 0.24.0", "more-asserts", "object 0.24.0", - "target-lexicon", + "target-lexicon 0.12.0", "thiserror", - "wasmparser", + "wasmparser 0.78.2", "wasmtime-environ", ] @@ -11282,16 +11591,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f64d0c2d881c31b0d65c1f2695e022d71eb60b9fbdd336aacca28208b58eac90" dependencies = [ "cfg-if 1.0.0", - "cranelift-codegen", - "cranelift-entity", + "cranelift-codegen 0.74.0", + "cranelift-entity 0.74.0", "cranelift-wasm", "gimli 0.24.0", "indexmap", - "log", + "log 0.4.14", "more-asserts", "serde", "thiserror", - "wasmparser", + "wasmparser 0.78.2", ] [[package]] @@ -11303,21 +11612,21 @@ dependencies = [ "addr2line 0.15.1", "anyhow", "cfg-if 1.0.0", - "cranelift-codegen", - "cranelift-entity", - "cranelift-frontend", + "cranelift-codegen 0.74.0", + "cranelift-entity 0.74.0", + "cranelift-frontend 0.74.0", "cranelift-native", "cranelift-wasm", "gimli 0.24.0", - "log", + "log 0.4.14", "more-asserts", "object 0.24.0", "rayon", "region", "serde", - "target-lexicon", + "target-lexicon 0.12.0", "thiserror", - "wasmparser", + "wasmparser 0.78.2", "wasmtime-cranelift", "wasmtime-debug", "wasmtime-environ", @@ -11336,7 +11645,7 @@ dependencies = [ "anyhow", "more-asserts", "object 0.24.0", - "target-lexicon", + "target-lexicon 0.12.0", "wasmtime-debug", 
"wasmtime-environ", ] @@ -11352,7 +11661,7 @@ dependencies = [ "lazy_static", "libc", "serde", - "target-lexicon", + "target-lexicon 0.12.0", "wasmtime-environ", "wasmtime-runtime", ] @@ -11370,7 +11679,7 @@ dependencies = [ "indexmap", "lazy_static", "libc", - "log", + "log 0.4.14", "mach", "memoffset 0.6.1", "more-asserts", @@ -11428,6 +11737,47 @@ dependencies = [ "webpki", ] +[[package]] +name = "websocket" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413b37840b9e27b340ce91b319ede10731de8c72f5bc4cb0206ec1ca4ce581d0" +dependencies = [ + "bytes 0.4.12", + "futures 0.1.31", + "hyper 0.10.16", + "native-tls", + "rand 0.6.5", + "tokio-codec", + "tokio-io", + "tokio-reactor", + "tokio-tcp", + "tokio-tls", + "unicase 1.4.2", + "url 1.7.2", + "websocket-base", +] + +[[package]] +name = "websocket-base" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e3810f0d00c4dccb54c30a4eee815e703232819dec7b007db115791c42aa374" +dependencies = [ + "base64 0.10.1", + "bitflags", + "byteorder", + "bytes 0.4.12", + "futures 0.1.31", + "native-tls", + "rand 0.6.5", + "sha1", + "tokio-codec", + "tokio-io", + "tokio-tcp", + "tokio-tls", +] + [[package]] name = "wepoll-sys" version = "3.0.1" @@ -11538,8 +11888,8 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7d9028f208dd5e63c614be69f115c1b53cacc1111437d4c765185856666c107" dependencies = [ - "futures 0.3.15", - "log", + "futures 0.3.16", + "log 0.4.14", "nohash-hasher", "parking_lot 0.11.1", "rand 0.8.4", @@ -11548,9 +11898,9 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81a974bcdd357f0dca4d41677db03436324d45a4c9ed2d0b873a5a360ce41c36" +checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" dependencies = [ "zeroize_derive", ] diff --git 
a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index 85a65602062a4..0c198a79011d4 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -18,6 +18,7 @@ sp-application-crypto = { version = "4.0.0-dev", default-features = false, path codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } pallet-session = { version = "4.0.0-dev", features = [ "historical", From 2c47f9b829947881a3f29099120fdeaba386b27c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 12:23:44 +0100 Subject: [PATCH 456/503] Add back missing scale-info dependency --- Cargo.lock | 2 +- frame/elections/Cargo.toml | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index cd3e901e2114b..4ef1d0e78b7aa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5240,6 +5240,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5704,7 +5705,6 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", - "scale-info", "sp-core", "sp-io", "sp-runtime", diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index f3ff3015bd0c8..474e4125d4a87 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -16,6 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { 
version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } From 32371bb2827b9b47ab50015b51d19592b16672e3 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 12:31:41 +0100 Subject: [PATCH 457/503] Fix npos compact impls --- primitives/npos-elections/solution-type/src/codec.rs | 6 +++--- primitives/npos-elections/solution-type/src/single_page.rs | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/primitives/npos-elections/solution-type/src/codec.rs b/primitives/npos-elections/solution-type/src/codec.rs index 1d955bede1024..2dac076fcde42 100644 --- a/primitives/npos-elections/solution-type/src/codec.rs +++ b/primitives/npos-elections/solution-type/src/codec.rs @@ -179,7 +179,7 @@ fn scale_info_impl( count: usize, ) -> TokenStream2 { let scale_info_impl_single = { - let name = format!("{}", field_name_for(1)); + let name = format!("{}", vote_field(1)); quote! { .field(|f| f.ty::<_npos::sp_std::prelude::Vec< @@ -191,7 +191,7 @@ fn scale_info_impl( }; let scale_info_impl_double = { - let name = format!("{}", field_name_for(2)); + let name = format!("{}", vote_field(2)); quote! { .field(|f| f.ty::<_npos::sp_std::prelude::Vec<( @@ -206,7 +206,7 @@ fn scale_info_impl( let scale_info_impl_rest = (3..=count) .map(|c| { - let name = format!("{}", field_name_for(c)); + let name = format!("{}", vote_field(c)); quote! { .field(|f| f.ty::<_npos::sp_std::prelude::Vec<( diff --git a/primitives/npos-elections/solution-type/src/single_page.rs b/primitives/npos-elections/solution-type/src/single_page.rs index 7dfd0e56618f9..afb28b0d0b64c 100644 --- a/primitives/npos-elections/solution-type/src/single_page.rs +++ b/primitives/npos-elections/solution-type/src/single_page.rs @@ -64,7 +64,7 @@ pub(crate) fn generate(def: crate::SolutionDef) -> Result { let derives_and_maybe_compact_encoding = if compact_encoding { // custom compact encoding. 
- let compact_impl = crate::codec::codec_impl( + let compact_impl = crate::codec::codec_and_info_impl( ident.clone(), voter_type.clone(), target_type.clone(), @@ -77,7 +77,7 @@ pub(crate) fn generate(def: crate::SolutionDef) -> Result { } } else { // automatically derived. - quote!(#[derive(Default, PartialEq, Eq, Clone, Debug, _npos::codec::Encode, _npos::codec::Decode)]) + quote!(#[derive(Default, PartialEq, Eq, Clone, Debug, _npos::codec::Encode, _npos::codec::Decode, _npos::scale_info::TypeInfo)]) }; let struct_name = syn::Ident::new("solution", proc_macro2::Span::call_site()); From 91f82061d0f26c3354070f6aa5743c70fee26305 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 12:37:53 +0100 Subject: [PATCH 458/503] Cargo.lock --- Cargo.lock | 1 + 1 file changed, 1 insertion(+) diff --git a/Cargo.lock b/Cargo.lock index 4ef1d0e78b7aa..2092a422bed4f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5705,6 +5705,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", + "scale-info", "sp-core", "sp-io", "sp-runtime", From 9dbdbdc4fd2265a4371b07d98dfcc44d4f631991 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 12:40:52 +0100 Subject: [PATCH 459/503] Fmt --- frame/babe/src/equivocation.rs | 6 +++-- frame/benchmarking/src/lib.rs | 2 +- .../src/unsigned.rs | 26 +++++++++++++------ frame/grandpa/src/equivocation.rs | 6 +++-- frame/proxy/src/lib.rs | 3 ++- 5 files changed, 29 insertions(+), 14 deletions(-) diff --git a/frame/babe/src/equivocation.rs b/frame/babe/src/equivocation.rs index 734b1755c993a..2397918d1ef13 100644 --- a/frame/babe/src/equivocation.rs +++ b/frame/babe/src/equivocation.rs @@ -155,8 +155,10 @@ where ) -> DispatchResult { use frame_system::offchain::SubmitTransaction; - let call = - Call::report_equivocation_unsigned { equivocation_proof: Box::new(equivocation_proof), key_owner_proof }; + let call = Call::report_equivocation_unsigned { + equivocation_proof: Box::new(equivocation_proof), + 
key_owner_proof, + }; match SubmitTransaction::>::submit_unsigned_transaction(call.into()) { Ok(()) => log::info!( diff --git a/frame/benchmarking/src/lib.rs b/frame/benchmarking/src/lib.rs index 3452aab4e6e97..429e3369b1042 100644 --- a/frame/benchmarking/src/lib.rs +++ b/frame/benchmarking/src/lib.rs @@ -683,7 +683,7 @@ macro_rules! benchmark_backend { // Every variant must implement [`BenchmarkingSetup`]. // // ```nocompile -// +// // struct Transfer; // impl BenchmarkingSetup for Transfer { ... } // diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index 8c0020686c907..cae5fe79369d6 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -209,7 +209,8 @@ impl Pallet { let (solution, witness) = Self::mine_and_check(iters)?; let score = raw_solution.score.clone(); - let call: Call = Call::submit_unsigned { solution: Box::new(raw_solution), witness }.into(); + let call: Call = + Call::submit_unsigned { solution: Box::new(raw_solution), witness }.into(); log!( debug, @@ -777,7 +778,8 @@ mod tests { ExtBuilder::default().desired_targets(0).build_and_execute(|| { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; + let call = + Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; // initial assert_eq!(MultiPhase::current_phase(), Phase::Off); @@ -845,8 +847,10 @@ mod tests { roll_to(25); assert!(MultiPhase::current_phase().is_unsigned()); - let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; + let solution = + RawSolution:: { score: [5, 0, 0], ..Default::default() }; + let call = + Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; // 
initial assert!(::validate_unsigned( @@ -883,7 +887,8 @@ mod tests { assert!(MultiPhase::current_phase().is_unsigned()); let raw = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned { solution: Box::new(raw.clone()), witness: witness() }; + let call = + Call::submit_unsigned { solution: Box::new(raw.clone()), witness: witness() }; assert_eq!(raw.solution.unique_targets().len(), 0); // won't work anymore. @@ -909,7 +914,10 @@ mod tests { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; + let call = Call::submit_unsigned { + solution: Box::new(solution.clone()), + witness: witness(), + }; assert_eq!( ::validate_unsigned( @@ -936,7 +944,8 @@ mod tests { // This is in itself an invalid BS solution. let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; + let call = + Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) @@ -957,7 +966,8 @@ mod tests { let mut correct_witness = witness(); correct_witness.voters += 1; correct_witness.targets -= 1; - let call = Call::submit_unsigned { solution: Box::new(solution.clone()), correct_witness }; + let call = + Call::submit_unsigned { solution: Box::new(solution.clone()), correct_witness }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) diff --git a/frame/grandpa/src/equivocation.rs b/frame/grandpa/src/equivocation.rs index ed28ad6c00785..8a23ce6e1ef1e 100644 --- a/frame/grandpa/src/equivocation.rs +++ b/frame/grandpa/src/equivocation.rs @@ -164,8 +164,10 @@ where ) -> DispatchResult { use frame_system::offchain::SubmitTransaction; - let call = - Call::report_equivocation_unsigned { 
equivocation_proof: Box::new(equivocation_proof), key_owner_proof }; + let call = Call::report_equivocation_unsigned { + equivocation_proof: Box::new(equivocation_proof), + key_owner_proof, + }; match SubmitTransaction::>::submit_unsigned_transaction(call.into()) { Ok(()) => log::info!( diff --git a/frame/proxy/src/lib.rs b/frame/proxy/src/lib.rs index bf9620b527c92..3e06fc282465b 100644 --- a/frame/proxy/src/lib.rs +++ b/frame/proxy/src/lib.rs @@ -775,7 +775,8 @@ impl Pallet { match c.is_sub_type() { // Proxy call cannot add or remove a proxy with more permissions than it already // has. - Some(Call::add_proxy { ref proxy_type, .. }) | Some(Call::remove_proxy { ref proxy_type, .. }) + Some(Call::add_proxy { ref proxy_type, .. }) | + Some(Call::remove_proxy { ref proxy_type, .. }) if !def.proxy_type.is_superset(&proxy_type) => false, // Proxy call cannot remove all proxies or kill anonymous proxies unless it has full From f695da4b7fa861439a1ad087ed71fe922cfded2c Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 13:21:13 +0100 Subject: [PATCH 460/503] Fix errors --- frame/election-provider-multi-phase/src/lib.rs | 14 +++++++------- .../election-provider-multi-phase/src/unsigned.rs | 8 ++++---- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 616fafe572173..10c8a67b99c35 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -977,7 +977,7 @@ pub mod pallet { // create the submission let deposit = Self::deposit_for(&raw_solution, size); let reward = { - let call = Call::submit { solution: raw_solution.clone(), num_signed_submissions }; + let call = Call::submit { raw_solution: raw_solution.clone(), num_signed_submissions }; let call_fee = T::EstimateCallFee::estimate_call_fee(&call, None.into()); T::SignedRewardBase::get().saturating_add(call_fee) }; @@ -1069,14 +1069,14 @@ 
pub mod pallet { impl ValidateUnsigned for Pallet { type Call = Call; fn validate_unsigned(source: TransactionSource, call: &Self::Call) -> TransactionValidity { - if let Call::submit_unsigned { solution, .. } = call { + if let Call::submit_unsigned { raw_solution, .. } = call { // Discard solution not coming from the local OCW. match source { TransactionSource::Local | TransactionSource::InBlock => { /* allowed */ }, _ => return InvalidTransaction::Call.into(), } - let _ = Self::unsigned_pre_dispatch_checks(solution) + let _ = Self::unsigned_pre_dispatch_checks(raw_solution) .map_err(|err| { log!(debug, "unsigned transaction validation failed due to {:?}", err); err @@ -1087,11 +1087,11 @@ pub mod pallet { // The higher the score[0], the better a solution is. .priority( T::MinerTxPriority::get() - .saturating_add(solution.score[0].saturated_into()), + .saturating_add(raw_solution.score[0].saturated_into()), ) // Used to deduplicate unsigned solutions: each validator should produce one // solution per round at most, and solutions are not propagate. - .and_provides(solution.round) + .and_provides(raw_solution.round) // Transaction should stay in the pool for the duration of the unsigned phase. .longevity(T::UnsignedPhase::get().saturated_into::()) // We don't propagate this. This can never be validated at a remote node. @@ -1103,8 +1103,8 @@ pub mod pallet { } fn pre_dispatch(call: &Self::Call) -> Result<(), TransactionValidityError> { - if let Call::submit_unsigned { solution, .. } = call { - Self::unsigned_pre_dispatch_checks(solution) + if let Call::submit_unsigned { raw_solution, .. 
} = call { + Self::unsigned_pre_dispatch_checks(raw_solution) .map_err(dispatch_error_to_invalid) .map_err(Into::into) } else { diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index cae5fe79369d6..13504ebcd4ffe 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -155,9 +155,9 @@ impl Pallet { let call = restore_solution::() .and_then(|call| { // ensure the cached call is still current before submitting - if let Call::submit_unsigned { solution, .. } = &call { + if let Call::submit_unsigned { raw_solution, .. } = &call { // prevent errors arising from state changes in a forkful chain - Self::basic_checks(solution, "restored")?; + Self::basic_checks(raw_solution, "restored")?; Ok(call) } else { Err(MinerError::SolutionCallInvalid) @@ -208,9 +208,9 @@ impl Pallet { // get the solution, with a load of checks to ensure if submitted, IT IS ABSOLUTELY VALID. 
let (solution, witness) = Self::mine_and_check(iters)?; - let score = raw_solution.score.clone(); + let score = solution.score.clone(); let call: Call = - Call::submit_unsigned { solution: Box::new(raw_solution), witness }.into(); + Call::submit_unsigned { raw_solution: Box::new(solution), witness }.into(); log!( debug, From e2d74d0f30b8d4cb663d73ffa29a4cacf2736834 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 17:08:09 +0100 Subject: [PATCH 461/503] Fmt --- frame/election-provider-multi-phase/src/lib.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/election-provider-multi-phase/src/lib.rs b/frame/election-provider-multi-phase/src/lib.rs index 10c8a67b99c35..860260731bcc4 100644 --- a/frame/election-provider-multi-phase/src/lib.rs +++ b/frame/election-provider-multi-phase/src/lib.rs @@ -977,7 +977,8 @@ pub mod pallet { // create the submission let deposit = Self::deposit_for(&raw_solution, size); let reward = { - let call = Call::submit { raw_solution: raw_solution.clone(), num_signed_submissions }; + let call = + Call::submit { raw_solution: raw_solution.clone(), num_signed_submissions }; let call_fee = T::EstimateCallFee::estimate_call_fee(&call, None.into()); T::SignedRewardBase::get().saturating_add(call_fee) }; From a61df94e0b1ddc230d90ce137e7db15edc1c2bac Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 17:22:00 +0100 Subject: [PATCH 462/503] Fix renamed raw_solution field --- .../election-provider-multi-phase/src/benchmarking.rs | 2 +- frame/election-provider-multi-phase/src/unsigned.rs | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/frame/election-provider-multi-phase/src/benchmarking.rs b/frame/election-provider-multi-phase/src/benchmarking.rs index 89cc1fd3110eb..c5c95f9054145 100644 --- a/frame/election-provider-multi-phase/src/benchmarking.rs +++ b/frame/election-provider-multi-phase/src/benchmarking.rs @@ -352,7 +352,7 @@ frame_benchmarking::benchmarks! 
{ // encode the most significant storage item that needs to be decoded in the dispatch. let encoded_snapshot = >::snapshot().ok_or("missing snapshot")?.encode(); - let encoded_call = >::submit_unsigned { solution: Box::new(raw_solution.clone()), witness }.encode(); + let encoded_call = >::submit_unsigned { raw_solution: Box::new(raw_solution.clone()), witness }.encode(); }: { assert_ok!( >::submit_unsigned( diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index 13504ebcd4ffe..66c2384348779 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -779,7 +779,7 @@ mod tests { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; let call = - Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; + Call::submit_unsigned { raw_solution: Box::new(solution.clone()), witness: witness() }; // initial assert_eq!(MultiPhase::current_phase(), Phase::Off); @@ -850,7 +850,7 @@ mod tests { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; let call = - Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; + Call::submit_unsigned { raw_solution: Box::new(solution.clone()), witness: witness() }; // initial assert!(::validate_unsigned( @@ -888,7 +888,7 @@ mod tests { let raw = RawSolution:: { score: [5, 0, 0], ..Default::default() }; let call = - Call::submit_unsigned { solution: Box::new(raw.clone()), witness: witness() }; + Call::submit_unsigned { raw_solution: Box::new(raw.clone()), witness: witness() }; assert_eq!(raw.solution.unique_targets().len(), 0); // won't work anymore. 
@@ -945,7 +945,7 @@ mod tests { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; let call = - Call::submit_unsigned { solution: Box::new(solution.clone()), witness: witness() }; + Call::submit_unsigned { raw_solution: Box::new(solution.clone()), witness: witness() }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) @@ -967,7 +967,7 @@ mod tests { correct_witness.voters += 1; correct_witness.targets -= 1; let call = - Call::submit_unsigned { solution: Box::new(solution.clone()), correct_witness }; + Call::submit_unsigned { raw_solution: Box::new(solution.clone()), correct_witness }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) From 8e37997038323a83d0adfdf837a2b979a50f8867 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 17:24:51 +0100 Subject: [PATCH 463/503] Fix error --- frame/election-provider-multi-phase/src/benchmarking.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/election-provider-multi-phase/src/benchmarking.rs b/frame/election-provider-multi-phase/src/benchmarking.rs index c5c95f9054145..480572f93b3e3 100644 --- a/frame/election-provider-multi-phase/src/benchmarking.rs +++ b/frame/election-provider-multi-phase/src/benchmarking.rs @@ -352,7 +352,7 @@ frame_benchmarking::benchmarks! { // encode the most significant storage item that needs to be decoded in the dispatch. 
let encoded_snapshot = >::snapshot().ok_or("missing snapshot")?.encode(); - let encoded_call = >::submit_unsigned { raw_solution: Box::new(raw_solution.clone()), witness }.encode(); + let encoded_call = Call::::submit_unsigned { raw_solution: Box::new(raw_solution.clone()), witness }.encode(); }: { assert_ok!( >::submit_unsigned( From 2f526bcd4549f6dae04472b3defb37061c764de4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 17:29:07 +0100 Subject: [PATCH 464/503] Fmt --- .../src/unsigned.rs | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index 66c2384348779..5d84d31581599 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -778,8 +778,10 @@ mod tests { ExtBuilder::default().desired_targets(0).build_and_execute(|| { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = - Call::submit_unsigned { raw_solution: Box::new(solution.clone()), witness: witness() }; + let call = Call::submit_unsigned { + raw_solution: Box::new(solution.clone()), + witness: witness(), + }; // initial assert_eq!(MultiPhase::current_phase(), Phase::Off); @@ -849,8 +851,10 @@ mod tests { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = - Call::submit_unsigned { raw_solution: Box::new(solution.clone()), witness: witness() }; + let call = Call::submit_unsigned { + raw_solution: Box::new(solution.clone()), + witness: witness(), + }; // initial assert!(::validate_unsigned( @@ -944,8 +948,10 @@ mod tests { // This is in itself an invalid BS solution. 
let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; - let call = - Call::submit_unsigned { raw_solution: Box::new(solution.clone()), witness: witness() }; + let call = Call::submit_unsigned { + raw_solution: Box::new(solution.clone()), + witness: witness(), + }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) From 7b386d837ff0e1df087a7f85fe4b810162c89347 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 17:49:27 +0100 Subject: [PATCH 465/503] Fix some benchmarks --- bin/node/executor/tests/fees.rs | 4 ++-- frame/election-provider-multi-phase/src/unsigned.rs | 2 +- frame/utility/src/tests.rs | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bin/node/executor/tests/fees.rs b/bin/node/executor/tests/fees.rs index f00e5efa69953..3f9e882a1a068 100644 --- a/bin/node/executor/tests/fees.rs +++ b/bin/node/executor/tests/fees.rs @@ -323,11 +323,11 @@ fn block_length_capacity_report() { }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(nonce, 0))), - function: Call::System(frame_system::Call::remark(vec![ + function: Call::System(frame_system::Call::remark { remark: vec![ 0u8; (block_number * factor) as usize - ])), + ]}), }, ], (time * 1000 / SLOT_DURATION).into(), diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index 5d84d31581599..d7c0387fc3229 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -973,7 +973,7 @@ mod tests { correct_witness.voters += 1; correct_witness.targets -= 1; let call = - Call::submit_unsigned { raw_solution: Box::new(solution.clone()), correct_witness }; + Call::submit_unsigned { raw_solution: Box::new(solution.clone()), witness: correct_witness }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) diff --git a/frame/utility/src/tests.rs 
b/frame/utility/src/tests.rs index 62beb8a0b8dba..bce216d3ca39d 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -582,11 +582,11 @@ fn batch_all_does_not_nest() { assert_eq!(Balances::free_balance(2), 10); }); } - +r #[test] fn batch_limit() { new_test_ext().execute_with(|| { - let calls = vec![Call::System(SystemCall::remark(vec![])); 40_000]; + let calls = vec![Call::System(SystemCall::remark { remark: vec![] }); 40_000]; assert_noop!(Utility::batch(Origin::signed(1), calls.clone()), Error::::TooManyCalls); assert_noop!(Utility::batch_all(Origin::signed(1), calls), Error::::TooManyCalls); }); From 1f1215245a4474864627c05b962b8e304e535e7d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 17:51:46 +0100 Subject: [PATCH 466/503] Fmt --- bin/node/executor/tests/fees.rs | 8 +++----- frame/election-provider-multi-phase/src/unsigned.rs | 6 ++++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/bin/node/executor/tests/fees.rs b/bin/node/executor/tests/fees.rs index 3f9e882a1a068..379cdda5b76a3 100644 --- a/bin/node/executor/tests/fees.rs +++ b/bin/node/executor/tests/fees.rs @@ -323,11 +323,9 @@ fn block_length_capacity_report() { }, CheckedExtrinsic { signed: Some((charlie(), signed_extra(nonce, 0))), - function: Call::System(frame_system::Call::remark { remark: vec![ - 0u8; - (block_number * factor) - as usize - ]}), + function: Call::System(frame_system::Call::remark { + remark: vec![0u8; (block_number * factor) as usize], + }), }, ], (time * 1000 / SLOT_DURATION).into(), diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index d7c0387fc3229..5a583e31c36a3 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -972,8 +972,10 @@ mod tests { let mut correct_witness = witness(); correct_witness.voters += 1; correct_witness.targets -= 1; - let call = - Call::submit_unsigned { 
raw_solution: Box::new(solution.clone()), witness: correct_witness }; + let call = Call::submit_unsigned { + raw_solution: Box::new(solution.clone()), + witness: correct_witness, + }; let outer_call: OuterCall = call.into(); let _ = outer_call.dispatch(Origin::none()); }) From cd8a63aa93f540c39a02c724b59136637352e47e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 23 Aug 2021 17:52:07 +0100 Subject: [PATCH 467/503] Stray R --- frame/utility/src/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index bce216d3ca39d..02971dddf3232 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -582,7 +582,7 @@ fn batch_all_does_not_nest() { assert_eq!(Balances::free_balance(2), 10); }); } -r + #[test] fn batch_limit() { new_test_ext().execute_with(|| { From 9cf422368e7a7aa321aaa167d5b0945d059de55f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 24 Aug 2021 09:16:42 +0100 Subject: [PATCH 468/503] Fix --- frame/election-provider-multi-phase/src/unsigned.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index 5a583e31c36a3..9e2b9fe2f5485 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -919,7 +919,7 @@ mod tests { let solution = RawSolution:: { score: [5, 0, 0], ..Default::default() }; let call = Call::submit_unsigned { - solution: Box::new(solution.clone()), + raw_solution: Box::new(solution.clone()), witness: witness(), }; From fda7a890153bd2b3041d9cbb3465a4001c8bda12 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 24 Aug 2021 09:38:38 +0100 Subject: [PATCH 469/503] Add missing TypeInfos --- frame/vesting/src/lib.rs | 1 + frame/vesting/src/vesting_info.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/vesting/src/lib.rs 
b/frame/vesting/src/lib.rs index 30fd085813fe3..27862a5ca4b72 100644 --- a/frame/vesting/src/lib.rs +++ b/frame/vesting/src/lib.rs @@ -65,6 +65,7 @@ use frame_support::{ }; use frame_system::{ensure_root, ensure_signed, pallet_prelude::*}; pub use pallet::*; +use scale_info::TypeInfo; use sp_runtime::{ traits::{ AtLeast32BitUnsigned, Bounded, Convert, MaybeSerializeDeserialize, One, Saturating, diff --git a/frame/vesting/src/vesting_info.rs b/frame/vesting/src/vesting_info.rs index 72171910086cd..81bffa199fd72 100644 --- a/frame/vesting/src/vesting_info.rs +++ b/frame/vesting/src/vesting_info.rs @@ -20,7 +20,7 @@ use super::*; /// Struct to encode the vesting schedule of an individual account. -#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen)] +#[derive(Encode, Decode, Copy, Clone, PartialEq, Eq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct VestingInfo { /// Locked amount at genesis. locked: Balance, From 2d15ec058a216e3f92d713f1174603a2bb1eac65 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 24 Aug 2021 11:42:27 +0100 Subject: [PATCH 470/503] ui test fix --- .../test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr index 6c92423c6a7fe..545520124bfee 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr @@ -4,6 +4,6 @@ error[E0277]: the trait bound `Bar: MaxEncodedLen` is not satisfied 10 | #[pallet::generate_storage_info] | ^^^^^^^^^^^^^^^^^^^^^ the trait `MaxEncodedLen` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `Key` - = note: required because of the requirements on the impl of `StorageInfoTrait` for 
`frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, Key, u32>` + = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `NMapKey` + = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, NMapKey, u32>` = note: required by `storage_info` From 9b0e6507be1ebcefe97e59ecda2c669f9a48c9a0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 24 Aug 2021 12:02:04 +0100 Subject: [PATCH 471/503] Fix line widths --- frame/collective/src/benchmarking.rs | 12 +++++++++--- frame/democracy/src/benchmarking.rs | 6 +++++- .../src/benchmarking.rs | 5 ++++- frame/multisig/src/benchmarking.rs | 4 +++- .../npos-elections/solution-type/src/single_page.rs | 11 ++++++++++- 5 files changed, 31 insertions(+), 7 deletions(-) diff --git a/frame/collective/src/benchmarking.rs b/frame/collective/src/benchmarking.rs index 4ecdedf10ed7e..f24aad97de57a 100644 --- a/frame/collective/src/benchmarking.rs +++ b/frame/collective/src/benchmarking.rs @@ -66,7 +66,9 @@ benchmarks_instance! { let length = 100; for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; length] }.into(); + let proposal: T::Proposal = SystemCall::::remark { + remark: vec![i as u8; length] + }.into(); Collective::::propose( SystemOrigin::Signed(last_old_member.clone()).into(), threshold, @@ -309,7 +311,9 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. 
p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; bytes as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { + remark: vec![i as u8; bytes as usize] + }.into(); Collective::::propose( SystemOrigin::Signed(proposer.clone()).into(), threshold, @@ -472,7 +476,9 @@ benchmarks_instance! { let mut last_hash = T::Hash::default(); for i in 0 .. p { // Proposals should be different so that different proposal hashes are generated - let proposal: T::Proposal = SystemCall::::remark { remark: vec![i as u8; bytes as usize] }.into(); + let proposal: T::Proposal = SystemCall::::remark { + remark: vec![i as u8; bytes as usize] + }.into(); Collective::::propose( SystemOrigin::Signed(caller.clone()).into(), threshold, diff --git a/frame/democracy/src/benchmarking.rs b/frame/democracy/src/benchmarking.rs index 712ebc514e8eb..7d4d7aee140b9 100644 --- a/frame/democracy/src/benchmarking.rs +++ b/frame/democracy/src/benchmarking.rs @@ -283,7 +283,11 @@ benchmarks! { let origin_fast_track = T::FastTrackOrigin::successful_origin(); let voting_period = T::FastTrackVotingPeriod::get(); let delay = 0u32; - let call = Call::::fast_track { proposal_hash, voting_period: voting_period.into(), delay: delay.into() }; + let call = Call::::fast_track { + proposal_hash, + voting_period: voting_period.into(), + delay: delay.into() + }; }: { call.dispatch_bypass_filter(origin_fast_track)? } verify { diff --git a/frame/election-provider-multi-phase/src/benchmarking.rs b/frame/election-provider-multi-phase/src/benchmarking.rs index 480572f93b3e3..8d4ddd754f528 100644 --- a/frame/election-provider-multi-phase/src/benchmarking.rs +++ b/frame/election-provider-multi-phase/src/benchmarking.rs @@ -352,7 +352,10 @@ frame_benchmarking::benchmarks! { // encode the most significant storage item that needs to be decoded in the dispatch. 
let encoded_snapshot = >::snapshot().ok_or("missing snapshot")?.encode(); - let encoded_call = Call::::submit_unsigned { raw_solution: Box::new(raw_solution.clone()), witness }.encode(); + let encoded_call = Call::::submit_unsigned { + raw_solution: Box::new(raw_solution.clone()), + witness + }.encode(); }: { assert_ok!( >::submit_unsigned( diff --git a/frame/multisig/src/benchmarking.rs b/frame/multisig/src/benchmarking.rs index c2fec637e0532..2e23dff156e07 100644 --- a/frame/multisig/src/benchmarking.rs +++ b/frame/multisig/src/benchmarking.rs @@ -52,7 +52,9 @@ benchmarks! { let z in 0 .. 10_000; let max_signatories = T::MaxSignatories::get().into(); let (mut signatories, _) = setup_multi::(max_signatories, z)?; - let call: ::Call = frame_system::Call::::remark { remark: vec![0; z as usize] }.into(); + let call: ::Call = frame_system::Call::::remark { + remark: vec![0; z as usize] + }.into(); let call_hash = call.using_encoded(blake2_256); let multi_account_id = Multisig::::multi_account_id(&signatories, 1); let caller = signatories.pop().ok_or("signatories should have len 2 or more")?; diff --git a/primitives/npos-elections/solution-type/src/single_page.rs b/primitives/npos-elections/solution-type/src/single_page.rs index afb28b0d0b64c..33017d558331a 100644 --- a/primitives/npos-elections/solution-type/src/single_page.rs +++ b/primitives/npos-elections/solution-type/src/single_page.rs @@ -77,7 +77,16 @@ pub(crate) fn generate(def: crate::SolutionDef) -> Result { } } else { // automatically derived. 
- quote!(#[derive(Default, PartialEq, Eq, Clone, Debug, _npos::codec::Encode, _npos::codec::Decode, _npos::scale_info::TypeInfo)]) + quote!(#[derive( + Default, + PartialEq, + Eq, + Clone, + Debug, + _npos::codec::Encode, + _npos::codec::Decode, + _npos::scale_info::TypeInfo, + )]) }; let struct_name = syn::Ident::new("solution", proc_macro2::Span::call_site()); From c094c04cdc834b94e148cbef2ea0781c012da5df Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 25 Aug 2021 08:47:23 +0100 Subject: [PATCH 472/503] Revert "ui test fix" This reverts commit 2d15ec058a216e3f92d713f1174603a2bb1eac65. --- .../test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr index 545520124bfee..6c92423c6a7fe 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr @@ -4,6 +4,6 @@ error[E0277]: the trait bound `Bar: MaxEncodedLen` is not satisfied 10 | #[pallet::generate_storage_info] | ^^^^^^^^^^^^^^^^^^^^^ the trait `MaxEncodedLen` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `NMapKey` - = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, NMapKey, u32>` + = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `Key` + = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, Key, u32>` = note: required by `storage_info` From 047bb179085a0059c36cd20ab405f55cf0867e28 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 25 Aug 2021 10:40:35 +0100 
Subject: [PATCH 473/503] Upgrade to scale-info 0.11.0 --- Cargo.lock | 181 ++++++++++-------- bin/node-template/pallets/template/Cargo.toml | 2 +- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/executor/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/atomic-swap/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- frame/democracy/Cargo.toml | 2 +- .../election-provider-multi-phase/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/gilt/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/merkle-mountain-range/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- frame/node-authorization/Cargo.toml | 2 +- frame/offences/Cargo.toml | 2 +- frame/offences/benchmarking/Cargo.toml | 2 +- frame/proxy/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 2 +- frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/session/benchmarking/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- frame/staking/Cargo.toml | 2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/support/test/Cargo.toml | 2 +- 
frame/support/test/pallet/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/system/benchmarking/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/transaction-storage/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/uniques/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authority-discovery/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/npos-elections/Cargo.toml | 2 +- primitives/npos-elections/fuzzer/Cargo.toml | 2 +- .../npos-elections/solution-type/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/session/Cargo.toml | 2 +- primitives/staking/Cargo.toml | 2 +- .../transaction-storage-proof/Cargo.toml | 2 +- primitives/trie/Cargo.toml | 2 +- primitives/version/Cargo.toml | 2 +- test-utils/runtime/Cargo.toml | 2 +- utils/frame/rpc/support/Cargo.toml | 2 +- 82 files changed, 178 insertions(+), 165 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b711213c772f3..fb361dae58e74 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1807,7 +1807,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.4", - "scale-info", + "scale-info 0.10.0", ] [[package]] @@ -1890,7 +1890,7 @@ dependencies = [ "log 0.4.14", "parity-scale-codec", "paste 1.0.4", - "scale-info", + "scale-info 0.11.0", "sp-api", "sp-io", "sp-runtime", @@ -1947,7 +1947,7 @@ dependencies = [ "pallet-balances", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-inherents", "sp-io", @@ -1965,7 +1965,7 @@ checksum = "cb2c4a97cc93a372b141adb582d6428dc1ba1cded9ea624c4bbc40fe568cf247" dependencies = [ "cfg-if 
1.0.0", "parity-scale-codec", - "scale-info", + "scale-info 0.10.0", "serde", ] @@ -1985,7 +1985,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "pretty_assertions 0.6.1", - "scale-info", + "scale-info 0.11.0", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -2040,7 +2040,7 @@ dependencies = [ "parity-scale-codec", "pretty_assertions 0.6.1", "rustversion", - "scale-info", + "scale-info 0.11.0", "serde", "sp-arithmetic", "sp-core", @@ -2059,7 +2059,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", ] [[package]] @@ -2070,7 +2070,7 @@ dependencies = [ "frame-support", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "serde", "sp-core", "sp-externalities", @@ -2089,7 +2089,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -4406,7 +4406,7 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "sc-executor", - "scale-info", + "scale-info 0.11.0", "sp-application-crypto", "sp-consensus-babe", "sp-core", @@ -4440,7 +4440,7 @@ version = "2.0.0" dependencies = [ "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4546,7 +4546,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-api", "sp-authority-discovery", "sp-block-builder", @@ -4625,7 +4625,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-api", "sp-block-builder", "sp-consensus-aura", @@ -4892,7 +4892,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -4907,7 +4907,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - 
"scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -4922,7 +4922,7 @@ dependencies = [ "frame-system", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-application-crypto", "sp-consensus-aura", "sp-core", @@ -4939,7 +4939,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-application-crypto", "sp-authority-discovery", "sp-core", @@ -4956,7 +4956,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-authorship", "sp-core", "sp-io", @@ -4981,7 +4981,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-application-crypto", "sp-consensus-babe", "sp-consensus-vrf", @@ -5003,7 +5003,7 @@ dependencies = [ "log 0.4.14", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5020,7 +5020,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5037,7 +5037,7 @@ dependencies = [ "hex-literal", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5066,7 +5066,7 @@ dependencies = [ "pwasm-utils", "rand 0.8.4", "rand_pcg 0.3.0", - "scale-info", + "scale-info 0.11.0", "serde", "smallvec 1.6.1", "sp-core", @@ -5084,7 +5084,7 @@ version = "4.0.0-dev" dependencies = [ "bitflags", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "serde", "sp-core", "sp-runtime", @@ -5125,7 +5125,7 @@ version = "4.0.0-dev" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-api", "sp-runtime", "sp-std", @@ -5141,7 +5141,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", - 
"scale-info", + "scale-info 0.11.0", "serde", "sp-core", "sp-io", @@ -5162,7 +5162,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.7.3", - "scale-info", + "scale-info 0.11.0", "sp-arithmetic", "sp-core", "sp-io", @@ -5183,7 +5183,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5200,7 +5200,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-npos-elections", @@ -5219,7 +5219,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5235,7 +5235,7 @@ dependencies = [ "lite-json", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-keystore", @@ -5250,7 +5250,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5267,7 +5267,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-arithmetic", "sp-core", "sp-io", @@ -5293,7 +5293,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-application-crypto", "sp-core", "sp-finality-grandpa", @@ -5315,7 +5315,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5333,7 +5333,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-application-crypto", "sp-core", "sp-io", @@ -5351,7 +5351,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-keyring", @@ -5369,7 
+5369,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5385,7 +5385,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5404,7 +5404,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5453,7 +5453,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5468,7 +5468,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5483,7 +5483,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5499,7 +5499,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "serde", "sp-core", "sp-io", @@ -5526,7 +5526,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5544,7 +5544,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5559,7 +5559,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5574,7 +5574,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5590,7 +5590,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", 
"sp-runtime", @@ -5606,7 +5606,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5623,7 +5623,7 @@ dependencies = [ "log 0.4.14", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5648,7 +5648,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5666,7 +5666,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5689,7 +5689,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info", + "scale-info 0.11.0", "serde", "sp-application-crypto", "sp-core", @@ -5727,7 +5727,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5742,7 +5742,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5757,7 +5757,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-inherents", "sp-io", @@ -5776,7 +5776,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "serde", "sp-core", "sp-io", @@ -5793,7 +5793,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "serde", "serde_json", "smallvec 1.6.1", @@ -5839,7 +5839,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "serde", "sp-core", "sp-inherents", @@ -5859,7 +5859,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", - 
"scale-info", + "scale-info 0.11.0", "serde", "sp-core", "sp-io", @@ -5876,7 +5876,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5892,7 +5892,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -5909,7 +5909,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-io", "sp-runtime", @@ -6436,7 +6436,7 @@ dependencies = [ "fixed-hash", "impl-codec", "impl-serde", - "scale-info", + "scale-info 0.10.0", "uint", ] @@ -8465,6 +8465,19 @@ dependencies = [ "serde", ] +[[package]] +name = "scale-info" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c8680870bf65cb3f2c69b24e12ac37400a37a254346cd8520fa34886a0780ec" +dependencies = [ + "bitvec 0.20.2", + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive", +] + [[package]] name = "scale-info-derive" version = "0.7.0" @@ -8932,7 +8945,7 @@ name = "sp-application-crypto" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "serde", "sp-core", "sp-io", @@ -8961,7 +8974,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info", + "scale-info 0.11.0", "serde", "sp-debug-derive", "sp-std", @@ -8983,7 +8996,7 @@ name = "sp-authority-discovery" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-api", "sp-application-crypto", "sp-runtime", @@ -9054,7 +9067,7 @@ version = "0.10.0-dev" dependencies = [ "async-trait", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-api", "sp-application-crypto", "sp-consensus", @@ -9072,7 +9085,7 @@ dependencies = [ "async-trait", "merlin", "parity-scale-codec", - "scale-info", + "scale-info 
0.11.0", "serde", "sp-api", "sp-application-crypto", @@ -9103,7 +9116,7 @@ name = "sp-consensus-slots" version = "0.10.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-arithmetic", "sp-runtime", ] @@ -9146,7 +9159,7 @@ dependencies = [ "primitive-types", "rand 0.7.3", "regex", - "scale-info", + "scale-info 0.11.0", "schnorrkel", "secrecy", "serde", @@ -9201,7 +9214,7 @@ dependencies = [ "finality-grandpa", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "serde", "sp-api", "sp-application-crypto", @@ -9291,7 +9304,7 @@ version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "rand 0.7.3", - "scale-info", + "scale-info 0.11.0", "serde", "sp-arithmetic", "sp-core", @@ -9308,7 +9321,7 @@ dependencies = [ "honggfuzz", "parity-scale-codec", "rand 0.7.3", - "scale-info", + "scale-info 0.11.0", "sp-npos-elections", "sp-runtime", "structopt", @@ -9322,7 +9335,7 @@ dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", "quote", - "scale-info", + "scale-info 0.11.0", "sp-arithmetic", "sp-npos-elections", "syn", @@ -9367,7 +9380,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "rand 0.7.3", - "scale-info", + "scale-info 0.11.0", "serde", "serde_json", "sp-api", @@ -9479,7 +9492,7 @@ name = "sp-session" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-api", "sp-core", "sp-runtime", @@ -9492,7 +9505,7 @@ name = "sp-staking" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-runtime", "sp-std", ] @@ -9610,7 +9623,7 @@ dependencies = [ "async-trait", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-inherents", "sp-runtime", @@ -9627,7 +9640,7 @@ dependencies = [ "hex-literal", "memory-db", "parity-scale-codec", - "scale-info", + "scale-info 0.11.0", "sp-core", "sp-runtime", "sp-std", @@ -9654,7 +9667,7 @@ dependencies = [ "impl-serde", 
"parity-scale-codec", "parity-wasm 0.42.2", - "scale-info", + "scale-info 0.11.0", "serde", "sp-runtime", "sp-std", @@ -9839,7 +9852,7 @@ dependencies = [ "jsonrpc-client-transports", "parity-scale-codec", "sc-rpc-api", - "scale-info", + "scale-info 0.11.0", "serde", "sp-storage", "tokio", @@ -9926,7 +9939,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info", + "scale-info 0.11.0", "serde", "sp-api", "sp-application-crypto", diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index d67f10ab99d66..586be156aacb3 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/support" } frame-system = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/system" } frame-benchmarking = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/benchmarking", optional = true } diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index 64c0564cdbb7d..c4b8257fb9b10 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } pallet-aura = { version = 
"4.0.0-dev", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../../../frame/balances" } diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index e01fc2baadec4..68597534ed538 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0" } -scale-info = { version = "0.10.0", features = ["derive"] } +scale-info = { version = "0.11.0", features = ["derive"] } node-primitives = { version = "2.0.0", path = "../primitives" } node-runtime = { version = "3.0.0-dev", path = "../runtime" } sc-executor = { version = "0.10.0-dev", path = "../../../client/executor" } diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index e89485792d6a6..0d2460cac6053 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 4aa96d91a0355..fb0328ea71531 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { 
version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } log = { version = "0.4.14", default-features = false } diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index f535657a29cb8..a265c20a4f5b9 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index 957efe999df02..89f6df26b8748 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index 1cd6d04a94b11..cb42a38c47974 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -17,7 
+17,7 @@ sp-application-crypto = { version = "4.0.0-dev", default-features = false, path codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index 0c198a79011d4..f5cedb1dfd3f9 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -18,7 +18,7 @@ sp-application-crypto = { version = "4.0.0-dev", default-features = false, path codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } pallet-session = { version = "4.0.0-dev", features = [ "historical", diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index 93fc9ee22acf5..8e4f1edf08ce1 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-authorship = { version = "4.0.0-dev", default-features 
= false, path = "../../primitives/authorship" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index 924b3c2ce1f3d..c7e3ac83ef1b9 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index d0e2eab7032e7..660a45143b9cf 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index 
a3d3126e48c02..d9dcb847af405 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.3", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "4.0.0-dev", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index f3d4a4e736f0c..7b854d68ea1bb 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index 5a662719eefe5..abdf5827b1b6d 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", 
default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index 45a697f9f2de2..e4a5e45103050 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } pwasm-utils = { version = "0.18", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index 8252d280ef18c..80e3de67602a1 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } serde = { version = "1", features = ["derive"], optional = true } # Substrate Dependencies (This crate should not rely on frame) diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index c57e0cee213ab..e406ae3760cd6 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ 
b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } # Substrate Dependencies pallet-contracts-primitives = { version = "4.0.0-dev", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index 212f37b13f836..4e47f02ff611b 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index 5361269597d07..08d5643bcee0c 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -17,7 +17,7 @@ static_assertions = "1.1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = 
false } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index 7256da32d5ba8..05573b29e4c9d 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "4.0.0-dev", default-features = false, path = "../../primitives/npos-elections" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 474e4125d4a87..27e6d3b2e8802 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index 3755e5edac15d..5430795388325 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,7 
+14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index 1cbba6e1a06e5..ffbf8fffd83ef 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index c22f72790ca94..5dd2b5b2640ea 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } 
frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index 8a8a1ca1c080c..253e6fc6a7656 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index 4f46a1170ffa4..ee9add610254f 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../primitives/arithmetic" } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index ae29deff74bd8..6fcbdfffeb1c9 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -14,7 +14,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 0f69a197789b6..6dfae1829e14d 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index ad27c4a0bf502..e7d23171143ae 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "4.0.0-dev", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info 
= { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index 6415313e0bd35..3eef96dbb1a3a 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index 90705c85f5001..35bf96a4aa2bd 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = 
false, path = "../support" } diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 529a1f5f89f97..6f98f4f999d5e 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } log = { version = "0.4.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index 56f7cae8cc1e1..5ed835d7382c4 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } mmr-lib = { package = "ckb-merkle-mountain-range", default-features = false, version = "0.3.1" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index 196e8ea0165bd..ba757ee97abd7 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = 
false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 83bdb6af141aa..fcfb332f0e1f1 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index 566730abf7fb9..ecfb10ac32aa9 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml 
b/frame/offences/Cargo.toml index b0bb1c066d6a7..2aab766cd9bf6 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.126", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index 8198bacb177cb..065be15fb0129 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../system" } diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index 5a0a285bac8d5..3031be9a94811 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = 
["max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index 28134dfcdaaa1..0ec7836c6095e 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index 789aa58005f94..9e33d57c916ab 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = 
"../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index 307d547bdd158..0b2f270c632df 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -11,7 +11,7 @@ readme = "README.md" [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 4b3cd1d5ca1cf..51ff7c3812f4e 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index 7bd3c7c9aee33..dbc6ecfd62c32 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", 
default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index 65e42b4334aaa..f81f89b88f387 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -29,7 +29,7 @@ rand = { version = "0.7.2", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", features = [ "derive", ] } -scale-info = "0.10.0" +scale-info = "0.11.0" sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } pallet-staking-reward-curve = { version = "4.0.0-dev", path = "../../staking/reward-curve" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index b790c13e443ae..e449b403a412a 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git 
a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index c7ae438ede870..8f00787b9a261 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.126", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index b768326ba68d3..047364c5cb212 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index aef13f1a339ad..8240aecbff1ba 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = 
["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-metadata = { version = "14.0.0-rc.2", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 0931998df1875..76ac48b0d1f7b 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.10.0-dev", optional = true, path = "../../../primitives/state-machine" } diff --git a/frame/support/test/pallet/Cargo.toml b/frame/support/test/pallet/Cargo.toml index 35d51cbcbbb1b..fe314ecd5292b 100644 --- a/frame/support/test/pallet/Cargo.toml +++ b/frame/support/test/pallet/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = 
{ version = "4.0.0-dev", default-features = false, path = "../../" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../system" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index b1e8e4a8985ce..6a07334c693f8 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", path = "../../primitives/io", default-features = false } diff --git a/frame/system/benchmarking/Cargo.toml b/frame/system/benchmarking/Cargo.toml index a895636a00a17..c2f45c8e1132f 100644 --- a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 7545961504116..97bc55f78da0e 100644 --- 
a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index 89fc77816d2ab..3601b02c6f100 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 4af927f31d461..55c7a37f3101c 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info 
= { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true } smallvec = "1.4.1" diff --git a/frame/transaction-storage/Cargo.toml b/frame/transaction-storage/Cargo.toml index ccac144560bf2..2e9588422c4b6 100644 --- a/frame/transaction-storage/Cargo.toml +++ b/frame/transaction-storage/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true } hex-literal = { version = "0.3.1", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index 4faf88433e321..cafbd003b7573 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -17,7 +17,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", features = ["derive"], optional = true } impl-trait-for-tuples = "0.2.1" diff --git a/frame/uniques/Cargo.toml b/frame/uniques/Cargo.toml index d3746fd503d35..77e1c707f8459 100644 --- a/frame/uniques/Cargo.toml +++ b/frame/uniques/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", 
version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index 049cba68cec43..c3a914bfa9250 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index ecfd0ea8fd422..ae5141a6d2793 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version 
= "4.0.0-dev", default-features = false, path = "../support" } diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index 1b13330947595..ea019a7ac1467 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 71b354e699273..018f3172d54d8 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" static_assertions = "1.1.0" num-traits = { version = "0.2.8", default-features = false } diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index 561aeeeebdf33..fe966e90c8836 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, 
path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index 1e19ff53bb205..8105a5422feb3 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index ad5e05bfa6a60..db99210726013 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, 
features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 84f16e5e7a1cd..6c6d5d8fc6fbd 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../arithmetic" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 8bde234cae2e9..20c45460231b5 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.126", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index d748a1951c8fd..387e8be3fd5ee 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,7 +16,7 @@ 
targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 63d24dfb3993c..5e14690eafa9f 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-npos-elections-solution-type = { version = "4.0.0-dev", path = "./solution-type" } diff --git a/primitives/npos-elections/fuzzer/Cargo.toml b/primitives/npos-elections/fuzzer/Cargo.toml index 4b79e0561eb9e..fb9113ce1525b 100644 --- a/primitives/npos-elections/fuzzer/Cargo.toml +++ b/primitives/npos-elections/fuzzer/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } honggfuzz = "0.5" 
rand = { version = "0.7.3", features = ["std", "small_rng"] } sp-npos-elections = { version = "4.0.0-dev", path = ".." } diff --git a/primitives/npos-elections/solution-type/Cargo.toml b/primitives/npos-elections/solution-type/Cargo.toml index 296bee861188d..6a06b395e77e3 100644 --- a/primitives/npos-elections/solution-type/Cargo.toml +++ b/primitives/npos-elections/solution-type/Cargo.toml @@ -22,7 +22,7 @@ proc-macro-crate = "1.0.0" [dev-dependencies] parity-scale-codec = "2.0.1" -scale-info = "0.10.0" +scale-info = "0.11.0" sp-arithmetic = { path = "../../arithmetic", version = "4.0.0-dev" } # used by generate_solution_type: sp-npos-elections = { path = "..", version = "4.0.0-dev" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 15a599e3b77de..5fa5bf5d6cf45 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../arithmetic" } diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index ffb445e5dbda7..0ec21a42d95e2 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { 
version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index 23bf4f017b109..aac3cbed71b70 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/transaction-storage-proof/Cargo.toml b/primitives/transaction-storage-proof/Cargo.toml index 2ca5dcd224fad..7a175399f0fec 100644 --- a/primitives/transaction-storage-proof/Cargo.toml +++ b/primitives/transaction-storage-proof/Cargo.toml @@ -19,7 +19,7 @@ sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-trie = { version = "4.0.0-dev", optional = true, path = "../trie" } sp-core = { version = "4.0.0-dev", path = "../core", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } log = { version = "0.4.8", optional = true } async-trait = { version = "0.1.50", optional = true } diff --git 
a/primitives/trie/Cargo.toml b/primitives/trie/Cargo.toml index 690cf9bbf4e6a..e40d596b9f20b 100644 --- a/primitives/trie/Cargo.toml +++ b/primitives/trie/Cargo.toml @@ -19,7 +19,7 @@ harness = false [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } hash-db = { version = "0.15.2", default-features = false } trie-db = { version = "0.22.6", default-features = false } diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index b5ce777f993c4..72aa68130548a 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] impl-serde = { version = "0.3.1", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-version-proc-macro = { version = "4.0.0-dev", default-features = false, path = "proc-macro" } diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 349b3cb66fff5..5ab1e68e1e3a3 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.10.0-dev", default-features = false, path = " sp-consensus-babe = { version = "0.10.0-dev", default-features = false, path = "../../primitives/consensus/babe" } 
sp-block-builder = { version = "4.0.0-dev", default-features = false, path = "../../primitives/block-builder" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } sp-inherents = { version = "4.0.0-dev", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.27.0", default-features = false } diff --git a/utils/frame/rpc/support/Cargo.toml b/utils/frame/rpc/support/Cargo.toml index 98853d6cce59f..b3b51df5e2a44 100644 --- a/utils/frame/rpc/support/Cargo.toml +++ b/utils/frame/rpc/support/Cargo.toml @@ -25,5 +25,5 @@ sc-rpc-api = { version = "0.10.0-dev", path = "../../../../client/rpc-api" } [dev-dependencies] frame-system = { version = "4.0.0-dev", path = "../../../../frame/system" } -scale-info = "0.10.0" +scale-info = "0.11.0" tokio = "1.10" From 6b18b04242283fa29d84e746a314fba6308eca91 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 25 Aug 2021 10:42:41 +0100 Subject: [PATCH 474/503] Revert "Upgrade to scale-info 0.11.0" This reverts commit 047bb179085a0059c36cd20ab405f55cf0867e28. 
--- Cargo.lock | 181 ++++++++---------- bin/node-template/pallets/template/Cargo.toml | 2 +- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/executor/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/atomic-swap/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- frame/democracy/Cargo.toml | 2 +- .../election-provider-multi-phase/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/gilt/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/merkle-mountain-range/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- frame/node-authorization/Cargo.toml | 2 +- frame/offences/Cargo.toml | 2 +- frame/offences/benchmarking/Cargo.toml | 2 +- frame/proxy/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 2 +- frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/session/benchmarking/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- frame/staking/Cargo.toml | 2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/support/test/Cargo.toml | 2 +- frame/support/test/pallet/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- 
frame/system/benchmarking/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/transaction-storage/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/uniques/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authority-discovery/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/npos-elections/Cargo.toml | 2 +- primitives/npos-elections/fuzzer/Cargo.toml | 2 +- .../npos-elections/solution-type/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/session/Cargo.toml | 2 +- primitives/staking/Cargo.toml | 2 +- .../transaction-storage-proof/Cargo.toml | 2 +- primitives/trie/Cargo.toml | 2 +- primitives/version/Cargo.toml | 2 +- test-utils/runtime/Cargo.toml | 2 +- utils/frame/rpc/support/Cargo.toml | 2 +- 82 files changed, 165 insertions(+), 178 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fb361dae58e74..b711213c772f3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1807,7 +1807,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.4", - "scale-info 0.10.0", + "scale-info", ] [[package]] @@ -1890,7 +1890,7 @@ dependencies = [ "log 0.4.14", "parity-scale-codec", "paste 1.0.4", - "scale-info 0.11.0", + "scale-info", "sp-api", "sp-io", "sp-runtime", @@ -1947,7 +1947,7 @@ dependencies = [ "pallet-balances", "pallet-transaction-payment", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -1965,7 +1965,7 @@ checksum = "cb2c4a97cc93a372b141adb582d6428dc1ba1cded9ea624c4bbc40fe568cf247" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", - "scale-info 0.10.0", + "scale-info", "serde", ] 
@@ -1985,7 +1985,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "pretty_assertions 0.6.1", - "scale-info 0.11.0", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -2040,7 +2040,7 @@ dependencies = [ "parity-scale-codec", "pretty_assertions 0.6.1", "rustversion", - "scale-info 0.11.0", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -2059,7 +2059,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", ] [[package]] @@ -2070,7 +2070,7 @@ dependencies = [ "frame-support", "log 0.4.14", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -2089,7 +2089,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4406,7 +4406,7 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "sc-executor", - "scale-info 0.11.0", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-core", @@ -4440,7 +4440,7 @@ version = "2.0.0" dependencies = [ "frame-system", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4546,7 +4546,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-api", "sp-authority-discovery", "sp-block-builder", @@ -4625,7 +4625,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-api", "sp-block-builder", "sp-consensus-aura", @@ -4892,7 +4892,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4907,7 +4907,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4922,7 +4922,7 
@@ dependencies = [ "frame-system", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-application-crypto", "sp-consensus-aura", "sp-core", @@ -4939,7 +4939,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-application-crypto", "sp-authority-discovery", "sp-core", @@ -4956,7 +4956,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-authorship", "sp-core", "sp-io", @@ -4981,7 +4981,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-consensus-vrf", @@ -5003,7 +5003,7 @@ dependencies = [ "log 0.4.14", "pallet-transaction-payment", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5020,7 +5020,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5037,7 +5037,7 @@ dependencies = [ "hex-literal", "log 0.4.14", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5066,7 +5066,7 @@ dependencies = [ "pwasm-utils", "rand 0.8.4", "rand_pcg 0.3.0", - "scale-info 0.11.0", + "scale-info", "serde", "smallvec 1.6.1", "sp-core", @@ -5084,7 +5084,7 @@ version = "4.0.0-dev" dependencies = [ "bitflags", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-core", "sp-runtime", @@ -5125,7 +5125,7 @@ version = "4.0.0-dev" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -5141,7 +5141,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -5162,7 +5162,7 @@ dependencies 
= [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.7.3", - "scale-info 0.11.0", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -5183,7 +5183,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5200,7 +5200,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-npos-elections", @@ -5219,7 +5219,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5235,7 +5235,7 @@ dependencies = [ "lite-json", "log 0.4.14", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-keystore", @@ -5250,7 +5250,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5267,7 +5267,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -5293,7 +5293,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-application-crypto", "sp-core", "sp-finality-grandpa", @@ -5315,7 +5315,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5333,7 +5333,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-application-crypto", "sp-core", "sp-io", @@ -5351,7 +5351,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-keyring", @@ -5369,7 +5369,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", 
+ "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5385,7 +5385,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5404,7 +5404,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5453,7 +5453,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5468,7 +5468,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5483,7 +5483,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5499,7 +5499,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -5526,7 +5526,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5544,7 +5544,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5559,7 +5559,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5574,7 +5574,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5590,7 +5590,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5606,7 +5606,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", 
- "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5623,7 +5623,7 @@ dependencies = [ "log 0.4.14", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5648,7 +5648,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5666,7 +5666,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5689,7 +5689,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info 0.11.0", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5727,7 +5727,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5742,7 +5742,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5757,7 +5757,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -5776,7 +5776,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -5793,7 +5793,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5839,7 +5839,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5859,7 +5859,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -5876,7 +5876,7 @@ dependencies = [ 
"frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5892,7 +5892,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5909,7 +5909,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -6436,7 +6436,7 @@ dependencies = [ "fixed-hash", "impl-codec", "impl-serde", - "scale-info 0.10.0", + "scale-info", "uint", ] @@ -8465,19 +8465,6 @@ dependencies = [ "serde", ] -[[package]] -name = "scale-info" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c8680870bf65cb3f2c69b24e12ac37400a37a254346cd8520fa34886a0780ec" -dependencies = [ - "bitvec 0.20.2", - "cfg-if 1.0.0", - "derive_more", - "parity-scale-codec", - "scale-info-derive", -] - [[package]] name = "scale-info-derive" version = "0.7.0" @@ -8945,7 +8932,7 @@ name = "sp-application-crypto" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -8974,7 +8961,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info 0.11.0", + "scale-info", "serde", "sp-debug-derive", "sp-std", @@ -8996,7 +8983,7 @@ name = "sp-authority-discovery" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -9067,7 +9054,7 @@ version = "0.10.0-dev" dependencies = [ "async-trait", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -9085,7 +9072,7 @@ dependencies = [ "async-trait", "merlin", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9116,7 +9103,7 @@ name = "sp-consensus-slots" version 
= "0.10.0-dev" dependencies = [ "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -9159,7 +9146,7 @@ dependencies = [ "primitive-types", "rand 0.7.3", "regex", - "scale-info 0.11.0", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -9214,7 +9201,7 @@ dependencies = [ "finality-grandpa", "log 0.4.14", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9304,7 +9291,7 @@ version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "rand 0.7.3", - "scale-info 0.11.0", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -9321,7 +9308,7 @@ dependencies = [ "honggfuzz", "parity-scale-codec", "rand 0.7.3", - "scale-info 0.11.0", + "scale-info", "sp-npos-elections", "sp-runtime", "structopt", @@ -9335,7 +9322,7 @@ dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", "quote", - "scale-info 0.11.0", + "scale-info", "sp-arithmetic", "sp-npos-elections", "syn", @@ -9380,7 +9367,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "rand 0.7.3", - "scale-info 0.11.0", + "scale-info", "serde", "serde_json", "sp-api", @@ -9492,7 +9479,7 @@ name = "sp-session" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9505,7 +9492,7 @@ name = "sp-staking" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-runtime", "sp-std", ] @@ -9623,7 +9610,7 @@ dependencies = [ "async-trait", "log 0.4.14", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-inherents", "sp-runtime", @@ -9640,7 +9627,7 @@ dependencies = [ "hex-literal", "memory-db", "parity-scale-codec", - "scale-info 0.11.0", + "scale-info", "sp-core", "sp-runtime", "sp-std", @@ -9667,7 +9654,7 @@ dependencies = [ "impl-serde", "parity-scale-codec", "parity-wasm 0.42.2", - "scale-info 0.11.0", + "scale-info", "serde", "sp-runtime", "sp-std", @@ -9852,7 
+9839,7 @@ dependencies = [ "jsonrpc-client-transports", "parity-scale-codec", "sc-rpc-api", - "scale-info 0.11.0", + "scale-info", "serde", "sp-storage", "tokio", @@ -9939,7 +9926,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info 0.11.0", + "scale-info", "serde", "sp-api", "sp-application-crypto", diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index 586be156aacb3..d67f10ab99d66 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/support" } frame-system = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/system" } frame-benchmarking = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/benchmarking", optional = true } diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index c4b8257fb9b10..64c0564cdbb7d 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } pallet-aura = { version = "4.0.0-dev", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "4.0.0-dev", default-features = 
false, path = "../../../frame/balances" } diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index 68597534ed538..e01fc2baadec4 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0" } -scale-info = { version = "0.11.0", features = ["derive"] } +scale-info = { version = "0.10.0", features = ["derive"] } node-primitives = { version = "2.0.0", path = "../primitives" } node-runtime = { version = "3.0.0-dev", path = "../runtime" } sc-executor = { version = "0.10.0-dev", path = "../../../client/executor" } diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index 0d2460cac6053..e89485792d6a6 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index fb0328ea71531..4aa96d91a0355 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, 
features = ["derive"] } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } log = { version = "0.4.14", default-features = false } diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index a265c20a4f5b9..f535657a29cb8 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index 89f6df26b8748..957efe999df02 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index cb42a38c47974..1cd6d04a94b11 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -17,7 +17,7 @@ sp-application-crypto = { version = "4.0.0-dev", default-features = false, path codec = { package = "parity-scale-codec", 
version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index f5cedb1dfd3f9..0c198a79011d4 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -18,7 +18,7 @@ sp-application-crypto = { version = "4.0.0-dev", default-features = false, path codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } pallet-session = { version = "4.0.0-dev", features = [ "historical", diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index 8e4f1edf08ce1..93fc9ee22acf5 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-authorship = { version = "4.0.0-dev", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "4.0.0-dev", default-features = false, path = 
"../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index c7e3ac83ef1b9..924b3c2ce1f3d 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index 660a45143b9cf..d0e2eab7032e7 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index d9dcb847af405..a3d3126e48c02 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,7 +16,7 @@ targets 
= ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.3", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "4.0.0-dev", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index 7b854d68ea1bb..f3d4a4e736f0c 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index abdf5827b1b6d..5a662719eefe5 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = 
{ version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index e4a5e45103050..45a697f9f2de2 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } pwasm-utils = { version = "0.18", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index 80e3de67602a1..8252d280ef18c 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { version = "1", features = ["derive"], optional = true } # Substrate Dependencies (This crate should not rely on frame) diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index e406ae3760cd6..c57e0cee213ab 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2", 
default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } # Substrate Dependencies pallet-contracts-primitives = { version = "4.0.0-dev", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index 4e47f02ff611b..212f37b13f836 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index 08d5643bcee0c..5361269597d07 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -17,7 +17,7 @@ static_assertions = "1.1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/elections-phragmen/Cargo.toml 
b/frame/elections-phragmen/Cargo.toml index 05573b29e4c9d..7256da32d5ba8 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "4.0.0-dev", default-features = false, path = "../../primitives/npos-elections" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 27e6d3b2e8802..474e4125d4a87 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index 5430795388325..3755e5edac15d 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } 
-scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index ffbf8fffd83ef..1cbba6e1a06e5 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 5dd2b5b2640ea..c22f72790ca94 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path 
= "../balances" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index 253e6fc6a7656..8a8a1ca1c080c 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index ee9add610254f..4f46a1170ffa4 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../primitives/arithmetic" } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 6fcbdfffeb1c9..ae29deff74bd8 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version 
= "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 6dfae1829e14d..0f69a197789b6 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index e7d23171143ae..ad27c4a0bf502 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "4.0.0-dev", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { 
version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index 3eef96dbb1a3a..6415313e0bd35 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index 35bf96a4aa2bd..90705c85f5001 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 6f98f4f999d5e..529a1f5f89f97 100644 --- 
a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index 5ed835d7382c4..56f7cae8cc1e1 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } mmr-lib = { package = "ckb-merkle-mountain-range", default-features = false, version = "0.3.1" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index ba757ee97abd7..196e8ea0165bd 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version 
= "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index fcfb332f0e1f1..83bdb6af141aa 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index ecfb10ac32aa9..566730abf7fb9 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index 2aab766cd9bf6..b0bb1c066d6a7 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,7 +15,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.126", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index 065be15fb0129..8198bacb177cb 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../system" } diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index 3031be9a94811..5a0a285bac8d5 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["max-encoded-len"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = 
false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index 0ec7836c6095e..28134dfcdaaa1 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index 9e33d57c916ab..789aa58005f94 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scheduler/Cargo.toml 
b/frame/scheduler/Cargo.toml index 0b2f270c632df..307d547bdd158 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -11,7 +11,7 @@ readme = "README.md" [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 51ff7c3812f4e..4b3cd1d5ca1cf 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index dbc6ecfd62c32..7bd3c7c9aee33 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version 
= "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index f81f89b88f387..65e42b4334aaa 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -29,7 +29,7 @@ rand = { version = "0.7.2", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", features = [ "derive", ] } -scale-info = "0.11.0" +scale-info = "0.10.0" sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } pallet-staking-reward-curve = { version = "4.0.0-dev", path = "../../staking/reward-curve" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index e449b403a412a..b790c13e443ae 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index 8f00787b9a261..c7ae438ede870 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -17,7 +17,7 @@ serde = { 
version = "1.0.126", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index 047364c5cb212..b768326ba68d3 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 8240aecbff1ba..aef13f1a339ad 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, 
features = ["derive"] } frame-metadata = { version = "14.0.0-rc.2", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 76ac48b0d1f7b..0931998df1875 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.10.0-dev", optional = true, path = "../../../primitives/state-machine" } diff --git a/frame/support/test/pallet/Cargo.toml b/frame/support/test/pallet/Cargo.toml index fe314ecd5292b..35d51cbcbbb1b 100644 --- a/frame/support/test/pallet/Cargo.toml +++ b/frame/support/test/pallet/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../system" } diff --git 
a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 6a07334c693f8..b1e8e4a8985ce 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", path = "../../primitives/io", default-features = false } diff --git a/frame/system/benchmarking/Cargo.toml b/frame/system/benchmarking/Cargo.toml index c2f45c8e1132f..a895636a00a17 100644 --- a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 97bc55f78da0e..7545961504116 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", 
version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index 3601b02c6f100..89fc77816d2ab 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 55c7a37f3101c..4af927f31d461 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { 
version = "1.0.126", optional = true } smallvec = "1.4.1" diff --git a/frame/transaction-storage/Cargo.toml b/frame/transaction-storage/Cargo.toml index 2e9588422c4b6..ccac144560bf2 100644 --- a/frame/transaction-storage/Cargo.toml +++ b/frame/transaction-storage/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true } hex-literal = { version = "0.3.1", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index cafbd003b7573..4faf88433e321 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -17,7 +17,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", features = ["derive"], optional = true } impl-trait-for-tuples = "0.2.1" diff --git a/frame/uniques/Cargo.toml b/frame/uniques/Cargo.toml index 77e1c707f8459..d3746fd503d35 100644 --- a/frame/uniques/Cargo.toml +++ b/frame/uniques/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", 
default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index c3a914bfa9250..049cba68cec43 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index ae5141a6d2793..ecfd0ea8fd422 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index 
ea019a7ac1467..1b13330947595 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 018f3172d54d8..71b354e699273 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" static_assertions = "1.1.0" num-traits = { version = "0.2.8", default-features = false } diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index fe966e90c8836..561aeeeebdf33 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } -scale-info = { version = "0.11.0", 
default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index 8105a5422feb3..1e19ff53bb205 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index db99210726013..ad5e05bfa6a60 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = 
false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 6c6d5d8fc6fbd..84f16e5e7a1cd 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../arithmetic" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 20c45460231b5..8bde234cae2e9 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.126", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 387e8be3fd5ee..d748a1951c8fd 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = 
["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 5e14690eafa9f..63d24dfb3993c 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-npos-elections-solution-type = { version = "4.0.0-dev", path = "./solution-type" } diff --git a/primitives/npos-elections/fuzzer/Cargo.toml b/primitives/npos-elections/fuzzer/Cargo.toml index fb9113ce1525b..4b79e0561eb9e 100644 --- a/primitives/npos-elections/fuzzer/Cargo.toml +++ b/primitives/npos-elections/fuzzer/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } honggfuzz = "0.5" rand = { version = "0.7.3", features = ["std", "small_rng"] } sp-npos-elections = { version = "4.0.0-dev", path = ".." 
} diff --git a/primitives/npos-elections/solution-type/Cargo.toml b/primitives/npos-elections/solution-type/Cargo.toml index 6a06b395e77e3..296bee861188d 100644 --- a/primitives/npos-elections/solution-type/Cargo.toml +++ b/primitives/npos-elections/solution-type/Cargo.toml @@ -22,7 +22,7 @@ proc-macro-crate = "1.0.0" [dev-dependencies] parity-scale-codec = "2.0.1" -scale-info = "0.11.0" +scale-info = "0.10.0" sp-arithmetic = { path = "../../arithmetic", version = "4.0.0-dev" } # used by generate_solution_type: sp-npos-elections = { path = "..", version = "4.0.0-dev" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 5fa5bf5d6cf45..15a599e3b77de 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../arithmetic" } diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index 0ec21a42d95e2..ffb445e5dbda7 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", 
default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index aac3cbed71b70..23bf4f017b109 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/transaction-storage-proof/Cargo.toml b/primitives/transaction-storage-proof/Cargo.toml index 7a175399f0fec..2ca5dcd224fad 100644 --- a/primitives/transaction-storage-proof/Cargo.toml +++ b/primitives/transaction-storage-proof/Cargo.toml @@ -19,7 +19,7 @@ sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-trie = { version = "4.0.0-dev", optional = true, path = "../trie" } sp-core = { version = "4.0.0-dev", path = "../core", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } log = { version = "0.4.8", optional = true } async-trait = { version = "0.1.50", optional = true } diff --git a/primitives/trie/Cargo.toml b/primitives/trie/Cargo.toml index e40d596b9f20b..690cf9bbf4e6a 100644 --- 
a/primitives/trie/Cargo.toml +++ b/primitives/trie/Cargo.toml @@ -19,7 +19,7 @@ harness = false [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } hash-db = { version = "0.15.2", default-features = false } trie-db = { version = "0.22.6", default-features = false } diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index 72aa68130548a..b5ce777f993c4 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] impl-serde = { version = "0.3.1", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-version-proc-macro = { version = "4.0.0-dev", default-features = false, path = "proc-macro" } diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 5ab1e68e1e3a3..349b3cb66fff5 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.10.0-dev", default-features = false, path = " sp-consensus-babe = { version = "0.10.0-dev", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "4.0.0-dev", default-features = false, path = "../../primitives/block-builder" } 
codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.11.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } sp-inherents = { version = "4.0.0-dev", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.27.0", default-features = false } diff --git a/utils/frame/rpc/support/Cargo.toml b/utils/frame/rpc/support/Cargo.toml index b3b51df5e2a44..98853d6cce59f 100644 --- a/utils/frame/rpc/support/Cargo.toml +++ b/utils/frame/rpc/support/Cargo.toml @@ -25,5 +25,5 @@ sc-rpc-api = { version = "0.10.0-dev", path = "../../../../client/rpc-api" } [dev-dependencies] frame-system = { version = "4.0.0-dev", path = "../../../../frame/system" } -scale-info = "0.11.0" +scale-info = "0.10.0" tokio = "1.10" From c3567baa2f61d9a8cfe3b99aee7cb8fffe36ca24 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 31 Aug 2021 17:11:04 +0100 Subject: [PATCH 475/503] Add Runtime type --- frame/support/Cargo.toml | 2 +- .../support/procedural/src/construct_runtime/expand/metadata.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index aef13f1a339ad..654edec8801ce 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } -frame-metadata = { version = "14.0.0-rc.2", default-features = false, features = ["v14"] } +frame-metadata = { version = "14.0.0-rc.3", default-features = false, 
features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index 1a64059b04660..c8445e0bbc255 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -83,6 +83,7 @@ pub fn expand_runtime_metadata( }) .collect(), }, + #scrate::scale_info::meta_type::<#runtime>() ).into() } } From 3497b6f5cbfe95dfb44f0c20f4763dff6b578e41 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 31 Aug 2021 17:13:41 +0100 Subject: [PATCH 476/503] Update to scale-info 0.12 --- Cargo.lock | 185 ++++++++++-------- bin/node-template/pallets/template/Cargo.toml | 2 +- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/executor/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/atomic-swap/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- frame/democracy/Cargo.toml | 2 +- .../election-provider-multi-phase/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/gilt/Cargo.toml | 2 
+- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/merkle-mountain-range/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- frame/node-authorization/Cargo.toml | 2 +- frame/offences/Cargo.toml | 2 +- frame/offences/benchmarking/Cargo.toml | 2 +- frame/proxy/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 2 +- frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/session/benchmarking/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- frame/staking/Cargo.toml | 2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/support/test/Cargo.toml | 2 +- frame/support/test/pallet/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/system/benchmarking/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/transaction-storage/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/uniques/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authority-discovery/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/npos-elections/Cargo.toml | 2 +- primitives/npos-elections/fuzzer/Cargo.toml | 2 +- .../npos-elections/solution-type/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/session/Cargo.toml | 2 +- primitives/staking/Cargo.toml | 2 +- .../transaction-storage-proof/Cargo.toml | 2 +- primitives/trie/Cargo.toml | 2 +- primitives/version/Cargo.toml | 2 +- 
test-utils/runtime/Cargo.toml | 2 +- utils/frame/rpc/support/Cargo.toml | 2 +- 82 files changed, 180 insertions(+), 167 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 07ea541ff4abc..b6f3df43e5975 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1807,7 +1807,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.4", - "scale-info", + "scale-info 0.10.0", ] [[package]] @@ -1890,7 +1890,7 @@ dependencies = [ "log 0.4.14", "parity-scale-codec", "paste 1.0.4", - "scale-info", + "scale-info 0.12.0", "sp-api", "sp-io", "sp-runtime", @@ -1947,7 +1947,7 @@ dependencies = [ "pallet-balances", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-inherents", "sp-io", @@ -1959,13 +1959,13 @@ dependencies = [ [[package]] name = "frame-metadata" -version = "14.0.0-rc.2" +version = "14.0.0-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb2c4a97cc93a372b141adb582d6428dc1ba1cded9ea624c4bbc40fe568cf247" +checksum = "76dfa4ecc509676b1416b834196a27639110e66e9da68342e709e5f7b855c54f" dependencies = [ "cfg-if 1.0.0", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", ] @@ -1985,7 +1985,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "pretty_assertions 0.6.1", - "scale-info", + "scale-info 0.12.0", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -2040,7 +2040,7 @@ dependencies = [ "parity-scale-codec", "pretty_assertions 0.6.1", "rustversion", - "scale-info", + "scale-info 0.12.0", "serde", "sp-arithmetic", "sp-core", @@ -2059,7 +2059,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", ] [[package]] @@ -2070,7 +2070,7 @@ dependencies = [ "frame-support", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-core", "sp-externalities", @@ -2089,7 +2089,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + 
"scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -4406,7 +4406,7 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "sc-executor", - "scale-info", + "scale-info 0.12.0", "sp-application-crypto", "sp-consensus-babe", "sp-core", @@ -4440,7 +4440,7 @@ version = "2.0.0" dependencies = [ "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4546,7 +4546,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-api", "sp-authority-discovery", "sp-block-builder", @@ -4625,7 +4625,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-api", "sp-block-builder", "sp-consensus-aura", @@ -4892,7 +4892,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -4907,7 +4907,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -4922,7 +4922,7 @@ dependencies = [ "frame-system", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-application-crypto", "sp-consensus-aura", "sp-core", @@ -4939,7 +4939,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-application-crypto", "sp-authority-discovery", "sp-core", @@ -4956,7 +4956,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-authorship", "sp-core", "sp-io", @@ -4981,7 +4981,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-application-crypto", "sp-consensus-babe", "sp-consensus-vrf", @@ -5003,7 +5003,7 @@ 
dependencies = [ "log 0.4.14", "pallet-transaction-payment", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5020,7 +5020,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5037,7 +5037,7 @@ dependencies = [ "hex-literal", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5066,7 +5066,7 @@ dependencies = [ "pwasm-utils", "rand 0.8.4", "rand_pcg 0.3.0", - "scale-info", + "scale-info 0.12.0", "serde", "smallvec 1.6.1", "sp-core", @@ -5084,7 +5084,7 @@ version = "4.0.0-dev" dependencies = [ "bitflags", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-core", "sp-runtime", @@ -5125,7 +5125,7 @@ version = "4.0.0-dev" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-api", "sp-runtime", "sp-std", @@ -5141,7 +5141,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-core", "sp-io", @@ -5162,7 +5162,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.7.3", - "scale-info", + "scale-info 0.12.0", "sp-arithmetic", "sp-core", "sp-io", @@ -5183,7 +5183,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5200,7 +5200,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-npos-elections", @@ -5219,7 +5219,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5235,7 +5235,7 @@ dependencies = [ "lite-json", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", 
"sp-io", "sp-keystore", @@ -5250,7 +5250,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5267,7 +5267,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-arithmetic", "sp-core", "sp-io", @@ -5293,7 +5293,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-application-crypto", "sp-core", "sp-finality-grandpa", @@ -5315,7 +5315,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5333,7 +5333,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-application-crypto", "sp-core", "sp-io", @@ -5351,7 +5351,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-keyring", @@ -5369,7 +5369,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5385,7 +5385,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5404,7 +5404,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5453,7 +5453,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5468,7 +5468,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5483,7 +5483,7 @@ dependencies = [ "frame-system", "log 
0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5499,7 +5499,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-core", "sp-io", @@ -5526,7 +5526,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5544,7 +5544,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5559,7 +5559,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5574,7 +5574,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5590,7 +5590,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5606,7 +5606,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5623,7 +5623,7 @@ dependencies = [ "log 0.4.14", "pallet-timestamp", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5648,7 +5648,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5666,7 +5666,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5689,7 +5689,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info", + "scale-info 0.12.0", "serde", "sp-application-crypto", "sp-core", @@ -5727,7 
+5727,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5742,7 +5742,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5757,7 +5757,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-inherents", "sp-io", @@ -5776,7 +5776,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-core", "sp-io", @@ -5793,7 +5793,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "serde_json", "smallvec 1.6.1", @@ -5839,7 +5839,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-core", "sp-inherents", @@ -5859,7 +5859,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-core", "sp-io", @@ -5876,7 +5876,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5892,7 +5892,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -5909,7 +5909,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-io", "sp-runtime", @@ -6436,7 +6436,7 @@ dependencies = [ "fixed-hash", "impl-codec", "impl-serde", - "scale-info", + "scale-info 0.10.0", "uint", ] @@ -8456,6 +8456,19 @@ name = "scale-info" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d2e62ff266e136db561a007c84569985805f84a1d5a08278e52c36aacb6e061b" +dependencies = [ + "bitvec 0.20.2", + "cfg-if 1.0.0", + "derive_more", + "parity-scale-codec", + "scale-info-derive", +] + +[[package]] +name = "scale-info" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cf40f28c3eca6a9abe66aa368d7475f6c92a626fb5737d131c82c0170b24f36" dependencies = [ "bitvec 0.20.2", "cfg-if 1.0.0", @@ -8932,7 +8945,7 @@ name = "sp-application-crypto" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-core", "sp-io", @@ -8961,7 +8974,7 @@ dependencies = [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info", + "scale-info 0.12.0", "serde", "sp-debug-derive", "sp-std", @@ -8983,7 +8996,7 @@ name = "sp-authority-discovery" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-api", "sp-application-crypto", "sp-runtime", @@ -9054,7 +9067,7 @@ version = "0.10.0-dev" dependencies = [ "async-trait", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-api", "sp-application-crypto", "sp-consensus", @@ -9072,7 +9085,7 @@ dependencies = [ "async-trait", "merlin", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-api", "sp-application-crypto", @@ -9103,7 +9116,7 @@ name = "sp-consensus-slots" version = "0.10.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-arithmetic", "sp-runtime", ] @@ -9146,7 +9159,7 @@ dependencies = [ "primitive-types", "rand 0.7.3", "regex", - "scale-info", + "scale-info 0.12.0", "schnorrkel", "secrecy", "serde", @@ -9201,7 +9214,7 @@ dependencies = [ "finality-grandpa", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "serde", "sp-api", "sp-application-crypto", @@ -9291,7 +9304,7 @@ version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "rand 0.7.3", - "scale-info", + "scale-info 
0.12.0", "serde", "sp-arithmetic", "sp-core", @@ -9308,7 +9321,7 @@ dependencies = [ "honggfuzz", "parity-scale-codec", "rand 0.7.3", - "scale-info", + "scale-info 0.12.0", "sp-npos-elections", "sp-runtime", "structopt", @@ -9322,7 +9335,7 @@ dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", "quote", - "scale-info", + "scale-info 0.12.0", "sp-arithmetic", "sp-npos-elections", "syn", @@ -9367,7 +9380,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "rand 0.7.3", - "scale-info", + "scale-info 0.12.0", "serde", "serde_json", "sp-api", @@ -9480,7 +9493,7 @@ name = "sp-session" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-api", "sp-core", "sp-runtime", @@ -9493,7 +9506,7 @@ name = "sp-staking" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-runtime", "sp-std", ] @@ -9611,7 +9624,7 @@ dependencies = [ "async-trait", "log 0.4.14", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-inherents", "sp-runtime", @@ -9628,7 +9641,7 @@ dependencies = [ "hex-literal", "memory-db", "parity-scale-codec", - "scale-info", + "scale-info 0.12.0", "sp-core", "sp-runtime", "sp-std", @@ -9655,7 +9668,7 @@ dependencies = [ "impl-serde", "parity-scale-codec", "parity-wasm 0.42.2", - "scale-info", + "scale-info 0.12.0", "serde", "sp-runtime", "sp-std", @@ -9840,7 +9853,7 @@ dependencies = [ "jsonrpc-client-transports", "parity-scale-codec", "sc-rpc-api", - "scale-info", + "scale-info 0.12.0", "serde", "sp-storage", "tokio", @@ -9927,7 +9940,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info", + "scale-info 0.12.0", "serde", "sp-api", "sp-application-crypto", diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index d67f10ab99d66..bf53f49bba572 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml 
@@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/support" } frame-system = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/system" } frame-benchmarking = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/benchmarking", optional = true } diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index 64c0564cdbb7d..57735cd4fc69b 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } pallet-aura = { version = "4.0.0-dev", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../../../frame/balances" } diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index e01fc2baadec4..c87bb41a57e68 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0" } -scale-info = { version = "0.10.0", features = ["derive"] } +scale-info = { version = "0.12.0", features = ["derive"] } node-primitives = { version = "2.0.0", path = "../primitives" } node-runtime = { version = "3.0.0-dev", path 
= "../runtime" } sc-executor = { version = "0.10.0-dev", path = "../../../client/executor" } diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index e89485792d6a6..61748e895f49a 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index 4aa96d91a0355..dc5c8fce95f6b 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } log = { version = "0.4.14", default-features = false } diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index f535657a29cb8..42be1e8543e69 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { 
version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index 957efe999df02..50b26dfb179c3 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index 1cd6d04a94b11..61f9095d47db0 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -17,7 +17,7 @@ sp-application-crypto = { version = "4.0.0-dev", default-features = false, path codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/authority-discovery/Cargo.toml 
b/frame/authority-discovery/Cargo.toml index 0c198a79011d4..0078bd933f255 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -18,7 +18,7 @@ sp-application-crypto = { version = "4.0.0-dev", default-features = false, path codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } pallet-session = { version = "4.0.0-dev", features = [ "historical", diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index 93fc9ee22acf5..53c8165921fa6 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-authorship = { version = "4.0.0-dev", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index 924b3c2ce1f3d..0d65428d7d5ac 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", 
default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index d0e2eab7032e7..e63bac8a868a5 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index a3d3126e48c02..43f2fcfb6981d 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.3", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "4.0.0-dev", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { 
version = "4.0.0-dev", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index f3d4a4e736f0c..c46803665e986 100644 --- a/frame/bounties/Cargo.toml +++ b/frame/bounties/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index 5a662719eefe5..793cd9b040102 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index d61ffbb01efb2..508acaa8d4902 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } 
-scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } pwasm-utils = { version = "0.18", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index 8252d280ef18c..9783a84bfd905 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } serde = { version = "1", features = ["derive"], optional = true } # Substrate Dependencies (This crate should not rely on frame) diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index c57e0cee213ab..30d3a5cc93f74 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } # Substrate Dependencies pallet-contracts-primitives = { version = "4.0.0-dev", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index 212f37b13f836..3457037c0aec6 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = 
"1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index 5361269597d07..a790f431b1e71 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -17,7 +17,7 @@ static_assertions = "1.1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index 7256da32d5ba8..d241e39965e09 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = 
"../../primitives/runtime" } sp-npos-elections = { version = "4.0.0-dev", default-features = false, path = "../../primitives/npos-elections" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/elections/Cargo.toml b/frame/elections/Cargo.toml index 474e4125d4a87..7dcc408bd04fd 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index 3755e5edac15d..b2c2ec1c4a94c 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index 1cbba6e1a06e5..133372ba3891c 100644 --- 
a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index c22f72790ca94..76954fe7d97c1 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index 8a8a1ca1c080c..4ff6b28297a99 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = 
"4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/gilt/Cargo.toml b/frame/gilt/Cargo.toml index 4f46a1170ffa4..171d5f491ee53 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../primitives/arithmetic" } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index ae29deff74bd8..08b02afa3599b 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 
0f69a197789b6..19b73baf0ae9f 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index ad27c4a0bf502..79935f8a59e5a 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "4.0.0-dev", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index 6415313e0bd35..572c9869a1a43 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = 
"2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index 90705c85f5001..ffc9831ef8e2b 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 529a1f5f89f97..4fbc4c61e8370 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } log = { version = "0.4.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", 
default-features = false, path = "../../primitives/io" } diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index 56f7cae8cc1e1..826b4dbabd256 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } mmr-lib = { package = "ckb-merkle-mountain-range", default-features = false, version = "0.3.1" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index 196e8ea0165bd..d704ea0380c80 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 83bdb6af141aa..7ed54f5f20032 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, 
features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index 566730abf7fb9..1a176cbf07fa7 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index b0bb1c066d6a7..982d99cf40858 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.126", optional = true } 
sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index 8198bacb177cb..8b90c473d0ed7 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../system" } diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index 5a0a285bac8d5..0cabe3f047113 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index 28134dfcdaaa1..26468abfd68f1 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,7 +15,7 @@ 
targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index 789aa58005f94..ec38df80e9076 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index 307d547bdd158..6f5919f86c21e 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -11,7 +11,7 @@ readme = "README.md" [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { 
version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 4b3cd1d5ca1cf..3c26675a76412 100644 --- a/frame/scored-pool/Cargo.toml +++ b/frame/scored-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index 7bd3c7c9aee33..7b5480fa840d7 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index 65e42b4334aaa..a9e05b88a570b 100644 --- a/frame/session/benchmarking/Cargo.toml +++ 
b/frame/session/benchmarking/Cargo.toml @@ -29,7 +29,7 @@ rand = { version = "0.7.2", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", features = [ "derive", ] } -scale-info = "0.10.0" +scale-info = "0.12.0" sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } pallet-staking-reward-curve = { version = "4.0.0-dev", path = "../../staking/reward-curve" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index b790c13e443ae..5c9ac478e6bfe 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index c7ae438ede870..cab57e00d82df 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.126", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = 
{ version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index b768326ba68d3..b0f9371cd97ff 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 654edec8801ce..fc6616df1a68a 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-metadata = { version = "14.0.0-rc.3", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index 0931998df1875..f0bda64980362 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,7 +14,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.10.0-dev", optional = true, path = "../../../primitives/state-machine" } diff --git a/frame/support/test/pallet/Cargo.toml b/frame/support/test/pallet/Cargo.toml index 35d51cbcbbb1b..2f0c1c4a907ba 100644 --- a/frame/support/test/pallet/Cargo.toml +++ b/frame/support/test/pallet/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../system" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index b1e8e4a8985ce..3884639316b08 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = 
"0.12.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", path = "../../primitives/io", default-features = false } diff --git a/frame/system/benchmarking/Cargo.toml b/frame/system/benchmarking/Cargo.toml index a895636a00a17..915b103b57196 100644 --- a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 7545961504116..7881428d504c3 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, 
path = "../../primitives/runtime" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index 89fc77816d2ab..79e55169e5828 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 4af927f31d461..3dfc8e9aa1cd0 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true } smallvec = "1.4.1" diff --git a/frame/transaction-storage/Cargo.toml b/frame/transaction-storage/Cargo.toml index ccac144560bf2..1395acd0d0ab5 100644 --- a/frame/transaction-storage/Cargo.toml +++ b/frame/transaction-storage/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true } hex-literal = { version = "0.3.1", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } 
-scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index 4faf88433e321..50e8f3be9222a 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -17,7 +17,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", features = ["derive"], optional = true } impl-trait-for-tuples = "0.2.1" diff --git a/frame/uniques/Cargo.toml b/frame/uniques/Cargo.toml index d3746fd503d35..2133450983c88 100644 --- a/frame/uniques/Cargo.toml +++ b/frame/uniques/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index 049cba68cec43..5d5a1eff1570a 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -14,7 +14,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index ecfd0ea8fd422..a472362d02de4 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index 1b13330947595..8029084a39d3d 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", 
default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 71b354e699273..433e06bc5a16d 100644 --- a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" static_assertions = "1.1.0" num-traits = { version = "0.2.8", default-features = false } diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index 561aeeeebdf33..8dcb059ed5040 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index 1e19ff53bb205..fadc738a4b5dc 100644 --- 
a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index ad5e05bfa6a60..5a5ab41c92e49 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index 84f16e5e7a1cd..bc95ec739d6de 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = 
"2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../arithmetic" } diff --git a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index 8bde234cae2e9..dd0736c380f81 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.126", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index d748a1951c8fd..243d043e64088 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 
63d24dfb3993c..841d3be148007 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-npos-elections-solution-type = { version = "4.0.0-dev", path = "./solution-type" } diff --git a/primitives/npos-elections/fuzzer/Cargo.toml b/primitives/npos-elections/fuzzer/Cargo.toml index 4b79e0561eb9e..6d431ddc5328b 100644 --- a/primitives/npos-elections/fuzzer/Cargo.toml +++ b/primitives/npos-elections/fuzzer/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } honggfuzz = "0.5" rand = { version = "0.7.3", features = ["std", "small_rng"] } sp-npos-elections = { version = "4.0.0-dev", path = ".." 
} diff --git a/primitives/npos-elections/solution-type/Cargo.toml b/primitives/npos-elections/solution-type/Cargo.toml index 296bee861188d..87023cb361b7b 100644 --- a/primitives/npos-elections/solution-type/Cargo.toml +++ b/primitives/npos-elections/solution-type/Cargo.toml @@ -22,7 +22,7 @@ proc-macro-crate = "1.0.0" [dev-dependencies] parity-scale-codec = "2.0.1" -scale-info = "0.10.0" +scale-info = "0.12.0" sp-arithmetic = { path = "../../arithmetic", version = "4.0.0-dev" } # used by generate_solution_type: sp-npos-elections = { path = "..", version = "4.0.0-dev" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 15a599e3b77de..95394ff894d41 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../arithmetic" } diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index ffb445e5dbda7..9555d2022e092 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", 
default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index 23bf4f017b109..8fdcb940ed40b 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/transaction-storage-proof/Cargo.toml b/primitives/transaction-storage-proof/Cargo.toml index 2ca5dcd224fad..38748c9031153 100644 --- a/primitives/transaction-storage-proof/Cargo.toml +++ b/primitives/transaction-storage-proof/Cargo.toml @@ -19,7 +19,7 @@ sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-trie = { version = "4.0.0-dev", optional = true, path = "../trie" } sp-core = { version = "4.0.0-dev", path = "../core", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } log = { version = "0.4.8", optional = true } async-trait = { version = "0.1.50", optional = true } diff --git a/primitives/trie/Cargo.toml b/primitives/trie/Cargo.toml index 690cf9bbf4e6a..5c3a742746c85 100644 --- 
a/primitives/trie/Cargo.toml +++ b/primitives/trie/Cargo.toml @@ -19,7 +19,7 @@ harness = false [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } hash-db = { version = "0.15.2", default-features = false } trie-db = { version = "0.22.6", default-features = false } diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index b5ce777f993c4..b9187aef5dee9 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] impl-serde = { version = "0.3.1", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-version-proc-macro = { version = "4.0.0-dev", default-features = false, path = "proc-macro" } diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index 349b3cb66fff5..e677bf6b0fb7b 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.10.0-dev", default-features = false, path = " sp-consensus-babe = { version = "0.10.0-dev", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "4.0.0-dev", default-features = false, path = "../../primitives/block-builder" } 
codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.10.0", default-features = false, features = ["derive"] } +scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } sp-inherents = { version = "4.0.0-dev", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.27.0", default-features = false } diff --git a/utils/frame/rpc/support/Cargo.toml b/utils/frame/rpc/support/Cargo.toml index 98853d6cce59f..cd343d219a2ad 100644 --- a/utils/frame/rpc/support/Cargo.toml +++ b/utils/frame/rpc/support/Cargo.toml @@ -25,5 +25,5 @@ sc-rpc-api = { version = "0.10.0-dev", path = "../../../../client/rpc-api" } [dev-dependencies] frame-system = { version = "4.0.0-dev", path = "../../../../frame/system" } -scale-info = "0.10.0" +scale-info = "0.12.0" tokio = "1.10" From 894f0a4c0a128fecab63ce628110865f731bbf0d Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 1 Sep 2021 16:47:50 +0100 Subject: [PATCH 477/503] Update to scale-info 1.0 --- bin/node-template/pallets/template/Cargo.toml | 2 +- bin/node-template/runtime/Cargo.toml | 2 +- bin/node/executor/Cargo.toml | 2 +- bin/node/primitives/Cargo.toml | 2 +- bin/node/runtime/Cargo.toml | 2 +- client/finality-grandpa-warp-sync/Cargo.toml | 2 +- client/finality-grandpa/Cargo.toml | 2 +- client/finality-grandpa/rpc/Cargo.toml | 2 +- frame/assets/Cargo.toml | 2 +- frame/atomic-swap/Cargo.toml | 2 +- frame/aura/Cargo.toml | 2 +- frame/authority-discovery/Cargo.toml | 2 +- frame/authorship/Cargo.toml | 2 +- frame/babe/Cargo.toml | 2 +- frame/balances/Cargo.toml | 2 +- frame/benchmarking/Cargo.toml | 2 +- frame/bounties/Cargo.toml | 2 +- frame/collective/Cargo.toml | 2 +- frame/contracts/Cargo.toml | 2 +- frame/contracts/common/Cargo.toml | 2 +- frame/contracts/rpc/runtime-api/Cargo.toml | 2 +- 
frame/democracy/Cargo.toml | 2 +- frame/election-provider-multi-phase/Cargo.toml | 2 +- frame/elections-phragmen/Cargo.toml | 2 +- frame/elections/Cargo.toml | 2 +- frame/example-offchain-worker/Cargo.toml | 2 +- frame/example-parallel/Cargo.toml | 2 +- frame/example/Cargo.toml | 2 +- frame/executive/Cargo.toml | 2 +- frame/gilt/Cargo.toml | 2 +- frame/grandpa/Cargo.toml | 2 +- frame/identity/Cargo.toml | 2 +- frame/im-online/Cargo.toml | 2 +- frame/indices/Cargo.toml | 2 +- frame/lottery/Cargo.toml | 2 +- frame/membership/Cargo.toml | 2 +- frame/merkle-mountain-range/Cargo.toml | 2 +- frame/multisig/Cargo.toml | 2 +- frame/nicks/Cargo.toml | 2 +- frame/node-authorization/Cargo.toml | 2 +- frame/offences/Cargo.toml | 2 +- frame/offences/benchmarking/Cargo.toml | 2 +- frame/proxy/Cargo.toml | 2 +- frame/randomness-collective-flip/Cargo.toml | 2 +- frame/recovery/Cargo.toml | 2 +- frame/scheduler/Cargo.toml | 2 +- frame/scored-pool/Cargo.toml | 2 +- frame/session/Cargo.toml | 2 +- frame/session/benchmarking/Cargo.toml | 2 +- frame/society/Cargo.toml | 2 +- frame/staking/Cargo.toml | 2 +- frame/sudo/Cargo.toml | 2 +- frame/support/Cargo.toml | 2 +- frame/support/test/Cargo.toml | 2 +- frame/support/test/pallet/Cargo.toml | 2 +- frame/system/Cargo.toml | 2 +- frame/system/benchmarking/Cargo.toml | 2 +- frame/timestamp/Cargo.toml | 2 +- frame/tips/Cargo.toml | 2 +- frame/transaction-payment/Cargo.toml | 2 +- frame/transaction-storage/Cargo.toml | 2 +- frame/treasury/Cargo.toml | 2 +- frame/uniques/Cargo.toml | 2 +- frame/utility/Cargo.toml | 2 +- frame/vesting/Cargo.toml | 2 +- primitives/application-crypto/Cargo.toml | 2 +- primitives/arithmetic/Cargo.toml | 2 +- primitives/authority-discovery/Cargo.toml | 2 +- primitives/consensus/aura/Cargo.toml | 2 +- primitives/consensus/babe/Cargo.toml | 2 +- primitives/consensus/slots/Cargo.toml | 2 +- primitives/core/Cargo.toml | 2 +- primitives/finality-grandpa/Cargo.toml | 2 +- primitives/npos-elections/Cargo.toml | 2 +- 
primitives/npos-elections/fuzzer/Cargo.toml | 2 +- primitives/npos-elections/solution-type/Cargo.toml | 2 +- primitives/runtime/Cargo.toml | 2 +- primitives/session/Cargo.toml | 2 +- primitives/staking/Cargo.toml | 2 +- primitives/transaction-storage-proof/Cargo.toml | 2 +- primitives/trie/Cargo.toml | 2 +- primitives/version/Cargo.toml | 2 +- test-utils/runtime/Cargo.toml | 2 +- utils/frame/rpc/support/Cargo.toml | 2 +- 84 files changed, 84 insertions(+), 84 deletions(-) diff --git a/bin/node-template/pallets/template/Cargo.toml b/bin/node-template/pallets/template/Cargo.toml index bf53f49bba572..b3eb747625b4f 100644 --- a/bin/node-template/pallets/template/Cargo.toml +++ b/bin/node-template/pallets/template/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/support" } frame-system = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/system" } frame-benchmarking = { default-features = false, version = "4.0.0-dev", path = "../../../../frame/benchmarking", optional = true } diff --git a/bin/node-template/runtime/Cargo.toml b/bin/node-template/runtime/Cargo.toml index 57735cd4fc69b..47e67af2b9ae1 100644 --- a/bin/node-template/runtime/Cargo.toml +++ b/bin/node-template/runtime/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } pallet-aura = { version 
= "4.0.0-dev", default-features = false, path = "../../../frame/aura" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../../../frame/balances" } diff --git a/bin/node/executor/Cargo.toml b/bin/node/executor/Cargo.toml index c87bb41a57e68..f283a913915f3 100644 --- a/bin/node/executor/Cargo.toml +++ b/bin/node/executor/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0" } -scale-info = { version = "0.12.0", features = ["derive"] } +scale-info = { version = "1.0", features = ["derive"] } node-primitives = { version = "2.0.0", path = "../primitives" } node-runtime = { version = "3.0.0-dev", path = "../runtime" } sc-executor = { version = "0.10.0-dev", path = "../../../client/executor" } diff --git a/bin/node/primitives/Cargo.toml b/bin/node/primitives/Cargo.toml index 61748e895f49a..12ec57e4d55b6 100644 --- a/bin/node/primitives/Cargo.toml +++ b/bin/node/primitives/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../frame/system" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/core" } diff --git a/bin/node/runtime/Cargo.toml b/bin/node/runtime/Cargo.toml index dc5c8fce95f6b..66bc8e2a3d7b8 100644 --- a/bin/node/runtime/Cargo.toml +++ b/bin/node/runtime/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { 
version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } static_assertions = "1.1.0" hex-literal = { version = "0.3.1", optional = true } log = { version = "0.4.14", default-features = false } diff --git a/client/finality-grandpa-warp-sync/Cargo.toml b/client/finality-grandpa-warp-sync/Cargo.toml index c535898d59e86..7eebac2f382a8 100644 --- a/client/finality-grandpa-warp-sync/Cargo.toml +++ b/client/finality-grandpa-warp-sync/Cargo.toml @@ -26,7 +26,7 @@ sp-finality-grandpa = { version = "4.0.0-dev", path = "../../primitives/finality sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime" } [dev-dependencies] -finality-grandpa = { version = "0.14.3" } +finality-grandpa = { version = "0.14.4" } rand = "0.8" sc-block-builder = { version = "0.10.0-dev", path = "../block-builder" } sp-consensus = { version = "0.10.0-dev", path = "../../primitives/consensus/common" } diff --git a/client/finality-grandpa/Cargo.toml b/client/finality-grandpa/Cargo.toml index a16addfad35b1..965f192ccefe4 100644 --- a/client/finality-grandpa/Cargo.toml +++ b/client/finality-grandpa/Cargo.toml @@ -43,7 +43,7 @@ sc-network-gossip = { version = "0.10.0-dev", path = "../network-gossip" } sp-finality-grandpa = { version = "4.0.0-dev", path = "../../primitives/finality-grandpa" } prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus", version = "0.9.0" } sc-block-builder = { version = "0.10.0-dev", path = "../block-builder" } -finality-grandpa = { version = "0.14.3", features = ["derive-codec"] } +finality-grandpa = { version = "0.14.4", features = ["derive-codec"] } async-trait = "0.1.50" [dev-dependencies] diff --git a/client/finality-grandpa/rpc/Cargo.toml b/client/finality-grandpa/rpc/Cargo.toml index 1325298f3362b..d2976ee71275f 100644 --- a/client/finality-grandpa/rpc/Cargo.toml +++ b/client/finality-grandpa/rpc/Cargo.toml @@ -14,7 
+14,7 @@ sc-rpc = { version = "4.0.0-dev", path = "../../rpc" } sp-blockchain = { version = "4.0.0-dev", path = "../../../primitives/blockchain" } sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } sp-runtime = { version = "4.0.0-dev", path = "../../../primitives/runtime" } -finality-grandpa = { version = "0.14.3", features = ["derive-codec"] } +finality-grandpa = { version = "0.14.4", features = ["derive-codec"] } jsonrpc-core = "18.0.0" jsonrpc-core-client = "18.0.0" jsonrpc-derive = "18.0.0" diff --git a/frame/assets/Cargo.toml b/frame/assets/Cargo.toml index 42be1e8543e69..05e7912dd07c6 100644 --- a/frame/assets/Cargo.toml +++ b/frame/assets/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } # Needed for various traits. In our case, `OnFinalize`. 
sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/atomic-swap/Cargo.toml b/frame/atomic-swap/Cargo.toml index 50b26dfb179c3..53a8c3a81165b 100644 --- a/frame/atomic-swap/Cargo.toml +++ b/frame/atomic-swap/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/aura/Cargo.toml b/frame/aura/Cargo.toml index 61f9095d47db0..4ce48e4596595 100644 --- a/frame/aura/Cargo.toml +++ b/frame/aura/Cargo.toml @@ -17,7 +17,7 @@ sp-application-crypto = { version = "4.0.0-dev", default-features = false, path codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/authority-discovery/Cargo.toml b/frame/authority-discovery/Cargo.toml index 0078bd933f255..80a320c31e77f 100644 --- a/frame/authority-discovery/Cargo.toml +++ b/frame/authority-discovery/Cargo.toml @@ -18,7 +18,7 @@ sp-application-crypto = { version = "4.0.0-dev", 
default-features = false, path codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } pallet-session = { version = "4.0.0-dev", features = [ "historical", diff --git a/frame/authorship/Cargo.toml b/frame/authorship/Cargo.toml index 53c8165921fa6..120b72f8e6511 100644 --- a/frame/authorship/Cargo.toml +++ b/frame/authorship/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-authorship = { version = "4.0.0-dev", default-features = false, path = "../../primitives/authorship" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/babe/Cargo.toml b/frame/babe/Cargo.toml index 0d65428d7d5ac..d95f1419fd035 100644 --- a/frame/babe/Cargo.toml +++ b/frame/babe/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } frame-support = { version = "4.0.0-dev", default-features = false, path = 
"../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } diff --git a/frame/balances/Cargo.toml b/frame/balances/Cargo.toml index e63bac8a868a5..2263387d6d8ef 100644 --- a/frame/balances/Cargo.toml +++ b/frame/balances/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../benchmarking", optional = true } diff --git a/frame/benchmarking/Cargo.toml b/frame/benchmarking/Cargo.toml index 43f2fcfb6981d..ea690d966c979 100644 --- a/frame/benchmarking/Cargo.toml +++ b/frame/benchmarking/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] linregress = { version = "0.4.3", optional = true } paste = "1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-api = { version = "4.0.0-dev", path = "../../primitives/api", default-features = false } sp-runtime-interface = { version = "4.0.0-dev", path = "../../primitives/runtime-interface", default-features = false } sp-runtime = { version = "4.0.0-dev", path = "../../primitives/runtime", default-features = false } diff --git a/frame/bounties/Cargo.toml b/frame/bounties/Cargo.toml index c46803665e986..3bb184d5b3393 100644 --- a/frame/bounties/Cargo.toml +++ 
b/frame/bounties/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/collective/Cargo.toml b/frame/collective/Cargo.toml index 793cd9b040102..095b06d99c3c6 100644 --- a/frame/collective/Cargo.toml +++ b/frame/collective/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/contracts/Cargo.toml b/frame/contracts/Cargo.toml index 508acaa8d4902..dbc6a0f86e890 100644 --- a/frame/contracts/Cargo.toml +++ b/frame/contracts/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { version = "0.4", default-features = false } pwasm-utils = { version 
= "0.18", default-features = false } serde = { version = "1", optional = true, features = ["derive"] } diff --git a/frame/contracts/common/Cargo.toml b/frame/contracts/common/Cargo.toml index 9783a84bfd905..b441d88453ae2 100644 --- a/frame/contracts/common/Cargo.toml +++ b/frame/contracts/common/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] bitflags = "1.0" codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1", features = ["derive"], optional = true } # Substrate Dependencies (This crate should not rely on frame) diff --git a/frame/contracts/rpc/runtime-api/Cargo.toml b/frame/contracts/rpc/runtime-api/Cargo.toml index 30d3a5cc93f74..e5f6d1ec7eb8e 100644 --- a/frame/contracts/rpc/runtime-api/Cargo.toml +++ b/frame/contracts/rpc/runtime-api/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } # Substrate Dependencies pallet-contracts-primitives = { version = "4.0.0-dev", default-features = false, path = "../../common" } diff --git a/frame/democracy/Cargo.toml b/frame/democracy/Cargo.toml index 3457037c0aec6..94719553e28aa 100644 --- a/frame/democracy/Cargo.toml +++ b/frame/democracy/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info 
= { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/election-provider-multi-phase/Cargo.toml b/frame/election-provider-multi-phase/Cargo.toml index a790f431b1e71..b2d50321e8cd3 100644 --- a/frame/election-provider-multi-phase/Cargo.toml +++ b/frame/election-provider-multi-phase/Cargo.toml @@ -17,7 +17,7 @@ static_assertions = "1.1.0" codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { version = "0.4.14", default-features = false } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/elections-phragmen/Cargo.toml b/frame/elections-phragmen/Cargo.toml index d241e39965e09..f2771a9f72783 100644 --- a/frame/elections-phragmen/Cargo.toml +++ b/frame/elections-phragmen/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-npos-elections = { version = "4.0.0-dev", default-features = false, path = "../../primitives/npos-elections" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/elections/Cargo.toml 
b/frame/elections/Cargo.toml index 7dcc408bd04fd..8557cfba6b58c 100644 --- a/frame/elections/Cargo.toml +++ b/frame/elections/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/example-offchain-worker/Cargo.toml b/frame/example-offchain-worker/Cargo.toml index b2c2ec1c4a94c..1ccd9f33f0318 100644 --- a/frame/example-offchain-worker/Cargo.toml +++ b/frame/example-offchain-worker/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example-parallel/Cargo.toml b/frame/example-parallel/Cargo.toml index 133372ba3891c..5e0f6d4bc255a 100644 --- a/frame/example-parallel/Cargo.toml +++ b/frame/example-parallel/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", 
default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/example/Cargo.toml b/frame/example/Cargo.toml index 76954fe7d97c1..58daaf1c75558 100644 --- a/frame/example/Cargo.toml +++ b/frame/example/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } diff --git a/frame/executive/Cargo.toml b/frame/executive/Cargo.toml index 4ff6b28297a99..1abbf50e6a4c4 100644 --- a/frame/executive/Cargo.toml +++ b/frame/executive/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/gilt/Cargo.toml 
b/frame/gilt/Cargo.toml index 171d5f491ee53..c275b693d8f27 100644 --- a/frame/gilt/Cargo.toml +++ b/frame/gilt/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../primitives/arithmetic" } diff --git a/frame/grandpa/Cargo.toml b/frame/grandpa/Cargo.toml index 08b02afa3599b..53ab443783e5d 100644 --- a/frame/grandpa/Cargo.toml +++ b/frame/grandpa/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-finality-grandpa = { version = "4.0.0-dev", default-features = false, path = "../../primitives/finality-grandpa" } diff --git a/frame/identity/Cargo.toml b/frame/identity/Cargo.toml index 19b73baf0ae9f..598be25c5ef38 100644 --- a/frame/identity/Cargo.toml +++ b/frame/identity/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] 
} -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } enumflags2 = { version = "0.6.2" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/im-online/Cargo.toml b/frame/im-online/Cargo.toml index 79935f8a59e5a..a1efd626c0690 100644 --- a/frame/im-online/Cargo.toml +++ b/frame/im-online/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../primitives/application-crypto" } pallet-authorship = { version = "4.0.0-dev", default-features = false, path = "../authorship" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/indices/Cargo.toml b/frame/indices/Cargo.toml index 572c9869a1a43..86828dca26b09 100644 --- a/frame/indices/Cargo.toml +++ b/frame/indices/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-keyring = { version = "4.0.0-dev", optional = true, path = 
"../../primitives/keyring" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/lottery/Cargo.toml b/frame/lottery/Cargo.toml index ffc9831ef8e2b..f14d65310cc70 100644 --- a/frame/lottery/Cargo.toml +++ b/frame/lottery/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/membership/Cargo.toml b/frame/membership/Cargo.toml index 4fbc4c61e8370..31fb4c273df83 100644 --- a/frame/membership/Cargo.toml +++ b/frame/membership/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { version = "0.4.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/merkle-mountain-range/Cargo.toml b/frame/merkle-mountain-range/Cargo.toml index 826b4dbabd256..02b4be182ef82 100644 --- a/frame/merkle-mountain-range/Cargo.toml +++ b/frame/merkle-mountain-range/Cargo.toml @@ -13,7 +13,7 @@ targets = 
["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } mmr-lib = { package = "ckb-merkle-mountain-range", default-features = false, version = "0.3.1" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/multisig/Cargo.toml b/frame/multisig/Cargo.toml index d704ea0380c80..121c31efe7ea2 100644 --- a/frame/multisig/Cargo.toml +++ b/frame/multisig/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/nicks/Cargo.toml b/frame/nicks/Cargo.toml index 7ed54f5f20032..431ee2c84157c 100644 --- a/frame/nicks/Cargo.toml +++ b/frame/nicks/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = 
"4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/node-authorization/Cargo.toml b/frame/node-authorization/Cargo.toml index 1a176cbf07fa7..635e72e3a8b8a 100644 --- a/frame/node-authorization/Cargo.toml +++ b/frame/node-authorization/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/offences/Cargo.toml b/frame/offences/Cargo.toml index 982d99cf40858..8fdcbf46fa3e1 100644 --- a/frame/offences/Cargo.toml +++ b/frame/offences/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] pallet-balances = { version = "4.0.0-dev", default-features = false, path = "../balances" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } serde = { version = "1.0.126", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/offences/benchmarking/Cargo.toml b/frame/offences/benchmarking/Cargo.toml index 8b90c473d0ed7..b21e6cf9b7e13 100644 --- a/frame/offences/benchmarking/Cargo.toml +++ b/frame/offences/benchmarking/Cargo.toml @@ 
-14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../system" } diff --git a/frame/proxy/Cargo.toml b/frame/proxy/Cargo.toml index 0cabe3f047113..83db82990d105 100644 --- a/frame/proxy/Cargo.toml +++ b/frame/proxy/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["max-encoded-len"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/randomness-collective-flip/Cargo.toml b/frame/randomness-collective-flip/Cargo.toml index 26468abfd68f1..5e8eb6b082879 100644 --- a/frame/randomness-collective-flip/Cargo.toml +++ b/frame/randomness-collective-flip/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] safe-mix = { version = "1.0", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = 
"1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/recovery/Cargo.toml b/frame/recovery/Cargo.toml index ec38df80e9076..40a89e9b59f89 100644 --- a/frame/recovery/Cargo.toml +++ b/frame/recovery/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scheduler/Cargo.toml b/frame/scheduler/Cargo.toml index 6f5919f86c21e..62b21fe04c9df 100644 --- a/frame/scheduler/Cargo.toml +++ b/frame/scheduler/Cargo.toml @@ -11,7 +11,7 @@ readme = "README.md" [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/scored-pool/Cargo.toml b/frame/scored-pool/Cargo.toml index 3c26675a76412..9d5f156c175d5 100644 --- a/frame/scored-pool/Cargo.toml +++ 
b/frame/scored-pool/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } diff --git a/frame/session/Cargo.toml b/frame/session/Cargo.toml index 7b5480fa840d7..8f07de2e7a6db 100644 --- a/frame/session/Cargo.toml +++ b/frame/session/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/session/benchmarking/Cargo.toml b/frame/session/benchmarking/Cargo.toml index a9e05b88a570b..cc242085bf5e4 100644 --- a/frame/session/benchmarking/Cargo.toml +++ b/frame/session/benchmarking/Cargo.toml @@ -29,7 +29,7 @@ rand = { version = "0.7.2", default-features = false } codec = { package = "parity-scale-codec", version = "2.0.0", features = [ "derive", ] } -scale-info = "0.12.0" +scale-info = "1.0" sp-core = { version = "4.0.0-dev", path = "../../../primitives/core" } pallet-staking-reward-curve = { version = 
"4.0.0-dev", path = "../../staking/reward-curve" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io" } diff --git a/frame/society/Cargo.toml b/frame/society/Cargo.toml index 5c9ac478e6bfe..942b2844195f2 100644 --- a/frame/society/Cargo.toml +++ b/frame/society/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/staking/Cargo.toml b/frame/staking/Cargo.toml index cab57e00d82df..aba19ba56357a 100644 --- a/frame/staking/Cargo.toml +++ b/frame/staking/Cargo.toml @@ -17,7 +17,7 @@ serde = { version = "1.0.126", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/sudo/Cargo.toml b/frame/sudo/Cargo.toml index b0f9371cd97ff..baacb66d5c751 100644 --- a/frame/sudo/Cargo.toml +++ b/frame/sudo/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version 
= "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index fc6616df1a68a..94dd3c0b29b5a 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-metadata = { version = "14.0.0-rc.3", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } diff --git a/frame/support/test/Cargo.toml b/frame/support/test/Cargo.toml index f0bda64980362..e12880871e5c2 100644 --- a/frame/support/test/Cargo.toml +++ b/frame/support/test/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", default-features = false, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, 
features = ["derive"] } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/arithmetic" } sp-io = { version = "4.0.0-dev", path = "../../../primitives/io", default-features = false } sp-state-machine = { version = "0.10.0-dev", optional = true, path = "../../../primitives/state-machine" } diff --git a/frame/support/test/pallet/Cargo.toml b/frame/support/test/pallet/Cargo.toml index 2f0c1c4a907ba..35eb4f34acae1 100644 --- a/frame/support/test/pallet/Cargo.toml +++ b/frame/support/test/pallet/Cargo.toml @@ -13,7 +13,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../../" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../../../system" } diff --git a/frame/system/Cargo.toml b/frame/system/Cargo.toml index 3884639316b08..389730107b439 100644 --- a/frame/system/Cargo.toml +++ b/frame/system/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", path = "../../primitives/io", default-features = false } diff --git a/frame/system/benchmarking/Cargo.toml 
b/frame/system/benchmarking/Cargo.toml index 915b103b57196..29bcccfd7d830 100644 --- a/frame/system/benchmarking/Cargo.toml +++ b/frame/system/benchmarking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../../primitives/runtime" } frame-benchmarking = { version = "4.0.0-dev", default-features = false, path = "../../benchmarking" } diff --git a/frame/timestamp/Cargo.toml b/frame/timestamp/Cargo.toml index 7881428d504c3..1c95c4782b5c4 100644 --- a/frame/timestamp/Cargo.toml +++ b/frame/timestamp/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io", optional = true } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } diff --git a/frame/tips/Cargo.toml b/frame/tips/Cargo.toml index 79e55169e5828..50eaddf391c3c 100644 --- a/frame/tips/Cargo.toml +++ b/frame/tips/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = 
"2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/transaction-payment/Cargo.toml b/frame/transaction-payment/Cargo.toml index 3dfc8e9aa1cd0..fd09636bf95ac 100644 --- a/frame/transaction-payment/Cargo.toml +++ b/frame/transaction-payment/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true } smallvec = "1.4.1" diff --git a/frame/transaction-storage/Cargo.toml b/frame/transaction-storage/Cargo.toml index 1395acd0d0ab5..a4ebd5cfbc876 100644 --- a/frame/transaction-storage/Cargo.toml +++ b/frame/transaction-storage/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true } hex-literal = { version = "0.3.1", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } pallet-balances = { version = "4.0.0-dev", 
default-features = false, path = "../balances" } diff --git a/frame/treasury/Cargo.toml b/frame/treasury/Cargo.toml index 50e8f3be9222a..b2991f3febcad 100644 --- a/frame/treasury/Cargo.toml +++ b/frame/treasury/Cargo.toml @@ -17,7 +17,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", features = ["derive"], optional = true } impl-trait-for-tuples = "0.2.1" diff --git a/frame/uniques/Cargo.toml b/frame/uniques/Cargo.toml index 2133450983c88..4f664ecc2b6a9 100644 --- a/frame/uniques/Cargo.toml +++ b/frame/uniques/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/frame/utility/Cargo.toml b/frame/utility/Cargo.toml index 5d5a1eff1570a..b5b8eab9cdbf3 100644 --- a/frame/utility/Cargo.toml +++ b/frame/utility/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } 
frame-system = { version = "4.0.0-dev", default-features = false, path = "../system" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../../primitives/core" } diff --git a/frame/vesting/Cargo.toml b/frame/vesting/Cargo.toml index a472362d02de4..806e0e6036862 100644 --- a/frame/vesting/Cargo.toml +++ b/frame/vesting/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } frame-support = { version = "4.0.0-dev", default-features = false, path = "../support" } diff --git a/primitives/application-crypto/Cargo.toml b/primitives/application-crypto/Cargo.toml index 8029084a39d3d..6849dc25f8561 100644 --- a/primitives/application-crypto/Cargo.toml +++ b/primitives/application-crypto/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../io" } diff --git a/primitives/arithmetic/Cargo.toml b/primitives/arithmetic/Cargo.toml index 433e06bc5a16d..9efff5daeae22 100644 --- 
a/primitives/arithmetic/Cargo.toml +++ b/primitives/arithmetic/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = [ "derive", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } integer-sqrt = "0.1.2" static_assertions = "1.1.0" num-traits = { version = "0.2.8", default-features = false } diff --git a/primitives/authority-discovery/Cargo.toml b/primitives/authority-discovery/Cargo.toml index 8dcb059ed5040..6638e478b4cd7 100644 --- a/primitives/authority-discovery/Cargo.toml +++ b/primitives/authority-discovery/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } codec = { package = "parity-scale-codec", default-features = false, version = "2.0.0" } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } diff --git a/primitives/consensus/aura/Cargo.toml b/primitives/consensus/aura/Cargo.toml index fadc738a4b5dc..c228b88fd6570 100644 --- a/primitives/consensus/aura/Cargo.toml +++ b/primitives/consensus/aura/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, 
features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } diff --git a/primitives/consensus/babe/Cargo.toml b/primitives/consensus/babe/Cargo.toml index 5a5ab41c92e49..5f6bfec219739 100644 --- a/primitives/consensus/babe/Cargo.toml +++ b/primitives/consensus/babe/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../../application-crypto" } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } merlin = { version = "2.0", default-features = false } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../std" } sp-api = { version = "4.0.0-dev", default-features = false, path = "../../api" } diff --git a/primitives/consensus/slots/Cargo.toml b/primitives/consensus/slots/Cargo.toml index bc95ec739d6de..141e580672975 100644 --- a/primitives/consensus/slots/Cargo.toml +++ b/primitives/consensus/slots/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../runtime" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../../arithmetic" } diff --git 
a/primitives/core/Cargo.toml b/primitives/core/Cargo.toml index dd0736c380f81..2e7668a39fcf5 100644 --- a/primitives/core/Cargo.toml +++ b/primitives/core/Cargo.toml @@ -18,7 +18,7 @@ codec = { package = "parity-scale-codec", version = "2.2.0", default-features = "derive", "max-encoded-len", ] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { version = "0.4.11", default-features = false } serde = { version = "1.0.126", optional = true, features = ["derive"] } byteorder = { version = "1.3.2", default-features = false } diff --git a/primitives/finality-grandpa/Cargo.toml b/primitives/finality-grandpa/Cargo.toml index 243d043e64088..c0c2a654270f7 100644 --- a/primitives/finality-grandpa/Cargo.toml +++ b/primitives/finality-grandpa/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } grandpa = { package = "finality-grandpa", version = "0.14.1", default-features = false, features = ["derive-codec"] } log = { version = "0.4.8", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } diff --git a/primitives/npos-elections/Cargo.toml b/primitives/npos-elections/Cargo.toml index 841d3be148007..c420ab18d8459 100644 --- a/primitives/npos-elections/Cargo.toml +++ b/primitives/npos-elections/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", 
default-features = false, features = ["derive"] } serde = { version = "1.0.126", optional = true, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-npos-elections-solution-type = { version = "4.0.0-dev", path = "./solution-type" } diff --git a/primitives/npos-elections/fuzzer/Cargo.toml b/primitives/npos-elections/fuzzer/Cargo.toml index 6d431ddc5328b..d6fcc09c8b586 100644 --- a/primitives/npos-elections/fuzzer/Cargo.toml +++ b/primitives/npos-elections/fuzzer/Cargo.toml @@ -15,7 +15,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } honggfuzz = "0.5" rand = { version = "0.7.3", features = ["std", "small_rng"] } sp-npos-elections = { version = "4.0.0-dev", path = ".." 
} diff --git a/primitives/npos-elections/solution-type/Cargo.toml b/primitives/npos-elections/solution-type/Cargo.toml index 87023cb361b7b..e039e9b0f30c2 100644 --- a/primitives/npos-elections/solution-type/Cargo.toml +++ b/primitives/npos-elections/solution-type/Cargo.toml @@ -22,7 +22,7 @@ proc-macro-crate = "1.0.0" [dev-dependencies] parity-scale-codec = "2.0.1" -scale-info = "0.12.0" +scale-info = "1.0" sp-arithmetic = { path = "../../arithmetic", version = "4.0.0-dev" } # used by generate_solution_type: sp-npos-elections = { path = "..", version = "4.0.0-dev" } diff --git a/primitives/runtime/Cargo.toml b/primitives/runtime/Cargo.toml index 95394ff894d41..be134958fbf0b 100644 --- a/primitives/runtime/Cargo.toml +++ b/primitives/runtime/Cargo.toml @@ -17,7 +17,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-application-crypto = { version = "4.0.0-dev", default-features = false, path = "../application-crypto" } sp-arithmetic = { version = "4.0.0-dev", default-features = false, path = "../arithmetic" } diff --git a/primitives/session/Cargo.toml b/primitives/session/Cargo.toml index 9555d2022e092..8e1e2464e49ec 100644 --- a/primitives/session/Cargo.toml +++ b/primitives/session/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = 
false, features = ["derive"] } sp-api = { version = "4.0.0-dev", default-features = false, path = "../api" } sp-core = { version = "4.0.0-dev", default-features = false, path = "../core" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/staking/Cargo.toml b/primitives/staking/Cargo.toml index 8fdcb940ed40b..9e852319ede42 100644 --- a/primitives/staking/Cargo.toml +++ b/primitives/staking/Cargo.toml @@ -14,7 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } diff --git a/primitives/transaction-storage-proof/Cargo.toml b/primitives/transaction-storage-proof/Cargo.toml index 38748c9031153..8a41105b20b74 100644 --- a/primitives/transaction-storage-proof/Cargo.toml +++ b/primitives/transaction-storage-proof/Cargo.toml @@ -19,7 +19,7 @@ sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-trie = { version = "4.0.0-dev", optional = true, path = "../trie" } sp-core = { version = "4.0.0-dev", path = "../core", optional = true } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } log = { version = "0.4.8", optional = true } async-trait = { version = "0.1.50", optional = true } diff --git a/primitives/trie/Cargo.toml b/primitives/trie/Cargo.toml index 5c3a742746c85..5a2de4f16f9a4 100644 --- a/primitives/trie/Cargo.toml +++ 
b/primitives/trie/Cargo.toml @@ -19,7 +19,7 @@ harness = false [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } hash-db = { version = "0.15.2", default-features = false } trie-db = { version = "0.22.6", default-features = false } diff --git a/primitives/version/Cargo.toml b/primitives/version/Cargo.toml index b9187aef5dee9..fcab1eeabcaf4 100644 --- a/primitives/version/Cargo.toml +++ b/primitives/version/Cargo.toml @@ -18,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] impl-serde = { version = "0.3.1", optional = true } serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../std" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../runtime" } sp-version-proc-macro = { version = "4.0.0-dev", default-features = false, path = "proc-macro" } diff --git a/test-utils/runtime/Cargo.toml b/test-utils/runtime/Cargo.toml index e677bf6b0fb7b..24f4d404c18bd 100644 --- a/test-utils/runtime/Cargo.toml +++ b/test-utils/runtime/Cargo.toml @@ -18,7 +18,7 @@ sp-consensus-aura = { version = "0.10.0-dev", default-features = false, path = " sp-consensus-babe = { version = "0.10.0-dev", default-features = false, path = "../../primitives/consensus/babe" } sp-block-builder = { version = "4.0.0-dev", default-features = false, path = "../../primitives/block-builder" } codec = { package = "parity-scale-codec", 
version = "2.0.0", default-features = false, features = ["derive"] } -scale-info = { version = "0.12.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-inherents = { version = "4.0.0-dev", default-features = false, path = "../../primitives/inherents" } sp-keyring = { version = "4.0.0-dev", optional = true, path = "../../primitives/keyring" } memory-db = { version = "0.27.0", default-features = false } diff --git a/utils/frame/rpc/support/Cargo.toml b/utils/frame/rpc/support/Cargo.toml index cd343d219a2ad..a94f18d0e8925 100644 --- a/utils/frame/rpc/support/Cargo.toml +++ b/utils/frame/rpc/support/Cargo.toml @@ -25,5 +25,5 @@ sc-rpc-api = { version = "0.10.0-dev", path = "../../../../client/rpc-api" } [dev-dependencies] frame-system = { version = "4.0.0-dev", path = "../../../../frame/system" } -scale-info = "0.12.0" +scale-info = "1.0" tokio = "1.10" From 4022140061c44b3653b6736c0affdf0e01b09bad Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 1 Sep 2021 17:02:46 +0100 Subject: [PATCH 478/503] Update frame-metadata to version 14.0.0 --- frame/support/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/support/Cargo.toml b/frame/support/Cargo.toml index 94dd3c0b29b5a..b09ed65a114dc 100644 --- a/frame/support/Cargo.toml +++ b/frame/support/Cargo.toml @@ -16,7 +16,7 @@ targets = ["x86_64-unknown-linux-gnu"] serde = { version = "1.0.126", optional = true, features = ["derive"] } codec = { package = "parity-scale-codec", version = "2.2.0", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "1.0", default-features = false, features = ["derive"] } -frame-metadata = { version = "14.0.0-rc.3", default-features = false, features = ["v14"] } +frame-metadata = { version = "14.0.0", default-features = false, features = ["v14"] } sp-std = { version = "4.0.0-dev", default-features = false, path = 
"../../primitives/std" } sp-io = { version = "4.0.0-dev", default-features = false, path = "../../primitives/io" } sp-runtime = { version = "4.0.0-dev", default-features = false, path = "../../primitives/runtime" } From e332c5dabe1f39acbc2181a7e2eaef97797d474f Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 1 Sep 2021 17:03:24 +0100 Subject: [PATCH 479/503] Patch finality-grandpa until release available --- Cargo.lock | 198 +++++++++++++++++++++++++---------------------------- Cargo.toml | 4 +- 2 files changed, 95 insertions(+), 107 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b6f3df43e5975..21ea583f3b464 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1795,9 +1795,8 @@ dependencies = [ [[package]] name = "finality-grandpa" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c832d0ed507622c7cb98e9b7f10426850fc9d38527ab8071778dcc3a81d45875" +version = "0.14.4" +source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info-1.0.0#abcf579a1c3738eaa300573be579f7a796bbb98b" dependencies = [ "either", "futures 0.3.16", @@ -1807,7 +1806,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.8.4", - "scale-info 0.10.0", + "scale-info", ] [[package]] @@ -1890,7 +1889,7 @@ dependencies = [ "log 0.4.14", "parity-scale-codec", "paste 1.0.4", - "scale-info 0.12.0", + "scale-info", "sp-api", "sp-io", "sp-runtime", @@ -1947,7 +1946,7 @@ dependencies = [ "pallet-balances", "pallet-transaction-payment", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -1959,13 +1958,13 @@ dependencies = [ [[package]] name = "frame-metadata" -version = "14.0.0-rc.3" +version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76dfa4ecc509676b1416b834196a27639110e66e9da68342e709e5f7b855c54f" +checksum = "96616f82e069102b95a72c87de4c84d2f87ef7f0f20630e78ce3824436483110" dependencies = [ "cfg-if 1.0.0", 
"parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", ] @@ -1985,7 +1984,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "pretty_assertions 0.6.1", - "scale-info 0.12.0", + "scale-info", "serde", "smallvec 1.6.1", "sp-arithmetic", @@ -2040,7 +2039,7 @@ dependencies = [ "parity-scale-codec", "pretty_assertions 0.6.1", "rustversion", - "scale-info 0.12.0", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -2059,7 +2058,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", ] [[package]] @@ -2070,7 +2069,7 @@ dependencies = [ "frame-support", "log 0.4.14", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", "sp-core", "sp-externalities", @@ -2089,7 +2088,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4406,7 +4405,7 @@ dependencies = [ "pallet-treasury", "parity-scale-codec", "sc-executor", - "scale-info 0.12.0", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-core", @@ -4440,7 +4439,7 @@ version = "2.0.0" dependencies = [ "frame-system", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-application-crypto", "sp-core", "sp-runtime", @@ -4546,7 +4545,7 @@ dependencies = [ "pallet-utility", "pallet-vesting", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-api", "sp-authority-discovery", "sp-block-builder", @@ -4625,7 +4624,7 @@ dependencies = [ "pallet-transaction-payment", "pallet-transaction-payment-rpc-runtime-api", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-api", "sp-block-builder", "sp-consensus-aura", @@ -4892,7 +4891,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4907,7 +4906,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 
0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -4922,7 +4921,7 @@ dependencies = [ "frame-system", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-application-crypto", "sp-consensus-aura", "sp-core", @@ -4939,7 +4938,7 @@ dependencies = [ "frame-system", "pallet-session", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-application-crypto", "sp-authority-discovery", "sp-core", @@ -4956,7 +4955,7 @@ dependencies = [ "frame-system", "impl-trait-for-tuples", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-authorship", "sp-core", "sp-io", @@ -4981,7 +4980,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-application-crypto", "sp-consensus-babe", "sp-consensus-vrf", @@ -5003,7 +5002,7 @@ dependencies = [ "log 0.4.14", "pallet-transaction-payment", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5020,7 +5019,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5037,7 +5036,7 @@ dependencies = [ "hex-literal", "log 0.4.14", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5066,7 +5065,7 @@ dependencies = [ "pwasm-utils", "rand 0.8.4", "rand_pcg 0.3.0", - "scale-info 0.12.0", + "scale-info", "serde", "smallvec 1.6.1", "sp-core", @@ -5084,7 +5083,7 @@ version = "4.0.0-dev" dependencies = [ "bitflags", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", "sp-core", "sp-runtime", @@ -5125,7 +5124,7 @@ version = "4.0.0-dev" dependencies = [ "pallet-contracts-primitives", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-api", "sp-runtime", "sp-std", @@ -5141,7 +5140,7 @@ dependencies = [ "pallet-balances", "pallet-scheduler", "parity-scale-codec", - "scale-info 0.12.0", + 
"scale-info", "serde", "sp-core", "sp-io", @@ -5162,7 +5161,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.11.1", "rand 0.7.3", - "scale-info 0.12.0", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -5183,7 +5182,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5200,7 +5199,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-npos-elections", @@ -5219,7 +5218,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5235,7 +5234,7 @@ dependencies = [ "lite-json", "log 0.4.14", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-keystore", @@ -5250,7 +5249,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5267,7 +5266,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", @@ -5293,7 +5292,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-application-crypto", "sp-core", "sp-finality-grandpa", @@ -5315,7 +5314,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5333,7 +5332,7 @@ dependencies = [ "pallet-authorship", "pallet-session", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-application-crypto", "sp-core", "sp-io", @@ -5351,7 +5350,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-keyring", @@ -5369,7 +5368,7 @@ dependencies = [ 
"frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5385,7 +5384,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5404,7 +5403,7 @@ dependencies = [ "hex-literal", "pallet-mmr-primitives", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5453,7 +5452,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5468,7 +5467,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5483,7 +5482,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5499,7 +5498,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -5526,7 +5525,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5544,7 +5543,7 @@ dependencies = [ "pallet-balances", "pallet-utility", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5559,7 +5558,7 @@ dependencies = [ "frame-system", "parity-scale-codec", "safe-mix", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5574,7 +5573,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5590,7 +5589,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5606,7 
+5605,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5623,7 +5622,7 @@ dependencies = [ "log 0.4.14", "pallet-timestamp", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5648,7 +5647,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand 0.7.3", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5666,7 +5665,7 @@ dependencies = [ "pallet-balances", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5689,7 +5688,7 @@ dependencies = [ "pallet-timestamp", "parity-scale-codec", "rand_chacha 0.2.2", - "scale-info 0.12.0", + "scale-info", "serde", "sp-application-crypto", "sp-core", @@ -5727,7 +5726,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5742,7 +5741,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5757,7 +5756,7 @@ dependencies = [ "frame-system", "log 0.4.14", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-inherents", "sp-io", @@ -5776,7 +5775,7 @@ dependencies = [ "pallet-balances", "pallet-treasury", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -5793,7 +5792,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", "serde_json", "smallvec 1.6.1", @@ -5839,7 +5838,7 @@ dependencies = [ "hex-literal", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", "sp-core", "sp-inherents", @@ -5859,7 +5858,7 @@ dependencies = [ "impl-trait-for-tuples", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + 
"scale-info", "serde", "sp-core", "sp-io", @@ -5876,7 +5875,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5892,7 +5891,7 @@ dependencies = [ "frame-system", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -5909,7 +5908,7 @@ dependencies = [ "log 0.4.14", "pallet-balances", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-io", "sp-runtime", @@ -6436,7 +6435,7 @@ dependencies = [ "fixed-hash", "impl-codec", "impl-serde", - "scale-info 0.10.0", + "scale-info", "uint", ] @@ -8453,22 +8452,9 @@ dependencies = [ [[package]] name = "scale-info" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2e62ff266e136db561a007c84569985805f84a1d5a08278e52c36aacb6e061b" -dependencies = [ - "bitvec 0.20.2", - "cfg-if 1.0.0", - "derive_more", - "parity-scale-codec", - "scale-info-derive", -] - -[[package]] -name = "scale-info" -version = "0.12.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cf40f28c3eca6a9abe66aa368d7475f6c92a626fb5737d131c82c0170b24f36" +checksum = "5c55b744399c25532d63a0d2789b109df8d46fc93752d46b0782991a931a782f" dependencies = [ "bitvec 0.20.2", "cfg-if 1.0.0", @@ -8480,9 +8466,9 @@ dependencies = [ [[package]] name = "scale-info-derive" -version = "0.7.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b648fa291891a4c80187a25532f6a7d96b82c70353e30b868b14632b8fe043d6" +checksum = "baeb2780690380592f86205aa4ee49815feb2acad8c2f59e6dd207148c3f1fcd" dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", @@ -8945,7 +8931,7 @@ name = "sp-application-crypto" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", "sp-core", "sp-io", @@ -8974,7 +8960,7 @@ dependencies 
= [ "parity-scale-codec", "primitive-types", "rand 0.7.3", - "scale-info 0.12.0", + "scale-info", "serde", "sp-debug-derive", "sp-std", @@ -8996,7 +8982,7 @@ name = "sp-authority-discovery" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-api", "sp-application-crypto", "sp-runtime", @@ -9067,7 +9053,7 @@ version = "0.10.0-dev" dependencies = [ "async-trait", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-api", "sp-application-crypto", "sp-consensus", @@ -9085,7 +9071,7 @@ dependencies = [ "async-trait", "merlin", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9116,7 +9102,7 @@ name = "sp-consensus-slots" version = "0.10.0-dev" dependencies = [ "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-arithmetic", "sp-runtime", ] @@ -9159,7 +9145,7 @@ dependencies = [ "primitive-types", "rand 0.7.3", "regex", - "scale-info 0.12.0", + "scale-info", "schnorrkel", "secrecy", "serde", @@ -9214,7 +9200,7 @@ dependencies = [ "finality-grandpa", "log 0.4.14", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "serde", "sp-api", "sp-application-crypto", @@ -9304,7 +9290,7 @@ version = "4.0.0-dev" dependencies = [ "parity-scale-codec", "rand 0.7.3", - "scale-info 0.12.0", + "scale-info", "serde", "sp-arithmetic", "sp-core", @@ -9321,7 +9307,7 @@ dependencies = [ "honggfuzz", "parity-scale-codec", "rand 0.7.3", - "scale-info 0.12.0", + "scale-info", "sp-npos-elections", "sp-runtime", "structopt", @@ -9335,7 +9321,7 @@ dependencies = [ "proc-macro-crate 1.0.0", "proc-macro2", "quote", - "scale-info 0.12.0", + "scale-info", "sp-arithmetic", "sp-npos-elections", "syn", @@ -9380,7 +9366,7 @@ dependencies = [ "parity-util-mem", "paste 1.0.4", "rand 0.7.3", - "scale-info 0.12.0", + "scale-info", "serde", "serde_json", "sp-api", @@ -9493,7 +9479,7 @@ name = "sp-session" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - 
"scale-info 0.12.0", + "scale-info", "sp-api", "sp-core", "sp-runtime", @@ -9506,7 +9492,7 @@ name = "sp-staking" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-runtime", "sp-std", ] @@ -9624,7 +9610,7 @@ dependencies = [ "async-trait", "log 0.4.14", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-inherents", "sp-runtime", @@ -9641,7 +9627,7 @@ dependencies = [ "hex-literal", "memory-db", "parity-scale-codec", - "scale-info 0.12.0", + "scale-info", "sp-core", "sp-runtime", "sp-std", @@ -9668,7 +9654,7 @@ dependencies = [ "impl-serde", "parity-scale-codec", "parity-wasm 0.42.2", - "scale-info 0.12.0", + "scale-info", "serde", "sp-runtime", "sp-std", @@ -9853,7 +9839,7 @@ dependencies = [ "jsonrpc-client-transports", "parity-scale-codec", "sc-rpc-api", - "scale-info 0.12.0", + "scale-info", "serde", "sp-storage", "tokio", @@ -9940,7 +9926,7 @@ dependencies = [ "sc-block-builder", "sc-executor", "sc-service", - "scale-info 0.12.0", + "scale-info", "serde", "sp-api", "sp-application-crypto", diff --git a/Cargo.toml b/Cargo.toml index 26af7be512d9d..7a17c1513fdc4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -260,7 +260,9 @@ wasmi = { opt-level = 3 } x25519-dalek = { opt-level = 3 } yamux = { opt-level = 3 } zeroize = { opt-level = 3 } - [profile.release] # Substrate runtime requires unwinding. 
panic = "unwind" + +[patch.crates-io] +finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info-1.0.0" } From 29a077e94da7cc2f038624a6ba2caab0ec0f33d9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 1 Sep 2021 17:38:02 +0100 Subject: [PATCH 480/503] Fix metadata tests --- frame/support/test/tests/construct_runtime.rs | 2 +- frame/support/test/tests/pallet.rs | 2 +- frame/support/test/tests/pallet_instance.rs | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/frame/support/test/tests/construct_runtime.rs b/frame/support/test/tests/construct_runtime.rs index 7d05026caa7ed..062993fe10fbb 100644 --- a/frame/support/test/tests/construct_runtime.rs +++ b/frame/support/test/tests/construct_runtime.rs @@ -730,7 +730,7 @@ fn test_metadata() { }; let expected_metadata: RuntimeMetadataPrefixed = - RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); + RuntimeMetadataLastVersion::new(pallets, extrinsic, meta_type::()).into(); let actual_metadata = Runtime::metadata(); pretty_assertions::assert_eq!(actual_metadata, expected_metadata); } diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index e6828c902cf70..05867b10c99ba 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -1447,7 +1447,7 @@ fn metadata() { }; let expected_metadata: RuntimeMetadataPrefixed = - RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); + RuntimeMetadataLastVersion::new(pallets, extrinsic, meta_type::()).into(); let expected_metadata = match expected_metadata.1 { RuntimeMetadata::V14(metadata) => metadata, _ => panic!("metadata has been bumped, test needs to be updated"), diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index b64d751b44f5b..5fa64f85a95bb 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -741,8 +741,8 @@ fn metadata() { 
}; let expected_metadata: RuntimeMetadataPrefixed = - RuntimeMetadataLastVersion::new(pallets, extrinsic).into(); - let expected_metadata = match expected_metadata.1 { + RuntimeMetadataLastVersion::new(pallets, extrinsic, meta_type::()).into(); + let expected_metadata = match expectedl_metadata.1 { RuntimeMetadata::V14(metadata) => metadata, _ => panic!("metadata has been bumped, test needs to be updated"), }; From 6616dc2504ba1b7b5fbc69fa43e5cba7664464cc Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 1 Sep 2021 17:42:42 +0100 Subject: [PATCH 481/503] Fix metadata tests --- frame/support/test/tests/pallet_instance.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 5fa64f85a95bb..62c714bc1f6ee 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -740,9 +740,8 @@ fn metadata() { }], }; - let expected_metadata: RuntimeMetadataPrefixed = - RuntimeMetadataLastVersion::new(pallets, extrinsic, meta_type::()).into(); - let expected_metadata = match expectedl_metadata.1 { + let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic, scale_info::meta_type::()).into(); + let expected_metadata = match expected_metadata.1 { RuntimeMetadata::V14(metadata) => metadata, _ => panic!("metadata has been bumped, test needs to be updated"), }; From 5c7870560c3245ffea6415de3a8bbe6ab87a8575 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 2 Sep 2021 08:29:43 +0100 Subject: [PATCH 482/503] Fmt --- frame/support/test/tests/pallet_instance.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/frame/support/test/tests/pallet_instance.rs b/frame/support/test/tests/pallet_instance.rs index 62c714bc1f6ee..34586e8414216 100644 --- a/frame/support/test/tests/pallet_instance.rs +++ b/frame/support/test/tests/pallet_instance.rs @@ -740,7 +740,9 @@ fn 
metadata() { }], }; - let expected_metadata: RuntimeMetadataPrefixed = RuntimeMetadataLastVersion::new(pallets, extrinsic, scale_info::meta_type::()).into(); + let expected_metadata: RuntimeMetadataPrefixed = + RuntimeMetadataLastVersion::new(pallets, extrinsic, scale_info::meta_type::()) + .into(); let expected_metadata = match expected_metadata.1 { RuntimeMetadata::V14(metadata) => metadata, _ => panic!("metadata has been bumped, test needs to be updated"), From d4566d1ab67bd882b344a89fd8be8b23e2b20a97 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 2 Sep 2021 08:30:02 +0100 Subject: [PATCH 483/503] Remove patched finality-grandpa --- Cargo.lock | 3 ++- Cargo.toml | 5 +---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 21ea583f3b464..348ea7c49ccdf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1796,7 +1796,8 @@ dependencies = [ [[package]] name = "finality-grandpa" version = "0.14.4" -source = "git+https://github.com/paritytech/finality-grandpa?branch=aj-scale-info-1.0.0#abcf579a1c3738eaa300573be579f7a796bbb98b" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8ac3ff5224ef91f3c97e03eb1de2db82743427e91aaa5ac635f454f0b164f5a" dependencies = [ "either", "futures 0.3.16", diff --git a/Cargo.toml b/Cargo.toml index 7a17c1513fdc4..20bc292f131e9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -262,7 +262,4 @@ yamux = { opt-level = 3 } zeroize = { opt-level = 3 } [profile.release] # Substrate runtime requires unwinding. 
-panic = "unwind" - -[patch.crates-io] -finality-grandpa = { git = "https://github.com/paritytech/finality-grandpa", branch = "aj-scale-info-1.0.0" } +panic = "unwind" \ No newline at end of file From 16d002d2c478b87654adeb6e6ff2593f7abfc048 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 2 Sep 2021 08:41:42 +0100 Subject: [PATCH 484/503] Fix tests, use scale_info imports --- frame/support/test/tests/pallet.rs | 169 ++++++++++++++--------------- 1 file changed, 80 insertions(+), 89 deletions(-) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 05867b10c99ba..13efd7190fb4d 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -24,6 +24,7 @@ use frame_support::{ }, weights::{DispatchClass, DispatchInfo, GetDispatchInfo, Pays, RuntimeDbWeight}, }; +use scale_info::{meta_type, TypeInfo}; use sp_io::{ hashing::{blake2_128, twox_128, twox_64}, TestExternalities, @@ -80,14 +81,14 @@ impl From for u64 { } pub trait SomeAssociation1 { - type _1: Parameter + codec::MaxEncodedLen + scale_info::TypeInfo; + type _1: Parameter + codec::MaxEncodedLen + TypeInfo; } impl SomeAssociation1 for u64 { type _1 = u64; } pub trait SomeAssociation2 { - type _2: Parameter + codec::MaxEncodedLen + scale_info::TypeInfo; + type _2: Parameter + codec::MaxEncodedLen + TypeInfo; } impl SomeAssociation2 for u64 { type _2 = u64; @@ -124,7 +125,7 @@ pub mod pallet { #[pallet::constant] type MyGetParam3: Get<::_1>; - type Balance: Parameter + Default + scale_info::TypeInfo; + type Balance: Parameter + Default + TypeInfo; type Event: From> + IsType<::Event>; } @@ -340,13 +341,7 @@ pub mod pallet { #[pallet::origin] #[derive( - EqNoBound, - RuntimeDebugNoBound, - CloneNoBound, - PartialEqNoBound, - Encode, - Decode, - scale_info::TypeInfo, + EqNoBound, RuntimeDebugNoBound, CloneNoBound, PartialEqNoBound, Encode, Decode, TypeInfo, )] pub struct Origin(PhantomData); @@ -990,47 +985,47 @@ fn metadata() { index: 0, name: 
"System", storage: None, - calls: Some(scale_info::meta_type::>().into()), - event: Some(scale_info::meta_type::>().into()), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), constants: vec![ PalletConstantMetadata { name: "BlockWeights", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![], docs: vec![], }, PalletConstantMetadata { name: "BlockLength", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![], docs: vec![], }, PalletConstantMetadata { name: "BlockHashCount", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![], docs: vec![], }, PalletConstantMetadata { name: "DbWeight", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![], docs: vec![], }, PalletConstantMetadata { name: "Version", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![], docs: vec![], }, PalletConstantMetadata { name: "SS58Prefix", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![], docs: vec![], }, ], - error: Some(scale_info::meta_type::>().into()), + error: Some(meta_type::>().into()), }, PalletMetadata { index: 1, @@ -1041,21 +1036,21 @@ fn metadata() { StorageEntryMetadata { name: "ValueWhereClause", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), + ty: StorageEntryType::Plain(meta_type::()), default: vec![0], docs: vec![], }, StorageEntryMetadata { name: "Value", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), + ty: StorageEntryType::Plain(meta_type::()), default: vec![0], docs: vec![], }, StorageEntryMetadata { name: "Value2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), + ty: StorageEntryType::Plain(meta_type::()), default: vec![0], docs: vec![], }, @@ -1063,8 +1058,8 @@ fn metadata() { name: "Map", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - key: scale_info::meta_type::(), - value: 
scale_info::meta_type::(), + key: meta_type::(), + value: meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], }, default: vec![4, 0], @@ -1074,8 +1069,8 @@ fn metadata() { name: "Map2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::(), - value: scale_info::meta_type::(), + key: meta_type::(), + value: meta_type::(), hashers: vec![StorageHasher::Twox64Concat], }, default: vec![0], @@ -1085,12 +1080,12 @@ fn metadata() { name: "DoubleMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - value: scale_info::meta_type::(), + value: meta_type::(), hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat, ], - key: scale_info::meta_type::<(u8, u16)>(), + key: meta_type::<(u8, u16)>(), }, default: vec![0], docs: vec![], @@ -1099,8 +1094,8 @@ fn metadata() { name: "DoubleMap2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - value: scale_info::meta_type::(), - key: scale_info::meta_type::<(u16, u32)>(), + value: meta_type::(), + key: meta_type::<(u16, u32)>(), hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat, @@ -1113,9 +1108,9 @@ fn metadata() { name: "NMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::(), + key: meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], - value: scale_info::meta_type::(), + value: meta_type::(), }, default: vec![0], docs: vec![], @@ -1124,12 +1119,12 @@ fn metadata() { name: "NMap2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::<(u16, u32)>(), + key: meta_type::<(u16, u32)>(), hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat, ], - value: scale_info::meta_type::(), + value: meta_type::(), }, default: vec![0], docs: vec![], @@ -1138,7 +1133,7 @@ fn metadata() { StorageEntryMetadata { name: "ConditionalValue", modifier: 
StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), + ty: StorageEntryType::Plain(meta_type::()), default: vec![0], docs: vec![], }, @@ -1147,8 +1142,8 @@ fn metadata() { name: "ConditionalMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::(), - value: scale_info::meta_type::(), + key: meta_type::(), + value: meta_type::(), hashers: vec![StorageHasher::Twox64Concat], }, default: vec![0], @@ -1159,8 +1154,8 @@ fn metadata() { name: "ConditionalDoubleMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - value: scale_info::meta_type::(), - key: scale_info::meta_type::<(u8, u16)>(), + value: meta_type::(), + key: meta_type::<(u8, u16)>(), hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat, @@ -1174,55 +1169,53 @@ fn metadata() { name: "ConditionalNMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::<(u8, u16)>(), + key: meta_type::<(u8, u16)>(), hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat, ], - value: scale_info::meta_type::(), + value: meta_type::(), }, default: vec![0], docs: vec![], }, ], }), - calls: Some(scale_info::meta_type::>().into()), - event: Some(scale_info::meta_type::>().into()), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), constants: vec![ PalletConstantMetadata { name: "MyGetParam", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![10, 0, 0, 0], docs: vec![" Some comment", " Some comment"], }, PalletConstantMetadata { name: "MyGetParam2", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![11, 0, 0, 0], docs: vec![" Some comment", " Some comment"], }, PalletConstantMetadata { name: "MyGetParam3", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![12, 0, 0, 0, 0, 0, 0, 0], docs: vec![], }, PalletConstantMetadata { name: "some_extra", - ty: 
scale_info::meta_type::(), + ty: meta_type::(), value: vec![100, 0, 0, 0, 0, 0, 0, 0], docs: vec![" Some doc", " Some doc"], }, PalletConstantMetadata { name: "some_extra_extra", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![0, 0, 0, 0, 0, 0, 0, 0], docs: vec![" Some doc"], }, ], - error: Some(PalletErrorMetadata { - ty: scale_info::meta_type::>(), - }), + error: Some(PalletErrorMetadata { ty: meta_type::>() }), }, PalletMetadata { index: 1, @@ -1233,21 +1226,21 @@ fn metadata() { StorageEntryMetadata { name: "ValueWhereClause", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), + ty: StorageEntryType::Plain(meta_type::()), default: vec![0], docs: vec![], }, StorageEntryMetadata { name: "Value", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), + ty: StorageEntryType::Plain(meta_type::()), default: vec![0], docs: vec![], }, StorageEntryMetadata { name: "Value2", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), + ty: StorageEntryType::Plain(meta_type::()), default: vec![0], docs: vec![], }, @@ -1255,8 +1248,8 @@ fn metadata() { name: "Map", modifier: StorageEntryModifier::Default, ty: StorageEntryType::Map { - key: scale_info::meta_type::(), - value: scale_info::meta_type::(), + key: meta_type::(), + value: meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], }, default: vec![4, 0], @@ -1266,8 +1259,8 @@ fn metadata() { name: "Map2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::(), - value: scale_info::meta_type::(), + key: meta_type::(), + value: meta_type::(), hashers: vec![StorageHasher::Twox64Concat], }, default: vec![0], @@ -1277,8 +1270,8 @@ fn metadata() { name: "DoubleMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - value: scale_info::meta_type::(), - key: scale_info::meta_type::<(u8, u16)>(), + 
value: meta_type::(), + key: meta_type::<(u8, u16)>(), hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat, @@ -1291,8 +1284,8 @@ fn metadata() { name: "DoubleMap2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - value: scale_info::meta_type::(), - key: scale_info::meta_type::<(u16, u32)>(), + value: meta_type::(), + key: meta_type::<(u16, u32)>(), hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat, @@ -1305,9 +1298,9 @@ fn metadata() { name: "NMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::(), + key: meta_type::(), hashers: vec![StorageHasher::Blake2_128Concat], - value: scale_info::meta_type::(), + value: meta_type::(), }, default: vec![0], docs: vec![], @@ -1316,12 +1309,12 @@ fn metadata() { name: "NMap2", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::<(u16, u32)>(), + key: meta_type::<(u16, u32)>(), hashers: vec![ StorageHasher::Twox64Concat, StorageHasher::Blake2_128Concat, ], - value: scale_info::meta_type::(), + value: meta_type::(), }, default: vec![0], docs: vec![], @@ -1330,7 +1323,7 @@ fn metadata() { StorageEntryMetadata { name: "ConditionalValue", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::()), + ty: StorageEntryType::Plain(meta_type::()), default: vec![0], docs: vec![], }, @@ -1339,8 +1332,8 @@ fn metadata() { name: "ConditionalMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::(), - value: scale_info::meta_type::(), + key: meta_type::(), + value: meta_type::(), hashers: vec![StorageHasher::Twox64Concat], }, default: vec![0], @@ -1351,8 +1344,8 @@ fn metadata() { name: "ConditionalDoubleMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - value: scale_info::meta_type::(), - key: scale_info::meta_type::<(u8, u16)>(), + value: 
meta_type::(), + key: meta_type::<(u8, u16)>(), hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat, @@ -1366,55 +1359,53 @@ fn metadata() { name: "ConditionalNMap", modifier: StorageEntryModifier::Optional, ty: StorageEntryType::Map { - key: scale_info::meta_type::<(u8, u16)>(), + key: meta_type::<(u8, u16)>(), hashers: vec![ StorageHasher::Blake2_128Concat, StorageHasher::Twox64Concat, ], - value: scale_info::meta_type::(), + value: meta_type::(), }, default: vec![0], docs: vec![], }, ], }), - calls: Some(scale_info::meta_type::>().into()), - event: Some(scale_info::meta_type::>().into()), + calls: Some(meta_type::>().into()), + event: Some(meta_type::>().into()), constants: vec![ PalletConstantMetadata { name: "MyGetParam", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![10, 0, 0, 0], docs: vec![" Some comment", " Some comment"], }, PalletConstantMetadata { name: "MyGetParam2", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![11, 0, 0, 0], docs: vec![" Some comment", " Some comment"], }, PalletConstantMetadata { name: "MyGetParam3", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![12, 0, 0, 0, 0, 0, 0, 0], docs: vec![], }, PalletConstantMetadata { name: "some_extra", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![100, 0, 0, 0, 0, 0, 0, 0], docs: vec![" Some doc", " Some doc"], }, PalletConstantMetadata { name: "some_extra_extra", - ty: scale_info::meta_type::(), + ty: meta_type::(), value: vec![0, 0, 0, 0, 0, 0, 0, 0], docs: vec![" Some doc"], }, ], - error: Some(PalletErrorMetadata { - ty: scale_info::meta_type::>(), - }), + error: Some(PalletErrorMetadata { ty: meta_type::>() }), }, PalletMetadata { index: 2, @@ -1424,25 +1415,25 @@ fn metadata() { entries: vec![StorageEntryMetadata { name: "SomeValue", modifier: StorageEntryModifier::Optional, - ty: StorageEntryType::Plain(scale_info::meta_type::>()), + ty: StorageEntryType::Plain(meta_type::>()), default: vec![0], 
docs: vec![], }], }), - calls: Some(scale_info::meta_type::>().into()), - event: Some(PalletEventMetadata { ty: scale_info::meta_type::() }), + calls: Some(meta_type::>().into()), + event: Some(PalletEventMetadata { ty: meta_type::() }), constants: vec![], error: None, }, ]; let extrinsic = ExtrinsicMetadata { - ty: scale_info::meta_type::(), + ty: meta_type::(), version: 4, signed_extensions: vec![SignedExtensionMetadata { identifier: "UnitSignedExtension", - ty: scale_info::meta_type::<()>(), - additional_signed: scale_info::meta_type::<()>(), + ty: meta_type::<()>(), + additional_signed: meta_type::<()>(), }], }; From fa01b4fab86459643fcecbf4018081f12a4af795 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 2 Sep 2021 08:46:35 +0100 Subject: [PATCH 485/503] Fix pallet tests --- frame/support/test/tests/pallet.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/frame/support/test/tests/pallet.rs b/frame/support/test/tests/pallet.rs index 13efd7190fb4d..2874ef6bd7685 100644 --- a/frame/support/test/tests/pallet.rs +++ b/frame/support/test/tests/pallet.rs @@ -102,6 +102,7 @@ pub mod pallet { }; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; + use scale_info::TypeInfo; type BalanceOf = ::Balance; From 84f048ee3eae95ccaf8b3db2ff22951588bad49e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 2 Sep 2021 12:01:18 +0100 Subject: [PATCH 486/503] Add BlockNumber TypeInfo bound --- frame/system/src/lib.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frame/system/src/lib.rs b/frame/system/src/lib.rs index dca3423c2723c..b2353818d8260 100644 --- a/frame/system/src/lib.rs +++ b/frame/system/src/lib.rs @@ -205,7 +205,8 @@ pub mod pallet { + sp_std::hash::Hash + sp_std::str::FromStr + MaybeMallocSizeOf - + MaxEncodedLen; + + MaxEncodedLen + + TypeInfo; /// The output of the `Hashing` function. 
type Hash: Parameter From af2903c7a2b0056de952a0c89487899a4da8e348 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Tue, 24 Aug 2021 11:42:27 +0100 Subject: [PATCH 487/503] ui test fix --- .../test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr index 6c92423c6a7fe..545520124bfee 100644 --- a/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr +++ b/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr @@ -4,6 +4,6 @@ error[E0277]: the trait bound `Bar: MaxEncodedLen` is not satisfied 10 | #[pallet::generate_storage_info] | ^^^^^^^^^^^^^^^^^^^^^ the trait `MaxEncodedLen` is not implemented for `Bar` | - = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `Key` - = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, Key, u32>` + = note: required because of the requirements on the impl of `KeyGeneratorMaxEncodedLen` for `NMapKey` + = note: required because of the requirements on the impl of `StorageInfoTrait` for `frame_support::pallet_prelude::StorageNMap<_GeneratedPrefixForStorageFoo, NMapKey, u32>` = note: required by `storage_info` From e10a528d67b6cccfdf0873416bc6beadc3da0059 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 6 Sep 2021 12:13:09 +0100 Subject: [PATCH 488/503] Cargo.lock --- Cargo.lock | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fdfed6e70253f..ed5a9824e981a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7378,6 +7378,7 @@ dependencies = [ "sc-service", "sc-telemetry", "sc-tracing", + "sc-utils", "serde", "serde_json", "sp-blockchain", @@ -7386,7 +7387,6 @@ 
dependencies = [ "sp-keystore", "sp-panic-handler", "sp-runtime", - "sp-utils", "sp-version", "structopt", "tempfile", @@ -7407,6 +7407,7 @@ dependencies = [ "parking_lot 0.11.1", "sc-executor", "sc-transaction-pool-api", + "sc-utils", "sp-api", "sp-blockchain", "sp-consensus", @@ -7419,7 +7420,6 @@ dependencies = [ "sp-storage", "sp-test-primitives", "sp-trie", - "sp-utils", "substrate-prometheus-endpoint", "substrate-test-runtime", "thiserror", @@ -7464,6 +7464,7 @@ dependencies = [ "log 0.4.14", "parking_lot 0.11.1", "sc-client-api", + "sc-utils", "serde", "sp-api", "sp-blockchain", @@ -7472,7 +7473,6 @@ dependencies = [ "sp-runtime", "sp-state-machine", "sp-test-primitives", - "sp-utils", "substrate-prometheus-endpoint", "thiserror", ] @@ -7818,6 +7818,7 @@ dependencies = [ "sc-network-gossip", "sc-network-test", "sc-telemetry", + "sc-utils", "serde_json", "sp-api", "sp-application-crypto", @@ -7830,7 +7831,6 @@ dependencies = [ "sp-keystore", "sp-runtime", "sp-tracing", - "sp-utils", "substrate-prometheus-endpoint", "substrate-test-runtime-client", "tempfile", @@ -7947,6 +7947,7 @@ dependencies = [ "sc-client-api", "sc-consensus", "sc-peerset", + "sc-utils", "serde", "serde_json", "smallvec 1.6.1", @@ -7958,7 +7959,6 @@ dependencies = [ "sp-runtime", "sp-test-primitives", "sp-tracing", - "sp-utils", "substrate-prometheus-endpoint", "substrate-test-runtime", "substrate-test-runtime-client", @@ -8037,13 +8037,13 @@ dependencies = [ "sc-network", "sc-transaction-pool", "sc-transaction-pool-api", + "sc-utils", "sp-api", "sp-consensus", "sp-core", "sp-offchain", "sp-runtime", "sp-tracing", - "sp-utils", "substrate-test-runtime-client", "threadpool", "tokio", @@ -8057,8 +8057,8 @@ dependencies = [ "libp2p", "log 0.4.14", "rand 0.7.3", + "sc-utils", "serde_json", - "sp-utils", "wasm-timer", ] @@ -8091,6 +8091,7 @@ dependencies = [ "sc-tracing", "sc-transaction-pool", "sc-transaction-pool-api", + "sc-utils", "serde_json", "sp-api", "sp-blockchain", @@ -8102,7 +8103,6 
@@ dependencies = [ "sp-rpc", "sp-runtime", "sp-session", - "sp-utils", "sp-version", "substrate-test-runtime-client", ] @@ -8195,6 +8195,7 @@ dependencies = [ "sc-tracing", "sc-transaction-pool", "sc-transaction-pool-api", + "sc-utils", "serde", "serde_json", "sp-api", @@ -8214,7 +8215,6 @@ dependencies = [ "sp-transaction-pool", "sp-transaction-storage-proof", "sp-trie", - "sp-utils", "sp-version", "substrate-prometheus-endpoint", "substrate-test-runtime", @@ -8369,6 +8369,7 @@ dependencies = [ "sc-block-builder", "sc-client-api", "sc-transaction-pool-api", + "sc-utils", "serde", "sp-api", "sp-blockchain", @@ -8377,7 +8378,6 @@ dependencies = [ "sp-runtime", "sp-tracing", "sp-transaction-pool", - "sp-utils", "substrate-prometheus-endpoint", "substrate-test-runtime", "substrate-test-runtime-client", @@ -8398,6 +8398,16 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sc-utils" +version = "4.0.0-dev" +dependencies = [ + "futures 0.3.16", + "futures-timer 3.0.2", + "lazy_static", + "prometheus", +] + [[package]] name = "scale-info" version = "1.0.0" @@ -9585,16 +9595,6 @@ dependencies = [ "trie-standardmap", ] -[[package]] -name = "sp-utils" -version = "4.0.0-dev" -dependencies = [ - "futures 0.3.16", - "futures-timer 3.0.2", - "lazy_static", - "prometheus", -] - [[package]] name = "sp-version" version = "4.0.0-dev" From 94369479f68a21a284f4f7c5968914113198f9cc Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Thu, 9 Sep 2021 10:57:22 +0100 Subject: [PATCH 489/503] Remove pallet metadata --- frame/membership/src/lib.rs | 3 --- 1 file changed, 3 deletions(-) diff --git a/frame/membership/src/lib.rs b/frame/membership/src/lib.rs index b66dc51b3b0ec..7922d9efaf569 100644 --- a/frame/membership/src/lib.rs +++ b/frame/membership/src/lib.rs @@ -131,9 +131,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata( - PhantomData<(T::AccountId, >::Event)> = "sp_std::marker::PhantomData<(AccountId, Event)>", - 
)] pub enum Event, I: 'static = ()> { /// The given member was added; see the transaction for who. MemberAdded, From 9610410267957665d9f99e9287978c9b5b73d5d0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Mon, 13 Sep 2021 15:22:50 +0100 Subject: [PATCH 490/503] Cargo.lock --- Cargo.lock | 155 +++++++++++++++++++++++------------------------------ 1 file changed, 67 insertions(+), 88 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5c423c7f490fc..71089c6e236e8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -71,12 +71,6 @@ dependencies = [ "subtle 2.4.0", ] -[[package]] -name = "ahash" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "739f4a8db6605981345c5654f3a85b056ce52f37a39d34da03f25bf2151ea16e" - [[package]] name = "ahash" version = "0.7.4" @@ -817,9 +811,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chacha20" -version = "0.7.2" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea8756167ea0aca10e066cdbe7813bd71d2f24e69b0bc7b50509590cef2ce0b9" +checksum = "fee7ad89dc1128635074c268ee661f90c3f7e83d9fd12910608c36b47d6c3412" dependencies = [ "cfg-if 1.0.0", "cipher", @@ -829,9 +823,9 @@ dependencies = [ [[package]] name = "chacha20poly1305" -version = "0.8.1" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "175a11316f33592cf2b71416ee65283730b5b7849813c4891d02a12906ed9acc" +checksum = "1580317203210c517b6d44794abfbe600698276db18127e37ad3e69bf5e848e5" dependencies = [ "aead", "chacha20", @@ -1932,6 +1926,8 @@ dependencies = [ "frame-system", "parity-scale-codec", "sp-arithmetic", + "sp-core", + "sp-io", "sp-npos-elections", "sp-runtime", "sp-std", @@ -2193,9 +2189,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"74ed2411805f6e4e3d9bc904c95d5d423b89b3b25dc0250aa74729de20629ff9" +checksum = "5da6ba8c3bb3c165d3c7319fc1cc8304facf1fb8db99c5de877183c08a273888" dependencies = [ "futures-core", "futures-sink", @@ -2203,9 +2199,9 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af51b1b4a7fdff033703db39de8802c673eb91855f2e0d47dcf3bf2c0ef01f99" +checksum = "88d1c26957f23603395cd326b0ffe64124b818f4449552f960d815cfba83a53d" [[package]] name = "futures-executor" @@ -2221,9 +2217,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b0e06c393068f3a6ef246c75cdca793d6a46347e75286933e5e75fd2fd11582" +checksum = "522de2a0fe3e380f1bc577ba0474108faf3f6b18321dbf60b3b9c39a75073377" [[package]] name = "futures-lite" @@ -2242,9 +2238,9 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c54913bae956fb8df7f4dc6fc90362aa72e69148e3f39041fbe8742d21e0ac57" +checksum = "18e4a4b95cea4b4ccbcf1c5675ca7c4ee4e9e75eb79944d07defde18068f79bb" dependencies = [ "autocfg 1.0.1", "proc-macro-hack", @@ -2266,15 +2262,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0f30aaa67363d119812743aa5f33c201a7a66329f97d1a887022971feea4b53" +checksum = "36ea153c13024fe480590b3e3d4cad89a0cfacecc24577b68f86c6ced9c2bc11" [[package]] name = "futures-task" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbe54a98670017f3be909561f6ad13e810d9a51f3f061b902062ca3da80799f2" +checksum = "1d3d00f4eddb73e498a54394f228cd55853bdf059259e8e7bc6e69d408892e99" [[package]] name = "futures-timer" @@ 
-2290,9 +2286,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67eb846bfd58e44a8481a00049e82c43e0ccb5d61f8dc071057cb19249dd4d78" +checksum = "36568465210a3a6ee45e1f165136d68671471a501e632e9a98d96872222b5481" dependencies = [ "autocfg 1.0.1", "futures 0.1.31", @@ -2469,9 +2465,6 @@ name = "hashbrown" version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04" -dependencies = [ - "ahash 0.4.7", -] [[package]] name = "hashbrown" @@ -2479,7 +2472,7 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" dependencies = [ - "ahash 0.7.4", + "ahash", ] [[package]] @@ -2538,6 +2531,17 @@ dependencies = [ "digest 0.9.0", ] +[[package]] +name = "hmac-drbg" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6e570451493f10f6581b48cdd530413b63ea9e780f544bfd3bdcaa0d89d1a7b" +dependencies = [ + "digest 0.8.1", + "generic-array 0.12.4", + "hmac 0.7.1", +] + [[package]] name = "hmac-drbg" version = "0.3.0" @@ -3679,6 +3683,22 @@ dependencies = [ "libc", ] +[[package]] +name = "libsecp256k1" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc1e2c808481a63dc6da2074752fdd4336a3c8fcc68b83db6f1fd5224ae7962" +dependencies = [ + "arrayref", + "crunchy", + "digest 0.8.1", + "hmac-drbg 0.2.0", + "rand 0.7.3", + "sha2 0.8.2", + "subtle 2.4.0", + "typenum", +] + [[package]] name = "libsecp256k1" version = "0.5.0" @@ -3688,7 +3708,7 @@ dependencies = [ "arrayref", "base64 0.12.3", "digest 0.9.0", - "hmac-drbg", + "hmac-drbg 0.3.0", "libsecp256k1-core", "libsecp256k1-gen-ecmult", "libsecp256k1-gen-genmult", @@ 
-3707,7 +3727,7 @@ dependencies = [ "arrayref", "base64 0.12.3", "digest 0.9.0", - "hmac-drbg", + "hmac-drbg 0.3.0", "libsecp256k1-core", "libsecp256k1-gen-ecmult", "libsecp256k1-gen-genmult", @@ -3839,11 +3859,11 @@ dependencies = [ [[package]] name = "lru" -version = "0.6.5" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f374d42cdfc1d7dbf3d3dec28afab2eb97ffbf43a3234d795b5986dbf4b90ba" +checksum = "7ea2d928b485416e8908cff2d97d621db22b27f7b3b6729e438bcf42c671ba91" dependencies = [ - "hashbrown 0.9.1", + "hashbrown 0.11.2", ] [[package]] @@ -4224,11 +4244,11 @@ dependencies = [ [[package]] name = "names" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef320dab323286b50fb5cdda23f61c796a72a89998ab565ca32525c5c556f2da" +checksum = "10a8690bf09abf659851e58cd666c3d37ac6af07c2bd7a9e332cfba471715775" dependencies = [ - "rand 0.3.23", + "rand 0.8.4", ] [[package]] @@ -4378,6 +4398,7 @@ dependencies = [ "sp-keystore", "sp-runtime", "sp-timestamp", + "sp-tracing", "sp-transaction-pool", "sp-transaction-storage-proof", "sp-trie", @@ -5035,7 +5056,6 @@ dependencies = [ "frame-benchmarking", "frame-support", "frame-system", - "hex-literal", "log 0.4.14", "parity-scale-codec", "scale-info", @@ -5055,6 +5075,7 @@ dependencies = [ "frame-support", "frame-system", "hex-literal", + "libsecp256k1 0.3.5", "log 0.4.14", "pallet-balances", "pallet-contracts-primitives", @@ -5065,8 +5086,8 @@ dependencies = [ "parity-scale-codec", "pretty_assertions 0.7.2", "pwasm-utils", - "rand 0.8.4", - "rand_pcg 0.3.0", + "rand 0.7.3", + "rand_pcg 0.2.1", "scale-info", "serde", "smallvec 1.6.1", @@ -6524,9 +6545,9 @@ checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086" [[package]] name = "proc-macro2" -version = "1.0.28" +version = "1.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5c7ed8b8c7b886ea3ed7dde405212185f423ab44682667c8c6dd14aa1d9f6612" +checksum = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d" dependencies = [ "unicode-xid", ] @@ -6607,9 +6628,9 @@ dependencies = [ [[package]] name = "pwasm-utils" -version = "0.18.1" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c1a2f10b47d446372a4f397c58b329aaea72b2daf9395a623a411cb8ccb54f" +checksum = "880b3384fb00b8f6ecccd5d358b93bd2201900ae3daad213791d1864f6441f5c" dependencies = [ "byteorder", "log 0.4.14", @@ -6671,29 +6692,6 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "643f8f41a8ebc4c5dc4515c82bb8abd397b527fc20fd681b7c011c2aee5d44fb" -[[package]] -name = "rand" -version = "0.3.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c" -dependencies = [ - "libc", - "rand 0.4.6", -] - -[[package]] -name = "rand" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" -dependencies = [ - "fuchsia-cprng", - "libc", - "rand_core 0.3.1", - "rdrand", - "winapi 0.3.9", -] - [[package]] name = "rand" version = "0.6.5" @@ -6892,15 +6890,6 @@ dependencies = [ "rand_core 0.5.1", ] -[[package]] -name = "rand_pcg" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7de198537002b913568a3847e53535ace266f93526caf5c360ec41d72c5787f0" -dependencies = [ - "rand_core 0.6.2", -] - [[package]] name = "rand_xorshift" version = "0.1.1" @@ -7193,16 +7182,6 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb5d2a036dc6d2d8fd16fde3498b04306e29bd193bf306a57427019b823d5acd" -[[package]] -name = "ruzstd" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3d425143485a37727c7a46e689bbe3b883a00f42b4a52c4ac0f44855c1009b00" -dependencies = [ - "byteorder", - "twox-hash", -] - [[package]] name = "rw-stream-sink" version = "0.2.1" @@ -8146,6 +8125,7 @@ dependencies = [ "log 0.4.14", "serde_json", "substrate-prometheus-endpoint", + "tokio", ] [[package]] @@ -9240,7 +9220,6 @@ dependencies = [ name = "sp-maybe-compressed-blob" version = "4.0.0-dev" dependencies = [ - "ruzstd", "zstd", ] @@ -11629,9 +11608,9 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" +checksum = "377db0846015f7ae377174787dd452e1c5f5a9050bc6f954911d01f116daa0cd" dependencies = [ "zeroize_derive", ] From 09280c2b84eb8e5cf59b1f336bb1a2f5a7dfad2e Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 09:21:59 +0100 Subject: [PATCH 491/503] Add missing scale-info dependency --- Cargo.lock | 1 + frame/election-provider-support/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index a70a0b8e91d39..55c89fbac348b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1925,6 +1925,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", + "scale-info", "sp-arithmetic", "sp-core", "sp-io", diff --git a/frame/election-provider-support/Cargo.toml b/frame/election-provider-support/Cargo.toml index d713b98fcefa1..dfe2b11024334 100644 --- a/frame/election-provider-support/Cargo.toml +++ b/frame/election-provider-support/Cargo.toml @@ -14,6 +14,7 @@ targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "2.0.0", default-features = false, features = ["derive"] } +scale-info = { version = "1.0", default-features = false, features = ["derive"] } sp-std = { version = "4.0.0-dev", default-features = false, path = "../../primitives/std" } sp-arithmetic = { version = "4.0.0-dev", 
default-features = false, path = "../../primitives/arithmetic" } sp-npos-elections = { version = "4.0.0-dev", default-features = false, path = "../../primitives/npos-elections" } From 44b6259bf849b4dfc5580d2fbb195303f4eb2d65 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 09:30:28 +0100 Subject: [PATCH 492/503] Remove pallet event metadata --- frame/collective/src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index d3afe070c3cb1..b4da98bed2e8d 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -275,7 +275,6 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - #[pallet::metadata(T::AccountId = "AccountId", T::Hash = "Hash")] pub enum Event, I: 'static = ()> { /// A motion (given hash) has been proposed (by given account) with a threshold (given /// `MemberCount`). From 0bd45fac0a56ed30ffc9ecc09d1453505e7296cc Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 09:39:59 +0100 Subject: [PATCH 493/503] Fix error --- frame/election-provider-multi-phase/src/unsigned.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/election-provider-multi-phase/src/unsigned.rs b/frame/election-provider-multi-phase/src/unsigned.rs index bd9e801f7c9c5..af0b79177d86c 100644 --- a/frame/election-provider-multi-phase/src/unsigned.rs +++ b/frame/election-provider-multi-phase/src/unsigned.rs @@ -212,7 +212,7 @@ impl Pallet { // get the solution, with a load of checks to ensure if submitted, IT IS ABSOLUTELY VALID. 
let (raw_solution, witness) = Self::mine_and_check()?; - let score = solution.score.clone(); + let score = raw_solution.score.clone(); let call: Call = Call::submit_unsigned { raw_solution: Box::new(raw_solution), witness }.into(); From c1e06c62f396b3db1b9f9ee4836878659aea5956 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 09:46:35 +0100 Subject: [PATCH 494/503] Fix collective errors --- frame/collective/src/lib.rs | 1 + frame/collective/src/tests.rs | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/frame/collective/src/lib.rs b/frame/collective/src/lib.rs index b4da98bed2e8d..89d4c8a150c36 100644 --- a/frame/collective/src/lib.rs +++ b/frame/collective/src/lib.rs @@ -126,6 +126,7 @@ impl DefaultVote for MoreThanMajorityThenPrimeDefaultVote { /// Origin for the collective module. #[derive(PartialEq, Eq, Clone, RuntimeDebug, Encode, Decode, TypeInfo)] +#[scale_info(skip_type_params(I))] pub enum RawOrigin { /// It has been condoned by a given number of members of the collective from a given total. 
Members(MemberCount, MemberCount), diff --git a/frame/collective/src/tests.rs b/frame/collective/src/tests.rs index 5c662428fd992..e1a35a3480256 100644 --- a/frame/collective/src/tests.rs +++ b/frame/collective/src/tests.rs @@ -172,7 +172,7 @@ pub fn new_test_ext() -> sp_io::TestExternalities { } fn make_proposal(value: u64) -> Call { - Call::System(frame_system::Call::remark(value.encode())) + Call::System(frame_system::Call::remark { remark: value.encode() }) } fn record(event: Event) -> EventRecord { @@ -230,7 +230,7 @@ fn close_works() { fn proposal_weight_limit_works_on_approve() { new_test_ext().execute_with(|| { let proposal = - Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -257,7 +257,7 @@ fn proposal_weight_limit_works_on_approve() { fn proposal_weight_limit_ignored_on_disapprove() { new_test_ext().execute_with(|| { let proposal = - Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -562,7 +562,7 @@ fn limit_active_proposals() { fn correct_validate_and_get_proposal() { new_test_ext().execute_with(|| { let proposal = - Call::Collective(crate::Call::set_members(vec![1, 2, 3], None, MaxMembers::get())); + Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }) let length = proposal.encode().len() as u32; assert_ok!(Collective::propose(Origin::signed(1), 
3, Box::new(proposal.clone()), length)); @@ -782,7 +782,7 @@ fn motions_reproposing_disapproved_works() { #[test] fn motions_approval_with_enough_votes_and_lower_voting_threshold_works() { new_test_ext().execute_with(|| { - let proposal = Call::Democracy(mock_democracy::Call::external_propose_majority()); + let proposal = Call::Democracy(mock_democracy::Call::external_propose_majority {}); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash: H256 = proposal.blake2_256().into(); From 48c307606adc82f909a9450a4f33f64dc66c6ff9 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 09:49:55 +0100 Subject: [PATCH 495/503] Semicolol --- frame/collective/src/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/collective/src/tests.rs b/frame/collective/src/tests.rs index e1a35a3480256..e70853907bd3d 100644 --- a/frame/collective/src/tests.rs +++ b/frame/collective/src/tests.rs @@ -562,7 +562,7 @@ fn limit_active_proposals() { fn correct_validate_and_get_proposal() { new_test_ext().execute_with(|| { let proposal = - Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }) + Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); let length = proposal.encode().len() as u32; assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), length)); From fe3ed518e24d76daa3dca2b5f463c18afcd937f2 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 09:53:17 +0100 Subject: [PATCH 496/503] Fmt --- frame/collective/src/tests.rs | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/frame/collective/src/tests.rs b/frame/collective/src/tests.rs index e70853907bd3d..b8feb64867cf8 100644 --- a/frame/collective/src/tests.rs +++ b/frame/collective/src/tests.rs @@ -229,8 +229,11 @@ fn 
close_works() { #[test] fn proposal_weight_limit_works_on_approve() { new_test_ext().execute_with(|| { - let proposal = - Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); + let proposal = Call::Collective(crate::Call::set_members { + new_members: vec![1, 2, 3], + prime: None, + old_count: MaxMembers::get(), + }); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -256,8 +259,11 @@ fn proposal_weight_limit_works_on_approve() { #[test] fn proposal_weight_limit_ignored_on_disapprove() { new_test_ext().execute_with(|| { - let proposal = - Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); + let proposal = Call::Collective(crate::Call::set_members { + new_members: vec![1, 2, 3], + prime: None, + old_count: MaxMembers::get(), + }); let proposal_len: u32 = proposal.using_encoded(|p| p.len() as u32); let proposal_weight = proposal.get_dispatch_info().weight; let hash = BlakeTwo256::hash_of(&proposal); @@ -561,8 +567,11 @@ fn limit_active_proposals() { #[test] fn correct_validate_and_get_proposal() { new_test_ext().execute_with(|| { - let proposal = - Call::Collective(crate::Call::set_members { new_members: vec![1, 2, 3], prime: None, old_count: MaxMembers::get() }); + let proposal = Call::Collective(crate::Call::set_members { + new_members: vec![1, 2, 3], + prime: None, + old_count: MaxMembers::get(), + }); let length = proposal.encode().len() as u32; assert_ok!(Collective::propose(Origin::signed(1), 3, Box::new(proposal.clone()), length)); From 7345cb24b30df6abe4274bfcea91667d20a47719 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 09:58:20 +0100 Subject: [PATCH 497/503] Remove another metadata attribute --- frame/tips/src/lib.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/frame/tips/src/lib.rs 
b/frame/tips/src/lib.rs index da82a36f8c73b..f4a4edb7b3999 100644 --- a/frame/tips/src/lib.rs +++ b/frame/tips/src/lib.rs @@ -177,7 +177,6 @@ pub mod pallet { pub type Reasons = StorageMap<_, Identity, T::Hash, Vec, OptionQuery>; #[pallet::event] - #[pallet::metadata(T::Hash = "Hash", T::AccountId = "AccountId", BalanceOf = "Balance")] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event { /// A new tip suggestion has been opened. \[tip_hash\] From 3f2b61997fb9183cb94731065f3c190f66f6c6a1 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 10:13:10 +0100 Subject: [PATCH 498/503] Add new variant to custom digest TypeInfo --- primitives/runtime/src/generic/digest.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index 51e8bafdabc8a..23df80a80438b 100644 --- a/primitives/runtime/src/generic/digest.rs +++ b/primitives/runtime/src/generic/digest.rs @@ -225,6 +225,11 @@ where v.index(DigestItemType::Other as u8).fields( Fields::unnamed().field(|f| f.ty::>().type_name("Vec")), ) + }) + .variant("RuntimeEnvironmentUpdated,", |v| { + v.index(DigestItemType::RuntimeEnvironmentUpdated as u8).fields( + Fields::unit() + ) }), ) } @@ -600,6 +605,10 @@ mod tests { Default::default(), )), ), + DigestItemType::RuntimeEnvironmentUpdated => ( + "RuntimeEnvironmentUpdated", + DigestItem::RuntimeEnvironmentUpdated + ), }; let encoded = digest_item.encode(); let variant = variants @@ -616,5 +625,6 @@ mod tests { check(DigestItemType::Seal); check(DigestItemType::PreRuntime); check(DigestItemType::ChangesTrieSignal); + check(DigestItemType::RuntimeEnvironmentUpdated); } } From b993d853da46f275b1de376cb4cd62dbe0d4c00b Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 10:38:17 +0100 Subject: [PATCH 499/503] Fmt --- primitives/runtime/src/generic/digest.rs | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git 
a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index 23df80a80438b..92329d5915328 100644 --- a/primitives/runtime/src/generic/digest.rs +++ b/primitives/runtime/src/generic/digest.rs @@ -227,9 +227,8 @@ where ) }) .variant("RuntimeEnvironmentUpdated,", |v| { - v.index(DigestItemType::RuntimeEnvironmentUpdated as u8).fields( - Fields::unit() - ) + v.index(DigestItemType::RuntimeEnvironmentUpdated as u8) + .fields(Fields::unit()) }), ) } @@ -605,10 +604,8 @@ mod tests { Default::default(), )), ), - DigestItemType::RuntimeEnvironmentUpdated => ( - "RuntimeEnvironmentUpdated", - DigestItem::RuntimeEnvironmentUpdated - ), + DigestItemType::RuntimeEnvironmentUpdated => + ("RuntimeEnvironmentUpdated", DigestItem::RuntimeEnvironmentUpdated), }; let encoded = digest_item.encode(); let variant = variants From a0851e2a5133748022f128e171066a787630dac4 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 10:46:12 +0100 Subject: [PATCH 500/503] Cargo.lock from master --- Cargo.lock | 47 ++++++++++++++++++++++++----------------------- 1 file changed, 24 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 55c89fbac348b..1018a1dfb3999 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -274,15 +274,16 @@ dependencies = [ [[package]] name = "async-process" -version = "1.0.2" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef37b86e2fa961bae5a4d212708ea0154f904ce31d1a4a7f47e1bbc33a0c040b" +checksum = "b21b63ab5a0db0369deb913540af2892750e42d949faacc7a61495ac418a1692" dependencies = [ "async-io", "blocking", "cfg-if 1.0.0", "event-listener", "futures-lite", + "libc", "once_cell", "signal-hook", "winapi 0.3.9", @@ -339,9 +340,9 @@ checksum = "e91831deabf0d6d7ec49552e489aed63b7456a7a3c46cff62adad428110b0af0" [[package]] name = "async-trait" -version = "0.1.50" +version = "0.1.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0b98e84bbb4cbcdd97da190ba0c58a1bb0de2c1fdf67d159e192ed766aeca722" +checksum = "44318e776df68115a881de9a8fd1b9e53368d7a4a5ce4cc48517da3393233a5e" dependencies = [ "proc-macro2", "quote", @@ -1675,9 +1676,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17392a012ea30ef05a610aa97dfb49496e71c9f676b27879922ea5bdf60d9d3f" +checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" dependencies = [ "log 0.4.14", "regex", @@ -2600,9 +2601,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.5.1" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" +checksum = "f3a87b616e37e93c22fb19bcd386f02f3af5ea98a25670ad0fce773de23c5e68" [[package]] name = "httpdate" @@ -4857,9 +4858,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.36" +version = "0.10.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d9facdb76fec0b73c406f125d44d86fdad818d66fef0531eec9233ca425ff4a" +checksum = "549430950c79ae24e6d02e0b7404534ecf311d94cc9f861e9e4020187d13d885" dependencies = [ "bitflags", "cfg-if 1.0.0", @@ -4877,9 +4878,9 @@ checksum = "77af24da69f9d9341038eba93a073b1fdaaa1b788221b00a69bce9e762cb32de" [[package]] name = "openssl-sys" -version = "0.9.66" +version = "0.9.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1996d2d305e561b70d1ee0c53f1542833f4e1ac6ce9a6708b6ff2738ca67dc82" +checksum = "7a7907e3bfa08bb85105209cdfcb6c63d109f8f6c1ed6ca318fff5c1853fbc1d" dependencies = [ "autocfg 1.0.1", "cc", @@ -6657,7 +6658,7 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" dependencies = [ - 
"env_logger 0.8.3", + "env_logger 0.8.4", "log 0.4.14", "rand 0.8.4", ] @@ -8678,9 +8679,9 @@ checksum = "42a568c8f2cd051a4d283bd6eb0343ac214c1b0f1ac19f93e1175b2dee38c73d" [[package]] name = "signal-hook" -version = "0.3.6" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a7f3f92a1da3d6b1d32245d0cbcbbab0cfc45996d8df619c42bccfa6d2bbb5f" +checksum = "ef33d6d0cd06e0840fba9985aab098c147e67e05cee14d412d3345ed14ff30ac" dependencies = [ "libc", "signal-hook-registry", @@ -9977,9 +9978,9 @@ checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2" [[package]] name = "syn" -version = "1.0.69" +version = "1.0.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48fe99c6bd8b1cc636890bcc071842de909d902c81ac7dab53ba33c421ab8ffb" +checksum = "c6f107db402c2c2055242dbf4d2af0e69197202e9faacbef9571bbe47f5a1b84" dependencies = [ "proc-macro2", "quote", @@ -10127,18 +10128,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.24" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e" +checksum = "93119e4feac1cbe6c798c34d3a53ea0026b0b1de6a120deef895137c0529bfe2" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.24" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0" +checksum = "060d69a0afe7796bf42e9e2ff91f5ee691fb15c53d38b4b62a9a53eb23164745" dependencies = [ "proc-macro2", "quote", @@ -10470,9 +10471,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.2.20" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9cbe87a2fa7e35900ce5de20220a582a9483a7063811defce79d7cbd59d4cfe" +checksum = 
"ab69019741fca4d98be3c62d2b75254528b5432233fd8a4d2739fec20278de48" dependencies = [ "ansi_term 0.12.1", "chrono", From 73e6801bd2215cd62a260659574245a020e1c679 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 10:50:48 +0100 Subject: [PATCH 501/503] Remove comma lol --- primitives/runtime/src/generic/digest.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/primitives/runtime/src/generic/digest.rs b/primitives/runtime/src/generic/digest.rs index 92329d5915328..87af9bc77a5fa 100644 --- a/primitives/runtime/src/generic/digest.rs +++ b/primitives/runtime/src/generic/digest.rs @@ -226,7 +226,7 @@ where Fields::unnamed().field(|f| f.ty::>().type_name("Vec")), ) }) - .variant("RuntimeEnvironmentUpdated,", |v| { + .variant("RuntimeEnvironmentUpdated", |v| { v.index(DigestItemType::RuntimeEnvironmentUpdated as u8) .fields(Fields::unit()) }), From cb2605a6fc5b6b79f790ff1b5347f89549547ac0 Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 10:58:57 +0100 Subject: [PATCH 502/503] Fix example call error --- frame/utility/src/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index 0f064611b164c..eb26f80d912e9 100644 --- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -194,7 +194,7 @@ fn call_transfer(dest: u64, value: u64) -> Call { } fn call_foobar(err: bool, _start_weight: u64, end_weight: Option) -> Call { - Call::Example(ExampleCall::foobar { err, _start_weight, end_weight }) + Call::Example(ExampleCall::foobar { err, start_weight, end_weight }) } #[test] From 6875219c6a74130d9a59666ae2c59f9ac7bcacad Mon Sep 17 00:00:00 2001 From: Andrew Jones Date: Wed, 15 Sep 2021 11:19:34 +0100 Subject: [PATCH 503/503] Fix example call error properly --- frame/utility/src/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frame/utility/src/tests.rs b/frame/utility/src/tests.rs index eb26f80d912e9..bbfbb417e23d1 100644 
--- a/frame/utility/src/tests.rs +++ b/frame/utility/src/tests.rs @@ -193,7 +193,7 @@ fn call_transfer(dest: u64, value: u64) -> Call { Call::Balances(BalancesCall::transfer { dest, value }) } -fn call_foobar(err: bool, _start_weight: u64, end_weight: Option) -> Call { +fn call_foobar(err: bool, start_weight: u64, end_weight: Option) -> Call { Call::Example(ExampleCall::foobar { err, start_weight, end_weight }) }

::storage_version(); log::info!( @@ -59,7 +56,7 @@ pub fn migrate< new_pallet_name.as_ref().as_bytes(), ); ::BlockWeights::get().max_block - } + }, _ => { log::warn!( target: "runtime::afg", @@ -75,11 +72,9 @@ pub fn migrate< /// [`frame_support::traits::OnRuntimeUpgrade::pre_upgrade`] for further testing. /// /// Panics if anything goes wrong. -pub fn pre_migration< - T: frame_system::Config, - P: GetPalletVersion + 'static, - N: AsRef, ->(new: N) { +pub fn pre_migration>( + new: N, +) { let new = new.as_ref(); log::info!("pre-migration grandpa test with new = {}", new); @@ -119,10 +114,6 @@ pub fn post_migration() { log::info!("post-migration grandpa"); // Assert that nothing remains at the old prefix - assert!( - sp_io::storage::next_key(&twox_128(OLD_PREFIX)).map_or( - true, - |next_key| !next_key.starts_with(&twox_128(OLD_PREFIX)) - ) - ); + assert!(sp_io::storage::next_key(&twox_128(OLD_PREFIX)) + .map_or(true, |next_key| !next_key.starts_with(&twox_128(OLD_PREFIX)))); } diff --git a/frame/grandpa/src/mock.rs b/frame/grandpa/src/mock.rs index 768564c30105f..882acdb4bcc12 100644 --- a/frame/grandpa/src/mock.rs +++ b/frame/grandpa/src/mock.rs @@ -19,13 +19,15 @@ #![cfg(test)] -use crate::{AuthorityId, AuthorityList, ConsensusLog, Config, self as pallet_grandpa}; +use crate::{self as pallet_grandpa, AuthorityId, AuthorityList, Config, ConsensusLog}; use ::grandpa as finality_grandpa; use codec::Encode; +use frame_election_provider_support::onchain; use frame_support::{ parameter_types, - traits::{KeyOwnerProofSystem, OnFinalize, OnInitialize, GenesisBuild}, + traits::{GenesisBuild, KeyOwnerProofSystem, OnFinalize, OnInitialize}, }; +use pallet_session::historical as pallet_session_historical; use pallet_staking::EraIndex; use sp_core::{crypto::KeyTypeId, H256}; use sp_finality_grandpa::{RoundNumber, SetId, GRANDPA_ENGINE_ID}; @@ -38,8 +40,6 @@ use sp_runtime::{ DigestItem, Perbill, }; use sp_staking::SessionIndex; -use pallet_session::historical as 
pallet_session_historical; -use frame_election_provider_support::onchain; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -275,13 +275,9 @@ pub fn new_test_ext(vec: Vec<(u64, u64)>) -> sp_io::TestExternalities { } pub fn new_test_ext_raw_authorities(authorities: AuthorityList) -> sp_io::TestExternalities { - let mut t = frame_system::GenesisConfig::default() - .build_storage::() - .unwrap(); + let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - let balances: Vec<_> = (0..authorities.len()) - .map(|i| (i as u64, 10_000_000)) - .collect(); + let balances: Vec<_> = (0..authorities.len()).map(|i| (i as u64, 10_000_000)).collect(); pallet_balances::GenesisConfig:: { balances } .assimilate_storage(&mut t) @@ -295,9 +291,7 @@ pub fn new_test_ext_raw_authorities(authorities: AuthorityList) -> sp_io::TestEx ( i as u64, i as u64, - TestSessionKeys { - grandpa_authority: AuthorityId::from(k.clone()), - }, + TestSessionKeys { grandpa_authority: AuthorityId::from(k.clone()) }, ) }) .collect(); @@ -311,12 +305,7 @@ pub fn new_test_ext_raw_authorities(authorities: AuthorityList) -> sp_io::TestEx // controllers are the index + 1000 let stakers: Vec<_> = (0..authorities.len()) .map(|i| { - ( - i as u64, - i as u64 + 1000, - 10_000, - pallet_staking::StakerStatus::::Validator, - ) + (i as u64, i as u64 + 1000, 10_000, pallet_staking::StakerStatus::::Validator) }) .collect(); @@ -348,12 +337,7 @@ pub fn start_session(session_index: SessionIndex) { System::parent_hash() }; - System::initialize( - &(i as u64 + 1), - &parent_hash, - &Default::default(), - Default::default(), - ); + System::initialize(&(i as u64 + 1), &parent_hash, &Default::default(), Default::default()); System::set_block_number((i + 1).into()); Timestamp::set_timestamp(System::block_number() * 6000); @@ -372,12 +356,7 @@ pub fn start_era(era_index: EraIndex) { } pub fn initialize_block(number: u64, parent_hash: 
H256) { - System::initialize( - &number, - &parent_hash, - &Default::default(), - Default::default(), - ); + System::initialize(&number, &parent_hash, &Default::default(), Default::default()); } pub fn generate_equivocation_proof( @@ -386,10 +365,7 @@ pub fn generate_equivocation_proof( vote2: (RoundNumber, H256, u64, &Ed25519Keyring), ) -> sp_finality_grandpa::EquivocationProof { let signed_prevote = |round, hash, number, keyring: &Ed25519Keyring| { - let prevote = finality_grandpa::Prevote { - target_hash: hash, - target_number: number, - }; + let prevote = finality_grandpa::Prevote { target_hash: hash, target_number: number }; let prevote_msg = finality_grandpa::Message::Prevote(prevote.clone()); let payload = sp_finality_grandpa::localized_payload(round, set_id, &prevote_msg); diff --git a/frame/grandpa/src/tests.rs b/frame/grandpa/src/tests.rs index 0692102771bfb..8337876d88bc0 100644 --- a/frame/grandpa/src/tests.rs +++ b/frame/grandpa/src/tests.rs @@ -24,7 +24,7 @@ use crate::mock::*; use codec::Encode; use fg_primitives::ScheduledChange; use frame_support::{ - assert_err, assert_ok, assert_noop, + assert_err, assert_noop, assert_ok, traits::{Currency, OnFinalize, OneSessionHandler}, weights::{GetDispatchInfo, Pays}, }; @@ -43,21 +43,24 @@ fn authorities_change_logged() { Grandpa::on_finalize(1); let header = System::finalize(); - assert_eq!(header.digest, Digest { - logs: vec![ - grandpa_log(ConsensusLog::ScheduledChange( - ScheduledChange { delay: 0, next_authorities: to_authorities(vec![(4, 1), (5, 1), (6, 1)]) } - )), - ], - }); - - assert_eq!(System::events(), vec![ - EventRecord { + assert_eq!( + header.digest, + Digest { + logs: vec![grandpa_log(ConsensusLog::ScheduledChange(ScheduledChange { + delay: 0, + next_authorities: to_authorities(vec![(4, 1), (5, 1), (6, 1)]) + })),], + } + ); + + assert_eq!( + System::events(), + vec![EventRecord { phase: Phase::Finalization, event: Event::NewAuthorities(to_authorities(vec![(4, 1), (5, 1), (6, 1)])).into(), 
topics: vec![], - }, - ]); + },] + ); }); } @@ -68,13 +71,15 @@ fn authorities_change_logged_after_delay() { Grandpa::schedule_change(to_authorities(vec![(4, 1), (5, 1), (6, 1)]), 1, None).unwrap(); Grandpa::on_finalize(1); let header = System::finalize(); - assert_eq!(header.digest, Digest { - logs: vec![ - grandpa_log(ConsensusLog::ScheduledChange( - ScheduledChange { delay: 1, next_authorities: to_authorities(vec![(4, 1), (5, 1), (6, 1)]) } - )), - ], - }); + assert_eq!( + header.digest, + Digest { + logs: vec![grandpa_log(ConsensusLog::ScheduledChange(ScheduledChange { + delay: 1, + next_authorities: to_authorities(vec![(4, 1), (5, 1), (6, 1)]) + })),], + } + ); // no change at this height. assert_eq!(System::events(), vec![]); @@ -84,13 +89,14 @@ fn authorities_change_logged_after_delay() { Grandpa::on_finalize(2); let _header = System::finalize(); - assert_eq!(System::events(), vec![ - EventRecord { + assert_eq!( + System::events(), + vec![EventRecord { phase: Phase::Finalization, event: Event::NewAuthorities(to_authorities(vec![(4, 1), (5, 1), (6, 1)])).into(), topics: vec![], - }, - ]); + },] + ); }); } @@ -131,11 +137,7 @@ fn cannot_schedule_change_when_one_pending() { fn dispatch_forced_change() { new_test_ext(vec![(1, 1), (2, 1), (3, 1)]).execute_with(|| { initialize_block(1, Default::default()); - Grandpa::schedule_change( - to_authorities(vec![(4, 1), (5, 1), (6, 1)]), - 5, - Some(0), - ).unwrap(); + Grandpa::schedule_change(to_authorities(vec![(4, 1), (5, 1), (6, 1)]), 5, Some(0)).unwrap(); assert!(>::exists()); assert_noop!( @@ -168,7 +170,10 @@ fn dispatch_forced_change() { { initialize_block(7, header.hash()); assert!(!>::exists()); - assert_eq!(Grandpa::grandpa_authorities(), to_authorities(vec![(4, 1), (5, 1), (6, 1)])); + assert_eq!( + Grandpa::grandpa_authorities(), + to_authorities(vec![(4, 1), (5, 1), (6, 1)]) + ); assert_ok!(Grandpa::schedule_change(to_authorities(vec![(5, 1)]), 1, None)); Grandpa::on_finalize(7); header = 
System::finalize(); @@ -178,7 +183,10 @@ fn dispatch_forced_change() { { initialize_block(8, header.hash()); assert!(>::exists()); - assert_eq!(Grandpa::grandpa_authorities(), to_authorities(vec![(4, 1), (5, 1), (6, 1)])); + assert_eq!( + Grandpa::grandpa_authorities(), + to_authorities(vec![(4, 1), (5, 1), (6, 1)]) + ); assert_noop!( Grandpa::schedule_change(to_authorities(vec![(5, 1)]), 1, None), Error::::ChangePending @@ -205,7 +213,11 @@ fn dispatch_forced_change() { { initialize_block(11, header.hash()); assert!(!>::exists()); - assert_ok!(Grandpa::schedule_change(to_authorities(vec![(5, 1), (6, 1), (7, 1)]), 5, Some(0))); + assert_ok!(Grandpa::schedule_change( + to_authorities(vec![(5, 1), (6, 1), (7, 1)]), + 5, + Some(0) + )); assert_eq!(Grandpa::next_forced(), Some(21)); Grandpa::on_finalize(11); header = System::finalize(); @@ -222,13 +234,7 @@ fn schedule_pause_only_when_live() { Grandpa::schedule_pause(1).unwrap(); // we've switched to the pending pause state - assert_eq!( - Grandpa::state(), - StoredState::PendingPause { - scheduled_at: 1u64, - delay: 1, - }, - ); + assert_eq!(Grandpa::state(), StoredState::PendingPause { scheduled_at: 1u64, delay: 1 },); Grandpa::on_finalize(1); let _ = System::finalize(); @@ -242,10 +248,7 @@ fn schedule_pause_only_when_live() { let _ = System::finalize(); // after finalizing block 2 the set should have switched to paused state - assert_eq!( - Grandpa::state(), - StoredState::Paused, - ); + assert_eq!(Grandpa::state(), StoredState::Paused,); }); } @@ -257,20 +260,14 @@ fn schedule_resume_only_when_paused() { // the set is currently live, resuming it is an error assert_noop!(Grandpa::schedule_resume(1), Error::::ResumeFailed); - assert_eq!( - Grandpa::state(), - StoredState::Live, - ); + assert_eq!(Grandpa::state(), StoredState::Live,); // we schedule a pause to be applied instantly Grandpa::schedule_pause(0).unwrap(); Grandpa::on_finalize(1); let _ = System::finalize(); - assert_eq!( - Grandpa::state(), - 
StoredState::Paused, - ); + assert_eq!(Grandpa::state(), StoredState::Paused,); // we schedule the set to go back live in 2 blocks initialize_block(2, Default::default()); @@ -287,10 +284,7 @@ fn schedule_resume_only_when_paused() { let _ = System::finalize(); // it should be live at block 4 - assert_eq!( - Grandpa::state(), - StoredState::Live, - ); + assert_eq!(Grandpa::state(), StoredState::Live,); }); } @@ -298,26 +292,11 @@ fn schedule_resume_only_when_paused() { fn time_slot_have_sane_ord() { // Ensure that `Ord` implementation is sane. const FIXTURE: &[GrandpaTimeSlot] = &[ - GrandpaTimeSlot { - set_id: 0, - round: 0, - }, - GrandpaTimeSlot { - set_id: 0, - round: 1, - }, - GrandpaTimeSlot { - set_id: 1, - round: 0, - }, - GrandpaTimeSlot { - set_id: 1, - round: 1, - }, - GrandpaTimeSlot { - set_id: 1, - round: 2, - } + GrandpaTimeSlot { set_id: 0, round: 0 }, + GrandpaTimeSlot { set_id: 0, round: 1 }, + GrandpaTimeSlot { set_id: 1, round: 0 }, + GrandpaTimeSlot { set_id: 1, round: 1 }, + GrandpaTimeSlot { set_id: 1, round: 2 }, ]; assert!(FIXTURE.windows(2).all(|f| f[0] < f[1])); } @@ -325,16 +304,9 @@ fn time_slot_have_sane_ord() { /// Returns a list with 3 authorities with known keys: /// Alice, Bob and Charlie. 
pub fn test_authorities() -> AuthorityList { - let authorities = vec![ - Ed25519Keyring::Alice, - Ed25519Keyring::Bob, - Ed25519Keyring::Charlie, - ]; + let authorities = vec![Ed25519Keyring::Alice, Ed25519Keyring::Bob, Ed25519Keyring::Charlie]; - authorities - .into_iter() - .map(|id| (id.public().into(), 1u64)) - .collect() + authorities.into_iter().map(|id| (id.public().into(), 1u64)).collect() } #[test] @@ -357,11 +329,7 @@ fn report_equivocation_current_set_works() { assert_eq!( Staking::eras_stakers(1, validator), - pallet_staking::Exposure { - total: 10_000, - own: 10_000, - others: vec![], - }, + pallet_staking::Exposure { total: 10_000, own: 10_000, others: vec![] }, ); } @@ -384,13 +352,11 @@ fn report_equivocation_current_set_works() { Historical::prove((sp_finality_grandpa::KEY_TYPE, &equivocation_key)).unwrap(); // report the equivocation and the tx should be dispatched successfully - assert_ok!( - Grandpa::report_equivocation_unsigned( - Origin::none(), - equivocation_proof, - key_owner_proof, - ), - ); + assert_ok!(Grandpa::report_equivocation_unsigned( + Origin::none(), + equivocation_proof, + key_owner_proof, + ),); start_era(2); @@ -401,17 +367,13 @@ fn report_equivocation_current_set_works() { assert_eq!(Staking::slashable_balance_of(&equivocation_validator_id), 0); assert_eq!( Staking::eras_stakers(2, equivocation_validator_id), - pallet_staking::Exposure { - total: 0, - own: 0, - others: vec![], - }, + pallet_staking::Exposure { total: 0, own: 0, others: vec![] }, ); // check that the balances of all other validators are left intact. 
for validator in &validators { if *validator == equivocation_validator_id { - continue; + continue } assert_eq!(Balances::total_balance(validator), 10_000_000); @@ -419,11 +381,7 @@ fn report_equivocation_current_set_works() { assert_eq!( Staking::eras_stakers(2, validator), - pallet_staking::Exposure { - total: 10_000, - own: 10_000, - others: vec![], - }, + pallet_staking::Exposure { total: 10_000, own: 10_000, others: vec![] }, ); } }); @@ -455,11 +413,7 @@ fn report_equivocation_old_set_works() { assert_eq!( Staking::eras_stakers(2, validator), - pallet_staking::Exposure { - total: 10_000, - own: 10_000, - others: vec![], - }, + pallet_staking::Exposure { total: 10_000, own: 10_000, others: vec![] }, ); } @@ -476,13 +430,11 @@ fn report_equivocation_old_set_works() { // report the equivocation using the key ownership proof generated on // the old set, the tx should be dispatched successfully - assert_ok!( - Grandpa::report_equivocation_unsigned( - Origin::none(), - equivocation_proof, - key_owner_proof, - ), - ); + assert_ok!(Grandpa::report_equivocation_unsigned( + Origin::none(), + equivocation_proof, + key_owner_proof, + ),); start_era(3); @@ -494,17 +446,13 @@ fn report_equivocation_old_set_works() { assert_eq!( Staking::eras_stakers(3, equivocation_validator_id), - pallet_staking::Exposure { - total: 0, - own: 0, - others: vec![], - }, + pallet_staking::Exposure { total: 0, own: 0, others: vec![] }, ); // check that the balances of all other validators are left intact. 
for validator in &validators { if *validator == equivocation_validator_id { - continue; + continue } assert_eq!(Balances::total_balance(validator), 10_000_000); @@ -512,11 +460,7 @@ fn report_equivocation_old_set_works() { assert_eq!( Staking::eras_stakers(3, validator), - pallet_staking::Exposure { - total: 10_000, - own: 10_000, - others: vec![], - }, + pallet_staking::Exposure { total: 10_000, own: 10_000, others: vec![] }, ); } }); @@ -737,10 +681,8 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { let key_owner_proof = Historical::prove((sp_finality_grandpa::KEY_TYPE, &equivocation_key)).unwrap(); - let call = Call::report_equivocation_unsigned( - equivocation_proof.clone(), - key_owner_proof.clone(), - ); + let call = + Call::report_equivocation_unsigned(equivocation_proof.clone(), key_owner_proof.clone()); // only local/inblock reports are allowed assert_eq!( @@ -752,11 +694,7 @@ fn report_equivocation_validate_unsigned_prevents_duplicates() { ); // the transaction is valid when passed as local - let tx_tag = ( - equivocation_key, - set_id, - 1u64, - ); + let tx_tag = (equivocation_key, set_id, 1u64); assert_eq!( ::validate_unsigned( @@ -861,23 +799,19 @@ fn always_schedules_a_change_on_new_session_when_stalled() { fn report_equivocation_has_valid_weight() { // the weight depends on the size of the validator set, // but there's a lower bound of 100 validators. - assert!( - (1..=100) - .map(::WeightInfo::report_equivocation) - .collect::>() - .windows(2) - .all(|w| w[0] == w[1]) - ); + assert!((1..=100) + .map(::WeightInfo::report_equivocation) + .collect::>() + .windows(2) + .all(|w| w[0] == w[1])); // after 100 validators the weight should keep increasing // with every extra validator. 
- assert!( - (100..=1000) - .map(::WeightInfo::report_equivocation) - .collect::>() - .windows(2) - .all(|w| w[0] < w[1]) - ); + assert!((100..=1000) + .map(::WeightInfo::report_equivocation) + .collect::>() + .windows(2) + .all(|w| w[0] < w[1])); } #[test] diff --git a/frame/identity/src/benchmarking.rs b/frame/identity/src/benchmarking.rs index 4fb76fcb4138c..5cae65818145d 100644 --- a/frame/identity/src/benchmarking.rs +++ b/frame/identity/src/benchmarking.rs @@ -21,11 +21,11 @@ use super::*; +use crate::Pallet as Identity; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; +use frame_support::{ensure, traits::Get}; use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; use sp_runtime::traits::Bounded; -use frame_support::{ensure, traits::Get}; -use crate::Pallet as Identity; const SEED: u32 = 0; @@ -39,11 +39,19 @@ fn add_registrars(r: u32) -> Result<(), &'static str> { let registrar: T::AccountId = account("registrar", i, SEED); let _ = T::Currency::make_free_balance_be(®istrar, BalanceOf::::max_value()); Identity::::add_registrar(RawOrigin::Root.into(), registrar.clone())?; - Identity::::set_fee(RawOrigin::Signed(registrar.clone()).into(), i.into(), 10u32.into())?; - let fields = IdentityFields( - IdentityField::Display | IdentityField::Legal | IdentityField::Web | IdentityField::Riot - | IdentityField::Email | IdentityField::PgpFingerprint | IdentityField::Image | IdentityField::Twitter - ); + Identity::::set_fee( + RawOrigin::Signed(registrar.clone()).into(), + i.into(), + 10u32.into(), + )?; + let fields = + IdentityFields( + IdentityField::Display | + IdentityField::Legal | IdentityField::Web | + IdentityField::Riot | IdentityField::Email | + IdentityField::PgpFingerprint | + IdentityField::Image | IdentityField::Twitter, + ); Identity::::set_fields(RawOrigin::Signed(registrar.clone()).into(), i.into(), fields)?; } @@ -53,7 +61,10 @@ fn 
add_registrars(r: u32) -> Result<(), &'static str> { // Create `s` sub-accounts for the identity of `who` and return them. // Each will have 32 bytes of raw data added to it. -fn create_sub_accounts(who: &T::AccountId, s: u32) -> Result, &'static str> { +fn create_sub_accounts( + who: &T::AccountId, + s: u32, +) -> Result, &'static str> { let mut subs = Vec::new(); let who_origin = RawOrigin::Signed(who.clone()); let data = Data::Raw(vec![0; 32].try_into().unwrap()); @@ -73,7 +84,10 @@ fn create_sub_accounts(who: &T::AccountId, s: u32) -> Result(who: &T::AccountId, s: u32) -> Result, &'static str> { +fn add_sub_accounts( + who: &T::AccountId, + s: u32, +) -> Result, &'static str> { let who_origin = RawOrigin::Signed(who.clone()); let subs = create_sub_accounts::(who, s)?; @@ -399,8 +413,4 @@ benchmarks! { } -impl_benchmark_test_suite!( - Identity, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Identity, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/identity/src/lib.rs b/frame/identity/src/lib.rs index 93feb7387aa03..935628147b343 100644 --- a/frame/identity/src/lib.rs +++ b/frame/identity/src/lib.rs @@ -72,32 +72,34 @@ #![cfg_attr(not(feature = "std"), no_std)] +mod benchmarking; #[cfg(test)] mod tests; mod types; -mod benchmarking; pub mod weights; -use sp_std::prelude::*; -use sp_std::convert::TryInto; -use sp_runtime::traits::{StaticLookup, Zero, AppendZerosInput, Saturating}; use frame_support::traits::{BalanceStatus, Currency, OnUnbalanced, ReservableCurrency}; +use sp_runtime::traits::{AppendZerosInput, Saturating, StaticLookup, Zero}; +use sp_std::{convert::TryInto, prelude::*}; pub use weights::WeightInfo; pub use pallet::*; pub use types::{ - Data, IdentityField, IdentityFields, IdentityInfo, Judgement, RegistrarIndex, - RegistrarInfo, Registration, + Data, IdentityField, IdentityFields, IdentityInfo, Judgement, RegistrarIndex, RegistrarInfo, + Registration, }; -type BalanceOf = <::Currency as 
Currency<::AccountId>>::Balance; -type NegativeImbalanceOf = <::Currency as Currency<::AccountId>>::NegativeImbalance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; +type NegativeImbalanceOf = <::Currency as Currency< + ::AccountId, +>>::NegativeImbalance; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::config] pub trait Config: frame_system::Config { @@ -121,7 +123,6 @@ pub mod pallet { #[pallet::constant] type SubAccountDeposit: Get>; - /// The maximum number of sub-accounts allowed per identified account. #[pallet::constant] type MaxSubAccounts: Get; @@ -171,13 +172,8 @@ pub mod pallet { /// context. If the account is not some other account's sub-identity, then just `None`. #[pallet::storage] #[pallet::getter(fn super_of)] - pub(super) type SuperOf = StorageMap< - _, - Blake2_128Concat, - T::AccountId, - (T::AccountId, Data), - OptionQuery, - >; + pub(super) type SuperOf = + StorageMap<_, Blake2_128Concat, T::AccountId, (T::AccountId, Data), OptionQuery>; /// Alternative "sub" identities of this account. /// @@ -239,7 +235,7 @@ pub mod pallet { /// Sender is not a sub-account. NotSub, /// Sub-account isn't owned by sender. - NotOwned + NotOwned, } #[pallet::event] @@ -286,17 +282,23 @@ pub mod pallet { /// - One event. 
/// # #[pallet::weight(T::WeightInfo::add_registrar(T::MaxRegistrars::get()))] - pub fn add_registrar(origin: OriginFor, account: T::AccountId) -> DispatchResultWithPostInfo { + pub fn add_registrar( + origin: OriginFor, + account: T::AccountId, + ) -> DispatchResultWithPostInfo { T::RegistrarOrigin::ensure_origin(origin)?; let (i, registrar_count) = >::try_mutate( |registrars| -> Result<(RegistrarIndex, usize), DispatchError> { - registrars.try_push(Some(RegistrarInfo { - account, fee: Zero::zero(), fields: Default::default() - })) - .map_err(|_| Error::::TooManyRegistrars)?; + registrars + .try_push(Some(RegistrarInfo { + account, + fee: Zero::zero(), + fields: Default::default(), + })) + .map_err(|_| Error::::TooManyRegistrars)?; Ok(((registrars.len() - 1) as RegistrarIndex, registrars.len())) - } + }, )?; Self::deposit_event(Event::RegistrarAdded(i)); @@ -327,7 +329,10 @@ pub mod pallet { T::MaxRegistrars::get().into(), // R T::MaxAdditionalFields::get().into(), // X ))] - pub fn set_identity(origin: OriginFor, info: IdentityInfo) -> DispatchResultWithPostInfo { + pub fn set_identity( + origin: OriginFor, + info: IdentityInfo, + ) -> DispatchResultWithPostInfo { let sender = ensure_signed(origin)?; let extra_fields = info.additional.len() as u32; ensure!(extra_fields <= T::MaxAdditionalFields::get(), Error::::TooManyFields); @@ -339,8 +344,9 @@ pub mod pallet { id.judgements.retain(|j| j.1.is_sticky()); id.info = info; id - } - None => Registration { info, judgements: BoundedVec::default(), deposit: Zero::zero() }, + }, + None => + Registration { info, judgements: BoundedVec::default(), deposit: Zero::zero() }, }; let old_deposit = id.deposit; @@ -359,8 +365,9 @@ pub mod pallet { Ok(Some(T::WeightInfo::set_identity( judgements as u32, // R - extra_fields // X - )).into()) + extra_fields, // X + )) + .into()) } /// Set the sub-accounts of the sender. 
@@ -393,15 +400,22 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::set_subs_old(T::MaxSubAccounts::get()) // P: Assume max sub accounts removed. .saturating_add(T::WeightInfo::set_subs_new(subs.len() as u32)) // S: Assume all subs are new. )] - pub fn set_subs(origin: OriginFor, subs: Vec<(T::AccountId, Data)>) -> DispatchResultWithPostInfo { + pub fn set_subs( + origin: OriginFor, + subs: Vec<(T::AccountId, Data)>, + ) -> DispatchResultWithPostInfo { let sender = ensure_signed(origin)?; ensure!(>::contains_key(&sender), Error::::NotFound); - ensure!(subs.len() <= T::MaxSubAccounts::get() as usize, Error::::TooManySubAccounts); + ensure!( + subs.len() <= T::MaxSubAccounts::get() as usize, + Error::::TooManySubAccounts + ); let (old_deposit, old_ids) = >::get(&sender); let new_deposit = T::SubAccountDeposit::get() * >::from(subs.len() as u32); - let not_other_sub = subs.iter().filter_map(|i| SuperOf::::get(&i.0)).all(|i| &i.0 == &sender); + let not_other_sub = + subs.iter().filter_map(|i| SuperOf::::get(&i.0)).all(|i| &i.0 == &sender); ensure!(not_other_sub, Error::::AlreadyClaimed); if old_deposit < new_deposit { @@ -430,8 +444,9 @@ pub mod pallet { Ok(Some( T::WeightInfo::set_subs_old(old_ids.len() as u32) // P: Real number of old accounts removed. - .saturating_add(T::WeightInfo::set_subs_new(new_subs as u32)) // S: New subs added. - ).into()) + .saturating_add(T::WeightInfo::set_subs_new(new_subs as u32)), // S: New subs added. + ) + .into()) } /// Clear an account's identity info and all sub-accounts and return all deposits. @@ -473,10 +488,11 @@ pub mod pallet { Self::deposit_event(Event::IdentityCleared(sender, deposit)); Ok(Some(T::WeightInfo::clear_identity( - id.judgements.len() as u32, // R - sub_ids.len() as u32, // S - id.info.additional.len() as u32 // X - )).into()) + id.judgements.len() as u32, // R + sub_ids.len() as u32, // S + id.info.additional.len() as u32, // X + )) + .into()) } /// Request a judgement from a registrar. 
@@ -506,28 +522,30 @@ pub mod pallet { T::MaxRegistrars::get().into(), // R T::MaxAdditionalFields::get().into(), // X ))] - pub fn request_judgement(origin: OriginFor, + pub fn request_judgement( + origin: OriginFor, #[pallet::compact] reg_index: RegistrarIndex, #[pallet::compact] max_fee: BalanceOf, ) -> DispatchResultWithPostInfo { let sender = ensure_signed(origin)?; let registrars = >::get(); - let registrar = registrars.get(reg_index as usize).and_then(Option::as_ref) + let registrar = registrars + .get(reg_index as usize) + .and_then(Option::as_ref) .ok_or(Error::::EmptyIndex)?; ensure!(max_fee >= registrar.fee, Error::::FeeChanged); let mut id = >::get(&sender).ok_or(Error::::NoIdentity)?; let item = (reg_index, Judgement::FeePaid(registrar.fee)); match id.judgements.binary_search_by_key(®_index, |x| x.0) { - Ok(i) => if id.judgements[i].1.is_sticky() { - Err(Error::::StickyJudgement)? - } else { - id.judgements[i] = item - }, - Err(i) => id - .judgements - .try_insert(i, item) - .map_err(|_| Error::::TooManyRegistrars)?, + Ok(i) => + if id.judgements[i].1.is_sticky() { + Err(Error::::StickyJudgement)? + } else { + id.judgements[i] = item + }, + Err(i) => + id.judgements.try_insert(i, item).map_err(|_| Error::::TooManyRegistrars)?, } T::Currency::reserve(&sender, registrar.fee)?; @@ -538,10 +556,8 @@ pub mod pallet { Self::deposit_event(Event::JudgementRequested(sender, reg_index)); - Ok(Some(T::WeightInfo::request_judgement( - judgements as u32, - extra_fields as u32, - )).into()) + Ok(Some(T::WeightInfo::request_judgement(judgements as u32, extra_fields as u32)) + .into()) } /// Cancel a previous request. 
@@ -565,11 +581,16 @@ pub mod pallet { T::MaxRegistrars::get().into(), // R T::MaxAdditionalFields::get().into(), // X ))] - pub fn cancel_request(origin: OriginFor, reg_index: RegistrarIndex) -> DispatchResultWithPostInfo { + pub fn cancel_request( + origin: OriginFor, + reg_index: RegistrarIndex, + ) -> DispatchResultWithPostInfo { let sender = ensure_signed(origin)?; let mut id = >::get(&sender).ok_or(Error::::NoIdentity)?; - let pos = id.judgements.binary_search_by_key(®_index, |x| x.0) + let pos = id + .judgements + .binary_search_by_key(®_index, |x| x.0) .map_err(|_| Error::::NotFound)?; let fee = if let Judgement::FeePaid(fee) = id.judgements.remove(pos).1 { fee @@ -585,10 +606,7 @@ pub mod pallet { Self::deposit_event(Event::JudgementUnrequested(sender, reg_index)); - Ok(Some(T::WeightInfo::cancel_request( - judgements as u32, - extra_fields as u32 - )).into()) + Ok(Some(T::WeightInfo::cancel_request(judgements as u32, extra_fields as u32)).into()) } /// Set the fee required for a judgement to be requested from a registrar. 
@@ -605,7 +623,8 @@ pub mod pallet { /// - Benchmark: 7.315 + R * 0.329 µs (min squares analysis) /// # #[pallet::weight(T::WeightInfo::set_fee(T::MaxRegistrars::get()))] // R - pub fn set_fee(origin: OriginFor, + pub fn set_fee( + origin: OriginFor, #[pallet::compact] index: RegistrarIndex, #[pallet::compact] fee: BalanceOf, ) -> DispatchResultWithPostInfo { @@ -614,7 +633,14 @@ pub mod pallet { let registrars = >::mutate(|rs| -> Result { rs.get_mut(index as usize) .and_then(|x| x.as_mut()) - .and_then(|r| if r.account == who { r.fee = fee; Some(()) } else { None }) + .and_then(|r| { + if r.account == who { + r.fee = fee; + Some(()) + } else { + None + } + }) .ok_or_else(|| DispatchError::from(Error::::InvalidIndex))?; Ok(rs.len()) })?; @@ -635,7 +661,8 @@ pub mod pallet { /// - Benchmark: 8.823 + R * 0.32 µs (min squares analysis) /// # #[pallet::weight(T::WeightInfo::set_account_id(T::MaxRegistrars::get()))] // R - pub fn set_account_id(origin: OriginFor, + pub fn set_account_id( + origin: OriginFor, #[pallet::compact] index: RegistrarIndex, new: T::AccountId, ) -> DispatchResultWithPostInfo { @@ -644,7 +671,14 @@ pub mod pallet { let registrars = >::mutate(|rs| -> Result { rs.get_mut(index as usize) .and_then(|x| x.as_mut()) - .and_then(|r| if r.account == who { r.account = new; Some(()) } else { None }) + .and_then(|r| { + if r.account == who { + r.account = new; + Some(()) + } else { + None + } + }) .ok_or_else(|| DispatchError::from(Error::::InvalidIndex))?; Ok(rs.len()) })?; @@ -665,7 +699,8 @@ pub mod pallet { /// - Benchmark: 7.464 + R * 0.325 µs (min squares analysis) /// # #[pallet::weight(T::WeightInfo::set_fields(T::MaxRegistrars::get()))] // R - pub fn set_fields(origin: OriginFor, + pub fn set_fields( + origin: OriginFor, #[pallet::compact] index: RegistrarIndex, fields: IdentityFields, ) -> DispatchResultWithPostInfo { @@ -674,13 +709,21 @@ pub mod pallet { let registrars = >::mutate(|rs| -> Result { rs.get_mut(index as usize) .and_then(|x| 
x.as_mut()) - .and_then(|r| if r.account == who { r.fields = fields; Some(()) } else { None }) + .and_then(|r| { + if r.account == who { + r.fields = fields; + Some(()) + } else { + None + } + }) .ok_or_else(|| DispatchError::from(Error::::InvalidIndex))?; Ok(rs.len()) })?; Ok(Some(T::WeightInfo::set_fields( - registrars as u32 // R - )).into()) + registrars as u32, // R + )) + .into()) } /// Provide a judgement for an account's identity. @@ -706,7 +749,8 @@ pub mod pallet { T::MaxRegistrars::get().into(), // R T::MaxAdditionalFields::get().into(), // X ))] - pub fn provide_judgement(origin: OriginFor, + pub fn provide_judgement( + origin: OriginFor, #[pallet::compact] reg_index: RegistrarIndex, target: ::Source, judgement: Judgement>, @@ -725,10 +769,15 @@ pub mod pallet { match id.judgements.binary_search_by_key(®_index, |x| x.0) { Ok(position) => { if let Judgement::FeePaid(fee) = id.judgements[position].1 { - let _ = T::Currency::repatriate_reserved(&target, &sender, fee, BalanceStatus::Free); + let _ = T::Currency::repatriate_reserved( + &target, + &sender, + fee, + BalanceStatus::Free, + ); } id.judgements[position] = item - } + }, Err(position) => id .judgements .try_insert(position, item) @@ -740,10 +789,8 @@ pub mod pallet { >::insert(&target, id); Self::deposit_event(Event::JudgementGiven(target, reg_index)); - Ok(Some(T::WeightInfo::provide_judgement( - judgements as u32, - extra_fields as u32, - )).into()) + Ok(Some(T::WeightInfo::provide_judgement(judgements as u32, extra_fields as u32)) + .into()) } /// Remove an account's identity and sub-account information and slash the deposits. 
@@ -771,7 +818,8 @@ pub mod pallet { T::MaxAdditionalFields::get().into(), // X ))] pub fn kill_identity( - origin: OriginFor, target: ::Source + origin: OriginFor, + target: ::Source, ) -> DispatchResultWithPostInfo { T::ForceOrigin::ensure_origin(origin)?; @@ -790,10 +838,11 @@ pub mod pallet { Self::deposit_event(Event::IdentityKilled(target, deposit)); Ok(Some(T::WeightInfo::kill_identity( - id.judgements.len() as u32, // R - sub_ids.len() as u32, // S - id.info.additional.len() as u32 // X - )).into()) + id.judgements.len() as u32, // R + sub_ids.len() as u32, // S + id.info.additional.len() as u32, // X + )) + .into()) } /// Add the given account to the sender's subs. @@ -804,7 +853,11 @@ pub mod pallet { /// The dispatch origin for this call must be _Signed_ and the sender must have a registered /// sub identity of `sub`. #[pallet::weight(T::WeightInfo::add_sub(T::MaxSubAccounts::get()))] - pub fn add_sub(origin: OriginFor, sub: ::Source, data: Data) -> DispatchResult { + pub fn add_sub( + origin: OriginFor, + sub: ::Source, + data: Data, + ) -> DispatchResult { let sender = ensure_signed(origin)?; let sub = T::Lookup::lookup(sub)?; ensure!(IdentityOf::::contains_key(&sender), Error::::NoIdentity); @@ -814,7 +867,10 @@ pub mod pallet { SubsOf::::try_mutate(&sender, |(ref mut subs_deposit, ref mut sub_ids)| { // Ensure there is space and that the deposit is paid. - ensure!(sub_ids.len() < T::MaxSubAccounts::get() as usize, Error::::TooManySubAccounts); + ensure!( + sub_ids.len() < T::MaxSubAccounts::get() as usize, + Error::::TooManySubAccounts + ); let deposit = T::SubAccountDeposit::get(); T::Currency::reserve(&sender, deposit)?; @@ -833,7 +889,9 @@ pub mod pallet { /// sub identity of `sub`. 
#[pallet::weight(T::WeightInfo::rename_sub(T::MaxSubAccounts::get()))] pub fn rename_sub( - origin: OriginFor, sub: ::Source, data: Data + origin: OriginFor, + sub: ::Source, + data: Data, ) -> DispatchResult { let sender = ensure_signed(origin)?; let sub = T::Lookup::lookup(sub)?; @@ -851,7 +909,10 @@ pub mod pallet { /// The dispatch origin for this call must be _Signed_ and the sender must have a registered /// sub identity of `sub`. #[pallet::weight(T::WeightInfo::remove_sub(T::MaxSubAccounts::get()))] - pub fn remove_sub(origin: OriginFor, sub: ::Source) -> DispatchResult { + pub fn remove_sub( + origin: OriginFor, + sub: ::Source, + ) -> DispatchResult { let sender = ensure_signed(origin)?; ensure!(IdentityOf::::contains_key(&sender), Error::::NoIdentity); let sub = T::Lookup::lookup(sub)?; @@ -887,19 +948,20 @@ pub mod pallet { sub_ids.retain(|x| x != &sender); let deposit = T::SubAccountDeposit::get().min(*subs_deposit); *subs_deposit -= deposit; - let _ = T::Currency::repatriate_reserved(&sup, &sender, deposit, BalanceStatus::Free); + let _ = + T::Currency::repatriate_reserved(&sup, &sender, deposit, BalanceStatus::Free); Self::deposit_event(Event::SubIdentityRevoked(sender, sup.clone(), deposit)); }); Ok(()) } } - } impl Pallet { /// Get the subs of an account. 
pub fn subs(who: &T::AccountId) -> Vec<(T::AccountId, Data)> { - SubsOf::::get(who).1 + SubsOf::::get(who) + .1 .into_iter() .filter_map(|a| SuperOf::::get(&a).map(|x| (a, x.1))) .collect() diff --git a/frame/identity/src/tests.rs b/frame/identity/src/tests.rs index 7a8bb4fa6d92e..127b0a9ecb171 100644 --- a/frame/identity/src/tests.rs +++ b/frame/identity/src/tests.rs @@ -20,13 +20,13 @@ use super::*; use crate as pallet_identity; -use codec::{Encode, Decode}; -use sp_runtime::traits::BadOrigin; -use frame_support::{assert_ok, assert_noop, parameter_types, ord_parameter_types, BoundedVec}; +use codec::{Decode, Encode}; +use frame_support::{assert_noop, assert_ok, ord_parameter_types, parameter_types, BoundedVec}; +use frame_system::{EnsureOneOf, EnsureRoot, EnsureSignedBy}; use sp_core::H256; -use frame_system::{EnsureSignedBy, EnsureOneOf, EnsureRoot}; use sp_runtime::{ - testing::Header, traits::{BlakeTwo256, IdentityLookup}, + testing::Header, + traits::{BadOrigin, BlakeTwo256, IdentityLookup}, }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; @@ -100,16 +100,8 @@ ord_parameter_types! 
{ pub const One: u64 = 1; pub const Two: u64 = 2; } -type EnsureOneOrRoot = EnsureOneOf< - u64, - EnsureRoot, - EnsureSignedBy ->; -type EnsureTwoOrRoot = EnsureOneOf< - u64, - EnsureRoot, - EnsureSignedBy ->; +type EnsureOneOrRoot = EnsureOneOf, EnsureSignedBy>; +type EnsureTwoOrRoot = EnsureOneOf, EnsureSignedBy>; impl pallet_identity::Config for Test { type Event = Event; type Currency = Balances; @@ -128,15 +120,10 @@ impl pallet_identity::Config for Test { pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_balances::GenesisConfig:: { - balances: vec![ - (1, 10), - (2, 10), - (3, 10), - (10, 100), - (20, 100), - (30, 100), - ], - }.assimilate_storage(&mut t).unwrap(); + balances: vec![(1, 10), (2, 10), (3, 10), (10, 100), (20, 100), (30, 100)], + } + .assimilate_storage(&mut t) + .unwrap(); t.into() } @@ -144,7 +131,7 @@ fn ten() -> IdentityInfo { IdentityInfo { display: Data::Raw(b"ten".to_vec().try_into().unwrap()), legal: Data::Raw(b"The Right Ordinal Ten, Esq.".to_vec().try_into().unwrap()), - .. Default::default() + ..Default::default() } } @@ -152,7 +139,7 @@ fn twenty() -> IdentityInfo { IdentityInfo { display: Data::Raw(b"twenty".to_vec().try_into().unwrap()), legal: Data::Raw(b"The Right Ordinal Twenty, Esq.".to_vec().try_into().unwrap()), - .. 
Default::default() + ..Default::default() } } @@ -177,7 +164,10 @@ fn editing_subaccounts_should_work() { assert_eq!(Balances::free_balance(10), 70); // third sub account is too many - assert_noop!(Identity::add_sub(Origin::signed(10), 3, data(3)), Error::::TooManySubAccounts); + assert_noop!( + Identity::add_sub(Origin::signed(10), 3, data(3)), + Error::::TooManySubAccounts + ); // rename first sub account assert_ok!(Identity::rename_sub(Origin::signed(10), 1, data(11))); @@ -214,7 +204,10 @@ fn resolving_subaccount_ownership_works() { assert_eq!(Balances::free_balance(10), 80); assert_eq!(Balances::reserved_balance(10), 20); // 20 cannot claim 1 now - assert_noop!(Identity::add_sub(Origin::signed(20), 1, data(1)), Error::::AlreadyClaimed); + assert_noop!( + Identity::add_sub(Origin::signed(20), 1, data(1)), + Error::::AlreadyClaimed + ); // 1 wants to be with 20 so it quits from 10 assert_ok!(Identity::quit_sub(Origin::signed(1))); // 1 gets the 10 that 10 paid. @@ -243,9 +236,10 @@ fn adding_registrar_should_work() { assert_ok!(Identity::set_fee(Origin::signed(3), 0, 10)); let fields = IdentityFields(IdentityField::Display | IdentityField::Legal); assert_ok!(Identity::set_fields(Origin::signed(3), 0, fields)); - assert_eq!(Identity::registrars(), vec![ - Some(RegistrarInfo { account: 3, fee: 10, fields }) - ]); + assert_eq!( + Identity::registrars(), + vec![Some(RegistrarInfo { account: 3, fee: 10, fields })] + ); }); } @@ -370,7 +364,10 @@ fn setting_subaccounts_should_work() { assert_eq!(Identity::super_of(40), None); subs.push((20, Data::Raw(vec![40; 1].try_into().unwrap()))); - assert_noop!(Identity::set_subs(Origin::signed(10), subs.clone()), Error::::TooManySubAccounts); + assert_noop!( + Identity::set_subs(Origin::signed(10), subs.clone()), + Error::::TooManySubAccounts + ); }); } @@ -378,7 +375,10 @@ fn setting_subaccounts_should_work() { fn clearing_account_should_remove_subaccounts_and_refund() { new_test_ext().execute_with(|| { 
assert_ok!(Identity::set_identity(Origin::signed(10), ten())); - assert_ok!(Identity::set_subs(Origin::signed(10), vec![(20, Data::Raw(vec![40; 1].try_into().unwrap()))])); + assert_ok!(Identity::set_subs( + Origin::signed(10), + vec![(20, Data::Raw(vec![40; 1].try_into().unwrap()))] + )); assert_ok!(Identity::clear_identity(Origin::signed(10))); assert_eq!(Balances::free_balance(10), 100); assert!(Identity::super_of(20).is_none()); @@ -389,7 +389,10 @@ fn clearing_account_should_remove_subaccounts_and_refund() { fn killing_account_should_remove_subaccounts_and_not_refund() { new_test_ext().execute_with(|| { assert_ok!(Identity::set_identity(Origin::signed(10), ten())); - assert_ok!(Identity::set_subs(Origin::signed(10), vec![(20, Data::Raw(vec![40; 1].try_into().unwrap()))])); + assert_ok!(Identity::set_subs( + Origin::signed(10), + vec![(20, Data::Raw(vec![40; 1].try_into().unwrap()))] + )); assert_ok!(Identity::kill_identity(Origin::signed(2), 10)); assert_eq!(Balances::free_balance(10), 80); assert!(Identity::super_of(20).is_none()); @@ -409,7 +412,10 @@ fn cancelling_requested_judgement_should_work() { assert_noop!(Identity::cancel_request(Origin::signed(10), 0), Error::::NotFound); assert_ok!(Identity::provide_judgement(Origin::signed(3), 0, 10, Judgement::Reasonable)); - assert_noop!(Identity::cancel_request(Origin::signed(10), 0), Error::::JudgementGiven); + assert_noop!( + Identity::cancel_request(Origin::signed(10), 0), + Error::::JudgementGiven + ); }); } @@ -419,19 +425,28 @@ fn requesting_judgement_should_work() { assert_ok!(Identity::add_registrar(Origin::signed(1), 3)); assert_ok!(Identity::set_fee(Origin::signed(3), 0, 10)); assert_ok!(Identity::set_identity(Origin::signed(10), ten())); - assert_noop!(Identity::request_judgement(Origin::signed(10), 0, 9), Error::::FeeChanged); + assert_noop!( + Identity::request_judgement(Origin::signed(10), 0, 9), + Error::::FeeChanged + ); assert_ok!(Identity::request_judgement(Origin::signed(10), 0, 10)); // 10 
for the judgement request, 10 for the identity. assert_eq!(Balances::free_balance(10), 80); // Re-requesting won't work as we already paid. - assert_noop!(Identity::request_judgement(Origin::signed(10), 0, 10), Error::::StickyJudgement); + assert_noop!( + Identity::request_judgement(Origin::signed(10), 0, 10), + Error::::StickyJudgement + ); assert_ok!(Identity::provide_judgement(Origin::signed(3), 0, 10, Judgement::Erroneous)); // Registrar got their payment now. assert_eq!(Balances::free_balance(3), 20); // Re-requesting still won't work as it's erroneous. - assert_noop!(Identity::request_judgement(Origin::signed(10), 0, 10), Error::::StickyJudgement); + assert_noop!( + Identity::request_judgement(Origin::signed(10), 0, 10), + Error::::StickyJudgement + ); // Requesting from a second registrar still works. assert_ok!(Identity::add_registrar(Origin::signed(1), 4)); @@ -448,14 +463,24 @@ fn field_deposit_should_work() { new_test_ext().execute_with(|| { assert_ok!(Identity::add_registrar(Origin::signed(1), 3)); assert_ok!(Identity::set_fee(Origin::signed(3), 0, 10)); - assert_ok!(Identity::set_identity(Origin::signed(10), IdentityInfo { - additional: vec![ - (Data::Raw(b"number".to_vec().try_into().unwrap()), Data::Raw(10u32.encode().try_into().unwrap())), - (Data::Raw(b"text".to_vec().try_into().unwrap()), Data::Raw(b"10".to_vec().try_into().unwrap())), - ] - .try_into() - .unwrap(), .. 
Default::default() - })); + assert_ok!(Identity::set_identity( + Origin::signed(10), + IdentityInfo { + additional: vec![ + ( + Data::Raw(b"number".to_vec().try_into().unwrap()), + Data::Raw(10u32.encode().try_into().unwrap()) + ), + ( + Data::Raw(b"text".to_vec().try_into().unwrap()), + Data::Raw(b"10".to_vec().try_into().unwrap()) + ), + ] + .try_into() + .unwrap(), + ..Default::default() + } + )); assert_eq!(Balances::free_balance(10), 70); }); } @@ -465,7 +490,10 @@ fn setting_account_id_should_work() { new_test_ext().execute_with(|| { assert_ok!(Identity::add_registrar(Origin::signed(1), 3)); // account 4 cannot change the first registrar's identity since it's owned by 3. - assert_noop!(Identity::set_account_id(Origin::signed(4), 0, 3), Error::::InvalidIndex); + assert_noop!( + Identity::set_account_id(Origin::signed(4), 0, 3), + Error::::InvalidIndex + ); // account 3 can, because that's the registrar's current account. assert_ok!(Identity::set_account_id(Origin::signed(3), 0, 4)); // account 4 can now, because that's their new ID. diff --git a/frame/identity/src/types.rs b/frame/identity/src/types.rs index 49b29423ec170..5ef4eef464715 100644 --- a/frame/identity/src/types.rs +++ b/frame/identity/src/types.rs @@ -15,20 +15,19 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use codec::{Encode, Decode, MaxEncodedLen}; -use scale_info::{build::{Fields, Variants}, Path, Type, TypeInfo}; +use super::*; +use codec::{Decode, Encode, MaxEncodedLen}; use enumflags2::BitFlags; use frame_support::{ - traits::{ConstU32, Get}, - BoundedVec, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound, + traits::{ConstU32, Get}, + BoundedVec, CloneNoBound, PartialEqNoBound, RuntimeDebugNoBound, }; -use sp_std::prelude::*; -use sp_std::{fmt::Debug, iter::once, ops::Add}; -use sp_runtime::{ - traits::Zero, - RuntimeDebug, +use scale_info::{ + build::{Fields, Variants}, + Path, Type, TypeInfo, }; -use super::*; +use sp_runtime::{traits::Zero, RuntimeDebug}; +use sp_std::{fmt::Debug, iter::once, ops::Add, prelude::*}; /// Either underlying data blob if it is at most 32 bytes, or a hash of it. If the data is greater /// than 32-bytes then it will be truncated when encoding. @@ -59,13 +58,13 @@ impl Decode for Data { let b = input.read_byte()?; Ok(match b { 0 => Data::None, - n @ 1 ..= 33 => { + n @ 1..=33 => { let mut r: BoundedVec<_, _> = vec![0u8; n as usize - 1] .try_into() .expect("bound checked in match arm condition; qed"); input.read(&mut r[..])?; Data::Raw(r) - } + }, 34 => Data::BlakeTwo256(<[u8; 32]>::decode(input)?), 35 => Data::Sha256(<[u8; 32]>::decode(input)?), 36 => Data::Keccak256(<[u8; 32]>::decode(input)?), @@ -84,7 +83,7 @@ impl Encode for Data { let mut r = vec![l as u8 + 1; l + 1]; r[1..].copy_from_slice(&x[..l as usize]); r - } + }, Data::BlakeTwo256(ref h) => once(34u8).chain(h.iter().cloned()).collect(), Data::Sha256(ref h) => once(35u8).chain(h.iter().cloned()).collect(), Data::Keccak256(ref h) => once(36u8).chain(h.iter().cloned()).collect(), @@ -111,36 +110,61 @@ impl TypeInfo for Data { type Identity = Self; fn type_info() -> Type { - let variants = Variants::new() - .variant("None", |v| v.index(0)); + let variants = Variants::new().variant("None", |v| v.index(0)); // create a variant for all sizes of Raw data from 0-32 - let 
variants = data_raw_variants!(variants, - (1, 0), (2, 1), (3, 2), (4, 3), (5, 4), (6, 5), (7, 6), (8, 7), - (9, 8), (10, 9), (11, 10), (12, 11), (13, 12), (14, 13), (15, 14), (16, 15), - (17, 16), (18, 17), (19, 18), (20, 19), (21, 20), (22, 21), (23, 22), (24, 23), - (25, 24), (26, 25), (27, 26), (28, 27), (29, 28), (30, 29), (31, 30), (32, 31), + let variants = data_raw_variants!( + variants, + (1, 0), + (2, 1), + (3, 2), + (4, 3), + (5, 4), + (6, 5), + (7, 6), + (8, 7), + (9, 8), + (10, 9), + (11, 10), + (12, 11), + (13, 12), + (14, 13), + (15, 14), + (16, 15), + (17, 16), + (18, 17), + (19, 18), + (20, 19), + (21, 20), + (22, 21), + (23, 22), + (24, 23), + (25, 24), + (26, 25), + (27, 26), + (28, 27), + (29, 28), + (30, 29), + (31, 30), + (32, 31), (33, 32) ); - let variants = - variants - .variant("BlakeTwo256", |v| v - .index(34) - .fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>()))) - .variant("Sha256", |v| v - .index(35) - .fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>()))) - .variant("Keccak256", |v| v - .index(36) - .fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>()))) - .variant("ShaThree256", |v| v - .index(37) - .fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>()))); - - Type::builder() - .path(Path::new("Data", module_path!())) - .variant(variants) + let variants = variants + .variant("BlakeTwo256", |v| { + v.index(34).fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>())) + }) + .variant("Sha256", |v| { + v.index(35).fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>())) + }) + .variant("Keccak256", |v| { + v.index(36).fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>())) + }) + .variant("ShaThree256", |v| { + v.index(37).fields(Fields::unnamed().field(|f| f.ty::<[u8; 32]>())) + }); + + Type::builder().path(Path::new("Data", module_path!())).variant(variants) } } @@ -158,9 +182,8 @@ pub type RegistrarIndex = u32; /// NOTE: Registrars may pay little attention to some fields. 
Registrars may want to make clear /// which fields their attestation is relevant for by off-chain means. #[derive(Copy, Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] -pub enum Judgement< - Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq -> { +pub enum Judgement +{ /// The default value; no opinion is held. Unknown, /// No judgement is yet in place, but a deposit is reserved as payment for providing one. @@ -182,9 +205,9 @@ pub enum Judgement< Erroneous, } -impl< - Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq -> Judgement { +impl + Judgement +{ /// Returns `true` if this judgement is indicative of a deposit being currently held. This means /// it should not be cleared or replaced except by an operation which utilizes the deposit. pub(crate) fn has_deposit(&self) -> bool { @@ -210,14 +233,14 @@ impl< #[repr(u64)] #[derive(Encode, Decode, Clone, Copy, PartialEq, Eq, BitFlags, RuntimeDebug, TypeInfo)] pub enum IdentityField { - Display = 0b0000000000000000000000000000000000000000000000000000000000000001, - Legal = 0b0000000000000000000000000000000000000000000000000000000000000010, - Web = 0b0000000000000000000000000000000000000000000000000000000000000100, - Riot = 0b0000000000000000000000000000000000000000000000000000000000001000, - Email = 0b0000000000000000000000000000000000000000000000000000000000010000, + Display = 0b0000000000000000000000000000000000000000000000000000000000000001, + Legal = 0b0000000000000000000000000000000000000000000000000000000000000010, + Web = 0b0000000000000000000000000000000000000000000000000000000000000100, + Riot = 0b0000000000000000000000000000000000000000000000000000000000001000, + Email = 0b0000000000000000000000000000000000000000000000000000000000010000, PgpFingerprint = 0b0000000000000000000000000000000000000000000000000000000000100000, - Image = 0b0000000000000000000000000000000000000000000000000000000001000000, - Twitter = 
0b0000000000000000000000000000000000000000000000000000000010000000, + Image = 0b0000000000000000000000000000000000000000000000000000000001000000, + Twitter = 0b0000000000000000000000000000000000000000000000000000000010000000, } impl MaxEncodedLen for IdentityField { @@ -252,14 +275,10 @@ impl TypeInfo for IdentityFields { type Identity = Self; fn type_info() -> Type { - Type::builder() - .path(Path::new("IdentityFields", module_path!())) - .composite( - Fields::unnamed() - .field(|f| f.ty::() - .type_name("BitFlags") - ) - ) + Type::builder().path(Path::new("IdentityFields", module_path!())).composite( + Fields::unnamed() + .field(|f| f.ty::().type_name("BitFlags")), + ) } } @@ -268,8 +287,7 @@ impl TypeInfo for IdentityFields { /// NOTE: This should be stored at the end of the storage item to facilitate the addition of extra /// fields in a backwards compatible way through a specialized `Decode` impl. #[derive( - CloneNoBound, Encode, Decode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound, - TypeInfo, + CloneNoBound, Encode, Decode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound, TypeInfo, )] #[codec(mel_bound(FieldLimit: Get))] #[cfg_attr(test, derive(frame_support::DefaultNoBound))] @@ -324,7 +342,9 @@ pub struct IdentityInfo> { /// /// NOTE: This is stored separately primarily to facilitate the addition of extra fields in a /// backwards compatible way through a specialized `Decode` impl. 
-#[derive(CloneNoBound, Encode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound, TypeInfo)] +#[derive( + CloneNoBound, Encode, Eq, MaxEncodedLen, PartialEqNoBound, RuntimeDebugNoBound, TypeInfo, +)] #[codec(mel_bound( Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq + Zero + Add, MaxJudgements: Get, @@ -347,23 +367,27 @@ pub struct Registration< pub info: IdentityInfo, } -impl < - Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq + Zero + Add, - MaxJudgements: Get, - MaxAdditionalFields: Get, -> Registration { +impl< + Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq + Zero + Add, + MaxJudgements: Get, + MaxAdditionalFields: Get, + > Registration +{ pub(crate) fn total_deposit(&self) -> Balance { - self.deposit + self.judgements.iter() - .map(|(_, ref j)| if let Judgement::FeePaid(fee) = j { *fee } else { Zero::zero() }) - .fold(Zero::zero(), |a, i| a + i) + self.deposit + + self.judgements + .iter() + .map(|(_, ref j)| if let Judgement::FeePaid(fee) = j { *fee } else { Zero::zero() }) + .fold(Zero::zero(), |a, i| a + i) } } impl< - Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq, - MaxJudgements: Get, - MaxAdditionalFields: Get, -> Decode for Registration { + Balance: Encode + Decode + MaxEncodedLen + Copy + Clone + Debug + Eq + PartialEq, + MaxJudgements: Get, + MaxAdditionalFields: Get, + > Decode for Registration +{ fn decode(input: &mut I) -> sp_std::result::Result { let (judgements, deposit, info) = Decode::decode(&mut AppendZerosInput::new(input))?; Ok(Self { judgements, deposit, info }) @@ -374,7 +398,7 @@ impl< #[derive(Clone, Encode, Decode, Eq, PartialEq, RuntimeDebug, MaxEncodedLen, TypeInfo)] pub struct RegistrarInfo< Balance: Encode + Decode + Clone + Debug + Eq + PartialEq, - AccountId: Encode + Decode + Clone + Debug + Eq + PartialEq + AccountId: Encode + Decode + Clone + Debug + Eq + PartialEq, > { /// The account 
of the registrar. pub account: AccountId, @@ -399,15 +423,14 @@ mod tests { let type_info = registry.resolve(type_id.id()).unwrap(); let check_type_info = |data: &Data| { - let variant_name = - match data { - Data::None => "None".to_string(), - Data::BlakeTwo256(_) => "BlakeTwo256".to_string(), - Data::Sha256(_) => "Sha256".to_string(), - Data::Keccak256(_) => "Keccak256".to_string(), - Data::ShaThree256(_) => "ShaThree256".to_string(), - Data::Raw(bytes) => format!("Raw{}", bytes.len()), - }; + let variant_name = match data { + Data::None => "None".to_string(), + Data::BlakeTwo256(_) => "BlakeTwo256".to_string(), + Data::Sha256(_) => "Sha256".to_string(), + Data::Keccak256(_) => "Keccak256".to_string(), + Data::ShaThree256(_) => "ShaThree256".to_string(), + Data::Raw(bytes) => format!("Raw{}", bytes.len()), + }; if let scale_info::TypeDef::Variant(variant) = type_info.type_def() { let variant = variant .variants() @@ -421,12 +444,13 @@ mod tests { .fields() .first() .and_then(|f| registry.resolve(f.ty().id())) - .map(|ty| + .map(|ty| { if let scale_info::TypeDef::Array(arr) = ty.type_def() { arr.len() } else { panic!("Should be an array type") - }) + } + }) .unwrap_or(0); let encoded = data.encode(); @@ -437,7 +461,7 @@ mod tests { }; }; - let mut data = vec! 
[ + let mut data = vec![ Data::None, Data::BlakeTwo256(Default::default()), Data::Sha256(Default::default()), @@ -455,4 +479,3 @@ mod tests { } } } - diff --git a/frame/identity/src/weights.rs b/frame/identity/src/weights.rs index f283b2869bdfc..81489c9749359 100644 --- a/frame/identity/src/weights.rs +++ b/frame/identity/src/weights.rs @@ -35,44 +35,46 @@ // --output=./frame/identity/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_identity. pub trait WeightInfo { - fn add_registrar(r: u32, ) -> Weight; - fn set_identity(r: u32, x: u32, ) -> Weight; - fn set_subs_new(s: u32, ) -> Weight; - fn set_subs_old(p: u32, ) -> Weight; - fn clear_identity(r: u32, s: u32, x: u32, ) -> Weight; - fn request_judgement(r: u32, x: u32, ) -> Weight; - fn cancel_request(r: u32, x: u32, ) -> Weight; - fn set_fee(r: u32, ) -> Weight; - fn set_account_id(r: u32, ) -> Weight; - fn set_fields(r: u32, ) -> Weight; - fn provide_judgement(r: u32, x: u32, ) -> Weight; - fn kill_identity(r: u32, s: u32, x: u32, ) -> Weight; - fn add_sub(s: u32, ) -> Weight; - fn rename_sub(s: u32, ) -> Weight; - fn remove_sub(s: u32, ) -> Weight; - fn quit_sub(s: u32, ) -> Weight; + fn add_registrar(r: u32) -> Weight; + fn set_identity(r: u32, x: u32) -> Weight; + fn set_subs_new(s: u32) -> Weight; + fn set_subs_old(p: u32) -> Weight; + fn clear_identity(r: u32, s: u32, x: u32) -> Weight; + fn request_judgement(r: u32, x: u32) -> Weight; + fn cancel_request(r: u32, x: u32) -> Weight; + fn set_fee(r: u32) -> Weight; + fn set_account_id(r: u32) -> Weight; + fn set_fields(r: u32) -> Weight; + fn provide_judgement(r: u32, x: u32) -> Weight; + fn kill_identity(r: u32, s: u32, x: u32) -> Weight; + fn 
add_sub(s: u32) -> Weight; + fn rename_sub(s: u32) -> Weight; + fn remove_sub(s: u32) -> Weight; + fn quit_sub(s: u32) -> Weight; } /// Weights for pallet_identity using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn add_registrar(r: u32, ) -> Weight { + fn add_registrar(r: u32) -> Weight { (21_825_000 as Weight) // Standard Error: 3_000 .saturating_add((288_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn set_identity(r: u32, x: u32, ) -> Weight { + fn set_identity(r: u32, x: u32) -> Weight { (53_354_000 as Weight) // Standard Error: 15_000 .saturating_add((274_000 as Weight).saturating_mul(r as Weight)) @@ -81,7 +83,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn set_subs_new(s: u32, ) -> Weight { + fn set_subs_new(s: u32) -> Weight { (42_017_000 as Weight) // Standard Error: 2_000 .saturating_add((6_457_000 as Weight).saturating_mul(s as Weight)) @@ -90,7 +92,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn set_subs_old(p: u32, ) -> Weight { + fn set_subs_old(p: u32) -> Weight { (41_605_000 as Weight) // Standard Error: 0 .saturating_add((2_157_000 as Weight).saturating_mul(p as Weight)) @@ -98,7 +100,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(1 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(p as Weight))) } - fn clear_identity(r: u32, s: u32, x: u32, ) -> Weight { + fn clear_identity(r: u32, s: u32, x: u32) -> Weight { (51_811_000 as Weight) // Standard Error: 5_000 .saturating_add((202_000 as Weight).saturating_mul(r as Weight)) @@ -110,7 
+112,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(2 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn request_judgement(r: u32, x: u32, ) -> Weight { + fn request_judgement(r: u32, x: u32) -> Weight { (54_657_000 as Weight) // Standard Error: 5_000 .saturating_add((381_000 as Weight).saturating_mul(r as Weight)) @@ -119,7 +121,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn cancel_request(r: u32, x: u32, ) -> Weight { + fn cancel_request(r: u32, x: u32) -> Weight { (50_895_000 as Weight) // Standard Error: 6_000 .saturating_add((267_000 as Weight).saturating_mul(r as Weight)) @@ -128,28 +130,28 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn set_fee(r: u32, ) -> Weight { + fn set_fee(r: u32) -> Weight { (8_036_000 as Weight) // Standard Error: 2_000 .saturating_add((281_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn set_account_id(r: u32, ) -> Weight { + fn set_account_id(r: u32) -> Weight { (9_001_000 as Weight) // Standard Error: 2_000 .saturating_add((288_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn set_fields(r: u32, ) -> Weight { + fn set_fields(r: u32) -> Weight { (8_039_000 as Weight) // Standard Error: 2_000 .saturating_add((286_000 as Weight).saturating_mul(r as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn provide_judgement(r: u32, x: u32, ) -> Weight { + fn provide_judgement(r: u32, x: u32) -> Weight { (35_746_000 as Weight) // 
Standard Error: 4_000 .saturating_add((346_000 as Weight).saturating_mul(r as Weight)) @@ -158,7 +160,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn kill_identity(r: u32, s: u32, x: u32, ) -> Weight { + fn kill_identity(r: u32, s: u32, x: u32) -> Weight { (65_304_000 as Weight) // Standard Error: 4_000 .saturating_add((149_000 as Weight).saturating_mul(r as Weight)) @@ -170,28 +172,28 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(3 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn add_sub(s: u32, ) -> Weight { + fn add_sub(s: u32) -> Weight { (55_491_000 as Weight) // Standard Error: 0 .saturating_add((220_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn rename_sub(s: u32, ) -> Weight { + fn rename_sub(s: u32) -> Weight { (17_564_000 as Weight) // Standard Error: 0 .saturating_add((84_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn remove_sub(s: u32, ) -> Weight { + fn remove_sub(s: u32) -> Weight { (56_535_000 as Weight) // Standard Error: 0 .saturating_add((209_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn quit_sub(s: u32, ) -> Weight { + fn quit_sub(s: u32) -> Weight { (35_369_000 as Weight) // Standard Error: 0 .saturating_add((200_000 as Weight).saturating_mul(s as Weight)) @@ -202,14 +204,14 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn add_registrar(r: u32, ) -> Weight { + fn add_registrar(r: u32) -> Weight { (21_825_000 as Weight) // Standard Error: 
3_000 .saturating_add((288_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn set_identity(r: u32, x: u32, ) -> Weight { + fn set_identity(r: u32, x: u32) -> Weight { (53_354_000 as Weight) // Standard Error: 15_000 .saturating_add((274_000 as Weight).saturating_mul(r as Weight)) @@ -218,7 +220,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn set_subs_new(s: u32, ) -> Weight { + fn set_subs_new(s: u32) -> Weight { (42_017_000 as Weight) // Standard Error: 2_000 .saturating_add((6_457_000 as Weight).saturating_mul(s as Weight)) @@ -227,7 +229,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(1 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn set_subs_old(p: u32, ) -> Weight { + fn set_subs_old(p: u32) -> Weight { (41_605_000 as Weight) // Standard Error: 0 .saturating_add((2_157_000 as Weight).saturating_mul(p as Weight)) @@ -235,7 +237,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(1 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(p as Weight))) } - fn clear_identity(r: u32, s: u32, x: u32, ) -> Weight { + fn clear_identity(r: u32, s: u32, x: u32) -> Weight { (51_811_000 as Weight) // Standard Error: 5_000 .saturating_add((202_000 as Weight).saturating_mul(r as Weight)) @@ -247,7 +249,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(2 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn request_judgement(r: u32, x: u32, ) -> Weight { + fn request_judgement(r: u32, x: u32) -> Weight { (54_657_000 as Weight) // Standard Error: 5_000 .saturating_add((381_000 as Weight).saturating_mul(r as Weight)) @@ -256,7 +258,7 @@ impl WeightInfo 
for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn cancel_request(r: u32, x: u32, ) -> Weight { + fn cancel_request(r: u32, x: u32) -> Weight { (50_895_000 as Weight) // Standard Error: 6_000 .saturating_add((267_000 as Weight).saturating_mul(r as Weight)) @@ -265,28 +267,28 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn set_fee(r: u32, ) -> Weight { + fn set_fee(r: u32) -> Weight { (8_036_000 as Weight) // Standard Error: 2_000 .saturating_add((281_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn set_account_id(r: u32, ) -> Weight { + fn set_account_id(r: u32) -> Weight { (9_001_000 as Weight) // Standard Error: 2_000 .saturating_add((288_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn set_fields(r: u32, ) -> Weight { + fn set_fields(r: u32) -> Weight { (8_039_000 as Weight) // Standard Error: 2_000 .saturating_add((286_000 as Weight).saturating_mul(r as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn provide_judgement(r: u32, x: u32, ) -> Weight { + fn provide_judgement(r: u32, x: u32) -> Weight { (35_746_000 as Weight) // Standard Error: 4_000 .saturating_add((346_000 as Weight).saturating_mul(r as Weight)) @@ -295,7 +297,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn kill_identity(r: u32, s: u32, x: u32, ) -> Weight { + fn kill_identity(r: u32, s: u32, x: u32) -> Weight { (65_304_000 as Weight) // Standard Error: 4_000 .saturating_add((149_000 as 
Weight).saturating_mul(r as Weight)) @@ -307,28 +309,28 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(3 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn add_sub(s: u32, ) -> Weight { + fn add_sub(s: u32) -> Weight { (55_491_000 as Weight) // Standard Error: 0 .saturating_add((220_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn rename_sub(s: u32, ) -> Weight { + fn rename_sub(s: u32) -> Weight { (17_564_000 as Weight) // Standard Error: 0 .saturating_add((84_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn remove_sub(s: u32, ) -> Weight { + fn remove_sub(s: u32) -> Weight { (56_535_000 as Weight) // Standard Error: 0 .saturating_add((209_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn quit_sub(s: u32, ) -> Weight { + fn quit_sub(s: u32) -> Weight { (35_369_000 as Weight) // Standard Error: 0 .saturating_add((200_000 as Weight).saturating_mul(s as Weight)) diff --git a/frame/im-online/src/benchmarking.rs b/frame/im-online/src/benchmarking.rs index 5ab4d16c7fe08..ec53ec534850c 100644 --- a/frame/im-online/src/benchmarking.rs +++ b/frame/im-online/src/benchmarking.rs @@ -21,22 +21,27 @@ use super::*; -use frame_system::RawOrigin; use frame_benchmarking::{benchmarks, impl_benchmark_test_suite}; -use sp_core::OpaquePeerId; -use sp_core::offchain::OpaqueMultiaddr; -use sp_runtime::traits::{ValidateUnsigned, Zero}; -use sp_runtime::transaction_validity::TransactionSource; use frame_support::traits::UnfilteredDispatchable; +use frame_system::RawOrigin; +use sp_core::{offchain::OpaqueMultiaddr, OpaquePeerId}; +use sp_runtime::{ + 
traits::{ValidateUnsigned, Zero}, + transaction_validity::TransactionSource, +}; use crate::Pallet as ImOnline; const MAX_KEYS: u32 = 1000; const MAX_EXTERNAL_ADDRESSES: u32 = 100; -pub fn create_heartbeat(k: u32, e: u32) -> - Result<(crate::Heartbeat, ::Signature), &'static str> -{ +pub fn create_heartbeat( + k: u32, + e: u32, +) -> Result< + (crate::Heartbeat, ::Signature), + &'static str, +> { let mut keys = Vec::new(); for _ in 0..k { keys.push(T::AuthorityId::generate_pair(None)); @@ -51,12 +56,12 @@ pub fn create_heartbeat(k: u32, e: u32) -> block_number: T::BlockNumber::zero(), network_state, session_index: 0, - authority_index: k-1, + authority_index: k - 1, validators_len: keys.len() as u32, }; let encoded_heartbeat = input_heartbeat.encode(); - let authority_id = keys.get((k-1) as usize).ok_or("out of range")?; + let authority_id = keys.get((k - 1) as usize).ok_or("out of range")?; let signature = authority_id.sign(&encoded_heartbeat).ok_or("couldn't make signature")?; Ok((input_heartbeat, signature)) @@ -91,9 +96,4 @@ benchmarks! { } } - -impl_benchmark_test_suite!( - ImOnline, - crate::mock::new_test_ext(), - crate::mock::Runtime, -); +impl_benchmark_test_suite!(ImOnline, crate::mock::new_test_ext(), crate::mock::Runtime,); diff --git a/frame/im-online/src/lib.rs b/frame/im-online/src/lib.rs index 37c1a89cf7f9f..80462b85b00f3 100644 --- a/frame/im-online/src/lib.rs +++ b/frame/im-online/src/lib.rs @@ -69,32 +69,31 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] +mod benchmarking; mod mock; mod tests; -mod benchmarking; pub mod weights; -use sp_application_crypto::RuntimeAppPublic; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; +use frame_support::traits::{ + EstimateNextSessionRotation, OneSessionHandler, ValidatorSet, ValidatorSetWithIdentification, +}; +use frame_system::offchain::{SendTransactionTypes, SubmitTransaction}; +pub use pallet::*; use scale_info::TypeInfo; +use sp_application_crypto::RuntimeAppPublic; use sp_core::offchain::OpaqueNetworkState; -use sp_std::prelude::*; -use sp_std::convert::TryInto; use sp_runtime::{ offchain::storage::{MutateStorageError, StorageRetrievalError, StorageValueRef}, traits::{AtLeast32BitUnsigned, Convert, Saturating, TrailingZeroInput}, - Perbill, Permill, PerThing, RuntimeDebug, SaturatedConversion, + PerThing, Perbill, Permill, RuntimeDebug, SaturatedConversion, }; use sp_staking::{ + offence::{Kind, Offence, ReportOffence}, SessionIndex, - offence::{ReportOffence, Offence, Kind}, -}; -use frame_support::traits::{ - EstimateNextSessionRotation, OneSessionHandler, ValidatorSet, ValidatorSetWithIdentification, }; -use frame_system::offchain::{SendTransactionTypes, SubmitTransaction}; +use sp_std::{convert::TryInto, prelude::*}; pub use weights::WeightInfo; -pub use pallet::*; pub mod sr25519 { mod app_sr25519 { @@ -116,7 +115,7 @@ pub mod sr25519 { pub mod ed25519 { mod app_ed25519 { - use sp_application_crypto::{app_crypto, key_types::IM_ONLINE, ed25519}; + use sp_application_crypto::{app_crypto, ed25519, key_types::IM_ONLINE}; app_crypto!(ed25519, IM_ONLINE); } @@ -186,8 +185,7 @@ enum OffchainErr { impl sp_std::fmt::Debug for OffchainErr { fn fmt(&self, fmt: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result { match *self { - OffchainErr::TooEarly => - write!(fmt, "Too early to send heartbeat."), + OffchainErr::TooEarly => write!(fmt, "Too early to send heartbeat."), OffchainErr::WaitingForInclusion(ref block) => 
write!(fmt, "Heartbeat already sent at {:?}. Waiting for inclusion.", block), OffchainErr::AlreadyOnline(auth_idx) => @@ -205,7 +203,8 @@ pub type AuthIndex = u32; /// Heartbeat which is sent/received. #[derive(Encode, Decode, Clone, PartialEq, Eq, RuntimeDebug, TypeInfo)] pub struct Heartbeat - where BlockNumber: PartialEq + Eq + Decode + Encode, +where + BlockNumber: PartialEq + Eq + Decode + Encode, { /// Block number at the time heartbeat is created.. pub block_number: BlockNumber, @@ -220,31 +219,32 @@ pub struct Heartbeat } /// A type for representing the validator id in a session. -pub type ValidatorId = < - ::ValidatorSet as ValidatorSet<::AccountId> ->::ValidatorId; +pub type ValidatorId = <::ValidatorSet as ValidatorSet< + ::AccountId, +>>::ValidatorId; /// A tuple of (ValidatorId, Identification) where `Identification` is the full identification of `ValidatorId`. pub type IdentificationTuple = ( ValidatorId, - <::ValidatorSet as - ValidatorSetWithIdentification<::AccountId>>::Identification, + <::ValidatorSet as ValidatorSetWithIdentification< + ::AccountId, + >>::Identification, ); type OffchainResult = Result::BlockNumber>>; #[frame_support::pallet] pub mod pallet { - use frame_support::{pallet_prelude::*, traits::Get}; - use frame_system::{pallet_prelude::*, ensure_none}; + use super::*; + use frame_support::{pallet_prelude::*, traits::Get, Parameter}; + use frame_system::{ensure_none, pallet_prelude::*}; use sp_runtime::{ - traits::{Member, MaybeSerializeDeserialize}, + traits::{MaybeSerializeDeserialize, Member}, transaction_validity::{ - InvalidTransaction, TransactionPriority, TransactionSource, TransactionValidity, ValidTransaction, + InvalidTransaction, TransactionPriority, TransactionSource, TransactionValidity, + ValidTransaction, }, }; - use frame_support::Parameter; - use super::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -253,7 +253,12 @@ pub mod pallet { #[pallet::config] pub trait Config: 
SendTransactionTypes> + frame_system::Config { /// The identifier type for an authority. - type AuthorityId: Member + Parameter + RuntimeAppPublic + Default + Ord + MaybeSerializeDeserialize; + type AuthorityId: Member + + Parameter + + RuntimeAppPublic + + Default + + Ord + + MaybeSerializeDeserialize; /// The overarching event type. type Event: From> + IsType<::Event>; @@ -331,14 +336,8 @@ pub mod pallet { /// `offchain::OpaqueNetworkState`. #[pallet::storage] #[pallet::getter(fn received_heartbeats)] - pub(crate) type ReceivedHeartbeats = StorageDoubleMap< - _, - Twox64Concat, - SessionIndex, - Twox64Concat, - AuthIndex, - Vec, - >; + pub(crate) type ReceivedHeartbeats = + StorageDoubleMap<_, Twox64Concat, SessionIndex, Twox64Concat, AuthIndex, Vec>; /// For each session index, we keep a mapping of `ValidatorId` to the /// number of blocks authored by the given authority. @@ -362,9 +361,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - GenesisConfig { - keys: Default::default(), - } + GenesisConfig { keys: Default::default() } } } @@ -402,10 +399,8 @@ pub mod pallet { ensure_none(origin)?; let current_session = T::ValidatorSet::session_index(); - let exists = ReceivedHeartbeats::::contains_key( - ¤t_session, - &heartbeat.authority_index - ); + let exists = + ReceivedHeartbeats::::contains_key(¤t_session, &heartbeat.authority_index); let keys = Keys::::get(); let public = keys.get(heartbeat.authority_index as usize); if let (false, Some(public)) = (exists, public) { @@ -415,7 +410,7 @@ pub mod pallet { ReceivedHeartbeats::::insert( ¤t_session, &heartbeat.authority_index, - &network_state + &network_state, ); Ok(()) @@ -463,19 +458,19 @@ pub mod pallet { if let Call::heartbeat { heartbeat, _signature: signature } = call { if >::is_online(heartbeat.authority_index) { // we already received a heartbeat for this authority - return InvalidTransaction::Stale.into(); + return InvalidTransaction::Stale.into() } // 
check if session index from heartbeat is recent let current_session = T::ValidatorSet::session_index(); if heartbeat.session_index != current_session { - return InvalidTransaction::Stale.into(); + return InvalidTransaction::Stale.into() } // verify that the incoming (unverified) pubkey is actually an authority id let keys = Keys::::get(); if keys.len() as u32 != heartbeat.validators_len { - return InvalidTransaction::Custom(INVALID_VALIDATORS_LEN).into(); + return InvalidTransaction::Custom(INVALID_VALIDATORS_LEN).into() } let authority_id = match keys.get(heartbeat.authority_index as usize) { Some(id) => id, @@ -488,7 +483,7 @@ pub mod pallet { }); if !signature_valid { - return InvalidTransaction::BadProof.into(); + return InvalidTransaction::BadProof.into() } ValidTransaction::with_tag_prefix("ImOnline") @@ -511,9 +506,8 @@ pub mod pallet { /// Keep track of number of authored blocks per authority, uncles are counted as /// well since they're a valid proof of being online. -impl< - T: Config + pallet_authorship::Config, -> pallet_authorship::EventHandler, T::BlockNumber> for Pallet +impl + pallet_authorship::EventHandler, T::BlockNumber> for Pallet { fn note_author(author: ValidatorId) { Self::note_authorship(author); @@ -533,7 +527,7 @@ impl Pallet { let current_validators = T::ValidatorSet::validators(); if authority_index >= current_validators.len() as u32 { - return false; + return false } let authority = ¤t_validators[authority_index as usize]; @@ -545,10 +539,7 @@ impl Pallet { let current_session = T::ValidatorSet::session_index(); ReceivedHeartbeats::::contains_key(¤t_session, &authority_index) || - AuthoredBlocks::::get( - ¤t_session, - authority, - ) != 0 + AuthoredBlocks::::get(¤t_session, authority) != 0 } /// Returns `true` if a heartbeat has been received for the authority at `authority_index` in @@ -562,11 +553,7 @@ impl Pallet { fn note_authorship(author: ValidatorId) { let current_session = T::ValidatorSet::session_index(); - 
AuthoredBlocks::::mutate( - ¤t_session, - author, - |authored| *authored += 1, - ); + AuthoredBlocks::::mutate(¤t_session, author, |authored| *authored += 1); } pub(crate) fn send_heartbeats( @@ -602,8 +589,8 @@ impl Pallet { // haven't sent an heartbeat yet we'll send one unconditionally. the idea is to prevent // all nodes from sending the heartbeats at the same block and causing a temporary (but // deterministic) spike in transactions. - progress >= START_HEARTBEAT_FINAL_PERIOD - || progress >= START_HEARTBEAT_RANDOM_PERIOD && random_choice(progress) + progress >= START_HEARTBEAT_FINAL_PERIOD || + progress >= START_HEARTBEAT_RANDOM_PERIOD && random_choice(progress) } else { // otherwise we fallback to using the block number calculated at the beginning // of the session that should roughly correspond to the middle of the session @@ -612,23 +599,21 @@ impl Pallet { }; if !should_heartbeat { - return Err(OffchainErr::TooEarly); + return Err(OffchainErr::TooEarly) } let session_index = T::ValidatorSet::session_index(); let validators_len = Keys::::decode_len().unwrap_or_default() as u32; - Ok( - Self::local_authority_keys().map(move |(authority_index, key)| { - Self::send_single_heartbeat( - authority_index, - key, - session_index, - block_number, - validators_len, - ) - }), - ) + Ok(Self::local_authority_keys().map(move |(authority_index, key)| { + Self::send_single_heartbeat( + authority_index, + key, + session_index, + block_number, + validators_len, + ) + })) } fn send_single_heartbeat( @@ -640,8 +625,8 @@ impl Pallet { ) -> OffchainResult { // A helper function to prepare heartbeat call. 
let prepare_heartbeat = || -> OffchainResult> { - let network_state = sp_io::offchain::network_state() - .map_err(|_| OffchainErr::NetworkState)?; + let network_state = + sp_io::offchain::network_state().map_err(|_| OffchainErr::NetworkState)?; let heartbeat = Heartbeat { block_number, network_state, @@ -656,35 +641,30 @@ impl Pallet { }; if Self::is_online(authority_index) { - return Err(OffchainErr::AlreadyOnline(authority_index)); + return Err(OffchainErr::AlreadyOnline(authority_index)) } // acquire lock for that authority at current heartbeat to make sure we don't // send concurrent heartbeats. - Self::with_heartbeat_lock( - authority_index, - session_index, - block_number, - || { - let call = prepare_heartbeat()?; - log::info!( - target: "runtime::im-online", - "[index: {:?}] Reporting im-online at block: {:?} (session: {:?}): {:?}", - authority_index, - block_number, - session_index, - call, - ); + Self::with_heartbeat_lock(authority_index, session_index, block_number, || { + let call = prepare_heartbeat()?; + log::info!( + target: "runtime::im-online", + "[index: {:?}] Reporting im-online at block: {:?} (session: {:?}): {:?}", + authority_index, + block_number, + session_index, + call, + ); - SubmitTransaction::>::submit_unsigned_transaction(call.into()) - .map_err(|_| OffchainErr::SubmitTransaction)?; + SubmitTransaction::>::submit_unsigned_transaction(call.into()) + .map_err(|_| OffchainErr::SubmitTransaction)?; - Ok(()) - }, - ) + Ok(()) + }) } - fn local_authority_keys() -> impl Iterator { + fn local_authority_keys() -> impl Iterator { // on-chain storage // // At index `idx`: @@ -699,13 +679,12 @@ impl Pallet { local_keys.sort(); - authorities.into_iter() - .enumerate() - .filter_map(move |(index, authority)| { - local_keys.binary_search(&authority) - .ok() - .map(|location| (index as u32, local_keys[location].clone())) - }) + authorities.into_iter().enumerate().filter_map(move |(index, authority)| { + local_keys + .binary_search(&authority) + .ok() + 
.map(|location| (index as u32, local_keys[location].clone())) + }) } fn with_heartbeat_lock( @@ -722,24 +701,21 @@ impl Pallet { let storage = StorageValueRef::persistent(&key); let res = storage.mutate( |status: Result>, StorageRetrievalError>| { - // Check if there is already a lock for that particular block. - // This means that the heartbeat has already been sent, and we are just waiting - // for it to be included. However if it doesn't get included for INCLUDE_THRESHOLD - // we will re-send it. - match status { - // we are still waiting for inclusion. - Ok(Some(status)) if status.is_recent(session_index, now) => { - Err(OffchainErr::WaitingForInclusion(status.sent_at)) - }, - // attempt to set new status - _ => Ok(HeartbeatStatus { - session_index, - sent_at: now, - }), - } - }); + // Check if there is already a lock for that particular block. + // This means that the heartbeat has already been sent, and we are just waiting + // for it to be included. However if it doesn't get included for INCLUDE_THRESHOLD + // we will re-send it. + match status { + // we are still waiting for inclusion. + Ok(Some(status)) if status.is_recent(session_index, now) => + Err(OffchainErr::WaitingForInclusion(status.sent_at)), + // attempt to set new status + _ => Ok(HeartbeatStatus { session_index, sent_at: now }), + } + }, + ); if let Err(MutateStorageError::ValueFunctionFailed(err)) = res { - return Err(err); + return Err(err) } let mut new_status = res.map_err(|_| OffchainErr::FailedToAcquireLock)?; @@ -777,14 +753,16 @@ impl OneSessionHandler for Pallet { type Key = T::AuthorityId; fn on_genesis_session<'a, I: 'a>(validators: I) - where I: Iterator + where + I: Iterator, { let keys = validators.map(|x| x.1).collect::>(); Self::initialize_keys(&keys); } fn on_new_session<'a, I: 'a>(_changed: bool, validators: I, _queued_validators: I) - where I: Iterator + where + I: Iterator, { // Tell the offchain worker to start making the next session's heartbeats. 
// Since we consider producing blocks as being online, @@ -802,14 +780,16 @@ impl OneSessionHandler for Pallet { let keys = Keys::::get(); let current_validators = T::ValidatorSet::validators(); - let offenders = current_validators.into_iter().enumerate() - .filter(|(index, id)| - !Self::is_online_aux(*index as u32, id) - ).filter_map(|(_, id)| + let offenders = current_validators + .into_iter() + .enumerate() + .filter(|(index, id)| !Self::is_online_aux(*index as u32, id)) + .filter_map(|(_, id)| { >::IdentificationOf::convert( id.clone() ).map(|full_id| (id, full_id)) - ).collect::>>(); + }) + .collect::>>(); // Remove all received heartbeats and number of authored blocks from the // current session, they have already been processed and won't be needed diff --git a/frame/im-online/src/mock.rs b/frame/im-online/src/mock.rs index 3d7d6d73cd83a..a04da49c6526d 100644 --- a/frame/im-online/src/mock.rs +++ b/frame/im-online/src/mock.rs @@ -24,9 +24,11 @@ use std::cell::RefCell; use frame_support::{parameter_types, weights::Weight}; use pallet_session::historical as pallet_session_historical; use sp_core::H256; -use sp_runtime::testing::{Header, TestXt, UintAuthorityId}; -use sp_runtime::traits::{BlakeTwo256, ConvertInto, IdentityLookup}; -use sp_runtime::{Perbill, Permill}; +use sp_runtime::{ + testing::{Header, TestXt, UintAuthorityId}, + traits::{BlakeTwo256, ConvertInto, IdentityLookup}, + Perbill, Permill, +}; use sp_staking::{ offence::{OffenceError, ReportOffence}, SessionIndex, @@ -70,13 +72,11 @@ impl pallet_session::SessionManager for TestSessionManager { impl pallet_session::historical::SessionManager for TestSessionManager { fn new_session(_new_index: SessionIndex) -> Option> { - VALIDATORS.with(|l| l - .borrow_mut() - .take() - .map(|validators| { - validators.iter().map(|v| (*v, *v)).collect() - }) - ) + VALIDATORS.with(|l| { + l.borrow_mut() + .take() + .map(|validators| validators.iter().map(|v| (*v, *v)).collect()) + }) } fn end_session(_: SessionIndex) 
{} fn start_session(_: SessionIndex) {} @@ -105,9 +105,7 @@ impl ReportOffence for OffenceHandler { } pub fn new_test_ext() -> sp_io::TestExternalities { - let t = frame_system::GenesisConfig::default() - .build_storage::() - .unwrap(); + let t = frame_system::GenesisConfig::default().build_storage::().unwrap(); t.into() } @@ -154,8 +152,9 @@ parameter_types! { impl pallet_session::Config for Runtime { type ShouldEndSession = pallet_session::PeriodicSessions; - type SessionManager = pallet_session::historical::NoteHistoricalRoot; - type SessionHandler = (ImOnline, ); + type SessionManager = + pallet_session::historical::NoteHistoricalRoot; + type SessionHandler = (ImOnline,); type ValidatorId = u64; type ValidatorIdOf = ConvertInto; type Keys = UintAuthorityId; @@ -230,7 +229,8 @@ impl Config for Runtime { type WeightInfo = (); } -impl frame_system::offchain::SendTransactionTypes for Runtime where +impl frame_system::offchain::SendTransactionTypes for Runtime +where Call: From, { type OverarchingCall = Call; diff --git a/frame/im-online/src/tests.rs b/frame/im-online/src/tests.rs index 5fb8fd3a791e9..30af2d31fda3a 100644 --- a/frame/im-online/src/tests.rs +++ b/frame/im-online/src/tests.rs @@ -21,23 +21,23 @@ use super::*; use crate::mock::*; -use sp_core::OpaquePeerId; -use sp_core::offchain::{ - OffchainDbExt, - OffchainWorkerExt, - TransactionPoolExt, - testing::{TestOffchainExt, TestTransactionPoolExt}, +use frame_support::{assert_noop, dispatch}; +use sp_core::{ + offchain::{ + testing::{TestOffchainExt, TestTransactionPoolExt}, + OffchainDbExt, OffchainWorkerExt, TransactionPoolExt, + }, + OpaquePeerId, +}; +use sp_runtime::{ + testing::UintAuthorityId, + transaction_validity::{InvalidTransaction, TransactionValidityError}, }; -use frame_support::{dispatch, assert_noop}; -use sp_runtime::{testing::UintAuthorityId, transaction_validity::{TransactionValidityError, InvalidTransaction}}; #[test] fn test_unresponsiveness_slash_fraction() { // A single case of 
unresponsiveness is not slashed. - assert_eq!( - UnresponsivenessOffence::<()>::slash_fraction(1, 50), - Perbill::zero(), - ); + assert_eq!(UnresponsivenessOffence::<()>::slash_fraction(1, 50), Perbill::zero(),); assert_eq!( UnresponsivenessOffence::<()>::slash_fraction(5, 50), @@ -75,17 +75,17 @@ fn should_report_offline_validators() { // then let offences = OFFENCES.with(|l| l.replace(vec![])); - assert_eq!(offences, vec![ - (vec![], UnresponsivenessOffence { - session_index: 2, - validator_set_count: 3, - offenders: vec![ - (1, 1), - (2, 2), - (3, 3), - ], - }) - ]); + assert_eq!( + offences, + vec![( + vec![], + UnresponsivenessOffence { + session_index: 2, + validator_set_count: 3, + offenders: vec![(1, 1), (2, 2), (3, 3),], + } + )] + ); // should not report when heartbeat is sent for (idx, v) in validators.into_iter().take(4).enumerate() { @@ -95,16 +95,17 @@ fn should_report_offline_validators() { // then let offences = OFFENCES.with(|l| l.replace(vec![])); - assert_eq!(offences, vec![ - (vec![], UnresponsivenessOffence { - session_index: 3, - validator_set_count: 6, - offenders: vec![ - (5, 5), - (6, 6), - ], - }) - ]); + assert_eq!( + offences, + vec![( + vec![], + UnresponsivenessOffence { + session_index: 3, + validator_set_count: 6, + offenders: vec![(5, 5), (6, 6),], + } + )] + ); }); } @@ -129,17 +130,15 @@ fn heartbeat( }; let signature = id.sign(&heartbeat.encode()).unwrap(); - ImOnline::pre_dispatch(&crate::Call::heartbeat(heartbeat.clone(), signature.clone())) - .map_err(|e| match e { - TransactionValidityError::Invalid(InvalidTransaction::Custom(INVALID_VALIDATORS_LEN)) => - "invalid validators len", + ImOnline::pre_dispatch(&crate::Call::heartbeat(heartbeat.clone(), signature.clone())).map_err( + |e| match e { + TransactionValidityError::Invalid(InvalidTransaction::Custom( + INVALID_VALIDATORS_LEN, + )) => "invalid validators len", e @ _ => <&'static str>::from(e), - })?; - ImOnline::heartbeat( - Origin::none(), - heartbeat, - signature, - ) + 
}, + )?; + ImOnline::heartbeat(Origin::none(), heartbeat, signature) } #[test] @@ -191,8 +190,14 @@ fn late_heartbeat_and_invalid_keys_len_should_fail() { assert_eq!(Session::validators(), vec![1, 2, 3]); // when - assert_noop!(heartbeat(1, 3, 0, 1.into(), Session::validators()), "Transaction is outdated"); - assert_noop!(heartbeat(1, 1, 0, 1.into(), Session::validators()), "Transaction is outdated"); + assert_noop!( + heartbeat(1, 3, 0, 1.into(), Session::validators()), + "Transaction is outdated" + ); + assert_noop!( + heartbeat(1, 1, 0, 1.into(), Session::validators()), + "Transaction is outdated" + ); // invalid validators_len assert_noop!(heartbeat(1, 2, 0, 1.into(), vec![]), "invalid validators len"); @@ -236,13 +241,16 @@ fn should_generate_heartbeats() { e => panic!("Unexpected call: {:?}", e), }; - assert_eq!(heartbeat, Heartbeat { - block_number: block, - network_state: sp_io::offchain::network_state().unwrap(), - session_index: 2, - authority_index: 2, - validators_len: 3, - }); + assert_eq!( + heartbeat, + Heartbeat { + block_number: block, + network_state: sp_io::offchain::network_state().unwrap(), + session_index: 2, + authority_index: 2, + validators_len: 3, + } + ); }); } @@ -348,13 +356,16 @@ fn should_not_send_a_report_if_already_online() { e => panic!("Unexpected call: {:?}", e), }; - assert_eq!(heartbeat, Heartbeat { - block_number: 4, - network_state: sp_io::offchain::network_state().unwrap(), - session_index: 2, - authority_index: 0, - validators_len: 3, - }); + assert_eq!( + heartbeat, + Heartbeat { + block_number: 4, + network_state: sp_io::offchain::network_state().unwrap(), + session_index: 2, + authority_index: 0, + validators_len: 3, + } + ); }); } @@ -424,10 +435,7 @@ fn should_handle_non_linear_session_progress() { // if we don't have valid results for the current session progres then // we'll fallback to `HeartbeatAfter` and only heartbeat on block 5. 
MOCK_CURRENT_SESSION_PROGRESS.with(|p| *p.borrow_mut() = Some(None)); - assert_eq!( - ImOnline::send_heartbeats(2).err(), - Some(OffchainErr::TooEarly), - ); + assert_eq!(ImOnline::send_heartbeats(2).err(), Some(OffchainErr::TooEarly),); MOCK_CURRENT_SESSION_PROGRESS.with(|p| *p.borrow_mut() = Some(None)); assert!(ImOnline::send_heartbeats(5).ok().is_some()); @@ -453,11 +461,9 @@ fn test_does_not_heartbeat_early_in_the_session() { ext.execute_with(|| { // mock current session progress as being 5%. we only randomly start // heartbeating after 10% of the session has elapsed. - MOCK_CURRENT_SESSION_PROGRESS.with(|p| *p.borrow_mut() = Some(Some(Permill::from_float(0.05)))); - assert_eq!( - ImOnline::send_heartbeats(2).err(), - Some(OffchainErr::TooEarly), - ); + MOCK_CURRENT_SESSION_PROGRESS + .with(|p| *p.borrow_mut() = Some(Some(Permill::from_float(0.05)))); + assert_eq!(ImOnline::send_heartbeats(2).err(), Some(OffchainErr::TooEarly),); }); } @@ -475,8 +481,8 @@ fn test_probability_of_heartbeating_increases_with_session_progress() { // the average session length is 100 blocks, therefore the residual // probability of sending a heartbeat is 1% MOCK_AVERAGE_SESSION_LENGTH.with(|p| *p.borrow_mut() = Some(100)); - MOCK_CURRENT_SESSION_PROGRESS.with(|p| *p.borrow_mut() = - Some(Some(Permill::from_float(progress)))); + MOCK_CURRENT_SESSION_PROGRESS + .with(|p| *p.borrow_mut() = Some(Some(Permill::from_float(progress)))); let mut seed = [0u8; 32]; let encoded = ((random * Permill::ACCURACY as f64) as u32).encode(); @@ -486,10 +492,7 @@ fn test_probability_of_heartbeating_increases_with_session_progress() { let assert_too_early = |progress, random| { set_test(progress, random); - assert_eq!( - ImOnline::send_heartbeats(2).err(), - Some(OffchainErr::TooEarly), - ); + assert_eq!(ImOnline::send_heartbeats(2).err(), Some(OffchainErr::TooEarly),); }; let assert_heartbeat_ok = |progress, random| { diff --git a/frame/im-online/src/weights.rs b/frame/im-online/src/weights.rs index 
6a1f575b856c5..4713f8aaec410 100644 --- a/frame/im-online/src/weights.rs +++ b/frame/im-online/src/weights.rs @@ -35,22 +35,24 @@ // --output=./frame/im-online/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_im_online. pub trait WeightInfo { - fn validate_unsigned_and_then_heartbeat(k: u32, e: u32, ) -> Weight; + fn validate_unsigned_and_then_heartbeat(k: u32, e: u32) -> Weight; } /// Weights for pallet_im_online using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn validate_unsigned_and_then_heartbeat(k: u32, e: u32, ) -> Weight { + fn validate_unsigned_and_then_heartbeat(k: u32, e: u32) -> Weight { (97_166_000 as Weight) // Standard Error: 0 .saturating_add((153_000 as Weight).saturating_mul(k as Weight)) @@ -63,7 +65,7 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn validate_unsigned_and_then_heartbeat(k: u32, e: u32, ) -> Weight { + fn validate_unsigned_and_then_heartbeat(k: u32, e: u32) -> Weight { (97_166_000 as Weight) // Standard Error: 0 .saturating_add((153_000 as Weight).saturating_mul(k as Weight)) diff --git a/frame/indices/src/benchmarking.rs b/frame/indices/src/benchmarking.rs index 625a994af38f6..6829a66051601 100644 --- a/frame/indices/src/benchmarking.rs +++ b/frame/indices/src/benchmarking.rs @@ -20,8 +20,8 @@ #![cfg(feature = "runtime-benchmarks")] use super::*; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; use 
sp_runtime::traits::Bounded; use crate::Pallet as Indices; @@ -93,9 +93,4 @@ benchmarks! { // TODO in another PR: lookup and unlookup trait weights (not critical) } - -impl_benchmark_test_suite!( - Indices, - crate::mock::new_test_ext(), - crate::mock::Test, -); +impl_benchmark_test_suite!(Indices, crate::mock::new_test_ext(), crate::mock::Test,); diff --git a/frame/indices/src/lib.rs b/frame/indices/src/lib.rs index b3492be7938f1..6c96dced7eb3b 100644 --- a/frame/indices/src/lib.rs +++ b/frame/indices/src/lib.rs @@ -20,36 +20,43 @@ #![cfg_attr(not(feature = "std"), no_std)] +mod benchmarking; mod mock; mod tests; -mod benchmarking; pub mod weights; -use sp_std::prelude::*; use codec::Codec; -use sp_runtime::MultiAddress; -use sp_runtime::traits::{ - StaticLookup, LookupError, Zero, Saturating, AtLeast32Bit +use frame_support::traits::{BalanceStatus::Reserved, Currency, ReservableCurrency}; +use sp_runtime::{ + traits::{AtLeast32Bit, LookupError, Saturating, StaticLookup, Zero}, + MultiAddress, }; -use frame_support::traits::{Currency, ReservableCurrency, BalanceStatus::Reserved}; +use sp_std::prelude::*; pub use weights::WeightInfo; -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; /// The module's config trait. #[pallet::config] pub trait Config: frame_system::Config { /// Type used for storing an account's index; implies the maximum number of accounts the system /// can hold. - type AccountIndex: Parameter + Member + MaybeSerializeDeserialize + Codec + Default + AtLeast32Bit + Copy; + type AccountIndex: Parameter + + Member + + MaybeSerializeDeserialize + + Codec + + Default + + AtLeast32Bit + + Copy; /// The currency trait. 
type Currency: ReservableCurrency; @@ -262,7 +269,7 @@ pub mod pallet { } /// Old name generated by `decl_event`. - #[deprecated(note="use `Event` instead")] + #[deprecated(note = "use `Event` instead")] pub type RawEvent = Event; #[pallet::error] @@ -281,11 +288,8 @@ pub mod pallet { /// The lookup from index to account. #[pallet::storage] - pub type Accounts = StorageMap< - _, Blake2_128Concat, - T::AccountIndex, - (T::AccountId, BalanceOf, bool) - >; + pub type Accounts = + StorageMap<_, Blake2_128Concat, T::AccountIndex, (T::AccountId, BalanceOf, bool)>; #[pallet::genesis_config] pub struct GenesisConfig { @@ -295,9 +299,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - Self { - indices: Default::default(), - } + Self { indices: Default::default() } } } @@ -320,9 +322,7 @@ impl Pallet { } /// Lookup an address to get an Id, if there's one there. - pub fn lookup_address( - a: MultiAddress - ) -> Option { + pub fn lookup_address(a: MultiAddress) -> Option { match a { MultiAddress::Id(i) => Some(i), MultiAddress::Index(i) => Self::lookup_index(i), diff --git a/frame/indices/src/mock.rs b/frame/indices/src/mock.rs index 46c1d814acb6b..e026e36bc389d 100644 --- a/frame/indices/src/mock.rs +++ b/frame/indices/src/mock.rs @@ -19,10 +19,10 @@ #![cfg(test)] -use sp_runtime::testing::Header; -use sp_core::H256; -use frame_support::parameter_types; use crate::{self as pallet_indices, Config}; +use frame_support::parameter_types; +use sp_core::H256; +use sp_runtime::testing::Header; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -101,8 +101,10 @@ impl Config for Test { pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig::{ + pallet_balances::GenesisConfig:: { balances: vec![(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)], - 
}.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); t.into() } diff --git a/frame/indices/src/tests.rs b/frame/indices/src/tests.rs index 96b8c4acfcd2d..37df20e9b9288 100644 --- a/frame/indices/src/tests.rs +++ b/frame/indices/src/tests.rs @@ -19,15 +19,17 @@ #![cfg(test)] -use super::*; -use super::mock::*; -use frame_support::{assert_ok, assert_noop}; +use super::{mock::*, *}; +use frame_support::{assert_noop, assert_ok}; use pallet_balances::Error as BalancesError; #[test] fn claiming_should_work() { new_test_ext().execute_with(|| { - assert_noop!(Indices::claim(Some(0).into(), 0), BalancesError::::InsufficientBalance); + assert_noop!( + Indices::claim(Some(0).into(), 0), + BalancesError::::InsufficientBalance + ); assert_ok!(Indices::claim(Some(1).into(), 0)); assert_noop!(Indices::claim(Some(2).into(), 0), Error::::InUse); assert_eq!(Balances::reserved_balance(1), 1); diff --git a/frame/indices/src/weights.rs b/frame/indices/src/weights.rs index 559392d3d2ba2..07df65d9443ee 100644 --- a/frame/indices/src/weights.rs +++ b/frame/indices/src/weights.rs @@ -35,11 +35,13 @@ // --output=./frame/indices/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_indices. 
diff --git a/frame/lottery/src/benchmarking.rs b/frame/lottery/src/benchmarking.rs index 8fe91088b84ea..cf58a5f81b10d 100644 --- a/frame/lottery/src/benchmarking.rs +++ b/frame/lottery/src/benchmarking.rs @@ -21,9 +21,9 @@ use super::*; -use frame_system::RawOrigin; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; use frame_support::traits::{EnsureOrigin, OnInitialize, UnfilteredDispatchable}; -use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; +use frame_system::RawOrigin; use sp_runtime::traits::{Bounded, Zero}; use crate::Pallet as Lottery; @@ -170,8 +170,4 @@ benchmarks! { } } -impl_benchmark_test_suite!( - Lottery, - crate::mock::new_test_ext(), - crate::mock::Test, -); +impl_benchmark_test_suite!(Lottery, crate::mock::new_test_ext(), crate::mock::Test,); diff --git a/frame/lottery/src/lib.rs b/frame/lottery/src/lib.rs index 14d4d5306729f..afd0fd90c0388 100644 --- a/frame/lottery/src/lib.rs +++ b/frame/lottery/src/lib.rs @@ -47,30 +47,30 @@ #![cfg_attr(not(feature = "std"), no_std)] +mod benchmarking; #[cfg(test)] mod mock; #[cfg(test)] mod tests; -mod benchmarking; pub mod weights; -use sp_std::prelude::*; +use codec::{Decode, Encode}; +use frame_support::{ + dispatch::{DispatchResult, Dispatchable, GetDispatchInfo}, + ensure, + traits::{Currency, ExistenceRequirement::KeepAlive, Get, Randomness, ReservableCurrency}, + PalletId, RuntimeDebug, +}; +pub use pallet::*; use sp_runtime::{ - DispatchError, ArithmeticError, traits::{AccountIdConversion, Saturating, Zero}, + ArithmeticError, DispatchError, }; -use frame_support::{ - ensure, PalletId, RuntimeDebug, - dispatch::{Dispatchable, DispatchResult, GetDispatchInfo}, - traits::{ - Currency, ReservableCurrency, Get, ExistenceRequirement::KeepAlive, Randomness, - }, -}; -use codec::{Encode, Decode}; +use sp_std::prelude::*; pub use weights::WeightInfo; -pub use pallet::*; -type BalanceOf = <::Currency as 
Currency<::AccountId>>::Balance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; // Any runtime call can be encoded into two bytes which represent the pallet and call index. // We use this to uniquely match someone's incoming call with the calls configured for the lottery. @@ -96,7 +96,9 @@ pub trait ValidateCall { } impl ValidateCall for () { - fn validate_call(_: &::Call) -> bool { false } + fn validate_call(_: &::Call) -> bool { + false + } } impl ValidateCall for Pallet { @@ -112,9 +114,9 @@ impl ValidateCall for Pallet { #[frame_support::pallet] pub mod pallet { - use frame_support::{Parameter, pallet_prelude::*, traits::EnsureOrigin, weights::Weight}; - use frame_system::{ensure_signed, pallet_prelude::*}; use super::*; + use frame_support::{pallet_prelude::*, traits::EnsureOrigin, weights::Weight, Parameter}; + use frame_system::{ensure_signed, pallet_prelude::*}; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -128,7 +130,10 @@ pub mod pallet { type PalletId: Get; /// A dispatchable call. - type Call: Parameter + Dispatchable + GetDispatchInfo + From>; + type Call: Parameter + + Dispatchable + + GetDispatchInfo + + From>; /// The currency trait. type Currency: ReservableCurrency; @@ -199,16 +204,13 @@ pub mod pallet { /// The configuration for the current lottery. #[pallet::storage] - pub(crate) type Lottery = StorageValue<_, LotteryConfig>>; + pub(crate) type Lottery = + StorageValue<_, LotteryConfig>>; /// Users who have purchased a ticket. (Lottery Index, Tickets Purchased) #[pallet::storage] - pub(crate) type Participants = StorageMap< - _, - Twox64Concat, T::AccountId, - (u32, Vec), - ValueQuery, - >; + pub(crate) type Participants = + StorageMap<_, Twox64Concat, T::AccountId, (u32, Vec), ValueQuery>; /// Total number of tickets sold. 
#[pallet::storage] @@ -231,9 +233,8 @@ pub mod pallet { fn on_initialize(n: T::BlockNumber) -> Weight { Lottery::::mutate(|mut lottery| -> Weight { if let Some(config) = &mut lottery { - let payout_block = config.start - .saturating_add(config.length) - .saturating_add(config.delay); + let payout_block = + config.start.saturating_add(config.length).saturating_add(config.delay); if payout_block <= n { let (lottery_account, lottery_balance) = Self::pot(); let ticket_count = TicketsCount::::get(); @@ -241,7 +242,12 @@ pub mod pallet { let winning_number = Self::choose_winner(ticket_count); let winner = Tickets::::get(winning_number).unwrap_or(lottery_account); // Not much we can do if this fails... - let res = T::Currency::transfer(&Self::account_id(), &winner, lottery_balance, KeepAlive); + let res = T::Currency::transfer( + &Self::account_id(), + &winner, + lottery_balance, + KeepAlive, + ); debug_assert!(res.is_ok()); Self::deposit_event(Event::::Winner(winner, lottery_balance)); @@ -339,13 +345,7 @@ pub mod pallet { let new_index = index.checked_add(1).ok_or(ArithmeticError::Overflow)?; let start = frame_system::Pallet::::block_number(); // Use new_index to more easily track everything with the current state. - *lottery = Some(LotteryConfig { - price, - start, - length, - delay, - repeat, - }); + *lottery = Some(LotteryConfig { price, start, length, delay, repeat }); LotteryIndex::::put(new_index); Ok(()) })?; @@ -388,8 +388,8 @@ impl Pallet { // The existential deposit is not part of the pot so lottery account never gets deleted. fn pot() -> (T::AccountId, BalanceOf) { let account_id = Self::account_id(); - let balance = T::Currency::free_balance(&account_id) - .saturating_sub(T::Currency::minimum_balance()); + let balance = + T::Currency::free_balance(&account_id).saturating_sub(T::Currency::minimum_balance()); (account_id, balance) } @@ -407,7 +407,9 @@ impl Pallet { // Convert a call to it's call index by encoding the call and taking the first two bytes. 
fn call_to_index(call: &::Call) -> Result { let encoded_call = call.encode(); - if encoded_call.len() < 2 { Err(Error::::EncodingFailed)? } + if encoded_call.len() < 2 { + Err(Error::::EncodingFailed)? + } return Ok((encoded_call[0], encoded_call[1])) } @@ -416,30 +418,39 @@ impl Pallet { // Check the call is valid lottery let config = Lottery::::get().ok_or(Error::::NotConfigured)?; let block_number = frame_system::Pallet::::block_number(); - ensure!(block_number < config.start.saturating_add(config.length), Error::::AlreadyEnded); + ensure!( + block_number < config.start.saturating_add(config.length), + Error::::AlreadyEnded + ); ensure!(T::ValidateCall::validate_call(call), Error::::InvalidCall); let call_index = Self::call_to_index(call)?; let ticket_count = TicketsCount::::get(); let new_ticket_count = ticket_count.checked_add(1).ok_or(ArithmeticError::Overflow)?; // Try to update the participant status - Participants::::try_mutate(&caller, |(lottery_index, participating_calls)| -> DispatchResult { - let index = LotteryIndex::::get(); - // If lottery index doesn't match, then reset participating calls and index. - if *lottery_index != index { - *participating_calls = Vec::new(); - *lottery_index = index; - } else { - // Check that user is not already participating under this call. - ensure!(!participating_calls.iter().any(|c| call_index == *c), Error::::AlreadyParticipating); - } - // Check user has enough funds and send it to the Lottery account. - T::Currency::transfer(caller, &Self::account_id(), config.price, KeepAlive)?; - // Create a new ticket. - TicketsCount::::put(new_ticket_count); - Tickets::::insert(ticket_count, caller.clone()); - participating_calls.push(call_index); - Ok(()) - })?; + Participants::::try_mutate( + &caller, + |(lottery_index, participating_calls)| -> DispatchResult { + let index = LotteryIndex::::get(); + // If lottery index doesn't match, then reset participating calls and index. 
+ if *lottery_index != index { + *participating_calls = Vec::new(); + *lottery_index = index; + } else { + // Check that user is not already participating under this call. + ensure!( + !participating_calls.iter().any(|c| call_index == *c), + Error::::AlreadyParticipating + ); + } + // Check user has enough funds and send it to the Lottery account. + T::Currency::transfer(caller, &Self::account_id(), config.price, KeepAlive)?; + // Create a new ticket. + TicketsCount::::put(new_ticket_count); + Tickets::::insert(ticket_count, caller.clone()); + participating_calls.push(call_index); + Ok(()) + }, + )?; Self::deposit_event(Event::::TicketBought(caller.clone(), call_index)); @@ -451,9 +462,9 @@ impl Pallet { let mut random_number = Self::generate_random_number(0); // Best effort attempt to remove bias from modulus operator. - for i in 1 .. T::MaxGenerateRandom::get() { + for i in 1..T::MaxGenerateRandom::get() { if random_number < u32::MAX - u32::MAX % total { - break; + break } random_number = Self::generate_random_number(i); diff --git a/frame/lottery/src/mock.rs b/frame/lottery/src/mock.rs index 885e81bb32ea3..253923de0d5e7 100644 --- a/frame/lottery/src/mock.rs +++ b/frame/lottery/src/mock.rs @@ -25,13 +25,13 @@ use frame_support::{ traits::{OnFinalize, OnInitialize}, }; use frame_support_test::TestRandomness; +use frame_system::EnsureRoot; use sp_core::H256; use sp_runtime::{ - Perbill, testing::Header, traits::{BlakeTwo256, IdentityLookup}, + Perbill, }; -use frame_system::EnsureRoot; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -123,7 +123,9 @@ pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_balances::GenesisConfig:: { balances: vec![(1, 100), (2, 100), (3, 100), (4, 100), (5, 100)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); t.into() } diff --git 
a/frame/lottery/src/tests.rs b/frame/lottery/src/tests.rs index 38994b2864c62..800ae223d9739 100644 --- a/frame/lottery/src/tests.rs +++ b/frame/lottery/src/tests.rs @@ -18,13 +18,12 @@ //! Tests for the module. use super::*; +use frame_support::{assert_noop, assert_ok}; use mock::{ - Lottery, Balances, Test, Origin, Call, SystemCall, BalancesCall, - new_test_ext, run_to_block + new_test_ext, run_to_block, Balances, BalancesCall, Call, Lottery, Origin, SystemCall, Test, }; -use sp_runtime::traits::{BadOrigin}; -use frame_support::{assert_noop, assert_ok}; use pallet_balances::Error as BalancesError; +use sp_runtime::traits::BadOrigin; #[test] fn initial_state() { @@ -86,13 +85,7 @@ fn basic_end_to_end_works() { assert_eq!(LotteryIndex::::get(), 2); assert_eq!( crate::Lottery::::get().unwrap(), - LotteryConfig { - price, - start: 25, - length, - delay, - repeat: true, - } + LotteryConfig { price, start: 25, length, delay, repeat: true } ); }); } @@ -184,10 +177,7 @@ fn buy_ticket_works_as_simple_passthrough() { ); let bad_origin_call = Box::new(Call::Balances(BalancesCall::force_transfer(0, 0, 0))); - assert_noop!( - Lottery::buy_ticket(Origin::signed(1), bad_origin_call), - BadOrigin, - ); + assert_noop!(Lottery::buy_ticket(Origin::signed(1), bad_origin_call), BadOrigin,); // User can call other txs, but doesn't get a ticket let remark_call = Box::new(Call::System(SystemCall::remark(b"hello, world!".to_vec()))); @@ -210,7 +200,6 @@ fn buy_ticket_works() { ]; assert_ok!(Lottery::set_calls(Origin::root(), calls)); - // Can't buy ticket before start let call = Box::new(Call::Balances(BalancesCall::transfer(2, 1))); assert_ok!(Lottery::buy_ticket(Origin::signed(1), call.clone())); diff --git a/frame/lottery/src/weights.rs b/frame/lottery/src/weights.rs index a73d0b667e351..9e4c7fece24d9 100644 --- a/frame/lottery/src/weights.rs +++ b/frame/lottery/src/weights.rs @@ -35,17 +35,19 @@ // --output=./frame/lottery/src/weights.rs // 
--template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_lottery. pub trait WeightInfo { fn buy_ticket() -> Weight; - fn set_calls(n: u32, ) -> Weight; + fn set_calls(n: u32) -> Weight; fn start_lottery() -> Weight; fn stop_repeat() -> Weight; fn on_initialize_end() -> Weight; @@ -60,7 +62,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(6 as Weight)) .saturating_add(T::DbWeight::get().writes(4 as Weight)) } - fn set_calls(n: u32, ) -> Weight { + fn set_calls(n: u32) -> Weight { (15_015_000 as Weight) // Standard Error: 5_000 .saturating_add((301_000 as Weight).saturating_mul(n as Weight)) @@ -95,7 +97,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(6 as Weight)) .saturating_add(RocksDbWeight::get().writes(4 as Weight)) } - fn set_calls(n: u32, ) -> Weight { + fn set_calls(n: u32) -> Weight { (15_015_000 as Weight) // Standard Error: 5_000 .saturating_add((301_000 as Weight).saturating_mul(n as Weight)) diff --git a/frame/membership/src/lib.rs b/frame/membership/src/lib.rs index c834ed23659e6..ed0c78f82d262 100644 --- a/frame/membership/src/lib.rs +++ b/frame/membership/src/lib.rs @@ -23,12 +23,12 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] -use sp_std::prelude::*; use frame_support::{ - decl_module, decl_storage, decl_event, decl_error, - traits::{ChangeMembers, InitializeMembers, EnsureOrigin, Contains, SortedMembers, Get}, + decl_error, decl_event, decl_module, decl_storage, + traits::{ChangeMembers, Contains, EnsureOrigin, Get, InitializeMembers, SortedMembers}, }; use frame_system::ensure_signed; +use sp_std::prelude::*; pub mod weights; pub use weights::WeightInfo; @@ -321,10 +321,10 @@ impl, I: Instance> SortedMembers for Module { #[cfg(feature = "runtime-benchmarks")] mod benchmark { - use super::{*, Module as Membership}; + use super::{Module as Membership, *}; + use frame_benchmarking::{account, benchmarks_instance, impl_benchmark_test_suite, whitelist}; + use frame_support::{assert_ok, traits::EnsureOrigin}; use frame_system::RawOrigin; - use frame_support::{traits::EnsureOrigin, assert_ok}; - use frame_benchmarking::{benchmarks_instance, whitelist, account, impl_benchmark_test_suite}; const SEED: u32 = 0; @@ -467,10 +467,13 @@ mod tests { use super::*; use crate as pallet_membership; - use frame_support::{assert_ok, assert_noop, parameter_types, ord_parameter_types}; - use sp_core::H256; - use sp_runtime::{traits::{BlakeTwo256, IdentityLookup, BadOrigin}, testing::Header}; + use frame_support::{assert_noop, assert_ok, ord_parameter_types, parameter_types}; use frame_system::EnsureSignedBy; + use sp_core::H256; + use sp_runtime::{ + testing::Header, + traits::{BadOrigin, BlakeTwo256, IdentityLookup}, + }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -572,10 +575,12 @@ mod tests { pub(crate) fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); // We use default for brevity, but you can configure as desired if needed. 
- pallet_membership::GenesisConfig::{ + pallet_membership::GenesisConfig:: { members: vec![10, 20, 30], - .. Default::default() - }.assimilate_storage(&mut t).unwrap(); + ..Default::default() + } + .assimilate_storage(&mut t) + .unwrap(); t.into() } @@ -617,7 +622,10 @@ mod tests { fn add_member_works() { new_test_ext().execute_with(|| { assert_noop!(Membership::add_member(Origin::signed(5), 15), BadOrigin); - assert_noop!(Membership::add_member(Origin::signed(1), 10), Error::::AlreadyMember); + assert_noop!( + Membership::add_member(Origin::signed(1), 10), + Error::::AlreadyMember + ); assert_ok!(Membership::add_member(Origin::signed(1), 15)); assert_eq!(Membership::members(), vec![10, 15, 20, 30]); assert_eq!(MEMBERS.with(|m| m.borrow().clone()), Membership::members()); @@ -628,7 +636,10 @@ mod tests { fn remove_member_works() { new_test_ext().execute_with(|| { assert_noop!(Membership::remove_member(Origin::signed(5), 20), BadOrigin); - assert_noop!(Membership::remove_member(Origin::signed(2), 15), Error::::NotMember); + assert_noop!( + Membership::remove_member(Origin::signed(2), 15), + Error::::NotMember + ); assert_ok!(Membership::set_prime(Origin::signed(5), 20)); assert_ok!(Membership::remove_member(Origin::signed(2), 20)); assert_eq!(Membership::members(), vec![10, 30]); @@ -642,8 +653,14 @@ mod tests { fn swap_member_works() { new_test_ext().execute_with(|| { assert_noop!(Membership::swap_member(Origin::signed(5), 10, 25), BadOrigin); - assert_noop!(Membership::swap_member(Origin::signed(3), 15, 25), Error::::NotMember); - assert_noop!(Membership::swap_member(Origin::signed(3), 10, 30), Error::::AlreadyMember); + assert_noop!( + Membership::swap_member(Origin::signed(3), 15, 25), + Error::::NotMember + ); + assert_noop!( + Membership::swap_member(Origin::signed(3), 10, 30), + Error::::AlreadyMember + ); assert_ok!(Membership::set_prime(Origin::signed(5), 20)); assert_ok!(Membership::swap_member(Origin::signed(3), 20, 20)); @@ -673,8 +690,14 @@ mod tests { 
fn change_key_works() { new_test_ext().execute_with(|| { assert_ok!(Membership::set_prime(Origin::signed(5), 10)); - assert_noop!(Membership::change_key(Origin::signed(3), 25), Error::::NotMember); - assert_noop!(Membership::change_key(Origin::signed(10), 20), Error::::AlreadyMember); + assert_noop!( + Membership::change_key(Origin::signed(3), 25), + Error::::NotMember + ); + assert_noop!( + Membership::change_key(Origin::signed(10), 20), + Error::::AlreadyMember + ); assert_ok!(Membership::change_key(Origin::signed(10), 40)); assert_eq!(Membership::members(), vec![20, 30, 40]); assert_eq!(MEMBERS.with(|m| m.borrow().clone()), Membership::members()); @@ -718,6 +741,8 @@ mod tests { pallet_membership::GenesisConfig:: { members: vec![1, 2, 3, 1], phantom: Default::default(), - }.build_storage().unwrap(); + } + .build_storage() + .unwrap(); } } diff --git a/frame/membership/src/weights.rs b/frame/membership/src/weights.rs index 8e2d8bb266164..4891f0d91b10a 100644 --- a/frame/membership/src/weights.rs +++ b/frame/membership/src/weights.rs @@ -35,70 +35,72 @@ // --output=./frame/membership/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_membership. 
pub trait WeightInfo { - fn add_member(m: u32, ) -> Weight; - fn remove_member(m: u32, ) -> Weight; - fn swap_member(m: u32, ) -> Weight; - fn reset_member(m: u32, ) -> Weight; - fn change_key(m: u32, ) -> Weight; - fn set_prime(m: u32, ) -> Weight; - fn clear_prime(m: u32, ) -> Weight; + fn add_member(m: u32) -> Weight; + fn remove_member(m: u32) -> Weight; + fn swap_member(m: u32) -> Weight; + fn reset_member(m: u32) -> Weight; + fn change_key(m: u32) -> Weight; + fn set_prime(m: u32) -> Weight; + fn clear_prime(m: u32) -> Weight; } /// Weights for pallet_membership using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn add_member(m: u32, ) -> Weight { + fn add_member(m: u32) -> Weight { (24_309_000 as Weight) // Standard Error: 3_000 .saturating_add((147_000 as Weight).saturating_mul(m as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn remove_member(m: u32, ) -> Weight { + fn remove_member(m: u32) -> Weight { (29_722_000 as Weight) // Standard Error: 0 .saturating_add((119_000 as Weight).saturating_mul(m as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn swap_member(m: u32, ) -> Weight { + fn swap_member(m: u32) -> Weight { (30_239_000 as Weight) // Standard Error: 0 .saturating_add((132_000 as Weight).saturating_mul(m as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn reset_member(m: u32, ) -> Weight { + fn reset_member(m: u32) -> Weight { (31_302_000 as Weight) // Standard Error: 0 .saturating_add((289_000 as Weight).saturating_mul(m as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn change_key(m: u32, ) -> Weight { + fn change_key(m: u32) -> Weight { (31_967_000 as 
Weight) // Standard Error: 0 .saturating_add((130_000 as Weight).saturating_mul(m as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(4 as Weight)) } - fn set_prime(m: u32, ) -> Weight { + fn set_prime(m: u32) -> Weight { (8_083_000 as Weight) // Standard Error: 0 .saturating_add((91_000 as Weight).saturating_mul(m as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn clear_prime(m: u32, ) -> Weight { + fn clear_prime(m: u32) -> Weight { (3_360_000 as Weight) // Standard Error: 0 .saturating_add((1_000 as Weight).saturating_mul(m as Weight)) @@ -108,49 +110,49 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn add_member(m: u32, ) -> Weight { + fn add_member(m: u32) -> Weight { (24_309_000 as Weight) // Standard Error: 3_000 .saturating_add((147_000 as Weight).saturating_mul(m as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn remove_member(m: u32, ) -> Weight { + fn remove_member(m: u32) -> Weight { (29_722_000 as Weight) // Standard Error: 0 .saturating_add((119_000 as Weight).saturating_mul(m as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn swap_member(m: u32, ) -> Weight { + fn swap_member(m: u32) -> Weight { (30_239_000 as Weight) // Standard Error: 0 .saturating_add((132_000 as Weight).saturating_mul(m as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn reset_member(m: u32, ) -> Weight { + fn reset_member(m: u32) -> Weight { (31_302_000 as Weight) // Standard Error: 0 .saturating_add((289_000 as Weight).saturating_mul(m as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) 
.saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn change_key(m: u32, ) -> Weight { + fn change_key(m: u32) -> Weight { (31_967_000 as Weight) // Standard Error: 0 .saturating_add((130_000 as Weight).saturating_mul(m as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(4 as Weight)) } - fn set_prime(m: u32, ) -> Weight { + fn set_prime(m: u32) -> Weight { (8_083_000 as Weight) // Standard Error: 0 .saturating_add((91_000 as Weight).saturating_mul(m as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn clear_prime(m: u32, ) -> Weight { + fn clear_prime(m: u32) -> Weight { (3_360_000 as Weight) // Standard Error: 0 .saturating_add((1_000 as Weight).saturating_mul(m as Weight)) diff --git a/frame/merkle-mountain-range/primitives/src/lib.rs b/frame/merkle-mountain-range/primitives/src/lib.rs index 7b562656a1e04..c556583a9dd1a 100644 --- a/frame/merkle-mountain-range/primitives/src/lib.rs +++ b/frame/merkle-mountain-range/primitives/src/lib.rs @@ -21,7 +21,7 @@ #![warn(missing_docs)] use frame_support::RuntimeDebug; -use sp_runtime::traits::{self, Saturating, One}; +use sp_runtime::traits::{self, One, Saturating}; use sp_std::fmt; #[cfg(not(feature = "std"))] use sp_std::prelude::Vec; @@ -55,16 +55,10 @@ impl LeafDataProvider for () { /// current block hash is not available (since the block is not finished yet), /// we use the `parent_hash` here along with parent block number. 
impl LeafDataProvider for frame_system::Pallet { - type LeafData = ( - ::BlockNumber, - ::Hash - ); + type LeafData = (::BlockNumber, ::Hash); fn leaf_data() -> Self::LeafData { - ( - Self::block_number().saturating_sub(One::one()), - Self::parent_hash() - ) + (Self::block_number().saturating_sub(One::one()), Self::parent_hash()) } } @@ -130,7 +124,8 @@ mod encoding { fn encode_to(&self, dest: &mut T) { match self { Self::Data(l) => l.using_encoded( - |data| Either::<&[u8], &H::Output>::Left(data).encode_to(dest), false + |data| Either::<&[u8], &H::Output>::Left(data).encode_to(dest), + false, ), Self::Hash(h) => Either::<&[u8], &H::Output>::Right(h).encode_to(dest), } @@ -258,7 +253,8 @@ macro_rules! impl_leaf_data_for_tuple { /// Test functions implementation for `Compact, ...)>` #[cfg(test)] -impl Compact, DataOrHash)> where +impl Compact, DataOrHash)> +where H: traits::Hash, A: FullLeaf, B: FullLeaf, @@ -346,7 +342,7 @@ pub struct OpaqueLeaf( /// /// NOTE it DOES NOT include length prefix (like `Vec` encoding would). 
#[cfg_attr(feature = "std", serde(with = "sp_core::bytes"))] - pub Vec + pub Vec, ); impl OpaqueLeaf { @@ -474,25 +470,21 @@ mod tests { ]; // when - let encoded = cases - .iter() - .map(codec::Encode::encode) - .collect::>(); + let encoded = cases.iter().map(codec::Encode::encode).collect::>(); - let decoded = encoded - .iter() - .map(|x| Test::decode(&mut &**x)) - .collect::>(); + let decoded = encoded.iter().map(|x| Test::decode(&mut &**x)).collect::>(); // then - assert_eq!(decoded, cases.into_iter().map(Result::<_, codec::Error>::Ok).collect::>()); + assert_eq!( + decoded, + cases.into_iter().map(Result::<_, codec::Error>::Ok).collect::>() + ); // check encoding correctness assert_eq!(&encoded[0], &hex_literal::hex!("00343048656c6c6f20576f726c6421")); assert_eq!( encoded[1].as_slice(), - hex_literal::hex!( - "01c3e7ba6b511162fead58f2c8b5764ce869ed1118011ac37392522ed16720bbcd" - ).as_ref() + hex_literal::hex!("01c3e7ba6b511162fead58f2c8b5764ce869ed1118011ac37392522ed16720bbcd") + .as_ref() ); } @@ -519,10 +511,7 @@ mod tests { // when let c: TestCompact = Compact::new((a.clone(), b.clone())); - let d: TestCompact = Compact::new(( - Test::Hash(a.hash()), - Test::Hash(b.hash()), - )); + let d: TestCompact = Compact::new((Test::Hash(a.hash()), Test::Hash(b.hash()))); // then assert_eq!(c.hash(), d.hash()); @@ -535,35 +524,28 @@ mod tests { let b = Test::Data("".into()); let c: TestCompact = Compact::new((a.clone(), b.clone())); - let d: TestCompact = Compact::new(( - Test::Hash(a.hash()), - Test::Hash(b.hash()), - )); + let d: TestCompact = Compact::new((Test::Hash(a.hash()), Test::Hash(b.hash()))); let cases = vec![c, d.clone()]; // when - let encoded_compact = cases - .iter() - .map(|c| c.using_encoded(|x| x.to_vec(), true)) - .collect::>(); + let encoded_compact = + cases.iter().map(|c| c.using_encoded(|x| x.to_vec(), true)).collect::>(); - let encoded = cases - .iter() - .map(|c| c.using_encoded(|x| x.to_vec(), false)) - .collect::>(); + let encoded = + 
cases.iter().map(|c| c.using_encoded(|x| x.to_vec(), false)).collect::>(); let decoded_compact = encoded_compact .iter() .map(|x| TestCompact::decode(&mut &**x)) .collect::>(); - let decoded = encoded - .iter() - .map(|x| TestCompact::decode(&mut &**x)) - .collect::>(); + let decoded = encoded.iter().map(|x| TestCompact::decode(&mut &**x)).collect::>(); // then - assert_eq!(decoded, cases.into_iter().map(Result::<_, codec::Error>::Ok).collect::>()); + assert_eq!( + decoded, + cases.into_iter().map(Result::<_, codec::Error>::Ok).collect::>() + ); assert_eq!(decoded_compact, vec![Ok(d.clone()), Ok(d.clone())]); } @@ -575,10 +557,7 @@ mod tests { let b = Test::Data("".into()); let c: TestCompact = Compact::new((a.clone(), b.clone())); - let d: TestCompact = Compact::new(( - Test::Hash(a.hash()), - Test::Hash(b.hash()), - )); + let d: TestCompact = Compact::new((Test::Hash(a.hash()), Test::Hash(b.hash()))); let cases = vec![c, d.clone()]; let encoded_compact = cases @@ -587,16 +566,10 @@ mod tests { .map(OpaqueLeaf::from_encoded_leaf) .collect::>(); - let opaque = cases - .iter() - .map(OpaqueLeaf::from_leaf) - .collect::>(); + let opaque = cases.iter().map(OpaqueLeaf::from_leaf).collect::>(); // then - assert_eq!( - encoded_compact, - opaque, - ); + assert_eq!(encoded_compact, opaque,); } #[test] @@ -610,10 +583,7 @@ mod tests { let case3 = a.encode().encode(); // when - let encoded = vec![&case1, &case2] - .into_iter() - .map(|x| x.encode()) - .collect::>(); + let encoded = vec![&case1, &case2].into_iter().map(|x| x.encode()).collect::>(); let decoded = vec![&*encoded[0], &*encoded[1], &*case3] .into_iter() .map(|x| EncodableOpaqueLeaf::decode(&mut &*x)) diff --git a/frame/merkle-mountain-range/rpc/src/lib.rs b/frame/merkle-mountain-range/rpc/src/lib.rs index fb46fc6280b8a..4719893778f6a 100644 --- a/frame/merkle-mountain-range/rpc/src/lib.rs +++ b/frame/merkle-mountain-range/rpc/src/lib.rs @@ -26,14 +26,11 @@ use jsonrpc_core::{Error, ErrorCode, Result}; use 
jsonrpc_derive::rpc; use serde::{Deserialize, Serialize}; +use pallet_mmr_primitives::{Error as MmrError, Proof}; use sp_api::ProvideRuntimeApi; use sp_blockchain::HeaderBackend; use sp_core::Bytes; -use sp_runtime::{ - generic::BlockId, - traits::{Block as BlockT}, -}; -use pallet_mmr_primitives::{Error as MmrError, Proof}; +use sp_runtime::{generic::BlockId, traits::Block as BlockT}; pub use pallet_mmr_primitives::MmrApi as MmrRuntimeApi; @@ -51,19 +48,12 @@ pub struct LeafProof { impl LeafProof { /// Create new `LeafProof` from given concrete `leaf` and `proof`. - pub fn new( - block_hash: BlockHash, - leaf: Leaf, - proof: Proof, - ) -> Self where + pub fn new(block_hash: BlockHash, leaf: Leaf, proof: Proof) -> Self + where Leaf: Encode, MmrHash: Encode, { - Self { - block_hash, - leaf: Bytes(leaf.encode()), - proof: Bytes(proof.encode()), - } + Self { block_hash, leaf: Bytes(leaf.encode()), proof: Bytes(proof.encode()) } } } @@ -95,21 +85,15 @@ pub struct Mmr { impl Mmr { /// Create new `Mmr` with the given reference to the client. pub fn new(client: Arc) -> Self { - Self { - client, - _marker: Default::default(), - } + Self { client, _marker: Default::default() } } } -impl MmrApi<::Hash,> for Mmr +impl MmrApi<::Hash> for Mmr where Block: BlockT, C: Send + Sync + 'static + ProvideRuntimeApi + HeaderBackend, - C::Api: MmrRuntimeApi< - Block, - MmrHash, - >, + C::Api: MmrRuntimeApi, MmrHash: Codec + Send + Sync + 'static, { fn generate_proof( @@ -120,8 +104,7 @@ where let api = self.client.runtime_api(); let block_hash = at.unwrap_or_else(|| // If the block hash is not supplied assume the best block. 
- self.client.info().best_hash - ); + self.client.info().best_hash); let (leaf, proof) = api .generate_proof_with_context( @@ -202,11 +185,14 @@ mod tests { let expected = LeafProof { block_hash: H256::repeat_byte(0), leaf: Bytes(vec![1_u8, 2, 3, 4].encode()), - proof: Bytes(Proof { - leaf_index: 1, - leaf_count: 9, - items: vec![H256::repeat_byte(1), H256::repeat_byte(2)], - }.encode()), + proof: Bytes( + Proof { + leaf_index: 1, + leaf_count: 9, + items: vec![H256::repeat_byte(1), H256::repeat_byte(2)], + } + .encode(), + ), }; // when @@ -218,6 +204,5 @@ mod tests { // then assert_eq!(actual, expected); - } } diff --git a/frame/merkle-mountain-range/src/benchmarking.rs b/frame/merkle-mountain-range/src/benchmarking.rs index af7531a00bdc4..97a880b222ec7 100644 --- a/frame/merkle-mountain-range/src/benchmarking.rs +++ b/frame/merkle-mountain-range/src/benchmarking.rs @@ -20,8 +20,8 @@ #![cfg_attr(not(feature = "std"), no_std)] use crate::*; -use frame_support::traits::OnInitialize; use frame_benchmarking::{benchmarks_instance_pallet, impl_benchmark_test_suite}; +use frame_support::traits::OnInitialize; benchmarks_instance_pallet! { on_initialize { @@ -37,8 +37,4 @@ benchmarks_instance_pallet! { } } -impl_benchmark_test_suite!( - Pallet, - crate::tests::new_test_ext(), - crate::mock::Test, -); +impl_benchmark_test_suite!(Pallet, crate::tests::new_test_ext(), crate::mock::Test,); diff --git a/frame/merkle-mountain-range/src/default_weights.rs b/frame/merkle-mountain-range/src/default_weights.rs index 98bb404e3f3a1..6308975ce7d22 100644 --- a/frame/merkle-mountain-range/src/default_weights.rs +++ b/frame/merkle-mountain-range/src/default_weights.rs @@ -19,7 +19,8 @@ //! This file was not auto-generated. 
use frame_support::weights::{ - Weight, constants::{WEIGHT_PER_NANOS, RocksDbWeight as DbWeight}, + constants::{RocksDbWeight as DbWeight, WEIGHT_PER_NANOS}, + Weight, }; impl crate::WeightInfo for () { @@ -34,9 +35,6 @@ impl crate::WeightInfo for () { leaf_weight .saturating_add(hash_weight) .saturating_add(hook_weight) - .saturating_add(DbWeight::get().reads_writes( - 2 + peaks, - 2 + peaks, - )) + .saturating_add(DbWeight::get().reads_writes(2 + peaks, 2 + peaks)) } } diff --git a/frame/merkle-mountain-range/src/lib.rs b/frame/merkle-mountain-range/src/lib.rs index 764acd6a02b5f..bcb6e62d4420f 100644 --- a/frame/merkle-mountain-range/src/lib.rs +++ b/frame/merkle-mountain-range/src/lib.rs @@ -61,17 +61,17 @@ use codec::Encode; use frame_support::weights::Weight; use sp_runtime::traits; -mod default_weights; -mod mmr; #[cfg(any(feature = "runtime-benchmarks", test))] mod benchmarking; +mod default_weights; +mod mmr; #[cfg(test)] mod mock; #[cfg(test)] mod tests; -pub use pallet_mmr_primitives as primitives; pub use pallet::*; +pub use pallet_mmr_primitives as primitives; pub trait WeightInfo { fn on_initialize(peaks: u64) -> Weight; @@ -79,9 +79,9 @@ pub trait WeightInfo { #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -116,9 +116,17 @@ pub mod pallet { /// /// This type is actually going to be stored in the MMR. /// Required to be provided again, to satisfy trait bounds for storage items. 
- type Hash: traits::Member + traits::MaybeSerializeDeserialize + sp_std::fmt::Debug - + sp_std::hash::Hash + AsRef<[u8]> + AsMut<[u8]> + Copy + Default + codec::Codec - + codec::EncodeLike + scale_info::TypeInfo; + type Hash: traits::Member + + traits::MaybeSerializeDeserialize + + sp_std::fmt::Debug + + sp_std::hash::Hash + + AsRef<[u8]> + + AsMut<[u8]> + + Copy + + Default + + codec::Codec + + codec::EncodeLike + + scale_info::TypeInfo; /// Data stored in the leaf nodes. /// @@ -147,7 +155,8 @@ pub mod pallet { /// Latest MMR Root hash. #[pallet::storage] #[pallet::getter(fn mmr_root_hash)] - pub type RootHash, I: 'static = ()> = StorageValue<_, >::Hash, ValueQuery>; + pub type RootHash, I: 'static = ()> = + StorageValue<_, >::Hash, ValueQuery>; /// Current size of the MMR (number of leaves). #[pallet::storage] @@ -160,13 +169,8 @@ pub mod pallet { /// are pruned and only stored in the Offchain DB. #[pallet::storage] #[pallet::getter(fn mmr_peak)] - pub type Nodes, I: 'static = ()> = StorageMap< - _, - Identity, - u64, - >::Hash, - OptionQuery - >; + pub type Nodes, I: 'static = ()> = + StorageMap<_, Identity, u64, >::Hash, OptionQuery>; #[pallet::hooks] impl, I: 'static> Hooks> for Pallet { @@ -211,7 +215,8 @@ pub fn verify_leaf_proof( root: H::Output, leaf: mmr::Node, proof: primitives::Proof, -) -> Result<(), primitives::Error> where +) -> Result<(), primitives::Error> +where H: traits::Hash, L: primitives::FullLeaf, { @@ -234,10 +239,9 @@ impl, I: 'static> Pallet { /// (Offchain Worker or Runtime API call), since it requires /// all the leaves to be present. /// It may return an error or panic if used incorrectly. 
- pub fn generate_proof(leaf_index: u64) -> Result< - (LeafOf, primitives::Proof<>::Hash>), - primitives::Error, - > { + pub fn generate_proof( + leaf_index: u64, + ) -> Result<(LeafOf, primitives::Proof<>::Hash>), primitives::Error> { let mmr: ModuleMmr = mmr::Mmr::new(Self::mmr_leaves()); mmr.generate_proof(leaf_index) } @@ -252,13 +256,12 @@ impl, I: 'static> Pallet { leaf: LeafOf, proof: primitives::Proof<>::Hash>, ) -> Result<(), primitives::Error> { - if proof.leaf_count > Self::mmr_leaves() - || proof.leaf_count == 0 - || proof.items.len() as u32 > mmr::utils::NodesUtils::new(proof.leaf_count).depth() + if proof.leaf_count > Self::mmr_leaves() || + proof.leaf_count == 0 || + proof.items.len() as u32 > mmr::utils::NodesUtils::new(proof.leaf_count).depth() { - return Err(primitives::Error::Verify.log_debug( - "The proof has incorrect number of leaves or proof items." - )); + return Err(primitives::Error::Verify + .log_debug("The proof has incorrect number of leaves or proof items.")) } let mmr: ModuleMmr = mmr::Mmr::new(proof.leaf_count); diff --git a/frame/merkle-mountain-range/src/mmr/mmr.rs b/frame/merkle-mountain-range/src/mmr/mmr.rs index 53b76ba8000a4..d5036e58f432e 100644 --- a/frame/merkle-mountain-range/src/mmr/mmr.rs +++ b/frame/merkle-mountain-range/src/mmr/mmr.rs @@ -16,13 +16,13 @@ // limitations under the License. 
use crate::{ - Config, HashingOf, mmr::{ - Node, NodeOf, Hasher, - storage::{Storage, OffchainStorage, RuntimeStorage}, + storage::{OffchainStorage, RuntimeStorage, Storage}, utils::NodesUtils, + Hasher, Node, NodeOf, }, primitives::{self, Error}, + Config, HashingOf, }; #[cfg(not(feature = "std"))] use sp_std::vec; @@ -32,45 +32,39 @@ pub fn verify_leaf_proof( root: H::Output, leaf: Node, proof: primitives::Proof, -) -> Result where +) -> Result +where H: sp_runtime::traits::Hash, L: primitives::FullLeaf, { let size = NodesUtils::new(proof.leaf_count).size(); let leaf_position = mmr_lib::leaf_index_to_pos(proof.leaf_index); - let p = mmr_lib::MerkleProof::< - Node, - Hasher, - >::new( + let p = mmr_lib::MerkleProof::, Hasher>::new( size, proof.items.into_iter().map(Node::Hash).collect(), ); - p.verify( - Node::Hash(root), - vec![(leaf_position, leaf)], - ).map_err(|e| Error::Verify.log_debug(e)) + p.verify(Node::Hash(root), vec![(leaf_position, leaf)]) + .map_err(|e| Error::Verify.log_debug(e)) } /// A wrapper around a MMR library to expose limited functionality. /// /// Available functions depend on the storage kind ([Runtime](crate::mmr::storage::RuntimeStorage) /// vs [Off-chain](crate::mmr::storage::OffchainStorage)). -pub struct Mmr where +pub struct Mmr +where T: Config, I: 'static, L: primitives::FullLeaf, Storage: mmr_lib::MMRStore>, { - mmr: mmr_lib::MMR< - NodeOf, - Hasher, L>, - Storage - >, + mmr: mmr_lib::MMR, Hasher, L>, Storage>, leaves: u64, } -impl Mmr where +impl Mmr +where T: Config, I: 'static, L: primitives::FullLeaf, @@ -79,10 +73,7 @@ impl Mmr where /// Create a pointer to an existing MMR with given number of leaves. pub fn new(leaves: u64) -> Self { let size = NodesUtils::new(leaves).size(); - Self { - mmr: mmr_lib::MMR::new(size, Default::default()), - leaves, - } + Self { mmr: mmr_lib::MMR::new(size, Default::default()), leaves } } /// Verify proof of a single leaf. 
@@ -91,19 +82,14 @@ impl Mmr where leaf: L, proof: primitives::Proof<>::Hash>, ) -> Result { - let p = mmr_lib::MerkleProof::< - NodeOf, - Hasher, L>, - >::new( + let p = mmr_lib::MerkleProof::, Hasher, L>>::new( self.mmr.mmr_size(), proof.items.into_iter().map(Node::Hash).collect(), ); let position = mmr_lib::leaf_index_to_pos(proof.leaf_index); let root = self.mmr.get_root().map_err(|e| Error::GetRoot.log_error(e))?; - p.verify( - root, - vec![(position, Node::Data(leaf))], - ).map_err(|e| Error::Verify.log_debug(e)) + p.verify(root, vec![(position, Node::Data(leaf))]) + .map_err(|e| Error::Verify.log_debug(e)) } /// Return the internal size of the MMR (number of nodes). @@ -114,19 +100,18 @@ impl Mmr where } /// Runtime specific MMR functions. -impl Mmr where +impl Mmr +where T: Config, I: 'static, L: primitives::FullLeaf, { - /// Push another item to the MMR. /// /// Returns element position (index) in the MMR. pub fn push(&mut self, leaf: L) -> Option { - let position = self.mmr.push(Node::Data(leaf)) - .map_err(|e| Error::Push.log_error(e)) - .ok()?; + let position = + self.mmr.push(Node::Data(leaf)).map_err(|e| Error::Push.log_error(e)).ok()?; self.leaves += 1; @@ -143,7 +128,8 @@ impl Mmr where } /// Off-chain specific MMR functions. -impl Mmr where +impl Mmr +where T: Config, I: 'static, L: primitives::FullLeaf + codec::Decode, @@ -152,10 +138,10 @@ impl Mmr where /// /// Proof generation requires all the nodes (or their hashes) to be available in the storage. /// (i.e. you can't run the function in the pruned storage). 
- pub fn generate_proof(&self, leaf_index: u64) -> Result< - (L, primitives::Proof<>::Hash>), - Error - > { + pub fn generate_proof( + &self, + leaf_index: u64, + ) -> Result<(L, primitives::Proof<>::Hash>), Error> { let position = mmr_lib::leaf_index_to_pos(leaf_index); let store = >::default(); let leaf = match mmr_lib::MMRStore::get_elem(&store, position) { @@ -163,7 +149,8 @@ impl Mmr where e => return Err(Error::LeafNotFound.log_debug(e)), }; let leaf_count = self.leaves; - self.mmr.gen_proof(vec![position]) + self.mmr + .gen_proof(vec![position]) .map_err(|e| Error::GenerateProof.log_error(e)) .map(|p| primitives::Proof { leaf_index, @@ -173,4 +160,3 @@ impl Mmr where .map(|p| (leaf, p)) } } - diff --git a/frame/merkle-mountain-range/src/mmr/mod.rs b/frame/merkle-mountain-range/src/mmr/mod.rs index e705b247067e5..ec2dfe245bd41 100644 --- a/frame/merkle-mountain-range/src/mmr/mod.rs +++ b/frame/merkle-mountain-range/src/mmr/mod.rs @@ -15,14 +15,14 @@ // See the License for the specific language governing permissions and // limitations under the License. +mod mmr; pub mod storage; pub mod utils; -mod mmr; use crate::primitives::FullLeaf; use sp_runtime::traits; -pub use self::mmr::{Mmr, verify_leaf_proof}; +pub use self::mmr::{verify_leaf_proof, Mmr}; /// Node type for runtime `T`. pub type NodeOf = Node<>::Hashing, L>; diff --git a/frame/merkle-mountain-range/src/mmr/storage.rs b/frame/merkle-mountain-range/src/mmr/storage.rs index 65fe19556630c..09e24017816ec 100644 --- a/frame/merkle-mountain-range/src/mmr/storage.rs +++ b/frame/merkle-mountain-range/src/mmr/storage.rs @@ -21,8 +21,10 @@ use codec::Encode; #[cfg(not(feature = "std"))] use sp_std::prelude::Vec; -use crate::mmr::{NodeOf, Node}; -use crate::{NumberOfLeaves, Nodes, Pallet, Config, primitives}; +use crate::{ + mmr::{Node, NodeOf}, + primitives, Config, Nodes, NumberOfLeaves, Pallet, +}; /// A marker type for runtime-specific storage implementation. 
/// @@ -44,9 +46,7 @@ pub struct OffchainStorage; /// /// There are two different implementations depending on the use case. /// See docs for [RuntimeStorage] and [OffchainStorage]. -pub struct Storage( - sp_std::marker::PhantomData<(StorageType, T, I, L)> -); +pub struct Storage(sp_std::marker::PhantomData<(StorageType, T, I, L)>); impl Default for Storage { fn default() -> Self { @@ -54,7 +54,8 @@ impl Default for Storage { } } -impl mmr_lib::MMRStore> for Storage where +impl mmr_lib::MMRStore> for Storage +where T: Config, I: 'static, L: primitives::FullLeaf + codec::Decode, @@ -62,32 +63,30 @@ impl mmr_lib::MMRStore> for Storage mmr_lib::Result>> { let key = Pallet::::offchain_key(pos); // Retrieve the element from Off-chain DB. - Ok(sp_io::offchain - ::local_storage_get(sp_core::offchain::StorageKind::PERSISTENT, &key) + Ok(sp_io::offchain::local_storage_get(sp_core::offchain::StorageKind::PERSISTENT, &key) .and_then(|v| codec::Decode::decode(&mut &*v).ok())) } fn append(&mut self, _: u64, _: Vec>) -> mmr_lib::Result<()> { panic!("MMR must not be altered in the off-chain context.") - } + } } -impl mmr_lib::MMRStore> for Storage where +impl mmr_lib::MMRStore> for Storage +where T: Config, I: 'static, L: primitives::FullLeaf, { fn get_elem(&self, pos: u64) -> mmr_lib::Result>> { - Ok(>::get(pos) - .map(Node::Hash) - ) + Ok(>::get(pos).map(Node::Hash)) } fn append(&mut self, pos: u64, elems: Vec>) -> mmr_lib::Result<()> { let mut leaves = crate::NumberOfLeaves::::get(); let mut size = crate::mmr::utils::NodesUtils::new(leaves).size(); if pos != size { - return Err(mmr_lib::Error::InconsistentStore); + return Err(mmr_lib::Error::InconsistentStore) } for elem in elems { diff --git a/frame/merkle-mountain-range/src/mmr/utils.rs b/frame/merkle-mountain-range/src/mmr/utils.rs index 34ae6e1a3c78a..4f103fa3b8c0f 100644 --- a/frame/merkle-mountain-range/src/mmr/utils.rs +++ b/frame/merkle-mountain-range/src/mmr/utils.rs @@ -49,9 +49,7 @@ impl NodesUtils { return 0 } - 64 
- self.no_of_leaves - .next_power_of_two() - .leading_zeros() + 64 - self.no_of_leaves.next_power_of_two().leading_zeros() } } @@ -123,9 +121,6 @@ mod tests { actual_sizes.push(mmr.size()); }) } - assert_eq!( - sizes[1..], - actual_sizes[..], - ); + assert_eq!(sizes[1..], actual_sizes[..],); } } diff --git a/frame/merkle-mountain-range/src/mock.rs b/frame/merkle-mountain-range/src/mock.rs index cfd8212e69847..4a6b224b051b3 100644 --- a/frame/merkle-mountain-range/src/mock.rs +++ b/frame/merkle-mountain-range/src/mock.rs @@ -15,21 +15,18 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::*; use crate as pallet_mmr; +use crate::*; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use frame_support::parameter_types; -use pallet_mmr_primitives::{LeafDataProvider, Compact}; +use pallet_mmr_primitives::{Compact, LeafDataProvider}; use sp_core::H256; use sp_runtime::{ testing::Header, - traits::{ - BlakeTwo256, Keccak256, IdentityLookup, - }, + traits::{BlakeTwo256, IdentityLookup, Keccak256}, }; -use sp_std::cell::RefCell; -use sp_std::prelude::*; +use sp_std::{cell::RefCell, prelude::*}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -92,10 +89,7 @@ pub struct LeafData { impl LeafData { pub fn new(a: u64) -> Self { - Self { - a, - b: Default::default(), - } + Self { a, b: Default::default() } } } diff --git a/frame/merkle-mountain-range/src/tests.rs b/frame/merkle-mountain-range/src/tests.rs index 5640468ac93a5..50512e9286951 100644 --- a/frame/merkle-mountain-range/src/tests.rs +++ b/frame/merkle-mountain-range/src/tests.rs @@ -15,18 +15,14 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::*; -use crate::mock::*; +use crate::{mock::*, *}; use frame_support::traits::OnInitialize; +use pallet_mmr_primitives::{Compact, Proof}; use sp_core::{ + offchain::{testing::TestOffchainExt, OffchainDbExt, OffchainWorkerExt}, H256, - offchain::{ - testing::TestOffchainExt, - OffchainWorkerExt, OffchainDbExt, - }, }; -use pallet_mmr_primitives::{Proof, Compact}; pub(crate) fn new_test_ext() -> sp_io::TestExternalities { frame_system::GenesisConfig::default().build_storage::().unwrap().into() @@ -58,13 +54,12 @@ pub(crate) fn hex(s: &str) -> H256 { type BlockNumber = ::BlockNumber; -fn decode_node(v: Vec) -> mmr::Node< - ::Hashing, - ((BlockNumber, H256), LeafData), -> { +fn decode_node( + v: Vec, +) -> mmr::Node<::Hashing, ((BlockNumber, H256), LeafData)> { use crate::primitives::DataOrHash; - type A = DataOrHash::<::Hashing, (BlockNumber, H256)>; - type B = DataOrHash::<::Hashing, LeafData>; + type A = DataOrHash<::Hashing, (BlockNumber, H256)>; + type B = DataOrHash<::Hashing, LeafData>; type Node = mmr::Node<::Hashing, (A, B)>; let tuple: Node = codec::Decode::decode(&mut &v[..]).unwrap(); @@ -89,7 +84,9 @@ fn should_start_empty() { // given assert_eq!( crate::RootHash::::get(), - "0000000000000000000000000000000000000000000000000000000000000000".parse().unwrap() + "0000000000000000000000000000000000000000000000000000000000000000" + .parse() + .unwrap() ); assert_eq!(crate::NumberOfLeaves::::get(), 0); assert_eq!(crate::Nodes::::get(0), None); @@ -99,8 +96,10 @@ fn should_start_empty() { // then assert_eq!(crate::NumberOfLeaves::::get(), 1); - assert_eq!(crate::Nodes::::get(0), - Some(hex("4320435e8c3318562dba60116bdbcc0b82ffcecb9bb39aae3300cfda3ad0b8b0"))); + assert_eq!( + crate::Nodes::::get(0), + Some(hex("4320435e8c3318562dba60116bdbcc0b82ffcecb9bb39aae3300cfda3ad0b8b0")) + ); assert_eq!( crate::RootHash::::get(), hex("4320435e8c3318562dba60116bdbcc0b82ffcecb9bb39aae3300cfda3ad0b8b0") @@ -120,35 +119,41 @@ fn 
should_append_to_mmr_when_on_initialize_is_called() { // then assert_eq!(crate::NumberOfLeaves::::get(), 2); - assert_eq!(( - crate::Nodes::::get(0), - crate::Nodes::::get(1), - crate::Nodes::::get(2), - crate::Nodes::::get(3), - crate::RootHash::::get(), - ), ( - Some(hex("4320435e8c3318562dba60116bdbcc0b82ffcecb9bb39aae3300cfda3ad0b8b0")), - Some(hex("ad4cbc033833612ccd4626d5f023b9dfc50a35e838514dd1f3c86f8506728705")), - Some(hex("672c04a9cd05a644789d769daa552d35d8de7c33129f8a7cbf49e595234c4854")), - None, - hex("672c04a9cd05a644789d769daa552d35d8de7c33129f8a7cbf49e595234c4854"), - )); + assert_eq!( + ( + crate::Nodes::::get(0), + crate::Nodes::::get(1), + crate::Nodes::::get(2), + crate::Nodes::::get(3), + crate::RootHash::::get(), + ), + ( + Some(hex("4320435e8c3318562dba60116bdbcc0b82ffcecb9bb39aae3300cfda3ad0b8b0")), + Some(hex("ad4cbc033833612ccd4626d5f023b9dfc50a35e838514dd1f3c86f8506728705")), + Some(hex("672c04a9cd05a644789d769daa552d35d8de7c33129f8a7cbf49e595234c4854")), + None, + hex("672c04a9cd05a644789d769daa552d35d8de7c33129f8a7cbf49e595234c4854"), + ) + ); }); // make sure the leaves end up in the offchain DB ext.persist_offchain_overlay(); let offchain_db = ext.offchain_db(); - assert_eq!(offchain_db.get(&MMR::offchain_key(0)).map(decode_node), Some(mmr::Node::Data(( - (0, H256::repeat_byte(1)), - LeafData::new(1), - )))); - assert_eq!(offchain_db.get(&MMR::offchain_key(1)).map(decode_node), Some(mmr::Node::Data(( - (1, H256::repeat_byte(2)), - LeafData::new(2), - )))); - assert_eq!(offchain_db.get(&MMR::offchain_key(2)).map(decode_node), Some(mmr::Node::Hash( - hex("672c04a9cd05a644789d769daa552d35d8de7c33129f8a7cbf49e595234c4854") - ))); + assert_eq!( + offchain_db.get(&MMR::offchain_key(0)).map(decode_node), + Some(mmr::Node::Data(((0, H256::repeat_byte(1)), LeafData::new(1),))) + ); + assert_eq!( + offchain_db.get(&MMR::offchain_key(1)).map(decode_node), + Some(mmr::Node::Data(((1, H256::repeat_byte(2)), LeafData::new(2),))) + ); + assert_eq!( 
+ offchain_db.get(&MMR::offchain_key(2)).map(decode_node), + Some(mmr::Node::Hash(hex( + "672c04a9cd05a644789d769daa552d35d8de7c33129f8a7cbf49e595234c4854" + ))) + ); assert_eq!(offchain_db.get(&MMR::offchain_key(3)), None); } @@ -161,15 +166,18 @@ fn should_construct_larger_mmr_correctly() { // then assert_eq!(crate::NumberOfLeaves::::get(), 7); - assert_eq!(( - crate::Nodes::::get(0), - crate::Nodes::::get(10), - crate::RootHash::::get(), - ), ( - Some(hex("4320435e8c3318562dba60116bdbcc0b82ffcecb9bb39aae3300cfda3ad0b8b0")), - Some(hex("611c2174c6164952a66d985cfe1ec1a623794393e3acff96b136d198f37a648c")), - hex("e45e25259f7930626431347fa4dd9aae7ac83b4966126d425ca70ab343709d2c"), - )); + assert_eq!( + ( + crate::Nodes::::get(0), + crate::Nodes::::get(10), + crate::RootHash::::get(), + ), + ( + Some(hex("4320435e8c3318562dba60116bdbcc0b82ffcecb9bb39aae3300cfda3ad0b8b0")), + Some(hex("611c2174c6164952a66d985cfe1ec1a623794393e3acff96b136d198f37a648c")), + hex("e45e25259f7930626431347fa4dd9aae7ac83b4966126d425ca70ab343709d2c"), + ) + ); }); } @@ -192,41 +200,50 @@ fn should_generate_proofs_correctly() { .collect::>(); // then - assert_eq!(proofs[0], (Compact::new(( - (0, H256::repeat_byte(1)).into(), - LeafData::new(1).into(), - )), Proof { - leaf_index: 0, - leaf_count: 7, - items: vec![ - hex("ad4cbc033833612ccd4626d5f023b9dfc50a35e838514dd1f3c86f8506728705"), - hex("cb24f4614ad5b2a5430344c99545b421d9af83c46fd632d70a332200884b4d46"), - hex("dca421199bdcc55bb773c6b6967e8d16675de69062b52285ca63685241fdf626"), - ], - })); - assert_eq!(proofs[4], (Compact::new(( - (4, H256::repeat_byte(5)).into(), - LeafData::new(5).into(), - )), Proof { - leaf_index: 4, - leaf_count: 7, - items: vec![ - hex("ae88a0825da50e953e7a359c55fe13c8015e48d03d301b8bdfc9193874da9252"), - hex("8ed25570209d8f753d02df07c1884ddb36a3d9d4770e4608b188322151c657fe"), - hex("611c2174c6164952a66d985cfe1ec1a623794393e3acff96b136d198f37a648c"), - ], - })); - assert_eq!(proofs[6], (Compact::new(( - (6, 
H256::repeat_byte(7)).into(), - LeafData::new(7).into(), - )), Proof { - leaf_index: 6, - leaf_count: 7, - items: vec![ - hex("ae88a0825da50e953e7a359c55fe13c8015e48d03d301b8bdfc9193874da9252"), - hex("7e4316ae2ebf7c3b6821cb3a46ca8b7a4f9351a9b40fcf014bb0a4fd8e8f29da"), - ], - })); + assert_eq!( + proofs[0], + ( + Compact::new(((0, H256::repeat_byte(1)).into(), LeafData::new(1).into(),)), + Proof { + leaf_index: 0, + leaf_count: 7, + items: vec![ + hex("ad4cbc033833612ccd4626d5f023b9dfc50a35e838514dd1f3c86f8506728705"), + hex("cb24f4614ad5b2a5430344c99545b421d9af83c46fd632d70a332200884b4d46"), + hex("dca421199bdcc55bb773c6b6967e8d16675de69062b52285ca63685241fdf626"), + ], + } + ) + ); + assert_eq!( + proofs[4], + ( + Compact::new(((4, H256::repeat_byte(5)).into(), LeafData::new(5).into(),)), + Proof { + leaf_index: 4, + leaf_count: 7, + items: vec![ + hex("ae88a0825da50e953e7a359c55fe13c8015e48d03d301b8bdfc9193874da9252"), + hex("8ed25570209d8f753d02df07c1884ddb36a3d9d4770e4608b188322151c657fe"), + hex("611c2174c6164952a66d985cfe1ec1a623794393e3acff96b136d198f37a648c"), + ], + } + ) + ); + assert_eq!( + proofs[6], + ( + Compact::new(((6, H256::repeat_byte(7)).into(), LeafData::new(7).into(),)), + Proof { + leaf_index: 6, + leaf_count: 7, + items: vec![ + hex("ae88a0825da50e953e7a359c55fe13c8015e48d03d301b8bdfc9193874da9252"), + hex("7e4316ae2ebf7c3b6821cb3a46ca8b7a4f9351a9b40fcf014bb0a4fd8e8f29da"), + ], + } + ) + ); }); } @@ -280,7 +297,10 @@ fn verification_should_be_stateless() { // Verify proof without relying on any on-chain data. 
let leaf = crate::primitives::DataOrHash::Data(leaf); - assert_eq!(crate::verify_leaf_proof::<::Hashing, _>(root, leaf, proof5), Ok(())); + assert_eq!( + crate::verify_leaf_proof::<::Hashing, _>(root, leaf, proof5), + Ok(()) + ); } #[test] diff --git a/frame/multisig/src/benchmarking.rs b/frame/multisig/src/benchmarking.rs index 63a178313addd..393e15292e6bc 100644 --- a/frame/multisig/src/benchmarking.rs +++ b/frame/multisig/src/benchmarking.rs @@ -20,20 +20,18 @@ #![cfg(feature = "runtime-benchmarks")] use super::*; +use core::convert::TryInto; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite}; use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, account, impl_benchmark_test_suite}; use sp_runtime::traits::Bounded; -use core::convert::TryInto; use crate::Pallet as Multisig; const SEED: u32 = 0; -fn setup_multi(s: u32, z: u32) - -> Result<(Vec, Vec), &'static str> -{ +fn setup_multi(s: u32, z: u32) -> Result<(Vec, Vec), &'static str> { let mut signatories: Vec = Vec::new(); - for i in 0 .. s { + for i in 0..s { let signatory = account("signatory", i, SEED); // Give them some balance for a possible deposit let balance = BalanceOf::::max_value(); @@ -298,8 +296,4 @@ benchmarks! { } } -impl_benchmark_test_suite!( - Multisig, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Multisig, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/multisig/src/lib.rs b/frame/multisig/src/lib.rs index 73fe2729eab18..73891c41b2074 100644 --- a/frame/multisig/src/lib.rs +++ b/frame/multisig/src/lib.rs @@ -46,26 +46,34 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] -mod tests; mod benchmarking; +mod tests; pub mod weights; -use sp_std::prelude::*; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; +use frame_support::{ + dispatch::{ + DispatchErrorWithPostInfo, DispatchResult, DispatchResultWithPostInfo, PostDispatchInfo, + }, + ensure, + traits::{Currency, Get, ReservableCurrency}, + weights::{GetDispatchInfo, Weight}, + RuntimeDebug, +}; +use frame_system::{self as system, RawOrigin}; use scale_info::TypeInfo; use sp_io::hashing::blake2_256; -use frame_support::{ensure, RuntimeDebug}; -use frame_support::{traits::{Get, ReservableCurrency, Currency}, - weights::{Weight, GetDispatchInfo}, - dispatch::{DispatchResultWithPostInfo, DispatchResult, DispatchErrorWithPostInfo, PostDispatchInfo}, +use sp_runtime::{ + traits::{Dispatchable, Zero}, + DispatchError, }; -use frame_system::{self as system, RawOrigin}; -use sp_runtime::{DispatchError, traits::{Dispatchable, Zero}}; +use sp_std::prelude::*; pub use weights::WeightInfo; pub use pallet::*; -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; /// Just a bunch of bytes, but they should decode to a valid `Call`. pub type OpaqueCall = Vec; @@ -101,10 +109,10 @@ enum CallOrHash { } #[frame_support::pallet] -pub mod pallet{ +pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::config] pub trait Config: frame_system::Config { @@ -112,8 +120,10 @@ pub mod pallet{ type Event: From> + IsType<::Event>; /// The overarching call type. - type Call: Parameter + Dispatchable - + GetDispatchInfo + From>; + type Call: Parameter + + Dispatchable + + GetDispatchInfo + + From>; /// The currency mechanism. 
type Currency: ReservableCurrency; @@ -157,12 +167,8 @@ pub mod pallet{ >; #[pallet::storage] - pub type Calls = StorageMap< - _, - Identity, - [u8; 32], - (OpaqueCall, T::AccountId, BalanceOf), - >; + pub type Calls = + StorageMap<_, Identity, [u8; 32], (OpaqueCall, T::AccountId, BalanceOf)>; #[pallet::error] pub enum Error { @@ -205,9 +211,15 @@ pub mod pallet{ /// \[approving, timepoint, multisig, call_hash\] MultisigApproval(T::AccountId, Timepoint, T::AccountId, CallHash), /// A multisig operation has been executed. \[approving, timepoint, multisig, call_hash\] - MultisigExecuted(T::AccountId, Timepoint, T::AccountId, CallHash, DispatchResult), + MultisigExecuted( + T::AccountId, + Timepoint, + T::AccountId, + CallHash, + DispatchResult, + ), /// A multisig operation has been cancelled. \[cancelling, timepoint, multisig, call_hash\] - MultisigCancelled(T::AccountId, Timepoint, T::AccountId, CallHash) + MultisigCancelled(T::AccountId, Timepoint, T::AccountId, CallHash), } #[pallet::hooks] @@ -258,21 +270,26 @@ pub mod pallet{ let call_len = call.using_encoded(|c| c.len()); let result = call.dispatch(RawOrigin::Signed(id).into()); - result.map(|post_dispatch_info| post_dispatch_info.actual_weight - .map(|actual_weight| - T::WeightInfo::as_multi_threshold_1(call_len as u32) - .saturating_add(actual_weight) - ).into() - ).map_err(|err| match err.post_info.actual_weight { - Some(actual_weight) => { - let weight_used = T::WeightInfo::as_multi_threshold_1(call_len as u32) - .saturating_add(actual_weight); - let post_info = Some(weight_used).into(); - let error = err.error.into(); - DispatchErrorWithPostInfo { post_info, error } - }, - None => err, - }) + result + .map(|post_dispatch_info| { + post_dispatch_info + .actual_weight + .map(|actual_weight| { + T::WeightInfo::as_multi_threshold_1(call_len as u32) + .saturating_add(actual_weight) + }) + .into() + }) + .map_err(|err| match err.post_info.actual_weight { + Some(actual_weight) => { + let weight_used = 
T::WeightInfo::as_multi_threshold_1(call_len as u32) + .saturating_add(actual_weight); + let post_info = Some(weight_used).into(); + let error = err.error.into(); + DispatchErrorWithPostInfo { post_info, error } + }, + None => err, + }) } /// Register approval for a dispatch to be made from a deterministic composite account if @@ -341,7 +358,14 @@ pub mod pallet{ max_weight: Weight, ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; - Self::operate(who, threshold, other_signatories, maybe_timepoint, CallOrHash::Call(call, store_call), max_weight) + Self::operate( + who, + threshold, + other_signatories, + maybe_timepoint, + CallOrHash::Call(call, store_call), + max_weight, + ) } /// Register approval for a dispatch to be made from a deterministic composite account if @@ -397,7 +421,14 @@ pub mod pallet{ max_weight: Weight, ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; - Self::operate(who, threshold, other_signatories, maybe_timepoint, CallOrHash::Hash(call_hash), max_weight) + Self::operate( + who, + threshold, + other_signatories, + maybe_timepoint, + CallOrHash::Hash(call_hash), + max_weight, + ) } /// Cancel a pre-existing, on-going multisig transaction. Any deposit reserved previously @@ -443,8 +474,7 @@ pub mod pallet{ let id = Self::multi_account_id(&signatories, threshold); - let m = >::get(&id, call_hash) - .ok_or(Error::::NotFound)?; + let m = >::get(&id, call_hash).ok_or(Error::::NotFound)?; ensure!(m.when == timepoint, Error::::WrongTimepoint); ensure!(m.depositor == who, Error::::NotOwner); @@ -492,7 +522,7 @@ impl Pallet { let call_hash = blake2_256(&call); let call_len = call.len(); (call_hash, call_len, Some(call), should_store) - } + }, CallOrHash::Hash(h) => (h, 0, None, false), }; @@ -507,12 +537,16 @@ impl Pallet { // We only bother with the approval if we're below threshold. 
let maybe_pos = m.approvals.binary_search(&who).err().filter(|_| approvals < threshold); // Bump approvals if not yet voted and the vote is needed. - if maybe_pos.is_some() { approvals += 1; } + if maybe_pos.is_some() { + approvals += 1; + } // We only bother fetching/decoding call if we know that we're ready to execute. let maybe_approved_call = if approvals >= threshold { Self::get_call(&call_hash, maybe_call.as_ref().map(|c| c.as_ref())) - } else { None }; + } else { + None + }; if let Some((call, call_len)) = maybe_approved_call { // verify weight @@ -526,21 +560,33 @@ impl Pallet { let result = call.dispatch(RawOrigin::Signed(id.clone()).into()); Self::deposit_event(Event::MultisigExecuted( - who, timepoint, id, call_hash, result.map(|_| ()).map_err(|e| e.error) + who, + timepoint, + id, + call_hash, + result.map(|_| ()).map_err(|e| e.error), )); - Ok(get_result_weight(result).map(|actual_weight| - T::WeightInfo::as_multi_complete( - other_signatories_len as u32, - call_len as u32 - ).saturating_add(actual_weight) - ).into()) + Ok(get_result_weight(result) + .map(|actual_weight| { + T::WeightInfo::as_multi_complete( + other_signatories_len as u32, + call_len as u32, + ) + .saturating_add(actual_weight) + }) + .into()) } else { // We cannot dispatch the call now; either it isn't available, or it is, but we // don't have threshold approvals even with our signature. // Store the call if desired. 
let stored = if let Some(data) = maybe_call.filter(|_| store) { - Self::store_call_and_reserve(who.clone(), &call_hash, data, BalanceOf::::zero())?; + Self::store_call_and_reserve( + who.clone(), + &call_hash, + data, + BalanceOf::::zero(), + )?; true } else { false @@ -563,10 +609,7 @@ impl Pallet { call_len as u32, ) } else { - T::WeightInfo::as_multi_approve( - other_signatories_len as u32, - call_len as u32, - ) + T::WeightInfo::as_multi_approve(other_signatories_len as u32, call_len as u32) }; // Call is not made, so the actual weight does not include call Ok(Some(final_weight).into()) @@ -587,24 +630,22 @@ impl Pallet { false }; - >::insert(&id, call_hash, Multisig { - when: Self::timepoint(), - deposit, - depositor: who.clone(), - approvals: vec![who.clone()], - }); + >::insert( + &id, + call_hash, + Multisig { + when: Self::timepoint(), + deposit, + depositor: who.clone(), + approvals: vec![who.clone()], + }, + ); Self::deposit_event(Event::NewMultisig(who, id, call_hash)); let final_weight = if stored { - T::WeightInfo::as_multi_create_store( - other_signatories_len as u32, - call_len as u32, - ) + T::WeightInfo::as_multi_create_store(other_signatories_len as u32, call_len as u32) } else { - T::WeightInfo::as_multi_create( - other_signatories_len as u32, - call_len as u32, - ) + T::WeightInfo::as_multi_create(other_signatories_len as u32, call_len as u32) }; // Call is not made, so the actual weight does not include call Ok(Some(final_weight).into()) @@ -623,22 +664,27 @@ impl Pallet { other_deposit: BalanceOf, ) -> DispatchResult { ensure!(!Calls::::contains_key(hash), Error::::AlreadyStored); - let deposit = other_deposit + T::DepositBase::get() - + T::DepositFactor::get() * BalanceOf::::from(((data.len() + 31) / 32) as u32); + let deposit = other_deposit + + T::DepositBase::get() + + T::DepositFactor::get() * BalanceOf::::from(((data.len() + 31) / 32) as u32); T::Currency::reserve(&who, deposit)?; Calls::::insert(&hash, (data, who, deposit)); Ok(()) } 
/// Attempt to decode and return the call, provided by the user or from storage. - fn get_call(hash: &[u8; 32], maybe_known: Option<&[u8]>) -> Option<(::Call, usize)> { - maybe_known.map_or_else(|| { - Calls::::get(hash).and_then(|(data, ..)| { - Decode::decode(&mut &data[..]).ok().map(|d| (d, data.len())) - }) - }, |data| { - Decode::decode(&mut &data[..]).ok().map(|d| (d, data.len())) - }) + fn get_call( + hash: &[u8; 32], + maybe_known: Option<&[u8]>, + ) -> Option<(::Call, usize)> { + maybe_known.map_or_else( + || { + Calls::::get(hash).and_then(|(data, ..)| { + Decode::decode(&mut &data[..]).ok().map(|d| (d, data.len())) + }) + }, + |data| Decode::decode(&mut &data[..]).ok().map(|d| (d, data.len())), + ) } /// Attempt to remove a call from storage, returning any deposit on it to the owner. @@ -657,9 +703,10 @@ impl Pallet { } /// Check that signatories is sorted and doesn't contain sender, then insert sender. - fn ensure_sorted_and_insert(other_signatories: Vec, who: T::AccountId) - -> Result, DispatchError> - { + fn ensure_sorted_and_insert( + other_signatories: Vec, + who: T::AccountId, + ) -> Result, DispatchError> { let mut signatories = other_signatories; let mut maybe_last = None; let mut index = 0; diff --git a/frame/multisig/src/tests.rs b/frame/multisig/src/tests.rs index 69f7cb17b0f5a..6dba6f7d4ab5a 100644 --- a/frame/multisig/src/tests.rs +++ b/frame/multisig/src/tests.rs @@ -21,12 +21,13 @@ use super::*; -use frame_support::{ - assert_ok, assert_noop, parameter_types, traits::Filter, -}; -use sp_core::H256; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header}; use crate as pallet_multisig; +use frame_support::{assert_noop, assert_ok, parameter_types, traits::Filter}; +use sp_core::H256; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, +}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -113,14 +114,15 @@ impl Config for Test 
{ type WeightInfo = (); } -use pallet_balances::Call as BalancesCall; -use pallet_balances::Error as BalancesError; +use pallet_balances::{Call as BalancesCall, Error as BalancesError}; pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_balances::GenesisConfig:: { balances: vec![(1, 10), (2, 10), (3, 10), (4, 10), (5, 2)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); let mut ext = sp_io::TestExternalities::new(t); ext.execute_with(|| System::set_block_number(1)); ext @@ -141,11 +143,27 @@ fn multisig_deposit_is_taken_and_returned() { let call = Call::Balances(BalancesCall::transfer(6, 15)); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); - assert_ok!(Multisig::as_multi(Origin::signed(1), 2, vec![2, 3], None, data.clone(), false, 0)); + assert_ok!(Multisig::as_multi( + Origin::signed(1), + 2, + vec![2, 3], + None, + data.clone(), + false, + 0 + )); assert_eq!(Balances::free_balance(1), 2); assert_eq!(Balances::reserved_balance(1), 3); - assert_ok!(Multisig::as_multi(Origin::signed(2), 2, vec![1, 3], Some(now()), data, false, call_weight)); + assert_ok!(Multisig::as_multi( + Origin::signed(2), + 2, + vec![1, 3], + Some(now()), + data, + false, + call_weight + )); assert_eq!(Balances::free_balance(1), 5); assert_eq!(Balances::reserved_balance(1), 0); }); @@ -167,7 +185,14 @@ fn multisig_deposit_is_taken_and_returned_with_call_storage() { assert_eq!(Balances::free_balance(1), 0); assert_eq!(Balances::reserved_balance(1), 5); - assert_ok!(Multisig::approve_as_multi(Origin::signed(2), 2, vec![1, 3], Some(now()), hash, call_weight)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(2), + 2, + vec![1, 3], + Some(now()), + hash, + call_weight + )); assert_eq!(Balances::free_balance(1), 5); assert_eq!(Balances::reserved_balance(1), 0); }); @@ -186,17 +211,39 @@ fn 
multisig_deposit_is_taken_and_returned_with_alt_call_storage() { let data = call.encode(); let hash = blake2_256(&data); - assert_ok!(Multisig::approve_as_multi(Origin::signed(1), 3, vec![2, 3], None, hash.clone(), 0)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(1), + 3, + vec![2, 3], + None, + hash.clone(), + 0 + )); assert_eq!(Balances::free_balance(1), 1); assert_eq!(Balances::reserved_balance(1), 4); - assert_ok!(Multisig::as_multi(Origin::signed(2), 3, vec![1, 3], Some(now()), data, true, 0)); + assert_ok!(Multisig::as_multi( + Origin::signed(2), + 3, + vec![1, 3], + Some(now()), + data, + true, + 0 + )); assert_eq!(Balances::free_balance(2), 3); assert_eq!(Balances::reserved_balance(2), 2); assert_eq!(Balances::free_balance(1), 1); assert_eq!(Balances::reserved_balance(1), 4); - assert_ok!(Multisig::approve_as_multi(Origin::signed(3), 3, vec![1, 2], Some(now()), hash, call_weight)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(3), + 3, + vec![1, 2], + Some(now()), + hash, + call_weight + )); assert_eq!(Balances::free_balance(1), 5); assert_eq!(Balances::reserved_balance(1), 0); assert_eq!(Balances::free_balance(2), 5); @@ -209,13 +256,31 @@ fn cancel_multisig_returns_deposit() { new_test_ext().execute_with(|| { let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); let hash = blake2_256(&call); - assert_ok!(Multisig::approve_as_multi(Origin::signed(1), 3, vec![2, 3], None, hash.clone(), 0)); - assert_ok!(Multisig::approve_as_multi(Origin::signed(2), 3, vec![1, 3], Some(now()), hash.clone(), 0)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(1), + 3, + vec![2, 3], + None, + hash.clone(), + 0 + )); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(2), + 3, + vec![1, 3], + Some(now()), + hash.clone(), + 0 + )); assert_eq!(Balances::free_balance(1), 6); assert_eq!(Balances::reserved_balance(1), 4); - assert_ok!( - Multisig::cancel_as_multi(Origin::signed(1), 3, vec![2, 3], now(), hash.clone()), - ); + 
assert_ok!(Multisig::cancel_as_multi( + Origin::signed(1), + 3, + vec![2, 3], + now(), + hash.clone() + ),); assert_eq!(Balances::free_balance(1), 10); assert_eq!(Balances::reserved_balance(1), 0); }); @@ -233,7 +298,14 @@ fn timepoint_checking_works() { let hash = blake2_256(&call); assert_noop!( - Multisig::approve_as_multi(Origin::signed(2), 2, vec![1, 3], Some(now()), hash.clone(), 0), + Multisig::approve_as_multi( + Origin::signed(2), + 2, + vec![1, 3], + Some(now()), + hash.clone(), + 0 + ), Error::::UnexpectedTimepoint, ); @@ -243,9 +315,17 @@ fn timepoint_checking_works() { Multisig::as_multi(Origin::signed(2), 2, vec![1, 3], None, call.clone(), false, 0), Error::::NoTimepoint, ); - let later = Timepoint { index: 1, .. now() }; + let later = Timepoint { index: 1, ..now() }; assert_noop!( - Multisig::as_multi(Origin::signed(2), 2, vec![1, 3], Some(later), call.clone(), false, 0), + Multisig::as_multi( + Origin::signed(2), + 2, + vec![1, 3], + Some(later), + call.clone(), + false, + 0 + ), Error::::WrongTimepoint, ); }); @@ -266,7 +346,14 @@ fn multisig_2_of_3_works_with_call_storing() { assert_ok!(Multisig::as_multi(Origin::signed(1), 2, vec![2, 3], None, data, true, 0)); assert_eq!(Balances::free_balance(6), 0); - assert_ok!(Multisig::approve_as_multi(Origin::signed(2), 2, vec![1, 3], Some(now()), hash, call_weight)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(2), + 2, + vec![1, 3], + Some(now()), + hash, + call_weight + )); assert_eq!(Balances::free_balance(6), 15); }); } @@ -286,7 +373,15 @@ fn multisig_2_of_3_works() { assert_ok!(Multisig::approve_as_multi(Origin::signed(1), 2, vec![2, 3], None, hash, 0)); assert_eq!(Balances::free_balance(6), 0); - assert_ok!(Multisig::as_multi(Origin::signed(2), 2, vec![1, 3], Some(now()), data, false, call_weight)); + assert_ok!(Multisig::as_multi( + Origin::signed(2), + 2, + vec![1, 3], + Some(now()), + data, + false, + call_weight + )); assert_eq!(Balances::free_balance(6), 15); }); } @@ -303,11 
+398,33 @@ fn multisig_3_of_3_works() { let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); - assert_ok!(Multisig::approve_as_multi(Origin::signed(1), 3, vec![2, 3], None, hash.clone(), 0)); - assert_ok!(Multisig::approve_as_multi(Origin::signed(2), 3, vec![1, 3], Some(now()), hash.clone(), 0)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(1), + 3, + vec![2, 3], + None, + hash.clone(), + 0 + )); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(2), + 3, + vec![1, 3], + Some(now()), + hash.clone(), + 0 + )); assert_eq!(Balances::free_balance(6), 0); - assert_ok!(Multisig::as_multi(Origin::signed(3), 3, vec![1, 2], Some(now()), data, false, call_weight)); + assert_ok!(Multisig::as_multi( + Origin::signed(3), + 3, + vec![1, 2], + Some(now()), + data, + false, + call_weight + )); assert_eq!(Balances::free_balance(6), 15); }); } @@ -317,15 +434,33 @@ fn cancel_multisig_works() { new_test_ext().execute_with(|| { let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); let hash = blake2_256(&call); - assert_ok!(Multisig::approve_as_multi(Origin::signed(1), 3, vec![2, 3], None, hash.clone(), 0)); - assert_ok!(Multisig::approve_as_multi(Origin::signed(2), 3, vec![1, 3], Some(now()), hash.clone(), 0)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(1), + 3, + vec![2, 3], + None, + hash.clone(), + 0 + )); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(2), + 3, + vec![1, 3], + Some(now()), + hash.clone(), + 0 + )); assert_noop!( Multisig::cancel_as_multi(Origin::signed(2), 3, vec![1, 3], now(), hash.clone()), Error::::NotOwner, ); - assert_ok!( - Multisig::cancel_as_multi(Origin::signed(1), 3, vec![2, 3], now(), hash.clone()), - ); + assert_ok!(Multisig::cancel_as_multi( + Origin::signed(1), + 3, + vec![2, 3], + now(), + hash.clone() + ),); }); } @@ -336,14 +471,25 @@ fn cancel_multisig_with_call_storage_works() { let hash = blake2_256(&call); 
assert_ok!(Multisig::as_multi(Origin::signed(1), 3, vec![2, 3], None, call, true, 0)); assert_eq!(Balances::free_balance(1), 4); - assert_ok!(Multisig::approve_as_multi(Origin::signed(2), 3, vec![1, 3], Some(now()), hash.clone(), 0)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(2), + 3, + vec![1, 3], + Some(now()), + hash.clone(), + 0 + )); assert_noop!( Multisig::cancel_as_multi(Origin::signed(2), 3, vec![1, 3], now(), hash.clone()), Error::::NotOwner, ); - assert_ok!( - Multisig::cancel_as_multi(Origin::signed(1), 3, vec![2, 3], now(), hash.clone()), - ); + assert_ok!(Multisig::cancel_as_multi( + Origin::signed(1), + 3, + vec![2, 3], + now(), + hash.clone() + ),); assert_eq!(Balances::free_balance(1), 10); }); } @@ -353,9 +499,24 @@ fn cancel_multisig_with_alt_call_storage_works() { new_test_ext().execute_with(|| { let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); let hash = blake2_256(&call); - assert_ok!(Multisig::approve_as_multi(Origin::signed(1), 3, vec![2, 3], None, hash.clone(), 0)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(1), + 3, + vec![2, 3], + None, + hash.clone(), + 0 + )); assert_eq!(Balances::free_balance(1), 6); - assert_ok!(Multisig::as_multi(Origin::signed(2), 3, vec![1, 3], Some(now()), call, true, 0)); + assert_ok!(Multisig::as_multi( + Origin::signed(2), + 3, + vec![1, 3], + Some(now()), + call, + true, + 0 + )); assert_eq!(Balances::free_balance(2), 8); assert_ok!(Multisig::cancel_as_multi(Origin::signed(1), 3, vec![2, 3], now(), hash)); assert_eq!(Balances::free_balance(1), 10); @@ -374,10 +535,26 @@ fn multisig_2_of_3_as_multi_works() { let call = Call::Balances(BalancesCall::transfer(6, 15)); let call_weight = call.get_dispatch_info().weight; let data = call.encode(); - assert_ok!(Multisig::as_multi(Origin::signed(1), 2, vec![2, 3], None, data.clone(), false, 0)); + assert_ok!(Multisig::as_multi( + Origin::signed(1), + 2, + vec![2, 3], + None, + data.clone(), + false, + 0 + )); 
assert_eq!(Balances::free_balance(6), 0); - assert_ok!(Multisig::as_multi(Origin::signed(2), 2, vec![1, 3], Some(now()), data, false, call_weight)); + assert_ok!(Multisig::as_multi( + Origin::signed(2), + 2, + vec![1, 3], + Some(now()), + data, + false, + call_weight + )); assert_eq!(Balances::free_balance(6), 15); }); } @@ -397,10 +574,42 @@ fn multisig_2_of_3_as_multi_with_many_calls_works() { let call2_weight = call2.get_dispatch_info().weight; let data2 = call2.encode(); - assert_ok!(Multisig::as_multi(Origin::signed(1), 2, vec![2, 3], None, data1.clone(), false, 0)); - assert_ok!(Multisig::as_multi(Origin::signed(2), 2, vec![1, 3], None, data2.clone(), false, 0)); - assert_ok!(Multisig::as_multi(Origin::signed(3), 2, vec![1, 2], Some(now()), data1, false, call1_weight)); - assert_ok!(Multisig::as_multi(Origin::signed(3), 2, vec![1, 2], Some(now()), data2, false, call2_weight)); + assert_ok!(Multisig::as_multi( + Origin::signed(1), + 2, + vec![2, 3], + None, + data1.clone(), + false, + 0 + )); + assert_ok!(Multisig::as_multi( + Origin::signed(2), + 2, + vec![1, 3], + None, + data2.clone(), + false, + 0 + )); + assert_ok!(Multisig::as_multi( + Origin::signed(3), + 2, + vec![1, 2], + Some(now()), + data1, + false, + call1_weight + )); + assert_ok!(Multisig::as_multi( + Origin::signed(3), + 2, + vec![1, 2], + Some(now()), + data2, + false, + call2_weight + )); assert_eq!(Balances::free_balance(6), 10); assert_eq!(Balances::free_balance(7), 5); @@ -419,15 +628,49 @@ fn multisig_2_of_3_cannot_reissue_same_call() { let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); - assert_ok!(Multisig::as_multi(Origin::signed(1), 2, vec![2, 3], None, data.clone(), false, 0)); - assert_ok!(Multisig::as_multi(Origin::signed(2), 2, vec![1, 3], Some(now()), data.clone(), false, call_weight)); + assert_ok!(Multisig::as_multi( + Origin::signed(1), + 2, + vec![2, 3], + None, + data.clone(), + false, + 0 + )); + 
assert_ok!(Multisig::as_multi( + Origin::signed(2), + 2, + vec![1, 3], + Some(now()), + data.clone(), + false, + call_weight + )); assert_eq!(Balances::free_balance(multi), 5); - assert_ok!(Multisig::as_multi(Origin::signed(1), 2, vec![2, 3], None, data.clone(), false, 0)); - assert_ok!(Multisig::as_multi(Origin::signed(3), 2, vec![1, 2], Some(now()), data.clone(), false, call_weight)); + assert_ok!(Multisig::as_multi( + Origin::signed(1), + 2, + vec![2, 3], + None, + data.clone(), + false, + 0 + )); + assert_ok!(Multisig::as_multi( + Origin::signed(3), + 2, + vec![1, 2], + Some(now()), + data.clone(), + false, + call_weight + )); let err = DispatchError::from(BalancesError::::InsufficientBalance).stripped(); - System::assert_last_event(pallet_multisig::Event::MultisigExecuted(3, now(), multi, hash, Err(err)).into()); + System::assert_last_event( + pallet_multisig::Event::MultisigExecuted(3, now(), multi, hash, Err(err)).into(), + ); }); } @@ -462,14 +705,42 @@ fn duplicate_approvals_are_ignored() { new_test_ext().execute_with(|| { let call = Call::Balances(BalancesCall::transfer(6, 15)).encode(); let hash = blake2_256(&call); - assert_ok!(Multisig::approve_as_multi(Origin::signed(1), 2, vec![2, 3], None, hash.clone(), 0)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(1), + 2, + vec![2, 3], + None, + hash.clone(), + 0 + )); assert_noop!( - Multisig::approve_as_multi(Origin::signed(1), 2, vec![2, 3], Some(now()), hash.clone(), 0), + Multisig::approve_as_multi( + Origin::signed(1), + 2, + vec![2, 3], + Some(now()), + hash.clone(), + 0 + ), Error::::AlreadyApproved, ); - assert_ok!(Multisig::approve_as_multi(Origin::signed(2), 2, vec![1, 3], Some(now()), hash.clone(), 0)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(2), + 2, + vec![1, 3], + Some(now()), + hash.clone(), + 0 + )); assert_noop!( - Multisig::approve_as_multi(Origin::signed(3), 2, vec![1, 2], Some(now()), hash.clone(), 0), + Multisig::approve_as_multi( + Origin::signed(3), + 2, + 
vec![1, 2], + Some(now()), + hash.clone(), + 0 + ), Error::::AlreadyApproved, ); }); @@ -521,7 +792,15 @@ fn weight_check_works() { let call = Call::Balances(BalancesCall::transfer(6, 15)); let data = call.encode(); - assert_ok!(Multisig::as_multi(Origin::signed(1), 2, vec![2, 3], None, data.clone(), false, 0)); + assert_ok!(Multisig::as_multi( + Origin::signed(1), + 2, + vec![2, 3], + None, + data.clone(), + false, + 0 + )); assert_eq!(Balances::free_balance(6), 0); assert_noop!( @@ -545,12 +824,41 @@ fn multisig_handles_no_preimage_after_all_approve() { let call_weight = call.get_dispatch_info().weight; let data = call.encode(); let hash = blake2_256(&data); - assert_ok!(Multisig::approve_as_multi(Origin::signed(1), 3, vec![2, 3], None, hash.clone(), 0)); - assert_ok!(Multisig::approve_as_multi(Origin::signed(2), 3, vec![1, 3], Some(now()), hash.clone(), 0)); - assert_ok!(Multisig::approve_as_multi(Origin::signed(3), 3, vec![1, 2], Some(now()), hash.clone(), 0)); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(1), + 3, + vec![2, 3], + None, + hash.clone(), + 0 + )); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(2), + 3, + vec![1, 3], + Some(now()), + hash.clone(), + 0 + )); + assert_ok!(Multisig::approve_as_multi( + Origin::signed(3), + 3, + vec![1, 2], + Some(now()), + hash.clone(), + 0 + )); assert_eq!(Balances::free_balance(6), 0); - assert_ok!(Multisig::as_multi(Origin::signed(3), 3, vec![1, 2], Some(now()), data, false, call_weight)); + assert_ok!(Multisig::as_multi( + Origin::signed(3), + 3, + vec![1, 2], + Some(now()), + data, + false, + call_weight + )); assert_eq!(Balances::free_balance(6), 15); }); } diff --git a/frame/multisig/src/weights.rs b/frame/multisig/src/weights.rs index 50f774030015f..29e823d53ea0c 100644 --- a/frame/multisig/src/weights.rs +++ b/frame/multisig/src/weights.rs @@ -35,34 +35,36 @@ // --output=./frame/multisig/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] 
#![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_multisig. pub trait WeightInfo { - fn as_multi_threshold_1(z: u32, ) -> Weight; - fn as_multi_create(s: u32, z: u32, ) -> Weight; - fn as_multi_create_store(s: u32, z: u32, ) -> Weight; - fn as_multi_approve(s: u32, z: u32, ) -> Weight; - fn as_multi_approve_store(s: u32, z: u32, ) -> Weight; - fn as_multi_complete(s: u32, z: u32, ) -> Weight; - fn approve_as_multi_create(s: u32, ) -> Weight; - fn approve_as_multi_approve(s: u32, ) -> Weight; - fn approve_as_multi_complete(s: u32, ) -> Weight; - fn cancel_as_multi(s: u32, ) -> Weight; + fn as_multi_threshold_1(z: u32) -> Weight; + fn as_multi_create(s: u32, z: u32) -> Weight; + fn as_multi_create_store(s: u32, z: u32) -> Weight; + fn as_multi_approve(s: u32, z: u32) -> Weight; + fn as_multi_approve_store(s: u32, z: u32) -> Weight; + fn as_multi_complete(s: u32, z: u32) -> Weight; + fn approve_as_multi_create(s: u32) -> Weight; + fn approve_as_multi_approve(s: u32) -> Weight; + fn approve_as_multi_complete(s: u32) -> Weight; + fn cancel_as_multi(s: u32) -> Weight; } /// Weights for pallet_multisig using the Substrate node and recommended hardware. 
pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn as_multi_threshold_1(_z: u32, ) -> Weight { + fn as_multi_threshold_1(_z: u32) -> Weight { (14_411_000 as Weight) } - fn as_multi_create(s: u32, z: u32, ) -> Weight { + fn as_multi_create(s: u32, z: u32) -> Weight { (54_200_000 as Weight) // Standard Error: 0 .saturating_add((127_000 as Weight).saturating_mul(s as Weight)) @@ -71,7 +73,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn as_multi_create_store(s: u32, z: u32, ) -> Weight { + fn as_multi_create_store(s: u32, z: u32) -> Weight { (60_502_000 as Weight) // Standard Error: 0 .saturating_add((128_000 as Weight).saturating_mul(s as Weight)) @@ -80,7 +82,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn as_multi_approve(s: u32, z: u32, ) -> Weight { + fn as_multi_approve(s: u32, z: u32) -> Weight { (32_075_000 as Weight) // Standard Error: 0 .saturating_add((132_000 as Weight).saturating_mul(s as Weight)) @@ -89,7 +91,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn as_multi_approve_store(s: u32, z: u32, ) -> Weight { + fn as_multi_approve_store(s: u32, z: u32) -> Weight { (57_742_000 as Weight) // Standard Error: 0 .saturating_add((141_000 as Weight).saturating_mul(s as Weight)) @@ -98,7 +100,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn as_multi_complete(s: u32, z: u32, ) -> Weight { + fn as_multi_complete(s: u32, z: u32) -> Weight { (73_503_000 as Weight) // Standard Error: 0 .saturating_add((246_000 as Weight).saturating_mul(s as Weight)) @@ -107,28 +109,28 @@ impl WeightInfo for 
SubstrateWeight { .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn approve_as_multi_create(s: u32, ) -> Weight { + fn approve_as_multi_create(s: u32) -> Weight { (53_659_000 as Weight) // Standard Error: 0 .saturating_add((133_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn approve_as_multi_approve(s: u32, ) -> Weight { + fn approve_as_multi_approve(s: u32) -> Weight { (31_353_000 as Weight) // Standard Error: 0 .saturating_add((136_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn approve_as_multi_complete(s: u32, ) -> Weight { + fn approve_as_multi_complete(s: u32) -> Weight { (125_011_000 as Weight) // Standard Error: 0 .saturating_add((247_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn cancel_as_multi(s: u32, ) -> Weight { + fn cancel_as_multi(s: u32) -> Weight { (92_318_000 as Weight) // Standard Error: 0 .saturating_add((128_000 as Weight).saturating_mul(s as Weight)) @@ -139,10 +141,10 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn as_multi_threshold_1(_z: u32, ) -> Weight { + fn as_multi_threshold_1(_z: u32) -> Weight { (14_411_000 as Weight) } - fn as_multi_create(s: u32, z: u32, ) -> Weight { + fn as_multi_create(s: u32, z: u32) -> Weight { (54_200_000 as Weight) // Standard Error: 0 .saturating_add((127_000 as Weight).saturating_mul(s as Weight)) @@ -151,7 +153,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn as_multi_create_store(s: u32, z: u32, ) -> Weight { + fn 
as_multi_create_store(s: u32, z: u32) -> Weight { (60_502_000 as Weight) // Standard Error: 0 .saturating_add((128_000 as Weight).saturating_mul(s as Weight)) @@ -160,7 +162,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn as_multi_approve(s: u32, z: u32, ) -> Weight { + fn as_multi_approve(s: u32, z: u32) -> Weight { (32_075_000 as Weight) // Standard Error: 0 .saturating_add((132_000 as Weight).saturating_mul(s as Weight)) @@ -169,7 +171,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn as_multi_approve_store(s: u32, z: u32, ) -> Weight { + fn as_multi_approve_store(s: u32, z: u32) -> Weight { (57_742_000 as Weight) // Standard Error: 0 .saturating_add((141_000 as Weight).saturating_mul(s as Weight)) @@ -178,7 +180,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn as_multi_complete(s: u32, z: u32, ) -> Weight { + fn as_multi_complete(s: u32, z: u32) -> Weight { (73_503_000 as Weight) // Standard Error: 0 .saturating_add((246_000 as Weight).saturating_mul(s as Weight)) @@ -187,28 +189,28 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn approve_as_multi_create(s: u32, ) -> Weight { + fn approve_as_multi_create(s: u32) -> Weight { (53_659_000 as Weight) // Standard Error: 0 .saturating_add((133_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn approve_as_multi_approve(s: u32, ) -> Weight { + fn approve_as_multi_approve(s: u32) -> Weight { (31_353_000 as Weight) // Standard Error: 0 .saturating_add((136_000 as Weight).saturating_mul(s as Weight)) 
.saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn approve_as_multi_complete(s: u32, ) -> Weight { + fn approve_as_multi_complete(s: u32) -> Weight { (125_011_000 as Weight) // Standard Error: 0 .saturating_add((247_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn cancel_as_multi(s: u32, ) -> Weight { + fn cancel_as_multi(s: u32) -> Weight { (92_318_000 as Weight) // Standard Error: 0 .saturating_add((128_000 as Weight).saturating_mul(s as Weight)) diff --git a/frame/nicks/src/lib.rs b/frame/nicks/src/lib.rs index 8af87dad8c74e..42c355c61f7da 100644 --- a/frame/nicks/src/lib.rs +++ b/frame/nicks/src/lib.rs @@ -41,21 +41,26 @@ #![cfg_attr(not(feature = "std"), no_std)] -use sp_std::prelude::*; -use sp_runtime::{ - traits::{StaticLookup, Zero} -}; -use frame_support::traits::{Currency, ReservableCurrency, OnUnbalanced}; +use frame_support::traits::{Currency, OnUnbalanced, ReservableCurrency}; pub use pallet::*; +use sp_runtime::traits::{StaticLookup, Zero}; +use sp_std::prelude::*; -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; -type NegativeImbalanceOf = <::Currency as Currency<::AccountId>>::NegativeImbalance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; +type NegativeImbalanceOf = <::Currency as Currency< + ::AccountId, +>>::NegativeImbalance; #[frame_support::pallet] pub mod pallet { - use frame_system::{ensure_signed, pallet_prelude::*}; - use frame_support::{ensure, pallet_prelude::*, traits::{EnsureOrigin, Get}}; use super::*; + use frame_support::{ + ensure, + pallet_prelude::*, + traits::{EnsureOrigin, Get}, + }; + use frame_system::{ensure_signed, pallet_prelude::*}; #[pallet::config] pub trait Config: frame_system::Config { @@ -112,7 +117,8 @@ pub mod pallet { /// The lookup table for names. 
#[pallet::storage] - pub(super) type NameOf = StorageMap<_, Twox64Concat, T::AccountId, (Vec, BalanceOf)>; + pub(super) type NameOf = + StorageMap<_, Twox64Concat, T::AccountId, (Vec, BalanceOf)>; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -196,7 +202,7 @@ pub mod pallet { #[pallet::weight(70_000_000)] pub fn kill_name( origin: OriginFor, - target: ::Source + target: ::Source, ) -> DispatchResult { T::ForceOrigin::ensure_origin(origin)?; @@ -227,7 +233,7 @@ pub mod pallet { pub fn force_name( origin: OriginFor, target: ::Source, - name: Vec + name: Vec, ) -> DispatchResult { T::ForceOrigin::ensure_origin(origin)?; @@ -246,11 +252,12 @@ mod tests { use super::*; use crate as pallet_nicks; - use frame_support::{assert_ok, assert_noop, parameter_types, ord_parameter_types}; - use sp_core::H256; + use frame_support::{assert_noop, assert_ok, ord_parameter_types, parameter_types}; use frame_system::EnsureSignedBy; + use sp_core::H256; use sp_runtime::{ - testing::Header, traits::{BlakeTwo256, IdentityLookup, BadOrigin}, + testing::Header, + traits::{BadOrigin, BlakeTwo256, IdentityLookup}, }; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; @@ -332,12 +339,9 @@ mod tests { fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - pallet_balances::GenesisConfig:: { - balances: vec![ - (1, 10), - (2, 10), - ], - }.assimilate_storage(&mut t).unwrap(); + pallet_balances::GenesisConfig:: { balances: vec![(1, 10), (2, 10)] } + .assimilate_storage(&mut t) + .unwrap(); t.into() } @@ -397,7 +401,10 @@ mod tests { pallet_balances::Error::::InsufficientBalance ); - assert_noop!(Nicks::set_name(Origin::signed(1), b"Ga".to_vec()), Error::::TooShort); + assert_noop!( + Nicks::set_name(Origin::signed(1), b"Ga".to_vec()), + Error::::TooShort + ); assert_noop!( Nicks::set_name(Origin::signed(1), b"Gavin James Wood, Esquire".to_vec()), Error::::TooLong diff --git 
a/frame/node-authorization/src/lib.rs b/frame/node-authorization/src/lib.rs index be2b5d95551ac..016f12d2eb838 100644 --- a/frame/node-authorization/src/lib.rs +++ b/frame/node-authorization/src/lib.rs @@ -44,22 +44,15 @@ mod tests; pub mod weights; -use sp_core::OpaquePeerId as PeerId; -use sp_std::{ - collections::btree_set::BTreeSet, - iter::FromIterator, - prelude::*, -}; pub use pallet::*; +use sp_core::OpaquePeerId as PeerId; +use sp_std::{collections::btree_set::BTreeSet, iter::FromIterator, prelude::*}; pub use weights::WeightInfo; #[frame_support::pallet] pub mod pallet { use super::*; - use frame_support::{ - dispatch::DispatchResult, - pallet_prelude::*, - }; + use frame_support::{dispatch::DispatchResult, pallet_prelude::*}; use frame_system::pallet_prelude::*; #[pallet::pallet] @@ -104,23 +97,13 @@ pub mod pallet { /// A map that maintains the ownership of each node. #[pallet::storage] #[pallet::getter(fn owners)] - pub type Owners = StorageMap< - _, - Blake2_128Concat, - PeerId, - T::AccountId, - >; + pub type Owners = StorageMap<_, Blake2_128Concat, PeerId, T::AccountId>; /// The additional adapative connections of each node. 
#[pallet::storage] #[pallet::getter(fn additional_connection)] - pub type AdditionalConnections = StorageMap< - _, - Blake2_128Concat, - PeerId, - BTreeSet, - ValueQuery, - >; + pub type AdditionalConnections = + StorageMap<_, Blake2_128Concat, PeerId, BTreeSet, ValueQuery>; #[pallet::genesis_config] pub struct GenesisConfig { @@ -207,10 +190,10 @@ pub mod pallet { ), Ok(node) => sp_io::offchain::set_authorized_nodes( Self::get_authorized_nodes(&PeerId(node)), - true - ) + true, + ), } - } + }, } } } @@ -227,7 +210,7 @@ pub mod pallet { pub fn add_well_known_node( origin: OriginFor, node: PeerId, - owner: T::AccountId + owner: T::AccountId, ) -> DispatchResult { T::AddOrigin::ensure_origin(origin)?; ensure!(node.0.len() < T::MaxPeerIdLength::get() as usize, Error::::PeerIdTooLong); @@ -280,16 +263,15 @@ pub mod pallet { pub fn swap_well_known_node( origin: OriginFor, remove: PeerId, - add: PeerId + add: PeerId, ) -> DispatchResult { T::SwapOrigin::ensure_origin(origin)?; - ensure!( - remove.0.len() < T::MaxPeerIdLength::get() as usize, - Error::::PeerIdTooLong - ); + ensure!(remove.0.len() < T::MaxPeerIdLength::get() as usize, Error::::PeerIdTooLong); ensure!(add.0.len() < T::MaxPeerIdLength::get() as usize, Error::::PeerIdTooLong); - if remove == add { return Ok(()) } + if remove == add { + return Ok(()) + } let mut nodes = WellKnownNodes::::get(); ensure!(nodes.contains(&remove), Error::::NotExist); @@ -316,7 +298,7 @@ pub mod pallet { #[pallet::weight((T::WeightInfo::reset_well_known_nodes(), DispatchClass::Operational))] pub fn reset_well_known_nodes( origin: OriginFor, - nodes: Vec<(PeerId, T::AccountId)> + nodes: Vec<(PeerId, T::AccountId)>, ) -> DispatchResult { T::ResetOrigin::ensure_origin(origin)?; ensure!(nodes.len() < T::MaxWellKnownNodes::get() as usize, Error::::TooManyNodes); @@ -336,7 +318,7 @@ pub mod pallet { let sender = ensure_signed(origin)?; ensure!(node.0.len() < T::MaxPeerIdLength::get() as usize, Error::::PeerIdTooLong); - 
ensure!(!Owners::::contains_key(&node),Error::::AlreadyClaimed); + ensure!(!Owners::::contains_key(&node), Error::::AlreadyClaimed); Owners::::insert(&node, &sender); Self::deposit_event(Event::NodeClaimed(node, sender)); @@ -372,7 +354,7 @@ pub mod pallet { pub fn transfer_node( origin: OriginFor, node: PeerId, - owner: T::AccountId + owner: T::AccountId, ) -> DispatchResult { let sender = ensure_signed(origin)?; @@ -394,7 +376,7 @@ pub mod pallet { pub fn add_connections( origin: OriginFor, node: PeerId, - connections: Vec + connections: Vec, ) -> DispatchResult { let sender = ensure_signed(origin)?; @@ -406,7 +388,7 @@ pub mod pallet { for add_node in connections.iter() { if *add_node == node { - continue; + continue } nodes.insert(add_node.clone()); } @@ -425,7 +407,7 @@ pub mod pallet { pub fn remove_connections( origin: OriginFor, node: PeerId, - connections: Vec + connections: Vec, ) -> DispatchResult { let sender = ensure_signed(origin)?; @@ -449,9 +431,7 @@ pub mod pallet { impl Pallet { fn initialize_nodes(nodes: &Vec<(PeerId, T::AccountId)>) { - let peer_ids = nodes.iter() - .map(|item| item.0.clone()) - .collect::>(); + let peer_ids = nodes.iter().map(|item| item.0.clone()).collect::>(); WellKnownNodes::::put(&peer_ids); for (node, who) in nodes.iter() { diff --git a/frame/node-authorization/src/mock.rs b/frame/node-authorization/src/mock.rs index e952ed900d4be..302378f48ce64 100644 --- a/frame/node-authorization/src/mock.rs +++ b/frame/node-authorization/src/mock.rs @@ -20,13 +20,13 @@ use super::*; use crate as pallet_node_authorization; -use frame_support::{ - parameter_types, ord_parameter_types, - traits::GenesisBuild, -}; +use frame_support::{ord_parameter_types, parameter_types, traits::GenesisBuild}; use frame_system::EnsureSignedBy; use sp_core::H256; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header}; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, +}; type UncheckedExtrinsic = 
frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -102,6 +102,8 @@ pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_node_authorization::GenesisConfig:: { nodes: vec![(test_node(10), 10), (test_node(20), 20), (test_node(30), 30)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); t.into() } diff --git a/frame/node-authorization/src/tests.rs b/frame/node-authorization/src/tests.rs index 15a286fbc2390..530904fa73488 100644 --- a/frame/node-authorization/src/tests.rs +++ b/frame/node-authorization/src/tests.rs @@ -19,7 +19,7 @@ use super::*; use crate::mock::*; -use frame_support::{assert_ok, assert_noop}; +use frame_support::{assert_noop, assert_ok}; use sp_runtime::traits::BadOrigin; #[test] @@ -38,9 +38,7 @@ fn add_well_known_node_works() { Error::::AlreadyJoined ); - assert_ok!( - NodeAuthorization::add_well_known_node(Origin::signed(1), test_node(15), 15) - ); + assert_ok!(NodeAuthorization::add_well_known_node(Origin::signed(1), test_node(15), 15)); assert_eq!( WellKnownNodes::::get(), BTreeSet::from_iter(vec![test_node(10), test_node(15), test_node(20), test_node(30)]) @@ -75,13 +73,11 @@ fn remove_well_known_node_works() { AdditionalConnections::::insert( test_node(20), - BTreeSet::from_iter(vec![test_node(40)]) + BTreeSet::from_iter(vec![test_node(40)]), ); assert!(AdditionalConnections::::contains_key(test_node(20))); - assert_ok!( - NodeAuthorization::remove_well_known_node(Origin::signed(2), test_node(20)) - ); + assert_ok!(NodeAuthorization::remove_well_known_node(Origin::signed(2), test_node(20))); assert_eq!( WellKnownNodes::::get(), BTreeSet::from_iter(vec![test_node(10), test_node(30)]) @@ -95,56 +91,58 @@ fn remove_well_known_node_works() { fn swap_well_known_node_works() { new_test_ext().execute_with(|| { assert_noop!( - NodeAuthorization::swap_well_known_node( - 
Origin::signed(4), test_node(20), test_node(5) - ), + NodeAuthorization::swap_well_known_node(Origin::signed(4), test_node(20), test_node(5)), BadOrigin ); assert_noop!( NodeAuthorization::swap_well_known_node( - Origin::signed(3), PeerId(vec![1, 2, 3]), test_node(20) + Origin::signed(3), + PeerId(vec![1, 2, 3]), + test_node(20) ), Error::::PeerIdTooLong ); assert_noop!( NodeAuthorization::swap_well_known_node( - Origin::signed(3), test_node(20), PeerId(vec![1, 2, 3]) + Origin::signed(3), + test_node(20), + PeerId(vec![1, 2, 3]) ), Error::::PeerIdTooLong ); - assert_ok!( - NodeAuthorization::swap_well_known_node( - Origin::signed(3), test_node(20), test_node(20) - ) - ); + assert_ok!(NodeAuthorization::swap_well_known_node( + Origin::signed(3), + test_node(20), + test_node(20) + )); assert_eq!( WellKnownNodes::::get(), BTreeSet::from_iter(vec![test_node(10), test_node(20), test_node(30)]) ); assert_noop!( - NodeAuthorization::swap_well_known_node( - Origin::signed(3), test_node(15), test_node(5) - ), + NodeAuthorization::swap_well_known_node(Origin::signed(3), test_node(15), test_node(5)), Error::::NotExist ); assert_noop!( NodeAuthorization::swap_well_known_node( - Origin::signed(3), test_node(20), test_node(30) + Origin::signed(3), + test_node(20), + test_node(30) ), Error::::AlreadyJoined ); AdditionalConnections::::insert( test_node(20), - BTreeSet::from_iter(vec![test_node(15)]) - ); - assert_ok!( - NodeAuthorization::swap_well_known_node( - Origin::signed(3), test_node(20), test_node(5) - ) + BTreeSet::from_iter(vec![test_node(15)]), ); + assert_ok!(NodeAuthorization::swap_well_known_node( + Origin::signed(3), + test_node(20), + test_node(5) + )); assert_eq!( WellKnownNodes::::get(), BTreeSet::from_iter(vec![test_node(5), test_node(10), test_node(30)]) @@ -182,12 +180,10 @@ fn reset_well_known_nodes_works() { Error::::TooManyNodes ); - assert_ok!( - NodeAuthorization::reset_well_known_nodes( - Origin::signed(4), - vec![(test_node(15), 15), (test_node(5), 5), 
(test_node(20), 20)] - ) - ); + assert_ok!(NodeAuthorization::reset_well_known_nodes( + Origin::signed(4), + vec![(test_node(15), 15), (test_node(5), 5), (test_node(20), 20)] + )); assert_eq!( WellKnownNodes::::get(), BTreeSet::from_iter(vec![test_node(5), test_node(15), test_node(20)]) @@ -240,7 +236,7 @@ fn remove_claim_works() { Owners::::insert(test_node(15), 15); AdditionalConnections::::insert( test_node(15), - BTreeSet::from_iter(vec![test_node(20)]) + BTreeSet::from_iter(vec![test_node(20)]), ); assert_ok!(NodeAuthorization::remove_claim(Origin::signed(15), test_node(15))); assert!(!Owners::::contains_key(test_node(15))); @@ -275,31 +271,35 @@ fn add_connections_works() { new_test_ext().execute_with(|| { assert_noop!( NodeAuthorization::add_connections( - Origin::signed(15), PeerId(vec![1, 2, 3]), vec![test_node(5)] + Origin::signed(15), + PeerId(vec![1, 2, 3]), + vec![test_node(5)] ), Error::::PeerIdTooLong ); assert_noop!( NodeAuthorization::add_connections( - Origin::signed(15), test_node(15), vec![test_node(5)] + Origin::signed(15), + test_node(15), + vec![test_node(5)] ), Error::::NotClaimed ); assert_noop!( NodeAuthorization::add_connections( - Origin::signed(15), test_node(20), vec![test_node(5)] + Origin::signed(15), + test_node(20), + vec![test_node(5)] ), Error::::NotOwner ); - assert_ok!( - NodeAuthorization::add_connections( - Origin::signed(20), - test_node(20), - vec![test_node(15), test_node(5), test_node(25), test_node(20)] - ) - ); + assert_ok!(NodeAuthorization::add_connections( + Origin::signed(20), + test_node(20), + vec![test_node(15), test_node(5), test_node(25), test_node(20)] + )); assert_eq!( AdditionalConnections::::get(test_node(20)), BTreeSet::from_iter(vec![test_node(5), test_node(15), test_node(25)]) @@ -312,35 +312,39 @@ fn remove_connections_works() { new_test_ext().execute_with(|| { assert_noop!( NodeAuthorization::remove_connections( - Origin::signed(15), PeerId(vec![1, 2, 3]), vec![test_node(5)] + Origin::signed(15), + 
PeerId(vec![1, 2, 3]), + vec![test_node(5)] ), Error::::PeerIdTooLong ); assert_noop!( NodeAuthorization::remove_connections( - Origin::signed(15), test_node(15), vec![test_node(5)] + Origin::signed(15), + test_node(15), + vec![test_node(5)] ), Error::::NotClaimed ); assert_noop!( NodeAuthorization::remove_connections( - Origin::signed(15), test_node(20), vec![test_node(5)] + Origin::signed(15), + test_node(20), + vec![test_node(5)] ), Error::::NotOwner ); AdditionalConnections::::insert( test_node(20), - BTreeSet::from_iter(vec![test_node(5), test_node(15), test_node(25)]) - ); - assert_ok!( - NodeAuthorization::remove_connections( - Origin::signed(20), - test_node(20), - vec![test_node(15), test_node(5)] - ) + BTreeSet::from_iter(vec![test_node(5), test_node(15), test_node(25)]), ); + assert_ok!(NodeAuthorization::remove_connections( + Origin::signed(20), + test_node(20), + vec![test_node(15), test_node(5)] + )); assert_eq!( AdditionalConnections::::get(test_node(20)), BTreeSet::from_iter(vec![test_node(25)]) @@ -353,7 +357,7 @@ fn get_authorized_nodes_works() { new_test_ext().execute_with(|| { AdditionalConnections::::insert( test_node(20), - BTreeSet::from_iter(vec![test_node(5), test_node(15), test_node(25)]) + BTreeSet::from_iter(vec![test_node(5), test_node(15), test_node(25)]), ); let mut authorized_nodes = Pallet::::get_authorized_nodes(&test_node(20)); diff --git a/frame/node-authorization/src/weights.rs b/frame/node-authorization/src/weights.rs index 3d01e40d67ac3..c64d6f0e772a6 100644 --- a/frame/node-authorization/src/weights.rs +++ b/frame/node-authorization/src/weights.rs @@ -20,7 +20,10 @@ #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; pub trait WeightInfo { @@ -36,13 +39,31 @@ pub trait WeightInfo { } impl WeightInfo for () { - fn 
add_well_known_node() -> Weight { 50_000_000 } - fn remove_well_known_node() -> Weight { 50_000_000 } - fn swap_well_known_node() -> Weight { 50_000_000 } - fn reset_well_known_nodes() -> Weight { 50_000_000 } - fn claim_node() -> Weight { 50_000_000 } - fn remove_claim() -> Weight { 50_000_000 } - fn transfer_node() -> Weight { 50_000_000 } - fn add_connections() -> Weight { 50_000_000 } - fn remove_connections() -> Weight { 50_000_000 } + fn add_well_known_node() -> Weight { + 50_000_000 + } + fn remove_well_known_node() -> Weight { + 50_000_000 + } + fn swap_well_known_node() -> Weight { + 50_000_000 + } + fn reset_well_known_nodes() -> Weight { + 50_000_000 + } + fn claim_node() -> Weight { + 50_000_000 + } + fn remove_claim() -> Weight { + 50_000_000 + } + fn transfer_node() -> Weight { + 50_000_000 + } + fn add_connections() -> Weight { + 50_000_000 + } + fn remove_connections() -> Weight { + 50_000_000 + } } diff --git a/frame/offences/benchmarking/src/lib.rs b/frame/offences/benchmarking/src/lib.rs index d424cfc751eef..0332272cf2df5 100644 --- a/frame/offences/benchmarking/src/lib.rs +++ b/frame/offences/benchmarking/src/lib.rs @@ -21,29 +21,30 @@ mod mock; -use sp_std::prelude::*; -use sp_std::vec; +use sp_std::{prelude::*, vec}; -use frame_system::{RawOrigin, Pallet as System, Config as SystemConfig}; -use frame_benchmarking::{benchmarks, account, impl_benchmark_test_suite}; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite}; use frame_support::traits::{Currency, ValidatorSet, ValidatorSetWithIdentification}; +use frame_system::{Config as SystemConfig, Pallet as System, RawOrigin}; use sp_runtime::{ + traits::{Convert, Saturating, StaticLookup, UniqueSaturatedInto}, Perbill, - traits::{Convert, StaticLookup, Saturating, UniqueSaturatedInto}, }; -use sp_staking::offence::{ReportOffence, Offence}; +use sp_staking::offence::{Offence, ReportOffence}; -use pallet_balances::Config as BalancesConfig; use 
pallet_babe::BabeEquivocationOffence; +use pallet_balances::Config as BalancesConfig; use pallet_grandpa::{GrandpaEquivocationOffence, GrandpaTimeSlot}; use pallet_im_online::{Config as ImOnlineConfig, Pallet as ImOnline, UnresponsivenessOffence}; use pallet_offences::{Config as OffencesConfig, Pallet as Offences}; -use pallet_session::historical::{Config as HistoricalConfig, IdentificationTuple}; -use pallet_session::{Config as SessionConfig, SessionManager}; +use pallet_session::{ + historical::{Config as HistoricalConfig, IdentificationTuple}, + Config as SessionConfig, SessionManager, +}; use pallet_staking::{ - Pallet as Staking, Config as StakingConfig, RewardDestination, ValidatorPrefs, Exposure, - IndividualExposure, Event as StakingEvent, + Config as StakingConfig, Event as StakingEvent, Exposure, IndividualExposure, + Pallet as Staking, RewardDestination, ValidatorPrefs, }; const SEED: u32 = 0; @@ -62,7 +63,8 @@ pub trait Config: + HistoricalConfig + BalancesConfig + IdTupleConvert -{} +{ +} /// A helper trait to make sure we can convert `IdentificationTuple` coming from historical /// and the one required by offences. @@ -71,8 +73,9 @@ pub trait IdTupleConvert { fn convert(id: IdentificationTuple) -> ::IdentificationTuple; } -impl IdTupleConvert for T where - ::IdentificationTuple: From> +impl IdTupleConvert for T +where + ::IdentificationTuple: From>, { fn convert(id: IdentificationTuple) -> ::IdentificationTuple { id.into() @@ -80,7 +83,8 @@ impl IdTupleConvert for T where } type LookupSourceOf = <::Lookup as StaticLookup>::Source; -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; struct Offender { pub controller: T::AccountId, @@ -109,19 +113,20 @@ fn create_offender(n: u32, nominators: u32) -> Result, &' reward_destination.clone(), )?; - let validator_prefs = ValidatorPrefs { - commission: Perbill::from_percent(50), - .. 
Default::default() - }; + let validator_prefs = + ValidatorPrefs { commission: Perbill::from_percent(50), ..Default::default() }; Staking::::validate(RawOrigin::Signed(controller.clone()).into(), validator_prefs)?; let mut individual_exposures = vec![]; let mut nominator_stashes = vec![]; // Create n nominators - for i in 0 .. nominators { - let nominator_stash: T::AccountId = account("nominator stash", n * MAX_NOMINATORS + i, SEED); - let nominator_controller: T::AccountId = account("nominator controller", n * MAX_NOMINATORS + i, SEED); - let nominator_controller_lookup: LookupSourceOf = T::Lookup::unlookup(nominator_controller.clone()); + for i in 0..nominators { + let nominator_stash: T::AccountId = + account("nominator stash", n * MAX_NOMINATORS + i, SEED); + let nominator_controller: T::AccountId = + account("nominator controller", n * MAX_NOMINATORS + i, SEED); + let nominator_controller_lookup: LookupSourceOf = + T::Lookup::unlookup(nominator_controller.clone()); T::Currency::make_free_balance_be(&nominator_stash, free_amount.into()); Staking::::bond( @@ -132,76 +137,82 @@ fn create_offender(n: u32, nominators: u32) -> Result, &' )?; let selected_validators: Vec> = vec![controller_lookup.clone()]; - Staking::::nominate(RawOrigin::Signed(nominator_controller.clone()).into(), selected_validators)?; + Staking::::nominate( + RawOrigin::Signed(nominator_controller.clone()).into(), + selected_validators, + )?; - individual_exposures.push(IndividualExposure { - who: nominator_stash.clone(), - value: amount.clone(), - }); + individual_exposures + .push(IndividualExposure { who: nominator_stash.clone(), value: amount.clone() }); nominator_stashes.push(nominator_stash.clone()); } - let exposure = Exposure { - total: amount.clone() * n.into(), - own: amount, - others: individual_exposures, - }; + let exposure = + Exposure { total: amount.clone() * n.into(), own: amount, others: individual_exposures }; let current_era = 0u32; 
Staking::::add_era_stakers(current_era.into(), stash.clone().into(), exposure); Ok(Offender { controller, stash, nominator_stashes }) } -fn make_offenders(num_offenders: u32, num_nominators: u32) -> Result< - (Vec>, Vec>), - &'static str -> { +fn make_offenders( + num_offenders: u32, + num_nominators: u32, +) -> Result<(Vec>, Vec>), &'static str> { Staking::::new_session(0); let mut offenders = vec![]; - for i in 0 .. num_offenders { + for i in 0..num_offenders { let offender = create_offender::(i + 1, num_nominators)?; offenders.push(offender); } Staking::::start_session(0); - let id_tuples = offenders.iter() - .map(|offender| + let id_tuples = offenders + .iter() + .map(|offender| { ::ValidatorIdOf::convert(offender.controller.clone()) - .expect("failed to get validator id from account id")) - .map(|validator_id| + .expect("failed to get validator id from account id") + }) + .map(|validator_id| { ::FullIdentificationOf::convert(validator_id.clone()) - .map(|full_id| (validator_id, full_id)) - .expect("failed to convert validator id to full identification")) + .map(|full_id| (validator_id, full_id)) + .expect("failed to convert validator id to full identification") + }) .collect::>>(); Ok((id_tuples, offenders)) } -fn make_offenders_im_online(num_offenders: u32, num_nominators: u32) -> Result< - (Vec>, Vec>), - &'static str -> { +fn make_offenders_im_online( + num_offenders: u32, + num_nominators: u32, +) -> Result<(Vec>, Vec>), &'static str> { Staking::::new_session(0); let mut offenders = vec![]; - for i in 0 .. 
num_offenders { + for i in 0..num_offenders { let offender = create_offender::(i + 1, num_nominators)?; offenders.push(offender); } Staking::::start_session(0); - let id_tuples = offenders.iter() - .map(|offender| < + let id_tuples = offenders + .iter() + .map(|offender| { + < ::ValidatorSet as ValidatorSet >::ValidatorIdOf::convert(offender.controller.clone()) - .expect("failed to get validator id from account id")) - .map(|validator_id| < + .expect("failed to get validator id from account id") + }) + .map(|validator_id| { + < ::ValidatorSet as ValidatorSetWithIdentification >::IdentificationOf::convert(validator_id.clone()) .map(|full_id| (validator_id, full_id)) - .expect("failed to convert validator id to full identification")) + .expect("failed to convert validator id to full identification") + }) .collect::>>(); Ok((id_tuples, offenders)) } @@ -224,7 +235,9 @@ fn check_events::Event>>(expec pretty("--Got:", &events); pretty("--Expected:", &expected); format!("Mismatching length. Got: {}, expected: {}", lengths.0, lengths.1) - } else { Default::default() }; + } else { + Default::default() + }; for (idx, (a, b)) in events.into_iter().zip(expected).enumerate() { assert_eq!(a, b, "Mismatch at: {}. {}", idx, length_mismatch); @@ -388,8 +401,4 @@ benchmarks! 
{ } } -impl_benchmark_test_suite!( - Pallet, - crate::mock::new_test_ext(), - crate::mock::Test, -); +impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::Test,); diff --git a/frame/offences/benchmarking/src/mock.rs b/frame/offences/benchmarking/src/mock.rs index 4e7a63c58a40b..6fc5ee8b66eb0 100644 --- a/frame/offences/benchmarking/src/mock.rs +++ b/frame/offences/benchmarking/src/mock.rs @@ -20,17 +20,14 @@ #![cfg(test)] use super::*; -use frame_support::{ - parameter_types, - weights::constants::WEIGHT_PER_SECOND, -}; +use frame_election_provider_support::onchain; +use frame_support::{parameter_types, weights::constants::WEIGHT_PER_SECOND}; use frame_system as system; +use pallet_session::historical as pallet_session_historical; use sp_runtime::{ - traits::IdentityLookup, testing::{Header, UintAuthorityId}, + traits::IdentityLookup, }; -use frame_election_provider_support::onchain; -use pallet_session::historical as pallet_session_historical; type AccountId = u64; type AccountIndex = u32; @@ -112,7 +109,8 @@ impl pallet_session::SessionHandler for TestSessionHandler { _: bool, _: &[(AccountId, Ks)], _: &[(AccountId, Ks)], - ) {} + ) { + } fn on_disabled(_: usize) {} } @@ -198,7 +196,10 @@ impl pallet_offences::Config for Test { type OnOffenceHandler = Staking; } -impl frame_system::offchain::SendTransactionTypes for Test where Call: From { +impl frame_system::offchain::SendTransactionTypes for Test +where + Call: From, +{ type Extrinsic = Extrinsic; type OverarchingCall = Call; } diff --git a/frame/offences/src/lib.rs b/frame/offences/src/lib.rs index 1076dd615496d..3392cd6e4a884 100644 --- a/frame/offences/src/lib.rs +++ b/frame/offences/src/lib.rs @@ -22,18 +22,18 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] +mod migration; mod mock; mod tests; -mod migration; -use sp_std::prelude::*; +use codec::{Decode, Encode}; use frame_support::weights::Weight; use sp_runtime::{traits::Hash, Perbill}; use sp_staking::{ offence::{Kind, Offence, OffenceDetails, OffenceError, OnOffenceHandler, ReportOffence}, - SessionIndex + SessionIndex, }; -use codec::{Decode, Encode}; +use sp_std::prelude::*; pub use pallet::*; @@ -44,17 +44,25 @@ type OpaqueTimeSlot = Vec; type ReportIdOf = ::Hash; pub trait WeightInfo { - fn report_offence_im_online(r: u32, o: u32, n: u32, ) -> Weight; - fn report_offence_grandpa(r: u32, n: u32, ) -> Weight; - fn report_offence_babe(r: u32, n: u32, ) -> Weight; - fn on_initialize(d: u32, ) -> Weight; + fn report_offence_im_online(r: u32, o: u32, n: u32) -> Weight; + fn report_offence_grandpa(r: u32, n: u32) -> Weight; + fn report_offence_babe(r: u32, n: u32) -> Weight; + fn on_initialize(d: u32) -> Weight; } impl WeightInfo for () { - fn report_offence_im_online(_r: u32, _o: u32, _n: u32, ) -> Weight { 1_000_000_000 } - fn report_offence_grandpa(_r: u32, _n: u32, ) -> Weight { 1_000_000_000 } - fn report_offence_babe(_r: u32, _n: u32, ) -> Weight { 1_000_000_000 } - fn on_initialize(_d: u32, ) -> Weight { 1_000_000_000 } + fn report_offence_im_online(_r: u32, _o: u32, _n: u32) -> Weight { + 1_000_000_000 + } + fn report_offence_grandpa(_r: u32, _n: u32) -> Weight { + 1_000_000_000 + } + fn report_offence_babe(_r: u32, _n: u32) -> Weight { + 1_000_000_000 + } + fn on_initialize(_d: u32) -> Weight { + 1_000_000_000 + } } #[frame_support::pallet] @@ -145,22 +153,20 @@ where // Go through all offenders in the offence report and find all offenders that were spotted // in unique reports. - let TriageOutcome { - concurrent_offenders, - } = match Self::triage_offence_report::(reporters, &time_slot, offenders) { - Some(triage) => triage, - // The report contained only duplicates, so there is no need to slash again. 
- None => return Err(OffenceError::DuplicateReport), - }; + let TriageOutcome { concurrent_offenders } = + match Self::triage_offence_report::(reporters, &time_slot, offenders) { + Some(triage) => triage, + // The report contained only duplicates, so there is no need to slash again. + None => return Err(OffenceError::DuplicateReport), + }; let offenders_count = concurrent_offenders.len() as u32; // The amount new offenders are slashed let new_fraction = O::slash_fraction(offenders_count, validator_set_count); - let slash_perbill: Vec<_> = (0..concurrent_offenders.len()) - .map(|_| new_fraction.clone()) - .collect(); + let slash_perbill: Vec<_> = + (0..concurrent_offenders.len()).map(|_| new_fraction.clone()).collect(); T::OnOffenceHandler::on_offence( &concurrent_offenders, @@ -212,10 +218,7 @@ impl Pallet { any_new = true; >::insert( &report_id, - OffenceDetails { - offender, - reporters: reporters.clone(), - }, + OffenceDetails { offender, reporters: reporters.clone() }, ); storage.insert(time_slot, report_id); @@ -232,9 +235,7 @@ impl Pallet { storage.save(); - Some(TriageOutcome { - concurrent_offenders, - }) + Some(TriageOutcome { concurrent_offenders }) } else { None } @@ -270,20 +271,14 @@ impl> ReportIndexStorage { let concurrent_reports = >::get(&O::ID, &opaque_time_slot); - Self { - opaque_time_slot, - concurrent_reports, - same_kind_reports, - } + Self { opaque_time_slot, concurrent_reports, same_kind_reports } } /// Insert a new report to the index. fn insert(&mut self, time_slot: &O::TimeSlot, report_id: ReportIdOf) { // Insert the report id into the list while maintaining the ordering by the time // slot. - let pos = self - .same_kind_reports - .partition_point(|&(ref when, _)| when <= time_slot); + let pos = self.same_kind_reports.partition_point(|&(ref when, _)| when <= time_slot); self.same_kind_reports.insert(pos, (time_slot.clone(), report_id)); // Update the list of concurrent reports. 
diff --git a/frame/offences/src/migration.rs b/frame/offences/src/migration.rs index ce8a125e7e1a1..cb5c520392c97 100644 --- a/frame/offences/src/migration.rs +++ b/frame/offences/src/migration.rs @@ -16,18 +16,13 @@ // limitations under the License. use super::{Config, OffenceDetails, Perbill, SessionIndex}; -use frame_support::{traits::Get, weights::Weight, generate_storage_alias}; +use frame_support::{generate_storage_alias, traits::Get, weights::Weight}; use sp_staking::offence::OnOffenceHandler; use sp_std::vec::Vec; /// Type of data stored as a deferred offence type DeferredOffenceOf = ( - Vec< - OffenceDetails< - ::AccountId, - ::IdentificationTuple, - >, - >, + Vec::AccountId, ::IdentificationTuple>>, Vec, SessionIndex, ); diff --git a/frame/offences/src/mock.rs b/frame/offences/src/mock.rs index 5818ae71687b2..84114f015089c 100644 --- a/frame/offences/src/mock.rs +++ b/frame/offences/src/mock.rs @@ -19,22 +19,27 @@ #![cfg(test)] -use std::cell::RefCell; +use crate as offences; use crate::Config; use codec::Encode; -use sp_runtime::Perbill; -use sp_staking::{ - SessionIndex, - offence::{self, Kind, OffenceDetails}, -}; -use sp_runtime::testing::Header; -use sp_runtime::traits::{IdentityLookup, BlakeTwo256}; -use sp_core::H256; use frame_support::{ parameter_types, - weights::{Weight, constants::{WEIGHT_PER_SECOND, RocksDbWeight}}, + weights::{ + constants::{RocksDbWeight, WEIGHT_PER_SECOND}, + Weight, + }, }; -use crate as offences; +use sp_core::H256; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, + Perbill, +}; +use sp_staking::{ + offence::{self, Kind, OffenceDetails}, + SessionIndex, +}; +use std::cell::RefCell; pub struct OnOffenceHandler; @@ -43,8 +48,8 @@ thread_local! 
{ pub static OFFENCE_WEIGHT: RefCell = RefCell::new(Default::default()); } -impl - offence::OnOffenceHandler for OnOffenceHandler +impl offence::OnOffenceHandler + for OnOffenceHandler { fn on_offence( _offenders: &[OffenceDetails], @@ -60,9 +65,7 @@ impl } pub fn with_on_offence_fractions) -> R>(f: F) -> R { - ON_OFFENCE_PERBILL.with(|fractions| { - f(&mut *fractions.borrow_mut()) - }) + ON_OFFENCE_PERBILL.with(|fractions| f(&mut *fractions.borrow_mut())) } type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; @@ -163,10 +166,7 @@ impl offence::Offence for Offence { 1 } - fn slash_fraction( - offenders_count: u32, - validator_set_count: u32, - ) -> Perbill { + fn slash_fraction(offenders_count: u32, validator_set_count: u32) -> Perbill { Perbill::from_percent(5 + offenders_count * 100 / validator_set_count) } } diff --git a/frame/offences/src/tests.rs b/frame/offences/src/tests.rs index d2e0f2d63d550..18cfa9410a6c6 100644 --- a/frame/offences/src/tests.rs +++ b/frame/offences/src/tests.rs @@ -21,11 +21,11 @@ use super::*; use crate::mock::{ - Offences, System, Offence, Event, KIND, new_test_ext, with_on_offence_fractions, - offence_reports, report_id, + new_test_ext, offence_reports, report_id, with_on_offence_fractions, Event, Offence, Offences, + System, KIND, }; -use sp_runtime::Perbill; use frame_system::{EventRecord, Phase}; +use sp_runtime::Perbill; #[test] fn should_report_an_authority_and_trigger_on_offence() { @@ -34,11 +34,7 @@ fn should_report_an_authority_and_trigger_on_offence() { let time_slot = 42; assert_eq!(offence_reports(KIND, time_slot), vec![]); - let offence = Offence { - validator_set_count: 5, - time_slot, - offenders: vec![5], - }; + let offence = Offence { validator_set_count: 5, time_slot, offenders: vec![5] }; // when Offences::report_offence(vec![], offence).unwrap(); @@ -57,11 +53,7 @@ fn should_not_report_the_same_authority_twice_in_the_same_slot() { let time_slot = 42; assert_eq!(offence_reports(KIND, time_slot), 
vec![]); - let offence = Offence { - validator_set_count: 5, - time_slot, - offenders: vec![5], - }; + let offence = Offence { validator_set_count: 5, time_slot, offenders: vec![5] }; Offences::report_offence(vec![], offence.clone()).unwrap(); with_on_offence_fractions(|f| { assert_eq!(f.clone(), vec![Perbill::from_percent(25)]); @@ -79,7 +71,6 @@ fn should_not_report_the_same_authority_twice_in_the_same_slot() { }); } - #[test] fn should_report_in_different_time_slot() { new_test_ext().execute_with(|| { @@ -87,11 +78,7 @@ fn should_report_in_different_time_slot() { let time_slot = 42; assert_eq!(offence_reports(KIND, time_slot), vec![]); - let mut offence = Offence { - validator_set_count: 5, - time_slot, - offenders: vec![5], - }; + let mut offence = Offence { validator_set_count: 5, time_slot, offenders: vec![5] }; Offences::report_offence(vec![], offence.clone()).unwrap(); with_on_offence_fractions(|f| { assert_eq!(f.clone(), vec![Perbill::from_percent(25)]); @@ -117,11 +104,7 @@ fn should_deposit_event() { let time_slot = 42; assert_eq!(offence_reports(KIND, time_slot), vec![]); - let offence = Offence { - validator_set_count: 5, - time_slot, - offenders: vec![5], - }; + let offence = Offence { validator_set_count: 5, time_slot, offenders: vec![5] }; // when Offences::report_offence(vec![], offence).unwrap(); @@ -145,11 +128,7 @@ fn doesnt_deposit_event_for_dups() { let time_slot = 42; assert_eq!(offence_reports(KIND, time_slot), vec![]); - let offence = Offence { - validator_set_count: 5, - time_slot, - offenders: vec![5], - }; + let offence = Offence { validator_set_count: 5, time_slot, offenders: vec![5] }; Offences::report_offence(vec![], offence.clone()).unwrap(); with_on_offence_fractions(|f| { assert_eq!(f.clone(), vec![Perbill::from_percent(25)]); @@ -181,33 +160,26 @@ fn reports_if_an_offence_is_dup() { let time_slot = 42; assert_eq!(offence_reports(KIND, time_slot), vec![]); - let offence = |time_slot, offenders| TestOffence { - validator_set_count: 
5, - time_slot, - offenders, - }; + let offence = + |time_slot, offenders| TestOffence { validator_set_count: 5, time_slot, offenders }; let mut test_offence = offence(time_slot, vec![0]); // the report for authority 0 at time slot 42 should not be a known // offence - assert!( - !>::is_known_offence( - &test_offence.offenders, - &test_offence.time_slot - ) - ); + assert!(!>::is_known_offence( + &test_offence.offenders, + &test_offence.time_slot + )); // we report an offence for authority 0 at time slot 42 Offences::report_offence(vec![], test_offence.clone()).unwrap(); // the same report should be a known offence now - assert!( - >::is_known_offence( - &test_offence.offenders, - &test_offence.time_slot - ) - ); + assert!(>::is_known_offence( + &test_offence.offenders, + &test_offence.time_slot + )); // and reporting it again should yield a duplicate report error assert_eq!( @@ -219,28 +191,21 @@ fn reports_if_an_offence_is_dup() { test_offence.offenders.push(1); // it should not be a known offence anymore - assert!( - !>::is_known_offence( - &test_offence.offenders, - &test_offence.time_slot - ) - ); + assert!(!>::is_known_offence( + &test_offence.offenders, + &test_offence.time_slot + )); // and reporting it again should work without any error - assert_eq!( - Offences::report_offence(vec![], test_offence.clone()), - Ok(()) - ); + assert_eq!(Offences::report_offence(vec![], test_offence.clone()), Ok(())); // creating a new offence for the same authorities on the next slot // should be considered a new offence and thefore not known let test_offence_next_slot = offence(time_slot + 1, vec![0, 1]); - assert!( - !>::is_known_offence( - &test_offence_next_slot.offenders, - &test_offence_next_slot.time_slot - ) - ); + assert!(!>::is_known_offence( + &test_offence_next_slot.offenders, + &test_offence_next_slot.time_slot + )); }); } @@ -253,16 +218,8 @@ fn should_properly_count_offences() { let time_slot = 42; assert_eq!(offence_reports(KIND, time_slot), vec![]); - let 
offence1 = Offence { - validator_set_count: 5, - time_slot, - offenders: vec![5], - }; - let offence2 = Offence { - validator_set_count: 5, - time_slot, - offenders: vec![4], - }; + let offence1 = Offence { validator_set_count: 5, time_slot, offenders: vec![5] }; + let offence2 = Offence { validator_set_count: 5, time_slot, offenders: vec![4] }; Offences::report_offence(vec![], offence1).unwrap(); with_on_offence_fractions(|f| { assert_eq!(f.clone(), vec![Perbill::from_percent(25)]); @@ -294,26 +251,12 @@ fn should_properly_sort_offences() { let time_slot = 42; assert_eq!(offence_reports(KIND, time_slot), vec![]); - let offence1 = Offence { - validator_set_count: 5, - time_slot, - offenders: vec![5], - }; - let offence2 = Offence { - validator_set_count: 5, - time_slot, - offenders: vec![4], - }; - let offence3 = Offence { - validator_set_count: 5, - time_slot: time_slot + 1, - offenders: vec![6, 7], - }; - let offence4 = Offence { - validator_set_count: 5, - time_slot: time_slot - 1, - offenders: vec![3], - }; + let offence1 = Offence { validator_set_count: 5, time_slot, offenders: vec![5] }; + let offence2 = Offence { validator_set_count: 5, time_slot, offenders: vec![4] }; + let offence3 = + Offence { validator_set_count: 5, time_slot: time_slot + 1, offenders: vec![6, 7] }; + let offence4 = + Offence { validator_set_count: 5, time_slot: time_slot - 1, offenders: vec![3] }; Offences::report_offence(vec![], offence1).unwrap(); with_on_offence_fractions(|f| { assert_eq!(f.clone(), vec![Perbill::from_percent(25)]); @@ -327,10 +270,10 @@ fn should_properly_sort_offences() { Offences::report_offence(vec![], offence4).unwrap(); // then - let same_kind_reports = - Vec::<(u128, sp_core::H256)>::decode( - &mut &crate::ReportsByKindIndex::::get(KIND)[..], - ).unwrap(); + let same_kind_reports = Vec::<(u128, sp_core::H256)>::decode( + &mut &crate::ReportsByKindIndex::::get(KIND)[..], + ) + .unwrap(); assert_eq!( same_kind_reports, vec![ diff --git 
a/frame/proxy/src/benchmarking.rs b/frame/proxy/src/benchmarking.rs index 336a80dd4ac5f..a06c22a3ed8fe 100644 --- a/frame/proxy/src/benchmarking.rs +++ b/frame/proxy/src/benchmarking.rs @@ -20,10 +20,10 @@ #![cfg(feature = "runtime-benchmarks")] use super::*; +use crate::Pallet as Proxy; +use frame_benchmarking::{account, benchmarks, impl_benchmark_test_suite, whitelisted_caller}; use frame_system::RawOrigin; -use frame_benchmarking::{benchmarks, account, whitelisted_caller, impl_benchmark_test_suite}; use sp_runtime::traits::Bounded; -use crate::Pallet as Proxy; const SEED: u32 = 0; @@ -48,7 +48,7 @@ fn add_proxies(n: u32, maybe_who: Option) -> Result<(), fn add_announcements( n: u32, maybe_who: Option, - maybe_real: Option + maybe_real: Option, ) -> Result<(), &'static str> { let caller = maybe_who.unwrap_or_else(|| account("caller", 0, SEED)); T::Currency::make_free_balance_be(&caller, BalanceOf::::max_value()); @@ -247,8 +247,4 @@ benchmarks! { } } -impl_benchmark_test_suite!( - Proxy, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Proxy, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/proxy/src/lib.rs b/frame/proxy/src/lib.rs index 8f4c42b76c6eb..fe94012b48188 100644 --- a/frame/proxy/src/lib.rs +++ b/frame/proxy/src/lib.rs @@ -29,42 +29,49 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] -mod tests; mod benchmarking; +mod tests; pub mod weights; -use sp_std::{prelude::*, convert::TryInto}; -use codec::{Encode, Decode, MaxEncodedLen}; +use codec::{Decode, Encode, MaxEncodedLen}; +use frame_support::{ + dispatch::{DispatchError, DispatchResultWithPostInfo, PostDispatchInfo}, + ensure, + traits::{Currency, Get, InstanceFilter, IsSubType, IsType, OriginTrait, ReservableCurrency}, + weights::GetDispatchInfo, + RuntimeDebug, +}; +use frame_system::{self as system}; use scale_info::TypeInfo; use sp_io::hashing::blake2_256; use sp_runtime::{ + traits::{Dispatchable, Hash, Saturating, Zero}, DispatchResult, - traits::{Dispatchable, Zero, Hash, Saturating} }; -use frame_support::{ - RuntimeDebug, ensure, - dispatch::{DispatchResultWithPostInfo, PostDispatchInfo}, - traits::{ - Get, ReservableCurrency, Currency, InstanceFilter, OriginTrait, - IsType, IsSubType, - }, - weights::GetDispatchInfo, -}; -use frame_system::{self as system}; -use frame_support::dispatch::DispatchError; +use sp_std::{convert::TryInto, prelude::*}; pub use weights::WeightInfo; pub use pallet::*; type CallHashOf = <::CallHasher as Hash>::Output; -type BalanceOf = <::Currency as Currency<::AccountId>>::Balance; +type BalanceOf = + <::Currency as Currency<::AccountId>>::Balance; /// The parameters under which a particular account has a proxy relationship with some other /// account. #[derive( - Encode, Decode, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, RuntimeDebug, MaxEncodedLen, - TypeInfo + Encode, + Decode, + Clone, + Copy, + Eq, + PartialEq, + Ord, + PartialOrd, + RuntimeDebug, + MaxEncodedLen, + TypeInfo, )] pub struct ProxyDefinition { /// The account which may act on behalf of another. 
@@ -89,9 +96,9 @@ pub struct Announcement { #[frame_support::pallet] pub mod pallet { + use super::{DispatchResult, *}; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::{*, DispatchResult}; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -105,8 +112,11 @@ pub mod pallet { type Event: From> + IsType<::Event>; /// The overarching call type. - type Call: Parameter + Dispatchable - + GetDispatchInfo + From> + IsSubType> + type Call: Parameter + + Dispatchable + + GetDispatchInfo + + From> + + IsSubType> + IsType<::Call>; /// The currency mechanism. @@ -116,8 +126,13 @@ pub mod pallet { /// The instance filter determines whether a given call may be proxied under this type. /// /// IMPORTANT: `Default` must be provided and MUST BE the the *most permissive* value. - type ProxyType: Parameter + Member + Ord + PartialOrd + InstanceFilter<::Call> - + Default + MaxEncodedLen; + type ProxyType: Parameter + + Member + + Ord + + PartialOrd + + InstanceFilter<::Call> + + Default + + MaxEncodedLen; /// The base amount of currency needed to reserve for creating a proxy. 
/// @@ -295,21 +310,17 @@ pub mod pallet { origin: OriginFor, proxy_type: T::ProxyType, delay: T::BlockNumber, - index: u16 + index: u16, ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; let anonymous = Self::anonymous_account(&who, &proxy_type, index, None); ensure!(!Proxies::::contains_key(&anonymous), Error::::Duplicate); - let proxy_def = ProxyDefinition { - delegate: who.clone(), - proxy_type: proxy_type.clone(), - delay, - }; - let bounded_proxies: BoundedVec<_, T::MaxProxies> = vec![proxy_def] - .try_into() - .map_err(|_| Error::::TooMany)?; + let proxy_def = + ProxyDefinition { delegate: who.clone(), proxy_type: proxy_type.clone(), delay }; + let bounded_proxies: BoundedVec<_, T::MaxProxies> = + vec![proxy_def].try_into().map_err(|_| Error::::TooMany)?; let deposit = T::ProxyDepositBase::get() + T::ProxyDepositFactor::get(); T::Currency::reserve(&who, deposit)?; @@ -386,10 +397,12 @@ pub mod pallet { pub fn announce( origin: OriginFor, real: T::AccountId, - call_hash: CallHashOf - ) -> DispatchResultWithPostInfo{ + call_hash: CallHashOf, + ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; - Proxies::::get(&real).0.into_iter() + Proxies::::get(&real) + .0 + .into_iter() .find(|x| &x.delegate == &who) .ok_or(Error::::NotProxy)?; @@ -407,7 +420,10 @@ pub mod pallet { T::AnnouncementDepositBase::get(), T::AnnouncementDepositFactor::get(), pending.len(), - ).map(|d| d.expect("Just pushed; pending.len() > 0; rejig_deposit returns Some; qed")) + ) + .map(|d| { + d.expect("Just pushed; pending.len() > 0; rejig_deposit returns Some; qed") + }) .map(|d| *deposit = d) })?; Self::deposit_event(Event::Announced(real, who, call_hash)); @@ -437,7 +453,7 @@ pub mod pallet { pub fn remove_announcement( origin: OriginFor, real: T::AccountId, - call_hash: CallHashOf + call_hash: CallHashOf, ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; Self::edit_announcements(&who, |ann| ann.real != real || ann.call_hash != 
call_hash)?; @@ -467,10 +483,12 @@ pub mod pallet { pub fn reject_announcement( origin: OriginFor, delegate: T::AccountId, - call_hash: CallHashOf + call_hash: CallHashOf, ) -> DispatchResultWithPostInfo { let who = ensure_signed(origin)?; - Self::edit_announcements(&delegate, |ann| ann.real != who || ann.call_hash != call_hash)?; + Self::edit_announcements(&delegate, |ann| { + ann.real != who || ann.call_hash != call_hash + })?; Ok(().into()) } @@ -512,9 +530,12 @@ pub mod pallet { let call_hash = T::CallHasher::hash_of(&call); let now = system::Pallet::::block_number(); - Self::edit_announcements(&delegate, |ann| - ann.real != real || ann.call_hash != call_hash || now.saturating_sub(ann.height) < def.delay - ).map_err(|_| Error::::Unannounced)?; + Self::edit_announcements(&delegate, |ann| { + ann.real != real || + ann.call_hash != call_hash || + now.saturating_sub(ann.height) < def.delay + }) + .map_err(|_| Error::::Unannounced)?; Self::do_proxy(def, real, *call); @@ -524,8 +545,7 @@ pub mod pallet { #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] - pub enum Event - { + pub enum Event { /// A proxy was executed correctly, with the given \[result\]. ProxyExecuted(DispatchResult), /// Anonymous account has been created by new proxy with given @@ -536,10 +556,10 @@ pub mod pallet { } /// Old name generated by `decl_event`. - #[deprecated(note="use `Event` instead")] + #[deprecated(note = "use `Event` instead")] pub type RawEvent = Event; - #[pallet::error] + #[pallet::error] pub enum Error { /// There are too many proxies registered or too many announcements pending. TooMany, @@ -568,13 +588,10 @@ pub mod pallet { Twox64Concat, T::AccountId, ( - BoundedVec< - ProxyDefinition, - T::MaxProxies, - >, - BalanceOf + BoundedVec, T::MaxProxies>, + BalanceOf, ), - ValueQuery + ValueQuery, >; /// The announcements made by the proxy (key). 
@@ -585,19 +602,14 @@ pub mod pallet { Twox64Concat, T::AccountId, ( - BoundedVec< - Announcement, T::BlockNumber>, - T::MaxPending, - >, + BoundedVec, T::BlockNumber>, T::MaxPending>, BalanceOf, ), - ValueQuery + ValueQuery, >; - } impl Pallet { - /// Calculate the address of an anonymous account. /// /// - `who`: The spawner account. @@ -615,10 +627,12 @@ impl Pallet { index: u16, maybe_when: Option<(T::BlockNumber, u32)>, ) -> T::AccountId { - let (height, ext_index) = maybe_when.unwrap_or_else(|| ( - system::Pallet::::block_number(), - system::Pallet::::extrinsic_index().unwrap_or_default() - )); + let (height, ext_index) = maybe_when.unwrap_or_else(|| { + ( + system::Pallet::::block_number(), + system::Pallet::::extrinsic_index().unwrap_or_default(), + ) + }); let entropy = (b"modlpy/proxy____", who, height, ext_index, proxy_type, index) .using_encoded(blake2_256); T::AccountId::decode(&mut &entropy[..]).unwrap_or_default() @@ -701,26 +715,22 @@ impl Pallet { factor: BalanceOf, len: usize, ) -> Result>, DispatchError> { - let new_deposit = if len == 0 { - BalanceOf::::zero() - } else { - base + factor * (len as u32).into() - }; + let new_deposit = + if len == 0 { BalanceOf::::zero() } else { base + factor * (len as u32).into() }; if new_deposit > old_deposit { T::Currency::reserve(&who, new_deposit - old_deposit)?; } else if new_deposit < old_deposit { T::Currency::unreserve(&who, old_deposit - new_deposit); } - Ok(if len == 0 { - None - } else { - Some(new_deposit) - }) + Ok(if len == 0 { None } else { Some(new_deposit) }) } fn edit_announcements< - F: FnMut(&Announcement, T::BlockNumber>) -> bool - >(delegate: &T::AccountId, f: F) -> DispatchResult { + F: FnMut(&Announcement, T::BlockNumber>) -> bool, + >( + delegate: &T::AccountId, + f: F, + ) -> DispatchResult { Announcements::::try_mutate_exists(delegate, |x| { let (mut pending, old_deposit) = x.take().ok_or(Error::::NotFound)?; let orig_pending_len = pending.len(); @@ -732,7 +742,8 @@ impl Pallet { 
T::AnnouncementDepositBase::get(), T::AnnouncementDepositFactor::get(), pending.len(), - )?.map(|deposit| (pending, deposit)); + )? + .map(|deposit| (pending, deposit)); Ok(()) }) } @@ -743,7 +754,8 @@ impl Pallet { force_proxy_type: Option, ) -> Result, DispatchError> { let f = |x: &ProxyDefinition| -> bool { - &x.delegate == delegate && force_proxy_type.as_ref().map_or(true, |y| &x.proxy_type == y) + &x.delegate == delegate && + force_proxy_type.as_ref().map_or(true, |y| &x.proxy_type == y) }; Ok(Proxies::::get(real).0.into_iter().find(f).ok_or(Error::::NotProxy)?) } @@ -760,12 +772,15 @@ impl Pallet { // We make sure the proxy call does access this pallet to change modify proxies. match c.is_sub_type() { // Proxy call cannot add or remove a proxy with more permissions than it already has. - Some(Call::add_proxy { ref proxy_type, .. } | Call::remove_proxy { ref proxy_type, .. }) - if !def.proxy_type.is_superset(&proxy_type) => false, + Some( + Call::add_proxy { ref proxy_type, .. } | + Call::remove_proxy { ref proxy_type, .. }, + ) if !def.proxy_type.is_superset(&proxy_type) => false, // Proxy call cannot remove all proxies or kill anonymous proxies unless it has full permissions. Some(Call::remove_proxies { .. } | Call::kill_anonymous { .. 
}) - if def.proxy_type != T::ProxyType::default() => false, - _ => def.proxy_type.filter(c) + if def.proxy_type != T::ProxyType::default() => + false, + _ => def.proxy_type.filter(c), } }); let e = call.dispatch(origin); diff --git a/frame/proxy/src/tests.rs b/frame/proxy/src/tests.rs index 89a8a99336aef..227cc85be596f 100644 --- a/frame/proxy/src/tests.rs +++ b/frame/proxy/src/tests.rs @@ -21,13 +21,16 @@ use super::*; +use crate as proxy; +use codec::{Decode, Encode}; use frame_support::{ - assert_ok, assert_noop, parameter_types, RuntimeDebug, dispatch::DispatchError, traits::Filter, + assert_noop, assert_ok, dispatch::DispatchError, parameter_types, traits::Filter, RuntimeDebug, }; -use codec::{Encode, Decode}; use sp_core::H256; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header}; -use crate as proxy; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, +}; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -103,7 +106,16 @@ parameter_types! 
{ pub const AnnouncementDepositFactor: u64 = 1; } #[derive( - Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Encode, Decode, RuntimeDebug, MaxEncodedLen, + Copy, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Encode, + Decode, + RuntimeDebug, + MaxEncodedLen, scale_info::TypeInfo, )] pub enum ProxyType { @@ -111,12 +123,17 @@ pub enum ProxyType { JustTransfer, JustUtility, } -impl Default for ProxyType { fn default() -> Self { Self::Any } } +impl Default for ProxyType { + fn default() -> Self { + Self::Any + } +} impl InstanceFilter for ProxyType { fn filter(&self, c: &Call) -> bool { match self { ProxyType::Any => true, - ProxyType::JustTransfer => matches!(c, Call::Balances(pallet_balances::Call::transfer(..))), + ProxyType::JustTransfer => + matches!(c, Call::Balances(pallet_balances::Call::transfer(..))), ProxyType::JustUtility => matches!(c, Call::Utility(..)), } } @@ -150,27 +167,31 @@ impl Config for Test { type AnnouncementDepositFactor = AnnouncementDepositFactor; } +use super::{Call as ProxyCall, Event as ProxyEvent}; use frame_system::Call as SystemCall; -use pallet_balances::Call as BalancesCall; -use pallet_balances::Error as BalancesError; -use pallet_balances::Event as BalancesEvent; -use pallet_utility::Call as UtilityCall; -use pallet_utility::Event as UtilityEvent; -use super::Event as ProxyEvent; -use super::Call as ProxyCall; +use pallet_balances::{Call as BalancesCall, Error as BalancesError, Event as BalancesEvent}; +use pallet_utility::{Call as UtilityCall, Event as UtilityEvent}; pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_balances::GenesisConfig:: { balances: vec![(1, 10), (2, 10), (3, 10), (4, 10), (5, 2)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); let mut ext = sp_io::TestExternalities::new(t); ext.execute_with(|| System::set_block_number(1)); ext } fn last_events(n: usize) -> Vec { - 
system::Pallet::::events().into_iter().rev().take(n).rev().map(|e| e.event).collect() + system::Pallet::::events() + .into_iter() + .rev() + .take(n) + .rev() + .map(|e| e.event) + .collect() } fn expect_events(e: Vec) { @@ -186,27 +207,21 @@ fn announcement_works() { assert_ok!(Proxy::announce(Origin::signed(3), 1, [1; 32].into())); let announcements = Announcements::::get(3); - assert_eq!(announcements.0, vec![Announcement { - real: 1, - call_hash: [1; 32].into(), - height: 1, - }]); + assert_eq!( + announcements.0, + vec![Announcement { real: 1, call_hash: [1; 32].into(), height: 1 }] + ); assert_eq!(Balances::reserved_balance(3), announcements.1); assert_ok!(Proxy::announce(Origin::signed(3), 2, [2; 32].into())); let announcements = Announcements::::get(3); - assert_eq!(announcements.0, vec![ - Announcement { - real: 1, - call_hash: [1; 32].into(), - height: 1, - }, - Announcement { - real: 2, - call_hash: [2; 32].into(), - height: 1, - }, - ]); + assert_eq!( + announcements.0, + vec![ + Announcement { real: 1, call_hash: [1; 32].into(), height: 1 }, + Announcement { real: 2, call_hash: [2; 32].into(), height: 1 }, + ] + ); assert_eq!(Balances::reserved_balance(3), announcements.1); assert_noop!(Proxy::announce(Origin::signed(3), 2, [3; 32].into()), Error::::TooMany); @@ -224,11 +239,10 @@ fn remove_announcement_works() { assert_noop!(Proxy::remove_announcement(Origin::signed(3), 1, [0; 32].into()), e); assert_ok!(Proxy::remove_announcement(Origin::signed(3), 1, [1; 32].into())); let announcements = Announcements::::get(3); - assert_eq!(announcements.0, vec![Announcement { - real: 2, - call_hash: [2; 32].into(), - height: 1, - }]); + assert_eq!( + announcements.0, + vec![Announcement { real: 2, call_hash: [2; 32].into(), height: 1 }] + ); assert_eq!(Balances::reserved_balance(3), announcements.1); }); } @@ -246,11 +260,10 @@ fn reject_announcement_works() { assert_noop!(Proxy::reject_announcement(Origin::signed(4), 3, [1; 32].into()), e); 
assert_ok!(Proxy::reject_announcement(Origin::signed(1), 3, [1; 32].into())); let announcements = Announcements::::get(3); - assert_eq!(announcements.0, vec![Announcement { - real: 2, - call_hash: [2; 32].into(), - height: 1, - }]); + assert_eq!( + announcements.0, + vec![Announcement { real: 2, call_hash: [2; 32].into(), height: 1 }] + ); assert_eq!(Balances::reserved_balance(3), announcements.1); }); } @@ -294,11 +307,7 @@ fn proxy_announced_removes_announcement_and_returns_deposit() { system::Pallet::::set_block_number(2); assert_ok!(Proxy::proxy_announced(Origin::signed(0), 3, 1, None, call.clone())); let announcements = Announcements::::get(3); - assert_eq!(announcements.0, vec![Announcement { - real: 2, - call_hash, - height: 1, - }]); + assert_eq!(announcements.0, vec![Announcement { real: 2, call_hash, height: 1 }]); assert_eq!(Balances::reserved_balance(3), announcements.1); }); } @@ -333,7 +342,10 @@ fn filtering_works() { let call = Box::new(Call::Utility(UtilityCall::batch(vec![*inner]))); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); - expect_events(vec![UtilityEvent::BatchCompleted.into(), ProxyEvent::ProxyExecuted(Ok(())).into()]); + expect_events(vec![ + UtilityEvent::BatchCompleted.into(), + ProxyEvent::ProxyExecuted(Ok(())).into(), + ]); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(4), 1, None, call.clone())); @@ -345,7 +357,10 @@ fn filtering_works() { let inner = Box::new(Call::Proxy(ProxyCall::add_proxy(5, ProxyType::Any, 0))); let call = Box::new(Call::Utility(UtilityCall::batch(vec![*inner]))); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); - expect_events(vec![UtilityEvent::BatchCompleted.into(), ProxyEvent::ProxyExecuted(Ok(())).into()]); + expect_events(vec![ + UtilityEvent::BatchCompleted.into(), + ProxyEvent::ProxyExecuted(Ok(())).into(), + ]); 
assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(4), 1, None, call.clone())); @@ -360,7 +375,10 @@ fn filtering_works() { assert_ok!(Proxy::proxy(Origin::signed(4), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(2), 1, None, call.clone())); - expect_events(vec![BalancesEvent::::Unreserved(1, 5).into(), ProxyEvent::ProxyExecuted(Ok(())).into()]); + expect_events(vec![ + BalancesEvent::::Unreserved(1, 5).into(), + ProxyEvent::ProxyExecuted(Ok(())).into(), + ]); }); } @@ -368,7 +386,10 @@ fn filtering_works() { fn add_remove_proxies_works() { new_test_ext().execute_with(|| { assert_ok!(Proxy::add_proxy(Origin::signed(1), 2, ProxyType::Any, 0)); - assert_noop!(Proxy::add_proxy(Origin::signed(1), 2, ProxyType::Any, 0), Error::::Duplicate); + assert_noop!( + Proxy::add_proxy(Origin::signed(1), 2, ProxyType::Any, 0), + Error::::Duplicate + ); assert_eq!(Balances::reserved_balance(1), 2); assert_ok!(Proxy::add_proxy(Origin::signed(1), 2, ProxyType::JustTransfer, 0)); assert_eq!(Balances::reserved_balance(1), 3); @@ -376,8 +397,14 @@ fn add_remove_proxies_works() { assert_eq!(Balances::reserved_balance(1), 4); assert_ok!(Proxy::add_proxy(Origin::signed(1), 4, ProxyType::JustUtility, 0)); assert_eq!(Balances::reserved_balance(1), 5); - assert_noop!(Proxy::add_proxy(Origin::signed(1), 4, ProxyType::Any, 0), Error::::TooMany); - assert_noop!(Proxy::remove_proxy(Origin::signed(1), 3, ProxyType::JustTransfer, 0), Error::::NotFound); + assert_noop!( + Proxy::add_proxy(Origin::signed(1), 4, ProxyType::Any, 0), + Error::::TooMany + ); + assert_noop!( + Proxy::remove_proxy(Origin::signed(1), 3, ProxyType::JustTransfer, 0), + Error::::NotFound + ); assert_ok!(Proxy::remove_proxy(Origin::signed(1), 4, ProxyType::JustUtility, 0)); 
assert_eq!(Balances::reserved_balance(1), 4); assert_ok!(Proxy::remove_proxy(Origin::signed(1), 3, ProxyType::Any, 0)); @@ -386,7 +413,10 @@ fn add_remove_proxies_works() { assert_eq!(Balances::reserved_balance(1), 2); assert_ok!(Proxy::remove_proxy(Origin::signed(1), 2, ProxyType::JustTransfer, 0)); assert_eq!(Balances::reserved_balance(1), 0); - assert_noop!(Proxy::add_proxy(Origin::signed(1), 1, ProxyType::Any, 0), Error::::NoSelfProxy); + assert_noop!( + Proxy::add_proxy(Origin::signed(1), 1, ProxyType::Any, 0), + Error::::NoSelfProxy + ); }); } @@ -409,7 +439,10 @@ fn proxying_works() { assert_ok!(Proxy::add_proxy(Origin::signed(1), 3, ProxyType::Any, 0)); let call = Box::new(Call::Balances(BalancesCall::transfer(6, 1))); - assert_noop!(Proxy::proxy(Origin::signed(4), 1, None, call.clone()), Error::::NotProxy); + assert_noop!( + Proxy::proxy(Origin::signed(4), 1, None, call.clone()), + Error::::NotProxy + ); assert_noop!( Proxy::proxy(Origin::signed(2), 1, Some(ProxyType::Any), call.clone()), Error::::NotProxy @@ -423,7 +456,9 @@ fn proxying_works() { System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); let call = Box::new(Call::Balances(BalancesCall::transfer_keep_alive(6, 1))); - assert_ok!(Call::Proxy(super::Call::proxy(1, None, call.clone())).dispatch(Origin::signed(2))); + assert_ok!( + Call::Proxy(super::Call::proxy(1, None, call.clone())).dispatch(Origin::signed(2)) + ); System::assert_last_event(ProxyEvent::ProxyExecuted(Err(DispatchError::BadOrigin)).into()); assert_ok!(Proxy::proxy(Origin::signed(3), 1, None, call.clone())); System::assert_last_event(ProxyEvent::ProxyExecuted(Ok(())).into()); @@ -436,14 +471,19 @@ fn anonymous_works() { new_test_ext().execute_with(|| { assert_ok!(Proxy::anonymous(Origin::signed(1), ProxyType::Any, 0, 0)); let anon = Proxy::anonymous_account(&1, &ProxyType::Any, 0, None); - System::assert_last_event(ProxyEvent::AnonymousCreated(anon.clone(), 1, ProxyType::Any, 0).into()); + 
System::assert_last_event( + ProxyEvent::AnonymousCreated(anon.clone(), 1, ProxyType::Any, 0).into(), + ); // other calls to anonymous allowed as long as they're not exactly the same. assert_ok!(Proxy::anonymous(Origin::signed(1), ProxyType::JustTransfer, 0, 0)); assert_ok!(Proxy::anonymous(Origin::signed(1), ProxyType::Any, 0, 1)); let anon2 = Proxy::anonymous_account(&2, &ProxyType::Any, 0, None); assert_ok!(Proxy::anonymous(Origin::signed(2), ProxyType::Any, 0, 0)); - assert_noop!(Proxy::anonymous(Origin::signed(1), ProxyType::Any, 0, 0), Error::::Duplicate); + assert_noop!( + Proxy::anonymous(Origin::signed(1), ProxyType::Any, 0, 0), + Error::::Duplicate + ); System::set_extrinsic_index(1); assert_ok!(Proxy::anonymous(Origin::signed(1), ProxyType::Any, 0, 0)); System::set_extrinsic_index(0); @@ -467,6 +507,9 @@ fn anonymous_works() { assert_eq!(Balances::free_balance(1), 0); assert_ok!(Proxy::proxy(Origin::signed(1), anon, None, call.clone())); assert_eq!(Balances::free_balance(1), 2); - assert_noop!(Proxy::proxy(Origin::signed(1), anon, None, call.clone()), Error::::NotProxy); + assert_noop!( + Proxy::proxy(Origin::signed(1), anon, None, call.clone()), + Error::::NotProxy + ); }); } diff --git a/frame/proxy/src/weights.rs b/frame/proxy/src/weights.rs index f250186ad81d7..ebd732d71981d 100644 --- a/frame/proxy/src/weights.rs +++ b/frame/proxy/src/weights.rs @@ -35,37 +35,39 @@ // --output=./frame/proxy/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_proxy. 
pub trait WeightInfo { - fn proxy(p: u32, ) -> Weight; - fn proxy_announced(a: u32, p: u32, ) -> Weight; - fn remove_announcement(a: u32, p: u32, ) -> Weight; - fn reject_announcement(a: u32, p: u32, ) -> Weight; - fn announce(a: u32, p: u32, ) -> Weight; - fn add_proxy(p: u32, ) -> Weight; - fn remove_proxy(p: u32, ) -> Weight; - fn remove_proxies(p: u32, ) -> Weight; - fn anonymous(p: u32, ) -> Weight; - fn kill_anonymous(p: u32, ) -> Weight; + fn proxy(p: u32) -> Weight; + fn proxy_announced(a: u32, p: u32) -> Weight; + fn remove_announcement(a: u32, p: u32) -> Weight; + fn reject_announcement(a: u32, p: u32) -> Weight; + fn announce(a: u32, p: u32) -> Weight; + fn add_proxy(p: u32) -> Weight; + fn remove_proxy(p: u32) -> Weight; + fn remove_proxies(p: u32) -> Weight; + fn anonymous(p: u32) -> Weight; + fn kill_anonymous(p: u32) -> Weight; } /// Weights for pallet_proxy using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn proxy(p: u32, ) -> Weight { + fn proxy(p: u32) -> Weight { (22_645_000 as Weight) // Standard Error: 1_000 .saturating_add((162_000 as Weight).saturating_mul(p as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) } - fn proxy_announced(a: u32, p: u32, ) -> Weight { + fn proxy_announced(a: u32, p: u32) -> Weight { (53_259_000 as Weight) // Standard Error: 2_000 .saturating_add((543_000 as Weight).saturating_mul(a as Weight)) @@ -74,7 +76,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn remove_announcement(a: u32, p: u32, ) -> Weight { + fn remove_announcement(a: u32, p: u32) -> Weight { (37_983_000 as Weight) // Standard Error: 2_000 .saturating_add((545_000 as Weight).saturating_mul(a as Weight)) @@ -83,7 +85,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) 
.saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn reject_announcement(a: u32, p: u32, ) -> Weight { + fn reject_announcement(a: u32, p: u32) -> Weight { (37_922_000 as Weight) // Standard Error: 1_000 .saturating_add((541_000 as Weight).saturating_mul(a as Weight)) @@ -92,7 +94,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn announce(a: u32, p: u32, ) -> Weight { + fn announce(a: u32, p: u32) -> Weight { (51_355_000 as Weight) // Standard Error: 2_000 .saturating_add((534_000 as Weight).saturating_mul(a as Weight)) @@ -101,35 +103,35 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn add_proxy(p: u32, ) -> Weight { + fn add_proxy(p: u32) -> Weight { (35_798_000 as Weight) // Standard Error: 2_000 .saturating_add((228_000 as Weight).saturating_mul(p as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn remove_proxy(p: u32, ) -> Weight { + fn remove_proxy(p: u32) -> Weight { (35_554_000 as Weight) // Standard Error: 3_000 .saturating_add((250_000 as Weight).saturating_mul(p as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn remove_proxies(p: u32, ) -> Weight { + fn remove_proxies(p: u32) -> Weight { (33_911_000 as Weight) // Standard Error: 1_000 .saturating_add((165_000 as Weight).saturating_mul(p as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn anonymous(p: u32, ) -> Weight { + fn anonymous(p: u32) -> Weight { (48_695_000 as Weight) // Standard Error: 1_000 .saturating_add((53_000 as Weight).saturating_mul(p as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) 
.saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn kill_anonymous(p: u32, ) -> Weight { + fn kill_anonymous(p: u32) -> Weight { (35_904_000 as Weight) // Standard Error: 1_000 .saturating_add((159_000 as Weight).saturating_mul(p as Weight)) @@ -140,13 +142,13 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn proxy(p: u32, ) -> Weight { + fn proxy(p: u32) -> Weight { (22_645_000 as Weight) // Standard Error: 1_000 .saturating_add((162_000 as Weight).saturating_mul(p as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) } - fn proxy_announced(a: u32, p: u32, ) -> Weight { + fn proxy_announced(a: u32, p: u32) -> Weight { (53_259_000 as Weight) // Standard Error: 2_000 .saturating_add((543_000 as Weight).saturating_mul(a as Weight)) @@ -155,7 +157,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn remove_announcement(a: u32, p: u32, ) -> Weight { + fn remove_announcement(a: u32, p: u32) -> Weight { (37_983_000 as Weight) // Standard Error: 2_000 .saturating_add((545_000 as Weight).saturating_mul(a as Weight)) @@ -164,7 +166,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn reject_announcement(a: u32, p: u32, ) -> Weight { + fn reject_announcement(a: u32, p: u32) -> Weight { (37_922_000 as Weight) // Standard Error: 1_000 .saturating_add((541_000 as Weight).saturating_mul(a as Weight)) @@ -173,7 +175,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn announce(a: u32, p: u32, ) -> Weight { + fn announce(a: u32, p: u32) -> Weight { (51_355_000 as Weight) // Standard Error: 2_000 .saturating_add((534_000 as Weight).saturating_mul(a as Weight)) @@ -182,35 +184,35 @@ impl WeightInfo for () 
{ .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn add_proxy(p: u32, ) -> Weight { + fn add_proxy(p: u32) -> Weight { (35_798_000 as Weight) // Standard Error: 2_000 .saturating_add((228_000 as Weight).saturating_mul(p as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn remove_proxy(p: u32, ) -> Weight { + fn remove_proxy(p: u32) -> Weight { (35_554_000 as Weight) // Standard Error: 3_000 .saturating_add((250_000 as Weight).saturating_mul(p as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn remove_proxies(p: u32, ) -> Weight { + fn remove_proxies(p: u32) -> Weight { (33_911_000 as Weight) // Standard Error: 1_000 .saturating_add((165_000 as Weight).saturating_mul(p as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn anonymous(p: u32, ) -> Weight { + fn anonymous(p: u32) -> Weight { (48_695_000 as Weight) // Standard Error: 1_000 .saturating_add((53_000 as Weight).saturating_mul(p as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn kill_anonymous(p: u32, ) -> Weight { + fn kill_anonymous(p: u32) -> Weight { (35_904_000 as Weight) // Standard Error: 1_000 .saturating_add((159_000 as Weight).saturating_mul(p as Weight)) diff --git a/frame/randomness-collective-flip/src/lib.rs b/frame/randomness-collective-flip/src/lib.rs index 1ff7d4382da14..64a263dd5bbd0 100644 --- a/frame/randomness-collective-flip/src/lib.rs +++ b/frame/randomness-collective-flip/src/lib.rs @@ -69,9 +69,9 @@ use safe_mix::TripletMix; use codec::Encode; -use sp_std::{prelude::*, convert::TryInto}; -use sp_runtime::traits::{Hash, Saturating}; use frame_support::traits::Randomness; +use 
sp_runtime::traits::{Hash, Saturating}; +use sp_std::{convert::TryInto, prelude::*}; const RANDOM_MATERIAL_LEN: u32 = 81; @@ -85,9 +85,9 @@ pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -101,11 +101,13 @@ pub mod pallet { fn on_initialize(block_number: T::BlockNumber) -> Weight { let parent_hash = >::parent_hash(); - >::mutate(|ref mut values| if values.len() < RANDOM_MATERIAL_LEN as usize { - values.push(parent_hash) - } else { - let index = block_number_to_index::(block_number); - values[index] = parent_hash; + >::mutate(|ref mut values| { + if values.len() < RANDOM_MATERIAL_LEN as usize { + values.push(parent_hash) + } else { + let index = block_number_to_index::(block_number); + values[index] = parent_hash; + } }); T::DbWeight::get().reads_writes(1, 1) @@ -117,8 +119,7 @@ pub mod pallet { /// the oldest hash. 
#[pallet::storage] #[pallet::getter(fn random_material)] - pub(super) type RandomMaterial = - StorageValue<_, Vec, ValueQuery>; + pub(super) type RandomMaterial = StorageValue<_, Vec, ValueQuery>; } impl Randomness for Pallet { @@ -151,17 +152,14 @@ impl Randomness for Pallet { T::Hash::default() }; - ( - seed, - block_number.saturating_sub(RANDOM_MATERIAL_LEN.into()), - ) + (seed, block_number.saturating_sub(RANDOM_MATERIAL_LEN.into())) } } #[cfg(test)] mod tests { - use crate as pallet_randomness_collective_flip; use super::*; + use crate as pallet_randomness_collective_flip; use sp_core::H256; use sp_runtime::{ @@ -169,7 +167,10 @@ mod tests { traits::{BlakeTwo256, Header as _, IdentityLookup}, }; - use frame_support::{parameter_types, traits::{Randomness, OnInitialize}}; + use frame_support::{ + parameter_types, + traits::{OnInitialize, Randomness}, + }; use frame_system::limits; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; @@ -229,7 +230,7 @@ mod tests { #[test] fn test_block_number_to_index() { - for i in 1 .. 1000 { + for i in 1..1000 { assert_eq!((i - 1) as usize % 81, block_number_to_index::(i)); } } @@ -237,13 +238,8 @@ mod tests { fn setup_blocks(blocks: u64) { let mut parent_hash = System::parent_hash(); - for i in 1 .. (blocks + 1) { - System::initialize( - &i, - &parent_hash, - &Default::default(), - frame_system::InitKind::Full, - ); + for i in 1..(blocks + 1) { + System::initialize(&i, &parent_hash, &Default::default(), frame_system::InitKind::Full); CollectiveFlip::on_initialize(i); let header = System::finalize(); diff --git a/frame/recovery/src/lib.rs b/frame/recovery/src/lib.rs index 15ae0953b6633..d63465914f885 100644 --- a/frame/recovery/src/lib.rs +++ b/frame/recovery/src/lib.rs @@ -151,15 +151,16 @@ // Ensure we're `no_std` when compiling for Wasm. 
#![cfg_attr(not(feature = "std"), no_std)] -use sp_std::prelude::*; -use sp_runtime::traits::{Dispatchable, SaturatedConversion, CheckedAdd, CheckedMul}; -use codec::{Encode, Decode}; +use codec::{Decode, Encode}; use scale_info::TypeInfo; +use sp_runtime::traits::{CheckedAdd, CheckedMul, Dispatchable, SaturatedConversion}; +use sp_std::prelude::*; use frame_support::{ - RuntimeDebug, weights::GetDispatchInfo, - traits::{Currency, ReservableCurrency, BalanceStatus}, dispatch::PostDispatchInfo, + traits::{BalanceStatus, Currency, ReservableCurrency}, + weights::GetDispatchInfo, + RuntimeDebug, }; pub use pallet::*; @@ -201,10 +202,10 @@ pub struct RecoveryConfig { #[frame_support::pallet] pub mod pallet { - use frame_support::{ensure, Parameter, pallet_prelude::*, traits::Get}; - use frame_system::{pallet_prelude::*, ensure_signed, ensure_root}; - use sp_runtime::ArithmeticError; use super::*; + use frame_support::{ensure, pallet_prelude::*, traits::Get, Parameter}; + use frame_system::{ensure_root, ensure_signed, pallet_prelude::*}; + use sp_runtime::ArithmeticError; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -217,7 +218,9 @@ pub mod pallet { type Event: From> + IsType<::Event>; /// The overarching call type. - type Call: Parameter + Dispatchable + GetDispatchInfo; + type Call: Parameter + + Dispatchable + + GetDispatchInfo; /// The currency mechanism. type Currency: ReservableCurrency; @@ -313,7 +316,8 @@ pub mod pallet { #[pallet::getter(fn recovery_config)] pub type Recoverable = StorageMap< _, - Twox64Concat, T::AccountId, + Twox64Concat, + T::AccountId, RecoveryConfig, T::AccountId>, >; @@ -323,10 +327,12 @@ pub mod pallet { /// is the user trying to recover the account. 
#[pallet::storage] #[pallet::getter(fn active_recovery)] - pub type ActiveRecoveries= StorageDoubleMap< + pub type ActiveRecoveries = StorageDoubleMap< _, - Twox64Concat, T::AccountId, - Twox64Concat, T::AccountId, + Twox64Concat, + T::AccountId, + Twox64Concat, + T::AccountId, ActiveRecovery, T::AccountId>, >; @@ -365,14 +371,15 @@ pub mod pallet { pub fn as_recovered( origin: OriginFor, account: T::AccountId, - call: Box<::Call> + call: Box<::Call>, ) -> DispatchResult { let who = ensure_signed(origin)?; // Check `who` is allowed to make a call on behalf of `account` let target = Self::proxy(&who).ok_or(Error::::NotAllowed)?; ensure!(&target == &account, Error::::NotAllowed); call.dispatch(frame_system::RawOrigin::Signed(account).into()) - .map(|_| ()).map_err(|e| e.error) + .map(|_| ()) + .map_err(|e| e.error) } /// Allow ROOT to bypass the recovery process and set an a rescuer account @@ -433,7 +440,7 @@ pub mod pallet { origin: OriginFor, friends: Vec, threshold: u16, - delay_period: T::BlockNumber + delay_period: T::BlockNumber, ) -> DispatchResult { let who = ensure_signed(origin)?; // Check account is not already set up for recovery @@ -455,12 +462,8 @@ pub mod pallet { // Reserve the deposit T::Currency::reserve(&who, total_deposit)?; // Create the recovery configuration - let recovery_config = RecoveryConfig { - delay_period, - deposit: total_deposit, - friends, - threshold, - }; + let recovery_config = + RecoveryConfig { delay_period, deposit: total_deposit, friends, threshold }; // Create the recovery configuration storage item >::insert(&who, recovery_config); @@ -496,7 +499,10 @@ pub mod pallet { // Check that the account is recoverable ensure!(>::contains_key(&account), Error::::NotRecoverable); // Check that the recovery process has not already been started - ensure!(!>::contains_key(&account, &who), Error::::AlreadyStarted); + ensure!( + !>::contains_key(&account, &who), + Error::::AlreadyStarted + ); // Take recovery deposit let recovery_deposit = 
T::RecoveryDeposit::get(); T::Currency::reserve(&who, recovery_deposit)?; @@ -541,13 +547,14 @@ pub mod pallet { pub fn vouch_recovery( origin: OriginFor, lost: T::AccountId, - rescuer: T::AccountId + rescuer: T::AccountId, ) -> DispatchResult { let who = ensure_signed(origin)?; // Get the recovery configuration for the lost account. let recovery_config = Self::recovery_config(&lost).ok_or(Error::::NotRecoverable)?; // Get the active recovery process for the rescuer. - let mut active_recovery = Self::active_recovery(&lost, &rescuer).ok_or(Error::::NotStarted)?; + let mut active_recovery = + Self::active_recovery(&lost, &rescuer).ok_or(Error::::NotStarted)?; // Make sure the voter is a friend ensure!(Self::is_friend(&recovery_config.friends, &who), Error::::NotFriend); // Either insert the vouch, or return an error that the user already vouched. @@ -585,13 +592,16 @@ pub mod pallet { pub fn claim_recovery(origin: OriginFor, account: T::AccountId) -> DispatchResult { let who = ensure_signed(origin)?; // Get the recovery configuration for the lost account - let recovery_config = Self::recovery_config(&account).ok_or(Error::::NotRecoverable)?; + let recovery_config = + Self::recovery_config(&account).ok_or(Error::::NotRecoverable)?; // Get the active recovery process for the rescuer - let active_recovery = Self::active_recovery(&account, &who).ok_or(Error::::NotStarted)?; + let active_recovery = + Self::active_recovery(&account, &who).ok_or(Error::::NotStarted)?; ensure!(!Proxy::::contains_key(&who), Error::::AlreadyProxy); // Make sure the delay period has passed let current_block_number = >::block_number(); - let recoverable_block_number = active_recovery.created + let recoverable_block_number = active_recovery + .created .checked_add(&recovery_config.delay_period) .ok_or(ArithmeticError::Overflow)?; ensure!(recoverable_block_number <= current_block_number, Error::::DelayPeriod); @@ -631,10 +641,16 @@ pub mod pallet { pub fn close_recovery(origin: OriginFor, rescuer: 
T::AccountId) -> DispatchResult { let who = ensure_signed(origin)?; // Take the active recovery process started by the rescuer for this account. - let active_recovery = >::take(&who, &rescuer).ok_or(Error::::NotStarted)?; + let active_recovery = + >::take(&who, &rescuer).ok_or(Error::::NotStarted)?; // Move the reserved funds from the rescuer to the rescued account. // Acts like a slashing mechanism for those who try to maliciously recover accounts. - let res = T::Currency::repatriate_reserved(&rescuer, &who, active_recovery.deposit, BalanceStatus::Free); + let res = T::Currency::repatriate_reserved( + &rescuer, + &who, + active_recovery.deposit, + BalanceStatus::Free, + ); debug_assert!(res.is_ok()); Self::deposit_event(Event::::RecoveryClosed(who, rescuer)); Ok(()) diff --git a/frame/recovery/src/mock.rs b/frame/recovery/src/mock.rs index 9139cc12ce54a..c9c01e35bf9bb 100644 --- a/frame/recovery/src/mock.rs +++ b/frame/recovery/src/mock.rs @@ -19,12 +19,16 @@ use super::*; -use frame_support::{parameter_types, traits::{OnInitialize, OnFinalize}}; +use crate as recovery; +use frame_support::{ + parameter_types, + traits::{OnFinalize, OnInitialize}, +}; use sp_core::H256; use sp_runtime::{ - traits::{BlakeTwo256, IdentityLookup}, testing::Header, + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, }; -use crate as recovery; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -113,7 +117,9 @@ pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); pallet_balances::GenesisConfig:: { balances: vec![(1, 100), (2, 100), (3, 100), (4, 100), (5, 100)], - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); t.into() } diff --git a/frame/recovery/src/tests.rs b/frame/recovery/src/tests.rs index 4c7c6ef108d72..9065e9afe8861 100644 --- a/frame/recovery/src/tests.rs +++ 
b/frame/recovery/src/tests.rs @@ -18,15 +18,11 @@ //! Tests for the module. use super::*; +use frame_support::{assert_noop, assert_ok, traits::Currency}; use mock::{ - Recovery, Balances, Test, Origin, Call, BalancesCall, RecoveryCall, - new_test_ext, run_to_block -}; -use sp_runtime::traits::{BadOrigin}; -use frame_support::{ - assert_noop, assert_ok, - traits::{Currency}, + new_test_ext, run_to_block, Balances, BalancesCall, Call, Origin, Recovery, RecoveryCall, Test, }; +use sp_runtime::traits::BadOrigin; #[test] fn basic_setup_works() { @@ -118,7 +114,7 @@ fn malicious_recovery_fails() { assert_ok!(Recovery::vouch_recovery(Origin::signed(2), 5, 1)); // shame on you assert_ok!(Recovery::vouch_recovery(Origin::signed(3), 5, 1)); // shame on you assert_ok!(Recovery::vouch_recovery(Origin::signed(4), 5, 1)); // shame on you - // We met the threshold, lets try to recover the account...? + // We met the threshold, lets try to recover the account...? assert_noop!(Recovery::claim_recovery(Origin::signed(1), 5), Error::::DelayPeriod); // Account 1 needs to wait... 
run_to_block(19); @@ -136,7 +132,12 @@ fn malicious_recovery_fails() { assert_noop!(Recovery::claim_recovery(Origin::signed(1), 5), Error::::NotStarted); // Account 5 can remove their recovery config and pick some better friends assert_ok!(Recovery::remove_recovery(Origin::signed(5))); - assert_ok!(Recovery::create_recovery(Origin::signed(5), vec![22, 33, 44], threshold, delay_period)); + assert_ok!(Recovery::create_recovery( + Origin::signed(5), + vec![22, 33, 44], + threshold, + delay_period + )); }); } @@ -174,9 +175,7 @@ fn create_recovery_handles_basic_errors() { Error::::NotSorted ); // Already configured - assert_ok!( - Recovery::create_recovery(Origin::signed(5), vec![2, 3, 4], 3, 10) - ); + assert_ok!(Recovery::create_recovery(Origin::signed(5), vec![2, 3, 4], 3, 10)); assert_noop!( Recovery::create_recovery(Origin::signed(5), vec![2, 3, 4], 3, 10), Error::::AlreadyRecoverable @@ -191,17 +190,18 @@ fn create_recovery_works() { let threshold = 3; let delay_period = 10; // Account 5 sets up a recovery configuration on their account - assert_ok!(Recovery::create_recovery(Origin::signed(5), friends.clone(), threshold, delay_period)); + assert_ok!(Recovery::create_recovery( + Origin::signed(5), + friends.clone(), + threshold, + delay_period + )); // Deposit is taken, and scales with the number of friends they pick // Base 10 + 1 per friends = 13 total reserved assert_eq!(Balances::reserved_balance(5), 13); // Recovery configuration is correctly stored - let recovery_config = RecoveryConfig { - delay_period, - deposit: 13, - friends: friends.clone(), - threshold, - }; + let recovery_config = + RecoveryConfig { delay_period, deposit: 13, friends: friends.clone(), threshold }; assert_eq!(Recovery::recovery_config(5), Some(recovery_config)); }); } @@ -218,10 +218,18 @@ fn initiate_recovery_handles_basic_errors() { let friends = vec![2, 3, 4]; let threshold = 3; let delay_period = 10; - assert_ok!(Recovery::create_recovery(Origin::signed(5), friends.clone(), 
threshold, delay_period)); + assert_ok!(Recovery::create_recovery( + Origin::signed(5), + friends.clone(), + threshold, + delay_period + )); // Same user cannot recover same account twice assert_ok!(Recovery::initiate_recovery(Origin::signed(1), 5)); - assert_noop!(Recovery::initiate_recovery(Origin::signed(1), 5), Error::::AlreadyStarted); + assert_noop!( + Recovery::initiate_recovery(Origin::signed(1), 5), + Error::::AlreadyStarted + ); // No double deposit assert_eq!(Balances::reserved_balance(1), 10); }); @@ -234,17 +242,18 @@ fn initiate_recovery_works() { let friends = vec![2, 3, 4]; let threshold = 3; let delay_period = 10; - assert_ok!(Recovery::create_recovery(Origin::signed(5), friends.clone(), threshold, delay_period)); + assert_ok!(Recovery::create_recovery( + Origin::signed(5), + friends.clone(), + threshold, + delay_period + )); // Recovery can be initiated assert_ok!(Recovery::initiate_recovery(Origin::signed(1), 5)); // Deposit is reserved assert_eq!(Balances::reserved_balance(1), 10); // Recovery status object is created correctly - let recovery_status = ActiveRecovery { - created: 0, - deposit: 10, - friends: vec![], - }; + let recovery_status = ActiveRecovery { created: 0, deposit: 10, friends: vec![] }; assert_eq!(>::get(&5, &1), Some(recovery_status)); // Multiple users can attempt to recover the same account assert_ok!(Recovery::initiate_recovery(Origin::signed(2), 5)); @@ -255,12 +264,20 @@ fn initiate_recovery_works() { fn vouch_recovery_handles_basic_errors() { new_test_ext().execute_with(|| { // Cannot vouch for non-recoverable account - assert_noop!(Recovery::vouch_recovery(Origin::signed(2), 5, 1), Error::::NotRecoverable); + assert_noop!( + Recovery::vouch_recovery(Origin::signed(2), 5, 1), + Error::::NotRecoverable + ); // Create a recovery process for next tests let friends = vec![2, 3, 4]; let threshold = 3; let delay_period = 10; - assert_ok!(Recovery::create_recovery(Origin::signed(5), friends.clone(), threshold, delay_period)); + 
assert_ok!(Recovery::create_recovery( + Origin::signed(5), + friends.clone(), + threshold, + delay_period + )); // Cannot vouch a recovery process that has not started assert_noop!(Recovery::vouch_recovery(Origin::signed(2), 5, 1), Error::::NotStarted); // Initiate a recovery process @@ -269,7 +286,10 @@ fn vouch_recovery_handles_basic_errors() { assert_noop!(Recovery::vouch_recovery(Origin::signed(22), 5, 1), Error::::NotFriend); // Cannot vouch twice assert_ok!(Recovery::vouch_recovery(Origin::signed(2), 5, 1)); - assert_noop!(Recovery::vouch_recovery(Origin::signed(2), 5, 1), Error::::AlreadyVouched); + assert_noop!( + Recovery::vouch_recovery(Origin::signed(2), 5, 1), + Error::::AlreadyVouched + ); }); } @@ -280,7 +300,12 @@ fn vouch_recovery_works() { let friends = vec![2, 3, 4]; let threshold = 3; let delay_period = 10; - assert_ok!(Recovery::create_recovery(Origin::signed(5), friends.clone(), threshold, delay_period)); + assert_ok!(Recovery::create_recovery( + Origin::signed(5), + friends.clone(), + threshold, + delay_period + )); assert_ok!(Recovery::initiate_recovery(Origin::signed(1), 5)); // Vouching works assert_ok!(Recovery::vouch_recovery(Origin::signed(2), 5, 1)); @@ -288,11 +313,7 @@ fn vouch_recovery_works() { assert_ok!(Recovery::vouch_recovery(Origin::signed(4), 5, 1)); assert_ok!(Recovery::vouch_recovery(Origin::signed(3), 5, 1)); // Final recovery status object is updated correctly - let recovery_status = ActiveRecovery { - created: 0, - deposit: 10, - friends: vec![2, 3, 4], - }; + let recovery_status = ActiveRecovery { created: 0, deposit: 10, friends: vec![2, 3, 4] }; assert_eq!(>::get(&5, &1), Some(recovery_status)); }); } @@ -306,7 +327,12 @@ fn claim_recovery_handles_basic_errors() { let friends = vec![2, 3, 4]; let threshold = 3; let delay_period = 10; - assert_ok!(Recovery::create_recovery(Origin::signed(5), friends.clone(), threshold, delay_period)); + assert_ok!(Recovery::create_recovery( + Origin::signed(5), + friends.clone(), + 
threshold, + delay_period + )); // Cannot claim an account which has not started the recovery process assert_noop!(Recovery::claim_recovery(Origin::signed(1), 5), Error::::NotStarted); assert_ok!(Recovery::initiate_recovery(Origin::signed(1), 5)); @@ -328,7 +354,12 @@ fn claim_recovery_works() { let friends = vec![2, 3, 4]; let threshold = 3; let delay_period = 10; - assert_ok!(Recovery::create_recovery(Origin::signed(5), friends.clone(), threshold, delay_period)); + assert_ok!(Recovery::create_recovery( + Origin::signed(5), + friends.clone(), + threshold, + delay_period + )); assert_ok!(Recovery::initiate_recovery(Origin::signed(1), 5)); assert_ok!(Recovery::vouch_recovery(Origin::signed(2), 5, 1)); assert_ok!(Recovery::vouch_recovery(Origin::signed(3), 5, 1)); @@ -372,7 +403,12 @@ fn remove_recovery_works() { let friends = vec![2, 3, 4]; let threshold = 3; let delay_period = 10; - assert_ok!(Recovery::create_recovery(Origin::signed(5), friends.clone(), threshold, delay_period)); + assert_ok!(Recovery::create_recovery( + Origin::signed(5), + friends.clone(), + threshold, + delay_period + )); assert_ok!(Recovery::initiate_recovery(Origin::signed(1), 5)); assert_ok!(Recovery::initiate_recovery(Origin::signed(2), 5)); // Cannot remove a recovery when there are active recoveries. 
diff --git a/frame/scheduler/src/benchmarking.rs b/frame/scheduler/src/benchmarking.rs index 47375658fb9bc..f6909160c5ee0 100644 --- a/frame/scheduler/src/benchmarking.rs +++ b/frame/scheduler/src/benchmarking.rs @@ -20,10 +20,10 @@ #![cfg(feature = "runtime-benchmarks")] use super::*; -use sp_std::{vec, prelude::*}; -use frame_system::RawOrigin; -use frame_support::{ensure, traits::OnInitialize}; use frame_benchmarking::{benchmarks, impl_benchmark_test_suite}; +use frame_support::{ensure, traits::OnInitialize}; +use frame_system::RawOrigin; +use sp_std::{prelude::*, vec}; use crate::Pallet as Scheduler; use frame_system::Pallet as System; @@ -31,7 +31,7 @@ use frame_system::Pallet as System; const BLOCK_NUMBER: u32 = 2; // Add `n` named items to the schedule -fn fill_schedule (when: T::BlockNumber, n: u32) -> Result<(), &'static str> { +fn fill_schedule(when: T::BlockNumber, n: u32) -> Result<(), &'static str> { // Essentially a no-op call. let call = frame_system::Call::set_storage(vec![]); for i in 0..n { @@ -141,8 +141,4 @@ benchmarks! 
{ } } -impl_benchmark_test_suite!( - Scheduler, - crate::tests::new_test_ext(), - crate::tests::Test, -); +impl_benchmark_test_suite!(Scheduler, crate::tests::new_test_ext(), crate::tests::Test,); diff --git a/frame/scheduler/src/lib.rs b/frame/scheduler/src/lib.rs index f7acfb2a7fb96..706b8abe6e5a0 100644 --- a/frame/scheduler/src/lib.rs +++ b/frame/scheduler/src/lib.rs @@ -54,18 +54,24 @@ mod benchmarking; pub mod weights; -use sp_std::{prelude::*, marker::PhantomData, borrow::Borrow}; -use codec::{Encode, Decode, Codec}; -use scale_info::TypeInfo; -use sp_runtime::{RuntimeDebug, traits::{Zero, One, BadOrigin, Saturating}}; +use codec::{Codec, Decode, Encode}; use frame_support::{ - dispatch::{Dispatchable, DispatchError, DispatchResult, Parameter}, - traits::{Get, schedule::{self, DispatchTime}, OriginTrait, EnsureOrigin, IsType}, + dispatch::{DispatchError, DispatchResult, Dispatchable, Parameter}, + traits::{ + schedule::{self, DispatchTime}, + EnsureOrigin, Get, IsType, OriginTrait, + }, weights::{GetDispatchInfo, Weight}, }; use frame_system::{self as system, ensure_signed}; -pub use weights::WeightInfo; pub use pallet::*; +use scale_info::TypeInfo; +use sp_runtime::{ + traits::{BadOrigin, One, Saturating, Zero}, + RuntimeDebug, +}; +use sp_std::{borrow::Borrow, marker::PhantomData, prelude::*}; +pub use weights::WeightInfo; /// Just a simple index for naming period tasks. pub type PeriodicIndex = u32; @@ -139,8 +145,7 @@ pub mod pallet { + IsType<::Origin>; /// The caller origin, overarching type of all pallets origins. - type PalletsOrigin: From> + Codec + Clone + Eq - + TypeInfo; + type PalletsOrigin: From> + Codec + Clone + Eq + TypeInfo; /// The aggregated call type. 
type Call: Parameter @@ -211,21 +216,21 @@ pub mod pallet { } #[pallet::genesis_config] - pub struct GenesisConfig; + pub struct GenesisConfig; - #[cfg(feature = "std")] - impl Default for GenesisConfig { - fn default() -> Self { + #[cfg(feature = "std")] + impl Default for GenesisConfig { + fn default() -> Self { Self } - } + } - #[pallet::genesis_build] - impl GenesisBuild for GenesisConfig { - fn build(&self) { + #[pallet::genesis_build] + impl GenesisBuild for GenesisConfig { + fn build(&self) { StorageVersion::::put(Releases::V2); - } - } + } + } #[pallet::hooks] impl Hooks> for Pallet { @@ -292,9 +297,9 @@ pub mod pallet { // - It's priority is `HARD_DEADLINE` // - It does not push the weight past the limit. // - It is the first item in the schedule - if s.priority <= schedule::HARD_DEADLINE - || cumulative_weight <= limit - || order == 0 + if s.priority <= schedule::HARD_DEADLINE || + cumulative_weight <= limit || + order == 0 { let r = s.call.clone().dispatch(s.origin.clone().into()); let maybe_id = s.maybe_id.clone(); @@ -498,20 +503,25 @@ impl Pallet { StorageVersion::::put(Releases::V2); Agenda::::translate::< - Vec::Call, T::BlockNumber>>>, _ - >(|_, agenda| Some( - agenda - .into_iter() - .map(|schedule| schedule.map(|schedule| ScheduledV2 { - maybe_id: schedule.maybe_id, - priority: schedule.priority, - call: schedule.call, - maybe_periodic: schedule.maybe_periodic, - origin: system::RawOrigin::Root.into(), - _phantom: Default::default(), - })) - .collect::>() - )); + Vec::Call, T::BlockNumber>>>, + _, + >(|_, agenda| { + Some( + agenda + .into_iter() + .map(|schedule| { + schedule.map(|schedule| ScheduledV2 { + maybe_id: schedule.maybe_id, + priority: schedule.priority, + call: schedule.call, + maybe_periodic: schedule.maybe_periodic, + origin: system::RawOrigin::Root.into(), + _phantom: Default::default(), + }) + }) + .collect::>(), + ) + }); true } else { @@ -522,20 +532,25 @@ impl Pallet { /// Helper to migrate scheduler when the pallet origin 
type has changed. pub fn migrate_origin + codec::Decode>() { Agenda::::translate::< - Vec::Call, T::BlockNumber, OldOrigin, T::AccountId>>>, _ - >(|_, agenda| Some( - agenda - .into_iter() - .map(|schedule| schedule.map(|schedule| Scheduled { - maybe_id: schedule.maybe_id, - priority: schedule.priority, - call: schedule.call, - maybe_periodic: schedule.maybe_periodic, - origin: schedule.origin.into(), - _phantom: Default::default(), - })) - .collect::>() - )); + Vec::Call, T::BlockNumber, OldOrigin, T::AccountId>>>, + _, + >(|_, agenda| { + Some( + agenda + .into_iter() + .map(|schedule| { + schedule.map(|schedule| Scheduled { + maybe_id: schedule.maybe_id, + priority: schedule.priority, + call: schedule.call, + maybe_periodic: schedule.maybe_periodic, + origin: schedule.origin.into(), + _phantom: Default::default(), + }) + }) + .collect::>(), + ) + }); } fn resolve_time(when: DispatchTime) -> Result { @@ -549,7 +564,7 @@ impl Pallet { }; if when <= now { - return Err(Error::::TargetBlockNumberInPast.into()); + return Err(Error::::TargetBlockNumberInPast.into()) } Ok(when) @@ -601,7 +616,7 @@ impl Pallet { |s| -> Result>, DispatchError> { if let (Some(ref o), Some(ref s)) = (origin, s.borrow()) { if *o != s.origin { - return Err(BadOrigin.into()); + return Err(BadOrigin.into()) } }; Ok(s.take()) @@ -626,7 +641,7 @@ impl Pallet { let new_time = Self::resolve_time(new_time)?; if new_time == when { - return Err(Error::::RescheduleNoChange.into()); + return Err(Error::::RescheduleNoChange.into()) } Agenda::::try_mutate(when, |agenda| -> DispatchResult { @@ -653,7 +668,7 @@ impl Pallet { ) -> Result, DispatchError> { // ensure id it is unique if Lookup::::contains_key(&id) { - return Err(Error::::FailedToSchedule)?; + return Err(Error::::FailedToSchedule)? 
} let when = Self::resolve_time(when)?; @@ -696,7 +711,7 @@ impl Pallet { if let Some(s) = agenda.get_mut(i) { if let (Some(ref o), Some(ref s)) = (origin, s.borrow()) { if *o != s.origin { - return Err(BadOrigin.into()); + return Err(BadOrigin.into()) } } *s = None; @@ -723,7 +738,7 @@ impl Pallet { let (when, index) = lookup.ok_or(Error::::NotFound)?; if new_time == when { - return Err(Error::::RescheduleNoChange.into()); + return Err(Error::::RescheduleNoChange.into()) } Agenda::::try_mutate(when, |agenda| -> DispatchResult { @@ -773,10 +788,7 @@ impl schedule::Anon::Call, T::PalletsOr } fn next_dispatch_time((when, index): Self::Address) -> Result { - Agenda::::get(when) - .get(index as usize) - .ok_or(()) - .map(|_| when) + Agenda::::get(when).get(index as usize).ok_or(()).map(|_| when) } } @@ -868,7 +880,10 @@ mod tests { } #[pallet::call] - impl Pallet where ::Origin: OriginTrait { + impl Pallet + where + ::Origin: OriginTrait, + { #[pallet::weight(*weight)] pub fn log(origin: OriginFor, i: u32, weight: Weight) -> DispatchResult { Self::deposit_event(Event::Logged(i, weight)); @@ -879,7 +894,11 @@ mod tests { } #[pallet::weight(*weight)] - pub fn log_without_filter(origin: OriginFor, i: u32, weight: Weight) -> DispatchResult { + pub fn log_without_filter( + origin: OriginFor, + i: u32, + weight: Weight, + ) -> DispatchResult { Self::deposit_event(Event::Logged(i, weight)); LOG.with(|log| { log.borrow_mut().push((origin.caller().clone(), i)); @@ -987,9 +1006,7 @@ mod tests { fn basic_scheduling_works() { new_test_ext().execute_with(|| { let call = Call::Logger(LoggerCall::log(42, 1000)); - assert!(!::BaseCallFilter::filter( - &call - )); + assert!(!::BaseCallFilter::filter(&call)); assert_ok!(Scheduler::do_schedule(DispatchTime::At(4), None, 127, root(), call)); run_to_block(3); assert!(logger::log().is_empty()); @@ -1005,9 +1022,7 @@ mod tests { new_test_ext().execute_with(|| { run_to_block(2); let call = Call::Logger(LoggerCall::log(42, 1000)); - 
assert!(!::BaseCallFilter::filter( - &call - )); + assert!(!::BaseCallFilter::filter(&call)); // This will schedule the call 3 blocks after the next block... so block 3 + 3 = 6 assert_ok!(Scheduler::do_schedule(DispatchTime::After(3), None, 127, root(), call)); run_to_block(5); @@ -1039,7 +1054,11 @@ mod tests { new_test_ext().execute_with(|| { // at #4, every 3 blocks, 3 times. assert_ok!(Scheduler::do_schedule( - DispatchTime::At(4), Some((3, 3)), 127, root(), Call::Logger(logger::Call::log(42, 1000)) + DispatchTime::At(4), + Some((3, 3)), + 127, + root(), + Call::Logger(logger::Call::log(42, 1000)) )); run_to_block(3); assert!(logger::log().is_empty()); @@ -1052,15 +1071,9 @@ mod tests { run_to_block(9); assert_eq!(logger::log(), vec![(root(), 42u32), (root(), 42u32)]); run_to_block(10); - assert_eq!( - logger::log(), - vec![(root(), 42u32), (root(), 42u32), (root(), 42u32)] - ); + assert_eq!(logger::log(), vec![(root(), 42u32), (root(), 42u32), (root(), 42u32)]); run_to_block(100); - assert_eq!( - logger::log(), - vec![(root(), 42u32), (root(), 42u32), (root(), 42u32)] - ); + assert_eq!(logger::log(), vec![(root(), 42u32), (root(), 42u32), (root(), 42u32)]); }); } @@ -1069,14 +1082,20 @@ mod tests { new_test_ext().execute_with(|| { let call = Call::Logger(LoggerCall::log(42, 1000)); assert!(!::BaseCallFilter::filter(&call)); - assert_eq!(Scheduler::do_schedule(DispatchTime::At(4), None, 127, root(), call).unwrap(), (4, 0)); + assert_eq!( + Scheduler::do_schedule(DispatchTime::At(4), None, 127, root(), call).unwrap(), + (4, 0) + ); run_to_block(3); assert!(logger::log().is_empty()); assert_eq!(Scheduler::do_reschedule((4, 0), DispatchTime::At(6)).unwrap(), (6, 0)); - assert_noop!(Scheduler::do_reschedule((6, 0), DispatchTime::At(6)), Error::::RescheduleNoChange); + assert_noop!( + Scheduler::do_reschedule((6, 0), DispatchTime::At(6)), + Error::::RescheduleNoChange + ); run_to_block(4); assert!(logger::log().is_empty()); @@ -1094,16 +1113,31 @@ mod tests { 
new_test_ext().execute_with(|| { let call = Call::Logger(LoggerCall::log(42, 1000)); assert!(!::BaseCallFilter::filter(&call)); - assert_eq!(Scheduler::do_schedule_named( - 1u32.encode(), DispatchTime::At(4), None, 127, root(), call - ).unwrap(), (4, 0)); + assert_eq!( + Scheduler::do_schedule_named( + 1u32.encode(), + DispatchTime::At(4), + None, + 127, + root(), + call + ) + .unwrap(), + (4, 0) + ); run_to_block(3); assert!(logger::log().is_empty()); - assert_eq!(Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(6)).unwrap(), (6, 0)); + assert_eq!( + Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(6)).unwrap(), + (6, 0) + ); - assert_noop!(Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(6)), Error::::RescheduleNoChange); + assert_noop!( + Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(6)), + Error::::RescheduleNoChange + ); run_to_block(4); assert!(logger::log().is_empty()); @@ -1121,15 +1155,30 @@ mod tests { new_test_ext().execute_with(|| { let call = Call::Logger(LoggerCall::log(42, 1000)); assert!(!::BaseCallFilter::filter(&call)); - assert_eq!(Scheduler::do_schedule_named( - 1u32.encode(), DispatchTime::At(4), Some((3, 3)), 127, root(), call - ).unwrap(), (4, 0)); + assert_eq!( + Scheduler::do_schedule_named( + 1u32.encode(), + DispatchTime::At(4), + Some((3, 3)), + 127, + root(), + call + ) + .unwrap(), + (4, 0) + ); run_to_block(3); assert!(logger::log().is_empty()); - assert_eq!(Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(5)).unwrap(), (5, 0)); - assert_eq!(Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(6)).unwrap(), (6, 0)); + assert_eq!( + Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(5)).unwrap(), + (5, 0) + ); + assert_eq!( + Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(6)).unwrap(), + (6, 0) + ); run_to_block(5); assert!(logger::log().is_empty()); @@ -1137,7 +1186,10 @@ mod tests { run_to_block(6); assert_eq!(logger::log(), 
vec![(root(), 42u32)]); - assert_eq!(Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(10)).unwrap(), (10, 0)); + assert_eq!( + Scheduler::do_reschedule_named(1u32.encode(), DispatchTime::At(10)).unwrap(), + (10, 0) + ); run_to_block(9); assert_eq!(logger::log(), vec![(root(), 42u32)]); @@ -1158,11 +1210,22 @@ mod tests { new_test_ext().execute_with(|| { // at #4. Scheduler::do_schedule_named( - 1u32.encode(), DispatchTime::At(4), None, 127, root(), Call::Logger(LoggerCall::log(69, 1000)) - ).unwrap(); + 1u32.encode(), + DispatchTime::At(4), + None, + 127, + root(), + Call::Logger(LoggerCall::log(69, 1000)), + ) + .unwrap(); let i = Scheduler::do_schedule( - DispatchTime::At(4), None, 127, root(), Call::Logger(LoggerCall::log(42, 1000)) - ).unwrap(); + DispatchTime::At(4), + None, + 127, + root(), + Call::Logger(LoggerCall::log(42, 1000)), + ) + .unwrap(); run_to_block(3); assert!(logger::log().is_empty()); assert_ok!(Scheduler::do_cancel_named(None, 1u32.encode())); @@ -1316,10 +1379,7 @@ mod tests { assert_eq!(logger::log(), vec![(root(), 2600u32)]); // 69 and 42 fit together run_to_block(5); - assert_eq!( - logger::log(), - vec![(root(), 2600u32), (root(), 69u32), (root(), 42u32)] - ); + assert_eq!(logger::log(), vec![(root(), 2600u32), (root(), 69u32), (root(), 42u32)]); }); } @@ -1373,9 +1433,9 @@ mod tests { let call_weight = MaximumSchedulerWeight::get() / 2; assert_eq!( actual_weight, - call_weight - + base_weight + base_multiplier - + named_multiplier + periodic_multiplier + call_weight + + base_weight + base_multiplier + + named_multiplier + periodic_multiplier ); assert_eq!(logger::log(), vec![(root(), 2600u32)]); @@ -1386,10 +1446,7 @@ mod tests { actual_weight, call_weight + base_weight + base_multiplier * 2 + periodic_multiplier ); - assert_eq!( - logger::log(), - vec![(root(), 2600u32), (root(), 69u32), (root(), 42u32)] - ); + assert_eq!(logger::log(), vec![(root(), 2600u32), (root(), 69u32), (root(), 42u32)]); // Will include named only 
let actual_weight = Scheduler::on_initialize(3); @@ -1400,12 +1457,7 @@ mod tests { ); assert_eq!( logger::log(), - vec![ - (root(), 2600u32), - (root(), 69u32), - (root(), 42u32), - (root(), 3u32) - ] + vec![(root(), 2600u32), (root(), 69u32), (root(), 42u32), (root(), 3u32)] ); // Will contain none @@ -1489,10 +1541,7 @@ mod tests { // Scheduled calls are in the agenda. assert_eq!(Agenda::::get(4).len(), 2); assert!(logger::log().is_empty()); - assert_ok!(Scheduler::cancel_named( - system::RawOrigin::Signed(1).into(), - 1u32.encode() - )); + assert_ok!(Scheduler::cancel_named(system::RawOrigin::Signed(1).into(), 1u32.encode())); assert_ok!(Scheduler::cancel(system::RawOrigin::Signed(1).into(), 4, 1)); // Scheduled calls are made NONE, so should not effect state run_to_block(100); @@ -1551,18 +1600,12 @@ mod tests { Scheduler::cancel_named(system::RawOrigin::Signed(2).into(), 1u32.encode()), BadOrigin ); - assert_noop!( - Scheduler::cancel(system::RawOrigin::Signed(2).into(), 4, 1), - BadOrigin - ); + assert_noop!(Scheduler::cancel(system::RawOrigin::Signed(2).into(), 4, 1), BadOrigin); assert_noop!( Scheduler::cancel_named(system::RawOrigin::Root.into(), 1u32.encode()), BadOrigin ); - assert_noop!( - Scheduler::cancel(system::RawOrigin::Root.into(), 4, 1), - BadOrigin - ); + assert_noop!(Scheduler::cancel(system::RawOrigin::Root.into(), 4, 1), BadOrigin); run_to_block(5); assert_eq!( logger::log(), diff --git a/frame/scheduler/src/weights.rs b/frame/scheduler/src/weights.rs index 648652428cbb8..67b1079fdd35d 100644 --- a/frame/scheduler/src/weights.rs +++ b/frame/scheduler/src/weights.rs @@ -35,46 +35,48 @@ // --output=./frame/scheduler/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight 
functions needed for pallet_scheduler. pub trait WeightInfo { - fn schedule(s: u32, ) -> Weight; - fn cancel(s: u32, ) -> Weight; - fn schedule_named(s: u32, ) -> Weight; - fn cancel_named(s: u32, ) -> Weight; + fn schedule(s: u32) -> Weight; + fn cancel(s: u32) -> Weight; + fn schedule_named(s: u32) -> Weight; + fn cancel_named(s: u32) -> Weight; } /// Weights for pallet_scheduler using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { - fn schedule(s: u32, ) -> Weight { + fn schedule(s: u32) -> Weight { (24_811_000 as Weight) // Standard Error: 1_000 .saturating_add((116_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn cancel(s: u32, ) -> Weight { + fn cancel(s: u32) -> Weight { (23_851_000 as Weight) // Standard Error: 3_000 .saturating_add((1_439_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn schedule_named(s: u32, ) -> Weight { + fn schedule_named(s: u32) -> Weight { (31_096_000 as Weight) // Standard Error: 1_000 .saturating_add((141_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(2 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn cancel_named(s: u32, ) -> Weight { + fn cancel_named(s: u32) -> Weight { (26_715_000 as Weight) // Standard Error: 4_000 .saturating_add((1_455_000 as Weight).saturating_mul(s as Weight)) @@ -85,28 +87,28 @@ impl WeightInfo for SubstrateWeight { // For backwards compatibility and tests impl WeightInfo for () { - fn schedule(s: u32, ) -> Weight { + fn schedule(s: u32) -> Weight { (24_811_000 as Weight) // Standard Error: 1_000 .saturating_add((116_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) 
.saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn cancel(s: u32, ) -> Weight { + fn cancel(s: u32) -> Weight { (23_851_000 as Weight) // Standard Error: 3_000 .saturating_add((1_439_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn schedule_named(s: u32, ) -> Weight { + fn schedule_named(s: u32) -> Weight { (31_096_000 as Weight) // Standard Error: 1_000 .saturating_add((141_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(2 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn cancel_named(s: u32, ) -> Weight { + fn cancel_named(s: u32) -> Weight { (26_715_000 as Weight) // Standard Error: 4_000 .saturating_add((1_455_000 as Weight).saturating_mul(s as Weight)) diff --git a/frame/scored-pool/src/lib.rs b/frame/scored-pool/src/lib.rs index dded7c45963fa..a5cdb6274f995 100644 --- a/frame/scored-pool/src/lib.rs +++ b/frame/scored-pool/src/lib.rs @@ -91,18 +91,16 @@ mod mock; mod tests; use codec::FullCodec; -use sp_std::{ - fmt::Debug, - prelude::*, -}; use frame_support::{ ensure, - traits::{ChangeMembers, InitializeMembers, Currency, Get, ReservableCurrency}, + traits::{ChangeMembers, Currency, Get, InitializeMembers, ReservableCurrency}, }; -use sp_runtime::traits::{AtLeast32Bit, Zero, StaticLookup}; pub use pallet::*; +use sp_runtime::traits::{AtLeast32Bit, StaticLookup, Zero}; +use sp_std::{fmt::Debug, prelude::*}; -type BalanceOf = <>::Currency as Currency<::AccountId>>::Balance; +type BalanceOf = + <>::Currency as Currency<::AccountId>>::Balance; type PoolT = Vec<(::AccountId, Option<>::Score>)>; /// The enum is supplied when refreshing the members set. 
@@ -117,10 +115,10 @@ enum ChangeReceiver { #[frame_support::pallet] pub mod pallet { + use super::*; use frame_support::{pallet_prelude::*, traits::EnsureOrigin, weights::Weight}; use frame_system::{ensure_root, ensure_signed, pallet_prelude::*}; use sp_runtime::traits::MaybeSerializeDeserialize; - use super::*; #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] @@ -132,8 +130,13 @@ pub mod pallet { type Currency: Currency + ReservableCurrency; /// The score attributed to a member or candidate. - type Score: - AtLeast32Bit + Clone + Copy + Default + FullCodec + MaybeSerializeDeserialize + Debug + type Score: AtLeast32Bit + + Clone + + Copy + + Default + + FullCodec + + MaybeSerializeDeserialize + + Debug + scale_info::TypeInfo; /// The overarching event type. @@ -210,22 +213,19 @@ pub mod pallet { /// `T::AccountId`, but by `T::Score` instead). #[pallet::storage] #[pallet::getter(fn candidate_exists)] - pub(crate) type CandidateExists, I: 'static = ()> = StorageMap< - _, - Twox64Concat, T::AccountId, - bool, - ValueQuery, - >; + pub(crate) type CandidateExists, I: 'static = ()> = + StorageMap<_, Twox64Concat, T::AccountId, bool, ValueQuery>; /// The current membership, stored as an ordered Vec. #[pallet::storage] #[pallet::getter(fn members)] - pub(crate) type Members, I: 'static = ()> = StorageValue<_, Vec, ValueQuery>; + pub(crate) type Members, I: 'static = ()> = + StorageValue<_, Vec, ValueQuery>; /// Size of the `Members` set. 
#[pallet::storage] #[pallet::getter(fn member_count)] - pub(crate) type MemberCount = StorageValue<_, u32, ValueQuery>; + pub(crate) type MemberCount = StorageValue<_, u32, ValueQuery>; #[pallet::genesis_config] pub struct GenesisConfig, I: 'static = ()> { @@ -236,10 +236,7 @@ pub mod pallet { #[cfg(feature = "std")] impl, I: 'static> Default for GenesisConfig { fn default() -> Self { - Self { - pool: Default::default(), - member_count: Default::default(), - } + Self { pool: Default::default(), member_count: Default::default() } } } @@ -250,19 +247,15 @@ pub mod pallet { // reserve balance for each candidate in the pool. // panicking here is ok, since this just happens one time, pre-genesis. - pool - .iter() - .for_each(|(who, _)| { - T::Currency::reserve(&who, T::CandidateDeposit::get()) - .expect("balance too low to create candidacy"); - >::insert(who, true); - }); + pool.iter().for_each(|(who, _)| { + T::Currency::reserve(&who, T::CandidateDeposit::get()) + .expect("balance too low to create candidacy"); + >::insert(who, true); + }); // Sorts the `Pool` by score in a descending order. Entities which // have a score of `None` are sorted to the beginning of the vec. - pool.sort_by_key(|(_, maybe_score)| - Reverse(maybe_score.unwrap_or_default()) - ); + pool.sort_by_key(|(_, maybe_score)| Reverse(maybe_score.unwrap_or_default())); >::put(self.member_count); >::put(&pool); @@ -325,10 +318,7 @@ pub mod pallet { /// The `index` parameter of this function must be set to /// the index of the transactor in the `Pool`. 
#[pallet::weight(0)] - pub fn withdraw_candidacy( - origin: OriginFor, - index: u32 - ) -> DispatchResult { + pub fn withdraw_candidacy(origin: OriginFor, index: u32) -> DispatchResult { let who = ensure_signed(origin)?; let pool = >::get(); @@ -349,7 +339,7 @@ pub mod pallet { pub fn kick( origin: OriginFor, dest: ::Source, - index: u32 + index: u32, ) -> DispatchResult { T::KickOrigin::ensure_origin(origin)?; @@ -374,7 +364,7 @@ pub mod pallet { origin: OriginFor, dest: ::Source, index: u32, - score: T::Score + score: T::Score, ) -> DispatchResult { T::ScoreOrigin::ensure_origin(origin)?; @@ -391,10 +381,9 @@ pub mod pallet { // where we can insert while maintaining order. let item = (who, Some(score.clone())); let location = pool - .binary_search_by_key( - &Reverse(score), - |(_, maybe_score)| Reverse(maybe_score.unwrap_or_default()) - ) + .binary_search_by_key(&Reverse(score), |(_, maybe_score)| { + Reverse(maybe_score.unwrap_or_default()) + }) .unwrap_or_else(|l| l); pool.insert(location, item); @@ -419,16 +408,12 @@ pub mod pallet { } impl, I: 'static> Pallet { - /// Fetches the `MemberCount` highest scoring members from /// `Pool` and puts them into `Members`. /// /// The `notify` parameter is used to deduct which associated /// type function to invoke at the end of the method. 
- fn refresh_members( - pool: PoolT, - notify: ChangeReceiver - ) { + fn refresh_members(pool: PoolT, notify: ChangeReceiver) { let count = MemberCount::::get(); let mut new_members: Vec = pool @@ -446,10 +431,7 @@ impl, I: 'static> Pallet { ChangeReceiver::MembershipInitialized => T::MembershipInitialized::initialize_members(&new_members), ChangeReceiver::MembershipChanged => - T::MembershipChanged::set_members_sorted( - &new_members[..], - &old_members[..], - ), + T::MembershipChanged::set_members_sorted(&new_members[..], &old_members[..]), } } @@ -460,7 +442,7 @@ impl, I: 'static> Pallet { fn remove_member( mut pool: PoolT, remove: T::AccountId, - index: u32 + index: u32, ) -> Result<(), Error> { // all callers of this function in this pallet also check // the index for validity before calling this function. @@ -487,11 +469,7 @@ impl, I: 'static> Pallet { /// Checks if `index` is a valid number and if the element found /// at `index` in `Pool` is equal to `who`. - fn ensure_index( - pool: &PoolT, - who: &T::AccountId, - index: u32 - ) -> Result<(), Error> { + fn ensure_index(pool: &PoolT, who: &T::AccountId, index: u32) -> Result<(), Error> { ensure!(index < pool.len() as u32, Error::::InvalidIndex); let (index_who, _index_score) = &pool[index as usize]; diff --git a/frame/scored-pool/src/mock.rs b/frame/scored-pool/src/mock.rs index 30dc48dd19d0a..80ded36fbf0ab 100644 --- a/frame/scored-pool/src/mock.rs +++ b/frame/scored-pool/src/mock.rs @@ -20,13 +20,14 @@ use super::*; use crate as pallet_scored_pool; -use std::cell::RefCell; -use frame_support::{parameter_types, ord_parameter_types, traits::GenesisBuild}; +use frame_support::{ord_parameter_types, parameter_types, traits::GenesisBuild}; +use frame_system::EnsureSignedBy; use sp_core::H256; use sp_runtime::{ - traits::{BlakeTwo256, IdentityLookup}, testing::Header, + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, }; -use frame_system::EnsureSignedBy; +use std::cell::RefCell; type UncheckedExtrinsic 
= frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -145,32 +146,26 @@ pub fn new_test_ext() -> sp_io::TestExternalities { (40, 500_000), (99, 1), ], - }.assimilate_storage(&mut t).unwrap(); - pallet_scored_pool::GenesisConfig::{ - pool: vec![ - (5, None), - (10, Some(1)), - (20, Some(2)), - (31, Some(2)), - (40, Some(3)), - ], + } + .assimilate_storage(&mut t) + .unwrap(); + pallet_scored_pool::GenesisConfig:: { + pool: vec![(5, None), (10, Some(1)), (20, Some(2)), (31, Some(2)), (40, Some(3))], member_count: 2, - .. Default::default() - }.assimilate_storage(&mut t).unwrap(); + ..Default::default() + } + .assimilate_storage(&mut t) + .unwrap(); t.into() } /// Fetch an entity from the pool, if existent. pub fn fetch_from_pool(who: u64) -> Option<(u64, Option)> { - >::pool() - .into_iter() - .find(|item| item.0 == who) + >::pool().into_iter().find(|item| item.0 == who) } /// Find an entity in the pool. /// Returns its position in the `Pool` vec, if existent. 
pub fn find_in_pool(who: u64) -> Option { - >::pool() - .into_iter() - .position(|item| item.0 == who) + >::pool().into_iter().position(|item| item.0 == who) } diff --git a/frame/scored-pool/src/tests.rs b/frame/scored-pool/src/tests.rs index 4a3b8384b744f..0503e308e76a5 100644 --- a/frame/scored-pool/src/tests.rs +++ b/frame/scored-pool/src/tests.rs @@ -20,7 +20,7 @@ use super::*; use mock::*; -use frame_support::{assert_ok, assert_noop, traits::OnInitialize}; +use frame_support::{assert_noop, assert_ok, traits::OnInitialize}; use sp_runtime::traits::BadOrigin; type ScoredPool = Pallet; @@ -142,14 +142,12 @@ fn unscored_entities_must_not_be_used_for_filling_members() { // when // we remove every scored member - ScoredPool::pool() - .into_iter() - .for_each(|(who, score)| { - if let Some(_) = score { - let index = find_in_pool(who).expect("entity must be in pool") as u32; - assert_ok!(ScoredPool::kick(Origin::signed(KickOrigin::get()), who, index)); - } - }); + ScoredPool::pool().into_iter().for_each(|(who, score)| { + if let Some(_) = score { + let index = find_in_pool(who).expect("entity must be in pool") as u32; + assert_ok!(ScoredPool::kick(Origin::signed(KickOrigin::get()), who, index)); + } + }); // then // the `None` candidates should not have been filled in @@ -201,7 +199,10 @@ fn withdraw_candidacy_must_only_work_for_members() { new_test_ext().execute_with(|| { let who = 77; let index = 0; - assert_noop!( ScoredPool::withdraw_candidacy(Origin::signed(who), index), Error::::WrongAccountIndex); + assert_noop!( + ScoredPool::withdraw_candidacy(Origin::signed(who), index), + Error::::WrongAccountIndex + ); }); } @@ -210,9 +211,18 @@ fn oob_index_should_abort() { new_test_ext().execute_with(|| { let who = 40; let oob_index = ScoredPool::pool().len() as u32; - assert_noop!(ScoredPool::withdraw_candidacy(Origin::signed(who), oob_index), Error::::InvalidIndex); - assert_noop!(ScoredPool::score(Origin::signed(ScoreOrigin::get()), who, oob_index, 99), 
Error::::InvalidIndex); - assert_noop!(ScoredPool::kick(Origin::signed(KickOrigin::get()), who, oob_index), Error::::InvalidIndex); + assert_noop!( + ScoredPool::withdraw_candidacy(Origin::signed(who), oob_index), + Error::::InvalidIndex + ); + assert_noop!( + ScoredPool::score(Origin::signed(ScoreOrigin::get()), who, oob_index, 99), + Error::::InvalidIndex + ); + assert_noop!( + ScoredPool::kick(Origin::signed(KickOrigin::get()), who, oob_index), + Error::::InvalidIndex + ); }); } @@ -221,9 +231,18 @@ fn index_mismatches_should_abort() { new_test_ext().execute_with(|| { let who = 40; let index = 3; - assert_noop!(ScoredPool::withdraw_candidacy(Origin::signed(who), index), Error::::WrongAccountIndex); - assert_noop!(ScoredPool::score(Origin::signed(ScoreOrigin::get()), who, index, 99), Error::::WrongAccountIndex); - assert_noop!(ScoredPool::kick(Origin::signed(KickOrigin::get()), who, index), Error::::WrongAccountIndex); + assert_noop!( + ScoredPool::withdraw_candidacy(Origin::signed(who), index), + Error::::WrongAccountIndex + ); + assert_noop!( + ScoredPool::score(Origin::signed(ScoreOrigin::get()), who, index, 99), + Error::::WrongAccountIndex + ); + assert_noop!( + ScoredPool::kick(Origin::signed(KickOrigin::get()), who, index), + Error::::WrongAccountIndex + ); }); } diff --git a/frame/session/benchmarking/src/lib.rs b/frame/session/benchmarking/src/lib.rs index d9a50b431f2e7..117ef07d60a2b 100644 --- a/frame/session/benchmarking/src/lib.rs +++ b/frame/session/benchmarking/src/lib.rs @@ -22,8 +22,7 @@ mod mock; -use sp_std::prelude::*; -use sp_std::vec; +use sp_std::{prelude::*, vec}; use frame_benchmarking::{benchmarks, impl_benchmark_test_suite}; use frame_support::{ @@ -41,7 +40,10 @@ use sp_runtime::traits::{One, StaticLookup}; const MAX_VALIDATORS: u32 = 1000; pub struct Pallet(pallet_session::Module); -pub trait Config: pallet_session::Config + pallet_session::historical::Config + pallet_staking::Config {} +pub trait Config: + pallet_session::Config + 
pallet_session::historical::Config + pallet_staking::Config +{ +} impl OnInitialize for Pallet { fn on_initialize(n: T::BlockNumber) -> frame_support::weights::Weight { @@ -120,20 +122,12 @@ benchmarks! { /// proof for the first authority and returns its key and the proof. fn check_membership_proof_setup( n: u32, -) -> ( - (sp_runtime::KeyTypeId, &'static [u8; 32]), - sp_session::MembershipProof, -) { +) -> ((sp_runtime::KeyTypeId, &'static [u8; 32]), sp_session::MembershipProof) { pallet_staking::ValidatorCount::::put(n); // create validators and set random session keys - for (n, who) in create_validators::(n, 1000) - .unwrap() - .into_iter() - .enumerate() - { - use rand::RngCore; - use rand::SeedableRng; + for (n, who) in create_validators::(n, 1000).unwrap().into_iter().enumerate() { + use rand::{RngCore, SeedableRng}; let validator = T::Lookup::lookup(who).unwrap(); let controller = pallet_staking::Pallet::::bonded(validator).unwrap(); @@ -168,9 +162,4 @@ fn check_membership_proof_setup( (key, Historical::::prove(key).unwrap()) } -impl_benchmark_test_suite!( - Pallet, - crate::mock::new_test_ext(), - crate::mock::Test, - extra = false, -); +impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::Test, extra = false,); diff --git a/frame/session/benchmarking/src/mock.rs b/frame/session/benchmarking/src/mock.rs index a3f9b6b447c38..bd61acb9de180 100644 --- a/frame/session/benchmarking/src/mock.rs +++ b/frame/session/benchmarking/src/mock.rs @@ -19,9 +19,9 @@ #![cfg(test)] -use sp_runtime::traits::IdentityLookup; use frame_election_provider_support::onchain; use frame_support::parameter_types; +use sp_runtime::traits::IdentityLookup; type AccountId = u64; type AccountIndex = u32; @@ -114,7 +114,8 @@ impl pallet_session::SessionHandler for TestSessionHandler { _: bool, _: &[(AccountId, Ks)], _: &[(AccountId, Ks)], - ) {} + ) { + } fn on_disabled(_: usize) {} } diff --git a/frame/session/src/historical/mod.rs 
b/frame/session/src/historical/mod.rs index 3cfcbf98bf38c..c9b13e3c7f262 100644 --- a/frame/session/src/historical/mod.rs +++ b/frame/session/src/historical/mod.rs @@ -26,22 +26,27 @@ //! These roots and proofs of inclusion can be generated at any time during the current session. //! Afterwards, the proofs can be fed to a consensus module when reporting misbehavior. -use sp_std::prelude::*; -use codec::{Encode, Decode}; -use sp_runtime::KeyTypeId; -use sp_runtime::traits::{Convert, OpaqueKeys}; -use sp_session::{MembershipProof, ValidatorCount}; +use super::{Module as SessionModule, SessionIndex}; +use codec::{Decode, Encode}; use frame_support::{ - decl_module, decl_storage, Parameter, print, + decl_module, decl_storage, print, traits::{ValidatorSet, ValidatorSetWithIdentification}, + Parameter, +}; +use sp_runtime::{ + traits::{Convert, OpaqueKeys}, + KeyTypeId, +}; +use sp_session::{MembershipProof, ValidatorCount}; +use sp_std::prelude::*; +use sp_trie::{ + trie_types::{TrieDB, TrieDBMut}, + MemoryDB, Recorder, Trie, TrieMut, EMPTY_PREFIX, }; -use sp_trie::{MemoryDB, Trie, TrieMut, Recorder, EMPTY_PREFIX}; -use sp_trie::trie_types::{TrieDBMut, TrieDB}; -use super::{SessionIndex, Module as SessionModule}; -mod shared; pub mod offchain; pub mod onchain; +mod shared; /// Config necessary for the historical module. pub trait Config: super::Config { @@ -165,7 +170,7 @@ impl> NoteHi Err(reason) => { print("Failed to generate historical ancestry-inclusion proof."); print(reason); - } + }, }; } else { let previous_index = new_index.saturating_sub(1); @@ -201,7 +206,8 @@ where } /// A tuple of the validator's ID and their full identification. -pub type IdentificationTuple = (::ValidatorId, ::FullIdentification); +pub type IdentificationTuple = + (::ValidatorId, ::FullIdentification); /// A trie instance for checking and generating proofs. 
pub struct ProvingTrie { @@ -211,7 +217,8 @@ pub struct ProvingTrie { impl ProvingTrie { fn generate_for(validators: I) -> Result - where I: IntoIterator + where + I: IntoIterator, { let mut db = MemoryDB::default(); let mut root = Default::default(); @@ -230,23 +237,20 @@ impl ProvingTrie { // map each key to the owner index. for key_id in T::Keys::key_ids() { let key = keys.get_raw(*key_id); - let res = (key_id, key).using_encoded(|k| - i.using_encoded(|v| trie.insert(k, v)) - ); + let res = + (key_id, key).using_encoded(|k| i.using_encoded(|v| trie.insert(k, v))); let _ = res.map_err(|_| "failed to insert into trie")?; } // map each owner index to the full identification. - let _ = i.using_encoded(|k| full_id.using_encoded(|v| trie.insert(k, v))) + let _ = i + .using_encoded(|k| full_id.using_encoded(|v| trie.insert(k, v))) .map_err(|_| "failed to insert into trie")?; } } - Ok(ProvingTrie { - db, - root, - }) + Ok(ProvingTrie { db, root }) } fn from_nodes(root: T::Hash, nodes: &[Vec]) -> Self { @@ -257,10 +261,7 @@ impl ProvingTrie { HashDBT::insert(&mut memory_db, EMPTY_PREFIX, &node[..]); } - ProvingTrie { - db: memory_db, - root, - } + ProvingTrie { db: memory_db, root } } /// Prove the full verification data for a given key and key ID. @@ -291,11 +292,13 @@ impl ProvingTrie { // nodes within the current `MemoryDB` are insufficient to query the item. fn query(&self, key_id: KeyTypeId, key_data: &[u8]) -> Option> { let trie = TrieDB::new(&self.db, &self.root).ok()?; - let val_idx = (key_id, key_data).using_encoded(|s| trie.get(s)) + let val_idx = (key_id, key_data) + .using_encoded(|s| trie.get(s)) .ok()? .and_then(|raw| u32::decode(&mut &*raw).ok())?; - val_idx.using_encoded(|s| trie.get(s)) + val_idx + .using_encoded(|s| trie.get(s)) .ok()? 
.and_then(|raw| >::decode(&mut &*raw).ok()) } @@ -322,12 +325,11 @@ impl> frame_support::traits::KeyOwnerProofSystem<(KeyT let trie = ProvingTrie::::generate_for(validators).ok()?; let (id, data) = key; - trie.prove(id, data.as_ref()) - .map(|trie_nodes| MembershipProof { - session, - trie_nodes, - validator_count: count, - }) + trie.prove(id, data.as_ref()).map(|trie_nodes| MembershipProof { + session, + trie_nodes, + validator_count: count, + }) } fn check_proof(key: (KeyTypeId, D), proof: Self::Proof) -> Option> { @@ -339,7 +341,7 @@ impl> frame_support::traits::KeyOwnerProofSystem<(KeyT let count = >::validators().len() as ValidatorCount; if count != proof.validator_count { - return None; + return None } Some((owner, id)) @@ -349,7 +351,7 @@ impl> frame_support::traits::KeyOwnerProofSystem<(KeyT let (root, count) = >::get(&proof.session)?; if count != proof.validator_count { - return None; + return None } let trie = ProvingTrie::::from_nodes(root, &proof.trie_nodes); @@ -361,22 +363,22 @@ impl> frame_support::traits::KeyOwnerProofSystem<(KeyT #[cfg(test)] pub(crate) mod tests { use super::*; - use sp_runtime::key_types::DUMMY; - use sp_runtime::testing::UintAuthorityId; use crate::mock::{ - NEXT_VALIDATORS, force_new_session, - set_next_validators, Test, System, Session, + force_new_session, set_next_validators, Session, System, Test, NEXT_VALIDATORS, }; - use frame_support::traits::{KeyOwnerProofSystem, OnInitialize}; - use frame_support::BasicExternalities; + use frame_support::{ + traits::{KeyOwnerProofSystem, OnInitialize}, + BasicExternalities, + }; + use sp_runtime::{key_types::DUMMY, testing::UintAuthorityId}; type Historical = Module; pub(crate) fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - let keys: Vec<_> = NEXT_VALIDATORS.with(|l| + let keys: Vec<_> = NEXT_VALIDATORS.with(|l| { l.borrow().iter().cloned().map(|i| (i, i, UintAuthorityId(i).into())).collect() - ); + }); 
BasicExternalities::execute_with_storage(&mut t, || { for (ref k, ..) in &keys { frame_system::Pallet::::inc_providers(k); @@ -430,7 +432,6 @@ pub(crate) mod tests { System::set_block_number(i); Session::on_initialize(i); - } assert_eq!(StoredRange::get(), Some((0, 100))); @@ -461,7 +462,6 @@ pub(crate) mod tests { System::set_block_number(i); Session::on_initialize(i); - } assert_eq!(StoredRange::get(), Some((100, 200))); diff --git a/frame/session/src/historical/offchain.rs b/frame/session/src/historical/offchain.rs index 68cc78029f12c..8583c2bb439be 100644 --- a/frame/session/src/historical/offchain.rs +++ b/frame/session/src/historical/offchain.rs @@ -27,17 +27,18 @@ use sp_runtime::{ offchain::storage::{MutateStorageError, StorageRetrievalError, StorageValueRef}, - KeyTypeId + KeyTypeId, }; use sp_session::MembershipProof; -use super::super::{Pallet as SessionModule, SessionIndex}; -use super::{IdentificationTuple, ProvingTrie, Config}; +use super::{ + super::{Pallet as SessionModule, SessionIndex}, + Config, IdentificationTuple, ProvingTrie, +}; use super::shared; use sp_std::prelude::*; - /// A set of validators, which was used for a fixed session index. struct ValidatorSet { validator_set: Vec>, @@ -87,15 +88,13 @@ pub fn prove_session_membership>( let trie = ProvingTrie::::generate_for(validators.into_iter()).ok()?; let (id, data) = session_key; - trie.prove(id, data.as_ref()) - .map(|trie_nodes| MembershipProof { - session: session_index, - trie_nodes, - validator_count: count, - }) + trie.prove(id, data.as_ref()).map(|trie_nodes| MembershipProof { + session: session_index, + trie_nodes, + validator_count: count, + }) } - /// Attempt to prune anything that is older than `first_to_keep` session index. 
/// /// Due to re-organisation it could be that the `first_to_keep` might be less @@ -104,18 +103,20 @@ pub fn prove_session_membership>( pub fn prune_older_than(first_to_keep: SessionIndex) { let derived_key = shared::LAST_PRUNE.to_vec(); let entry = StorageValueRef::persistent(derived_key.as_ref()); - match entry.mutate(|current: Result, StorageRetrievalError>| -> Result<_, ()> { - match current { - Ok(Some(current)) if current < first_to_keep => Ok(first_to_keep), - // do not move the cursor, if the new one would be behind ours - Ok(Some(current)) => Ok(current), - Ok(None) => Ok(first_to_keep), - // if the storage contains undecodable data, overwrite with current anyways - // which might leak some entries being never purged, but that is acceptable - // in this context - Err(_) => Ok(first_to_keep), - } - }) { + match entry.mutate( + |current: Result, StorageRetrievalError>| -> Result<_, ()> { + match current { + Ok(Some(current)) if current < first_to_keep => Ok(first_to_keep), + // do not move the cursor, if the new one would be behind ours + Ok(Some(current)) => Ok(current), + Ok(None) => Ok(first_to_keep), + // if the storage contains undecodable data, overwrite with current anyways + // which might leak some entries being never purged, but that is acceptable + // in this context + Err(_) => Ok(first_to_keep), + } + }, + ) { Ok(new_value) => { // on a re-org this is not necessarily true, with the above they might be equal if new_value < first_to_keep { @@ -124,9 +125,9 @@ pub fn prune_older_than(first_to_keep: SessionIndex) { let _ = StorageValueRef::persistent(derived_key.as_ref()).clear(); } } - } - Err(MutateStorageError::ConcurrentModification(_)) => {} - Err(MutateStorageError::ValueFunctionFailed(_)) => {} + }, + Err(MutateStorageError::ConcurrentModification(_)) => {}, + Err(MutateStorageError::ValueFunctionFailed(_)) => {}, } } @@ -141,23 +142,22 @@ pub fn keep_newest(n_to_keep: usize) { #[cfg(test)] mod tests { - use super::super::{onchain, Module}; 
- use super::*; + use super::{ + super::{onchain, Module}, + *, + }; use crate::mock::{ force_new_session, set_next_validators, Session, System, Test, NEXT_VALIDATORS, }; use codec::Encode; use frame_support::traits::{KeyOwnerProofSystem, OnInitialize}; - use sp_core::crypto::key_types::DUMMY; - use sp_core::offchain::{ - testing::TestOffchainExt, - OffchainDbExt, - OffchainWorkerExt, - StorageKind, + use sp_core::{ + crypto::key_types::DUMMY, + offchain::{testing::TestOffchainExt, OffchainDbExt, OffchainWorkerExt, StorageKind}, }; - use sp_runtime::testing::UintAuthorityId; use frame_support::BasicExternalities; + use sp_runtime::testing::UintAuthorityId; type Historical = Module; @@ -166,16 +166,16 @@ mod tests { .build_storage::() .expect("Failed to create test externalities."); - let keys: Vec<_> = NEXT_VALIDATORS.with(|l| + let keys: Vec<_> = NEXT_VALIDATORS.with(|l| { l.borrow().iter().cloned().map(|i| (i, i, UintAuthorityId(i).into())).collect() - ); + }); BasicExternalities::execute_with_storage(&mut t, || { for (ref k, ..) 
in &keys { frame_system::Pallet::::inc_providers(k); } }); - crate::GenesisConfig::{ keys }.assimilate_storage(&mut t).unwrap(); + crate::GenesisConfig:: { keys }.assimilate_storage(&mut t).unwrap(); let mut ext = sp_io::TestExternalities::new(t); @@ -193,13 +193,13 @@ mod tests { #[test] fn encode_decode_roundtrip() { + use super::super::{super::Config as SessionConfig, Config as HistoricalConfig}; use codec::{Decode, Encode}; - use super::super::super::Config as SessionConfig; - use super::super::Config as HistoricalConfig; let sample = ( - 22u32 as ::ValidatorId, - 7_777_777 as ::FullIdentification); + 22u32 as ::ValidatorId, + 7_777_777 as ::FullIdentification, + ); let encoded = sample.encode(); let decoded = Decode::decode(&mut encoded.as_slice()).expect("Must decode"); @@ -210,7 +210,7 @@ mod tests { fn onchain_to_offchain() { let mut ext = new_test_ext(); - const DATA: &[u8] = &[7,8,9,10,11]; + const DATA: &[u8] = &[7, 8, 9, 10, 11]; ext.execute_with(|| { b"alphaomega"[..].using_encoded(|key| sp_io::offchain_index::set(key, DATA)); }); @@ -218,15 +218,13 @@ mod tests { ext.persist_offchain_overlay(); ext.execute_with(|| { - let data = - b"alphaomega"[..].using_encoded(|key| { + let data = b"alphaomega"[..].using_encoded(|key| { sp_io::offchain::local_storage_get(StorageKind::PERSISTENT, key) }); assert_eq!(data, Some(DATA.to_vec())); }); } - #[test] fn historical_proof_offchain() { let mut ext = new_test_ext(); @@ -251,8 +249,6 @@ mod tests { ext.persist_offchain_overlay(); ext.execute_with(|| { - - System::set_block_number(2); Session::on_initialize(2); assert_eq!(>::current_index(), 2); diff --git a/frame/session/src/historical/onchain.rs b/frame/session/src/historical/onchain.rs index 8fe63a79e1c59..514e343f4e0f6 100644 --- a/frame/session/src/historical/onchain.rs +++ b/frame/session/src/historical/onchain.rs @@ -20,9 +20,10 @@ use codec::Encode; use sp_runtime::traits::Convert; -use super::super::Config as SessionConfig; -use super::super::{Pallet as 
SessionModule, SessionIndex}; -use super::Config as HistoricalConfig; +use super::{ + super::{Config as SessionConfig, Pallet as SessionModule, SessionIndex}, + Config as HistoricalConfig, +}; use super::shared; use sp_std::prelude::*; diff --git a/frame/session/src/historical/shared.rs b/frame/session/src/historical/shared.rs index b054854d88fe8..e801aa80eef4c 100644 --- a/frame/session/src/historical/shared.rs +++ b/frame/session/src/historical/shared.rs @@ -18,10 +18,9 @@ //! Shared logic between on-chain and off-chain components used for slashing using an off-chain //! worker. - use super::SessionIndex; -use sp_std::prelude::*; use codec::Encode; +use sp_std::prelude::*; pub(super) const PREFIX: &[u8] = b"session_historical"; pub(super) const LAST_PRUNE: &[u8] = b"session_historical_last_prune"; @@ -30,10 +29,11 @@ pub(super) const LAST_PRUNE: &[u8] = b"session_historical_last_prune"; pub(super) fn derive_key>(prefix: P, session_index: SessionIndex) -> Vec { let prefix: &[u8] = prefix.as_ref(); session_index.using_encoded(|encoded_session_index| { - prefix.into_iter() + prefix + .into_iter() .chain(b"/".into_iter()) .chain(encoded_session_index.into_iter()) .copied() .collect::>() }) -} \ No newline at end of file +} diff --git a/frame/session/src/lib.rs b/frame/session/src/lib.rs index 5095ed0154657..cdeceb1ef53d2 100644 --- a/frame/session/src/lib.rs +++ b/frame/session/src/lib.rs @@ -106,31 +106,37 @@ #![cfg_attr(not(feature = "std"), no_std)] +#[cfg(feature = "historical")] +pub mod historical; #[cfg(test)] mod mock; #[cfg(test)] mod tests; -#[cfg(feature = "historical")] -pub mod historical; pub mod weights; -use sp_std::{prelude::*, marker::PhantomData, ops::{Sub, Rem}}; use codec::Decode; -use sp_runtime::{ - traits::{AtLeast32BitUnsigned, Convert, Member, One, OpaqueKeys, Zero}, - KeyTypeId, Perbill, Permill, RuntimeAppPublic, -}; -use sp_staking::SessionIndex; use frame_support::{ - ensure, decl_module, decl_event, decl_storage, decl_error, 
ConsensusEngineId, Parameter, + decl_error, decl_event, decl_module, decl_storage, + dispatch::{self, DispatchError, DispatchResult}, + ensure, traits::{ - Get, FindAuthor, ValidatorRegistration, EstimateNextSessionRotation, EstimateNextNewSession, - OneSessionHandler, ValidatorSet, + EstimateNextNewSession, EstimateNextSessionRotation, FindAuthor, Get, OneSessionHandler, + ValidatorRegistration, ValidatorSet, }, - dispatch::{self, DispatchResult, DispatchError}, weights::Weight, + ConsensusEngineId, Parameter, }; use frame_system::ensure_signed; +use sp_runtime::{ + traits::{AtLeast32BitUnsigned, Convert, Member, One, OpaqueKeys, Zero}, + KeyTypeId, Perbill, Permill, RuntimeAppPublic, +}; +use sp_staking::SessionIndex; +use sp_std::{ + marker::PhantomData, + ops::{Rem, Sub}, + prelude::*, +}; pub use weights::WeightInfo; /// Decides whether the session should be ended. @@ -147,10 +153,10 @@ pub trait ShouldEndSession { pub struct PeriodicSessions(PhantomData<(Period, Offset)>); impl< - BlockNumber: Rem + Sub + Zero + PartialOrd, - Period: Get, - Offset: Get, -> ShouldEndSession for PeriodicSessions + BlockNumber: Rem + Sub + Zero + PartialOrd, + Period: Get, + Offset: Get, + > ShouldEndSession for PeriodicSessions { fn should_end_session(now: BlockNumber) -> bool { let offset = Offset::get(); @@ -159,10 +165,10 @@ impl< } impl< - BlockNumber: AtLeast32BitUnsigned + Clone, - Period: Get, - Offset: Get -> EstimateNextSessionRotation for PeriodicSessions + BlockNumber: AtLeast32BitUnsigned + Clone, + Period: Get, + Offset: Get, + > EstimateNextSessionRotation for PeriodicSessions { fn average_session_length() -> BlockNumber { Period::get() @@ -177,15 +183,9 @@ impl< // (0% is never returned). 
let progress = if now >= offset { let current = (now - offset) % period.clone() + One::one(); - Some(Permill::from_rational( - current.clone(), - period.clone(), - )) + Some(Permill::from_rational(current.clone(), period.clone())) } else { - Some(Permill::from_rational( - now + One::one(), - offset, - )) + Some(Permill::from_rational(now + One::one(), offset)) }; // Weight note: `estimate_current_session_progress` has no storage reads and trivial @@ -257,7 +257,9 @@ pub trait SessionManager { } impl SessionManager for () { - fn new_session(_: SessionIndex) -> Option> { None } + fn new_session(_: SessionIndex) -> Option> { + None + } fn start_session(_: SessionIndex) {} fn end_session(_: SessionIndex) {} } @@ -591,9 +593,8 @@ impl Module { // Get queued session keys and validators. let session_keys = >::get(); - let validators = session_keys.iter() - .map(|(validator, _)| validator.clone()) - .collect::>(); + let validators = + session_keys.iter().map(|(validator, _)| validator.clone()).collect::>(); >::put(&validators); if changed { @@ -609,16 +610,15 @@ impl Module { // Get next validator set. let maybe_next_validators = T::SessionManager::new_session(session_index + 1); - let (next_validators, next_identities_changed) - = if let Some(validators) = maybe_next_validators - { - // NOTE: as per the documentation on `OnSessionEnding`, we consider - // the validator set as having changed even if the validators are the - // same as before, as underlying economic conditions may have changed. - (validators, true) - } else { - (>::get(), false) - }; + let (next_validators, next_identities_changed) = + if let Some(validators) = maybe_next_validators { + // NOTE: as per the documentation on `OnSessionEnding`, we consider + // the validator set as having changed even if the validators are the + // same as before, as underlying economic conditions may have changed. + (validators, true) + } else { + (>::get(), false) + }; // Queue next session keys. 
let (queued_amalgamated, next_changed) = { @@ -628,7 +628,9 @@ impl Module { let mut now_session_keys = session_keys.iter(); let mut check_next_changed = |keys: &T::Keys| { - if changed { return } + if changed { + return + } // since a new validator set always leads to `changed` starting // as true, we can ensure that `now_session_keys` and `next_validators` // have the same length. this function is called once per iteration. @@ -639,7 +641,8 @@ impl Module { } } }; - let queued_amalgamated = next_validators.into_iter() + let queued_amalgamated = next_validators + .into_iter() .map(|a| { let k = Self::load_keys(&a).unwrap_or_default(); check_next_changed(&k); @@ -657,11 +660,7 @@ impl Module { Self::deposit_event(Event::NewSession(session_index)); // Tell everyone about the new session keys. - T::SessionHandler::on_new_session::( - changed, - &session_keys, - &queued_amalgamated, - ); + T::SessionHandler::on_new_session::(changed, &session_keys, &queued_amalgamated); } /// Disable the validator of index `i`. @@ -695,7 +694,11 @@ impl Module { /// session is already disabled. /// If used with the staking module it allows to force a new era in such case. pub fn disable(c: &T::ValidatorId) -> sp_std::result::Result { - Self::validators().iter().position(|i| i == c).map(Self::disable_index).ok_or(()) + Self::validators() + .iter() + .position(|i| i == c) + .map(Self::disable_index) + .ok_or(()) } /// Upgrade the key type from some old type to a new type. Supports adding @@ -713,7 +716,8 @@ impl Module { /// it's recommended to initialize the keys to a (unique) dummy value with the expectation /// that all validators should invoke `set_keys` before those keys are actually /// required. 
- pub fn upgrade_keys(upgrade: F) where + pub fn upgrade_keys(upgrade: F) + where Old: OpaqueKeys + Member + Decode, F: Fn(T::ValidatorId, Old) -> T::Keys, { @@ -738,13 +742,13 @@ impl Module { Some(new_keys) }); - let _ = >::translate::, _>( - |k| { - k.map(|k| k.into_iter() + let _ = >::translate::, _>(|k| { + k.map(|k| { + k.into_iter() .map(|(val, old_keys)| (val.clone(), upgrade(val, old_keys))) - .collect::>()) - } - ); + .collect::>() + }) + }); } /// Perform the set_key operation, checking for duplicates. Does not set `Changed`. @@ -771,7 +775,10 @@ impl Module { /// /// This does not ensure that the reference counter in system is incremented appropriately, it /// must be done by the caller or the keys will be leaked in storage. - fn inner_set_keys(who: &T::ValidatorId, keys: T::Keys) -> Result, DispatchError> { + fn inner_set_keys( + who: &T::ValidatorId, + keys: T::Keys, + ) -> Result, DispatchError> { let old_keys = Self::load_keys(who); for id in T::Keys::key_ids() { @@ -789,7 +796,7 @@ impl Module { if let Some(old) = old_keys.as_ref().map(|k| k.get_raw(*id)) { if key == old { - continue; + continue } Self::clear_key_owner(*id, old); @@ -864,7 +871,8 @@ impl> FindAuthor for FindAccountFromAuthorIndex { fn find_author<'a, I>(digests: I) -> Option - where I: 'a + IntoIterator + where + I: 'a + IntoIterator, { let i = Inner::find_author(digests)?; diff --git a/frame/session/src/mock.rs b/frame/session/src/mock.rs index 1462b2326777e..7007286de6415 100644 --- a/frame/session/src/mock.rs +++ b/frame/session/src/mock.rs @@ -18,18 +18,19 @@ //! Mock helpers for Session. 
use super::*; -use std::cell::RefCell; +use crate as pallet_session; +#[cfg(feature = "historical")] +use crate::historical as pallet_session_historical; use frame_support::{parameter_types, BasicExternalities}; use sp_core::{crypto::key_types::DUMMY, H256}; use sp_runtime::{ - Perbill, impl_opaque_keys, - traits::{BlakeTwo256, IdentityLookup, ConvertInto}, + impl_opaque_keys, testing::{Header, UintAuthorityId}, + traits::{BlakeTwo256, ConvertInto, IdentityLookup}, + Perbill, }; use sp_staking::SessionIndex; -use crate as pallet_session; -#[cfg(feature = "historical")] -use crate::historical as pallet_session_historical; +use std::cell::RefCell; impl_opaque_keys! { pub struct MockSessionKeys { @@ -114,7 +115,12 @@ pub struct TestShouldEndSession; impl ShouldEndSession for TestShouldEndSession { fn should_end_session(now: u64) -> bool { let l = SESSION_LENGTH.with(|l| *l.borrow()); - now % l == 0 || FORCE_SESSION_END.with(|l| { let r = *l.borrow(); *l.borrow_mut() = false; r }) + now % l == 0 || + FORCE_SESSION_END.with(|l| { + let r = *l.borrow(); + *l.borrow_mut() = false; + r + }) } } @@ -128,11 +134,12 @@ impl SessionHandler for TestSessionHandler { _queued_validators: &[(u64, T)], ) { SESSION_CHANGED.with(|l| *l.borrow_mut() = changed); - AUTHORITIES.with(|l| - *l.borrow_mut() = validators.iter() + AUTHORITIES.with(|l| { + *l.borrow_mut() = validators + .iter() .map(|(_, id)| id.get::(DUMMY).unwrap_or_default()) .collect() - ); + }); } fn on_disabled(_validator_index: usize) { DISABLED.with(|l| *l.borrow_mut() = true) @@ -167,9 +174,7 @@ impl SessionManager for TestSessionManager { impl crate::historical::SessionManager for TestSessionManager { fn end_session(_: SessionIndex) {} fn start_session(_: SessionIndex) {} - fn new_session(new_index: SessionIndex) - -> Option> - { + fn new_session(new_index: SessionIndex) -> Option> { >::new_session(new_index) .map(|vals| vals.into_iter().map(|val| (val, val)).collect()) } @@ -180,11 +185,11 @@ pub fn authorities() -> 
Vec { } pub fn force_new_session() { - FORCE_SESSION_END.with(|l| *l.borrow_mut() = true ) + FORCE_SESSION_END.with(|l| *l.borrow_mut() = true) } pub fn set_session_length(x: u64) { - SESSION_LENGTH.with(|l| *l.borrow_mut() = x ) + SESSION_LENGTH.with(|l| *l.borrow_mut() = x) } pub fn session_changed() -> bool { @@ -205,9 +210,8 @@ pub fn reset_before_session_end_called() { pub fn new_test_ext() -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - let keys: Vec<_> = NEXT_VALIDATORS.with(|l| - l.borrow().iter().cloned().map(|i| (i, i, UintAuthorityId(i).into())).collect() - ); + let keys: Vec<_> = NEXT_VALIDATORS + .with(|l| l.borrow().iter().cloned().map(|i| (i, i, UintAuthorityId(i).into())).collect()); BasicExternalities::execute_with_storage(&mut t, || { for (ref k, ..) in &keys { frame_system::Pallet::::inc_providers(k); @@ -216,7 +220,9 @@ pub fn new_test_ext() -> sp_io::TestExternalities { // An additional identity that we use. 
frame_system::Pallet::::inc_providers(&69); }); - pallet_session::GenesisConfig:: { keys }.assimilate_storage(&mut t).unwrap(); + pallet_session::GenesisConfig:: { keys } + .assimilate_storage(&mut t) + .unwrap(); sp_io::TestExternalities::new(t) } diff --git a/frame/session/src/tests.rs b/frame/session/src/tests.rs index a551e1a4a2612..cb1a21bbd647a 100644 --- a/frame/session/src/tests.rs +++ b/frame/session/src/tests.rs @@ -18,17 +18,16 @@ // Tests for the Session Pallet use super::*; -use mock::Test; use codec::Decode; -use frame_support::{traits::OnInitialize, assert_ok, assert_noop}; -use sp_core::crypto::key_types::DUMMY; -use sp_runtime::testing::UintAuthorityId; +use frame_support::{assert_noop, assert_ok, traits::OnInitialize}; use mock::{ - SESSION_CHANGED, TEST_SESSION_CHANGED, authorities, force_new_session, - set_next_validators, set_session_length, session_changed, Origin, System, Session, - reset_before_session_end_called, before_session_end_called, new_test_ext, - PreUpgradeMockSessionKeys, + authorities, before_session_end_called, force_new_session, new_test_ext, + reset_before_session_end_called, session_changed, set_next_validators, set_session_length, + Origin, PreUpgradeMockSessionKeys, Session, System, Test, SESSION_CHANGED, + TEST_SESSION_CHANGED, }; +use sp_core::crypto::key_types::DUMMY; +use sp_runtime::testing::UintAuthorityId; fn initialize_block(block: u64) { SESSION_CHANGED.with(|l| *l.borrow_mut() = false); @@ -79,10 +78,10 @@ fn authorities_should_track_validators() { set_next_validators(vec![1, 2]); force_new_session(); initialize_block(1); - assert_eq!(Session::queued_keys(), vec![ - (1, UintAuthorityId(1).into()), - (2, UintAuthorityId(2).into()), - ]); + assert_eq!( + Session::queued_keys(), + vec![(1, UintAuthorityId(1).into()), (2, UintAuthorityId(2).into()),] + ); assert_eq!(Session::validators(), vec![1, 2, 3]); assert_eq!(authorities(), vec![UintAuthorityId(1), UintAuthorityId(2), UintAuthorityId(3)]); 
assert!(before_session_end_called()); @@ -90,10 +89,10 @@ fn authorities_should_track_validators() { force_new_session(); initialize_block(2); - assert_eq!(Session::queued_keys(), vec![ - (1, UintAuthorityId(1).into()), - (2, UintAuthorityId(2).into()), - ]); + assert_eq!( + Session::queued_keys(), + vec![(1, UintAuthorityId(1).into()), (2, UintAuthorityId(2).into()),] + ); assert_eq!(Session::validators(), vec![1, 2]); assert_eq!(authorities(), vec![UintAuthorityId(1), UintAuthorityId(2)]); assert!(before_session_end_called()); @@ -103,22 +102,28 @@ fn authorities_should_track_validators() { assert_ok!(Session::set_keys(Origin::signed(4), UintAuthorityId(4).into(), vec![])); force_new_session(); initialize_block(3); - assert_eq!(Session::queued_keys(), vec![ - (1, UintAuthorityId(1).into()), - (2, UintAuthorityId(2).into()), - (4, UintAuthorityId(4).into()), - ]); + assert_eq!( + Session::queued_keys(), + vec![ + (1, UintAuthorityId(1).into()), + (2, UintAuthorityId(2).into()), + (4, UintAuthorityId(4).into()), + ] + ); assert_eq!(Session::validators(), vec![1, 2]); assert_eq!(authorities(), vec![UintAuthorityId(1), UintAuthorityId(2)]); assert!(before_session_end_called()); force_new_session(); initialize_block(4); - assert_eq!(Session::queued_keys(), vec![ - (1, UintAuthorityId(1).into()), - (2, UintAuthorityId(2).into()), - (4, UintAuthorityId(4).into()), - ]); + assert_eq!( + Session::queued_keys(), + vec![ + (1, UintAuthorityId(1).into()), + (2, UintAuthorityId(2).into()), + (4, UintAuthorityId(4).into()), + ] + ); assert_eq!(Session::validators(), vec![1, 2, 4]); assert_eq!(authorities(), vec![UintAuthorityId(1), UintAuthorityId(2), UintAuthorityId(4)]); }); @@ -288,10 +293,7 @@ fn periodic_session_works() { // 1/10 of progress. 
assert!(P::should_end_session(3u64)); assert_eq!(P::estimate_next_session_rotation(3u64).0.unwrap(), 3); - assert_eq!( - P::estimate_current_session_progress(3u64).0.unwrap(), - Permill::from_percent(10), - ); + assert_eq!(P::estimate_current_session_progress(3u64).0.unwrap(), Permill::from_percent(10),); for i in (1u64..10).map(|i| 3 + i) { assert!(!P::should_end_session(i)); @@ -314,30 +316,22 @@ fn periodic_session_works() { // the new session starts and we proceed in 1/10 increments. assert!(P::should_end_session(13u64)); assert_eq!(P::estimate_next_session_rotation(13u64).0.unwrap(), 23); - assert_eq!( - P::estimate_current_session_progress(13u64).0.unwrap(), - Permill::from_percent(10) - ); + assert_eq!(P::estimate_current_session_progress(13u64).0.unwrap(), Permill::from_percent(10)); assert!(!P::should_end_session(14u64)); assert_eq!(P::estimate_next_session_rotation(14u64).0.unwrap(), 23); - assert_eq!( - P::estimate_current_session_progress(14u64).0.unwrap(), - Permill::from_percent(20) - ); + assert_eq!(P::estimate_current_session_progress(14u64).0.unwrap(), Permill::from_percent(20)); } #[test] fn session_keys_generate_output_works_as_set_keys_input() { new_test_ext().execute_with(|| { let new_keys = mock::MockSessionKeys::generate(None); - assert_ok!( - Session::set_keys( - Origin::signed(2), - ::Keys::decode(&mut &new_keys[..]).expect("Decode keys"), - vec![], - ) - ); + assert_ok!(Session::set_keys( + Origin::signed(2), + ::Keys::decode(&mut &new_keys[..]).expect("Decode keys"), + vec![], + )); }); } @@ -368,26 +362,13 @@ fn upgrade_keys() { assert_eq!(mock::VALIDATORS.with(|l| l.borrow().clone()), vec![1, 2, 3]); new_test_ext().execute_with(|| { - let pre_one = PreUpgradeMockSessionKeys { - a: [1u8; 32], - b: [1u8; 64], - }; - - let pre_two = PreUpgradeMockSessionKeys { - a: [2u8; 32], - b: [2u8; 64], - }; - - let pre_three = PreUpgradeMockSessionKeys { - a: [3u8; 32], - b: [3u8; 64], - }; - - let val_keys = vec![ - (1u64, pre_one), - (2u64, 
pre_two), - (3u64, pre_three), - ]; + let pre_one = PreUpgradeMockSessionKeys { a: [1u8; 32], b: [1u8; 64] }; + + let pre_two = PreUpgradeMockSessionKeys { a: [2u8; 32], b: [2u8; 64] }; + + let pre_three = PreUpgradeMockSessionKeys { a: [3u8; 32], b: [3u8; 64] }; + + let val_keys = vec![(1u64, pre_one), (2u64, pre_two), (3u64, pre_three)]; // Set `QueuedKeys`. { @@ -422,9 +403,7 @@ fn upgrade_keys() { // Do the upgrade and check sanity. let mock_keys_for = |val| mock::MockSessionKeys { dummy: UintAuthorityId(val) }; - Session::upgrade_keys::( - |val, _old_keys| mock_keys_for(val), - ); + Session::upgrade_keys::(|val, _old_keys| mock_keys_for(val)); // Check key ownership. for (i, ref keys) in val_keys.iter() { @@ -438,11 +417,7 @@ fn upgrade_keys() { // Check queued keys. assert_eq!( Session::queued_keys(), - vec![ - (1, mock_keys_for(1)), - (2, mock_keys_for(2)), - (3, mock_keys_for(3)), - ], + vec![(1, mock_keys_for(1)), (2, mock_keys_for(2)), (3, mock_keys_for(3)),], ); for i in 1u64..4 { diff --git a/frame/session/src/weights.rs b/frame/session/src/weights.rs index ec911d8c01cce..343bf5ffbf9a5 100644 --- a/frame/session/src/weights.rs +++ b/frame/session/src/weights.rs @@ -35,11 +35,13 @@ // --output=./frame/session/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_session. 
diff --git a/frame/society/src/lib.rs b/frame/society/src/lib.rs index 75ee130b2c290..7b11e84d67236 100644 --- a/frame/society/src/lib.rs +++ b/frame/society/src/lib.rs @@ -251,26 +251,38 @@ mod mock; #[cfg(test)] mod tests; -use rand_chacha::{rand_core::{RngCore, SeedableRng}, ChaChaRng}; -use sp_std::prelude::*; -use codec::{Encode, Decode}; -use scale_info::TypeInfo; -use sp_runtime::{Percent, RuntimeDebug, +use codec::{Decode, Encode}; +use frame_support::{ + decl_error, decl_event, decl_module, decl_storage, + dispatch::DispatchResult, + ensure, traits::{ - StaticLookup, AccountIdConversion, Saturating, Zero, IntegerSquareRoot, Hash, - TrailingZeroInput, CheckedSub - } + BalanceStatus, ChangeMembers, Currency, EnsureOrigin, ExistenceRequirement::AllowDeath, + Get, Imbalance, OnUnbalanced, Randomness, ReservableCurrency, + }, + weights::Weight, + PalletId, +}; +use frame_system::{self as system, ensure_root, ensure_signed}; +use rand_chacha::{ + rand_core::{RngCore, SeedableRng}, + ChaChaRng, }; -use frame_support::{decl_error, decl_module, decl_storage, decl_event, ensure, dispatch::DispatchResult, PalletId}; -use frame_support::weights::Weight; -use frame_support::traits::{ - Currency, ReservableCurrency, Randomness, Get, ChangeMembers, BalanceStatus, - ExistenceRequirement::AllowDeath, EnsureOrigin, OnUnbalanced, Imbalance +use scale_info::TypeInfo; +use sp_runtime::{ + traits::{ + AccountIdConversion, CheckedSub, Hash, IntegerSquareRoot, Saturating, StaticLookup, + TrailingZeroInput, Zero, + }, + Percent, RuntimeDebug, }; -use frame_system::{self as system, ensure_signed, ensure_root}; +use sp_std::prelude::*; -type BalanceOf = <>::Currency as Currency<::AccountId>>::Balance; -type NegativeImbalanceOf = <::Currency as Currency<::AccountId>>::NegativeImbalance; +type BalanceOf = + <>::Currency as Currency<::AccountId>>::Balance; +type NegativeImbalanceOf = <::Currency as Currency< + ::AccountId, +>>::NegativeImbalance; /// The module's configuration trait. 
pub trait Config: system::Config { @@ -1188,7 +1200,6 @@ fn pick_item<'a, R: RngCore, T>(rng: &mut R, items: &'a [T]) -> Option<&'a T> { /// Pick a new PRN, in the range [0, `max`] (inclusive). fn pick_usize<'a, R: RngCore>(rng: &mut R, max: usize) -> usize { - (rng.next_u32() % (max as u32 + 1)) as usize } @@ -1199,7 +1210,7 @@ impl, I: Instance> Module { mut bids: Vec>>, who: &T::AccountId, value: BalanceOf, - bid_kind: BidKind> + bid_kind: BidKind>, ) { const MAX_BID_COUNT: usize = 1000; @@ -1207,7 +1218,8 @@ impl, I: Instance> Module { // Insert new elements after the existing ones. This ensures new bids // with the same bid value are further down the list than existing ones. Ok(pos) => { - let different_bid = bids.iter() + let different_bid = bids + .iter() // Easily extract the index we are on .enumerate() // Skip ahead to the suggested position @@ -1219,25 +1231,13 @@ impl, I: Instance> Module { // If the element is not at the end of the list, insert the new element // in the spot. if let Some((p, _)) = different_bid { - bids.insert(p, Bid { - value, - who: who.clone(), - kind: bid_kind, - }); + bids.insert(p, Bid { value, who: who.clone(), kind: bid_kind }); // If the element is at the end of the list, push the element on the end. } else { - bids.push(Bid { - value, - who: who.clone(), - kind: bid_kind, - }); + bids.push(Bid { value, who: who.clone(), kind: bid_kind }); } }, - Err(pos) => bids.insert(pos, Bid { - value, - who: who.clone(), - kind: bid_kind, - }), + Err(pos) => bids.insert(pos, Bid { value, who: who.clone(), kind: bid_kind }), } // Keep it reasonably small. 
if bids.len() > MAX_BID_COUNT { @@ -1246,10 +1246,10 @@ impl, I: Instance> Module { BidKind::Deposit(deposit) => { let err_amount = T::Currency::unreserve(&popped, deposit); debug_assert!(err_amount.is_zero()); - } + }, BidKind::Vouch(voucher, _) => { >::remove(&voucher); - } + }, } Self::deposit_event(RawEvent::AutoUnbid(popped)); } @@ -1264,7 +1264,10 @@ impl, I: Instance> Module { } /// Check a user is a candidate. - fn is_candidate(candidates: &Vec>>, who: &T::AccountId) -> bool { + fn is_candidate( + candidates: &Vec>>, + who: &T::AccountId, + ) -> bool { // Looking up a candidate is the same as looking up a bid Self::is_bid(candidates, who) } @@ -1308,7 +1311,7 @@ impl, I: Instance> Module { T::MembershipChanged::change_members_sorted(&[], &[m.clone()], &members[..]); >::put(members); Ok(()) - } + }, } } @@ -1334,73 +1337,87 @@ impl, I: Instance> Module { // critical issues or side-effects. This is auto-correcting as members fall out of society. members.reserve(candidates.len()); - let maturity = >::block_number() - + Self::lock_duration(members.len() as u32); + let maturity = + >::block_number() + Self::lock_duration(members.len() as u32); let mut rewardees = Vec::new(); let mut total_approvals = 0; let mut total_slash = >::zero(); let mut total_payouts = >::zero(); - let accepted = candidates.into_iter().filter_map(|Bid {value, who: candidate, kind }| { - let mut approval_count = 0; - - // Creates a vector of (vote, member) for the given candidate - // and tallies total number of approve votes for that candidate. - let votes = members.iter() - .filter_map(|m| >::take(&candidate, m).map(|v| (v, m))) - .inspect(|&(v, _)| if v == Vote::Approve { approval_count += 1 }) - .collect::>(); - - // Select one of the votes at random. - // Note that `Vote::Skeptical` and `Vote::Reject` both reject the candidate. 
- let is_accepted = pick_item(&mut rng, &votes).map(|x| x.0) == Some(Vote::Approve); - - let matching_vote = if is_accepted { Vote::Approve } else { Vote::Reject }; - - let bad_vote = |m: &T::AccountId| { - // Voter voted wrong way (or was just a lazy skeptic) then reduce their payout - // and increase their strikes. after MaxStrikes then they go into suspension. - let amount = Self::slash_payout(m, T::WrongSideDeduction::get()); - - let strikes = >::mutate(m, |s| { - *s += 1; - *s - }); - if strikes >= T::MaxStrikes::get() { - Self::suspend_member(m); - } - amount - }; - - // Collect the voters who had a matching vote. - rewardees.extend(votes.into_iter() - .filter_map(|(v, m)| - if v == matching_vote { Some(m) } else { - total_slash += bad_vote(m); - None + let accepted = candidates + .into_iter() + .filter_map(|Bid { value, who: candidate, kind }| { + let mut approval_count = 0; + + // Creates a vector of (vote, member) for the given candidate + // and tallies total number of approve votes for that candidate. + let votes = members + .iter() + .filter_map(|m| >::take(&candidate, m).map(|v| (v, m))) + .inspect(|&(v, _)| { + if v == Vote::Approve { + approval_count += 1 + } + }) + .collect::>(); + + // Select one of the votes at random. + // Note that `Vote::Skeptical` and `Vote::Reject` both reject the candidate. + let is_accepted = + pick_item(&mut rng, &votes).map(|x| x.0) == Some(Vote::Approve); + + let matching_vote = if is_accepted { Vote::Approve } else { Vote::Reject }; + + let bad_vote = |m: &T::AccountId| { + // Voter voted wrong way (or was just a lazy skeptic) then reduce their payout + // and increase their strikes. after MaxStrikes then they go into suspension. + let amount = Self::slash_payout(m, T::WrongSideDeduction::get()); + + let strikes = >::mutate(m, |s| { + *s += 1; + *s + }); + if strikes >= T::MaxStrikes::get() { + Self::suspend_member(m); } - ).cloned() - ); + amount + }; + + // Collect the voters who had a matching vote. 
+ rewardees.extend( + votes + .into_iter() + .filter_map(|(v, m)| { + if v == matching_vote { + Some(m) + } else { + total_slash += bad_vote(m); + None + } + }) + .cloned(), + ); - if is_accepted { - total_approvals += approval_count; - total_payouts += value; - members.push(candidate.clone()); + if is_accepted { + total_approvals += approval_count; + total_payouts += value; + members.push(candidate.clone()); - Self::pay_accepted_candidate(&candidate, value, kind, maturity); + Self::pay_accepted_candidate(&candidate, value, kind, maturity); - // We track here the total_approvals so that every candidate has a unique range - // of numbers from 0 to `total_approvals` with length `approval_count` so each - // candidate is proportionally represented when selecting a "primary" below. - Some((candidate, total_approvals, value)) - } else { - // Suspend Candidate - >::insert(&candidate, (value, kind)); - Self::deposit_event(RawEvent::CandidateSuspended(candidate)); - None - } - }).collect::>(); + // We track here the total_approvals so that every candidate has a unique range + // of numbers from 0 to `total_approvals` with length `approval_count` so each + // candidate is proportionally represented when selecting a "primary" below. + Some((candidate, total_approvals, value)) + } else { + // Suspend Candidate + >::insert(&candidate, (value, kind)); + Self::deposit_event(RawEvent::CandidateSuspended(candidate)); + None + } + }) + .collect::>(); // Clean up all votes. >::remove_all(None); @@ -1412,7 +1429,12 @@ impl, I: Instance> Module { Self::bump_payout(winner, maturity, total_slash); } else { // Move the slashed amount back from payouts account to local treasury. 
- let res = T::Currency::transfer(&Self::payouts(), &Self::account_id(), total_slash, AllowDeath); + let res = T::Currency::transfer( + &Self::payouts(), + &Self::account_id(), + total_slash, + AllowDeath, + ); debug_assert!(res.is_ok()); } } @@ -1424,7 +1446,12 @@ impl, I: Instance> Module { // this should never fail since we ensure we can afford the payouts in a previous // block, but there's not much we can do to recover if it fails anyway. - let res = T::Currency::transfer(&Self::account_id(), &Self::payouts(), total_payouts, AllowDeath); + let res = T::Currency::transfer( + &Self::account_id(), + &Self::payouts(), + total_payouts, + AllowDeath, + ); debug_assert!(res.is_ok()); } @@ -1434,10 +1461,15 @@ impl, I: Instance> Module { // Choose a random number between 0 and `total_approvals` let primary_point = pick_usize(&mut rng, total_approvals - 1); // Find the zero bid or the user who falls on that point - let primary = accepted.iter().find(|e| e.2.is_zero() || e.1 > primary_point) - .expect("e.1 of final item == total_approvals; \ - worst case find will always return that item; qed") - .0.clone(); + let primary = accepted + .iter() + .find(|e| e.2.is_zero() || e.1 > primary_point) + .expect( + "e.1 of final item == total_approvals; \ + worst case find will always return that item; qed", + ) + .0 + .clone(); let accounts = accepted.into_iter().map(|x| x.0).collect::>(); @@ -1465,9 +1497,10 @@ impl, I: Instance> Module { >::put(&candidates); // Select sqrt(n) random members from the society and make them skeptics. - let pick_member = |_| pick_item(&mut rng, &members[..]).expect("exited if members empty; qed"); + let pick_member = + |_| pick_item(&mut rng, &members[..]).expect("exited if members empty; qed"); for skeptic in (0..members.len().integer_sqrt()).map(pick_member) { - for Bid{ who: c, .. } in candidates.iter() { + for Bid { who: c, .. 
} in candidates.iter() { >::insert(c, skeptic, Vote::Skeptic); } } @@ -1488,7 +1521,7 @@ impl, I: Instance> Module { // whole slash is accounted for. *amount -= rest; rest = Zero::zero(); - break; + break } } >::insert(who, &payouts[dropped..]); @@ -1498,10 +1531,12 @@ impl, I: Instance> Module { /// Bump the payout amount of `who`, to be unlocked at the given block number. fn bump_payout(who: &T::AccountId, when: T::BlockNumber, value: BalanceOf) { - if !value.is_zero(){ - >::mutate(who, |payouts| match payouts.binary_search_by_key(&when, |x| x.0) { - Ok(index) => payouts[index].1 += value, - Err(index) => payouts.insert(index, (when, value)), + if !value.is_zero() { + >::mutate(who, |payouts| { + match payouts.binary_search_by_key(&when, |x| x.0) { + Ok(index) => payouts[index].1 += value, + Err(index) => payouts.insert(index, (when, value)), + } }); } } @@ -1529,7 +1564,7 @@ impl, I: Instance> Module { let err_amount = T::Currency::unreserve(candidate, deposit); debug_assert!(err_amount.is_zero()); value - } + }, BidKind::Vouch(voucher, tip) => { // Check that the voucher is still vouching, else some other logic may have removed their status. if >::take(&voucher) == Some(VouchingStatus::Vouching) { @@ -1540,7 +1575,7 @@ impl, I: Instance> Module { } else { value } - } + }, }; Self::bump_payout(candidate, maturity, value); @@ -1555,14 +1590,12 @@ impl, I: Instance> Module { let mut approval_count = 0; let mut rejection_count = 0; // Tallies total number of approve and reject votes for the defender. 
- members.iter() - .filter_map(|m| >::take(m)) - .for_each(|v| { - match v { - Vote::Approve => approval_count += 1, - _ => rejection_count += 1, - } - }); + members.iter().filter_map(|m| >::take(m)).for_each( + |v| match v { + Vote::Approve => approval_count += 1, + _ => rejection_count += 1, + }, + ); if approval_count <= rejection_count { // User has failed the challenge diff --git a/frame/society/src/mock.rs b/frame/society/src/mock.rs index 18cdda678da6f..2ae9f7b44ba73 100644 --- a/frame/society/src/mock.rs +++ b/frame/society/src/mock.rs @@ -21,16 +21,16 @@ use super::*; use crate as pallet_society; use frame_support::{ - parameter_types, ord_parameter_types, - traits::{OnInitialize, OnFinalize}, + ord_parameter_types, parameter_types, + traits::{OnFinalize, OnInitialize}, }; use frame_support_test::TestRandomness; +use frame_system::EnsureSignedBy; use sp_core::H256; use sp_runtime::{ testing::Header, traits::{BlakeTwo256, IdentityLookup}, }; -use frame_system::EnsureSignedBy; type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -156,14 +156,16 @@ impl EnvBuilder { pub fn execute R>(mut self, f: F) -> R { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); self.balances.push((Society::account_id(), self.balance.max(self.pot))); - pallet_balances::GenesisConfig:: { - balances: self.balances, - }.assimilate_storage(&mut t).unwrap(); - pallet_society::GenesisConfig::{ + pallet_balances::GenesisConfig:: { balances: self.balances } + .assimilate_storage(&mut t) + .unwrap(); + pallet_society::GenesisConfig:: { members: self.members, pot: self.pot, max_members: self.max_members, - }.assimilate_storage(&mut t).unwrap(); + } + .assimilate_storage(&mut t) + .unwrap(); let mut ext: sp_io::TestExternalities = t.into(); ext.execute_with(f) } @@ -210,12 +212,7 @@ pub fn run_to_block(n: u64) { pub fn create_bid( value: Balance, who: AccountId, - kind: BidKind -) -> Bid -{ - Bid 
{ - who, - kind, - value - } + kind: BidKind, +) -> Bid { + Bid { who, kind, value } } diff --git a/frame/society/src/tests.rs b/frame/society/src/tests.rs index 7c83448395770..9f8e32dea5088 100644 --- a/frame/society/src/tests.rs +++ b/frame/society/src/tests.rs @@ -20,9 +20,9 @@ use super::*; use mock::*; -use frame_support::{assert_ok, assert_noop}; -use sp_runtime::traits::BadOrigin; +use frame_support::{assert_noop, assert_ok}; use sp_core::blake2_256; +use sp_runtime::traits::BadOrigin; #[test] fn founding_works() { @@ -118,10 +118,13 @@ fn bidding_works() { assert_eq!(Society::pot(), 1000); assert_eq!(Balances::free_balance(Society::account_id()), 10_000); // Choose smallest bidding users whose total is less than pot - assert_eq!(Society::candidates(), vec![ - create_bid(300, 30, BidKind::Deposit(25)), - create_bid(400, 40, BidKind::Deposit(25)), - ]); + assert_eq!( + Society::candidates(), + vec![ + create_bid(300, 30, BidKind::Deposit(25)), + create_bid(400, 40, BidKind::Deposit(25)), + ] + ); // A member votes for these candidates to join the society assert_ok!(Society::vote(Origin::signed(10), 30, true)); assert_ok!(Society::vote(Origin::signed(10), 40, true)); @@ -132,7 +135,7 @@ fn bidding_works() { assert_eq!(Balances::free_balance(Society::account_id()), 9_300); assert_eq!(Society::pot(), 1_300); // Left over from the original bids is 50 who satisfies the condition of bid less than pot. 
- assert_eq!(Society::candidates(), vec![ create_bid(500, 50, BidKind::Deposit(25)) ]); + assert_eq!(Society::candidates(), vec![create_bid(500, 50, BidKind::Deposit(25))]); // 40, now a member, can vote for 50 assert_ok!(Society::vote(Origin::signed(40), 50, true)); run_to_block(12); @@ -144,7 +147,7 @@ fn bidding_works() { // No more candidates satisfy the requirements assert_eq!(Society::candidates(), vec![]); assert_ok!(Society::defender_vote(Origin::signed(10), true)); // Keep defender around - // Next period + // Next period run_to_block(16); // Same members assert_eq!(Society::members(), vec![10, 30, 40, 50]); @@ -153,7 +156,7 @@ fn bidding_works() { // No payouts assert_eq!(Balances::free_balance(Society::account_id()), 8_800); // Candidate 60 now qualifies based on the increased pot size. - assert_eq!(Society::candidates(), vec![ create_bid(1900, 60, BidKind::Deposit(25)) ]); + assert_eq!(Society::candidates(), vec![create_bid(1900, 60, BidKind::Deposit(25))]); // Candidate 60 is voted in. 
assert_ok!(Society::vote(Origin::signed(50), 60, true)); run_to_block(20); @@ -183,7 +186,7 @@ fn unbidding_works() { assert_eq!(Balances::reserved_balance(30), 0); // 20 wins candidacy run_to_block(4); - assert_eq!(Society::candidates(), vec![ create_bid(1000, 20, BidKind::Deposit(25)) ]); + assert_eq!(Society::candidates(), vec![create_bid(1000, 20, BidKind::Deposit(25))]); }); } @@ -350,7 +353,10 @@ fn suspended_candidate_rejected_works() { assert_eq!(Society::suspended_candidate(20).is_some(), true); // Normal user cannot make judgement on suspended candidate - assert_noop!(Society::judge_suspended_candidate(Origin::signed(20), 20, Judgement::Approve), BadOrigin); + assert_noop!( + Society::judge_suspended_candidate(Origin::signed(20), 20, Judgement::Approve), + BadOrigin + ); // Suspension judgement origin makes no direct judgement assert_ok!(Society::judge_suspended_candidate(Origin::signed(2), 20, Judgement::Rebid)); @@ -391,7 +397,10 @@ fn vouch_works() { assert_ok!(Society::vouch(Origin::signed(10), 20, 1000, 100)); assert_eq!(>::get(10), Some(VouchingStatus::Vouching)); // A member cannot vouch twice at the same time - assert_noop!(Society::vouch(Origin::signed(10), 30, 100, 0), Error::::AlreadyVouching); + assert_noop!( + Society::vouch(Origin::signed(10), 30, 100, 0), + Error::::AlreadyVouching + ); // Vouching creates the right kind of bid assert_eq!(>::get(), vec![create_bid(1000, 20, BidKind::Vouch(10, 100))]); // Vouched user can become candidate @@ -475,7 +484,10 @@ fn unvouch_works() { assert_eq!(Society::members(), vec![10]); // 10 cannot vouch again - assert_noop!(Society::vouch(Origin::signed(10), 30, 100, 0), Error::::AlreadyVouching); + assert_noop!( + Society::vouch(Origin::signed(10), 30, 100, 0), + Error::::AlreadyVouching + ); // 10 cannot unvouch either, so they are banned forever. 
assert_noop!(Society::unvouch(Origin::signed(10), 0), Error::::NotVouching); }); @@ -654,7 +666,7 @@ fn bad_vote_slash_works() { assert_eq!(>::get(30), 0); assert_eq!(>::get(40), 0); // Their payout is slashed, a random person is rewarded - assert_eq!(>::get(10), vec![(5, 100), (9,2)]); + assert_eq!(>::get(10), vec![(5, 100), (9, 2)]); assert_eq!(>::get(20), vec![(5, 98)]); assert_eq!(>::get(30), vec![(5, 100)]); assert_eq!(>::get(40), vec![(5, 100)]); @@ -672,7 +684,10 @@ fn user_cannot_bid_twice() { assert_noop!(Society::bid(Origin::signed(30), 100), Error::::AlreadyBid); // Cannot vouch when already bid assert_ok!(Society::add_member(&50)); - assert_noop!(Society::vouch(Origin::signed(50), 20, 100, 100), Error::::AlreadyBid); + assert_noop!( + Society::vouch(Origin::signed(50), 20, 100, 100), + Error::::AlreadyBid + ); }); } @@ -794,7 +809,11 @@ fn max_limits_work() { assert_eq!(Society::candidates().len(), 4); // Fill up members with suspended candidates from the first rotation for i in 100..104 { - assert_ok!(Society::judge_suspended_candidate(Origin::signed(2), i, Judgement::Approve)); + assert_ok!(Society::judge_suspended_candidate( + Origin::signed(2), + i, + Judgement::Approve + )); } assert_eq!(Society::members().len(), 100); // Can't add any more members @@ -840,15 +859,18 @@ fn zero_bid_works() { assert_eq!(Society::pot(), 1000); assert_eq!(Balances::free_balance(Society::account_id()), 10_000); // Choose smallest bidding users whose total is less than pot, with only one zero bid. 
- assert_eq!(Society::candidates(), vec![ - create_bid(0, 30, BidKind::Deposit(25)), - create_bid(300, 50, BidKind::Deposit(25)), - create_bid(400, 60, BidKind::Deposit(25)), - ]); - assert_eq!(>::get(), vec![ - create_bid(0, 20, BidKind::Deposit(25)), - create_bid(0, 40, BidKind::Deposit(25)), - ]); + assert_eq!( + Society::candidates(), + vec![ + create_bid(0, 30, BidKind::Deposit(25)), + create_bid(300, 50, BidKind::Deposit(25)), + create_bid(400, 60, BidKind::Deposit(25)), + ] + ); + assert_eq!( + >::get(), + vec![create_bid(0, 20, BidKind::Deposit(25)), create_bid(0, 40, BidKind::Deposit(25)),] + ); // A member votes for these candidates to join the society assert_ok!(Society::vote(Origin::signed(10), 30, true)); assert_ok!(Society::vote(Origin::signed(10), 50, true)); @@ -878,7 +900,7 @@ fn bids_ordered_correctly() { for j in 0..5 { for i in 0..5 { - final_list.push(create_bid(j, 100 + (i * 5 + j) as u128, BidKind::Deposit(25))); + final_list.push(create_bid(j, 100 + (i * 5 + j) as u128, BidKind::Deposit(25))); } } diff --git a/frame/staking/reward-curve/src/lib.rs b/frame/staking/reward-curve/src/lib.rs index de912eee99ce2..c225c90457835 100644 --- a/frame/staking/reward-curve/src/lib.rs +++ b/frame/staking/reward-curve/src/lib.rs @@ -21,7 +21,7 @@ mod log; use log::log2; use proc_macro::TokenStream; -use proc_macro2::{TokenStream as TokenStream2, Span}; +use proc_macro2::{Span, TokenStream as TokenStream2}; use proc_macro_crate::{crate_name, FoundCrate}; use quote::{quote, ToTokens}; use std::convert::TryInto; @@ -82,7 +82,9 @@ pub fn build(input: TokenStream) -> TokenStream { let test_module = generate_test_module(&input); let imports = match crate_name("sp-runtime") { - Ok(FoundCrate::Itself) => quote!( extern crate sp_runtime as _sp_runtime; ), + Ok(FoundCrate::Itself) => quote!( + extern crate sp_runtime as _sp_runtime; + ), Ok(FoundCrate::Name(sp_runtime)) => { let ident = syn::Ident::new(&sp_runtime, Span::call_site()); quote!( extern crate #ident as 
_sp_runtime; ) @@ -99,7 +101,8 @@ pub fn build(input: TokenStream) -> TokenStream { #declaration }; #test_module - ).into() + ) + .into() } const MILLION: u32 = 1_000_000; @@ -134,10 +137,10 @@ struct Bounds { impl Bounds { fn check(&self, value: u32) -> bool { - let wrong = (self.min_strict && value <= self.min) - || (!self.min_strict && value < self.min) - || (self.max_strict && value >= self.max) - || (!self.max_strict && value > self.max); + let wrong = (self.min_strict && value <= self.min) || + (!self.min_strict && value < self.min) || + (self.max_strict && value >= self.max) || + (!self.max_strict && value > self.max); !wrong } @@ -156,17 +159,24 @@ impl core::fmt::Display for Bounds { } } -fn parse_field(input: ParseStream, bounds: Bounds) - -> syn::Result -{ +fn parse_field( + input: ParseStream, + bounds: Bounds, +) -> syn::Result { ::parse(&input)?; ::parse(&input)?; let value_lit = syn::LitInt::parse(&input)?; let value: u32 = value_lit.base10_parse()?; if !bounds.check(value) { - return Err(syn::Error::new(value_lit.span(), format!( - "Invalid {}: {}, must be in {}", Token::default().to_token_stream(), value, bounds, - ))); + return Err(syn::Error::new( + value_lit.span(), + format!( + "Invalid {}: {}, must be in {}", + Token::default().to_token_stream(), + value, + bounds, + ), + )) } Ok(value) @@ -187,54 +197,42 @@ impl Parse for INposInput { ::parse(&input)?; if !input.is_empty() { - return Err(input.error("expected end of input stream, no token expected")); + return Err(input.error("expected end of input stream, no token expected")) } - let min_inflation = parse_field::(&args_input, Bounds { - min: 0, - min_strict: true, - max: 1_000_000, - max_strict: false, - })?; + let min_inflation = parse_field::( + &args_input, + Bounds { min: 0, min_strict: true, max: 1_000_000, max_strict: false }, + )?; ::parse(&args_input)?; - let max_inflation = parse_field::(&args_input, Bounds { - min: min_inflation, - min_strict: true, - max: 1_000_000, - max_strict: 
false, - })?; + let max_inflation = parse_field::( + &args_input, + Bounds { min: min_inflation, min_strict: true, max: 1_000_000, max_strict: false }, + )?; ::parse(&args_input)?; - let ideal_stake = parse_field::(&args_input, Bounds { - min: 0_100_000, - min_strict: false, - max: 0_900_000, - max_strict: false, - })?; + let ideal_stake = parse_field::( + &args_input, + Bounds { min: 0_100_000, min_strict: false, max: 0_900_000, max_strict: false }, + )?; ::parse(&args_input)?; - let falloff = parse_field::(&args_input, Bounds { - min: 0_010_000, - min_strict: false, - max: 1_000_000, - max_strict: false, - })?; + let falloff = parse_field::( + &args_input, + Bounds { min: 0_010_000, min_strict: false, max: 1_000_000, max_strict: false }, + )?; ::parse(&args_input)?; - let max_piece_count = parse_field::(&args_input, Bounds { - min: 2, - min_strict: false, - max: 1_000, - max_strict: false, - })?; + let max_piece_count = parse_field::( + &args_input, + Bounds { min: 2, min_strict: false, max: 1_000, max_strict: false }, + )?; ::parse(&args_input)?; - let test_precision = parse_field::(&args_input, Bounds { - min: 0, - min_strict: false, - max: 1_000_000, - max_strict: false, - })?; + let test_precision = parse_field::( + &args_input, + Bounds { min: 0, min_strict: false, max: 1_000_000, max_strict: false }, + )?; >::parse(&args_input)?; if !args_input.is_empty() { - return Err(args_input.error("expected end of input stream, no token expected")); + return Err(args_input.error("expected end of input stream, no token expected")) } Ok(Self { @@ -263,7 +261,8 @@ impl INPoS { INPoS { i_0: input.min_inflation, i_ideal: (input.max_inflation as u64 * MILLION as u64 / input.ideal_stake as u64) - .try_into().unwrap(), + .try_into() + .unwrap(), i_ideal_times_x_ideal: input.max_inflation, x_ideal: input.ideal_stake, d: input.falloff, @@ -275,7 +274,7 @@ impl INPoS { // See web3 docs for the details fn compute_opposite_after_x_ideal(&self, y: u32) -> u32 { if y == self.i_0 { - 
return u32::MAX; + return u32::MAX } // Note: the log term calculated here represents a per_million value let log = log2(self.i_ideal_times_x_ideal - self.i_0, y - self.i_0); @@ -295,8 +294,8 @@ fn compute_points(input: &INposInput) -> Vec<(u32, u32)> { // For each point p: (next_p.0 - p.0) < segment_length && (next_p.1 - p.1) < segment_length. // This ensures that the total number of segment doesn't overflow max_piece_count. - let max_length = (input.max_inflation - input.min_inflation + 1_000_000 - inpos.x_ideal) - / (input.max_piece_count - 1); + let max_length = (input.max_inflation - input.min_inflation + 1_000_000 - inpos.x_ideal) / + (input.max_piece_count - 1); let mut delta_y = max_length; let mut y = input.max_inflation; @@ -322,16 +321,15 @@ fn compute_points(input: &INposInput) -> Vec<(u32, u32)> { let prev = points.last().unwrap(); // Compute the y corresponding to x=1_000_000 using the this point and the previous one. - let delta_y: u32 = ( - (next_x - 1_000_000) as u64 - * (prev.1 - next_y) as u64 - / (next_x - prev.0) as u64 - ).try_into().unwrap(); + let delta_y: u32 = ((next_x - 1_000_000) as u64 * (prev.1 - next_y) as u64 / + (next_x - prev.0) as u64) + .try_into() + .unwrap(); let y = next_y + delta_y; points.push((1_000_000, y)); - return points; + return points } points.push((next_x, next_y)); y = next_y; @@ -345,7 +343,8 @@ fn compute_points(input: &INposInput) -> Vec<(u32, u32)> { fn generate_piecewise_linear(points: Vec<(u32, u32)>) -> TokenStream2 { let mut points_tokens = quote!(); - let max = points.iter() + let max = points + .iter() .map(|&(_, x)| x) .max() .unwrap_or(0) @@ -354,13 +353,15 @@ fn generate_piecewise_linear(points: Vec<(u32, u32)>) -> TokenStream2 { .unwrap_or(1_000_000_000); for (x, y) in points { - let error = || panic!( - "Generated reward curve approximation doesn't fit into [0, 1] -> [0, 1] \ + let error = || { + panic!( + "Generated reward curve approximation doesn't fit into [0, 1] -> [0, 1] \ because of point: x = 
{:07} per million y = {:07} per million", - x, y - ); + x, y + ) + }; let x_perbill = x.checked_mul(1_000).unwrap_or_else(error); let y_perbill = y.checked_mul(1_000).unwrap_or_else(error); @@ -386,7 +387,7 @@ fn generate_test_module(input: &INposInput) -> TokenStream2 { let ident = &input.ident; let precision = input.test_precision; - let i_0 = inpos.i_0 as f64/ MILLION as f64; + let i_0 = inpos.i_0 as f64 / MILLION as f64; let i_ideal_times_x_ideal = inpos.i_ideal_times_x_ideal as f64 / MILLION as f64; let i_ideal = inpos.i_ideal as f64 / MILLION as f64; let x_ideal = inpos.x_ideal as f64 / MILLION as f64; @@ -443,5 +444,6 @@ fn generate_test_module(input: &INposInput) -> TokenStream2 { ); } } - ).into() + ) + .into() } diff --git a/frame/staking/reward-curve/src/log.rs b/frame/staking/reward-curve/src/log.rs index 747011a73e1db..06d2000619b5c 100644 --- a/frame/staking/reward-curve/src/log.rs +++ b/frame/staking/reward-curve/src/log.rs @@ -4,7 +4,7 @@ use std::convert::TryInto; macro_rules! pow2 { ($n:expr) => { 1_u32 << $n - } + }; } /// Returns the k_th per_million taylor term for a log2 function @@ -33,7 +33,7 @@ fn taylor_term(k: u32, y_num: u128, y_den: u128) -> u32 { /// * result represents a per-million output of log2 pub fn log2(p: u32, q: u32) -> u32 { assert!(p >= q); // keep p/q bound to [1, inf) - assert!(p <= u32::MAX/2); + assert!(p <= u32::MAX / 2); // This restriction should not be mandatory. But function is only tested and used for this. 
assert!(p <= 1_000_000); @@ -79,7 +79,7 @@ fn test_log() { let p: u32 = (1_000_000 as u64 * p as u64 / div as u64).try_into().unwrap(); let q: u32 = (1_000_000 as u64 * q as u64 / div as u64).try_into().unwrap(); - let res = - (log2(p, q) as i64); + let res = -(log2(p, q) as i64); let expected = ((q as f64 / p as f64).log(2.0) * 1_000_000 as f64).round() as i64; assert!((res - expected).abs() <= 6); } @@ -124,4 +124,4 @@ fn test_log_of_largest_input() { let expected = 19_931_568; let tolerance = 100; assert!((log2(p, q) as i32 - expected as i32).abs() < tolerance); -} \ No newline at end of file +} diff --git a/frame/staking/reward-fn/src/lib.rs b/frame/staking/reward-fn/src/lib.rs index 205f0207673a3..3f91c39b40550 100644 --- a/frame/staking/reward-fn/src/lib.rs +++ b/frame/staking/reward-fn/src/lib.rs @@ -19,8 +19,12 @@ //! Useful function for inflation for nominated proof of stake. -use sp_arithmetic::{Perquintill, PerThing, biguint::BigUint, traits::{Zero, SaturatedConversion}}; use core::convert::TryFrom; +use sp_arithmetic::{ + biguint::BigUint, + traits::{SaturatedConversion, Zero}, + PerThing, Perquintill, +}; /// Compute yearly inflation using function /// @@ -54,11 +58,7 @@ use core::convert::TryFrom; /// the global incentivization to get the `ideal_stake`. A higher number results in less typical /// inflation at the cost of greater volatility for validators. /// Must be more than 0.01. 
-pub fn compute_inflation( - stake: P, - ideal_stake: P, - falloff: P, -) -> P { +pub fn compute_inflation(stake: P, ideal_stake: P, falloff: P) -> P { if stake < ideal_stake { // ideal_stake is more than 0 because it is strictly more than stake return stake / ideal_stake @@ -98,9 +98,7 @@ pub fn compute_inflation( let res = compute_taylor_serie_part(&inpos_param); match u128::try_from(res.clone()) { - Ok(res) if res <= Into::::into(P::ACCURACY) => { - P::from_parts(res.saturated_into()) - }, + Ok(res) if res <= Into::::into(P::ACCURACY) => P::from_parts(res.saturated_into()), // If result is beyond bounds there is nothing we can do _ => { log::error!("Invalid inflation computation: unexpected result {:?}", res); @@ -109,7 +107,6 @@ pub fn compute_inflation( } } - /// Internal struct holding parameter info alongside other cached value. /// /// All expressed in part from `accuracy` @@ -149,12 +146,15 @@ fn compute_taylor_serie_part(p: &INPoSParam) -> BigUint { taylor_sum = taylor_sum.add(&last_taylor_term); } else { if taylor_sum >= last_taylor_term { - taylor_sum = taylor_sum.sub(&last_taylor_term) + taylor_sum = taylor_sum + .sub(&last_taylor_term) // NOTE: Should never happen as checked above .unwrap_or_else(|e| e); } else { taylor_sum_positive = !taylor_sum_positive; - taylor_sum = last_taylor_term.clone().sub(&taylor_sum) + taylor_sum = last_taylor_term + .clone() + .sub(&taylor_sum) // NOTE: Should never happen as checked above .unwrap_or_else(|e| e); } @@ -180,14 +180,13 @@ fn compute_taylor_serie_part(p: &INPoSParam) -> BigUint { /// /// `previous_taylor_term` and result are expressed with accuracy `INPoSParam.accuracy` fn compute_taylor_term(k: u32, previous_taylor_term: &BigUint, p: &INPoSParam) -> BigUint { - let x_minus_x_ideal = p.x.clone().sub(&p.x_ideal) - // NOTE: Should never happen, as x must be more than x_ideal - .unwrap_or_else(|_| BigUint::zero()); + let x_minus_x_ideal = + p.x.clone() + .sub(&p.x_ideal) + // NOTE: Should never happen, as x 
must be more than x_ideal + .unwrap_or_else(|_| BigUint::zero()); - let res = previous_taylor_term.clone() - .mul(&x_minus_x_ideal) - .mul(&p.ln2_div_d) - .div_unit(k); + let res = previous_taylor_term.clone().mul(&x_minus_x_ideal).mul(&p.ln2_div_d).div_unit(k); // p.accuracy is stripped by definition. let res = div_by_stripped(res, &p.accuracy); @@ -230,7 +229,5 @@ fn div_by_stripped(mut a: BigUint, b: &BigUint) -> BigUint { .div_unit(100_000) } - a.div(b, false) - .map(|res| res.0) - .unwrap_or_else(|| BigUint::zero()) + a.div(b, false).map(|res| res.0).unwrap_or_else(|| BigUint::zero()) } diff --git a/frame/staking/reward-fn/tests/test.rs b/frame/staking/reward-fn/tests/test.rs index 32daf9d09a76d..dc5b661c4098d 100644 --- a/frame/staking/reward-fn/tests/test.rs +++ b/frame/staking/reward-fn/tests/test.rs @@ -15,7 +15,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -use sp_arithmetic::{PerThing, Perbill, PerU16, Percent, Perquintill}; +use sp_arithmetic::{PerThing, PerU16, Perbill, Percent, Perquintill}; /// This test the precision and panics if error too big error. 
/// @@ -32,7 +32,7 @@ fn test_precision(stake: P, ideal_stake: P, falloff: P) { if error > 8f64 / accuracy_f64 && error > 8.0 * f64::EPSILON { panic!( "stake: {:?}, ideal_stake: {:?}, falloff: {:?}, res: {}, expect: {}", - stake, ideal_stake, falloff, res , expect + stake, ideal_stake, falloff, res, expect ); } } diff --git a/frame/staking/src/benchmarking.rs b/frame/staking/src/benchmarking.rs index ff7be272eec81..15a20dfb937c7 100644 --- a/frame/staking/src/benchmarking.rs +++ b/frame/staking/src/benchmarking.rs @@ -21,11 +21,11 @@ use super::*; use crate::Pallet as Staking; use testing_utils::*; -use sp_runtime::traits::One; -use frame_system::RawOrigin; pub use frame_benchmarking::{ - benchmarks, account, whitelisted_caller, whitelist_account, impl_benchmark_test_suite, + account, benchmarks, impl_benchmark_test_suite, whitelist_account, whitelisted_caller, }; +use frame_system::RawOrigin; +use sp_runtime::traits::One; const SEED: u32 = 0; const MAX_SPANS: u32 = 100; @@ -36,13 +36,15 @@ const MAX_SLASHES: u32 = 1000; // Add slashing spans to a user account. Not relevant for actual use, only to benchmark // read and write operations. fn add_slashing_spans(who: &T::AccountId, spans: u32) { - if spans == 0 { return } + if spans == 0 { + return + } // For the first slashing span, we initialize let mut slashing_spans = crate::slashing::SlashingSpans::new(0); SpanSlash::::insert((who, 0), crate::slashing::SpanRecord::default()); - for i in 1 .. spans { + for i in 1..spans { assert!(slashing_spans.end_span(i)); SpanSlash::::insert((who, i), crate::slashing::SpanRecord::default()); } @@ -56,7 +58,7 @@ pub fn create_validator_with_nominators( n: u32, upper_bound: u32, dead: bool, - destination: RewardDestination + destination: RewardDestination, ) -> Result<(T::AccountId, Vec<(T::AccountId, T::AccountId)>), &'static str> { // Clean up any existing state. 
clear_validators_and_nominators::(); @@ -64,10 +66,8 @@ pub fn create_validator_with_nominators( let mut points_individual = Vec::new(); let (v_stash, v_controller) = create_stash_controller::(0, 100, destination.clone())?; - let validator_prefs = ValidatorPrefs { - commission: Perbill::from_percent(50), - .. Default::default() - }; + let validator_prefs = + ValidatorPrefs { commission: Perbill::from_percent(50), ..Default::default() }; Staking::::validate(RawOrigin::Signed(v_controller).into(), validator_prefs)?; let stash_lookup: ::Source = T::Lookup::unlookup(v_stash.clone()); @@ -77,14 +77,17 @@ pub fn create_validator_with_nominators( let mut nominators = Vec::new(); // Give the validator n nominators, but keep total users in the system the same. - for i in 0 .. upper_bound { + for i in 0..upper_bound { let (n_stash, n_controller) = if !dead { create_stash_controller::(u32::MAX - i, 100, destination.clone())? } else { create_stash_and_dead_controller::(u32::MAX - i, 100, destination.clone())? }; if i < n { - Staking::::nominate(RawOrigin::Signed(n_controller.clone()).into(), vec![stash_lookup.clone()])?; + Staking::::nominate( + RawOrigin::Signed(n_controller.clone()).into(), + vec![stash_lookup.clone()], + )?; nominators.push((n_stash, n_controller)); } } @@ -639,7 +642,7 @@ benchmarks! 
{ #[cfg(test)] mod tests { use super::*; - use crate::mock::{ExtBuilder, Test, Balances, Staking, Origin}; + use crate::mock::{Balances, ExtBuilder, Origin, Staking, Test}; use frame_support::assert_ok; #[test] @@ -654,7 +657,8 @@ mod tests { ::MAX_NOMINATIONS as usize, false, None, - ).unwrap(); + ) + .unwrap(); let count_validators = Validators::::iter().count(); let count_nominators = Nominators::::iter().count(); @@ -674,7 +678,8 @@ mod tests { ::MaxNominatorRewardedPerValidator::get() as u32, false, RewardDestination::Staked, - ).unwrap(); + ) + .unwrap(); assert_eq!(nominators.len() as u32, n); @@ -698,7 +703,8 @@ mod tests { ::MaxNominatorRewardedPerValidator::get() as u32, false, RewardDestination::Staked, - ).unwrap(); + ) + .unwrap(); // Add 20 slashing spans let num_of_slashing_spans = 20; @@ -706,14 +712,14 @@ mod tests { let slashing_spans = SlashingSpans::::get(&validator_stash).unwrap(); assert_eq!(slashing_spans.iter().count(), num_of_slashing_spans as usize); - for i in 0 .. num_of_slashing_spans { + for i in 0..num_of_slashing_spans { assert!(SpanSlash::::contains_key((&validator_stash, i))); } // Test everything is cleaned up assert_ok!(Staking::kill_stash(&validator_stash, num_of_slashing_spans)); assert!(SlashingSpans::::get(&validator_stash).is_none()); - for i in 0 .. 
num_of_slashing_spans { + for i in 0..num_of_slashing_spans { assert!(!SpanSlash::::contains_key((&validator_stash, i))); } }); @@ -726,13 +732,17 @@ mod tests { let n = 100; let selected_benchmark = SelectedBenchmark::payout_all; - let c = vec![(frame_benchmarking::BenchmarkParameter::v, v), (frame_benchmarking::BenchmarkParameter::n, n)]; + let c = vec![ + (frame_benchmarking::BenchmarkParameter::v, v), + (frame_benchmarking::BenchmarkParameter::n, n), + ]; let closure_to_benchmark = >::instance( &selected_benchmark, &c, - true - ).unwrap(); + true, + ) + .unwrap(); assert_ok!(closure_to_benchmark()); }); diff --git a/frame/staking/src/inflation.rs b/frame/staking/src/inflation.rs index e5259543fd4ba..30169e06768a5 100644 --- a/frame/staking/src/inflation.rs +++ b/frame/staking/src/inflation.rs @@ -20,7 +20,7 @@ //! The staking rate in NPoS is the total amount of tokens staked by nominators and validators, //! divided by the total token supply. -use sp_runtime::{Perbill, traits::AtLeast32BitUnsigned, curve::PiecewiseLinear}; +use sp_runtime::{curve::PiecewiseLinear, traits::AtLeast32BitUnsigned, Perbill}; /// The total payout to all validators (and their nominators) per era and maximum payout. /// @@ -33,16 +33,18 @@ pub fn compute_total_payout( yearly_inflation: &PiecewiseLinear<'static>, npos_token_staked: N, total_tokens: N, - era_duration: u64 -) -> (N, N) where N: AtLeast32BitUnsigned + Clone { + era_duration: u64, +) -> (N, N) +where + N: AtLeast32BitUnsigned + Clone, +{ // Milliseconds per year for the Julian year (365.25 days). 
const MILLISECONDS_PER_YEAR: u64 = 1000 * 3600 * 24 * 36525 / 100; let portion = Perbill::from_rational(era_duration as u64, MILLISECONDS_PER_YEAR); - let payout = portion * yearly_inflation.calculate_for_fraction_times_denominator( - npos_token_staked, - total_tokens.clone(), - ); + let payout = portion * + yearly_inflation + .calculate_for_fraction_times_denominator(npos_token_staked, total_tokens.clone()); let maximum = portion * (yearly_inflation.maximum * total_tokens); (payout, maximum) } @@ -98,7 +100,8 @@ mod test { 2_500_000_000_000_000_000_000_000_000u128, 5_000_000_000_000_000_000_000_000_000u128, HOUR - ).0, + ) + .0, 57_038_500_000_000_000_000_000 ); } diff --git a/frame/staking/src/lib.rs b/frame/staking/src/lib.rs index e94feaa43cd2c..db29ab90ba2b6 100644 --- a/frame/staking/src/lib.rs +++ b/frame/staking/src/lib.rs @@ -267,58 +267,50 @@ #![recursion_limit = "128"] #![cfg_attr(not(feature = "std"), no_std)] +#[cfg(any(feature = "runtime-benchmarks", test))] +pub mod benchmarking; #[cfg(test)] mod mock; -#[cfg(test)] -mod tests; #[cfg(any(feature = "runtime-benchmarks", test))] pub mod testing_utils; -#[cfg(any(feature = "runtime-benchmarks", test))] -pub mod benchmarking; +#[cfg(test)] +mod tests; -pub mod slashing; pub mod inflation; +pub mod slashing; pub mod weights; -use sp_std::{ - result, - prelude::*, - collections::btree_map::BTreeMap, - convert::From, -}; -use codec::{HasCompact, Encode, Decode}; -use scale_info::TypeInfo; +use codec::{Decode, Encode, HasCompact}; +use frame_election_provider_support::{data_provider, ElectionProvider, Supports, VoteWeight}; use frame_support::{ pallet_prelude::*, + traits::{ + Currency, CurrencyToVote, EnsureOrigin, EstimateNextNewSession, Get, Imbalance, + LockIdentifier, LockableCurrency, OnUnbalanced, UnixTime, WithdrawReasons, + }, weights::{ - Weight, WithPostDispatchInfo, constants::{WEIGHT_PER_MICROS, WEIGHT_PER_NANOS}, - }, - traits::{ - Currency, LockIdentifier, LockableCurrency, WithdrawReasons, 
OnUnbalanced, Imbalance, Get, - UnixTime, EstimateNextNewSession, EnsureOrigin, CurrencyToVote, + Weight, WithPostDispatchInfo, }, }; +use frame_system::{ensure_root, ensure_signed, offchain::SendTransactionTypes, pallet_prelude::*}; +pub use pallet::*; use pallet_session::historical; +use scale_info::TypeInfo; use sp_runtime::{ - Percent, Perbill, RuntimeDebug, DispatchError, curve::PiecewiseLinear, traits::{ - Convert, Zero, StaticLookup, CheckedSub, Saturating, SaturatedConversion, - AtLeast32BitUnsigned, Bounded, + AtLeast32BitUnsigned, Bounded, CheckedSub, Convert, SaturatedConversion, Saturating, + StaticLookup, Zero, }, + DispatchError, Perbill, Percent, RuntimeDebug, }; use sp_staking::{ + offence::{Offence, OffenceDetails, OffenceError, OnOffenceHandler, ReportOffence}, SessionIndex, - offence::{OnOffenceHandler, OffenceDetails, Offence, ReportOffence, OffenceError}, -}; -use frame_system::{ - ensure_signed, ensure_root, pallet_prelude::*, - offchain::SendTransactionTypes, }; -use frame_election_provider_support::{ElectionProvider, VoteWeight, Supports, data_provider}; +use sp_std::{collections::btree_map::BTreeMap, convert::From, prelude::*, result}; pub use weights::WeightInfo; -pub use pallet::*; const STAKING_ID: LockIdentifier = *b"staking "; pub(crate) const LOG_TARGET: &'static str = "runtime::staking"; @@ -424,10 +416,7 @@ pub struct ValidatorPrefs { impl Default for ValidatorPrefs { fn default() -> Self { - ValidatorPrefs { - commission: Default::default(), - blocked: false, - } + ValidatorPrefs { commission: Default::default(), blocked: false } } } @@ -463,20 +452,23 @@ pub struct StakingLedger { pub claimed_rewards: Vec, } -impl< - AccountId, - Balance: HasCompact + Copy + Saturating + AtLeast32BitUnsigned, -> StakingLedger { +impl + StakingLedger +{ /// Remove entries from `unlocking` that are sufficiently old and reduce the /// total by the sum of their balances. 
fn consolidate_unlocked(self, current_era: EraIndex) -> Self { let mut total = self.total; - let unlocking = self.unlocking.into_iter() - .filter(|chunk| if chunk.era > current_era { - true - } else { - total = total.saturating_sub(chunk.value); - false + let unlocking = self + .unlocking + .into_iter() + .filter(|chunk| { + if chunk.era > current_era { + true + } else { + total = total.saturating_sub(chunk.value); + false + } }) .collect(); @@ -485,7 +477,7 @@ impl< total, active: self.active, unlocking, - claimed_rewards: self.claimed_rewards + claimed_rewards: self.claimed_rewards, } } @@ -515,7 +507,8 @@ impl< } } -impl StakingLedger where +impl StakingLedger +where Balance: AtLeast32BitUnsigned + Saturating + Copy, { /// Slash the validator for a given amount of balance. This can grow the value @@ -524,39 +517,34 @@ impl StakingLedger where /// /// Slashes from `active` funds first, and then `unlocking`, starting with the /// chunks that are closest to unlocking. - fn slash( - &mut self, - mut value: Balance, - minimum_balance: Balance, - ) -> Balance { + fn slash(&mut self, mut value: Balance, minimum_balance: Balance) -> Balance { let pre_total = self.total; let total = &mut self.total; let active = &mut self.active; - let slash_out_of = | - total_remaining: &mut Balance, - target: &mut Balance, - value: &mut Balance, - | { - let mut slash_from_target = (*value).min(*target); + let slash_out_of = + |total_remaining: &mut Balance, target: &mut Balance, value: &mut Balance| { + let mut slash_from_target = (*value).min(*target); - if !slash_from_target.is_zero() { - *target -= slash_from_target; + if !slash_from_target.is_zero() { + *target -= slash_from_target; - // Don't leave a dust balance in the staking system. - if *target <= minimum_balance { - slash_from_target += *target; - *value += sp_std::mem::replace(target, Zero::zero()); - } + // Don't leave a dust balance in the staking system. 
+ if *target <= minimum_balance { + slash_from_target += *target; + *value += sp_std::mem::replace(target, Zero::zero()); + } - *total_remaining = total_remaining.saturating_sub(slash_from_target); - *value -= slash_from_target; - } - }; + *total_remaining = total_remaining.saturating_sub(slash_from_target); + *value -= slash_from_target; + } + }; slash_out_of(total, active, &mut value); - let i = self.unlocking.iter_mut() + let i = self + .unlocking + .iter_mut() .map(|chunk| { slash_out_of(total, &mut chunk.value, &mut value); chunk.value @@ -598,7 +586,9 @@ pub struct IndividualExposure { } /// A snapshot of the stake backing a single validator in the system. -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, Default, RuntimeDebug, TypeInfo)] +#[derive( + PartialEq, Eq, PartialOrd, Ord, Clone, Encode, Decode, Default, RuntimeDebug, TypeInfo, +)] pub struct Exposure { /// The total balance backing this validator. #[codec(compact)] @@ -642,7 +632,8 @@ pub trait SessionInterface: frame_system::Config { fn prune_historical_up_to(up_to: SessionIndex); } -impl SessionInterface<::AccountId> for T where +impl SessionInterface<::AccountId> for T +where T: pallet_session::Config::AccountId>, T: pallet_session::historical::Config< FullIdentification = Exposure<::AccountId, BalanceOf>, @@ -650,8 +641,10 @@ impl SessionInterface<::AccountId> for T w >, T::SessionHandler: pallet_session::SessionHandler<::AccountId>, T::SessionManager: pallet_session::SessionManager<::AccountId>, - T::ValidatorIdOf: - Convert<::AccountId, Option<::AccountId>>, + T::ValidatorIdOf: Convert< + ::AccountId, + Option<::AccountId>, + >, { fn disable_validator(validator: &::AccountId) -> Result { >::disable(validator) @@ -692,10 +685,9 @@ impl EraPayout for () { /// Adaptor to turn a `PiecewiseLinear` curve definition into an `EraPayout` impl, used for /// backwards compatibility. 
pub struct ConvertCurve(sp_std::marker::PhantomData); -impl< - Balance: AtLeast32BitUnsigned + Clone, - T: Get<&'static PiecewiseLinear<'static>>, -> EraPayout for ConvertCurve { +impl>> + EraPayout for ConvertCurve +{ fn era_payout( total_staked: Balance, total_issuance: Balance, @@ -762,8 +754,14 @@ pub mod migrations { use super::*; pub fn pre_migrate() -> Result<(), &'static str> { - assert!(CounterForValidators::::get().is_zero(), "CounterForValidators already set."); - assert!(CounterForNominators::::get().is_zero(), "CounterForNominators already set."); + assert!( + CounterForValidators::::get().is_zero(), + "CounterForValidators already set." + ); + assert!( + CounterForNominators::::get().is_zero(), + "CounterForNominators already set." + ); assert!(StorageVersion::::get() == Releases::V6_0_0); Ok(()) } @@ -779,16 +777,14 @@ pub mod migrations { StorageVersion::::put(Releases::V7_0_0); log!(info, "Completed staking migration to Releases::V7_0_0"); - T::DbWeight::get().reads_writes( - validator_count.saturating_add(nominator_count).into(), - 2, - ) + T::DbWeight::get() + .reads_writes(validator_count.saturating_add(nominator_count).into(), 2) } } pub mod v6 { use super::*; - use frame_support::{traits::Get, weights::Weight, generate_storage_alias}; + use frame_support::{generate_storage_alias, traits::Get, weights::Weight}; // NOTE: value type doesn't matter, we just set it to () here. generate_storage_alias!(Staking, SnapshotValidators => Value<()>); @@ -806,7 +802,10 @@ pub mod migrations { log!(info, "QueuedElected.exits()? {:?}", QueuedElected::exists()); log!(info, "QueuedScore.exits()? {:?}", QueuedScore::exists()); // these must exist. - assert!(IsCurrentSessionFinal::exists(), "IsCurrentSessionFinal storage item not found!"); + assert!( + IsCurrentSessionFinal::exists(), + "IsCurrentSessionFinal storage item not found!" 
+ ); assert!(EraElectionStatus::exists(), "EraElectionStatus storage item not found!"); Ok(()) } @@ -935,7 +934,9 @@ pub mod pallet { } #[pallet::type_value] - pub(crate) fn HistoryDepthOnEmpty() -> u32 { 84u32 } + pub(crate) fn HistoryDepthOnEmpty() -> u32 { + 84u32 + } /// Number of eras to keep in history. /// @@ -981,28 +982,22 @@ pub mod pallet { /// Map from all (unlocked) "controller" accounts to the info regarding the staking. #[pallet::storage] #[pallet::getter(fn ledger)] - pub type Ledger = StorageMap< - _, - Blake2_128Concat, T::AccountId, - StakingLedger>, - >; + pub type Ledger = + StorageMap<_, Blake2_128Concat, T::AccountId, StakingLedger>>; /// Where the reward payment should be made. Keyed by stash. #[pallet::storage] #[pallet::getter(fn payee)] - pub type Payee = StorageMap< - _, - Twox64Concat, T::AccountId, - RewardDestination, - ValueQuery, - >; + pub type Payee = + StorageMap<_, Twox64Concat, T::AccountId, RewardDestination, ValueQuery>; /// The map from (wannabe) validator stash key to the preferences of that validator. /// /// When updating this storage item, you must also update the `CounterForValidators`. #[pallet::storage] #[pallet::getter(fn validators)] - pub type Validators = StorageMap<_, Twox64Concat, T::AccountId, ValidatorPrefs, ValueQuery>; + pub type Validators = + StorageMap<_, Twox64Concat, T::AccountId, ValidatorPrefs, ValueQuery>; /// A tracker to keep count of the number of items in the `Validators` map. #[pallet::storage] @@ -1019,7 +1014,8 @@ pub mod pallet { /// When updating this storage item, you must also update the `CounterForNominators`. #[pallet::storage] #[pallet::getter(fn nominators)] - pub type Nominators = StorageMap<_, Twox64Concat, T::AccountId, Nominations>; + pub type Nominators = + StorageMap<_, Twox64Concat, T::AccountId, Nominations>; /// A tracker to keep count of the number of items in the `Nominators` map. 
#[pallet::storage] @@ -1065,8 +1061,10 @@ pub mod pallet { #[pallet::getter(fn eras_stakers)] pub type ErasStakers = StorageDoubleMap< _, - Twox64Concat, EraIndex, - Twox64Concat, T::AccountId, + Twox64Concat, + EraIndex, + Twox64Concat, + T::AccountId, Exposure>, ValueQuery, >; @@ -1086,8 +1084,10 @@ pub mod pallet { #[pallet::getter(fn eras_stakers_clipped)] pub type ErasStakersClipped = StorageDoubleMap< _, - Twox64Concat, EraIndex, - Twox64Concat, T::AccountId, + Twox64Concat, + EraIndex, + Twox64Concat, + T::AccountId, Exposure>, ValueQuery, >; @@ -1102,8 +1102,10 @@ pub mod pallet { #[pallet::getter(fn eras_validator_prefs)] pub type ErasValidatorPrefs = StorageDoubleMap< _, - Twox64Concat, EraIndex, - Twox64Concat, T::AccountId, + Twox64Concat, + EraIndex, + Twox64Concat, + T::AccountId, ValidatorPrefs, ValueQuery, >; @@ -1119,18 +1121,15 @@ pub mod pallet { /// If reward hasn't been set or has been removed then 0 reward is returned. #[pallet::storage] #[pallet::getter(fn eras_reward_points)] - pub type ErasRewardPoints = StorageMap< - _, - Twox64Concat, EraIndex, - EraRewardPoints, - ValueQuery, - >; + pub type ErasRewardPoints = + StorageMap<_, Twox64Concat, EraIndex, EraRewardPoints, ValueQuery>; /// The total amount staked for the last `HISTORY_DEPTH` eras. /// If total hasn't been set or has been removed then 0 stake is returned. #[pallet::storage] #[pallet::getter(fn eras_total_stake)] - pub type ErasTotalStake = StorageMap<_, Twox64Concat, EraIndex, BalanceOf, ValueQuery>; + pub type ErasTotalStake = + StorageMap<_, Twox64Concat, EraIndex, BalanceOf, ValueQuery>; /// Mode of era forcing. 
#[pallet::storage] @@ -1154,7 +1153,8 @@ pub mod pallet { #[pallet::storage] pub type UnappliedSlashes = StorageMap< _, - Twox64Concat, EraIndex, + Twox64Concat, + EraIndex, Vec>>, ValueQuery, >; @@ -1164,37 +1164,38 @@ pub mod pallet { /// Must contains information for eras for the range: /// `[active_era - bounding_duration; active_era]` #[pallet::storage] - pub(crate) type BondedEras = StorageValue<_, Vec<(EraIndex, SessionIndex)>, ValueQuery>; + pub(crate) type BondedEras = + StorageValue<_, Vec<(EraIndex, SessionIndex)>, ValueQuery>; /// All slashing events on validators, mapped by era to the highest slash proportion /// and slash value of the era. #[pallet::storage] pub(crate) type ValidatorSlashInEra = StorageDoubleMap< _, - Twox64Concat, EraIndex, - Twox64Concat, T::AccountId, + Twox64Concat, + EraIndex, + Twox64Concat, + T::AccountId, (Perbill, BalanceOf), >; /// All slashing events on nominators, mapped by era to the highest slash value of the era. #[pallet::storage] - pub(crate) type NominatorSlashInEra = StorageDoubleMap< - _, - Twox64Concat, EraIndex, - Twox64Concat, T::AccountId, - BalanceOf, - >; + pub(crate) type NominatorSlashInEra = + StorageDoubleMap<_, Twox64Concat, EraIndex, Twox64Concat, T::AccountId, BalanceOf>; /// Slashing spans for stash accounts. #[pallet::storage] - pub(crate) type SlashingSpans = StorageMap<_, Twox64Concat, T::AccountId, slashing::SlashingSpans>; + pub(crate) type SlashingSpans = + StorageMap<_, Twox64Concat, T::AccountId, slashing::SlashingSpans>; /// Records information about the maximum slash of a stash within a slashing span, /// as well as how much reward has been paid out. 
#[pallet::storage] pub(crate) type SpanSlash = StorageMap< _, - Twox64Concat, (T::AccountId, slashing::SpanIndex), + Twox64Concat, + (T::AccountId, slashing::SpanIndex), slashing::SpanRecord>, ValueQuery, >; @@ -1281,18 +1282,15 @@ pub mod pallet { RewardDestination::Staked, ); let _ = match status { - StakerStatus::Validator => { - >::validate( - T::Origin::from(Some(controller.clone()).into()), - Default::default(), - ) - }, - StakerStatus::Nominator(votes) => { - >::nominate( - T::Origin::from(Some(controller.clone()).into()), - votes.iter().map(|l| T::Lookup::unlookup(l.clone())).collect(), - ) - }, _ => Ok(()) + StakerStatus::Validator => >::validate( + T::Origin::from(Some(controller.clone()).into()), + Default::default(), + ), + StakerStatus::Nominator(votes) => >::nominate( + T::Origin::from(Some(controller.clone()).into()), + votes.iter().map(|l| T::Lookup::unlookup(l.clone())).collect(), + ), + _ => Ok(()), }; } } @@ -1536,7 +1534,10 @@ pub mod pallet { ledger.total += extra; ledger.active += extra; // Last check: the new active amount of ledger must be more than ED. - ensure!(ledger.active >= T::Currency::minimum_balance(), Error::::InsufficientBond); + ensure!( + ledger.active >= T::Currency::minimum_balance(), + Error::::InsufficientBond + ); Self::deposit_event(Event::::Bonded(stash, extra)); Self::update_ledger(&controller, &ledger); @@ -1564,13 +1565,13 @@ pub mod pallet { /// /// See also [`Call::withdraw_unbonded`]. 
#[pallet::weight(T::WeightInfo::unbond())] - pub fn unbond(origin: OriginFor, #[pallet::compact] value: BalanceOf) -> DispatchResult { + pub fn unbond( + origin: OriginFor, + #[pallet::compact] value: BalanceOf, + ) -> DispatchResult { let controller = ensure_signed(origin)?; let mut ledger = Self::ledger(&controller).ok_or(Error::::NotController)?; - ensure!( - ledger.unlocking.len() < MAX_UNLOCKING_CHUNKS, - Error::::NoMoreChunks, - ); + ensure!(ledger.unlocking.len() < MAX_UNLOCKING_CHUNKS, Error::::NoMoreChunks,); let mut value = value.min(ledger.active); @@ -1631,22 +1632,23 @@ pub mod pallet { ledger = ledger.consolidate_unlocked(current_era) } - let post_info_weight = if ledger.unlocking.is_empty() && ledger.active < T::Currency::minimum_balance() { - // This account must have called `unbond()` with some value that caused the active - // portion to fall below existential deposit + will have no more unlocking chunks - // left. We can now safely remove all staking-related information. - Self::kill_stash(&stash, num_slashing_spans)?; - // Remove the lock. - T::Currency::remove_lock(STAKING_ID, &stash); - // This is worst case scenario, so we use the full weight and return None - None - } else { - // This was the consequence of a partial unbond. just update the ledger and move on. - Self::update_ledger(&controller, &ledger); + let post_info_weight = + if ledger.unlocking.is_empty() && ledger.active < T::Currency::minimum_balance() { + // This account must have called `unbond()` with some value that caused the active + // portion to fall below existential deposit + will have no more unlocking chunks + // left. We can now safely remove all staking-related information. + Self::kill_stash(&stash, num_slashing_spans)?; + // Remove the lock. + T::Currency::remove_lock(STAKING_ID, &stash); + // This is worst case scenario, so we use the full weight and return None + None + } else { + // This was the consequence of a partial unbond. just update the ledger and move on. 
+ Self::update_ledger(&controller, &ledger); - // This is only an update, so we use less overall weight. - Some(T::WeightInfo::withdraw_unbonded_update(num_slashing_spans)) - }; + // This is only an update, so we use less overall weight. + Some(T::WeightInfo::withdraw_unbonded_update(num_slashing_spans)) + }; // `old_total` should never be less than the new total because // `consolidate_unlocked` strictly subtracts balance. @@ -1677,7 +1679,10 @@ pub mod pallet { // If this error is reached, we need to adjust the `MinValidatorBond` and start calling `chill_other`. // Until then, we explicitly block new validators to protect the runtime. if let Some(max_validators) = MaxValidatorsCount::::get() { - ensure!(CounterForValidators::::get() < max_validators, Error::::TooManyValidators); + ensure!( + CounterForValidators::::get() < max_validators, + Error::::TooManyValidators + ); } } @@ -1713,7 +1718,10 @@ pub mod pallet { // If this error is reached, we need to adjust the `MinNominatorBond` and start calling `chill_other`. // Until then, we explicitly block new nominators to protect the runtime. 
if let Some(max_nominators) = MaxNominatorsCount::::get() { - ensure!(CounterForNominators::::get() < max_nominators, Error::::TooManyNominators); + ensure!( + CounterForNominators::::get() < max_nominators, + Error::::TooManyNominators + ); } } @@ -1722,13 +1730,18 @@ pub mod pallet { let old = Nominators::::get(stash).map_or_else(Vec::new, |x| x.targets); - let targets = targets.into_iter() + let targets = targets + .into_iter() .map(|t| T::Lookup::lookup(t).map_err(DispatchError::from)) - .map(|n| n.and_then(|n| if old.contains(&n) || !Validators::::get(&n).blocked { - Ok(n) - } else { - Err(Error::::BadTarget.into()) - })) + .map(|n| { + n.and_then(|n| { + if old.contains(&n) || !Validators::::get(&n).blocked { + Ok(n) + } else { + Err(Error::::BadTarget.into()) + } + }) + }) .collect::, _>>()?; let nominations = Nominations { @@ -2043,7 +2056,9 @@ pub mod pallet { /// NOTE: weights are assuming that payouts are made to alive stash account (Staked). /// Paying even a dead controller is cheaper weight-wise. We don't do any refunds here. /// # - #[pallet::weight(T::WeightInfo::payout_stakers_alive_staked(T::MaxNominatorRewardedPerValidator::get()))] + #[pallet::weight(T::WeightInfo::payout_stakers_alive_staked( + T::MaxNominatorRewardedPerValidator::get() + ))] pub fn payout_stakers( origin: OriginFor, validator_stash: T::AccountId, @@ -2078,10 +2093,11 @@ pub mod pallet { Self::deposit_event(Event::::Bonded(ledger.stash.clone(), value)); Self::update_ledger(&controller, &ledger); Ok(Some( - 35 * WEIGHT_PER_MICROS - + 50 * WEIGHT_PER_NANOS * (ledger.unlocking.len() as Weight) - + T::DbWeight::get().reads_writes(3, 2) - ).into()) + 35 * WEIGHT_PER_MICROS + + 50 * WEIGHT_PER_NANOS * (ledger.unlocking.len() as Weight) + + T::DbWeight::get().reads_writes(3, 2), + ) + .into()) } /// Set `HistoryDepth` value. 
This function will delete any history information @@ -2106,7 +2122,8 @@ pub mod pallet { /// - Writes Each: ErasValidatorReward, ErasRewardPoints, ErasTotalStake, ErasStartSessionIndex /// # #[pallet::weight(T::WeightInfo::set_history_depth(*_era_items_deleted))] - pub fn set_history_depth(origin: OriginFor, + pub fn set_history_depth( + origin: OriginFor, #[pallet::compact] new_history_depth: EraIndex, #[pallet::compact] _era_items_deleted: u32, ) -> DispatchResult { @@ -2164,20 +2181,29 @@ pub mod pallet { /// Note: Making this call only makes sense if you first set the validator preferences to /// block any further nominations. #[pallet::weight(T::WeightInfo::kick(who.len() as u32))] - pub fn kick(origin: OriginFor, who: Vec<::Source>) -> DispatchResult { + pub fn kick( + origin: OriginFor, + who: Vec<::Source>, + ) -> DispatchResult { let controller = ensure_signed(origin)?; let ledger = Self::ledger(&controller).ok_or(Error::::NotController)?; let stash = &ledger.stash; - for nom_stash in who.into_iter() + for nom_stash in who + .into_iter() .map(T::Lookup::lookup) .collect::, _>>()? .into_iter() { - Nominators::::mutate(&nom_stash, |maybe_nom| if let Some(ref mut nom) = maybe_nom { - if let Some(pos) = nom.targets.iter().position(|v| v == stash) { - nom.targets.swap_remove(pos); - Self::deposit_event(Event::::Kicked(nom_stash.clone(), stash.clone())); + Nominators::::mutate(&nom_stash, |maybe_nom| { + if let Some(ref mut nom) = maybe_nom { + if let Some(pos) = nom.targets.iter().position(|v| v == stash) { + nom.targets.swap_remove(pos); + Self::deposit_event(Event::::Kicked( + nom_stash.clone(), + stash.clone(), + )); + } } }); } @@ -2241,10 +2267,7 @@ pub mod pallet { // TODO: Maybe we can deprecate `chill` in the future. 
// https://github.com/paritytech/substrate/issues/9111 #[pallet::weight(T::WeightInfo::chill_other())] - pub fn chill_other( - origin: OriginFor, - controller: T::AccountId, - ) -> DispatchResult { + pub fn chill_other(origin: OriginFor, controller: T::AccountId) -> DispatchResult { // Anyone can call this function. let caller = ensure_signed(origin)?; let ledger = Self::ledger(&controller).ok_or(Error::::NotController)?; @@ -2263,14 +2286,22 @@ pub mod pallet { if caller != controller { let threshold = ChillThreshold::::get().ok_or(Error::::CannotChillOther)?; let min_active_bond = if Nominators::::contains_key(&stash) { - let max_nominator_count = MaxNominatorsCount::::get().ok_or(Error::::CannotChillOther)?; + let max_nominator_count = + MaxNominatorsCount::::get().ok_or(Error::::CannotChillOther)?; let current_nominator_count = CounterForNominators::::get(); - ensure!(threshold * max_nominator_count < current_nominator_count, Error::::CannotChillOther); + ensure!( + threshold * max_nominator_count < current_nominator_count, + Error::::CannotChillOther + ); MinNominatorBond::::get() } else if Validators::::contains_key(&stash) { - let max_validator_count = MaxValidatorsCount::::get().ok_or(Error::::CannotChillOther)?; + let max_validator_count = + MaxValidatorsCount::::get().ok_or(Error::::CannotChillOther)?; let current_validator_count = CounterForValidators::::get(); - ensure!(threshold * max_validator_count < current_validator_count, Error::::CannotChillOther); + ensure!( + threshold * max_validator_count < current_validator_count, + Error::::CannotChillOther + ); MinValidatorBond::::get() } else { Zero::zero() @@ -2313,35 +2344,40 @@ impl Pallet { }) } - fn do_payout_stakers(validator_stash: T::AccountId, era: EraIndex) -> DispatchResultWithPostInfo { + fn do_payout_stakers( + validator_stash: T::AccountId, + era: EraIndex, + ) -> DispatchResultWithPostInfo { // Validate input data let current_era = CurrentEra::::get().ok_or( - 
Error::::InvalidEraToReward.with_weight(T::WeightInfo::payout_stakers_alive_staked(0)) + Error::::InvalidEraToReward + .with_weight(T::WeightInfo::payout_stakers_alive_staked(0)), )?; let history_depth = Self::history_depth(); ensure!( era <= current_era && era >= current_era.saturating_sub(history_depth), - Error::::InvalidEraToReward.with_weight(T::WeightInfo::payout_stakers_alive_staked(0)) + Error::::InvalidEraToReward + .with_weight(T::WeightInfo::payout_stakers_alive_staked(0)) ); // Note: if era has no reward to be claimed, era may be future. better not to update // `ledger.claimed_rewards` in this case. - let era_payout = >::get(&era) - .ok_or_else(|| - Error::::InvalidEraToReward - .with_weight(T::WeightInfo::payout_stakers_alive_staked(0)) - )?; + let era_payout = >::get(&era).ok_or_else(|| { + Error::::InvalidEraToReward + .with_weight(T::WeightInfo::payout_stakers_alive_staked(0)) + })?; let controller = Self::bonded(&validator_stash).ok_or( - Error::::NotStash.with_weight(T::WeightInfo::payout_stakers_alive_staked(0)) + Error::::NotStash.with_weight(T::WeightInfo::payout_stakers_alive_staked(0)), )?; let mut ledger = >::get(&controller).ok_or_else(|| Error::::NotController)?; - ledger.claimed_rewards.retain(|&x| x >= current_era.saturating_sub(history_depth)); + ledger + .claimed_rewards + .retain(|&x| x >= current_era.saturating_sub(history_depth)); match ledger.claimed_rewards.binary_search(&era) { - Ok(_) => Err( - Error::::AlreadyClaimed.with_weight(T::WeightInfo::payout_stakers_alive_staked(0)) - )?, + Ok(_) => Err(Error::::AlreadyClaimed + .with_weight(T::WeightInfo::payout_stakers_alive_staked(0)))?, Err(pos) => ledger.claimed_rewards.insert(pos, era), } @@ -2360,7 +2396,9 @@ impl Pallet { let era_reward_points = >::get(&era); let total_reward_points = era_reward_points.total; - let validator_reward_points = era_reward_points.individual.get(&ledger.stash) + let validator_reward_points = era_reward_points + .individual + .get(&ledger.stash) 
.map(|points| *points) .unwrap_or_else(|| Zero::zero()); @@ -2371,10 +2409,8 @@ impl Pallet { // This is the fraction of the total reward that the validator and the // nominators will get. - let validator_total_reward_part = Perbill::from_rational( - validator_reward_points, - total_reward_points, - ); + let validator_total_reward_part = + Perbill::from_rational(validator_reward_points, total_reward_points); // This is how much validator + nominators are entitled to. let validator_total_payout = validator_total_reward_part * era_payout; @@ -2386,17 +2422,13 @@ impl Pallet { let validator_leftover_payout = validator_total_payout - validator_commission_payout; // Now let's calculate how this is split to the validator. - let validator_exposure_part = Perbill::from_rational( - exposure.own, - exposure.total, - ); + let validator_exposure_part = Perbill::from_rational(exposure.own, exposure.total); let validator_staking_payout = validator_exposure_part * validator_leftover_payout; // We can now make total validator payout: - if let Some(imbalance) = Self::make_payout( - &ledger.stash, - validator_staking_payout + validator_commission_payout - ) { + if let Some(imbalance) = + Self::make_payout(&ledger.stash, validator_staking_payout + validator_commission_payout) + { Self::deposit_event(Event::::Reward(ledger.stash, imbalance.peek())); } @@ -2407,12 +2439,10 @@ impl Pallet { // Lets now calculate how this is split to the nominators. // Reward only the clipped exposures. Note this is not necessarily sorted. 
for nominator in exposure.others.iter() { - let nominator_exposure_part = Perbill::from_rational( - nominator.value, - exposure.total, - ); + let nominator_exposure_part = Perbill::from_rational(nominator.value, exposure.total); - let nominator_reward: BalanceOf = nominator_exposure_part * validator_leftover_payout; + let nominator_reward: BalanceOf = + nominator_exposure_part * validator_leftover_payout; // We can now make nominator payout: if let Some(imbalance) = Self::make_payout(&nominator.who, nominator_reward) { // Note: this logic does not count payouts for `RewardDestination::None`. @@ -2430,14 +2460,9 @@ impl Pallet { /// This will also update the stash lock. fn update_ledger( controller: &T::AccountId, - ledger: &StakingLedger> + ledger: &StakingLedger>, ) { - T::Currency::set_lock( - STAKING_ID, - &ledger.stash, - ledger.total, - WithdrawReasons::all(), - ); + T::Currency::set_lock(STAKING_ID, &ledger.stash, ledger.total, WithdrawReasons::all()); >::insert(controller, ledger); } @@ -2456,11 +2481,8 @@ impl Pallet { let dest = Self::payee(stash); match dest { RewardDestination::Controller => Self::bonded(stash) - .and_then(|controller| - Some(T::Currency::deposit_creating(&controller, amount)) - ), - RewardDestination::Stash => - T::Currency::deposit_into_existing(stash, amount).ok(), + .and_then(|controller| Some(T::Currency::deposit_creating(&controller, amount))), + RewardDestination::Stash => T::Currency::deposit_into_existing(stash, amount).ok(), RewardDestination::Staked => Self::bonded(stash) .and_then(|c| Self::ledger(&c).map(|l| (c, l))) .and_then(|(controller, mut l)| { @@ -2470,9 +2492,8 @@ impl Pallet { Self::update_ledger(&controller, &l); r }), - RewardDestination::Account(dest_account) => { - Some(T::Currency::deposit_creating(&dest_account, amount)) - }, + RewardDestination::Account(dest_account) => + Some(T::Currency::deposit_creating(&dest_account, amount)), RewardDestination::None => None, } } @@ -2487,8 +2508,8 @@ impl Pallet { 0 }); 
- let era_length = session_index.checked_sub(current_era_start_session_index) - .unwrap_or(0); // Must never happen. + let era_length = + session_index.checked_sub(current_era_start_session_index).unwrap_or(0); // Must never happen. match ForceEra::::get() { // Will be set to `NotForcing` again if a new era has been triggered. @@ -2506,8 +2527,8 @@ impl Pallet { // New era. let maybe_new_era_validators = Self::try_trigger_new_era(session_index, is_genesis); - if maybe_new_era_validators.is_some() - && matches!(ForceEra::::get(), Forcing::ForceNew) + if maybe_new_era_validators.is_some() && + matches!(ForceEra::::get(), Forcing::ForceNew) { ForceEra::::put(Forcing::NotForcing); } @@ -2576,9 +2597,8 @@ impl Pallet { let first_kept = active_era - bonding_duration; // Prune out everything that's from before the first-kept index. - let n_to_prune = bonded.iter() - .take_while(|&&(era_idx, _)| era_idx < first_kept) - .count(); + let n_to_prune = + bonded.iter().take_while(|&&(era_idx, _)| era_idx < first_kept).count(); // Kill slashing metadata. for (pruned_era, _) in bonded.drain(..n_to_prune) { @@ -2647,7 +2667,10 @@ impl Pallet { /// In case election result has more than [`MinimumValidatorCount`] validator trigger a new era. /// /// In case a new era is planned, the new validator set is returned. 
- fn try_trigger_new_era(start_session_index: SessionIndex, is_genesis: bool) -> Option> { + fn try_trigger_new_era( + start_session_index: SessionIndex, + is_genesis: bool, + ) -> Option> { let (election_result, weight) = if is_genesis { T::GenesisElectionProvider::elect().map_err(|e| { log!(warn, "genesis election provider failed due to {:?}", e); @@ -2687,7 +2710,7 @@ impl Pallet { CurrentEra::::put(0); ErasStartSessionIndex::::insert(&0, &start_session_index); }, - _ => () + _ => (), } Self::deposit_event(Event::StakingElectionFailed); @@ -2766,7 +2789,7 @@ impl Pallet { .map(|(nominator, weight)| (nominator, to_currency(weight))) .for_each(|(nominator, stake)| { if nominator == validator { - own = own.saturating_add(stake); + own = own.saturating_add(stake); } else { others.push(IndividualExposure { who: nominator, value: stake }); } @@ -2817,16 +2840,18 @@ impl Pallet { /// Apply previously-unapplied slashes on the beginning of a new era, after a delay. fn apply_unapplied_slashes(active_era: EraIndex) { let slash_defer_duration = T::SlashDeferDuration::get(); - ::EarliestUnappliedSlash::mutate(|earliest| if let Some(ref mut earliest) = earliest { - let keep_from = active_era.saturating_sub(slash_defer_duration); - for era in (*earliest)..keep_from { - let era_slashes = ::UnappliedSlashes::take(&era); - for slash in era_slashes { - slashing::apply_slash::(slash); + ::EarliestUnappliedSlash::mutate(|earliest| { + if let Some(ref mut earliest) = earliest { + let keep_from = active_era.saturating_sub(slash_defer_duration); + for era in (*earliest)..keep_from { + let era_slashes = ::UnappliedSlashes::take(&era); + for slash in era_slashes { + slashing::apply_slash::(slash); + } } - } - *earliest = (*earliest).max(keep_from) + *earliest = (*earliest).max(keep_from) + } }) } @@ -2841,9 +2866,7 @@ impl Pallet { /// relatively to their points. /// /// COMPLEXITY: Complexity is `number_of_validator_to_reward x current_elected_len`. 
- pub fn reward_by_ids( - validators_points: impl IntoIterator - ) { + pub fn reward_by_ids(validators_points: impl IntoIterator) { if let Some(active_era) = Self::active_era() { >::mutate(active_era.index, |era_rewards| { for (validator, points) in validators_points.into_iter() { @@ -2993,7 +3016,7 @@ impl frame_election_provider_support::ElectionDataProvider>::iter().count() as u32 == CounterForValidators::::get()); if maybe_max_len.map_or(false, |max_len| voter_count > max_len) { - return Err("Voter snapshot too big"); + return Err("Voter snapshot too big") } let slashing_span_count = >::iter().count(); @@ -3009,7 +3032,7 @@ impl frame_election_provider_support::ElectionDataProvider::get() as usize; if maybe_max_len.map_or(false, |max_len| target_count > max_len) { - return Err("Target snapshot too big"); + return Err("Target snapshot too big") } let weight = ::DbWeight::get().reads(target_count as u64); @@ -3066,10 +3089,7 @@ impl frame_election_provider_support::ElectionDataProvider historical::SessionManager Option>)>> { - >::new_session_genesis(new_index).map(|validators| { - let current_era = Self::current_era() - // Must be some as a new era has been created. - .unwrap_or(0); + >::new_session_genesis(new_index).map( + |validators| { + let current_era = Self::current_era() + // Must be some as a new era has been created. 
+ .unwrap_or(0); - validators.into_iter().map(|v| { - let exposure = Self::eras_stakers(current_era, &v); - (v, exposure) - }).collect() - }) + validators + .into_iter() + .map(|v| { + let exposure = Self::eras_stakers(current_era, &v); + (v, exposure) + }) + .collect() + }, + ) } fn start_session(start_index: SessionIndex) { >::start_session(start_index) @@ -3228,10 +3256,7 @@ where Self::reward_by_ids(vec![(author, 20)]) } fn note_uncle(author: T::AccountId, _age: T::BlockNumber) { - Self::reward_by_ids(vec![ - (>::author(), 2), - (author, 1) - ]) + Self::reward_by_ids(vec![(>::author(), 2), (author, 1)]) } } @@ -3374,15 +3399,14 @@ where let reward_cost = (2, 2); add_db_reads_writes( (1 + nominators_len) * slash_cost.0 + reward_cost.0 * reporters_len, - (1 + nominators_len) * slash_cost.1 + reward_cost.1 * reporters_len + (1 + nominators_len) * slash_cost.1 + reward_cost.1 * reporters_len, ); } } else { // Defer to end of some `slash_defer_duration` from now. - ::UnappliedSlashes::mutate( - active_era, - move |for_later| for_later.push(unapplied), - ); + ::UnappliedSlashes::mutate(active_era, move |for_later| { + for_later.push(unapplied) + }); add_db_reads_writes(1, 1); } } else { @@ -3414,9 +3438,7 @@ where if bonded_eras.first().filter(|(_, start)| offence_session >= *start).is_some() { R::report_offence(reporters, offence) } else { - >::deposit_event( - Event::::OldSlashingReportDiscarded(offence_session) - ); + >::deposit_event(Event::::OldSlashingReportDiscarded(offence_session)); Ok(()) } } diff --git a/frame/staking/src/mock.rs b/frame/staking/src/mock.rs index 3242a40ccd45e..d17076f4c36ff 100644 --- a/frame/staking/src/mock.rs +++ b/frame/staking/src/mock.rs @@ -17,8 +17,9 @@ //! 
Test utilities -use crate::*; use crate as staking; +use crate::*; +use frame_election_provider_support::onchain; use frame_support::{ assert_ok, parameter_types, traits::{Currency, FindAuthor, Get, OnInitialize, OneSessionHandler}, @@ -33,7 +34,6 @@ use sp_runtime::{ }; use sp_staking::offence::{OffenceDetails, OnOffenceHandler}; use std::{cell::RefCell, collections::HashSet}; -use frame_election_provider_support::onchain; pub const INIT_TIMESTAMP: u64 = 30_000; pub const BLOCK_TIME: u64 = 1000; @@ -54,16 +54,19 @@ impl OneSessionHandler for OtherSessionHandler { type Key = UintAuthorityId; fn on_genesis_session<'a, I: 'a>(_: I) - where I: Iterator, AccountId: 'a {} + where + I: Iterator, + AccountId: 'a, + { + } - fn on_new_session<'a, I: 'a>(_: bool, validators: I, _: I,) - where I: Iterator, AccountId: 'a + fn on_new_session<'a, I: 'a>(_: bool, validators: I, _: I) + where + I: Iterator, + AccountId: 'a, { SESSION.with(|x| { - *x.borrow_mut() = ( - validators.map(|x| x.0.clone()).collect(), - HashSet::new(), - ) + *x.borrow_mut() = (validators.map(|x| x.0.clone()).collect(), HashSet::new()) }); } @@ -107,7 +110,8 @@ frame_support::construct_runtime!( pub struct Author11; impl FindAuthor for Author11 { fn find_author<'a, I>(_digests: I) -> Option - where I: 'a + IntoIterator, + where + I: 'a + IntoIterator, { Some(11) } @@ -376,21 +380,14 @@ impl ExtBuilder { } fn build(self) -> sp_io::TestExternalities { sp_tracing::try_init_simple(); - let mut storage = frame_system::GenesisConfig::default() - .build_storage::() - .unwrap(); - let balance_factor = if ExistentialDeposit::get() > 1 { - 256 - } else { - 1 - }; + let mut storage = frame_system::GenesisConfig::default().build_storage::().unwrap(); + let balance_factor = if ExistentialDeposit::get() > 1 { 256 } else { 1 }; let num_validators = self.num_validators.unwrap_or(self.validator_count); // Check that the number of validators is sensible. 
assert!(num_validators <= 8); - let validators = (0..num_validators) - .map(|x| ((x + 1) * 10 + 1) as AccountId) - .collect::>(); + let validators = + (0..num_validators).map(|x| ((x + 1) * 10 + 1) as AccountId).collect::>(); let _ = pallet_balances::GenesisConfig:: { balances: vec![ @@ -419,7 +416,8 @@ impl ExtBuilder { // This allows us to have a total_payout different from 0. (999, 1_000_000_000_000), ], - }.assimilate_storage(&mut storage); + } + .assimilate_storage(&mut storage); let mut stakers = vec![]; if self.has_stakers { @@ -438,11 +436,11 @@ impl ExtBuilder { (31, 30, stake_31, StakerStatus::::Validator), (41, 40, balance_factor * 1000, status_41), // nominator - (101, 100, balance_factor * 500, StakerStatus::::Nominator(nominated)) + (101, 100, balance_factor * 500, StakerStatus::::Nominator(nominated)), ]; } - let _ = staking::GenesisConfig::{ - stakers: stakers, + let _ = staking::GenesisConfig:: { + stakers, validator_count: self.validator_count, minimum_validator_count: self.minimum_validator_count, invulnerables: self.invulnerables, @@ -454,12 +452,12 @@ impl ExtBuilder { .assimilate_storage(&mut storage); let _ = pallet_session::GenesisConfig:: { - keys: validators.iter().map(|x| ( - *x, - *x, - SessionKeys { other: UintAuthorityId(*x as u64) } - )).collect(), - }.assimilate_storage(&mut storage); + keys: validators + .iter() + .map(|x| (*x, *x, SessionKeys { other: UintAuthorityId(*x as u64) })) + .collect(), + } + .assimilate_storage(&mut storage); let mut ext = sp_io::TestExternalities::from(storage); ext.execute_with(|| { @@ -524,42 +522,46 @@ fn check_nominators() { // in if the nomination was submitted before the current era. 
let era = active_era(); >::iter() - .filter_map(|(nominator, nomination)| - if nomination.submitted_in > era { - Some(nominator) - } else { - None - }) + .filter_map( + |(nominator, nomination)| { + if nomination.submitted_in > era { + Some(nominator) + } else { + None + } + }, + ) .for_each(|nominator| { - // must be bonded. - assert_is_stash(nominator); - let mut sum = 0; - Session::validators() - .iter() - .map(|v| Staking::eras_stakers(era, v)) - .for_each(|e| { - let individual = e.others.iter().filter(|e| e.who == nominator).collect::>(); - let len = individual.len(); - match len { - 0 => { /* not supporting this validator at all. */ }, - 1 => sum += individual[0].value, - _ => panic!("nominator cannot back a validator more than once."), - }; - }); - - let nominator_stake = Staking::slashable_balance_of(&nominator); - // a nominator cannot over-spend. - assert!( - nominator_stake >= sum, - "failed: Nominator({}) stake({}) >= sum divided({})", - nominator, - nominator_stake, - sum, - ); + // must be bonded. + assert_is_stash(nominator); + let mut sum = 0; + Session::validators() + .iter() + .map(|v| Staking::eras_stakers(era, v)) + .for_each(|e| { + let individual = + e.others.iter().filter(|e| e.who == nominator).collect::>(); + let len = individual.len(); + match len { + 0 => { /* not supporting this validator at all. */ }, + 1 => sum += individual[0].value, + _ => panic!("nominator cannot back a validator more than once."), + }; + }); + + let nominator_stake = Staking::slashable_balance_of(&nominator); + // a nominator cannot over-spend. 
+ assert!( + nominator_stake >= sum, + "failed: Nominator({}) stake({}) >= sum divided({})", + nominator, + nominator_stake, + sum, + ); - let diff = nominator_stake - sum; - assert!(diff < 100); - }); + let diff = nominator_stake - sum; + assert!(diff < 100); + }); } fn assert_is_stash(acc: AccountId) { @@ -569,10 +571,7 @@ fn assert_is_stash(acc: AccountId) { fn assert_ledger_consistent(ctrl: AccountId) { // ensures ledger.total == ledger.active + sum(ledger.unlocking). let ledger = Staking::ledger(ctrl).expect("Not a controller."); - let real_total: Balance = ledger - .unlocking - .iter() - .fold(ledger.active, |a, c| a + c.value); + let real_total: Balance = ledger.unlocking.iter().fold(ledger.active, |a, c| a + c.value); assert_eq!(real_total, ledger.total); assert!( ledger.active >= Balances::minimum_balance() || ledger.active == 0, @@ -594,16 +593,8 @@ pub(crate) fn current_era() -> EraIndex { pub(crate) fn bond_validator(stash: AccountId, ctrl: AccountId, val: Balance) { let _ = Balances::make_free_balance_be(&stash, val); let _ = Balances::make_free_balance_be(&ctrl, val); - assert_ok!(Staking::bond( - Origin::signed(stash), - ctrl, - val, - RewardDestination::Controller, - )); - assert_ok!(Staking::validate( - Origin::signed(ctrl), - ValidatorPrefs::default() - )); + assert_ok!(Staking::bond(Origin::signed(stash), ctrl, val, RewardDestination::Controller,)); + assert_ok!(Staking::validate(Origin::signed(ctrl), ValidatorPrefs::default())); } pub(crate) fn bond_nominator( @@ -614,12 +605,7 @@ pub(crate) fn bond_nominator( ) { let _ = Balances::make_free_balance_be(&stash, val); let _ = Balances::make_free_balance_be(&ctrl, val); - assert_ok!(Staking::bond( - Origin::signed(stash), - ctrl, - val, - RewardDestination::Controller, - )); + assert_ok!(Staking::bond(Origin::signed(stash), ctrl, val, RewardDestination::Controller,)); assert_ok!(Staking::nominate(Origin::signed(ctrl), target)); } @@ -715,9 +701,7 @@ pub(crate) fn reward_time_per_era() -> u64 { } 
pub(crate) fn reward_all_elected() { - let rewards = ::SessionInterface::validators() - .into_iter() - .map(|v| (v, 1)); + let rewards = ::SessionInterface::validators().into_iter().map(|v| (v, 1)); >::reward_by_ids(rewards) } @@ -741,26 +725,28 @@ pub(crate) fn on_offence_in_era( for &(bonded_era, start_session) in bonded_eras.iter() { if bonded_era == era { let _ = Staking::on_offence(offenders, slash_fraction, start_session); - return; + return } else if bonded_era > era { - break; + break } } if Staking::active_era().unwrap().index == era { - let _ = - Staking::on_offence( - offenders, - slash_fraction, - Staking::eras_start_session_index(era).unwrap() - ); + let _ = Staking::on_offence( + offenders, + slash_fraction, + Staking::eras_start_session_index(era).unwrap(), + ); } else { panic!("cannot slash in era {}", era); } } pub(crate) fn on_offence_now( - offenders: &[OffenceDetails>], + offenders: &[OffenceDetails< + AccountId, + pallet_session::historical::IdentificationTuple, + >], slash_fraction: &[Perbill], ) { let now = Staking::active_era().unwrap().index; @@ -769,29 +755,26 @@ pub(crate) fn on_offence_now( pub(crate) fn add_slash(who: &AccountId) { on_offence_now( - &[ - OffenceDetails { - offender: (who.clone(), Staking::eras_stakers(active_era(), who.clone())), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (who.clone(), Staking::eras_stakers(active_era(), who.clone())), + reporters: vec![], + }], &[Perbill::from_percent(10)], ); } /// Make all validator and nominator request their payment pub(crate) fn make_all_reward_payment(era: EraIndex) { - let validators_with_reward = - ErasRewardPoints::::get(era).individual.keys().cloned().collect::>(); + let validators_with_reward = ErasRewardPoints::::get(era) + .individual + .keys() + .cloned() + .collect::>(); // reward validators for validator_controller in validators_with_reward.iter().filter_map(Staking::bonded) { let ledger = >::get(&validator_controller).unwrap(); - 
assert_ok!(Staking::payout_stakers( - Origin::signed(1337), - ledger.stash, - era - )); + assert_ok!(Staking::payout_stakers(Origin::signed(1337), ledger.stash, era)); } } @@ -816,13 +799,11 @@ macro_rules! assert_session_era { } pub(crate) fn staking_events() -> Vec> { - System::events().into_iter().map(|r| r.event).filter_map(|e| { - if let Event::Staking(inner) = e { - Some(inner) - } else { - None - } - }).collect() + System::events() + .into_iter() + .map(|r| r.event) + .filter_map(|e| if let Event::Staking(inner) = e { Some(inner) } else { None }) + .collect() } pub(crate) fn balances(who: &AccountId) -> (Balance, Balance) { diff --git a/frame/staking/src/slashing.rs b/frame/staking/src/slashing.rs index e81d59c27c81b..c9bebeae97377 100644 --- a/frame/staking/src/slashing.rs +++ b/frame/staking/src/slashing.rs @@ -50,17 +50,20 @@ //! Based on research at use super::{ - EraIndex, Config, Pallet, Store, BalanceOf, Exposure, Perbill, SessionInterface, - NegativeImbalanceOf, UnappliedSlash, Error, + BalanceOf, Config, EraIndex, Error, Exposure, NegativeImbalanceOf, Pallet, Perbill, + SessionInterface, Store, UnappliedSlash, }; -use sp_runtime::{traits::{Zero, Saturating}, RuntimeDebug, DispatchResult}; +use codec::{Decode, Encode}; use frame_support::{ ensure, - traits::{Currency, OnUnbalanced, Imbalance}, + traits::{Currency, Imbalance, OnUnbalanced}, }; -use sp_std::vec::Vec; -use codec::{Encode, Decode}; use scale_info::TypeInfo; +use sp_runtime::{ + traits::{Saturating, Zero}, + DispatchResult, RuntimeDebug, +}; +use sp_std::vec::Vec; /// The proportion of the slashing reward to be paid out on the first slashing detection. /// This is f_1 in the paper. @@ -119,7 +122,9 @@ impl SlashingSpans { // that internal state is unchanged. 
pub(crate) fn end_span(&mut self, now: EraIndex) -> bool { let next_start = now + 1; - if next_start <= self.last_start { return false } + if next_start <= self.last_start { + return false + } let last_length = next_start - self.last_start; self.prior.insert(0, last_length); @@ -154,7 +159,8 @@ impl SlashingSpans { // If this returns `Some`, then it includes a range start..end of all the span // indices which were pruned. fn prune(&mut self, window_start: EraIndex) -> Option<(SpanIndex, SpanIndex)> { - let old_idx = self.iter() + let old_idx = self + .iter() .skip(1) // skip ongoing span. .position(|span| span.length.map_or(false, |len| span.start + len <= window_start)); @@ -164,7 +170,7 @@ impl SlashingSpans { self.prior.truncate(o); let new_earliest = self.span_index - self.prior.len() as SpanIndex; Some((earliest_span_index, new_earliest)) - } + }, None => None, }; @@ -215,18 +221,11 @@ pub(crate) struct SlashParams<'a, T: 'a + Config> { /// /// The pending slash record returned does not have initialized reporters. Those have /// to be set at a higher level, if any. -pub(crate) fn compute_slash(params: SlashParams) - -> Option>> -{ - let SlashParams { - stash, - slash, - exposure, - slash_era, - window_start, - now, - reward_proportion, - } = params.clone(); +pub(crate) fn compute_slash( + params: SlashParams, +) -> Option>> { + let SlashParams { stash, slash, exposure, slash_era, window_start, now, reward_proportion } = + params.clone(); let mut reward_payout = Zero::zero(); let mut val_slashed = Zero::zero(); @@ -237,22 +236,17 @@ pub(crate) fn compute_slash(params: SlashParams) // kick out the validator even if they won't be slashed, // as long as the misbehavior is from their most recent slashing span. 
kick_out_if_recent::(params); - return None; + return None } - let (prior_slash_p, _era_slash) = as Store>::ValidatorSlashInEra::get( - &slash_era, - stash, - ).unwrap_or((Perbill::zero(), Zero::zero())); + let (prior_slash_p, _era_slash) = + as Store>::ValidatorSlashInEra::get(&slash_era, stash) + .unwrap_or((Perbill::zero(), Zero::zero())); // compare slash proportions rather than slash values to avoid issues due to rounding // error. if slash.deconstruct() > prior_slash_p.deconstruct() { - as Store>::ValidatorSlashInEra::insert( - &slash_era, - stash, - &(slash, own_slash), - ); + as Store>::ValidatorSlashInEra::insert(&slash_era, stash, &(slash, own_slash)); } else { // we slash based on the max in era - this new event is not the max, // so neither the validator or any nominators will need an update. @@ -261,7 +255,7 @@ pub(crate) fn compute_slash(params: SlashParams) // pays out some reward even if the latest report is not max-in-era. // we opt to avoid the nominator lookups and edits and leave more rewards // for more drastic misbehavior. - return None; + return None } // apply slash to validator. @@ -274,10 +268,7 @@ pub(crate) fn compute_slash(params: SlashParams) reward_proportion, ); - let target_span = spans.compare_and_update_span_slash( - slash_era, - own_slash, - ); + let target_span = spans.compare_and_update_span_slash(slash_era, own_slash); if target_span == Some(spans.span_index()) { // misbehavior occurred within the current slashing span - take appropriate @@ -310,9 +301,7 @@ pub(crate) fn compute_slash(params: SlashParams) // doesn't apply any slash, but kicks out the validator if the misbehavior is from // the most recent slashing span. -fn kick_out_if_recent( - params: SlashParams, -) { +fn kick_out_if_recent(params: SlashParams) { // these are not updated by era-span or end-span. 
let mut reward_payout = Zero::zero(); let mut val_slashed = Zero::zero(); @@ -344,15 +333,8 @@ fn slash_nominators( prior_slash_p: Perbill, nominators_slashed: &mut Vec<(T::AccountId, BalanceOf)>, ) -> BalanceOf { - let SlashParams { - stash: _, - slash, - exposure, - slash_era, - window_start, - now, - reward_proportion, - } = params; + let SlashParams { stash: _, slash, exposure, slash_era, window_start, now, reward_proportion } = + params; let mut reward_payout = Zero::zero(); @@ -368,18 +350,12 @@ fn slash_nominators( let own_slash_by_validator = slash * nominator.value; let own_slash_difference = own_slash_by_validator.saturating_sub(own_slash_prior); - let mut era_slash = as Store>::NominatorSlashInEra::get( - &slash_era, - stash, - ).unwrap_or_else(|| Zero::zero()); + let mut era_slash = as Store>::NominatorSlashInEra::get(&slash_era, stash) + .unwrap_or_else(|| Zero::zero()); era_slash += own_slash_difference; - as Store>::NominatorSlashInEra::insert( - &slash_era, - stash, - &era_slash, - ); + as Store>::NominatorSlashInEra::insert(&slash_era, stash, &era_slash); era_slash }; @@ -394,10 +370,7 @@ fn slash_nominators( reward_proportion, ); - let target_span = spans.compare_and_update_span_slash( - slash_era, - era_slash, - ); + let target_span = spans.compare_and_update_span_slash(slash_era, era_slash); if target_span == Some(spans.span_index()) { // End the span, but don't chill the nominator. its nomination @@ -498,8 +471,8 @@ impl<'a, T: 'a + Config> InspectingSpans<'a, T> { span_record.slashed = slash; // compute reward. 
- let reward = REWARD_F1 - * (self.reward_proportion * slash).saturating_sub(span_record.paid_out); + let reward = + REWARD_F1 * (self.reward_proportion * slash).saturating_sub(span_record.paid_out); self.add_slash(difference, slash_era); changed = true; @@ -530,7 +503,9 @@ impl<'a, T: 'a + Config> InspectingSpans<'a, T> { impl<'a, T: 'a + Config> Drop for InspectingSpans<'a, T> { fn drop(&mut self) { // only update on disk if we slashed this account. - if !self.dirty { return } + if !self.dirty { + return + } if let Some((start, end)) = self.spans.prune(self.window_start) { for span_index in start..end { @@ -558,7 +533,10 @@ pub(crate) fn clear_stash_metadata( Some(s) => s, }; - ensure!(num_slashing_spans as usize >= spans.iter().count(), Error::::IncorrectSlashingSpans); + ensure!( + num_slashing_spans as usize >= spans.iter().count(), + Error::::IncorrectSlashingSpans + ); as Store>::SlashingSpans::remove(stash); @@ -607,9 +585,7 @@ pub fn do_slash( >::update_ledger(&controller, &ledger); // trigger the event - >::deposit_event( - super::Event::::Slash(stash.clone(), value) - ); + >::deposit_event(super::Event::::Slash(stash.clone(), value)); } } @@ -626,18 +602,12 @@ pub(crate) fn apply_slash(unapplied_slash: UnappliedSlash( - &nominator, - nominator_slash, - &mut reward_payout, - &mut slashed_imbalance, - ); + do_slash::(&nominator, nominator_slash, &mut reward_payout, &mut slashed_imbalance); } pay_reporters::(reward_payout, slashed_imbalance, &unapplied_slash.reporters); } - /// Apply a reward payout to some reporters, paying the rewards out of the slashed imbalance. 
fn pay_reporters( reward_payout: BalanceOf, @@ -775,17 +745,13 @@ mod tests { assert_eq!(spans.prune(1000), Some((8, 10))); assert_eq!( spans.iter().collect::>(), - vec![ - SlashingSpan { index: 10, start: 1000, length: None }, - ], + vec![SlashingSpan { index: 10, start: 1000, length: None },], ); assert_eq!(spans.prune(2000), None); assert_eq!( spans.iter().collect::>(), - vec![ - SlashingSpan { index: 10, start: 2000, length: None }, - ], + vec![SlashingSpan { index: 10, start: 2000, length: None },], ); // now all in one shot. @@ -798,9 +764,7 @@ mod tests { assert_eq!(spans.prune(2000), Some((6, 10))); assert_eq!( spans.iter().collect::>(), - vec![ - SlashingSpan { index: 10, start: 2000, length: None }, - ], + vec![SlashingSpan { index: 10, start: 2000, length: None },], ); } diff --git a/frame/staking/src/testing_utils.rs b/frame/staking/src/testing_utils.rs index 18b77d59b3e2e..0d9ae2c8e41a4 100644 --- a/frame/staking/src/testing_utils.rs +++ b/frame/staking/src/testing_utils.rs @@ -18,12 +18,14 @@ //! Testing utils for staking. Provides some common functions to setup staking state, such as //! bonding validators, nominators, and generating different types of solutions. 
-use crate::*; -use crate::Pallet as Staking; +use crate::{Pallet as Staking, *}; use frame_benchmarking::account; use frame_system::RawOrigin; +use rand_chacha::{ + rand_core::{RngCore, SeedableRng}, + ChaChaRng, +}; use sp_io::hashing::blake2_256; -use rand_chacha::{rand_core::{RngCore, SeedableRng}, ChaChaRng}; const SEED: u32 = 0; @@ -54,14 +56,18 @@ pub fn create_stash_controller( n: u32, balance_factor: u32, destination: RewardDestination, -) - -> Result<(T::AccountId, T::AccountId), &'static str> -{ +) -> Result<(T::AccountId, T::AccountId), &'static str> { let stash = create_funded_user::("stash", n, balance_factor); let controller = create_funded_user::("controller", n, balance_factor); - let controller_lookup: ::Source = T::Lookup::unlookup(controller.clone()); + let controller_lookup: ::Source = + T::Lookup::unlookup(controller.clone()); let amount = T::Currency::minimum_balance() * (balance_factor / 10).max(1).into(); - Staking::::bond(RawOrigin::Signed(stash.clone()).into(), controller_lookup, amount, destination)?; + Staking::::bond( + RawOrigin::Signed(stash.clone()).into(), + controller_lookup, + amount, + destination, + )?; return Ok((stash, controller)) } @@ -71,15 +77,19 @@ pub fn create_stash_and_dead_controller( n: u32, balance_factor: u32, destination: RewardDestination, -) - -> Result<(T::AccountId, T::AccountId), &'static str> -{ +) -> Result<(T::AccountId, T::AccountId), &'static str> { let stash = create_funded_user::("stash", n, balance_factor); // controller has no funds let controller = create_funded_user::("controller", n, 0); - let controller_lookup: ::Source = T::Lookup::unlookup(controller.clone()); + let controller_lookup: ::Source = + T::Lookup::unlookup(controller.clone()); let amount = T::Currency::minimum_balance() * (balance_factor / 10).max(1).into(); - Staking::::bond(RawOrigin::Signed(stash.clone()).into(), controller_lookup, amount, destination)?; + Staking::::bond( + RawOrigin::Signed(stash.clone()).into(), + 
controller_lookup, + amount, + destination, + )?; return Ok((stash, controller)) } @@ -89,12 +99,11 @@ pub fn create_validators( balance_factor: u32, ) -> Result::Source>, &'static str> { let mut validators: Vec<::Source> = Vec::with_capacity(max as usize); - for i in 0 .. max { - let (stash, controller) = create_stash_controller::(i, balance_factor, RewardDestination::Staked)?; - let validator_prefs = ValidatorPrefs { - commission: Perbill::from_percent(50), - .. Default::default() - }; + for i in 0..max { + let (stash, controller) = + create_stash_controller::(i, balance_factor, RewardDestination::Staked)?; + let validator_prefs = + ValidatorPrefs { commission: Perbill::from_percent(50), ..Default::default() }; Staking::::validate(RawOrigin::Signed(controller).into(), validator_prefs)?; let stash_lookup: ::Source = T::Lookup::unlookup(stash); validators.push(stash_lookup); @@ -126,20 +135,20 @@ pub fn create_validators_with_nominators_for_era( ) -> Result::Source>, &'static str> { clear_validators_and_nominators::(); - let mut validators_stash: Vec<::Source> - = Vec::with_capacity(validators as usize); + let mut validators_stash: Vec<::Source> = + Vec::with_capacity(validators as usize); let mut rng = ChaChaRng::from_seed(SEED.using_encoded(blake2_256)); // Create validators - for i in 0 .. validators { + for i in 0..validators { let balance_factor = if randomize_stake { rng.next_u32() % 255 + 10 } else { 100u32 }; - let (v_stash, v_controller) = create_stash_controller::(i, balance_factor, RewardDestination::Staked)?; - let validator_prefs = ValidatorPrefs { - commission: Perbill::from_percent(50), - .. 
Default::default() - }; + let (v_stash, v_controller) = + create_stash_controller::(i, balance_factor, RewardDestination::Staked)?; + let validator_prefs = + ValidatorPrefs { commission: Perbill::from_percent(50), ..Default::default() }; Staking::::validate(RawOrigin::Signed(v_controller.clone()).into(), validator_prefs)?; - let stash_lookup: ::Source = T::Lookup::unlookup(v_stash.clone()); + let stash_lookup: ::Source = + T::Lookup::unlookup(v_stash.clone()); validators_stash.push(stash_lookup.clone()); } @@ -147,25 +156,25 @@ pub fn create_validators_with_nominators_for_era( let validator_chosen = validators_stash[0..to_nominate].to_vec(); // Create nominators - for j in 0 .. nominators { + for j in 0..nominators { let balance_factor = if randomize_stake { rng.next_u32() % 255 + 10 } else { 100u32 }; - let (_n_stash, n_controller) = create_stash_controller::( - u32::MAX - j, - balance_factor, - RewardDestination::Staked, - )?; + let (_n_stash, n_controller) = + create_stash_controller::(u32::MAX - j, balance_factor, RewardDestination::Staked)?; // Have them randomly validate let mut available_validators = validator_chosen.clone(); let mut selected_validators: Vec<::Source> = Vec::with_capacity(edge_per_nominator); - for _ in 0 .. validators.min(edge_per_nominator as u32) { + for _ in 0..validators.min(edge_per_nominator as u32) { let selected = rng.next_u32() as usize % available_validators.len(); let validator = available_validators.remove(selected); selected_validators.push(validator); } - Staking::::nominate(RawOrigin::Signed(n_controller.clone()).into(), selected_validators)?; + Staking::::nominate( + RawOrigin::Signed(n_controller.clone()).into(), + selected_validators, + )?; } ValidatorCount::::put(validators); diff --git a/frame/staking/src/tests.rs b/frame/staking/src/tests.rs index 0a71681fe7fd3..152573b52df93 100644 --- a/frame/staking/src/tests.rs +++ b/frame/staking/src/tests.rs @@ -17,21 +17,21 @@ //! Tests for the module. 
-use super::{*, Event}; +use super::{Event, *}; +use frame_election_provider_support::Support; +use frame_support::{ + assert_noop, assert_ok, + traits::{Currency, OnInitialize, ReservableCurrency}, + weights::{extract_actual_weight, GetDispatchInfo}, +}; use mock::*; +use pallet_balances::Error as BalancesError; use sp_runtime::{ assert_eq_error_rate, traits::{BadOrigin, Dispatchable}, }; use sp_staking::offence::OffenceDetails; -use frame_support::{ - assert_ok, assert_noop, - traits::{Currency, ReservableCurrency, OnInitialize}, - weights::{extract_actual_weight, GetDispatchInfo}, -}; -use pallet_balances::Error as BalancesError; use substrate_test_utils::assert_eq_uvec; -use frame_election_provider_support::Support; #[test] fn force_unstake_works() { @@ -48,7 +48,10 @@ fn force_unstake_works() { // Force unstake requires root. assert_noop!(Staking::force_unstake(Origin::signed(11), 11, 2), BadOrigin); // Force unstake needs correct number of slashing spans (for weight calculation) - assert_noop!(Staking::force_unstake(Origin::root(), 11, 0), Error::::IncorrectSlashingSpans); + assert_noop!( + Staking::force_unstake(Origin::root(), 11, 0), + Error::::IncorrectSlashingSpans + ); // We now force them to unstake assert_ok!(Staking::force_unstake(Origin::root(), 11, 2)); // No longer bonded. 
@@ -90,26 +93,47 @@ fn basic_setup_works() { // Account 10 controls the stash from account 11, which is 100 * balance_factor units assert_eq!( Staking::ledger(&10), - Some(StakingLedger { stash: 11, total: 1000, active: 1000, unlocking: vec![], claimed_rewards: vec![] }) + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![] + }) ); // Account 20 controls the stash from account 21, which is 200 * balance_factor units assert_eq!( Staking::ledger(&20), - Some(StakingLedger { stash: 21, total: 1000, active: 1000, unlocking: vec![], claimed_rewards: vec![] }) + Some(StakingLedger { + stash: 21, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![] + }) ); // Account 1 does not control any stash assert_eq!(Staking::ledger(&1), None); // ValidatorPrefs are default - assert_eq_uvec!(>::iter().collect::>(), vec![ - (31, ValidatorPrefs::default()), - (21, ValidatorPrefs::default()), - (11, ValidatorPrefs::default()) - ]); + assert_eq_uvec!( + >::iter().collect::>(), + vec![ + (31, ValidatorPrefs::default()), + (21, ValidatorPrefs::default()), + (11, ValidatorPrefs::default()) + ] + ); assert_eq!( Staking::ledger(100), - Some(StakingLedger { stash: 101, total: 500, active: 500, unlocking: vec![], claimed_rewards: vec![] }) + Some(StakingLedger { + stash: 101, + total: 500, + active: 500, + unlocking: vec![], + claimed_rewards: vec![] + }) ); assert_eq!(Staking::nominators(101).unwrap().targets, vec![11, 21]); @@ -118,7 +142,7 @@ fn basic_setup_works() { Exposure { total: 1125, own: 1000, - others: vec![ IndividualExposure { who: 101, value: 125 }] + others: vec![IndividualExposure { who: 101, value: 125 }] }, ); assert_eq!( @@ -126,14 +150,13 @@ fn basic_setup_works() { Exposure { total: 1375, own: 1000, - others: vec![ IndividualExposure { who: 101, value: 375 }] + others: vec![IndividualExposure { who: 101, value: 375 }] }, ); // initial total stake = 1125 + 1375 
assert_eq!(Staking::eras_total_stake(Staking::active_era().unwrap().index), 2500); - // The number of validators required. assert_eq!(Staking::validator_count(), 2); @@ -245,9 +268,9 @@ fn rewards_should_work() { assert_eq_error_rate!(Balances::total_balance(&21), init_balance_21, 2,); assert_eq_error_rate!( Balances::total_balance(&100), - init_balance_100 - + part_for_100_from_10 * total_payout_0 * 2/3 - + part_for_100_from_20 * total_payout_0 * 1/3, + init_balance_100 + + part_for_100_from_10 * total_payout_0 * 2 / 3 + + part_for_100_from_20 * total_payout_0 * 1 / 3, 2 ); assert_eq_error_rate!(Balances::total_balance(&101), init_balance_101, 2); @@ -283,9 +306,9 @@ fn rewards_should_work() { assert_eq_error_rate!(Balances::total_balance(&21), init_balance_21, 2,); assert_eq_error_rate!( Balances::total_balance(&100), - init_balance_100 - + part_for_100_from_10 * (total_payout_0 * 2/3 + total_payout_1) - + part_for_100_from_20 * total_payout_0 * 1/3, + init_balance_100 + + part_for_100_from_10 * (total_payout_0 * 2 / 3 + total_payout_1) + + part_for_100_from_20 * total_payout_0 * 1 / 3, 2 ); assert_eq_error_rate!(Balances::total_balance(&101), init_balance_101, 2); @@ -302,7 +325,9 @@ fn staking_should_work() { assert_eq_uvec!(validator_controllers(), vec![20, 10]); // put some money in account that we'll use. - for i in 1..5 { let _ = Balances::make_free_balance_be(&i, 2000); } + for i in 1..5 { + let _ = Balances::make_free_balance_be(&i, 2000); + } // --- Block 2: start_session(2); @@ -319,7 +344,6 @@ fn staking_should_work() { // No effects will be seen so far. Era has not been yet triggered. assert_eq_uvec!(validator_controllers(), vec![20, 10]); - // --- Block 4: the validators will now be queued. 
start_session(4); assert_eq!(Staking::active_era().unwrap().index, 1); @@ -375,7 +399,10 @@ fn blocking_and_kicking_works() { .num_validators(3) .build_and_execute(|| { // block validator 10/11 - assert_ok!(Staking::validate(Origin::signed(10), ValidatorPrefs { blocked: true, .. Default::default() })); + assert_ok!(Staking::validate( + Origin::signed(10), + ValidatorPrefs { blocked: true, ..Default::default() } + )); // attempt to nominate from 100/101... assert_ok!(Staking::nominate(Origin::signed(100), vec![11])); // should have worked since we're already nominated them @@ -385,7 +412,10 @@ fn blocking_and_kicking_works() { // should have been kicked now assert!(Nominators::::get(&101).unwrap().targets.is_empty()); // attempt to nominate from 100/101... - assert_noop!(Staking::nominate(Origin::signed(100), vec![11]), Error::::BadTarget); + assert_noop!( + Staking::nominate(Origin::signed(100), vec![11]), + Error::::BadTarget + ); }); } @@ -408,10 +438,8 @@ fn less_than_needed_candidates_works() { // But the exposure is updated in a simple way. No external votes exists. // This is purely self-vote. - assert!( - ErasStakers::::iter_prefix_values(Staking::active_era().unwrap().index) - .all(|exposure| exposure.others.is_empty()) - ); + assert!(ErasStakers::::iter_prefix_values(Staking::active_era().unwrap().index) + .all(|exposure| exposure.others.is_empty())); }); } @@ -426,7 +454,7 @@ fn no_candidate_emergency_condition() { .build_and_execute(|| { // initial validators assert_eq_uvec!(validator_controllers(), vec![10, 20, 30, 40]); - let prefs = ValidatorPrefs { commission: Perbill::one(), .. Default::default() }; + let prefs = ValidatorPrefs { commission: Perbill::one(), ..Default::default() }; ::Validators::insert(11, prefs.clone()); // set the minimum validator count. 
@@ -440,10 +468,7 @@ fn no_candidate_emergency_condition() { // try trigger new era mock::run_to_block(20); - assert_eq!( - *staking_events().last().unwrap(), - Event::StakingElectionFailed, - ); + assert_eq!(*staking_events().last().unwrap(), Event::StakingElectionFailed,); // No new era is created assert_eq!(current_era, CurrentEra::::get()); @@ -506,7 +531,11 @@ fn nominating_and_rewards_should_work() { // ------ check the staked value of all parties. // 30 and 40 are not chosen anymore - assert_eq!(ErasStakers::::iter_prefix_values(Staking::active_era().unwrap().index).count(), 2); + assert_eq!( + ErasStakers::::iter_prefix_values(Staking::active_era().unwrap().index) + .count(), + 2 + ); assert_eq!( Staking::eras_stakers(Staking::active_era().unwrap().index, 11), Exposure { @@ -578,10 +607,7 @@ fn nominators_also_get_slashed_pro_rata() { let slash_percent = Perbill::from_percent(5); let initial_exposure = Staking::eras_stakers(active_era(), 11); // 101 is a nominator for 11 - assert_eq!( - initial_exposure.others.first().unwrap().who, - 101, - ); + assert_eq!(initial_exposure.others.first().unwrap().who, 101,); // staked values; let nominator_stake = Staking::ledger(100).unwrap().active; @@ -594,13 +620,7 @@ fn nominators_also_get_slashed_pro_rata() { // 11 goes offline on_offence_now( - &[OffenceDetails { - offender: ( - 11, - initial_exposure.clone(), - ), - reporters: vec![], - }], + &[OffenceDetails { offender: (11, initial_exposure.clone()), reporters: vec![] }], &[slash_percent], ); @@ -611,24 +631,16 @@ fn nominators_also_get_slashed_pro_rata() { let slash_amount = slash_percent * exposed_stake; let validator_share = Perbill::from_rational(exposed_validator, exposed_stake) * slash_amount; - let nominator_share = Perbill::from_rational( - exposed_nominator, - exposed_stake, - ) * slash_amount; + let nominator_share = + Perbill::from_rational(exposed_nominator, exposed_stake) * slash_amount; // both slash amounts need to be positive for the test to make 
sense. assert!(validator_share > 0); assert!(nominator_share > 0); // both stakes must have been decreased pro-rata. - assert_eq!( - Staking::ledger(100).unwrap().active, - nominator_stake - nominator_share, - ); - assert_eq!( - Staking::ledger(10).unwrap().active, - validator_stake - validator_share, - ); + assert_eq!(Staking::ledger(100).unwrap().active, nominator_stake - nominator_share,); + assert_eq!(Staking::ledger(10).unwrap().active, validator_stake - validator_share,); assert_eq!( balances(&101).0, // free balance nominator_balance - nominator_share, @@ -651,14 +663,16 @@ fn double_staking_should_fail() { ExtBuilder::default().build_and_execute(|| { let arbitrary_value = 5; // 2 = controller, 1 stashed => ok - assert_ok!( - Staking::bond(Origin::signed(1), 2, arbitrary_value, - RewardDestination::default()) - ); + assert_ok!(Staking::bond( + Origin::signed(1), + 2, + arbitrary_value, + RewardDestination::default() + )); // 4 = not used so far, 1 stashed => not allowed. assert_noop!( - Staking::bond(Origin::signed(1), 4, arbitrary_value, - RewardDestination::default()), Error::::AlreadyBonded, + Staking::bond(Origin::signed(1), 4, arbitrary_value, RewardDestination::default()), + Error::::AlreadyBonded, ); // 1 = stashed => attempting to nominate should fail. 
assert_noop!(Staking::nominate(Origin::signed(1), vec![1]), Error::::NotController); @@ -833,7 +847,6 @@ fn forcing_new_era_works() { start_session(15); assert_eq!(active_era(), 6); - }); } @@ -892,10 +905,7 @@ fn cannot_reserve_staked_balance() { // Confirm account 11 (via controller 10) is totally staked assert_eq!(Staking::eras_stakers(Staking::active_era().unwrap().index, 11).own, 1000); // Confirm account 11 cannot reserve as a result - assert_noop!( - Balances::reserve(&11, 1), - BalancesError::::LiquidityRestrictions, - ); + assert_noop!(Balances::reserve(&11, 1), BalancesError::::LiquidityRestrictions,); // Give account 11 extra free balance let _ = Balances::make_free_balance_be(&11, 10000); @@ -915,13 +925,16 @@ fn reward_destination_works() { // Check the balance of the stash account assert_eq!(Balances::free_balance(11), 1000); // Check how much is at stake - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000, - active: 1000, - unlocking: vec![], - claimed_rewards: vec![], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); // Compute total payout now for whole duration as other parameter won't change let total_payout_0 = current_total_payout_for_duration(reward_time_per_era()); @@ -935,13 +948,16 @@ fn reward_destination_works() { // Check that reward went to the stash account of validator assert_eq!(Balances::free_balance(11), 1000 + total_payout_0); // Check that amount at stake increased accordingly - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000 + total_payout_0, - active: 1000 + total_payout_0, - unlocking: vec![], - claimed_rewards: vec![0], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000 + total_payout_0, + active: 1000 + total_payout_0, + unlocking: vec![], + claimed_rewards: vec![0], + }) + ); //Change 
RewardDestination to Stash >::insert(&11, RewardDestination::Stash); @@ -960,13 +976,16 @@ fn reward_destination_works() { // Record this value let recorded_stash_balance = 1000 + total_payout_0 + total_payout_1; // Check that amount at stake is NOT increased - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000 + total_payout_0, - active: 1000 + total_payout_0, - unlocking: vec![], - claimed_rewards: vec![0,1], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000 + total_payout_0, + active: 1000 + total_payout_0, + unlocking: vec![], + claimed_rewards: vec![0, 1], + }) + ); // Change RewardDestination to Controller >::insert(&11, RewardDestination::Controller); @@ -986,13 +1005,16 @@ fn reward_destination_works() { // Check that reward went to the controller account assert_eq!(Balances::free_balance(10), 1 + total_payout_2); // Check that amount at stake is NOT increased - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000 + total_payout_0, - active: 1000 + total_payout_0, - unlocking: vec![], - claimed_rewards: vec![0,1,2], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000 + total_payout_0, + active: 1000 + total_payout_0, + unlocking: vec![], + claimed_rewards: vec![0, 1, 2], + }) + ); // Check that amount in staked account is NOT increased. assert_eq!(Balances::free_balance(11), recorded_stash_balance); }); @@ -1005,10 +1027,10 @@ fn validator_payment_prefs_work() { // This test will focus on validator payment. ExtBuilder::default().build_and_execute(|| { let commission = Perbill::from_percent(40); - >::insert(&11, ValidatorPrefs { - commission: commission.clone(), - .. Default::default() - }); + >::insert( + &11, + ValidatorPrefs { commission: commission.clone(), ..Default::default() }, + ); // Reward controller so staked ratio doesn't change. 
>::insert(&11, RewardDestination::Controller); @@ -1035,7 +1057,6 @@ fn validator_payment_prefs_work() { assert_eq_error_rate!(Balances::total_balance(&10), balance_era_1_10 + reward_of_10, 2); assert_eq_error_rate!(Balances::total_balance(&100), balance_era_1_100 + reward_of_100, 2); }); - } #[test] @@ -1049,13 +1070,16 @@ fn bond_extra_works() { // Check that account 10 is bonded to account 11 assert_eq!(Staking::bonded(&11), Some(10)); // Check how much is at stake - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000, - active: 1000, - unlocking: vec![], - claimed_rewards: vec![], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); // Give account 11 some large free balance greater than total let _ = Balances::make_free_balance_be(&11, 1000000); @@ -1063,24 +1087,30 @@ fn bond_extra_works() { // Call the bond_extra function from controller, add only 100 assert_ok!(Staking::bond_extra(Origin::signed(11), 100)); // There should be 100 more `total` and `active` in the ledger - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000 + 100, - active: 1000 + 100, - unlocking: vec![], - claimed_rewards: vec![], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000 + 100, + active: 1000 + 100, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); // Call the bond_extra function with a large number, should handle it assert_ok!(Staking::bond_extra(Origin::signed(11), Balance::max_value())); // The full amount of the funds should now be in the total and active - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000000, - active: 1000000, - unlocking: vec![], - claimed_rewards: vec![], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000000, + active: 1000000, + unlocking: vec![], + 
claimed_rewards: vec![], + }) + ); }); } @@ -1108,13 +1138,16 @@ fn bond_extra_and_withdraw_unbonded_works() { mock::start_active_era(1); // Initial state of 10 - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000, - active: 1000, - unlocking: vec![], - claimed_rewards: vec![], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); assert_eq!( Staking::eras_stakers(Staking::active_era().unwrap().index, 11), Exposure { total: 1000, own: 1000, others: vec![] } @@ -1123,13 +1156,16 @@ fn bond_extra_and_withdraw_unbonded_works() { // deposit the extra 100 units Staking::bond_extra(Origin::signed(11), 100).unwrap(); - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000 + 100, - active: 1000 + 100, - unlocking: vec![], - claimed_rewards: vec![], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000 + 100, + active: 1000 + 100, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); // Exposure is a snapshot! only updated after the next era update. assert_ne!( Staking::eras_stakers(Staking::active_era().unwrap().index, 11), @@ -1141,13 +1177,16 @@ fn bond_extra_and_withdraw_unbonded_works() { assert_eq!(Staking::active_era().unwrap().index, 2); // ledger should be the same. - assert_eq!(Staking::ledger(&10), Some(StakingLedger { - stash: 11, - total: 1000 + 100, - active: 1000 + 100, - unlocking: vec![], - claimed_rewards: vec![], - })); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000 + 100, + active: 1000 + 100, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); // Exposure is now updated. 
assert_eq!( Staking::eras_stakers(Staking::active_era().unwrap().index, 11), @@ -1162,7 +1201,7 @@ fn bond_extra_and_withdraw_unbonded_works() { stash: 11, total: 1000 + 100, active: 100, - unlocking: vec![UnlockChunk{ value: 1000, era: 2 + 3}], + unlocking: vec![UnlockChunk { value: 1000, era: 2 + 3 }], claimed_rewards: vec![] }), ); @@ -1175,7 +1214,7 @@ fn bond_extra_and_withdraw_unbonded_works() { stash: 11, total: 1000 + 100, active: 100, - unlocking: vec![UnlockChunk{ value: 1000, era: 2 + 3}], + unlocking: vec![UnlockChunk { value: 1000, era: 2 + 3 }], claimed_rewards: vec![] }), ); @@ -1191,7 +1230,7 @@ fn bond_extra_and_withdraw_unbonded_works() { stash: 11, total: 1000 + 100, active: 100, - unlocking: vec![UnlockChunk{ value: 1000, era: 2 + 3}], + unlocking: vec![UnlockChunk { value: 1000, era: 2 + 3 }], claimed_rewards: vec![] }), ); @@ -1218,7 +1257,7 @@ fn bond_extra_and_withdraw_unbonded_works() { fn too_many_unbond_calls_should_not_work() { ExtBuilder::default().build_and_execute(|| { // locked at era 0 until 3 - for _ in 0..MAX_UNLOCKING_CHUNKS-1 { + for _ in 0..MAX_UNLOCKING_CHUNKS - 1 { assert_ok!(Staking::unbond(Origin::signed(10), 1)); } @@ -1247,247 +1286,229 @@ fn rebond_works() { // * Given an account being bonded [and chosen as a validator](not mandatory) // * it can unbond a portion of its funds from the stash account. // * it can re-bond a portion of the funds scheduled to unlock. - ExtBuilder::default() - .nominate(false) - .build_and_execute(|| { - // Set payee to controller. avoids confusion - assert_ok!(Staking::set_payee( - Origin::signed(10), - RewardDestination::Controller - )); + ExtBuilder::default().nominate(false).build_and_execute(|| { + // Set payee to controller. 
avoids confusion + assert_ok!(Staking::set_payee(Origin::signed(10), RewardDestination::Controller)); - // Give account 11 some large free balance greater than total - let _ = Balances::make_free_balance_be(&11, 1000000); + // Give account 11 some large free balance greater than total + let _ = Balances::make_free_balance_be(&11, 1000000); - // confirm that 10 is a normal validator and gets paid at the end of the era. - mock::start_active_era(1); + // confirm that 10 is a normal validator and gets paid at the end of the era. + mock::start_active_era(1); - // Initial state of 10 - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 1000, - unlocking: vec![], - claimed_rewards: vec![], - }) - ); + // Initial state of 10 + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); - mock::start_active_era(2); - assert_eq!(Staking::active_era().unwrap().index, 2); + mock::start_active_era(2); + assert_eq!(Staking::active_era().unwrap().index, 2); - // Try to rebond some funds. We get an error since no fund is unbonded. - assert_noop!( - Staking::rebond(Origin::signed(10), 500), - Error::::NoUnlockChunk, - ); + // Try to rebond some funds. We get an error since no fund is unbonded. + assert_noop!(Staking::rebond(Origin::signed(10), 500), Error::::NoUnlockChunk,); - // Unbond almost all of the funds in stash. - Staking::unbond(Origin::signed(10), 900).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 100, - unlocking: vec![UnlockChunk { - value: 900, - era: 2 + 3, - }], - claimed_rewards: vec![], - }) - ); + // Unbond almost all of the funds in stash. 
+ Staking::unbond(Origin::signed(10), 900).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 100, + unlocking: vec![UnlockChunk { value: 900, era: 2 + 3 }], + claimed_rewards: vec![], + }) + ); - // Re-bond all the funds unbonded. - Staking::rebond(Origin::signed(10), 900).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 1000, - unlocking: vec![], - claimed_rewards: vec![], - }) - ); + // Re-bond all the funds unbonded. + Staking::rebond(Origin::signed(10), 900).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); - // Unbond almost all of the funds in stash. - Staking::unbond(Origin::signed(10), 900).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 100, - unlocking: vec![UnlockChunk { value: 900, era: 5 }], - claimed_rewards: vec![], - }) - ); + // Unbond almost all of the funds in stash. + Staking::unbond(Origin::signed(10), 900).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 100, + unlocking: vec![UnlockChunk { value: 900, era: 5 }], + claimed_rewards: vec![], + }) + ); - // Re-bond part of the funds unbonded. - Staking::rebond(Origin::signed(10), 500).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 600, - unlocking: vec![UnlockChunk { value: 400, era: 5 }], - claimed_rewards: vec![], - }) - ); + // Re-bond part of the funds unbonded. + Staking::rebond(Origin::signed(10), 500).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 600, + unlocking: vec![UnlockChunk { value: 400, era: 5 }], + claimed_rewards: vec![], + }) + ); - // Re-bond the remainder of the funds unbonded. 
- Staking::rebond(Origin::signed(10), 500).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 1000, - unlocking: vec![], - claimed_rewards: vec![], - }) - ); + // Re-bond the remainder of the funds unbonded. + Staking::rebond(Origin::signed(10), 500).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); - // Unbond parts of the funds in stash. - Staking::unbond(Origin::signed(10), 300).unwrap(); - Staking::unbond(Origin::signed(10), 300).unwrap(); - Staking::unbond(Origin::signed(10), 300).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 100, - unlocking: vec![ - UnlockChunk { value: 300, era: 5 }, - UnlockChunk { value: 300, era: 5 }, - UnlockChunk { value: 300, era: 5 }, - ], - claimed_rewards: vec![], - }) - ); + // Unbond parts of the funds in stash. + Staking::unbond(Origin::signed(10), 300).unwrap(); + Staking::unbond(Origin::signed(10), 300).unwrap(); + Staking::unbond(Origin::signed(10), 300).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 100, + unlocking: vec![ + UnlockChunk { value: 300, era: 5 }, + UnlockChunk { value: 300, era: 5 }, + UnlockChunk { value: 300, era: 5 }, + ], + claimed_rewards: vec![], + }) + ); - // Re-bond part of the funds unbonded. - Staking::rebond(Origin::signed(10), 500).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 600, - unlocking: vec![ - UnlockChunk { value: 300, era: 5 }, - UnlockChunk { value: 100, era: 5 }, - ], - claimed_rewards: vec![], - }) - ); - }) + // Re-bond part of the funds unbonded. 
+ Staking::rebond(Origin::signed(10), 500).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 600, + unlocking: vec![ + UnlockChunk { value: 300, era: 5 }, + UnlockChunk { value: 100, era: 5 }, + ], + claimed_rewards: vec![], + }) + ); + }) } #[test] fn rebond_is_fifo() { // Rebond should proceed by reversing the most recent bond operations. - ExtBuilder::default() - .nominate(false) - .build_and_execute(|| { - // Set payee to controller. avoids confusion - assert_ok!(Staking::set_payee( - Origin::signed(10), - RewardDestination::Controller - )); + ExtBuilder::default().nominate(false).build_and_execute(|| { + // Set payee to controller. avoids confusion + assert_ok!(Staking::set_payee(Origin::signed(10), RewardDestination::Controller)); - // Give account 11 some large free balance greater than total - let _ = Balances::make_free_balance_be(&11, 1000000); + // Give account 11 some large free balance greater than total + let _ = Balances::make_free_balance_be(&11, 1000000); - // confirm that 10 is a normal validator and gets paid at the end of the era. - mock::start_active_era(1); + // confirm that 10 is a normal validator and gets paid at the end of the era. + mock::start_active_era(1); - // Initial state of 10 - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 1000, - unlocking: vec![], - claimed_rewards: vec![], - }) - ); + // Initial state of 10 + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![], + }) + ); - mock::start_active_era(2); + mock::start_active_era(2); - // Unbond some of the funds in stash. 
- Staking::unbond(Origin::signed(10), 400).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 600, - unlocking: vec![ - UnlockChunk { value: 400, era: 2 + 3 }, - ], - claimed_rewards: vec![], - }) - ); + // Unbond some of the funds in stash. + Staking::unbond(Origin::signed(10), 400).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 600, + unlocking: vec![UnlockChunk { value: 400, era: 2 + 3 },], + claimed_rewards: vec![], + }) + ); - mock::start_active_era(3); + mock::start_active_era(3); - // Unbond more of the funds in stash. - Staking::unbond(Origin::signed(10), 300).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 300, - unlocking: vec![ - UnlockChunk { value: 400, era: 2 + 3 }, - UnlockChunk { value: 300, era: 3 + 3 }, - ], - claimed_rewards: vec![], - }) - ); + // Unbond more of the funds in stash. + Staking::unbond(Origin::signed(10), 300).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 300, + unlocking: vec![ + UnlockChunk { value: 400, era: 2 + 3 }, + UnlockChunk { value: 300, era: 3 + 3 }, + ], + claimed_rewards: vec![], + }) + ); - mock::start_active_era(4); + mock::start_active_era(4); - // Unbond yet more of the funds in stash. - Staking::unbond(Origin::signed(10), 200).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 100, - unlocking: vec![ - UnlockChunk { value: 400, era: 2 + 3 }, - UnlockChunk { value: 300, era: 3 + 3 }, - UnlockChunk { value: 200, era: 4 + 3 }, - ], - claimed_rewards: vec![], - }) - ); + // Unbond yet more of the funds in stash. 
+ Staking::unbond(Origin::signed(10), 200).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 100, + unlocking: vec![ + UnlockChunk { value: 400, era: 2 + 3 }, + UnlockChunk { value: 300, era: 3 + 3 }, + UnlockChunk { value: 200, era: 4 + 3 }, + ], + claimed_rewards: vec![], + }) + ); - // Re-bond half of the unbonding funds. - Staking::rebond(Origin::signed(10), 400).unwrap(); - assert_eq!( - Staking::ledger(&10), - Some(StakingLedger { - stash: 11, - total: 1000, - active: 500, - unlocking: vec![ - UnlockChunk { value: 400, era: 2 + 3 }, - UnlockChunk { value: 100, era: 3 + 3 }, - ], - claimed_rewards: vec![], - }) - ); - }) + // Re-bond half of the unbonding funds. + Staking::rebond(Origin::signed(10), 400).unwrap(); + assert_eq!( + Staking::ledger(&10), + Some(StakingLedger { + stash: 11, + total: 1000, + active: 500, + unlocking: vec![ + UnlockChunk { value: 400, era: 2 + 3 }, + UnlockChunk { value: 100, era: 3 + 3 }, + ], + claimed_rewards: vec![], + }) + ); + }) } #[test] @@ -1510,7 +1531,16 @@ fn reward_to_stake_works() { // Now lets lower account 20 stake assert_eq!(Staking::eras_stakers(Staking::active_era().unwrap().index, 21).total, 69); - >::insert(&20, StakingLedger { stash: 21, total: 69, active: 69, unlocking: vec![], claimed_rewards: vec![] }); + >::insert( + &20, + StakingLedger { + stash: 21, + total: 69, + active: 69, + unlocking: vec![], + claimed_rewards: vec![], + }, + ); // Compute total payout now for whole duration as other parameter won't change let total_payout_0 = current_total_payout_for_duration(reward_time_per_era()); @@ -1531,8 +1561,14 @@ fn reward_to_stake_works() { mock::start_active_era(2); // -- new infos - assert_eq!(Staking::eras_stakers(Staking::active_era().unwrap().index, 11).total, 1000 + total_payout_0 / 2); - assert_eq!(Staking::eras_stakers(Staking::active_era().unwrap().index, 21).total, 69 + total_payout_0 / 2); + assert_eq!( + 
Staking::eras_stakers(Staking::active_era().unwrap().index, 11).total, + 1000 + total_payout_0 / 2 + ); + assert_eq!( + Staking::eras_stakers(Staking::active_era().unwrap().index, 21).total, + 69 + total_payout_0 / 2 + ); }); } @@ -1653,18 +1689,21 @@ fn on_free_balance_zero_stash_removes_nominator() { }); } - #[test] fn switching_roles() { // Test that it should be possible to switch between roles (nominator, validator, idle) with minimal overhead. ExtBuilder::default().nominate(false).build_and_execute(|| { // Reset reward destination - for i in &[10, 20] { assert_ok!(Staking::set_payee(Origin::signed(*i), RewardDestination::Controller)); } + for i in &[10, 20] { + assert_ok!(Staking::set_payee(Origin::signed(*i), RewardDestination::Controller)); + } assert_eq_uvec!(validator_controllers(), vec![20, 10]); // put some money in account that we'll use. - for i in 1..7 { let _ = Balances::deposit_creating(&i, 5000); } + for i in 1..7 { + let _ = Balances::deposit_creating(&i, 5000); + } // add 2 nominators assert_ok!(Staking::bond(Origin::signed(1), 2, 2000, RewardDestination::Controller)); @@ -1699,24 +1738,32 @@ fn switching_roles() { #[test] fn wrong_vote_is_null() { - ExtBuilder::default().nominate(false).validator_pool(true).build_and_execute(|| { - assert_eq_uvec!(validator_controllers(), vec![40, 30]); + ExtBuilder::default() + .nominate(false) + .validator_pool(true) + .build_and_execute(|| { + assert_eq_uvec!(validator_controllers(), vec![40, 30]); - // put some money in account that we'll use. - for i in 1..3 { let _ = Balances::deposit_creating(&i, 5000); } + // put some money in account that we'll use. + for i in 1..3 { + let _ = Balances::deposit_creating(&i, 5000); + } - // add 1 nominators - assert_ok!(Staking::bond(Origin::signed(1), 2, 2000, RewardDestination::default())); - assert_ok!(Staking::nominate(Origin::signed(2), vec![ - 11, 21, // good votes - 1, 2, 15, 1000, 25 // crap votes. No effect. 
- ])); + // add 1 nominators + assert_ok!(Staking::bond(Origin::signed(1), 2, 2000, RewardDestination::default())); + assert_ok!(Staking::nominate( + Origin::signed(2), + vec![ + 11, 21, // good votes + 1, 2, 15, 1000, 25 // crap votes. No effect. + ] + )); - // new block - mock::start_active_era(1); + // new block + mock::start_active_era(1); - assert_eq_uvec!(validator_controllers(), vec![20, 10]); - }); + assert_eq_uvec!(validator_controllers(), vec![20, 10]); + }); } #[test] @@ -1748,7 +1795,7 @@ fn bond_with_no_staked_value() { stash: 1, active: 0, total: 5, - unlocking: vec![UnlockChunk {value: 5, era: 3}], + unlocking: vec![UnlockChunk { value: 5, era: 3 }], claimed_rewards: vec![], }) ); @@ -1800,7 +1847,11 @@ fn bond_with_little_staked_value_bounded() { assert_eq!(Staking::eras_stakers(active_era(), 2).total, 0); // Old ones are rewarded. - assert_eq_error_rate!(Balances::free_balance(10), init_balance_10 + total_payout_0 / 3, 1); + assert_eq_error_rate!( + Balances::free_balance(10), + init_balance_10 + total_payout_0 / 3, + 1 + ); // no rewards paid to 2. This was initial election. assert_eq!(Balances::free_balance(2), init_balance_2); @@ -1814,7 +1865,11 @@ fn bond_with_little_staked_value_bounded() { assert_eq!(Staking::eras_stakers(active_era(), 2).total, 0); // 2 is now rewarded. - assert_eq_error_rate!(Balances::free_balance(2), init_balance_2 + total_payout_1 / 3, 1); + assert_eq_error_rate!( + Balances::free_balance(2), + init_balance_2 + total_payout_1 / 3, + 1 + ); assert_eq_error_rate!( Balances::free_balance(&10), init_balance_10 + total_payout_0 / 3 + total_payout_1 / 3, @@ -1893,7 +1948,7 @@ fn bond_with_duplicate_vote_should_be_ignored_by_election_provider_elected() { // give the man some money. 
let initial_balance = 1000; - for i in [1, 2, 3, 4,].iter() { + for i in [1, 2, 3, 4].iter() { let _ = Balances::make_free_balance_be(i, initial_balance); } @@ -1991,20 +2046,22 @@ fn reward_validator_slashing_validator_does_not_overflow() { // it is 0. Staking::bond(Origin::signed(2), 20000, stake - 1, RewardDestination::default()).unwrap(); // Override exposure of 11 - ErasStakers::::insert(0, 11, Exposure { - total: stake, - own: 1, - others: vec![ IndividualExposure { who: 2, value: stake - 1 }] - }); + ErasStakers::::insert( + 0, + 11, + Exposure { + total: stake, + own: 1, + others: vec![IndividualExposure { who: 2, value: stake - 1 }], + }, + ); // Check slashing on_offence_now( - &[ - OffenceDetails { - offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), + reporters: vec![], + }], &[Perbill::from_percent(100)], ); @@ -2046,24 +2103,13 @@ fn add_reward_points_fns_works() { // Not mandatory but must be coherent with rewards assert_eq_uvec!(Session::validators(), vec![21, 11]); - >::reward_by_ids(vec![ - (21, 1), - (11, 1), - (11, 1), - ]); + >::reward_by_ids(vec![(21, 1), (11, 1), (11, 1)]); - >::reward_by_ids(vec![ - (21, 1), - (11, 1), - (11, 1), - ]); + >::reward_by_ids(vec![(21, 1), (11, 1), (11, 1)]); assert_eq!( ErasRewardPoints::::get(Staking::active_era().unwrap().index), - EraRewardPoints { - individual: vec![(11, 4), (21, 2)].into_iter().collect(), - total: 6, - }, + EraRewardPoints { individual: vec![(11, 4), (21, 2)].into_iter().collect(), total: 6 }, ); }) } @@ -2074,7 +2120,7 @@ fn unbonded_balance_is_not_slashable() { // total amount staked is slashable. assert_eq!(Staking::slashable_balance_of(&11), 1000); - assert_ok!(Staking::unbond(Origin::signed(10), 800)); + assert_ok!(Staking::unbond(Origin::signed(10), 800)); // only the active portion. 
assert_eq!(Staking::slashable_balance_of(&11), 200); @@ -2092,7 +2138,10 @@ fn era_is_always_same_length() { assert_eq!(Staking::eras_start_session_index(current_era()).unwrap(), session_per_era); mock::start_active_era(2); - assert_eq!(Staking::eras_start_session_index(current_era()).unwrap(), session_per_era * 2u32); + assert_eq!( + Staking::eras_start_session_index(current_era()).unwrap(), + session_per_era * 2u32 + ); let session = Session::current_index(); ForceEra::::put(Forcing::ForceNew); @@ -2102,7 +2151,10 @@ fn era_is_always_same_length() { assert_eq!(Staking::eras_start_session_index(current_era()).unwrap(), session + 2); mock::start_active_era(4); - assert_eq!(Staking::eras_start_session_index(current_era()).unwrap(), session + 2u32 + session_per_era); + assert_eq!( + Staking::eras_start_session_index(current_era()).unwrap(), + session + 2u32 + session_per_era + ); }); } @@ -2111,10 +2163,7 @@ fn offence_forces_new_era() { ExtBuilder::default().build_and_execute(|| { on_offence_now( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![], }], &[Perbill::from_percent(5)], @@ -2132,10 +2181,7 @@ fn offence_ensures_new_era_without_clobbering() { on_offence_now( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![], }], &[Perbill::from_percent(5)], @@ -2153,10 +2199,7 @@ fn offence_deselects_validator_even_when_slash_is_zero() { on_offence_now( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![], }], &[Perbill::from_percent(0)], @@ -2182,14 +2225,7 @@ fn 
slashing_performed_according_exposure() { // Handle an offence with a historical exposure. on_offence_now( &[OffenceDetails { - offender: ( - 11, - Exposure { - total: 500, - own: 500, - others: vec![], - }, - ), + offender: (11, Exposure { total: 500, own: 500, others: vec![] }), reporters: vec![], }], &[Perbill::from_percent(50)], @@ -2210,10 +2246,7 @@ fn slash_in_old_span_does_not_deselect() { on_offence_now( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![], }], &[Perbill::from_percent(0)], @@ -2236,10 +2269,7 @@ fn slash_in_old_span_does_not_deselect() { on_offence_in_era( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![], }], &[Perbill::from_percent(0)], @@ -2253,10 +2283,7 @@ fn slash_in_old_span_does_not_deselect() { on_offence_in_era( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![], }], // NOTE: A 100% slash here would clean up the account, causing de-registration. @@ -2279,14 +2306,14 @@ fn reporters_receive_their_slice() { // The reporters' reward is calculated from the total exposure. 
let initial_balance = 1125; - assert_eq!(Staking::eras_stakers(Staking::active_era().unwrap().index, 11).total, initial_balance); + assert_eq!( + Staking::eras_stakers(Staking::active_era().unwrap().index, 11).total, + initial_balance + ); on_offence_now( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![1, 2], }], &[Perbill::from_percent(50)], @@ -2309,14 +2336,14 @@ fn subsequent_reports_in_same_span_pay_out_less() { // The reporters' reward is calculated from the total exposure. let initial_balance = 1125; - assert_eq!(Staking::eras_stakers(Staking::active_era().unwrap().index, 11).total, initial_balance); + assert_eq!( + Staking::eras_stakers(Staking::active_era().unwrap().index, 11).total, + initial_balance + ); on_offence_now( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![1], }], &[Perbill::from_percent(20)], @@ -2329,10 +2356,7 @@ fn subsequent_reports_in_same_span_pay_out_less() { on_offence_now( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![1], }], &[Perbill::from_percent(50)], @@ -2357,8 +2381,8 @@ fn invulnerables_are_not_slashed() { let exposure = Staking::eras_stakers(Staking::active_era().unwrap().index, 21); let initial_balance = Staking::slashable_balance_of(&21); - let nominator_balances: Vec<_> = exposure.others - .iter().map(|o| Balances::free_balance(&o.who)).collect(); + let nominator_balances: Vec<_> = + exposure.others.iter().map(|o| Balances::free_balance(&o.who)).collect(); on_offence_now( &[ @@ -2397,10 +2421,7 @@ fn dont_slash_if_fraction_is_zero() { 
on_offence_now( &[OffenceDetails { - offender: ( - 11, - Staking::eras_stakers(Staking::active_era().unwrap().index, 11), - ), + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![], }], &[Perbill::from_percent(0)], @@ -2420,12 +2441,10 @@ fn only_slash_for_max_in_era() { assert_eq!(Balances::free_balance(11), 1000); on_offence_now( - &[ - OffenceDetails { - offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), + reporters: vec![], + }], &[Perbill::from_percent(50)], ); @@ -2434,12 +2453,10 @@ fn only_slash_for_max_in_era() { assert_eq!(Staking::force_era(), Forcing::ForceNew); on_offence_now( - &[ - OffenceDetails { - offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), + reporters: vec![], + }], &[Perbill::from_percent(25)], ); @@ -2447,12 +2464,10 @@ fn only_slash_for_max_in_era() { assert_eq!(Balances::free_balance(11), 500); on_offence_now( - &[ - OffenceDetails { - offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), + reporters: vec![], + }], &[Perbill::from_percent(60)], ); @@ -2465,52 +2480,54 @@ fn only_slash_for_max_in_era() { fn garbage_collection_after_slashing() { // ensures that `SlashingSpans` and `SpanSlash` of an account is removed after reaping. 
ExtBuilder::default() - .existential_deposit(2) - .min_nominator_bond(2) - .min_validator_bond(2) - .build_and_execute(|| { - assert_eq!(Balances::free_balance(11), 256_000); + .existential_deposit(2) + .min_nominator_bond(2) + .min_validator_bond(2) + .build_and_execute(|| { + assert_eq!(Balances::free_balance(11), 256_000); - on_offence_now( - &[ - OffenceDetails { + on_offence_now( + &[OffenceDetails { offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![], - }, - ], - &[Perbill::from_percent(10)], - ); + }], + &[Perbill::from_percent(10)], + ); - assert_eq!(Balances::free_balance(11), 256_000 - 25_600); - assert!(::SlashingSpans::get(&11).is_some()); - assert_eq!(::SpanSlash::get(&(11, 0)).amount_slashed(), &25_600); + assert_eq!(Balances::free_balance(11), 256_000 - 25_600); + assert!(::SlashingSpans::get(&11).is_some()); + assert_eq!( + ::SpanSlash::get(&(11, 0)).amount_slashed(), + &25_600 + ); - on_offence_now( - &[ - OffenceDetails { + on_offence_now( + &[OffenceDetails { offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), reporters: vec![], - }, - ], - &[Perbill::from_percent(100)], - ); + }], + &[Perbill::from_percent(100)], + ); - // validator and nominator slash in era are garbage-collected by era change, - // so we don't test those here. + // validator and nominator slash in era are garbage-collected by era change, + // so we don't test those here. 
- assert_eq!(Balances::free_balance(11), 2); - assert_eq!(Balances::total_balance(&11), 2); + assert_eq!(Balances::free_balance(11), 2); + assert_eq!(Balances::total_balance(&11), 2); - let slashing_spans = ::SlashingSpans::get(&11).unwrap(); - assert_eq!(slashing_spans.iter().count(), 2); + let slashing_spans = ::SlashingSpans::get(&11).unwrap(); + assert_eq!(slashing_spans.iter().count(), 2); - // reap_stash respects num_slashing_spans so that weight is accurate - assert_noop!(Staking::reap_stash(Origin::none(), 11, 0), Error::::IncorrectSlashingSpans); - assert_ok!(Staking::reap_stash(Origin::none(), 11, 2)); + // reap_stash respects num_slashing_spans so that weight is accurate + assert_noop!( + Staking::reap_stash(Origin::none(), 11, 0), + Error::::IncorrectSlashingSpans + ); + assert_ok!(Staking::reap_stash(Origin::none(), 11, 2)); - assert!(::SlashingSpans::get(&11).is_none()); - assert_eq!(::SpanSlash::get(&(11, 0)).amount_slashed(), &0); - }) + assert!(::SlashingSpans::get(&11).is_none()); + assert_eq!(::SpanSlash::get(&(11, 0)).amount_slashed(), &0); + }) } #[test] @@ -2527,13 +2544,8 @@ fn garbage_collection_on_window_pruning() { assert_eq!(Balances::free_balance(101), 2000); let nominated_value = exposure.others.iter().find(|o| o.who == 101).unwrap().value; - on_offence_now( - &[ - OffenceDetails { - offender: (11, Staking::eras_stakers(now, 11)), - reporters: vec![], - }, - ], + on_offence_now( + &[OffenceDetails { offender: (11, Staking::eras_stakers(now, 11)), reporters: vec![] }], &[Perbill::from_percent(10)], ); @@ -2574,12 +2586,10 @@ fn slashing_nominators_by_span_max() { let nominated_value_21 = exposure_21.others.iter().find(|o| o.who == 101).unwrap().value; on_offence_in_era( - &[ - OffenceDetails { - offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), + reporters: vec![], + }], 
&[Perbill::from_percent(10)], 2, ); @@ -2596,24 +2606,16 @@ fn slashing_nominators_by_span_max() { let get_span = |account| ::SlashingSpans::get(&account).unwrap(); - assert_eq!( - get_span(11).iter().collect::>(), - expected_spans, - ); + assert_eq!(get_span(11).iter().collect::>(), expected_spans,); - assert_eq!( - get_span(101).iter().collect::>(), - expected_spans, - ); + assert_eq!(get_span(101).iter().collect::>(), expected_spans,); // second slash: higher era, higher value, same span. on_offence_in_era( - &[ - OffenceDetails { - offender: (21, Staking::eras_stakers(Staking::active_era().unwrap().index, 21)), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (21, Staking::eras_stakers(Staking::active_era().unwrap().index, 21)), + reporters: vec![], + }], &[Perbill::from_percent(30)], 3, ); @@ -2631,12 +2633,10 @@ fn slashing_nominators_by_span_max() { // third slash: in same era and on same validator as first, higher // in-era value, but lower slash value than slash 2. 
on_offence_in_era( - &[ - OffenceDetails { - offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), + reporters: vec![], + }], &[Perbill::from_percent(20)], 2, ); @@ -2667,12 +2667,10 @@ fn slashes_are_summed_across_spans() { let get_span = |account| ::SlashingSpans::get(&account).unwrap(); on_offence_now( - &[ - OffenceDetails { - offender: (21, Staking::eras_stakers(Staking::active_era().unwrap().index, 21)), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (21, Staking::eras_stakers(Staking::active_era().unwrap().index, 21)), + reporters: vec![], + }], &[Perbill::from_percent(10)], ); @@ -2692,12 +2690,10 @@ fn slashes_are_summed_across_spans() { assert_eq!(Staking::slashable_balance_of(&21), 900); on_offence_now( - &[ - OffenceDetails { - offender: (21, Staking::eras_stakers(Staking::active_era().unwrap().index, 21)), - reporters: vec![], - }, - ], + &[OffenceDetails { + offender: (21, Staking::eras_stakers(Staking::active_era().unwrap().index, 21)), + reporters: vec![], + }], &[Perbill::from_percent(10)], ); @@ -2714,84 +2710,68 @@ fn slashes_are_summed_across_spans() { #[test] fn deferred_slashes_are_deferred() { - ExtBuilder::default() - .slash_defer_duration(2) - .build_and_execute(|| { - mock::start_active_era(1); + ExtBuilder::default().slash_defer_duration(2).build_and_execute(|| { + mock::start_active_era(1); - assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(11), 1000); - let exposure = Staking::eras_stakers(Staking::active_era().unwrap().index, 11); - assert_eq!(Balances::free_balance(101), 2000); - let nominated_value = exposure.others.iter().find(|o| o.who == 101).unwrap().value; + let exposure = Staking::eras_stakers(Staking::active_era().unwrap().index, 11); + assert_eq!(Balances::free_balance(101), 2000); + let nominated_value = 
exposure.others.iter().find(|o| o.who == 101).unwrap().value; - on_offence_now( - &[ - OffenceDetails { - offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), - reporters: vec![], - }, - ], + on_offence_now( + &[OffenceDetails { + offender: (11, Staking::eras_stakers(Staking::active_era().unwrap().index, 11)), + reporters: vec![], + }], &[Perbill::from_percent(10)], ); - assert_eq!(Balances::free_balance(11), 1000); - assert_eq!(Balances::free_balance(101), 2000); + assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(101), 2000); - mock::start_active_era(2); + mock::start_active_era(2); - assert_eq!(Balances::free_balance(11), 1000); - assert_eq!(Balances::free_balance(101), 2000); + assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(101), 2000); - mock::start_active_era(3); + mock::start_active_era(3); - assert_eq!(Balances::free_balance(11), 1000); - assert_eq!(Balances::free_balance(101), 2000); + assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(101), 2000); - // at the start of era 4, slashes from era 1 are processed, - // after being deferred for at least 2 full eras. - mock::start_active_era(4); + // at the start of era 4, slashes from era 1 are processed, + // after being deferred for at least 2 full eras. 
+ mock::start_active_era(4); - assert_eq!(Balances::free_balance(11), 900); - assert_eq!(Balances::free_balance(101), 2000 - (nominated_value / 10)); - }) + assert_eq!(Balances::free_balance(11), 900); + assert_eq!(Balances::free_balance(101), 2000 - (nominated_value / 10)); + }) } #[test] fn remove_deferred() { - ExtBuilder::default() - .slash_defer_duration(2) - .build_and_execute(|| { - mock::start_active_era(1); + ExtBuilder::default().slash_defer_duration(2).build_and_execute(|| { + mock::start_active_era(1); - assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(11), 1000); - let exposure = Staking::eras_stakers(Staking::active_era().unwrap().index, 11); - assert_eq!(Balances::free_balance(101), 2000); - let nominated_value = exposure.others.iter().find(|o| o.who == 101).unwrap().value; + let exposure = Staking::eras_stakers(Staking::active_era().unwrap().index, 11); + assert_eq!(Balances::free_balance(101), 2000); + let nominated_value = exposure.others.iter().find(|o| o.who == 101).unwrap().value; - on_offence_now( - &[ - OffenceDetails { - offender: (11, exposure.clone()), - reporters: vec![], - }, - ], + on_offence_now( + &[OffenceDetails { offender: (11, exposure.clone()), reporters: vec![] }], &[Perbill::from_percent(10)], ); - assert_eq!(Balances::free_balance(11), 1000); - assert_eq!(Balances::free_balance(101), 2000); + assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(101), 2000); - mock::start_active_era(2); + mock::start_active_era(2); - on_offence_in_era( - &[ - OffenceDetails { - offender: (11, exposure.clone()), - reporters: vec![], - }, - ], + on_offence_in_era( + &[OffenceDetails { offender: (11, exposure.clone()), reporters: vec![] }], &[Perbill::from_percent(15)], 1, ); @@ -2802,32 +2782,32 @@ fn remove_deferred() { Error::::EmptyTargets ); - assert_ok!(Staking::cancel_deferred_slash(Origin::root(), 1, vec![0])); + assert_ok!(Staking::cancel_deferred_slash(Origin::root(), 1, 
vec![0])); - assert_eq!(Balances::free_balance(11), 1000); - assert_eq!(Balances::free_balance(101), 2000); + assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(101), 2000); - mock::start_active_era(3); + mock::start_active_era(3); - assert_eq!(Balances::free_balance(11), 1000); - assert_eq!(Balances::free_balance(101), 2000); + assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(101), 2000); - // at the start of era 4, slashes from era 1 are processed, - // after being deferred for at least 2 full eras. - mock::start_active_era(4); + // at the start of era 4, slashes from era 1 are processed, + // after being deferred for at least 2 full eras. + mock::start_active_era(4); - // the first slash for 10% was cancelled, so no effect. - assert_eq!(Balances::free_balance(11), 1000); - assert_eq!(Balances::free_balance(101), 2000); + // the first slash for 10% was cancelled, so no effect. + assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(101), 2000); - mock::start_active_era(5); + mock::start_active_era(5); - let slash_10 = Perbill::from_percent(10); - let slash_15 = Perbill::from_percent(15); - let initial_slash = slash_10 * nominated_value; + let slash_10 = Perbill::from_percent(10); + let slash_15 = Perbill::from_percent(15); + let initial_slash = slash_10 * nominated_value; - let total_slash = slash_15 * nominated_value; - let actual_slash = total_slash - initial_slash; + let total_slash = slash_15 * nominated_value; + let actual_slash = total_slash - initial_slash; // 5% slash (15 - 10) processed now. 
assert_eq!(Balances::free_balance(11), 950); @@ -2837,63 +2817,39 @@ fn remove_deferred() { #[test] fn remove_multi_deferred() { - ExtBuilder::default() - .slash_defer_duration(2) - .build_and_execute(|| { - mock::start_active_era(1); + ExtBuilder::default().slash_defer_duration(2).build_and_execute(|| { + mock::start_active_era(1); - assert_eq!(Balances::free_balance(11), 1000); + assert_eq!(Balances::free_balance(11), 1000); - let exposure = Staking::eras_stakers(Staking::active_era().unwrap().index, 11); - assert_eq!(Balances::free_balance(101), 2000); + let exposure = Staking::eras_stakers(Staking::active_era().unwrap().index, 11); + assert_eq!(Balances::free_balance(101), 2000); - on_offence_now( - &[ - OffenceDetails { - offender: (11, exposure.clone()), - reporters: vec![], - }, - ], + on_offence_now( + &[OffenceDetails { offender: (11, exposure.clone()), reporters: vec![] }], &[Perbill::from_percent(10)], ); on_offence_now( - &[ - OffenceDetails { - offender: (21, Staking::eras_stakers(Staking::active_era().unwrap().index, 21)), - reporters: vec![], - } - ], + &[OffenceDetails { + offender: (21, Staking::eras_stakers(Staking::active_era().unwrap().index, 21)), + reporters: vec![], + }], &[Perbill::from_percent(10)], ); on_offence_now( - &[ - OffenceDetails { - offender: (11, exposure.clone()), - reporters: vec![], - }, - ], + &[OffenceDetails { offender: (11, exposure.clone()), reporters: vec![] }], &[Perbill::from_percent(25)], ); on_offence_now( - &[ - OffenceDetails { - offender: (42, exposure.clone()), - reporters: vec![], - }, - ], + &[OffenceDetails { offender: (42, exposure.clone()), reporters: vec![] }], &[Perbill::from_percent(25)], ); on_offence_now( - &[ - OffenceDetails { - offender: (69, exposure.clone()), - reporters: vec![], - }, - ], + &[OffenceDetails { offender: (69, exposure.clone()), reporters: vec![] }], &[Perbill::from_percent(25)], ); @@ -2942,20 +2898,14 @@ fn 
slash_kicks_validators_not_nominators_and_disables_nominator_for_kicked_valid assert_eq!(exposure_21.total, 1000 + 375); on_offence_now( - &[OffenceDetails { - offender: (11, exposure_11.clone()), - reporters: vec![], - }], + &[OffenceDetails { offender: (11, exposure_11.clone()), reporters: vec![] }], &[Perbill::from_percent(10)], ); // post-slash balance let nominator_slash_amount_11 = 125 / 10; assert_eq!(Balances::free_balance(11), 900); - assert_eq!( - Balances::free_balance(101), - 2000 - nominator_slash_amount_11 - ); + assert_eq!(Balances::free_balance(101), 2000 - nominator_slash_amount_11); // This is the best way to check that the validator was chilled; `get` will // return default value. @@ -2967,9 +2917,7 @@ fn slash_kicks_validators_not_nominators_and_disables_nominator_for_kicked_valid // and make sure that the vote will be ignored even if the validator // re-registers. - let last_slash = ::SlashingSpans::get(&11) - .unwrap() - .last_nonzero_slash(); + let last_slash = ::SlashingSpans::get(&11).unwrap().last_nonzero_slash(); assert!(nominations.submitted_in < last_slash); // actually re-bond the slashed validator @@ -3082,12 +3030,7 @@ fn zero_slash_keeps_nominators() { assert_eq!(Balances::free_balance(101), 2000); on_offence_now( - &[ - OffenceDetails { - offender: (11, exposure.clone()), - reporters: vec![], - }, - ], + &[OffenceDetails { offender: (11, exposure.clone()), reporters: vec![] }], &[Perbill::from_percent(0)], ); @@ -3120,10 +3063,16 @@ fn six_session_delay() { // pallet-session is delaying session by one, thus the next session to plan is +2. 
assert_eq!(>::new_session(init_session + 2), None); - assert_eq!(>::new_session(init_session + 3), Some(val_set.clone())); + assert_eq!( + >::new_session(init_session + 3), + Some(val_set.clone()) + ); assert_eq!(>::new_session(init_session + 4), None); assert_eq!(>::new_session(init_session + 5), None); - assert_eq!(>::new_session(init_session + 6), Some(val_set.clone())); + assert_eq!( + >::new_session(init_session + 6), + Some(val_set.clone()) + ); >::end_session(init_session); >::start_session(init_session + 1); @@ -3171,14 +3120,12 @@ fn test_max_nominator_rewarded_per_validator_and_cant_steal_someone_else_reward( let controller = 20_000 + i as AccountId; let balance = 10_000 + i as Balance; Balances::make_free_balance_be(&stash, balance); - assert_ok!( - Staking::bond( - Origin::signed(stash), - controller, - balance, - RewardDestination::Stash - ) - ); + assert_ok!(Staking::bond( + Origin::signed(stash), + controller, + balance, + RewardDestination::Stash + )); assert_ok!(Staking::nominate(Origin::signed(controller), vec![11])); } mock::start_active_era(1); @@ -3259,7 +3206,13 @@ fn test_payout_stakers() { // We track rewards in `claimed_rewards` vec assert_eq!( Staking::ledger(&10), - Some(StakingLedger { stash: 11, total: 1000, active: 1000, unlocking: vec![], claimed_rewards: vec![1] }) + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![1] + }) ); for i in 3..16 { @@ -3275,7 +3228,13 @@ fn test_payout_stakers() { // We track rewards in `claimed_rewards` vec assert_eq!( Staking::ledger(&10), - Some(StakingLedger { stash: 11, total: 1000, active: 1000, unlocking: vec![], claimed_rewards: (1..=14).collect() }) + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: (1..=14).collect() + }) ); for i in 16..100 { @@ -3290,7 +3249,13 @@ fn test_payout_stakers() { assert_ok!(Staking::payout_stakers(Origin::signed(1337), 11, 98)); assert_eq!( 
Staking::ledger(&10), - Some(StakingLedger { stash: 11, total: 1000, active: 1000, unlocking: vec![], claimed_rewards: vec![15, 98] }) + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![15, 98] + }) ); // Out of order claims works. @@ -3299,7 +3264,13 @@ fn test_payout_stakers() { assert_ok!(Staking::payout_stakers(Origin::signed(1337), 11, 42)); assert_eq!( Staking::ledger(&10), - Some(StakingLedger { stash: 11, total: 1000, active: 1000, unlocking: vec![], claimed_rewards: vec![15, 23, 42, 69, 98] }) + Some(StakingLedger { + stash: 11, + total: 1000, + active: 1000, + unlocking: vec![], + claimed_rewards: vec![15, 23, 42, 69, 98] + }) ); }); } @@ -3383,10 +3354,10 @@ fn payout_stakers_handles_weight_refund() { assert!(half_max_nom_rewarded > 0); assert!(max_nom_rewarded > half_max_nom_rewarded); - let max_nom_rewarded_weight - = ::WeightInfo::payout_stakers_alive_staked(max_nom_rewarded); - let half_max_nom_rewarded_weight - = ::WeightInfo::payout_stakers_alive_staked(half_max_nom_rewarded); + let max_nom_rewarded_weight = + ::WeightInfo::payout_stakers_alive_staked(max_nom_rewarded); + let half_max_nom_rewarded_weight = + ::WeightInfo::payout_stakers_alive_staked(half_max_nom_rewarded); let zero_nom_payouts_weight = ::WeightInfo::payout_stakers_alive_staked(0); assert!(zero_nom_payouts_weight > 0); assert!(half_max_nom_rewarded_weight > zero_nom_payouts_weight); @@ -3411,14 +3382,12 @@ fn payout_stakers_handles_weight_refund() { start_active_era(2); // Collect payouts when there are no nominators - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 1 }); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 1 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); - assert_eq!( - extract_actual_weight(&result, &info), - zero_nom_payouts_weight - ); + 
assert_eq!(extract_actual_weight(&result, &info), zero_nom_payouts_weight); // The validator is not rewarded in this era; so there will be zero payouts to claim for this era. @@ -3426,7 +3395,8 @@ fn payout_stakers_handles_weight_refund() { start_active_era(3); // Collect payouts for an era where the validator did not receive any points. - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 2 }); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 2 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); @@ -3439,7 +3409,8 @@ fn payout_stakers_handles_weight_refund() { start_active_era(4); // Collect payouts when the validator has `half_max_nom_rewarded` nominators. - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 3 }); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 3 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); @@ -3462,14 +3433,16 @@ fn payout_stakers_handles_weight_refund() { start_active_era(6); // Collect payouts when the validator had `half_max_nom_rewarded` nominators. - let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 5 }); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 5 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert_ok!(result); assert_eq!(extract_actual_weight(&result, &info), max_nom_rewarded_weight); // Try and collect payouts for an era that has already been collected. 
- let call = TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 5 }); + let call = + TestRuntimeCall::Staking(StakingCall::payout_stakers { validator_stash: 11, era: 5 }); let info = call.get_dispatch_info(); let result = call.dispatch(Origin::signed(20)); assert!(result.is_err()); @@ -3665,7 +3638,6 @@ fn session_buffering_with_offset() { assert_eq!(current_era(), 2); assert_eq!(active_era(), 2); assert_eq!(Session::current_index(), 10); - }); } @@ -3717,7 +3689,6 @@ fn session_buffering_no_offset() { assert_eq!(current_era(), 2); assert_eq!(active_era(), 2); assert_eq!(Session::current_index(), 10); - }); } @@ -3758,10 +3729,7 @@ fn cannot_rebond_to_lower_than_ed() { ); // now bond a wee bit more - assert_noop!( - Staking::rebond(Origin::signed(20), 5), - Error::::InsufficientBond, - ); + assert_noop!(Staking::rebond(Origin::signed(20), 5), Error::::InsufficientBond,); }) } @@ -3796,10 +3764,7 @@ fn cannot_bond_extra_to_lower_than_ed() { stash: 21, total: 1000, active: 0, - unlocking: vec![UnlockChunk { - value: 1000, - era: 3 - }], + unlocking: vec![UnlockChunk { value: 1000, era: 3 }], claimed_rewards: vec![] } ); @@ -3866,8 +3831,8 @@ mod election_data_provider { #[test] fn targets_2sec_block() { let mut validators = 1000; - while ::WeightInfo::get_npos_targets(validators) - < 2 * frame_support::weights::constants::WEIGHT_PER_SECOND + while ::WeightInfo::get_npos_targets(validators) < + 2 * frame_support::weights::constants::WEIGHT_PER_SECOND { validators += 1; } @@ -3884,8 +3849,8 @@ mod election_data_provider { let slashing_spans = validators; let mut nominators = 1000; - while ::WeightInfo::get_npos_voters(validators, nominators, slashing_spans) - < 2 * frame_support::weights::constants::WEIGHT_PER_SECOND + while ::WeightInfo::get_npos_voters(validators, nominators, slashing_spans) < + 2 * frame_support::weights::constants::WEIGHT_PER_SECOND { nominators += 1; } @@ -3975,10 +3940,7 @@ mod election_data_provider { 
run_to_block(20); assert_eq!(Staking::next_election_prediction(System::block_number()), 45); assert_eq!(staking_events().len(), 1); - assert_eq!( - *staking_events().last().unwrap(), - Event::StakingElection - ); + assert_eq!(*staking_events().last().unwrap(), Event::StakingElection); for b in 21..45 { run_to_block(b); @@ -3989,10 +3951,7 @@ mod election_data_provider { run_to_block(45); assert_eq!(Staking::next_election_prediction(System::block_number()), 70); assert_eq!(staking_events().len(), 3); - assert_eq!( - *staking_events().last().unwrap(), - Event::StakingElection - ); + assert_eq!(*staking_events().last().unwrap(), Event::StakingElection); Staking::force_no_eras(Origin::root()).unwrap(); assert_eq!(Staking::next_election_prediction(System::block_number()), u64::MAX); @@ -4015,10 +3974,7 @@ mod election_data_provider { run_to_block(55); assert_eq!(Staking::next_election_prediction(System::block_number()), 55 + 25); assert_eq!(staking_events().len(), 6); - assert_eq!( - *staking_events().last().unwrap(), - Event::StakingElection - ); + assert_eq!(*staking_events().last().unwrap(), Event::StakingElection); // The new era has been planned, forcing is changed from `ForceNew` to `NotForcing`. assert_eq!(ForceEra::::get(), Forcing::NotForcing); }) @@ -4032,11 +3988,14 @@ mod election_data_provider { // not keep track of the count. This test should panic as we verify the count is accurate // after every test using the `post_checks` in `mock`. 
Validators::::insert(987654321, ValidatorPrefs::default()); - Nominators::::insert(987654321, Nominations { - targets: vec![], - submitted_in: Default::default(), - suppressed: false, - }); + Nominators::::insert( + 987654321, + Nominations { + targets: vec![], + submitted_in: Default::default(), + suppressed: false, + }, + ); }) } @@ -4049,7 +4008,10 @@ mod election_data_provider { .build_and_execute(|| { // 500 is not enough for any role assert_ok!(Staking::bond(Origin::signed(3), 4, 500, RewardDestination::Controller)); - assert_noop!(Staking::nominate(Origin::signed(4), vec![1]), Error::::InsufficientBond); + assert_noop!( + Staking::nominate(Origin::signed(4), vec![1]), + Error::::InsufficientBond + ); assert_noop!( Staking::validate(Origin::signed(4), ValidatorPrefs::default()), Error::::InsufficientBond, @@ -4069,12 +4031,18 @@ mod election_data_provider { assert_ok!(Staking::validate(Origin::signed(4), ValidatorPrefs::default())); // Can't unbond anything as validator - assert_noop!(Staking::unbond(Origin::signed(4), 500), Error::::InsufficientBond); + assert_noop!( + Staking::unbond(Origin::signed(4), 500), + Error::::InsufficientBond + ); // Once they are a nominator, they can unbond 500 assert_ok!(Staking::nominate(Origin::signed(4), vec![1])); assert_ok!(Staking::unbond(Origin::signed(4), 500)); - assert_noop!(Staking::unbond(Origin::signed(4), 500), Error::::InsufficientBond); + assert_noop!( + Staking::unbond(Origin::signed(4), 500), + Error::::InsufficientBond + ); // Once they are chilled they can unbond everything assert_ok!(Staking::chill(Origin::signed(4))); @@ -4089,7 +4057,7 @@ mod election_data_provider { .min_nominator_bond(1_000) .min_validator_bond(1_500) .build_and_execute(|| { - for i in 0 .. 
15 { + for i in 0..15 { let a = 4 * i; let b = 4 * i + 1; let c = 4 * i + 2; @@ -4100,11 +4068,21 @@ mod election_data_provider { Balances::make_free_balance_be(&d, 100_000); // Nominator - assert_ok!(Staking::bond(Origin::signed(a), b, 1000, RewardDestination::Controller)); + assert_ok!(Staking::bond( + Origin::signed(a), + b, + 1000, + RewardDestination::Controller + )); assert_ok!(Staking::nominate(Origin::signed(b), vec![1])); // Validator - assert_ok!(Staking::bond(Origin::signed(c), d, 1500, RewardDestination::Controller)); + assert_ok!(Staking::bond( + Origin::signed(c), + d, + 1500, + RewardDestination::Controller + )); assert_ok!(Staking::validate(Origin::signed(d), ValidatorPrefs::default())); } @@ -4117,35 +4095,83 @@ mod election_data_provider { // `chill_other` to succeed from one user to another. // Can't chill these users - assert_noop!(Staking::chill_other(Origin::signed(1337), 1), Error::::CannotChillOther); - assert_noop!(Staking::chill_other(Origin::signed(1337), 3), Error::::CannotChillOther); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 1), + Error::::CannotChillOther + ); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 3), + Error::::CannotChillOther + ); // Change the minimum bond... but no limits. 
- assert_ok!(Staking::set_staking_limits(Origin::root(), 1_500, 2_000, None, None, None)); + assert_ok!(Staking::set_staking_limits( + Origin::root(), + 1_500, + 2_000, + None, + None, + None + )); // Still can't chill these users - assert_noop!(Staking::chill_other(Origin::signed(1337), 1), Error::::CannotChillOther); - assert_noop!(Staking::chill_other(Origin::signed(1337), 3), Error::::CannotChillOther); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 1), + Error::::CannotChillOther + ); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 3), + Error::::CannotChillOther + ); // Add limits, but no threshold - assert_ok!(Staking::set_staking_limits(Origin::root(), 1_500, 2_000, Some(10), Some(10), None)); + assert_ok!(Staking::set_staking_limits( + Origin::root(), + 1_500, + 2_000, + Some(10), + Some(10), + None + )); // Still can't chill these users - assert_noop!(Staking::chill_other(Origin::signed(1337), 1), Error::::CannotChillOther); - assert_noop!(Staking::chill_other(Origin::signed(1337), 3), Error::::CannotChillOther); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 1), + Error::::CannotChillOther + ); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 3), + Error::::CannotChillOther + ); // Add threshold, but no limits assert_ok!(Staking::set_staking_limits( - Origin::root(), 1_500, 2_000, None, None, Some(Percent::from_percent(0)) + Origin::root(), + 1_500, + 2_000, + None, + None, + Some(Percent::from_percent(0)) )); // Still can't chill these users - assert_noop!(Staking::chill_other(Origin::signed(1337), 1), Error::::CannotChillOther); - assert_noop!(Staking::chill_other(Origin::signed(1337), 3), Error::::CannotChillOther); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 1), + Error::::CannotChillOther + ); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 3), + Error::::CannotChillOther + ); // Add threshold and limits assert_ok!(Staking::set_staking_limits( - Origin::root(), 1_500, 
2_000, Some(10), Some(10), Some(Percent::from_percent(75)) + Origin::root(), + 1_500, + 2_000, + Some(10), + Some(10), + Some(Percent::from_percent(75)) )); // 16 people total because tests start with 1 active one @@ -4153,7 +4179,7 @@ mod election_data_provider { assert_eq!(CounterForValidators::::get(), 16); // Users can now be chilled down to 7 people, so we try to remove 9 of them (starting with 16) - for i in 6 .. 15 { + for i in 6..15 { let b = 4 * i + 1; let d = 4 * i + 3; assert_ok!(Staking::chill_other(Origin::signed(1337), b)); @@ -4161,8 +4187,14 @@ mod election_data_provider { } // Cant go lower. - assert_noop!(Staking::chill_other(Origin::signed(1337), 1), Error::::CannotChillOther); - assert_noop!(Staking::chill_other(Origin::signed(1337), 3), Error::::CannotChillOther); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 1), + Error::::CannotChillOther + ); + assert_noop!( + Staking::chill_other(Origin::signed(1337), 3), + Error::::CannotChillOther + ); }) } @@ -4177,23 +4209,37 @@ mod election_data_provider { // Change the maximums let max = 10; assert_ok!(Staking::set_staking_limits( - Origin::root(), 10, 10, Some(max), Some(max), Some(Percent::from_percent(0)) + Origin::root(), + 10, + 10, + Some(max), + Some(max), + Some(Percent::from_percent(0)) )); // can create `max - validator_count` validators let mut some_existing_validator = AccountId::default(); - for i in 0 .. 
max - validator_count { + for i in 0..max - validator_count { let (_, controller) = testing_utils::create_stash_controller::( - i + 10_000_000, 100, RewardDestination::Controller, - ).unwrap(); - assert_ok!(Staking::validate(Origin::signed(controller), ValidatorPrefs::default())); + i + 10_000_000, + 100, + RewardDestination::Controller, + ) + .unwrap(); + assert_ok!(Staking::validate( + Origin::signed(controller), + ValidatorPrefs::default() + )); some_existing_validator = controller; } // but no more let (_, last_validator) = testing_utils::create_stash_controller::( - 1337, 100, RewardDestination::Controller, - ).unwrap(); + 1337, + 100, + RewardDestination::Controller, + ) + .unwrap(); assert_noop!( Staking::validate(Origin::signed(last_validator), ValidatorPrefs::default()), @@ -4202,29 +4248,44 @@ mod election_data_provider { // same with nominators let mut some_existing_nominator = AccountId::default(); - for i in 0 .. max - nominator_count { + for i in 0..max - nominator_count { let (_, controller) = testing_utils::create_stash_controller::( - i + 20_000_000, 100, RewardDestination::Controller, - ).unwrap(); + i + 20_000_000, + 100, + RewardDestination::Controller, + ) + .unwrap(); assert_ok!(Staking::nominate(Origin::signed(controller), vec![1])); some_existing_nominator = controller; } // one more is too many let (_, last_nominator) = testing_utils::create_stash_controller::( - 30_000_000, 100, RewardDestination::Controller, - ).unwrap(); - assert_noop!(Staking::nominate(Origin::signed(last_nominator), vec![1]), Error::::TooManyNominators); + 30_000_000, + 100, + RewardDestination::Controller, + ) + .unwrap(); + assert_noop!( + Staking::nominate(Origin::signed(last_nominator), vec![1]), + Error::::TooManyNominators + ); // Re-nominate works fine assert_ok!(Staking::nominate(Origin::signed(some_existing_nominator), vec![1])); // Re-validate works fine - assert_ok!(Staking::validate(Origin::signed(some_existing_validator), ValidatorPrefs::default())); + 
assert_ok!(Staking::validate( + Origin::signed(some_existing_validator), + ValidatorPrefs::default() + )); // No problem when we set to `None` again assert_ok!(Staking::set_staking_limits(Origin::root(), 10, 10, None, None, None)); assert_ok!(Staking::nominate(Origin::signed(last_nominator), vec![1])); - assert_ok!(Staking::validate(Origin::signed(last_validator), ValidatorPrefs::default())); + assert_ok!(Staking::validate( + Origin::signed(last_validator), + ValidatorPrefs::default() + )); }) } } diff --git a/frame/staking/src/weights.rs b/frame/staking/src/weights.rs index cf14e8b22362f..bcc7b8015003c 100644 --- a/frame/staking/src/weights.rs +++ b/frame/staking/src/weights.rs @@ -35,11 +35,13 @@ // --output=./frame/staking/src/weights.rs // --template=./.maintain/frame-weight-template.hbs - #![allow(unused_parens)] #![allow(unused_imports)] -use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; +use frame_support::{ + traits::Get, + weights::{constants::RocksDbWeight, Weight}, +}; use sp_std::marker::PhantomData; /// Weight functions needed for pallet_staking. 
@@ -47,11 +49,11 @@ pub trait WeightInfo { fn bond() -> Weight; fn bond_extra() -> Weight; fn unbond() -> Weight; - fn withdraw_unbonded_update(s: u32, ) -> Weight; - fn withdraw_unbonded_kill(s: u32, ) -> Weight; + fn withdraw_unbonded_update(s: u32) -> Weight; + fn withdraw_unbonded_kill(s: u32) -> Weight; fn validate() -> Weight; - fn kick(k: u32, ) -> Weight; - fn nominate(n: u32, ) -> Weight; + fn kick(k: u32) -> Weight; + fn nominate(n: u32) -> Weight; fn chill() -> Weight; fn set_payee() -> Weight; fn set_controller() -> Weight; @@ -59,17 +61,17 @@ pub trait WeightInfo { fn force_no_eras() -> Weight; fn force_new_era() -> Weight; fn force_new_era_always() -> Weight; - fn set_invulnerables(v: u32, ) -> Weight; - fn force_unstake(s: u32, ) -> Weight; - fn cancel_deferred_slash(s: u32, ) -> Weight; - fn payout_stakers_dead_controller(n: u32, ) -> Weight; - fn payout_stakers_alive_staked(n: u32, ) -> Weight; - fn rebond(l: u32, ) -> Weight; - fn set_history_depth(e: u32, ) -> Weight; - fn reap_stash(s: u32, ) -> Weight; - fn new_era(v: u32, n: u32, ) -> Weight; - fn get_npos_voters(v: u32, n: u32, s: u32, ) -> Weight; - fn get_npos_targets(v: u32, ) -> Weight; + fn set_invulnerables(v: u32) -> Weight; + fn force_unstake(s: u32) -> Weight; + fn cancel_deferred_slash(s: u32) -> Weight; + fn payout_stakers_dead_controller(n: u32) -> Weight; + fn payout_stakers_alive_staked(n: u32) -> Weight; + fn rebond(l: u32) -> Weight; + fn set_history_depth(e: u32) -> Weight; + fn reap_stash(s: u32) -> Weight; + fn new_era(v: u32, n: u32) -> Weight; + fn get_npos_voters(v: u32, n: u32, s: u32) -> Weight; + fn get_npos_targets(v: u32) -> Weight; fn set_staking_limits() -> Weight; fn chill_other() -> Weight; } @@ -92,14 +94,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(6 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn withdraw_unbonded_update(s: u32, ) -> Weight { + fn withdraw_unbonded_update(s: u32) -> Weight { 
(52_279_000 as Weight) // Standard Error: 0 .saturating_add((68_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(4 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn withdraw_unbonded_kill(s: u32, ) -> Weight { + fn withdraw_unbonded_kill(s: u32) -> Weight { (86_629_000 as Weight) // Standard Error: 1_000 .saturating_add((2_379_000 as Weight).saturating_mul(s as Weight)) @@ -112,7 +114,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads(6 as Weight)) .saturating_add(T::DbWeight::get().writes(2 as Weight)) } - fn kick(k: u32, ) -> Weight { + fn kick(k: u32) -> Weight { (36_986_000 as Weight) // Standard Error: 13_000 .saturating_add((16_574_000 as Weight).saturating_mul(k as Weight)) @@ -120,7 +122,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(k as Weight))) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(k as Weight))) } - fn nominate(n: u32, ) -> Weight { + fn nominate(n: u32) -> Weight { (43_228_000 as Weight) // Standard Error: 21_000 .saturating_add((5_119_000 as Weight).saturating_mul(n as Weight)) @@ -129,8 +131,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(2 as Weight)) } fn chill() -> Weight { - (17_800_000 as Weight) - .saturating_add(T::DbWeight::get().reads(3 as Weight)) + (17_800_000 as Weight).saturating_add(T::DbWeight::get().reads(3 as Weight)) } fn set_payee() -> Weight { (12_612_000 as Weight) @@ -143,28 +144,24 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(3 as Weight)) } fn set_validator_count() -> Weight { - (2_119_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (2_119_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight)) } fn force_no_eras() -> Weight { - (2_320_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (2_320_000 as 
Weight).saturating_add(T::DbWeight::get().writes(1 as Weight)) } fn force_new_era() -> Weight { - (2_269_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (2_269_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight)) } fn force_new_era_always() -> Weight { - (2_334_000 as Weight) - .saturating_add(T::DbWeight::get().writes(1 as Weight)) + (2_334_000 as Weight).saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn set_invulnerables(v: u32, ) -> Weight { + fn set_invulnerables(v: u32) -> Weight { (2_354_000 as Weight) // Standard Error: 0 .saturating_add((5_000 as Weight).saturating_mul(v as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn force_unstake(s: u32, ) -> Weight { + fn force_unstake(s: u32) -> Weight { (61_556_000 as Weight) // Standard Error: 1_000 .saturating_add((2_377_000 as Weight).saturating_mul(s as Weight)) @@ -172,14 +169,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(6 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn cancel_deferred_slash(s: u32, ) -> Weight { + fn cancel_deferred_slash(s: u32) -> Weight { (3_367_105_000 as Weight) // Standard Error: 222_000 .saturating_add((19_817_000 as Weight).saturating_mul(s as Weight)) .saturating_add(T::DbWeight::get().reads(1 as Weight)) .saturating_add(T::DbWeight::get().writes(1 as Weight)) } - fn payout_stakers_dead_controller(n: u32, ) -> Weight { + fn payout_stakers_dead_controller(n: u32) -> Weight { (47_229_000 as Weight) // Standard Error: 53_000 .saturating_add((48_365_000 as Weight).saturating_mul(n as Weight)) @@ -188,7 +185,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(2 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(n as Weight))) } - fn payout_stakers_alive_staked(n: u32, ) -> Weight { + fn payout_stakers_alive_staked(n: u32) -> Weight { (156_788_000 as 
Weight) // Standard Error: 20_000 .saturating_add((61_280_000 as Weight).saturating_mul(n as Weight)) @@ -197,14 +194,14 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(3 as Weight)) .saturating_add(T::DbWeight::get().writes((3 as Weight).saturating_mul(n as Weight))) } - fn rebond(l: u32, ) -> Weight { + fn rebond(l: u32) -> Weight { (47_815_000 as Weight) // Standard Error: 1_000 .saturating_add((65_000 as Weight).saturating_mul(l as Weight)) .saturating_add(T::DbWeight::get().reads(3 as Weight)) .saturating_add(T::DbWeight::get().writes(3 as Weight)) } - fn set_history_depth(e: u32, ) -> Weight { + fn set_history_depth(e: u32) -> Weight { (0 as Weight) // Standard Error: 74_000 .saturating_add((34_945_000 as Weight).saturating_mul(e as Weight)) @@ -212,7 +209,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(4 as Weight)) .saturating_add(T::DbWeight::get().writes((7 as Weight).saturating_mul(e as Weight))) } - fn reap_stash(s: u32, ) -> Weight { + fn reap_stash(s: u32) -> Weight { (73_483_000 as Weight) // Standard Error: 0 .saturating_add((2_384_000 as Weight).saturating_mul(s as Weight)) @@ -220,7 +217,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(8 as Weight)) .saturating_add(T::DbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn new_era(v: u32, n: u32, ) -> Weight { + fn new_era(v: u32, n: u32) -> Weight { (0 as Weight) // Standard Error: 846_000 .saturating_add((305_234_000 as Weight).saturating_mul(v as Weight)) @@ -232,7 +229,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().writes(4 as Weight)) .saturating_add(T::DbWeight::get().writes((3 as Weight).saturating_mul(v as Weight))) } - fn get_npos_voters(v: u32, n: u32, s: u32, ) -> Weight { + fn get_npos_voters(v: u32, n: u32, s: u32) -> Weight { (0 as Weight) // Standard Error: 99_000 .saturating_add((25_735_000 as Weight).saturating_mul(v as 
Weight)) @@ -245,7 +242,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads((3 as Weight).saturating_mul(n as Weight))) .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) } - fn get_npos_targets(v: u32, ) -> Weight { + fn get_npos_targets(v: u32) -> Weight { (0 as Weight) // Standard Error: 30_000 .saturating_add((11_065_000 as Weight).saturating_mul(v as Weight)) @@ -253,8 +250,7 @@ impl WeightInfo for SubstrateWeight { .saturating_add(T::DbWeight::get().reads((1 as Weight).saturating_mul(v as Weight))) } fn set_staking_limits() -> Weight { - (5_028_000 as Weight) - .saturating_add(T::DbWeight::get().writes(4 as Weight)) + (5_028_000 as Weight).saturating_add(T::DbWeight::get().writes(4 as Weight)) } fn chill_other() -> Weight { (35_758_000 as Weight) @@ -280,14 +276,14 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(6 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn withdraw_unbonded_update(s: u32, ) -> Weight { + fn withdraw_unbonded_update(s: u32) -> Weight { (52_279_000 as Weight) // Standard Error: 0 .saturating_add((68_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(4 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn withdraw_unbonded_kill(s: u32, ) -> Weight { + fn withdraw_unbonded_kill(s: u32) -> Weight { (86_629_000 as Weight) // Standard Error: 1_000 .saturating_add((2_379_000 as Weight).saturating_mul(s as Weight)) @@ -300,7 +296,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads(6 as Weight)) .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } - fn kick(k: u32, ) -> Weight { + fn kick(k: u32) -> Weight { (36_986_000 as Weight) // Standard Error: 13_000 .saturating_add((16_574_000 as Weight).saturating_mul(k as Weight)) @@ -308,7 +304,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(k as 
Weight))) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(k as Weight))) } - fn nominate(n: u32, ) -> Weight { + fn nominate(n: u32) -> Weight { (43_228_000 as Weight) // Standard Error: 21_000 .saturating_add((5_119_000 as Weight).saturating_mul(n as Weight)) @@ -317,8 +313,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(2 as Weight)) } fn chill() -> Weight { - (17_800_000 as Weight) - .saturating_add(RocksDbWeight::get().reads(3 as Weight)) + (17_800_000 as Weight).saturating_add(RocksDbWeight::get().reads(3 as Weight)) } fn set_payee() -> Weight { (12_612_000 as Weight) @@ -331,28 +326,24 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } fn set_validator_count() -> Weight { - (2_119_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (2_119_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight)) } fn force_no_eras() -> Weight { - (2_320_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (2_320_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight)) } fn force_new_era() -> Weight { - (2_269_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (2_269_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight)) } fn force_new_era_always() -> Weight { - (2_334_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(1 as Weight)) + (2_334_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn set_invulnerables(v: u32, ) -> Weight { + fn set_invulnerables(v: u32) -> Weight { (2_354_000 as Weight) // Standard Error: 0 .saturating_add((5_000 as Weight).saturating_mul(v as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn force_unstake(s: u32, ) -> Weight { + fn force_unstake(s: u32) -> Weight { (61_556_000 as Weight) // Standard Error: 1_000 .saturating_add((2_377_000 as Weight).saturating_mul(s as Weight)) @@ 
-360,14 +351,14 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(6 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn cancel_deferred_slash(s: u32, ) -> Weight { + fn cancel_deferred_slash(s: u32) -> Weight { (3_367_105_000 as Weight) // Standard Error: 222_000 .saturating_add((19_817_000 as Weight).saturating_mul(s as Weight)) .saturating_add(RocksDbWeight::get().reads(1 as Weight)) .saturating_add(RocksDbWeight::get().writes(1 as Weight)) } - fn payout_stakers_dead_controller(n: u32, ) -> Weight { + fn payout_stakers_dead_controller(n: u32) -> Weight { (47_229_000 as Weight) // Standard Error: 53_000 .saturating_add((48_365_000 as Weight).saturating_mul(n as Weight)) @@ -376,7 +367,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(2 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(n as Weight))) } - fn payout_stakers_alive_staked(n: u32, ) -> Weight { + fn payout_stakers_alive_staked(n: u32) -> Weight { (156_788_000 as Weight) // Standard Error: 20_000 .saturating_add((61_280_000 as Weight).saturating_mul(n as Weight)) @@ -385,14 +376,14 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(3 as Weight)) .saturating_add(RocksDbWeight::get().writes((3 as Weight).saturating_mul(n as Weight))) } - fn rebond(l: u32, ) -> Weight { + fn rebond(l: u32) -> Weight { (47_815_000 as Weight) // Standard Error: 1_000 .saturating_add((65_000 as Weight).saturating_mul(l as Weight)) .saturating_add(RocksDbWeight::get().reads(3 as Weight)) .saturating_add(RocksDbWeight::get().writes(3 as Weight)) } - fn set_history_depth(e: u32, ) -> Weight { + fn set_history_depth(e: u32) -> Weight { (0 as Weight) // Standard Error: 74_000 .saturating_add((34_945_000 as Weight).saturating_mul(e as Weight)) @@ -400,7 +391,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(4 as Weight)) 
.saturating_add(RocksDbWeight::get().writes((7 as Weight).saturating_mul(e as Weight))) } - fn reap_stash(s: u32, ) -> Weight { + fn reap_stash(s: u32) -> Weight { (73_483_000 as Weight) // Standard Error: 0 .saturating_add((2_384_000 as Weight).saturating_mul(s as Weight)) @@ -408,7 +399,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(8 as Weight)) .saturating_add(RocksDbWeight::get().writes((1 as Weight).saturating_mul(s as Weight))) } - fn new_era(v: u32, n: u32, ) -> Weight { + fn new_era(v: u32, n: u32) -> Weight { (0 as Weight) // Standard Error: 846_000 .saturating_add((305_234_000 as Weight).saturating_mul(v as Weight)) @@ -420,7 +411,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().writes(4 as Weight)) .saturating_add(RocksDbWeight::get().writes((3 as Weight).saturating_mul(v as Weight))) } - fn get_npos_voters(v: u32, n: u32, s: u32, ) -> Weight { + fn get_npos_voters(v: u32, n: u32, s: u32) -> Weight { (0 as Weight) // Standard Error: 99_000 .saturating_add((25_735_000 as Weight).saturating_mul(v as Weight)) @@ -433,7 +424,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads((3 as Weight).saturating_mul(n as Weight))) .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(s as Weight))) } - fn get_npos_targets(v: u32, ) -> Weight { + fn get_npos_targets(v: u32) -> Weight { (0 as Weight) // Standard Error: 30_000 .saturating_add((11_065_000 as Weight).saturating_mul(v as Weight)) @@ -441,8 +432,7 @@ impl WeightInfo for () { .saturating_add(RocksDbWeight::get().reads((1 as Weight).saturating_mul(v as Weight))) } fn set_staking_limits() -> Weight { - (5_028_000 as Weight) - .saturating_add(RocksDbWeight::get().writes(4 as Weight)) + (5_028_000 as Weight).saturating_add(RocksDbWeight::get().writes(4 as Weight)) } fn chill_other() -> Weight { (35_758_000 as Weight) diff --git a/frame/sudo/src/lib.rs b/frame/sudo/src/lib.rs index 215007108459e..0933f5f96beea 100644 --- 
a/frame/sudo/src/lib.rs +++ b/frame/sudo/src/lib.rs @@ -93,13 +93,10 @@ #![cfg_attr(not(feature = "std"), no_std)] +use sp_runtime::{traits::StaticLookup, DispatchResult}; use sp_std::prelude::*; -use sp_runtime::{DispatchResult, traits::StaticLookup}; -use frame_support::{ - weights::GetDispatchInfo, - traits::UnfilteredDispatchable, -}; +use frame_support::{traits::UnfilteredDispatchable, weights::GetDispatchInfo}; #[cfg(test)] mod mock; @@ -110,9 +107,9 @@ pub use pallet::*; #[frame_support::pallet] pub mod pallet { + use super::{DispatchResult, *}; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::{*, DispatchResult}; #[pallet::config] pub trait Config: frame_system::Config { @@ -120,7 +117,7 @@ pub mod pallet { type Event: From> + IsType<::Event>; /// A sudo-able call. - type Call: Parameter + UnfilteredDispatchable + GetDispatchInfo; + type Call: Parameter + UnfilteredDispatchable + GetDispatchInfo; } #[pallet::pallet] @@ -233,7 +230,7 @@ pub mod pallet { pub fn sudo_as( origin: OriginFor, who: ::Source, - call: Box<::Call> + call: Box<::Call>, ) -> DispatchResultWithPostInfo { // This is a public call, so we ensure that the origin is some signed account. let sender = ensure_signed(origin)?; @@ -281,9 +278,7 @@ pub mod pallet { #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { - Self { - key: Default::default(), - } + Self { key: Default::default() } } } diff --git a/frame/sudo/src/mock.rs b/frame/sudo/src/mock.rs index dd0c1396979e9..12831f730501a 100644 --- a/frame/sudo/src/mock.rs +++ b/frame/sudo/src/mock.rs @@ -18,20 +18,25 @@ //! 
Test utilities use super::*; -use frame_support::{parameter_types, traits::GenesisBuild}; -use sp_core::H256; -use sp_runtime::{traits::{BlakeTwo256, IdentityLookup}, testing::Header}; -use sp_io; use crate as sudo; -use frame_support::traits::Filter; +use frame_support::{ + parameter_types, + traits::{Filter, GenesisBuild}, +}; use frame_system::limits; +use sp_core::H256; +use sp_io; +use sp_runtime::{ + testing::Header, + traits::{BlakeTwo256, IdentityLookup}, +}; // Logger module to track execution. #[frame_support::pallet] pub mod logger { + use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; - use super::*; #[pallet::config] pub trait Config: frame_system::Config { @@ -48,7 +53,7 @@ pub mod logger { pub fn privileged_i32_log( origin: OriginFor, i: i32, - weight: Weight + weight: Weight, ) -> DispatchResultWithPostInfo { // Ensure that the `origin` is `Root`. ensure_root(origin)?; @@ -61,7 +66,7 @@ pub mod logger { pub fn non_privileged_log( origin: OriginFor, i: i32, - weight: Weight + weight: Weight, ) -> DispatchResultWithPostInfo { // Ensure that the `origin` is some signed account. let sender = ensure_signed(origin)?; @@ -81,22 +86,13 @@ pub mod logger { #[pallet::storage] #[pallet::getter(fn account_log)] - pub(super) type AccountLog = StorageValue< - _, - Vec, - ValueQuery - >; + pub(super) type AccountLog = StorageValue<_, Vec, ValueQuery>; #[pallet::storage] #[pallet::getter(fn i32_log)] - pub(super) type I32Log = StorageValue< - _, - Vec, - ValueQuery - >; + pub(super) type I32Log = StorageValue<_, Vec, ValueQuery>; } - type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic; type Block = frame_system::mocking::MockBlock; @@ -168,8 +164,8 @@ pub type LoggerCall = logger::Call; // Build test environment by setting the root `key` for the Genesis. 
pub fn new_test_ext(root_key: u64) -> sp_io::TestExternalities { let mut t = frame_system::GenesisConfig::default().build_storage::().unwrap(); - sudo::GenesisConfig::{ - key: root_key, - }.assimilate_storage(&mut t).unwrap(); + sudo::GenesisConfig:: { key: root_key } + .assimilate_storage(&mut t) + .unwrap(); t.into() } diff --git a/frame/sudo/src/tests.rs b/frame/sudo/src/tests.rs index aa859c547c039..9437f20832c44 100644 --- a/frame/sudo/src/tests.rs +++ b/frame/sudo/src/tests.rs @@ -18,17 +18,17 @@ //! Tests for the module. use super::*; +use frame_support::{assert_noop, assert_ok}; use mock::{ - Sudo, SudoCall, Origin, Call, Test, new_test_ext, LoggerCall, Logger, System, - Event as TestEvent, + new_test_ext, Call, Event as TestEvent, Logger, LoggerCall, Origin, Sudo, SudoCall, System, + Test, }; -use frame_support::{assert_ok, assert_noop}; #[test] fn test_setup_works() { // Environment setup, logger storage, and sudo `key` retrieval should work as expected. new_test_ext(1).execute_with(|| { - assert_eq!(Sudo::key(), 1u64); + assert_eq!(Sudo::key(), 1u64); assert!(Logger::i32_log().is_empty()); assert!(Logger::account_log().is_empty()); }); @@ -105,7 +105,7 @@ fn set_key_basics() { new_test_ext(1).execute_with(|| { // A root `key` can change the root `key` assert_ok!(Sudo::set_key(Origin::signed(1), 2)); - assert_eq!(Sudo::key(), 2u64); + assert_eq!(Sudo::key(), 2u64); }); new_test_ext(1).execute_with(|| { @@ -146,14 +146,14 @@ fn sudo_as_basics() { let call = Box::new(Call::Logger(LoggerCall::non_privileged_log(42, 1))); assert_ok!(Sudo::sudo_as(Origin::signed(1), 2, call)); assert_eq!(Logger::i32_log(), vec![42i32]); - // The correct user makes the call within `sudo_as`. + // The correct user makes the call within `sudo_as`. assert_eq!(Logger::account_log(), vec![2]); }); } #[test] fn sudo_as_emits_events_correctly() { - new_test_ext(1).execute_with(|| { + new_test_ext(1).execute_with(|| { // Set block number to 1 because events are not emitted on block 0. 
System::set_block_number(1); diff --git a/frame/support/procedural/src/clone_no_bound.rs b/frame/support/procedural/src/clone_no_bound.rs index 1911fdfd9fb29..747900fd023f6 100644 --- a/frame/support/procedural/src/clone_no_bound.rs +++ b/frame/support/procedural/src/clone_no_bound.rs @@ -30,56 +30,61 @@ pub fn derive_clone_no_bound(input: proc_macro::TokenStream) -> proc_macro::Toke let impl_ = match input.data { syn::Data::Struct(struct_) => match struct_.fields { syn::Fields::Named(named) => { - let fields = named.named.iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => + let fields = named.named.iter().map(|i| &i.ident).map(|i| { + quote::quote_spanned!(i.span() => #i: core::clone::Clone::clone(&self.#i) - )); + ) + }); quote::quote!( Self { #( #fields, )* } ) }, syn::Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => - core::clone::Clone::clone(&self.#i) - )); + let fields = + unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map(|i| { + quote::quote_spanned!(i.span() => + core::clone::Clone::clone(&self.#i) + ) + }); quote::quote!( Self ( #( #fields, )* ) ) }, syn::Fields::Unit => { - quote::quote!( Self ) - } + quote::quote!(Self) + }, }, syn::Data::Enum(enum_) => { - let variants = enum_.variants.iter() - .map(|variant| { - let ident = &variant.ident; - match &variant.fields { - syn::Fields::Named(named) => { - let captured = named.named.iter().map(|i| &i.ident); - let cloned = captured.clone() - .map(|i| quote::quote_spanned!(i.span() => - #i: core::clone::Clone::clone(#i) - )); - quote::quote!( - Self::#ident { #( ref #captured, )* } => Self::#ident { #( #cloned, )*} + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + match &variant.fields { + syn::Fields::Named(named) => { + let captured = named.named.iter().map(|i| &i.ident); + let cloned = captured.clone().map(|i| { + 
quote::quote_spanned!(i.span() => + #i: core::clone::Clone::clone(#i) ) - }, - syn::Fields::Unnamed(unnamed) => { - let captured = unnamed.unnamed.iter().enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let cloned = captured.clone() - .map(|i| quote::quote_spanned!(i.span() => - core::clone::Clone::clone(#i) - )); - quote::quote!( - Self::#ident ( #( ref #captured, )* ) => Self::#ident ( #( #cloned, )*) + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => Self::#ident { #( #cloned, )*} + ) + }, + syn::Fields::Unnamed(unnamed) => { + let captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let cloned = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + core::clone::Clone::clone(#i) ) - }, - syn::Fields::Unit => quote::quote!( Self::#ident => Self::#ident ), - } - }); + }); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => Self::#ident ( #( #cloned, )*) + ) + }, + syn::Fields::Unit => quote::quote!( Self::#ident => Self::#ident ), + } + }); quote::quote!(match self { #( #variants, )* @@ -99,5 +104,6 @@ pub fn derive_clone_no_bound(input: proc_macro::TokenStream) -> proc_macro::Toke } } }; - ).into() + ) + .into() } diff --git a/frame/support/procedural/src/construct_runtime/expand/call.rs b/frame/support/procedural/src/construct_runtime/expand/call.rs index b6d886abff332..2532a680e21be 100644 --- a/frame/support/procedural/src/construct_runtime/expand/call.rs +++ b/frame/support/procedural/src/construct_runtime/expand/call.rs @@ -30,16 +30,16 @@ pub fn expand_outer_dispatch( let mut query_call_part_macros = Vec::new(); let mut pallet_names = Vec::new(); - let pallets_with_call = pallet_decls - .iter() - .filter(|decl| decl.exists_part("Call")); + let pallets_with_call = pallet_decls.iter().filter(|decl| decl.exists_part("Call")); for pallet_declaration in pallets_with_call { let name = &pallet_declaration.name; let path = 
&pallet_declaration.path; let index = pallet_declaration.index; - variant_defs.extend(quote!(#[codec(index = #index)] #name( #scrate::dispatch::CallableCallFor<#name, #runtime> ),)); + variant_defs.extend( + quote!(#[codec(index = #index)] #name( #scrate::dispatch::CallableCallFor<#name, #runtime> ),), + ); variant_patterns.push(quote!(Call::#name(call))); pallet_names.push(name); query_call_part_macros.push(quote! { diff --git a/frame/support/procedural/src/construct_runtime/expand/config.rs b/frame/support/procedural/src/construct_runtime/expand/config.rs index 8dc2710b192d1..5e1b9d94700e6 100644 --- a/frame/support/procedural/src/construct_runtime/expand/config.rs +++ b/frame/support/procedural/src/construct_runtime/expand/config.rs @@ -18,7 +18,7 @@ use crate::construct_runtime::Pallet; use inflector::Inflector; use proc_macro2::TokenStream; -use quote::{ToTokens, format_ident, quote}; +use quote::{format_ident, quote, ToTokens}; use syn::Ident; pub fn expand_outer_config( @@ -37,15 +37,18 @@ pub fn expand_outer_config( let pallet_name = &decl.name; let path_str = path.into_token_stream().to_string(); let config = format_ident!("{}Config", pallet_name); - let field_name = &Ident::new( - &pallet_name.to_string().to_snake_case(), - decl.name.span(), - ); + let field_name = + &Ident::new(&pallet_name.to_string().to_snake_case(), decl.name.span()); let part_is_generic = !pallet_entry.generics.params.is_empty(); types.extend(expand_config_types(runtime, decl, &config, part_is_generic)); fields.extend(quote!(pub #field_name: #config,)); - build_storage_calls.extend(expand_config_build_storage_call(scrate, runtime, decl, &field_name)); + build_storage_calls.extend(expand_config_build_storage_call( + scrate, + runtime, + decl, + &field_name, + )); query_genesis_config_part_macros.push(quote! 
{ #path::__substrate_genesis_config_check::is_genesis_config_defined!(#pallet_name); #[cfg(feature = "std")] @@ -97,15 +100,15 @@ fn expand_config_types( let path = &decl.path; match (decl.instance.as_ref(), part_is_generic) { - (Some(inst), true) => quote!{ + (Some(inst), true) => quote! { #[cfg(any(feature = "std", test))] pub type #config = #path::GenesisConfig<#runtime, #path::#inst>; }, - (None, true) => quote!{ + (None, true) => quote! { #[cfg(any(feature = "std", test))] pub type #config = #path::GenesisConfig<#runtime>; }, - (_, false) => quote!{ + (_, false) => quote! { #[cfg(any(feature = "std", test))] pub type #config = #path::GenesisConfig; }, @@ -125,7 +128,7 @@ fn expand_config_build_storage_call( quote!(#path::__InherentHiddenInstance) }; - quote!{ + quote! { #scrate::sp_runtime::BuildModuleGenesisStorage:: <#runtime, #instance>::build_module_genesis_storage(&self.#field_name, storage)?; } diff --git a/frame/support/procedural/src/construct_runtime/expand/event.rs b/frame/support/procedural/src/construct_runtime/expand/event.rs index 48b53ae7012aa..798646bf27334 100644 --- a/frame/support/procedural/src/construct_runtime/expand/event.rs +++ b/frame/support/procedural/src/construct_runtime/expand/event.rs @@ -43,7 +43,7 @@ pub fn expand_outer_event( be constructed: pallet `{}` must have generic `Event`", pallet_name, ); - return Err(syn::Error::new(pallet_name.span(), msg)); + return Err(syn::Error::new(pallet_name.span(), msg)) } let part_is_generic = !generics.params.is_empty(); @@ -54,7 +54,13 @@ pub fn expand_outer_event( (None, false) => quote!(#path::Event), }; - event_variants.extend(expand_event_variant(runtime, pallet_decl, index, instance, generics)); + event_variants.extend(expand_event_variant( + runtime, + pallet_decl, + index, + instance, + generics, + )); event_conversions.extend(expand_event_conversion(scrate, pallet_decl, &pallet_event)); query_event_part_macros.push(quote! 
{ #path::__substrate_event_check::is_event_part_defined!(#pallet_name); @@ -95,16 +101,16 @@ fn expand_event_variant( match instance { Some(inst) if part_is_generic => { quote!(#[codec(index = #index)] #variant_name(#path::Event<#runtime, #path::#inst>),) - } + }, Some(inst) => { quote!(#[codec(index = #index)] #variant_name(#path::Event<#path::#inst>),) - } + }, None if part_is_generic => { quote!(#[codec(index = #index)] #variant_name(#path::Event<#runtime>),) - } + }, None => { quote!(#[codec(index = #index)] #variant_name(#path::Event),) - } + }, } } @@ -115,7 +121,7 @@ fn expand_event_conversion( ) -> TokenStream { let variant_name = &pallet.name; - quote!{ + quote! { impl From<#pallet_event> for Event { fn from(x: #pallet_event) -> Self { Event::#variant_name(x) diff --git a/frame/support/procedural/src/construct_runtime/expand/metadata.rs b/frame/support/procedural/src/construct_runtime/expand/metadata.rs index a4498521127fa..1a64059b04660 100644 --- a/frame/support/procedural/src/construct_runtime/expand/metadata.rs +++ b/frame/support/procedural/src/construct_runtime/expand/metadata.rs @@ -15,10 +15,10 @@ // See the License for the specific language governing permissions and // limitations under the License -use proc_macro2::TokenStream; use crate::construct_runtime::Pallet; -use syn::{Ident, TypePath}; +use proc_macro2::TokenStream; use quote::quote; +use syn::{Ident, TypePath}; pub fn expand_runtime_metadata( runtime: &Ident, @@ -48,7 +48,7 @@ pub fn expand_runtime_metadata( let constants = expand_pallet_metadata_constants(runtime, decl); let errors = expand_pallet_metadata_errors(runtime, decl); - quote!{ + quote! { #scrate::metadata::PalletMetadata { name: stringify!(#name), index: #index, @@ -62,7 +62,7 @@ pub fn expand_runtime_metadata( }) .collect::>(); - quote!{ + quote! 
{ impl #runtime { pub fn metadata() -> #scrate::metadata::RuntimeMetadataPrefixed { #scrate::metadata::RuntimeMetadataLastVersion::new( @@ -98,7 +98,7 @@ fn expand_pallet_metadata_storage( let instance = decl.instance.as_ref().into_iter(); let path = &decl.path; - quote!{ + quote! { Some(#path::Pallet::<#runtime #(, #path::#instance)*>::storage_metadata()) } } else { @@ -115,7 +115,7 @@ fn expand_pallet_metadata_calls( let instance = decl.instance.as_ref().into_iter(); let path = &decl.path; - quote!{ + quote! { Some(#path::Pallet::<#runtime #(, #path::#instance)*>::call_functions()) } } else { @@ -131,8 +131,12 @@ fn expand_pallet_metadata_events( ) -> TokenStream { if filtered_names.contains(&"Event") { let path = &decl.path; - let part_is_generic = - !decl.find_part("Event").expect("Event part exists; qed").generics.params.is_empty(); + let part_is_generic = !decl + .find_part("Event") + .expect("Event part exists; qed") + .generics + .params + .is_empty(); let pallet_event = match (decl.instance.as_ref(), part_is_generic) { (Some(inst), true) => quote!(#path::Event::<#runtime, #path::#inst>), (Some(inst), false) => quote!(#path::Event::<#path::#inst>), @@ -140,7 +144,7 @@ fn expand_pallet_metadata_events( (None, false) => quote!(#path::Event), }; - quote!{ + quote! { Some( #scrate::metadata::PalletEventMetadata { ty: #scrate::scale_info::meta_type::<#pallet_event>() @@ -152,26 +156,20 @@ fn expand_pallet_metadata_events( } } -fn expand_pallet_metadata_constants( - runtime: &Ident, - decl: &Pallet, -) -> TokenStream { +fn expand_pallet_metadata_constants(runtime: &Ident, decl: &Pallet) -> TokenStream { let path = &decl.path; let instance = decl.instance.as_ref().into_iter(); - quote!{ + quote! 
{ #path::Pallet::<#runtime #(, #path::#instance)*>::pallet_constants_metadata() } } -fn expand_pallet_metadata_errors( - runtime: &Ident, - decl: &Pallet, -) -> TokenStream { +fn expand_pallet_metadata_errors(runtime: &Ident, decl: &Pallet) -> TokenStream { let path = &decl.path; let instance = decl.instance.as_ref().into_iter(); - quote!{ + quote! { #path::Pallet::<#runtime #(, #path::#instance)*>::error_metadata() } } diff --git a/frame/support/procedural/src/construct_runtime/expand/origin.rs b/frame/support/procedural/src/construct_runtime/expand/origin.rs index 080f8d4c9dfd2..1c06c923f7f6c 100644 --- a/frame/support/procedural/src/construct_runtime/expand/origin.rs +++ b/frame/support/procedural/src/construct_runtime/expand/origin.rs @@ -18,7 +18,7 @@ use crate::construct_runtime::{Pallet, SYSTEM_PALLET_NAME}; use proc_macro2::TokenStream; use quote::quote; -use syn::{token, Ident, Generics}; +use syn::{token, Generics, Ident}; pub fn expand_outer_origin( runtime: &Ident, @@ -26,13 +26,14 @@ pub fn expand_outer_origin( pallets_token: token::Brace, scrate: &TokenStream, ) -> syn::Result { - let system_pallet = pallets.iter() - .find(|decl| decl.name == SYSTEM_PALLET_NAME) - .ok_or_else(|| syn::Error::new( - pallets_token.span, - "`System` pallet declaration is missing. \ + let system_pallet = + pallets.iter().find(|decl| decl.name == SYSTEM_PALLET_NAME).ok_or_else(|| { + syn::Error::new( + pallets_token.span, + "`System` pallet declaration is missing. 
\ Please add this line: `System: frame_system::{Pallet, Call, Storage, Config, Event},`", - ))?; + ) + })?; let mut caller_variants = TokenStream::new(); let mut pallet_conversions = TokenStream::new(); @@ -52,15 +53,23 @@ pub fn expand_outer_origin( be constructed: pallet `{}` must have generic `Origin`", name ); - return Err(syn::Error::new(name.span(), msg)); + return Err(syn::Error::new(name.span(), msg)) } - caller_variants.extend( - expand_origin_caller_variant(runtime, pallet_decl, index, instance, generics), - ); - pallet_conversions.extend( - expand_origin_pallet_conversions(scrate, runtime, pallet_decl, instance, generics), - ); + caller_variants.extend(expand_origin_caller_variant( + runtime, + pallet_decl, + index, + instance, + generics, + )); + pallet_conversions.extend(expand_origin_pallet_conversions( + scrate, + runtime, + pallet_decl, + instance, + generics, + )); query_origin_part_macros.push(quote! { #path::__substrate_origin_check::is_origin_part_defined!(#name); }); @@ -273,16 +282,16 @@ fn expand_origin_caller_variant( match instance { Some(inst) if part_is_generic => { quote!(#[codec(index = #index)] #variant_name(#path::Origin<#runtime, #path::#inst>),) - } + }, Some(inst) => { quote!(#[codec(index = #index)] #variant_name(#path::Origin<#path::#inst>),) - } + }, None if part_is_generic => { quote!(#[codec(index = #index)] #variant_name(#path::Origin<#runtime>),) - } + }, None => { quote!(#[codec(index = #index)] #variant_name(#path::Origin),) - } + }, } } @@ -304,7 +313,7 @@ fn expand_origin_pallet_conversions( None => quote!(#path::Origin), }; - quote!{ + quote! 
{ impl From<#pallet_origin> for OriginCaller { fn from(x: #pallet_origin) -> Self { OriginCaller::#variant_name(x) diff --git a/frame/support/procedural/src/construct_runtime/mod.rs b/frame/support/procedural/src/construct_runtime/mod.rs index 00b4cfa56e41d..8aacd8f0aa810 100644 --- a/frame/support/procedural/src/construct_runtime/mod.rs +++ b/frame/support/procedural/src/construct_runtime/mod.rs @@ -18,14 +18,15 @@ mod expand; mod parse; -use frame_support_procedural_tools::syn_ext as ext; -use frame_support_procedural_tools::{generate_crate_access, generate_hidden_includes}; +use frame_support_procedural_tools::{ + generate_crate_access, generate_hidden_includes, syn_ext as ext, +}; use parse::{PalletDeclaration, PalletPart, PalletPath, RuntimeDefinition, WhereSection}; use proc_macro::TokenStream; use proc_macro2::TokenStream as TokenStream2; use quote::quote; -use syn::{Ident, Result}; use std::collections::HashMap; +use syn::{Ident, Result}; /// The fixed name of the system pallet. 
const SYSTEM_PALLET_NAME: &str = "System"; @@ -65,48 +66,44 @@ fn complete_pallets(decl: impl Iterator) -> syn::Resul let mut last_index: Option = None; let mut names = HashMap::new(); - decl - .map(|pallet| { - let final_index = match pallet.index { - Some(i) => i, - None => last_index.map_or(Some(0), |i| i.checked_add(1)) - .ok_or_else(|| { - let msg = "Pallet index doesn't fit into u8, index is 256"; - syn::Error::new(pallet.name.span(), msg) - })?, - }; - - last_index = Some(final_index); - - if let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) { - let msg = format!( - "Pallet indices are conflicting: Both pallets {} and {} are at index {}", - used_pallet, - pallet.name, - final_index, - ); - let mut err = syn::Error::new(used_pallet.span(), &msg); - err.combine(syn::Error::new(pallet.name.span(), msg)); - return Err(err); - } + decl.map(|pallet| { + let final_index = match pallet.index { + Some(i) => i, + None => last_index.map_or(Some(0), |i| i.checked_add(1)).ok_or_else(|| { + let msg = "Pallet index doesn't fit into u8, index is 256"; + syn::Error::new(pallet.name.span(), msg) + })?, + }; - if let Some(used_pallet) = names.insert(pallet.name.clone(), pallet.name.span()) { - let msg = "Two pallets with the same name!"; + last_index = Some(final_index); - let mut err = syn::Error::new(used_pallet, &msg); - err.combine(syn::Error::new(pallet.name.span(), &msg)); - return Err(err); - } + if let Some(used_pallet) = indices.insert(final_index, pallet.name.clone()) { + let msg = format!( + "Pallet indices are conflicting: Both pallets {} and {} are at index {}", + used_pallet, pallet.name, final_index, + ); + let mut err = syn::Error::new(used_pallet.span(), &msg); + err.combine(syn::Error::new(pallet.name.span(), msg)); + return Err(err) + } - Ok(Pallet { - name: pallet.name, - index: final_index, - path: pallet.path, - instance: pallet.instance, - pallet_parts: pallet.pallet_parts, - }) + if let Some(used_pallet) = 
names.insert(pallet.name.clone(), pallet.name.span()) { + let msg = "Two pallets with the same name!"; + + let mut err = syn::Error::new(used_pallet, &msg); + err.combine(syn::Error::new(pallet.name.span(), &msg)); + return Err(err) + } + + Ok(Pallet { + name: pallet.name, + index: final_index, + path: pallet.path, + instance: pallet.instance, + pallet_parts: pallet.pallet_parts, }) - .collect() + }) + .collect() } pub fn construct_runtime(input: TokenStream) -> TokenStream { @@ -119,17 +116,9 @@ pub fn construct_runtime(input: TokenStream) -> TokenStream { fn construct_runtime_parsed(definition: RuntimeDefinition) -> Result { let RuntimeDefinition { name, - where_section: WhereSection { - block, - node_block, - unchecked_extrinsic, - .. - }, + where_section: WhereSection { block, node_block, unchecked_extrinsic, .. }, pallets: - ext::Braces { - content: ext::Punctuated { inner: pallets, .. }, - token: pallets_token, - }, + ext::Braces { content: ext::Punctuated { inner: pallets, .. }, token: pallets_token }, .. } = definition; @@ -148,13 +137,8 @@ fn construct_runtime_parsed(definition: RuntimeDefinition) -> Result( let type_name = &pallet_declaration.name; let pallet = &pallet_declaration.path; let mut generics = vec![quote!(#runtime)]; - generics.extend( - pallet_declaration - .instance - .iter() - .map(|name| quote!(#pallet::#name)), - ); + generics.extend(pallet_declaration.instance.iter().map(|name| quote!(#pallet::#name))); let type_decl = quote!( pub type #type_name = #pallet::Pallet <#(#generics),*>; ); @@ -227,11 +206,13 @@ fn decl_all_pallets<'a>( } // Make nested tuple structure like (((Babe, Consensus), Grandpa), ...) // But ignore the system pallet. 
- let all_pallets = names.iter() + let all_pallets = names + .iter() .filter(|n| **n != SYSTEM_PALLET_NAME) .fold(TokenStream2::default(), |combined, name| quote!((#name, #combined))); - let all_pallets_with_system = names.iter() + let all_pallets_with_system = names + .iter() .fold(TokenStream2::default(), |combined, name| quote!((#name, #combined))); quote!( @@ -261,8 +242,7 @@ fn decl_pallet_runtime_setup( let names = pallet_declarations.iter().map(|d| &d.name); let names2 = pallet_declarations.iter().map(|d| &d.name); let name_strings = pallet_declarations.iter().map(|d| d.name.to_string()); - let indices = pallet_declarations.iter() - .map(|pallet| pallet.index as usize); + let indices = pallet_declarations.iter().map(|pallet| pallet.index as usize); quote!( /// Provides an implementation of `PalletInfo` to provide information diff --git a/frame/support/procedural/src/construct_runtime/parse.rs b/frame/support/procedural/src/construct_runtime/parse.rs index 2d242749cfe01..6f2fd82e73f4b 100644 --- a/frame/support/procedural/src/construct_runtime/parse.rs +++ b/frame/support/procedural/src/construct_runtime/parse.rs @@ -77,9 +77,9 @@ impl Parse for WhereSection { definitions.push(definition); if !input.peek(Token![,]) { if !input.peek(token::Brace) { - return Err(input.error("Expected `,` or `{`")); + return Err(input.error("Expected `,` or `{`")) } - break; + break } input.parse::()?; } @@ -87,23 +87,14 @@ impl Parse for WhereSection { let node_block = remove_kind(input, WhereKind::NodeBlock, &mut definitions)?.value; let unchecked_extrinsic = remove_kind(input, WhereKind::UncheckedExtrinsic, &mut definitions)?.value; - if let Some(WhereDefinition { - ref kind_span, - ref kind, - .. - }) = definitions.first() - { + if let Some(WhereDefinition { ref kind_span, ref kind, .. }) = definitions.first() { let msg = format!( "`{:?}` was declared above. 
Please use exactly one declaration for `{:?}`.", kind, kind ); - return Err(Error::new(*kind_span, msg)); + return Err(Error::new(*kind_span, msg)) } - Ok(Self { - block, - node_block, - unchecked_extrinsic, - }) + Ok(Self { block, node_block, unchecked_extrinsic }) } } @@ -127,17 +118,11 @@ impl Parse for WhereDefinition { let (kind_span, kind) = if lookahead.peek(keyword::Block) { (input.parse::()?.span(), WhereKind::Block) } else if lookahead.peek(keyword::NodeBlock) { - ( - input.parse::()?.span(), - WhereKind::NodeBlock, - ) + (input.parse::()?.span(), WhereKind::NodeBlock) } else if lookahead.peek(keyword::UncheckedExtrinsic) { - ( - input.parse::()?.span(), - WhereKind::UncheckedExtrinsic, - ) + (input.parse::()?.span(), WhereKind::UncheckedExtrinsic) } else { - return Err(lookahead.error()); + return Err(lookahead.error()) }; Ok(Self { @@ -187,13 +172,7 @@ impl Parse for PalletDeclaration { None }; - let parsed = Self { - name, - path, - instance, - pallet_parts, - index, - }; + let parsed = Self { name, path, instance, pallet_parts, index }; Ok(parsed) } @@ -214,17 +193,17 @@ impl Parse for PalletPath { let mut lookahead = input.lookahead1(); let mut segments = Punctuated::new(); - if lookahead.peek(Token![crate]) - || lookahead.peek(Token![self]) - || lookahead.peek(Token![super]) - || lookahead.peek(Ident) + if lookahead.peek(Token![crate]) || + lookahead.peek(Token![self]) || + lookahead.peek(Token![super]) || + lookahead.peek(Ident) { let ident = input.call(Ident::parse_any)?; segments.push(PathSegment { ident, arguments: PathArguments::None }); let _: Token![::] = input.parse()?; lookahead = input.lookahead1(); } else { - return Err(lookahead.error()); + return Err(lookahead.error()) } while lookahead.peek(Ident) { @@ -235,15 +214,10 @@ impl Parse for PalletPath { } if !lookahead.peek(token::Brace) && !lookahead.peek(Token![<]) { - return Err(lookahead.error()); + return Err(lookahead.error()) } - Ok(Self { - inner: Path { - leading_colon: None, - 
segments, - } - }) + Ok(Self { inner: Path { leading_colon: None, segments } }) } } @@ -257,7 +231,7 @@ impl quote::ToTokens for PalletPath { /// /// `{ Call, Event }` fn parse_pallet_parts(input: ParseStream) -> Result> { - let pallet_parts :ext::Braces> = input.parse()?; + let pallet_parts: ext::Braces> = input.parse()?; let mut resolved = HashSet::new(); for part in pallet_parts.content.inner.iter() { @@ -266,7 +240,7 @@ fn parse_pallet_parts(input: ParseStream) -> Result> { "`{}` was already declared before. Please remove the duplicate declaration", part.name(), ); - return Err(Error::new(part.keyword.span(), msg)); + return Err(Error::new(part.keyword.span(), msg)) } } @@ -371,13 +345,10 @@ impl Parse for PalletPart { keyword.name(), valid_generics, ); - return Err(syn::Error::new(keyword.span(), msg)); + return Err(syn::Error::new(keyword.span(), msg)) } - Ok(Self { - keyword, - generics, - }) + Ok(Self { keyword, generics }) } } diff --git a/frame/support/procedural/src/debug_no_bound.rs b/frame/support/procedural/src/debug_no_bound.rs index 7a5509cf986dc..acfd8d0cabc8a 100644 --- a/frame/support/procedural/src/debug_no_bound.rs +++ b/frame/support/procedural/src/debug_no_bound.rs @@ -30,9 +30,10 @@ pub fn derive_debug_no_bound(input: proc_macro::TokenStream) -> proc_macro::Toke let impl_ = match input.data { syn::Data::Struct(struct_) => match struct_.fields { syn::Fields::Named(named) => { - let fields = named.named.iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => .field(stringify!(#i), &self.#i) )); + let fields = + named.named.iter().map(|i| &i.ident).map( + |i| quote::quote_spanned!(i.span() => .field(stringify!(#i), &self.#i) ), + ); quote::quote!( fmt.debug_struct(stringify!(#input_ident)) @@ -41,7 +42,10 @@ pub fn derive_debug_no_bound(input: proc_macro::TokenStream) -> proc_macro::Toke ) }, syn::Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().enumerate() + let fields = unnamed + .unnamed + .iter() + 
.enumerate() .map(|(i, _)| syn::Index::from(i)) .map(|i| quote::quote_spanned!(i.span() => .field(&self.#i) )); @@ -51,46 +55,50 @@ pub fn derive_debug_no_bound(input: proc_macro::TokenStream) -> proc_macro::Toke .finish() ) }, - syn::Fields::Unit => quote::quote!( fmt.write_str(stringify!(#input_ident)) ), + syn::Fields::Unit => quote::quote!(fmt.write_str(stringify!(#input_ident))), }, syn::Data::Enum(enum_) => { - let variants = enum_.variants.iter() - .map(|variant| { - let ident = &variant.ident; - let full_variant_str = format!("{}::{}", input_ident, ident); - match &variant.fields { - syn::Fields::Named(named) => { - let captured = named.named.iter().map(|i| &i.ident); - let debugged = captured.clone() - .map(|i| quote::quote_spanned!(i.span() => - .field(stringify!(#i), &#i) - )); - quote::quote!( - Self::#ident { #( ref #captured, )* } => { - fmt.debug_struct(#full_variant_str) - #( #debugged )* - .finish() - } + let variants = enum_.variants.iter().map(|variant| { + let ident = &variant.ident; + let full_variant_str = format!("{}::{}", input_ident, ident); + match &variant.fields { + syn::Fields::Named(named) => { + let captured = named.named.iter().map(|i| &i.ident); + let debugged = captured.clone().map(|i| { + quote::quote_spanned!(i.span() => + .field(stringify!(#i), &#i) ) - }, - syn::Fields::Unnamed(unnamed) => { - let captured = unnamed.unnamed.iter().enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let debugged = captured.clone() - .map(|i| quote::quote_spanned!(i.span() => .field(&#i))); - quote::quote!( - Self::#ident ( #( ref #captured, )* ) => { - fmt.debug_tuple(#full_variant_str) - #( #debugged )* - .finish() - } - ) - }, - syn::Fields::Unit => quote::quote!( - Self::#ident => fmt.write_str(#full_variant_str) - ), - } - }); + }); + quote::quote!( + Self::#ident { #( ref #captured, )* } => { + fmt.debug_struct(#full_variant_str) + #( #debugged )* + .finish() + } + ) + }, + syn::Fields::Unnamed(unnamed) => { + let 
captured = unnamed + .unnamed + .iter() + .enumerate() + .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); + let debugged = captured + .clone() + .map(|i| quote::quote_spanned!(i.span() => .field(&#i))); + quote::quote!( + Self::#ident ( #( ref #captured, )* ) => { + fmt.debug_tuple(#full_variant_str) + #( #debugged )* + .finish() + } + ) + }, + syn::Fields::Unit => quote::quote!( + Self::#ident => fmt.write_str(#full_variant_str) + ), + } + }); quote::quote!(match *self { #( #variants, )* @@ -110,5 +118,6 @@ pub fn derive_debug_no_bound(input: proc_macro::TokenStream) -> proc_macro::Toke } } }; - ).into() + ) + .into() } diff --git a/frame/support/procedural/src/default_no_bound.rs b/frame/support/procedural/src/default_no_bound.rs index ed35e057f0377..38d6e19b1732f 100644 --- a/frame/support/procedural/src/default_no_bound.rs +++ b/frame/support/procedural/src/default_no_bound.rs @@ -30,56 +30,60 @@ pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::To let impl_ = match input.data { syn::Data::Struct(struct_) => match struct_.fields { syn::Fields::Named(named) => { - let fields = named.named.iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => + let fields = named.named.iter().map(|i| &i.ident).map(|i| { + quote::quote_spanned!(i.span() => #i: core::default::Default::default() - )); + ) + }); quote::quote!( Self { #( #fields, )* } ) }, syn::Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().enumerate() - .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => - core::default::Default::default() - )); + let fields = + unnamed.unnamed.iter().enumerate().map(|(i, _)| syn::Index::from(i)).map(|i| { + quote::quote_spanned!(i.span() => + core::default::Default::default() + ) + }); quote::quote!( Self ( #( #fields, )* ) ) }, syn::Fields::Unit => { - quote::quote!( Self ) - } + quote::quote!(Self) + }, }, - syn::Data::Enum(enum_) => { + syn::Data::Enum(enum_) => if let 
Some(first_variant) = enum_.variants.first() { let variant_ident = &first_variant.ident; match &first_variant.fields { syn::Fields::Named(named) => { - let fields = named.named.iter() - .map(|i| &i.ident) - .map(|i| quote::quote_spanned!(i.span() => + let fields = named.named.iter().map(|i| &i.ident).map(|i| { + quote::quote_spanned!(i.span() => #i: core::default::Default::default() - )); + ) + }); quote::quote!( #name :: #ty_generics :: #variant_ident { #( #fields, )* } ) }, syn::Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().enumerate() + let fields = unnamed + .unnamed + .iter() + .enumerate() .map(|(i, _)| syn::Index::from(i)) - .map(|i| quote::quote_spanned!(i.span() => - core::default::Default::default() - )); + .map(|i| { + quote::quote_spanned!(i.span() => + core::default::Default::default() + ) + }); quote::quote!( #name :: #ty_generics :: #variant_ident ( #( #fields, )* ) ) }, syn::Fields::Unit => quote::quote!( #name :: #ty_generics :: #variant_ident ), } } else { - quote::quote!( Self ) - } - - }, + quote::quote!(Self) + }, syn::Data::Union(_) => { let msg = "Union type not supported by `derive(CloneNoBound)`"; return syn::Error::new(input.span(), msg).to_compile_error().into() @@ -94,5 +98,6 @@ pub fn derive_default_no_bound(input: proc_macro::TokenStream) -> proc_macro::To } } }; - ).into() + ) + .into() } diff --git a/frame/support/procedural/src/dummy_part_checker.rs b/frame/support/procedural/src/dummy_part_checker.rs index f1649aebe970f..792b17a8f7758 100644 --- a/frame/support/procedural/src/dummy_part_checker.rs +++ b/frame/support/procedural/src/dummy_part_checker.rs @@ -1,18 +1,17 @@ -use proc_macro::TokenStream; use crate::COUNTER; +use proc_macro::TokenStream; pub fn generate_dummy_part_checker(input: TokenStream) -> TokenStream { if !input.is_empty() { return syn::Error::new(proc_macro2::Span::call_site(), "No arguments expected") - .to_compile_error().into() + .to_compile_error() + .into() } let count = 
COUNTER.with(|counter| counter.borrow_mut().inc()); - let no_op_macro_ident = syn::Ident::new( - &format!("__dummy_part_checker_{}", count), - proc_macro2::Span::call_site(), - ); + let no_op_macro_ident = + syn::Ident::new(&format!("__dummy_part_checker_{}", count), proc_macro2::Span::call_site()); quote::quote!( #[macro_export] @@ -58,5 +57,6 @@ pub fn generate_dummy_part_checker(input: TokenStream) -> TokenStream { #[doc(hidden)] pub use #no_op_macro_ident as is_origin_part_defined; } - ).into() + ) + .into() } diff --git a/frame/support/procedural/src/key_prefix.rs b/frame/support/procedural/src/key_prefix.rs index a2c98b0121ad4..3f424e8b8b8dd 100644 --- a/frame/support/procedural/src/key_prefix.rs +++ b/frame/support/procedural/src/key_prefix.rs @@ -16,14 +16,14 @@ // limitations under the License. use proc_macro2::{Span, TokenStream}; -use quote::{ToTokens, format_ident, quote}; +use quote::{format_ident, quote, ToTokens}; use syn::{Ident, Result}; const MAX_IDENTS: usize = 18; pub fn impl_key_prefix_for_tuples(input: proc_macro::TokenStream) -> Result { if !input.is_empty() { - return Err(syn::Error::new(Span::call_site(), "No arguments expected")); + return Err(syn::Error::new(Span::call_site(), "No arguments expected")) } let mut all_trait_impls = TokenStream::new(); @@ -36,13 +36,17 @@ pub fn impl_key_prefix_for_tuples(input: proc_macro::TokenStream) -> Result>(); - let kargs = prefixes.iter().map(|ident| format_ident!("KArg{}", ident)).collect::>(); + let hashers = current_tuple + .iter() + .map(|ident| format_ident!("Hasher{}", ident)) + .collect::>(); + let kargs = + prefixes.iter().map(|ident| format_ident!("KArg{}", ident)).collect::>(); let partial_keygen = generate_keygen(prefixes); let suffix_keygen = generate_keygen(suffixes); let suffix_tuple = generate_tuple(suffixes); - let trait_impls = quote!{ + let trait_impls = quote! 
{ impl< #(#current_tuple: FullCodec + StaticTypeInfo,)* #(#hashers: StorageHasher,)* diff --git a/frame/support/procedural/src/lib.rs b/frame/support/procedural/src/lib.rs index 9ac648f5e795a..c79b82e7b1a1c 100644 --- a/frame/support/procedural/src/lib.rs +++ b/frame/support/procedural/src/lib.rs @@ -19,21 +19,21 @@ #![recursion_limit = "512"] -mod storage; +mod clone_no_bound; mod construct_runtime; -mod pallet; -mod pallet_version; -mod transactional; mod debug_no_bound; -mod clone_no_bound; -mod partial_eq_no_bound; mod default_no_bound; -mod key_prefix; mod dummy_part_checker; +mod key_prefix; +mod pallet; +mod pallet_version; +mod partial_eq_no_bound; +mod storage; +mod transactional; -pub(crate) use storage::INHERENT_INSTANCE_NAME; use proc_macro::TokenStream; use std::cell::RefCell; +pub(crate) use storage::INHERENT_INSTANCE_NAME; thread_local! { /// A global counter, can be used to generate a relatively unique identifier. @@ -413,7 +413,8 @@ pub fn derive_runtime_debug_no_bound(input: TokenStream) -> TokenStream { } } }; - ).into() + ) + .into() } #[cfg(feature = "std")] @@ -444,7 +445,8 @@ pub fn derive_eq_no_bound(input: TokenStream) -> TokenStream { const _: () = { impl #impl_generics core::cmp::Eq for #name #ty_generics #where_clause {} }; - ).into() + ) + .into() } /// derive `Default` but do no bound any generic. Docs are at `frame_support::DefaultNoBound`. 
@@ -455,12 +457,15 @@ pub fn derive_default_no_bound(input: TokenStream) -> TokenStream { #[proc_macro_attribute] pub fn require_transactional(attr: TokenStream, input: TokenStream) -> TokenStream { - transactional::require_transactional(attr, input).unwrap_or_else(|e| e.to_compile_error().into()) + transactional::require_transactional(attr, input) + .unwrap_or_else(|e| e.to_compile_error().into()) } #[proc_macro] pub fn crate_to_pallet_version(input: TokenStream) -> TokenStream { - pallet_version::crate_to_pallet_version(input).unwrap_or_else(|e| e.to_compile_error()).into() + pallet_version::crate_to_pallet_version(input) + .unwrap_or_else(|e| e.to_compile_error()) + .into() } /// The number of module instances supported by the runtime, starting at index 1, @@ -471,7 +476,9 @@ pub(crate) const NUMBER_OF_INSTANCE: u8 = 16; /// It implements the trait `HasKeyPrefix` and `HasReversibleKeyPrefix` for tuple of `Key`. #[proc_macro] pub fn impl_key_prefix_for_tuples(input: TokenStream) -> TokenStream { - key_prefix::impl_key_prefix_for_tuples(input).unwrap_or_else(syn::Error::into_compile_error).into() + key_prefix::impl_key_prefix_for_tuples(input) + .unwrap_or_else(syn::Error::into_compile_error) + .into() } /// Internal macro use by frame_support to generate dummy part checker for old pallet declaration diff --git a/frame/support/procedural/src/pallet/expand/call.rs b/frame/support/procedural/src/pallet/expand/call.rs index 758703afb2fae..01da27266b020 100644 --- a/frame/support/procedural/src/pallet/expand/call.rs +++ b/frame/support/procedural/src/pallet/expand/call.rs @@ -15,8 +15,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::pallet::Def; -use crate::COUNTER; +use crate::{pallet::Def, COUNTER}; use syn::spanned::Spanned; /// * Generate enum call and implement various trait on it. 
@@ -30,7 +29,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { let docs = call.docs.clone(); (span, where_clause, methods, docs) - } + }, None => (def.item.span(), None, Vec::new(), Vec::new()), }; let frame_support = &def.frame_support; @@ -42,11 +41,13 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { let pallet_ident = &def.pallet_struct.pallet; let fn_name = methods.iter().map(|method| &method.name).collect::>(); - let new_call_variant_fn_name = fn_name.iter() + let new_call_variant_fn_name = fn_name + .iter() .map(|fn_name| quote::format_ident!("new_call_variant_{}", fn_name)) .collect::>(); - let new_call_variant_doc = fn_name.iter() + let new_call_variant_doc = fn_name + .iter() .map(|fn_name| format!("Create a call with the variant `{}`.", fn_name)) .collect::>(); @@ -54,16 +55,20 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { let fn_doc = methods.iter().map(|method| &method.docs).collect::>(); - let args_name = methods.iter() + let args_name = methods + .iter() .map(|method| method.args.iter().map(|(_, name, _)| name.clone()).collect::>()) .collect::>(); - let args_type = methods.iter() + let args_type = methods + .iter() .map(|method| method.args.iter().map(|(_, _, type_)| type_.clone()).collect::>()) .collect::>(); let args_compact_attr = methods.iter().map(|method| { - method.args.iter() + method + .args + .iter() .map(|(is_compact, _, type_)| { if *is_compact { quote::quote_spanned!(type_.span() => #[codec(compact)] ) @@ -77,14 +82,10 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { let default_docs = [syn::parse_quote!( r"Contains one variant per dispatchable that can be called by an extrinsic." )]; - let docs = if docs.is_empty() { - &default_docs[..] - } else { - &docs[..] - }; + let docs = if docs.is_empty() { &default_docs[..] } else { &docs[..] }; let maybe_compile_error = if def.call.is_none() { - quote::quote!{ + quote::quote! 
{ compile_error!(concat!( "`", stringify!($pallet_name), diff --git a/frame/support/procedural/src/pallet/expand/constants.rs b/frame/support/procedural/src/pallet/expand/constants.rs index 0d7e6f8d714ef..5e4f5a200b285 100644 --- a/frame/support/procedural/src/pallet/expand/constants.rs +++ b/frame/support/procedural/src/pallet/expand/constants.rs @@ -69,41 +69,38 @@ pub fn expand_constants(def: &mut Def) -> proc_macro2::TokenStream { } }); - let consts = config_consts.chain(extra_consts) - .map(|const_| { - let const_type = &const_.type_; - let ident = &const_.ident; - let ident_str = format!("{}", ident); - let doc = const_.doc.clone().into_iter(); - let default_byte_impl = &const_.default_byte_impl; - let default_byte_getter = syn::Ident::new( - &format!("{}DefaultByteGetter", ident), - ident.span() + let consts = config_consts.chain(extra_consts).map(|const_| { + let const_type = &const_.type_; + let ident = &const_.ident; + let ident_str = format!("{}", ident); + let doc = const_.doc.clone().into_iter(); + let default_byte_impl = &const_.default_byte_impl; + let default_byte_getter = + syn::Ident::new(&format!("{}DefaultByteGetter", ident), ident.span()); + + quote::quote!({ + #[allow(non_upper_case_types)] + #[allow(non_camel_case_types)] + struct #default_byte_getter<#type_decl_gen>( + #frame_support::sp_std::marker::PhantomData<(#type_use_gen)> ); - quote::quote!({ - #[allow(non_upper_case_types)] - #[allow(non_camel_case_types)] - struct #default_byte_getter<#type_decl_gen>( - #frame_support::sp_std::marker::PhantomData<(#type_use_gen)> - ); - - impl<#type_impl_gen> #default_byte_getter<#type_use_gen> - #completed_where_clause - { - fn default_byte(&self) -> #frame_support::sp_std::vec::Vec { - #default_byte_impl - } + impl<#type_impl_gen> #default_byte_getter<#type_use_gen> + #completed_where_clause + { + fn default_byte(&self) -> #frame_support::sp_std::vec::Vec { + #default_byte_impl } + } - #frame_support::metadata::PalletConstantMetadata { - name: 
#ident_str, - ty: #frame_support::scale_info::meta_type::<#const_type>(), - value: #default_byte_getter::<#type_use_gen>(Default::default()).default_byte(), - docs: #frame_support::sp_std::vec![ #( #doc ),* ], - } - }) - }); + #frame_support::metadata::PalletConstantMetadata { + name: #ident_str, + ty: #frame_support::scale_info::meta_type::<#const_type>(), + value: #default_byte_getter::<#type_use_gen>(Default::default()).default_byte(), + docs: #frame_support::sp_std::vec![ #( #doc ),* ], + } + }) + }); quote::quote!( impl<#type_impl_gen> #pallet_ident<#type_use_gen> #completed_where_clause{ diff --git a/frame/support/procedural/src/pallet/expand/error.rs b/frame/support/procedural/src/pallet/expand/error.rs index 208efa068967f..91d3df3416be4 100644 --- a/frame/support/procedural/src/pallet/expand/error.rs +++ b/frame/support/procedural/src/pallet/expand/error.rs @@ -20,11 +20,7 @@ use frame_support_procedural_tools::get_doc_literals; /// * impl various trait on Error pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { - let error = if let Some(error) = &def.error { - error - } else { - return Default::default() - }; + let error = if let Some(error) = &def.error { error } else { return Default::default() }; let error_ident = &error.error; let frame_support = &def.frame_support; @@ -42,16 +38,14 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { ) ); - let as_u8_matches = error.variants.iter().enumerate() - .map(|(i, (variant, _))| { - quote::quote_spanned!(error.attr_span => Self::#variant => #i as u8,) - }); + let as_u8_matches = error.variants.iter().enumerate().map( + |(i, (variant, _))| quote::quote_spanned!(error.attr_span => Self::#variant => #i as u8,), + ); - let as_str_matches = error.variants.iter() - .map(|(variant, _)| { - let variant_str = format!("{}", variant); - quote::quote_spanned!(error.attr_span => Self::#variant => #variant_str,) - }); + let as_str_matches = error.variants.iter().map(|(variant, _)| { + let 
variant_str = format!("{}", variant); + quote::quote_spanned!(error.attr_span => Self::#variant => #variant_str,) + }); let error_item = { let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; @@ -64,12 +58,12 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { error_item.variants.insert(0, phantom_variant); // derive TypeInfo for error metadata - error_item.attrs.push( - syn::parse_quote!( #[derive(#frame_support::scale_info::TypeInfo)] ) - ); - error_item.attrs.push( - syn::parse_quote!( #[scale_info(skip_type_params(#type_use_gen))] ) - ); + error_item + .attrs + .push(syn::parse_quote!( #[derive(#frame_support::scale_info::TypeInfo)] )); + error_item + .attrs + .push(syn::parse_quote!( #[scale_info(skip_type_params(#type_use_gen))] )); if get_doc_literals(&error_item.attrs).is_empty() { error_item.attrs.push(syn::parse_quote!( diff --git a/frame/support/procedural/src/pallet/expand/event.rs b/frame/support/procedural/src/pallet/expand/event.rs index 12554fb8781e5..5af5170a87316 100644 --- a/frame/support/procedural/src/pallet/expand/event.rs +++ b/frame/support/procedural/src/pallet/expand/event.rs @@ -15,10 +15,9 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::COUNTER; -use crate::pallet::{ - Def, - parse::event::PalletEventDepositAttr, +use crate::{ + pallet::{parse::event::PalletEventDepositAttr, Def}, + COUNTER, }; use frame_support_procedural_tools::get_doc_literals; use syn::{spanned::Spanned, Ident}; @@ -33,10 +32,8 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { let ident = Ident::new(&format!("__is_event_part_defined_{}", count), event.attr_span); (event, ident) } else { - let macro_ident = Ident::new( - &format!("__is_event_part_defined_{}", count), - def.item.span(), - ); + let macro_ident = + Ident::new(&format!("__is_event_part_defined_{}", count), def.item.span()); return quote::quote! 
{ #[doc(hidden)] @@ -53,27 +50,25 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { )); } } - + #[doc(hidden)] pub use #macro_ident as is_event_part_defined; } - }; + } }; let event_where_clause = &event.where_clause; // NOTE: actually event where clause must be a subset of config where clause because of // `type Event: From>`. But we merge either way for potential better error message - let completed_where_clause = super::merge_where_clauses(&[ - &event.where_clause, - &def.config.where_clause, - ]); + let completed_where_clause = + super::merge_where_clauses(&[&event.where_clause, &def.config.where_clause]); let event_ident = &event.event; let frame_system = &def.frame_system; let frame_support = &def.frame_support; let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); - let event_impl_gen= &event.gen_kind.type_impl_gen(event.attr_span); + let event_impl_gen = &event.gen_kind.type_impl_gen(event.attr_span); let event_item = { let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[event.index]; @@ -122,7 +117,9 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { )); // skip requirement for type params to implement `TypeInfo` - event_item.attrs.push(syn::parse_quote!( #[scale_info(skip_type_params(#event_use_gen))] )); + event_item + .attrs + .push(syn::parse_quote!( #[scale_info(skip_type_params(#event_use_gen))] )); let deposit_event = if let Some(deposit_event) = &event.deposit_event { let event_use_gen = &event.gen_kind.type_use_gen(event.attr_span); @@ -161,7 +158,7 @@ pub fn expand_event(def: &mut Def) -> proc_macro2::TokenStream { macro_rules! 
#macro_ident { ($pallet_name:ident) => {}; } - + #[doc(hidden)] pub use #macro_ident as is_event_part_defined; } diff --git a/frame/support/procedural/src/pallet/expand/genesis_build.rs b/frame/support/procedural/src/pallet/expand/genesis_build.rs index 374d21001d6a1..c68f2339cfced 100644 --- a/frame/support/procedural/src/pallet/expand/genesis_build.rs +++ b/frame/support/procedural/src/pallet/expand/genesis_build.rs @@ -40,8 +40,8 @@ pub fn expand_genesis_build(def: &mut Def) -> proc_macro2::TokenStream { let gen_cfg_use_gen = genesis_config.gen_kind.type_use_gen(genesis_build.attr_span); - let genesis_build_item = &mut def.item.content.as_mut() - .expect("Checked by def parser").1[genesis_build.index]; + let genesis_build_item = + &mut def.item.content.as_mut().expect("Checked by def parser").1[genesis_build.index]; let genesis_build_item_impl = if let syn::Item::Impl(impl_) = genesis_build_item { impl_ diff --git a/frame/support/procedural/src/pallet/expand/genesis_config.rs b/frame/support/procedural/src/pallet/expand/genesis_config.rs index 76a7e1b744d79..7c1e61dffc0ec 100644 --- a/frame/support/procedural/src/pallet/expand/genesis_config.rs +++ b/frame/support/procedural/src/pallet/expand/genesis_config.rs @@ -15,10 +15,9 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::pallet::Def; +use crate::{pallet::Def, COUNTER}; use frame_support_procedural_tools::get_doc_literals; -use crate::COUNTER; -use syn::{Ident, spanned::Spanned}; +use syn::{spanned::Spanned, Ident}; /// * add various derive trait on GenesisConfig struct. 
pub fn expand_genesis_config(def: &mut Def) -> proc_macro2::TokenStream { @@ -38,15 +37,11 @@ pub fn expand_genesis_config(def: &mut Def) -> proc_macro2::TokenStream { (genesis_config, def_macro_ident, std_macro_ident) } else { - let def_macro_ident = Ident::new( - &format!("__is_genesis_config_defined_{}", count), - def.item.span(), - ); + let def_macro_ident = + Ident::new(&format!("__is_genesis_config_defined_{}", count), def.item.span()); - let std_macro_ident = Ident::new( - &format!("__is_std_enabled_for_genesis_{}", count), - def.item.span(), - ); + let std_macro_ident = + Ident::new(&format!("__is_std_enabled_for_genesis_{}", count), def.item.span()); return quote::quote! { #[doc(hidden)] @@ -75,18 +70,18 @@ pub fn expand_genesis_config(def: &mut Def) -> proc_macro2::TokenStream { #[doc(hidden)] pub use #std_macro_ident as is_std_enabled_for_genesis; } - }; + } }; let frame_support = &def.frame_support; - let genesis_config_item = &mut def.item.content.as_mut() - .expect("Checked by def parser").1[genesis_config.index]; + let genesis_config_item = + &mut def.item.content.as_mut().expect("Checked by def parser").1[genesis_config.index]; let serde_crate = format!("{}::serde", frame_support); match genesis_config_item { - syn::Item::Enum(syn::ItemEnum { attrs, ..}) | + syn::Item::Enum(syn::ItemEnum { attrs, .. }) | syn::Item::Struct(syn::ItemStruct { attrs, .. }) | syn::Item::Type(syn::ItemType { attrs, .. }) => { if get_doc_literals(&attrs).is_empty() { diff --git a/frame/support/procedural/src/pallet/expand/hooks.rs b/frame/support/procedural/src/pallet/expand/hooks.rs index 6e21c892d8ebb..c279a83d3daaf 100644 --- a/frame/support/procedural/src/pallet/expand/hooks.rs +++ b/frame/support/procedural/src/pallet/expand/hooks.rs @@ -59,7 +59,7 @@ pub fn expand_hooks(def: &mut Def) -> proc_macro2::TokenStream { let hooks_impl = if def.hooks.is_none() { let frame_system = &def.frame_system; - quote::quote!{ + quote::quote! 
{ impl<#type_impl_gen> #frame_support::traits::Hooks<::BlockNumber> for Pallet<#type_use_gen> {} diff --git a/frame/support/procedural/src/pallet/expand/inherent.rs b/frame/support/procedural/src/pallet/expand/inherent.rs index f1d58b28a5142..185211ecd4df2 100644 --- a/frame/support/procedural/src/pallet/expand/inherent.rs +++ b/frame/support/procedural/src/pallet/expand/inherent.rs @@ -15,11 +15,10 @@ // See the License for the specific language governing permissions and // limitations under the License. -use crate::pallet::Def; +use crate::{pallet::Def, COUNTER}; use proc_macro2::TokenStream; use quote::quote; -use crate::COUNTER; -use syn::{Ident, spanned::Spanned}; +use syn::{spanned::Spanned, Ident}; pub fn expand_inherents(def: &mut Def) -> TokenStream { let count = COUNTER.with(|counter| counter.borrow_mut().inc()); @@ -48,7 +47,7 @@ pub fn expand_inherents(def: &mut Def) -> TokenStream { #maybe_compile_error } } - + #[doc(hidden)] pub use #macro_ident as is_inherent_part_defined; } diff --git a/frame/support/procedural/src/pallet/expand/instances.rs b/frame/support/procedural/src/pallet/expand/instances.rs index 9f48563ab7e6c..ceb86fcad7ea8 100644 --- a/frame/support/procedural/src/pallet/expand/instances.rs +++ b/frame/support/procedural/src/pallet/expand/instances.rs @@ -15,9 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+use crate::{pallet::Def, NUMBER_OF_INSTANCE}; use proc_macro2::Span; -use crate::pallet::Def; -use crate::NUMBER_OF_INSTANCE; /// * Provide inherent instance to be used by construct_runtime /// * Provide Instance1 ..= Instance16 for instantiable pallet @@ -25,7 +24,9 @@ pub fn expand_instances(def: &mut Def) -> proc_macro2::TokenStream { let frame_support = &def.frame_support; let inherent_ident = syn::Ident::new(crate::INHERENT_INSTANCE_NAME, Span::call_site()); let instances = if def.config.has_instance { - (1..=NUMBER_OF_INSTANCE).map(|i| syn::Ident::new(&format!("Instance{}", i), Span::call_site())).collect() + (1..=NUMBER_OF_INSTANCE) + .map(|i| syn::Ident::new(&format!("Instance{}", i), Span::call_site())) + .collect() } else { vec![] }; diff --git a/frame/support/procedural/src/pallet/expand/mod.rs b/frame/support/procedural/src/pallet/expand/mod.rs index 97ae2f61ae83b..1c8883977c765 100644 --- a/frame/support/procedural/src/pallet/expand/mod.rs +++ b/frame/support/procedural/src/pallet/expand/mod.rs @@ -15,21 +15,21 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-mod constants; -mod pallet_struct; mod call; mod config; +mod constants; mod error; mod event; -mod storage; +mod genesis_build; +mod genesis_config; mod hooks; -mod store_trait; mod inherent; mod instances; -mod genesis_build; -mod genesis_config; -mod type_value; mod origin; +mod pallet_struct; +mod storage; +mod store_trait; +mod type_value; mod validate_unsigned; use crate::pallet::Def; @@ -98,7 +98,11 @@ pub fn expand(mut def: Def) -> proc_macro2::TokenStream { #validate_unsigned ); - def.item.content.as_mut().expect("This is checked by parsing").1 + def.item + .content + .as_mut() + .expect("This is checked by parsing") + .1 .push(syn::Item::Verbatim(new_items)); def.item.into_token_stream() diff --git a/frame/support/procedural/src/pallet/expand/origin.rs b/frame/support/procedural/src/pallet/expand/origin.rs index 578c641b43e41..987512f69a02b 100644 --- a/frame/support/procedural/src/pallet/expand/origin.rs +++ b/frame/support/procedural/src/pallet/expand/origin.rs @@ -18,7 +18,7 @@ use crate::{pallet::Def, COUNTER}; use proc_macro2::TokenStream; use quote::quote; -use syn::{Ident, spanned::Spanned}; +use syn::{spanned::Spanned, Ident}; pub fn expand_origins(def: &mut Def) -> TokenStream { let count = COUNTER.with(|counter| counter.borrow_mut().inc()); @@ -47,7 +47,7 @@ pub fn expand_origins(def: &mut Def) -> TokenStream { #maybe_compile_error } } - + #[doc(hidden)] pub use #macro_ident as is_origin_part_defined; } diff --git a/frame/support/procedural/src/pallet/expand/pallet_struct.rs b/frame/support/procedural/src/pallet/expand/pallet_struct.rs index bb0b2ba2cb6cc..a8f83ed56717a 100644 --- a/frame/support/procedural/src/pallet/expand/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/expand/pallet_struct.rs @@ -15,7 +15,7 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::pallet::{Def, expand::merge_where_clauses}; +use crate::pallet::{expand::merge_where_clauses, Def}; use frame_support_procedural_tools::get_doc_literals; /// * Add derive trait on Pallet @@ -99,29 +99,25 @@ pub fn expand_pallet_struct(def: &mut Def) -> proc_macro2::TokenStream { // Depending on the flag `generate_storage_info` we use partial or full storage info from // storage. - let ( - storage_info_span, - storage_info_trait, - storage_info_method, - ) = if let Some(span) = def.pallet_struct.generate_storage_info { - ( - span, - quote::quote_spanned!(span => StorageInfoTrait), - quote::quote_spanned!(span => storage_info), - ) - } else { - let span = def.pallet_struct.attr_span; - ( - span, - quote::quote_spanned!(span => PartialStorageInfoTrait), - quote::quote_spanned!(span => partial_storage_info), - ) - }; + let (storage_info_span, storage_info_trait, storage_info_method) = + if let Some(span) = def.pallet_struct.generate_storage_info { + ( + span, + quote::quote_spanned!(span => StorageInfoTrait), + quote::quote_spanned!(span => storage_info), + ) + } else { + let span = def.pallet_struct.attr_span; + ( + span, + quote::quote_spanned!(span => PartialStorageInfoTrait), + quote::quote_spanned!(span => partial_storage_info), + ) + }; let storage_names = &def.storages.iter().map(|storage| &storage.ident).collect::>(); - let storage_cfg_attrs = &def.storages.iter() - .map(|storage| &storage.cfg_attrs) - .collect::>(); + let storage_cfg_attrs = + &def.storages.iter().map(|storage| &storage.cfg_attrs).collect::>(); let storage_info = quote::quote_spanned!(storage_info_span => impl<#type_impl_gen> #frame_support::traits::StorageInfoTrait diff --git a/frame/support/procedural/src/pallet/expand/storage.rs b/frame/support/procedural/src/pallet/expand/storage.rs index f3b8e91219abd..4506812a4f288 100644 --- a/frame/support/procedural/src/pallet/expand/storage.rs +++ b/frame/support/procedural/src/pallet/expand/storage.rs @@ -15,8 +15,10 @@ // See the 
License for the specific language governing permissions and // limitations under the License. -use crate::pallet::{Def, parse::storage::StorageDef}; -use crate::pallet::parse::storage::{Metadata, QueryKind, StorageGenerics}; +use crate::pallet::{ + parse::storage::{Metadata, QueryKind, StorageDef, StorageGenerics}, + Def, +}; use std::collections::HashSet; /// Generate the prefix_ident related the the storage. @@ -29,10 +31,7 @@ fn prefix_ident(storage: &StorageDef) -> syn::Ident { /// Check for duplicated storage prefixes. This step is necessary since users can specify an /// alternative storage prefix using the #[pallet::storage_prefix] syntax, and we need to ensure /// that the prefix specified by the user is not a duplicate of an existing one. -fn check_prefix_duplicates( - storage_def: &StorageDef, - set: &mut HashSet, -) -> syn::Result<()> { +fn check_prefix_duplicates(storage_def: &StorageDef, set: &mut HashSet) -> syn::Result<()> { let prefix = storage_def.prefix(); if !set.insert(prefix.clone()) { @@ -40,7 +39,7 @@ fn check_prefix_duplicates( storage_def.prefix_span(), format!("Duplicate storage prefixes found for `{}`", prefix), ); - return Err(err); + return Err(err) } Ok(()) @@ -84,10 +83,8 @@ pub fn process_generics(def: &mut Def) -> syn::Result<()> { let default_query_kind: syn::Type = syn::parse_quote!(#frame_support::storage::types::OptionQuery); - let default_on_empty: syn::Type = - syn::parse_quote!(#frame_support::traits::GetDefault); - let default_max_values: syn::Type = - syn::parse_quote!(#frame_support::traits::GetDefault); + let default_on_empty: syn::Type = syn::parse_quote!(#frame_support::traits::GetDefault); + let default_max_values: syn::Type = syn::parse_quote!(#frame_support::traits::GetDefault); if let Some(named_generics) = storage_def.named_generics.clone() { args.args.clear(); @@ -99,7 +96,7 @@ pub fn process_generics(def: &mut Def) -> syn::Result<()> { args.args.push(syn::GenericArgument::Type(query_kind)); let on_empty = 
on_empty.unwrap_or_else(|| default_on_empty.clone()); args.args.push(syn::GenericArgument::Type(on_empty)); - } + }, StorageGenerics::Map { hasher, key, value, query_kind, on_empty, max_values } => { args.args.push(syn::GenericArgument::Type(hasher)); args.args.push(syn::GenericArgument::Type(key)); @@ -110,9 +107,16 @@ pub fn process_generics(def: &mut Def) -> syn::Result<()> { args.args.push(syn::GenericArgument::Type(on_empty)); let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); args.args.push(syn::GenericArgument::Type(max_values)); - } + }, StorageGenerics::DoubleMap { - hasher1, key1, hasher2, key2, value, query_kind, on_empty, max_values, + hasher1, + key1, + hasher2, + key2, + value, + query_kind, + on_empty, + max_values, } => { args.args.push(syn::GenericArgument::Type(hasher1)); args.args.push(syn::GenericArgument::Type(key1)); @@ -125,8 +129,8 @@ pub fn process_generics(def: &mut Def) -> syn::Result<()> { args.args.push(syn::GenericArgument::Type(on_empty)); let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); args.args.push(syn::GenericArgument::Type(max_values)); - } - StorageGenerics::NMap { keygen, value, query_kind, on_empty, max_values, } => { + }, + StorageGenerics::NMap { keygen, value, query_kind, on_empty, max_values } => { args.args.push(syn::GenericArgument::Type(keygen)); args.args.push(syn::GenericArgument::Type(value)); let query_kind = query_kind.unwrap_or_else(|| default_query_kind.clone()); @@ -135,7 +139,7 @@ pub fn process_generics(def: &mut Def) -> syn::Result<()> { args.args.push(syn::GenericArgument::Type(on_empty)); let max_values = max_values.unwrap_or_else(|| default_max_values.clone()); args.args.push(syn::GenericArgument::Type(max_values)); - } + }, } } else { args.args[0] = syn::parse_quote!( #prefix_ident<#type_use_gen> ); @@ -153,44 +157,42 @@ pub fn process_generics(def: &mut Def) -> syn::Result<()> { /// * generate metadatas pub fn expand_storages(def: &mut Def) -> 
proc_macro2::TokenStream { if let Err(e) = process_generics(def) { - return e.into_compile_error().into(); + return e.into_compile_error().into() } let frame_support = &def.frame_support; let frame_system = &def.frame_system; let pallet_ident = &def.pallet_struct.pallet; + let entries = def.storages.iter().map(|storage| { + let docs = &storage.docs; - let entries = def.storages.iter() - .map(|storage| { - let docs = &storage.docs; + let ident = &storage.ident; + let gen = &def.type_use_generics(storage.attr_span); + let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); - let ident = &storage.ident; - let gen = &def.type_use_generics(storage.attr_span); - let full_ident = quote::quote_spanned!(storage.attr_span => #ident<#gen> ); + let cfg_attrs = &storage.cfg_attrs; - let cfg_attrs = &storage.cfg_attrs; + quote::quote_spanned!(storage.attr_span => + #(#cfg_attrs)* #frame_support::metadata::StorageEntryMetadata { + name: <#full_ident as #frame_support::storage::StorageEntryMetadata>::NAME, + modifier: <#full_ident as #frame_support::storage::StorageEntryMetadata>::MODIFIER, + ty: <#full_ident as #frame_support::storage::StorageEntryMetadata>::ty(), + default: <#full_ident as #frame_support::storage::StorageEntryMetadata>::default(), + docs: #frame_support::sp_std::vec![ + #( #docs, )* + ], + } + ) + }); - quote::quote_spanned!(storage.attr_span => - #(#cfg_attrs)* #frame_support::metadata::StorageEntryMetadata { - name: <#full_ident as #frame_support::storage::StorageEntryMetadata>::NAME, - modifier: <#full_ident as #frame_support::storage::StorageEntryMetadata>::MODIFIER, - ty: <#full_ident as #frame_support::storage::StorageEntryMetadata>::ty(), - default: <#full_ident as #frame_support::storage::StorageEntryMetadata>::default(), - docs: #frame_support::sp_std::vec![ - #( #docs, )* - ], - } - ) - }); - - let getters = def.storages.iter() - .map(|storage| if let Some(getter) = &storage.getter { - let completed_where_clause = 
super::merge_where_clauses(&[ - &storage.where_clause, - &def.config.where_clause, - ]); - let docs = storage.docs.iter() + let getters = def.storages.iter().map(|storage| { + if let Some(getter) = &storage.getter { + let completed_where_clause = + super::merge_where_clauses(&[&storage.where_clause, &def.config.where_clause]); + let docs = storage + .docs + .iter() .map(|d| quote::quote_spanned!(storage.attr_span => #[doc = #d])); let ident = &storage.ident; @@ -290,11 +292,12 @@ pub fn expand_storages(def: &mut Def) -> proc_macro2::TokenStream { } } ) - } + }, } } else { Default::default() - }); + } + }); let prefix_structs = def.storages.iter().map(|storage_def| { let type_impl_gen = &def.type_impl_generics(storage_def.attr_span); diff --git a/frame/support/procedural/src/pallet/expand/store_trait.rs b/frame/support/procedural/src/pallet/expand/store_trait.rs index 81ed52ac87a68..36cc08b732fe5 100644 --- a/frame/support/procedural/src/pallet/expand/store_trait.rs +++ b/frame/support/procedural/src/pallet/expand/store_trait.rs @@ -22,11 +22,8 @@ use syn::spanned::Spanned; /// * generate Store trait with all storages, /// * implement Store trait for Pallet. 
pub fn expand_store_trait(def: &mut Def) -> proc_macro2::TokenStream { - let (trait_vis, trait_store) = if let Some(store) = &def.pallet_struct.store { - store - } else { - return Default::default() - }; + let (trait_vis, trait_store) = + if let Some(store) = &def.pallet_struct.store { store } else { return Default::default() }; let type_impl_gen = &def.type_impl_generics(trait_store.span()); let type_use_gen = &def.type_use_generics(trait_store.span()); @@ -37,7 +34,8 @@ pub fn expand_store_trait(def: &mut Def) -> proc_macro2::TokenStream { let completed_where_clause = super::merge_where_clauses(&where_clauses); let storage_names = &def.storages.iter().map(|storage| &storage.ident).collect::>(); - let storage_cfg_attrs = &def.storages.iter().map(|storage| &storage.cfg_attrs).collect::>(); + let storage_cfg_attrs = + &def.storages.iter().map(|storage| &storage.cfg_attrs).collect::>(); quote::quote_spanned!(trait_store.span() => #trait_vis trait #trait_store { diff --git a/frame/support/procedural/src/pallet/expand/validate_unsigned.rs b/frame/support/procedural/src/pallet/expand/validate_unsigned.rs index 1abf7d893b933..5f30d712e9a51 100644 --- a/frame/support/procedural/src/pallet/expand/validate_unsigned.rs +++ b/frame/support/procedural/src/pallet/expand/validate_unsigned.rs @@ -15,15 +15,15 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::pallet::Def; +use crate::{pallet::Def, COUNTER}; use proc_macro2::TokenStream; use quote::quote; -use crate::COUNTER; -use syn::{Ident, spanned::Spanned}; +use syn::{spanned::Spanned, Ident}; pub fn expand_validate_unsigned(def: &mut Def) -> TokenStream { let count = COUNTER.with(|counter| counter.borrow_mut().inc()); - let macro_ident = Ident::new(&format!("__is_validate_unsigned_part_defined_{}", count), def.item.span()); + let macro_ident = + Ident::new(&format!("__is_validate_unsigned_part_defined_{}", count), def.item.span()); let maybe_compile_error = if def.validate_unsigned.is_none() { quote! { @@ -48,7 +48,7 @@ pub fn expand_validate_unsigned(def: &mut Def) -> TokenStream { #maybe_compile_error } } - + #[doc(hidden)] pub use #macro_ident as is_validate_unsigned_part_defined; } diff --git a/frame/support/procedural/src/pallet/mod.rs b/frame/support/procedural/src/pallet/mod.rs index 560d57d50e037..93797906d04d9 100644 --- a/frame/support/procedural/src/pallet/mod.rs +++ b/frame/support/procedural/src/pallet/mod.rs @@ -25,21 +25,22 @@ //! This step will modify the ItemMod by adding some derive attributes or phantom data variants //! to user defined types. And also crate new types and implement block. -mod parse; mod expand; +mod parse; pub use parse::Def; use syn::spanned::Spanned; pub fn pallet( attr: proc_macro::TokenStream, - item: proc_macro::TokenStream + item: proc_macro::TokenStream, ) -> proc_macro::TokenStream { if !attr.is_empty() { - let msg = "Invalid pallet macro call: expected no attributes, e.g. macro call must be just \ + let msg = + "Invalid pallet macro call: expected no attributes, e.g. 
macro call must be just \ `#[frame_support::pallet]` or `#[pallet]`"; let span = proc_macro2::TokenStream::from(attr).span(); - return syn::Error::new(span, msg).to_compile_error().into(); + return syn::Error::new(span, msg).to_compile_error().into() } let item = syn::parse_macro_input!(item as syn::ItemMod); diff --git a/frame/support/procedural/src/pallet/parse/call.rs b/frame/support/procedural/src/pallet/parse/call.rs index 380f01d9eecd6..0563568f33311 100644 --- a/frame/support/procedural/src/pallet/parse/call.rs +++ b/frame/support/procedural/src/pallet/parse/call.rs @@ -76,9 +76,7 @@ impl syn::parse::Parse for FunctionAttr { let weight_content; syn::parenthesized!(weight_content in content); - Ok(FunctionAttr { - weight: weight_content.parse::()?, - }) + Ok(FunctionAttr { weight: weight_content.parse::()? }) } } @@ -101,7 +99,6 @@ impl syn::parse::Parse for ArgAttrIsCompact { /// Check the syntax is `OriginFor` pub fn check_dispatchable_first_arg_type(ty: &syn::Type) -> syn::Result<()> { - pub struct CheckDispatchableFirstArg; impl syn::parse::Parse for CheckDispatchableFirstArg { fn parse(input: syn::parse::ParseStream) -> syn::Result { @@ -114,13 +111,12 @@ pub fn check_dispatchable_first_arg_type(ty: &syn::Type) -> syn::Result<()> { } } - syn::parse2::(ty.to_token_stream()) - .map_err(|e| { - let msg = "Invalid type: expected `OriginFor`"; - let mut err = syn::Error::new(ty.span(), msg); - err.combine(e); - err - })?; + syn::parse2::(ty.to_token_stream()).map_err(|e| { + let msg = "Invalid type: expected `OriginFor`"; + let mut err = syn::Error::new(ty.span(), msg); + err.combine(e); + err + })?; Ok(()) } @@ -129,12 +125,12 @@ impl CallDef { pub fn try_from( attr_span: proc_macro2::Span, index: usize, - item: &mut syn::Item + item: &mut syn::Item, ) -> syn::Result { let item = if let syn::Item::Impl(item) = item { item } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::call, expected item impl")); + return Err(syn::Error::new(item.span(), 
"Invalid pallet::call, expected item impl")) }; let mut instances = vec![]; @@ -159,18 +155,18 @@ impl CallDef { _ => method.vis.span(), }; - return Err(syn::Error::new(span, msg)); + return Err(syn::Error::new(span, msg)) } match method.sig.inputs.first() { None => { let msg = "Invalid pallet::call, must have at least origin arg"; - return Err(syn::Error::new(method.sig.span(), msg)); + return Err(syn::Error::new(method.sig.span(), msg)) }, Some(syn::FnArg::Receiver(_)) => { let msg = "Invalid pallet::call, first argument must be a typed argument, \ e.g. `origin: OriginFor`"; - return Err(syn::Error::new(method.sig.span(), msg)); + return Err(syn::Error::new(method.sig.span(), msg)) }, Some(syn::FnArg::Typed(arg)) => { check_dispatchable_first_arg_type(&*arg.ty)?; @@ -182,7 +178,7 @@ impl CallDef { } else { let msg = "Invalid pallet::call, require return type \ DispatchResultWithPostInfo"; - return Err(syn::Error::new(method.sig.span(), msg)); + return Err(syn::Error::new(method.sig.span(), msg)) } let mut call_var_attrs: Vec = @@ -194,7 +190,7 @@ impl CallDef { } else { "Invalid pallet::call, too many weight attributes given" }; - return Err(syn::Error::new(method.sig.span(), msg)); + return Err(syn::Error::new(method.sig.span(), msg)) } let weight = call_var_attrs.pop().unwrap().weight; @@ -211,14 +207,14 @@ impl CallDef { if arg_attrs.len() > 1 { let msg = "Invalid pallet::call, argument has too many attributes"; - return Err(syn::Error::new(arg.span(), msg)); + return Err(syn::Error::new(arg.span(), msg)) } let arg_ident = if let syn::Pat::Ident(pat) = &*arg.pat { pat.ident.clone() } else { let msg = "Invalid pallet::call, argument must be ident"; - return Err(syn::Error::new(arg.pat.span(), msg)); + return Err(syn::Error::new(arg.pat.span(), msg)) }; args.push((!arg_attrs.is_empty(), arg_ident, arg.ty.clone())); @@ -226,15 +222,10 @@ impl CallDef { let docs = get_doc_literals(&method.attrs); - methods.push(CallVariantDef { - name: method.sig.ident.clone(), - 
weight, - args, - docs, - }); + methods.push(CallVariantDef { name: method.sig.ident.clone(), weight, args, docs }); } else { let msg = "Invalid pallet::call, only method accepted"; - return Err(syn::Error::new(impl_item.span(), msg)); + return Err(syn::Error::new(impl_item.span(), msg)) } } diff --git a/frame/support/procedural/src/pallet/parse/config.rs b/frame/support/procedural/src/pallet/parse/config.rs index 6a476165de036..712c20ffc7b4c 100644 --- a/frame/support/procedural/src/pallet/parse/config.rs +++ b/frame/support/procedural/src/pallet/parse/config.rs @@ -16,10 +16,10 @@ // limitations under the License. use super::helper; -use frame_support_procedural_tools::get_doc_literals; use core::convert::TryFrom; -use syn::spanned::Spanned; +use frame_support_procedural_tools::get_doc_literals; use quote::ToTokens; +use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { @@ -67,23 +67,26 @@ impl TryFrom<&syn::TraitItemType> for ConstMetadataDef { type Error = syn::Error; fn try_from(trait_ty: &syn::TraitItemType) -> Result { - let err = |span, msg| - syn::Error::new(span, format!("Invalid usage of `#[pallet::constant]`: {}", msg)); + let err = |span, msg| { + syn::Error::new(span, format!("Invalid usage of `#[pallet::constant]`: {}", msg)) + }; let doc = get_doc_literals(&trait_ty.attrs); let ident = trait_ty.ident.clone(); - let bound = trait_ty.bounds + let bound = trait_ty + .bounds .iter() - .find_map(|b| + .find_map(|b| { if let syn::TypeParamBound::Trait(tb) = b { - tb.path.segments + tb.path + .segments .last() - .and_then(|s| if s.ident == "Get" { Some(s) } else { None } ) + .and_then(|s| if s.ident == "Get" { Some(s) } else { None }) } else { None } - ) + }) .ok_or_else(|| err(trait_ty.span(), "`Get` trait bound not found"))?; - let type_arg = if let syn::PathArguments::AngleBracketed (ref ab) = bound.arguments { + let type_arg = if let syn::PathArguments::AngleBracketed(ref ab) = bound.arguments { if ab.args.len() 
== 1 { if let syn::GenericArgument::Type(ref ty) = ab.args[0] { Ok(ty) @@ -215,15 +218,15 @@ impl syn::parse::Parse for FromEventParse { fn check_event_type( frame_system: &syn::Ident, trait_item: &syn::TraitItem, - trait_has_instance: bool -) -> syn::Result { + trait_has_instance: bool, +) -> syn::Result { if let syn::TraitItem::Type(type_) = trait_item { if type_.ident == "Event" { // Check event has no generics if !type_.generics.params.is_empty() || type_.generics.where_clause.is_some() { let msg = "Invalid `type Event`, associated type `Event` is reserved and must have\ no generics nor where_clause"; - return Err(syn::Error::new(trait_item.span(), msg)); + return Err(syn::Error::new(trait_item.span(), msg)) } // Check bound contains IsType and From @@ -238,28 +241,28 @@ fn check_event_type( bound: `IsType<::Event>`", frame_system, ); - return Err(syn::Error::new(type_.span(), msg)); + return Err(syn::Error::new(type_.span(), msg)) } - let from_event_bound = type_.bounds.iter().find_map(|s| { - syn::parse2::(s.to_token_stream()).ok() - }); + let from_event_bound = type_ + .bounds + .iter() + .find_map(|s| syn::parse2::(s.to_token_stream()).ok()); let from_event_bound = if let Some(b) = from_event_bound { b } else { let msg = "Invalid `type Event`, associated type `Event` is reserved and must \ bound: `From` or `From>` or `From>`"; - return Err(syn::Error::new(type_.span(), msg)); + return Err(syn::Error::new(type_.span(), msg)) }; - if from_event_bound.is_generic - && (from_event_bound.has_instance != trait_has_instance) + if from_event_bound.is_generic && (from_event_bound.has_instance != trait_has_instance) { let msg = "Invalid `type Event`, associated type `Event` bounds inconsistent \ `From`. 
Config and generic Event must be both with instance or \ without instance"; - return Err(syn::Error::new(type_.span(), msg)); + return Err(syn::Error::new(type_.span(), msg)) } Ok(true) @@ -273,16 +276,14 @@ fn check_event_type( /// Replace ident `Self` by `T` pub fn replace_self_by_t(input: proc_macro2::TokenStream) -> proc_macro2::TokenStream { - input.into_iter() + input + .into_iter() .map(|token_tree| match token_tree { proc_macro2::TokenTree::Group(group) => - proc_macro2::Group::new( - group.delimiter(), - replace_self_by_t(group.stream()) - ).into(), + proc_macro2::Group::new(group.delimiter(), replace_self_by_t(group.stream())).into(), proc_macro2::TokenTree::Ident(ident) if ident == "Self" => proc_macro2::Ident::new("T", ident.span()).into(), - other => other + other => other, }) .collect() } @@ -298,27 +299,27 @@ impl ConfigDef { item } else { let msg = "Invalid pallet::config, expected trait definition"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) }; if !matches!(item.vis, syn::Visibility::Public(_)) { let msg = "Invalid pallet::config, trait must be public"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) } syn::parse2::(item.ident.to_token_stream())?; - let where_clause = { let stream = replace_self_by_t(item.generics.where_clause.to_token_stream()); - syn::parse2::>(stream) - .expect("Internal error: replacing `Self` by `T` should result in valid where - clause") + syn::parse2::>(stream).expect( + "Internal error: replacing `Self` by `T` should result in valid where + clause", + ) }; if item.generics.params.len() > 1 { let msg = "Invalid pallet::config, expected no more than one generic"; - return Err(syn::Error::new(item.generics.params[2].span(), msg)); + return Err(syn::Error::new(item.generics.params[2].span(), msg)) } let has_instance = if item.generics.params.first().is_some() { @@ -332,15 +333,15 @@ impl ConfigDef { let mut consts_metadata = 
vec![]; for trait_item in &mut item.items { // Parse for event - has_event_type = has_event_type - || check_event_type(frame_system, trait_item, has_instance)?; + has_event_type = + has_event_type || check_event_type(frame_system, trait_item, has_instance)?; // Parse for constant let type_attrs_const: Vec = helper::take_item_pallet_attrs(trait_item)?; if type_attrs_const.len() > 1 { let msg = "Invalid attribute in pallet::config, only one attribute is expected"; - return Err(syn::Error::new(type_attrs_const[1].span(), msg)); + return Err(syn::Error::new(type_attrs_const[1].span(), msg)) } if type_attrs_const.len() == 1 { @@ -350,17 +351,17 @@ impl ConfigDef { consts_metadata.push(constant); }, _ => { - let msg = "Invalid pallet::constant in pallet::config, expected type trait \ + let msg = + "Invalid pallet::constant in pallet::config, expected type trait \ item"; - return Err(syn::Error::new(trait_item.span(), msg)); + return Err(syn::Error::new(trait_item.span(), msg)) }, } } } - let attr: Option = helper::take_first_item_pallet_attr( - &mut item.attrs - )?; + let attr: Option = + helper::take_first_item_pallet_attr(&mut item.attrs)?; let disable_system_supertrait_check = attr.is_some(); @@ -373,10 +374,9 @@ impl ConfigDef { let found = if item.supertraits.is_empty() { "none".to_string() } else { - let mut found = item.supertraits.iter() - .fold(String::new(), |acc, s| { - format!("{}`{}`, ", acc, quote::quote!(#s).to_string()) - }); + let mut found = item.supertraits.iter().fold(String::new(), |acc, s| { + format!("{}`{}`, ", acc, quote::quote!(#s).to_string()) + }); found.pop(); found.pop(); found @@ -388,19 +388,11 @@ impl ConfigDef { (try `pub trait Config: frame_system::Config {{ ...` or \ `pub trait Config: frame_system::Config {{ ...`). 
\ To disable this check, use `#[pallet::disable_frame_system_supertrait_check]`", - frame_system, - found, + frame_system, found, ); - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) } - Ok(Self { - index, - has_instance, - consts_metadata, - has_event_type, - where_clause, - attr_span, - }) + Ok(Self { index, has_instance, consts_metadata, has_event_type, where_clause, attr_span }) } } diff --git a/frame/support/procedural/src/pallet/parse/error.rs b/frame/support/procedural/src/pallet/parse/error.rs index 969123eb637f0..9c9a95105c53c 100644 --- a/frame/support/procedural/src/pallet/parse/error.rs +++ b/frame/support/procedural/src/pallet/parse/error.rs @@ -17,8 +17,8 @@ use super::helper; use frame_support_procedural_tools::get_doc_literals; -use syn::spanned::Spanned; use quote::ToTokens; +use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { @@ -49,11 +49,11 @@ impl ErrorDef { let item = if let syn::Item::Enum(item) = item { item } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::error, expected item enum")); + return Err(syn::Error::new(item.span(), "Invalid pallet::error, expected item enum")) }; if !matches!(item.vis, syn::Visibility::Public(_)) { let msg = "Invalid pallet::error, `Error` must be public"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) } let mut instances = vec![]; @@ -61,34 +61,30 @@ impl ErrorDef { if item.generics.where_clause.is_some() { let msg = "Invalid pallet::error, where clause is not allowed on pallet error item"; - return Err(syn::Error::new(item.generics.where_clause.as_ref().unwrap().span(), msg)); + return Err(syn::Error::new(item.generics.where_clause.as_ref().unwrap().span(), msg)) } let error = syn::parse2::(item.ident.to_token_stream())?; - let variants = item.variants.iter() + let variants = item + .variants + .iter() .map(|variant| { if !matches!(variant.fields, 
syn::Fields::Unit) { let msg = "Invalid pallet::error, unexpected fields, must be `Unit`"; - return Err(syn::Error::new(variant.fields.span(), msg)); + return Err(syn::Error::new(variant.fields.span(), msg)) } if variant.discriminant.is_some() { let msg = "Invalid pallet::error, unexpected discriminant, discriminant \ are not supported"; let span = variant.discriminant.as_ref().unwrap().0.span(); - return Err(syn::Error::new(span, msg)); + return Err(syn::Error::new(span, msg)) } Ok((variant.ident.clone(), get_doc_literals(&variant.attrs))) }) .collect::>()?; - Ok(ErrorDef { - attr_span, - index, - variants, - instances, - error, - }) + Ok(ErrorDef { attr_span, index, variants, instances, error }) } } diff --git a/frame/support/procedural/src/pallet/parse/event.rs b/frame/support/procedural/src/pallet/parse/event.rs index ce4b90b16030e..041bdf3efebb3 100644 --- a/frame/support/procedural/src/pallet/parse/event.rs +++ b/frame/support/procedural/src/pallet/parse/event.rs @@ -16,8 +16,8 @@ // limitations under the License. use super::helper; -use syn::spanned::Spanned; use quote::ToTokens; +use syn::spanned::Spanned; /// List of additional token to be used for parsing. 
mod keyword { @@ -87,7 +87,7 @@ impl PalletEventAttrInfo { if deposit_event.is_none() { deposit_event = Some(attr) } else { - return Err(syn::Error::new(attr.span, "Duplicate attribute")); + return Err(syn::Error::new(attr.span, "Duplicate attribute")) } } @@ -107,13 +107,14 @@ impl EventDef { return Err(syn::Error::new(item.span(), "Invalid pallet::event, expected item enum")) }; - let event_attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; + let event_attrs: Vec = + helper::take_item_pallet_attrs(&mut item.attrs)?; let attr_info = PalletEventAttrInfo::from_attrs(event_attrs)?; let deposit_event = attr_info.deposit_event; if !matches!(item.vis, syn::Visibility::Public(_)) { let msg = "Invalid pallet::event, `Error` must be public"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) } let where_clause = item.generics.where_clause.clone(); @@ -125,10 +126,7 @@ impl EventDef { instances.push(u); } else { // construct_runtime only allow non generic event for non instantiable pallet. 
- instances.push(helper::InstanceUsage { - has_instance: false, - span: item.ident.span(), - }) + instances.push(helper::InstanceUsage { has_instance: false, span: item.ident.span() }) } let has_instance = item.generics.type_params().any(|t| t.ident == "I"); @@ -138,14 +136,6 @@ impl EventDef { let event = syn::parse2::(item.ident.to_token_stream())?; - Ok(EventDef { - attr_span, - index, - instances, - deposit_event, - event, - gen_kind, - where_clause, - }) + Ok(EventDef { attr_span, index, instances, deposit_event, event, gen_kind, where_clause }) } } diff --git a/frame/support/procedural/src/pallet/parse/extra_constants.rs b/frame/support/procedural/src/pallet/parse/extra_constants.rs index d56bcf51b6aac..c1324df6c22f1 100644 --- a/frame/support/procedural/src/pallet/parse/extra_constants.rs +++ b/frame/support/procedural/src/pallet/parse/extra_constants.rs @@ -53,14 +53,11 @@ pub struct ExtraConstantDef { } impl ExtraConstantsDef { - pub fn try_from( - index: usize, - item: &mut syn::Item - ) -> syn::Result { + pub fn try_from(index: usize, item: &mut syn::Item) -> syn::Result { let item = if let syn::Item::Impl(item) = item { item } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::call, expected item impl")); + return Err(syn::Error::new(item.span(), "Invalid pallet::call, expected item impl")) }; let mut instances = vec![]; @@ -79,28 +76,28 @@ impl ExtraConstantsDef { method } else { let msg = "Invalid pallet::call, only method accepted"; - return Err(syn::Error::new(impl_item.span(), msg)); + return Err(syn::Error::new(impl_item.span(), msg)) }; if !method.sig.inputs.is_empty() { let msg = "Invalid pallet::extra_constants, method must have 0 args"; - return Err(syn::Error::new(method.sig.span(), msg)); + return Err(syn::Error::new(method.sig.span(), msg)) } if !method.sig.generics.params.is_empty() { let msg = "Invalid pallet::extra_constants, method must have 0 generics"; - return Err(syn::Error::new(method.sig.generics.params[0].span(), 
msg)); + return Err(syn::Error::new(method.sig.generics.params[0].span(), msg)) } if method.sig.generics.where_clause.is_some() { let msg = "Invalid pallet::extra_constants, method must have no where clause"; - return Err(syn::Error::new(method.sig.generics.where_clause.span(), msg)); + return Err(syn::Error::new(method.sig.generics.where_clause.span(), msg)) } let type_ = match &method.sig.output { syn::ReturnType::Default => { let msg = "Invalid pallet::extra_constants, method must have a return type"; - return Err(syn::Error::new(method.span(), msg)); + return Err(syn::Error::new(method.span(), msg)) }, syn::ReturnType::Type(_, type_) => *type_.clone(), }; diff --git a/frame/support/procedural/src/pallet/parse/genesis_build.rs b/frame/support/procedural/src/pallet/parse/genesis_build.rs index 1438c400b17f1..82e297b4e26e8 100644 --- a/frame/support/procedural/src/pallet/parse/genesis_build.rs +++ b/frame/support/procedural/src/pallet/parse/genesis_build.rs @@ -15,8 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use syn::spanned::Spanned; use super::helper; +use syn::spanned::Spanned; /// Definition for pallet genesis build implementation. pub struct GenesisBuildDef { @@ -40,24 +40,22 @@ impl GenesisBuildDef { item } else { let msg = "Invalid pallet::genesis_build, expected item impl"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) }; - let item_trait = &item.trait_.as_ref() + let item_trait = &item + .trait_ + .as_ref() .ok_or_else(|| { let msg = "Invalid pallet::genesis_build, expected impl<..> GenesisBuild<..> \ for GenesisConfig<..>"; syn::Error::new(item.span(), msg) - })?.1; + })? 
+ .1; let mut instances = vec![]; instances.push(helper::check_genesis_builder_usage(&item_trait)?); - Ok(Self { - attr_span, - index, - instances, - where_clause: item.generics.where_clause.clone(), - }) + Ok(Self { attr_span, index, instances, where_clause: item.generics.where_clause.clone() }) } } diff --git a/frame/support/procedural/src/pallet/parse/genesis_config.rs b/frame/support/procedural/src/pallet/parse/genesis_config.rs index 729d1241390a5..a0cf7de1a846b 100644 --- a/frame/support/procedural/src/pallet/parse/genesis_config.rs +++ b/frame/support/procedural/src/pallet/parse/genesis_config.rs @@ -15,8 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use syn::spanned::Spanned; use super::helper; +use syn::spanned::Spanned; /// Definition for pallet genesis config type. /// @@ -42,7 +42,7 @@ impl GenesisConfigDef { syn::Item::Struct(item) => (&item.vis, &item.ident, &item.generics), _ => { let msg = "Invalid pallet::genesis_config, expected enum or struct"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) }, }; @@ -60,19 +60,14 @@ impl GenesisConfigDef { if !matches!(vis, syn::Visibility::Public(_)) { let msg = "Invalid pallet::genesis_config, GenesisConfig must be public"; - return Err(syn::Error::new(item_span, msg)); + return Err(syn::Error::new(item_span, msg)) } if ident != "GenesisConfig" { let msg = "Invalid pallet::genesis_config, ident must `GenesisConfig`"; - return Err(syn::Error::new(ident.span(), msg)); + return Err(syn::Error::new(ident.span(), msg)) } - Ok(GenesisConfigDef { - index, - genesis_config: ident.clone(), - instances, - gen_kind, - }) + Ok(GenesisConfigDef { index, genesis_config: ident.clone(), instances, gen_kind }) } } diff --git a/frame/support/procedural/src/pallet/parse/helper.rs b/frame/support/procedural/src/pallet/parse/helper.rs index 30a2ccd9ee012..2590e86b58b0e 100644 --- 
a/frame/support/procedural/src/pallet/parse/helper.rs +++ b/frame/support/procedural/src/pallet/parse/helper.rs @@ -15,8 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use syn::spanned::Spanned; use quote::ToTokens; +use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { @@ -47,20 +47,15 @@ pub trait MutItemAttrs { } /// Take the first pallet attribute (e.g. attribute like `#[pallet..]`) and decode it to `Attr` -pub fn take_first_item_pallet_attr(item: &mut impl MutItemAttrs) -> syn::Result> where +pub fn take_first_item_pallet_attr(item: &mut impl MutItemAttrs) -> syn::Result> +where Attr: syn::parse::Parse, { - let attrs = if let Some(attrs) = item.mut_item_attrs() { - attrs - } else { - return Ok(None) - }; - - if let Some(index) = attrs.iter() - .position(|attr| - attr.path.segments.first().map_or(false, |segment| segment.ident == "pallet") - ) - { + let attrs = if let Some(attrs) = item.mut_item_attrs() { attrs } else { return Ok(None) }; + + if let Some(index) = attrs.iter().position(|attr| { + attr.path.segments.first().map_or(false, |segment| segment.ident == "pallet") + }) { let pallet_attr = attrs.remove(index); Ok(Some(syn::parse2(pallet_attr.into_token_stream())?)) } else { @@ -69,7 +64,8 @@ pub fn take_first_item_pallet_attr(item: &mut impl MutItemAttrs) -> syn::R } /// Take all the pallet attributes (e.g. attribute like `#[pallet..]`) and decode them to `Attr` -pub fn take_item_pallet_attrs(item: &mut impl MutItemAttrs) -> syn::Result> where +pub fn take_item_pallet_attrs(item: &mut impl MutItemAttrs) -> syn::Result> +where Attr: syn::parse::Parse, { let mut pallet_attrs = Vec::new(); @@ -83,13 +79,16 @@ pub fn take_item_pallet_attrs(item: &mut impl MutItemAttrs) -> syn::Result /// Get all the cfg attributes (e.g. 
attribute like `#[cfg..]`) and decode them to `Attr` pub fn get_item_cfg_attrs(attrs: &[syn::Attribute]) -> Vec { - attrs.iter().filter_map(|attr| { - if attr.path.segments.first().map_or(false, |segment| segment.ident == "cfg") { - Some(attr.clone()) - } else { - None - } - }).collect::>() + attrs + .iter() + .filter_map(|attr| { + if attr.path.segments.first().map_or(false, |segment| segment.ident == "cfg") { + Some(attr.clone()) + } else { + None + } + }) + .collect::>() } impl MutItemAttrs for syn::Item { @@ -116,7 +115,6 @@ impl MutItemAttrs for syn::Item { } } - impl MutItemAttrs for syn::TraitItem { fn mut_item_attrs(&mut self) -> Option<&mut Vec> { match self { @@ -149,7 +147,7 @@ impl syn::parse::Parse for Unit { syn::parenthesized!(content in input); if !content.is_empty() { let msg = "unexpected tokens, expected nothing inside parenthesis as `()`"; - return Err(syn::Error::new(content.span(), msg)); + return Err(syn::Error::new(content.span(), msg)) } Ok(Self) } @@ -162,7 +160,7 @@ impl syn::parse::Parse for StaticLifetime { let lifetime = input.parse::()?; if lifetime.ident != "static" { let msg = "unexpected tokens, expected `static`"; - return Err(syn::Error::new(lifetime.ident.span(), msg)); + return Err(syn::Error::new(lifetime.ident.span(), msg)) } Ok(Self) } @@ -173,10 +171,7 @@ impl syn::parse::Parse for StaticLifetime { /// `span` is used in case generics is empty (empty generics has span == call_site). /// /// return the instance if found. 
-pub fn check_config_def_gen( - gen: &syn::Generics, - span: proc_macro2::Span, -) -> syn::Result<()> { +pub fn check_config_def_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Result<()> { let expected = "expected `I: 'static = ()`"; pub struct CheckTraitDefGenerics; impl syn::parse::Parse for CheckTraitDefGenerics { @@ -191,13 +186,12 @@ pub fn check_config_def_gen( } } - syn::parse2::(gen.params.to_token_stream()) - .map_err(|e| { - let msg = format!("Invalid generics: {}", expected); - let mut err = syn::Error::new(span, msg); - err.combine(e); - err - })?; + syn::parse2::(gen.params.to_token_stream()).map_err(|e| { + let msg = format!("Invalid generics: {}", expected); + let mut err = syn::Error::new(span, msg); + err.combine(e); + err + })?; Ok(()) } @@ -217,10 +211,7 @@ pub fn check_type_def_gen_no_bounds( pub struct Checker(InstanceUsage); impl syn::parse::Parse for Checker { fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { - has_instance: false, - span: input.span(), - }; + let mut instance_usage = InstanceUsage { has_instance: false, span: input.span() }; input.parse::()?; if input.peek(syn::Token![,]) { @@ -241,7 +232,8 @@ pub fn check_type_def_gen_no_bounds( let mut err = syn::Error::new(span, msg); err.combine(e); err - })?.0; + })? + .0; Ok(i) } @@ -269,10 +261,7 @@ pub fn check_type_def_optional_gen( return Ok(Self(None)) } - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; input.parse::()?; @@ -321,9 +310,13 @@ pub fn check_type_def_optional_gen( let mut err = syn::Error::new(span, msg); err.combine(e); err - })?.0 + })? + .0 // Span can be call_site if generic is empty. Thus we replace it. 
- .map(|mut i| { i.span = span; i }); + .map(|mut i| { + i.span = span; + i + }); Ok(i) } @@ -338,10 +331,7 @@ pub fn check_pallet_struct_usage(type_: &Box) -> syn::Result syn::Result { - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; input.parse::()?; input.parse::()?; @@ -363,7 +353,8 @@ pub fn check_pallet_struct_usage(type_: &Box) -> syn::Result) -> syn::Result syn::Result { +pub fn check_impl_gen(gen: &syn::Generics, span: proc_macro2::Span) -> syn::Result { let expected = "expected `impl` or `impl, I: 'static>`"; pub struct Checker(InstanceUsage); impl syn::parse::Parse for Checker { fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; input.parse::()?; input.parse::()?; @@ -411,7 +396,8 @@ pub fn check_impl_gen( let mut err = syn::Error::new(span, format!("Invalid generics: {}", expected)); err.combine(e); err - })?.0; + })? + .0; Ok(i) } @@ -434,10 +420,7 @@ pub fn check_type_def_gen( pub struct Checker(InstanceUsage); impl syn::parse::Parse for Checker { fn parse(input: syn::parse::ParseStream) -> syn::Result { - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; input.parse::()?; @@ -486,7 +469,8 @@ pub fn check_type_def_gen( let mut err = syn::Error::new(span, msg); err.combine(e); err - })?.0; + })? + .0; // Span can be call_site if generic is empty. Thus we replace it. 
i.span = span; @@ -504,10 +488,7 @@ pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result syn::Result { - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; input.parse::()?; input.parse::()?; @@ -529,7 +510,8 @@ pub fn check_genesis_builder_usage(type_: &syn::Path) -> syn::Result()?; input.parse::()?; - let mut instance_usage = InstanceUsage { - span: input.span(), - has_instance: false, - }; + let mut instance_usage = InstanceUsage { span: input.span(), has_instance: false }; if input.is_empty() { return Ok(Self(Some(instance_usage))) @@ -586,17 +565,19 @@ pub fn check_type_value_gen( let mut err = syn::Error::new(span, msg); err.combine(e); err - })?.0 + })? + .0 // Span can be call_site if generic is empty. Thus we replace it. - .map(|mut i| { i.span = span; i }); + .map(|mut i| { + i.span = span; + i + }); Ok(i) } /// Check the keyword `DispatchResultWithPostInfo` or `DispatchResult`. -pub fn check_pallet_call_return_type( - type_: &syn::Type, -) -> syn::Result<()> { +pub fn check_pallet_call_return_type(type_: &syn::Type) -> syn::Result<()> { pub struct Checker; impl syn::parse::Parse for Checker { fn parse(input: syn::parse::ParseStream) -> syn::Result { diff --git a/frame/support/procedural/src/pallet/parse/hooks.rs b/frame/support/procedural/src/pallet/parse/hooks.rs index 99ae3ed625414..1dd86498f22d5 100644 --- a/frame/support/procedural/src/pallet/parse/hooks.rs +++ b/frame/support/procedural/src/pallet/parse/hooks.rs @@ -15,8 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use syn::spanned::Spanned; use super::helper; +use syn::spanned::Spanned; /// Implementation of the pallet hooks. 
pub struct HooksDef { @@ -42,30 +42,31 @@ impl HooksDef { item } else { let msg = "Invalid pallet::hooks, expected item impl"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) }; let mut instances = vec![]; instances.push(helper::check_pallet_struct_usage(&item.self_ty)?); instances.push(helper::check_impl_gen(&item.generics, item.impl_token.span())?); - let item_trait = &item.trait_.as_ref() + let item_trait = &item + .trait_ + .as_ref() .ok_or_else(|| { let msg = "Invalid pallet::hooks, expected impl<..> Hooks \ for Pallet<..>"; syn::Error::new(item.span(), msg) - })?.1; + })? + .1; - if item_trait.segments.len() != 1 - || item_trait.segments[0].ident != "Hooks" - { + if item_trait.segments.len() != 1 || item_trait.segments[0].ident != "Hooks" { let msg = format!( "Invalid pallet::hooks, expected trait to be `Hooks` found `{}`\ , you can import from `frame_support::pallet_prelude`", quote::quote!(#item_trait) ); - return Err(syn::Error::new(item_trait.span(), msg)); + return Err(syn::Error::new(item_trait.span(), msg)) } let has_runtime_upgrade = item.items.iter().any(|i| match i { diff --git a/frame/support/procedural/src/pallet/parse/inherent.rs b/frame/support/procedural/src/pallet/parse/inherent.rs index a3f12b1574981..de5ad8f795db5 100644 --- a/frame/support/procedural/src/pallet/parse/inherent.rs +++ b/frame/support/procedural/src/pallet/parse/inherent.rs @@ -15,8 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use syn::spanned::Spanned; use super::helper; +use syn::spanned::Spanned; /// The definition of the pallet inherent implementation. 
pub struct InherentDef { @@ -32,22 +32,22 @@ impl InherentDef { item } else { let msg = "Invalid pallet::inherent, expected item impl"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) }; if item.trait_.is_none() { let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) } if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { if last.ident != "ProvideInherent" { let msg = "Invalid pallet::inherent, expected trait ProvideInherent"; - return Err(syn::Error::new(last.span(), msg)); + return Err(syn::Error::new(last.span(), msg)) } } else { let msg = "Invalid pallet::inherent, expected impl<..> ProvideInherent for Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) } let mut instances = vec![]; diff --git a/frame/support/procedural/src/pallet/parse/mod.rs b/frame/support/procedural/src/pallet/parse/mod.rs index 2f378c52e8b33..c7367e582044b 100644 --- a/frame/support/procedural/src/pallet/parse/mod.rs +++ b/frame/support/procedural/src/pallet/parse/mod.rs @@ -19,24 +19,24 @@ //! //! Parse the module into `Def` struct through `Def::try_from` function. -pub mod config; -pub mod pallet_struct; -pub mod hooks; pub mod call; +pub mod config; pub mod error; -pub mod origin; -pub mod inherent; -pub mod storage; pub mod event; -pub mod helper; -pub mod genesis_config; +pub mod extra_constants; pub mod genesis_build; -pub mod validate_unsigned; +pub mod genesis_config; +pub mod helper; +pub mod hooks; +pub mod inherent; +pub mod origin; +pub mod pallet_struct; +pub mod storage; pub mod type_value; -pub mod extra_constants; +pub mod validate_unsigned; -use syn::spanned::Spanned; use frame_support_procedural_tools::generate_crate_access_2018; +use syn::spanned::Spanned; /// Parsed definition of a pallet. 
pub struct Def { @@ -67,11 +67,14 @@ impl Def { let frame_support = generate_crate_access_2018("frame-support")?; let item_span = item.span(); - let items = &mut item.content.as_mut() + let items = &mut item + .content + .as_mut() .ok_or_else(|| { let msg = "Invalid pallet definition, expected mod to be inlined."; syn::Error::new(item_span, msg) - })?.1; + })? + .1; let mut config = None; let mut pallet_struct = None; @@ -128,13 +131,12 @@ impl Def { }, Some(PalletAttr::TypeValue(span)) => type_values.push(type_value::TypeValueDef::try_from(span, index, item)?), - Some(PalletAttr::ExtraConstants(_)) => { + Some(PalletAttr::ExtraConstants(_)) => extra_constants = - Some(extra_constants::ExtraConstantsDef::try_from(index, item)?) - }, + Some(extra_constants::ExtraConstantsDef::try_from(index, item)?), Some(attr) => { let msg = "Invalid duplicated attribute"; - return Err(syn::Error::new(attr.span(), msg)); + return Err(syn::Error::new(attr.span(), msg)) }, None => (), } @@ -148,12 +150,13 @@ impl Def { genesis_config.as_ref().map_or("unused", |_| "used"), genesis_build.as_ref().map_or("unused", |_| "used"), ); - return Err(syn::Error::new(item_span, msg)); + return Err(syn::Error::new(item_span, msg)) } let def = Def { item, - config: config.ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::config]`"))?, + config: config + .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::config]`"))?, pallet_struct: pallet_struct .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::pallet]`"))?, hooks, @@ -181,10 +184,7 @@ impl Def { /// Check that usage of trait `Event` is consistent with the definition, i.e. it is declared /// and trait defines type Event, or not declared and no trait associated type. 
fn check_event_usage(&self) -> syn::Result<()> { - match ( - self.config.has_event_type, - self.event.is_some(), - ) { + match (self.config.has_event_type, self.event.is_some()) { (true, false) => { let msg = "Invalid usage of Event, `Config` contains associated type `Event`, \ but enum `Event` is not declared (i.e. no use of `#[pallet::event]`). \ @@ -197,7 +197,7 @@ impl Def { An Event associated type must be declare on trait `Config`."; Err(syn::Error::new(proc_macro2::Span::call_site(), msg)) }, - _ => Ok(()) + _ => Ok(()), } } @@ -235,19 +235,18 @@ impl Def { instances.extend_from_slice(&extra_constants.instances[..]); } - let mut errors = instances.into_iter() - .filter_map(|instances| { - if instances.has_instance == self.config.has_instance { - return None - } - let msg = if self.config.has_instance { - "Invalid generic declaration, trait is defined with instance but generic use none" - } else { - "Invalid generic declaration, trait is defined without instance but generic use \ + let mut errors = instances.into_iter().filter_map(|instances| { + if instances.has_instance == self.config.has_instance { + return None + } + let msg = if self.config.has_instance { + "Invalid generic declaration, trait is defined with instance but generic use none" + } else { + "Invalid generic declaration, trait is defined without instance but generic use \ some" - }; - Some(syn::Error::new(instances.span, msg)) - }); + }; + Some(syn::Error::new(instances.span, msg)) + }); if let Some(mut first_error) = errors.next() { for error in errors { @@ -351,7 +350,8 @@ impl GenericKind { match self { GenericKind::None => quote::quote!(), GenericKind::Config => quote::quote_spanned!(span => T: Config), - GenericKind::ConfigAndInstance => quote::quote_spanned!(span => T: Config, I: 'static), + GenericKind::ConfigAndInstance => + quote::quote_spanned!(span => T: Config, I: 'static), } } diff --git a/frame/support/procedural/src/pallet/parse/origin.rs 
b/frame/support/procedural/src/pallet/parse/origin.rs index 2b47978b808a8..c4e1197ac511c 100644 --- a/frame/support/procedural/src/pallet/parse/origin.rs +++ b/frame/support/procedural/src/pallet/parse/origin.rs @@ -15,8 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use syn::spanned::Spanned; use super::helper; +use syn::spanned::Spanned; /// Definition of the pallet origin type. /// @@ -42,7 +42,7 @@ impl OriginDef { syn::Item::Type(item) => (&item.vis, &item.ident, &item.generics), _ => { let msg = "Invalid pallet::origin, expected enum or struct or type"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) }, }; @@ -54,27 +54,19 @@ impl OriginDef { instances.push(u); } else { // construct_runtime only allow generic event for instantiable pallet. - instances.push(helper::InstanceUsage { - has_instance: false, - span: ident.span(), - }) + instances.push(helper::InstanceUsage { has_instance: false, span: ident.span() }) } if !matches!(vis, syn::Visibility::Public(_)) { let msg = "Invalid pallet::origin, Origin must be public"; - return Err(syn::Error::new(item_span, msg)); + return Err(syn::Error::new(item_span, msg)) } if ident != "Origin" { let msg = "Invalid pallet::origin, ident must `Origin`"; - return Err(syn::Error::new(ident.span(), msg)); + return Err(syn::Error::new(ident.span(), msg)) } - Ok(OriginDef { - index, - has_instance, - is_generic, - instances, - }) + Ok(OriginDef { index, has_instance, is_generic, instances }) } } diff --git a/frame/support/procedural/src/pallet/parse/pallet_struct.rs b/frame/support/procedural/src/pallet/parse/pallet_struct.rs index ba85da2d9e684..088b647fad7de 100644 --- a/frame/support/procedural/src/pallet/parse/pallet_struct.rs +++ b/frame/support/procedural/src/pallet/parse/pallet_struct.rs @@ -16,8 +16,8 @@ // limitations under the License. 
use super::helper; -use syn::spanned::Spanned; use quote::ToTokens; +use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { @@ -49,11 +49,7 @@ pub struct PalletStructDef { /// * `#[pallet::generate_store($vis trait Store)]` /// * `#[pallet::generate_storage_info]` pub enum PalletStructAttr { - GenerateStore { - span: proc_macro2::Span, - vis: syn::Visibility, - keyword: keyword::Store, - }, + GenerateStore { span: proc_macro2::Span, vis: syn::Visibility, keyword: keyword::Store }, GenerateStorageInfoTrait(proc_macro2::Span), } @@ -103,7 +99,7 @@ impl PalletStructDef { item } else { let msg = "Invalid pallet::pallet, expected struct definition"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) }; let mut store = None; @@ -115,12 +111,14 @@ impl PalletStructDef { PalletStructAttr::GenerateStore { vis, keyword, .. } if store.is_none() => { store = Some((vis, keyword)); }, - PalletStructAttr::GenerateStorageInfoTrait(span) if generate_storage_info.is_none() => { + PalletStructAttr::GenerateStorageInfoTrait(span) + if generate_storage_info.is_none() => + { generate_storage_info = Some(span); - }, + } attr => { let msg = "Unexpected duplicated attribute"; - return Err(syn::Error::new(attr.span(), msg)); + return Err(syn::Error::new(attr.span(), msg)) }, } } @@ -129,12 +127,12 @@ impl PalletStructDef { if !matches!(item.vis, syn::Visibility::Public(_)) { let msg = "Invalid pallet::pallet, Pallet must be public"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) } if item.generics.where_clause.is_some() { let msg = "Invalid pallet::pallet, where clause not supported on Pallet declaration"; - return Err(syn::Error::new(item.generics.where_clause.span(), msg)); + return Err(syn::Error::new(item.generics.where_clause.span(), msg)) } let mut instances = vec![]; diff --git a/frame/support/procedural/src/pallet/parse/storage.rs 
b/frame/support/procedural/src/pallet/parse/storage.rs index 82317200dcdd1..dfa6d9d274988 100644 --- a/frame/support/procedural/src/pallet/parse/storage.rs +++ b/frame/support/procedural/src/pallet/parse/storage.rs @@ -17,9 +17,9 @@ use super::helper; use frame_support_procedural_tools::get_doc_literals; -use syn::spanned::Spanned; use quote::ToTokens; use std::collections::HashMap; +use syn::spanned::Spanned; /// List of additional token to be used for parsing. mod keyword { @@ -70,11 +70,10 @@ impl syn::parse::Parse for PalletStorageAttr { let renamed_prefix = content.parse::()?; // Ensure the renamed prefix is a proper Rust identifier - syn::parse_str::(&renamed_prefix.value()) - .map_err(|_| { - let msg = format!("`{}` is not a valid identifier", renamed_prefix.value()); - syn::Error::new(renamed_prefix.span(), msg) - })?; + syn::parse_str::(&renamed_prefix.value()).map_err(|_| { + let msg = format!("`{}` is not a valid identifier", renamed_prefix.value()); + syn::Error::new(renamed_prefix.span(), msg) + })?; Ok(Self::StorageName(renamed_prefix, attr_span)) } else { @@ -87,16 +86,8 @@ impl syn::parse::Parse for PalletStorageAttr { pub enum Metadata { Value { value: syn::Type }, Map { value: syn::Type, key: syn::Type }, - DoubleMap { - value: syn::Type, - key1: syn::Type, - key2: syn::Type - }, - NMap { - keys: Vec, - keygen: syn::Type, - value: syn::Type, - }, + DoubleMap { value: syn::Type, key1: syn::Type, key2: syn::Type }, + NMap { keys: Vec, keygen: syn::Type, value: syn::Type }, } pub enum QueryKind { @@ -182,11 +173,8 @@ impl StorageGenerics { Self::DoubleMap { value, key1, key2, .. } => Metadata::DoubleMap { value, key1, key2 }, Self::Map { value, key, .. } => Metadata::Map { value, key }, Self::Value { value, .. } => Metadata::Value { value }, - Self::NMap { keygen, value, .. } => Metadata::NMap { - keys: collect_keys(&keygen)?, - keygen, - value, - }, + Self::NMap { keygen, value, .. 
} => + Metadata::NMap { keys: collect_keys(&keygen)?, keygen, value }, }; Ok(res) @@ -195,11 +183,10 @@ impl StorageGenerics { /// Return the query kind from the defined generics fn query_kind(&self) -> Option { match &self { - Self::DoubleMap { query_kind, .. } - | Self::Map { query_kind, .. } - | Self::Value { query_kind, .. } - | Self::NMap { query_kind, .. } - => query_kind.clone(), + Self::DoubleMap { query_kind, .. } | + Self::Map { query_kind, .. } | + Self::Value { query_kind, .. } | + Self::NMap { query_kind, .. } => query_kind.clone(), } } } @@ -226,7 +213,10 @@ fn check_generics( let mut e = format!( "`{}` expect generics {}and optional generics {}", storage_type_name, - mandatory_generics.iter().map(|name| format!("`{}`, ", name)).collect::(), + mandatory_generics + .iter() + .map(|name| format!("`{}`, ", name)) + .collect::(), &optional_generics.iter().map(|name| format!("`{}`, ", name)).collect::(), ); e.pop(); @@ -236,14 +226,12 @@ fn check_generics( }; for (gen_name, gen_binding) in map { - if !mandatory_generics.contains(&gen_name.as_str()) - && !optional_generics.contains(&gen_name.as_str()) + if !mandatory_generics.contains(&gen_name.as_str()) && + !optional_generics.contains(&gen_name.as_str()) { let msg = format!( "Invalid pallet::storage, Unexpected generic `{}` for `{}`. 
{}", - gen_name, - storage_type_name, - expectation, + gen_name, storage_type_name, expectation, ); errors.push(syn::Error::new(gen_binding.span(), msg)); } @@ -253,8 +241,7 @@ fn check_generics( if !map.contains_key(&mandatory_generic.to_string()) { let msg = format!( "Invalid pallet::storage, cannot find `{}` generic, required for `{}`.", - mandatory_generic, - storage_type_name + mandatory_generic, storage_type_name ); errors.push(syn::Error::new(args_span, msg)); } @@ -285,7 +272,7 @@ fn process_named_generics( let msg = "Invalid pallet::storage, Duplicated named generic"; let mut err = syn::Error::new(arg.ident.span(), msg); err.combine(syn::Error::new(other.ident.span(), msg)); - return Err(err); + return Err(err) } parsed.insert(arg.ident.to_string(), arg.clone()); } @@ -301,15 +288,14 @@ fn process_named_generics( )?; StorageGenerics::Value { - value: parsed.remove("Value") + value: parsed + .remove("Value") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), - query_kind: parsed.remove("QueryKind") - .map(|binding| binding.ty), - on_empty: parsed.remove("OnEmpty") - .map(|binding| binding.ty), + query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), + on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), } - } + }, StorageKind::Map => { check_generics( &parsed, @@ -320,20 +306,23 @@ fn process_named_generics( )?; StorageGenerics::Map { - hasher: parsed.remove("Hasher") + hasher: parsed + .remove("Hasher") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), - key: parsed.remove("Key") + key: parsed + .remove("Key") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), - value: parsed.remove("Value") + value: parsed + .remove("Value") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), max_values: 
parsed.remove("MaxValues").map(|binding| binding.ty), } - } + }, StorageKind::DoubleMap => { check_generics( &parsed, @@ -344,26 +333,31 @@ fn process_named_generics( )?; StorageGenerics::DoubleMap { - hasher1: parsed.remove("Hasher1") + hasher1: parsed + .remove("Hasher1") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), - key1: parsed.remove("Key1") + key1: parsed + .remove("Key1") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), - hasher2: parsed.remove("Hasher2") + hasher2: parsed + .remove("Hasher2") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), - key2: parsed.remove("Key2") + key2: parsed + .remove("Key2") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), - value: parsed.remove("Value") + value: parsed + .remove("Value") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), max_values: parsed.remove("MaxValues").map(|binding| binding.ty), } - } + }, StorageKind::NMap => { check_generics( &parsed, @@ -374,17 +368,19 @@ fn process_named_generics( )?; StorageGenerics::NMap { - keygen: parsed.remove("Key") + keygen: parsed + .remove("Key") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), - value: parsed.remove("Value") + value: parsed + .remove("Value") .map(|binding| binding.ty) .expect("checked above as mandatory generic"), query_kind: parsed.remove("QueryKind").map(|binding| binding.ty), on_empty: parsed.remove("OnEmpty").map(|binding| binding.ty), max_values: parsed.remove("MaxValues").map(|binding| binding.ty), } - } + }, }; let metadata = generics.metadata()?; @@ -400,41 +396,32 @@ fn process_unnamed_generics( args: &[syn::Type], ) -> syn::Result<(Option, Metadata, Option)> { let retrieve_arg = |arg_pos| { - args.get(arg_pos) - .cloned() - .ok_or_else(|| { - let msg = format!( - "Invalid 
pallet::storage, unexpected number of generic argument, \ + args.get(arg_pos).cloned().ok_or_else(|| { + let msg = format!( + "Invalid pallet::storage, unexpected number of generic argument, \ expect at least {} args, found {}.", - arg_pos + 1, - args.len(), - ); - syn::Error::new(args_span, msg) - }) + arg_pos + 1, + args.len(), + ); + syn::Error::new(args_span, msg) + }) }; let prefix_arg = retrieve_arg(0)?; - syn::parse2::(prefix_arg.to_token_stream()) - .map_err(|e| { - let msg = "Invalid pallet::storage, for unnamed generic arguments the type \ + syn::parse2::(prefix_arg.to_token_stream()).map_err(|e| { + let msg = "Invalid pallet::storage, for unnamed generic arguments the type \ first generic argument must be `_`, the argument is then replaced by macro."; - let mut err = syn::Error::new(prefix_arg.span(), msg); - err.combine(e); - err - })?; + let mut err = syn::Error::new(prefix_arg.span(), msg); + err.combine(e); + err + })?; let res = match storage { - StorageKind::Value => ( - None, - Metadata::Value { value: retrieve_arg(1)? }, - retrieve_arg(2).ok(), - ), + StorageKind::Value => + (None, Metadata::Value { value: retrieve_arg(1)? }, retrieve_arg(2).ok()), StorageKind::Map => ( None, - Metadata::Map { - key: retrieve_arg(2)?, - value: retrieve_arg(3)?, - }, + Metadata::Map { key: retrieve_arg(2)?, value: retrieve_arg(3)? }, retrieve_arg(4).ok(), ), StorageKind::DoubleMap => ( @@ -449,15 +436,7 @@ fn process_unnamed_generics( StorageKind::NMap => { let keygen = retrieve_arg(1)?; let keys = collect_keys(&keygen)?; - ( - None, - Metadata::NMap { - keys, - keygen, - value: retrieve_arg(2)?, - }, - retrieve_arg(3).ok(), - ) + (None, Metadata::NMap { keys, keygen, value: retrieve_arg(2)? 
}, retrieve_arg(3).ok()) }, }; @@ -480,8 +459,8 @@ fn process_generics( found `{}`.", found, ); - return Err(syn::Error::new(segment.ident.span(), msg)); - } + return Err(syn::Error::new(segment.ident.span(), msg)) + }, }; let args_span = segment.arguments.span(); @@ -491,12 +470,14 @@ fn process_generics( _ => { let msg = "Invalid pallet::storage, invalid number of generic generic arguments, \ expect more that 0 generic arguments."; - return Err(syn::Error::new(segment.span(), msg)); - } + return Err(syn::Error::new(segment.span(), msg)) + }, }; if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::Type(_))) { - let args = args.args.iter() + let args = args + .args + .iter() .map(|gen| match gen { syn::GenericArgument::Type(gen) => gen.clone(), _ => unreachable!("It is asserted above that all generics are types"), @@ -504,7 +485,9 @@ fn process_generics( .collect::>(); process_unnamed_generics(&storage_kind, args_span, &args) } else if args.args.iter().all(|gen| matches!(gen, syn::GenericArgument::Binding(_))) { - let args = args.args.iter() + let args = args + .args + .iter() .map(|gen| match gen { syn::GenericArgument::Binding(gen) => gen.clone(), _ => unreachable!("It is asserted above that all generics are bindings"), @@ -522,11 +505,7 @@ fn process_generics( /// Parse the 2nd type argument to `StorageNMap` and return its keys. 
fn collect_keys(keygen: &syn::Type) -> syn::Result> { if let syn::Type::Tuple(tup) = keygen { - tup - .elems - .iter() - .map(extract_key) - .collect::>>() + tup.elems.iter().map(extract_key).collect::>>() } else { Ok(vec![extract_key(keygen)?]) } @@ -538,7 +517,7 @@ fn extract_key(ty: &syn::Type) -> syn::Result { typ } else { let msg = "Invalid pallet::storage, expected type path"; - return Err(syn::Error::new(ty.span(), msg)); + return Err(syn::Error::new(ty.span(), msg)) }; let key_struct = typ.path.segments.last().ok_or_else(|| { @@ -547,28 +526,31 @@ fn extract_key(ty: &syn::Type) -> syn::Result { })?; if key_struct.ident != "Key" && key_struct.ident != "NMapKey" { let msg = "Invalid pallet::storage, expected Key or NMapKey struct"; - return Err(syn::Error::new(key_struct.ident.span(), msg)); + return Err(syn::Error::new(key_struct.ident.span(), msg)) } let ty_params = if let syn::PathArguments::AngleBracketed(args) = &key_struct.arguments { args } else { let msg = "Invalid pallet::storage, expected angle bracketed arguments"; - return Err(syn::Error::new(key_struct.arguments.span(), msg)); + return Err(syn::Error::new(key_struct.arguments.span(), msg)) }; if ty_params.args.len() != 2 { - let msg = format!("Invalid pallet::storage, unexpected number of generic arguments \ - for Key struct, expected 2 args, found {}", ty_params.args.len()); - return Err(syn::Error::new(ty_params.span(), msg)); + let msg = format!( + "Invalid pallet::storage, unexpected number of generic arguments \ + for Key struct, expected 2 args, found {}", + ty_params.args.len() + ); + return Err(syn::Error::new(ty_params.span(), msg)) } let key = match &ty_params.args[1] { syn::GenericArgument::Type(key_ty) => key_ty.clone(), _ => { let msg = "Invalid pallet::storage, expected type"; - return Err(syn::Error::new(ty_params.args[1].span(), msg)); - } + return Err(syn::Error::new(ty_params.args[1].span(), msg)) + }, }; Ok(key) @@ -577,8 +559,7 @@ fn extract_key(ty: &syn::Type) -> syn::Result 
{ impl StorageDef { /// Return the storage prefix for this storage item pub fn prefix(&self) -> String { - self - .rename_as + self.rename_as .as_ref() .map(syn::LitStr::value) .unwrap_or(self.ident.to_string()) @@ -587,11 +568,7 @@ impl StorageDef { /// Return either the span of the ident or the span of the literal in the /// #[storage_prefix] attribute pub fn prefix_span(&self) -> proc_macro2::Span { - self - .rename_as - .as_ref() - .map(syn::LitStr::span) - .unwrap_or(self.ident.span()) + self.rename_as.as_ref().map(syn::LitStr::span).unwrap_or(self.ident.span()) } pub fn try_from( @@ -602,7 +579,7 @@ impl StorageDef { let item = if let syn::Item::Type(item) = item { item } else { - return Err(syn::Error::new(item.span(), "Invalid pallet::storage, expect item type.")); + return Err(syn::Error::new(item.span(), "Invalid pallet::storage, expect item type.")) }; let attrs: Vec = helper::take_item_pallet_attrs(&mut item.attrs)?; @@ -611,23 +588,19 @@ impl StorageDef { .partition::, _>(|attr| matches!(attr, PalletStorageAttr::Getter(..))); if getters.len() > 1 { let msg = "Invalid pallet::storage, multiple argument pallet::getter found"; - return Err(syn::Error::new(getters[1].attr_span(), msg)); + return Err(syn::Error::new(getters[1].attr_span(), msg)) } if names.len() > 1 { let msg = "Invalid pallet::storage, multiple argument pallet::storage_prefix found"; - return Err(syn::Error::new(names[1].attr_span(), msg)); + return Err(syn::Error::new(names[1].attr_span(), msg)) } - let getter = getters.pop().map(|attr| { - match attr { - PalletStorageAttr::Getter(ident, _) => ident, - _ => unreachable!(), - } + let getter = getters.pop().map(|attr| match attr { + PalletStorageAttr::Getter(ident, _) => ident, + _ => unreachable!(), }); - let rename_as = names.pop().map(|attr| { - match attr { - PalletStorageAttr::StorageName(lit, _) => lit, - _ => unreachable!(), - } + let rename_as = names.pop().map(|attr| match attr { + PalletStorageAttr::StorageName(lit, _) => lit, + _ 
=> unreachable!(), }); let cfg_attrs = helper::get_item_cfg_attrs(&item.attrs); @@ -642,12 +615,12 @@ impl StorageDef { typ } else { let msg = "Invalid pallet::storage, expected type path"; - return Err(syn::Error::new(item.ty.span(), msg)); + return Err(syn::Error::new(item.ty.span(), msg)) }; if typ.path.segments.len() != 1 { let msg = "Invalid pallet::storage, expected type path with one segment"; - return Err(syn::Error::new(item.ty.span(), msg)); + return Err(syn::Error::new(item.ty.span(), msg)) } let (named_generics, metadata, query_kind) = process_generics(&typ.path.segments[0])?; @@ -655,11 +628,11 @@ impl StorageDef { let query_kind = query_kind .map(|query_kind| match query_kind { syn::Type::Path(path) - if path.path.segments.last().map_or(false, |s| s.ident == "OptionQuery") - => Some(QueryKind::OptionQuery), + if path.path.segments.last().map_or(false, |s| s.ident == "OptionQuery") => + Some(QueryKind::OptionQuery), syn::Type::Path(path) - if path.path.segments.last().map_or(false, |s| s.ident == "ValueQuery") - => Some(QueryKind::ValueQuery), + if path.path.segments.last().map_or(false, |s| s.ident == "ValueQuery") => + Some(QueryKind::ValueQuery), _ => None, }) .unwrap_or(Some(QueryKind::OptionQuery)); // This value must match the default generic. @@ -668,7 +641,7 @@ impl StorageDef { let msg = "Invalid pallet::storage, cannot generate getter because QueryKind is not \ identifiable. 
QueryKind must be `OptionQuery`, `ValueQuery`, or default one to be \ identifiable."; - return Err(syn::Error::new(getter.unwrap().span(), msg)); + return Err(syn::Error::new(getter.unwrap().span(), msg)) } Ok(StorageDef { diff --git a/frame/support/procedural/src/pallet/parse/type_value.rs b/frame/support/procedural/src/pallet/parse/type_value.rs index 58e6105818e01..7b9d57472db4b 100644 --- a/frame/support/procedural/src/pallet/parse/type_value.rs +++ b/frame/support/procedural/src/pallet/parse/type_value.rs @@ -50,28 +50,31 @@ impl TypeValueDef { item } else { let msg = "Invalid pallet::type_value, expected item fn"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) }; - if !item.attrs.is_empty() { let msg = "Invalid pallet::type_value, unexpected attribute"; - return Err(syn::Error::new(item.attrs[0].span(), msg)); + return Err(syn::Error::new(item.attrs[0].span(), msg)) } - if let Some(span) = item.sig.constness.as_ref().map(|t| t.span()) + if let Some(span) = item + .sig + .constness + .as_ref() + .map(|t| t.span()) .or_else(|| item.sig.asyncness.as_ref().map(|t| t.span())) .or_else(|| item.sig.unsafety.as_ref().map(|t| t.span())) .or_else(|| item.sig.abi.as_ref().map(|t| t.span())) .or_else(|| item.sig.variadic.as_ref().map(|t| t.span())) { let msg = "Invalid pallet::type_value, unexpected token"; - return Err(syn::Error::new(span, msg)); + return Err(syn::Error::new(span, msg)) } if !item.sig.inputs.is_empty() { let msg = "Invalid pallet::type_value, unexpected argument"; - return Err(syn::Error::new(item.sig.inputs[0].span(), msg)); + return Err(syn::Error::new(item.sig.inputs[0].span(), msg)) } let vis = item.vis.clone(); @@ -81,7 +84,7 @@ impl TypeValueDef { syn::ReturnType::Type(_, type_) => type_, syn::ReturnType::Default => { let msg = "Invalid pallet::type_value, expected return type"; - return Err(syn::Error::new(item.sig.span(), msg)); + return Err(syn::Error::new(item.sig.span(), msg)) }, }; diff 
--git a/frame/support/procedural/src/pallet/parse/validate_unsigned.rs b/frame/support/procedural/src/pallet/parse/validate_unsigned.rs index 0a406413f3940..87e2a326f1862 100644 --- a/frame/support/procedural/src/pallet/parse/validate_unsigned.rs +++ b/frame/support/procedural/src/pallet/parse/validate_unsigned.rs @@ -15,8 +15,8 @@ // See the License for the specific language governing permissions and // limitations under the License. -use syn::spanned::Spanned; use super::helper; +use syn::spanned::Spanned; /// The definition of the pallet validate unsigned implementation. pub struct ValidateUnsignedDef { @@ -32,24 +32,24 @@ impl ValidateUnsignedDef { item } else { let msg = "Invalid pallet::validate_unsigned, expected item impl"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) }; if item.trait_.is_none() { let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) } if let Some(last) = item.trait_.as_ref().unwrap().1.segments.last() { if last.ident != "ValidateUnsigned" { let msg = "Invalid pallet::validate_unsigned, expected trait ValidateUnsigned"; - return Err(syn::Error::new(last.span(), msg)); + return Err(syn::Error::new(last.span(), msg)) } } else { let msg = "Invalid pallet::validate_unsigned, expected impl<..> ValidateUnsigned for \ Pallet<..>"; - return Err(syn::Error::new(item.span(), msg)); + return Err(syn::Error::new(item.span(), msg)) } let mut instances = vec![]; diff --git a/frame/support/procedural/src/pallet_version.rs b/frame/support/procedural/src/pallet_version.rs index 0f3c478d4977a..f0821f343c035 100644 --- a/frame/support/procedural/src/pallet_version.rs +++ b/frame/support/procedural/src/pallet_version.rs @@ -17,10 +17,10 @@ //! Implementation of macros related to pallet versioning. 
-use proc_macro2::{TokenStream, Span}; -use syn::{Result, Error}; -use std::{env, str::FromStr}; use frame_support_procedural_tools::generate_crate_access_2018; +use proc_macro2::{Span, TokenStream}; +use std::{env, str::FromStr}; +use syn::{Error, Result}; /// Get the version from the given version environment variable. /// diff --git a/frame/support/procedural/src/partial_eq_no_bound.rs b/frame/support/procedural/src/partial_eq_no_bound.rs index 1c37be8021c9e..3dbabf3f5d39a 100644 --- a/frame/support/procedural/src/partial_eq_no_bound.rs +++ b/frame/support/procedural/src/partial_eq_no_bound.rs @@ -30,41 +30,47 @@ pub fn derive_partial_eq_no_bound(input: proc_macro::TokenStream) -> proc_macro: let impl_ = match input.data { syn::Data::Struct(struct_) => match struct_.fields { syn::Fields::Named(named) => { - let fields = named.named.iter() + let fields = named + .named + .iter() .map(|i| &i.ident) .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); quote::quote!( true #( && #fields )* ) }, syn::Fields::Unnamed(unnamed) => { - let fields = unnamed.unnamed.iter().enumerate() + let fields = unnamed + .unnamed + .iter() + .enumerate() .map(|(i, _)| syn::Index::from(i)) .map(|i| quote::quote_spanned!(i.span() => self.#i == other.#i )); quote::quote!( true #( && #fields )* ) }, syn::Fields::Unit => { - quote::quote!( true ) - } + quote::quote!(true) + }, }, syn::Data::Enum(enum_) => { - let variants = enum_.variants.iter() - .map(|variant| { + let variants = + enum_.variants.iter().map(|variant| { let ident = &variant.ident; match &variant.fields { syn::Fields::Named(named) => { let names = named.named.iter().map(|i| &i.ident); - let other_names = names.clone() - .enumerate() - .map(|(n, ident)| - syn::Ident::new(&format!("_{}", n), ident.span()) - ); + let other_names = names.clone().enumerate().map(|(n, ident)| { + syn::Ident::new(&format!("_{}", n), ident.span()) + }); let capture = names.clone(); - let other_capture = 
names.clone().zip(other_names.clone()) + let other_capture = names + .clone() + .zip(other_names.clone()) .map(|(i, other_i)| quote::quote!(#i: #other_i)); - let eq = names.zip(other_names) - .map(|(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i)); + let eq = names.zip(other_names).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); quote::quote!( ( Self::#ident { #( #capture, )* }, @@ -73,12 +79,18 @@ pub fn derive_partial_eq_no_bound(input: proc_macro::TokenStream) -> proc_macro: ) }, syn::Fields::Unnamed(unnamed) => { - let names = unnamed.unnamed.iter().enumerate() + let names = unnamed + .unnamed + .iter() + .enumerate() .map(|(i, f)| syn::Ident::new(&format!("_{}", i), f.span())); - let other_names = unnamed.unnamed.iter().enumerate() - .map(|(i, f)| syn::Ident::new(&format!("_{}_other", i), f.span())); - let eq = names.clone().zip(other_names.clone()) - .map(|(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i)); + let other_names = + unnamed.unnamed.iter().enumerate().map(|(i, f)| { + syn::Ident::new(&format!("_{}_other", i), f.span()) + }); + let eq = names.clone().zip(other_names.clone()).map( + |(i, other_i)| quote::quote_spanned!(i.span() => #i == #other_i), + ); quote::quote!( ( Self::#ident ( #( #names, )* ), @@ -122,5 +134,6 @@ pub fn derive_partial_eq_no_bound(input: proc_macro::TokenStream) -> proc_macro: } } }; - ).into() + ) + .into() } diff --git a/frame/support/procedural/src/storage/genesis_config/builder_def.rs b/frame/support/procedural/src/storage/genesis_config/builder_def.rs index 5b73928951cfa..9669212f198fc 100644 --- a/frame/support/procedural/src/storage/genesis_config/builder_def.rs +++ b/frame/support/procedural/src/storage/genesis_config/builder_def.rs @@ -17,11 +17,11 @@ //! Builder logic definition used to build genesis storage. 
+use super::super::{DeclStorageDefExt, StorageLineTypeDef}; use frame_support_procedural_tools::syn_ext as ext; use proc_macro2::TokenStream; -use syn::spanned::Spanned; use quote::{quote, quote_spanned}; -use super::super::{DeclStorageDefExt, StorageLineTypeDef}; +use syn::spanned::Spanned; /// Definition of builder blocks, each block insert some value in the storage. /// They must be called inside externalities, and with `self` being the genesis config. @@ -79,7 +79,7 @@ impl BuilderDef { if let Some(data) = data { blocks.push(match &line.storage_type { StorageLineTypeDef::Simple(_) if line.is_option => { - quote!{{ + quote! {{ #data let v: Option<&#value_type>= data; if let Some(v) = v { @@ -88,7 +88,7 @@ impl BuilderDef { }} }, StorageLineTypeDef::Simple(_) if !line.is_option => { - quote!{{ + quote! {{ #data let v: &#value_type = data; <#storage_struct as #scrate::#storage_trait>::put::<&#value_type>(v); @@ -97,7 +97,7 @@ impl BuilderDef { StorageLineTypeDef::Simple(_) => unreachable!(), StorageLineTypeDef::Map(map) => { let key = &map.key; - quote!{{ + quote! {{ #data let data: &#scrate::sp_std::vec::Vec<(#key, #value_type)> = data; data.iter().for_each(|(k, v)| { @@ -110,7 +110,7 @@ impl BuilderDef { StorageLineTypeDef::DoubleMap(map) => { let key1 = &map.key1; let key2 = &map.key2; - quote!{{ + quote! {{ #data let data: &#scrate::sp_std::vec::Vec<(#key1, #key2, #value_type)> = data; data.iter().for_each(|(k1, k2, v)| { @@ -122,12 +122,8 @@ impl BuilderDef { }, StorageLineTypeDef::NMap(map) => { let key_tuple = map.to_key_tuple(); - let key_arg = if map.keys.len() == 1 { - quote!((k,)) - } else { - quote!(k) - }; - quote!{{ + let key_arg = if map.keys.len() == 1 { quote!((k,)) } else { quote!(k) }; + quote! 
{{ #data let data: &#scrate::sp_std::vec::Vec<(#key_tuple, #value_type)> = data; data.iter().for_each(|(k, v)| { @@ -148,10 +144,6 @@ impl BuilderDef { }); } - - Self { - blocks, - is_generic, - } + Self { blocks, is_generic } } } diff --git a/frame/support/procedural/src/storage/genesis_config/genesis_config_def.rs b/frame/support/procedural/src/storage/genesis_config/genesis_config_def.rs index c54349136cf05..fbdaab06b4895 100644 --- a/frame/support/procedural/src/storage/genesis_config/genesis_config_def.rs +++ b/frame/support/procedural/src/storage/genesis_config/genesis_config_def.rs @@ -17,11 +17,11 @@ //! Genesis config definition. +use super::super::{DeclStorageDefExt, StorageLineTypeDef}; use frame_support_procedural_tools::syn_ext as ext; use proc_macro2::TokenStream; -use syn::{spanned::Spanned, parse_quote}; use quote::quote; -use super::super::{DeclStorageDefExt, StorageLineTypeDef}; +use syn::{parse_quote, spanned::Spanned}; pub struct GenesisConfigFieldDef { pub name: syn::Ident, @@ -47,30 +47,28 @@ impl GenesisConfigDef { pub fn from_def(def: &DeclStorageDefExt) -> syn::Result { let fields = Self::get_genesis_config_field_defs(def)?; - let is_generic = fields.iter() + let is_generic = fields + .iter() .any(|field| ext::type_contains_ident(&field.typ, &def.module_runtime_generic)); - let ( - genesis_struct_decl, - genesis_impl, - genesis_struct, - genesis_where_clause - ) = if is_generic { - let runtime_generic = &def.module_runtime_generic; - let runtime_trait = &def.module_runtime_trait; - let optional_instance = &def.optional_instance; - let optional_instance_bound = &def.optional_instance_bound; - let optional_instance_bound_optional_default = &def.optional_instance_bound_optional_default; - let where_clause = &def.where_clause; - ( - quote!(<#runtime_generic: #runtime_trait, #optional_instance_bound_optional_default>), - quote!(<#runtime_generic: #runtime_trait, #optional_instance_bound>), - quote!(<#runtime_generic, #optional_instance>), - 
where_clause.clone(), - ) - } else { - (quote!(), quote!(), quote!(), None) - }; + let (genesis_struct_decl, genesis_impl, genesis_struct, genesis_where_clause) = + if is_generic { + let runtime_generic = &def.module_runtime_generic; + let runtime_trait = &def.module_runtime_trait; + let optional_instance = &def.optional_instance; + let optional_instance_bound = &def.optional_instance_bound; + let optional_instance_bound_optional_default = + &def.optional_instance_bound_optional_default; + let where_clause = &def.where_clause; + ( + quote!(<#runtime_generic: #runtime_trait, #optional_instance_bound_optional_default>), + quote!(<#runtime_generic: #runtime_trait, #optional_instance_bound>), + quote!(<#runtime_generic, #optional_instance>), + where_clause.clone(), + ) + } else { + (quote!(), quote!(), quote!(), None) + }; Ok(Self { is_generic, @@ -82,14 +80,14 @@ impl GenesisConfigDef { }) } - fn get_genesis_config_field_defs(def: &DeclStorageDefExt) - -> syn::Result> - { + fn get_genesis_config_field_defs( + def: &DeclStorageDefExt, + ) -> syn::Result> { let mut config_field_defs = Vec::new(); - for (config_field, line) in def.storage_lines.iter() - .filter_map(|line| line.config.as_ref().map(|config_field| (config_field.clone(), line))) - { + for (config_field, line) in def.storage_lines.iter().filter_map(|line| { + line.config.as_ref().map(|config_field| (config_field.clone(), line)) + }) { let value_type = &line.value_type; let typ = match &line.storage_type { @@ -107,18 +105,20 @@ impl GenesisConfigDef { StorageLineTypeDef::NMap(map) => { let key_tuple = map.to_key_tuple(); parse_quote!( Vec<(#key_tuple, #value_type)> ) - } + }, }; - let default = line.default_value.as_ref() - .map(|d| { - if line.is_option { - quote!( #d.unwrap_or_default() ) - } else { - quote!( #d ) - } - }) - .unwrap_or_else(|| quote!( Default::default() )); + let default = + line.default_value + .as_ref() + .map(|d| { + if line.is_option { + quote!( #d.unwrap_or_default() ) + } else { + 
quote!( #d ) + } + }) + .unwrap_or_else(|| quote!(Default::default())); config_field_defs.push(GenesisConfigFieldDef { name: config_field, @@ -129,22 +129,26 @@ impl GenesisConfigDef { } for line in &def.extra_genesis_config_lines { - let attrs = line.attrs.iter() + let attrs = line + .attrs + .iter() .map(|attr| { let meta = attr.parse_meta()?; if meta.path().is_ident("cfg") { return Err(syn::Error::new( meta.span(), - "extra genesis config items do not support `cfg` attribute" - )); + "extra genesis config items do not support `cfg` attribute", + )) } Ok(meta) }) .collect::>()?; - let default = line.default.as_ref().map(|e| quote!( #e )) - .unwrap_or_else(|| quote!( Default::default() )); - + let default = line + .default + .as_ref() + .map(|e| quote!( #e )) + .unwrap_or_else(|| quote!(Default::default())); config_field_defs.push(GenesisConfigFieldDef { name: line.name.clone(), diff --git a/frame/support/procedural/src/storage/genesis_config/mod.rs b/frame/support/procedural/src/storage/genesis_config/mod.rs index abc7af729f064..d2d1afb017736 100644 --- a/frame/support/procedural/src/storage/genesis_config/mod.rs +++ b/frame/support/procedural/src/storage/genesis_config/mod.rs @@ -18,14 +18,14 @@ //! Declaration of genesis config structure and implementation of build storage trait and //! functions. 
-use proc_macro2::{TokenStream, Span}; -use quote::quote; use super::DeclStorageDefExt; -pub use genesis_config_def::GenesisConfigDef; pub use builder_def::BuilderDef; +pub use genesis_config_def::GenesisConfigDef; +use proc_macro2::{Span, TokenStream}; +use quote::quote; -mod genesis_config_def; mod builder_def; +mod genesis_config_def; const DEFAULT_INSTANCE_NAME: &str = "__GeneratedInstance"; @@ -118,19 +118,16 @@ fn impl_build_storage( let genesis_impl = &genesis_config.genesis_impl; let genesis_where_clause = &genesis_config.genesis_where_clause; - let ( - fn_generic, - fn_traitinstance, - fn_where_clause - ) = if !genesis_config.is_generic && builders.is_generic { - ( - quote!( <#runtime_generic: #runtime_trait, #optional_instance_bound> ), - quote!( #runtime_generic, #optional_instance ), - Some(&def.where_clause), - ) - } else { - (quote!(), quote!(), None) - }; + let (fn_generic, fn_traitinstance, fn_where_clause) = + if !genesis_config.is_generic && builders.is_generic { + ( + quote!( <#runtime_generic: #runtime_trait, #optional_instance_bound> ), + quote!( #runtime_generic, #optional_instance ), + Some(&def.where_clause), + ) + } else { + (quote!(), quote!(), None) + }; let builder_blocks = &builders.blocks; @@ -138,7 +135,7 @@ fn impl_build_storage( #scrate::sp_runtime::BuildModuleGenesisStorage<#runtime_generic, #inherent_instance> ); - quote!{ + quote! { #[cfg(feature = "std")] impl#genesis_impl GenesisConfig#genesis_struct #genesis_where_clause { /// Build the storage for this module. @@ -189,7 +186,7 @@ pub fn genesis_config_and_build_storage(def: &DeclStorageDefExt) -> TokenStream decl_genesis_config_and_impl_default(scrate, &genesis_config); let impl_build_storage = impl_build_storage(scrate, def, &genesis_config, &builders); - quote!{ + quote! 
{ #decl_genesis_config_and_impl_default #impl_build_storage } diff --git a/frame/support/procedural/src/storage/getters.rs b/frame/support/procedural/src/storage/getters.rs index 32155239acdc6..988e6fa096243 100644 --- a/frame/support/procedural/src/storage/getters.rs +++ b/frame/support/procedural/src/storage/getters.rs @@ -17,15 +17,17 @@ //! Implementation of getters on module structure. +use super::{DeclStorageDefExt, StorageLineTypeDef}; use proc_macro2::TokenStream; use quote::quote; -use super::{DeclStorageDefExt, StorageLineTypeDef}; pub fn impl_getters(def: &DeclStorageDefExt) -> TokenStream { let scrate = &def.hidden_crate; let mut getters = TokenStream::new(); - for (get_fn, line) in def.storage_lines.iter() + for (get_fn, line) in def + .storage_lines + .iter() .filter_map(|line| line.getter.as_ref().map(|get_fn| (get_fn, line))) { let attrs = &line.doc_attrs; @@ -35,7 +37,7 @@ pub fn impl_getters(def: &DeclStorageDefExt) -> TokenStream { let getter = match &line.storage_type { StorageLineTypeDef::Simple(value) => { - quote!{ + quote! { #( #[ #attrs ] )* pub fn #get_fn() -> #value { <#storage_struct as #scrate::#storage_trait>::get() @@ -45,7 +47,7 @@ pub fn impl_getters(def: &DeclStorageDefExt) -> TokenStream { StorageLineTypeDef::Map(map) => { let key = &map.key; let value = &map.value; - quote!{ + quote! { #( #[ #attrs ] )* pub fn #get_fn>(key: K) -> #value { <#storage_struct as #scrate::#storage_trait>::get(key) @@ -56,7 +58,7 @@ pub fn impl_getters(def: &DeclStorageDefExt) -> TokenStream { let key1 = &map.key1; let key2 = &map.key2; let value = &map.value; - quote!{ + quote! { pub fn #get_fn(k1: KArg1, k2: KArg2) -> #value where KArg1: #scrate::codec::EncodeLike<#key1>, @@ -69,7 +71,7 @@ pub fn impl_getters(def: &DeclStorageDefExt) -> TokenStream { StorageLineTypeDef::NMap(map) => { let keygen = map.to_keygen_struct(&def.hidden_crate); let value = &map.value; - quote!{ + quote! 
{ pub fn #get_fn(key: KArg) -> #value where KArg: #scrate::storage::types::EncodeLikeTuple< @@ -80,7 +82,7 @@ pub fn impl_getters(def: &DeclStorageDefExt) -> TokenStream { <#storage_struct as #scrate::#storage_trait>::get(key) } } - } + }, }; getters.extend(getter); } diff --git a/frame/support/procedural/src/storage/instance_trait.rs b/frame/support/procedural/src/storage/instance_trait.rs index 7f44d3e03deac..00a73d6fbd6e7 100644 --- a/frame/support/procedural/src/storage/instance_trait.rs +++ b/frame/support/procedural/src/storage/instance_trait.rs @@ -18,10 +18,10 @@ //! Implementation of the trait instance and the instance structures implementing it. //! (For not instantiable traits there is still the inherent instance implemented). -use proc_macro2::{TokenStream, Span}; -use quote::quote; use super::DeclStorageDefExt; use crate::NUMBER_OF_INSTANCE; +use proc_macro2::{Span, TokenStream}; +use quote::quote; pub(crate) const INHERENT_INSTANCE_NAME: &str = "__InherentHiddenInstance"; @@ -52,14 +52,12 @@ pub fn decl_and_impl(def: &DeclStorageDefExt) -> TokenStream { index: i, } }) - .chain( - module_instance.instance_default.as_ref().map(|ident| InstanceDef { - prefix: String::new(), - instance_struct: ident.clone(), - doc: quote!(#[doc=r"Default module instance"]), - index: 0, - }) - ); + .chain(module_instance.instance_default.as_ref().map(|ident| InstanceDef { + prefix: String::new(), + instance_struct: ident.clone(), + doc: quote!(#[doc=r"Default module instance"]), + index: 0, + })); for instance_def in instance_defs { impls.extend(create_and_impl_instance_struct(scrate, &instance_def, def)); @@ -70,8 +68,8 @@ pub fn decl_and_impl(def: &DeclStorageDefExt) -> TokenStream { let inherent_instance = syn::Ident::new(INHERENT_INSTANCE_NAME, Span::call_site()); // Implementation of inherent instance. 
- if let Some(default_instance) = def.module_instance.as_ref() - .and_then(|i| i.instance_default.as_ref()) + if let Some(default_instance) = + def.module_instance.as_ref().and_then(|i| i.instance_default.as_ref()) { impls.extend(quote! { /// Hidden instance generated to be internally used when module is used without @@ -97,10 +95,7 @@ pub fn decl_and_impl(def: &DeclStorageDefExt) -> TokenStream { impls } -fn reexport_instance_trait( - scrate: &TokenStream, - def: &DeclStorageDefExt, -) -> TokenStream { +fn reexport_instance_trait(scrate: &TokenStream, def: &DeclStorageDefExt) -> TokenStream { if let Some(i) = def.module_instance.as_ref() { let instance_trait = &i.instance_trait; quote!( diff --git a/frame/support/procedural/src/storage/metadata.rs b/frame/support/procedural/src/storage/metadata.rs index 3438500dc7903..8cbcc6322efee 100644 --- a/frame/support/procedural/src/storage/metadata.rs +++ b/frame/support/procedural/src/storage/metadata.rs @@ -17,16 +17,16 @@ //! Implementation of `storage_metadata` on module structure, used by construct_runtime. +use super::{DeclStorageDefExt, StorageLineDefExt, StorageLineTypeDef}; use frame_support_procedural_tools::get_doc_literals; use proc_macro2::TokenStream; use quote::quote; -use super::{DeclStorageDefExt, StorageLineDefExt, StorageLineTypeDef}; fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> TokenStream { let value_type = &line.value_type; match &line.storage_type { StorageLineTypeDef::Simple(_) => { - quote!{ + quote! { #scrate::metadata::StorageEntryType::Plain( #scrate::scale_info::meta_type::<#value_type>() ) @@ -35,7 +35,7 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> StorageLineTypeDef::Map(map) => { let hasher = map.hasher.into_metadata(); let key = &map.key; - quote!{ + quote! 
{ #scrate::metadata::StorageEntryType::Map { hasher: #scrate::metadata::#hasher, key: #scrate::scale_info::meta_type::<#key>(), @@ -48,7 +48,7 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> let hasher2 = map.hasher2.into_metadata(); let key1 = &map.key1; let key2 = &map.key2; - quote!{ + quote! { #scrate::metadata::StorageEntryType::DoubleMap { hasher: #scrate::metadata::#hasher1, key1: #scrate::scale_info::meta_type::<#key1>(), @@ -60,11 +60,12 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> }, StorageLineTypeDef::NMap(map) => { let key_tuple = &map.to_key_tuple(); - let hashers = map.hashers + let hashers = map + .hashers .iter() .map(|hasher| hasher.to_storage_hasher_struct()) .collect::>(); - quote!{ + quote! { #scrate::metadata::StorageEntryType::NMap { keys: #scrate::scale_info::meta_type::<#key_tuple>(), hashers: #scrate::sp_std::vec! [ @@ -73,7 +74,7 @@ fn storage_line_metadata_type(scrate: &TokenStream, line: &StorageLineDefExt) -> value: #scrate::scale_info::meta_type::<#value_type>(), } } - } + }, } } @@ -82,12 +83,17 @@ fn default_byte_getter( line: &StorageLineDefExt, def: &DeclStorageDefExt, ) -> (TokenStream, TokenStream) { - let default = line.default_value.as_ref().map(|d| quote!( #d )) - .unwrap_or_else(|| quote!( Default::default() )); + let default = line + .default_value + .as_ref() + .map(|d| quote!( #d )) + .unwrap_or_else(|| quote!(Default::default())); let str_name = line.name.to_string(); - let struct_name = syn::Ident::new(&("__GetByteStruct".to_string() + &str_name), line.name.span()); - let cache_name = syn::Ident::new(&("__CACHE_GET_BYTE_STRUCT_".to_string() + &str_name), line.name.span()); + let struct_name = + syn::Ident::new(&("__GetByteStruct".to_string() + &str_name), line.name.span()); + let cache_name = + syn::Ident::new(&("__CACHE_GET_BYTE_STRUCT_".to_string() + &str_name), line.name.span()); let runtime_generic = &def.module_runtime_generic; let 
runtime_trait = &def.module_runtime_trait; @@ -159,10 +165,8 @@ pub fn impl_metadata(def: &DeclStorageDefExt) -> TokenStream { let ty = storage_line_metadata_type(scrate, line); - let ( - default_byte_getter_struct_def, - default_byte_getter_struct_instance, - ) = default_byte_getter(scrate, line, def); + let (default_byte_getter_struct_def, default_byte_getter_struct_instance) = + default_byte_getter(scrate, line, def); let docs = get_doc_literals(&line.attrs); diff --git a/frame/support/procedural/src/storage/mod.rs b/frame/support/procedural/src/storage/mod.rs index 570ef447a43cb..27964d7012a28 100644 --- a/frame/support/procedural/src/storage/mod.rs +++ b/frame/support/procedural/src/storage/mod.rs @@ -17,22 +17,22 @@ //! `decl_storage` input definition and expansion. -mod storage_struct; -mod storage_info; -mod parse; -mod store_trait; +mod genesis_config; mod getters; -mod metadata; mod instance_trait; -mod genesis_config; +mod metadata; +mod parse; mod print_pallet_upgrade; +mod storage_info; +mod storage_struct; +mod store_trait; pub(crate) use instance_trait::INHERENT_INSTANCE_NAME; -use quote::quote; use frame_support_procedural_tools::{ - generate_crate_access, generate_hidden_includes, syn_ext as ext + generate_crate_access, generate_hidden_includes, syn_ext as ext, }; +use quote::quote; /// All information contained in input of decl_storage pub struct DeclStorageDef { @@ -115,34 +115,37 @@ pub struct DeclStorageDefExt { impl From for DeclStorageDefExt { fn from(mut def: DeclStorageDef) -> Self { - let hidden_crate_name = def.hidden_crate.as_ref().map(|i| i.to_string()) + let hidden_crate_name = def + .hidden_crate + .as_ref() + .map(|i| i.to_string()) .unwrap_or_else(|| "decl_storage".to_string()); let hidden_crate = generate_crate_access(&hidden_crate_name, "frame-support"); let hidden_imports = generate_hidden_includes(&hidden_crate_name, "frame-support"); let storage_lines = def.storage_lines.drain(..).collect::>(); - let storage_lines = 
storage_lines.into_iter() + let storage_lines = storage_lines + .into_iter() .map(|line| StorageLineDefExt::from_def(line, &def, &hidden_crate)) .collect(); - let ( - optional_instance, - optional_instance_bound, - optional_instance_bound_optional_default, - ) = if let Some(instance) = def.module_instance.as_ref() { - let instance_generic = &instance.instance_generic; - let instance_trait= &instance.instance_trait; - let optional_equal_instance_default = instance.instance_default.as_ref() - .map(|d| quote!( = #d )); - ( - Some(quote!(#instance_generic)), - Some(quote!(#instance_generic: #instance_trait)), - Some(quote!(#instance_generic: #instance_trait #optional_equal_instance_default)), - ) - } else { - (None, None, None) - }; + let (optional_instance, optional_instance_bound, optional_instance_bound_optional_default) = + if let Some(instance) = def.module_instance.as_ref() { + let instance_generic = &instance.instance_generic; + let instance_trait = &instance.instance_trait; + let optional_equal_instance_default = + instance.instance_default.as_ref().map(|d| quote!( = #d )); + ( + Some(quote!(#instance_generic)), + Some(quote!(#instance_generic: #instance_trait)), + Some( + quote!(#instance_generic: #instance_trait #optional_equal_instance_default), + ), + ) + } else { + (None, None, None) + }; let module_runtime_generic = &def.module_runtime_generic; let module_runtime_trait = &def.module_runtime_trait; @@ -255,22 +258,20 @@ impl StorageLineDefExt { hidden_crate: &proc_macro2::TokenStream, ) -> Self { let is_generic = match &storage_def.storage_type { - StorageLineTypeDef::Simple(value) => { - ext::type_contains_ident(&value, &def.module_runtime_generic) - }, - StorageLineTypeDef::Map(map) => { - ext::type_contains_ident(&map.key, &def.module_runtime_generic) - || ext::type_contains_ident(&map.value, &def.module_runtime_generic) - } - StorageLineTypeDef::DoubleMap(map) => { - ext::type_contains_ident(&map.key1, &def.module_runtime_generic) - || 
ext::type_contains_ident(&map.key2, &def.module_runtime_generic) - || ext::type_contains_ident(&map.value, &def.module_runtime_generic) - } - StorageLineTypeDef::NMap(map) => { - map.keys.iter().any(|key| ext::type_contains_ident(key, &def.module_runtime_generic)) - || ext::type_contains_ident(&map.value, &def.module_runtime_generic) - } + StorageLineTypeDef::Simple(value) => + ext::type_contains_ident(&value, &def.module_runtime_generic), + StorageLineTypeDef::Map(map) => + ext::type_contains_ident(&map.key, &def.module_runtime_generic) || + ext::type_contains_ident(&map.value, &def.module_runtime_generic), + StorageLineTypeDef::DoubleMap(map) => + ext::type_contains_ident(&map.key1, &def.module_runtime_generic) || + ext::type_contains_ident(&map.key2, &def.module_runtime_generic) || + ext::type_contains_ident(&map.value, &def.module_runtime_generic), + StorageLineTypeDef::NMap(map) => + map.keys + .iter() + .any(|key| ext::type_contains_ident(key, &def.module_runtime_generic)) || + ext::type_contains_ident(&map.value, &def.module_runtime_generic), }; let query_type = match &storage_def.storage_type { @@ -280,15 +281,13 @@ impl StorageLineDefExt { StorageLineTypeDef::NMap(map) => map.value.clone(), }; let is_option = ext::extract_type_option(&query_type).is_some(); - let value_type = ext::extract_type_option(&query_type).unwrap_or_else(|| query_type.clone()); + let value_type = + ext::extract_type_option(&query_type).unwrap_or_else(|| query_type.clone()); let module_runtime_generic = &def.module_runtime_generic; let module_runtime_trait = &def.module_runtime_trait; - let optional_storage_runtime_comma = if is_generic { - Some(quote!( #module_runtime_generic, )) - } else { - None - }; + let optional_storage_runtime_comma = + if is_generic { Some(quote!( #module_runtime_generic, )) } else { None }; let optional_storage_runtime_bound_comma = if is_generic { Some(quote!( #module_runtime_generic: #module_runtime_trait, )) } else { @@ -304,11 +303,8 @@ impl 
StorageLineDefExt { #storage_name<#optional_storage_runtime_comma #optional_instance_generic> ); - let optional_storage_where_clause = if is_generic { - def.where_clause.as_ref().map(|w| quote!( #w )) - } else { - None - }; + let optional_storage_where_clause = + if is_generic { def.where_clause.as_ref().map(|w| quote!( #w )) } else { None }; let storage_trait_truncated = match &storage_def.storage_type { StorageLineTypeDef::Simple(_) => { @@ -326,13 +322,15 @@ impl StorageLineDefExt { StorageLineTypeDef::NMap(map) => { let keygen = map.to_keygen_struct(hidden_crate); quote!( StorageNMap<#keygen, #value_type> ) - } + }, }; let storage_trait = quote!( storage::#storage_trait_truncated ); let storage_generator_trait = quote!( storage::generator::#storage_trait_truncated ); - let doc_attrs = storage_def.attrs.iter() + let doc_attrs = storage_def + .attrs + .iter() .filter_map(|a| a.parse_meta().ok()) .filter(|m| m.path().is_ident("doc")) .collect(); @@ -396,27 +394,28 @@ impl NMapDef { if self.keys.len() == 1 { let hasher = &self.hashers[0].to_storage_hasher_struct(); let key = &self.keys[0]; - return quote!( #scrate::storage::types::Key<#scrate::#hasher, #key> ); + return quote!( #scrate::storage::types::Key<#scrate::#hasher, #key> ) } - let key_hasher = self.keys.iter().zip(&self.hashers).map(|(key, hasher)| { - let hasher = hasher.to_storage_hasher_struct(); - quote!( #scrate::storage::types::Key<#scrate::#hasher, #key> ) - }) - .collect::>(); + let key_hasher = self + .keys + .iter() + .zip(&self.hashers) + .map(|(key, hasher)| { + let hasher = hasher.to_storage_hasher_struct(); + quote!( #scrate::storage::types::Key<#scrate::#hasher, #key> ) + }) + .collect::>(); quote!(( #(#key_hasher,)* )) } fn to_key_tuple(&self) -> proc_macro2::TokenStream { if self.keys.len() == 1 { let key = &self.keys[0]; - return quote!(#key); + return quote!(#key) } - let tuple = self.keys.iter().map(|key| { - quote!(#key) - }) - .collect::>(); + let tuple = self.keys.iter().map(|key| 
quote!(#key)).collect::>(); quote!(( #(#tuple,)* )) } } @@ -442,25 +441,25 @@ pub enum HasherKind { impl HasherKind { fn to_storage_hasher_struct(&self) -> proc_macro2::TokenStream { match self { - HasherKind::Blake2_256 => quote!( Blake2_256 ), - HasherKind::Blake2_128 => quote!( Blake2_128 ), - HasherKind::Blake2_128Concat => quote!( Blake2_128Concat ), - HasherKind::Twox256 => quote!( Twox256 ), - HasherKind::Twox128 => quote!( Twox128 ), - HasherKind::Twox64Concat => quote!( Twox64Concat ), - HasherKind::Identity => quote!( Identity ), + HasherKind::Blake2_256 => quote!(Blake2_256), + HasherKind::Blake2_128 => quote!(Blake2_128), + HasherKind::Blake2_128Concat => quote!(Blake2_128Concat), + HasherKind::Twox256 => quote!(Twox256), + HasherKind::Twox128 => quote!(Twox128), + HasherKind::Twox64Concat => quote!(Twox64Concat), + HasherKind::Identity => quote!(Identity), } } fn into_metadata(&self) -> proc_macro2::TokenStream { match self { - HasherKind::Blake2_256 => quote!( StorageHasher::Blake2_256 ), - HasherKind::Blake2_128 => quote!( StorageHasher::Blake2_128 ), - HasherKind::Blake2_128Concat => quote!( StorageHasher::Blake2_128Concat ), - HasherKind::Twox256 => quote!( StorageHasher::Twox256 ), - HasherKind::Twox128 => quote!( StorageHasher::Twox128 ), - HasherKind::Twox64Concat => quote!( StorageHasher::Twox64Concat ), - HasherKind::Identity => quote!( StorageHasher::Identity ), + HasherKind::Blake2_256 => quote!(StorageHasher::Blake2_256), + HasherKind::Blake2_128 => quote!(StorageHasher::Blake2_128), + HasherKind::Blake2_128Concat => quote!(StorageHasher::Blake2_128Concat), + HasherKind::Twox256 => quote!(StorageHasher::Twox256), + HasherKind::Twox128 => quote!(StorageHasher::Twox128), + HasherKind::Twox64Concat => quote!(StorageHasher::Twox64Concat), + HasherKind::Identity => quote!(StorageHasher::Identity), } } } @@ -502,5 +501,6 @@ pub fn decl_storage_impl(input: proc_macro::TokenStream) -> proc_macro::TokenStr #genesis_config #storage_struct 
#storage_info - ).into() + ) + .into() } diff --git a/frame/support/procedural/src/storage/parse.rs b/frame/support/procedural/src/storage/parse.rs index ca97b7957c108..d3b73843da179 100644 --- a/frame/support/procedural/src/storage/parse.rs +++ b/frame/support/procedural/src/storage/parse.rs @@ -17,8 +17,8 @@ //! Parsing of decl_storage input. -use frame_support_procedural_tools::{ToTokens, Parse, syn_ext as ext}; -use syn::{Ident, Token, spanned::Spanned}; +use frame_support_procedural_tools::{syn_ext as ext, Parse, ToTokens}; +use syn::{spanned::Spanned, Ident, Token}; mod keyword { syn::custom_keyword!(generate_storage_info); @@ -367,48 +367,35 @@ fn get_module_instance( it is now defined at frame_support::traits::Instance. Expect `Instance` found `{}`", instantiable.as_ref().unwrap(), ); - return Err(syn::Error::new(instantiable.span(), msg)); + return Err(syn::Error::new(instantiable.span(), msg)) } match (instance, instantiable, default_instance) { - (Some(instance), Some(instantiable), default_instance) => { + (Some(instance), Some(instantiable), default_instance) => Ok(Some(super::ModuleInstanceDef { instance_generic: instance, instance_trait: instantiable, instance_default: default_instance, - })) - }, + })), (None, None, None) => Ok(None), - (Some(instance), None, _) => Err( - syn::Error::new( - instance.span(), - format!( - "Expect instantiable trait bound for instance: {}. {}", - instance, - right_syntax, - ) - ) - ), - (None, Some(instantiable), _) => Err( - syn::Error::new( - instantiable.span(), - format!( - "Expect instance generic for bound instantiable: {}. {}", - instantiable, - right_syntax, - ) - ) - ), - (None, _, Some(default_instance)) => Err( - syn::Error::new( - default_instance.span(), - format!( - "Expect instance generic for default instance: {}. {}", - default_instance, - right_syntax, - ) - ) - ), + (Some(instance), None, _) => Err(syn::Error::new( + instance.span(), + format!("Expect instantiable trait bound for instance: {}. 
{}", instance, right_syntax,), + )), + (None, Some(instantiable), _) => Err(syn::Error::new( + instantiable.span(), + format!( + "Expect instance generic for bound instantiable: {}. {}", + instantiable, right_syntax, + ), + )), + (None, _, Some(default_instance)) => Err(syn::Error::new( + default_instance.span(), + format!( + "Expect instance generic for default instance: {}. {}", + default_instance, right_syntax, + ), + )), } } @@ -417,37 +404,37 @@ pub fn parse(input: syn::parse::ParseStream) -> syn::Result { - extra_genesis_config_lines.push(super::ExtraGenesisLineDef{ + extra_genesis_config_lines.push(super::ExtraGenesisLineDef { attrs: def.attrs.inner, name: def.extra_field.content, typ: def.extra_type, default: def.default_value.inner.map(|o| o.expr), }); - } + }, AddExtraGenesisLineEnum::AddExtraGenesisBuild(def) => { if extra_genesis_build.is_some() { return Err(syn::Error::new( def.span(), - "Only one build expression allowed for extra genesis" + "Only one build expression allowed for extra genesis", )) } extra_genesis_build = Some(def.expr.content); - } + }, } } @@ -496,68 +483,65 @@ fn parse_storage_line_defs( }; if let Some(ref config) = config { - storage_lines.iter().filter_map(|sl| sl.config.as_ref()).try_for_each(|other_config| { - if other_config == config { - Err(syn::Error::new( - config.span(), - "`config()`/`get()` with the same name already defined.", - )) - } else { - Ok(()) - } - })?; + storage_lines.iter().filter_map(|sl| sl.config.as_ref()).try_for_each( + |other_config| { + if other_config == config { + Err(syn::Error::new( + config.span(), + "`config()`/`get()` with the same name already defined.", + )) + } else { + Ok(()) + } + }, + )?; } let max_values = match &line.storage_type { - DeclStorageType::Map(_) | DeclStorageType::DoubleMap(_) | DeclStorageType::NMap(_) => { - line.max_values.inner.map(|i| i.expr.content) - }, - DeclStorageType::Simple(_) => { + DeclStorageType::Map(_) | DeclStorageType::DoubleMap(_) | 
DeclStorageType::NMap(_) => + line.max_values.inner.map(|i| i.expr.content), + DeclStorageType::Simple(_) => if let Some(max_values) = line.max_values.inner { let msg = "unexpected max_values attribute for storage value."; let span = max_values.max_values_keyword.span(); - return Err(syn::Error::new(span, msg)); + return Err(syn::Error::new(span, msg)) } else { Some(syn::parse_quote!(1u32)) - } - }, + }, }; let span = line.storage_type.span(); - let no_hasher_error = || syn::Error::new( - span, - "Default hasher has been removed, use explicit hasher(blake2_128_concat) instead." - ); + let no_hasher_error = || { + syn::Error::new( + span, + "Default hasher has been removed, use explicit hasher(blake2_128_concat) instead.", + ) + }; let storage_type = match line.storage_type { - DeclStorageType::Map(map) => super::StorageLineTypeDef::Map( - super::MapDef { - hasher: map.hasher.inner.ok_or_else(no_hasher_error)?.into(), - key: map.key, - value: map.value, - } - ), - DeclStorageType::DoubleMap(map) => super::StorageLineTypeDef::DoubleMap( - Box::new(super::DoubleMapDef { + DeclStorageType::Map(map) => super::StorageLineTypeDef::Map(super::MapDef { + hasher: map.hasher.inner.ok_or_else(no_hasher_error)?.into(), + key: map.key, + value: map.value, + }), + DeclStorageType::DoubleMap(map) => + super::StorageLineTypeDef::DoubleMap(Box::new(super::DoubleMapDef { hasher1: map.hasher1.inner.ok_or_else(no_hasher_error)?.into(), hasher2: map.hasher2.inner.ok_or_else(no_hasher_error)?.into(), key1: map.key1, key2: map.key2, value: map.value, - }) - ), - DeclStorageType::NMap(map) => super::StorageLineTypeDef::NMap( - super::NMapDef { - hashers: map - .storage_keys - .inner - .iter() - .map(|pair| Ok(pair.hasher.inner.clone().ok_or_else(no_hasher_error)?.into())) - .collect::, syn::Error>>()?, - keys: map.storage_keys.inner.iter().map(|pair| pair.key.clone()).collect(), - value: map.value, - } - ), + })), + DeclStorageType::NMap(map) => 
super::StorageLineTypeDef::NMap(super::NMapDef { + hashers: map + .storage_keys + .inner + .iter() + .map(|pair| Ok(pair.hasher.inner.clone().ok_or_else(no_hasher_error)?.into())) + .collect::, syn::Error>>()?, + keys: map.storage_keys.inner.iter().map(|pair| pair.key.clone()).collect(), + value: map.value, + }), DeclStorageType::Simple(expr) => super::StorageLineTypeDef::Simple(expr), }; diff --git a/frame/support/procedural/src/storage/print_pallet_upgrade.rs b/frame/support/procedural/src/storage/print_pallet_upgrade.rs index a6f64a588b633..03f09a7edb48e 100644 --- a/frame/support/procedural/src/storage/print_pallet_upgrade.rs +++ b/frame/support/procedural/src/storage/print_pallet_upgrade.rs @@ -1,6 +1,6 @@ use super::StorageLineTypeDef; -use quote::ToTokens; use frame_support_procedural_tools::clean_type_string; +use quote::ToTokens; /// Environment variable that tells us to print pallet upgrade helper. const PRINT_PALLET_UPGRADE: &str = "PRINT_PALLET_UPGRADE"; @@ -10,7 +10,7 @@ fn check_print_pallet_upgrade() -> bool { } /// Convert visibilty as now objects are defined in a module. 
-fn convert_vis(vis: &syn::Visibility) -> &'static str{ +fn convert_vis(vis: &syn::Visibility) -> &'static str { match vis { syn::Visibility::Inherited => "pub(super)", syn::Visibility::Public(_) => "pub", @@ -31,23 +31,13 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { let scrate = "e::quote!(frame_support); - let config_gen = if def.optional_instance.is_some() { - "" - } else { - Default::default() - }; + let config_gen = + if def.optional_instance.is_some() { "" } else { Default::default() }; - let impl_gen = if def.optional_instance.is_some() { - ", I: 'static>" - } else { - "" - }; + let impl_gen = + if def.optional_instance.is_some() { ", I: 'static>" } else { "" }; - let decl_gen = if def.optional_instance.is_some() { - "" - } else { - "" - }; + let decl_gen = if def.optional_instance.is_some() { "" } else { "" }; let full_decl_gen = if def.optional_instance.is_some() { ", I: 'static = ()>" @@ -55,17 +45,9 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { "" }; - let use_gen = if def.optional_instance.is_some() { - "" - } else { - "" - }; + let use_gen = if def.optional_instance.is_some() { "" } else { "" }; - let use_gen_tuple = if def.optional_instance.is_some() { - "<(T, I)>" - } else { - "" - }; + let use_gen_tuple = if def.optional_instance.is_some() { "<(T, I)>" } else { "" }; let mut genesis_config = String::new(); let mut genesis_build = String::new(); @@ -80,17 +62,11 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { }, }; - let genesis_config_impl_gen = if genesis_config_def.is_generic { - impl_gen - } else { - Default::default() - }; + let genesis_config_impl_gen = + if genesis_config_def.is_generic { impl_gen } else { Default::default() }; - let genesis_config_use_gen = if genesis_config_def.is_generic { - use_gen - } else { - Default::default() - }; + let genesis_config_use_gen = + if genesis_config_def.is_generic { use_gen } else { Default::default() }; let genesis_config_decl_gen 
= if genesis_config_def.is_generic { if def.optional_instance.is_some() { @@ -105,26 +81,31 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { let mut genesis_config_decl_fields = String::new(); let mut genesis_config_default_fields = String::new(); for field in &genesis_config_def.fields { - genesis_config_decl_fields.push_str(&format!(" + genesis_config_decl_fields.push_str(&format!( + " {attrs}pub {name}: {typ},", - attrs = field.attrs.iter() - .fold(String::new(), |res, attr| { - format!("{}#[{}] + attrs = field.attrs.iter().fold(String::new(), |res, attr| { + format!( + "{}#[{}] ", - res, attr.to_token_stream()) - }), + res, + attr.to_token_stream() + ) + }), name = field.name, typ = to_cleaned_string(&field.typ), )); - genesis_config_default_fields.push_str(&format!(" + genesis_config_default_fields.push_str(&format!( + " {name}: {default},", name = field.name, default = to_cleaned_string(&field.default), )); } - genesis_config = format!(" + genesis_config = format!( + " #[pallet::genesis_config] pub struct GenesisConfig{genesis_config_decl_gen} // TODO_MAYBE_WHERE_CLAUSE @@ -147,16 +128,18 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { genesis_config_use_gen = genesis_config_use_gen, ); - let genesis_config_build = genesis_config_builder_def.blocks.iter() - .fold(String::new(), |res, block| { - format!("{} + let genesis_config_build = + genesis_config_builder_def.blocks.iter().fold(String::new(), |res, block| { + format!( + "{} {}", res, to_cleaned_string(block), ) }); - genesis_build = format!(" + genesis_build = format!( + " #[pallet::genesis_build] impl{impl_gen} GenesisBuild{use_gen} for GenesisConfig{genesis_config_use_gen} // TODO_MAYBE_WHERE_CLAUSE @@ -176,7 +159,8 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { let storage_vis = convert_vis(&line.visibility); let getter = if let Some(getter) = &line.getter { - format!(" + format!( + " #[pallet::getter(fn {getter})]", getter = getter ) 
@@ -186,9 +170,12 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { let value_type = &line.value_type; - let default_value_type_value = line.default_value.as_ref() + let default_value_type_value = line + .default_value + .as_ref() .map(|default_expr| { - format!(" + format!( + " #[pallet::type_value] {storage_vis} fn DefaultFor{name} /* TODO_MAYBE_GENERICS */ () -> {value_type} {{ {default_expr} @@ -212,13 +199,16 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { ", ValueQuery" }; - let comma_default_value_getter_name = line.default_value.as_ref() + let comma_default_value_getter_name = line + .default_value + .as_ref() .map(|_| format!(", DefaultFor{}", line.name)) .unwrap_or_else(String::new); let typ = match &line.storage_type { StorageLineTypeDef::Map(map) => { - format!("StorageMap<_, {hasher}, {key}, {value_type}{comma_query_kind}\ + format!( + "StorageMap<_, {hasher}, {key}, {value_type}{comma_query_kind}\ {comma_default_value_getter_name}>", hasher = &map.hasher.to_storage_hasher_struct(), key = to_cleaned_string(&map.key), @@ -228,7 +218,8 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { ) }, StorageLineTypeDef::DoubleMap(double_map) => { - format!("StorageDoubleMap<_, {hasher1}, {key1}, {hasher2}, {key2}, {value_type}\ + format!( + "StorageDoubleMap<_, {hasher1}, {key1}, {hasher2}, {key2}, {value_type}\ {comma_query_kind}{comma_default_value_getter_name}>", hasher1 = double_map.hasher1.to_storage_hasher_struct(), key1 = to_cleaned_string(&double_map.key1), @@ -240,16 +231,18 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { ) }, StorageLineTypeDef::NMap(map) => { - format!("StorageNMap<_, {keygen}, {value_type}{comma_query_kind}\ + format!( + "StorageNMap<_, {keygen}, {value_type}{comma_query_kind}\ {comma_default_value_getter_name}>", keygen = map.to_keygen_struct(&def.hidden_crate), value_type = to_cleaned_string(&value_type), comma_query_kind = comma_query_kind, 
comma_default_value_getter_name = comma_default_value_getter_name, ) - } + }, StorageLineTypeDef::Simple(_) => { - format!("StorageValue<_, {value_type}{comma_query_kind}\ + format!( + "StorageValue<_, {value_type}{comma_query_kind}\ {comma_default_value_getter_name}>", value_type = to_cleaned_string(&value_type), comma_query_kind = comma_query_kind, @@ -265,7 +258,8 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { "" }; - storages.push_str(&format!(" + storages.push_str(&format!( + " {default_value_type_value}{doc} #[pallet::storage]{getter} {storage_vis} type {name}{full_decl_gen} = {typ};{additional_comment}", @@ -276,21 +270,21 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { full_decl_gen = full_decl_gen, typ = typ, additional_comment = additional_comment, - doc = line.doc_attrs.iter() - .fold(String::new(), |mut res, attr| { - if let syn::Meta::NameValue(name_value) = attr { - if name_value.path.is_ident("doc") { - if let syn::Lit::Str(string) = &name_value.lit { - res = format!("{} + doc = line.doc_attrs.iter().fold(String::new(), |mut res, attr| { + if let syn::Meta::NameValue(name_value) = attr { + if name_value.path.is_ident("doc") { + if let syn::Lit::Str(string) = &name_value.lit { + res = format!( + "{} ///{}", - res, - string.value(), - ); - } + res, + string.value(), + ); } } - res - }), + } + res + }), )); } @@ -308,7 +302,8 @@ pub fn maybe_print_pallet_upgrade(def: &super::DeclStorageDefExt) { "" }; - println!(" + println!( + " // Template for pallet upgrade for {pallet_name} pub use pallet::*; diff --git a/frame/support/procedural/src/storage/storage_info.rs b/frame/support/procedural/src/storage/storage_info.rs index c7707f6cb724b..844896409f851 100644 --- a/frame/support/procedural/src/storage/storage_info.rs +++ b/frame/support/procedural/src/storage/storage_info.rs @@ -17,9 +17,9 @@ //! Implementation of trait `StorageInfoTrait` on module structure. 
+use super::DeclStorageDefExt; use proc_macro2::TokenStream; use quote::quote; -use super::DeclStorageDefExt; pub fn impl_storage_info(def: &DeclStorageDefExt) -> TokenStream { let scrate = &def.hidden_crate; diff --git a/frame/support/procedural/src/storage/storage_struct.rs b/frame/support/procedural/src/storage/storage_struct.rs index 3b182983cd4ea..b318225681c1d 100644 --- a/frame/support/procedural/src/storage/storage_struct.rs +++ b/frame/support/procedural/src/storage/storage_struct.rs @@ -17,16 +17,15 @@ //! Implementation of storage structures and implementation of storage traits on them. -use proc_macro2::{TokenStream, Ident, Span}; +use super::{instance_trait::INHERENT_INSTANCE_NAME, DeclStorageDefExt, StorageLineTypeDef}; +use proc_macro2::{Ident, Span, TokenStream}; use quote::quote; -use super::{ - DeclStorageDefExt, StorageLineTypeDef, - instance_trait::INHERENT_INSTANCE_NAME, -}; fn from_optional_value_to_query(is_option: bool, default: &Option) -> TokenStream { - let default = default.as_ref().map(|d| quote!( #d )) - .unwrap_or_else(|| quote!( Default::default() )); + let default = default + .as_ref() + .map(|d| quote!( #d )) + .unwrap_or_else(|| quote!(Default::default())); if !is_option { // raw type case @@ -40,10 +39,10 @@ fn from_optional_value_to_query(is_option: bool, default: &Option) -> fn from_query_to_optional_value(is_option: bool) -> TokenStream { if !is_option { // raw type case - quote!( Some(v) ) + quote!(Some(v)) } else { // Option<> type case - quote!( v ) + quote!(v) } } @@ -52,7 +51,6 @@ pub fn decl_and_impl(def: &DeclStorageDefExt) -> TokenStream { let mut impls = TokenStream::new(); for line in &def.storage_lines { - // Propagate doc attributes. 
let attrs = &line.doc_attrs; @@ -60,7 +58,8 @@ pub fn decl_and_impl(def: &DeclStorageDefExt) -> TokenStream { let optional_storage_runtime_comma = &line.optional_storage_runtime_comma; let optional_storage_runtime_bound_comma = &line.optional_storage_runtime_bound_comma; let optional_storage_where_clause = &line.optional_storage_where_clause; - let optional_instance_bound_optional_default = &def.optional_instance_bound_optional_default; + let optional_instance_bound_optional_default = + &def.optional_instance_bound_optional_default; let optional_instance_bound = &def.optional_instance_bound; let optional_instance = &def.optional_instance; let name = &line.name; @@ -87,10 +86,8 @@ pub fn decl_and_impl(def: &DeclStorageDefExt) -> TokenStream { Ident::new(INHERENT_INSTANCE_NAME, Span::call_site()) }; - let storage_name_bstr = syn::LitByteStr::new( - line.name.to_string().as_ref(), - line.name.span() - ); + let storage_name_bstr = + syn::LitByteStr::new(line.name.to_string().as_ref(), line.name.span()); let storage_generator_trait = &line.storage_generator_trait; let storage_struct = &line.storage_struct; @@ -242,7 +239,7 @@ pub fn decl_and_impl(def: &DeclStorageDefExt) -> TokenStream { } } ) - } + }, }; let max_values = if let Some(max_values) = &line.max_values { diff --git a/frame/support/procedural/src/storage/store_trait.rs b/frame/support/procedural/src/storage/store_trait.rs index 18adadbc61050..7dde92cf9a75d 100644 --- a/frame/support/procedural/src/storage/store_trait.rs +++ b/frame/support/procedural/src/storage/store_trait.rs @@ -17,26 +17,26 @@ //! Declaration of store trait and implementation on module structure. 
+use super::DeclStorageDefExt; use proc_macro2::TokenStream; use quote::quote; -use super::DeclStorageDefExt; pub fn decl_and_impl(def: &DeclStorageDefExt) -> TokenStream { - let decl_store_items = def.storage_lines.iter() - .map(|sline| &sline.name) - .fold(TokenStream::new(), |mut items, name| { + let decl_store_items = def.storage_lines.iter().map(|sline| &sline.name).fold( + TokenStream::new(), + |mut items, name| { items.extend(quote!(type #name;)); items - }); + }, + ); - let impl_store_items = def.storage_lines.iter() - .fold(TokenStream::new(), |mut items, line| { - let name = &line.name; - let storage_struct = &line.storage_struct; + let impl_store_items = def.storage_lines.iter().fold(TokenStream::new(), |mut items, line| { + let name = &line.name; + let storage_struct = &line.storage_struct; - items.extend(quote!(type #name = #storage_struct;)); - items - }); + items.extend(quote!(type #name = #storage_struct;)); + items + }); let visibility = &def.visibility; let store_trait = &def.store_trait; diff --git a/frame/support/procedural/src/transactional.rs b/frame/support/procedural/src/transactional.rs index 6ef26834cf024..403f1cd02bac7 100644 --- a/frame/support/procedural/src/transactional.rs +++ b/frame/support/procedural/src/transactional.rs @@ -15,10 +15,10 @@ // See the License for the specific language governing permissions and // limitations under the License. 
+use frame_support_procedural_tools::generate_crate_access_2018; use proc_macro::TokenStream; use quote::quote; use syn::{ItemFn, Result}; -use frame_support_procedural_tools::generate_crate_access_2018; pub fn transactional(_attr: TokenStream, input: TokenStream) -> Result { let ItemFn { attrs, vis, sig, block } = syn::parse(input)?; diff --git a/frame/support/procedural/tools/derive/src/lib.rs b/frame/support/procedural/tools/derive/src/lib.rs index 15394e0c559d4..7922105895608 100644 --- a/frame/support/procedural/tools/derive/src/lib.rs +++ b/frame/support/procedural/tools/derive/src/lib.rs @@ -23,14 +23,14 @@ use proc_macro::TokenStream; use proc_macro2::Span; -use syn::parse_macro_input; use quote::quote; +use syn::parse_macro_input; pub(crate) fn fields_idents( fields: impl Iterator, ) -> impl Iterator { fields.enumerate().map(|(ix, field)| { - field.ident.map(|i| quote!{#i}).unwrap_or_else(|| { + field.ident.map(|i| quote! {#i}).unwrap_or_else(|| { let f_ix: syn::Ident = syn::Ident::new(&format!("f_{}", ix), Span::call_site()); quote!( #f_ix ) }) @@ -42,10 +42,7 @@ pub(crate) fn fields_access( ) -> impl Iterator { fields.enumerate().map(|(ix, field)| { field.ident.map(|i| quote!( #i )).unwrap_or_else(|| { - let f_ix: syn::Index = syn::Index { - index: ix as u32, - span: Span::call_site(), - }; + let f_ix: syn::Index = syn::Index { index: ix as u32, span: Span::call_site() }; quote!( #f_ix ) }) }) @@ -64,15 +61,10 @@ pub fn derive_parse(input: TokenStream) -> TokenStream { } fn derive_parse_struct(input: syn::ItemStruct) -> TokenStream { - let syn::ItemStruct { - ident, - generics, - fields, - .. - } = input; + let syn::ItemStruct { ident, generics, fields, .. } = input; let field_names = { let name = fields_idents(fields.iter().map(Clone::clone)); - quote!{ + quote! 
{ #( #name, )* @@ -110,12 +102,7 @@ pub fn derive_totokens(input: TokenStream) -> TokenStream { } fn derive_totokens_struct(input: syn::ItemStruct) -> TokenStream { - let syn::ItemStruct { - ident, - generics, - fields, - .. - } = input; + let syn::ItemStruct { ident, generics, fields, .. } = input; let fields = fields_access(fields.iter().map(Clone::clone)); let tokens = quote! { @@ -133,12 +120,7 @@ fn derive_totokens_struct(input: syn::ItemStruct) -> TokenStream { } fn derive_totokens_enum(input: syn::ItemEnum) -> TokenStream { - let syn::ItemEnum { - ident, - generics, - variants, - .. - } = input; + let syn::ItemEnum { ident, generics, variants, .. } = input; let variants = variants.iter().map(|v| { let v_ident = v.ident.clone(); let fields_build = if v.fields.iter().count() > 0 { diff --git a/frame/support/procedural/tools/src/docs.rs b/frame/support/procedural/tools/src/docs.rs index d51313a10804f..e5a01747fcaf7 100644 --- a/frame/support/procedural/tools/src/docs.rs +++ b/frame/support/procedural/tools/src/docs.rs @@ -18,7 +18,8 @@ #[cfg(feature = "metadata-docs")] /// Return all doc attributes literals found. 
pub fn get_doc_literals(attrs: &Vec) -> Vec { - attrs.iter() + attrs + .iter() .filter_map(|attr| { if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() { if meta.path.get_ident().map_or(false, |ident| ident == "doc") { diff --git a/frame/support/procedural/tools/src/lib.rs b/frame/support/procedural/tools/src/lib.rs index 8b8887f110f13..c262e8f3c58a6 100644 --- a/frame/support/procedural/tools/src/lib.rs +++ b/frame/support/procedural/tools/src/lib.rs @@ -23,8 +23,8 @@ pub use frame_support_procedural_tools_derive::*; use proc_macro_crate::{crate_name, FoundCrate}; -use syn::parse::Error; use quote::quote; +use syn::parse::Error; mod docs; pub mod syn_ext; @@ -32,7 +32,7 @@ pub mod syn_ext; pub use docs::get_doc_literals; // FIXME #1569, remove the following functions, which are copied from sp-api-macros -use proc_macro2::{TokenStream, Span}; +use proc_macro2::{Span, TokenStream}; use syn::Ident; fn generate_hidden_includes_mod_name(unique_id: &str) -> Ident { @@ -42,7 +42,7 @@ fn generate_hidden_includes_mod_name(unique_id: &str) -> Ident { /// Generates the access to the `frame-support` crate. 
pub fn generate_crate_access(unique_id: &str, def_crate: &str) -> TokenStream { if std::env::var("CARGO_PKG_NAME").unwrap() == def_crate { - quote::quote!( frame_support ) + quote::quote!(frame_support) } else { let mod_name = generate_hidden_includes_mod_name(unique_id); quote::quote!( self::#mod_name::hidden_include ) @@ -58,12 +58,8 @@ pub fn generate_crate_access_2018(def_crate: &str) -> Result let name = def_crate.to_string().replace("-", "_"); Ok(syn::Ident::new(&name, Span::call_site())) }, - Ok(FoundCrate::Name(name)) => { - Ok(Ident::new(&name, Span::call_site())) - }, - Err(e) => { - Err(Error::new(Span::call_site(), e)) - } + Ok(FoundCrate::Name(name)) => Ok(Ident::new(&name, Span::call_site())), + Err(e) => Err(Error::new(Span::call_site(), e)), } } @@ -85,7 +81,7 @@ pub fn generate_hidden_includes(unique_id: &str, def_crate: &str) -> TokenStream Err(e) => { let err = Error::new(Span::call_site(), e).to_compile_error(); quote!( #err ) - } + }, } } diff --git a/frame/support/procedural/tools/src/syn_ext.rs b/frame/support/procedural/tools/src/syn_ext.rs index 36bd03fed1bef..a9e9ef573985f 100644 --- a/frame/support/procedural/tools/src/syn_ext.rs +++ b/frame/support/procedural/tools/src/syn_ext.rs @@ -19,11 +19,15 @@ //! Extension to syn types, mainly for parsing // end::description[] -use syn::{visit::{Visit, self}, parse::{Parse, ParseStream, Result}, Ident}; +use frame_support_procedural_tools_derive::{Parse, ToTokens}; use proc_macro2::{TokenStream, TokenTree}; use quote::ToTokens; use std::iter::once; -use frame_support_procedural_tools_derive::{ToTokens, Parse}; +use syn::{ + parse::{Parse, ParseStream, Result}, + visit::{self, Visit}, + Ident, +}; /// stop parsing here getting remaining token as content /// Warn duplicate stream (part of) @@ -35,7 +39,6 @@ pub struct StopParse { // inner macro really dependant on syn naming convention, do not export macro_rules! 
groups_impl { ($name:ident, $tok:ident, $deli:ident, $parse:ident) => { - #[derive(Debug)] pub struct $name